column             type       stats
id_within_dataset  int64      46 – 2.71M
snippet            string     lengths 63 – 481k
tokens             sequence   lengths 20 – 15.6k
language           string     2 values
nl                 string     lengths 1 – 32.4k
is_duplicated      bool       2 classes
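Each record below repeats these six fields in order. As a minimal sketch of one record under this schema (plain Python; values abridged from the first record below; the dict layout is illustrative only, not part of any loading API):

record = {
    "id_within_dataset": 1743640,  # int64 row identifier
    "snippet": "def convert_frequency(value): ...",  # raw source code (abridged)
    "tokens": ["def", "convert_frequency", "(", "value", ")", ":"],  # code token sequence (abridged)
    "language": "python",  # one of the 2 language values
    "nl": "! @brief Applies scale suffix to frequency value string.",  # natural-language description
    "is_duplicated": False,  # duplication flag
}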
1,743,640
def convert_frequency(value):
    """! @brief Applies scale suffix to frequency value string."""
    value = value.strip()
    suffix = value[-1].lower()
    if suffix in ('k', 'm'):
        value = int(value[:-1])
        if suffix == 'k':
            value *= 1000
        elif suffix == 'm':
            value *= 1000000
        return value
    else:
        return int(value)
[ "def", "convert_frequency", "(", "value", ")", ":", "value", "=", "value", ".", "strip", "(", ")", "suffix", "=", "value", "[", "-", "1", "]", ".", "lower", "(", ")", "if", "suffix", "in", "(", "'k'", ",", "'m'", ")", ":", "value", "=", "int", "(", "value", "[", ":", "-", "1", "]", ")", "if", "suffix", "==", "'k'", ":", "value", "*=", "1000", "elif", "suffix", "==", "'m'", ":", "value", "*=", "1000000", "return", "value", "else", ":", "return", "int", "(", "value", ")" ]
python
! @brief Applies scale suffix to frequency value string.
false
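A quick hypothetical sanity check of the convert_frequency snippet above (assumes the function is in scope; not part of the dataset record):

assert convert_frequency("32k") == 32000    # 'k' suffix scales by 1,000
assert convert_frequency("8M") == 8000000   # 'M' suffix scales by 1,000,000 (case-insensitive)
assert convert_frequency("48000") == 48000  # no suffix: plain int conversion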
2,478,306
def format_h1(s, format="text", indents=0):
    """
    Encloses string in format text

    Args:
      s: string
      format: string starting with "text", "markdown", or "rest"
      indents: number of leading intenting spaces

    Returns: list

    >>> print("\\n".join(format_h2("Header 1", indents=10)))
              Header 1
              --------

    >>> print("\\n".join(format_h2("Header 1", "markdown", 0)))
    ## Header 1
    """
    _CHAR = "="
    if format.startswith("text"):
        return format_underline(s, _CHAR, indents)
    elif format.startswith("markdown"):
        return ["# {}".format(s)]
    elif format.startswith("rest"):
        return format_underline(s, _CHAR, 0)
[ "def", "format_h1", "(", "s", ",", "format", "=", "\"text\"", ",", "indents", "=", "0", ")", ":", "_CHAR", "=", "\"=\"", "if", "format", ".", "startswith", "(", "\"text\"", ")", ":", "return", "format_underline", "(", "s", ",", "_CHAR", ",", "indents", ")", "elif", "format", ".", "startswith", "(", "\"markdown\"", ")", ":", "return", "[", "\"# {}\"", ".", "format", "(", "s", ")", "]", "elif", "format", ".", "startswith", "(", "\"rest\"", ")", ":", "return", "format_underline", "(", "s", ",", "_CHAR", ",", "0", ")" ]
python
Encloses string in format text Args: s: string format: string starting with "text", "markdown", or "rest" indents: number of leading intenting spaces Returns: list >>> print("\\n".join(format_h2("Header 1", indents=10))) Header 1 -------- >>> print("\\n".join(format_h2("Header 1", "markdown", 0))) ## Header 1
false
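Only the markdown branch of format_h1 above is self-contained (the other branches call the external format_underline helper), so a hypothetical check limited to that branch (assumes the function is in scope):

assert format_h1("Header 1", "markdown") == ["# Header 1"]  # markdown branch needs no helper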
1,820,432
def from_internal(self, attribute_profile, internal_dict):
    """
    Converts the internal data to "type"
    :type attribute_profile: str
    :type internal_dict: dict[str, str]
    :rtype: dict[str, str]
    :param attribute_profile: To which external type to convert (ex: oidc, saml, ...)
    :param internal_dict: attributes to map
    :return: attribute values and names in the specified "profile"
    """
    external_dict = {}
    for internal_attribute_name in internal_dict:
        try:
            attribute_mapping = self.from_internal_attributes[internal_attribute_name]
        except KeyError:
            logger.debug("no attribute mapping found for the internal attribute '%s'", internal_attribute_name)
            continue

        if attribute_profile not in attribute_mapping:
            # skip this internal attribute if we have no mapping in the specified profile
            logger.debug("no mapping found for '%s' in attribute profile '%s'" %
                         (internal_attribute_name, attribute_profile))
            continue

        external_attribute_names = self.from_internal_attributes[internal_attribute_name][attribute_profile]
        # select the first attribute name
        external_attribute_name = external_attribute_names[0]
        logger.debug("frontend attribute %s mapped from %s" % (external_attribute_name, internal_attribute_name))

        if self.separator in external_attribute_name:
            nested_attribute_names = external_attribute_name.split(self.separator)
            nested_dict = self._create_nested_attribute_value(nested_attribute_names[1:],
                                                              internal_dict[internal_attribute_name])
            external_dict[nested_attribute_names[0]] = nested_dict
        else:
            external_dict[external_attribute_name] = internal_dict[internal_attribute_name]

    return external_dict
[ "def", "from_internal", "(", "self", ",", "attribute_profile", ",", "internal_dict", ")", ":", "external_dict", "=", "{", "}", "for", "internal_attribute_name", "in", "internal_dict", ":", "try", ":", "attribute_mapping", "=", "self", ".", "from_internal_attributes", "[", "internal_attribute_name", "]", "except", "KeyError", ":", "logger", ".", "debug", "(", "\"no attribute mapping found for the internal attribute '%s'\"", ",", "internal_attribute_name", ")", "continue", "if", "attribute_profile", "not", "in", "attribute_mapping", ":", "logger", ".", "debug", "(", "\"no mapping found for '%s' in attribute profile '%s'\"", "%", "(", "internal_attribute_name", ",", "attribute_profile", ")", ")", "continue", "external_attribute_names", "=", "self", ".", "from_internal_attributes", "[", "internal_attribute_name", "]", "[", "attribute_profile", "]", "external_attribute_name", "=", "external_attribute_names", "[", "0", "]", "logger", ".", "debug", "(", "\"frontend attribute %s mapped from %s\"", "%", "(", "external_attribute_name", ",", "internal_attribute_name", ")", ")", "if", "self", ".", "separator", "in", "external_attribute_name", ":", "nested_attribute_names", "=", "external_attribute_name", ".", "split", "(", "self", ".", "separator", ")", "nested_dict", "=", "self", ".", "_create_nested_attribute_value", "(", "nested_attribute_names", "[", "1", ":", "]", ",", "internal_dict", "[", "internal_attribute_name", "]", ")", "external_dict", "[", "nested_attribute_names", "[", "0", "]", "]", "=", "nested_dict", "else", ":", "external_dict", "[", "external_attribute_name", "]", "=", "internal_dict", "[", "internal_attribute_name", "]", "return", "external_dict" ]
python
Converts the internal data to "type" :type attribute_profile: str :type internal_dict: dict[str, str] :rtype: dict[str, str] :param attribute_profile: To which external type to convert (ex: oidc, saml, ...) :param internal_dict: attributes to map :return: attribute values and names in the specified "profile"
false
1,952,276
def weather_at_place(self, name):
    """
    Queries the OWM Weather API for the currently observed weather at the
    specified toponym (eg: "London,uk")

    :param name: the location's toponym
    :type name: str or unicode
    :returns: an *Observation* instance or ``None`` if no weather data is
        available
    :raises: *ParseResponseException* when OWM Weather API responses' data
        cannot be parsed or *APICallException* when OWM Weather API can not be
        reached
    """
    assert isinstance(name, str), "Value must be a string"
    encoded_name = name
    params = {'q': encoded_name, 'lang': self._language}
    uri = http_client.HttpClient.to_url(OBSERVATION_URL,
                                        self._API_key,
                                        self._subscription_type,
                                        self._use_ssl)
    _, json_data = self._wapi.cacheable_get_json(uri, params=params)
    return self._parsers['observation'].parse_JSON(json_data)
[ "def", "weather_at_place", "(", "self", ",", "name", ")", ":", "assert", "isinstance", "(", "name", ",", "str", ")", ",", "\"Value must be a string\"", "encoded_name", "=", "name", "params", "=", "{", "'q'", ":", "encoded_name", ",", "'lang'", ":", "self", ".", "_language", "}", "uri", "=", "http_client", ".", "HttpClient", ".", "to_url", "(", "OBSERVATION_URL", ",", "self", ".", "_API_key", ",", "self", ".", "_subscription_type", ",", "self", ".", "_use_ssl", ")", "_", ",", "json_data", "=", "self", ".", "_wapi", ".", "cacheable_get_json", "(", "uri", ",", "params", "=", "params", ")", "return", "self", ".", "_parsers", "[", "'observation'", "]", ".", "parse_JSON", "(", "json_data", ")" ]
python
Queries the OWM Weather API for the currently observed weather at the specified toponym (eg: "London,uk") :param name: the location's toponym :type name: str or unicode :returns: an *Observation* instance or ``None`` if no weather data is available :raises: *ParseResponseException* when OWM Weather API responses' data cannot be parsed or *APICallException* when OWM Weather API can not be reached
false
1,887,852
def _check_settings(self, app):
    """Verify required settings. Produce a helpful error messages for incorrect settings."""

    # Check for invalid settings
    # --------------------------

    # Check self.UserInvitationClass and USER_ENABLE_INVITE_USER
    if self.USER_ENABLE_INVITE_USER and not self.UserInvitationClass:
        raise ConfigError(
            'UserInvitationClass is missing while USER_ENABLE_INVITE_USER is True.' \
            ' Specify UserInvitationClass with UserManager(app, db, User, UserInvitationClass=...' \
            ' or set USER_ENABLE_INVITE_USER=False.')

    # Check for deprecated settings
    # -----------------------------

    # Check for deprecated USER_ENABLE_CONFIRM_EMAIL
    setting = app.config.get('USER_ENABLE_LOGIN_WITHOUT_CONFIRM_EMAIL', None)
    if setting is not None:
        print(
            'Deprecation warning: USER_ENABLE_LOGIN_WITHOUT_CONFIRM_EMAIL'\
            ' will be deprecated.' \
            ' It has been replaced by USER_ALLOW_LOGIN_WITHOUT_CONFIRMED_EMAIL.'\
            ' Please change this as soon as possible.')
        self.USER_ALLOW_LOGIN_WITHOUT_CONFIRMED_EMAIL = setting

    # Check for deprecated USER_ENABLE_RETYPE_PASSWORD
    setting = app.config.get('USER_ENABLE_RETYPE_PASSWORD', None)
    if setting is not None:
        print(
            'Deprecation warning: USER_ENABLE_RETYPE_PASSWORD'\
            ' will be deprecated.' \
            ' It has been replaced with USER_REQUIRE_RETYPE_PASSWORD.'\
            ' Please change this as soon as possible.')
        self.USER_REQUIRE_RETYPE_PASSWORD = setting

    # Check for deprecated USER_SHOW_USERNAME_EMAIL_DOES_NOT_EXIST
    setting = app.config.get('USER_SHOW_USERNAME_EMAIL_DOES_NOT_EXIST', None)
    if setting is not None:
        print(
            'Deprecation warning: USER_SHOW_USERNAME_EMAIL_DOES_NOT_EXIST' \
            ' will be deprecated.' \
            ' It has been replaced with USER_SHOW_USERNAME_DOES_NOT_EXIST'
            ' and USER_SHOW_EMAIL_DOES_NOT_EXIST.'
            ' Please change this as soon as possible.')
        self.USER_SHOW_USERNAME_DOES_NOT_EXIST = setting
        self.USER_SHOW_EMAIL_DOES_NOT_EXIST = setting

    # Check for deprecated USER_PASSWORD_HASH
    setting = app.config.get('USER_PASSWORD_HASH', None)
    if setting is not None:
        print(
            "Deprecation warning: USER_PASSWORD_HASH=<string>"\
            " will be deprecated."\
            " It has been replaced with USER_PASSLIB_CRYPTCONTEXT_SCHEMES=<list>."
            " Please change USER_PASSWORD_HASH='something' to"\
            " USER_PASSLIB_CRYPTCONTEXT_SCHEMES=['something'] as soon as possible.")
        self.USER_PASSLIB_CRYPTCONTEXT_SCHEMES = [setting]

    # Check that USER_EMAIL_SENDER_EMAIL is set when USER_ENABLE_EMAIL is True
    if not self.USER_EMAIL_SENDER_EMAIL and self.USER_ENABLE_EMAIL:
        raise ConfigError(
            'USER_EMAIL_SENDER_EMAIL is missing while USER_ENABLE_EMAIL is True.'\
            ' specify USER_EMAIL_SENDER_EMAIL (and USER_EMAIL_SENDER_NAME) or set USER_ENABLE_EMAIL to False.')

    # Disable settings that rely on a feature setting that's not enabled
    # ------------------------------------------------------------------

    # USER_ENABLE_REGISTER=True must have USER_ENABLE_USERNAME=True or USER_ENABLE_EMAIL=True.
    if not self.USER_ENABLE_USERNAME and not self.USER_ENABLE_EMAIL:
        self.USER_ENABLE_REGISTER = False

    # Settings that depend on USER_ENABLE_EMAIL
    if not self.USER_ENABLE_EMAIL:
        self.USER_ENABLE_CONFIRM_EMAIL = False
        self.USER_ENABLE_MULTIPLE_EMAILS = False
        self.USER_ENABLE_FORGOT_PASSWORD = False
        self.USER_SEND_PASSWORD_CHANGED_EMAIL = False
        self.USER_SEND_REGISTERED_EMAIL = False
        self.USER_SEND_USERNAME_CHANGED_EMAIL = False
        self.USER_REQUIRE_INVITATION = False

    # Settings that depend on USER_ENABLE_USERNAME
    if not self.USER_ENABLE_USERNAME:
        self.USER_ENABLE_CHANGE_USERNAME = False
[ "def", "_check_settings", "(", "self", ",", "app", ")", ":", "if", "self", ".", "USER_ENABLE_INVITE_USER", "and", "not", "self", ".", "UserInvitationClass", ":", "raise", "ConfigError", "(", "'UserInvitationClass is missing while USER_ENABLE_INVITE_USER is True.'", "' Specify UserInvitationClass with UserManager(app, db, User, UserInvitationClass=...'", "' or set USER_ENABLE_INVITE_USER=False.'", ")", "setting", "=", "app", ".", "config", ".", "get", "(", "'USER_ENABLE_LOGIN_WITHOUT_CONFIRM_EMAIL'", ",", "None", ")", "if", "setting", "is", "not", "None", ":", "print", "(", "'Deprecation warning: USER_ENABLE_LOGIN_WITHOUT_CONFIRM_EMAIL'", "' will be deprecated.'", "' It has been replaced by USER_ALLOW_LOGIN_WITHOUT_CONFIRMED_EMAIL.'", "' Please change this as soon as possible.'", ")", "self", ".", "USER_ALLOW_LOGIN_WITHOUT_CONFIRMED_EMAIL", "=", "setting", "setting", "=", "app", ".", "config", ".", "get", "(", "'USER_ENABLE_RETYPE_PASSWORD'", ",", "None", ")", "if", "setting", "is", "not", "None", ":", "print", "(", "'Deprecation warning: USER_ENABLE_RETYPE_PASSWORD'", "' will be deprecated.'", "' It has been replaced with USER_REQUIRE_RETYPE_PASSWORD.'", "' Please change this as soon as possible.'", ")", "self", ".", "USER_REQUIRE_RETYPE_PASSWORD", "=", "setting", "setting", "=", "app", ".", "config", ".", "get", "(", "'USER_SHOW_USERNAME_EMAIL_DOES_NOT_EXIST'", ",", "None", ")", "if", "setting", "is", "not", "None", ":", "print", "(", "'Deprecation warning: USER_SHOW_USERNAME_EMAIL_DOES_NOT_EXIST'", "' will be deprecated.'", "' It has been replaced with USER_SHOW_USERNAME_DOES_NOT_EXIST'", "' and USER_SHOW_EMAIL_DOES_NOT_EXIST.'", "' Please change this as soon as possible.'", ")", "self", ".", "USER_SHOW_USERNAME_DOES_NOT_EXIST", "=", "setting", "self", ".", "USER_SHOW_EMAIL_DOES_NOT_EXIST", "=", "setting", "setting", "=", "app", ".", "config", ".", "get", "(", "'USER_PASSWORD_HASH'", ",", "None", ")", "if", "setting", "is", "not", "None", ":", "print", "(", "\"Deprecation warning: USER_PASSWORD_HASH=<string>\"", "\" will be deprecated.\"", "\" It has been replaced with USER_PASSLIB_CRYPTCONTEXT_SCHEMES=<list>.\"", "\" Please change USER_PASSWORD_HASH='something' to\"", "\" USER_PASSLIB_CRYPTCONTEXT_SCHEMES=['something'] as soon as possible.\"", ")", "self", ".", "USER_PASSLIB_CRYPTCONTEXT_SCHEMES", "=", "[", "setting", "]", "if", "not", "self", ".", "USER_EMAIL_SENDER_EMAIL", "and", "self", ".", "USER_ENABLE_EMAIL", ":", "raise", "ConfigError", "(", "'USER_EMAIL_SENDER_EMAIL is missing while USER_ENABLE_EMAIL is True.'", "' specify USER_EMAIL_SENDER_EMAIL (and USER_EMAIL_SENDER_NAME) or set USER_ENABLE_EMAIL to False.'", ")", "if", "not", "self", ".", "USER_ENABLE_USERNAME", "and", "not", "self", ".", "USER_ENABLE_EMAIL", ":", "self", ".", "USER_ENABLE_REGISTER", "=", "False", "if", "not", "self", ".", "USER_ENABLE_EMAIL", ":", "self", ".", "USER_ENABLE_CONFIRM_EMAIL", "=", "False", "self", ".", "USER_ENABLE_MULTIPLE_EMAILS", "=", "False", "self", ".", "USER_ENABLE_FORGOT_PASSWORD", "=", "False", "self", ".", "USER_SEND_PASSWORD_CHANGED_EMAIL", "=", "False", "self", ".", "USER_SEND_REGISTERED_EMAIL", "=", "False", "self", ".", "USER_SEND_USERNAME_CHANGED_EMAIL", "=", "False", "self", ".", "USER_REQUIRE_INVITATION", "=", "False", "if", "not", "self", ".", "USER_ENABLE_USERNAME", ":", "self", ".", "USER_ENABLE_CHANGE_USERNAME", "=", "False" ]
python
Verify required settings. Produce a helpful error messages for incorrect settings.
false
1,624,091
def set_splash_message(self, text):
    """Sets the text in the bottom of the Splash screen."""
    self.splash_text = text
    self._show_message(text)
    self.timer_ellipsis.start(500)
[ "def", "set_splash_message", "(", "self", ",", "text", ")", ":", "self", ".", "splash_text", "=", "text", "self", ".", "_show_message", "(", "text", ")", "self", ".", "timer_ellipsis", ".", "start", "(", "500", ")" ]
python
Sets the text in the bottom of the Splash screen.
false
2,480,954
def __init__(self, function=dict):
    """Initialize lazy dictionary with given function.

    :param function: it must return a dictionary like structure
    """
    super(LazyDict, self).__init__()
    self._cached_dict = None
    self._function = function
[ "def", "__init__", "(", "self", ",", "function", "=", "dict", ")", ":", "super", "(", "LazyDict", ",", "self", ")", ".", "__init__", "(", ")", "self", ".", "_cached_dict", "=", "None", "self", ".", "_function", "=", "function" ]
python
Initialize lazy dictionary with given function. :param function: it must return a dictionary like structure
false
2,109,023
def _lderiv(self,l,n):
    """
    NAME:
       _lderiv
    PURPOSE:
       evaluate the derivative w.r.t. lambda for this potential
    INPUT:
       l - prolate spheroidal coordinate lambda
       n - prolate spheroidal coordinate nu
    OUTPUT:
       derivative w.r.t. lambda
    HISTORY:
       2015-02-15 - Written - Trick (MPIA)
    """
    return 0.5/nu.sqrt(l)/(nu.sqrt(l)+nu.sqrt(n))**2
[ "def", "_lderiv", "(", "self", ",", "l", ",", "n", ")", ":", "return", "0.5", "/", "nu", ".", "sqrt", "(", "l", ")", "/", "(", "nu", ".", "sqrt", "(", "l", ")", "+", "nu", ".", "sqrt", "(", "n", ")", ")", "**", "2" ]
python
NAME: _lderiv PURPOSE: evaluate the derivative w.r.t. lambda for this potential INPUT: l - prolate spheroidal coordinate lambda n - prolate spheroidal coordinate nu OUTPUT: derivative w.r.t. lambda HISTORY: 2015-02-15 - Written - Trick (MPIA)
false
2,570,755
def parse_metadata(xml):
    """Given an XML document (string) returned from metadata_query(),
    parse the response into a list of track info dicts. May raise an
    APIError if the lookup fails.
    """
    try:
        root = etree.fromstring(xml)
    except (ExpatError, etree.ParseError):
        # The Last.fm API occasionally generates malformed XML when its
        # includes an illegal character (UTF8-legal but prohibited by
        # the XML standard).
        raise CommunicationError('malformed XML response')

    status = root.attrib['status']
    if status == 'failed':
        error = root.find('error')
        raise APIError(int(error.attrib['code']), error.text)

    out = []
    for track in root.find('tracks').findall('track'):
        out.append({
            'rank': float(track.attrib['rank']),
            'artist': track.find('artist').find('name').text,
            'artist_mbid': track.find('artist').find('mbid').text,
            'title': track.find('name').text,
            'track_mbid': track.find('mbid').text,
        })
    return out
[ "def", "parse_metadata", "(", "xml", ")", ":", "try", ":", "root", "=", "etree", ".", "fromstring", "(", "xml", ")", "except", "(", "ExpatError", ",", "etree", ".", "ParseError", ")", ":", "raise", "CommunicationError", "(", "'malformed XML response'", ")", "status", "=", "root", ".", "attrib", "[", "'status'", "]", "if", "status", "==", "'failed'", ":", "error", "=", "root", ".", "find", "(", "'error'", ")", "raise", "APIError", "(", "int", "(", "error", ".", "attrib", "[", "'code'", "]", ")", ",", "error", ".", "text", ")", "out", "=", "[", "]", "for", "track", "in", "root", ".", "find", "(", "'tracks'", ")", ".", "findall", "(", "'track'", ")", ":", "out", ".", "append", "(", "{", "'rank'", ":", "float", "(", "track", ".", "attrib", "[", "'rank'", "]", ")", ",", "'artist'", ":", "track", ".", "find", "(", "'artist'", ")", ".", "find", "(", "'name'", ")", ".", "text", ",", "'artist_mbid'", ":", "track", ".", "find", "(", "'artist'", ")", ".", "find", "(", "'mbid'", ")", ".", "text", ",", "'title'", ":", "track", ".", "find", "(", "'name'", ")", ".", "text", ",", "'track_mbid'", ":", "track", ".", "find", "(", "'mbid'", ")", ".", "text", ",", "}", ")", "return", "out" ]
python
Given an XML document (string) returned from metadata_query(), parse the response into a list of track info dicts. May raise an APIError if the lookup fails.
false
2,477,872
def __init__(self, graph: "Graph"):
    """
    Initialization.

    Arguments:
        graph (Graph): The graph the node list belongs to.
    """
    self._graph: Graph = graph
    """The graph the node list belongs to."""

    self._nodes: Dict[int, Node] = {}
    """Storage for the nodes of node list as a node index to node instance mapping."""

    self._node_name_map: Dict[str, Node] = {}
    """Dictionary that maps node names to node instances."""
[ "def", "__init__", "(", "self", ",", "graph", ":", "\"Graph\"", ")", ":", "self", ".", "_graph", ":", "Graph", "=", "graph", "self", ".", "_nodes", ":", "Dict", "[", "int", ",", "Node", "]", "=", "{", "}", "self", ".", "_node_name_map", ":", "Dict", "[", "str", ",", "Node", "]", "=", "{", "}" ]
python
Initialization. Arguments: graph (Graph): The graph the node list belongs to.
false
2,705,739
def destinations(stop):
    """Get destination information."""
    from pyruter.api import Departures

    async def get_destinations():
        """Get departure information."""
        async with aiohttp.ClientSession() as session:
            data = Departures(LOOP, stop, session=session)
            result = await data.get_final_destination()
            print(json.dumps(result, indent=4, sort_keys=True,
                             ensure_ascii=False))
    LOOP.run_until_complete(get_destinations())
[ "def", "destinations", "(", "stop", ")", ":", "from", "pyruter", ".", "api", "import", "Departures", "async", "def", "get_destinations", "(", ")", ":", "async", "with", "aiohttp", ".", "ClientSession", "(", ")", "as", "session", ":", "data", "=", "Departures", "(", "LOOP", ",", "stop", ",", "session", "=", "session", ")", "result", "=", "await", "data", ".", "get_final_destination", "(", ")", "print", "(", "json", ".", "dumps", "(", "result", ",", "indent", "=", "4", ",", "sort_keys", "=", "True", ",", "ensure_ascii", "=", "False", ")", ")", "LOOP", ".", "run_until_complete", "(", "get_destinations", "(", ")", ")" ]
python
Get destination information.
false
2,500,112
def _align(self, axes, key_shape=None):
    """
    Align local bolt array so that axes for iteration are in the keys.

    This operation is applied before most functional operators.
    It ensures that the specified axes are valid, and might transpose/reshape
    the underlying array so that the functional operators can be applied over
    the correct records.

    Parameters
    ----------
    axes: tuple[int]
        One or more axes that will be iterated over by a functional operator

    Returns
    -------
    BoltArrayLocal
    """
    # ensure that the key axes are valid for an ndarray of this shape
    inshape(self.shape, axes)

    # compute the set of dimensions/axes that will be used to reshape
    remaining = [dim for dim in range(len(self.shape)) if dim not in axes]
    key_shape = key_shape if key_shape else [self.shape[axis] for axis in axes]
    remaining_shape = [self.shape[axis] for axis in remaining]
    linearized_shape = [prod(key_shape)] + remaining_shape

    # compute the transpose permutation
    transpose_order = axes + remaining

    # transpose the array so that the keys being mapped over come first, then linearize keys
    reshaped = self.transpose(*transpose_order).reshape(*linearized_shape)

    return reshaped
[ "def", "_align", "(", "self", ",", "axes", ",", "key_shape", "=", "None", ")", ":", "inshape", "(", "self", ".", "shape", ",", "axes", ")", "remaining", "=", "[", "dim", "for", "dim", "in", "range", "(", "len", "(", "self", ".", "shape", ")", ")", "if", "dim", "not", "in", "axes", "]", "key_shape", "=", "key_shape", "if", "key_shape", "else", "[", "self", ".", "shape", "[", "axis", "]", "for", "axis", "in", "axes", "]", "remaining_shape", "=", "[", "self", ".", "shape", "[", "axis", "]", "for", "axis", "in", "remaining", "]", "linearized_shape", "=", "[", "prod", "(", "key_shape", ")", "]", "+", "remaining_shape", "transpose_order", "=", "axes", "+", "remaining", "reshaped", "=", "self", ".", "transpose", "(", "*", "transpose_order", ")", ".", "reshape", "(", "*", "linearized_shape", ")", "return", "reshaped" ]
python
Align local bolt array so that axes for iteration are in the keys. This operation is applied before most functional operators. It ensures that the specified axes are valid, and might transpose/reshape the underlying array so that the functional operators can be applied over the correct records. Parameters ---------- axes: tuple[int] One or more axes that will be iterated over by a functional operator Returns ------- BoltArrayLocal
false
1,881,450
def make_seg_table(workflow, seg_files, seg_names, out_dir, tags=None,
                   title_text=None, description=None):
    """ Creates a node in the workflow for writing the segment summary
    table. Returns a File instances for the output file.
    """
    seg_files = list(seg_files)
    seg_names = list(seg_names)
    if tags is None: tags = []
    makedir(out_dir)
    node = PlotExecutable(workflow.cp, 'page_segtable', ifos=workflow.ifos,
                          out_dir=out_dir, tags=tags).create_node()
    node.add_input_list_opt('--segment-files', seg_files)
    quoted_seg_names = []
    for s in seg_names:
        quoted_seg_names.append("'" + s + "'")
    node.add_opt('--segment-names', ' '.join(quoted_seg_names))
    if description:
        node.add_opt('--description', "'" + description + "'")
    if title_text:
        node.add_opt('--title-text', "'" + title_text + "'")
    node.new_output_file_opt(workflow.analysis_time, '.html', '--output-file')
    workflow += node
    return node.output_files[0]
[ "def", "make_seg_table", "(", "workflow", ",", "seg_files", ",", "seg_names", ",", "out_dir", ",", "tags", "=", "None", ",", "title_text", "=", "None", ",", "description", "=", "None", ")", ":", "seg_files", "=", "list", "(", "seg_files", ")", "seg_names", "=", "list", "(", "seg_names", ")", "if", "tags", "is", "None", ":", "tags", "=", "[", "]", "makedir", "(", "out_dir", ")", "node", "=", "PlotExecutable", "(", "workflow", ".", "cp", ",", "'page_segtable'", ",", "ifos", "=", "workflow", ".", "ifos", ",", "out_dir", "=", "out_dir", ",", "tags", "=", "tags", ")", ".", "create_node", "(", ")", "node", ".", "add_input_list_opt", "(", "'--segment-files'", ",", "seg_files", ")", "quoted_seg_names", "=", "[", "]", "for", "s", "in", "seg_names", ":", "quoted_seg_names", ".", "append", "(", "\"'\"", "+", "s", "+", "\"'\"", ")", "node", ".", "add_opt", "(", "'--segment-names'", ",", "' '", ".", "join", "(", "quoted_seg_names", ")", ")", "if", "description", ":", "node", ".", "add_opt", "(", "'--description'", ",", "\"'\"", "+", "description", "+", "\"'\"", ")", "if", "title_text", ":", "node", ".", "add_opt", "(", "'--title-text'", ",", "\"'\"", "+", "title_text", "+", "\"'\"", ")", "node", ".", "new_output_file_opt", "(", "workflow", ".", "analysis_time", ",", "'.html'", ",", "'--output-file'", ")", "workflow", "+=", "node", "return", "node", ".", "output_files", "[", "0", "]" ]
python
Creates a node in the workflow for writing the segment summary table. Returns a File instances for the output file.
false
2,468,555
def import_json(cls, filename):
    """
    Import graph from the given file. The file is expected to contain
    UTF-8 encoded JSON data.
    """
    with open(filename, 'rb') as f:
        json_graph = f.read().decode('utf-8')
    return cls.from_json(json_graph)
[ "def", "import_json", "(", "cls", ",", "filename", ")", ":", "with", "open", "(", "filename", ",", "'rb'", ")", "as", "f", ":", "json_graph", "=", "f", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", "return", "cls", ".", "from_json", "(", "json_graph", ")" ]
python
Import graph from the given file. The file is expected to contain UTF-8 encoded JSON data.
false
2,285,893
def word(ctx, text, number, by_spaces=False):
    """
    Extracts the nth word from the given text string
    """
    return word_slice(ctx, text, number, conversions.to_integer(number, ctx) + 1, by_spaces)
[ "def", "word", "(", "ctx", ",", "text", ",", "number", ",", "by_spaces", "=", "False", ")", ":", "return", "word_slice", "(", "ctx", ",", "text", ",", "number", ",", "conversions", ".", "to_integer", "(", "number", ",", "ctx", ")", "+", "1", ",", "by_spaces", ")" ]
python
Extracts the nth word from the given text string
false
1,629,531
def competitions_submissions_list(self, id, **kwargs):  # noqa: E501
    """List competition submissions  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.competitions_submissions_list(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Competition name (required)
    :param int page: Page number
    :return: Result
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.competitions_submissions_list_with_http_info(id, **kwargs)  # noqa: E501
    else:
        (data) = self.competitions_submissions_list_with_http_info(id, **kwargs)  # noqa: E501
        return data
[ "def", "competitions_submissions_list", "(", "self", ",", "id", ",", "**", "kwargs", ")", ":", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'async_req'", ")", ":", "return", "self", ".", "competitions_submissions_list_with_http_info", "(", "id", ",", "**", "kwargs", ")", "else", ":", "(", "data", ")", "=", "self", ".", "competitions_submissions_list_with_http_info", "(", "id", ",", "**", "kwargs", ")", "return", "data" ]
python
List competition submissions # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.competitions_submissions_list(id, async_req=True) >>> result = thread.get() :param async_req bool :param str id: Competition name (required) :param int page: Page number :return: Result If the method is called asynchronously, returns the request thread.
false
1,911,859
def get_hw_virt_ex_property(self, property_p):
    """Returns the value of the specified hardware virtualization boolean property.

    in property_p of type :class:`HWVirtExPropertyType`
        Property type to query.

    return value of type bool
        Property value.

    raises :class:`OleErrorInvalidarg`
        Invalid property.
    """
    if not isinstance(property_p, HWVirtExPropertyType):
        raise TypeError("property_p can only be an instance of type HWVirtExPropertyType")
    value = self._call("getHWVirtExProperty",
                       in_p=[property_p])
    return value
[ "def", "get_hw_virt_ex_property", "(", "self", ",", "property_p", ")", ":", "if", "not", "isinstance", "(", "property_p", ",", "HWVirtExPropertyType", ")", ":", "raise", "TypeError", "(", "\"property_p can only be an instance of type HWVirtExPropertyType\"", ")", "value", "=", "self", ".", "_call", "(", "\"getHWVirtExProperty\"", ",", "in_p", "=", "[", "property_p", "]", ")", "return", "value" ]
python
Returns the value of the specified hardware virtualization boolean property. in property_p of type :class:`HWVirtExPropertyType` Property type to query. return value of type bool Property value. raises :class:`OleErrorInvalidarg` Invalid property.
false
2,658,214
def deramise(r):
    """
    Déramise une chaîne

    :param string: Chaîne à transformer
    :type string: str
    :return: Chaîne nettoyée
    :rtype: str
    """
    return r.replace('J', 'I') \
        .replace('j', 'i') \
        .replace('v', 'u') \
        .replace("æ", "ae") \
        .replace("Æ", "Ae") \
        .replace("œ", "oe") \
        .replace("Œ", "Oe") \
        .replace("ụ", 'u') \
        .replace('V', 'U')
[ "def", "deramise", "(", "r", ")", ":", "return", "r", ".", "replace", "(", "'J'", ",", "'I'", ")", ".", "replace", "(", "'j'", ",", "'i'", ")", ".", "replace", "(", "'v'", ",", "'u'", ")", ".", "replace", "(", "\"æ\"", ",", "\"ae\"", ")", ".", "replace", "(", "\"Æ\"", ",", "\"Ae\"", ")", ".", "replace", "(", "\"œ\"", ",", "\"oe\"", ")", ".", "replace", "(", "\"Œ\"", ",", "\"Oe\"", ")", ".", "replace", "(", "\"ụ\"", ",", "'u'", ")", ".", "replace", "(", "'V'", ",", "'U'", ")" ]
python
Déramise une chaîne :param string: Chaîne à transformer :type string: str :return: Chaîne nettoyée :rtype: str
false
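A hypothetical check of the deramise snippet above (assumes the function is in scope). It normalizes Ramist letters in Latin text, mapping j to i and v/V to u/U among other substitutions:

assert deramise("juvenis") == "iuuenis"  # j -> i, v -> u
assert deramise("Vrbs") == "Urbs"        # V -> U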
1,722,716
def upload(ctx, release, rebuild):
    """ Uploads distribuition files to pypi or pypitest. """

    dist_path = Path(DIST_PATH)
    if rebuild is False:
        if not dist_path.exists() or not list(dist_path.glob('*')):
            print("No distribution files found. Please run 'build' command first")
            return
    else:
        ctx.invoke(build, force=True)

    if release:
        args = ['twine', 'upload', 'dist/*']
    else:
        repository = 'https://test.pypi.org/legacy/'
        args = ['twine', 'upload', '--repository-url', repository, 'dist/*']

    env = os.environ.copy()
    p = subprocess.Popen(args, env=env)
    p.wait()
[ "def", "upload", "(", "ctx", ",", "release", ",", "rebuild", ")", ":", "dist_path", "=", "Path", "(", "DIST_PATH", ")", "if", "rebuild", "is", "False", ":", "if", "not", "dist_path", ".", "exists", "(", ")", "or", "not", "list", "(", "dist_path", ".", "glob", "(", "'*'", ")", ")", ":", "print", "(", "\"No distribution files found. Please run 'build' command first\"", ")", "return", "else", ":", "ctx", ".", "invoke", "(", "build", ",", "force", "=", "True", ")", "if", "release", ":", "args", "=", "[", "'twine'", ",", "'upload'", ",", "'dist/*'", "]", "else", ":", "repository", "=", "'https://test.pypi.org/legacy/'", "args", "=", "[", "'twine'", ",", "'upload'", ",", "'--repository-url'", ",", "repository", ",", "'dist/*'", "]", "env", "=", "os", ".", "environ", ".", "copy", "(", ")", "p", "=", "subprocess", ".", "Popen", "(", "args", ",", "env", "=", "env", ")", "p", ".", "wait", "(", ")" ]
python
Uploads distribuition files to pypi or pypitest.
false
1,927,136
def liftover_cpra(self, chromosome, position, verbose=False):
    """
    Given chromosome, position in 1-based co-ordinates,
    This will use pyliftover to liftover a CPRA, will return a (c,p) tuple
    or raise NonUniqueLiftover if no unique and strand maintaining liftover is possible
    :param chromosome: string with the chromosome as it's represented in the from_genome
    :param position: position on chromosome (will be cast to int)
    :return: ((str) chromosome, (int) position) or None if no liftover
    """
    chromosome = str(chromosome)
    position = int(position)

    # Perform the liftover lookup, shift the position by 1 as pyliftover deals in 0-based co-ords
    new = self.liftover.convert_coordinate(chromosome, position - 1)
    # This has to be here as new will be NoneType when the chromosome doesn't exist in the chainfile
    if new:
        # If the liftover is unique
        if len(new) == 1:
            # If the liftover hasn't changed strand
            if new[0][2] == "+":
                # Set the co-ordinates to the lifted-over ones and write out
                new_chromosome = str(new[0][0])
                # Shift the position forward by one to convert back to a 1-based co-ords
                new_position = int(new[0][1]) + 1
                return new_chromosome, new_position
            else:
                exception_string = "{},{} has a flipped strand in liftover: {}".format(chromosome, position, new)
        else:
            exception_string = "{},{} lifts over to multiple positions: {}".format(chromosome, position, new)
    elif new is None:
        exception_string = "Chromosome '{}' provided not in chain file".format(chromosome)

    if verbose:
        logging.error(exception_string)
    return None, None
[ "def", "liftover_cpra", "(", "self", ",", "chromosome", ",", "position", ",", "verbose", "=", "False", ")", ":", "chromosome", "=", "str", "(", "chromosome", ")", "position", "=", "int", "(", "position", ")", "new", "=", "self", ".", "liftover", ".", "convert_coordinate", "(", "chromosome", ",", "position", "-", "1", ")", "if", "new", ":", "if", "len", "(", "new", ")", "==", "1", ":", "if", "new", "[", "0", "]", "[", "2", "]", "==", "\"+\"", ":", "new_chromosome", "=", "str", "(", "new", "[", "0", "]", "[", "0", "]", ")", "new_position", "=", "int", "(", "new", "[", "0", "]", "[", "1", "]", ")", "+", "1", "return", "new_chromosome", ",", "new_position", "else", ":", "exception_string", "=", "\"{},{} has a flipped strand in liftover: {}\"", ".", "format", "(", "chromosome", ",", "position", ",", "new", ")", "else", ":", "exception_string", "=", "\"{},{} lifts over to multiple positions: {}\"", ".", "format", "(", "chromosome", ",", "position", ",", "new", ")", "elif", "new", "is", "None", ":", "exception_string", "=", "\"Chromosome '{}' provided not in chain file\"", ".", "format", "(", "chromosome", ")", "if", "verbose", ":", "logging", ".", "error", "(", "exception_string", ")", "return", "None", ",", "None" ]
python
Given chromosome, position in 1-based co-ordinates, This will use pyliftover to liftover a CPRA, will return a (c,p) tuple or raise NonUniqueLiftover if no unique and strand maintaining liftover is possible :param chromosome: string with the chromosome as it's represented in the from_genome :param position: position on chromosome (will be cast to int) :return: ((str) chromosome, (int) position) or None if no liftover
false
1,673,524
def close(self):
    """Closes associated resources of this request object.  This
    closes all file handles explicitly.  You can also use the request
    object in a with statement which will automatically close it.

    .. versionadded:: 0.9
    """
    files = self.__dict__.get("files")
    for _key, value in iter_multi_items(files or ()):
        value.close()
[ "def", "close", "(", "self", ")", ":", "files", "=", "self", ".", "__dict__", ".", "get", "(", "\"files\"", ")", "for", "_key", ",", "value", "in", "iter_multi_items", "(", "files", "or", "(", ")", ")", ":", "value", ".", "close", "(", ")" ]
python
Closes associated resources of this request object. This closes all file handles explicitly. You can also use the request object in a with statement which will automatically close it. .. versionadded:: 0.9
false
1,782,552
def sample_discrete(distn,size=[],dtype=np.int32):
    'samples from a one-dimensional finite pmf'
    distn = np.atleast_1d(distn)
    assert (distn >=0).all() and distn.ndim == 1
    if (0 == distn).all():
        return np.random.randint(distn.shape[0],size=size)
    cumvals = np.cumsum(distn)
    return np.sum(np.array(random(size))[...,na] * cumvals[-1] > cumvals,
                  axis=-1,dtype=dtype)
[ "def", "sample_discrete", "(", "distn", ",", "size", "=", "[", "]", ",", "dtype", "=", "np", ".", "int32", ")", ":", "distn", "=", "np", ".", "atleast_1d", "(", "distn", ")", "assert", "(", "distn", ">=", "0", ")", ".", "all", "(", ")", "and", "distn", ".", "ndim", "==", "1", "if", "(", "0", "==", "distn", ")", ".", "all", "(", ")", ":", "return", "np", ".", "random", ".", "randint", "(", "distn", ".", "shape", "[", "0", "]", ",", "size", "=", "size", ")", "cumvals", "=", "np", ".", "cumsum", "(", "distn", ")", "return", "np", ".", "sum", "(", "np", ".", "array", "(", "random", "(", "size", ")", ")", "[", "...", ",", "na", "]", "*", "cumvals", "[", "-", "1", "]", ">", "cumvals", ",", "axis", "=", "-", "1", ",", "dtype", "=", "dtype", ")" ]
python
samples from a one-dimensional finite pmf
false
1,943,686
def cliques(self, xg):
    """
    Return all equivalence set cliques, assuming each edge in the xref graph
    is treated as equivalent, and all edges in ontology are subClassOf

    Arguments
    ---------
    xg : Graph
        an xref graph

    Returns
    -------
    list of sets
    """
    g = nx.DiGraph()
    for (x,y) in self.merged_ontology.get_graph().edges():
        g.add_edge(x,y)
    for (x,y) in xg.edges():
        g.add_edge(x,y)
        g.add_edge(y,x)
    return list(strongly_connected_components(g))
[ "def", "cliques", "(", "self", ",", "xg", ")", ":", "g", "=", "nx", ".", "DiGraph", "(", ")", "for", "(", "x", ",", "y", ")", "in", "self", ".", "merged_ontology", ".", "get_graph", "(", ")", ".", "edges", "(", ")", ":", "g", ".", "add_edge", "(", "x", ",", "y", ")", "for", "(", "x", ",", "y", ")", "in", "xg", ".", "edges", "(", ")", ":", "g", ".", "add_edge", "(", "x", ",", "y", ")", "g", ".", "add_edge", "(", "y", ",", "x", ")", "return", "list", "(", "strongly_connected_components", "(", "g", ")", ")" ]
python
Return all equivalence set cliques, assuming each edge in the xref graph is treated as equivalent, and all edges in ontology are subClassOf Arguments --------- xg : Graph an xref graph Returns ------- list of sets
false
1,828,703
def __init__(self, **kwargs):
    """
    Create a new TakeProfitDetails instance
    """
    super(TakeProfitDetails, self).__init__()

    #
    # The price that the Take Profit Order will be triggered at. Only one
    # of the price and distance fields may be specified.
    #
    self.price = kwargs.get("price")

    #
    # The time in force for the created Take Profit Order. This may only be
    # GTC, GTD or GFD.
    #
    self.timeInForce = kwargs.get("timeInForce", "GTC")

    #
    # The date when the Take Profit Order will be cancelled on if
    # timeInForce is GTD.
    #
    self.gtdTime = kwargs.get("gtdTime")

    #
    # The Client Extensions to add to the Take Profit Order when created.
    #
    self.clientExtensions = kwargs.get("clientExtensions")
[ "def", "__init__", "(", "self", ",", "**", "kwargs", ")", ":", "super", "(", "TakeProfitDetails", ",", "self", ")", ".", "__init__", "(", ")", "self", ".", "price", "=", "kwargs", ".", "get", "(", "\"price\"", ")", "self", ".", "timeInForce", "=", "kwargs", ".", "get", "(", "\"timeInForce\"", ",", "\"GTC\"", ")", "self", ".", "gtdTime", "=", "kwargs", ".", "get", "(", "\"gtdTime\"", ")", "self", ".", "clientExtensions", "=", "kwargs", ".", "get", "(", "\"clientExtensions\"", ")" ]
python
Create a new TakeProfitDetails instance
false
2,308,531
def get_SZ_single(self, alpha=None, beta=None):
    """Get the S and Z matrices for a single orientation.
    """
    if alpha == None:
        alpha = self.alpha
    if beta == None:
        beta = self.beta

    tm_outdated = self._tm_signature != (self.radius, self.radius_type,
        self.wavelength, self.m, self.axis_ratio, self.shape, self.ddelt,
        self.ndgs)
    if tm_outdated:
        self._init_tmatrix()

    scatter_outdated = self._scatter_signature != (self.thet0, self.thet,
        self.phi0, self.phi, alpha, beta, self.orient)

    outdated = tm_outdated or scatter_outdated

    if outdated:
        (self._S_single, self._Z_single) = pytmatrix.calcampl(self.nmax,
            self.wavelength, self.thet0, self.thet, self.phi0, self.phi,
            alpha, beta)
        self._set_scatter_signature()

    return (self._S_single, self._Z_single)
[ "def", "get_SZ_single", "(", "self", ",", "alpha", "=", "None", ",", "beta", "=", "None", ")", ":", "if", "alpha", "==", "None", ":", "alpha", "=", "self", ".", "alpha", "if", "beta", "==", "None", ":", "beta", "=", "self", ".", "beta", "tm_outdated", "=", "self", ".", "_tm_signature", "!=", "(", "self", ".", "radius", ",", "self", ".", "radius_type", ",", "self", ".", "wavelength", ",", "self", ".", "m", ",", "self", ".", "axis_ratio", ",", "self", ".", "shape", ",", "self", ".", "ddelt", ",", "self", ".", "ndgs", ")", "if", "tm_outdated", ":", "self", ".", "_init_tmatrix", "(", ")", "scatter_outdated", "=", "self", ".", "_scatter_signature", "!=", "(", "self", ".", "thet0", ",", "self", ".", "thet", ",", "self", ".", "phi0", ",", "self", ".", "phi", ",", "alpha", ",", "beta", ",", "self", ".", "orient", ")", "outdated", "=", "tm_outdated", "or", "scatter_outdated", "if", "outdated", ":", "(", "self", ".", "_S_single", ",", "self", ".", "_Z_single", ")", "=", "pytmatrix", ".", "calcampl", "(", "self", ".", "nmax", ",", "self", ".", "wavelength", ",", "self", ".", "thet0", ",", "self", ".", "thet", ",", "self", ".", "phi0", ",", "self", ".", "phi", ",", "alpha", ",", "beta", ")", "self", ".", "_set_scatter_signature", "(", ")", "return", "(", "self", ".", "_S_single", ",", "self", ".", "_Z_single", ")" ]
python
Get the S and Z matrices for a single orientation.
false
2,144,378
def open(self, tid, flags):
    """
    File open. ``YTStor`` object associated with this file is initialised and written to ``self.fds``.

    Parameters
    ----------
    tid : str
        Path to file. Original `path` argument is converted to tuple identifier by ``_pathdec`` decorator.
    flags : int
        File open mode. Read-only access is allowed.

    Returns
    -------
    int
        New file descriptor
    """
    pt = self.PathType.get(tid)
    if pt is not self.PathType.file and pt is not self.PathType.ctrl:
        raise FuseOSError(errno.EINVAL)

    if pt is not self.PathType.ctrl and (flags & os.O_WRONLY or flags & os.O_RDWR):
        raise FuseOSError(errno.EPERM)

    if not self.__exists(tid):
        raise FuseOSError(errno.ENOENT)

    try:
        yts = self.searches[tid[0]][tid[1]]
    except KeyError:
        return self.fds.push(None)  # for control file no association is needed.

    try:
        obI = yts.obtainInfo()  # network may fail
    except ConnectionError:
        raise FuseOSError(errno.ENETDOWN)

    if obI:
        fh = self.fds.push(yts)
        try:
            yts.registerHandler(fh)
        except ConnectionError:
            raise FuseOSError(errno.ENETDOWN)
        return fh
    else:
        raise FuseOSError(errno.EINVAL)
[ "def", "open", "(", "self", ",", "tid", ",", "flags", ")", ":", "pt", "=", "self", ".", "PathType", ".", "get", "(", "tid", ")", "if", "pt", "is", "not", "self", ".", "PathType", ".", "file", "and", "pt", "is", "not", "self", ".", "PathType", ".", "ctrl", ":", "raise", "FuseOSError", "(", "errno", ".", "EINVAL", ")", "if", "pt", "is", "not", "self", ".", "PathType", ".", "ctrl", "and", "(", "flags", "&", "os", ".", "O_WRONLY", "or", "flags", "&", "os", ".", "O_RDWR", ")", ":", "raise", "FuseOSError", "(", "errno", ".", "EPERM", ")", "if", "not", "self", ".", "__exists", "(", "tid", ")", ":", "raise", "FuseOSError", "(", "errno", ".", "ENOENT", ")", "try", ":", "yts", "=", "self", ".", "searches", "[", "tid", "[", "0", "]", "]", "[", "tid", "[", "1", "]", "]", "except", "KeyError", ":", "return", "self", ".", "fds", ".", "push", "(", "None", ")", "try", ":", "obI", "=", "yts", ".", "obtainInfo", "(", ")", "except", "ConnectionError", ":", "raise", "FuseOSError", "(", "errno", ".", "ENETDOWN", ")", "if", "obI", ":", "fh", "=", "self", ".", "fds", ".", "push", "(", "yts", ")", "try", ":", "yts", ".", "registerHandler", "(", "fh", ")", "except", "ConnectionError", ":", "raise", "FuseOSError", "(", "errno", ".", "ENETDOWN", ")", "return", "fh", "else", ":", "raise", "FuseOSError", "(", "errno", ".", "EINVAL", ")" ]
python
File open. ``YTStor`` object associated with this file is initialised and written to ``self.fds``. Parameters ---------- tid : str Path to file. Original `path` argument is converted to tuple identifier by ``_pathdec`` decorator. flags : int File open mode. Read-only access is allowed. Returns ------- int New file descriptor
false
1,834,983
def __load_jco(self):
    """private method to set the jco attribute from a file or a matrix object
    """
    if self.jco_arg is None:
        return None
        #raise Exception("linear_analysis.__load_jco(): jco_arg is None")
    if isinstance(self.jco_arg, Matrix):
        self.__jco = self.jco_arg
    elif isinstance(self.jco_arg, str):
        self.__jco = self.__fromfile(self.jco_arg,astype=Jco)
    else:
        raise Exception("linear_analysis.__load_jco(): jco_arg must " +
                        "be a matrix object or a file name: " +
                        str(self.jco_arg))
[ "def", "__load_jco", "(", "self", ")", ":", "if", "self", ".", "jco_arg", "is", "None", ":", "return", "None", "if", "isinstance", "(", "self", ".", "jco_arg", ",", "Matrix", ")", ":", "self", ".", "__jco", "=", "self", ".", "jco_arg", "elif", "isinstance", "(", "self", ".", "jco_arg", ",", "str", ")", ":", "self", ".", "__jco", "=", "self", ".", "__fromfile", "(", "self", ".", "jco_arg", ",", "astype", "=", "Jco", ")", "else", ":", "raise", "Exception", "(", "\"linear_analysis.__load_jco(): jco_arg must \"", "+", "\"be a matrix object or a file name: \"", "+", "str", "(", "self", ".", "jco_arg", ")", ")" ]
python
private method to set the jco attribute from a file or a matrix object
false
1,720,962
def bgplvm_test_model(optimize=False, verbose=1, plot=False, output_dim=200, nan=False):
    """
    model for testing purposes. Samples from a GP with rbf kernel and learns the samples with a new kernel.
    Normally not for optimization, just model cheking
    """
    import GPy

    num_inputs = 13
    num_inducing = 5
    if plot:
        output_dim = 1
        input_dim = 3
    else:
        input_dim = 2
        output_dim = output_dim

    # generate GPLVM-like data
    X = _np.random.rand(num_inputs, input_dim)
    lengthscales = _np.random.rand(input_dim)
    k = GPy.kern.RBF(input_dim, .5, lengthscales, ARD=True)
    K = k.K(X)
    Y = _np.random.multivariate_normal(_np.zeros(num_inputs), K, (output_dim,)).T

    # k = GPy.kern.RBF_inv(input_dim, .5, _np.ones(input_dim) * 2., ARD=True) + GPy.kern.bias(input_dim) + GPy.kern.white(input_dim)
    # k = GPy.kern.linear(input_dim)# + GPy.kern.bias(input_dim) + GPy.kern.white(input_dim, 0.00001)
    # k = GPy.kern.RBF(input_dim, ARD = False) + GPy.kern.white(input_dim, 0.00001)
    # k = GPy.kern.RBF(input_dim, .5, _np.ones(input_dim) * 2., ARD=True) + GPy.kern.RBF(input_dim, .3, _np.ones(input_dim) * .2, ARD=True)
    # k = GPy.kern.RBF(input_dim, .5, 2., ARD=0) + GPy.kern.RBF(input_dim, .3, .2, ARD=0)
    # k = GPy.kern.RBF(input_dim, .5, _np.ones(input_dim) * 2., ARD=True) + GPy.kern.linear(input_dim, _np.ones(input_dim) * .2, ARD=True)

    p = .3

    m = GPy.models.BayesianGPLVM(Y, input_dim, kernel=k, num_inducing=num_inducing)

    if nan:
        m.inference_method = GPy.inference.latent_function_inference.var_dtc.VarDTCMissingData()
        m.Y[_np.random.binomial(1, p, size=(Y.shape)).astype(bool)] = _np.nan
        m.parameters_changed()

    #===========================================================================
    # randomly obstruct data with percentage p
    #===========================================================================
    # m2 = GPy.models.BayesianGPLVMWithMissingData(Y_obstruct, input_dim, kernel=k, num_inducing=num_inducing)
    # m.lengthscales = lengthscales

    if plot:
        import matplotlib.pyplot as pb
        m.plot()
        pb.title('PCA initialisation')
        # m2.plot()
        # pb.title('PCA initialisation')

    if optimize:
        m.optimize('scg', messages=verbose)
        # m2.optimize('scg', messages=verbose)
        if plot:
            m.plot()
            pb.title('After optimisation')
            # m2.plot()
            # pb.title('After optimisation')

    return m
[ "def", "bgplvm_test_model", "(", "optimize", "=", "False", ",", "verbose", "=", "1", ",", "plot", "=", "False", ",", "output_dim", "=", "200", ",", "nan", "=", "False", ")", ":", "import", "GPy", "num_inputs", "=", "13", "num_inducing", "=", "5", "if", "plot", ":", "output_dim", "=", "1", "input_dim", "=", "3", "else", ":", "input_dim", "=", "2", "output_dim", "=", "output_dim", "X", "=", "_np", ".", "random", ".", "rand", "(", "num_inputs", ",", "input_dim", ")", "lengthscales", "=", "_np", ".", "random", ".", "rand", "(", "input_dim", ")", "k", "=", "GPy", ".", "kern", ".", "RBF", "(", "input_dim", ",", ".5", ",", "lengthscales", ",", "ARD", "=", "True", ")", "K", "=", "k", ".", "K", "(", "X", ")", "Y", "=", "_np", ".", "random", ".", "multivariate_normal", "(", "_np", ".", "zeros", "(", "num_inputs", ")", ",", "K", ",", "(", "output_dim", ",", ")", ")", ".", "T", "p", "=", ".3", "m", "=", "GPy", ".", "models", ".", "BayesianGPLVM", "(", "Y", ",", "input_dim", ",", "kernel", "=", "k", ",", "num_inducing", "=", "num_inducing", ")", "if", "nan", ":", "m", ".", "inference_method", "=", "GPy", ".", "inference", ".", "latent_function_inference", ".", "var_dtc", ".", "VarDTCMissingData", "(", ")", "m", ".", "Y", "[", "_np", ".", "random", ".", "binomial", "(", "1", ",", "p", ",", "size", "=", "(", "Y", ".", "shape", ")", ")", ".", "astype", "(", "bool", ")", "]", "=", "_np", ".", "nan", "m", ".", "parameters_changed", "(", ")", "if", "plot", ":", "import", "matplotlib", ".", "pyplot", "as", "pb", "m", ".", "plot", "(", ")", "pb", ".", "title", "(", "'PCA initialisation'", ")", "if", "optimize", ":", "m", ".", "optimize", "(", "'scg'", ",", "messages", "=", "verbose", ")", "if", "plot", ":", "m", ".", "plot", "(", ")", "pb", ".", "title", "(", "'After optimisation'", ")", "return", "m" ]
python
model for testing purposes. Samples from a GP with rbf kernel and learns the samples with a new kernel. Normally not for optimization, just model cheking
false
1,642,983
def floating_ip_create(self, pool=None):
    '''
    Allocate a floating IP

    .. versionadded:: 2016.3.0
    '''
    nt_ks = self.compute_conn
    floating_ip = nt_ks.floating_ips.create(pool)
    response = {
        'ip': floating_ip.ip,
        'fixed_ip': floating_ip.fixed_ip,
        'id': floating_ip.id,
        'instance_id': floating_ip.instance_id,
        'pool': floating_ip.pool
    }
    return response
[ "def", "floating_ip_create", "(", "self", ",", "pool", "=", "None", ")", ":", "nt_ks", "=", "self", ".", "compute_conn", "floating_ip", "=", "nt_ks", ".", "floating_ips", ".", "create", "(", "pool", ")", "response", "=", "{", "'ip'", ":", "floating_ip", ".", "ip", ",", "'fixed_ip'", ":", "floating_ip", ".", "fixed_ip", ",", "'id'", ":", "floating_ip", ".", "id", ",", "'instance_id'", ":", "floating_ip", ".", "instance_id", ",", "'pool'", ":", "floating_ip", ".", "pool", "}", "return", "response" ]
python
Allocate a floating IP .. versionadded:: 2016.3.0
false
1,578,420
def create_table(instance, table_id, initial_split_keys=None, column_families=None):
    """
    Creates the specified Cloud Bigtable table.
    Raises ``google.api_core.exceptions.AlreadyExists`` if the table exists.

    :type instance: Instance
    :param instance: The Cloud Bigtable instance that owns the table.
    :type table_id: str
    :param table_id: The ID of the table to create in Cloud Bigtable.
    :type initial_split_keys: list
    :param initial_split_keys: (Optional) A list of row keys in bytes to use to
        initially split the table.
    :type column_families: dict
    :param column_families: (Optional) A map of columns to create. The key is
        the column_id str, and the value is a
        :class:`google.cloud.bigtable.column_family.GarbageCollectionRule`.
    """
    if column_families is None:
        column_families = {}
    if initial_split_keys is None:
        initial_split_keys = []
    table = Table(table_id, instance)
    table.create(initial_split_keys, column_families)
[ "def", "create_table", "(", "instance", ",", "table_id", ",", "initial_split_keys", "=", "None", ",", "column_families", "=", "None", ")", ":", "if", "column_families", "is", "None", ":", "column_families", "=", "{", "}", "if", "initial_split_keys", "is", "None", ":", "initial_split_keys", "=", "[", "]", "table", "=", "Table", "(", "table_id", ",", "instance", ")", "table", ".", "create", "(", "initial_split_keys", ",", "column_families", ")" ]
python
Creates the specified Cloud Bigtable table. Raises ``google.api_core.exceptions.AlreadyExists`` if the table exists. :type instance: Instance :param instance: The Cloud Bigtable instance that owns the table. :type table_id: str :param table_id: The ID of the table to create in Cloud Bigtable. :type initial_split_keys: list :param initial_split_keys: (Optional) A list of row keys in bytes to use to initially split the table. :type column_families: dict :param column_families: (Optional) A map of columns to create. The key is the column_id str, and the value is a :class:`google.cloud.bigtable.column_family.GarbageCollectionRule`.
false
1,758,605
def __init__(self, guake):
    """Constructor of Keyboard, only receives the guake instance
    to be used in internal methods.
    """
    self.guake = guake
    self.accel_group = None  # see reload_accelerators

    # Setup global keys
    self.globalhotkeys = {}
    globalkeys = ['show-hide', 'show-focus']
    for key in globalkeys:
        guake.settings.keybindingsGlobal.onChangedValue(key, self.reload_global)
        guake.settings.keybindingsGlobal.triggerOnChangedValue(
            guake.settings.keybindingsGlobal, key, None
        )

    # Setup local keys
    keys = [
        'toggle-fullscreen', 'new-tab', 'new-tab-home', 'close-tab', 'rename-current-tab',
        'previous-tab', 'next-tab', 'clipboard-copy', 'clipboard-paste', 'quit', 'zoom-in',
        'zoom-out', 'increase-height', 'decrease-height', 'increase-transparency',
        'decrease-transparency', 'toggle-transparency', "search-on-web", 'move-tab-left',
        'move-tab-right', 'switch-tab1', 'switch-tab2', 'switch-tab3', 'switch-tab4',
        'switch-tab5', 'switch-tab6', 'switch-tab7', 'switch-tab8', 'switch-tab9',
        'switch-tab10', 'switch-tab-last', 'reset-terminal', 'split-tab-vertical',
        'split-tab-horizontal', 'close-terminal', 'focus-terminal-up', 'focus-terminal-down',
        'focus-terminal-right', 'focus-terminal-left', 'move-terminal-split-up',
        'move-terminal-split-down', 'move-terminal-split-left', 'move-terminal-split-right',
        'search-terminal'
    ]
    for key in keys:
        guake.settings.keybindingsLocal.onChangedValue(key, self.reload_accelerators)
    self.reload_accelerators()
[ "def", "__init__", "(", "self", ",", "guake", ")", ":", "self", ".", "guake", "=", "guake", "self", ".", "accel_group", "=", "None", "self", ".", "globalhotkeys", "=", "{", "}", "globalkeys", "=", "[", "'show-hide'", ",", "'show-focus'", "]", "for", "key", "in", "globalkeys", ":", "guake", ".", "settings", ".", "keybindingsGlobal", ".", "onChangedValue", "(", "key", ",", "self", ".", "reload_global", ")", "guake", ".", "settings", ".", "keybindingsGlobal", ".", "triggerOnChangedValue", "(", "guake", ".", "settings", ".", "keybindingsGlobal", ",", "key", ",", "None", ")", "keys", "=", "[", "'toggle-fullscreen'", ",", "'new-tab'", ",", "'new-tab-home'", ",", "'close-tab'", ",", "'rename-current-tab'", ",", "'previous-tab'", ",", "'next-tab'", ",", "'clipboard-copy'", ",", "'clipboard-paste'", ",", "'quit'", ",", "'zoom-in'", ",", "'zoom-out'", ",", "'increase-height'", ",", "'decrease-height'", ",", "'increase-transparency'", ",", "'decrease-transparency'", ",", "'toggle-transparency'", ",", "\"search-on-web\"", ",", "'move-tab-left'", ",", "'move-tab-right'", ",", "'switch-tab1'", ",", "'switch-tab2'", ",", "'switch-tab3'", ",", "'switch-tab4'", ",", "'switch-tab5'", ",", "'switch-tab6'", ",", "'switch-tab7'", ",", "'switch-tab8'", ",", "'switch-tab9'", ",", "'switch-tab10'", ",", "'switch-tab-last'", ",", "'reset-terminal'", ",", "'split-tab-vertical'", ",", "'split-tab-horizontal'", ",", "'close-terminal'", ",", "'focus-terminal-up'", ",", "'focus-terminal-down'", ",", "'focus-terminal-right'", ",", "'focus-terminal-left'", ",", "'move-terminal-split-up'", ",", "'move-terminal-split-down'", ",", "'move-terminal-split-left'", ",", "'move-terminal-split-right'", ",", "'search-terminal'", "]", "for", "key", "in", "keys", ":", "guake", ".", "settings", ".", "keybindingsLocal", ".", "onChangedValue", "(", "key", ",", "self", ".", "reload_accelerators", ")", "self", ".", "reload_accelerators", "(", ")" ]
python
Constructor of Keyboard, only receives the guake instance to be used in internal methods.
false
2,150,534
def expansion_max_H(self):
    """"Return the maximum distance between expansions for the largest
    allowable H/S ratio.

    :returns: Maximum expansion distance
    :rtype: float * meter

    Examples
    --------
    exp_dist_max(20*u.L/u.s, 40*u.cm, 37000, 25*u.degC, 2*u.m)
    0.375 meter
    """
    return (((self.BAFFLE_K / (2 * pc.viscosity_kinematic(self.temp) *
                               (self.vel_grad_avg ** 2))) *
             (self.Q * self.RATIO_MAX_HS / self.channel_W) ** 3) ** (1/4)).to(u.m)
[ "def", "expansion_max_H", "(", "self", ")", ":", "return", "(", "(", "(", "self", ".", "BAFFLE_K", "/", "(", "2", "*", "pc", ".", "viscosity_kinematic", "(", "self", ".", "temp", ")", "*", "(", "self", ".", "vel_grad_avg", "**", "2", ")", ")", ")", "*", "(", "self", ".", "Q", "*", "self", ".", "RATIO_MAX_HS", "/", "self", ".", "channel_W", ")", "**", "3", ")", "**", "(", "1", "/", "4", ")", ")", ".", "to", "(", "u", ".", "m", ")" ]
python
Return the maximum distance between expansions for the largest allowable H/S ratio. :returns: Maximum expansion distance :rtype: float * meter Examples -------- exp_dist_max(20*u.L/u.s, 40*u.cm, 37000, 25*u.degC, 2*u.m) 0.375 meter
false
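Read as a formula, the return statement above computes the following (a reconstruction from the code; the symbol names are mine, not from the source):

$H_{e,\max} = \left[ \frac{K_{\mathrm{baffle}}}{2\,\nu(T)\,\bar{G}^{2}} \left( \frac{Q\,\Pi_{HS}}{W} \right)^{3} \right]^{1/4}$

where $\nu(T)$ is the kinematic viscosity at the design temperature, $\bar{G}$ the average velocity gradient, $Q$ the flow rate, $\Pi_{HS}$ the maximum H/S ratio, and $W$ the channel width.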
2,460,267
def __init__(self, val, next_node=None): """ :param val: Value of node :param next_node: Next node """ self.val = val self.next_node = next_node
[ "def", "__init__", "(", "self", ",", "val", ",", "next_node", "=", "None", ")", ":", "self", ".", "val", "=", "val", "self", ".", "next_node", "=", "next_node" ]
python
:param val: Value of node :param next_node: Next node
false
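A minimal usage sketch for the constructor above; the class name Node is an assumption, since the record shows only the constructor:

# builds the chain 1 -> 2 -> 3
head = Node(1, Node(2, Node(3)))
assert head.next_node.val == 2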
2,450,607
def dispatch(self, command, app):
    """
    Runs the active command.

    Args
    ----
    command (glim.command.Command): the command object.
    app (glim.app.App): the glim app object.

    Note:
    Exception handling should be done in the Command class itself.
    If it is not, an unhandled exception may crash the app!
    """
    if self.is_glimcommand(command):
        command.run(app)
    else:
        command.run()
[ "def", "dispatch", "(", "self", ",", "command", ",", "app", ")", ":", "if", "self", ".", "is_glimcommand", "(", "command", ")", ":", "command", ".", "run", "(", "app", ")", "else", ":", "command", ".", "run", "(", ")" ]
python
Runs the active command.

Args
----
command (glim.command.Command): the command object.
app (glim.app.App): the glim app object.

Note:
Exception handling should be done in the Command class itself.
If it is not, an unhandled exception may crash the app!
false
2,247,141
def set_deplyment_vcenter_params(vcenter_resource_model, deploy_params):
    """
    Sets the vCenter parameters if they are not already set on the deployment options
    :param deploy_params: vCenterVMFromTemplateResourceModel or vCenterVMFromImageResourceModel
    :type vcenter_resource_model: VMwarevCenterResourceModel
    """
    # Override attributes
    deploy_params.vm_cluster = deploy_params.vm_cluster or vcenter_resource_model.vm_cluster
    deploy_params.vm_storage = deploy_params.vm_storage or vcenter_resource_model.vm_storage
    deploy_params.vm_resource_pool = deploy_params.vm_resource_pool or vcenter_resource_model.vm_resource_pool
    deploy_params.vm_location = deploy_params.vm_location or vcenter_resource_model.vm_location
    deploy_params.default_datacenter = vcenter_resource_model.default_datacenter

    if not deploy_params.vm_cluster:
        raise ValueError('VM Cluster is empty')
    if not deploy_params.vm_storage:
        raise ValueError('VM Storage is empty')
    if not deploy_params.vm_location:
        raise ValueError('VM Location is empty')
    if not deploy_params.default_datacenter:
        raise ValueError('Default Datacenter attribute on VMWare vCenter is empty')

    deploy_params.vm_location = VMLocation.combine([deploy_params.default_datacenter, deploy_params.vm_location])
[ "def", "set_deplyment_vcenter_params", "(", "vcenter_resource_model", ",", "deploy_params", ")", ":", "deploy_params", ".", "vm_cluster", "=", "deploy_params", ".", "vm_cluster", "or", "vcenter_resource_model", ".", "vm_cluster", "deploy_params", ".", "vm_storage", "=", "deploy_params", ".", "vm_storage", "or", "vcenter_resource_model", ".", "vm_storage", "deploy_params", ".", "vm_resource_pool", "=", "deploy_params", ".", "vm_resource_pool", "or", "vcenter_resource_model", ".", "vm_resource_pool", "deploy_params", ".", "vm_location", "=", "deploy_params", ".", "vm_location", "or", "vcenter_resource_model", ".", "vm_location", "deploy_params", ".", "default_datacenter", "=", "vcenter_resource_model", ".", "default_datacenter", "if", "not", "deploy_params", ".", "vm_cluster", ":", "raise", "ValueError", "(", "'VM Cluster is empty'", ")", "if", "not", "deploy_params", ".", "vm_storage", ":", "raise", "ValueError", "(", "'VM Storage is empty'", ")", "if", "not", "deploy_params", ".", "vm_location", ":", "raise", "ValueError", "(", "'VM Location is empty'", ")", "if", "not", "deploy_params", ".", "default_datacenter", ":", "raise", "ValueError", "(", "'Default Datacenter attribute on VMWare vCenter is empty'", ")", "deploy_params", ".", "vm_location", "=", "VMLocation", ".", "combine", "(", "[", "deploy_params", ".", "default_datacenter", ",", "deploy_params", ".", "vm_location", "]", ")" ]
python
Sets the vCenter parameters if they are not already set on the deployment options
:param deploy_params: vCenterVMFromTemplateResourceModel or vCenterVMFromImageResourceModel
:type vcenter_resource_model: VMwarevCenterResourceModel
false
1,722,448
def __init__(self, interface=None, bpf_filter=None, display_filter=None, only_summaries=False,
             decryption_key=None, encryption_type='wpa-pwk', output_file=None, decode_as=None,
             disable_protocol=None, tshark_path=None, override_prefs=None, capture_filter=None,
             monitor_mode=None, use_json=False, include_raw=False, eventloop=None,
             custom_parameters=None):
    """
    Creates a new live capturer on a given interface. Does not start the actual capture itself.

    :param interface: Name of the interface to sniff on or a list of names (str). If not given, runs on all interfaces.
    :param bpf_filter: BPF filter to use on packets.
    :param display_filter: Display (wireshark) filter to use.
    :param only_summaries: Only produce packet summaries, much faster but includes very little information
    :param decryption_key: Optional key used to encrypt and decrypt captured traffic.
    :param encryption_type: Standard of encryption used in captured traffic (must be either 'WEP', 'WPA-PWD', or 'WPA-PWK'. Defaults to WPA-PWK).
    :param output_file: Additionally save live captured packets to this file.
    :param decode_as: A dictionary of {decode_criterion_string: decode_as_protocol} that are used to tell tshark to decode protocols in situations it wouldn't usually, for instance {'tcp.port==8888': 'http'} would make it attempt to decode any port 8888 traffic as HTTP. See tshark documentation for details.
    :param tshark_path: Path of the tshark binary
    :param override_prefs: A dictionary of tshark preferences to override, {PREFERENCE_NAME: PREFERENCE_VALUE, ...}.
    :param capture_filter: Capture (wireshark) filter to use.
    :param disable_protocol: Tells tshark to remove a dissector for a specific protocol.
    :param use_json: Uses tshark in JSON mode (EXPERIMENTAL). It is a good deal faster than XML but also has less information. Available from Wireshark 2.2.0.
    :param custom_parameters: A dict of custom parameters to pass to tshark, i.e. {"--param": "value"}
    """
    super(LiveCapture, self).__init__(display_filter=display_filter, only_summaries=only_summaries,
                                      decryption_key=decryption_key, encryption_type=encryption_type,
                                      output_file=output_file, decode_as=decode_as,
                                      disable_protocol=disable_protocol, tshark_path=tshark_path,
                                      override_prefs=override_prefs, capture_filter=capture_filter,
                                      use_json=use_json, include_raw=include_raw, eventloop=eventloop,
                                      custom_parameters=custom_parameters)
    self.bpf_filter = bpf_filter
    self.monitor_mode = monitor_mode

    if sys.platform == 'win32' and monitor_mode:
        raise WindowsError('Monitor mode is not supported by the Windows platform')

    if interface is None:
        self.interfaces = get_tshark_interfaces(tshark_path)
    elif isinstance(interface, basestring):
        self.interfaces = [interface]
    else:
        self.interfaces = interface
[ "def", "__init__", "(", "self", ",", "interface", "=", "None", ",", "bpf_filter", "=", "None", ",", "display_filter", "=", "None", ",", "only_summaries", "=", "False", ",", "decryption_key", "=", "None", ",", "encryption_type", "=", "'wpa-pwk'", ",", "output_file", "=", "None", ",", "decode_as", "=", "None", ",", "disable_protocol", "=", "None", ",", "tshark_path", "=", "None", ",", "override_prefs", "=", "None", ",", "capture_filter", "=", "None", ",", "monitor_mode", "=", "None", ",", "use_json", "=", "False", ",", "include_raw", "=", "False", ",", "eventloop", "=", "None", ",", "custom_parameters", "=", "None", ")", ":", "super", "(", "LiveCapture", ",", "self", ")", ".", "__init__", "(", "display_filter", "=", "display_filter", ",", "only_summaries", "=", "only_summaries", ",", "decryption_key", "=", "decryption_key", ",", "encryption_type", "=", "encryption_type", ",", "output_file", "=", "output_file", ",", "decode_as", "=", "decode_as", ",", "disable_protocol", "=", "disable_protocol", ",", "tshark_path", "=", "tshark_path", ",", "override_prefs", "=", "override_prefs", ",", "capture_filter", "=", "capture_filter", ",", "use_json", "=", "use_json", ",", "include_raw", "=", "include_raw", ",", "eventloop", "=", "eventloop", ",", "custom_parameters", "=", "custom_parameters", ")", "self", ".", "bpf_filter", "=", "bpf_filter", "self", ".", "monitor_mode", "=", "monitor_mode", "if", "sys", ".", "platform", "==", "'win32'", "and", "monitor_mode", ":", "raise", "WindowsError", "(", "'Monitor mode is not supported by the Windows platform'", ")", "if", "interface", "is", "None", ":", "self", ".", "interfaces", "=", "get_tshark_interfaces", "(", "tshark_path", ")", "elif", "isinstance", "(", "interface", ",", "basestring", ")", ":", "self", ".", "interfaces", "=", "[", "interface", "]", "else", ":", "self", ".", "interfaces", "=", "interface" ]
python
Creates a new live capturer on a given interface. Does not start the actual capture itself.

:param interface: Name of the interface to sniff on or a list of names (str). If not given, runs on all interfaces.
:param bpf_filter: BPF filter to use on packets.
:param display_filter: Display (wireshark) filter to use.
:param only_summaries: Only produce packet summaries, much faster but includes very little information
:param decryption_key: Optional key used to encrypt and decrypt captured traffic.
:param encryption_type: Standard of encryption used in captured traffic (must be either 'WEP', 'WPA-PWD', or 'WPA-PWK'. Defaults to WPA-PWK).
:param output_file: Additionally save live captured packets to this file.
:param decode_as: A dictionary of {decode_criterion_string: decode_as_protocol} that are used to tell tshark to decode protocols in situations it wouldn't usually, for instance {'tcp.port==8888': 'http'} would make it attempt to decode any port 8888 traffic as HTTP. See tshark documentation for details.
:param tshark_path: Path of the tshark binary
:param override_prefs: A dictionary of tshark preferences to override, {PREFERENCE_NAME: PREFERENCE_VALUE, ...}.
:param capture_filter: Capture (wireshark) filter to use.
:param disable_protocol: Tells tshark to remove a dissector for a specific protocol.
:param use_json: Uses tshark in JSON mode (EXPERIMENTAL). It is a good deal faster than XML but also has less information. Available from Wireshark 2.2.0.
:param custom_parameters: A dict of custom parameters to pass to tshark, i.e. {"--param": "value"}
false
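For context, a typical pyshark live-capture session looks roughly like this (a sketch: the interface name and filter are placeholders, and the sniff/iteration calls follow the public pyshark API as best I recall it):

import pyshark

cap = pyshark.LiveCapture(interface='eth0', bpf_filter='tcp port 80')
cap.sniff(timeout=10)            # capture for ten seconds
for packet in cap:
    print(packet.highest_layer)  # e.g. 'HTTP'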
2,681,635
def __init__(self, executable, args, env): """ :param str executable: command name to execute *Vim* :param args: arguments to execute *Vim* :type args: None or string or list of string :param env: environment variables to execute *Vim* :type env: None or dict of (string, string) """ self._executable = distutils.spawn.find_executable(executable) self._args = args self._env = env self._open_process()
[ "def", "__init__", "(", "self", ",", "executable", ",", "args", ",", "env", ")", ":", "self", ".", "_executable", "=", "distutils", ".", "spawn", ".", "find_executable", "(", "executable", ")", "self", ".", "_args", "=", "args", "self", ".", "_env", "=", "env", "self", ".", "_open_process", "(", ")" ]
python
:param str executable: command name to execute *Vim* :param args: arguments to execute *Vim* :type args: None or string or list of string :param env: environment variables to execute *Vim* :type env: None or dict of (string, string)
false
2,509,412
def observe(self, event, fn): """ Arguments: event (str): event to subscribe. fn (function|coroutinefunction): function to trigger. Raises: TypeError: if fn argument is not valid """ iscoroutine = asyncio.iscoroutinefunction(fn) if not iscoroutine and not isfunction(fn): raise TypeError('paco: fn param must be a callable ' 'object or coroutine function') observers = self._pool.get(event) if not observers: observers = self._pool[event] = [] # Register the observer observers.append(fn if iscoroutine else coroutine_wrapper(fn))
[ "def", "observe", "(", "self", ",", "event", ",", "fn", ")", ":", "iscoroutine", "=", "asyncio", ".", "iscoroutinefunction", "(", "fn", ")", "if", "not", "iscoroutine", "and", "not", "isfunction", "(", "fn", ")", ":", "raise", "TypeError", "(", "'paco: fn param must be a callable '", "'object or coroutine function'", ")", "observers", "=", "self", ".", "_pool", ".", "get", "(", "event", ")", "if", "not", "observers", ":", "observers", "=", "self", ".", "_pool", "[", "event", "]", "=", "[", "]", "observers", ".", "append", "(", "fn", "if", "iscoroutine", "else", "coroutine_wrapper", "(", "fn", ")", ")" ]
python
Arguments: event (str): event to subscribe. fn (function|coroutinefunction): function to trigger. Raises: TypeError: if fn argument is not valid
false
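Hypothetical usage of the method above; Observer stands in for whichever class hosts it, and the event name is invented:

async def on_start(payload):
    print('started with', payload)

events = Observer()
events.observe('start', on_start)           # coroutine registered as-is
events.observe('start', lambda p: None)     # plain callable gets wrapped into a coroutine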
1,595,900
def __init__(self, requirement_string): """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" try: super(Requirement, self).__init__(requirement_string) except packaging.requirements.InvalidRequirement as e: raise RequirementParseError(str(e)) self.unsafe_name = self.name project_name = safe_name(self.name) self.project_name, self.key = project_name, project_name.lower() self.specs = [ (spec.operator, spec.version) for spec in self.specifier] self.extras = tuple(map(safe_extra, self.extras)) self.hashCmp = ( self.key, self.specifier, frozenset(self.extras), str(self.marker) if self.marker else None, ) self.__hash = hash(self.hashCmp)
[ "def", "__init__", "(", "self", ",", "requirement_string", ")", ":", "try", ":", "super", "(", "Requirement", ",", "self", ")", ".", "__init__", "(", "requirement_string", ")", "except", "packaging", ".", "requirements", ".", "InvalidRequirement", "as", "e", ":", "raise", "RequirementParseError", "(", "str", "(", "e", ")", ")", "self", ".", "unsafe_name", "=", "self", ".", "name", "project_name", "=", "safe_name", "(", "self", ".", "name", ")", "self", ".", "project_name", ",", "self", ".", "key", "=", "project_name", ",", "project_name", ".", "lower", "(", ")", "self", ".", "specs", "=", "[", "(", "spec", ".", "operator", ",", "spec", ".", "version", ")", "for", "spec", "in", "self", ".", "specifier", "]", "self", ".", "extras", "=", "tuple", "(", "map", "(", "safe_extra", ",", "self", ".", "extras", ")", ")", "self", ".", "hashCmp", "=", "(", "self", ".", "key", ",", "self", ".", "specifier", ",", "frozenset", "(", "self", ".", "extras", ")", ",", "str", "(", "self", ".", "marker", ")", "if", "self", ".", "marker", "else", "None", ",", ")", "self", ".", "__hash", "=", "hash", "(", "self", ".", "hashCmp", ")" ]
python
DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!
false
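The supported entry point is Requirement.parse; a pkg_resources sketch (attribute values as I recall them; spec ordering is not guaranteed):

from pkg_resources import Requirement

req = Requirement.parse('requests[security]>=2.0,<3')
req.project_name   # 'requests'
req.extras         # ('security',)
req.specs          # [('>=', '2.0'), ('<', '3')], order may vary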
2,614,974
def toBytes(self, value): ''' toBytes - Convert a value to bytes using the encoding specified on this field @param value <str> - The field to convert to bytes @return <bytes> - The object encoded using the codec specified on this field. NOTE: This method may go away. ''' if type(value) == bytes: return value return value.encode(self.getEncoding())
[ "def", "toBytes", "(", "self", ",", "value", ")", ":", "if", "type", "(", "value", ")", "==", "bytes", ":", "return", "value", "return", "value", ".", "encode", "(", "self", ".", "getEncoding", "(", ")", ")" ]
python
toBytes - Convert a value to bytes using the encoding specified on this field @param value <str> - The field to convert to bytes @return <bytes> - The object encoded using the codec specified on this field. NOTE: This method may go away.
false
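Hypothetical behaviour of toBytes, assuming the field was configured with a 'utf-8' codec; bytes input passes through untouched:

field.toBytes('héllo')   # -> b'h\xc3\xa9llo'
field.toBytes(b'raw')    # -> b'raw'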
2,519,141
def run(self): """Run check for duplicates""" # Create dictonary of signatures database_signatures = {} for entry in self._model.reactions: signature = reaction_signature( entry.equation, direction=self._args.compare_direction, stoichiometry=self._args.compare_stoichiometry) database_signatures.setdefault(signature, set()).add( (entry.id, entry.equation, entry.filemark)) for reaction_set in itervalues(database_signatures): if len(reaction_set) > 1: print('Found {} duplicate reactions:'.format( len(reaction_set))) for reaction, equation, filemark in reaction_set: result = ' - {}: {}'.format(reaction, equation) if filemark is not None: result += ' (found in {})'.format(filemark) print(result)
[ "def", "run", "(", "self", ")", ":", "database_signatures", "=", "{", "}", "for", "entry", "in", "self", ".", "_model", ".", "reactions", ":", "signature", "=", "reaction_signature", "(", "entry", ".", "equation", ",", "direction", "=", "self", ".", "_args", ".", "compare_direction", ",", "stoichiometry", "=", "self", ".", "_args", ".", "compare_stoichiometry", ")", "database_signatures", ".", "setdefault", "(", "signature", ",", "set", "(", ")", ")", ".", "add", "(", "(", "entry", ".", "id", ",", "entry", ".", "equation", ",", "entry", ".", "filemark", ")", ")", "for", "reaction_set", "in", "itervalues", "(", "database_signatures", ")", ":", "if", "len", "(", "reaction_set", ")", ">", "1", ":", "print", "(", "'Found {} duplicate reactions:'", ".", "format", "(", "len", "(", "reaction_set", ")", ")", ")", "for", "reaction", ",", "equation", ",", "filemark", "in", "reaction_set", ":", "result", "=", "' - {}: {}'", ".", "format", "(", "reaction", ",", "equation", ")", "if", "filemark", "is", "not", "None", ":", "result", "+=", "' (found in {})'", ".", "format", "(", "filemark", ")", "print", "(", "result", ")" ]
python
Run check for duplicates
false
2,070,934
def build_db_jobs(infiles, blastcmds): """Returns dictionary of db-building commands, keyed by dbname.""" dbjobdict = {} # Dict of database construction jobs, keyed by filename # Create dictionary of database building jobs, keyed by db name # defining jobnum for later use as last job index used for idx, fname in enumerate(infiles): dbjobdict[blastcmds.get_db_name(fname)] = pyani_jobs.Job( "%s_db_%06d" % (blastcmds.prefix, idx), blastcmds.build_db_cmd(fname) ) return dbjobdict
[ "def", "build_db_jobs", "(", "infiles", ",", "blastcmds", ")", ":", "dbjobdict", "=", "{", "}", "for", "idx", ",", "fname", "in", "enumerate", "(", "infiles", ")", ":", "dbjobdict", "[", "blastcmds", ".", "get_db_name", "(", "fname", ")", "]", "=", "pyani_jobs", ".", "Job", "(", "\"%s_db_%06d\"", "%", "(", "blastcmds", ".", "prefix", ",", "idx", ")", ",", "blastcmds", ".", "build_db_cmd", "(", "fname", ")", ")", "return", "dbjobdict" ]
python
Returns dictionary of db-building commands, keyed by dbname.
false
1,599,954
def set_max_position_size(self, asset=None, max_shares=None, max_notional=None, on_error='fail'): """Set a limit on the number of shares and/or dollar value held for the given sid. Limits are treated as absolute values and are enforced at the time that the algo attempts to place an order for sid. This means that it's possible to end up with more than the max number of shares due to splits/dividends, and more than the max notional due to price improvement. If an algorithm attempts to place an order that would result in increasing the absolute value of shares/dollar value exceeding one of these limits, raise a TradingControlException. Parameters ---------- asset : Asset, optional If provided, this sets the guard only on positions in the given asset. max_shares : int, optional The maximum number of shares to hold for an asset. max_notional : float, optional The maximum value to hold for an asset. """ control = MaxPositionSize(asset=asset, max_shares=max_shares, max_notional=max_notional, on_error=on_error) self.register_trading_control(control)
[ "def", "set_max_position_size", "(", "self", ",", "asset", "=", "None", ",", "max_shares", "=", "None", ",", "max_notional", "=", "None", ",", "on_error", "=", "'fail'", ")", ":", "control", "=", "MaxPositionSize", "(", "asset", "=", "asset", ",", "max_shares", "=", "max_shares", ",", "max_notional", "=", "max_notional", ",", "on_error", "=", "on_error", ")", "self", ".", "register_trading_control", "(", "control", ")" ]
python
Set a limit on the number of shares and/or dollar value held for the given sid. Limits are treated as absolute values and are enforced at the time that the algo attempts to place an order for sid. This means that it's possible to end up with more than the max number of shares due to splits/dividends, and more than the max notional due to price improvement. If an algorithm attempts to place an order that would result in increasing the absolute value of shares/dollar value exceeding one of these limits, raise a TradingControlException. Parameters ---------- asset : Asset, optional If provided, this sets the guard only on positions in the given asset. max_shares : int, optional The maximum number of shares to hold for an asset. max_notional : float, optional The maximum value to hold for an asset.
false
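In user code this guard is normally installed from initialize(); a sketch against the zipline algorithm API as I recall it, with placeholder asset and limits:

from zipline.api import set_max_position_size, symbol

def initialize(context):
    # cap AAPL at 100 shares or $100k notional, logging instead of failing
    set_max_position_size(symbol('AAPL'), max_shares=100,
                          max_notional=100000.0, on_error='log')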
2,288,947
def get_yaml_items(self, dir_path, param=None): ''' Loops through the dir_path and parses all YAML files inside the directory. If no param is defined, then all YAML items will be returned in a list. If a param is defined, then all items will be scanned for this param and a list of all those values will be returned. ''' result = [] if not os.path.isdir(dir_path): return [] for filename in os.listdir(dir_path): path = os.path.join(dir_path, filename) items = self.read_yaml(path) for item in items: if param: if param in item: item = item[param] if isinstance(item, list): result.extend(item) else: result.append(item) else: result.append(item) return result
[ "def", "get_yaml_items", "(", "self", ",", "dir_path", ",", "param", "=", "None", ")", ":", "result", "=", "[", "]", "if", "not", "os", ".", "path", ".", "isdir", "(", "dir_path", ")", ":", "return", "[", "]", "for", "filename", "in", "os", ".", "listdir", "(", "dir_path", ")", ":", "path", "=", "os", ".", "path", ".", "join", "(", "dir_path", ",", "filename", ")", "items", "=", "self", ".", "read_yaml", "(", "path", ")", "for", "item", "in", "items", ":", "if", "param", ":", "if", "param", "in", "item", ":", "item", "=", "item", "[", "param", "]", "if", "isinstance", "(", "item", ",", "list", ")", ":", "result", ".", "extend", "(", "item", ")", "else", ":", "result", ".", "append", "(", "item", ")", "else", ":", "result", ".", "append", "(", "item", ")", "return", "result" ]
python
Loops through the dir_path and parses all YAML files inside the directory. If no param is defined, then all YAML items will be returned in a list. If a param is defined, then all items will be scanned for this param and a list of all those values will be returned.
false
2,551,194
def __eq__(self, other): """ Returns true if both objects are equal """ if not isinstance(other, BpmTaskRestPage): return False return self.__dict__ == other.__dict__
[ "def", "__eq__", "(", "self", ",", "other", ")", ":", "if", "not", "isinstance", "(", "other", ",", "BpmTaskRestPage", ")", ":", "return", "False", "return", "self", ".", "__dict__", "==", "other", ".", "__dict__" ]
python
Returns true if both objects are equal
false
2,248,678
def register_listener(self, trigger: str, func: callable, **kwargs) -> None:
    """
    Register a listener for the given trigger.
    Raises a TypeError when the trigger is not a valid one. To get a list with all valid
    triggers, use LISTEN_ON_OPTIONS.
    :param trigger: the trigger on which the given callback should be used.
    Currently supported: 'universe availability change', 'universe'
    :param func: the callback. The parameters depend on the trigger. See README for more information
    """
    if trigger in LISTEN_ON_OPTIONS:
        if trigger == LISTEN_ON_OPTIONS[1]:  # if the trigger is universe, use the universe from args as key
            try:
                self._callbacks[kwargs[LISTEN_ON_OPTIONS[1]]].append(func)
            except:
                self._callbacks[kwargs[LISTEN_ON_OPTIONS[1]]] = [func]
        try:
            self._callbacks[trigger].append(func)
        except:
            self._callbacks[trigger] = [func]
    else:
        raise TypeError(f'The given trigger "{trigger}" is not a valid one!')
[ "def", "register_listener", "(", "self", ",", "trigger", ":", "str", ",", "func", ":", "callable", ",", "**", "kwargs", ")", "->", "None", ":", "if", "trigger", "in", "LISTEN_ON_OPTIONS", ":", "if", "trigger", "==", "LISTEN_ON_OPTIONS", "[", "1", "]", ":", "try", ":", "self", ".", "_callbacks", "[", "kwargs", "[", "LISTEN_ON_OPTIONS", "[", "1", "]", "]", "]", ".", "append", "(", "func", ")", "except", ":", "self", ".", "_callbacks", "[", "kwargs", "[", "LISTEN_ON_OPTIONS", "[", "1", "]", "]", "]", "=", "[", "func", "]", "try", ":", "self", ".", "_callbacks", "[", "trigger", "]", ".", "append", "(", "func", ")", "except", ":", "self", ".", "_callbacks", "[", "trigger", "]", "=", "[", "func", "]", "else", ":", "raise", "TypeError", "(", "f'The given trigger \"{trigger}\" is not a valid one!'", ")" ]
python
Register a listener for the given trigger.
Raises a TypeError when the trigger is not a valid one. To get a list with all valid triggers, use LISTEN_ON_OPTIONS.
:param trigger: the trigger on which the given callback should be used.
Currently supported: 'universe availability change', 'universe'
:param func: the callback. The parameters depend on the trigger. See README for more information
false
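If this is the sacn package, registration is usually reached through a decorator, but a direct call to the method above would look like this (the receiver instance and universe number are placeholders):

def on_universe_data(packet):
    print(packet)

receiver.register_listener('universe', on_universe_data, universe=1)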
2,514,644
def apply_dmaglim(self,dmaglim=None): """ Applies a constraint that sets the maximum brightness for non-target star :func:`stars.StarPopulation.set_dmaglim` not yet implemented. """ raise NotImplementedError if 'bright blend limit' not in self.constraints: self.constraints.append('bright blend limit') for pop in self.poplist: if not hasattr(pop,'dmaglim') or pop.is_specific: continue if dmaglim is None: dmag = pop.dmaglim else: dmag = dmaglim pop.set_dmaglim(dmag) self.dmaglim = dmaglim
[ "def", "apply_dmaglim", "(", "self", ",", "dmaglim", "=", "None", ")", ":", "raise", "NotImplementedError", "if", "'bright blend limit'", "not", "in", "self", ".", "constraints", ":", "self", ".", "constraints", ".", "append", "(", "'bright blend limit'", ")", "for", "pop", "in", "self", ".", "poplist", ":", "if", "not", "hasattr", "(", "pop", ",", "'dmaglim'", ")", "or", "pop", ".", "is_specific", ":", "continue", "if", "dmaglim", "is", "None", ":", "dmag", "=", "pop", ".", "dmaglim", "else", ":", "dmag", "=", "dmaglim", "pop", ".", "set_dmaglim", "(", "dmag", ")", "self", ".", "dmaglim", "=", "dmaglim" ]
python
Applies a constraint that sets the maximum brightness for non-target star :func:`stars.StarPopulation.set_dmaglim` not yet implemented.
false
2,510,798
def MD5Hash(password): """ Returns md5 hash of a string. @param password (string) - String to be hashed. @return (string) - Md5 hash of password. """ md5_password = md5.new(password) password_md5 = md5_password.hexdigest() return password_md5
[ "def", "MD5Hash", "(", "password", ")", ":", "md5_password", "=", "md5", ".", "new", "(", "password", ")", "password_md5", "=", "md5_password", ".", "hexdigest", "(", ")", "return", "password_md5" ]
python
Returns md5 hash of a string. @param password (string) - String to be hashed. @return (string) - Md5 hash of password.
false
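The record above depends on the Python 2-only md5 module; a Python 3 equivalent (a sketch, not part of the original source) uses hashlib:

import hashlib

def md5_hash(password: str) -> str:
    # same digest as md5.new(password).hexdigest() on Python 2
    return hashlib.md5(password.encode('utf-8')).hexdigest()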
2,696,409
def extract_cosponsors(bill):
    """ Return a list of lists relating cosponsors to legislation. """
    logger.debug("Extracting Cosponsors")
    cosponsor_map = []

    cosponsors = bill.get('cosponsors', [])
    bill_id = bill.get('bill_id', None)

    for co in cosponsors:
        co_list = []
        co_list.append(co.get('thomas_id'))
        co_list.append(bill_id)
        co_list.append(co.get('district'))
        co_list.append(co.get('state'))
        cosponsor_map.append(co_list)

    logger.debug("End Extracting Cosponsors")
    return cosponsor_map
[ "def", "extract_cosponsors", "(", "bill", ")", ":", "logger", ".", "debug", "(", "\"Extracting Cosponsors\"", ")", "cosponsor_map", "=", "[", "]", "cosponsors", "=", "bill", ".", "get", "(", "'cosponsors'", ",", "[", "]", ")", "bill_id", "=", "bill", ".", "get", "(", "'bill_id'", ",", "None", ")", "for", "co", "in", "cosponsors", ":", "co_list", "=", "[", "]", "co_list", ".", "append", "(", "co", ".", "get", "(", "'thomas_id'", ")", ")", "co_list", ".", "append", "(", "bill_id", ")", "co_list", ".", "append", "(", "co", ".", "get", "(", "'district'", ")", ")", "co_list", ".", "append", "(", "co", ".", "get", "(", "'state'", ")", ")", "cosponsor_map", ".", "append", "(", "co_list", ")", "logger", ".", "debug", "(", "\"End Extractioning Cosponsors\"", ")", "return", "cosponsor_map" ]
python
Return a list of lists relating cosponsors to legislation.
false
2,489,069
def _is_definition_section(source): """Determine if the source is a definition section. Args: source: The usage string source that may be a section. Returns: True if the source describes a definition section; otherwise, False. """ try: definitions = textwrap.dedent(source).split('\n', 1)[1].splitlines() return all( re.match(r'\s\s+((?!\s\s).+)\s\s+.+', s) for s in definitions) except IndexError: return False
[ "def", "_is_definition_section", "(", "source", ")", ":", "try", ":", "definitions", "=", "textwrap", ".", "dedent", "(", "source", ")", ".", "split", "(", "'\\n'", ",", "1", ")", "[", "1", "]", ".", "splitlines", "(", ")", "return", "all", "(", "re", ".", "match", "(", "r'\\s\\s+((?!\\s\\s).+)\\s\\s+.+'", ",", "s", ")", "for", "s", "in", "definitions", ")", "except", "IndexError", ":", "return", "False" ]
python
Determine if the source is a definition section. Args: source: The usage string source that may be a section. Returns: True if the source describes a definition section; otherwise, False.
false
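A usage-string section of the kind the regex accepts, as a hypothetical input (each definition line is indented and separated from its description by at least two spaces):

section = """Options:
  -h --help     Show this screen.
  --version     Show the version.
"""
_is_definition_section(section)   # -> True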
1,740,638
def get_required_parameter_definitions(self): """Returns all template parameters that do not have a default value. Returns: dict: dict of required CloudFormation Parameters for the blueprint. Will be a dictionary of <parameter name>: <parameter attributes>. """ required = {} for name, attrs in self.get_parameter_definitions().items(): if "Default" not in attrs: required[name] = attrs return required
[ "def", "get_required_parameter_definitions", "(", "self", ")", ":", "required", "=", "{", "}", "for", "name", ",", "attrs", "in", "self", ".", "get_parameter_definitions", "(", ")", ".", "items", "(", ")", ":", "if", "\"Default\"", "not", "in", "attrs", ":", "required", "[", "name", "]", "=", "attrs", "return", "required" ]
python
Returns all template parameters that do not have a default value. Returns: dict: dict of required CloudFormation Parameters for the blueprint. Will be a dictionary of <parameter name>: <parameter attributes>.
false
2,165,776
async def get_sleep_timer_settings(self) -> List[Setting]: """Get sleep timer settings.""" return [ Setting.make(**x) for x in await self.services["system"]["getSleepTimerSettings"]({}) ]
[ "async", "def", "get_sleep_timer_settings", "(", "self", ")", "->", "List", "[", "Setting", "]", ":", "return", "[", "Setting", ".", "make", "(", "**", "x", ")", "for", "x", "in", "await", "self", ".", "services", "[", "\"system\"", "]", "[", "\"getSleepTimerSettings\"", "]", "(", "{", "}", ")", "]" ]
python
Get sleep timer settings.
false
2,710,895
def _get_a(self, _type): """ Gets an instance implementing type <_type> """ tmp = self._get_all(_type) ret = pick(tmp) if len(tmp) != 1: self.l.warn(("get_a: %s all implement %s; " + "picking %s") % (tmp, _type, ret)) return ret
[ "def", "_get_a", "(", "self", ",", "_type", ")", ":", "tmp", "=", "self", ".", "_get_all", "(", "_type", ")", "ret", "=", "pick", "(", "tmp", ")", "if", "len", "(", "tmp", ")", "!=", "1", ":", "self", ".", "l", ".", "warn", "(", "(", "\"get_a: %s all implement %s; \"", "+", "\"picking %s\"", ")", "%", "(", "tmp", ",", "_type", ",", "ret", ")", ")", "return", "ret" ]
python
Gets an instance implementing type <_type>
false
2,623,051
def show(self): """ Display the information (with a pretty print) about the method """ self.show_info() self.show_notes() if self.code != None: self.each_params_by_register(self.code.get_registers_size(), self.get_descriptor()) self.code.show(self.CM.get_vmanalysis().get_method(self)) self.show_xref(self.CM.get_vmanalysis().get_method_analysis(self))
[ "def", "show", "(", "self", ")", ":", "self", ".", "show_info", "(", ")", "self", ".", "show_notes", "(", ")", "if", "self", ".", "code", "!=", "None", ":", "self", ".", "each_params_by_register", "(", "self", ".", "code", ".", "get_registers_size", "(", ")", ",", "self", ".", "get_descriptor", "(", ")", ")", "self", ".", "code", ".", "show", "(", "self", ".", "CM", ".", "get_vmanalysis", "(", ")", ".", "get_method", "(", "self", ")", ")", "self", ".", "show_xref", "(", "self", ".", "CM", ".", "get_vmanalysis", "(", ")", ".", "get_method_analysis", "(", "self", ")", ")" ]
python
Display the information (with a pretty print) about the method
false
1,992,006
def fillTriangle(self, x0, y0, x1, y1, x2, y2, color=None, aa=False): """ Draw filled triangle with points x0,y0 - x1,y1 - x2,y2 :param aa: if True, use Bresenham's algorithm for line drawing; otherwise use Xiaolin Wu's algorithm """ md.fill_triangle(self.set, x0, y0, x1, y1, x2, y2, color, aa)
[ "def", "fillTriangle", "(", "self", ",", "x0", ",", "y0", ",", "x1", ",", "y1", ",", "x2", ",", "y2", ",", "color", "=", "None", ",", "aa", "=", "False", ")", ":", "md", ".", "fill_triangle", "(", "self", ".", "set", ",", "x0", ",", "y0", ",", "x1", ",", "y1", ",", "x2", ",", "y2", ",", "color", ",", "aa", ")" ]
python
Draw filled triangle with points x0,y0 - x1,y1 - x2,y2 :param aa: if True, use Bresenham's algorithm for line drawing; otherwise use Xiaolin Wu's algorithm
false
2,303,619
def get(self, variable_path: str,
        default: t.Optional[t.Any] = None,
        coerce_type: t.Optional[t.Type] = None,
        coercer: t.Optional[t.Callable] = None,
        **kwargs):
    """
    Reads a value of ``variable_path`` from consul kv storage.

    :param variable_path: a delimiter-separated path to a nested value
    :param default: default value if there's no object by specified path
    :param coerce_type: cast a type of a value to a specified one
    :param coercer: perform a type casting with specified callback
    :param kwargs: additional arguments the inherited parser may need
    :return: value or default
    :raises config.exceptions.KVStorageKeyDoestNotExist: if the specified ``endpoint`` does not exist
    :raises config.exceptions.KVStorageValueIsEmpty: if the specified ``endpoint`` does not contain a config
    """
    return self.inner_parser.get(
        variable_path,
        default=default,
        coerce_type=coerce_type,
        coercer=coercer,
        **kwargs,
    )
[ "def", "get", "(", "self", ",", "variable_path", ":", "str", ",", "default", ":", "t", ".", "Optional", "[", "t", ".", "Any", "]", "=", "None", ",", "coerce_type", ":", "t", ".", "Optional", "[", "t", ".", "Type", "]", "=", "None", ",", "coercer", ":", "t", ".", "Optional", "[", "t", ".", "Callable", "]", "=", "None", ",", "**", "kwargs", ")", ":", "return", "self", ".", "inner_parser", ".", "get", "(", "variable_path", ",", "default", "=", "default", ",", "coerce_type", "=", "coerce_type", ",", "coercer", "=", "coercer", ",", "**", "kwargs", ",", ")" ]
python
Reads a value of ``variable_path`` from consul kv storage.
:param variable_path: a delimiter-separated path to a nested value
:param default: default value if there's no object by specified path
:param coerce_type: cast a type of a value to a specified one
:param coercer: perform a type casting with specified callback
:param kwargs: additional arguments the inherited parser may need
:return: value or default
:raises config.exceptions.KVStorageKeyDoestNotExist: if the specified ``endpoint`` does not exist
:raises config.exceptions.KVStorageValueIsEmpty: if the specified ``endpoint`` does not contain a config
false
2,655,901
def _handle(self, msg): """ Pass a received message to the registered handlers. :param msg: received message :type msg: :class:`fatbotslim.irc.Message` """ def handler_yielder(): for handler in self.handlers: yield handler def handler_callback(_): if msg.propagate: try: h = hyielder.next() g = self._pool.spawn(handler_runner, h) g.link(handler_callback) except StopIteration: pass def handler_runner(h): for command in h.commands: if command == msg.command: method = getattr(h, h.commands[command]) method(msg) hyielder = handler_yielder() try: next_handler = hyielder.next() g = self._pool.spawn(handler_runner, next_handler) g.link(handler_callback) except StopIteration: pass
[ "def", "_handle", "(", "self", ",", "msg", ")", ":", "def", "handler_yielder", "(", ")", ":", "for", "handler", "in", "self", ".", "handlers", ":", "yield", "handler", "def", "handler_callback", "(", "_", ")", ":", "if", "msg", ".", "propagate", ":", "try", ":", "h", "=", "hyielder", ".", "next", "(", ")", "g", "=", "self", ".", "_pool", ".", "spawn", "(", "handler_runner", ",", "h", ")", "g", ".", "link", "(", "handler_callback", ")", "except", "StopIteration", ":", "pass", "def", "handler_runner", "(", "h", ")", ":", "for", "command", "in", "h", ".", "commands", ":", "if", "command", "==", "msg", ".", "command", ":", "method", "=", "getattr", "(", "h", ",", "h", ".", "commands", "[", "command", "]", ")", "method", "(", "msg", ")", "hyielder", "=", "handler_yielder", "(", ")", "try", ":", "next_handler", "=", "hyielder", ".", "next", "(", ")", "g", "=", "self", ".", "_pool", ".", "spawn", "(", "handler_runner", ",", "next_handler", ")", "g", ".", "link", "(", "handler_callback", ")", "except", "StopIteration", ":", "pass" ]
python
Pass a received message to the registered handlers. :param msg: received message :type msg: :class:`fatbotslim.irc.Message`
false
2,142,474
def main(sample_id, result_p1, result_p2, opts):
    """Main executor of the fastqc_report template.

    If the "--ignore-tests" option is present in the ``opts`` argument,
    the health check of the sample will be bypassed, and it will pass the
    check. This option is used in the first run of FastQC. In the second
    run (after filtering with trimmomatic) this option is not provided and
    the samples are submitted to a health check before proceeding in the
    pipeline.

    Parameters
    ----------
    sample_id : str
        Sample Identification string.
    result_p1 : list
        Two element list containing the path to the FastQC report files to
        the first FastQ pair.
        The first must be the nucleotide level report and the second the
        categorical report.
    result_p2 : list
        Two element list containing the path to the FastQC report files to
        the second FastQ pair.
        The first must be the nucleotide level report and the second the
        categorical report.
    opts : list
        List of arbitrary options. See `Expected input`_.
    """
    logger.info("Starting fastqc report")
    json_dic = {}

    with open("{}_trim_report".format(sample_id), "w") as trep_fh, \
            open("optimal_trim", "w") as trim_fh, \
            open("{}_status_report".format(sample_id), "w") as rep_fh, \
            open(".status", "w") as status_fh, \
            open(".warning", "w") as warn_fh, \
            open(".fail", "w") as fail_fh, \
            open(".report.json", "w") as report_fh:

        # Perform health check according to the FastQC summary report for
        # each pair. If both pairs pass the check, send the 'pass' information
        # to the 'fastqc_health' channel. If at least one fails, send the
        # summary report.
        if "--ignore-tests" not in opts:

            # Get reports for each category in json format
            json_dic = write_json_report(sample_id, result_p1[0], result_p2[0])

            logger.info("Performing FastQ health check")
            for p, fastqc_summary in enumerate([result_p1[1], result_p2[1]]):

                logger.debug("Checking files: {}".format(fastqc_summary))
                # Get the boolean health variable and a list of failed
                # categories, if any
                health, f_cat, warnings = check_summary_health(fastqc_summary)
                logger.debug("Health checked: {}".format(health))
                logger.debug("Failed categories: {}".format(f_cat))

                # Write any warnings
                if warnings:
                    json_dic["warnings"] = [{
                        "sample": sample_id,
                        "table": "qc",
                        "value": []
                    }]
                    for w in warnings:
                        warn_fh.write("{}\\n".format(w))
                        json_dic["warnings"][0]["value"].append(w)

                # Rename category summary file to the channel that will publish
                # The results
                output_file = "{}_{}_summary.txt".format(sample_id, p)
                os.rename(fastqc_summary, output_file)
                logger.debug("Setting summary file name to {}".format(
                    output_file))

                # If one of the health flags returns False, send the summary
                # report through the status channel
                if not health:
                    fail_msg = "Sample failed quality control checks:" \
                               " {}".format(",".join(f_cat))
                    logger.warning(fail_msg)
                    fail_fh.write(fail_msg)
                    json_dic["fail"] = [{
                        "sample": sample_id,
                        "table": "qc",
                        "value": [fail_msg]
                    }]
                    report_fh.write(
                        json.dumps(json_dic, separators=(",", ":")))
                    status_fh.write("fail")
                    trim_fh.write("fail")
                    rep_fh.write("{}, {}\\n".format(sample_id, ",".join(f_cat)))
                    trep_fh.write("{},fail,fail\\n".format(sample_id))

                    return

            logger.info("Sample passed quality control checks")

        status_fh.write("pass")
        rep_fh.write("{}, pass\\n".format(sample_id))

        logger.info("Assessing optimal trim range for sample")
        # Get optimal trimming range for sample, based on the per base sequence
        # content
        optimal_trim = get_sample_trim(result_p1[0], result_p2[0])
        logger.info("Optimal trim range set to: {}".format(optimal_trim))

        trim_fh.write("{}".format(" ".join([str(x) for x in optimal_trim])))
        trep_fh.write("{},{},{}\\n".format(sample_id, optimal_trim[0],
                                           optimal_trim[1]))

        # The json dict report is only populated when the FastQC quality
        # checks are performed, that is, when the --ignore-tests option
        # is not provided
        if json_dic:
            report_fh.write(json.dumps(json_dic, separators=(",", ":")))
[ "def", "main", "(", "sample_id", ",", "result_p1", ",", "result_p2", ",", "opts", ")", ":", "logger", ".", "info", "(", "\"Starting fastqc report\"", ")", "json_dic", "=", "{", "}", "with", "open", "(", "\"{}_trim_report\"", ".", "format", "(", "sample_id", ")", ",", "\"w\"", ")", "as", "trep_fh", ",", "open", "(", "\"optimal_trim\"", ",", "\"w\"", ")", "as", "trim_fh", ",", "open", "(", "\"{}_status_report\"", ".", "format", "(", "sample_id", ")", ",", "\"w\"", ")", "as", "rep_fh", ",", "open", "(", "\".status\"", ",", "\"w\"", ")", "as", "status_fh", ",", "open", "(", "\".warning\"", ",", "\"w\"", ")", "as", "warn_fh", ",", "open", "(", "\".fail\"", ",", "\"w\"", ")", "as", "fail_fh", ",", "open", "(", "\".report.json\"", ",", "\"w\"", ")", "as", "report_fh", ":", "if", "\"--ignore-tests\"", "not", "in", "opts", ":", "json_dic", "=", "write_json_report", "(", "sample_id", ",", "result_p1", "[", "0", "]", ",", "result_p2", "[", "0", "]", ")", "logger", ".", "info", "(", "\"Performing FastQ health check\"", ")", "for", "p", ",", "fastqc_summary", "in", "enumerate", "(", "[", "result_p1", "[", "1", "]", ",", "result_p2", "[", "1", "]", "]", ")", ":", "logger", ".", "debug", "(", "\"Checking files: {}\"", ".", "format", "(", "fastqc_summary", ")", ")", "health", ",", "f_cat", ",", "warnings", "=", "check_summary_health", "(", "fastqc_summary", ")", "logger", ".", "debug", "(", "\"Health checked: {}\"", ".", "format", "(", "health", ")", ")", "logger", ".", "debug", "(", "\"Failed categories: {}\"", ".", "format", "(", "f_cat", ")", ")", "if", "warnings", ":", "json_dic", "[", "\"warnings\"", "]", "=", "[", "{", "\"sample\"", ":", "sample_id", ",", "\"table\"", ":", "\"qc\"", ",", "\"value\"", ":", "[", "]", "}", "]", "for", "w", "in", "warnings", ":", "warn_fh", ".", "write", "(", "\"{}\\\\n\"", ".", "format", "(", "w", ")", ")", "json_dic", "[", "\"warnings\"", "]", "[", "0", "]", "[", "\"value\"", "]", ".", "append", "(", "w", ")", "output_file", "=", "\"{}_{}_summary.txt\"", ".", "format", "(", "sample_id", ",", "p", ")", "os", ".", "rename", "(", "fastqc_summary", ",", "output_file", ")", "logger", ".", "debug", "(", "\"Setting summary file name to {}\"", ".", "format", "(", "output_file", ")", ")", "if", "not", "health", ":", "fail_msg", "=", "\"Sample failed quality control checks:\"", "\" {}\"", ".", "format", "(", "\",\"", ".", "join", "(", "f_cat", ")", ")", "logger", ".", "warning", "(", "fail_msg", ")", "fail_fh", ".", "write", "(", "fail_msg", ")", "json_dic", "[", "\"fail\"", "]", "=", "[", "{", "\"sample\"", ":", "sample_id", ",", "\"table\"", ":", "\"qc\"", ",", "\"value\"", ":", "[", "fail_msg", "]", "}", "]", "report_fh", ".", "write", "(", "json", ".", "dumps", "(", "json_dic", ",", "separators", "=", "(", "\",\"", ",", "\":\"", ")", ")", ")", "status_fh", ".", "write", "(", "\"fail\"", ")", "trim_fh", ".", "write", "(", "\"fail\"", ")", "rep_fh", ".", "write", "(", "\"{}, {}\\\\n\"", ".", "format", "(", "sample_id", ",", "\",\"", ".", "join", "(", "f_cat", ")", ")", ")", "trep_fh", ".", "write", "(", "\"{},fail,fail\\\\n\"", ".", "format", "(", "sample_id", ")", ")", "return", "logger", ".", "info", "(", "\"Sample passed quality control checks\"", ")", "status_fh", ".", "write", "(", "\"pass\"", ")", "rep_fh", ".", "write", "(", "\"{}, pass\\\\n\"", ".", "format", "(", "sample_id", ")", ")", "logger", ".", "info", "(", "\"Assessing optimal trim range for sample\"", ")", "optimal_trim", "=", "get_sample_trim", "(", "result_p1", "[", "0", "]", ",", "result_p2", "[", "0", 
"]", ")", "logger", ".", "info", "(", "\"Optimal trim range set to: {}\"", ".", "format", "(", "optimal_trim", ")", ")", "trim_fh", ".", "write", "(", "\"{}\"", ".", "format", "(", "\" \"", ".", "join", "(", "[", "str", "(", "x", ")", "for", "x", "in", "optimal_trim", "]", ")", ")", ")", "trep_fh", ".", "write", "(", "\"{},{},{}\\\\n\"", ".", "format", "(", "sample_id", ",", "optimal_trim", "[", "0", "]", ",", "optimal_trim", "[", "1", "]", ")", ")", "if", "json_dic", ":", "report_fh", ".", "write", "(", "json", ".", "dumps", "(", "json_dic", ",", "separators", "=", "(", "\",\"", ",", "\":\"", ")", ")", ")" ]
python
Main executor of the fastqc_report template. If the "--ignore-tests" option is present in the ``opts`` argument, the health check of the sample will be bypassed, and it will pass the check. This option is used in the first run of FastQC. In the second run (after filtering with trimmomatic) this option is not provided and the samples are submitted to a health check before proceeding in the pipeline. Parameters ---------- sample_id : str Sample Identification string. result_p1 : list Two element list containing the path to the FastQC report files to the first FastQ pair. The first must be the nucleotide level report and the second the categorical report. result_p2: list Two element list containing the path to the FastQC report files to the second FastQ pair. The first must be the nucleotide level report and the second the categorical report. opts : list List of arbitrary options. See `Expected input`_.
false
2,313,184
def se_iban_load_map(filename: str) -> list: """ Loads Swedish monetary institution codes in CSV format. :param filename: CSV file name of the BIC definitions. Columns: Institution Name, Range Begin-Range End (inclusive), Account digits count :return: List of (bank name, clearing code begin, clearing code end, account digits) """ out = [] name_repl = { 'BNP Paribas Fortis SA/NV, Bankfilial Sverige': 'BNP Paribas Fortis SA/NV', 'Citibank International Plc, Sweden Branch': 'Citibank International Plc', 'Santander Consumer Bank AS (deltar endast i Dataclearingen)': 'Santander Consumer Bank AS', 'Nordax Bank AB (deltar endast i Dataclearingen)': 'Nordax Bank AB', 'Swedbank och fristående Sparbanker, t ex Leksands Sparbank och Roslagsbanken.': 'Swedbank', 'Ålandsbanken Abp (Finland),svensk filial': 'Ålandsbanken Abp', 'SBAB deltar endast i Dataclearingen': 'SBAB', } with open(filename) as fp: for row in csv.reader(fp): if len(row) == 3: name, series, acc_digits = row # pprint([name, series, acc_digits]) # clean up name name = re.sub(r'\n.*', '', name) if name in name_repl: name = name_repl[name] # clean up series ml_acc_digits = acc_digits.split('\n') for i, ser in enumerate(series.split('\n')): begin, end = None, None res = re.match(r'^(\d+)-(\d+).*$', ser) if res: begin, end = res.group(1), res.group(2) if begin is None: res = re.match(r'^(\d{4}).*$', ser) if res: begin = res.group(1) end = begin if begin and end: digits = None try: digits = int(acc_digits) except ValueError: pass if digits is None: try: digits = int(ml_acc_digits[i]) except ValueError: digits = '?' except IndexError: digits = '?' out.append([name, begin, end, digits]) # print('OK!') return out
[ "def", "se_iban_load_map", "(", "filename", ":", "str", ")", "->", "list", ":", "out", "=", "[", "]", "name_repl", "=", "{", "'BNP Paribas Fortis SA/NV, Bankfilial Sverige'", ":", "'BNP Paribas Fortis SA/NV'", ",", "'Citibank International Plc, Sweden Branch'", ":", "'Citibank International Plc'", ",", "'Santander Consumer Bank AS (deltar endast i Dataclearingen)'", ":", "'Santander Consumer Bank AS'", ",", "'Nordax Bank AB (deltar endast i Dataclearingen)'", ":", "'Nordax Bank AB'", ",", "'Swedbank och fristående Sparbanker, t ex Leksands Sparbank och Roslagsbanken.'", ":", "'Swedbank'", ",", "'Ålandsbanken Abp (Finland),svensk filial'", ":", "'Ålandsbanken Abp'", ",", "'SBAB deltar endast i Dataclearingen'", ":", "'SBAB'", ",", "}", "with", "open", "(", "filename", ")", "as", "fp", ":", "for", "row", "in", "csv", ".", "reader", "(", "fp", ")", ":", "if", "len", "(", "row", ")", "==", "3", ":", "name", ",", "series", ",", "acc_digits", "=", "row", "name", "=", "re", ".", "sub", "(", "r'\\n.*'", ",", "''", ",", "name", ")", "if", "name", "in", "name_repl", ":", "name", "=", "name_repl", "[", "name", "]", "ml_acc_digits", "=", "acc_digits", ".", "split", "(", "'\\n'", ")", "for", "i", ",", "ser", "in", "enumerate", "(", "series", ".", "split", "(", "'\\n'", ")", ")", ":", "begin", ",", "end", "=", "None", ",", "None", "res", "=", "re", ".", "match", "(", "r'^(\\d+)-(\\d+).*$'", ",", "ser", ")", "if", "res", ":", "begin", ",", "end", "=", "res", ".", "group", "(", "1", ")", ",", "res", ".", "group", "(", "2", ")", "if", "begin", "is", "None", ":", "res", "=", "re", ".", "match", "(", "r'^(\\d{4}).*$'", ",", "ser", ")", "if", "res", ":", "begin", "=", "res", ".", "group", "(", "1", ")", "end", "=", "begin", "if", "begin", "and", "end", ":", "digits", "=", "None", "try", ":", "digits", "=", "int", "(", "acc_digits", ")", "except", "ValueError", ":", "pass", "if", "digits", "is", "None", ":", "try", ":", "digits", "=", "int", "(", "ml_acc_digits", "[", "i", "]", ")", "except", "ValueError", ":", "digits", "=", "'?'", "except", "IndexError", ":", "digits", "=", "'?'", "out", ".", "append", "(", "[", "name", ",", "begin", ",", "end", ",", "digits", "]", ")", "return", "out" ]
python
Loads Swedish monetary institution codes in CSV format. :param filename: CSV file name of the BIC definitions. Columns: Institution Name, Range Begin-Range End (inclusive), Account digits count :return: List of (bank name, clearing code begin, clearing code end, account digits)
false
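A worked trace on one hypothetical CSV row (the real file's contents are not shown in the record):

row = ['Swedbank', '7000-7999', '7']
# the series regex yields begin='7000', end='7999'; digits = int('7')
# so the loop appends: ['Swedbank', '7000', '7999', 7]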
2,159,108
def set_host(self, host): """ Set the host for a lightning server. Host can be local (e.g. http://localhost:3000), a heroku instance (e.g. http://lightning-test.herokuapp.com), or a independently hosted lightning server. """ if host[-1] == '/': host = host[:-1] self.host = host return self
[ "def", "set_host", "(", "self", ",", "host", ")", ":", "if", "host", "[", "-", "1", "]", "==", "'/'", ":", "host", "=", "host", "[", ":", "-", "1", "]", "self", ".", "host", "=", "host", "return", "self" ]
python
Set the host for a lightning server. Host can be local (e.g. http://localhost:3000), a heroku instance (e.g. http://lightning-test.herokuapp.com), or a independently hosted lightning server.
false
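A sketch of the trailing-slash normalization, called on an existing lightning-python client instance (construction elided; the URL is a placeholder):

lgn.set_host('http://localhost:3000/')
lgn.host   # 'http://localhost:3000' -- slash stripped; self is returned, so calls can chain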
1,735,022
def visit_target(self, node): """ When we find a target node, first make sure it matches the last reference node we saw. Assuming it does, see if its refuri (link URI) is in our replacement list. If so, replace the link with an internal reference. """ if self.lastref is None: return if ( self.lastref.attributes['name'].lower() not in node.attributes['names'] and self.lastref.attributes['name'].lower() not in node.attributes['dupnames'] ): # return if target doesn't match last reference found return if node.attributes['refuri'] not in self.replacements: # return if the refuri isn't in our replacement mapping return # ok, we have a node to replace... params = self.replacements[node.attributes['refuri']] meth = params[0] args = params[1:] # remove the target itself; we'll just replace the reference node.parent.remove(node) self.lastref.parent.replace(self.lastref, meth(*args))
[ "def", "visit_target", "(", "self", ",", "node", ")", ":", "if", "self", ".", "lastref", "is", "None", ":", "return", "if", "(", "self", ".", "lastref", ".", "attributes", "[", "'name'", "]", ".", "lower", "(", ")", "not", "in", "node", ".", "attributes", "[", "'names'", "]", "and", "self", ".", "lastref", ".", "attributes", "[", "'name'", "]", ".", "lower", "(", ")", "not", "in", "node", ".", "attributes", "[", "'dupnames'", "]", ")", ":", "return", "if", "node", ".", "attributes", "[", "'refuri'", "]", "not", "in", "self", ".", "replacements", ":", "return", "params", "=", "self", ".", "replacements", "[", "node", ".", "attributes", "[", "'refuri'", "]", "]", "meth", "=", "params", "[", "0", "]", "args", "=", "params", "[", "1", ":", "]", "node", ".", "parent", ".", "remove", "(", "node", ")", "self", ".", "lastref", ".", "parent", ".", "replace", "(", "self", ".", "lastref", ",", "meth", "(", "*", "args", ")", ")" ]
python
When we find a target node, first make sure it matches the last reference node we saw. Assuming it does, see if its refuri (link URI) is in our replacement list. If so, replace the link with an internal reference.
false
2,370,567
def buildFITSName(geisname): """Build a new FITS filename for a GEIS input image.""" # User wants to make a FITS copy and update it... _indx = geisname.rfind('.') _fitsname = geisname[:_indx] + '_' + geisname[_indx + 1:-1] + 'h.fits' return _fitsname
[ "def", "buildFITSName", "(", "geisname", ")", ":", "_indx", "=", "geisname", ".", "rfind", "(", "'.'", ")", "_fitsname", "=", "geisname", "[", ":", "_indx", "]", "+", "'_'", "+", "geisname", "[", "_indx", "+", "1", ":", "-", "1", "]", "+", "'h.fits'", "return", "_fitsname" ]
python
Build a new FITS filename for a GEIS input image.
false
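A worked example of the string surgery above (pure string manipulation, so the result is exact):

buildFITSName('test.c0h')   # -> 'test_c0h.fits'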
2,578,335
def fetchall(self): """Fetch all refs from the upstream repo.""" try: self.repo.remotes.origin.fetch() except git.exc.GitCommandError as err: raise GitError(err)
[ "def", "fetchall", "(", "self", ")", ":", "try", ":", "self", ".", "repo", ".", "remotes", ".", "origin", ".", "fetch", "(", ")", "except", "git", ".", "exc", ".", "GitCommandError", "as", "err", ":", "raise", "GitError", "(", "err", ")" ]
python
Fetch all refs from the upstream repo.
false
1,642,743
def returner(ret): ''' Return data to an odbc server ''' conn = _get_conn(ret) cur = conn.cursor() sql = '''INSERT INTO salt_returns (fun, jid, retval, id, success, full_ret) VALUES (?, ?, ?, ?, ?, ?)''' cur.execute( sql, ( ret['fun'], ret['jid'], salt.utils.json.dumps(ret['return']), ret['id'], ret['success'], salt.utils.json.dumps(ret) ) ) _close_conn(conn)
[ "def", "returner", "(", "ret", ")", ":", "conn", "=", "_get_conn", "(", "ret", ")", "cur", "=", "conn", ".", "cursor", "(", ")", "sql", "=", "'''INSERT INTO salt_returns\n (fun, jid, retval, id, success, full_ret)\n VALUES (?, ?, ?, ?, ?, ?)'''", "cur", ".", "execute", "(", "sql", ",", "(", "ret", "[", "'fun'", "]", ",", "ret", "[", "'jid'", "]", ",", "salt", ".", "utils", ".", "json", ".", "dumps", "(", "ret", "[", "'return'", "]", ")", ",", "ret", "[", "'id'", "]", ",", "ret", "[", "'success'", "]", ",", "salt", ".", "utils", ".", "json", ".", "dumps", "(", "ret", ")", ")", ")", "_close_conn", "(", "conn", ")" ]
python
Return data to an odbc server
false
2,014,925
def export_debug(self, output_path):
    """ This method is used to generate a debug map for the NEO debugger. """
    file_hash = hashlib.md5(open(output_path, 'rb').read()).hexdigest()
    avm_name = os.path.splitext(os.path.basename(output_path))[0]
    json_data = self.generate_debug_json(avm_name, file_hash)
    mapfilename = output_path.replace('.avm', '.debug.json')
    with open(mapfilename, 'w+') as out_file:
        out_file.write(json_data)
[ "def", "export_debug", "(", "self", ",", "output_path", ")", ":", "file_hash", "=", "hashlib", ".", "md5", "(", "open", "(", "output_path", ",", "'rb'", ")", ".", "read", "(", ")", ")", ".", "hexdigest", "(", ")", "avm_name", "=", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "output_path", ")", ")", "[", "0", "]", "json_data", "=", "self", ".", "generate_debug_json", "(", "avm_name", ",", "file_hash", ")", "mapfilename", "=", "output_path", ".", "replace", "(", "'.avm'", ",", "'.debug.json'", ")", "with", "open", "(", "mapfilename", ",", "'w+'", ")", "as", "out_file", ":", "out_file", ".", "write", "(", "json_data", ")" ]
python
This method is used to generate a debug map for the NEO debugger.
false
2,324,768
def publish(
    c,
    sdist=True,
    wheel=False,
    index=None,
    sign=False,
    dry_run=False,
    directory=None,
    dual_wheels=False,
    alt_python=None,
    check_desc=False,
):
    """
    Publish code to PyPI or index of choice.

    All parameters save ``dry_run`` and ``directory`` honor config settings
    of the same name, under the ``packaging`` tree. E.g. say
    ``.configure({'packaging': {'wheel': True}})`` to force building wheel
    archives by default.

    :param bool sdist: Whether to upload sdists/tgzs.

    :param bool wheel: Whether to upload wheels (requires the ``wheel``
        package from PyPI).

    :param str index: Custom upload index/repository name. See ``upload``
        help for details.

    :param bool sign: Whether to sign the built archive(s) via GPG.

    :param bool dry_run: Skip actual publication step if ``True``. This also
        prevents cleanup of the temporary build/dist directories, so you can
        examine the build artifacts.

    :param str directory: Base directory within which will live the
        ``dist/`` and ``build/`` directories. Defaults to a temporary
        directory which is cleaned up after the run finishes.

    :param bool dual_wheels: When ``True``, builds individual wheels for
        Python 2 and Python 3. Useful for situations where you can't build
        universal wheels, but still want to distribute for both interpreter
        versions. Requires that you have a useful ``python3`` (or
        ``python2``, if you're on Python 3 already) binary in your
        ``$PATH``. Also requires that this other python have the ``wheel``
        package installed in its ``site-packages``; usually this will mean
        the global site-packages for that interpreter. See also the
        ``alt_python`` argument.

    :param str alt_python: Path to the 'alternate' Python interpreter to
        use when ``dual_wheels=True``. When ``None`` (the default) will be
        ``python3`` or ``python2``, depending on the currently active
        interpreter.

    :param bool check_desc: Whether to run ``setup.py check -r -s`` (uses
        ``readme_renderer``) before trying to publish - catches
        long_description bugs. Default: ``False``.
    """
    # Don't hide by default, this step likes to be verbose most of the time.
    c.config.run.hide = False
    # Config hooks
    config = c.config.get("packaging", {})
    index = config.get("index", index)
    sign = config.get("sign", sign)
    dual_wheels = config.get("dual_wheels", dual_wheels)
    check_desc = config.get("check_desc", check_desc)
    # Initial sanity check, if needed. Will die usefully.
    if check_desc:
        c.run("python setup.py check -r -s")
    # Build, into controlled temp dir (avoids attempting to re-upload old
    # files)
    with tmpdir(skip_cleanup=dry_run, explicit=directory) as tmp:
        # Build default archives
        build(c, sdist=sdist, wheel=wheel, directory=tmp)
        # Build opposing interpreter archive, if necessary
        if dual_wheels:
            if not alt_python:
                alt_python = "python2"
                if sys.version_info[0] == 2:
                    alt_python = "python3"
            build(c, sdist=False, wheel=True, directory=tmp, python=alt_python)
        # Do the thing!
        upload(c, directory=tmp, index=index, sign=sign, dry_run=dry_run)
[ "def", "publish", "(", "c", ",", "sdist", "=", "True", ",", "wheel", "=", "False", ",", "index", "=", "None", ",", "sign", "=", "False", ",", "dry_run", "=", "False", ",", "directory", "=", "None", ",", "dual_wheels", "=", "False", ",", "alt_python", "=", "None", ",", "check_desc", "=", "False", ",", ")", ":", "c", ".", "config", ".", "run", ".", "hide", "=", "False", "config", "=", "c", ".", "config", ".", "get", "(", "\"packaging\"", ",", "{", "}", ")", "index", "=", "config", ".", "get", "(", "\"index\"", ",", "index", ")", "sign", "=", "config", ".", "get", "(", "\"sign\"", ",", "sign", ")", "dual_wheels", "=", "config", ".", "get", "(", "\"dual_wheels\"", ",", "dual_wheels", ")", "check_desc", "=", "config", ".", "get", "(", "\"check_desc\"", ",", "check_desc", ")", "if", "check_desc", ":", "c", ".", "run", "(", "\"python setup.py check -r -s\"", ")", "with", "tmpdir", "(", "skip_cleanup", "=", "dry_run", ",", "explicit", "=", "directory", ")", "as", "tmp", ":", "build", "(", "c", ",", "sdist", "=", "sdist", ",", "wheel", "=", "wheel", ",", "directory", "=", "tmp", ")", "if", "dual_wheels", ":", "if", "not", "alt_python", ":", "alt_python", "=", "\"python2\"", "if", "sys", ".", "version_info", "[", "0", "]", "==", "2", ":", "alt_python", "=", "\"python3\"", "build", "(", "c", ",", "sdist", "=", "False", ",", "wheel", "=", "True", ",", "directory", "=", "tmp", ",", "python", "=", "alt_python", ")", "upload", "(", "c", ",", "directory", "=", "tmp", ",", "index", "=", "index", ",", "sign", "=", "sign", ",", "dry_run", "=", "dry_run", ")" ]
python
Publish code to PyPI or index of choice.

All parameters save ``dry_run`` and ``directory`` honor config settings
of the same name, under the ``packaging`` tree. E.g. say
``.configure({'packaging': {'wheel': True}})`` to force building wheel
archives by default.

:param bool sdist: Whether to upload sdists/tgzs.

:param bool wheel: Whether to upload wheels (requires the ``wheel``
    package from PyPI).

:param str index: Custom upload index/repository name. See ``upload``
    help for details.

:param bool sign: Whether to sign the built archive(s) via GPG.

:param bool dry_run: Skip actual publication step if ``True``. This also
    prevents cleanup of the temporary build/dist directories, so you can
    examine the build artifacts.

:param str directory: Base directory within which will live the
    ``dist/`` and ``build/`` directories. Defaults to a temporary
    directory which is cleaned up after the run finishes.

:param bool dual_wheels: When ``True``, builds individual wheels for
    Python 2 and Python 3. Useful for situations where you can't build
    universal wheels, but still want to distribute for both interpreter
    versions. Requires that you have a useful ``python3`` (or ``python2``,
    if you're on Python 3 already) binary in your ``$PATH``. Also requires
    that this other python have the ``wheel`` package installed in its
    ``site-packages``; usually this will mean the global site-packages for
    that interpreter. See also the ``alt_python`` argument.

:param str alt_python: Path to the 'alternate' Python interpreter to use
    when ``dual_wheels=True``. When ``None`` (the default) will be
    ``python3`` or ``python2``, depending on the currently active
    interpreter.

:param bool check_desc: Whether to run ``setup.py check -r -s`` (uses
    ``readme_renderer``) before trying to publish - catches
    long_description bugs. Default: ``False``.
false
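Since this is an Invoke task, here is a minimal sketch of driving it programmatically with a dry run; constructing a bare ``Context`` like this is an assumption for illustration (real runs normally come from the ``invoke`` CLI):

from invoke import Context

c = Context()  # hypothetical context; normally supplied by the task runner
publish(c, wheel=True, dry_run=True)  # builds sdist + wheel into a temp dir, skips the upload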
1,909,203
def bloomfilter(collection, on, column, capacity=3000, error_rate=0.01):
    """
    Filter a collection on the `on` sequence by a BloomFilter built from `column`.

    :param collection:
    :param on: sequence or column name
    :param column: instance of Column
    :param capacity: capacity of the Bloom filter
    :type capacity: int
    :param error_rate: error rate
    :type error_rate: float
    :return: collection

    :Example:

    >>> df1 = DataFrame(pd.DataFrame({'a': ['name1', 'name2', 'name3', 'name1'], 'b': [1, 2, 3, 4]}))
    >>> df2 = DataFrame(pd.DataFrame({'a': ['name1']}))
    >>> df1.bloom_filter('a', df2.a)
           a  b
    0  name1  1
    1  name1  4
    """
    if not isinstance(column, Column):
        raise TypeError('bloomfilter can only filter on the column of a collection')

    # exec the module source so the class is pickled correctly by cloudpickle
    with open(os.path.join(path, 'lib', 'bloomfilter.py')) as bloomfilter_file:
        local = {}
        six.exec_(bloomfilter_file.read(), local)
        BloomFilter = local['BloomFilter']

    col_name = column.source_name or column.name
    on_name = on.name if isinstance(on, SequenceExpr) else on
    rand_name = '%s_%s' % (on_name, str(uuid.uuid4()).replace('-', '_'))
    on_col = collection._get_field(on).rename(rand_name)

    src_collection = collection
    collection = collection[collection, on_col]

    @output(src_collection.schema.names, src_collection.schema.types)
    class Filter(object):
        def __init__(self, resources):
            table = resources[0]

            bloom = BloomFilter(capacity, error_rate)
            for row in table:
                bloom.add(str(getattr(row, col_name)))
            self.bloom = bloom

        def __call__(self, row):
            if str(getattr(row, rand_name)) not in self.bloom:
                return
            return row[:-1]

    return collection.apply(Filter, axis=1, resources=[column.input, ])
[ "def", "bloomfilter", "(", "collection", ",", "on", ",", "column", ",", "capacity", "=", "3000", ",", "error_rate", "=", "0.01", ")", ":", "if", "not", "isinstance", "(", "column", ",", "Column", ")", ":", "raise", "TypeError", "(", "'bloomfilter can only filter on the column of a collection'", ")", "with", "open", "(", "os", ".", "path", ".", "join", "(", "path", ",", "'lib'", ",", "'bloomfilter.py'", ")", ")", "as", "bloomfilter_file", ":", "local", "=", "{", "}", "six", ".", "exec_", "(", "bloomfilter_file", ".", "read", "(", ")", ",", "local", ")", "BloomFilter", "=", "local", "[", "'BloomFilter'", "]", "col_name", "=", "column", ".", "source_name", "or", "column", ".", "name", "on_name", "=", "on", ".", "name", "if", "isinstance", "(", "on", ",", "SequenceExpr", ")", "else", "on", "rand_name", "=", "'%s_%s'", "%", "(", "on_name", ",", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", ".", "replace", "(", "'-'", ",", "'_'", ")", ")", "on_col", "=", "collection", ".", "_get_field", "(", "on", ")", ".", "rename", "(", "rand_name", ")", "src_collection", "=", "collection", "collection", "=", "collection", "[", "collection", ",", "on_col", "]", "@", "output", "(", "src_collection", ".", "schema", ".", "names", ",", "src_collection", ".", "schema", ".", "types", ")", "class", "Filter", "(", "object", ")", ":", "def", "__init__", "(", "self", ",", "resources", ")", ":", "table", "=", "resources", "[", "0", "]", "bloom", "=", "BloomFilter", "(", "capacity", ",", "error_rate", ")", "for", "row", "in", "table", ":", "bloom", ".", "add", "(", "str", "(", "getattr", "(", "row", ",", "col_name", ")", ")", ")", "self", ".", "bloom", "=", "bloom", "def", "__call__", "(", "self", ",", "row", ")", ":", "if", "str", "(", "getattr", "(", "row", ",", "rand_name", ")", ")", "not", "in", "self", ".", "bloom", ":", "return", "return", "row", "[", ":", "-", "1", "]", "return", "collection", ".", "apply", "(", "Filter", ",", "axis", "=", "1", ",", "resources", "=", "[", "column", ".", "input", ",", "]", ")" ]
python
Filter a collection on the `on` sequence by a BloomFilter built from `column`.

:param collection:
:param on: sequence or column name
:param column: instance of Column
:param capacity: capacity of the Bloom filter
:type capacity: int
:param error_rate: error rate
:type error_rate: float
:return: collection

:Example:

>>> df1 = DataFrame(pd.DataFrame({'a': ['name1', 'name2', 'name3', 'name1'], 'b': [1, 2, 3, 4]}))
>>> df2 = DataFrame(pd.DataFrame({'a': ['name1']}))
>>> df1.bloom_filter('a', df2.a)
       a  b
0  name1  1
1  name1  4
false
2,631,409
def uninitialize_ui(self):
    """
    Uninitializes the Component ui.

    :return: Method success.
    :rtype: bool
    """

    LOGGER.debug("> Uninitializing '{0}' Component ui.".format(self.__class__.__name__))

    # Signals / Slots.
    self.refresh_nodes.disconnect(self.__model__refresh_nodes)

    self.__view_remove_actions()

    self.__model = None
    self.__view = None

    self.initialized_ui = False
    return True
[ "def", "uninitialize_ui", "(", "self", ")", ":", "LOGGER", ".", "debug", "(", "\"> Uninitializing '{0}' Component ui.\"", ".", "format", "(", "self", ".", "__class__", ".", "__name__", ")", ")", "self", ".", "refresh_nodes", ".", "disconnect", "(", "self", ".", "__model__refresh_nodes", ")", "self", ".", "__view_remove_actions", "(", ")", "self", ".", "__model", "=", "None", "self", ".", "__view", "=", "None", "self", ".", "initialized_ui", "=", "False", "return", "True" ]
python
Uninitializes the Component ui.

:return: Method success.
:rtype: bool
false
2,492,275
def to_datetime(plain_date, hours=0, minutes=0, seconds=0, ms=0):
    """given a datetime.date, gives back a datetime.datetime"""
    # don't mess with datetimes
    if isinstance(plain_date, datetime.datetime):
        return plain_date
    return datetime.datetime(
        plain_date.year,
        plain_date.month,
        plain_date.day,
        hours,
        minutes,
        seconds,
        ms,
    )
[ "def", "to_datetime", "(", "plain_date", ",", "hours", "=", "0", ",", "minutes", "=", "0", ",", "seconds", "=", "0", ",", "ms", "=", "0", ")", ":", "if", "isinstance", "(", "plain_date", ",", "datetime", ".", "datetime", ")", ":", "return", "plain_date", "return", "datetime", ".", "datetime", "(", "plain_date", ".", "year", ",", "plain_date", ".", "month", ",", "plain_date", ".", "day", ",", "hours", ",", "minutes", ",", "seconds", ",", "ms", ",", ")" ]
python
given a datetime.date, gives back a datetime.datetime
false
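A doctest-style sketch of the conversion; the concrete date is a hypothetical example:

>>> import datetime
>>> to_datetime(datetime.date(2020, 1, 2), hours=9, minutes=30)
datetime.datetime(2020, 1, 2, 9, 30)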
2,551,154
def _switchTo(self, newProto, clientFactory=None):
    """
    Switch this Juice instance to a new protocol.  You need to do this
    'simultaneously' on both ends of a connection; the easiest way to do this
    is to use a subclass of ProtocolSwitchCommand.
    """
    assert self.innerProtocol is None, "Protocol can only be safely switched once."
    self.setRawMode()
    self.innerProtocol = newProto
    self.innerProtocolClientFactory = clientFactory
    newProto.makeConnection(self.transport)
[ "def", "_switchTo", "(", "self", ",", "newProto", ",", "clientFactory", "=", "None", ")", ":", "assert", "self", ".", "innerProtocol", "is", "None", ",", "\"Protocol can only be safely switched once.\"", "self", ".", "setRawMode", "(", ")", "self", ".", "innerProtocol", "=", "newProto", "self", ".", "innerProtocolClientFactory", "=", "clientFactory", "newProto", ".", "makeConnection", "(", "self", ".", "transport", ")" ]
python
Switch this Juice instance to a new protocol. You need to do this 'simultaneously' on both ends of a connection; the easiest way to do this is to use a subclass of ProtocolSwitchCommand.
false
2,144,080
def to_subject_id(s):
    '''
    to_subject_id(s) coerces the given string or number into an integer subject id.
    If s is not a valid subject id, raises an exception.
    '''
    if not pimms.is_number(s) and not pimms.is_str(s):
        raise ValueError('invalid type for subject id: %s' % str(type(s)))
    if pimms.is_str(s):
        try:
            s = os.path.expanduser(s)
        except Exception:
            pass
        if os.path.isdir(s):
            s = s.split(os.sep)[-1]
        s = int(s)
    if s > 999999 or s < 100000:
        raise ValueError('subject ids must be 6-digit integers whose first digit is > 0')
    return s
[ "def", "to_subject_id", "(", "s", ")", ":", "if", "not", "pimms", ".", "is_number", "(", "s", ")", "and", "not", "pimms", ".", "is_str", "(", "s", ")", ":", "raise", "ValueError", "(", "'invalid type for subject id: %s'", "%", "str", "(", "type", "(", "s", ")", ")", ")", "if", "pimms", ".", "is_str", "(", "s", ")", ":", "try", ":", "s", "=", "os", ".", "path", ".", "expanduser", "(", "s", ")", "except", "Exception", ":", "pass", "if", "os", ".", "path", ".", "isdir", "(", "s", ")", ":", "s", "=", "s", ".", "split", "(", "os", ".", "sep", ")", "[", "-", "1", "]", "s", "=", "int", "(", "s", ")", "if", "s", ">", "999999", "or", "s", "<", "100000", ":", "raise", "ValueError", "(", "'subject ids must be 6-digit integers whose first digit is > 0'", ")", "return", "s" ]
python
to_subject_id(s) coerces the given string or number into an integer subject id.
If s is not a valid subject id, raises an exception.
false
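A short doctest-style sketch of the coercion; the id 100307 is a hypothetical example (any 6-digit id with a non-zero first digit passes the range check):

>>> to_subject_id('100307')
100307
>>> to_subject_id(100307)
100307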
2,552,875
def compile(self):
    """
    Compile SQL and return 3-tuple ``(sql, params, keys)``.

    Example usage::

        (sql, params, keys) = sc.compile()
        for row in cursor.execute(sql, params):
            record = dict(zip(keys, row))
    """
    params = self.column_params + self.join_params + self.params
    if self.limit and self.limit >= 0:
        self.sql_limit = 'LIMIT ?'
        params += [self.limit]
    return (self.sql, params, self.keys)
[ "def", "compile", "(", "self", ")", ":", "params", "=", "self", ".", "column_params", "+", "self", ".", "join_params", "+", "self", ".", "params", "if", "self", ".", "limit", "and", "self", ".", "limit", ">=", "0", ":", "self", ".", "sql_limit", "=", "'LIMIT ?'", "params", "+=", "[", "self", ".", "limit", "]", "return", "(", "self", ".", "sql", ",", "params", ",", "self", ".", "keys", ")" ]
python
Compile SQL and return 3-tuple ``(sql, params, keys)``.

Example usage::

    (sql, params, keys) = sc.compile()
    for row in cursor.execute(sql, params):
        record = dict(zip(keys, row))
false
2,516,257
def urlretrieve(self, url, filename=None, method='GET', body=None, dir=None, **kwargs):
    """
    Save result of a request to a file, similarly to
    :func:`urllib.urlretrieve`.

    If an error is encountered may raise any of the scrapelib
    `exceptions`_.

    A filename may be provided or :meth:`urlretrieve` will safely create a
    temporary file. If a directory is provided, a file will be given a
    random name within the specified directory. Either way, it is the
    responsibility of the caller to ensure that the temporary file is
    deleted when it is no longer needed.

    :param url: URL for request
    :param filename: optional name for file
    :param method: any valid HTTP method, but generally GET or POST
    :param body: optional body for request, to turn parameters into
        an appropriate string use :func:`urllib.urlencode()`
    :param dir: optional directory to place file in
    :returns filename, response: tuple with filename for saved
        response (will be same as given filename if one was given,
        otherwise will be a temp file in the OS temp directory) and
        a :class:`Response` object that can be used to inspect the
        response headers.
    """
    result = self.request(method, url, data=body, **kwargs)
    result.code = result.status_code  # backwards compat

    if not filename:
        fd, filename = tempfile.mkstemp(dir=dir)
        f = os.fdopen(fd, 'wb')
    else:
        f = open(filename, 'wb')

    f.write(result.content)
    f.close()

    return filename, result
[ "def", "urlretrieve", "(", "self", ",", "url", ",", "filename", "=", "None", ",", "method", "=", "'GET'", ",", "body", "=", "None", ",", "dir", "=", "None", ",", "**", "kwargs", ")", ":", "result", "=", "self", ".", "request", "(", "method", ",", "url", ",", "data", "=", "body", ",", "**", "kwargs", ")", "result", ".", "code", "=", "result", ".", "status_code", "if", "not", "filename", ":", "fd", ",", "filename", "=", "tempfile", ".", "mkstemp", "(", "dir", "=", "dir", ")", "f", "=", "os", ".", "fdopen", "(", "fd", ",", "'wb'", ")", "else", ":", "f", "=", "open", "(", "filename", ",", "'wb'", ")", "f", ".", "write", "(", "result", ".", "content", ")", "f", ".", "close", "(", ")", "return", "filename", ",", "result" ]
python
Save result of a request to a file, similarly to
:func:`urllib.urlretrieve`.

If an error is encountered may raise any of the scrapelib
`exceptions`_.

A filename may be provided or :meth:`urlretrieve` will safely create a
temporary file. If a directory is provided, a file will be given a
random name within the specified directory. Either way, it is the
responsibility of the caller to ensure that the temporary file is
deleted when it is no longer needed.

:param url: URL for request
:param filename: optional name for file
:param method: any valid HTTP method, but generally GET or POST
:param body: optional body for request, to turn parameters into
    an appropriate string use :func:`urllib.urlencode()`
:param dir: optional directory to place file in
:returns filename, response: tuple with filename for saved
    response (will be same as given filename if one was given,
    otherwise will be a temp file in the OS temp directory) and
    a :class:`Response` object that can be used to inspect the
    response headers.
false
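A minimal usage sketch of the helper above; it assumes the containing class is instantiated as scrapelib's ``Scraper`` and uses a placeholder URL:

import scrapelib

s = scrapelib.Scraper()
filename, response = s.urlretrieve('http://example.com/data.csv')  # placeholder URL
print(filename, response.status_code)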
1,962,624
def discover(scope, loglevel, capture):
    "Discover systems using WS-Discovery"
    if loglevel:
        level = getattr(logging, loglevel, None)
        if not level:
            print("Invalid log level '%s'" % loglevel)
            return
        logger.setLevel(level)

    run(scope=scope, capture=capture)
[ "def", "discover", "(", "scope", ",", "loglevel", ",", "capture", ")", ":", "if", "loglevel", ":", "level", "=", "getattr", "(", "logging", ",", "loglevel", ",", "None", ")", "if", "not", "level", ":", "print", "(", "\"Invalid log level '%s'\"", "%", "loglevel", ")", "return", "logger", ".", "setLevel", "(", "level", ")", "run", "(", "scope", "=", "scope", ",", "capture", "=", "capture", ")" ]
python
Discover systems using WS-Discovery
false
1,733,473
def start(self, on_done):
    """
    Starts the genesis block creation process.  Will call the given
    `on_done` callback on successful completion.

    Args:
        on_done (function): a function called on completion

    Raises:
        InvalidGenesisStateError: raises this error if a genesis block is
            unable to be produced, or the resulting block-chain-id saved.
    """
    genesis_file = os.path.join(self._data_dir, 'genesis.batch')
    try:
        with open(genesis_file, 'rb') as batch_file:
            genesis_data = genesis_pb2.GenesisData()
            genesis_data.ParseFromString(batch_file.read())
        LOGGER.info('Producing genesis block from %s', genesis_file)
    except IOError:
        raise InvalidGenesisStateError(
            "Genesis File {} specified, but unreadable".format(
                genesis_file))

    initial_state_root = self._context_manager.get_first_root()

    genesis_batches = [batch for batch in genesis_data.batches]
    if genesis_batches:
        scheduler = SerialScheduler(
            self._context_manager.get_squash_handler(),
            initial_state_root,
            always_persist=True)

        LOGGER.debug('Adding %s batches', len(genesis_data.batches))
        for batch in genesis_data.batches:
            scheduler.add_batch(batch)

        self._transaction_executor.execute(scheduler)

        scheduler.finalize()
        scheduler.complete(block=True)

    txn_receipts = []
    state_hash = initial_state_root
    for batch in genesis_batches:
        result = scheduler.get_batch_execution_result(
            batch.header_signature)
        if result is None or not result.is_valid:
            raise InvalidGenesisStateError(
                'Unable to create genesis block, due to batch {}'
                .format(batch.header_signature))
        if result.state_hash is not None:
            state_hash = result.state_hash

        txn_results = scheduler.get_transaction_execution_results(
            batch.header_signature)
        txn_receipts += self._make_receipts(txn_results)

    settings_view = SettingsView(
        self._state_view_factory.create_view(state_hash))
    name = settings_view.get_setting('sawtooth.consensus.algorithm.name')
    version = settings_view.get_setting(
        'sawtooth.consensus.algorithm.version')
    if name is None or version is None:
        raise LocalConfigurationError(
            'Unable to start validator; sawtooth.consensus.algorithm.name '
            'and sawtooth.consensus.algorithm.version must be set in the '
            'genesis block.')

    LOGGER.debug('Produced state hash %s for genesis block.', state_hash)

    block_builder = self._generate_genesis_block()
    block_builder.add_batches(genesis_batches)
    block_builder.set_state_hash(state_hash)

    block_publisher = self._get_block_publisher(initial_state_root)
    if not block_publisher.initialize_block(block_builder.block_header):
        LOGGER.error('Consensus refused to initialize consensus block.')
        raise InvalidGenesisConsensusError(
            'Consensus refused to initialize genesis block.')

    if not block_publisher.finalize_block(block_builder.block_header):
        LOGGER.error('Consensus refused to finalize genesis block.')
        raise InvalidGenesisConsensusError(
            'Consensus refused to finalize genesis block.')

    self._sign_block(block_builder)

    block = block_builder.build_block()

    blkw = BlockWrapper(block=block)

    LOGGER.info('Genesis block created: %s', blkw)

    self._block_manager.put([blkw.block])
    self._block_manager.persist(blkw.identifier, "commit_store")

    self._txn_receipt_store.chain_update(block, txn_receipts)

    self._chain_id_manager.save_block_chain_id(block.header_signature)

    LOGGER.debug('Deleting genesis data.')
    os.remove(genesis_file)

    if on_done is not None:
        on_done()
[ "def", "start", "(", "self", ",", "on_done", ")", ":", "genesis_file", "=", "os", ".", "path", ".", "join", "(", "self", ".", "_data_dir", ",", "'genesis.batch'", ")", "try", ":", "with", "open", "(", "genesis_file", ",", "'rb'", ")", "as", "batch_file", ":", "genesis_data", "=", "genesis_pb2", ".", "GenesisData", "(", ")", "genesis_data", ".", "ParseFromString", "(", "batch_file", ".", "read", "(", ")", ")", "LOGGER", ".", "info", "(", "'Producing genesis block from %s'", ",", "genesis_file", ")", "except", "IOError", ":", "raise", "InvalidGenesisStateError", "(", "\"Genesis File {} specified, but unreadable\"", ".", "format", "(", "genesis_file", ")", ")", "initial_state_root", "=", "self", ".", "_context_manager", ".", "get_first_root", "(", ")", "genesis_batches", "=", "[", "batch", "for", "batch", "in", "genesis_data", ".", "batches", "]", "if", "genesis_batches", ":", "scheduler", "=", "SerialScheduler", "(", "self", ".", "_context_manager", ".", "get_squash_handler", "(", ")", ",", "initial_state_root", ",", "always_persist", "=", "True", ")", "LOGGER", ".", "debug", "(", "'Adding %s batches'", ",", "len", "(", "genesis_data", ".", "batches", ")", ")", "for", "batch", "in", "genesis_data", ".", "batches", ":", "scheduler", ".", "add_batch", "(", "batch", ")", "self", ".", "_transaction_executor", ".", "execute", "(", "scheduler", ")", "scheduler", ".", "finalize", "(", ")", "scheduler", ".", "complete", "(", "block", "=", "True", ")", "txn_receipts", "=", "[", "]", "state_hash", "=", "initial_state_root", "for", "batch", "in", "genesis_batches", ":", "result", "=", "scheduler", ".", "get_batch_execution_result", "(", "batch", ".", "header_signature", ")", "if", "result", "is", "None", "or", "not", "result", ".", "is_valid", ":", "raise", "InvalidGenesisStateError", "(", "'Unable to create genesis block, due to batch {}'", ".", "format", "(", "batch", ".", "header_signature", ")", ")", "if", "result", ".", "state_hash", "is", "not", "None", ":", "state_hash", "=", "result", ".", "state_hash", "txn_results", "=", "scheduler", ".", "get_transaction_execution_results", "(", "batch", ".", "header_signature", ")", "txn_receipts", "+=", "self", ".", "_make_receipts", "(", "txn_results", ")", "settings_view", "=", "SettingsView", "(", "self", ".", "_state_view_factory", ".", "create_view", "(", "state_hash", ")", ")", "name", "=", "settings_view", ".", "get_setting", "(", "'sawtooth.consensus.algorithm.name'", ")", "version", "=", "settings_view", ".", "get_setting", "(", "'sawtooth.consensus.algorithm.version'", ")", "if", "name", "is", "None", "or", "version", "is", "None", ":", "raise", "LocalConfigurationError", "(", "'Unable to start validator; sawtooth.consensus.algorithm.name '", "'and sawtooth.consensus.algorithm.version must be set in the '", "'genesis block.'", ")", "LOGGER", ".", "debug", "(", "'Produced state hash %s for genesis block.'", ",", "state_hash", ")", "block_builder", "=", "self", ".", "_generate_genesis_block", "(", ")", "block_builder", ".", "add_batches", "(", "genesis_batches", ")", "block_builder", ".", "set_state_hash", "(", "state_hash", ")", "block_publisher", "=", "self", ".", "_get_block_publisher", "(", "initial_state_root", ")", "if", "not", "block_publisher", ".", "initialize_block", "(", "block_builder", ".", "block_header", ")", ":", "LOGGER", ".", "error", "(", "'Consensus refused to initialize consensus block.'", ")", "raise", "InvalidGenesisConsensusError", "(", "'Consensus refused to initialize genesis block.'", ")", "if", "not", 
"block_publisher", ".", "finalize_block", "(", "block_builder", ".", "block_header", ")", ":", "LOGGER", ".", "error", "(", "'Consensus refused to finalize genesis block.'", ")", "raise", "InvalidGenesisConsensusError", "(", "'Consensus refused to finalize genesis block.'", ")", "self", ".", "_sign_block", "(", "block_builder", ")", "block", "=", "block_builder", ".", "build_block", "(", ")", "blkw", "=", "BlockWrapper", "(", "block", "=", "block", ")", "LOGGER", ".", "info", "(", "'Genesis block created: %s'", ",", "blkw", ")", "self", ".", "_block_manager", ".", "put", "(", "[", "blkw", ".", "block", "]", ")", "self", ".", "_block_manager", ".", "persist", "(", "blkw", ".", "identifier", ",", "\"commit_store\"", ")", "self", ".", "_txn_receipt_store", ".", "chain_update", "(", "block", ",", "txn_receipts", ")", "self", ".", "_chain_id_manager", ".", "save_block_chain_id", "(", "block", ".", "header_signature", ")", "LOGGER", ".", "debug", "(", "'Deleting genesis data.'", ")", "os", ".", "remove", "(", "genesis_file", ")", "if", "on_done", "is", "not", "None", ":", "on_done", "(", ")" ]
python
Starts the genesis block creation process.  Will call the given
`on_done` callback on successful completion.

Args:
    on_done (function): a function called on completion

Raises:
    InvalidGenesisStateError: raises this error if a genesis block is
        unable to be produced, or the resulting block-chain-id saved.
false
2,048,510
def fmt_partition(partition):
    """Format a |Bipartition|.

    The returned string looks like::

        0,1    ∅
        ─── ✕ ───
         2     0,1

    Args:
        partition (Bipartition): The partition in question.

    Returns:
        str: A human-readable string representation of the partition.
    """
    if not partition:
        return ''

    parts = [fmt_part(part, partition.node_labels).split('\n') for part in partition]

    times = (' ', ' {} '.format(MULTIPLY), ' ')
    breaks = ('\n', '\n', '')  # No newline at the end of string
    between = [times] * (len(parts) - 1) + [breaks]

    # Alternate [part, break, part, ..., end]
    elements = chain.from_iterable(zip(parts, between))

    # Transform vertical stacks into horizontal lines
    return ''.join(chain.from_iterable(zip(*elements)))
[ "def", "fmt_partition", "(", "partition", ")", ":", "if", "not", "partition", ":", "return", "''", "parts", "=", "[", "fmt_part", "(", "part", ",", "partition", ".", "node_labels", ")", ".", "split", "(", "'\\n'", ")", "for", "part", "in", "partition", "]", "times", "=", "(", "' '", ",", "' {} '", ".", "format", "(", "MULTIPLY", ")", ",", "' '", ")", "breaks", "=", "(", "'\\n'", ",", "'\\n'", ",", "''", ")", "between", "=", "[", "times", "]", "*", "(", "len", "(", "parts", ")", "-", "1", ")", "+", "[", "breaks", "]", "elements", "=", "chain", ".", "from_iterable", "(", "zip", "(", "parts", ",", "between", ")", ")", "return", "''", ".", "join", "(", "chain", ".", "from_iterable", "(", "zip", "(", "*", "elements", ")", ")", ")" ]
python
Format a |Bipartition|.

The returned string looks like::

    0,1    ∅
    ─── ✕ ───
     2     0,1

Args:
    partition (Bipartition): The partition in question.

Returns:
    str: A human-readable string representation of the partition.
false
1,888,046
def notify(title, message, access_token, device_iden=None, email=None, retcode=None):
    """
    Required parameter:
        * ``access_token`` - Your Pushbullet access token, created at
          https://www.pushbullet.com/#settings/account

    Optional parameters:
        * ``device_iden`` - a device identifier, if omitted, notification is
          sent to all devices
        * ``email`` - send notification to the Pushbullet user with the
          specified email, or send an email if they aren't a Pushbullet user
    """
    data = {
        'type': 'note',
        'title': title,
        'body': message,
    }

    if device_iden is not None:
        data['device_iden'] = device_iden
    if email is not None:
        data['email'] = email

    headers = {'Access-Token': access_token, 'User-Agent': USER_AGENT}

    resp = requests.post(
        'https://api.pushbullet.com/v2/pushes',
        data=data, headers=headers)

    resp.raise_for_status()
[ "def", "notify", "(", "title", ",", "message", ",", "access_token", ",", "device_iden", "=", "None", ",", "email", "=", "None", ",", "retcode", "=", "None", ")", ":", "data", "=", "{", "'type'", ":", "'note'", ",", "'title'", ":", "title", ",", "'body'", ":", "message", ",", "}", "if", "device_iden", "is", "not", "None", ":", "data", "[", "'device_iden'", "]", "=", "device_iden", "if", "email", "is", "not", "None", ":", "data", "[", "'email'", "]", "=", "email", "headers", "=", "{", "'Access-Token'", ":", "access_token", ",", "'User-Agent'", ":", "USER_AGENT", "}", "resp", "=", "requests", ".", "post", "(", "'https://api.pushbullet.com/v2/pushes'", ",", "data", "=", "data", ",", "headers", "=", "headers", ")", "resp", ".", "raise_for_status", "(", ")" ]
python
Required parameter:
    * ``access_token`` - Your Pushbullet access token, created at
      https://www.pushbullet.com/#settings/account

Optional parameters:
    * ``device_iden`` - a device identifier, if omitted, notification is
      sent to all devices
    * ``email`` - send notification to the Pushbullet user with the
      specified email, or send an email if they aren't a Pushbullet user
false
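A minimal call sketch for the notifier above; the access token is a placeholder, not a real credential:

notify(
    title='Build finished',
    message='All tests passed',
    access_token='o.XXXXXXXXXXXXXXXX',  # placeholder token from the Pushbullet account settings
)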
2,033,773
def depends(func, *dependencies, **kw):
    """Annotates a Parameterized method to express its dependencies.

    The specified dependencies can either be Parameters of this class,
    or Parameters of subobjects (Parameterized objects that are values
    of this object's parameters).

    Dependencies can either be on Parameter values, or on other metadata
    about the Parameter.
    """
    # python3 would allow kw-only args
    # (i.e. "func,*dependencies,watch=False" rather than **kw and the check below)
    watch = kw.pop("watch", False)
    assert len(kw) == 0, "@depends accepts only 'watch' kw"

    # TODO: rename dinfo
    _dinfo = getattr(func, '_dinfo', {})
    _dinfo.update({'dependencies': dependencies,
                   'watch': watch})

    @wraps(func)
    def _depends(*args, **kw):
        return func(*args, **kw)

    # storing here risks it being tricky to find if other libraries
    # mess around with methods
    _depends._dinfo = _dinfo

    return _depends
[ "def", "depends", "(", "func", ",", "*", "dependencies", ",", "**", "kw", ")", ":", "watch", "=", "kw", ".", "pop", "(", "\"watch\"", ",", "False", ")", "assert", "len", "(", "kw", ")", "==", "0", ",", "\"@depends accepts only 'watch' kw\"", "_dinfo", "=", "getattr", "(", "func", ",", "'_dinfo'", ",", "{", "}", ")", "_dinfo", ".", "update", "(", "{", "'dependencies'", ":", "dependencies", ",", "'watch'", ":", "watch", "}", ")", "@", "wraps", "(", "func", ")", "def", "_depends", "(", "*", "args", ",", "**", "kw", ")", ":", "return", "func", "(", "*", "args", ",", "**", "kw", ")", "_depends", ".", "_dinfo", "=", "_dinfo", "return", "_depends" ]
python
Annotates a Parameterized method to express its dependencies.

The specified dependencies can either be Parameters of this class,
or Parameters of subobjects (Parameterized objects that are values
of this object's parameters).

Dependencies can either be on Parameter values, or on other metadata
about the Parameter.
false
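Here is a minimal sketch of how the decorator is typically applied; it assumes the decorator is exposed as ``param.depends`` (as in the param library), and the class and parameter names are hypothetical:

import param

class Example(param.Parameterized):
    a = param.Number(0)

    @param.depends('a', watch=True)
    def report(self):
        print('a changed to', self.a)

Example().a = 5  # with watch=True, report() fires when the dependency changes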
2,012,032
def buttonUp(self, button=mouse.LEFT):
    """ Releases the specified mouse button.
    Use Mouse.LEFT, Mouse.MIDDLE, Mouse.RIGHT """
    self._lock.acquire()
    mouse.release(button)
    self._lock.release()
[ "def", "buttonUp", "(", "self", ",", "button", "=", "mouse", ".", "LEFT", ")", ":", "self", ".", "_lock", ".", "acquire", "(", ")", "mouse", ".", "release", "(", "button", ")", "self", ".", "_lock", ".", "release", "(", ")" ]
python
Releases the specified mouse button. Use Mouse.LEFT, Mouse.MIDDLE, Mouse.RIGHT
false
2,384,997
def filter_trim(self, start=1, end=1, filt=True, samples=None, subset=None):
    """
    Remove points from the start and end of filter regions.

    Parameters
    ----------
    start, end : int
        The number of points to remove from the start and end of
        the specified filter.
    filt : valid filter string or bool
        Which filter to trim. If True, applies to currently active
        filters.
    """
    if samples is not None:
        subset = self.make_subset(samples)
    samples = self._get_samples(subset)

    for s in samples:
        self.data[s].filter_trim(start, end, filt)
[ "def", "filter_trim", "(", "self", ",", "start", "=", "1", ",", "end", "=", "1", ",", "filt", "=", "True", ",", "samples", "=", "None", ",", "subset", "=", "None", ")", ":", "if", "samples", "is", "not", "None", ":", "subset", "=", "self", ".", "make_subset", "(", "samples", ")", "samples", "=", "self", ".", "_get_samples", "(", "subset", ")", "for", "s", "in", "samples", ":", "self", ".", "data", "[", "s", "]", ".", "filter_trim", "(", "start", ",", "end", ",", "filt", ")" ]
python
Remove points from the start and end of filter regions.

Parameters
----------
start, end : int
    The number of points to remove from the start and end of
    the specified filter.
filt : valid filter string or bool
    Which filter to trim. If True, applies to currently active
    filters.
false
1,736,574
def _apply_groups_to_backend(cls, obj, options, backend, clone):
    "Apply the groups to a single specified backend"
    obj_handle = obj
    if options is None:
        if clone:
            obj_handle = obj.map(lambda x: x.clone(id=None))
        else:
            obj.map(lambda x: setattr(x, 'id', None))
    elif clone:
        obj_handle = obj.map(lambda x: x.clone(id=x.id))

    return StoreOptions.set_options(obj_handle, options, backend=backend)
[ "def", "_apply_groups_to_backend", "(", "cls", ",", "obj", ",", "options", ",", "backend", ",", "clone", ")", ":", "obj_handle", "=", "obj", "if", "options", "is", "None", ":", "if", "clone", ":", "obj_handle", "=", "obj", ".", "map", "(", "lambda", "x", ":", "x", ".", "clone", "(", "id", "=", "None", ")", ")", "else", ":", "obj", ".", "map", "(", "lambda", "x", ":", "setattr", "(", "x", ",", "'id'", ",", "None", ")", ")", "elif", "clone", ":", "obj_handle", "=", "obj", ".", "map", "(", "lambda", "x", ":", "x", ".", "clone", "(", "id", "=", "x", ".", "id", ")", ")", "return", "StoreOptions", ".", "set_options", "(", "obj_handle", ",", "options", ",", "backend", "=", "backend", ")" ]
python
Apply the groups to a single specified backend
false
1,912,586
def _begin_validation(
        session: UpdateSession,
        loop: asyncio.AbstractEventLoop,
        downloaded_update_path: str,
        robot_name: str) -> asyncio.futures.Future:
    """ Start the validation process. """
    session.set_stage(Stages.VALIDATING)
    validation_future \
        = asyncio.ensure_future(loop.run_in_executor(
            None, validate_update, downloaded_update_path,
            session.set_progress))

    def validation_done(fut):
        exc = fut.exception()
        if exc:
            session.set_error(getattr(exc, 'short', str(type(exc))),
                              str(exc))
        else:
            rootfs_file, bootfs_file = fut.result()
            loop.call_soon_threadsafe(_begin_write,
                                      session,
                                      loop,
                                      rootfs_file,
                                      robot_name)

    validation_future.add_done_callback(validation_done)
    return validation_future
[ "def", "_begin_validation", "(", "session", ":", "UpdateSession", ",", "loop", ":", "asyncio", ".", "AbstractEventLoop", ",", "downloaded_update_path", ":", "str", ",", "robot_name", ":", "str", ")", "->", "asyncio", ".", "futures", ".", "Future", ":", "session", ".", "set_stage", "(", "Stages", ".", "VALIDATING", ")", "validation_future", "=", "asyncio", ".", "ensure_future", "(", "loop", ".", "run_in_executor", "(", "None", ",", "validate_update", ",", "downloaded_update_path", ",", "session", ".", "set_progress", ")", ")", "def", "validation_done", "(", "fut", ")", ":", "exc", "=", "fut", ".", "exception", "(", ")", "if", "exc", ":", "session", ".", "set_error", "(", "getattr", "(", "exc", ",", "'short'", ",", "str", "(", "type", "(", "exc", ")", ")", ")", ",", "str", "(", "exc", ")", ")", "else", ":", "rootfs_file", ",", "bootfs_file", "=", "fut", ".", "result", "(", ")", "loop", ".", "call_soon_threadsafe", "(", "_begin_write", ",", "session", ",", "loop", ",", "rootfs_file", ",", "robot_name", ")", "validation_future", ".", "add_done_callback", "(", "validation_done", ")", "return", "validation_future" ]
python
Start the validation process.
false
1,783,280
def bold(s, *, escape=True):
    r"""Make a string appear bold in LaTeX formatting.

    bold() wraps a given string in the LaTeX command \textbf{}.

    Args
    ----
    s : str
        The string to be formatted.
    escape: bool
        If true the bold text will be escaped

    Returns
    -------
    NoEscape
        The formatted string.

    Examples
    --------
    >>> bold("hello")
    '\\textbf{hello}'
    >>> print(bold("hello"))
    \textbf{hello}
    """
    if escape:
        s = escape_latex(s)

    return NoEscape(r'\textbf{' + s + '}')
[ "def", "bold", "(", "s", ",", "*", ",", "escape", "=", "True", ")", ":", "if", "escape", ":", "s", "=", "escape_latex", "(", "s", ")", "return", "NoEscape", "(", "r'\\textbf{'", "+", "s", "+", "'}'", ")" ]
python
r"""Make a string appear bold in LaTeX formatting. bold() wraps a given string in the LaTeX command \textbf{}. Args ---- s : str The string to be formatted. escape: bool If true the bold text will be escaped Returns ------- NoEscape The formatted string. Examples -------- >>> bold("hello") '\\textbf{hello}' >>> print(bold("hello")) \textbf{hello}
false
2,418,226
def find_button(browser, value):
    """
    Find a button with the given value.

    Searches for the following different kinds of buttons:

        <input type="submit">
        <input type="reset">
        <input type="button">
        <input type="image">
        <button>
        <{a,p,div,span,...} role="button">

    Returns: an :class:`ElementSelector`
    """
    field_types = (
        'submit',
        'reset',
        'button-element',
        'button',
        'image',
        'button-role',
    )

    return reduce(
        operator.add,
        (find_field_with_value(browser, field_type, value)
         for field_type in field_types)
    )
[ "def", "find_button", "(", "browser", ",", "value", ")", ":", "field_types", "=", "(", "'submit'", ",", "'reset'", ",", "'button-element'", ",", "'button'", ",", "'image'", ",", "'button-role'", ",", ")", "return", "reduce", "(", "operator", ".", "add", ",", "(", "find_field_with_value", "(", "browser", ",", "field_type", ",", "value", ")", "for", "field_type", "in", "field_types", ")", ")" ]
python
Find a button with the given value.

Searches for the following different kinds of buttons:

    <input type="submit">
    <input type="reset">
    <input type="button">
    <input type="image">
    <button>
    <{a,p,div,span,...} role="button">

Returns: an :class:`ElementSelector`
false
2,267,403
def filter(self, criteria: Q, offset: int = 0, limit: int = 10, order_by: list = ()):
    """Read the repository and return results as per the filter"""
    if criteria.children:
        items = list(self._filter(criteria, self.conn['data'][self.schema_name]).values())
    else:
        items = list(self.conn['data'][self.schema_name].values())

    # Sort the filtered results based on the order_by clause
    for o_key in order_by:
        reverse = False
        if o_key.startswith('-'):
            reverse = True
            o_key = o_key[1:]
        items = sorted(items, key=itemgetter(o_key), reverse=reverse)

    result = ResultSet(
        offset=offset,
        limit=limit,
        total=len(items),
        items=items[offset: offset + limit])

    return result
[ "def", "filter", "(", "self", ",", "criteria", ":", "Q", ",", "offset", ":", "int", "=", "0", ",", "limit", ":", "int", "=", "10", ",", "order_by", ":", "list", "=", "(", ")", ")", ":", "if", "criteria", ".", "children", ":", "items", "=", "list", "(", "self", ".", "_filter", "(", "criteria", ",", "self", ".", "conn", "[", "'data'", "]", "[", "self", ".", "schema_name", "]", ")", ".", "values", "(", ")", ")", "else", ":", "items", "=", "list", "(", "self", ".", "conn", "[", "'data'", "]", "[", "self", ".", "schema_name", "]", ".", "values", "(", ")", ")", "for", "o_key", "in", "order_by", ":", "reverse", "=", "False", "if", "o_key", ".", "startswith", "(", "'-'", ")", ":", "reverse", "=", "True", "o_key", "=", "o_key", "[", "1", ":", "]", "items", "=", "sorted", "(", "items", ",", "key", "=", "itemgetter", "(", "o_key", ")", ",", "reverse", "=", "reverse", ")", "result", "=", "ResultSet", "(", "offset", "=", "offset", ",", "limit", "=", "limit", ",", "total", "=", "len", "(", "items", ")", ",", "items", "=", "items", "[", "offset", ":", "offset", "+", "limit", "]", ")", "return", "result" ]
python
Read the repository and return results as per the filter
false
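A minimal usage sketch of this repository method; the repository instance and the 'age' field are assumptions for illustration, not shown in the source:

criteria = Q()  # an empty criteria object takes the unfiltered branch above
result = repo.filter(criteria, offset=0, limit=5, order_by=['-age'])  # repo is hypothetical
print(result.total, result.items)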
1,964,760
def __set_variable_watch(self, tid, address, size, action):
    """
    Used by L{watch_variable} and L{stalk_variable}.

    @type  tid: int
    @param tid: Thread global ID.

    @type  address: int
    @param address: Memory address of variable to watch.

    @type  size: int
    @param size: Size of variable to watch. The only supported sizes are:
        byte (1), word (2), dword (4) and qword (8).

    @type  action: function
    @param action: (Optional) Action callback function.

        See L{define_hardware_breakpoint} for more details.

    @rtype:  L{HardwareBreakpoint}
    @return: Hardware breakpoint at the requested address.
    """
    # TODO
    # We should merge the breakpoints instead of overwriting them.
    # We'll have the same problem as watch_buffer and we'll need to change
    # the API again.
    if size == 1:
        sizeFlag = self.BP_WATCH_BYTE
    elif size == 2:
        sizeFlag = self.BP_WATCH_WORD
    elif size == 4:
        sizeFlag = self.BP_WATCH_DWORD
    elif size == 8:
        sizeFlag = self.BP_WATCH_QWORD
    else:
        raise ValueError("Bad size for variable watch: %r" % size)

    if self.has_hardware_breakpoint(tid, address):
        warnings.warn(
            "Hardware breakpoint in thread %d at address %s was overwritten!" \
            % (tid, HexDump.address(address,
                                    self.system.get_thread(tid).get_bits())),
            BreakpointWarning)

        bp = self.get_hardware_breakpoint(tid, address)
        if bp.get_trigger() != self.BP_BREAK_ON_ACCESS or \
                bp.get_watch() != sizeFlag:
            self.erase_hardware_breakpoint(tid, address)
            self.define_hardware_breakpoint(tid, address,
                                            self.BP_BREAK_ON_ACCESS,
                                            sizeFlag, True, action)
            bp = self.get_hardware_breakpoint(tid, address)
    else:
        self.define_hardware_breakpoint(tid, address,
                                        self.BP_BREAK_ON_ACCESS,
                                        sizeFlag, True, action)
        bp = self.get_hardware_breakpoint(tid, address)

    return bp
[ "def", "__set_variable_watch", "(", "self", ",", "tid", ",", "address", ",", "size", ",", "action", ")", ":", "if", "size", "==", "1", ":", "sizeFlag", "=", "self", ".", "BP_WATCH_BYTE", "elif", "size", "==", "2", ":", "sizeFlag", "=", "self", ".", "BP_WATCH_WORD", "elif", "size", "==", "4", ":", "sizeFlag", "=", "self", ".", "BP_WATCH_DWORD", "elif", "size", "==", "8", ":", "sizeFlag", "=", "self", ".", "BP_WATCH_QWORD", "else", ":", "raise", "ValueError", "(", "\"Bad size for variable watch: %r\"", "%", "size", ")", "if", "self", ".", "has_hardware_breakpoint", "(", "tid", ",", "address", ")", ":", "warnings", ".", "warn", "(", "\"Hardware breakpoint in thread %d at address %s was overwritten!\"", "%", "(", "tid", ",", "HexDump", ".", "address", "(", "address", ",", "self", ".", "system", ".", "get_thread", "(", "tid", ")", ".", "get_bits", "(", ")", ")", ")", ",", "BreakpointWarning", ")", "bp", "=", "self", ".", "get_hardware_breakpoint", "(", "tid", ",", "address", ")", "if", "bp", ".", "get_trigger", "(", ")", "!=", "self", ".", "BP_BREAK_ON_ACCESS", "or", "bp", ".", "get_watch", "(", ")", "!=", "sizeFlag", ":", "self", ".", "erase_hardware_breakpoint", "(", "tid", ",", "address", ")", "self", ".", "define_hardware_breakpoint", "(", "tid", ",", "address", ",", "self", ".", "BP_BREAK_ON_ACCESS", ",", "sizeFlag", ",", "True", ",", "action", ")", "bp", "=", "self", ".", "get_hardware_breakpoint", "(", "tid", ",", "address", ")", "else", ":", "self", ".", "define_hardware_breakpoint", "(", "tid", ",", "address", ",", "self", ".", "BP_BREAK_ON_ACCESS", ",", "sizeFlag", ",", "True", ",", "action", ")", "bp", "=", "self", ".", "get_hardware_breakpoint", "(", "tid", ",", "address", ")", "return", "bp" ]
python
Used by L{watch_variable} and L{stalk_variable}.

@type  tid: int
@param tid: Thread global ID.

@type  address: int
@param address: Memory address of variable to watch.

@type  size: int
@param size: Size of variable to watch. The only supported sizes are:
    byte (1), word (2), dword (4) and qword (8).

@type  action: function
@param action: (Optional) Action callback function.

    See L{define_hardware_breakpoint} for more details.

@rtype:  L{HardwareBreakpoint}
@return: Hardware breakpoint at the requested address.
false
1,604,340
def _process_server_headers(
    self, key: Union[str, bytes], headers: httputil.HTTPHeaders
) -> None:
    """Process the headers sent by the server to this client connection.

    'key' is the websocket handshake challenge/response key.
    """
    assert headers["Upgrade"].lower() == "websocket"
    assert headers["Connection"].lower() == "upgrade"
    accept = self.compute_accept_value(key)
    assert headers["Sec-Websocket-Accept"] == accept

    extensions = self._parse_extensions_header(headers)
    for ext in extensions:
        if ext[0] == "permessage-deflate" and self._compression_options is not None:
            self._create_compressors("client", ext[1])
        else:
            raise ValueError("unsupported extension %r", ext)

    self.selected_subprotocol = headers.get("Sec-WebSocket-Protocol", None)
[ "def", "_process_server_headers", "(", "self", ",", "key", ":", "Union", "[", "str", ",", "bytes", "]", ",", "headers", ":", "httputil", ".", "HTTPHeaders", ")", "->", "None", ":", "assert", "headers", "[", "\"Upgrade\"", "]", ".", "lower", "(", ")", "==", "\"websocket\"", "assert", "headers", "[", "\"Connection\"", "]", ".", "lower", "(", ")", "==", "\"upgrade\"", "accept", "=", "self", ".", "compute_accept_value", "(", "key", ")", "assert", "headers", "[", "\"Sec-Websocket-Accept\"", "]", "==", "accept", "extensions", "=", "self", ".", "_parse_extensions_header", "(", "headers", ")", "for", "ext", "in", "extensions", ":", "if", "ext", "[", "0", "]", "==", "\"permessage-deflate\"", "and", "self", ".", "_compression_options", "is", "not", "None", ":", "self", ".", "_create_compressors", "(", "\"client\"", ",", "ext", "[", "1", "]", ")", "else", ":", "raise", "ValueError", "(", "\"unsupported extension %r\"", ",", "ext", ")", "self", ".", "selected_subprotocol", "=", "headers", ".", "get", "(", "\"Sec-WebSocket-Protocol\"", ",", "None", ")" ]
python
Process the headers sent by the server to this client connection.

'key' is the websocket handshake challenge/response key.
false
2,030,246
def vm_ip(cls, vm_id):
    """Return the first usable ip address for this vm.

    Returns a (version, ip) tuple."""
    vm_info = cls.info(vm_id)

    for iface in vm_info['ifaces']:
        if iface['type'] == 'private':
            continue
        for ip in iface['ips']:
            return ip['version'], ip['ip']
[ "def", "vm_ip", "(", "cls", ",", "vm_id", ")", ":", "vm_info", "=", "cls", ".", "info", "(", "vm_id", ")", "for", "iface", "in", "vm_info", "[", "'ifaces'", "]", ":", "if", "iface", "[", "'type'", "]", "==", "'private'", ":", "continue", "for", "ip", "in", "iface", "[", "'ips'", "]", ":", "return", "ip", "[", "'version'", "]", ",", "ip", "[", "'ip'", "]" ]
python
Return the first usable ip address for this vm. Returns a (version, ip) tuple.
false
2,504,776
def handle_attached(sender, device):
    """
    Handles attached events from USBDevice.start_detection().
    """
    # Create the device from the specified device arguments.
    dev = create_device(device)
    __devices[dev.id] = dev
    print('attached', dev.id)
[ "def", "handle_attached", "(", "sender", ",", "device", ")", ":", "dev", "=", "create_device", "(", "device", ")", "__devices", "[", "dev", ".", "id", "]", "=", "dev", "print", "(", "'attached'", ",", "dev", ".", "id", ")" ]
python
Handles attached events from USBDevice.start_detection().
false
1,931,700
def get_full_order_book(self, symbol):
    """Get a list of all bids and asks aggregated by price for a symbol.

    This call is generally used by professional traders because it uses more
    server resources and traffic, and Kucoin has strict access frequency control.

    https://docs.kucoin.com/#get-full-order-book-aggregated

    :param symbol: Name of symbol e.g. KCS-BTC
    :type symbol: string

    .. code:: python

        orders = client.get_full_order_book('KCS-BTC')

    :returns: ApiResponse

    .. code:: python

        {
            "sequence": "3262786978",
            "bids": [
                ["6500.12", "0.45054140"],  # [price size]
                ["6500.11", "0.45054140"]
            ],
            "asks": [
                ["6500.16", "0.57753524"],
                ["6500.15", "0.57753524"]
            ]
        }

    :raises: KucoinResponseException, KucoinAPIException
    """
    data = {
        'symbol': symbol
    }

    return self._get('market/orderbook/level2', False, data=data)
[ "def", "get_full_order_book", "(", "self", ",", "symbol", ")", ":", "data", "=", "{", "'symbol'", ":", "symbol", "}", "return", "self", ".", "_get", "(", "'market/orderbook/level2'", ",", "False", ",", "data", "=", "data", ")" ]
python
Get a list of all bids and asks aggregated by price for a symbol.

This call is generally used by professional traders because it uses more
server resources and traffic, and Kucoin has strict access frequency control.

https://docs.kucoin.com/#get-full-order-book-aggregated

:param symbol: Name of symbol e.g. KCS-BTC
:type symbol: string

.. code:: python

    orders = client.get_full_order_book('KCS-BTC')

:returns: ApiResponse

.. code:: python

    {
        "sequence": "3262786978",
        "bids": [
            ["6500.12", "0.45054140"],  # [price size]
            ["6500.11", "0.45054140"]
        ],
        "asks": [
            ["6500.16", "0.57753524"],
            ["6500.15", "0.57753524"]
        ]
    }

:raises: KucoinResponseException, KucoinAPIException
false
2,387,024
def basic_cleanup(self):
    """
    Test:

    >>> from common_cache import Cache
    >>> import time
    >>> cache = Cache(expire=1, cleanup_func=basic_cleanup, regularly_cleanup=False)
    >>> cache.put('a', value=0)
    >>> cache.put('b', value=1)
    >>> cache.put('c', value=2)
    >>> cache.put('d', value=3, expire=3)
    >>> cache.size()
    4
    >>> time.sleep(1)
    >>> cache.put('e', 4)
    >>> cache.get('a') == None
    True
    >>> cache.get('d') == None
    False
    """
    next_expire = None
    keys_to_delete = []
    if self.expire is not None and self.expire > 0:
        # clean up dead cache items until the first live item is met,
        # then record its remaining survival time as the next expire time
        for k, item in self.cache_items.items():
            if item.is_dead():
                keys_to_delete.append(k)
            else:
                next_expire = item.remaining_survival_time()
                break

    # deleting directly during iteration would raise
    # 'OrderedDict mutated during iteration', so delete lazily here
    for k in keys_to_delete:
        self.cache_items.pop(k)

    # if the capacity limit is exceeded, evict entries using the eviction strategy
    while (len(self.cache_items) > self.capacity):
        evicted_keys = self.evict_func(cache_dict=self.cache_items,
                                       evict_number=self.evict_number)
        self.logger.debug('Evict operation is completed, count: %s, keys: %s'
                          % (len(evicted_keys), evicted_keys))

    return next_expire
[ "def", "basic_cleanup", "(", "self", ")", ":", "next_expire", "=", "None", "keys_to_delete", "=", "[", "]", "if", "self", ".", "expire", "is", "not", "None", "and", "self", ".", "expire", ">", "0", ":", "for", "k", ",", "item", "in", "self", ".", "cache_items", ".", "items", "(", ")", ":", "if", "item", ".", "is_dead", "(", ")", ":", "keys_to_delete", ".", "append", "(", "k", ")", "else", ":", "next_expire", "=", "item", ".", "remaining_survival_time", "(", ")", "break", "for", "k", "in", "keys_to_delete", ":", "self", ".", "cache_items", ".", "pop", "(", "k", ")", "while", "(", "len", "(", "self", ".", "cache_items", ")", ">", "self", ".", "capacity", ")", ":", "evicted_keys", "=", "self", ".", "evict_func", "(", "cache_dict", "=", "self", ".", "cache_items", ",", "evict_number", "=", "self", ".", "evict_number", ")", "self", ".", "logger", ".", "debug", "(", "'Evict operation is completed, count: %s, keys: %s'", "%", "(", "len", "(", "evicted_keys", ")", ",", "evicted_keys", ")", ")", "return", "next_expire" ]
python
Test:

>>> from common_cache import Cache
>>> import time
>>> cache = Cache(expire=1, cleanup_func=basic_cleanup, regularly_cleanup=False)
>>> cache.put('a', value=0)
>>> cache.put('b', value=1)
>>> cache.put('c', value=2)
>>> cache.put('d', value=3, expire=3)
>>> cache.size()
4
>>> time.sleep(1)
>>> cache.put('e', 4)
>>> cache.get('a') == None
True
>>> cache.get('d') == None
False
false
1,900,003
def text_filter(regex_base, value):
    """
    Helper method to regex replace images with captions in different markups
    """
    regex = regex_base % {
        're_cap': r'[a-zA-Z0-9\.\,:;/_ \(\)\-\!\?"]+',
        're_img': r'[a-zA-Z0-9\.:/_\-\% ]+'
    }
    images = re.findall(regex, value)

    for i in images:
        image = i[1]
        if image.startswith(settings.MEDIA_URL):
            image = image[len(settings.MEDIA_URL):]
        im = get_thumbnail(image, str(sorl_settings.THUMBNAIL_FILTER_WIDTH))
        value = value.replace(i[1], im.url)

    return value
[ "def", "text_filter", "(", "regex_base", ",", "value", ")", ":", "regex", "=", "regex_base", "%", "{", "'re_cap'", ":", "r'[a-zA-Z0-9\\.\\,:;/_ \\(\\)\\-\\!\\?\"]+'", ",", "'re_img'", ":", "r'[a-zA-Z0-9\\.:/_\\-\\% ]+'", "}", "images", "=", "re", ".", "findall", "(", "regex", ",", "value", ")", "for", "i", "in", "images", ":", "image", "=", "i", "[", "1", "]", "if", "image", ".", "startswith", "(", "settings", ".", "MEDIA_URL", ")", ":", "image", "=", "image", "[", "len", "(", "settings", ".", "MEDIA_URL", ")", ":", "]", "im", "=", "get_thumbnail", "(", "image", ",", "str", "(", "sorl_settings", ".", "THUMBNAIL_FILTER_WIDTH", ")", ")", "value", "=", "value", ".", "replace", "(", "i", "[", "1", "]", ",", "im", ".", "url", ")", "return", "value" ]
python
Helper method to regex replace images with captions in different markups
false
2,032,776
def stop(self):
    """ stop daemon """

    try:
        with self.pidfile:
            self.log.error("failed to stop, missing pid file or not running")
    except pidfile.PidFileError:
        # this isn't exposed in pidfile :o
        with open(self.pidfile.filename) as fobj:
            pid = int(fobj.readline().rstrip())

        if not pid:
            self.log.error("failed to read pid from file")

        self.log.info("killing %d", pid)
        os.kill(pid, signal.SIGTERM)
[ "def", "stop", "(", "self", ")", ":", "try", ":", "with", "self", ".", "pidfile", ":", "self", ".", "log", ".", "error", "(", "\"failed to stop, missing pid file or not running\"", ")", "except", "pidfile", ".", "PidFileError", ":", "with", "open", "(", "self", ".", "pidfile", ".", "filename", ")", "as", "fobj", ":", "pid", "=", "int", "(", "fobj", ".", "readline", "(", ")", ".", "rstrip", "(", ")", ")", "if", "not", "pid", ":", "self", ".", "log", ".", "error", "(", "\"failed to read pid from file\"", ")", "self", ".", "log", ".", "info", "(", "\"killing %d\"", ",", "pid", ")", "os", ".", "kill", "(", "pid", ",", "signal", ".", "SIGTERM", ")" ]
python
stop daemon
false
1,859,445
def __setitem__(
    self,
    key: Union[int, slice],
    value: Union[BitVec, int, List[Union[int, BitVec]]],
):
    """

    :param key:
    :param value:
    """
    if isinstance(key, slice):
        start, step, stop = key.start, key.step, key.stop

        if start is None:
            start = 0
        if stop is None:
            raise IndexError("Invalid Memory Slice")
        if step is None:
            step = 1

        assert type(value) == list
        for i in range(0, stop - start, step):
            self[start + i] = cast(List[Union[int, BitVec]], value)[i]

    else:
        if isinstance(value, int):
            assert 0 <= value <= 0xFF
        if isinstance(value, BitVec):
            assert value.size() == 8

        self._memory[key] = cast(Union[int, BitVec], value)
[ "def", "__setitem__", "(", "self", ",", "key", ":", "Union", "[", "int", ",", "slice", "]", ",", "value", ":", "Union", "[", "BitVec", ",", "int", ",", "List", "[", "Union", "[", "int", ",", "BitVec", "]", "]", "]", ",", ")", ":", "if", "isinstance", "(", "key", ",", "slice", ")", ":", "start", ",", "step", ",", "stop", "=", "key", ".", "start", ",", "key", ".", "step", ",", "key", ".", "stop", "if", "start", "is", "None", ":", "start", "=", "0", "if", "stop", "is", "None", ":", "raise", "IndexError", "(", "\"Invalid Memory Slice\"", ")", "if", "step", "is", "None", ":", "step", "=", "1", "assert", "type", "(", "value", ")", "==", "list", "for", "i", "in", "range", "(", "0", ",", "stop", "-", "start", ",", "step", ")", ":", "self", "[", "start", "+", "i", "]", "=", "cast", "(", "List", "[", "Union", "[", "int", ",", "BitVec", "]", "]", ",", "value", ")", "[", "i", "]", "else", ":", "if", "isinstance", "(", "value", ",", "int", ")", ":", "assert", "0", "<=", "value", "<=", "0xFF", "if", "isinstance", "(", "value", ",", "BitVec", ")", ":", "assert", "value", ".", "size", "(", ")", "==", "8", "self", ".", "_memory", "[", "key", "]", "=", "cast", "(", "Union", "[", "int", ",", "BitVec", "]", ",", "value", ")" ]
python
Write a byte or a contiguous range of bytes into memory.

:param key: integer offset of a single byte, or a slice (``stop`` is required)
:param value: a byte as int/BitVec, or a list of bytes when ``key`` is a slice
false
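A usage sketch for the byte-addressable memory above. The class name and constructor are assumptions; the int and slice semantics follow directly from the code.

memory = Memory()                  # hypothetical constructor
memory[0] = 0x60                   # single byte: must fit in 0..0xFF
memory[1:4] = [0x01, 0x02, 0x03]   # slice: stop is required, step defaults to 1
# memory[0] = 256                  # would fail the 0 <= value <= 0xFF assertion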
1,803,608
def validate(self):
    """Ensure that the Traverse block is valid."""
    validate_edge_direction(self.direction)
    validate_safe_string(self.edge_name)

    if not isinstance(self.within_optional_scope, bool):
        raise TypeError(u'Expected bool within_optional_scope, got: {} '
                        u'{}'.format(type(self.within_optional_scope).__name__,
                                     self.within_optional_scope))

    if not isinstance(self.depth, int):
        raise TypeError(u'Expected int depth, got: {} {}'.format(
            type(self.depth).__name__, self.depth))

    if not (self.depth >= 1):
        raise ValueError(u'depth ({}) >= 1 does not hold!'.format(self.depth))
[ "def", "validate", "(", "self", ")", ":", "validate_edge_direction", "(", "self", ".", "direction", ")", "validate_safe_string", "(", "self", ".", "edge_name", ")", "if", "not", "isinstance", "(", "self", ".", "within_optional_scope", ",", "bool", ")", ":", "raise", "TypeError", "(", "u'Expected bool within_optional_scope, got: {} '", "u'{}'", ".", "format", "(", "type", "(", "self", ".", "within_optional_scope", ")", ".", "__name__", ",", "self", ".", "within_optional_scope", ")", ")", "if", "not", "isinstance", "(", "self", ".", "depth", ",", "int", ")", ":", "raise", "TypeError", "(", "u'Expected int depth, got: {} {}'", ".", "format", "(", "type", "(", "self", ".", "depth", ")", ".", "__name__", ",", "self", ".", "depth", ")", ")", "if", "not", "(", "self", ".", "depth", ">=", "1", ")", ":", "raise", "ValueError", "(", "u'depth ({}) >= 1 does not hold!'", ".", "format", "(", "self", ".", "depth", ")", ")" ]
python
Ensure that the Traverse block is valid.
false
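A hedged example of the checks in action; the Traverse constructor signature and the attribute values are assumptions inferred from the attributes validate() reads.

# Hypothetical construction -- only the validated attributes are shown.
block = Traverse(direction='out', edge_name='Animal_ParentOf',
                 depth=1, within_optional_scope=False)
block.validate()   # passes

bad = Traverse(direction='out', edge_name='Animal_ParentOf',
               depth=0, within_optional_scope=False)
bad.validate()     # raises ValueError: depth (0) >= 1 does not hold!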
2,630,831
def __Script_Editor_Output_plainTextEdit_refresh_ui(self):
    """
    Updates the **Script_Editor_Output_plainTextEdit** Widget.
    """
    memory_handler_stack_depth = len(self.__engine.logging_session_handler_stream.stream)
    if memory_handler_stack_depth != self.__memory_handler_stack_depth:
        for line in self.__engine.logging_session_handler_stream.stream[
                self.__memory_handler_stack_depth:memory_handler_stack_depth]:
            self.Script_Editor_Output_plainTextEdit.moveCursor(QTextCursor.End)
            self.Script_Editor_Output_plainTextEdit.insertPlainText(line)
        self.__Script_Editor_Output_plainTextEdit_set_default_view_state()
        self.__memory_handler_stack_depth = memory_handler_stack_depth
[ "def", "__Script_Editor_Output_plainTextEdit_refresh_ui", "(", "self", ")", ":", "memory_handler_stack_depth", "=", "len", "(", "self", ".", "__engine", ".", "logging_session_handler_stream", ".", "stream", ")", "if", "memory_handler_stack_depth", "!=", "self", ".", "__memory_handler_stack_depth", ":", "for", "line", "in", "self", ".", "__engine", ".", "logging_session_handler_stream", ".", "stream", "[", "self", ".", "__memory_handler_stack_depth", ":", "memory_handler_stack_depth", "]", ":", "self", ".", "Script_Editor_Output_plainTextEdit", ".", "moveCursor", "(", "QTextCursor", ".", "End", ")", "self", ".", "Script_Editor_Output_plainTextEdit", ".", "insertPlainText", "(", "line", ")", "self", ".", "__Script_Editor_Output_plainTextEdit_set_default_view_state", "(", ")", "self", ".", "__memory_handler_stack_depth", "=", "memory_handler_stack_depth" ]
python
Updates the **Script_Editor_Output_plainTextEdit** Widget.
false
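The method implements an incremental-append pattern: remember how much of the log stream has already been rendered and insert only the delta. A framework-free sketch of the same idea; all names here are illustrative stand-ins for the Qt widget and logging stream.

stream = []         # stands in for logging_session_handler_stream.stream
rendered_depth = 0  # stands in for __memory_handler_stack_depth

def refresh(widget_lines):
    global rendered_depth
    depth = len(stream)
    if depth != rendered_depth:
        # Append only the lines produced since the last refresh.
        widget_lines.extend(stream[rendered_depth:depth])
        rendered_depth = depth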
2,644,716
def pid(self):
    """The pid of this process.

    :raises: Will raise a ``Process.UnboundProcess`` exception if the
        process is not bound to a context.
    """
    self._assert_bound()
    return PID(self._context.ip, self._context.port, self.name)
[ "def", "pid", "(", "self", ")", ":", "self", ".", "_assert_bound", "(", ")", "return", "PID", "(", "self", ".", "_context", ".", "ip", ",", "self", ".", "_context", ".", "port", ",", "self", ".", "name", ")" ]
python
The pid of this process.

:raises: Will raise a ``Process.UnboundProcess`` exception if the process
    is not bound to a context.
false
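Likely usage, heavily hedged: the Process constructor and the binding step are assumptions; only _assert_bound() and the PID fields come from the code above.

proc = Process(name='worker')   # hypothetical constructor taking a name
# proc.pid here would raise Process.UnboundProcess (not yet bound)
node.spawn(proc)                # assumed call that binds proc to a context
print(proc.pid)                 # PID built from the context ip, port, and name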
2,262,457
def add_link(self, name, desc, layout, node_1, node_2):
    """
    Add a link to a network. Links are what effectively define the
    network topology, by associating two already existing nodes.
    """
    existing_link = get_session().query(Link).filter(
        Link.name == name, Link.network_id == self.id).first()
    if existing_link is not None:
        raise HydraError("A link with name %s is already in network %s" % (name, self.id))

    l = Link()
    l.name = name
    l.description = desc
    l.layout = json.dumps(layout) if layout is not None else None
    l.node_a = node_1
    l.node_b = node_2

    get_session().add(l)
    self.links.append(l)

    return l
[ "def", "add_link", "(", "self", ",", "name", ",", "desc", ",", "layout", ",", "node_1", ",", "node_2", ")", ":", "existing_link", "=", "get_session", "(", ")", ".", "query", "(", "Link", ")", ".", "filter", "(", "Link", ".", "name", "==", "name", ",", "Link", ".", "network_id", "==", "self", ".", "id", ")", ".", "first", "(", ")", "if", "existing_link", "is", "not", "None", ":", "raise", "HydraError", "(", "\"A link with name %s is already in network %s\"", "%", "(", "name", ",", "self", ".", "id", ")", ")", "l", "=", "Link", "(", ")", "l", ".", "name", "=", "name", "l", ".", "description", "=", "desc", "l", ".", "layout", "=", "json", ".", "dumps", "(", "layout", ")", "if", "layout", "is", "not", "None", "else", "None", "l", ".", "node_a", "=", "node_1", "l", ".", "node_b", "=", "node_2", "get_session", "(", ")", ".", "add", "(", "l", ")", "self", ".", "links", ".", "append", "(", "l", ")", "return", "l" ]
python
Add a link to a network. Links are what effectively define the network topology, by associating two already existing nodes.
false
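Usage sketch, assuming an existing Network with two Node objects already attached; the names and the layout dict are illustrative, and the layout is serialized to JSON internally.

link = network.add_link(
    name='main-pipe',
    desc='Connects the intake to the treatment plant',
    layout={'color': 'blue'},
    node_1=intake_node,   # pre-existing Node objects (assumed)
    node_2=plant_node,
)
# A second call with name='main-pipe' on the same network raises HydraError.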
2,250,448
def select_station(
    candidates,
    coverage_range=None,
    min_fraction_coverage=0.9,
    distance_warnings=(50000, 200000),
    rank=1,
):
    """ Select a station from a list of candidates that meets given data
    quality criteria.

    Parameters
    ----------
    candidates : :any:`pandas.DataFrame`
        A dataframe of the form given by :any:`eeweather.rank_stations` or
        :any:`eeweather.combine_ranked_stations`, specifically having at
        least an index with ``usaf_id`` values and the column
        ``distance_meters``.

    Returns
    -------
    isd_station, warnings : tuple of (:any:`eeweather.ISDStation`, list of str)
        A qualified weather station. ``None`` if no station meets criteria.
    """
    def _test_station(station):
        if coverage_range is None:
            return True, []
        else:
            start_date, end_date = coverage_range
            try:
                tempC, warnings = eeweather.mockable.load_isd_hourly_temp_data(
                    station, start_date, end_date
                )
            except ISDDataNotAvailableError:
                return False, []  # reject

            # TODO(philngo): also need to incorporate within-day limits
            if len(tempC) > 0:
                fraction_coverage = tempC.notnull().sum() / float(len(tempC))
                return (fraction_coverage > min_fraction_coverage), warnings
            else:
                return False, []  # reject

    def _station_warnings(station, distance_meters):
        return [
            EEWeatherWarning(
                qualified_name="eeweather.exceeds_maximum_distance",
                description=(
                    "Distance from target to weather station is greater "
                    "than the specified km."
                ),
                data={
                    "distance_meters": distance_meters,
                    "max_distance_meters": d,
                    "rank": rank,
                },
            )
            for d in distance_warnings
            if distance_meters > d
        ]

    n_stations_passed = 0
    for usaf_id, row in candidates.iterrows():
        station = ISDStation(usaf_id)
        test_result, warnings = _test_station(station)
        if test_result:
            n_stations_passed += 1
            if n_stations_passed == rank:
                if not warnings:
                    warnings = []
                warnings.extend(_station_warnings(station, row.distance_meters))
                return station, warnings

    no_station_warning = EEWeatherWarning(
        qualified_name="eeweather.no_weather_station_selected",
        description=(
            "No weather station found with the specified rank and"
            " minimum fractional coverage."
        ),
        data={"rank": rank, "min_fraction_coverage": min_fraction_coverage},
    )
    return None, [no_station_warning]
[ "def", "select_station", "(", "candidates", ",", "coverage_range", "=", "None", ",", "min_fraction_coverage", "=", "0.9", ",", "distance_warnings", "=", "(", "50000", ",", "200000", ")", ",", "rank", "=", "1", ",", ")", ":", "def", "_test_station", "(", "station", ")", ":", "if", "coverage_range", "is", "None", ":", "return", "True", ",", "[", "]", "else", ":", "start_date", ",", "end_date", "=", "coverage_range", "try", ":", "tempC", ",", "warnings", "=", "eeweather", ".", "mockable", ".", "load_isd_hourly_temp_data", "(", "station", ",", "start_date", ",", "end_date", ")", "except", "ISDDataNotAvailableError", ":", "return", "False", ",", "[", "]", "if", "len", "(", "tempC", ")", ">", "0", ":", "fraction_coverage", "=", "tempC", ".", "notnull", "(", ")", ".", "sum", "(", ")", "/", "float", "(", "len", "(", "tempC", ")", ")", "return", "(", "fraction_coverage", ">", "min_fraction_coverage", ")", ",", "warnings", "else", ":", "return", "False", ",", "[", "]", "def", "_station_warnings", "(", "station", ",", "distance_meters", ")", ":", "return", "[", "EEWeatherWarning", "(", "qualified_name", "=", "\"eeweather.exceeds_maximum_distance\"", ",", "description", "=", "(", "\"Distance from target to weather station is greater\"", "\"than the specified km.\"", ")", ",", "data", "=", "{", "\"distance_meters\"", ":", "distance_meters", ",", "\"max_distance_meters\"", ":", "d", ",", "\"rank\"", ":", "rank", ",", "}", ",", ")", "for", "d", "in", "distance_warnings", "if", "distance_meters", ">", "d", "]", "n_stations_passed", "=", "0", "for", "usaf_id", ",", "row", "in", "candidates", ".", "iterrows", "(", ")", ":", "station", "=", "ISDStation", "(", "usaf_id", ")", "test_result", ",", "warnings", "=", "_test_station", "(", "station", ")", "if", "test_result", ":", "n_stations_passed", "+=", "1", "if", "n_stations_passed", "==", "rank", ":", "if", "not", "warnings", ":", "warnings", "=", "[", "]", "warnings", ".", "extend", "(", "_station_warnings", "(", "station", ",", "row", ".", "distance_meters", ")", ")", "return", "station", ",", "warnings", "no_station_warning", "=", "EEWeatherWarning", "(", "qualified_name", "=", "\"eeweather.no_weather_station_selected\"", ",", "description", "=", "(", "\"No weather station found with the specified rank and\"", "\" minimum fracitional coverage.\"", ")", ",", "data", "=", "{", "\"rank\"", ":", "rank", ",", "\"min_fraction_coverage\"", ":", "min_fraction_coverage", "}", ",", ")", "return", "None", ",", "[", "no_station_warning", "]" ]
python
Select a station from a list of candidates that meets given data quality
criteria.

Parameters
----------
candidates : :any:`pandas.DataFrame`
    A dataframe of the form given by :any:`eeweather.rank_stations` or
    :any:`eeweather.combine_ranked_stations`, specifically having at least
    an index with ``usaf_id`` values and the column ``distance_meters``.

Returns
-------
isd_station, warnings : tuple of (:any:`eeweather.ISDStation`, list of str)
    A qualified weather station. ``None`` if no station meets criteria.
false
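An end-to-end sketch using the companion function named in the docstring: rank_stations and ISDStation come from eeweather itself, while the coordinates, dates, and the (start, end) shape of coverage_range are assumptions based on how the code unpacks it.

import datetime
import pytz
import eeweather

ranked = eeweather.rank_stations(35.68, -119.14)   # example coordinates
station, warnings = select_station(
    ranked,
    coverage_range=(
        datetime.datetime(2017, 1, 1, tzinfo=pytz.UTC),
        datetime.datetime(2018, 1, 1, tzinfo=pytz.UTC),
    ),
    rank=1,
)
if station is None:
    print(warnings[0].qualified_name)  # eeweather.no_weather_station_selected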
2,110,845
def __init__(self, hosts=None):  # noqa: E501
    """MetricDetailsResponse - a model defined in Swagger"""  # noqa: E501
    self._hosts = None
    self.discriminator = None
    if hosts is not None:
        self.hosts = hosts
[ "def", "__init__", "(", "self", ",", "hosts", "=", "None", ")", ":", "self", ".", "_hosts", "=", "None", "self", ".", "discriminator", "=", "None", "if", "hosts", "is", "not", "None", ":", "self", ".", "hosts", "=", "hosts" ]
python
MetricDetailsResponse - a model defined in Swagger
false
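Typical use of a Swagger-generated model like this one; a `hosts` property with a setter is assumed from `self.hosts = hosts` in the constructor, following the generated-model convention.

resp = MetricDetailsResponse(hosts=['web-01.example.com', 'web-02.example.com'])
print(resp.hosts)                  # ['web-01.example.com', 'web-02.example.com']

empty = MetricDetailsResponse()    # hosts stays None until assigned
empty.hosts = ['db-01.example.com']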