index
int64 0
731k
| package
stringlengths 2
98
⌀ | name
stringlengths 1
76
| docstring
stringlengths 0
281k
⌀ | code
stringlengths 4
1.07M
⌀ | signature
stringlengths 2
42.8k
⌀ |
---|---|---|---|---|---|
729,506 | tinytuya.OutletDevice | OutletDevice |
Represents a Tuya based Smart Plug or Switch.
| class OutletDevice(Device):
"""
Represents a Tuya based Smart Plug or Switch.
"""
def set_dimmer(self, percentage=None, value=None, dps_id=3, nowait=False):
"""Set dimmer value
Args:
percentage (int): percentage dim 0-100
value (int): direct value for switch 0-255
dps_id (int): DPS index for dimmer value
nowait (bool): True to send without waiting for response.
"""
if percentage is not None:
level = int(percentage * 255.0 / 100.0)
else:
level = value
if level == 0:
self.turn_off(nowait=nowait)
elif level is not None:
if level < 25:
level = 25
if level > 255:
level = 255
self.turn_on(nowait=nowait)
self.set_value(dps_id, level, nowait=nowait)
| (dev_id, address=None, local_key='', dev_type='default', connection_timeout=5, version=3.1, persist=False, cid=None, node_id=None, parent=None, connection_retry_limit=5, connection_retry_delay=5, port=6668) |
729,534 | tinytuya.OutletDevice | set_dimmer | Set dimmer value
Args:
percentage (int): percentage dim 0-100
value (int): direct value for switch 0-255
dps_id (int): DPS index for dimmer value
nowait (bool): True to send without waiting for response.
| def set_dimmer(self, percentage=None, value=None, dps_id=3, nowait=False):
"""Set dimmer value
Args:
percentage (int): percentage dim 0-100
value (int): direct value for switch 0-255
dps_id (int): DPS index for dimmer value
nowait (bool): True to send without waiting for response.
"""
if percentage is not None:
level = int(percentage * 255.0 / 100.0)
else:
level = value
if level == 0:
self.turn_off(nowait=nowait)
elif level is not None:
if level < 25:
level = 25
if level > 255:
level = 255
self.turn_on(nowait=nowait)
self.set_value(dps_id, level, nowait=nowait)
| (self, percentage=None, value=None, dps_id=3, nowait=False) |
729,553 | tinytuya.core | TuyaHeader | TuyaHeader(prefix, seqno, cmd, length, total_length) | from tinytuya.core import TuyaHeader
| (prefix, seqno, cmd, length, total_length) |
729,555 | namedtuple_TuyaHeader | __new__ | Create new instance of TuyaHeader(prefix, seqno, cmd, length, total_length) | from builtins import function
| (_cls, prefix, seqno, cmd, length, total_length) |
729,558 | collections | _replace | Return a new TuyaHeader object replacing specified fields with new values | def namedtuple(typename, field_names, *, rename=False, defaults=None, module=None):
"""Returns a new subclass of tuple with named fields.
>>> Point = namedtuple('Point', ['x', 'y'])
>>> Point.__doc__ # docstring for the new class
'Point(x, y)'
>>> p = Point(11, y=22) # instantiate with positional args or keywords
>>> p[0] + p[1] # indexable like a plain tuple
33
>>> x, y = p # unpack like a regular tuple
>>> x, y
(11, 22)
>>> p.x + p.y # fields also accessible by name
33
>>> d = p._asdict() # convert to a dictionary
>>> d['x']
11
>>> Point(**d) # convert from a dictionary
Point(x=11, y=22)
>>> p._replace(x=100) # _replace() is like str.replace() but targets named fields
Point(x=100, y=22)
"""
# Validate the field names. At the user's option, either generate an error
# message or automatically replace the field name with a valid name.
if isinstance(field_names, str):
field_names = field_names.replace(',', ' ').split()
field_names = list(map(str, field_names))
typename = _sys.intern(str(typename))
if rename:
seen = set()
for index, name in enumerate(field_names):
if (not name.isidentifier()
or _iskeyword(name)
or name.startswith('_')
or name in seen):
field_names[index] = f'_{index}'
seen.add(name)
for name in [typename] + field_names:
if type(name) is not str:
raise TypeError('Type names and field names must be strings')
if not name.isidentifier():
raise ValueError('Type names and field names must be valid '
f'identifiers: {name!r}')
if _iskeyword(name):
raise ValueError('Type names and field names cannot be a '
f'keyword: {name!r}')
seen = set()
for name in field_names:
if name.startswith('_') and not rename:
raise ValueError('Field names cannot start with an underscore: '
f'{name!r}')
if name in seen:
raise ValueError(f'Encountered duplicate field name: {name!r}')
seen.add(name)
field_defaults = {}
if defaults is not None:
defaults = tuple(defaults)
if len(defaults) > len(field_names):
raise TypeError('Got more default values than field names')
field_defaults = dict(reversed(list(zip(reversed(field_names),
reversed(defaults)))))
# Variables used in the methods and docstrings
field_names = tuple(map(_sys.intern, field_names))
num_fields = len(field_names)
arg_list = ', '.join(field_names)
if num_fields == 1:
arg_list += ','
repr_fmt = '(' + ', '.join(f'{name}=%r' for name in field_names) + ')'
tuple_new = tuple.__new__
_dict, _tuple, _len, _map, _zip = dict, tuple, len, map, zip
# Create all the named tuple methods to be added to the class namespace
namespace = {
'_tuple_new': tuple_new,
'__builtins__': {},
'__name__': f'namedtuple_{typename}',
}
code = f'lambda _cls, {arg_list}: _tuple_new(_cls, ({arg_list}))'
__new__ = eval(code, namespace)
__new__.__name__ = '__new__'
__new__.__doc__ = f'Create new instance of {typename}({arg_list})'
if defaults is not None:
__new__.__defaults__ = defaults
@classmethod
def _make(cls, iterable):
result = tuple_new(cls, iterable)
if _len(result) != num_fields:
raise TypeError(f'Expected {num_fields} arguments, got {len(result)}')
return result
_make.__func__.__doc__ = (f'Make a new {typename} object from a sequence '
'or iterable')
def _replace(self, /, **kwds):
result = self._make(_map(kwds.pop, field_names, self))
if kwds:
raise ValueError(f'Got unexpected field names: {list(kwds)!r}')
return result
_replace.__doc__ = (f'Return a new {typename} object replacing specified '
'fields with new values')
def __repr__(self):
'Return a nicely formatted representation string'
return self.__class__.__name__ + repr_fmt % self
def _asdict(self):
'Return a new dict which maps field names to their values.'
return _dict(_zip(self._fields, self))
def __getnewargs__(self):
'Return self as a plain tuple. Used by copy and pickle.'
return _tuple(self)
# Modify function metadata to help with introspection and debugging
for method in (
__new__,
_make.__func__,
_replace,
__repr__,
_asdict,
__getnewargs__,
):
method.__qualname__ = f'{typename}.{method.__name__}'
# Build-up the class namespace dictionary
# and use type() to build the result class
class_namespace = {
'__doc__': f'{typename}({arg_list})',
'__slots__': (),
'_fields': field_names,
'_field_defaults': field_defaults,
'__new__': __new__,
'_make': _make,
'_replace': _replace,
'__repr__': __repr__,
'_asdict': _asdict,
'__getnewargs__': __getnewargs__,
'__match_args__': field_names,
}
for index, name in enumerate(field_names):
doc = _sys.intern(f'Alias for field number {index}')
class_namespace[name] = _tuplegetter(index, doc)
result = type(typename, (tuple,), class_namespace)
# For pickling to work, the __module__ variable needs to be set to the frame
# where the named tuple is created. Bypass this step in environments where
# sys._getframe is not defined (Jython for example) or sys._getframe is not
# defined for arguments greater than 0 (IronPython), or where the user has
# specified a particular module.
if module is None:
try:
module = _sys._getframe(1).f_globals.get('__name__', '__main__')
except (AttributeError, ValueError):
pass
if module is not None:
result.__module__ = module
return result
| (self, /, **kwds) |
729,559 | tinytuya.core | TuyaMessage | TuyaMessage(seqno, cmd, retcode, payload, crc, crc_good, prefix, iv) | from tinytuya.core import TuyaMessage
| (seqno, cmd, retcode, payload, crc, crc_good=True, prefix=21930, iv=None) |
729,561 | namedtuple_TuyaMessage | __new__ | Create new instance of TuyaMessage(seqno, cmd, retcode, payload, crc, crc_good, prefix, iv) | from builtins import function
| (_cls, seqno, cmd, retcode, payload, crc, crc_good=True, prefix=21930, iv=None) |
729,564 | collections | _replace | Return a new TuyaMessage object replacing specified fields with new values | def namedtuple(typename, field_names, *, rename=False, defaults=None, module=None):
"""Returns a new subclass of tuple with named fields.
>>> Point = namedtuple('Point', ['x', 'y'])
>>> Point.__doc__ # docstring for the new class
'Point(x, y)'
>>> p = Point(11, y=22) # instantiate with positional args or keywords
>>> p[0] + p[1] # indexable like a plain tuple
33
>>> x, y = p # unpack like a regular tuple
>>> x, y
(11, 22)
>>> p.x + p.y # fields also accessible by name
33
>>> d = p._asdict() # convert to a dictionary
>>> d['x']
11
>>> Point(**d) # convert from a dictionary
Point(x=11, y=22)
>>> p._replace(x=100) # _replace() is like str.replace() but targets named fields
Point(x=100, y=22)
"""
# Validate the field names. At the user's option, either generate an error
# message or automatically replace the field name with a valid name.
if isinstance(field_names, str):
field_names = field_names.replace(',', ' ').split()
field_names = list(map(str, field_names))
typename = _sys.intern(str(typename))
if rename:
seen = set()
for index, name in enumerate(field_names):
if (not name.isidentifier()
or _iskeyword(name)
or name.startswith('_')
or name in seen):
field_names[index] = f'_{index}'
seen.add(name)
for name in [typename] + field_names:
if type(name) is not str:
raise TypeError('Type names and field names must be strings')
if not name.isidentifier():
raise ValueError('Type names and field names must be valid '
f'identifiers: {name!r}')
if _iskeyword(name):
raise ValueError('Type names and field names cannot be a '
f'keyword: {name!r}')
seen = set()
for name in field_names:
if name.startswith('_') and not rename:
raise ValueError('Field names cannot start with an underscore: '
f'{name!r}')
if name in seen:
raise ValueError(f'Encountered duplicate field name: {name!r}')
seen.add(name)
field_defaults = {}
if defaults is not None:
defaults = tuple(defaults)
if len(defaults) > len(field_names):
raise TypeError('Got more default values than field names')
field_defaults = dict(reversed(list(zip(reversed(field_names),
reversed(defaults)))))
# Variables used in the methods and docstrings
field_names = tuple(map(_sys.intern, field_names))
num_fields = len(field_names)
arg_list = ', '.join(field_names)
if num_fields == 1:
arg_list += ','
repr_fmt = '(' + ', '.join(f'{name}=%r' for name in field_names) + ')'
tuple_new = tuple.__new__
_dict, _tuple, _len, _map, _zip = dict, tuple, len, map, zip
# Create all the named tuple methods to be added to the class namespace
namespace = {
'_tuple_new': tuple_new,
'__builtins__': {},
'__name__': f'namedtuple_{typename}',
}
code = f'lambda _cls, {arg_list}: _tuple_new(_cls, ({arg_list}))'
__new__ = eval(code, namespace)
__new__.__name__ = '__new__'
__new__.__doc__ = f'Create new instance of {typename}({arg_list})'
if defaults is not None:
__new__.__defaults__ = defaults
@classmethod
def _make(cls, iterable):
result = tuple_new(cls, iterable)
if _len(result) != num_fields:
raise TypeError(f'Expected {num_fields} arguments, got {len(result)}')
return result
_make.__func__.__doc__ = (f'Make a new {typename} object from a sequence '
'or iterable')
def _replace(self, /, **kwds):
result = self._make(_map(kwds.pop, field_names, self))
if kwds:
raise ValueError(f'Got unexpected field names: {list(kwds)!r}')
return result
_replace.__doc__ = (f'Return a new {typename} object replacing specified '
'fields with new values')
def __repr__(self):
'Return a nicely formatted representation string'
return self.__class__.__name__ + repr_fmt % self
def _asdict(self):
'Return a new dict which maps field names to their values.'
return _dict(_zip(self._fields, self))
def __getnewargs__(self):
'Return self as a plain tuple. Used by copy and pickle.'
return _tuple(self)
# Modify function metadata to help with introspection and debugging
for method in (
__new__,
_make.__func__,
_replace,
__repr__,
_asdict,
__getnewargs__,
):
method.__qualname__ = f'{typename}.{method.__name__}'
# Build-up the class namespace dictionary
# and use type() to build the result class
class_namespace = {
'__doc__': f'{typename}({arg_list})',
'__slots__': (),
'_fields': field_names,
'_field_defaults': field_defaults,
'__new__': __new__,
'_make': _make,
'_replace': _replace,
'__repr__': __repr__,
'_asdict': _asdict,
'__getnewargs__': __getnewargs__,
'__match_args__': field_names,
}
for index, name in enumerate(field_names):
doc = _sys.intern(f'Alias for field number {index}')
class_namespace[name] = _tuplegetter(index, doc)
result = type(typename, (tuple,), class_namespace)
# For pickling to work, the __module__ variable needs to be set to the frame
# where the named tuple is created. Bypass this step in environments where
# sys._getframe is not defined (Jython for example) or sys._getframe is not
# defined for arguments greater than 0 (IronPython), or where the user has
# specified a particular module.
if module is None:
try:
module = _sys._getframe(1).f_globals.get('__name__', '__main__')
except (AttributeError, ValueError):
pass
if module is not None:
result.__module__ = module
return result
| (self, /, **kwds) |
729,565 | tinytuya.core | XenonDevice | null | class XenonDevice(object):
def __init__(
self, dev_id, address=None, local_key="", dev_type="default", connection_timeout=5, version=3.1, persist=False, cid=None, node_id=None, parent=None, connection_retry_limit=5, connection_retry_delay=5, port=TCPPORT # pylint: disable=W0621
):
"""
Represents a Tuya device.
Args:
dev_id (str): The device id.
address (str): The network address.
local_key (str, optional): The encryption key. Defaults to None.
cid (str: Optional sub device id. Default to None.
node_id (str: alias for cid)
parent (object: gateway device this device is a child of)
Attributes:
port (int): The port to connect to.
"""
self.id = dev_id
self.cid = cid if cid else node_id
self.address = address
self.auto_ip = False
self.dev_type = dev_type
self.dev_type_auto = self.dev_type == 'default'
self.last_dev_type = ''
self.connection_timeout = connection_timeout
self.retry = True
self.disabledetect = False # if True do not detect device22
self.port = port # default - do not expect caller to pass in
self.socket = None
self.socketPersistent = False if not persist else True # pylint: disable=R1719
self.socketNODELAY = True
self.socketRetryLimit = connection_retry_limit
self.socketRetryDelay = connection_retry_delay
self.version = 0
self.dps_to_request = {}
self.seqno = 1
self.sendWait = 0.01
self.dps_cache = {}
self.parent = parent
self.children = {}
self.received_wrong_cid_queue = []
self.local_nonce = b'0123456789abcdef' # not-so-random random key
self.remote_nonce = b''
self.payload_dict = None
if not local_key:
local_key = ""
# sub-devices do not need a local key, so only look it up if we are not a sub-device
if not parent:
devinfo = device_info( dev_id )
if devinfo and 'key' in devinfo and devinfo['key']:
local_key = devinfo['key']
self.local_key = local_key.encode("latin1")
self.real_local_key = self.local_key
self.cipher = None
if self.parent:
# if we are a child then we should have a cid/node_id but none were given - try and find it the same way we look up local keys
if not self.cid:
devinfo = device_info( dev_id )
if devinfo and 'node_id' in devinfo and devinfo['node_id']:
self.cid = devinfo['node_id']
if not self.cid:
# not fatal as the user could have set the device_id to the cid
# in that case dev_type should be 'zigbee' to set the proper fields in requests
log.debug( 'Child device but no cid/node_id given!' )
XenonDevice.set_version(self, self.parent.version)
self.parent._register_child(self)
elif (not address) or address == "Auto" or address == "0.0.0.0":
# try to determine IP address automatically
self.auto_ip = True
bcast_data = find_device(dev_id)
if bcast_data['ip'] is None:
log.debug("Unable to find device on network (specify IP address)")
raise Exception("Unable to find device on network (specify IP address)")
self.address = bcast_data['ip']
self.set_version(float(bcast_data['version']))
time.sleep(0.1)
elif version:
self.set_version(float(version))
else:
# make sure we call our set_version() and not a subclass since some of
# them (such as BulbDevice) make connections when called
XenonDevice.set_version(self, 3.1)
def __del__(self):
# In case we have a lingering socket connection, close it
try:
if self.socket:
# self.socket.shutdown(socket.SHUT_RDWR)
self.socket.close()
self.socket = None
except:
pass
def __repr__(self):
# FIXME can do better than this
if self.parent:
parent = self.parent.id
else:
parent = None
return ("%s( %r, address=%r, local_key=%r, dev_type=%r, connection_timeout=%r, version=%r, persist=%r, cid=%r, parent=%r, children=%r )" %
(self.__class__.__name__, self.id, self.address, self.real_local_key.decode(), self.dev_type, self.connection_timeout, self.version, self.socketPersistent, self.cid, parent, self.children))
def _get_socket(self, renew):
if renew and self.socket is not None:
# self.socket.shutdown(socket.SHUT_RDWR)
self.socket.close()
self.socket = None
if self.socket is None:
# Set up Socket
retries = 0
err = ERR_OFFLINE
while retries < self.socketRetryLimit:
if self.auto_ip and not self.address:
bcast_data = find_device(self.id)
if bcast_data['ip'] is None:
log.debug("Unable to find device on network (specify IP address)")
return ERR_OFFLINE
self.address = bcast_data['ip']
new_version = float(bcast_data['version'])
if new_version != self.version:
# this may trigger a network call which will call _get_socket() again
#self.set_version(new_version)
self.version = new_version
self.version_str = "v" + str(version)
self.version_bytes = str(version).encode('latin1')
self.version_header = self.version_bytes + PROTOCOL_3x_HEADER
self.payload_dict = None
if not self.address:
log.debug("No address for device!")
return ERR_OFFLINE
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if self.socketNODELAY:
self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
self.socket.settimeout(self.connection_timeout)
try:
retries = retries + 1
self.socket.connect((self.address, self.port))
if self.version >= 3.4:
# restart session key negotiation
if self._negotiate_session_key():
return True
else:
if self.socket:
self.socket.close()
self.socket = None
return ERR_KEY_OR_VER
else:
return True
except socket.timeout as e:
# unable to open socket
log.debug(
"socket unable to connect (timeout) - retry %d/%d",
retries, self.socketRetryLimit
)
err = ERR_OFFLINE
except Exception as e:
# unable to open socket
log.debug(
"socket unable to connect (exception) - retry %d/%d",
retries, self.socketRetryLimit, exc_info=True
)
err = ERR_CONNECT
if self.socket:
self.socket.close()
self.socket = None
if retries < self.socketRetryLimit:
time.sleep(self.socketRetryDelay)
if self.auto_ip:
self.address = None
# unable to get connection
return err
# existing socket active
return True
def _check_socket_close(self, force=False):
if (force or not self.socketPersistent) and self.socket:
self.socket.close()
self.socket = None
def _recv_all(self, length):
tries = 2
data = b''
while length > 0:
newdata = self.socket.recv(length)
if not newdata or len(newdata) == 0:
log.debug("_recv_all(): no data? %r", newdata)
# connection closed?
tries -= 1
if tries == 0:
raise DecodeError('No data received - connection closed?')
if self.sendWait is not None:
time.sleep(self.sendWait)
continue
data += newdata
length -= len(newdata)
tries = 2
return data
def _receive(self):
# make sure to use the parent's self.seqno and session key
if self.parent:
return self.parent._receive()
# message consists of header + retcode + [data] + crc (4 or 32) + footer
min_len_55AA = struct.calcsize(MESSAGE_HEADER_FMT_55AA) + 4 + 4 + len(SUFFIX_BIN)
# message consists of header + iv + retcode + [data] + crc (16) + footer
min_len_6699 = struct.calcsize(MESSAGE_HEADER_FMT_6699) + 12 + 4 + 16 + len(SUFFIX_BIN)
min_len = min_len_55AA if min_len_55AA < min_len_6699 else min_len_6699
prefix_len = len( PREFIX_55AA_BIN )
data = self._recv_all( min_len )
# search for the prefix. if not found, delete everything except
# the last (prefix_len - 1) bytes and recv more to replace it
prefix_offset_55AA = data.find( PREFIX_55AA_BIN )
prefix_offset_6699 = data.find( PREFIX_6699_BIN )
while prefix_offset_55AA != 0 and prefix_offset_6699 != 0:
log.debug('Message prefix not at the beginning of the received data!')
log.debug('Offset 55AA: %d, 6699: %d, Received data: %r', prefix_offset_55AA, prefix_offset_6699, data)
if prefix_offset_55AA < 0 and prefix_offset_6699 < 0:
data = data[1-prefix_len:]
else:
prefix_offset = prefix_offset_6699 if prefix_offset_55AA < 0 else prefix_offset_55AA
data = data[prefix_offset:]
data += self._recv_all( min_len - len(data) )
prefix_offset_55AA = data.find( PREFIX_55AA_BIN )
prefix_offset_6699 = data.find( PREFIX_6699_BIN )
header = parse_header(data)
remaining = header.total_length - len(data)
if remaining > 0:
data += self._recv_all( remaining )
log.debug("received data=%r", binascii.hexlify(data))
hmac_key = self.local_key if self.version >= 3.4 else None
no_retcode = False #None if self.version >= 3.5 else False
return unpack_message(data, header=header, hmac_key=hmac_key, no_retcode=no_retcode)
# similar to _send_receive() but never retries sending and does not decode the response
def _send_receive_quick(self, payload, recv_retries, from_child=None): # pylint: disable=W0613
if self.parent:
return self.parent._send_receive_quick(payload, recv_retries, from_child=self)
log.debug("sending payload quick")
if self._get_socket(False) is not True:
return None
enc_payload = self._encode_message(payload) if type(payload) == MessagePayload else payload
try:
self.socket.sendall(enc_payload)
except:
self._check_socket_close(True)
return None
if not recv_retries:
return True
while recv_retries:
try:
msg = self._receive()
except:
msg = None
if msg and len(msg.payload) != 0:
return msg
recv_retries -= 1
if recv_retries == 0:
log.debug("received null payload (%r) but out of recv retries, giving up", msg)
else:
log.debug("received null payload (%r), fetch new one - %s retries remaining", msg, recv_retries)
return False
def _send_receive(self, payload, minresponse=28, getresponse=True, decode_response=True, from_child=None):
"""
Send single buffer `payload` and receive a single buffer.
Args:
payload(bytes): Data to send. Set to 'None' to receive only.
minresponse(int): Minimum response size expected (default=28 bytes)
getresponse(bool): If True, wait for and return response.
"""
if self.parent:
return self.parent._send_receive(payload, minresponse, getresponse, decode_response, from_child=self)
if (not payload) and getresponse and self.received_wrong_cid_queue:
if (not self.children) or (not from_child):
r = self.received_wrong_cid_queue[0]
self.received_wrong_cid_queue = self.received_wrong_cid_queue[1:]
return r
found_rq = False
for rq in self.received_wrong_cid_queue:
if rq[0] == from_child:
found_rq = rq
break
if found_rq:
self.received_wrong_cid_queue.remove(found_rq)
return found_rq[1]
success = False
partial_success = False
retries = 0
recv_retries = 0
#max_recv_retries = 0 if not self.retry else 2 if self.socketRetryLimit > 2 else self.socketRetryLimit
max_recv_retries = 0 if not self.retry else self.socketRetryLimit
dev_type = self.dev_type
do_send = True
msg = None
while not success:
# open up socket if device is available
sock_result = self._get_socket(False)
if sock_result is not True:
# unable to get a socket - device likely offline
self._check_socket_close(True)
return error_json( sock_result if sock_result else ERR_OFFLINE )
# send request to device
try:
if payload is not None and do_send:
log.debug("sending payload")
enc_payload = self._encode_message(payload) if type(payload) == MessagePayload else payload
self.socket.sendall(enc_payload)
if self.sendWait is not None:
time.sleep(self.sendWait) # give device time to respond
if getresponse:
do_send = False
rmsg = self._receive()
# device may send null ack (28 byte) response before a full response
# consider it an ACK and do not retry the send even if we do not get a full response
if rmsg:
payload = None
partial_success = True
msg = rmsg
if (not msg or len(msg.payload) == 0) and recv_retries <= max_recv_retries:
log.debug("received null payload (%r), fetch new one - retry %s / %s", msg, recv_retries, max_recv_retries)
recv_retries += 1
if recv_retries > max_recv_retries:
success = True
else:
success = True
log.debug("received message=%r", msg)
else:
# legacy/default mode avoids persisting socket across commands
self._check_socket_close()
return None
except (KeyboardInterrupt, SystemExit) as err:
log.debug("Keyboard Interrupt - Exiting")
raise
except socket.timeout as err:
# a socket timeout occurred
if payload is None:
# Receive only mode - return None
self._check_socket_close()
return None
do_send = True
retries += 1
# toss old socket and get new one
self._check_socket_close(True)
log.debug(
"Timeout in _send_receive() - retry %s / %s",
retries, self.socketRetryLimit
)
# if we exceed the limit of retries then lets get out of here
if retries > self.socketRetryLimit:
log.debug(
"Exceeded tinytuya retry limit (%s)",
self.socketRetryLimit
)
# timeout reached - return error
return error_json(ERR_KEY_OR_VER)
# wait a bit before retrying
time.sleep(0.1)
except DecodeError as err:
log.debug("Error decoding received data - read retry %s/%s", recv_retries, max_recv_retries, exc_info=True)
recv_retries += 1
if recv_retries > max_recv_retries:
# we recieved at least 1 valid message with a null payload, so the send was successful
if partial_success:
self._check_socket_close()
return None
# no valid messages received
self._check_socket_close(True)
return error_json(ERR_PAYLOAD)
except Exception as err:
# likely network or connection error
do_send = True
retries += 1
# toss old socket and get new one
self._check_socket_close(True)
log.debug(
"Network connection error in _send_receive() - retry %s/%s",
retries, self.socketRetryLimit, exc_info=True
)
# if we exceed the limit of retries then lets get out of here
if retries > self.socketRetryLimit:
log.debug(
"Exceeded tinytuya retry limit (%s)",
self.socketRetryLimit
)
log.debug("Unable to connect to device ")
# timeout reached - return error
return error_json(ERR_CONNECT)
# wait a bit before retrying
time.sleep(0.1)
# except
# while
# could be None or have a null payload
if not decode_response:
# legacy/default mode avoids persisting socket across commands
self._check_socket_close()
return msg
return self._process_message( msg, dev_type, from_child, minresponse, decode_response )
def _process_message( self, msg, dev_type=None, from_child=None, minresponse=28, decode_response=True ):
# null packet, nothing to decode
if not msg or len(msg.payload) == 0:
log.debug("raw unpacked message = %r", msg)
# legacy/default mode avoids persisting socket across commands
self._check_socket_close()
return None
# option - decode Message with hard coded offsets
# result = self._decode_payload(data[20:-8])
# Unpack Message into TuyaMessage format
# and return payload decrypted
try:
# Data available: seqno cmd retcode payload crc
log.debug("raw unpacked message = %r", msg)
result = self._decode_payload(msg.payload)
if result is None:
log.debug("_decode_payload() failed!")
except:
log.debug("error unpacking or decoding tuya JSON payload", exc_info=True)
result = error_json(ERR_PAYLOAD)
# Did we detect a device22 device? Return ERR_DEVTYPE error.
if dev_type and dev_type != self.dev_type:
log.debug(
"Device22 detected and updated (%s -> %s) - Update payload and try again",
dev_type,
self.dev_type,
)
result = error_json(ERR_DEVTYPE)
found_child = False
if self.children:
found_cid = None
if result and 'cid' in result:
found_cid = result['cid']
elif result and 'data' in result and type(result['data']) == dict and 'cid' in result['data']:
found_cid = result['data']['cid']
if found_cid:
for c in self.children:
if self.children[c].cid == found_cid:
result['device'] = found_child = self.children[c]
break
if from_child and from_child is not True and from_child != found_child:
# async update from different CID, try again
log.debug( 'Recieved async update for wrong CID %s while looking for CID %s, trying again', found_cid, from_child.cid )
if self.socketPersistent:
# if persistent, save response until the next receive() call
# otherwise, trash it
if found_child:
result = found_child._process_response(result)
else:
result = self._process_response(result)
self.received_wrong_cid_queue.append( (found_child, result) )
# events should not be coming in so fast that we will never timeout a read, so don't worry about loops
return self._send_receive( None, minresponse, True, decode_response, from_child=from_child)
# legacy/default mode avoids persisting socket across commands
self._check_socket_close()
if found_child:
return found_child._process_response(result)
return self._process_response(result)
def _decode_payload(self, payload):
log.debug("decode payload=%r", payload)
cipher = AESCipher(self.local_key)
if self.version == 3.4:
# 3.4 devices encrypt the version header in addition to the payload
try:
log.debug("decrypting=%r", payload)
payload = cipher.decrypt(payload, False, decode_text=False)
except:
log.debug("incomplete payload=%r (len:%d)", payload, len(payload), exc_info=True)
return error_json(ERR_PAYLOAD)
log.debug("decrypted 3.x payload=%r", payload)
log.debug("payload type = %s", type(payload))
if payload.startswith(PROTOCOL_VERSION_BYTES_31):
# Received an encrypted payload
# Remove version header
payload = payload[len(PROTOCOL_VERSION_BYTES_31) :]
# Decrypt payload
# Remove 16-bytes of MD5 hexdigest of payload
payload = cipher.decrypt(payload[16:])
elif self.version >= 3.2: # 3.2 or 3.3 or 3.4 or 3.5
# Trim header for non-default device type
if payload.startswith( self.version_bytes ):
payload = payload[len(self.version_header) :]
log.debug("removing 3.x=%r", payload)
elif self.dev_type == "device22" and (len(payload) & 0x0F) != 0:
payload = payload[len(self.version_header) :]
log.debug("removing device22 3.x header=%r", payload)
if self.version < 3.4:
try:
log.debug("decrypting=%r", payload)
payload = cipher.decrypt(payload, False)
except:
log.debug("incomplete payload=%r (len:%d)", payload, len(payload), exc_info=True)
return error_json(ERR_PAYLOAD)
log.debug("decrypted 3.x payload=%r", payload)
# Try to detect if device22 found
log.debug("payload type = %s", type(payload))
if not isinstance(payload, str):
try:
payload = payload.decode()
except:
log.debug("payload was not string type and decoding failed")
return error_json(ERR_JSON, payload)
if not self.disabledetect and "data unvalid" in payload:
self.dev_type = "device22"
# set at least one DPS
self.dps_to_request = {"1": None}
log.debug(
"'data unvalid' error detected: switching to dev_type %r",
self.dev_type,
)
return None
elif not payload.startswith(b"{"):
log.debug("Unexpected payload=%r", payload)
return error_json(ERR_PAYLOAD, payload)
if not isinstance(payload, str):
payload = payload.decode()
log.debug("decoded results=%r", payload)
try:
json_payload = json.loads(payload)
except:
json_payload = error_json(ERR_JSON, payload)
# v3.4 stuffs it into {"data":{"dps":{"1":true}}, ...}
if "dps" not in json_payload and "data" in json_payload and "dps" in json_payload['data']:
json_payload['dps'] = json_payload['data']['dps']
return json_payload
def _process_response(self, response): # pylint: disable=R0201
"""
Override this function in a sub-class if you want to do some processing on the received data
"""
return response
def _negotiate_session_key(self):
rkey = self._send_receive_quick( self._negotiate_session_key_generate_step_1(), 2 )
step3 = self._negotiate_session_key_generate_step_3( rkey )
if not step3:
return False
self._send_receive_quick( step3, None )
self._negotiate_session_key_generate_finalize()
return True
def _negotiate_session_key_generate_step_1( self ):
self.local_nonce = b'0123456789abcdef' # not-so-random random key
self.remote_nonce = b''
self.local_key = self.real_local_key
return MessagePayload(SESS_KEY_NEG_START, self.local_nonce)
def _negotiate_session_key_generate_step_3( self, rkey ):
if not rkey or type(rkey) != TuyaMessage or len(rkey.payload) < 48:
# error
log.debug("session key negotiation failed on step 1")
return False
if rkey.cmd != SESS_KEY_NEG_RESP:
log.debug("session key negotiation step 2 returned wrong command: %d", rkey.cmd)
return False
payload = rkey.payload
if self.version == 3.4:
try:
log.debug("decrypting=%r", payload)
cipher = AESCipher(self.real_local_key)
payload = cipher.decrypt(payload, False, decode_text=False)
except:
log.debug("session key step 2 decrypt failed, payload=%r (len:%d)", payload, len(payload), exc_info=True)
return False
log.debug("decrypted session key negotiation step 2 payload=%r", payload)
log.debug("payload type = %s len = %d", type(payload), len(payload))
if len(payload) < 48:
log.debug("session key negotiation step 2 failed, too short response")
return False
self.remote_nonce = payload[:16]
hmac_check = hmac.new(self.local_key, self.local_nonce, sha256).digest()
if hmac_check != payload[16:48]:
log.debug("session key negotiation step 2 failed HMAC check! wanted=%r but got=%r", binascii.hexlify(hmac_check), binascii.hexlify(payload[16:48]))
return False
log.debug("session local nonce: %r remote nonce: %r", self.local_nonce, self.remote_nonce)
rkey_hmac = hmac.new(self.local_key, self.remote_nonce, sha256).digest()
return MessagePayload(SESS_KEY_NEG_FINISH, rkey_hmac)
def _negotiate_session_key_generate_finalize( self ):
if IS_PY2:
k = [ chr(ord(a)^ord(b)) for (a,b) in zip(self.local_nonce,self.remote_nonce) ]
self.local_key = ''.join(k)
else:
self.local_key = bytes( [ a^b for (a,b) in zip(self.local_nonce,self.remote_nonce) ] )
log.debug("Session nonce XOR'd: %r", self.local_key)
cipher = AESCipher(self.real_local_key)
if self.version == 3.4:
self.local_key = cipher.encrypt( self.local_key, False, pad=False )
else:
iv = self.local_nonce[:12]
log.debug("Session IV: %r", iv)
self.local_key = cipher.encrypt( self.local_key, use_base64=False, pad=False, iv=iv )[12:28]
log.debug("Session key negotiate success! session key: %r", self.local_key)
return True
# adds protocol header (if needed) and encrypts
def _encode_message( self, msg ):
# make sure to use the parent's self.seqno and session key
if self.parent:
return self.parent._encode_message( msg )
hmac_key = None
iv = None
payload = msg.payload
self.cipher = AESCipher(self.local_key)
if self.version >= 3.4:
hmac_key = self.local_key
if msg.cmd not in NO_PROTOCOL_HEADER_CMDS:
# add the 3.x header
payload = self.version_header + payload
log.debug('final payload: %r', payload)
if self.version >= 3.5:
iv = True
# seqno cmd retcode payload crc crc_good, prefix, iv
msg = TuyaMessage(self.seqno, msg.cmd, None, payload, 0, True, PREFIX_6699_VALUE, True)
self.seqno += 1 # increase message sequence number
data = pack_message(msg,hmac_key=self.local_key)
log.debug("payload encrypted=%r",binascii.hexlify(data))
return data
payload = self.cipher.encrypt(payload, False)
elif self.version >= 3.2:
# expect to connect and then disconnect to set new
payload = self.cipher.encrypt(payload, False)
if msg.cmd not in NO_PROTOCOL_HEADER_CMDS:
# add the 3.x header
payload = self.version_header + payload
elif msg.cmd == CONTROL:
# need to encrypt
payload = self.cipher.encrypt(payload)
preMd5String = (
b"data="
+ payload
+ b"||lpv="
+ PROTOCOL_VERSION_BYTES_31
+ b"||"
+ self.local_key
)
m = md5()
m.update(preMd5String)
hexdigest = m.hexdigest()
# some tuya libraries strip 8: to :24
payload = (
PROTOCOL_VERSION_BYTES_31
+ hexdigest[8:][:16].encode("latin1")
+ payload
)
self.cipher = None
msg = TuyaMessage(self.seqno, msg.cmd, 0, payload, 0, True, PREFIX_55AA_VALUE, False)
self.seqno += 1 # increase message sequence number
buffer = pack_message(msg,hmac_key=hmac_key)
log.debug("payload encrypted=%r",binascii.hexlify(buffer))
return buffer
def _register_child(self, child):
if child.id in self.children and child != self.children[child.id]:
log.debug('Replacing existing child %r!', child.id)
self.children[child.id] = child
# disable device22 detection as some gateways return "json obj data unvalid" when the gateway is polled without a cid
self.disabledetect = True
self.payload_dict = None
def receive(self):
"""
Poll device to read any payload in the buffer. Timeout results in None returned.
"""
return self._send_receive(None)
def send(self, payload):
"""
Send single buffer `payload`.
Args:
payload(bytes): Data to send.
"""
return self._send_receive(payload, 0, getresponse=False)
def status(self, nowait=False):
"""Return device status."""
query_type = DP_QUERY
log.debug("status() entry (dev_type is %s)", self.dev_type)
payload = self.generate_payload(query_type)
data = self._send_receive(payload, 0, getresponse=(not nowait))
log.debug("status() received data=%r", data)
# Error handling
if (not nowait) and data and "Err" in data:
if data["Err"] == str(ERR_DEVTYPE):
# Device22 detected and change - resend with new payload
log.debug("status() rebuilding payload for device22")
payload = self.generate_payload(query_type)
data = self._send_receive(payload)
elif data["Err"] == str(ERR_PAYLOAD):
log.debug("Status request returned an error, is version %r and local key %r correct?", self.version, self.local_key)
return data
def subdev_query( self, nowait=False ):
"""Query for a list of sub-devices and their status"""
# final payload should look like: {"data":{"cids":[]},"reqType":"subdev_online_stat_query"}
payload = self.generate_payload(LAN_EXT_STREAM, rawData={"cids":[]}, reqType='subdev_online_stat_query')
return self._send_receive(payload, 0, getresponse=(not nowait))
def detect_available_dps(self):
"""Return which datapoints are supported by the device."""
# device22 devices need a sort of bruteforce querying in order to detect the
# list of available dps experience shows that the dps available are usually
# in the ranges [1-25] and [100-110] need to split the bruteforcing in
# different steps due to request payload limitation (max. length = 255)
self.dps_cache = {}
ranges = [(2, 11), (11, 21), (21, 31), (100, 111)]
for dps_range in ranges:
# dps 1 must always be sent, otherwise it might fail in case no dps is found
# in the requested range
self.dps_to_request = {"1": None}
self.add_dps_to_request(range(*dps_range))
try:
data = self.status()
except Exception as ex:
log.exception("Failed to get status: %s", ex)
raise
if data is not None and "dps" in data:
for k in data["dps"]:
self.dps_cache[k] = None
if self.dev_type == "default":
self.dps_to_request = self.dps_cache
return self.dps_cache
log.debug("Detected dps: %s", self.dps_cache)
self.dps_to_request = self.dps_cache
return self.dps_cache
def add_dps_to_request(self, dp_indicies):
"""Add a datapoint (DP) to be included in requests."""
if isinstance(dp_indicies, int):
self.dps_to_request[str(dp_indicies)] = None
else:
self.dps_to_request.update({str(index): None for index in dp_indicies})
def set_version(self, version): # pylint: disable=W0621
self.version = version
self.version_str = "v" + str(version)
self.version_bytes = str(version).encode('latin1')
self.version_header = self.version_bytes + PROTOCOL_3x_HEADER
self.payload_dict = None
if version == 3.2: # 3.2 behaves like 3.3 with device22
self.dev_type="device22"
if self.dps_to_request == {}:
self.detect_available_dps()
def set_socketPersistent(self, persist):
self.socketPersistent = persist
if self.socket and not persist:
self.socket.close()
self.socket = None
def set_socketNODELAY(self, nodelay):
self.socketNODELAY = nodelay
if self.socket:
if nodelay:
self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
else:
self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 0)
def set_socketRetryLimit(self, limit):
self.socketRetryLimit = limit
def set_socketRetryDelay(self, delay):
self.socketRetryDelay = delay
def set_socketTimeout(self, s):
self.connection_timeout = s
if self.socket:
self.socket.settimeout(s)
def set_dpsUsed(self, dps_to_request):
self.dps_to_request = dps_to_request
def set_retry(self, retry):
self.retry = retry
def set_sendWait(self, s):
self.sendWait = s
def close(self):
self.__del__()
@staticmethod
def find(did):
"""
Mainly here for backwards compatibility.
Calling tinytuya.find_device() directly is recommended.
Parameters:
did = The specific Device ID you are looking for (returns only IP and Version)
Response:
(ip, version)
"""
bcast_data = find_device(dev_id=did)
return (bcast_data['ip'], bcast_data['version'])
def generate_payload(self, command, data=None, gwId=None, devId=None, uid=None, rawData=None, reqType=None):
"""
Generate the payload to send.
Args:
command(str): The type of command.
This is one of the entries from payload_dict
data(dict, optional): The data to send.
This is what will be passed via the 'dps' entry
gwId(str, optional): Will be used for gwId
devId(str, optional): Will be used for devId
uid(str, optional): Will be used for uid
"""
# dicts will get referenced instead of copied if we don't do this
def _deepcopy(dict1):
result = {}
for k in dict1:
if isinstance( dict1[k], dict ):
result[k] = _deepcopy( dict1[k] )
else:
result[k] = dict1[k]
return result
# dict2 will be merged into dict1
# as dict2 is payload_dict['...'] we only need to worry about copying 2 levels deep,
# the command id and "command"/"command_override" keys: i.e. dict2[CMD_ID]["command"]
def _merge_payload_dicts(dict1, dict2):
for cmd in dict2:
if cmd not in dict1:
# make a deep copy so we don't get a reference
dict1[cmd] = _deepcopy( dict2[cmd] )
else:
for var in dict2[cmd]:
if not isinstance( dict2[cmd][var], dict ):
# not a dict, safe to copy
dict1[cmd][var] = dict2[cmd][var]
else:
# make a deep copy so we don't get a reference
dict1[cmd][var] = _deepcopy( dict2[cmd][var] )
# start merging down to the final payload dict
# later merges overwrite earlier merges
# "default" - ("gateway" if gateway) - ("zigbee" if sub-device) - [version string] - ('gateway_'+[version string] if gateway) -
# 'zigbee_'+[version string] if sub-device - [dev_type if not "default"]
if not self.payload_dict or self.last_dev_type != self.dev_type:
self.payload_dict = {}
_merge_payload_dicts( self.payload_dict, payload_dict['default'] )
if self.children:
_merge_payload_dicts( self.payload_dict, payload_dict['gateway'] )
if self.cid:
_merge_payload_dicts( self.payload_dict, payload_dict['zigbee'] )
if self.version_str in payload_dict:
_merge_payload_dicts( self.payload_dict, payload_dict[self.version_str] )
if self.children and ('gateway_'+self.version_str) in payload_dict:
_merge_payload_dicts( self.payload_dict, payload_dict['gateway_'+self.version_str] )
if self.cid and ('zigbee_'+self.version_str) in payload_dict:
_merge_payload_dicts( self.payload_dict, payload_dict['zigbee_'+self.version_str] )
if self.dev_type != 'default':
_merge_payload_dicts( self.payload_dict, payload_dict[self.dev_type] )
log.debug( 'final payload_dict for %r (%r/%r): %r', self.id, self.version_str, self.dev_type, self.payload_dict )
# save it so we don't have to calculate this again unless something changes
self.last_dev_type = self.dev_type
json_data = command_override = None
if command in self.payload_dict:
if 'command' in self.payload_dict[command]:
json_data = self.payload_dict[command]['command']
if 'command_override' in self.payload_dict[command]:
command_override = self.payload_dict[command]['command_override']
if command_override is None:
command_override = command
if json_data is None:
# I have yet to see a device complain about included but unneeded attribs, but they *will*
# complain about missing attribs, so just include them all unless otherwise specified
json_data = {"gwId": "", "devId": "", "uid": "", "t": ""}
# make sure we don't modify payload_dict
json_data = json_data.copy()
if "gwId" in json_data:
if gwId is not None:
json_data["gwId"] = gwId
elif self.parent:
json_data["gwId"] = self.parent.id
else:
json_data["gwId"] = self.id
if "devId" in json_data:
if devId is not None:
json_data["devId"] = devId
else:
json_data["devId"] = self.id
if "uid" in json_data:
if uid is not None:
json_data["uid"] = uid
else:
json_data["uid"] = self.id
if self.cid:
json_data["cid"] = self.cid
if "data" in json_data:
json_data["data"]["cid"] = self.cid
json_data["data"]["ctype"] = 0
#elif "cid" in json_data:
# del json_data['cid']
if "t" in json_data:
if json_data['t'] == "int":
json_data["t"] = int(time.time())
else:
json_data["t"] = str(int(time.time()))
if rawData is not None and "data" in json_data:
json_data["data"] = rawData
elif data is not None:
if "dpId" in json_data:
json_data["dpId"] = data
elif "data" in json_data:
json_data["data"]["dps"] = data
else:
json_data["dps"] = data
elif self.dev_type == "device22" and command == DP_QUERY:
json_data["dps"] = self.dps_to_request
if reqType and "reqType" in json_data:
json_data["reqType"] = reqType
# Create byte buffer from hex data
if json_data == "":
payload = ""
else:
payload = json.dumps(json_data)
# if spaces are not removed device does not respond!
payload = payload.replace(" ", "")
payload = payload.encode("utf-8")
log.debug("building command %s payload=%r", command, payload)
# create Tuya message packet
return MessagePayload(command_override, payload)
| (dev_id, address=None, local_key='', dev_type='default', connection_timeout=5, version=3.1, persist=False, cid=None, node_id=None, parent=None, connection_retry_limit=5, connection_retry_delay=5, port=6668) |
729,602 | tinytuya.core | appenddevice | null | def appenddevice(newdevice, devices):
if newdevice["ip"] in devices:
return True
devices[newdevice["ip"]] = newdevice
return False
| (newdevice, devices) |
729,603 | tinytuya.core | assign_dp_mappings | Adds mappings to all the devices in the tuyadevices list
Parameters:
tuyadevices = list of devices
mappings = dict containing the mappings
Response:
Nothing, modifies tuyadevices in place
| def assign_dp_mappings( tuyadevices, mappings ):
""" Adds mappings to all the devices in the tuyadevices list
Parameters:
tuyadevices = list of devices
mappings = dict containing the mappings
Response:
Nothing, modifies tuyadevices in place
"""
if type(mappings) != dict:
raise ValueError( '\'mappings\' must be a dict' )
if (not mappings) or (not tuyadevices):
return None
for dev in tuyadevices:
try:
devid = dev['id']
productid = dev['product_id']
except:
# we need both the device id and the product id to download mappings!
log.debug( 'Cannot add DP mapping, no device id and/or product id: %r', dev )
continue
if productid in mappings:
dev['mapping'] = mappings[productid]
else:
log.debug( 'Device %s has no mapping!', devid )
dev['mapping'] = None
| (tuyadevices, mappings) |
729,605 | tinytuya.core | bin2hex | null | def bin2hex(x, pretty=False):
if pretty:
space = " "
else:
space = ""
if IS_PY2:
result = "".join("%02X%s" % (ord(y), space) for y in x)
else:
result = "".join("%02X%s" % (y, space) for y in x)
return result
| (x, pretty=False) |
729,608 | tinytuya.core | decrypt | null | def decrypt(msg, key):
return AESCipher( key ).decrypt( msg, use_base64=False, decode_text=True )
| (msg, key) |
729,609 | tinytuya.core | decrypt_udp | null | def decrypt_udp(msg):
try:
header = parse_header(msg)
except:
header = None
if not header:
return decrypt(msg, udpkey)
if header.prefix == PREFIX_55AA_VALUE:
payload = unpack_message(msg).payload
try:
if payload[:1] == b'{' and payload[-1:] == b'}':
return payload.decode()
except:
pass
return decrypt(payload, udpkey)
if header.prefix == PREFIX_6699_VALUE:
unpacked = unpack_message(msg, hmac_key=udpkey, no_retcode=None)
payload = unpacked.payload.decode()
# app sometimes has extra bytes at the end
while payload[-1] == chr(0):
payload = payload[:-1]
return payload
return decrypt(msg, udpkey)
| (msg) |
729,610 | tinytuya.core | deviceScan | Scans your network for Tuya devices and returns dictionary of devices discovered
devices = tinytuya.deviceScan(verbose)
Parameters:
verbose = True or False, print formatted output to stdout [Default: False]
maxretry = The number of loops to wait to pick up UDP from all devices
color = True or False, print output in color [Default: True]
poll = True or False, poll dps status for devices if possible
forcescan = True or False, force network scan for device IP addresses
Response:
devices = Dictionary of all devices found
To unpack data, you can do something like this:
devices = tinytuya.deviceScan()
for ip in devices:
id = devices[ip]['gwId']
key = devices[ip]['productKey']
vers = devices[ip]['version']
dps = devices[ip]['dps']
| def deviceScan(verbose=False, maxretry=None, color=True, poll=True, forcescan=False, byID=False):
"""Scans your network for Tuya devices and returns dictionary of devices discovered
devices = tinytuya.deviceScan(verbose)
Parameters:
verbose = True or False, print formatted output to stdout [Default: False]
maxretry = The number of loops to wait to pick up UDP from all devices
color = True or False, print output in color [Default: True]
poll = True or False, poll dps status for devices if possible
forcescan = True or False, force network scan for device IP addresses
Response:
devices = Dictionary of all devices found
To unpack data, you can do something like this:
devices = tinytuya.deviceScan()
for ip in devices:
id = devices[ip]['gwId']
key = devices[ip]['productKey']
vers = devices[ip]['version']
dps = devices[ip]['dps']
"""
from . import scanner
return scanner.devices(verbose=verbose, scantime=maxretry, color=color, poll=poll, forcescan=forcescan, byID=byID)
| (verbose=False, maxretry=None, color=True, poll=True, forcescan=False, byID=False) |
729,611 | tinytuya.core | device_info | Searches the devices.json file for devices with ID = dev_id
Parameters:
dev_id = The specific Device ID you are looking for
Response:
{dict} containing the the device info, or None if not found
| def device_info( dev_id ):
"""Searches the devices.json file for devices with ID = dev_id
Parameters:
dev_id = The specific Device ID you are looking for
Response:
{dict} containing the the device info, or None if not found
"""
devinfo = None
try:
# Load defaults
with open(DEVICEFILE, 'r') as f:
tuyadevices = json.load(f)
log.debug("loaded=%s [%d devices]", DEVICEFILE, len(tuyadevices))
for dev in tuyadevices:
if 'id' in dev and dev['id'] == dev_id:
log.debug("Device %r found in %s", dev_id, DEVICEFILE)
devinfo = dev
break
except:
# No DEVICEFILE
pass
return devinfo
| (dev_id) |
729,612 | tinytuya.core | encrypt | null | def encrypt(msg, key):
return AESCipher( key ).encrypt( msg, use_base64=False, pad=True )
| (msg, key) |
729,613 | tinytuya.core | error_json | Return error details in JSON | def error_json(number=None, payload=None):
"""Return error details in JSON"""
try:
spayload = json.dumps(payload)
# spayload = payload.replace('\"','').replace('\'','')
except:
spayload = '""'
vals = (error_codes[number], str(number), spayload)
log.debug("ERROR %s - %s - payload: %s", *vals)
return json.loads('{ "Error":"%s", "Err":"%s", "Payload":%s }' % vals)
| (number=None, payload=None) |
729,614 | tinytuya.core | find_device | Scans network for Tuya devices with either ID = dev_id or IP = address
Parameters:
dev_id = The specific Device ID you are looking for
address = The IP address you are tring to find the Device ID for
Response:
{'ip':<ip>, 'version':<version>, 'id':<id>, 'product_id':<product_id>, 'data':<broadcast data>}
| def find_device(dev_id=None, address=None):
"""Scans network for Tuya devices with either ID = dev_id or IP = address
Parameters:
dev_id = The specific Device ID you are looking for
address = The IP address you are tring to find the Device ID for
Response:
{'ip':<ip>, 'version':<version>, 'id':<id>, 'product_id':<product_id>, 'data':<broadcast data>}
"""
if dev_id is None and address is None:
return (None, None, None)
log.debug("Listening for device %s on the network", dev_id)
# Enable UDP listening broadcasting mode on UDP port 6666 - 3.1 Devices
client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
client.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
try:
client.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
except AttributeError:
# SO_REUSEPORT not available
pass
client.bind(("", UDPPORT))
client.setblocking(False)
# Enable UDP listening broadcasting mode on encrypted UDP port 6667 - 3.3 Devices
clients = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
clients.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
try:
clients.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
except AttributeError:
# SO_REUSEPORT not available
pass
clients.bind(("", UDPPORTS))
clients.setblocking(False)
deadline = time.time() + SCANTIME
selecttime = SCANTIME
ret = None
while (ret is None) and (selecttime > 0):
rd, _, _ = select.select( [client, clients], [], [], selecttime )
for sock in rd:
try:
data, addr = sock.recvfrom(4048)
except:
# Timeout
continue
ip = addr[0]
gwId = version = "" # pylint: disable=W0621
result = data
try:
result = decrypt_udp(result)
result = json.loads(result)
ip = result["ip"]
gwId = result["gwId"]
version = result["version"]
product_id = '' if 'productKey' not in result else result['productKey']
log.debug( 'find() received broadcast from %r: %r', ip, result )
except:
result = {"ip": ip}
log.debug( 'find() failed to decode broadcast from %r: %r', addr, data )
continue
# Check to see if we are only looking for one device
if dev_id and gwId == dev_id:
# We found it by dev_id!
ret = {'ip':ip, 'version':version, 'id':gwId, 'product_id':product_id, 'data':result}
break
elif address and address == ip:
# We found it by ip!
ret = {'ip':ip, 'version':version, 'id':gwId, 'product_id':product_id, 'data':result}
break
selecttime = deadline - time.time()
# while
clients.close()
client.close()
if ret is None:
ret = {'ip':None, 'version':None, 'id':None, 'product_id':None, 'data':{}}
log.debug( 'find() is returning: %r', ret )
return ret
| (dev_id=None, address=None) |
729,615 | tinytuya.core | has_suffix | Check to see if payload has valid Tuya suffix | def has_suffix(payload):
"""Check to see if payload has valid Tuya suffix"""
if len(payload) < 4:
return False
log.debug("buffer %r = %r", payload[-4:], SUFFIX_BIN)
return payload[-4:] == SUFFIX_BIN
| (payload) |
729,616 | tinytuya.core | hex2bin | null | def hex2bin(x):
if IS_PY2:
return x.decode("hex")
else:
return bytes.fromhex(x)
| (x) |
729,622 | tinytuya.core | pack_message | Pack a TuyaMessage into bytes. | def pack_message(msg, hmac_key=None):
"""Pack a TuyaMessage into bytes."""
if msg.prefix == PREFIX_55AA_VALUE:
header_fmt = MESSAGE_HEADER_FMT_55AA
end_fmt = MESSAGE_END_FMT_HMAC if hmac_key else MESSAGE_END_FMT_55AA
msg_len = len(msg.payload) + struct.calcsize(end_fmt)
header_data = ( msg.prefix, msg.seqno, msg.cmd, msg_len )
elif msg.prefix == PREFIX_6699_VALUE:
if not hmac_key:
raise TypeError( 'key must be provided to pack 6699-format messages' )
header_fmt = MESSAGE_HEADER_FMT_6699
end_fmt = MESSAGE_END_FMT_6699
msg_len = len(msg.payload) + (struct.calcsize(end_fmt) - 4) + 12
if type(msg.retcode) == int:
msg_len += struct.calcsize(MESSAGE_RETCODE_FMT)
header_data = ( msg.prefix, 0, msg.seqno, msg.cmd, msg_len )
else:
raise ValueError( 'pack_message() cannot handle message format %08X' % msg.prefix )
# Create full message excluding CRC and suffix
data = struct.pack( header_fmt, *header_data )
if msg.prefix == PREFIX_6699_VALUE:
cipher = AESCipher( hmac_key )
if type(msg.retcode) == int:
raw = struct.pack( MESSAGE_RETCODE_FMT, msg.retcode ) + msg.payload
else:
raw = msg.payload
data2 = cipher.encrypt( raw, use_base64=False, pad=False, iv=True if not msg.iv else msg.iv, header=data[4:])
data += data2 + SUFFIX_6699_BIN
else:
data += msg.payload
if hmac_key:
crc = hmac.new(hmac_key, data, sha256).digest()
else:
crc = binascii.crc32(data) & 0xFFFFFFFF
# Calculate CRC, add it together with suffix
data += struct.pack( end_fmt, crc, SUFFIX_VALUE )
return data
| (msg, hmac_key=None) |
729,623 | tinytuya.core | pad | null | def pad(s):
return s + (16 - len(s) % 16) * chr(16 - len(s) % 16)
| (s) |
729,624 | tinytuya.core | parse_header | null | def parse_header(data):
if( data[:4] == PREFIX_6699_BIN ):
fmt = MESSAGE_HEADER_FMT_6699
else:
fmt = MESSAGE_HEADER_FMT_55AA
header_len = struct.calcsize(fmt)
if len(data) < header_len:
raise DecodeError('Not enough data to unpack header')
unpacked = struct.unpack( fmt, data[:header_len] )
prefix = unpacked[0]
if prefix == PREFIX_55AA_VALUE:
prefix, seqno, cmd, payload_len = unpacked
total_length = payload_len + header_len
elif prefix == PREFIX_6699_VALUE:
prefix, unknown, seqno, cmd, payload_len = unpacked
#seqno |= unknown << 32
total_length = payload_len + header_len + len(SUFFIX_6699_BIN)
else:
#log.debug('Header prefix wrong! %08X != %08X', prefix, PREFIX_VALUE)
raise DecodeError('Header prefix wrong! %08X is not %08X or %08X' % (prefix, PREFIX_55AA_VALUE, PREFIX_6699_VALUE))
# sanity check. currently the max payload length is somewhere around 300 bytes
if payload_len > 1000:
raise DecodeError('Header claims the packet size is over 1000 bytes! It is most likely corrupt. Claimed size: %d bytes. fmt:%s unpacked:%r' % (payload_len,fmt,unpacked))
return TuyaHeader(prefix, seqno, cmd, payload_len, total_length)
| (data) |
729,625 | tinytuya.core | scan | Scans your network for Tuya devices with output to stdout | def scan(maxretry=None, color=True, forcescan=False):
"""Scans your network for Tuya devices with output to stdout"""
from . import scanner
scanner.scan(scantime=maxretry, color=color, forcescan=forcescan)
| (maxretry=None, color=True, forcescan=False) |
729,627 | tinytuya.core | set_debug | Enable tinytuya verbose logging | def set_debug(toggle=True, color=True):
"""Enable tinytuya verbose logging"""
if toggle:
if color:
logging.basicConfig(
format="\x1b[31;1m%(levelname)s:%(message)s\x1b[0m", level=logging.DEBUG
)
else:
logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.DEBUG)
log.setLevel(logging.DEBUG)
log.debug("TinyTuya [%s]\n", __version__)
log.debug("Python %s on %s", sys.version, sys.platform)
if AESCipher.CRYPTOLIB_HAS_GCM == False:
log.debug("Using %s %s for crypto", AESCipher.CRYPTOLIB, AESCipher.CRYPTOLIB_VER)
log.debug("Warning: Crypto library does not support AES-GCM, v3.5 devices will not work!")
else:
log.debug("Using %s %s for crypto, GCM is supported", AESCipher.CRYPTOLIB, AESCipher.CRYPTOLIB_VER)
else:
log.setLevel(logging.NOTSET)
| (toggle=True, color=True) |
729,631 | tinytuya.core | termcolor | null | def termcolor(color=True):
if color is False:
# Disable Terminal Color Formatting
bold = subbold = normal = dim = alert = alertdim = cyan = red = yellow = ""
else:
# Terminal Color Formatting
bold = "\033[0m\033[97m\033[1m"
subbold = "\033[0m\033[32m"
normal = "\033[97m\033[0m"
dim = "\033[0m\033[97m\033[2m"
alert = "\033[0m\033[91m\033[1m"
alertdim = "\033[0m\033[91m\033[2m"
cyan = "\033[0m\033[36m"
red = "\033[0m\033[31m"
yellow = "\033[0m\033[33m"
return bold,subbold,normal,dim,alert,alertdim,cyan,red,yellow
| (color=True) |
729,633 | tinytuya.core | unpack_message | Unpack bytes into a TuyaMessage. | def unpack_message(data, hmac_key=None, header=None, no_retcode=False):
"""Unpack bytes into a TuyaMessage."""
if header is None:
header = parse_header(data)
if header.prefix == PREFIX_55AA_VALUE:
# 4-word header plus return code
header_len = struct.calcsize(MESSAGE_HEADER_FMT_55AA)
end_fmt = MESSAGE_END_FMT_HMAC if hmac_key else MESSAGE_END_FMT_55AA
retcode_len = 0 if no_retcode else struct.calcsize(MESSAGE_RETCODE_FMT)
msg_len = header_len + header.length
elif header.prefix == PREFIX_6699_VALUE:
if not hmac_key:
raise TypeError( 'key must be provided to unpack 6699-format messages' )
header_len = struct.calcsize(MESSAGE_HEADER_FMT_6699)
end_fmt = MESSAGE_END_FMT_6699
retcode_len = 0
msg_len = header_len + header.length + 4
else:
raise ValueError( 'unpack_message() cannot handle message format %08X' % header.prefix )
if len(data) < msg_len:
log.debug('unpack_message(): not enough data to unpack payload! need %d but only have %d', header_len+header.length, len(data))
raise DecodeError('Not enough data to unpack payload')
end_len = struct.calcsize(end_fmt)
# the retcode is technically part of the payload, but strip it as we do not want it here
retcode = 0 if not retcode_len else struct.unpack(MESSAGE_RETCODE_FMT, data[header_len:header_len+retcode_len])[0]
payload = data[header_len+retcode_len:msg_len]
crc, suffix = struct.unpack(end_fmt, payload[-end_len:])
payload = payload[:-end_len]
if header.prefix == PREFIX_55AA_VALUE:
if hmac_key:
have_crc = hmac.new(hmac_key, data[:(header_len+header.length)-end_len], sha256).digest()
else:
have_crc = binascii.crc32(data[:(header_len+header.length)-end_len]) & 0xFFFFFFFF
if suffix != SUFFIX_VALUE:
log.debug('Suffix prefix wrong! %08X != %08X', suffix, SUFFIX_VALUE)
if crc != have_crc:
if hmac_key:
log.debug('HMAC checksum wrong! %r != %r', binascii.hexlify(have_crc), binascii.hexlify(crc))
else:
log.debug('CRC wrong! %08X != %08X', have_crc, crc)
crc_good = crc == have_crc
iv = None
elif header.prefix == PREFIX_6699_VALUE:
iv = payload[:12]
payload = payload[12:]
try:
cipher = AESCipher( hmac_key )
payload = cipher.decrypt( payload, use_base64=False, decode_text=False, verify_padding=False, iv=iv, header=data[4:header_len], tag=crc)
crc_good = True
except:
crc_good = False
retcode_len = struct.calcsize(MESSAGE_RETCODE_FMT)
if no_retcode is False:
pass
elif no_retcode is None and payload[0:1] != b'{' and payload[retcode_len:retcode_len+1] == b'{':
retcode_len = struct.calcsize(MESSAGE_RETCODE_FMT)
else:
retcode_len = 0
if retcode_len:
retcode = struct.unpack(MESSAGE_RETCODE_FMT, payload[:retcode_len])[0]
payload = payload[retcode_len:]
return TuyaMessage(header.seqno, header.cmd, retcode, payload, crc, crc_good, header.prefix, iv)
| (data, hmac_key=None, header=None, no_retcode=False) |
729,634 | tinytuya.core | unpad | null | def unpad(s):
return s[: -ord(s[len(s) - 1 :])]
| (s) |
729,635 | avltree._avl_tree | AvlTree | Lightweight, pure-python AVL tree.
This class implements the MutableMapping interface and can be used in almost every
way that a built-in dictionary can.
# Sorting
The AVL tree data structure makes it possible to easily iterate on keys in
sort-order, and any method which returns an iterable of keys, values, or items will
return them in sort-order by key.
# Keys
Anything used as a key in an AvlTree must implement `__eq__`, `__hash__`, and
`__lt__`. That is to say they must be immutable and have a less-than comparison.
It's recommended to only insert keys which are all the same type, ensuring that they
have a well-ordering. However, keys of different types can be inserted as long as
their `__eq__` and `__lt__` methods behave.
This class provides no protections against poorly behaved keys and can fail in an
undefined manner if keys are not implemented properly.
# Values
Values can be any Python object.
# Recursion
This class does not use recursive techniques. This ensures that this package can be
used on platforms with low recursion limits, even in scenarios with very large and
very deep trees.
# Time Complexities
Time complexities for specific methods can be found in their docstrings.
| class AvlTree(MutableMapping[_K, _V]):
"""Lightweight, pure-python AVL tree.
This class implements the MutableMapping interface and can be used in almost every
way that a built-in dictionary can.
# Sorting
The AVL tree data structure makes it possible to easily iterate on keys in
sort-order, and any method which returns an iterable of keys, values, or items will
return them in sort-order by key.
# Keys
Anything used as a key in an AvlTree must implement `__eq__`, `__hash__`, and
`__lt__`. That is to say they must be immutable and have a less-than comparison.
It's recommended to only insert keys which are all the same type, ensuring that they
have a well-ordering. However, keys of different types can be inserted as long as
their `__eq__` and `__lt__` methods behave.
This class provides no protections against poorly behaved keys and can fail in an
undefined manner if keys are not implemented properly.
# Values
Values can be any Python object.
# Recursion
This class does not use recursive techniques. This ensures that this package can be
used on platforms with low recursion limits, even in scenarios with very large and
very deep trees.
# Time Complexities
Time complexities for specific methods can be found in their docstrings.
"""
def __init__(self, mapping: Mapping[_K, _V] | None = None) -> None:
"""Constructor.
Inserting the elements of a passed-in mapping has an amortized and worst-case
time complexity of O[n*log(n)].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> avl_tree
AvlTree({0: 'a', 1: 'b'})
```
Args:
mapping (dict[_K, _V] | None): An optional initial mapping of items to add
to this tree. Defaults to None.
"""
self.__nodes: Final[dict[_K, AvlTreeNode[_K, _V]]] = {}
self.__root_key: _K | None = None
if mapping is not None:
for key, value in mapping.items():
self[key] = value
def __setitem__(self, __k: _K, __v: _V) -> None:
"""Maps the given key to the given value in this tree.
This method has an amortized and worst-case time complexity of O[log(n)].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> avl_tree[0] = 'foo'
>>> avl_tree[2] = 'c'
>>> avl_tree
AvlTree({0: 'foo', 1: 'b', 2: 'c'})
```
Args:
__k (_K): The key to map.
__v (_V): The value to associate with the key.
"""
if __k in self.__nodes:
self.__nodes[__k].value = __v
return
if self.__root_key is None:
self.__root_key = __k
self.__nodes[self.__root_key] = AvlTreeNode[_K, _V](value=__v)
return
stack: Final[list[_K]] = [self.__root_key]
current_node: AvlTreeNode[_K, _V] = self.__nodes[stack[-1]]
while True:
if __k < stack[-1] and current_node.lesser_child_key is None:
current_node.lesser_child_key = __k
self.__nodes[__k] = AvlTreeNode[_K, _V](value=__v)
break
if stack[-1] < __k and current_node.greater_child_key is None:
current_node.greater_child_key = __k
self.__nodes[__k] = AvlTreeNode[_K, _V](value=__v)
break
if __k < stack[-1] and current_node.lesser_child_key is not None:
stack.append(current_node.lesser_child_key)
current_node = self.__nodes[stack[-1]]
elif current_node.greater_child_key is not None:
stack.append(current_node.greater_child_key)
current_node = self.__nodes[stack[-1]]
self.__enforce_avl(stack=stack)
def __delitem__(self, __k: _K) -> None: # noqa: C901, PLR0912
"""Deletes the given key from this tree.
This method has an amortized and worst-case time complexity of O[log(n)].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> del avl_tree[0]
>>> avl_tree
AvlTree({1: 'b'})
```
Args:
__k (_K): The key to delete from this tree.
Raises:
KeyError: If the given key is not present in this tree.
"""
if self.__root_key is None:
message: str = f"Key not present in AvlTree: {__k!r}"
raise KeyError(message)
# Find the node to discard and its parent node
parent: AvlTreeNode[_K, _V] | None = None
node_key: _K = self.__root_key
stack: Final[list[_K]] = [node_key]
node: AvlTreeNode[_K, _V] = self.__nodes[node_key]
while node_key != __k:
parent = node
node_key = self.__get_closer_key(key=__k, current_key=node_key)
stack.append(node_key)
node = self.__nodes[node_key]
# Find the key of the node with which to replace the existing node
replacement_key: _K | None = None
if node.lesser_child_key is not None and node.greater_child_key is None:
replacement_key = node.lesser_child_key
elif node.lesser_child_key is None and node.greater_child_key is not None:
replacement_key = node.greater_child_key
elif node.lesser_child_key is not None and node.greater_child_key is not None:
# Find the next highest node than the one to remove
successor_parent: AvlTreeNode[_K, _V] | None = None
replacement_key = node.greater_child_key
successor: AvlTreeNode[_K, _V] = self.__nodes[replacement_key]
while successor.lesser_child_key is not None:
successor_parent = successor
stack.append(replacement_key)
replacement_key = successor.lesser_child_key
successor = self.__nodes[successor.lesser_child_key]
# Swap the successor node with the node to replace
if successor_parent is not None and successor.greater_child_key is None:
successor_parent.lesser_child_key = None
successor.greater_child_key = node.greater_child_key
elif successor_parent is not None:
successor_parent.lesser_child_key = successor.greater_child_key
successor.greater_child_key = node.greater_child_key
successor.lesser_child_key = node.lesser_child_key
# Swap the node to its replacement
if parent is None:
self.__root_key = replacement_key
elif parent.lesser_child_key == node_key:
parent.lesser_child_key = replacement_key
else:
parent.greater_child_key = replacement_key
del self.__nodes[node_key]
if replacement_key is None:
stack.remove(node_key)
else:
stack[stack.index(node_key)] = replacement_key
self.__enforce_avl(stack=stack)
def __getitem__(self, __k: _K) -> _V:
"""Gets the value associated with the given key.
This method has an amortized and worst-case time complexity of O[log(n)].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> avl_tree[0]
'a'
>>> avl_tree[2]
KeyError: 'Key not present in AvlTree: 2'
```
Args:
__k (_K): The key.
Returns:
_V: The value associated with the given key.
Raises:
KeyError: If the given key is not present in this tree.
"""
if self.__root_key is None:
message: str = f"Key not present in AvlTree: {__k!r}"
raise KeyError(message)
current_key: _K = self.__root_key
current_node: AvlTreeNode[_K, _V] = self.__nodes[current_key]
while current_key != __k:
current_key = self.__get_closer_key(key=__k, current_key=current_key)
current_node = self.__nodes[current_key]
return current_node.value
def __len__(self) -> int:
"""Returns the number of items contained in this tree.
This method has an amortized and worst-case time complexity of O[1].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> len(avl_tree)
2
```
Returns:
int: The number of items contained in this tree.
"""
return len(self.__nodes)
def __iter__(self) -> Iterator[_K]:
"""Iterates over all keys contained in this tree in sort order.
Getting the first key has an amortized and worst-case time complexity of
O[log(n)]. Iterating over all keys has an amortized and worst-case time
complexity of O[n].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> for key in avl_tree:
... print(key)
...
0
1
```
Returns:
Iterator[_K]: The iterator object.
"""
return self.between()
def __repr__(self) -> str:
"""Builds a developer-friendly string representation of this AvlTree.
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> repr(avl_tree)
"AvlTree({0: 'a', 1: 'b'})"
```
Returns:
str: A string representation of this AvlTree.
"""
return f"AvlTree({{{', '.join(f'{k!r}: {v!r}' for k, v in self.items())}}})"
def minimum(self) -> _K:
"""Gets the minimum key contained in this tree.
This method has an amortized and worst-case time complexity of O[log(n)].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> avl_tree.minimum()
0
```
Returns:
_K: The minimum key.
Raises:
ValueError: If there are no keys present in this tree.
"""
if self.__root_key is None:
message: Final[str] = "Cannot get the minimum of an empty AvlTree"
raise ValueError(message)
key: _K = self.__root_key
while self.__nodes[key].lesser_child_key is not None:
key = self.__nodes[key].lesser_child_key # type: ignore[assignment]
return key
def maximum(self) -> _K:
"""Gets the maximum key contained in this tree.
This method has an amortized and worst-case time complexity of O[log(n)].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> avl_tree.maximum()
1
```
Returns:
_K: The maximum key.
Raises:
ValueError: If there are no keys present in this tree.
"""
if self.__root_key is None:
message: Final[str] = "Cannot get the maximum of an empty AvlTree"
raise ValueError(message)
key: _K = self.__root_key
node: AvlTreeNode[_K, _V] = self.__nodes[key]
while node.greater_child_key is not None:
key = node.greater_child_key
node = self.__nodes[key]
return key
def between( # noqa: C901, PLR0912
self,
start: _K | None = None,
stop: _K | None = None,
treatment: Literal["inclusive", "exclusive"] = "inclusive",
) -> Iterator[_K]:
"""Iterates over all keys between the given start and stop in sort order.
Getting the first key has an amortized and worst-case time complexity of
O[log(n)]. Iterating over all keys has an amortized and worst-case time
complexity of O[k], where k is the number of items in only the interval between
start and stop.
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b', 2: 'c'})
>>> for key in avl_tree.between(start=0, stop=2, treatment="inclusive"):
... print(key)
...
0
1
2
>>> for key in avl_tree.between(start=0, treatment="exclusive"):
... print(key)
...
1
2
```
Args:
start (_K | None): The key at which to start iterating. If None, iteration
starts at the minimum key. Defaults to None.
stop (_K | None): The key at which to stop iterating. If None, iteration
stops at the maximum key. Defaults to None.
treatment (Literal["inclusive", "exclusive"]): Whether the given start and
stop should be included or excluded from the returned iterator. Has no
effect when start and stop are None. Defaults to "inclusive".
Returns:
Iterator[_K]: An iterator which will iterate over all keys between the given
start and stop.
"""
if self.__root_key is None:
return
stack: Final[list[tuple[_K, bool]]] = []
current_key: _K = self.__root_key
while True:
current_node: AvlTreeNode[_K, _V] = self.__nodes[current_key]
if start is None or start < current_key:
stack.append((current_key, True))
if current_node.lesser_child_key is not None:
current_key = current_node.lesser_child_key
else:
break
elif start == current_key:
if treatment == "inclusive":
stack.append((current_key, True))
break
elif current_node.greater_child_key is not None:
current_key = current_node.greater_child_key
else:
break
elif current_node.greater_child_key is not None:
current_key = current_node.greater_child_key
else:
break
while len(stack) > 0:
key, lesser_child_visited = stack.pop(-1)
node: AvlTreeNode[_K, _V] = self.__nodes[key]
if (
stop is not None # noqa: PLR0916
and (stop < key or (stop == key and treatment == "exclusive"))
and (lesser_child_visited or node.lesser_child_key is None)
):
break
elif node.lesser_child_key is not None and not lesser_child_visited:
stack.append((key, True))
stack.append((node.lesser_child_key, False))
elif node.greater_child_key is not None:
stack.append((node.greater_child_key, False))
yield key
else:
yield key
def __get_closer_key(self, key: _K, current_key: _K) -> _K:
"""Gets the next closest key to the given key.
Args:
key (_K): The key to search for.
current_key (_K): The current key.
Returns:
_K: The next closest key to the given key.
Raises:
KeyError: If the given key is not present in this tree.
"""
current_node: Final[AvlTreeNode[_K, _V]] = self.__nodes[current_key]
if key < current_key and current_node.lesser_child_key is not None:
return current_node.lesser_child_key
if current_key < key and current_node.greater_child_key is not None:
return current_node.greater_child_key
message: Final[str] = f"Key not present in AvlTree: {key!r}"
raise KeyError(message)
def __enforce_avl(self, stack: list[_K]) -> None:
"""Enforces the AVL property on this tree.
Args:
stack (list[_K]): The stack to traverse in reverse order.
"""
while len(stack) > 0:
key: _K = stack.pop(-1)
node: AvlTreeNode[_K, _V] = self.__nodes[key]
balance: int = self.__calculate_balance(node=node)
if -1 <= balance <= 1:
self.__update_height(node=node)
continue
if balance == -2: # noqa: PLR2004
lesser_child_key: _K = cast(_K, node.lesser_child_key)
if self.__calculate_balance(node=self.__nodes[lesser_child_key]) == 1:
node.lesser_child_key = self.__rotate(
key=lesser_child_key,
direction="left",
)
replacement_key: _K = self.__rotate(key=key, direction="right")
else:
greater_child_key: _K = cast(_K, node.greater_child_key)
if self.__calculate_balance(node=self.__nodes[greater_child_key]) == -1:
node.greater_child_key = self.__rotate(
key=greater_child_key,
direction="right",
)
replacement_key = self.__rotate(key=key, direction="left")
parent_node: AvlTreeNode[_K, _V] | None = (
None if len(stack) == 0 else self.__nodes[stack[-1]]
)
if parent_node is None:
self.__root_key = replacement_key
elif parent_node.lesser_child_key == key:
parent_node.lesser_child_key = replacement_key
else:
parent_node.greater_child_key = replacement_key
def __calculate_balance(self, node: AvlTreeNode[_K, _V]) -> int:
"""Calculates the balance factor of the given node.
Args:
node (AvlTreeNode[_K, _V]): The node.
Returns:
int: The balance factor of the given node.
"""
return (
-1
if node.greater_child_key is None
else self.__nodes[node.greater_child_key].height
) - (
-1
if node.lesser_child_key is None
else self.__nodes[node.lesser_child_key].height
)
def __update_height(self, node: AvlTreeNode[_K, _V]) -> None:
"""Updates the height of the given node.
Args:
node (AvlTreeNode[_K, _V]): The node.
"""
node.height = 1 + max(
(
-1
if node.greater_child_key is None
else self.__nodes[node.greater_child_key].height
),
(
-1
if node.lesser_child_key is None
else self.__nodes[node.lesser_child_key].height
),
)
def __rotate(self, key: _K, direction: Literal["left", "right"]) -> _K:
"""Performs a rotation at the given key.
Args:
key (_K): The key to perform a right rotation on.
direction (Literal["left", "right"]): The direction of the rotation.
Returns:
_K: The new root key of this subtree.
Raises:
ValueError: If the shape of the tree is incompatible with the requested
rotation direction.
"""
node: Final[AvlTreeNode[_K, _V]] = self.__nodes[key]
replacement_key: Final[_K] = cast(
_K,
node.greater_child_key if direction == "left" else node.lesser_child_key,
)
replacement_node: Final[AvlTreeNode[_K, _V]] = self.__nodes[replacement_key]
if direction == "left":
node.greater_child_key = replacement_node.lesser_child_key
replacement_node.lesser_child_key = key
else:
node.lesser_child_key = replacement_node.greater_child_key
replacement_node.greater_child_key = key
self.__update_height(node=node)
self.__update_height(node=replacement_node)
return replacement_key
| (mapping: 'Mapping[_K, _V] | None' = None) -> 'None' |
729,636 | avltree._avl_tree | __calculate_balance | Calculates the balance factor of the given node.
Args:
node (AvlTreeNode[_K, _V]): The node.
Returns:
int: The balance factor of the given node.
| def __calculate_balance(self, node: AvlTreeNode[_K, _V]) -> int:
"""Calculates the balance factor of the given node.
Args:
node (AvlTreeNode[_K, _V]): The node.
Returns:
int: The balance factor of the given node.
"""
return (
-1
if node.greater_child_key is None
else self.__nodes[node.greater_child_key].height
) - (
-1
if node.lesser_child_key is None
else self.__nodes[node.lesser_child_key].height
)
| (self, node: avltree._avl_tree_node.AvlTreeNode[~_K, ~_V]) -> int |
729,637 | avltree._avl_tree | __enforce_avl | Enforces the AVL property on this tree.
Args:
stack (list[_K]): The stack to traverse in reverse order.
| def __enforce_avl(self, stack: list[_K]) -> None:
"""Enforces the AVL property on this tree.
Args:
stack (list[_K]): The stack to traverse in reverse order.
"""
while len(stack) > 0:
key: _K = stack.pop(-1)
node: AvlTreeNode[_K, _V] = self.__nodes[key]
balance: int = self.__calculate_balance(node=node)
if -1 <= balance <= 1:
self.__update_height(node=node)
continue
if balance == -2: # noqa: PLR2004
lesser_child_key: _K = cast(_K, node.lesser_child_key)
if self.__calculate_balance(node=self.__nodes[lesser_child_key]) == 1:
node.lesser_child_key = self.__rotate(
key=lesser_child_key,
direction="left",
)
replacement_key: _K = self.__rotate(key=key, direction="right")
else:
greater_child_key: _K = cast(_K, node.greater_child_key)
if self.__calculate_balance(node=self.__nodes[greater_child_key]) == -1:
node.greater_child_key = self.__rotate(
key=greater_child_key,
direction="right",
)
replacement_key = self.__rotate(key=key, direction="left")
parent_node: AvlTreeNode[_K, _V] | None = (
None if len(stack) == 0 else self.__nodes[stack[-1]]
)
if parent_node is None:
self.__root_key = replacement_key
elif parent_node.lesser_child_key == key:
parent_node.lesser_child_key = replacement_key
else:
parent_node.greater_child_key = replacement_key
| (self, stack: list[~_K]) -> NoneType |
729,638 | avltree._avl_tree | __get_closer_key | Gets the next closest key to the given key.
Args:
key (_K): The key to search for.
current_key (_K): The current key.
Returns:
_K: The next closest key to the given key.
Raises:
KeyError: If the given key is not present in this tree.
| def __get_closer_key(self, key: _K, current_key: _K) -> _K:
"""Gets the next closest key to the given key.
Args:
key (_K): The key to search for.
current_key (_K): The current key.
Returns:
_K: The next closest key to the given key.
Raises:
KeyError: If the given key is not present in this tree.
"""
current_node: Final[AvlTreeNode[_K, _V]] = self.__nodes[current_key]
if key < current_key and current_node.lesser_child_key is not None:
return current_node.lesser_child_key
if current_key < key and current_node.greater_child_key is not None:
return current_node.greater_child_key
message: Final[str] = f"Key not present in AvlTree: {key!r}"
raise KeyError(message)
| (self, key: ~_K, current_key: ~_K) -> ~_K |
729,639 | avltree._avl_tree | __rotate | Performs a rotation at the given key.
Args:
key (_K): The key to perform a right rotation on.
direction (Literal["left", "right"]): The direction of the rotation.
Returns:
_K: The new root key of this subtree.
Raises:
ValueError: If the shape of the tree is incompatible with the requested
rotation direction.
| def __rotate(self, key: _K, direction: Literal["left", "right"]) -> _K:
"""Performs a rotation at the given key.
Args:
key (_K): The key to perform a right rotation on.
direction (Literal["left", "right"]): The direction of the rotation.
Returns:
_K: The new root key of this subtree.
Raises:
ValueError: If the shape of the tree is incompatible with the requested
rotation direction.
"""
node: Final[AvlTreeNode[_K, _V]] = self.__nodes[key]
replacement_key: Final[_K] = cast(
_K,
node.greater_child_key if direction == "left" else node.lesser_child_key,
)
replacement_node: Final[AvlTreeNode[_K, _V]] = self.__nodes[replacement_key]
if direction == "left":
node.greater_child_key = replacement_node.lesser_child_key
replacement_node.lesser_child_key = key
else:
node.lesser_child_key = replacement_node.greater_child_key
replacement_node.greater_child_key = key
self.__update_height(node=node)
self.__update_height(node=replacement_node)
return replacement_key
| (self, key: ~_K, direction: Literal['left', 'right']) -> ~_K |
729,640 | avltree._avl_tree | __update_height | Updates the height of the given node.
Args:
node (AvlTreeNode[_K, _V]): The node.
| def __update_height(self, node: AvlTreeNode[_K, _V]) -> None:
"""Updates the height of the given node.
Args:
node (AvlTreeNode[_K, _V]): The node.
"""
node.height = 1 + max(
(
-1
if node.greater_child_key is None
else self.__nodes[node.greater_child_key].height
),
(
-1
if node.lesser_child_key is None
else self.__nodes[node.lesser_child_key].height
),
)
| (self, node: avltree._avl_tree_node.AvlTreeNode[~_K, ~_V]) -> NoneType |
729,642 | avltree._avl_tree | __delitem__ | Deletes the given key from this tree.
This method has an amortized and worst-case time complexity of O[log(n)].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> del avl_tree[0]
>>> avl_tree
AvlTree({1: 'b'})
```
Args:
__k (_K): The key to delete from this tree.
Raises:
KeyError: If the given key is not present in this tree.
| def __delitem__(self, __k: _K) -> None: # noqa: C901, PLR0912
"""Deletes the given key from this tree.
This method has an amortized and worst-case time complexity of O[log(n)].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> del avl_tree[0]
>>> avl_tree
AvlTree({1: 'b'})
```
Args:
__k (_K): The key to delete from this tree.
Raises:
KeyError: If the given key is not present in this tree.
"""
if self.__root_key is None:
message: str = f"Key not present in AvlTree: {__k!r}"
raise KeyError(message)
# Find the node to discard and its parent node
parent: AvlTreeNode[_K, _V] | None = None
node_key: _K = self.__root_key
stack: Final[list[_K]] = [node_key]
node: AvlTreeNode[_K, _V] = self.__nodes[node_key]
while node_key != __k:
parent = node
node_key = self.__get_closer_key(key=__k, current_key=node_key)
stack.append(node_key)
node = self.__nodes[node_key]
# Find the key of the node with which to replace the existing node
replacement_key: _K | None = None
if node.lesser_child_key is not None and node.greater_child_key is None:
replacement_key = node.lesser_child_key
elif node.lesser_child_key is None and node.greater_child_key is not None:
replacement_key = node.greater_child_key
elif node.lesser_child_key is not None and node.greater_child_key is not None:
# Find the next highest node than the one to remove
successor_parent: AvlTreeNode[_K, _V] | None = None
replacement_key = node.greater_child_key
successor: AvlTreeNode[_K, _V] = self.__nodes[replacement_key]
while successor.lesser_child_key is not None:
successor_parent = successor
stack.append(replacement_key)
replacement_key = successor.lesser_child_key
successor = self.__nodes[successor.lesser_child_key]
# Swap the successor node with the node to replace
if successor_parent is not None and successor.greater_child_key is None:
successor_parent.lesser_child_key = None
successor.greater_child_key = node.greater_child_key
elif successor_parent is not None:
successor_parent.lesser_child_key = successor.greater_child_key
successor.greater_child_key = node.greater_child_key
successor.lesser_child_key = node.lesser_child_key
# Swap the node to its replacement
if parent is None:
self.__root_key = replacement_key
elif parent.lesser_child_key == node_key:
parent.lesser_child_key = replacement_key
else:
parent.greater_child_key = replacement_key
del self.__nodes[node_key]
if replacement_key is None:
stack.remove(node_key)
else:
stack[stack.index(node_key)] = replacement_key
self.__enforce_avl(stack=stack)
| (self, _AvlTree__k: ~_K) -> NoneType |
729,644 | avltree._avl_tree | __getitem__ | Gets the value associated with the given key.
This method has an amortized and worst-case time complexity of O[log(n)].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> avl_tree[0]
'a'
>>> avl_tree[2]
KeyError: 'Key not present in AvlTree: 2'
```
Args:
__k (_K): The key.
Returns:
_V: The value associated with the given key.
Raises:
KeyError: If the given key is not present in this tree.
| def __getitem__(self, __k: _K) -> _V:
"""Gets the value associated with the given key.
This method has an amortized and worst-case time complexity of O[log(n)].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> avl_tree[0]
'a'
>>> avl_tree[2]
KeyError: 'Key not present in AvlTree: 2'
```
Args:
__k (_K): The key.
Returns:
_V: The value associated with the given key.
Raises:
KeyError: If the given key is not present in this tree.
"""
if self.__root_key is None:
message: str = f"Key not present in AvlTree: {__k!r}"
raise KeyError(message)
current_key: _K = self.__root_key
current_node: AvlTreeNode[_K, _V] = self.__nodes[current_key]
while current_key != __k:
current_key = self.__get_closer_key(key=__k, current_key=current_key)
current_node = self.__nodes[current_key]
return current_node.value
| (self, _AvlTree__k: ~_K) -> ~_V |
729,645 | avltree._avl_tree | __init__ | Constructor.
Inserting the elements of a passed-in mapping has an amortized and worst-case
time complexity of O[n*log(n)].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> avl_tree
AvlTree({0: 'a', 1: 'b'})
```
Args:
mapping (dict[_K, _V] | None): An optional initial mapping of items to add
to this tree. Defaults to None.
| def __init__(self, mapping: Mapping[_K, _V] | None = None) -> None:
"""Constructor.
Inserting the elements of a passed-in mapping has an amortized and worst-case
time complexity of O[n*log(n)].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> avl_tree
AvlTree({0: 'a', 1: 'b'})
```
Args:
mapping (dict[_K, _V] | None): An optional initial mapping of items to add
to this tree. Defaults to None.
"""
self.__nodes: Final[dict[_K, AvlTreeNode[_K, _V]]] = {}
self.__root_key: _K | None = None
if mapping is not None:
for key, value in mapping.items():
self[key] = value
| (self, mapping: Optional[Mapping[~_K, ~_V]] = None) -> NoneType |
729,646 | avltree._avl_tree | __iter__ | Iterates over all keys contained in this tree in sort order.
Getting the first key has an amortized and worst-case time complexity of
O[log(n)]. Iterating over all keys has an amortized and worst-case time
complexity of O[n].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> for key in avl_tree:
... print(key)
...
0
1
```
Returns:
Iterator[_K]: The iterator object.
| def __iter__(self) -> Iterator[_K]:
"""Iterates over all keys contained in this tree in sort order.
Getting the first key has an amortized and worst-case time complexity of
O[log(n)]. Iterating over all keys has an amortized and worst-case time
complexity of O[n].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> for key in avl_tree:
... print(key)
...
0
1
```
Returns:
Iterator[_K]: The iterator object.
"""
return self.between()
| (self) -> Iterator[~_K] |
729,647 | avltree._avl_tree | __len__ | Returns the number of items contained in this tree.
This method has an amortized and worst-case time complexity of O[1].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> len(avl_tree)
2
```
Returns:
int: The number of items contained in this tree.
| def __len__(self) -> int:
"""Returns the number of items contained in this tree.
This method has an amortized and worst-case time complexity of O[1].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> len(avl_tree)
2
```
Returns:
int: The number of items contained in this tree.
"""
return len(self.__nodes)
| (self) -> int |
729,648 | avltree._avl_tree | __repr__ | Builds a developer-friendly string representation of this AvlTree.
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> repr(avl_tree)
"AvlTree({0: 'a', 1: 'b'})"
```
Returns:
str: A string representation of this AvlTree.
| def __repr__(self) -> str:
"""Builds a developer-friendly string representation of this AvlTree.
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> repr(avl_tree)
"AvlTree({0: 'a', 1: 'b'})"
```
Returns:
str: A string representation of this AvlTree.
"""
return f"AvlTree({{{', '.join(f'{k!r}: {v!r}' for k, v in self.items())}}})"
| (self) -> str |
729,649 | avltree._avl_tree | __setitem__ | Maps the given key to the given value in this tree.
This method has an amortized and worst-case time complexity of O[log(n)].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> avl_tree[0] = 'foo'
>>> avl_tree[2] = 'c'
>>> avl_tree
AvlTree({0: 'foo', 1: 'b', 2: 'c'})
```
Args:
__k (_K): The key to map.
__v (_V): The value to associate with the key.
| def __setitem__(self, __k: _K, __v: _V) -> None:
"""Maps the given key to the given value in this tree.
This method has an amortized and worst-case time complexity of O[log(n)].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> avl_tree[0] = 'foo'
>>> avl_tree[2] = 'c'
>>> avl_tree
AvlTree({0: 'foo', 1: 'b', 2: 'c'})
```
Args:
__k (_K): The key to map.
__v (_V): The value to associate with the key.
"""
if __k in self.__nodes:
self.__nodes[__k].value = __v
return
if self.__root_key is None:
self.__root_key = __k
self.__nodes[self.__root_key] = AvlTreeNode[_K, _V](value=__v)
return
stack: Final[list[_K]] = [self.__root_key]
current_node: AvlTreeNode[_K, _V] = self.__nodes[stack[-1]]
while True:
if __k < stack[-1] and current_node.lesser_child_key is None:
current_node.lesser_child_key = __k
self.__nodes[__k] = AvlTreeNode[_K, _V](value=__v)
break
if stack[-1] < __k and current_node.greater_child_key is None:
current_node.greater_child_key = __k
self.__nodes[__k] = AvlTreeNode[_K, _V](value=__v)
break
if __k < stack[-1] and current_node.lesser_child_key is not None:
stack.append(current_node.lesser_child_key)
current_node = self.__nodes[stack[-1]]
elif current_node.greater_child_key is not None:
stack.append(current_node.greater_child_key)
current_node = self.__nodes[stack[-1]]
self.__enforce_avl(stack=stack)
| (self, _AvlTree__k: ~_K, _AvlTree__v: ~_V) -> NoneType |
729,650 | avltree._avl_tree | between | Iterates over all keys between the given start and stop in sort order.
Getting the first key has an amortized and worst-case time complexity of
O[log(n)]. Iterating over all keys has an amortized and worst-case time
complexity of O[k], where k is the number of items in only the interval between
start and stop.
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b', 2: 'c'})
>>> for key in avl_tree.between(start=0, stop=2, treatment="inclusive"):
... print(key)
...
0
1
2
>>> for key in avl_tree.between(start=0, treatment="exclusive"):
... print(key)
...
1
2
```
Args:
start (_K | None): The key at which to start iterating. If None, iteration
starts at the minimum key. Defaults to None.
stop (_K | None): The key at which to stop iterating. If None, iteration
stops at the maximum key. Defaults to None.
treatment (Literal["inclusive", "exclusive"]): Whether the given start and
stop should be included or excluded from the returned iterator. Has no
effect when start and stop are None. Defaults to "inclusive".
Returns:
Iterator[_K]: An iterator which will iterate over all keys between the given
start and stop.
| def between( # noqa: C901, PLR0912
self,
start: _K | None = None,
stop: _K | None = None,
treatment: Literal["inclusive", "exclusive"] = "inclusive",
) -> Iterator[_K]:
"""Iterates over all keys between the given start and stop in sort order.
Getting the first key has an amortized and worst-case time complexity of
O[log(n)]. Iterating over all keys has an amortized and worst-case time
complexity of O[k], where k is the number of items in only the interval between
start and stop.
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b', 2: 'c'})
>>> for key in avl_tree.between(start=0, stop=2, treatment="inclusive"):
... print(key)
...
0
1
2
>>> for key in avl_tree.between(start=0, treatment="exclusive"):
... print(key)
...
1
2
```
Args:
start (_K | None): The key at which to start iterating. If None, iteration
starts at the minimum key. Defaults to None.
stop (_K | None): The key at which to stop iterating. If None, iteration
stops at the maximum key. Defaults to None.
treatment (Literal["inclusive", "exclusive"]): Whether the given start and
stop should be included or excluded from the returned iterator. Has no
effect when start and stop are None. Defaults to "inclusive".
Returns:
Iterator[_K]: An iterator which will iterate over all keys between the given
start and stop.
"""
if self.__root_key is None:
return
stack: Final[list[tuple[_K, bool]]] = []
current_key: _K = self.__root_key
while True:
current_node: AvlTreeNode[_K, _V] = self.__nodes[current_key]
if start is None or start < current_key:
stack.append((current_key, True))
if current_node.lesser_child_key is not None:
current_key = current_node.lesser_child_key
else:
break
elif start == current_key:
if treatment == "inclusive":
stack.append((current_key, True))
break
elif current_node.greater_child_key is not None:
current_key = current_node.greater_child_key
else:
break
elif current_node.greater_child_key is not None:
current_key = current_node.greater_child_key
else:
break
while len(stack) > 0:
key, lesser_child_visited = stack.pop(-1)
node: AvlTreeNode[_K, _V] = self.__nodes[key]
if (
stop is not None # noqa: PLR0916
and (stop < key or (stop == key and treatment == "exclusive"))
and (lesser_child_visited or node.lesser_child_key is None)
):
break
elif node.lesser_child_key is not None and not lesser_child_visited:
stack.append((key, True))
stack.append((node.lesser_child_key, False))
elif node.greater_child_key is not None:
stack.append((node.greater_child_key, False))
yield key
else:
yield key
| (self, start: Optional[~_K] = None, stop: Optional[~_K] = None, treatment: Literal['inclusive', 'exclusive'] = 'inclusive') -> Iterator[~_K] |
729,655 | avltree._avl_tree | maximum | Gets the maximum key contained in this tree.
This method has an amortized and worst-case time complexity of O[log(n)].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> avl_tree.maximum()
1
```
Returns:
_K: The maximum key.
Raises:
ValueError: If there are no keys present in this tree.
| def maximum(self) -> _K:
"""Gets the maximum key contained in this tree.
This method has an amortized and worst-case time complexity of O[log(n)].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> avl_tree.maximum()
1
```
Returns:
_K: The maximum key.
Raises:
ValueError: If there are no keys present in this tree.
"""
if self.__root_key is None:
message: Final[str] = "Cannot get the maximum of an empty AvlTree"
raise ValueError(message)
key: _K = self.__root_key
node: AvlTreeNode[_K, _V] = self.__nodes[key]
while node.greater_child_key is not None:
key = node.greater_child_key
node = self.__nodes[key]
return key
| (self) -> ~_K |
729,656 | avltree._avl_tree | minimum | Gets the minimum key contained in this tree.
This method has an amortized and worst-case time complexity of O[log(n)].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> avl_tree.minimum()
0
```
Returns:
_K: The minimum key.
Raises:
ValueError: If there are no keys present in this tree.
| def minimum(self) -> _K:
"""Gets the minimum key contained in this tree.
This method has an amortized and worst-case time complexity of O[log(n)].
Example usage:
```
>>> from avltree import AvlTree
>>> avl_tree = AvlTree[int, str]({0: 'a', 1: 'b'})
>>> avl_tree.minimum()
0
```
Returns:
_K: The minimum key.
Raises:
ValueError: If there are no keys present in this tree.
"""
if self.__root_key is None:
message: Final[str] = "Cannot get the minimum of an empty AvlTree"
raise ValueError(message)
key: _K = self.__root_key
while self.__nodes[key].lesser_child_key is not None:
key = self.__nodes[key].lesser_child_key # type: ignore[assignment]
return key
| (self) -> ~_K |
729,665 | allpairspy.allpairs | AllPairs | null | class AllPairs:
def __init__(self, parameters, filter_func=lambda x: True, previously_tested=None, n=2):
"""
TODO: check that input arrays are:
- (optional) has no duplicated values inside single array / or compress such values
"""
if not previously_tested:
previously_tested = [[]]
self.__validate_parameter(parameters)
self.__is_ordered_dict_param = isinstance(parameters, OrderedDict)
self.__param_name_list = self.__extract_param_name_list(parameters)
self.__pairs_class = namedtuple("Pairs", self.__param_name_list)
self.__filter_func = filter_func
self.__n = n
self.__pairs = PairsStorage(n)
value_matrix = self.__extract_value_matrix(parameters)
self.__max_unique_pairs_expected = get_max_combination_number(value_matrix, n)
self.__working_item_matrix = self.__get_working_item_matrix(value_matrix)
for arr in previously_tested:
if not arr:
continue
if len(arr) != len(self.__working_item_matrix):
raise RuntimeError("previously tested combination is not complete")
if not self.__filter_func(arr):
raise ValueError("invalid tested combination is provided")
tested = []
for i, val in enumerate(arr):
idxs = [
Item(item.id, 0) for item in self.__working_item_matrix[i] if item.value == val
]
if len(idxs) != 1:
raise ValueError(
"value from previously tested combination is not "
"found in the parameters or found more than "
"once"
)
tested.append(idxs[0])
self.__pairs.add_sequence(tested)
def __iter__(self):
return self
def next(self):
return self.__next__()
def __next__(self):
assert len(self.__pairs) <= self.__max_unique_pairs_expected
if len(self.__pairs) == self.__max_unique_pairs_expected:
# no reasons to search further - all pairs are found
raise StopIteration()
previous_unique_pairs_count = len(self.__pairs)
chosen_item_list = [None] * len(self.__working_item_matrix)
indexes = [None] * len(self.__working_item_matrix)
direction = 1
i = 0
while -1 < i < len(self.__working_item_matrix):
if direction == 1:
# move forward
self.__resort_working_array(chosen_item_list[:i], i)
indexes[i] = 0
elif direction == 0 or direction == -1:
# scan current array or go back
indexes[i] += 1
if indexes[i] >= len(self.__working_item_matrix[i]):
direction = -1
if i == 0:
raise StopIteration()
i += direction
continue
direction = 0
else:
raise ValueError(f"next(): unknown 'direction' code '{direction}'")
chosen_item_list[i] = self.__working_item_matrix[i][indexes[i]]
if self.__filter_func(self.__get_values(chosen_item_list[: i + 1])):
assert direction > -1
direction = 1
else:
direction = 0
i += direction
if len(self.__working_item_matrix) != len(chosen_item_list):
raise StopIteration()
self.__pairs.add_sequence(chosen_item_list)
if len(self.__pairs) == previous_unique_pairs_count:
# could not find new unique pairs - stop
raise StopIteration()
# replace returned array elements with real values and return it
return self.__get_iteration_value(chosen_item_list)
def __validate_parameter(self, value):
if isinstance(value, OrderedDict):
for parameter_list in value.values():
if not parameter_list:
raise ValueError("each parameter arrays must have at least one item")
return
if len(value) < 2:
raise ValueError("must provide more than one option")
for parameter_list in value:
if not parameter_list:
raise ValueError("each parameter arrays must have at least one item")
def __resort_working_array(self, chosen_item_list, num):
for item in self.__working_item_matrix[num]:
data_node = self.__pairs.get_node_info(item)
new_combs = [
# numbers of new combinations to be created if this item is
# appended to array
{key(z) for z in combinations(chosen_item_list + [item], i + 1)}
- self.__pairs.get_combs()[i]
for i in range(0, self.__n)
]
# weighting the node node that creates most of new pairs is the best
weights = [-len(new_combs[-1])]
# less used outbound connections most likely to produce more new
# pairs while search continues
weights.extend(
[len(data_node.out)]
+ [len(x) for x in reversed(new_combs[:-1])]
+ [-data_node.counter] # less used node is better
)
# otherwise we will prefer node with most of free inbound
# connections; somehow it works out better ;)
weights.append(-len(data_node.in_))
item.set_weights(weights)
self.__working_item_matrix[num].sort(key=cmp_to_key(cmp_item))
def __get_working_item_matrix(self, parameter_matrix):
return [
[
Item(f"a{param_idx:d}v{value_idx:d}", value)
for value_idx, value in enumerate(value_list)
]
for param_idx, value_list in enumerate(parameter_matrix)
]
@staticmethod
def __get_values(item_list):
return [item.value for item in item_list]
def __get_iteration_value(self, item_list):
if not self.__param_name_list:
return [item.value for item in item_list]
return self.__pairs_class(*[item.value for item in item_list])
def __extract_param_name_list(self, parameters):
if not self.__is_ordered_dict_param:
return []
return list(parameters)
def __extract_value_matrix(self, parameters):
if not self.__is_ordered_dict_param:
return parameters
return [v for v in parameters.values()]
| (parameters, filter_func=<function AllPairs.<lambda> at 0x7f625a34cca0>, previously_tested=None, n=2) |
729,666 | allpairspy.allpairs | __extract_param_name_list | null | def __extract_param_name_list(self, parameters):
if not self.__is_ordered_dict_param:
return []
return list(parameters)
| (self, parameters) |
729,667 | allpairspy.allpairs | __extract_value_matrix | null | def __extract_value_matrix(self, parameters):
if not self.__is_ordered_dict_param:
return parameters
return [v for v in parameters.values()]
| (self, parameters) |
729,668 | allpairspy.allpairs | __get_iteration_value | null | def __get_iteration_value(self, item_list):
if not self.__param_name_list:
return [item.value for item in item_list]
return self.__pairs_class(*[item.value for item in item_list])
| (self, item_list) |
729,669 | allpairspy.allpairs | __get_values | null | @staticmethod
def __get_values(item_list):
return [item.value for item in item_list]
| (item_list) |
729,670 | allpairspy.allpairs | __get_working_item_matrix | null | def __get_working_item_matrix(self, parameter_matrix):
return [
[
Item(f"a{param_idx:d}v{value_idx:d}", value)
for value_idx, value in enumerate(value_list)
]
for param_idx, value_list in enumerate(parameter_matrix)
]
| (self, parameter_matrix) |
729,671 | allpairspy.allpairs | __resort_working_array | null | def __resort_working_array(self, chosen_item_list, num):
for item in self.__working_item_matrix[num]:
data_node = self.__pairs.get_node_info(item)
new_combs = [
# numbers of new combinations to be created if this item is
# appended to array
{key(z) for z in combinations(chosen_item_list + [item], i + 1)}
- self.__pairs.get_combs()[i]
for i in range(0, self.__n)
]
# weighting the node node that creates most of new pairs is the best
weights = [-len(new_combs[-1])]
# less used outbound connections most likely to produce more new
# pairs while search continues
weights.extend(
[len(data_node.out)]
+ [len(x) for x in reversed(new_combs[:-1])]
+ [-data_node.counter] # less used node is better
)
# otherwise we will prefer node with most of free inbound
# connections; somehow it works out better ;)
weights.append(-len(data_node.in_))
item.set_weights(weights)
self.__working_item_matrix[num].sort(key=cmp_to_key(cmp_item))
| (self, chosen_item_list, num) |
729,672 | allpairspy.allpairs | __validate_parameter | null | def __validate_parameter(self, value):
if isinstance(value, OrderedDict):
for parameter_list in value.values():
if not parameter_list:
raise ValueError("each parameter arrays must have at least one item")
return
if len(value) < 2:
raise ValueError("must provide more than one option")
for parameter_list in value:
if not parameter_list:
raise ValueError("each parameter arrays must have at least one item")
| (self, value) |
729,673 | allpairspy.allpairs | __init__ |
TODO: check that input arrays are:
- (optional) has no duplicated values inside single array / or compress such values
| def __init__(self, parameters, filter_func=lambda x: True, previously_tested=None, n=2):
"""
TODO: check that input arrays are:
- (optional) has no duplicated values inside single array / or compress such values
"""
if not previously_tested:
previously_tested = [[]]
self.__validate_parameter(parameters)
self.__is_ordered_dict_param = isinstance(parameters, OrderedDict)
self.__param_name_list = self.__extract_param_name_list(parameters)
self.__pairs_class = namedtuple("Pairs", self.__param_name_list)
self.__filter_func = filter_func
self.__n = n
self.__pairs = PairsStorage(n)
value_matrix = self.__extract_value_matrix(parameters)
self.__max_unique_pairs_expected = get_max_combination_number(value_matrix, n)
self.__working_item_matrix = self.__get_working_item_matrix(value_matrix)
for arr in previously_tested:
if not arr:
continue
if len(arr) != len(self.__working_item_matrix):
raise RuntimeError("previously tested combination is not complete")
if not self.__filter_func(arr):
raise ValueError("invalid tested combination is provided")
tested = []
for i, val in enumerate(arr):
idxs = [
Item(item.id, 0) for item in self.__working_item_matrix[i] if item.value == val
]
if len(idxs) != 1:
raise ValueError(
"value from previously tested combination is not "
"found in the parameters or found more than "
"once"
)
tested.append(idxs[0])
self.__pairs.add_sequence(tested)
| (self, parameters, filter_func=<function AllPairs.<lambda> at 0x7f625a34cca0>, previously_tested=None, n=2) |
729,675 | allpairspy.allpairs | __next__ | null | def __next__(self):
assert len(self.__pairs) <= self.__max_unique_pairs_expected
if len(self.__pairs) == self.__max_unique_pairs_expected:
# no reasons to search further - all pairs are found
raise StopIteration()
previous_unique_pairs_count = len(self.__pairs)
chosen_item_list = [None] * len(self.__working_item_matrix)
indexes = [None] * len(self.__working_item_matrix)
direction = 1
i = 0
while -1 < i < len(self.__working_item_matrix):
if direction == 1:
# move forward
self.__resort_working_array(chosen_item_list[:i], i)
indexes[i] = 0
elif direction == 0 or direction == -1:
# scan current array or go back
indexes[i] += 1
if indexes[i] >= len(self.__working_item_matrix[i]):
direction = -1
if i == 0:
raise StopIteration()
i += direction
continue
direction = 0
else:
raise ValueError(f"next(): unknown 'direction' code '{direction}'")
chosen_item_list[i] = self.__working_item_matrix[i][indexes[i]]
if self.__filter_func(self.__get_values(chosen_item_list[: i + 1])):
assert direction > -1
direction = 1
else:
direction = 0
i += direction
if len(self.__working_item_matrix) != len(chosen_item_list):
raise StopIteration()
self.__pairs.add_sequence(chosen_item_list)
if len(self.__pairs) == previous_unique_pairs_count:
# could not find new unique pairs - stop
raise StopIteration()
# replace returned array elements with real values and return it
return self.__get_iteration_value(chosen_item_list)
| (self) |
729,679 | scs | SCS | null | class SCS(object):
def __init__(self, data, cone, **settings):
"""Initialize the SCS solver.
@param data Dictionary containing keys `P`, `A`, `b`, `c`.
@param cone Dictionary containing cone information.
@param settings Settings as kwargs, see docs.
"""
self._settings = settings
if not data or not cone:
raise ValueError("Missing data or cone information")
if "b" not in data or "c" not in data:
raise ValueError("Missing one of b, c from data dictionary")
if "A" not in data:
raise ValueError("Missing A from data dictionary")
A = data["A"]
b = data["b"]
c = data["c"]
if A is None or b is None or c is None:
raise ValueError("Incomplete data specification")
if not sparse.issparse(A):
raise TypeError("A is required to be a sparse matrix")
if not sparse.isspmatrix_csc(A):
warn(
"Converting A to a CSC (compressed sparse column) matrix;"
" may take a while."
)
A = A.tocsc()
if sparse.issparse(b):
b = b.todense()
if sparse.issparse(c):
c = c.todense()
m = len(b)
n = len(c)
if not A.has_sorted_indices:
A.sort_indices()
Adata, Aindices, Acolptr = A.data, A.indices, A.indptr
if A.shape != (m, n):
raise ValueError("A shape not compatible with b,c")
Pdata, Pindices, Pcolptr = None, None, None
if "P" in data:
P = data["P"]
if P is not None:
if not sparse.issparse(P):
raise TypeError("P is required to be a sparse matrix")
if P.shape != (n, n):
raise ValueError("P shape not compatible with A,b,c")
if not sparse.isspmatrix_csc(P):
warn(
"Converting P to a CSC (compressed sparse column) "
"matrix; may take a while."
)
P = P.tocsc()
# extract upper triangular component only
if sparse.tril(P, -1).data.size > 0:
P = sparse.triu(P, format="csc")
if not P.has_sorted_indices:
P.sort_indices()
Pdata, Pindices, Pcolptr = P.data, P.indices, P.indptr
# Which scs are we using (scs_direct, scs_indirect, ...)
_scs = _select_scs_module(self._settings)
# Initialize solver
self._solver = _scs.SCS(
(m, n),
Adata,
Aindices,
Acolptr,
Pdata,
Pindices,
Pcolptr,
b,
c,
cone,
**self._settings
)
def solve(self, warm_start=True, x=None, y=None, s=None):
"""Solve the optimization problem.
@param warm_start Whether to warm-start. By default the solution of
the previous problem is used as the warm-start. The
warm-start can be overriden to another value by
passing `x`, `y`, `s` args.
@param x Primal warm-start override.
@param y Dual warm-start override.
@param s Slack warm-start override.
@return dictionary with solution with keys:
'x' - primal solution
's' - primal slack solution
'y' - dual solution
'info' - information dictionary (see docs)
"""
return self._solver.solve(warm_start, x, y, s)
def update(self, b=None, c=None):
"""Update the `b` vector, `c` vector, or both, before another solve.
After a solve we can reuse the SCS workspace in another solve if the
only problem data that has changed are the `b` and `c` vectors.
@param b New `b` vector.
@param c New `c` vector.
"""
self._solver.update(b, c)
| (data, cone, **settings) |
729,680 | scs | __init__ | Initialize the SCS solver.
@param data Dictionary containing keys `P`, `A`, `b`, `c`.
@param cone Dictionary containing cone information.
@param settings Settings as kwargs, see docs.
| def __init__(self, data, cone, **settings):
"""Initialize the SCS solver.
@param data Dictionary containing keys `P`, `A`, `b`, `c`.
@param cone Dictionary containing cone information.
@param settings Settings as kwargs, see docs.
"""
self._settings = settings
if not data or not cone:
raise ValueError("Missing data or cone information")
if "b" not in data or "c" not in data:
raise ValueError("Missing one of b, c from data dictionary")
if "A" not in data:
raise ValueError("Missing A from data dictionary")
A = data["A"]
b = data["b"]
c = data["c"]
if A is None or b is None or c is None:
raise ValueError("Incomplete data specification")
if not sparse.issparse(A):
raise TypeError("A is required to be a sparse matrix")
if not sparse.isspmatrix_csc(A):
warn(
"Converting A to a CSC (compressed sparse column) matrix;"
" may take a while."
)
A = A.tocsc()
if sparse.issparse(b):
b = b.todense()
if sparse.issparse(c):
c = c.todense()
m = len(b)
n = len(c)
if not A.has_sorted_indices:
A.sort_indices()
Adata, Aindices, Acolptr = A.data, A.indices, A.indptr
if A.shape != (m, n):
raise ValueError("A shape not compatible with b,c")
Pdata, Pindices, Pcolptr = None, None, None
if "P" in data:
P = data["P"]
if P is not None:
if not sparse.issparse(P):
raise TypeError("P is required to be a sparse matrix")
if P.shape != (n, n):
raise ValueError("P shape not compatible with A,b,c")
if not sparse.isspmatrix_csc(P):
warn(
"Converting P to a CSC (compressed sparse column) "
"matrix; may take a while."
)
P = P.tocsc()
# extract upper triangular component only
if sparse.tril(P, -1).data.size > 0:
P = sparse.triu(P, format="csc")
if not P.has_sorted_indices:
P.sort_indices()
Pdata, Pindices, Pcolptr = P.data, P.indices, P.indptr
# Which scs are we using (scs_direct, scs_indirect, ...)
_scs = _select_scs_module(self._settings)
# Initialize solver
self._solver = _scs.SCS(
(m, n),
Adata,
Aindices,
Acolptr,
Pdata,
Pindices,
Pcolptr,
b,
c,
cone,
**self._settings
)
| (self, data, cone, **settings) |
729,681 | scs | solve | Solve the optimization problem.
@param warm_start Whether to warm-start. By default the solution of
the previous problem is used as the warm-start. The
warm-start can be overriden to another value by
passing `x`, `y`, `s` args.
@param x Primal warm-start override.
@param y Dual warm-start override.
@param s Slack warm-start override.
@return dictionary with solution with keys:
'x' - primal solution
's' - primal slack solution
'y' - dual solution
'info' - information dictionary (see docs)
| def solve(self, warm_start=True, x=None, y=None, s=None):
"""Solve the optimization problem.
@param warm_start Whether to warm-start. By default the solution of
the previous problem is used as the warm-start. The
warm-start can be overriden to another value by
passing `x`, `y`, `s` args.
@param x Primal warm-start override.
@param y Dual warm-start override.
@param s Slack warm-start override.
@return dictionary with solution with keys:
'x' - primal solution
's' - primal slack solution
'y' - dual solution
'info' - information dictionary (see docs)
"""
return self._solver.solve(warm_start, x, y, s)
| (self, warm_start=True, x=None, y=None, s=None) |
729,682 | scs | update | Update the `b` vector, `c` vector, or both, before another solve.
After a solve we can reuse the SCS workspace in another solve if the
only problem data that has changed are the `b` and `c` vectors.
@param b New `b` vector.
@param c New `c` vector.
| def update(self, b=None, c=None):
"""Update the `b` vector, `c` vector, or both, before another solve.
After a solve we can reuse the SCS workspace in another solve if the
only problem data that has changed are the `b` and `c` vectors.
@param b New `b` vector.
@param c New `c` vector.
"""
self._solver.update(b, c)
| (self, b=None, c=None) |
729,684 | scs | _select_scs_module | null | def _select_scs_module(stgs):
if stgs.pop("gpu", False): # False by default
if not stgs.pop("use_indirect", _USE_INDIRECT_DEFAULT):
raise NotImplementedError(
"GPU direct solver not yet available, pass `use_indirect=True`."
)
import _scs_gpu
return _scs_gpu
if stgs.pop("mkl", False): # False by default
if stgs.pop("use_indirect", False):
raise NotImplementedError(
"MKL indirect solver not yet available, pass `use_indirect=False`."
)
import _scs_mkl
return _scs_mkl
if stgs.pop("use_indirect", _USE_INDIRECT_DEFAULT):
import _scs_indirect
return _scs_indirect
return _scs_direct
| (stgs) |
729,685 | scs | solve | null | def solve(data, cone, **settings):
solver = SCS(data, cone, **settings)
# Hack out the warm start data from old API
x = y = s = None
if "x" in data:
x = data["x"]
if "y" in data:
y = data["y"]
if "s" in data:
s = data["s"]
return solver.solve(warm_start=True, x=x, y=y, s=s)
| (data, cone, **settings) |
729,687 | aiconfig_extension_groq.groq | GroqParser | null | class GroqParser(DefaultOpenAIParser):
def __init__(self, model: str):
super().__init__(model_id = model)
# "Model" field is a custom name for the specific model they want to use
# when registering the Groq model parser. See the cookbook for reference:
# https://github.com/lastmile-ai/aiconfig/blob/main/cookbooks/Groq/aiconfig_model_registry.py#L15
self.model = model
async def deserialize(self, *args, **kwargs):
# Logic doesn't depend on input, pass it forward to the openai model parser deserialize
openai_deserialized_params = await super().deserialize(*args, **kwargs)
openai_deserialized_params["model"] = self.model
return openai_deserialized_params
def initialize_openai_client(self) -> None:
# Initialize Groq Client
self.client = Groq(
api_key=os.getenv("GROQ_API_KEY"),
)
| (model: str) |
729,688 | aiconfig_extension_groq.groq | __init__ | null | def __init__(self, model: str):
super().__init__(model_id = model)
# "Model" field is a custom name for the specific model they want to use
# when registering the Groq model parser. See the cookbook for reference:
# https://github.com/lastmile-ai/aiconfig/blob/main/cookbooks/Groq/aiconfig_model_registry.py#L15
self.model = model
| (self, model: str) |
729,690 | aiconfig.model_parser | get_model_settings |
Extracts the AI model's settings from the configuration. If both prompt and config level settings are defined, merge them with prompt settings taking precedence.
Args:
prompt: The prompt object.
Returns:
dict: The settings of the model used by the prompt.
| def get_model_settings(
self, prompt: Prompt, aiconfig: "AIConfigRuntime"
) -> Dict[str, Any]:
"""
Extracts the AI model's settings from the configuration. If both prompt and config level settings are defined, merge them with prompt settings taking precedence.
Args:
prompt: The prompt object.
Returns:
dict: The settings of the model used by the prompt.
"""
if not prompt:
return aiconfig.get_global_settings(self.id())
# Check if the prompt exists in the config
if (
prompt.name not in aiconfig.prompt_index
or aiconfig.prompt_index[prompt.name] != prompt
):
raise IndexError(f"Prompt '{prompt.name}' not in config.")
model_metadata = prompt.metadata.model if prompt.metadata else None
if model_metadata is None:
# Use Default Model
default_model = aiconfig.get_default_model()
if not default_model:
raise KeyError(
f"No default model specified in AIConfigMetadata, and prompt `{prompt.name}` does not specify a model."
)
return aiconfig.get_global_settings(default_model)
elif isinstance(model_metadata, str):
# Use Global settings
return aiconfig.get_global_settings(model_metadata)
else:
# Merge config and prompt settings with prompt settings taking precedent
model_settings = {}
global_settings = aiconfig.get_global_settings(model_metadata.name)
prompt_settings = (
prompt.metadata.model.settings
if prompt.metadata.model.settings is not None
else {}
)
model_settings.update(global_settings)
model_settings.update(prompt_settings)
return model_settings
| (self, prompt: aiconfig.schema.Prompt, aiconfig: 'AIConfigRuntime') -> Dict[str, Any] |
729,691 | aiconfig.default_parsers.openai | get_output_text | null | def get_output_text(
self,
prompt: Prompt,
aiconfig: "AIConfigRuntime",
output: Optional[Output] = None,
) -> str:
if not output:
output = aiconfig.get_latest_output(prompt)
if not output:
return ""
if output.output_type == "execute_result":
output_data = output.data
if isinstance(output_data, str):
return output_data
if isinstance(output_data, OutputDataWithValue):
if isinstance(output_data.value, str):
return output_data.value
# If we get here that means it must be of kind tool_calls
return output_data.model_dump_json(exclude_none=True, indent=2)
# Doing this to be backwards-compatible with old output format
# where we used to save the ChatCompletionMessage in output.data
if isinstance(output_data, ChatCompletionMessage):
if (
hasattr(output_data, "content")
and output_data.content is not None
):
return output_data.content
elif output_data.function_call is not None:
return str(output_data.function_call)
return ""
| (self, prompt: aiconfig.schema.Prompt, aiconfig: 'AIConfigRuntime', output: Union[aiconfig.schema.ExecuteResult, aiconfig.schema.Error, NoneType] = None) -> str |
729,692 | aiconfig.default_parsers.openai | get_prompt_template |
Returns a template for a prompt.
| def get_prompt_template(
self, prompt: Prompt, aiconfig: "AIConfigRuntime"
) -> str:
"""
Returns a template for a prompt.
"""
if isinstance(prompt.input, str):
return prompt.input
elif isinstance(prompt.input, PromptInput) and isinstance(
prompt.input.data, str
):
return prompt.input.data
else:
message = prompt.input
return message.content or ""
| (self, prompt: aiconfig.schema.Prompt, aiconfig: 'AIConfigRuntime') -> str |
729,693 | aiconfig.default_parsers.openai | id | null | def id(self) -> str:
return self.model_id
| (self) -> str |
729,694 | aiconfig_extension_groq.groq | initialize_openai_client | null | def initialize_openai_client(self) -> None:
# Initialize Groq Client
self.client = Groq(
api_key=os.getenv("GROQ_API_KEY"),
)
| (self) -> NoneType |
729,695 | aiconfig.default_parsers.parameterized_model_parser | resolve_prompt_template |
Resolves a templated string with the provided parameters (applied from the AIConfig as well as passed in params).
Args:
prompt_template (str): The template string to resolve.
prompt (Prompt): The prompt object that the template string belongs to (if any).
ai_config (AIConfigRuntime): The AIConfig that the template string belongs to (if any).
params (dict): Optional parameters resolve the template string with.
Returns:
str: The resolved string.
| def resolve_prompt_template(
prompt_template: str,
prompt: Prompt,
ai_config: "AIConfigRuntime",
params: Optional[JSONObject] = {},
):
"""
Resolves a templated string with the provided parameters (applied from the AIConfig as well as passed in params).
Args:
prompt_template (str): The template string to resolve.
prompt (Prompt): The prompt object that the template string belongs to (if any).
ai_config (AIConfigRuntime): The AIConfig that the template string belongs to (if any).
params (dict): Optional parameters resolve the template string with.
Returns:
str: The resolved string.
"""
return resolve_prompt_string(
prompt, params, ai_config, prompt_template
)
| (prompt_template: str, prompt: aiconfig.schema.Prompt, ai_config: 'AIConfigRuntime', params: Optional[Dict[str, Any]] = {}) |
729,696 | aiconfig.default_parsers.parameterized_model_parser | run | null | @abstractmethod
async def run_inference(self) -> List[Output]:
pass
| (self, prompt: aiconfig.schema.Prompt, aiconfig: aiconfig.schema.AIConfig, options: Optional[aiconfig.model_parser.InferenceOptions] = None, parameters: Dict = {}, run_with_dependencies: Optional[bool] = False) -> List[Union[aiconfig.schema.ExecuteResult, aiconfig.schema.Error]] |
729,697 | aiconfig.model_parser | run_batch |
Concurrently runs inference on multiple parameter sets, one set at a time.
Default implementation for the run_batch method. Model Parsers may choose to override this method if they need to implement custom batch execution logic.
For each dictionary of parameters in `params_list``, the `run` method is invoked. All iterations are separate as we use a deep copy of `aiconfig` in each iteration.
Args:
prompt (Prompt): The prompt for running the inference
aiconfig (AIConfigRuntime): The AIConfig object containing all necessary configurations (prompts and parameters) for running the inference.
parameters_list (list[dict[str, Any]]): A List of dictionaries, where each dictionary is a set of parameters that directly influence the behaviour of inference.
options (InferenceOptions, optional): Options to tune the execution of inference, like setting timeout limits, number of retries, etc.
**kwargs: Additional arguments like metadata or custom configuration that could be used to modify the inference behaviour.
Returns:
list[AIConfigRuntime]: A list of AIConfigRuntime objects. Each object contains the state of the AIConfigRuntime after each run using the corresponding parameter set from params_list.
| @abstractmethod
async def run(
self,
prompt: Prompt,
aiconfig: AIConfig,
options: Optional["InferenceOptions"] = None,
parameters: Dict = {},
run_with_dependencies: Optional[bool] = False,
) -> ExecuteResult:
"""
Execute model inference based on completion data to be constructed in deserialize(), which includes the input prompt and
model-specific inference settings. Saves the response or output in prompt.outputs.
Args:
prompt (Prompt): The prompt to be used for inference.
aiconfig (AIConfig): The AIConfig object containing all prompts and parameters.
options (InferenceOptions, optional): Options that determine how to run inference for the prompt
parameters (dict, optional): Optional parameters to include in the serialization.
Returns:
ExecuteResult: The response generated by the model.
"""
| (self, prompt: aiconfig.schema.Prompt, aiconfig: 'AIConfigRuntime', parameters_list: list[dict[str, typing.Any]], options: Optional[ForwardRef('InferenceOptions')] = None, **kwargs: Any) -> list['AIConfigRuntime'] |
729,698 | aiconfig.default_parsers.openai | run_inference |
Invoked to run a prompt in the .aiconfig. This method should perform
the actual model inference based on the provided prompt and inference settings.
Args:
prompt (str): The input prompt.
inference_settings (dict): Model-specific inference settings.
Returns:
ExecuteResult: The response from the model.
| @abstractmethod
def id(self) -> str:
"""
Returns an identifier for the model (e.g. gpt-3.5, gpt-4, etc.).
"""
return self.id
| (self, prompt: aiconfig.schema.Prompt, aiconfig: 'AIConfigRuntime', options: aiconfig.model_parser.InferenceOptions, parameters) -> List[Union[aiconfig.schema.ExecuteResult, aiconfig.schema.Error]] |
729,699 | aiconfig.default_parsers.parameterized_model_parser | run_with_dependencies |
Executes the AI model with the resolved dependencies and prompt references and returns the API response.
Args:
prompt: The prompt to be used.
aiconfig: The AIConfig object containing all prompts and parameters.
parameters (dict): The resolved parameters to use for inference.
Returns:
ExecuteResult: An Object containing the response from the AI model.
| @abstractmethod
async def run_inference(self) -> List[Output]:
pass
| (self, prompt: aiconfig.schema.Prompt, aiconfig: aiconfig.schema.AIConfig, options=None, parameters: Dict = {}) -> List[Union[aiconfig.schema.ExecuteResult, aiconfig.schema.Error]] |
729,700 | aiconfig.default_parsers.openai | serialize |
Defines how prompts and model inference settings get serialized in the .aiconfig.
Args:
prompt (str): The prompt to be serialized.
inference_settings (dict): Model-specific inference settings to be serialized.
Returns:
str: Serialized representation of the prompt and inference settings.
| @abstractmethod
def id(self) -> str:
"""
Returns an identifier for the model (e.g. gpt-3.5, gpt-4, etc.).
"""
return self.id
| (self, prompt_name: str, data: Dict, ai_config: 'AIConfigRuntime', parameters: Optional[Dict], **kwargs) -> List[aiconfig.schema.Prompt] |
729,707 | dotchain.dot_chain | DotChain | null | class DotChain:
__slots__ = ('__chain__')
def __init__(self, data: t.Any = None,
context: t.Optional[t.Any | list[t.Any]] = [],
parent: t.Optional[Chain] = None,
pipe: t.Optional[bool] = False,
**kwargs):
if 'chain' in kwargs:
self.__chain__ = kwargs.get('chain')
else:
self.__chain__ = Chain(data=data, context=context, parent=parent, pipe=pipe)
@property
def Pipe(self) -> t.Self:
self.__chain__.pipe = True
return self
@property
def Chain(self) -> t.Self:
self.__chain__.pipe = False
return self
def With(self, *contexts: t.Any | list[t.Any], clear: bool = False) -> t.Self:
self.__chain__.set_contexts(*contexts, clear=clear)
return self
def Result(self) -> t.Any:
return self.__chain__.result_sync()
def Call(self, callable: t.Callable) -> DotChain:
attr_chain = GetAttrChain(parent=self.__chain__,
item=callable,
context=self.__chain__.contexts,
pipe=self.__chain__.pipe)
return DotChain(
chain=CallChain(parent=attr_chain,
context=attr_chain.contexts,
pipe=attr_chain.pipe))
def __getattr__(self, item: str) -> DotChain:
# https://github.com/python/cpython/issues/69718#issuecomment-1093697247
if item.startswith('__'):
raise AttributeError(item)
return DotChain(
chain=GetAttrChain(self.__chain__,
item,
context=self.__chain__.contexts,
pipe=self.__chain__.pipe))
def __call__(self, *args, **kwargs) -> DotChain:
return DotChain(
chain=CallChain(self.__chain__,
args=args,
kwargs=kwargs,
context=self.__chain__.contexts,
pipe=self.__chain__.pipe))
def __await__(self):
return self.__chain__.__await__()
def __aiter__(self):
self.__chain__.__aiter__()
return self
def __iter__(self):
self.__chain__.__iter__()
return self
async def __anext__(self):
return await self.__chain__.__anext__()
def __next__(self):
return self.__chain__.__next__()
| (data: 't.Any' = None, context: 't.Optional[t.Any | list[t.Any]]' = [], parent: 't.Optional[Chain]' = None, pipe: 't.Optional[bool]' = False, **kwargs) |
729,708 | dotchain.dot_chain | Call | null | def Call(self, callable: t.Callable) -> DotChain:
attr_chain = GetAttrChain(parent=self.__chain__,
item=callable,
context=self.__chain__.contexts,
pipe=self.__chain__.pipe)
return DotChain(
chain=CallChain(parent=attr_chain,
context=attr_chain.contexts,
pipe=attr_chain.pipe))
| (self, callable: Callable) -> dotchain.dot_chain.DotChain |
729,709 | dotchain.dot_chain | Result | null | def Result(self) -> t.Any:
return self.__chain__.result_sync()
| (self) -> Any |
729,710 | dotchain.dot_chain | With | null | def With(self, *contexts: t.Any | list[t.Any], clear: bool = False) -> t.Self:
self.__chain__.set_contexts(*contexts, clear=clear)
return self
| (self, *contexts: 't.Any | list[t.Any]', clear: 'bool' = False) -> 't.Self' |
729,711 | dotchain.dot_chain | __aiter__ | null | def __aiter__(self):
self.__chain__.__aiter__()
return self
| (self) |
729,712 | dotchain.dot_chain | __anext__ | null | def __iter__(self):
self.__chain__.__iter__()
return self
| (self) |
729,713 | dotchain.dot_chain | __await__ | null | def __await__(self):
return self.__chain__.__await__()
| (self) |
729,714 | dotchain.dot_chain | __call__ | null | def __call__(self, *args, **kwargs) -> DotChain:
return DotChain(
chain=CallChain(self.__chain__,
args=args,
kwargs=kwargs,
context=self.__chain__.contexts,
pipe=self.__chain__.pipe))
| (self, *args, **kwargs) -> dotchain.dot_chain.DotChain |
729,715 | dotchain.dot_chain | __getattr__ | null | def __getattr__(self, item: str) -> DotChain:
# https://github.com/python/cpython/issues/69718#issuecomment-1093697247
if item.startswith('__'):
raise AttributeError(item)
return DotChain(
chain=GetAttrChain(self.__chain__,
item,
context=self.__chain__.contexts,
pipe=self.__chain__.pipe))
| (self, item: str) -> dotchain.dot_chain.DotChain |
729,716 | dotchain.dot_chain | __init__ | null | def __init__(self, data: t.Any = None,
context: t.Optional[t.Any | list[t.Any]] = [],
parent: t.Optional[Chain] = None,
pipe: t.Optional[bool] = False,
**kwargs):
if 'chain' in kwargs:
self.__chain__ = kwargs.get('chain')
else:
self.__chain__ = Chain(data=data, context=context, parent=parent, pipe=pipe)
| (self, data: Optional[Any] = None, context: Union[Any, list[Any], NoneType] = [], parent: Optional[dotchain.chain.Chain] = None, pipe: Optional[bool] = False, **kwargs) |
729,718 | dotchain.dot_chain | __next__ | null | def __next__(self):
return self.__chain__.__next__()
| (self) |
729,721 | addfips.addfips | AddFIPS | Get state or county FIPS codes | class AddFIPS:
"""Get state or county FIPS codes"""
default_county_field = 'county'
default_state_field = 'state'
data = files('addfips')
def __init__(self, vintage=None):
# Handle de-diacreticizing
self.diacretic_pattern = '(' + ('|'.join(DIACRETICS)) + ')'
self.delete_diacretics = lambda x: DIACRETICS[x.group()]
if vintage is None or vintage not in COUNTY_FILES:
vintage = max(COUNTY_FILES.keys())
self._states, self._state_fips = self._load_state_data()
self._counties = self._load_county_data(vintage)
def _load_state_data(self):
with self.data.joinpath(STATES).open('rt', encoding='utf-8') as f:
reader = csv.DictReader(f)
states = {}
state_fips = {}
for row in reader:
states[row['postal'].lower()] = row['fips']
states[row['name'].lower()] = row['fips']
state_fips[row['fips']] = row['fips']
state_fips = frozenset(state_fips)
return states, state_fips
def _load_county_data(self, vintage):
with self.data.joinpath(COUNTY_FILES[vintage]).open('rt', encoding='utf-8') as f:
counties = {}
for row in csv.DictReader(f):
if row['statefp'] not in counties:
counties[row['statefp']] = {}
state = counties[row['statefp']]
# Strip diacretics, remove geography name and add both to dict
county = self._delete_diacretics(row['name'].lower())
bare_county = re.sub(COUNTY_PATTERN, '', county)
state[county] = state[bare_county] = row['countyfp']
# Add both versions of abbreviated names to the dict.
for short, full in ABBREVS.items():
needle, replace = None, None
if county.startswith(short):
needle, replace = short, full
elif county.startswith(full):
needle, replace = full, short
if needle is not None:
replaced = county.replace(needle, replace, 1)
bare_replaced = bare_county.replace(needle, replace, 1)
state[replaced] = state[bare_replaced] = row['countyfp']
return counties
def _delete_diacretics(self, string):
return re.sub(self.diacretic_pattern, self.delete_diacretics, string)
def get_state_fips(self, state):
'''Get FIPS code from a state name or postal code'''
if state is None:
return None
# Check if we already have a FIPS code
if state in self._state_fips:
return state
return self._states.get(state.lower())
def get_county_fips(self, county, state):
"""
Get a county's FIPS code.
:county str County name
:state str Name, postal abbreviation or FIPS code for a state
"""
state_fips = self.get_state_fips(state)
counties = self._counties.get(state_fips, {})
try:
name = self._delete_diacretics(county.lower())
return state_fips + counties.get(name)
except TypeError:
return None
def add_state_fips(self, row, state_field=None):
"""
Add state FIPS to a dictionary.
:row dict/list A dictionary with state and county names
:state_field str name of state name field. default: state
"""
if state_field is None:
state_field = self.default_state_field
fips = self.get_state_fips(row[state_field])
try:
row['fips'] = fips
except TypeError:
row.insert(0, fips)
return row
def add_county_fips(self, row, county_field=None, state_field=None, state=None):
"""
Add county FIPS to a dictionary containing a state name, FIPS code, or using a passed state name or FIPS code.
:row dict/list A dictionary with state and county names
:county_field str county name field. default: county
:state_fips_field str state FIPS field containing state fips
:state_field str state name field. default: county
:state str State name, postal abbreviation or FIPS code to use
"""
if state:
state_fips = self.get_state_fips(state)
else:
state_fips = self.get_state_fips(row[state_field or self.default_state_field])
if county_field is None:
county_field = self.default_county_field
fips = self.get_county_fips(row[county_field], state_fips)
try:
row['fips'] = fips
except TypeError:
row.insert(0, fips)
return row
| (vintage=None) |
729,722 | addfips.addfips | __init__ | null | def __init__(self, vintage=None):
# Handle de-diacreticizing
self.diacretic_pattern = '(' + ('|'.join(DIACRETICS)) + ')'
self.delete_diacretics = lambda x: DIACRETICS[x.group()]
if vintage is None or vintage not in COUNTY_FILES:
vintage = max(COUNTY_FILES.keys())
self._states, self._state_fips = self._load_state_data()
self._counties = self._load_county_data(vintage)
| (self, vintage=None) |
729,723 | addfips.addfips | _delete_diacretics | null | def _delete_diacretics(self, string):
return re.sub(self.diacretic_pattern, self.delete_diacretics, string)
| (self, string) |
729,724 | addfips.addfips | _load_county_data | null | def _load_county_data(self, vintage):
with self.data.joinpath(COUNTY_FILES[vintage]).open('rt', encoding='utf-8') as f:
counties = {}
for row in csv.DictReader(f):
if row['statefp'] not in counties:
counties[row['statefp']] = {}
state = counties[row['statefp']]
# Strip diacretics, remove geography name and add both to dict
county = self._delete_diacretics(row['name'].lower())
bare_county = re.sub(COUNTY_PATTERN, '', county)
state[county] = state[bare_county] = row['countyfp']
# Add both versions of abbreviated names to the dict.
for short, full in ABBREVS.items():
needle, replace = None, None
if county.startswith(short):
needle, replace = short, full
elif county.startswith(full):
needle, replace = full, short
if needle is not None:
replaced = county.replace(needle, replace, 1)
bare_replaced = bare_county.replace(needle, replace, 1)
state[replaced] = state[bare_replaced] = row['countyfp']
return counties
| (self, vintage) |
729,725 | addfips.addfips | _load_state_data | null | def _load_state_data(self):
with self.data.joinpath(STATES).open('rt', encoding='utf-8') as f:
reader = csv.DictReader(f)
states = {}
state_fips = {}
for row in reader:
states[row['postal'].lower()] = row['fips']
states[row['name'].lower()] = row['fips']
state_fips[row['fips']] = row['fips']
state_fips = frozenset(state_fips)
return states, state_fips
| (self) |
729,726 | addfips.addfips | add_county_fips |
Add county FIPS to a dictionary containing a state name, FIPS code, or using a passed state name or FIPS code.
:row dict/list A dictionary with state and county names
:county_field str county name field. default: county
:state_fips_field str state FIPS field containing state fips
:state_field str state name field. default: county
:state str State name, postal abbreviation or FIPS code to use
| def add_county_fips(self, row, county_field=None, state_field=None, state=None):
"""
Add county FIPS to a dictionary containing a state name, FIPS code, or using a passed state name or FIPS code.
:row dict/list A dictionary with state and county names
:county_field str county name field. default: county
:state_fips_field str state FIPS field containing state fips
:state_field str state name field. default: county
:state str State name, postal abbreviation or FIPS code to use
"""
if state:
state_fips = self.get_state_fips(state)
else:
state_fips = self.get_state_fips(row[state_field or self.default_state_field])
if county_field is None:
county_field = self.default_county_field
fips = self.get_county_fips(row[county_field], state_fips)
try:
row['fips'] = fips
except TypeError:
row.insert(0, fips)
return row
| (self, row, county_field=None, state_field=None, state=None) |
729,727 | addfips.addfips | add_state_fips |
Add state FIPS to a dictionary.
:row dict/list A dictionary with state and county names
:state_field str name of state name field. default: state
| def add_state_fips(self, row, state_field=None):
"""
Add state FIPS to a dictionary.
:row dict/list A dictionary with state and county names
:state_field str name of state name field. default: state
"""
if state_field is None:
state_field = self.default_state_field
fips = self.get_state_fips(row[state_field])
try:
row['fips'] = fips
except TypeError:
row.insert(0, fips)
return row
| (self, row, state_field=None) |
729,728 | addfips.addfips | get_county_fips |
Get a county's FIPS code.
:county str County name
:state str Name, postal abbreviation or FIPS code for a state
| def get_county_fips(self, county, state):
"""
Get a county's FIPS code.
:county str County name
:state str Name, postal abbreviation or FIPS code for a state
"""
state_fips = self.get_state_fips(state)
counties = self._counties.get(state_fips, {})
try:
name = self._delete_diacretics(county.lower())
return state_fips + counties.get(name)
except TypeError:
return None
| (self, county, state) |
729,729 | addfips.addfips | get_state_fips | Get FIPS code from a state name or postal code | def get_state_fips(self, state):
'''Get FIPS code from a state name or postal code'''
if state is None:
return None
# Check if we already have a FIPS code
if state in self._state_fips:
return state
return self._states.get(state.lower())
| (self, state) |
729,731 | minidir | Directory | Directory is a interface that can add, remove and iterate files | class Directory(typing.Protocol):
"""Directory is a interface that can add, remove and iterate files"""
def __iter__(self) -> typing.Iterator[Path]:
pass
def create(self, path: Path) -> File:
pass
def remove(self, path: Path) -> None:
pass
def get(self, path: Path) -> File:
pass
| (*args, **kwargs) |
729,733 | minidir | __iter__ | null | def __iter__(self) -> typing.Iterator[Path]:
pass
| (self) -> Iterator[minidir.Path] |
729,735 | minidir | create | null | def create(self, path: Path) -> File:
pass
| (self, path: minidir.Path) -> minidir.File |
729,736 | minidir | get | null | def get(self, path: Path) -> File:
pass
| (self, path: minidir.Path) -> minidir.File |
729,737 | minidir | remove | null | def remove(self, path: Path) -> None:
pass
| (self, path: minidir.Path) -> NoneType |
729,738 | minidir | FakeDirectory | FakeDirectory implements Directory protocol in memory. | class FakeDirectory:
"""FakeDirectory implements Directory protocol in memory."""
_dir: typing.Dict[str, bytes]
def __init__(self) -> None:
self._dir = {}
pass
def __iter__(self) -> typing.Iterator[Path]:
paths: typing.List[Path] = []
for key in self._dir:
paths.append(SomePath(key))
return iter(paths)
def create(self, path: Path) -> File:
if str(path) in self._dir:
raise NameCollision()
self._dir[str(path)] = b""
return _FakeDirectoryFile(self._dir, str(path))
def remove(self, path: Path) -> None:
if str(path) not in self._dir:
raise NotFound()
del self._dir[str(path)]
def get(self, path: Path) -> File:
if str(path) not in self._dir:
raise NotFound()
return _FakeDirectoryFile(self._dir, str(path))
| () -> None |