Extract a model from its distributed containers.
Args:
model (`torch.nn.Module`):
The model to extract.
keep_fp32_wrapper (`bool`, *optional*, defaults to `True`):
Whether to keep the mixed precision hooks attached to the model (pass `False` to remove them).
recursive (`bool`, *optional*, defaults to `False`):
Whether to recursively extract all cases of `module.module` from `model` as well as unwrap child sublayers
recursively, not just the top-level distributed containers.
Returns:
`torch.nn.Module`: The extracted model. | def extract_model_from_parallel(model, keep_fp32_wrapper: bool = True, recursive: bool = False):
"""
Extract a model from its distributed containers.
Args:
model (`torch.nn.Module`):
The model to extract.
keep_fp32_wrapper (`bool`, *optional*, defaults to `True`):
Whether to keep the mixed precision hooks attached to the model (pass `False` to remove them).
recursive (`bool`, *optional*, defaults to `False`):
Whether to recursively extract all cases of `module.module` from `model` as well as unwrap child sublayers
recursively, not just the top-level distributed containers.
Returns:
`torch.nn.Module`: The extracted model.
"""
options = (torch.nn.parallel.DistributedDataParallel, torch.nn.DataParallel)
is_compiled = is_compiled_module(model)
if is_compiled:
compiled_model = model
model = model._orig_mod
if is_deepspeed_available():
from deepspeed import DeepSpeedEngine
options += (DeepSpeedEngine,)
if is_torch_version(">=", FSDP_PYTORCH_VERSION) and is_torch_distributed_available():
from torch.distributed.fsdp.fully_sharded_data_parallel import FullyShardedDataParallel as FSDP
options += (FSDP,)
while isinstance(model, options):
model = model.module
if recursive:
# This is needed in cases such as using FSDPv2 on XLA
def _recursive_unwrap(module):
# Wrapped modules are standardly wrapped as `module`, similar to the cases earlier
# with DDP, DataParallel, DeepSpeed, and FSDP
if hasattr(module, "module"):
unwrapped_module = _recursive_unwrap(module.module)
else:
unwrapped_module = module
# Next unwrap child sublayers recursively
for name, child in unwrapped_module.named_children():
setattr(unwrapped_module, name, _recursive_unwrap(child))
return unwrapped_module
# Start with top-level
model = _recursive_unwrap(model)
if not keep_fp32_wrapper:
forward = model.forward
original_forward = model.__dict__.pop("_original_forward", None)
if original_forward is not None:
while hasattr(forward, "__wrapped__"):
forward = forward.__wrapped__
if forward == original_forward:
break
model.forward = MethodType(forward, model)
if getattr(model, "_converted_to_transformer_engine", False):
convert_model(model, to_transformer_engine=False)
if is_compiled:
compiled_model._orig_mod = model
model = compiled_model
return model |
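A minimal usage sketch (assuming `accelerate` is installed and exposes the helper as `accelerate.utils.extract_model_from_parallel`): wrapping a plain module in `torch.nn.DataParallel` and then extracting it should hand back the original object.
```python
import torch
from accelerate.utils import extract_model_from_parallel  # assumed import path

model = torch.nn.Linear(4, 2)
wrapped = torch.nn.DataParallel(model)            # one of the supported container types
unwrapped = extract_model_from_parallel(wrapped)  # walks `.module` until no container remains
assert unwrapped is model
```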
Introduces a blocking point in the script, making sure all processes have reached this point before continuing.
<Tip warning={true}>
Make sure all processes will reach this instruction otherwise one of your processes will hang forever.
</Tip> | def wait_for_everyone():
"""
Introduces a blocking point in the script, making sure all processes have reached this point before continuing.
<Tip warning={true}>
Make sure all processes will reach this instruction otherwise one of your processes will hang forever.
</Tip>
"""
PartialState().wait_for_everyone() |
Cleans the state dictionary from a model and removes tensor aliasing if present.
Args:
state_dict (`dict`):
The state dictionary from a model | def clean_state_dict_for_safetensors(state_dict: dict):
"""
Cleans the state dictionary from a model and removes tensor aliasing if present.
Args:
state_dict (`dict`):
The state dictionary from a model
"""
ptrs = collections.defaultdict(list)
# When bnb serialization is used, weights in state dict can be strings
for name, tensor in state_dict.items():
if not isinstance(tensor, str):
ptrs[id_tensor_storage(tensor)].append(name)
# These are all pointers of tensors with shared memory
shared_ptrs = {ptr: names for ptr, names in ptrs.items() if len(names) > 1}
warn_names = set()
for names in shared_ptrs.values():
# When not all duplicates have been cleaned, we still remove those keys but put a clear warning.
# If the link between tensors was done at runtime then `from_pretrained` will not get
# the key back leading to random tensor. A proper warning will be shown
# during reload (if applicable), but since the file is not necessarily compatible with
# the config, better show a proper warning.
found_names = [name for name in names if name in state_dict]
warn_names.update(found_names[1:])
for name in found_names[1:]:
del state_dict[name]
if len(warn_names) > 0:
logger.warning(
f"Removed shared tensor {warn_names} while saving. This should be OK, but check by verifying that you don't receive any warning while reloading",
)
state_dict = {k: v.contiguous() if isinstance(v, torch.Tensor) else v for k, v in state_dict.items()}
return state_dict |
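A small sketch of the aliasing case this handles (assuming the helper above is importable from `accelerate.utils`): two keys pointing at the same storage collapse to a single entry, since `safetensors` refuses to serialize shared tensors.
```python
import torch
from accelerate.utils import clean_state_dict_for_safetensors  # assumed import path

shared = torch.ones(2, 2)
state_dict = {"encoder.weight": shared, "decoder.weight": shared}  # tied weights alias one storage
cleaned = clean_state_dict_for_safetensors(state_dict)
print(sorted(cleaned))  # only the first alias survives; a warning is logged for the removed key
```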
Save the data to disk. Use in place of `torch.save()`.
Args:
obj:
The data to save
f:
The file (or file-like object) to use to save the data
save_on_each_node (`bool`, *optional*, defaults to `False`):
Whether to save on every node's local main process instead of only once on the global main process
safe_serialization (`bool`, *optional*, defaults to `False`):
Whether to save `obj` using `safetensors` or the traditional PyTorch way (that uses `pickle`). | def save(obj, f, save_on_each_node: bool = False, safe_serialization: bool = False):
"""
Save the data to disk. Use in place of `torch.save()`.
Args:
obj:
The data to save
f:
The file (or file-like object) to use to save the data
save_on_each_node (`bool`, *optional*, defaults to `False`):
Whether to save on every node's local main process instead of only once on the global main process
safe_serialization (`bool`, *optional*, defaults to `False`):
Whether to save `obj` using `safetensors` or the traditional PyTorch way (that uses `pickle`).
"""
# When TorchXLA is enabled, it's necessary to transfer all data to the CPU before saving.
# Another issue arises with `id_tensor_storage`, which treats all XLA tensors as identical.
# If tensors remain on XLA, calling `clean_state_dict_for_safetensors` will result in only
# one XLA tensor remaining.
if PartialState().distributed_type == DistributedType.XLA:
obj = xm._maybe_convert_to_cpu(obj)
# Check if it's a model and remove duplicates
if safe_serialization:
save_func = partial(safe_save_file, metadata={"format": "pt"})
if isinstance(obj, OrderedDict):
obj = clean_state_dict_for_safetensors(obj)
else:
save_func = torch.save
if PartialState().is_main_process and not save_on_each_node:
save_func(obj, f)
elif PartialState().is_local_main_process and save_on_each_node:
save_func(obj, f) |
A context manager that will temporarily clear environment variables.
When this context exits, the previous environment variables will be back.
Example:
```python
>>> import os
>>> from accelerate.utils import clear_environment
>>> os.environ["FOO"] = "bar"
>>> with clear_environment():
... print(os.environ)
... os.environ["FOO"] = "new_bar"
... print(os.environ["FOO"])
{}
new_bar
>>> print(os.environ["FOO"])
bar
``` | def clear_environment():
"""
A context manager that will temporarily clear environment variables.
When this context exits, the previous environment variables will be back.
Example:
```python
>>> import os
>>> from accelerate.utils import clear_environment
>>> os.environ["FOO"] = "bar"
>>> with clear_environment():
... print(os.environ)
... os.environ["FOO"] = "new_bar"
... print(os.environ["FOO"])
{}
new_bar
>>> print(os.environ["FOO"])
bar
```
"""
_old_os_environ = os.environ.copy()
os.environ.clear()
try:
yield
finally:
os.environ.clear() # clear any added keys,
os.environ.update(_old_os_environ) |
A context manager that will add each keyword argument passed to `os.environ` and remove them when exiting.
Will convert the values in `kwargs` to strings and upper-case all the keys.
Example:
```python
>>> import os
>>> from accelerate.utils import patch_environment
>>> with patch_environment(FOO="bar"):
... print(os.environ["FOO"]) # prints "bar"
>>> print(os.environ["FOO"]) # raises KeyError
``` | def patch_environment(**kwargs):
"""
A context manager that will add each keyword argument passed to `os.environ` and remove them when exiting.
Will convert the values in `kwargs` to strings and upper-case all the keys.
Example:
```python
>>> import os
>>> from accelerate.utils import patch_environment
>>> with patch_environment(FOO="bar"):
... print(os.environ["FOO"]) # prints "bar"
>>> print(os.environ["FOO"]) # raises KeyError
```
"""
existing_vars = {}
for key, value in kwargs.items():
key = key.upper()
if key in os.environ:
existing_vars[key] = os.environ[key]
os.environ[key] = str(value)
try:
yield
finally:
for key in kwargs:
key = key.upper()
if key in existing_vars:
# restore previous value
os.environ[key] = existing_vars[key]
else:
os.environ.pop(key, None) |
Gets a pretty name from `obj`. | def get_pretty_name(obj):
"""
Gets a pretty name from `obj`.
"""
if not hasattr(obj, "__qualname__") and not hasattr(obj, "__name__"):
obj = getattr(obj, "__class__", obj)
if hasattr(obj, "__qualname__"):
return obj.__qualname__
if hasattr(obj, "__name__"):
return obj.__name__
return str(obj) |
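A quick illustration of the fallback order (qualified name, then name, then the object's class), assuming the function above is in scope:
```python
class Trainer:
    pass

print(get_pretty_name(Trainer))    # 'Trainer'  (class __qualname__)
print(get_pretty_name(Trainer()))  # 'Trainer'  (an instance falls back to its class)
print(get_pretty_name(len))        # 'len'      (builtin __qualname__)
```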
Recursively merges two dictionaries.
Args:
source (`dict`): The dictionary to merge into `destination`.
destination (`dict`): The dictionary to merge `source` into. | def merge_dicts(source, destination):
"""
Recursively merges two dictionaries.
Args:
source (`dict`): The dictionary to merge into `destination`.
destination (`dict`): The dictionary to merge `source` into.
"""
for key, value in source.items():
if isinstance(value, dict):
node = destination.setdefault(key, {})
merge_dicts(value, node)
else:
destination[key] = value
return destination |
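For example, nested keys from `source` are merged into `destination` in place while unrelated keys survive (a sketch assuming the function above is in scope):
```python
destination = {"training": {"lr": 1e-3, "epochs": 3}, "seed": 42}
source = {"training": {"lr": 5e-4}, "logging": {"level": "info"}}

merged = merge_dicts(source, destination)
# {'training': {'lr': 0.0005, 'epochs': 3}, 'seed': 42, 'logging': {'level': 'info'}}
print(merged is destination)  # True: `destination` is modified in place and returned
```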
Checks if a port is in use on `localhost`. Useful for detecting whether another `accelerate launch` command is
already using the port.
"""
Checks if a port is in use on `localhost`. Useful for detecting whether another `accelerate launch` command is
already using the port.
"""
if port is None:
port = 29500
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
return s.connect_ex(("localhost", port)) == 0 |
Converts `size` from bytes to the largest possible unit | def convert_bytes(size):
"Converts `size` from bytes to the largest possible unit"
for x in ["bytes", "KB", "MB", "GB", "TB"]:
if size < 1024.0:
return f"{round(size, 2)} {x}"
size /= 1024.0
return f"{round(size, 2)} PB" |
Warns if the kernel version is below the recommended minimum on Linux. | def check_os_kernel():
"""Warns if the kernel version is below the recommended minimum on Linux."""
# see issue #1929
info = platform.uname()
system = info.system
if system != "Linux":
return
_, version, *_ = re.split(r"(\d+\.\d+\.\d+)", info.release)
min_version = "5.5.0"
if Version(version) < Version(min_version):
msg = (
f"Detected kernel version {version}, which is below the recommended minimum of {min_version}; this can "
"cause the process to hang. It is recommended to upgrade the kernel to the minimum version or higher."
)
logger.warning(msg, main_process_only=True) |
Recursive `getattr`.
Args:
obj:
A class instance holding the attribute.
attr (`str`):
The attribute that is to be retrieved, e.g. 'attribute1.attribute2'. | def recursive_getattr(obj, attr: str):
"""
Recursive `getattr`.
Args:
obj:
A class instance holding the attribute.
attr (`str`):
The attribute that is to be retrieved, e.g. 'attribute1.attribute2'.
"""
def _getattr(obj, attr):
return getattr(obj, attr)
return reduce(_getattr, [obj] + attr.split(".")) |
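The dotted path is resolved by reducing `getattr` over each segment; a minimal sketch assuming the function above is in scope:
```python
import torch

model = torch.nn.Sequential(torch.nn.Linear(4, 4), torch.nn.ReLU())
# Equivalent to model[0].weight, but addressed through a dotted string
weight = recursive_getattr(model, "0.weight")
print(weight.shape)  # torch.Size([4, 4])
```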
Helper function for reproducible behavior to set the seed in `random`, `numpy`, `torch`.
Args:
seed (`int`):
The seed to set.
device_specific (`bool`, *optional*, defaults to `False`):
Whether to differ the seed on each device slightly with `self.process_index`.
deterministic (`bool`, *optional*, defaults to `False`):
Whether to use deterministic algorithms where available. Can slow down training. | def set_seed(seed: int, device_specific: bool = False, deterministic: bool = False):
"""
Helper function for reproducible behavior to set the seed in `random`, `numpy`, `torch`.
Args:
seed (`int`):
The seed to set.
device_specific (`bool`, *optional*, defaults to `False`):
Whether to differ the seed on each device slightly with `self.process_index`.
deterministic (`bool`, *optional*, defaults to `False`):
Whether to use deterministic algorithms where available. Can slow down training.
"""
if device_specific:
seed += AcceleratorState().process_index
random.seed(seed)
np.random.seed(seed)
torch.manual_seed(seed)
if is_xpu_available():
torch.xpu.manual_seed_all(seed)
elif is_npu_available():
torch.npu.manual_seed_all(seed)
elif is_mlu_available():
torch.mlu.manual_seed_all(seed)
else:
torch.cuda.manual_seed_all(seed)
# ^^ safe to call this function even if cuda is not available
if is_torch_xla_available():
xm.set_rng_state(seed)
if deterministic:
torch.use_deterministic_algorithms(True) |
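Typical usage (assuming `accelerate` is installed) is a single call near the top of a training script; `device_specific=True` additionally offsets the seed by the process index so each rank draws different random numbers:
```python
from accelerate.utils import set_seed

set_seed(42)  # seeds `random`, `numpy`, and `torch` (plus the active accelerator backend)
```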
Helper function to install appropriate xla wheels based on the `torch` version in Google Colaboratory.
Args:
upgrade (`bool`, *optional*, defaults to `False`):
Whether to upgrade `torch` and install the latest `torch_xla` wheels.
Example:
```python
>>> from accelerate.utils import install_xla
>>> install_xla(upgrade=True)
``` | def install_xla(upgrade: bool = False):
"""
Helper function to install appropriate xla wheels based on the `torch` version in Google Colaboratory.
Args:
upgrade (`bool`, *optional*, defaults to `False`):
Whether to upgrade `torch` and install the latest `torch_xla` wheels.
Example:
```python
>>> from accelerate.utils import install_xla
>>> install_xla(upgrade=True)
```
"""
in_colab = False
if "IPython" in sys.modules:
in_colab = "google.colab" in str(sys.modules["IPython"].get_ipython())
if in_colab:
if upgrade:
torch_install_cmd = ["pip", "install", "-U", "torch"]
subprocess.run(torch_install_cmd, check=True)
# get the current version of torch
torch_version = importlib.metadata.version("torch")
torch_version_trunc = torch_version[: torch_version.rindex(".")]
xla_wheel = f"https://storage.googleapis.com/tpu-pytorch/wheels/colab/torch_xla-{torch_version_trunc}-cp37-cp37m-linux_x86_64.whl"
xla_install_cmd = ["pip", "install", xla_wheel]
subprocess.run(xla_install_cmd, check=True)
else:
raise RuntimeError("`install_xla` utility works only on google colab.") |
Wrapper around `tqdm.tqdm` that optionally displays only on the main process.
Args:
main_process_only (`bool`, *optional*):
Whether to display the progress bar only on the main process | def tqdm(*args, main_process_only: bool = True, **kwargs):
"""
Wrapper around `tqdm.tqdm` that optionally displays only on the main process.
Args:
main_process_only (`bool`, *optional*):
Whether to display the progress bar only on the main process
"""
if not is_tqdm_available():
raise ImportError("Accelerate's `tqdm` module requires `tqdm` to be installed. Please run `pip install tqdm`.")
if len(args) > 0 and isinstance(args[0], bool):
warnings.warn(
f"Passing `{args[0]}` as the first argument to Accelerate's `tqdm` wrapper is deprecated "
"and will be removed in v0.33.0. Please use the `main_process_only` keyword argument instead.",
FutureWarning,
)
main_process_only = args[0]
args = args[1:]
disable = kwargs.pop("disable", False)
if main_process_only and not disable:
disable = PartialState().local_process_index != 0
return _tqdm(*args, **kwargs, disable=disable) |
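A short usage sketch (assuming `accelerate` and `tqdm` are installed): arguments are forwarded to the underlying `tqdm`, and rendering is disabled on every process except the local main one.
```python
from accelerate.utils import tqdm

for _ in tqdm(range(100), desc="training"):  # the bar is shown only on the local main process
    pass
```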
Recursively converts the linear and layernorm layers of a model to their `transformer_engine` counterpart. | def convert_model(model, to_transformer_engine=True, _convert_linear=True, _convert_ln=True):
"""
Recursively converts the linear and layernorm layers of a model to their `transformer_engine` counterpart.
"""
if not is_fp8_available():
raise ImportError("Using `convert_model` requires transformer_engine to be installed.")
for name, module in model.named_children():
if isinstance(module, nn.Linear) and to_transformer_engine and _convert_linear:
# Return early if the linear layer weights are not multiples of 16
if any(p % 16 != 0 for p in module.weight.shape):
return
has_bias = module.bias is not None
te_module = te.Linear(
module.in_features, module.out_features, bias=has_bias, params_dtype=module.weight.dtype
)
te_module.weight.copy_(module.weight)
if has_bias:
te_module.bias.copy_(module.bias)
setattr(model, name, te_module)
elif isinstance(module, nn.LayerNorm) and to_transformer_engine and _convert_ln:
te_module = te.LayerNorm(module.normalized_shape[0], eps=module.eps, params_dtype=module.weight.dtype)
te_module.weight.copy_(module.weight)
te_module.bias.copy_(module.bias)
setattr(model, name, te_module)
elif isinstance(module, te.Linear) and not to_transformer_engine and _convert_linear:
has_bias = module.bias is not None
new_module = nn.Linear(
module.in_features, module.out_features, bias=has_bias, params_dtype=module.weight.dtype
)
new_module.weight.copy_(module.weight)
if has_bias:
new_module.bias.copy_(module.bias)
setattr(model, name, new_module)
elif isinstance(module, te.LayerNorm) and not to_transformer_engine and _convert_ln:
new_module = nn.LayerNorm(module.normalized_shape[0], eps=module.eps, params_dtype=module.weight.dtype)
new_module.weight.copy_(module.weight)
new_module.bias.copy_(module.bias)
setattr(model, name, new_module)
else:
convert_model(
module,
to_transformer_engine=to_transformer_engine,
_convert_linear=_convert_linear,
_convert_ln=_convert_ln,
) |
Returns whether a given model has some `transformer_engine` layer or not. | def has_transformer_engine_layers(model):
"""
Returns whether a given model has some `transformer_engine` layer or not.
"""
if not is_fp8_available():
raise ImportError("Using `has_transformer_engine_layers` requires transformer_engine to be installed.")
for m in model.modules():
if isinstance(m, (te.LayerNorm, te.Linear, te.TransformerLayer)):
return True
return False |
Compares a library version to some requirement using a given operation.
Args:
library_or_version (`str` or `packaging.version.Version`):
A library name or a version to check.
operation (`str`):
A string representation of an operator, such as `">"` or `"<="`.
requirement_version (`str`):
The version to compare the library version against | def compare_versions(library_or_version: Union[str, Version], operation: str, requirement_version: str):
"""
Compares a library version to some requirement using a given operation.
Args:
library_or_version (`str` or `packaging.version.Version`):
A library name or a version to check.
operation (`str`):
A string representation of an operator, such as `">"` or `"<="`.
requirement_version (`str`):
The version to compare the library version against
"""
if operation not in STR_OPERATION_TO_FUNC.keys():
raise ValueError(f"`operation` must be one of {list(STR_OPERATION_TO_FUNC.keys())}, received {operation}")
operation = STR_OPERATION_TO_FUNC[operation]
if isinstance(library_or_version, str):
library_or_version = parse(importlib.metadata.version(library_or_version))
return operation(library_or_version, parse(requirement_version)) |
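Under the hood this is just an `operator` function applied to two parsed versions; a standalone sketch of the same comparison using `packaging` directly (the version numbers are illustrative):
```python
import operator
from packaging.version import parse

# compare_versions("torch", ">=", "2.1.0") reduces to roughly:
print(operator.ge(parse("2.2.0"), parse("2.1.0")))  # True
```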
Compares the current PyTorch version to a given reference with an operation.
Args:
operation (`str`):
A string representation of an operator, such as `">"` or `"<="`
version (`str`):
A string version of PyTorch | def is_torch_version(operation: str, version: str):
"""
Compares the current PyTorch version to a given reference with an operation.
Args:
operation (`str`):
A string representation of an operator, such as `">"` or `"<="`
version (`str`):
A string version of PyTorch
"""
return compare_versions(torch_version, operation, version) |
With this test, an observed batch size of 64 should result in negligible
differences in the scheduler after going through the correct number of steps.
Uses single, two, and four steps to test. | def accumulation_test(num_processes: int = 2):
"""
With this test, an observed batch size of 64 should result in negligible
differences in the scheduler after going through the correct number of steps.
Uses single, two, and four steps to test.
"""
from transformers import get_linear_schedule_with_warmup
steps = [1, 2, 4]
for num_steps in steps:
plugin = GradientAccumulationPlugin(num_steps=num_steps, adjust_scheduler=num_steps > 1)
accelerator = Accelerator(gradient_accumulation_plugin=plugin)
model = torch.nn.Linear(2, 4)
optimizer = torch.optim.AdamW(model.parameters(), lr=10.0)
scheduler = get_linear_schedule_with_warmup(optimizer=optimizer, num_warmup_steps=0, num_training_steps=20)
model, optimizer, scheduler = accelerator.prepare(model, optimizer, scheduler)
for i in range(10 * num_steps):
with accelerator.accumulate(model):
optimizer.step()
scheduler.step()
if i == (10 * num_steps - 2):
assert (
scheduler.get_last_lr()[0] != 0
), f"Wrong lr found at second-to-last step, expected non-zero, got {scheduler.get_last_lr()[0]}. num_steps: {num_steps}"
assert (
scheduler.get_last_lr()[0] == 0
), f"Wrong lr found at last step, expected 0, got {scheduler.get_last_lr()[0]}"
GradientState._reset_state() |
Generates a tuple of dummy DataLoaders to test with | def dummy_dataloaders(a=2, b=3, batch_size=16, n_train_batches: int = 10, n_valid_batches: int = 2):
"Generates a tuple of dummy DataLoaders to test with"
def get_dataset(n_batches):
x = torch.randn(batch_size * n_batches, 1)
return TensorDataset(x, a * x + b + 0.1 * torch.randn(batch_size * n_batches, 1))
train_dataset = get_dataset(n_train_batches)
valid_dataset = get_dataset(n_valid_batches)
train_dataloader = DataLoader(train_dataset, shuffle=True, batch_size=batch_size, num_workers=4)
valid_dataloader = DataLoader(valid_dataset, shuffle=False, batch_size=batch_size, num_workers=4)
return (train_dataloader, valid_dataloader) |
Trains for `num_epochs` | def train(num_epochs, model, dataloader, optimizer, accelerator, scheduler=None):
"Trains for `num_epochs`"
rands = []
for epoch in range(num_epochs):
# Train quickly
model.train()
for batch in dataloader:
x, y = batch
outputs = model(x)
loss = torch.nn.functional.mse_loss(outputs, y)
accelerator.backward(loss)
optimizer.step()
optimizer.zero_grad()
rands.append(random.random()) # Introduce some randomness
if scheduler is not None:
scheduler.step()
return rands |
Helper function parsing the command-line options
@retval The parsed command-line arguments (an `argparse.Namespace`) | def parse_args():
"""
Helper function parsing the command-line options
@retval The parsed command-line arguments (an `argparse.Namespace`)
"""
parser = ArgumentParser(
description=(
"PyTorch TPU distributed training launch "
"helper utility that will spawn up "
"multiple distributed processes"
)
)
# Optional arguments for the launch helper
parser.add_argument("--num_cores", type=int, default=1, help="Number of TPU cores to use (1 or 8).")
# positional
parser.add_argument(
"training_script",
type=str,
help=(
"The full path to the single TPU training "
"program/script to be launched in parallel, "
"followed by all the arguments for the "
"training script"
),
)
# rest from the training program
parser.add_argument("training_script_args", nargs=REMAINDER)
return parser.parse_args() |
Retrieve the contents of a URL as a string. | def r(url, headers=ADOBE_REQ_HEADERS):
"""Retrieve a from a url as a string."""
req = session.get(url, headers=headers)
req.encoding = 'utf-8'
return req.text |
First stage of parsing the XML. | def get_products_xml(adobeurl):
"""First stage of parsing the XML."""
print('Source URL is: ' + adobeurl)
return ET.fromstring(r(adobeurl)) |
Second stage of parsing the XML. | def parse_products_xml(products_xml, urlVersion, allowedPlatforms):
"""2nd stage of parsing the XML."""
if urlVersion == 6:
prefix = 'channels/'
else:
prefix = ''
cdn = products_xml.find(prefix + 'channel/cdn/secure').text
products = {}
parent_map = {c: p for p in products_xml.iter() for c in p}
for p in products_xml.findall(prefix + 'channel/products/product'):
sap = p.get('id')
hidden = parent_map[parent_map[p]].get('name') != 'ccm'
displayName = p.find('displayName').text
productVersion = p.get('version')
if not products.get(sap):
products[sap] = {
'hidden': hidden,
'displayName': displayName,
'sapCode': sap,
'versions': OrderedDict()
}
for pf in p.findall('platforms/platform'):
baseVersion = pf.find('languageSet').get('baseVersion')
buildGuid = pf.find('languageSet').get('buildGuid')
appplatform = pf.get('id')
dependencies = list(pf.findall('languageSet/dependencies/dependency'))
if productVersion in products[sap]['versions']:
if products[sap]['versions'][productVersion]['apPlatform'] in allowedPlatforms:
break # There's no single-arch binary if macuniversal is available
if sap == 'APRO':
baseVersion = productVersion
if urlVersion == 4 or urlVersion == 5:
productVersion = pf.find('languageSet/nglLicensingInfo/appVersion').text
if urlVersion == 6:
for b in products_xml.findall('builds/build'):
if b.get("id") == sap and b.get("version") == baseVersion:
productVersion = b.find('nglLicensingInfo/appVersion').text
break
buildGuid = pf.find('languageSet/urls/manifestURL').text
# This is actually manifest URL
products[sap]['versions'][productVersion] = {
'sapCode': sap,
'baseVersion': baseVersion,
'productVersion': productVersion,
'apPlatform': appplatform,
'dependencies': [{
'sapCode': d.find('sapCode').text, 'version': d.find('baseVersion').text
} for d in dependencies],
'buildGuid': buildGuid
}
return products, cdn |
Question prompt default Y. | def questiony(question: str) -> bool:
"""Question prompt default Y."""
reply = None
while reply not in ("", "y", "n"):
reply = input(f"{question} (Y/n): ").lower()
return (reply in ("", "y")) |
Question prompt default N. | def questionn(question: str) -> bool:
"""Question prompt default N."""
reply = None
while reply not in ("", "y", "n"):
reply = input(f"{question} (y/N): ").lower()
return (reply in ("y", "Y")) |
Retrieve JSON. | def get_application_json(buildGuid):
"""Retrieve JSON."""
headers = ADOBE_REQ_HEADERS.copy()
headers['x-adobe-build-guid'] = buildGuid
return json.loads(r(ADOBE_APPLICATION_JSON_URL, headers)) |
Ask for desired download folder | def get_download_path():
"""Ask for desired download folder"""
if (args.destination):
print('\nUsing provided destination: ' + args.destination)
dest = args.destination
else:
print('\nPlease navigate to the desired downloads folder, or cancel to abort.')
p = Popen(['/usr/bin/osascript', '-e',
'tell application (path to frontmost application as text)\nset _path to choose folder\nPOSIX path of _path\nend'], stdout=PIPE)
dest = p.communicate()[0].decode('utf-8').strip()
if (p.returncode != 0):
print('Exiting...')
exit()
return dest |
Download a file | def download_file(url, product_dir, s, v, name=None):
"""Download a file"""
if not name:
name = url.split('/')[-1].split('?')[0]
print('Url is: ' + url)
print('[{}_{}] Downloading {}'.format(s, v, name))
file_path = os.path.join(product_dir, name)
response = session.head(url, stream=True, headers=ADOBE_DL_HEADERS)
total_size_in_bytes = int(
response.headers.get('content-length', 0))
if (args.skipExisting and os.path.isfile(file_path) and os.path.getsize(file_path) == total_size_in_bytes):
print('[{}_{}] {} already exists, skipping'.format(s, v, name))
else:
response = session.get(
url, stream=True, headers=ADOBE_REQ_HEADERS)
total_size_in_bytes = int(
response.headers.get('content-length', 0))
block_size = 1024 # 1 Kibibyte
progress_bar = tqdm(total=total_size_in_bytes,
unit='iB', unit_scale=True)
with open(file_path, 'wb') as file:
for data in response.iter_content(block_size):
progress_bar.update(len(data))
file.write(data)
progress_bar.close()
if total_size_in_bytes != 0 and progress_bar.n != total_size_in_bytes:
print("ERROR, something went wrong") |
Download APRO | def download_APRO(appInfo, cdn):
"""Download APRO"""
manifest = get_products_xml(cdn + appInfo['buildGuid'])
downloadURL = manifest.find('asset_list/asset/asset_path').text
dest = get_download_path()
sapCode = appInfo['sapCode']
version = appInfo['productVersion']
name = 'Install {}_{}_{}.dmg'.format(sapCode, version, appInfo['apPlatform'])
print('')
print('sapCode: ' + sapCode)
print('version: ' + version)
print('installLanguage: ' + 'ALL')
print('dest: ' + os.path.join(dest, name))
print('\nDownloading...\n')
print('[{}_{}] Selected 1 package'.format(sapCode, version))
download_file(downloadURL, dest, sapCode, version, name)
print('\nInstaller successfully downloaded. Open ' + os.path.join(dest, name) + ' and run Acrobat/Acrobat DC Installer.pkg to install.')
return |
Run the main execution. | def run_ccdl(products, cdn, sapCodes, allowedPlatforms):
"""Run Main exicution."""
sapCode = args.sapCode
if not sapCode:
for s, d in sapCodes.items():
print('[{}]{}{}'.format(s, (10 - len(s)) * ' ', d))
while sapCode is None:
val = input(
'\nPlease enter the SAP Code of the desired product (eg. PHSP for Photoshop): ').upper() or 'PHSP'
if products.get(val):
sapCode = val
else:
print(
'{} is not a valid SAP Code. Please use a value from the list above.'.format(val))
product = products.get(sapCode)
versions = product['versions']
version = None
if (args.version):
if versions.get(args.version):
print('\nUsing provided version: ' + args.version)
version = args.version
else:
print('\nProvided version not found: ' + args.version)
print('')
if not version:
lastv = None
for v in reversed(versions.values()):
if v['buildGuid'] and v['apPlatform'] in allowedPlatforms:
print('{} Platform: {} - {}'.format(product['displayName'], v['apPlatform'], v['productVersion']))
lastv = v['productVersion']
while version is None:
val = input('\nPlease enter the desired version. Nothing for ' + lastv + ': ') or lastv
if versions.get(val):
version = val
else:
print('{} is not a valid version. Please use a value from the list above.'.format(val))
print('')
if sapCode == 'APRO':
download_APRO(versions[version], cdn)
return
# TODO: Parse languages in the XML
langs = ['en_US', 'en_GB', 'en_IL', 'en_AE', 'es_ES', 'es_MX', 'pt_BR', 'fr_FR', 'fr_CA', 'fr_MA', 'it_IT', 'de_DE', 'nl_NL',
'ru_RU', 'uk_UA', 'zh_TW', 'zh_CN', 'ja_JP', 'ko_KR', 'pl_PL', 'hu_HU', 'cs_CZ', 'tr_TR', 'sv_SE', 'nb_NO', 'fi_FI', 'da_DK', 'ALL']
# Detect the currently set default OS language.
deflocal = locale.getlocale()[0]
if not deflocal:
deflocal = 'en_US'
oslang = None
if args.osLanguage:
oslang = args.osLanguage
elif deflocal:
oslang = deflocal
if oslang in langs:
deflang = oslang
else:
deflang = 'en_US'
installLanguage = None
if args.installLanguage:
if args.installLanguage in langs:
print('\nUsing provided language: ' + args.installLanguage)
installLanguage = args.installLanguage
else:
print('\nProvided language not available: ' + args.installLanguage)
if not installLanguage:
print('Available languages: {}'.format(', '.join(langs)))
while installLanguage is None:
val = input(
f'\nPlease enter the desired install language, or nothing for [{deflang}]: ') or deflang
if len(val) == 5:
val = val[0:2].lower() + val[2] + val[3:5].upper()
elif len(val) == 3:
val = val.upper()
if val in langs:
installLanguage = val
else:
print(
'{} is not available. Please use a value from the list above.'.format(val))
if oslang != installLanguage:
if installLanguage != 'ALL':
while oslang not in langs:
print('Could not detect your default language for macOS.')
oslang = input(
f'\nPlease enter your OS language, or nothing for [{installLanguage}]: ') or installLanguage
if oslang not in langs:
print(
'{} is not available. Please use a value from the list above.'.format(oslang))
dest = get_download_path()
print('')
prodInfo = versions[version]
prods_to_download = []
dependencies = prodInfo['dependencies']
for d in dependencies:
firstArch = firstGuid = buildGuid = None
for p in products[d['sapCode']]['versions']:
if products[d['sapCode']]['versions'][p]['baseVersion'] == d['version']:
if not firstGuid:
firstGuid = products[d['sapCode']]['versions'][p]['buildGuid']
firstArch = products[d['sapCode']]['versions'][p]['apPlatform']
if products[d['sapCode']]['versions'][p]['apPlatform'] in allowedPlatforms:
buildGuid = products[d['sapCode']]['versions'][p]['buildGuid']
break
if not buildGuid:
buildGuid = firstGuid
prods_to_download.append({'sapCode': d['sapCode'], 'version': d['version'],
'buildGuid': buildGuid})
prods_to_download.insert(
0, {'sapCode': prodInfo['sapCode'], 'version': prodInfo['productVersion'], 'buildGuid': prodInfo['buildGuid']})
apPlatform = prodInfo['apPlatform']
install_app_name = 'Install {}_{}-{}-{}.app'.format(
sapCode, version, installLanguage, apPlatform)
install_app_path = os.path.join(dest, install_app_name)
print('sapCode: ' + sapCode)
print('version: ' + version)
print('installLanguage: ' + installLanguage)
print('dest: ' + install_app_path)
print(prods_to_download)
print('\nCreating {}'.format(install_app_name))
with Popen(['/usr/bin/osacompile', '-l', 'JavaScript', '-o', os.path.join(dest, install_app_path)], stdin=PIPE) as p:
p.communicate(INSTALL_APP_APPLE_SCRIPT.encode('utf-8'))
if os.path.isfile(ADOBE_CC_MAC_ICON_PATH):
icon_path = ADOBE_CC_MAC_ICON_PATH
else:
icon_path = MAC_VOLUME_ICON_PATH
shutil.copyfile(icon_path, os.path.join(install_app_path,
'Contents', 'Resources', 'applet.icns'))
products_dir = os.path.join(
install_app_path, 'Contents', 'Resources', 'products')
print('\nPreparing...\n')
for p in prods_to_download:
s, v = p['sapCode'], p['version']
product_dir = os.path.join(products_dir, s)
app_json_path = os.path.join(product_dir, 'application.json')
print('[{}_{}] Downloading application.json'.format(s, v))
app_json = get_application_json(p['buildGuid'])
p['application_json'] = app_json
print('[{}_{}] Creating folder for product'.format(s, v))
os.makedirs(product_dir, exist_ok=True)
print('[{}_{}] Saving application.json'.format(s, v))
with open(app_json_path, 'w') as file:
json.dump(app_json, file, separators=(',', ':'))
print('')
print('Downloading...\n')
for p in prods_to_download:
s, v = p['sapCode'], p['version']
app_json = p['application_json']
product_dir = os.path.join(products_dir, s)
print('[{}_{}] Parsing available packages'.format(s, v))
core_pkg_count = 0
noncore_pkg_count = 0
packages = app_json['Packages']['Package']
download_urls = []
for pkg in packages:
if pkg.get('Type') and pkg['Type'] == 'core':
core_pkg_count += 1
download_urls.append(cdn + pkg['Path'])
else:
# TODO: actually parse `Condition` and check it properly (and maybe look for & add support for conditions other than installLanguage)
if installLanguage == "ALL":
noncore_pkg_count += 1
download_urls.append(cdn + pkg['Path'])
else:
if (not pkg.get('Condition')) or installLanguage in pkg['Condition'] or oslang in pkg['Condition']:
noncore_pkg_count += 1
download_urls.append(cdn + pkg['Path'])
print('[{}_{}] Selected {} core packages and {} non-core packages'.format(s,
v, core_pkg_count, noncore_pkg_count))
for url in download_urls:
download_file(url, product_dir, s, v)
print('\nGenerating driver.xml')
driver = DRIVER_XML.format(
name=product['displayName'],
sapCode=prodInfo['sapCode'],
version=prodInfo['productVersion'],
installPlatform=apPlatform,
dependencies='\n'.join([DRIVER_XML_DEPENDENCY.format(
sapCode=d['sapCode'],
version=d['version']
) for d in prodInfo['dependencies']]),
language=installLanguage
)
with open(os.path.join(products_dir, 'driver.xml'), 'w') as f:
f.write(driver)
f.close()
print('\nPackage successfully created. Run {} to install.'.format(install_app_path))
return |
Constructs and sends a request.
Returns response object.
method - HTTP method
url - request url
params - (optional) Dictionary or bytes to be sent in the query
string of the new request
data - (optional) Dictionary, bytes, or file-like object to
send in the body of the request
json - (optional) Any json compatible python object
headers - (optional) Dictionary of HTTP Headers to send with
the request
cookies - (optional) Dict object to send with the request
auth - (optional) BasicAuth named tuple representing HTTP Basic Auth
auth - aiohttp.helpers.BasicAuth
allow_redirects - (optional) If set to False, do not follow
redirects
version - Request HTTP version.
compress - Set to True if request has to be compressed
with deflate encoding.
chunked - Set to chunk size for chunked transfer encoding.
expect100 - Expect 100-continue response from server.
connector - BaseConnector sub-class instance to support
connection pooling.
read_until_eof - Read response until eof if response
does not have Content-Length header.
loop - Optional event loop.
timeout - Optional ClientTimeout settings structure, 5min
total timeout by default.
Usage::
>>> import aiohttp
>>> async with aiohttp.request('GET', 'http://python.org/') as resp:
... print(resp)
... data = await resp.read()
<ClientResponse(https://www.python.org/) [200 OK]> | def request(
method: str,
url: StrOrURL,
*,
params: Optional[Mapping[str, str]] = None,
data: Any = None,
json: Any = None,
headers: Optional[LooseHeaders] = None,
skip_auto_headers: Optional[Iterable[str]] = None,
auth: Optional[BasicAuth] = None,
allow_redirects: bool = True,
max_redirects: int = 10,
compress: Optional[str] = None,
chunked: Optional[bool] = None,
expect100: bool = False,
raise_for_status: Optional[bool] = None,
read_until_eof: bool = True,
proxy: Optional[StrOrURL] = None,
proxy_auth: Optional[BasicAuth] = None,
timeout: Union[ClientTimeout, _SENTINEL] = sentinel,
cookies: Optional[LooseCookies] = None,
version: HttpVersion = http.HttpVersion11,
connector: Optional[BaseConnector] = None,
read_bufsize: Optional[int] = None,
max_line_size: int = 8190,
max_field_size: int = 8190,
) -> _SessionRequestContextManager:
"""Constructs and sends a request.
Returns response object.
method - HTTP method
url - request url
params - (optional) Dictionary or bytes to be sent in the query
string of the new request
data - (optional) Dictionary, bytes, or file-like object to
send in the body of the request
json - (optional) Any json compatible python object
headers - (optional) Dictionary of HTTP Headers to send with
the request
cookies - (optional) Dict object to send with the request
auth - (optional) BasicAuth named tuple representing HTTP Basic Auth
auth - aiohttp.helpers.BasicAuth
allow_redirects - (optional) If set to False, do not follow
redirects
version - Request HTTP version.
compress - Set to True if request has to be compressed
with deflate encoding.
chunked - Set to chunk size for chunked transfer encoding.
expect100 - Expect 100-continue response from server.
connector - BaseConnector sub-class instance to support
connection pooling.
read_until_eof - Read response until eof if response
does not have Content-Length header.
loop - Optional event loop.
timeout - Optional ClientTimeout settings structure, 5min
total timeout by default.
Usage::
>>> import aiohttp
>>> async with aiohttp.request('GET', 'http://python.org/') as resp:
... print(resp)
... data = await resp.read()
<ClientResponse(https://www.python.org/) [200 OK]>
"""
connector_owner = False
if connector is None:
connector_owner = True
connector = TCPConnector(force_close=True)
session = ClientSession(
cookies=cookies,
version=version,
timeout=timeout,
connector=connector,
connector_owner=connector_owner,
)
return _SessionRequestContextManager(
session._request(
method,
url,
params=params,
data=data,
json=json,
headers=headers,
skip_auto_headers=skip_auto_headers,
auth=auth,
allow_redirects=allow_redirects,
max_redirects=max_redirects,
compress=compress,
chunked=chunked,
expect100=expect100,
raise_for_status=raise_for_status,
read_until_eof=read_until_eof,
proxy=proxy,
proxy_auth=proxy_auth,
read_bufsize=read_bufsize,
max_line_size=max_line_size,
max_field_size=max_field_size,
),
session,
) |
Load netrc from file.
Attempt to load it from the path specified by the env-var
NETRC or in the default location in the user's home directory.
Returns None if it couldn't be found or fails to parse. | def netrc_from_env() -> Optional[netrc.netrc]:
"""Load netrc from file.
Attempt to load it from the path specified by the env-var
NETRC or in the default location in the user's home directory.
Returns None if it couldn't be found or fails to parse.
"""
netrc_env = os.environ.get("NETRC")
if netrc_env is not None:
netrc_path = Path(netrc_env)
else:
try:
home_dir = Path.home()
except RuntimeError as e: # pragma: no cover
# if pathlib can't resolve home, it may raise a RuntimeError
client_logger.debug(
"Could not resolve home directory when "
"trying to look for .netrc file: %s",
e,
)
return None
netrc_path = home_dir / (
"_netrc" if platform.system() == "Windows" else ".netrc"
)
try:
return netrc.netrc(str(netrc_path))
except netrc.NetrcParseError as e:
client_logger.warning("Could not parse .netrc file: %s", e)
except OSError as e:
netrc_exists = False
with contextlib.suppress(OSError):
netrc_exists = netrc_path.is_file()
# we couldn't read the file (doesn't exist, permissions, etc.)
if netrc_env or netrc_exists:
# only warn if the environment wanted us to load it,
# or it appears like the default file does actually exist
client_logger.warning("Could not read .netrc file: %s", e)
return None |
Return :py:class:`~aiohttp.BasicAuth` credentials for ``host`` from ``netrc_obj``.
:raises LookupError: if ``netrc_obj`` is :py:data:`None` or if no
entry is found for the ``host``. | def basicauth_from_netrc(netrc_obj: Optional[netrc.netrc], host: str) -> BasicAuth:
"""
Return :py:class:`~aiohttp.BasicAuth` credentials for ``host`` from ``netrc_obj``.
:raises LookupError: if ``netrc_obj`` is :py:data:`None` or if no
entry is found for the ``host``.
"""
if netrc_obj is None:
raise LookupError("No .netrc file found")
auth_from_netrc = netrc_obj.authenticators(host)
if auth_from_netrc is None:
raise LookupError(f"No entry for {host!s} found in the `.netrc` file.")
login, account, password = auth_from_netrc
# TODO(PY311): username = login or account
# Up to python 3.10, account could be None if not specified,
# and login will be empty string if not specified. From 3.11,
# login and account will be empty string if not specified.
username = login if (login or account is None) else account
# TODO(PY311): Remove this, as password will be empty string
# if not specified
if password is None:
password = ""
return BasicAuth(username, password) |
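A small sketch of the lookup (assuming the two helpers above are in scope; the hostname is illustrative and a `~/.netrc` entry may or may not exist for it):
```python
netrc_obj = netrc_from_env()
try:
    auth = basicauth_from_netrc(netrc_obj, "example.com")
    print(auth.login, auth.password)
except LookupError as err:
    print(err)  # no .netrc file, or no entry for this host
```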
Get a permitted proxy for the given URL from the env. | def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
"""Get a permitted proxy for the given URL from the env."""
if url.host is not None and proxy_bypass(url.host):
raise LookupError(f"Proxying is disallowed for `{url.host!r}`")
proxies_in_env = proxies_from_env()
try:
proxy_info = proxies_in_env[url.scheme]
except KeyError:
raise LookupError(f"No proxies found for `{url!s}` in the env")
else:
return proxy_info.proxy, proxy_info.proxy_auth |
Parses a MIME type into its components.
mimetype is a MIME type string.
Returns a MimeType object.
Example:
>>> parse_mimetype('text/html; charset=utf-8')
MimeType(type='text', subtype='html', suffix='',
parameters={'charset': 'utf-8'}) | def parse_mimetype(mimetype: str) -> MimeType:
"""Parses a MIME type into its components.
mimetype is a MIME type string.
Returns a MimeType object.
Example:
>>> parse_mimetype('text/html; charset=utf-8')
MimeType(type='text', subtype='html', suffix='',
parameters={'charset': 'utf-8'})
"""
if not mimetype:
return MimeType(
type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict())
)
parts = mimetype.split(";")
params: MultiDict[str] = MultiDict()
for item in parts[1:]:
if not item:
continue
key, _, value = item.partition("=")
params.add(key.lower().strip(), value.strip(' "'))
fulltype = parts[0].strip().lower()
if fulltype == "*":
fulltype = "*/*"
mtype, _, stype = fulltype.partition("/")
stype, _, suffix = stype.partition("+")
return MimeType(
type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
) |
Return 7-bit content as quoted-string.
Format content into a quoted-string as defined in RFC5322 for
Internet Message Format. Notice that this is not the 8-bit HTTP
format, but the 7-bit email format. Content must be in usascii or
a ValueError is raised. | def quoted_string(content: str) -> str:
"""Return 7-bit content as quoted-string.
Format content into a quoted-string as defined in RFC5322 for
Internet Message Format. Notice that this is not the 8-bit HTTP
format, but the 7-bit email format. Content must be in usascii or
a ValueError is raised.
"""
if not (QCONTENT > set(content)):
raise ValueError(f"bad content for quoted-string {content!r}")
return not_qtext_re.sub(lambda x: "\\" + x.group(0), content) |
Sets ``Content-Disposition`` header for MIME.
This is the MIME payload Content-Disposition header from RFC 2183
and RFC 7578 section 4.2, not the HTTP Content-Disposition from
RFC 6266.
disptype is a disposition type: inline, attachment, form-data.
Should be valid extension token (see RFC 2183)
quote_fields performs value quoting to 7-bit MIME headers
according to RFC 7578. Set quote_fields to False if the recipient
can take 8-bit file names and field values.
_charset specifies the charset to use when quote_fields is True.
params is a dict with disposition params. | def content_disposition_header(
disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str
) -> str:
"""Sets ``Content-Disposition`` header for MIME.
This is the MIME payload Content-Disposition header from RFC 2183
and RFC 7578 section 4.2, not the HTTP Content-Disposition from
RFC 6266.
disptype is a disposition type: inline, attachment, form-data.
Should be valid extension token (see RFC 2183)
quote_fields performs value quoting to 7-bit MIME headers
according to RFC 7578. Set quote_fields to False if the recipient
can take 8-bit file names and field values.
_charset specifies the charset to use when quote_fields is True.
params is a dict with disposition params.
"""
if not disptype or not (TOKEN > set(disptype)):
raise ValueError("bad content disposition type {!r}" "".format(disptype))
value = disptype
if params:
lparams = []
for key, val in params.items():
if not key or not (TOKEN > set(key)):
raise ValueError(
"bad content disposition parameter {!r}={!r}".format(key, val)
)
if quote_fields:
if key.lower() == "filename":
qval = quote(val, "", encoding=_charset)
lparams.append((key, '"%s"' % qval))
else:
try:
qval = quoted_string(val)
except ValueError:
qval = "".join(
(_charset, "''", quote(val, "", encoding=_charset))
)
lparams.append((key + "*", qval))
else:
lparams.append((key, '"%s"' % qval))
else:
qval = val.replace("\\", "\\\\").replace('"', '\\"')
lparams.append((key, '"%s"' % qval))
sparams = "; ".join("=".join(pair) for pair in lparams)
value = "; ".join((value, sparams))
return value |
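For instance, a typical multipart form-data field (a sketch assuming the function above is in scope):
```python
header = content_disposition_header("form-data", name="file", filename="report.pdf")
print(header)  # form-data; name="file"; filename="report.pdf"
```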
Checks if received content type is processable as an expected one.
Both arguments should be given without parameters. | def is_expected_content_type(
response_content_type: str, expected_content_type: str
) -> bool:
"""Checks if received content type is processable as an expected one.
Both arguments should be given without parameters.
"""
if expected_content_type == "application/json":
return json_re.match(response_content_type) is not None
return expected_content_type in response_content_type |
Set future exception.
If the future is marked as complete, this function is a no-op.
:param exc_cause: An exception that is a direct cause of ``exc``.
Only set if provided. | def set_exception(
fut: "asyncio.Future[_T] | ErrorableProtocol",
exc: BaseException,
exc_cause: BaseException = _EXC_SENTINEL,
) -> None:
"""Set future exception.
If the future is marked as complete, this function is a no-op.
:param exc_cause: An exception that is a direct cause of ``exc``.
Only set if provided.
"""
if asyncio.isfuture(fut) and fut.done():
return
exc_is_sentinel = exc_cause is _EXC_SENTINEL
exc_causes_itself = exc is exc_cause
if not exc_is_sentinel and not exc_causes_itself:
exc.__cause__ = exc_cause
fut.set_exception(exc) |
Process a date string, return a datetime object | def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]:
"""Process a date string, return a datetime object"""
if date_str is not None:
timetuple = parsedate(date_str)
if timetuple is not None:
with suppress(ValueError):
return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
return None |
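For example, an HTTP-date string parses to a timezone-aware UTC `datetime`, and unparseable input yields `None` (a sketch assuming the function above is in scope):
```python
print(parse_http_date("Tue, 15 Nov 1994 08:12:31 GMT"))
# 1994-11-15 08:12:31+00:00
print(parse_http_date("not a date"))  # None
```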
Check if a request must return an empty body. | def must_be_empty_body(method: str, code: int) -> bool:
"""Check if a request must return an empty body."""
return (
status_code_must_be_empty_body(code)
or method_must_be_empty_body(method)
or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT)
) |
Check if a method must return an empty body. | def method_must_be_empty_body(method: str) -> bool:
"""Check if a method must return an empty body."""
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2
return method.upper() == hdrs.METH_HEAD |
Check if a status code must return an empty body. | def status_code_must_be_empty_body(code: int) -> bool:
"""Check if a status code must return an empty body."""
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
return code in {204, 304} or 100 <= code < 200 |
Check if a Content-Length header should be removed.
This should always be a subset of must_be_empty_body | def should_remove_content_length(method: str, code: int) -> bool:
"""Check if a Content-Length header should be removed.
This should always be a subset of must_be_empty_body
"""
# https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-8
# https://www.rfc-editor.org/rfc/rfc9110.html#section-15.4.5-4
return (
code in {204, 304}
or 100 <= code < 200
or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT)
) |
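For instance (a sketch assuming the helpers above are in scope): a response to a successful CONNECT should drop `Content-Length`, a HEAD response must have an empty body, and an ordinary GET response keeps its header.
```python
print(should_remove_content_length("CONNECT", 200))  # True
print(must_be_empty_body("HEAD", 200))               # True
print(should_remove_content_length("GET", 200))      # False
```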
Check if the upgrade header is supported. | def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool:
"""Check if the upgrade header is supported."""
return headers.get(hdrs.UPGRADE, "").lower() in {"tcp", "websocket"} |
Websocket masking function.
`mask` is a `bytes` object of length 4; `data` is a `bytearray`
object of any length. The contents of `data` are masked with `mask`,
as specified in section 5.3 of RFC 6455.
Note that this function mutates the `data` argument.
This pure-python implementation may be replaced by an optimized
version when available. | def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
"""Websocket masking function.
`mask` is a `bytes` object of length 4; `data` is a `bytearray`
object of any length. The contents of `data` are masked with `mask`,
as specified in section 5.3 of RFC 6455.
Note that this function mutates the `data` argument.
This pure-python implementation may be replaced by an optimized
version when available.
"""
assert isinstance(data, bytearray), data
assert len(mask) == 4, mask
if data:
_XOR_TABLE = _xor_table()
a, b, c, d = (_XOR_TABLE[n] for n in mask)
data[::4] = data[::4].translate(a)
data[1::4] = data[1::4].translate(b)
data[2::4] = data[2::4].translate(c)
data[3::4] = data[3::4].translate(d) |
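The masking is a byte-wise XOR of the payload with the 4-byte key repeated, so applying the same mask twice restores the original data. A standalone sketch of that property (a plain XOR loop rather than the translate-table version above; the key value is arbitrary):
```python
def xor_mask(mask: bytes, data: bytearray) -> None:
    # XOR each payload byte with the corresponding byte of the repeating 4-byte mask
    for i in range(len(data)):
        data[i] ^= mask[i % 4]

payload = bytearray(b"hello websocket")
mask = b"\x37\xfa\x21\x3d"
xor_mask(mask, payload)  # masked, as sent on the wire
xor_mask(mask, payload)  # unmasking with the same key restores the payload
assert payload == bytearray(b"hello websocket")
```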
Set up pytest fixture.
Allow fixtures to be coroutines. Run coroutine fixtures in an event loop. | def pytest_fixture_setup(fixturedef): # type: ignore[no-untyped-def]
"""Set up pytest fixture.
Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
"""
func = fixturedef.func
if inspect.isasyncgenfunction(func):
# async generator fixture
is_async_gen = True
elif asyncio.iscoroutinefunction(func):
# regular async fixture
is_async_gen = False
else:
# not an async fixture, nothing to do
return
strip_request = False
if "request" not in fixturedef.argnames:
fixturedef.argnames += ("request",)
strip_request = True
def wrapper(*args, **kwargs): # type: ignore[no-untyped-def]
request = kwargs["request"]
if strip_request:
del kwargs["request"]
# if neither the fixture nor the test use the 'loop' fixture,
# 'getfixturevalue' will fail because the test is not parameterized
# (this can be removed someday if 'loop' is no longer parameterized)
if "loop" not in request.fixturenames:
raise Exception(
"Asynchronous fixtures must depend on the 'loop' fixture or "
"be used in tests depending from it."
)
_loop = request.getfixturevalue("loop")
if is_async_gen:
# for async generators, we need to advance the generator once,
# then advance it again in a finalizer
gen = func(*args, **kwargs)
def finalizer(): # type: ignore[no-untyped-def]
try:
return _loop.run_until_complete(gen.__anext__())
except StopAsyncIteration:
pass
request.addfinalizer(finalizer)
return _loop.run_until_complete(gen.__anext__())
else:
return _loop.run_until_complete(func(*args, **kwargs))
fixturedef.func = wrapper |
--fast config option | def fast(request): # type: ignore[no-untyped-def]
"""--fast config option"""
return request.config.getoption("--aiohttp-fast") |
--enable-loop-debug config option | def loop_debug(request): # type: ignore[no-untyped-def]
"""--enable-loop-debug config option"""
return request.config.getoption("--aiohttp-enable-loop-debug") |
Context manager which checks for RuntimeWarnings.
This exists specifically to
avoid "coroutine 'X' was never awaited" warnings being missed.
If RuntimeWarnings occur in the context a RuntimeError is raised. | def _runtime_warning_context(): # type: ignore[no-untyped-def]
"""Context manager which checks for RuntimeWarnings.
This exists specifically to
avoid "coroutine 'X' was never awaited" warnings being missed.
If RuntimeWarnings occur in the context a RuntimeError is raised.
"""
with warnings.catch_warnings(record=True) as _warnings:
yield
rw = [
"{w.filename}:{w.lineno}:{w.message}".format(w=w)
for w in _warnings
if w.category == RuntimeWarning
]
if rw:
raise RuntimeError(
"{} Runtime Warning{},\n{}".format(
len(rw), "" if len(rw) == 1 else "s", "\n".join(rw)
)
)
# Propagate warnings to pytest
for msg in _warnings:
warnings.showwarning(
msg.message, msg.category, msg.filename, msg.lineno, msg.file, msg.line
) |
Passthrough loop context.
Sets up and tears down a loop unless one is passed in via the loop
argument when it's passed straight through. | def _passthrough_loop_context(loop, fast=False): # type: ignore[no-untyped-def]
"""Passthrough loop context.
Sets up and tears down a loop unless one is passed in via the loop
argument when it's passed straight through.
"""
if loop:
# loop already exists, pass it straight through
yield loop
else:
# this shadows loop_context's standard behavior
loop = setup_test_loop()
yield loop
teardown_test_loop(loop, fast=fast) |
Fix pytest collecting for coroutines. | def pytest_pycollect_makeitem(collector, name, obj): # type: ignore[no-untyped-def]
"""Fix pytest collecting for coroutines."""
if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj):
return list(collector._genfunctions(name, obj)) |
Run coroutines in an event loop instead of a normal function call. | def pytest_pyfunc_call(pyfuncitem): # type: ignore[no-untyped-def]
"""Run coroutines in an event loop instead of a normal function call."""
fast = pyfuncitem.config.getoption("--aiohttp-fast")
if asyncio.iscoroutinefunction(pyfuncitem.function):
existing_loop = pyfuncitem.funcargs.get(
"proactor_loop"
) or pyfuncitem.funcargs.get("loop", None)
with _runtime_warning_context():
with _passthrough_loop_context(existing_loop, fast=fast) as _loop:
testargs = {
arg: pyfuncitem.funcargs[arg]
for arg in pyfuncitem._fixtureinfo.argnames
}
_loop.run_until_complete(pyfuncitem.obj(**testargs))
return True |
Return an instance of the event loop. | def loop(loop_factory, fast, loop_debug): # type: ignore[no-untyped-def]
"""Return an instance of the event loop."""
policy = loop_factory()
asyncio.set_event_loop_policy(policy)
with loop_context(fast=fast) as _loop:
if loop_debug:
_loop.set_debug(True) # pragma: no cover
asyncio.set_event_loop(_loop)
yield _loop |
Return a port that is unused on the current host. | def aiohttp_unused_port() -> Callable[[], int]:
"""Return a port that is unused on the current host."""
return _unused_port |
Factory to create a TestServer instance, given an app.
aiohttp_server(app, **kwargs) | def aiohttp_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpServer]:
"""Factory to create a TestServer instance, given an app.
aiohttp_server(app, **kwargs)
"""
servers = []
async def go(app, *, port=None, **kwargs): # type: ignore[no-untyped-def]
server = TestServer(app, port=port)
await server.start_server(**kwargs)
servers.append(server)
return server
yield go
async def finalize() -> None:
while servers:
await servers.pop().close()
loop.run_until_complete(finalize()) |
Factory to create a RawTestServer instance, given a web handler.
aiohttp_raw_server(handler, **kwargs) | def aiohttp_raw_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpRawServer]:
"""Factory to create a RawTestServer instance, given a web handler.
aiohttp_raw_server(handler, **kwargs)
"""
servers = []
async def go(handler, *, port=None, **kwargs): # type: ignore[no-untyped-def]
server = RawTestServer(handler, port=port)
await server.start_server(**kwargs)
servers.append(server)
return server
yield go
async def finalize() -> None:
while servers:
await servers.pop().close()
loop.run_until_complete(finalize()) |
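A minimal sketch of how the raw-server fixture is typically combined with ``aiohttp_client`` in a test; the handler and assertions are illustrative, not taken from the source:
from aiohttp import web
async def test_raw_server_returns_ok(aiohttp_raw_server, aiohttp_client):
    # Hypothetical low-level handler used only for illustration.
    async def handler(request: web.BaseRequest) -> web.Response:
        return web.Response(text="OK")
    raw_server = await aiohttp_raw_server(handler)
    client = await aiohttp_client(raw_server)
    resp = await client.get("/")
    assert resp.status == 200
    assert await resp.text() == "OK"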
Client class to use in ``aiohttp_client`` factory.
Use it for passing custom ``TestClient`` implementations.
Example::
class MyClient(TestClient):
async def login(self, *, user, pw):
payload = {"username": user, "password": pw}
return await self.post("/login", json=payload)
@pytest.fixture
def aiohttp_client_cls():
return MyClient
def test_login(aiohttp_client):
app = web.Application()
client = await aiohttp_client(app)
await client.login(user="admin", pw="s3cr3t") | def aiohttp_client_cls() -> Type[TestClient]:
"""
Client class to use in ``aiohttp_client`` factory.
Use it for passing custom ``TestClient`` implementations.
Example::
class MyClient(TestClient):
async def login(self, *, user, pw):
payload = {"username": user, "password": pw}
return await self.post("/login", json=payload)
@pytest.fixture
def aiohttp_client_cls():
return MyClient
def test_login(aiohttp_client):
app = web.Application()
client = await aiohttp_client(app)
await client.login(user="admin", pw="s3cr3t")
"""
return TestClient |
Factory to create a TestClient instance.
aiohttp_client(app, **kwargs)
aiohttp_client(server, **kwargs)
aiohttp_client(raw_server, **kwargs) | def aiohttp_client(
loop: asyncio.AbstractEventLoop, aiohttp_client_cls: Type[TestClient]
) -> Iterator[AiohttpClient]:
"""Factory to create a TestClient instance.
aiohttp_client(app, **kwargs)
aiohttp_client(server, **kwargs)
aiohttp_client(raw_server, **kwargs)
"""
clients = []
async def go(
__param: Union[Application, BaseTestServer],
*,
server_kwargs: Optional[Dict[str, Any]] = None,
**kwargs: Any
) -> TestClient:
if isinstance(__param, Application):
server_kwargs = server_kwargs or {}
server = TestServer(__param, **server_kwargs)
client = aiohttp_client_cls(server, **kwargs)
elif isinstance(__param, BaseTestServer):
client = aiohttp_client_cls(__param, **kwargs)
else:
raise ValueError("Unknown argument type: %r" % type(__param))
await client.start_server()
clients.append(client)
return client
yield go
async def finalize() -> None:
while clients:
await clients.pop().close()
loop.run_until_complete(finalize()) |
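For reference, a typical use of the ``aiohttp_client`` factory inside a test; the application and route are assumptions made for the example:
from aiohttp import web
async def test_hello(aiohttp_client):
    # Hypothetical handler and app, wired up only to exercise the fixture.
    async def hello(request: web.Request) -> web.Response:
        return web.Response(text="Hello, world")
    app = web.Application()
    app.router.add_get("/", hello)
    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 200
    assert "Hello, world" in await resp.text()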
Return a port that is unused on the current host. | def unused_port() -> int:
"""Return a port that is unused on the current host."""
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.bind(("127.0.0.1", 0))
return cast(int, s.getsockname()[1]) |
A contextmanager that creates an event_loop, for test purposes.
Handles the creation and cleanup of a test loop. | def loop_context(
loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False
) -> Iterator[asyncio.AbstractEventLoop]:
"""A contextmanager that creates an event_loop, for test purposes.
Handles the creation and cleanup of a test loop.
"""
loop = setup_test_loop(loop_factory)
yield loop
teardown_test_loop(loop, fast=fast) |
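A small sketch of using ``loop_context`` directly in a synchronous test body; the coroutine run here is an arbitrary example:
import asyncio
from aiohttp.test_utils import loop_context
def test_with_explicit_loop():
    with loop_context() as loop:
        # The temporary loop is set as current and torn down on exit.
        assert loop.run_until_complete(asyncio.sleep(0, result="done")) == "done"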
Create and return an asyncio.BaseEventLoop instance.
The caller should also call teardown_test_loop,
once they are done with the loop. | def setup_test_loop(
loop_factory: _LOOP_FACTORY = asyncio.new_event_loop,
) -> asyncio.AbstractEventLoop:
"""Create and return an asyncio.BaseEventLoop instance.
The caller should also call teardown_test_loop,
once they are done with the loop.
"""
loop = loop_factory()
asyncio.set_event_loop(loop)
return loop |
Teardown and cleanup an event_loop created by setup_test_loop. | def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None:
"""Teardown and cleanup an event_loop created by setup_test_loop."""
closed = loop.is_closed()
if not closed:
loop.call_soon(loop.stop)
loop.run_forever()
loop.close()
if not fast:
gc.collect()
asyncio.set_event_loop(None) |
Creates a mocked web.Request for testing purposes.
Useful in unit tests, when spinning full web server is overkill or
specific conditions and errors are hard to trigger. | def make_mocked_request(
method: str,
path: str,
headers: Any = None,
*,
match_info: Any = sentinel,
version: HttpVersion = HttpVersion(1, 1),
closing: bool = False,
app: Any = None,
writer: Any = sentinel,
protocol: Any = sentinel,
transport: Any = sentinel,
payload: Any = sentinel,
sslcontext: Optional[SSLContext] = None,
client_max_size: int = 1024**2,
loop: Any = ...,
) -> Request:
"""Creates mocked web.Request testing purposes.
Useful in unit tests, when spinning full web server is overkill or
specific conditions and errors are hard to trigger.
"""
task = mock.Mock()
if loop is ...:
# no loop passed, try to get the current one if
# it is running as we need a real loop to create
# executor jobs to be able to do testing
# with a real executor
try:
loop = asyncio.get_running_loop()
except RuntimeError:
loop = mock.Mock()
loop.create_future.return_value = ()
if version < HttpVersion(1, 1):
closing = True
if headers:
headers = CIMultiDictProxy(CIMultiDict(headers))
raw_hdrs = tuple(
(k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
)
else:
headers = CIMultiDictProxy(CIMultiDict())
raw_hdrs = ()
chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower()
message = RawRequestMessage(
method,
path,
version,
headers,
raw_hdrs,
closing,
None,
False,
chunked,
URL(path),
)
if app is None:
app = _create_app_mock()
if transport is sentinel:
transport = _create_transport(sslcontext)
if protocol is sentinel:
protocol = mock.Mock()
protocol.transport = transport
if writer is sentinel:
writer = mock.Mock()
writer.write_headers = make_mocked_coro(None)
writer.write = make_mocked_coro(None)
writer.write_eof = make_mocked_coro(None)
writer.drain = make_mocked_coro(None)
writer.transport = transport
protocol.transport = transport
protocol.writer = writer
if payload is sentinel:
payload = mock.Mock()
req = Request(
message, payload, protocol, writer, task, loop, client_max_size=client_max_size
)
match_info = UrlMappingMatchInfo(
{} if match_info is sentinel else match_info, mock.Mock()
)
match_info.add_app(app)
req._match_info = match_info
return req |
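A minimal sketch of exercising a handler with ``make_mocked_request``; the handler itself is a hypothetical example:
from aiohttp import web
from aiohttp.test_utils import make_mocked_request
async def test_handler_echoes_token():
    # Hypothetical handler that returns a request header verbatim.
    async def handler(request: web.Request) -> web.Response:
        return web.Response(text=request.headers["token"])
    req = make_mocked_request("GET", "/", headers={"token": "x"})
    resp = await handler(req)
    assert resp.text == "x"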
Creates a coroutine mock. | def make_mocked_coro(
return_value: Any = sentinel, raise_exception: Any = sentinel
) -> Any:
"""Creates a coroutine mock."""
async def mock_coro(*args: Any, **kwargs: Any) -> Any:
if raise_exception is not sentinel:
raise raise_exception
if not inspect.isawaitable(return_value):
return return_value
await return_value
return mock.Mock(wraps=mock_coro) |
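``make_mocked_coro`` pairs naturally with the standard ``unittest.mock`` call assertions; a sketch with an arbitrary return value:
from aiohttp.test_utils import make_mocked_coro
async def test_mocked_coro_records_calls():
    mocked = make_mocked_coro(return_value=42)
    # The wrapping Mock records calls while the coroutine returns the value.
    assert await mocked("a", key="b") == 42
    mocked.assert_called_once_with("a", key="b")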
Run an app locally | def run_app(
app: Union[Application, Awaitable[Application]],
*,
debug: bool = False,
host: Optional[Union[str, HostSequence]] = None,
port: Optional[int] = None,
path: Union[PathLike, TypingIterable[PathLike], None] = None,
sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
shutdown_timeout: float = 60.0,
keepalive_timeout: float = 75.0,
ssl_context: Optional[SSLContext] = None,
print: Optional[Callable[..., None]] = print,
backlog: int = 128,
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
access_log_format: str = AccessLogger.LOG_FORMAT,
access_log: Optional[logging.Logger] = access_logger,
handle_signals: bool = True,
reuse_address: Optional[bool] = None,
reuse_port: Optional[bool] = None,
handler_cancellation: bool = False,
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
"""Run an app locally"""
if loop is None:
loop = asyncio.new_event_loop()
loop.set_debug(debug)
# Configure if and only if in debugging mode and using the default logger
if loop.get_debug() and access_log and access_log.name == "aiohttp.access":
if access_log.level == logging.NOTSET:
access_log.setLevel(logging.DEBUG)
if not access_log.hasHandlers():
access_log.addHandler(logging.StreamHandler())
main_task = loop.create_task(
_run_app(
app,
host=host,
port=port,
path=path,
sock=sock,
shutdown_timeout=shutdown_timeout,
keepalive_timeout=keepalive_timeout,
ssl_context=ssl_context,
print=print,
backlog=backlog,
access_log_class=access_log_class,
access_log_format=access_log_format,
access_log=access_log,
handle_signals=handle_signals,
reuse_address=reuse_address,
reuse_port=reuse_port,
handler_cancellation=handler_cancellation,
)
)
try:
asyncio.set_event_loop(loop)
loop.run_until_complete(main_task)
except (GracefulExit, KeyboardInterrupt): # pragma: no cover
pass
finally:
_cancel_tasks({main_task}, loop)
_cancel_tasks(asyncio.all_tasks(loop), loop)
loop.run_until_complete(loop.shutdown_asyncgens())
loop.close()
asyncio.set_event_loop(None) |
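Typical top-level usage from a user script, via the public ``aiohttp.web`` namespace; the handler and port are placeholders:
from aiohttp import web
async def handle_root(request: web.Request) -> web.Response:
    return web.Response(text="hello")
app = web.Application()
app.router.add_get("/", handle_root)
web.run_app(app, host="127.0.0.1", port=8080)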
Factory for producing a middleware that normalizes the path of a request.
Normalizing means:
- Add or remove a trailing slash to the path.
- Double slashes are replaced by one.
The middleware returns as soon as it finds a path that resolves
correctly. The order if both merge and append/remove are enabled is
1) merge slashes
2) append/remove slash
3) both merge slashes and append/remove slash.
If the path resolves with at least one of those conditions, it will
redirect to the new path.
Only one of `append_slash` and `remove_slash` can be enabled. If both
are `True` the factory will raise an assertion error
If `append_slash` is `True` the middleware will append a slash when
needed. If a resource is defined with trailing slash and the request
comes without it, it will append it automatically.
If `remove_slash` is `True`, `append_slash` must be `False`. When enabled
the middleware will remove trailing slashes and redirect if the resource
is defined
If merge_slashes is True, merge multiple consecutive slashes in the
path into one. | def normalize_path_middleware(
*,
append_slash: bool = True,
remove_slash: bool = False,
merge_slashes: bool = True,
redirect_class: Type[HTTPMove] = HTTPPermanentRedirect,
) -> Middleware:
"""Factory for producing a middleware that normalizes the path of a request.
Normalizing means:
- Add or remove a trailing slash to the path.
- Double slashes are replaced by one.
The middleware returns as soon as it finds a path that resolves
correctly. The order if both merge and append/remove are enabled is
1) merge slashes
2) append/remove slash
3) both merge slashes and append/remove slash.
If the path resolves with at least one of those conditions, it will
redirect to the new path.
Only one of `append_slash` and `remove_slash` can be enabled. If both
are `True` the factory will raise an assertion error
If `append_slash` is `True` the middleware will append a slash when
needed. If a resource is defined with trailing slash and the request
comes without it, it will append it automatically.
If `remove_slash` is `True`, `append_slash` must be `False`. When enabled
the middleware will remove trailing slashes and redirect if the resource
is defined
If merge_slashes is True, merge multiple consecutive slashes in the
path into one.
"""
correct_configuration = not (append_slash and remove_slash)
assert correct_configuration, "Cannot both remove and append slash"
async def impl(request: Request, handler: Handler) -> StreamResponse:
if isinstance(request.match_info.route, SystemRoute):
paths_to_check = []
if "?" in request.raw_path:
path, query = request.raw_path.split("?", 1)
query = "?" + query
else:
query = ""
path = request.raw_path
if merge_slashes:
paths_to_check.append(re.sub("//+", "/", path))
if append_slash and not request.path.endswith("/"):
paths_to_check.append(path + "/")
if remove_slash and request.path.endswith("/"):
paths_to_check.append(path[:-1])
if merge_slashes and append_slash:
paths_to_check.append(re.sub("//+", "/", path + "/"))
if merge_slashes and remove_slash and path.endswith("/"):
merged_slashes = re.sub("//+", "/", path)
paths_to_check.append(merged_slashes[:-1])
for path in paths_to_check:
path = re.sub("^//+", "/", path) # SECURITY: GHSA-v6wp-4m6f-gcjg
resolves, request = await _check_request_resolves(request, path)
if resolves:
raise redirect_class(request.raw_path + query)
return await handler(request)
return impl |
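Installing the middleware is a one-liner when constructing the application; a sketch via the public API (the append_slash/merge_slashes choices are arbitrary):
from aiohttp import web
app = web.Application(
    middlewares=[web.normalize_path_middleware(append_slash=True, merge_slashes=True)]
)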
Prepare :file:`.netrc` with given contents.
Monkey-patches :envvar:`NETRC` to point to created file. | def netrc_contents(
tmp_path: Path,
monkeypatch: pytest.MonkeyPatch,
request: pytest.FixtureRequest,
):
"""
Prepare :file:`.netrc` with given contents.
Monkey-patches :envvar:`NETRC` to point to created file.
"""
netrc_contents = getattr(request, "param", None)
netrc_file_path = tmp_path / ".netrc"
if netrc_contents is not None:
netrc_file_path.write_text(netrc_contents)
monkeypatch.setenv("NETRC", str(netrc_file_path))
return netrc_file_path |
Find all importables in the project.
Return them in order. | def _find_all_importables(pkg: ModuleType) -> List[str]:
"""Find all importables in the project.
Return them in order.
"""
return sorted(
set(
chain.from_iterable(
_discover_path_importables(Path(p), pkg.__name__) for p in pkg.__path__
),
),
) |
Yield all importables under a given path and package. | def _discover_path_importables(
pkg_pth: Path,
pkg_name: str,
) -> Generator[str, None, None]:
"""Yield all importables under a given path and package."""
for dir_path, _d, file_names in os.walk(pkg_pth):
pkg_dir_path = Path(dir_path)
if pkg_dir_path.parts[-1] == "__pycache__":
continue
if all(Path(_).suffix != ".py" for _ in file_names):
continue
rel_pt = pkg_dir_path.relative_to(pkg_pth)
pkg_pref = ".".join((pkg_name,) + rel_pt.parts)
yield from (
pkg_path
for _, pkg_path, _ in pkgutil.walk_packages(
(str(pkg_dir_path),),
prefix=f"{pkg_pref}.",
)
) |
Verify that exploding importables doesn't explode.
This is seeking for any import errors including ones caused
by circular imports. | def test_no_warnings(import_path: str) -> None:
"""Verify that exploding importables doesn't explode.
This is seeking for any import errors including ones caused
by circular imports.
"""
imp_cmd = (
# fmt: off
sys.executable,
"-W", "error",
# The following deprecation warning is triggered by importing
# `gunicorn.util`. Hopefully, it'll get fixed in the future. See
# https://github.com/benoitc/gunicorn/issues/2840 for detail.
"-W", "ignore:module 'sre_constants' is "
"deprecated:DeprecationWarning:pkg_resources._vendor.pyparsing",
# Also caused by `gunicorn.util` importing `pkg_resources`:
"-W", "ignore:Creating a LegacyVersion has been deprecated and "
"will be removed in the next major release:"
"DeprecationWarning:",
# Deprecation warning emitted by setuptools v67.5.0+ triggered by importing
# `gunicorn.util`.
"-W", "ignore:pkg_resources is deprecated as an API:"
"DeprecationWarning",
"-c", f"import {import_path!s}",
# fmt: on
)
subprocess.check_call(imp_cmd) |
Test appropriate Authorization header is sent when netrc is not empty. | def test_basicauth_from_netrc_present(
make_request: Any,
expected_auth: Optional[helpers.BasicAuth],
):
"""Test appropriate Authorization header is sent when netrc is not empty."""
req = make_request("get", "http://example.com", trust_env=True)
assert req.headers[hdrs.AUTHORIZATION] == expected_auth.encode() |
Test no authorization header is sent via netrc if trust_env is False | def test_basicauth_from_netrc_present_untrusted_env(
make_request: Any,
):
"""Test no authorization header is sent via netrc if trust_env is False"""
req = make_request("get", "http://example.com", trust_env=False)
assert hdrs.AUTHORIZATION not in req.headers |
Test that no Authorization header is sent when netrc is empty | def test_basicauth_from_empty_netrc(
make_request: Any,
):
"""Test that no Authorization header is sent when netrc is empty"""
req = make_request("get", "http://example.com", trust_env=True)
assert hdrs.AUTHORIZATION not in req.headers |
Create pickled data for test_pickle_format(). | def dump_cookiejar() -> bytes: # pragma: no cover
"""Create pickled data for test_pickle_format()."""
cj = CookieJar()
cj.update_cookies(cookies_to_send.__pytest_wrapped__.obj())
return pickle.dumps(cj._cookies, pickle.HIGHEST_PROTOCOL) |
Test if cookiejar pickle format breaks.
If this test fails, it may indicate that saved cookiejars will stop working.
If that happens then:
1. Avoid releasing the change in a bugfix release.
2. Try to include a migration script in the release notes (example below).
3. Use dump_cookiejar() at the top of this file to update `pickled`.
Depending on the changes made, a migration script might look like:
import pickle
with file_path.open("rb") as f:
cookies = pickle.load(f)
morsels = [(name, m) for c in cookies.values() for name, m in c.items()]
cookies.clear()
for name, m in morsels:
cookies[(m["domain"], m["path"])][name] = m
with file_path.open("wb") as f:
pickle.dump(cookies, f, pickle.HIGHEST_PROTOCOL) | def test_pickle_format(cookies_to_send) -> None:
"""Test if cookiejar pickle format breaks.
If this test fails, it may indicate that saved cookiejars will stop working.
If that happens then:
1. Avoid releasing the change in a bugfix release.
2. Try to include a migration script in the release notes (example below).
3. Use dump_cookiejar() at the top of this file to update `pickled`.
Depending on the changes made, a migration script might look like:
import pickle
with file_path.open("rb") as f:
cookies = pickle.load(f)
morsels = [(name, m) for c in cookies.values() for name, m in c.items()]
cookies.clear()
for name, m in morsels:
cookies[(m["domain"], m["path"])][name] = m
with file_path.open("wb") as f:
pickle.dump(cookies, f, pickle.HIGHEST_PROTOCOL)
"""
pickled = b"\x80\x04\x95\xc8\x0b\x00\x00\x00\x00\x00\x00\x8c\x0bcollections\x94\x8c\x0bdefaultdict\x94\x93\x94\x8c\x0chttp.cookies\x94\x8c\x0cSimpleCookie\x94\x93\x94\x85\x94R\x94(\x8c\x00\x94h\x08\x86\x94h\x05)\x81\x94\x8c\rshared-cookie\x94h\x03\x8c\x06Morsel\x94\x93\x94)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94\x8c\x01/\x94\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\x08\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(\x8c\x03key\x94h\x0b\x8c\x05value\x94\x8c\x05first\x94\x8c\x0bcoded_value\x94h\x1cubs\x8c\x0bexample.com\x94h\x08\x86\x94h\x05)\x81\x94(\x8c\rdomain-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\x1e\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah!h\x1b\x8c\x06second\x94h\x1dh-ub\x8c\x14dotted-domain-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94\x8c\x0bexample.com\x94\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah.h\x1b\x8c\x05fifth\x94h\x1dh;ubu\x8c\x11test1.example.com\x94h\x08\x86\x94h\x05)\x81\x94\x8c\x11subdomain1-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94h<\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah?h\x1b\x8c\x05third\x94h\x1dhKubs\x8c\x11test2.example.com\x94h\x08\x86\x94h\x05)\x81\x94\x8c\x11subdomain2-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94hL\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ahOh\x1b\x8c\x06fourth\x94h\x1dh[ubs\x8c\rdifferent.org\x94h\x08\x86\x94h\x05)\x81\x94\x8c\x17different-domain-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\\\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah_h\x1b\x8c\x05sixth\x94h\x1dhkubs\x8c\nsecure.com\x94h\x08\x86\x94h\x05)\x81\x94\x8c\rsecure-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94hl\x8c\x07max-age\x94h\x08\x8c\x06secure\x94\x88\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ahoh\x1b\x8c\x07seventh\x94h\x1dh{ubs\x8c\x0cpathtest.com\x94h\x08\x86\x94h\x05)\x81\x94(\x8c\x0eno-path-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94h|\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\x7fh\x1b\x8c\x06eighth\x94h\x1dh\x8bub\x8c\x0cpath1-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94\x8c\x0cpathtest.com\x94\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\x8ch\x1b\x8c\x05ninth\x94h\x1dh\x99ubu\x8c\x0cpathtest.com\x94\x8c\x04/one\x94\x86\x94h\x05)\x81\x94\x8c\x0cpath2-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x9b\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\x9a\x8c\x07max-age\x94h\x
08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\x9eh\x1b\x8c\x05tenth\x94h\x1dh\xaaubs\x8c\x0cpathtest.com\x94\x8c\x08/one/two\x94\x86\x94h\x05)\x81\x94(\x8c\x0cpath3-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\xac\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\xab\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\xafh\x1b\x8c\x08eleventh\x94h\x1dh\xbbub\x8c\x0cpath4-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94\x8c\t/one/two/\x94\x8c\x07comment\x94h\x08\x8c\x06domain\x94\x8c\x0cpathtest.com\x94\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\xbch\x1b\x8c\x07twelfth\x94h\x1dh\xcaubu\x8c\x0fexpirestest.com\x94h\x08\x86\x94h\x05)\x81\x94\x8c\x0eexpires-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94\x8c\x1cTue, 1 Jan 2999 12:00:00 GMT\x94\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\xcb\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\xceh\x1b\x8c\nthirteenth\x94h\x1dh\xdbubs\x8c\x0emaxagetest.com\x94h\x08\x86\x94h\x05)\x81\x94\x8c\x0emax-age-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\xdc\x8c\x07max-age\x94\x8c\x0260\x94\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\xdfh\x1b\x8c\nfourteenth\x94h\x1dh\xecubs\x8c\x12invalid-values.com\x94h\x08\x86\x94h\x05)\x81\x94(\x8c\x16invalid-max-age-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94h\xed\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\xf0h\x1b\x8c\tfifteenth\x94h\x1dh\xfcub\x8c\x16invalid-expires-cookie\x94h\r)\x81\x94(\x8c\x07expires\x94h\x08\x8c\x04path\x94h\x11\x8c\x07comment\x94h\x08\x8c\x06domain\x94\x8c\x12invalid-values.com\x94\x8c\x07max-age\x94h\x08\x8c\x06secure\x94h\x08\x8c\x08httponly\x94h\x08\x8c\x07version\x94h\x08\x8c\x08samesite\x94h\x08u}\x94(h\x1ah\xfdh\x1b\x8c\tsixteenth\x94h\x1dj\n\x01\x00\x00ubuu."
cookies = pickle.loads(pickled)
cj = CookieJar()
cj.update_cookies(cookies_to_send)
assert cookies == cj._cookies |
Per RFC 2045, media type matching is case insensitive. | def test_is_expected_content_type_json_non_lowercase():
"""Per RFC 2045, media type matching is case insensitive."""
expected_ct = "application/json"
response_ct = "Application/JSON"
assert is_expected_content_type(
response_content_type=response_ct, expected_content_type=expected_ct
) |
Test that reading netrc files from env works as expected | def test_netrc_from_env(expected_username: str):
"""Test that reading netrc files from env works as expected"""
netrc_obj = helpers.netrc_from_env()
assert netrc_obj.authenticators("example.com")[0] == expected_username |
Test that netrc file contents are properly parsed into BasicAuth tuples | def test_basicauth_present_in_netrc(
expected_auth: helpers.BasicAuth,
):
"""Test that netrc file contents are properly parsed into BasicAuth tuples"""
netrc_obj = helpers.netrc_from_env()
assert expected_auth == helpers.basicauth_from_netrc(netrc_obj, "example.com") |
Test that an error is raised if netrc doesn't have an entry for our host | def test_read_basicauth_from_empty_netrc():
"""Test that an error is raised if netrc doesn't have an entry for our host"""
netrc_obj = helpers.netrc_from_env()
with pytest.raises(
LookupError, match="No entry for example.com found in the `.netrc` file."
):
helpers.basicauth_from_netrc(netrc_obj, "example.com") |
Test that HEAD is the only method that unequivocally must have an empty body. | def test_method_must_be_empty_body():
"""Test that HEAD is the only method that unequivocally must have an empty body."""
assert method_must_be_empty_body("HEAD") is True
# CONNECT is only empty on a successful response
assert method_must_be_empty_body("CONNECT") is False |
Test should_remove_content_length is always a subset of must_be_empty_body. | def test_should_remove_content_length_is_subset_of_must_be_empty_body():
"""Test should_remove_content_length is always a subset of must_be_empty_body."""
assert should_remove_content_length("GET", 101) is True
assert must_be_empty_body("GET", 101) is True
assert should_remove_content_length("GET", 102) is True
assert must_be_empty_body("GET", 102) is True
assert should_remove_content_length("GET", 204) is True
assert must_be_empty_body("GET", 204) is True
assert should_remove_content_length("GET", 204) is True
assert must_be_empty_body("GET", 204) is True
assert should_remove_content_length("GET", 200) is False
assert must_be_empty_body("GET", 200) is False
assert should_remove_content_length("HEAD", 200) is False
assert must_be_empty_body("HEAD", 200) is True
# CONNECT is only empty on a successful response
assert should_remove_content_length("CONNECT", 200) is True
assert must_be_empty_body("CONNECT", 200) is True
assert should_remove_content_length("CONNECT", 201) is True
assert must_be_empty_body("CONNECT", 201) is True
assert should_remove_content_length("CONNECT", 300) is False
assert must_be_empty_body("CONNECT", 300) is False |
Test that invalid chunked encoding doesn't allow content-length to be used. | def test_bad_chunked_py(loop: Any, protocol: Any) -> None:
"""Test that invalid chunked encoding doesn't allow content-length to be used."""
parser = HttpRequestParserPy(
protocol,
loop,
2**16,
max_line_size=8190,
max_field_size=8190,
)
text = (
b"GET / HTTP/1.1\r\nHost: a\r\nTransfer-Encoding: chunked\r\n\r\n0_2e\r\n\r\n"
+ b"GET / HTTP/1.1\r\nHost: a\r\nContent-Length: 5\r\n\r\n0\r\n\r\n"
)
messages, upgrade, tail = parser.feed_data(text)
assert isinstance(messages[0][1].exception(), http_exceptions.TransferEncodingError) |
C parser behaves differently. Maybe we should align them later. | def test_bad_chunked_c(loop: Any, protocol: Any) -> None:
"""C parser behaves differently. Maybe we should align them later."""
parser = HttpRequestParserC(
protocol,
loop,
2**16,
max_line_size=8190,
max_field_size=8190,
)
text = (
b"GET / HTTP/1.1\r\nHost: a\r\nTransfer-Encoding: chunked\r\n\r\n0_2e\r\n\r\n"
+ b"GET / HTTP/1.1\r\nHost: a\r\nContent-Length: 5\r\n\r\n0\r\n\r\n"
)
with pytest.raises(http_exceptions.BadHttpMessage):
parser.feed_data(text) |
Test not upgraded if missing Upgrade header. | def test_bad_upgrade(parser: Any) -> None:
"""Test not upgraded if missing Upgrade header."""
text = b"GET /test HTTP/1.1\r\nconnection: upgrade\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert not msg.upgrade
assert not upgrade |
Still a lot of dodgy servers sending bad requests like this. | def test_http_response_parser_bad_crlf(response: Any) -> None:
"""Still a lot of dodgy servers sending bad requests like this."""
messages, upgrade, tail = response.feed_data(
b"HTTP/1.0 200 OK\nFoo: abc\nBar: def\n\nBODY\n"
)
msg = messages[0][0]
assert msg.headers["Foo"] == "abc"
assert msg.headers["Bar"] == "def" |
See https://github.com/aio-libs/aiohttp/issues/6197 | def test___all__(pytester: pytest.Pytester) -> None:
"""See https://github.com/aio-libs/aiohttp/issues/6197"""
pytester.makepyfile(
test_a="""
from aiohttp import *
assert 'GunicornWebWorker' in globals()
"""
)
result = pytester.runpytest("-vv")
result.assert_outcomes(passed=0, errors=0) |
Check that importing aiohttp doesn't take too long.
Obviously, the time may vary on different machines and may need to be adjusted
from time to time, but this should provide an early warning if something is
added that significantly increases import time. | def test_import_time(pytester: pytest.Pytester) -> None:
"""Check that importing aiohttp doesn't take too long.
Obviously, the time may vary on different machines and may need to be adjusted
from time to time, but this should provide an early warning if something is
added that significantly increases import time.
"""
root = Path(__file__).parent.parent
old_path = os.environ.get("PYTHONPATH")
os.environ["PYTHONPATH"] = os.pathsep.join([str(root)] + sys.path)
best_time_ms = 1000
cmd = "import timeit; print(int(timeit.timeit('import aiohttp', number=1) * 1000))"
try:
for _ in range(3):
r = pytester.run(sys.executable, "-We", "-c", cmd)
assert not r.stderr.str()
runtime_ms = int(r.stdout.str())
if runtime_ms < best_time_ms:
best_time_ms = runtime_ms
finally:
if old_path is None:
os.environ.pop("PYTHONPATH")
else:
os.environ["PYTHONPATH"] = old_path
expected_time = _TARGET_TIMINGS_BY_PYTHON_VERSION.get(
f"{sys.version_info.major}.{sys.version_info.minor}", 200
)
assert best_time_ms < expected_time |
Return the URL of an instance of a running secure proxy.
This fixture also spawns that instance and tears it down after the test. | def secure_proxy_url(tls_certificate_pem_path):
"""Return the URL of an instance of a running secure proxy.
This fixture also spawns that instance and tears it down after the test.
"""
proxypy_args = [
# --threadless does not work on windows, see
# https://github.com/abhinavsingh/proxy.py/issues/492
"--threaded" if os.name == "nt" else "--threadless",
"--num-workers",
"1", # the tests only send one query anyway
"--hostname",
"127.0.0.1", # network interface to listen to
"--port",
0, # ephemeral port, so that kernel allocates a free one
"--cert-file",
tls_certificate_pem_path, # contains both key and cert
"--key-file",
tls_certificate_pem_path, # contains both key and cert
]
with proxy.Proxy(input_args=proxypy_args) as proxy_instance:
yield URL.build(
scheme="https",
host=str(proxy_instance.flags.hostname),
port=proxy_instance.flags.port,
) |
Test that the logger does nothing when the log level is disabled. | def test_logger_does_nothing_when_disabled(caplog: pytest.LogCaptureFixture) -> None:
"""Test that the logger does nothing when the log level is disabled."""
mock_logger = logging.getLogger("test.aiohttp.log")
mock_logger.setLevel(logging.WARNING)  # INFO access-log records are disabled at this level
access_logger = AccessLogger(mock_logger, "%b")
access_logger.log(
mock.Mock(name="mock_request"), mock.Mock(name="mock_response"), 42
)
assert "mock_response" in caplog.text |
Create a temp path with hello.txt and compressed versions.
The uncompressed text file path is returned by default. Alternatively, an
indirect parameter can be passed with an encoding to get a compressed path. | def hello_txt(request, tmp_path_factory) -> pathlib.Path:
"""Create a temp path with hello.txt and compressed versions.
The uncompressed text file path is returned by default. Alternatively, an
indirect parameter can be passed with an encoding to get a compressed path.
"""
txt = tmp_path_factory.mktemp("hello-") / "hello.txt"
hello = {
None: txt,
"gzip": txt.with_suffix(f"{txt.suffix}.gz"),
"br": txt.with_suffix(f"{txt.suffix}.br"),
}
hello[None].write_bytes(HELLO_AIOHTTP)
hello["gzip"].write_bytes(gzip.compress(HELLO_AIOHTTP))
hello["br"].write_bytes(brotli.compress(HELLO_AIOHTTP))
encoding = getattr(request, "param", None)
return hello[encoding] |
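Tests select a compressed variant through indirect parametrization; a sketch of what such a test might look like (the test body is an assumption, reusing the module-level HELLO_AIOHTTP constant the fixture writes):
import gzip
import pathlib
import pytest
@pytest.mark.parametrize("hello_txt", ["gzip"], indirect=True)
def test_gzip_variant(hello_txt: pathlib.Path) -> None:
    # With the "gzip" param the fixture returns the hello.txt.gz path.
    assert hello_txt.suffixes == [".txt", ".gz"]
    assert gzip.decompress(hello_txt.read_bytes()) == HELLO_AIOHTTP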
Extract provider id from provider specification.
:param provider_spec: provider specification can be in the form of the "PROVIDER_ID" or
"apache-airflow-providers-PROVIDER", optionally followed by ">=VERSION".
:return: short provider_id with `.` instead of `-` in case of `apache` and other providers with
`-` in the name. | def get_provider_id(provider_spec: str) -> str:
"""
Extract provider id from provider specification.
:param provider_spec: provider specification can be in the form of the "PROVIDER_ID" or
"apache-airflow-providers-PROVIDER", optionally followed by ">=VERSION".
:return: short provider_id with `.` instead of `-` in case of `apache` and other providers with
`-` in the name.
"""
_provider_id = provider_spec.split(">=")[0]
if _provider_id.startswith("apache-airflow-providers-"):
_provider_id = _provider_id.replace("apache-airflow-providers-", "").replace("-", ".")
return _provider_id |
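Illustrative inputs and outputs, traced from the logic above rather than taken from the source:
assert get_provider_id("apache-airflow-providers-apache-kafka>=1.0.0") == "apache.kafka"
assert get_provider_id("celery>=3.6.0") == "celery"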
Convert provider specification with provider_id to provider requirement.
The requirement can be used when constructing dependencies. It automatically adds pre-release specifier
in case we are building pre-release version of Airflow. This way we can handle the case when airflow
depends on specific version of the provider that has not yet been released - then we release the
pre-release version of provider to PyPI and airflow built in CI, or Airflow pre-release version will
automatically depend on that pre-release version of the provider.
:param provider_spec: provider specification can be in the form of the "PROVIDER_ID" optionally followed
by >=VERSION.
:return: requirement for the provider that can be used as dependency. | def get_provider_requirement(provider_spec: str) -> str:
"""
Convert provider specification with provider_id to provider requirement.
The requirement can be used when constructing dependencies. It automatically adds pre-release specifier
in case we are building pre-release version of Airflow. This way we can handle the case when airflow
depends on specific version of the provider that has not yet been released - then we release the
pre-release version of provider to PyPI and airflow built in CI, or Airflow pre-release version will
automatically depend on that pre-release version of the provider.
:param provider_spec: provider specification can be in the form of the "PROVIDER_ID" optionally followed
by >=VERSION.
:return: requirement for the provider that can be used as dependency.
"""
if ">=" in provider_spec:
# we cannot import `airflow` here directly as it would pull re2 and a number of airflow
# dependencies so we need to read airflow version by matching a regexp
airflow_init_content = (AIRFLOW_ROOT_PATH / "airflow" / "__init__.py").read_text()
airflow_version_pattern = r'__version__ = "(\d+\.\d+\.\d+\S*)"'
airflow_version_match = re.search(airflow_version_pattern, airflow_init_content)
if not airflow_version_match:
raise RuntimeError("Cannot find Airflow version in airflow/__init__.py")
from packaging.version import Version
current_airflow_version = Version(airflow_version_match.group(1))
provider_id, min_version = provider_spec.split(">=")
provider_version = Version(min_version)
if provider_version.is_prerelease and not current_airflow_version.is_prerelease:
# strip pre-release version from the pre-installed provider's version when we are preparing
# the official package
min_version = str(provider_version.base_version)
return f"apache-airflow-providers-{provider_id.replace('.', '-')}>={min_version}"
else:
return f"apache-airflow-providers-{provider_spec.replace('.', '-')}" |
Convert provider specification to extra dependency.
:param provider_requirement: requirement of the provider in the form of apache-airflow-provider-*,
optionally followed by >=VERSION.
:return: extra dependency in the form of apache-airflow[extra] | def convert_to_extra_dependency(provider_requirement: str) -> str:
"""
Convert provider specification to extra dependency.
:param provider_requirement: requirement of the provider in the form of apache-airflow-provider-*,
optionally followed by >=VERSION.
:return: extra dependency in the form of apache-airflow[extra]
"""
# if there is version in dependency - remove it as we do not need it in extra specification
# for editable installation
if ">=" in provider_requirement:
provider_requirement = provider_requirement.split(">=")[0]
extra = provider_requirement.replace("apache-airflow-providers-", "").replace("-", "_").replace(".", "_")
return f"apache-airflow[{extra}]" |
Produce the Python exclusion that should be used - converted from the list of python versions.
:param excluded_python_versions: list of python versions to exclude the dependency for.
:return: python version exclusion string that can be added to dependency in specification. | def get_python_exclusion(excluded_python_versions: list[str]):
"""
Produce the Python exclusion that should be used - converted from the list of python versions.
:param excluded_python_versions: list of python versions to exclude the dependency for.
:return: python version exclusion string that can be added to dependency in specification.
"""
exclusion = ""
if excluded_python_versions:
separator = ";"
for version in excluded_python_versions:
exclusion += f'{separator}python_version != "{version}"'
separator = " and "
return exclusion |
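For example, traced from the loop above (illustrative values):
assert get_python_exclusion([]) == ""
assert (
    get_python_exclusion(["3.9", "3.10"])
    == ';python_version != "3.9" and python_version != "3.10"'
)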
Whether the dependency should be skipped for editable build for current python version.
:param excluded_python_versions: list of excluded python versions.
:return: True if the dependency should be skipped for editable build for the current python version. | def skip_for_editable_build(excluded_python_versions: list[str]) -> bool:
"""
Whether the dependency should be skipped for editable build for current python version.
:param excluded_python_versions: list of excluded python versions.
:return: True if the dependency should be skipped for editable build for the current python version.
"""
current_python_version = f"{sys.version_info.major}.{sys.version_info.minor}"
if current_python_version in excluded_python_versions:
return True
return False |
Expand (potentially nested) env vars.
Repeat and apply `expandvars` and `expanduser` until
interpolation stops having any effect. | def expand_env_var(env_var: str | None) -> str | None:
"""
Expand (potentially nested) env vars.
Repeat and apply `expandvars` and `expanduser` until
interpolation stops having any effect.
"""
if not env_var:
return env_var
while True:
interpolated = os.path.expanduser(os.path.expandvars(str(env_var)))
if interpolated == env_var:
return interpolated
else:
env_var = interpolated |
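A quick illustration of the nested expansion; the environment values are assumptions made for the example:
import os
os.environ["BASE_DIR"] = "~/airflow"
os.environ["DAGS_DIR"] = "$BASE_DIR/dags"
# Pass 1: "$DAGS_DIR"       -> "$BASE_DIR/dags"
# Pass 2: "$BASE_DIR/dags"  -> "~/airflow/dags" -> "/home/<user>/airflow/dags"
# Pass 3: no further change, so the fully expanded path is returned.
print(expand_env_var("$DAGS_DIR"))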