Sergidev committed on
Commit 16161ed · verified · 1 Parent(s): a3a6f96

History feature v1

Files changed (1)
  1. utils.py +8 -72
utils.py CHANGED
@@ -4,6 +4,8 @@ import random
 import numpy as np
 import json
 import torch
+import base64
+from io import BytesIO
 from PIL import Image, PngImagePlugin
 from datetime import datetime
 from dataclasses import dataclass
@@ -18,77 +20,11 @@ from diffusers import (
 
 MAX_SEED = np.iinfo(np.int32).max
 
-def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
-    if randomize_seed:
-        seed = random.randint(0, MAX_SEED)
-    return seed
+# ... (rest of the existing functions remain the same)
 
-def seed_everything(seed: int) -> torch.Generator:
-    torch.manual_seed(seed)
-    torch.cuda.manual_seed_all(seed)
-    np.random.seed(seed)
-    generator = torch.Generator()
-    generator.manual_seed(seed)
-    return generator
+def image_to_base64(image: Image.Image) -> str:
+    buffered = BytesIO()
+    image.save(buffered, format="PNG")
+    return base64.b64encode(buffered.getvalue()).decode()
 
-def parse_aspect_ratio(aspect_ratio: str) -> Optional[Tuple[int, int]]:
-    if aspect_ratio == "Custom":
-        return None
-    width, height = aspect_ratio.split(" x ")
-    return int(width), int(height)
-
-def aspect_ratio_handler(aspect_ratio: str, custom_width: int, custom_height: int) -> Tuple[int, int]:
-    if aspect_ratio == "Custom":
-        return custom_width, custom_height
-    else:
-        width, height = parse_aspect_ratio(aspect_ratio)
-        return width, height
-
-def get_scheduler(scheduler_config: Dict, name: str) -> Optional[Callable]:
-    scheduler_factory_map = {
-        "DPM++ 2M Karras": lambda: DPMSolverMultistepScheduler.from_config(scheduler_config, use_karras_sigmas=True),
-        "DPM++ SDE Karras": lambda: DPMSolverSinglestepScheduler.from_config(scheduler_config, use_karras_sigmas=True),
-        "DPM++ 2M SDE Karras": lambda: DPMSolverMultistepScheduler.from_config(scheduler_config, use_karras_sigmas=True, algorithm_type="sde-dpmsolver++"),
-        "Euler": lambda: EulerDiscreteScheduler.from_config(scheduler_config),
-        "Euler a": lambda: EulerAncestralDiscreteScheduler.from_config(scheduler_config),
-        "DDIM": lambda: DDIMScheduler.from_config(scheduler_config),
-    }
-    return scheduler_factory_map.get(name, lambda: None)()
-
-def free_memory() -> None:
-    torch.cuda.empty_cache()
-    gc.collect()
-
-def common_upscale(samples: torch.Tensor, width: int, height: int, upscale_method: str) -> torch.Tensor:
-    return torch.nn.functional.interpolate(samples, size=(height, width), mode=upscale_method)
-
-def upscale(samples: torch.Tensor, upscale_method: str, scale_by: float) -> torch.Tensor:
-    width = round(samples.shape[3] * scale_by)
-    height = round(samples.shape[2] * scale_by)
-    return common_upscale(samples, width, height, upscale_method)
-
-def preprocess_image_dimensions(width, height):
-    if width % 8 != 0:
-        width = width - (width % 8)
-    if height % 8 != 0:
-        height = height - (height % 8)
-    return width, height
-
-def save_image(image, metadata, output_dir):
-    current_time = datetime.now().strftime("%Y%m%d_%H%M%S")
-    os.makedirs(output_dir, exist_ok=True)
-    filename = f"image_{current_time}.png"
-    filepath = os.path.join(output_dir, filename)
-
-    metadata_str = json.dumps(metadata)
-    info = PngImagePlugin.PngInfo()
-    info.add_text("metadata", metadata_str)
-    image.save(filepath, "PNG", pnginfo=info)
-    return filepath
-
-def is_google_colab():
-    try:
-        import google.colab
-        return True
-    except:
-        return False
+# ... (rest of the existing functions remain the same)
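
The new image_to_base64 helper encodes a PIL image as a base64 PNG string, which a history view can embed directly (for example in a data:image/png;base64,... URI) or keep in a JSON session log. Below is a minimal round-trip sketch, assuming image_to_base64 is importable from utils.py; the base64_to_image inverse is a hypothetical helper for illustration, not part of this commit.

import base64
from io import BytesIO

from PIL import Image

from utils import image_to_base64  # helper added in this commit


def base64_to_image(data: str) -> Image.Image:
    # Hypothetical inverse of image_to_base64: decode the base64 string
    # back into PNG bytes and reopen it as a PIL image.
    return Image.open(BytesIO(base64.b64decode(data)))


# Round trip: encode a generated image for a history entry, then restore it.
img = Image.new("RGB", (64, 64), "white")
encoded = image_to_base64(img)        # base64-encoded PNG string
restored = base64_to_image(encoded)   # back to a PIL.Image
assert restored.size == img.size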