sayakpaul HF staff committed on
Commit
4eeb10d
1 Parent(s): 1fad965

multiple improvements.

Browse files
Files changed (2) hide show
  1. __pycache__/app.cpython-39.pyc +0 -0
  2. app.py +26 -16
__pycache__/app.cpython-39.pyc ADDED
Binary file (5.05 kB). View file
 
app.py CHANGED
@@ -2,7 +2,14 @@ from huggingface_hub import model_info, hf_hub_download
2
  import gradio as gr
3
  import json
4
 
5
- COMPONENT_FILTER = ["scheduler", "safety_checker", "feature_extractor", "tokenizer", "tokenizer_2"]
 
 
 
 
 
 
 
6
 
7
 
8
  def format_size(num: int) -> str:
@@ -48,14 +55,17 @@ def get_component_wise_memory(pipeline_id, token=None, variant=None, revision=No
48
  index_dict = load_model_index(pipeline_id, token=token, revision=revision)
49
 
50
  # Check if all the concerned components have the checkpoints in the requested "variant" and "extension".
51
- index_filter = COMPONENT_FILTER.copy()
52
- index_filter.extend(["_class_name", "_diffusers_version", "force_zeros_for_empty_prompt", "add_watermarker"])
53
  for current_component in index_dict:
54
- if current_component not in index_filter:
 
 
 
 
55
  current_component_fileobjs = list(filter(lambda x: current_component in x.rfilename, files_in_repo))
56
  if current_component_fileobjs:
57
  current_component_filenames = [fileobj.rfilename for fileobj in current_component_fileobjs]
58
- condition = (
59
  lambda filename: extension in filename and variant in filename
60
  if variant is not None
61
  else lambda filename: extension in filename
@@ -105,29 +115,26 @@ def get_component_wise_memory(pipeline_id, token=None, variant=None, revision=No
105
  selected_file = current_file
106
 
107
  if selected_file is not None:
108
- print(selected_file.rfilename)
109
  component_wise_memory[component] = selected_file.size
110
 
111
  return format_output(pipeline_id, component_wise_memory)
112
 
113
 
114
- gr.Interface(
115
  title="Compute component-wise memory of a 🧨 Diffusers pipeline.",
116
- description="Sizes will be reported in GB. Pipelines containing text encoders with sharded checkpoints are also supported (PixArt-Alpha, for example) 🤗",
 
117
  fn=get_component_wise_memory,
118
  inputs=[
119
  gr.components.Textbox(lines=1, label="pipeline_id", info="Example: runwayml/stable-diffusion-v1-5"),
120
  gr.components.Textbox(lines=1, label="hf_token", info="Pass this in case of private repositories."),
121
- gr.components.Dropdown(
122
- [
123
- "fp32",
124
- "fp16",
125
- ],
126
  label="variant",
127
  info="Precision to use for calculation.",
128
  ),
129
  gr.components.Textbox(lines=1, label="revision", info="Repository revision to use."),
130
- gr.components.Dropdown(
131
  [".bin", ".safetensors"],
132
  label="extension",
133
  info="Extension to use.",
@@ -138,7 +145,10 @@ gr.Interface(
138
  ["runwayml/stable-diffusion-v1-5", None, "fp32", None, ".safetensors"],
139
  ["stabilityai/stable-diffusion-xl-base-1.0", None, "fp16", None, ".safetensors"],
140
  ["PixArt-alpha/PixArt-XL-2-1024-MS", None, "fp32", None, ".safetensors"],
 
 
141
  ],
142
  theme=gr.themes.Soft(),
143
- allow_flagging=False,
144
- ).launch(show_error=True)
 
 
2
  import gradio as gr
3
  import json
4
 
5
+ COMPONENT_FILTER = [
6
+ "scheduler",
7
+ "feature_extractor",
8
+ "tokenizer",
9
+ "tokenizer_2",
10
+ "_class_name",
11
+ "_diffusers_version",
12
+ ]
13
 
14
 
15
  def format_size(num: int) -> str:
 
55
  index_dict = load_model_index(pipeline_id, token=token, revision=revision)
56
 
57
  # Check if all the concerned components have the checkpoints in the requested "variant" and "extension".
58
+ print(f"Index dict: {index_dict}")
 
59
  for current_component in index_dict:
60
+ if (
61
+ current_component not in COMPONENT_FILTER
62
+ and isinstance(index_dict[current_component], list)
63
+ and len(index_dict[current_component]) == 2
64
+ ):
65
  current_component_fileobjs = list(filter(lambda x: current_component in x.rfilename, files_in_repo))
66
  if current_component_fileobjs:
67
  current_component_filenames = [fileobj.rfilename for fileobj in current_component_fileobjs]
68
+ condition = ( # noqa: E731
69
  lambda filename: extension in filename and variant in filename
70
  if variant is not None
71
  else lambda filename: extension in filename
 
115
  selected_file = current_file
116
 
117
  if selected_file is not None:
 
118
  component_wise_memory[component] = selected_file.size
119
 
120
  return format_output(pipeline_id, component_wise_memory)
121
 
122
 
123
+ with gr.Interface(
124
  title="Compute component-wise memory of a 🧨 Diffusers pipeline.",
125
+ description="Pipelines containing text encoders with sharded checkpoints are also supported"
126
+ " (PixArt-Alpha, for example) 🤗",
127
  fn=get_component_wise_memory,
128
  inputs=[
129
  gr.components.Textbox(lines=1, label="pipeline_id", info="Example: runwayml/stable-diffusion-v1-5"),
130
  gr.components.Textbox(lines=1, label="hf_token", info="Pass this in case of private repositories."),
131
+ gr.components.Radio(
132
+ ["fp32", "fp16", "bf16"],
 
 
 
133
  label="variant",
134
  info="Precision to use for calculation.",
135
  ),
136
  gr.components.Textbox(lines=1, label="revision", info="Repository revision to use."),
137
+ gr.components.Radio(
138
  [".bin", ".safetensors"],
139
  label="extension",
140
  info="Extension to use.",
 
145
  ["runwayml/stable-diffusion-v1-5", None, "fp32", None, ".safetensors"],
146
  ["stabilityai/stable-diffusion-xl-base-1.0", None, "fp16", None, ".safetensors"],
147
  ["PixArt-alpha/PixArt-XL-2-1024-MS", None, "fp32", None, ".safetensors"],
148
+ ["stabilityai/stable-cascade", None, "bf16", None, ".safetensors"],
149
+ ["Deci/DeciDiffusion-v2-0", None, "fp32", None, ".safetensors"],
150
  ],
151
  theme=gr.themes.Soft(),
152
+ allow_flagging="never",
153
+ ) as demo:
154
+ demo.launch(show_error=True)