Abhinowww committed on
Commit 0e46c31 · 1 Parent(s): 5a6aa4b

Update app.py
Files changed (1)
  1. app.py +7 -37
app.py CHANGED

@@ -81,7 +81,7 @@ generatorvoldemort = deepcopy(original_generator)
 
 generatorpushpa = deepcopy(original_generator)
 
-# generatorcaitlyn = deepcopy(original_generator)
+generatorgiga = deepcopy(original_generator)
 
 # generatoryasuho = deepcopy(original_generator)
 
@@ -103,8 +103,6 @@ transform = transforms.Compose(
 )
 
 
-
-
 modeljoker = hf_hub_download(repo_id="Abhinowww/Capstone", filename="JokerEightHundredFalse.pt")
 ckptjoker = torch.load(modeljoker, map_location=lambda storage, loc: storage)
 generatorjoker.load_state_dict(ckptjoker, strict=False)
@@ -119,40 +117,12 @@ modelpushpa = hf_hub_download(repo_id="Abhinowww/Capstone", filename="PushpaFour
 ckptpushpa = torch.load(modelpushpa, map_location=lambda storage, loc: storage)
 generatorpushpa.load_state_dict(ckptpushpa, strict=False)
 
+modelgiga = hf_hub_download(repo_id="Abhinowww/Capstone", filename="GigachadFourHundredFalse.pt")
+ckptgiga = torch.load(modelgiga, map_location=lambda storage, loc: storage)
+generatorgiga.load_state_dict(ckptgiga, strict=False)
 
-# modelcaitlyn = hf_hub_download(repo_id="akhaliq/jojogan-arcane", filename="arcane_caitlyn_preserve_color.pt")
-
-# ckptcaitlyn = torch.load(modelcaitlyn, map_location=lambda storage, loc: storage)
-# generatorcaitlyn.load_state_dict(ckptcaitlyn["g"], strict=False)
-
-
-# modelyasuho = hf_hub_download(repo_id="akhaliq/JoJoGAN-jojo", filename="jojo_yasuho_preserve_color.pt")
-
-# ckptyasuho = torch.load(modelyasuho, map_location=lambda storage, loc: storage)
-# generatoryasuho.load_state_dict(ckptyasuho["g"], strict=False)
-
-
-# model_arcane_multi = hf_hub_download(repo_id="akhaliq/jojogan-arcane", filename="arcane_multi_preserve_color.pt")
-
-# ckptarcanemulti = torch.load(model_arcane_multi, map_location=lambda storage, loc: storage)
-# generatorarcanemulti.load_state_dict(ckptarcanemulti["g"], strict=False)
 
 
-# modelart = hf_hub_download(repo_id="akhaliq/jojo-gan-art", filename="art.pt")
-
-# ckptart = torch.load(modelart, map_location=lambda storage, loc: storage)
-# generatorart.load_state_dict(ckptart["g"], strict=False)
-
-
-# modelSpiderverse = hf_hub_download(repo_id="akhaliq/jojo-gan-spiderverse", filename="Spiderverse-face-500iters-8face.pt")
-
-# ckptspider = torch.load(modelSpiderverse, map_location=lambda storage, loc: storage)
-# generatorspider.load_state_dict(ckptspider["g"], strict=False)
-
-# modelSketch = hf_hub_download(repo_id="akhaliq/jojogan-sketch", filename="sketch_multi.pt")
-
-# ckptsketch = torch.load(modelSketch, map_location=lambda storage, loc: storage)
-# generatorsketch.load_state_dict(ckptsketch["g"], strict=False)
 
 def inference(img, model):
     img.save('out.jpg')
@@ -168,9 +138,9 @@ def inference(img, model):
     elif model == 'Pushpa':
         with torch.no_grad():
            my_sample = generatorpushpa(my_w, input_is_latent=True)
-    # elif model == 'Caitlyn':
-    #     with torch.no_grad():
-    #         my_sample = generatorcaitlyn(my_w, input_is_latent=True)
+    elif model == 'Gigachad':
+        with torch.no_grad():
+            my_sample = generatorgiga(my_w, input_is_latent=True)
     # elif model == 'Yasuho':
     #     with torch.no_grad():
     #         my_sample = generatoryasuho(my_w, input_is_latent=True)
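The commit follows the same recipe app.py already uses for "Joker" and "Pushpa": clone the base generator, download the fine-tuned checkpoint from the Abhinowww/Capstone repo, load its state dict non-strictly, and add an elif branch in inference(). Below is a minimal sketch of that recipe, assuming original_generator, my_w, and the generator call signature stay as they appear in the diff; the helper load_style_generator and the STYLE_CHECKPOINTS table are hypothetical names introduced here, not part of the app.

from copy import deepcopy

import torch
from huggingface_hub import hf_hub_download


def load_style_generator(base_generator, repo_id, filename):
    """Clone the base generator and load a fine-tuned style checkpoint into it."""
    generator = deepcopy(base_generator)
    ckpt_path = hf_hub_download(repo_id=repo_id, filename=filename)
    # Map tensors to CPU so the checkpoint loads regardless of where it was saved.
    ckpt = torch.load(ckpt_path, map_location=lambda storage, loc: storage)
    generator.load_state_dict(ckpt, strict=False)
    return generator


# Checkpoints whose full filenames are visible in this diff; Pushpa's filename
# is truncated in the hunk header above, so it is left out rather than guessed.
STYLE_CHECKPOINTS = {
    "Joker": "JokerEightHundredFalse.pt",
    "Gigachad": "GigachadFourHundredFalse.pt",
}

# Usage sketch, mirroring the elif chain in inference():
#   generators = {name: load_style_generator(original_generator, "Abhinowww/Capstone", fn)
#                 for name, fn in STYLE_CHECKPOINTS.items()}
#   with torch.no_grad():
#       my_sample = generators[model](my_w, input_is_latent=True)

With a table like this, adding a style such as Gigachad becomes one new dict entry instead of the three separate edits (deepcopy, checkpoint load, elif branch) this diff has to make.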