Abhinowww committed
Commit 4474065 · 1 Parent(s): 627f074

Update app.py

Files changed (1)
  1. app.py +16 -9
app.py CHANGED
@@ -83,9 +83,9 @@ generatorpushpa = deepcopy(original_generator)
 
 generatorgiga = deepcopy(original_generator)
 
-# generatoryasuho = deepcopy(original_generator)
+generatorsketchtrue = deepcopy(original_generator)
 
-# generatorarcanemulti = deepcopy(original_generator)
+generatorsketchfalse = deepcopy(original_generator)
 
 # generatorart = deepcopy(original_generator)
 
@@ -121,6 +121,13 @@ modelgiga = hf_hub_download(repo_id="Abhinowww/Capstone", filename="GigachadFour
 ckptgiga = torch.load(modelgiga, map_location=lambda storage, loc: storage)
 generatorgiga.load_state_dict(ckptgiga, strict=False)
 
+modelsketchtrue = hf_hub_download(repo_id="Abhinowww/Capstone", filename="PushpaFourHundredFalse.pt")
+ckptsketchtrue = torch.load(modelsketchtrue, map_location=lambda storage, loc: storage)
+generatorsketchtrue.load_state_dict(ckptsketchtrue, strict=False)
+
+modelsketchfalse = hf_hub_download(repo_id="Abhinowww/Capstone", filename="GigachadFourHundredFalse.pt")
+ckptsketchfalse = torch.load(modelsketchfalse, map_location=lambda storage, loc: storage)
+generatorsketchfalse.load_state_dict(ckptsketchfalse, strict=False)
 
 
 
@@ -141,12 +148,12 @@ def inference(img, model):
     elif model == 'Gigachad':
         with torch.no_grad():
             my_sample = generatorgiga(my_w, input_is_latent=True)
-    # elif model == 'Yasuho':
-    #     with torch.no_grad():
-    #         my_sample = generatoryasuho(my_w, input_is_latent=True)
-    # elif model == 'Arcane Multi':
-    #     with torch.no_grad():
-    #         my_sample = generatorarcanemulti(my_w, input_is_latent=True)
+    elif model == 'Sketch':
+        with torch.no_grad():
+            my_sample = generatorsketchfalse(my_w, input_is_latent=True)
+    elif model == 'Sketch Preserve':
+        with torch.no_grad():
+            my_sample = generatorsketchtrue(my_w, input_is_latent=True)
     # elif model == 'Art':
     #     with torch.no_grad():
     #         my_sample = generatorart(my_w, input_is_latent=True)
@@ -173,4 +180,4 @@ description = "Capstone Project. To use it, simply upload your image, or click o
 # css_code='body{background-image:url("https://picsum.photos/seed/picsum/200/300");}'
 # gr.Interface(lambda x:x, "textbox", "textbox", css=css_code).launch(debug=True)
 
-gr.Interface(inference, [gr.inputs.Image(type="pil"),gr.inputs.Dropdown(choices=['Joker', 'Voldemort', 'Pushpa', 'Gigachad'], type="value", default='Joker', label="Model")], gr.outputs.Image(type="pil"),title=title,description=description,allow_flagging=False,allow_screenshot=False).launch()
+gr.Interface(inference, [gr.inputs.Image(type="pil"),gr.inputs.Dropdown(choices=['Joker', 'Voldemort', 'Pushpa', 'Gigachad', 'Sketch', 'Sketch Preserve'], type="value", default='Joker', label="Model")], gr.outputs.Image(type="pil"),title=title,description=description,allow_flagging=False,allow_screenshot=False).launch()
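
Note: the two new dropdown entries reuse the download-and-load pattern already present in app.py. A minimal sketch of that pattern is below, assuming original_generator is the pretrained generator constructed earlier in the script (its class and construction are not part of this diff).

# Sketch of the checkpoint-loading pattern this commit repeats for each style.
# Assumption: `original_generator` is the pretrained generator built earlier in app.py.
from copy import deepcopy

import torch
from huggingface_hub import hf_hub_download

# Clone the base generator so each dropdown choice gets its own weights.
generatorsketchfalse = deepcopy(original_generator)

# Fetch the fine-tuned checkpoint from the Hub and load it onto CPU storage.
ckpt_path = hf_hub_download(repo_id="Abhinowww/Capstone", filename="GigachadFourHundredFalse.pt")
ckpt = torch.load(ckpt_path, map_location=lambda storage, loc: storage)

# strict=False tolerates key mismatches between the checkpoint and the module.
generatorsketchfalse.load_state_dict(ckpt, strict=False)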