Update app.py
app.py
CHANGED
```diff
@@ -4,6 +4,7 @@ import torch.nn as nn
 from matplotlib import pyplot as plt
 from PIL import Image
 import io
+
 class Generator(nn.Module):
     def __init__(self):
         super(Generator, self).__init__()
@@ -19,6 +20,7 @@ class Generator(nn.Module):
 
     def forward(self, x):
         return self.model(x)
+
 device = "cpu"
 generator = Generator()
 generator.load_state_dict(torch.load('generator_model.pt', map_location=torch.device('cpu')))
@@ -28,24 +30,17 @@ ethnicity_map = {'White': 0, 'Black': 1, 'Asian': 2, 'Indian': 3, 'Other': 4}
 gender_map = {'Male': 0, 'Female': 1}
 
 def generate_faces(age, ethnicity, gender):
-
     random_z = torch.randn(num_faces_to_generate, z_dim)
-
-
     random_z[:, 0] = age
     random_z[:, 1] = ethnicity_map[ethnicity]
     random_z[:, 2] = gender_map[gender]
-
-
     random_z[:, 3:] = torch.randn(num_faces_to_generate, z_dim - 3)
+
     with torch.no_grad():
         generated_faces = generator(random_z)
 
-
     generated_faces_np = generated_faces.cpu().detach().numpy()
-
     generated_faces_np = generated_faces_np.reshape(-1, 48, 48)
-
     img = plot_images(generated_faces_np)
     return img
 
@@ -61,66 +56,51 @@ def plot_images(images):
     fig.savefig(buf)
     buf.seek(0)
     img = Image.open(buf)
-
     return img
 
 title = "FaceGAN by Q-bert"
-description =
+description = """
 ## FaceGAN - Human Face Generation with GANs
-
-
 ### Description
-
 FaceGAN is a powerful Generative Adversarial Network (GAN) model designed to generate realistic human faces with varying attributes, including age, ethnicity, and gender. This model has been extensively trained on a diverse dataset of real human faces, enabling it to produce high-quality synthetic images.
-
 The purpose of this project is to create an interactive web interface for FaceGAN. This interface allows users to explore the capabilities of the model by generating custom human faces with specific attributes. Users can adjust various parameters to influence the output, such as age range, ethnicity, gender, image resolution, noise levels, and latent space values.
-
 ### How It Works
-
 FaceGAN consists of two main components: a **Generator** and a **Discriminator**. The Generator generates synthetic face images based on random noise and user-defined attributes. The Discriminator evaluates these generated images and real human face images to distinguish between real and fake. The two components are trained in an adversarial manner, where the Generator tries to improve its ability to deceive the Discriminator, and the Discriminator tries to improve its ability to distinguish real from fake.
-
 ### Features
-
 - Generate realistic human faces with different attributes (age, ethnicity, gender).
 - Adjust age range to control the apparent age of the generated faces.
 - Choose ethnicity to influence the racial appearance of the generated faces.
 - Select gender to determine the gender representation of the generated faces.
 - Fine-tune image resolution and noise levels for more precise results.
 - Explore the latent space by adjusting latent space values for unique face variations.
-
 ### Installation
-
 1. Clone this repository to your local machine.
 2. Install the required dependencies listed in `requirements.txt`.
-
 ### Usage
-
 1. Run the application using `python app.py`.
 2. Access the web interface through your browser at `http://localhost:5000`.
 3. Customize the face attributes using the provided controls.
 4. Observe the generated faces based on your selected attributes.
-
 ### Contributing
-
 Contributions to this project are welcome! If you find any issues or want to add new features, feel free to open an issue or submit a pull request.
-
 ### License
-
 This project is licensed under the [MIT License](https://opensource.org/licenses/MIT).
-
 ### Credits
-
 The FaceGAN model used in this project is based on the work by [Talha Rüzgar Akkuş](https://www.linkedin.com/in/talha-r%C3%BCzgar-akku%C5%9F-1b5457264/).
-
 ### Disclaimer
-
 The generated faces in this application are entirely synthetic and do not represent real individuals. The application is for educational and entertainment purposes only. The creators of this application are not responsible for any misuse or misrepresentation of the generated content.
 """
-
-
-
-
-
-
-
+
+iface = gr.Interface(
+    fn=generate_faces,
+    inputs=[
+        gr.Slider(minimum=0, maximum=100, label='Age'),
+        gr.Dropdown(choices=['White', 'Black', 'Asian', 'Indian', 'Other'], label='Ethnicity'),
+        gr.Radio(choices=['Male', 'Female'], label='Gender')
+    ],
+    description=description,
+    title=title,
+    outputs=gr.Image(type='pil')
+)
+
 iface.launch()
```
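The hunks above reference a few names that are defined in parts of app.py this diff does not show: `gr`, `num_faces_to_generate`, and `z_dim`. The sketch below is a guess at that surrounding context (the concrete values are assumptions, not taken from this commit), followed by a hypothetical direct call to `generate_faces` that bypasses the Gradio UI:

```python
# Assumed context (not shown in this diff): imports and latent-space constants
# that the changed hunks rely on. The values here are illustrative guesses.
import gradio as gr          # used by the new gr.Interface block
import torch
import torch.nn as nn

z_dim = 100                  # assumed latent dimension; must match the trained Generator
num_faces_to_generate = 16   # assumed number of faces sampled per request

# Hypothetical direct call once app.py is imported as a module:
#   img = generate_faces(age=25, ethnicity='Asian', gender='Female')
#   img.save('faces.png')
```

Because `generate_faces` writes the raw age and the integer ethnicity/gender codes straight into the first three latent dimensions, whatever `z_dim` the real app.py uses must be at least 3; the remaining dimensions are resampled from a standard normal on every call.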
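The "How It Works" section of the embedded description explains adversarial training only in prose; the training code itself is not part of this commit. Purely as an illustrative sketch, with invented toy shapes and hyperparameters rather than FaceGAN's real ones, one alternating discriminator/generator update in PyTorch looks roughly like this:

```python
import torch
import torch.nn as nn

# Toy stand-ins; the real FaceGAN generator/discriminator are not shown in this diff.
z_dim = 100
generator = nn.Sequential(nn.Linear(z_dim, 48 * 48), nn.Tanh())
discriminator = nn.Sequential(nn.Linear(48 * 48, 1), nn.Sigmoid())

g_opt = torch.optim.Adam(generator.parameters(), lr=2e-4)
d_opt = torch.optim.Adam(discriminator.parameters(), lr=2e-4)
bce = nn.BCELoss()

real = torch.rand(16, 48 * 48)                        # stand-in for a batch of real 48x48 faces
ones, zeros = torch.ones(16, 1), torch.zeros(16, 1)

# Discriminator step: push real images toward label 1 and generated images toward 0.
fake = generator(torch.randn(16, z_dim)).detach()
d_loss = bce(discriminator(real), ones) + bce(discriminator(fake), zeros)
d_opt.zero_grad()
d_loss.backward()
d_opt.step()

# Generator step: try to make the discriminator score generated images as real (label 1).
fake = generator(torch.randn(16, z_dim))
g_loss = bce(discriminator(fake), ones)
g_opt.zero_grad()
g_loss.backward()
g_opt.step()
```

At inference time only the trained Generator is needed, which is why app.py restores it from `generator_model.pt` and never instantiates a Discriminator.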