# FaceGAN / app.py
import gradio as gr
import torch
import torch.nn as nn
from matplotlib import pyplot as plt
from PIL import Image
import io
class Generator(nn.Module):
    """Fully connected generator mapping a z_dim-dimensional latent vector to a flattened 48x48 face."""

    def __init__(self):
        super(Generator, self).__init__()
        self.model = nn.Sequential(
            nn.Linear(z_dim, 256),
            nn.LeakyReLU(0.2),
            nn.Linear(256, 512),
            nn.LeakyReLU(0.2),
            nn.Linear(512, 1024),
            nn.LeakyReLU(0.2),
            nn.Linear(1024, 2304)  # 2304 = 48 * 48 pixels
        )

    def forward(self, x):
        return self.model(x)
device = "cpu"
generator = Generator()
generator.load_state_dict(torch.load('generator_model.pt'))
num_faces_to_generate = 10
z_dim = 13
ethnicity_map = {'White': 0, 'Black': 1, 'Asian': 2, 'Indian': 3, 'Other': 4}
gender_map = {'Male': 0, 'Female': 1}
def generate_faces(age, ethnicity, gender):
    """Generate a grid of faces conditioned on the requested age, ethnicity and gender."""
    random_z = torch.randn(num_faces_to_generate, z_dim).to(device)
    # Condition the first three latent dimensions on the selected attributes.
    random_z[:, 0] = age
    random_z[:, 1] = ethnicity_map[ethnicity]
    random_z[:, 2] = gender_map[gender]
    # Keep the remaining dimensions random so each face in the batch differs.
    random_z[:, 3:] = torch.randn(num_faces_to_generate, z_dim - 3).to(device)
    with torch.no_grad():
        generated_faces = generator(random_z)
    generated_faces_np = generated_faces.cpu().detach().numpy()
    generated_faces_np = generated_faces_np.reshape(-1, 48, 48)
    img = plot_images(generated_faces_np)
    return img
def plot_images(images):
    """Arrange the generated faces in a grid and return the grid as a PIL image."""
    num_cols = 5
    num_rows = (len(images) - 1) // num_cols + 1
    fig, axs = plt.subplots(num_rows, num_cols, figsize=(num_cols * 2, num_rows * 2))
    for i, image in enumerate(images):
        axs[i // num_cols][i % num_cols].imshow(image, cmap='gray')
        axs[i // num_cols][i % num_cols].axis('off')
    plt.tight_layout()
    buf = io.BytesIO()
    fig.savefig(buf)
    buf.seek(0)
    img = Image.open(buf)
    plt.close(fig)  # free the figure so repeated requests do not leak memory
    return img
title = "FaceGAN by Q-bert"
description = """
## FaceGAN - Human Face Generation with GANs
![FaceGAN](https://example.com/facegan_image.jpg)
### Description
FaceGAN is a Generative Adversarial Network (GAN) model that generates human faces with varying attributes, including age, ethnicity, and gender. It was trained on a dataset of real human faces and produces 48x48 grayscale synthetic images.
This project wraps FaceGAN in an interactive web interface. Users can generate custom faces by selecting an age, an ethnicity, and a gender; the remaining latent dimensions are sampled randomly, so every generation yields new variations.
### How It Works
FaceGAN consists of two main components: a **Generator** and a **Discriminator**. The Generator produces synthetic face images from random noise combined with the user-defined attributes, while the Discriminator receives both generated and real face images and learns to tell them apart. The two networks are trained adversarially: the Generator improves at deceiving the Discriminator, and the Discriminator improves at distinguishing real from fake.
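For illustration only, here is a minimal sketch of one adversarial training step in the style described above. The Discriminator architecture, optimizers, and loss are assumptions; the training code actually used to produce the `generator_model.pt` checkpoint is not part of this Space and may differ.
```python
import torch
import torch.nn as nn

# Assumes the Generator instance `generator` and `z_dim` from app.py are in scope.
# Hypothetical Discriminator mirroring the Generator (assumed architecture).
discriminator = nn.Sequential(
    nn.Linear(2304, 512), nn.LeakyReLU(0.2),
    nn.Linear(512, 256), nn.LeakyReLU(0.2),
    nn.Linear(256, 1), nn.Sigmoid(),
)

criterion = nn.BCELoss()
opt_g = torch.optim.Adam(generator.parameters(), lr=2e-4)
opt_d = torch.optim.Adam(discriminator.parameters(), lr=2e-4)

def train_step(real_faces):  # real_faces: tensor of shape (batch, 2304)
    batch = real_faces.size(0)
    real_labels = torch.ones(batch, 1)
    fake_labels = torch.zeros(batch, 1)

    # Discriminator step: label real faces 1 and generated faces 0.
    fake_faces = generator(torch.randn(batch, z_dim))
    d_loss = criterion(discriminator(real_faces), real_labels) + \
             criterion(discriminator(fake_faces.detach()), fake_labels)
    opt_d.zero_grad()
    d_loss.backward()
    opt_d.step()

    # Generator step: try to make the Discriminator output 1 for fakes.
    g_loss = criterion(discriminator(fake_faces), real_labels)
    opt_g.zero_grad()
    g_loss.backward()
    opt_g.step()
```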
### Features
- Generate synthetic human faces conditioned on age, ethnicity, and gender.
- Set the age slider to control the apparent age of the generated faces.
- Choose an ethnicity to influence the appearance of the generated faces.
- Select a gender for the generated faces.
- The unconstrained latent dimensions are re-sampled on every run, so each batch contains unique face variations.
### Installation
1. Clone this repository to your local machine.
2. Install the required dependencies listed in `requirements.txt`.
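The exact package versions pinned for this Space are not shown here, but at a minimum `requirements.txt` must cover the libraries imported by `app.py`:
```
gradio
torch
matplotlib
Pillow
```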
### Usage
1. Run the application using `python app.py`.
2. Open the local URL that Gradio prints in the terminal (by default `http://127.0.0.1:7860`).
3. Customize the face attributes using the provided controls.
4. Observe the generated faces based on your selected attributes.
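As an alternative to the web UI, the trained Generator can also be driven directly from Python. A minimal sketch, assuming the `generator` instance and latent layout defined in `app.py` are in scope:
```python
import torch

z = torch.randn(10, 13)   # 10 faces, 13-dimensional latent vectors
z[:, 0] = 25              # age
z[:, 1] = 2               # ethnicity index ('Asian' in ethnicity_map)
z[:, 2] = 0               # gender index ('Male' in gender_map)
with torch.no_grad():
    faces = generator(z).reshape(-1, 48, 48)  # 48x48 grayscale faces
```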
### Contributing
Contributions to this project are welcome! If you find any issues or want to add new features, feel free to open an issue or submit a pull request.
### License
This project is licensed under the [MIT License](https://opensource.org/licenses/MIT).
### Credits
The FaceGAN model used in this project is based on the work by [Talha Rüzgar Akkuş](https://www.linkedin.com/in/talha-r%C3%BCzgar-akku%C5%9F-1b5457264/).
### Disclaimer
The generated faces in this application are entirely synthetic and do not represent real individuals. The application is for educational and entertainment purposes only. The creators of this application are not responsible for any misuse or misrepresentation of the generated content.
"""
iface = gr.Interface(
    fn=generate_faces,
    inputs=[
        gr.inputs.Slider(minimum=0, maximum=100, label='Age'),
        gr.inputs.Dropdown(choices=['White', 'Black', 'Asian', 'Indian', 'Other'], label='Ethnicity'),
        gr.inputs.Radio(choices=['Male', 'Female'], label='Gender'),
    ],
    outputs=gr.outputs.Image(type='pil'),
    title=title,
    description=description,
)
iface.launch()