import os

# PyOpenGL selects its windowing backend when it is first imported, so the EGL
# platform must be set before importing pyrender (which pulls in PyOpenGL) for
# headless offscreen rendering.
os.environ["PYOPENGL_PLATFORM"] = "egl"

import pickle

import gradio as gr
import matplotlib as mpl
import matplotlib.cm as cm
import numpy as np
import pyrender
from datasets import load_dataset
from plaid.containers.sample import Sample
from trimesh import Trimesh

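# Each entry of the Hugging Face dataset stores a pickled PLAID Sample under
# the "sample" key.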
hf_dataset = load_dataset("PLAID-datasets/Tensile2d", split="all_samples")

# Number of training samples selectable in the demo
nb_samples = 500

# Fields available for visualization
field_names_train = ["sig11", "sig22", "sig12", "U1", "U2", "q"]

_HEADER_ = '''
<h2><b>Visualization demo of the <a href='https://huggingface.co/datasets/PLAID-datasets/Tensile2d' target='_blank'>Tensile2d dataset</a></b></h2>
'''


def round_num(num) -> str:
    # Format a number with 3 significant digits for display
    return '%s' % float('%.3g' % num)

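# Gradio callback: build the info text and render an image for one sample.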
def sample_info(sample_id_str, fieldn):

    # Deserialize the requested sample into a PLAID Sample object
    sample_ = hf_dataset[int(sample_id_str)]["sample"]
    plaid_sample = Sample.model_validate(pickle.loads(sample_))

    nodes = plaid_sample.get_nodes()
    field = plaid_sample.get_field(fieldn)

    # Pad 2D node coordinates with a zero z-component for 3D rendering
    if nodes.shape[1] == 2:
        nodes__ = np.zeros((nodes.shape[0], nodes.shape[1] + 1))
        nodes__[:, :-1] = nodes
        nodes = nodes__

    triangles = plaid_sample.get_elements()['TRI_3']
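
    # Color the mesh vertices by the selected field with the coolwarm colormap;
    # use a fixed color when the field is identically zero.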
    if np.linalg.norm(field) > 0:
        norm = mpl.colors.Normalize(vmin=np.min(field), vmax=np.max(field))
        cmap = cm.coolwarm
        m = cm.ScalarMappable(norm=norm, cmap=cmap)
        vertex_colors = m.to_rgba(field)[:, :3]
    else:
        # All three channels are overwritten below, so start from zeros
        vertex_colors = np.zeros((field.shape[0], 3))
        vertex_colors[:, 0] = 0.2298057
        vertex_colors[:, 1] = 0.01555616
        vertex_colors[:, 2] = 0.15023281

    trimesh = Trimesh(vertices=nodes, faces=triangles)
    trimesh.visual.vertex_colors = vertex_colors
    mesh = pyrender.Mesh.from_trimesh(trimesh, smooth=False)
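
    # Assemble a minimal pyrender scene: the mesh, a directional light and a
    # perspective camera placed 3 units along +z.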
    scene = pyrender.Scene(ambient_light=[.1, .1, .3], bg_color=[0, 0, 0])
    camera = pyrender.PerspectiveCamera(yfov=np.pi / 3.0)
    light = pyrender.DirectionalLight(color=[1, 1, 1], intensity=1000.)

    scene.add(mesh, pose=np.eye(4))
    scene.add(light, pose=np.eye(4))
    scene.add(camera, pose=[[1, 0, 0, 0],
                            [0, 1, 0, 0],
                            [0, 0, 1, 3],
                            [0, 0, 0, 1]])

    # Render the scene offscreen to a 1024x1024 RGB image, then release the
    # GL context so a fresh renderer can be created on the next call
    r = pyrender.OffscreenRenderer(1024, 1024)
    color, _ = r.render(scene)
    r.delete()
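
    # Build a plain-text summary of the sample: input/output scalars, mesh
    # size and field names taken from the dataset description.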
    str__ = f"Training sample {sample_id_str}\n"
    str__ += str(plaid_sample) + "\n"

    if len(hf_dataset.description['in_scalars_names']) > 0:
        str__ += "\ninput scalars:\n"
        for sname in hf_dataset.description['in_scalars_names']:
            str__ += f"- {sname}: {round_num(plaid_sample.get_scalar(sname))}\n"
    if len(hf_dataset.description['out_scalars_names']) > 0:
        str__ += "\noutput scalars:\n"
        for sname in hf_dataset.description['out_scalars_names']:
            str__ += f"- {sname}: {round_num(plaid_sample.get_scalar(sname))}\n"

    str__ += f"\n\nMesh number of nodes: {nodes.shape[0]}\n"

    if len(hf_dataset.description['in_fields_names']) > 0:
        str__ += "\ninput fields:\n"
        for fname in hf_dataset.description['in_fields_names']:
            str__ += f"- {fname}\n"
    if len(hf_dataset.description['out_fields_names']) > 0:
        str__ += "\noutput fields:\n"
        for fname in hf_dataset.description['out_fields_names']:
            str__ += f"- {fname}\n"

    return str__, color
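
# Gradio UI: a slider selects the training sample id and a dropdown selects
# the field to display; changing either input re-runs sample_info.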
if __name__ == "__main__":

    with gr.Blocks() as demo:
        gr.Markdown(_HEADER_)
        with gr.Row(variant="panel"):
            with gr.Column():
                d1 = gr.Slider(0, nb_samples - 1, value=0, label="Training sample id", info="Choose between 0 and " + str(nb_samples - 1))
                output1 = gr.Text(label="Training sample info")
            with gr.Column():
                d2 = gr.Dropdown(field_names_train, value=field_names_train[0], label="Field name")
                output2 = gr.Image(label="Training sample visualization")

        d1.input(sample_info, [d1, d2], [output1, output2])
        d2.input(sample_info, [d1, d2], [output1, output2])

    demo.launch()