# Code source: Gaël Varoquaux
# License: BSD 3 clause

import numpy as np
import matplotlib
matplotlib.use('Agg')  # non-interactive backend so figures render off-screen
import matplotlib.pyplot as plt
from sklearn import svm
import gradio as gr

kernels = ["linear", "poly", "rbf"]
font1 = {'family': 'Consolas', 'size': 20}
cmaps = {'Set1': plt.cm.Set1,
         'Set2': plt.cm.Set2,
         'Set3': plt.cm.Set3,
         'tab10': plt.cm.tab10,
         'tab20': plt.cm.tab20}


# fit the model and plot its decision surface
def clf_kernel(kernel, cmap, dpi=300, use_random=False):
    # map the colormap name coming from the UI to a matplotlib colormap
    cmap = cmaps[cmap]

    if not use_random:
        # example data
        X = np.c_[
            (0.4, -0.7),
            (-1.5, -1),
            (-1.4, -0.9),
            (-1.3, -1.2),
            (-1.5, 0.2),
            (-1.2, -0.4),
            (-0.5, 1.2),
            (-1.5, 2.1),
            (1, 1),
            # --
            (1.3, 0.8),
            (1.5, 0.5),
            (0.2, -2),
            (0.5, -2.4),
            (0.2, -2.3),
            (0, -2.7),
            (1.3, 2.8),
        ].T
    else:
        # emulate some random data
        x = np.random.uniform(-2, 2, size=(16, 1))
        y = np.random.uniform(-2, 2, size=(16, 1))
        X = np.hstack((x, y))
    Y = [0] * 8 + [1] * 8

    clf = svm.SVC(kernel=kernel, gamma=2)
    clf.fit(X, Y)

    # plot the line, the points, and the nearest vectors to the plane
    fig = plt.figure(figsize=(10, 6), facecolor='none', frameon=False, dpi=dpi)
    ax = fig.add_subplot(111)

    ax.scatter(
        clf.support_vectors_[:, 0],
        clf.support_vectors_[:, 1],
        s=80,
        facecolors="none",
        zorder=10,
        edgecolors="k",
    )
    ax.scatter(X[:, 0], X[:, 1], c=Y, zorder=10, cmap=cmap, edgecolors="k")

    ax.axis("tight")
    x_min = -3
    x_max = 3
    y_min = -3
    y_max = 3

    XX, YY = np.mgrid[x_min:x_max:200j, y_min:y_max:200j]
    Z = clf.decision_function(np.c_[XX.ravel(), YY.ravel()])

    # Put the result into a color plot
    Z = Z.reshape(XX.shape)
    ax.pcolormesh(XX, YY, Z > 0, cmap=cmap)
    ax.contour(
        XX,
        YY,
        Z,
        colors=["k", "k", "k"],
        linestyles=["--", "-", "--"],
        levels=[-0.5, 0, 0.5],
    )

    ax.set_xlim(x_min, x_max)
    ax.set_ylim(y_min, y_max)

    ax.set_xticks(())
    ax.set_yticks(())
    ax.set_title('Type of kernel: ' + kernel, color="white", fontdict=font1, pad=20,
                 bbox=dict(boxstyle="round,pad=0.3", color="#6366F1"))

    return fig


intro = """

Introducing SVM-Kernels

""" desc = """

🤗 Three different SVM kernels are displayed below. The polynomial and RBF kernels are especially useful when the data points are not linearly separable. 🤗

""" notice = """
Notice: Run the model on the example data, or tick Randomize data to try the model on randomly generated data points.
""" made ="""

Made with ❤

""" link = """
The demo is based on this script from the scikit-learn documentation."""

with gr.Blocks(theme=gr.themes.Soft(primary_hue="indigo",
                                    secondary_hue="violet",
                                    neutral_hue="neutral",
                                    font=gr.themes.GoogleFont("Inter")),
               title="SVM-Kernels") as demo:
    gr.HTML(intro)
    gr.HTML(desc)
    with gr.Box():
        with gr.Row():
            kernel = gr.Dropdown(kernels, label="Select kernel:",
                                 show_label=True, value='linear')
            with gr.Accordion(label="More options", open=True):
                cmap = gr.Radio(['Set1', 'Set2', 'Set3', 'tab10', 'tab20'],
                                label="Choose color map: ", value='Set2')
                dpi = gr.Slider(50, 150, value=100, step=1,
                                label="Set the resolution: ")
        gr.HTML(notice)
        random = gr.Checkbox(label="Randomize data", value=False)
        btn = gr.Button('Make plot!').style(full_width=True)
        plot = gr.Plot(label="Plot")
        btn.click(fn=clf_kernel, inputs=[kernel, cmap, dpi, random], outputs=plot)
    gr.HTML(made)
    gr.HTML(link)

demo.launch()
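
# A minimal sketch of calling the plotting helper directly, outside the Gradio UI
# (the filename "svm_rbf.png" is only an illustrative choice):
#
#     fig = clf_kernel("rbf", "Set2", dpi=150, use_random=True)
#     fig.savefig("svm_rbf.png", bbox_inches="tight")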