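"""Gradio demo: learning the XOR function with a non-linear SVC,
based on the scikit-learn non-linear SVM example."""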
import numpy as np
import matplotlib.pyplot as plt
from sklearn import svm
import gradio as gr

def calculate_score(clf):
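    """Return the classifier's accuracy on a dense grid labeled with the XOR ground truth."""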
    xx, yy = np.meshgrid(np.linspace(-3, 3, 500), np.linspace(-3, 3, 500))
    X_test = np.c_[xx.ravel(), yy.ravel()]
    Y_test = np.logical_xor(xx.ravel() > 0, yy.ravel() > 0)
    return clf.score(X_test, Y_test)

def getColorMap(kernel, gamma):
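    """Fit a NuSVC on noisy XOR data and plot its decision function.

    Returns the plot (via pyplot) and the accuracy of the fitted classifier
    on the XOR evaluation grid.
    """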
    # prepare the training dataset
    np.random.seed(0)
    X = np.random.randn(300, 2)
    Y = np.logical_xor(X[:, 0] > 0, X[:, 1] > 0)

    # fit the model
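    # NuSVC is parameterized by nu (an upper bound on the fraction of margin
    # errors) rather than the usual penalty term C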
    clf = svm.NuSVC(kernel=kernel, gamma=gamma)
    clf.fit(X, Y)
    
    # create a grid for plotting the decision function
    xx, yy = np.meshgrid(np.linspace(-3, 3, 500), np.linspace(-3, 3, 500))
    
    # evaluate the decision function at each point on the grid
    Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
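    # Z is the signed distance of each grid point from the learned decision
    # boundary: positive on one side of the boundary, negative on the other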

    plt.figure(figsize=(10, 4))
    plt.imshow(
        Z,
        interpolation="nearest",
        extent=(xx.min(), xx.max(), yy.min(), yy.max()),
        aspect="auto",
        origin="lower",
        cmap=plt.cm.PuOr_r,
    )
    plt.contour(xx, yy, Z, levels=[0], linewidths=2, linestyles="dashed")
    plt.scatter(X[:, 0], X[:, 1], s=30, c=Y, cmap=plt.cm.Paired, edgecolors='k')
    plt.title(f"Decision function for Non-Linear SVC with the {kernel} kernel and '{gamma}' gamma ", fontsize='14')	#title
    plt.xlabel("X",fontsize='13')	#adds a label in the x axis
    plt.ylabel("Y",fontsize='13')	#adds a label in the y axis
    return plt, calculate_score(clf)

#XOR_TABLE markdown text
XOR_TABLE = """
<style type="text/css">
.tg  {border-collapse:collapse;border-spacing:10PX;width:50%;margin:auto}
.tg td{border-color:black;border-style:solid;border-width:1px;font-family:Arial, sans-serif;font-size:14px;
  overflow:hidden;padding:10px 5px;word-break:normal;}
.tg th{border-color:black;border-style:solid;border-width:1px;font-family:Arial, sans-serif;font-size:14px;
  font-weight:normal;overflow:hidden;padding:10px 5px;word-break:normal;}
.tg .tg-c3ow{border-color:inherit;text-align:center}
td, th {padding-left: 1rem}
</style>
<BR>
<H3>Table explaining the 'XOR' operator</H3>
<table class="tg">
<thead>
  <tr>
    <th class="tg-c3ow">A</th>
    <th class="tg-c3ow">B</th>
    <th class="tg-c3ow">A XOR B</th>
  </tr>
</thead>
<tbody>
  <tr>
    <td class="tg-c3ow">0</td>
    <td class="tg-c3ow">0</td>
    <td class="tg-c3ow">0</td>
  </tr>
  <tr>
    <td class="tg-c3ow">0</td>
    <td class="tg-c3ow">1</td>
    <td class="tg-c3ow">1</td>
  </tr>
  <tr>
    <td class="tg-c3ow">1</td>
    <td class="tg-c3ow">0</td>
    <td class="tg-c3ow">1</td>
  </tr>
  <tr>
    <td class="tg-c3ow">1</td>
    <td class="tg-c3ow">1</td>
    <td class="tg-c3ow">0</td>
  </tr>
</tbody>
</table>
"""


with gr.Blocks() as demo:
    gr.Markdown("## Learning the XOR function: An application of Binary Classification using Non-linear SVM")
    gr.Markdown("### This demo is based on this [scikit-learn example](https://scikit-learn.org/stable/auto_examples/svm/plot_svm_nonlinear.html#sphx-glr-auto-examples-svm-plot-svm-nonlinear-py).")
    gr.Markdown("### In this demo, we use a non-linear SVC (Support Vector Classifier) to learn the decision function of the XOR operator.")
                    
    gr.Markdown("### Furthermore, we observe that we get different decision function plots by varying the Kernel and Gamma hyperparameters of the non-linear SVC.")

    gr.Markdown("### Feel free to experiment with kernel and gamma values below to see how the quality of the decision function changes with the hyperparameters.")

    inp1 = gr.Radio(['poly', 'rbf', 'sigmoid'], label="Kernel", info="Choose a kernel", value="poly")
    inp2 = gr.Radio(['scale', 'auto'], label="Gamma", info="Choose a gamma value", value="scale")
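    # In scikit-learn, gamma="scale" corresponds to 1 / (n_features * X.var())
    # and gamma="auto" to 1 / n_features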
    
    with gr.Row().style(equal_height=True):
        with gr.Column(scale=2):        
            plot = gr.Plot(label="Decision function plot")
        with gr.Column(scale=1):
            num = gr.Textbox(label="Test Accuracy")

    inp1.change(getColorMap, inputs=[inp1, inp2], outputs=[plot, num])
    inp2.change(getColorMap, inputs=[inp1, inp2], outputs=[plot, num])
    demo.load(getColorMap, inputs=[inp1, inp2], outputs=[plot, num])
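    # The handlers above re-fit the SVC and redraw the plot whenever a radio
    # value changes; demo.load renders the initial plot when the page is opened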

    gr.HTML(XOR_TABLE)


if __name__ == "__main__":
    demo.launch()