Update index.html
Browse files- index.html +66 -0
index.html
CHANGED
@@ -22,9 +22,75 @@
|
|
22 |
<input type="submit" value="Search">
|
23 |
</form>
|
24 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
25 |
<div id="results"></div>
|
26 |
|
27 |
<script>
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
28 |
document.getElementById("embed-form").addEventListener("submit", function(event) {
|
29 |
event.preventDefault();
|
30 |
const text = document.getElementById("embed-input").value;
|
|
|
22 |
<input type="submit" value="Search">
|
23 |
</form>
|
24 |
|
25 |
+
<div class="form-group">
|
26 |
+
<label for="chatModels">Chat Models:</label>
|
27 |
+
<select id="chatModels" multiple>
|
28 |
+
<option value="claude-3-5-sonnet">Claude 3.5 Sonnet</option>
|
29 |
+
<option value="gpt-3.5-turbo">GPT-3.5 Turbo</option>
|
30 |
+
<option value="gpt-4o">GPT-4o</option>
|
31 |
+
</select>
|
32 |
+
</div>
|
33 |
+
|
34 |
<div id="results"></div>
|
35 |
|
36 |
<script>
|
37 |
+
// NOTE(review): these module-level bindings look unused by the visible
// code — fetchLLMModels reads the #apiBaseUrl/#apiKey inputs directly
// and declares its own local `apiKey`. Confirm against the rest of the
// page before removing.
let apiKey = '';
let apiUrl = '';

////////////////////////////////////////////////
// Populate the model list on api key change
////////////////////////////////////////////////
|
43 |
+
|
44 |
+
/**
 * Fetch the list of model ids from the configured OpenAI-compatible API.
 *
 * Reads the base URL and API key from the #apiBaseUrl / #apiKey inputs,
 * calls GET {base}/models with a Bearer token, and returns the model ids.
 *
 * @returns {Promise<string[]>} model ids on success, or the one-element
 *   sentinel array ['Error fetching model'] on failure — callers render
 *   the result straight into the <select>, so this never throws.
 */
async function fetchLLMModels() {
  try {
    const apiBaseUrl = document.getElementById('apiBaseUrl').value;
    const apiKey = document.getElementById('apiKey').value;

    // Bug fix: the original concatenated `apiBaseUrl + 'models'`, which
    // silently produced a wrong URL ("…/v1models") unless the user typed
    // a trailing slash. Normalize so both forms of base URL work.
    const endpoint = apiBaseUrl.endsWith('/')
      ? `${apiBaseUrl}models`
      : `${apiBaseUrl}/models`;

    const response = await fetch(endpoint, {
      method: 'GET',
      headers: {
        'Authorization': `Bearer ${apiKey}`,
      },
    });
    if (!response.ok) {
      throw new Error(`HTTP error! Status: ${response.status}`);
    }
    const data = await response.json();
    // OpenAI-style payload: { data: [{ id: "..." }, ...] }
    return data.data.map((model) => model.id);
  } catch (error) {
    // Deliberate best-effort: log and return a sentinel the UI can show.
    console.error('Error fetching LLM models:', error);
    return ['Error fetching model'];
  }
}
|
68 |
+
|
69 |
+
/**
 * Refresh the #chatModels <select> with the models reported by the API.
 *
 * Clears the hard-coded placeholder options, fetches the live model
 * list, sorts it alphabetically, and rebuilds the option list. The
 * select stays a multi-select and its visible size tracks the option
 * count.
 */
async function populateLLMModels() {
  const chatModels = document.getElementById('chatModels');
  // Keep it a multi-select even if the markup is edited later.
  chatModels.setAttribute('multiple', '');
  // Drop the hard-coded placeholder options.
  chatModels.innerHTML = '';

  const models = await fetchLLMModels();

  // Sort alphabetically (locale-aware), copying first — sort mutates.
  const sortedOptions = [...models].sort((a, b) => a.localeCompare(b));
  console.log(sortedOptions);

  // Add the new options.
  for (const model of sortedOptions) {
    const option = document.createElement('option');
    option.value = model;
    option.text = model;
    chatModels.add(option);
  }

  // Bug fix: the original assigned options.length / 4, which is
  // fractional and becomes 0 for fewer than 4 models — the browser then
  // falls back to a single-row select. Show a quarter of the list,
  // rounded up, never collapsing below 2 visible rows.
  chatModels.size = Math.max(2, Math.ceil(chatModels.options.length / 4));
}
|
92 |
+
|
93 |
+
|
94 |
document.getElementById("embed-form").addEventListener("submit", function(event) {
|
95 |
event.preventDefault();
|
96 |
const text = document.getElementById("embed-input").value;
|