{
"openapi": "3.1.0",
"info": {
"title": "FastAPI",
"version": "0.1.0"
},
"paths": {
"/v1/completions": {
"post": {
"tags": [
"Contextual Completions"
],
"summary": "Completion",
"description": "We recommend most users use our Chat completions API.\n\nGiven a prompt, the model will return one predicted completion.\n\nOptionally include a `system_prompt` to influence the way the LLM answers.\n\nIf `use_context`\nis set to `true`, the model will use context coming from the ingested documents\nto create the response. The documents being used can be filtered using the\n`context_filter` and passing the document IDs to be used. Ingested documents IDs\ncan be found using `/ingest/list` endpoint. If you want all ingested documents to\nbe used, remove `context_filter` altogether.\n\nWhen using `'include_sources': true`, the API will return the source Chunks used\nto create the response, which come from the context provided.\n\nWhen using `'stream': true`, the API will return data chunks following [OpenAI's\nstreaming model](https://platform.openai.com/docs/api-reference/chat/streaming):\n```\n{\"id\":\"12345\",\"object\":\"completion.chunk\",\"created\":1694268190,\n\"model\":\"private-gpt\",\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hello\"},\n\"finish_reason\":null}]}\n```",
"operationId": "prompt_completion_v1_completions_post",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/CompletionsBody"
}
}
},
"required": true
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/OpenAICompletion"
}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
},
"x-fern-streaming": {
"stream-condition": "stream",
"response": {
"$ref": "#/components/schemas/OpenAICompletion"
},
"response-stream": {
"$ref": "#/components/schemas/OpenAICompletion"
}
}
}
},
"/v1/chat/completions": {
"post": {
"tags": [
"Contextual Completions"
],
"summary": "Chat Completion",
"description": "Given a list of messages comprising a conversation, return a response.\n\nOptionally include an initial `role: system` message to influence the way\nthe LLM answers.\n\nIf `use_context` is set to `true`, the model will use context coming\nfrom the ingested documents to create the response. The documents being used can\nbe filtered using the `context_filter` and passing the document IDs to be used.\nIngested documents IDs can be found using `/ingest/list` endpoint. If you want\nall ingested documents to be used, remove `context_filter` altogether.\n\nWhen using `'include_sources': true`, the API will return the source Chunks used\nto create the response, which come from the context provided.\n\nWhen using `'stream': true`, the API will return data chunks following [OpenAI's\nstreaming model](https://platform.openai.com/docs/api-reference/chat/streaming):\n```\n{\"id\":\"12345\",\"object\":\"completion.chunk\",\"created\":1694268190,\n\"model\":\"private-gpt\",\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hello\"},\n\"finish_reason\":null}]}\n```",
"operationId": "chat_completion_v1_chat_completions_post",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ChatBody"
}
}
},
"required": true
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/OpenAICompletion"
}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
},
"x-fern-streaming": {
"stream-condition": "stream",
"response": {
"$ref": "#/components/schemas/OpenAICompletion"
},
"response-stream": {
"$ref": "#/components/schemas/OpenAICompletion"
}
}
}
},
"/v1/chunks": {
"post": {
"tags": [
"Context Chunks"
],
"summary": "Chunks Retrieval",
"description": "Given a `text`, returns the most relevant chunks from the ingested documents.\n\nThe returned information can be used to generate prompts that can be\npassed to `/completions` or `/chat/completions` APIs. Note: it is usually a very\nfast API, because only the Embeddings model is involved, not the LLM. The\nreturned information contains the relevant chunk `text` together with the source\n`document` it is coming from. It also contains a score that can be used to\ncompare different results.\n\nThe max number of chunks to be returned is set using the `limit` param.\n\nPrevious and next chunks (pieces of text that appear right before or after in the\ndocument) can be fetched by using the `prev_next_chunks` field.\n\nThe documents being used can be filtered using the `context_filter` and passing\nthe document IDs to be used. Ingested documents IDs can be found using\n`/ingest/list` endpoint. If you want all ingested documents to be used,\nremove `context_filter` altogether.",
"operationId": "chunks_retrieval_v1_chunks_post",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ChunksBody"
}
}
},
"required": true
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ChunksResponse"
}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
}
},
"/v1/ingest": {
"post": {
"tags": [
"Ingestion"
],
"summary": "Ingest",
"description": "Ingests and processes a file.\n\nDeprecated. Use ingest/file instead.",
"operationId": "ingest_v1_ingest_post",
"requestBody": {
"content": {
"multipart/form-data": {
"schema": {
"$ref": "#/components/schemas/Body_ingest_v1_ingest_post"
}
}
},
"required": true
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/IngestResponse"
}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
},
"deprecated": true
}
},
"/v1/ingest/file": {
"post": {
"tags": [
"Ingestion"
],
"summary": "Ingest File",
"description": "Ingests and processes a file, storing its chunks to be used as context.\n\nThe context obtained from files is later used in\n`/chat/completions`, `/completions`, and `/chunks` APIs.\n\nMost common document\nformats are supported, but you may be prompted to install an extra dependency to\nmanage a specific file type.\n\nA file can generate different Documents (for example a PDF generates one Document\nper page). All Documents IDs are returned in the response, together with the\nextracted Metadata (which is later used to improve context retrieval). Those IDs\ncan be used to filter the context used to create responses in\n`/chat/completions`, `/completions`, and `/chunks` APIs.",
"operationId": "ingest_file_v1_ingest_file_post",
"requestBody": {
"content": {
"multipart/form-data": {
"schema": {
"$ref": "#/components/schemas/Body_ingest_file_v1_ingest_file_post"
}
}
},
"required": true
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/IngestResponse"
}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
}
},
"/v1/ingest/text": {
"post": {
"tags": [
"Ingestion"
],
"summary": "Ingest Text",
"description": "Ingests and processes a text, storing its chunks to be used as context.\n\nThe context obtained from files is later used in\n`/chat/completions`, `/completions`, and `/chunks` APIs.\n\nA Document will be generated with the given text. The Document\nID is returned in the response, together with the\nextracted Metadata (which is later used to improve context retrieval). That ID\ncan be used to filter the context used to create responses in\n`/chat/completions`, `/completions`, and `/chunks` APIs.",
"operationId": "ingest_text_v1_ingest_text_post",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/IngestTextBody"
}
}
},
"required": true
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/IngestResponse"
}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
}
},
"/v1/ingest/list": {
"get": {
"tags": [
"Ingestion"
],
"summary": "List Ingested",
"description": "Lists already ingested Documents including their Document ID and metadata.\n\nThose IDs can be used to filter the context used to create responses\nin `/chat/completions`, `/completions`, and `/chunks` APIs.",
"operationId": "list_ingested_v1_ingest_list_get",
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/IngestResponse"
}
}
}
}
}
}
},
"/v1/ingest/{doc_id}": {
"delete": {
"tags": [
"Ingestion"
],
"summary": "Delete Ingested",
"description": "Delete the specified ingested Document.\n\nThe `doc_id` can be obtained from the `GET /ingest/list` endpoint.\nThe document will be effectively deleted from your storage context.",
"operationId": "delete_ingested_v1_ingest__doc_id__delete",
"parameters": [
{
"name": "doc_id",
"in": "path",
"required": true,
"schema": {
"type": "string",
"title": "Doc Id"
}
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
}
},
"/v1/embeddings": {
"post": {
"tags": [
"Embeddings"
],
"summary": "Embeddings Generation",
"description": "Get a vector representation of a given input.\n\nThat vector representation can be easily consumed\nby machine learning models and algorithms.",
"operationId": "embeddings_generation_v1_embeddings_post",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/EmbeddingsBody"
}
}
},
"required": true
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/EmbeddingsResponse"
}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
}
}
},
"/health": {
"get": {
"tags": [
"Health"
],
"summary": "Health",
"description": "Return ok if the system is up.",
"operationId": "health_health_get",
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HealthResponse"
}
}
}
}
}
}
}
},
"components": {
"schemas": {
"Body_ingest_file_v1_ingest_file_post": {
"properties": {
"file": {
"type": "string",
"format": "binary",
"title": "File"
}
},
"type": "object",
"required": [
"file"
],
"title": "Body_ingest_file_v1_ingest_file_post"
},
"Body_ingest_v1_ingest_post": {
"properties": {
"file": {
"type": "string",
"format": "binary",
"title": "File"
}
},
"type": "object",
"required": [
"file"
],
"title": "Body_ingest_v1_ingest_post"
},
"ChatBody": {
"properties": {
"messages": {
"items": {
"$ref": "#/components/schemas/OpenAIMessage"
},
"type": "array",
"title": "Messages"
},
"use_context": {
"type": "boolean",
"title": "Use Context",
"default": false
},
"context_filter": {
"anyOf": [
{
"$ref": "#/components/schemas/ContextFilter"
},
{
"type": "null"
}
]
},
"include_sources": {
"type": "boolean",
"title": "Include Sources",
"default": true
},
"stream": {
"type": "boolean",
"title": "Stream",
"default": false
}
},
"type": "object",
"required": [
"messages"
],
"title": "ChatBody",
"examples": [
{
"context_filter": {
"docs_ids": [
"c202d5e6-7b69-4869-81cc-dd574ee8ee11"
]
},
"include_sources": true,
"messages": [
{
"content": "You are a rapper. Always answer with a rap.",
"role": "system"
},
{
"content": "How do you fry an egg?",
"role": "user"
}
],
"stream": false,
"use_context": true
}
]
},
"Chunk": {
"properties": {
"object": {
"const": "context.chunk",
"title": "Object"
},
"score": {
"type": "number",
"title": "Score",
"examples": [
0.023
]
},
"document": {
"$ref": "#/components/schemas/IngestedDoc"
},
"text": {
"type": "string",
"title": "Text",
"examples": [
"Outbound sales increased 20%, driven by new leads."
]
},
"previous_texts": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "null"
}
],
"title": "Previous Texts",
"examples": [
[
"SALES REPORT 2023",
"Inbound didn't show major changes."
]
]
},
"next_texts": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "null"
}
],
"title": "Next Texts",
"examples": [
[
"New leads came from Google Ads campaign.",
"The campaign was run by the Marketing Department"
]
]
}
},
"type": "object",
"required": [
"object",
"score",
"document",
"text"
],
"title": "Chunk"
},
"ChunksBody": {
"properties": {
"text": {
"type": "string",
"title": "Text",
"examples": [
"Q3 2023 sales"
]
},
"context_filter": {
"anyOf": [
{
"$ref": "#/components/schemas/ContextFilter"
},
{
"type": "null"
}
]
},
"limit": {
"type": "integer",
"title": "Limit",
"default": 10
},
"prev_next_chunks": {
"type": "integer",
"title": "Prev Next Chunks",
"default": 0,
"examples": [
2
]
}
},
"type": "object",
"required": [
"text"
],
"title": "ChunksBody"
},
"ChunksResponse": {
"properties": {
"object": {
"const": "list",
"title": "Object"
},
"model": {
"const": "private-gpt",
"title": "Model"
},
"data": {
"items": {
"$ref": "#/components/schemas/Chunk"
},
"type": "array",
"title": "Data"
}
},
"type": "object",
"required": [
"object",
"model",
"data"
],
"title": "ChunksResponse"
},
"CompletionsBody": {
"properties": {
"prompt": {
"type": "string",
"title": "Prompt"
},
"system_prompt": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"title": "System Prompt"
},
"use_context": {
"type": "boolean",
"title": "Use Context",
"default": false
},
"context_filter": {
"anyOf": [
{
"$ref": "#/components/schemas/ContextFilter"
},
{
"type": "null"
}
]
},
"include_sources": {
"type": "boolean",
"title": "Include Sources",
"default": true
},
"stream": {
"type": "boolean",
"title": "Stream",
"default": false
}
},
"type": "object",
"required": [
"prompt"
],
"title": "CompletionsBody",
"examples": [
{
"include_sources": false,
"prompt": "How do you fry an egg?",
"stream": false,
"system_prompt": "You are a rapper. Always answer with a rap.",
"use_context": false
}
]
},
"ContextFilter": {
"properties": {
"docs_ids": {
"anyOf": [
{
"items": {
"type": "string"
},
"type": "array"
},
{
"type": "null"
}
],
"title": "Docs Ids",
"examples": [
[
"c202d5e6-7b69-4869-81cc-dd574ee8ee11"
]
]
}
},
"type": "object",
"required": [
"docs_ids"
],
"title": "ContextFilter"
},
"Embedding": {
"properties": {
"index": {
"type": "integer",
"title": "Index"
},
"object": {
"const": "embedding",
"title": "Object"
},
"embedding": {
"items": {
"type": "number"
},
"type": "array",
"title": "Embedding",
"examples": [
[
0.0023064255,
-0.009327292
]
]
}
},
"type": "object",
"required": [
"index",
"object",
"embedding"
],
"title": "Embedding"
},
"EmbeddingsBody": {
"properties": {
"input": {
"anyOf": [
{
"type": "string"
},
{
"items": {
"type": "string"
},
"type": "array"
}
],
"title": "Input"
}
},
"type": "object",
"required": [
"input"
],
"title": "EmbeddingsBody"
},
"EmbeddingsResponse": {
"properties": {
"object": {
"const": "list",
"title": "Object"
},
"model": {
"const": "private-gpt",
"title": "Model"
},
"data": {
"items": {
"$ref": "#/components/schemas/Embedding"
},
"type": "array",
"title": "Data"
}
},
"type": "object",
"required": [
"object",
"model",
"data"
],
"title": "EmbeddingsResponse"
},
"HTTPValidationError": {
"properties": {
"detail": {
"items": {
"$ref": "#/components/schemas/ValidationError"
},
"type": "array",
"title": "Detail"
}
},
"type": "object",
"title": "HTTPValidationError"
},
"HealthResponse": {
"properties": {
"status": {
"const": "ok",
"title": "Status",
"default": "ok"
}
},
"type": "object",
"title": "HealthResponse"
},
"IngestResponse": {
"properties": {
"object": {
"const": "list",
"title": "Object"
},
"model": {
"const": "private-gpt",
"title": "Model"
},
"data": {
"items": {
"$ref": "#/components/schemas/IngestedDoc"
},
"type": "array",
"title": "Data"
}
},
"type": "object",
"required": [
"object",
"model",
"data"
],
"title": "IngestResponse"
},
"IngestTextBody": {
"properties": {
"file_name": {
"type": "string",
"title": "File Name",
"examples": [
"Avatar: The Last Airbender"
]
},
"text": {
"type": "string",
"title": "Text",
"examples": [
"Avatar is set in an Asian and Arctic-inspired world in which some people can telekinetically manipulate one of the four elements\u2014water, earth, fire or air\u2014through practices known as 'bending', inspired by Chinese martial arts."
]
}
},
"type": "object",
"required": [
"file_name",
"text"
],
"title": "IngestTextBody"
},
"IngestedDoc": {
"properties": {
"object": {
"const": "ingest.document",
"title": "Object"
},
"doc_id": {
"type": "string",
"title": "Doc Id",
"examples": [
"c202d5e6-7b69-4869-81cc-dd574ee8ee11"
]
},
"doc_metadata": {
"anyOf": [
{
"type": "object"
},
{
"type": "null"
}
],
"title": "Doc Metadata",
"examples": [
{
"file_name": "Sales Report Q3 2023.pdf",
"page_label": "2"
}
]
}
},
"type": "object",
"required": [
"object",
"doc_id",
"doc_metadata"
],
"title": "IngestedDoc"
},
"OpenAIChoice": {
"properties": {
"finish_reason": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"title": "Finish Reason",
"examples": [
"stop"
]
},
"delta": {
"anyOf": [
{
"$ref": "#/components/schemas/OpenAIDelta"
},
{
"type": "null"
}
]
},
"message": {
"anyOf": [
{
"$ref": "#/components/schemas/OpenAIMessage"
},
{
"type": "null"
}
]
},
"sources": {
"anyOf": [
{
"items": {
"$ref": "#/components/schemas/Chunk"
},
"type": "array"
},
{
"type": "null"
}
],
"title": "Sources"
},
"index": {
"type": "integer",
"title": "Index",
"default": 0
}
},
"type": "object",
"required": [
"finish_reason"
],
"title": "OpenAIChoice",
"description": "Response from AI.\n\nEither the delta or the message will be present, but never both.\nSources used will be returned in case context retrieval was enabled."
},
"OpenAICompletion": {
"properties": {
"id": {
"type": "string",
"title": "Id"
},
"object": {
"type": "string",
"enum": [
"completion",
"completion.chunk"
],
"title": "Object",
"default": "completion"
},
"created": {
"type": "integer",
"title": "Created",
"examples": [
1623340000
]
},
"model": {
"const": "private-gpt",
"title": "Model"
},
"choices": {
"items": {
"$ref": "#/components/schemas/OpenAIChoice"
},
"type": "array",
"title": "Choices"
}
},
"type": "object",
"required": [
"id",
"created",
"model",
"choices"
],
"title": "OpenAICompletion",
"description": "Clone of OpenAI Completion model.\n\nFor more information see: https://platform.openai.com/docs/api-reference/chat/object"
},
"OpenAIDelta": {
"properties": {
"content": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"title": "Content"
}
},
"type": "object",
"required": [
"content"
],
"title": "OpenAIDelta",
"description": "A piece of completion that needs to be concatenated to get the full message."
},
"OpenAIMessage": {
"properties": {
"role": {
"type": "string",
"enum": [
"assistant",
"system",
"user"
],
"title": "Role",
"default": "user"
},
"content": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"title": "Content"
}
},
"type": "object",
"required": [
"content"
],
"title": "OpenAIMessage",
"description": "Inference result, with the source of the message.\n\nRole could be the assistant or system\n(providing a default response, not AI generated)."
},
"ValidationError": {
"properties": {
"loc": {
"items": {
"anyOf": [
{
"type": "string"
},
{
"type": "integer"
}
]
},
"type": "array",
"title": "Location"
},
"msg": {
"type": "string",
"title": "Message"
},
"type": {
"type": "string",
"title": "Error Type"
}
},
"type": "object",
"required": [
"loc",
"msg",
"type"
],
"title": "ValidationError"
}
}
}
}
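
A minimal Python client sketch exercising the endpoints defined in the spec above. The base URL (a local server on port 8001) and the use of the `requests` package are assumptions, not part of the specification; adjust them to match your deployment.

```
# Sketch of a client for the API described by the OpenAPI spec above.
# Assumptions: the server is reachable at http://localhost:8001 and `requests` is installed.
import requests

BASE_URL = "http://localhost:8001"  # assumed default; change to match your deployment

# 1. Ingest a piece of text so it can later be used as context (/v1/ingest/text).
ingest = requests.post(
    f"{BASE_URL}/v1/ingest/text",
    json={
        "file_name": "Avatar: The Last Airbender",
        "text": "Avatar is set in an Asian and Arctic-inspired world ...",
    },
    timeout=60,
)
ingest.raise_for_status()
doc_ids = [doc["doc_id"] for doc in ingest.json()["data"]]  # IngestResponse.data[*].doc_id

# 2. Ask a question grounded in the ingested document (/v1/chat/completions).
chat = requests.post(
    f"{BASE_URL}/v1/chat/completions",
    json={
        "messages": [{"role": "user", "content": "What world is Avatar set in?"}],
        "use_context": True,
        "context_filter": {"docs_ids": doc_ids},
        "include_sources": True,
        "stream": False,
    },
    timeout=120,
)
chat.raise_for_status()
print(chat.json()["choices"][0]["message"]["content"])  # OpenAICompletion.choices[0].message

# 3. Retrieve the most relevant chunks directly (/v1/chunks).
chunks = requests.post(
    f"{BASE_URL}/v1/chunks",
    json={"text": "Avatar setting", "limit": 3, "prev_next_chunks": 1},
    timeout=60,
)
chunks.raise_for_status()
for chunk in chunks.json()["data"]:  # ChunksResponse.data[*] is a Chunk
    print(chunk["score"], chunk["text"][:80])
```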