---
# see also: https://github.com/hjacobs/connexion-example

# Swagger/OpenAPI 2.0 description of the BERT-viz backend API.
# operationId values reference Python handler functions in main.py
# (connexion-style routing — see the example linked above).
swagger: '2.0'
info:
  title: BERT-viz API
  version: "0.0.1"
# All endpoints accept and return JSON.
consumes:
  - application/json
produces:
  - application/json
# Every route defined below is mounted under the /api prefix.
basePath: /api

# ===============================================================================
## DEFINE API ##
# ===============================================================================
paths:

  # Model metadata lookup (e.g. layer/head counts for the UI).
  /get-model-details:
    get:
      tags: [All]
      operationId: main.get_model_details
      summary: Get necessary information about the model, such as number of layers and heads
      parameters:
        - name: model
          description: Short string representing pretrained model, such as 'bert-base-uncased'
          in: query
          type: string
      responses:
        # Swagger 2.0 response keys are strings — quote them so YAML does
        # not parse the key as the integer 200 (strict validators reject
        # integer keys).
        '200':
          description: Returns information about the model

  # Attentions + embeddings + spacy metadata for a single sentence.
  /attend+meta:
    get:
      tags: [All]
      operationId: main.get_attentions_and_preds
      summary: Get the attention information, BERT Embeddings, and spacy meta info for an input sentence
      parameters:
        - name: model
          description: Which pretrained transformer information is requested from
          in: query
          type: string
        - name: sentence
          description: Sentence to analyze
          in: query
          type: string
        - name: layer
          description: Layer to get attentions at
          in: query
          type: number
      responses:
        '200':
          description: Returns attentions, embeddings, and metadata

  # Re-run attention with selected token positions masked out.
  /update-mask:
    post:
      tags: [All]
      operationId: main.update_masked_attention
      summary: Get the masked attention information of tokens given indices to mask
      parameters:
        - name: payload
          description: Main contents
          in: body
          # Body parameters default to optional in Swagger 2.0; this
          # endpoint cannot do anything without a payload, so require it.
          required: true
          schema:
            $ref: '#/definitions/maskPayload'
      responses:
        '200':
          description: Update BERT's masked behavior for passed tokens


definitions:
  # Request body schema for POST /update-mask.
  maskPayload:
    type: object
    properties:
      model:
        type: string
        description: Which model to get results from
      tokens:
        type: array
        items:
          type: string
        description: Main sentence tokens to analyze
      sentence:
        type: string
        description: The original sentence the tokens came from, for extracting metadata
      mask:
        type: array
        items:
          # NOTE(review): indices are presumably whole numbers; 'integer'
          # would be stricter, but changing the type alters request
          # validation — left as 'number'. TODO confirm with main.py.
          type: number
        description: Indices of tokens to mask
      layer:
        type: number
        description: Layer to get results for
    # All fields are mandatory for a mask update.
    required:
      - model
      - tokens
      - sentence
      - mask
      - layer