aksell committed on
Commit
9a7dfe2
1 Parent(s): 58c7b8d

Fix zero indexing of head and layer selector

protention/streamlit/Attention_On_Structure.py CHANGED
@@ -21,11 +21,13 @@ pdb_id = st.text_input("PDB ID", "4RW0")
 
 left, right = st.columns(2)
 with left:
-    layer = st.number_input("Layer", value=1, min_value=1, max_value=selected_model.layers)
+    layer_one = st.number_input("Layer", value=1, min_value=1, max_value=selected_model.layers)
+    layer = layer_one - 1
 with right:
-    head = st.number_input("Head", value=1, min_value=1, max_value=selected_model.heads)
+    head_one = st.number_input("Head", value=1, min_value=1, max_value=selected_model.heads)
+    head = head_one - 1
 
-min_attn = st.slider("Minimum attention", min_value=0.0, max_value=0.4, value=0.15)
+min_attn = st.slider("Minimum attention", min_value=0.0, max_value=0.4, value=0.1)
 
 attention_pairs = get_attention_pairs(pdb_id, layer, head, min_attn, model_type=selected_model.name)
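
For context, a minimal standalone sketch of the pattern this commit introduces: the widgets stay 1-based for readability, while the values handed on to the attention lookup are shifted down by one. The ModelMeta container and its example layers/heads counts are illustrative stand-ins for the app's selected_model; get_attention_pairs is not reproduced here.

import streamlit as st
from dataclasses import dataclass

# Illustrative stand-in for the app's selected_model object (names and counts assumed).
@dataclass
class ModelMeta:
    name: str
    layers: int
    heads: int

selected_model = ModelMeta(name="example-model", layers=30, heads=16)

left, right = st.columns(2)
with left:
    # The widget shows layers starting at 1...
    layer_one = st.number_input("Layer", value=1, min_value=1, max_value=selected_model.layers)
    layer = layer_one - 1  # ...but downstream indexing is 0-based.
with right:
    head_one = st.number_input("Head", value=1, min_value=1, max_value=selected_model.heads)
    head = head_one - 1

min_attn = st.slider("Minimum attention", min_value=0.0, max_value=0.4, value=0.1)

st.write(f"Using zero-indexed layer {layer}, head {head}, attention threshold {min_attn}")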