File size: 1,715 Bytes
f766ce9
 
 
8b7a945
f766ce9
 
8b7a945
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f766ce9
 
 
 
 
 
 
 
 
8ec7973
f766ce9
 
 
8ec7973
f766ce9
 
 
8a1daf9
f766ce9
 
 
 
 
 
 
 
 
 
36c5a0c
 
 
 
 
f766ce9
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
from dataclasses import dataclass
from enum import Enum


@dataclass
class Task:
    """A benchmark task category, identified by name (e.g. "qa", "long_doc")."""

    name: str

@dataclass
class Metric:
    """An evaluation metric, identified by name (e.g. "ndcg_at_1")."""

    name: str

@dataclass
class Language:
    """A natural language, identified by code (e.g. "en", "zh")."""

    name: str


@dataclass
class Domain:
    """A content domain, identified by name (e.g. "law", "wiki")."""

    name: str


@dataclass
class EmbeddingModel:
    """Metadata describing an embedding model evaluated on the leaderboard.

    Fields:
        full_name: hub identifier, e.g. "jinaai/jina-embeddings-v2-en-base"
        org: organization part of the identifier, e.g. "jinaai"
        model: model part of the identifier, e.g. "jina-embeddings-v2-en-base"
        size: parameter count, in millions
        dim: dimensionality of the output embeddings
        max_tokens: maximum input sequence length, in tokens
        model_type: one of "open", "proprietary", "sentence transformers"
    """

    full_name: str
    org: str
    model: str
    size: int
    dim: int
    max_tokens: int
    model_type: str



NUM_FEWSHOT = 0  # Number of few-shot examples used during evaluation
# ---------------------------------------------------



# Leaderboard page title (rendered as HTML)
TITLE = """<h1 align="center" id="space-title">AIR-Bench</h1>"""

# Short description shown under the title
INTRODUCTION_TEXT = """
AIR-Bench: Automated Heterogeneous Information Retrieval Benchmark
"""

# How the benchmark works and how to reproduce results.
# NOTE: plain string literal — the original used an f-string with no
# placeholders (ruff F541), which risks accidental interpolation if braces
# are later added to the text.
BENCHMARKS_TEXT = """
## How it works

## Reproducibility
To reproduce our results, here are the commands you can run:

"""

# Guidance shown on the submission / evaluation-queue page
EVALUATION_QUEUE_TEXT = """
## Some good practices before submitting a model

### 1)
### 2)
### 3)
### 4)


## In case of model failure
If your model is displayed in the `FAILED` category, its execution stopped.
Make sure you have followed the above steps first.
If everything is done, check you can launch the EleutherAIHarness on your model locally, using the above command without modifications (you can add `--limit` to limit the number of examples per task).
"""

# Label and body for the "cite these results" button
CITATION_BUTTON_LABEL = "Copy the following snippet to cite these results"
CITATION_BUTTON_TEXT = r"""
"""