xiuyul committed
Commit 2fcf5a9
Parent: aa4e5f7
Files changed (2):
  1. app.py (+5 -4)
  2. result.py (+20 -5)
app.py CHANGED

@@ -4,8 +4,10 @@ import pandas as pd
 
 
 BASELINE = f'<a target="_blank" href=https://github.com/showlab/loveu-tgve-2023 style="color: blue; text-decoration: underline;text-decoration-style: dotted;">Tune-A-Video (Baseline)</a>'
-COLS = ["Method", "CLIPScore (Frame Consistency) ⬆️", "CLIPScore (Text Alignment) ⬆️", "PickScore ⬆️", "Human Preference ⬆️", "References"]
-TYPES = ["markdown", "number", "number", "number", "str", "markdown"]
+COLS = ["Method", "Human Eval (Aesthetic) ⬆️", "Human Eval (Structure) ⬆️", "Human Eval (Text Alignment) ⬆️", "Human Eval (Avg.) ⬆️",
+        "CLIPScore (Frame Consistency) ⬆️", "CLIPScore (Text Alignment) ⬆️", "PickScore ⬆️",
+        "References"]
+TYPES = ["markdown", "number", "number", "number", "number", "number", "number", "number", "markdown"]
 
 
 def get_leaderboard():
@@ -17,7 +19,6 @@ def get_leaderboard():
         "CLIPScore (Frame Consistency) ⬆️":92.40,
         "CLIPScore (Text Alignment) ⬆️":27.12,
         "PickScore ⬆️":20.36,
-        "Human Preference ⬆️":'',
         "References": ','.join([f'<a target="_blank" href="https://arxiv.org/abs/2212.11565" style="color: blue">Paper</a>',
                                 f'<a target="_blank" href="https://github.com/showlab/Tune-A-Video" style="color: blue">Code</a>',
                                 f'<a target="_blank" href="https://tuneavideo.github.io/" style="color: blue">Website</a>',
@@ -35,7 +36,7 @@ def get_leaderboard():
     all_data += submission_results
 
     dataframe = pd.DataFrame.from_records(all_data)
-    dataframe = dataframe.sort_values(by=['CLIPScore (Text Alignment) ⬆️'], ascending=False)
+    dataframe = dataframe.sort_values(by=['Human Eval (Avg.) ⬆️'], ascending=False)
     print(dataframe)
     dataframe = dataframe[COLS]
     return dataframe
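
For context, a minimal, self-contained sketch of what the changed lines in get_leaderboard() now do: rows are sorted by the new human-evaluation average instead of CLIPScore (Text Alignment), then reordered to the display columns. The rows and the abridged column list below are placeholders for illustration, not real leaderboard entries.

import pandas as pd

# Abridged column list for illustration; the full COLS list is in the diff above.
COLS = ["Method", "Human Eval (Avg.) ⬆️", "CLIPScore (Text Alignment) ⬆️"]

# Hypothetical rows -- placeholder values, not actual leaderboard results.
rows = [
    {"Method": "A", "Human Eval (Avg.) ⬆️": 0.45, "CLIPScore (Text Alignment) ⬆️": 27.2},
    {"Method": "B", "Human Eval (Avg.) ⬆️": 0.59, "CLIPScore (Text Alignment) ⬆️": 26.9},
]

dataframe = pd.DataFrame.from_records(rows)
# New sort key introduced by this commit: human-evaluation average, descending.
dataframe = dataframe.sort_values(by=["Human Eval (Avg.) ⬆️"], ascending=False)
dataframe = dataframe[COLS]
print(dataframe)  # "B" ranks first even though its CLIPScore (Text Alignment) is lower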
result.py CHANGED

@@ -4,7 +4,10 @@ submission_results = [
         "CLIPScore (Frame Consistency) ⬆️":91.25,
         "CLIPScore (Text Alignment) ⬆️":27.21,
         "PickScore ⬆️":20.72,
-        "Human Preference ⬆️":'',
+        "Human Eval (Aesthetic) ⬆️":0.465,
+        "Human Eval (Structure) ⬆️":0.348,
+        "Human Eval (Text Alignment) ⬆️":0.538,
+        "Human Eval (Avg.) ⬆️":0.450,
         "References": ''
     },
     {
@@ -12,7 +15,10 @@ submission_results = [
         "CLIPScore (Frame Consistency) ⬆️":92.27,
         "CLIPScore (Text Alignment) ⬆️":25.57,
         "PickScore ⬆️":20.22,
-        "Human Preference ⬆️":'',
+        "Human Eval (Aesthetic) ⬆️":0.564,
+        "Human Eval (Structure) ⬆️":0.601,
+        "Human Eval (Text Alignment) ⬆️":0.531,
+        "Human Eval (Avg.) ⬆️":0.565,
         "References": ''
     },
     {
@@ -20,7 +26,10 @@ submission_results = [
         "CLIPScore (Frame Consistency) ⬆️":92.47,
         "CLIPScore (Text Alignment) ⬆️":25.53,
         "PickScore ⬆️":19.79,
-        "Human Preference ⬆️":'',
+        "Human Eval (Aesthetic) ⬆️":0.387,
+        "Human Eval (Structure) ⬆️":0.402,
+        "Human Eval (Text Alignment) ⬆️":0.399,
+        "Human Eval (Avg.) ⬆️":0.396,
         "References": ''
     },
     {
@@ -28,7 +37,10 @@ submission_results = [
         "CLIPScore (Frame Consistency) ⬆️":92.17,
         "CLIPScore (Text Alignment) ⬆️":27.55,
         "PickScore ⬆️":20.55,
-        "Human Preference ⬆️":'',
+        "Human Eval (Aesthetic) ⬆️":0.438,
+        "Human Eval (Structure) ⬆️":0.446,
+        "Human Eval (Text Alignment) ⬆️":0.451,
+        "Human Eval (Avg.) ⬆️":0.445,
         "References": ','.join([f'<a target="_blank" href="https://github.com/Pranjal2041/RewardT2VE" style="color: blue">Code</a>'])
     },
     {
@@ -36,7 +48,10 @@ submission_results = [
         "CLIPScore (Frame Consistency) ⬆️":89.90,
         "CLIPScore (Text Alignment) ⬆️":26.89,
         "PickScore ⬆️":20.71,
-        "Human Preference ⬆️":'',
+        "Human Eval (Aesthetic) ⬆️":0.599,
+        "Human Eval (Structure) ⬆️":0.486,
+        "Human Eval (Text Alignment) ⬆️":0.689,
+        "Human Eval (Avg.) ⬆️":0.591,
         "References": ''
     }
 ]
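
One observation about the new fields, inferred from the numbers above rather than stated anywhere in the commit: each "Human Eval (Avg.) ⬆️" value matches the arithmetic mean of the three human-evaluation sub-scores to three decimal places. A small sanity check, using values copied from the diff:

# Values copied from result.py above: (aesthetic, structure, text alignment, reported average).
entries = [
    (0.465, 0.348, 0.538, 0.450),
    (0.564, 0.601, 0.531, 0.565),
    (0.387, 0.402, 0.399, 0.396),
    (0.438, 0.446, 0.451, 0.445),
    (0.599, 0.486, 0.689, 0.591),
]
for aesthetic, structure, text_alignment, reported_avg in entries:
    mean = (aesthetic + structure + text_alignment) / 3
    # Agreement within half a unit in the last reported decimal place.
    assert abs(mean - reported_avg) < 5e-4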