fix env
app.py CHANGED
@@ -14,6 +14,9 @@ import numpy as np
 import matplotlib.pyplot as plt
 import fairseq
 
+logger = logging.getLogger(__name__)
+logger.info("start init")
+
 
 fairseq_path = os.path.dirname(os.path.dirname(fairseq.__file__))
 
@@ -24,8 +27,6 @@ sys.path.insert(1, f"{fairseq_path}/examples")
 from mass.s2s_model import TransformerMASSModel
 from transformer.hub_interface import TransformerHubInterface
 
-logger = logging.getLogger(__name__)
-
 notice_markdown = ("""
 # Directed Acyclic Transformer: A Non-Autoregressive Sequence-to-Sequence Model designed for Parallel Text Generation.
 - **Fast Generation**: DA-Transformer offers faster inference compared to autoregressive Transformers (with fairseq implementation), with a reduction in latency by 7~14x and an increase in throughput by ~20x.
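For context, a minimal sketch of how the top of app.py reads after this commit, merging the two hunks above. The surrounding imports (os, sys, logging, numpy) are assumptions inferred from the hunk context lines, not shown in this diff.

# Sketch only: assumed imports (os, sys, logging, numpy) inferred from the hunk headers.
import os
import sys
import logging

import numpy as np
import matplotlib.pyplot as plt
import fairseq

# After this change the logger is created and used before the fairseq path setup,
# so "start init" is logged as soon as the Space begins starting up.
logger = logging.getLogger(__name__)
logger.info("start init")

# Locate the installed fairseq package and add its examples directory to the path
# so the example model modules imported below can be found.
fairseq_path = os.path.dirname(os.path.dirname(fairseq.__file__))
sys.path.insert(1, f"{fairseq_path}/examples")

from mass.s2s_model import TransformerMASSModel
from transformer.hub_interface import TransformerHubInterface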