Update
Files changed:
- .pre-commit-config.yaml +59 -35
- .style.yapf +0 -5
- app.py +5 -5
- model.py +3 -5
.pre-commit-config.yaml
CHANGED
@@ -1,36 +1,60 @@
-exclude: ^MangaLineExtraction_PyTorch
 repos:
-- repo: https://github.com/pre-commit/pre-commit-hooks
-  …
-- repo: https://github.com/pre-commit/mirrors-mypy
-  …
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.6.0
+    hooks:
+      - id: check-executables-have-shebangs
+      - id: check-json
+      - id: check-merge-conflict
+      - id: check-shebang-scripts-are-executable
+      - id: check-toml
+      - id: check-yaml
+      - id: end-of-file-fixer
+      - id: mixed-line-ending
+        args: ["--fix=lf"]
+      - id: requirements-txt-fixer
+      - id: trailing-whitespace
+  - repo: https://github.com/myint/docformatter
+    rev: v1.7.5
+    hooks:
+      - id: docformatter
+        args: ["--in-place"]
+  - repo: https://github.com/pycqa/isort
+    rev: 5.13.2
+    hooks:
+      - id: isort
+        args: ["--profile", "black"]
+  - repo: https://github.com/pre-commit/mirrors-mypy
+    rev: v1.10.0
+    hooks:
+      - id: mypy
+        args: ["--ignore-missing-imports"]
+        additional_dependencies:
+          [
+            "types-python-slugify",
+            "types-requests",
+            "types-PyYAML",
+            "types-pytz",
+          ]
+  - repo: https://github.com/psf/black
+    rev: 24.4.2
+    hooks:
+      - id: black
+        language_version: python3.10
+        args: ["--line-length", "119"]
+  - repo: https://github.com/kynan/nbstripout
+    rev: 0.7.1
+    hooks:
+      - id: nbstripout
+        args:
+          [
+            "--extra-keys",
+            "metadata.interpreter metadata.kernelspec cell.metadata.pycharm",
+          ]
+  - repo: https://github.com/nbQA-dev/nbQA
+    rev: 1.8.5
+    hooks:
+      - id: nbqa-black
+      - id: nbqa-pyupgrade
+        args: ["--py37-plus"]
+      - id: nbqa-isort
+        args: ["--float-to-top"]
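Together with the .style.yapf deletion below, this configuration standardizes formatting on black (pinned to a 119-character line length), with isort on the black profile so the two tools agree, mypy running with --ignore-missing-imports plus the listed type-stub packages, and nbstripout/nbQA keeping notebooks clean. Assuming pre-commit is installed locally, the hooks can be registered and exercised with `pre-commit install` followed by `pre-commit run --all-files`.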
.style.yapf
DELETED
@@ -1,5 +0,0 @@
-[style]
-based_on_style = pep8
-blank_line_before_nested_class_or_def = false
-spaces_before_comment = 2
-split_before_logical_operator = true
app.py
CHANGED
@@ -6,17 +6,17 @@ import gradio as gr
 
 from model import Model
 
-DESCRIPTION = …
+DESCRIPTION = "# [MangaLineExtraction_PyTorch](https://github.com/ljsabc/MangaLineExtraction_PyTorch)"
 
 model = Model()
 
-with gr.Blocks(css=…
+with gr.Blocks(css="style.css") as demo:
     gr.Markdown(DESCRIPTION)
     with gr.Row():
         with gr.Column():
-            input_image = gr.Image(label=…
-            run_button = gr.Button(value=…
+            input_image = gr.Image(label="Input", type="numpy")
+            run_button = gr.Button(value="Run")
         with gr.Column():
-            result = gr.Image(label=…
+            result = gr.Image(label="Result", type="numpy", elem_id="result")
     run_button.click(fn=model.predict, inputs=input_image, outputs=result)
 demo.queue().launch()
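For readability, here is a sketch of how app.py reads after this commit, assembled from the hunk above. Anything outside lines 6-22 of the file is not shown in the diff; the leading import is assumed from the hunk header, and nothing else is added.

```python
# Sketch of app.py after this commit, assembled from the diff above.
# Only `import gradio as gr` appears in the hunk header; earlier lines are assumed absent.
import gradio as gr

from model import Model

DESCRIPTION = "# [MangaLineExtraction_PyTorch](https://github.com/ljsabc/MangaLineExtraction_PyTorch)"

model = Model()

with gr.Blocks(css="style.css") as demo:
    gr.Markdown(DESCRIPTION)
    with gr.Row():
        with gr.Column():
            input_image = gr.Image(label="Input", type="numpy")
            run_button = gr.Button(value="Run")
        with gr.Column():
            result = gr.Image(label="Result", type="numpy", elem_id="result")
    run_button.click(fn=model.predict, inputs=input_image, outputs=result)
demo.queue().launch()
```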
model.py
CHANGED
@@ -10,7 +10,7 @@ import torch
 import torch.nn as nn
 
 current_dir = pathlib.Path(__file__).parent
-submodule_dir = current_dir / …
+submodule_dir = current_dir / "MangaLineExtraction_PyTorch"
 sys.path.insert(0, submodule_dir.as_posix())
 
 from model_torch import res_skip
@@ -20,13 +20,11 @@ MAX_SIZE = 1000
 
 class Model:
     def __init__(self):
-        self.device = torch.device(
-            'cuda:0' if torch.cuda.is_available() else 'cpu')
+        self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
         self.model = self._load_model()
 
     def _load_model(self) -> nn.Module:
-        ckpt_path = huggingface_hub.hf_hub_download(
-            'public-data/MangaLineExtraction_PyTorch', 'erika.pth')
+        ckpt_path = huggingface_hub.hf_hub_download("public-data/MangaLineExtraction_PyTorch", "erika.pth")
         state_dict = torch.load(ckpt_path)
         model = res_skip()
         model.load_state_dict(state_dict)
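As a rough usage sketch (not part of this commit): app.py wires Model.predict to numpy-typed Gradio images, so calling it directly with a numpy image array should behave the same way. The file path, the PIL-based loading, and the RGB conversion below are illustrative assumptions only.

```python
# Illustrative only: assumes Model.predict accepts and returns numpy image arrays,
# as implied by the gr.Image(type="numpy") wiring in app.py.
import numpy as np
from PIL import Image

from model import Model

model = Model()  # downloads erika.pth from public-data/MangaLineExtraction_PyTorch at construction
image = np.array(Image.open("example.jpg").convert("RGB"))  # "example.jpg" is a placeholder path
line_art = model.predict(image)  # numpy array containing the extracted line drawing
print(line_art.shape)
```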