hysts HF staff committed on
Commit
ec4a1e7
1 Parent(s): 19170fb
Files changed (4) hide show
  1. .pre-commit-config.yaml +59 -35
  2. .style.yapf +0 -5
  3. app.py +5 -5
  4. model.py +3 -5
.pre-commit-config.yaml CHANGED
@@ -1,36 +1,60 @@
1
- exclude: ^MangaLineExtraction_PyTorch
2
  repos:
3
- - repo: https://github.com/pre-commit/pre-commit-hooks
4
- rev: v4.2.0
5
- hooks:
6
- - id: check-executables-have-shebangs
7
- - id: check-json
8
- - id: check-merge-conflict
9
- - id: check-shebang-scripts-are-executable
10
- - id: check-toml
11
- - id: check-yaml
12
- - id: double-quote-string-fixer
13
- - id: end-of-file-fixer
14
- - id: mixed-line-ending
15
- args: ['--fix=lf']
16
- - id: requirements-txt-fixer
17
- - id: trailing-whitespace
18
- - repo: https://github.com/myint/docformatter
19
- rev: v1.4
20
- hooks:
21
- - id: docformatter
22
- args: ['--in-place']
23
- - repo: https://github.com/pycqa/isort
24
- rev: 5.12.0
25
- hooks:
26
- - id: isort
27
- - repo: https://github.com/pre-commit/mirrors-mypy
28
- rev: v0.991
29
- hooks:
30
- - id: mypy
31
- args: ['--ignore-missing-imports']
32
- - repo: https://github.com/google/yapf
33
- rev: v0.32.0
34
- hooks:
35
- - id: yapf
36
- args: ['--parallel', '--in-place']
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  repos:
2
+ - repo: https://github.com/pre-commit/pre-commit-hooks
3
+ rev: v4.6.0
4
+ hooks:
5
+ - id: check-executables-have-shebangs
6
+ - id: check-json
7
+ - id: check-merge-conflict
8
+ - id: check-shebang-scripts-are-executable
9
+ - id: check-toml
10
+ - id: check-yaml
11
+ - id: end-of-file-fixer
12
+ - id: mixed-line-ending
13
+ args: ["--fix=lf"]
14
+ - id: requirements-txt-fixer
15
+ - id: trailing-whitespace
16
+ - repo: https://github.com/myint/docformatter
17
+ rev: v1.7.5
18
+ hooks:
19
+ - id: docformatter
20
+ args: ["--in-place"]
21
+ - repo: https://github.com/pycqa/isort
22
+ rev: 5.13.2
23
+ hooks:
24
+ - id: isort
25
+ args: ["--profile", "black"]
26
+ - repo: https://github.com/pre-commit/mirrors-mypy
27
+ rev: v1.10.0
28
+ hooks:
29
+ - id: mypy
30
+ args: ["--ignore-missing-imports"]
31
+ additional_dependencies:
32
+ [
33
+ "types-python-slugify",
34
+ "types-requests",
35
+ "types-PyYAML",
36
+ "types-pytz",
37
+ ]
38
+ - repo: https://github.com/psf/black
39
+ rev: 24.4.2
40
+ hooks:
41
+ - id: black
42
+ language_version: python3.10
43
+ args: ["--line-length", "119"]
44
+ - repo: https://github.com/kynan/nbstripout
45
+ rev: 0.7.1
46
+ hooks:
47
+ - id: nbstripout
48
+ args:
49
+ [
50
+ "--extra-keys",
51
+ "metadata.interpreter metadata.kernelspec cell.metadata.pycharm",
52
+ ]
53
+ - repo: https://github.com/nbQA-dev/nbQA
54
+ rev: 1.8.5
55
+ hooks:
56
+ - id: nbqa-black
57
+ - id: nbqa-pyupgrade
58
+ args: ["--py37-plus"]
59
+ - id: nbqa-isort
60
+ args: ["--float-to-top"]
.style.yapf DELETED
@@ -1,5 +0,0 @@
1
- [style]
2
- based_on_style = pep8
3
- blank_line_before_nested_class_or_def = false
4
- spaces_before_comment = 2
5
- split_before_logical_operator = true
 
 
 
 
 
 
app.py CHANGED
@@ -6,17 +6,17 @@ import gradio as gr
6
 
7
  from model import Model
8
 
9
- DESCRIPTION = '# [MangaLineExtraction_PyTorch](https://github.com/ljsabc/MangaLineExtraction_PyTorch)'
10
 
11
  model = Model()
12
 
13
- with gr.Blocks(css='style.css') as demo:
14
  gr.Markdown(DESCRIPTION)
15
  with gr.Row():
16
  with gr.Column():
17
- input_image = gr.Image(label='Input', type='numpy')
18
- run_button = gr.Button(value='Run')
19
  with gr.Column():
20
- result = gr.Image(label='Result', type='numpy', elem_id='result')
21
  run_button.click(fn=model.predict, inputs=input_image, outputs=result)
22
  demo.queue().launch()
 
6
 
7
  from model import Model
8
 
9
+ DESCRIPTION = "# [MangaLineExtraction_PyTorch](https://github.com/ljsabc/MangaLineExtraction_PyTorch)"
10
 
11
  model = Model()
12
 
13
+ with gr.Blocks(css="style.css") as demo:
14
  gr.Markdown(DESCRIPTION)
15
  with gr.Row():
16
  with gr.Column():
17
+ input_image = gr.Image(label="Input", type="numpy")
18
+ run_button = gr.Button(value="Run")
19
  with gr.Column():
20
+ result = gr.Image(label="Result", type="numpy", elem_id="result")
21
  run_button.click(fn=model.predict, inputs=input_image, outputs=result)
22
  demo.queue().launch()
model.py CHANGED
@@ -10,7 +10,7 @@ import torch
10
  import torch.nn as nn
11
 
12
  current_dir = pathlib.Path(__file__).parent
13
- submodule_dir = current_dir / 'MangaLineExtraction_PyTorch'
14
  sys.path.insert(0, submodule_dir.as_posix())
15
 
16
  from model_torch import res_skip
@@ -20,13 +20,11 @@ MAX_SIZE = 1000
20
 
21
  class Model:
22
  def __init__(self):
23
- self.device = torch.device(
24
- 'cuda:0' if torch.cuda.is_available() else 'cpu')
25
  self.model = self._load_model()
26
 
27
  def _load_model(self) -> nn.Module:
28
- ckpt_path = huggingface_hub.hf_hub_download(
29
- 'public-data/MangaLineExtraction_PyTorch', 'erika.pth')
30
  state_dict = torch.load(ckpt_path)
31
  model = res_skip()
32
  model.load_state_dict(state_dict)
 
10
  import torch.nn as nn
11
 
12
  current_dir = pathlib.Path(__file__).parent
13
+ submodule_dir = current_dir / "MangaLineExtraction_PyTorch"
14
  sys.path.insert(0, submodule_dir.as_posix())
15
 
16
  from model_torch import res_skip
 
20
 
21
  class Model:
22
  def __init__(self):
23
+ self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
 
24
  self.model = self._load_model()
25
 
26
  def _load_model(self) -> nn.Module:
27
+ ckpt_path = huggingface_hub.hf_hub_download("public-data/MangaLineExtraction_PyTorch", "erika.pth")
 
28
  state_dict = torch.load(ckpt_path)
29
  model = res_skip()
30
  model.load_state_dict(state_dict)