-
-
Notifications
You must be signed in to change notification settings - Fork 345
Expand file tree
/
Copy pathload_model.py
More file actions
47 lines (40 loc) · 1.46 KB
/
load_model.py
File metadata and controls
47 lines (40 loc) · 1.46 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
from __future__ import annotations
from pathlib import Path
from nodes.groups import ncnn_file_inputs_group
from nodes.impl.ncnn.model import NcnnModel, NcnnModelWrapper
from nodes.impl.ncnn.optimizer import NcnnOptimizer
from nodes.properties.inputs import BinFileInput, ParamFileInput
from nodes.properties.outputs import DirectoryOutput, FileNameOutput, NcnnModelOutput
from nodes.utils.utils import split_file_path
from .. import io_group
@io_group.register(
    schema_id="chainner:ncnn:load_model",
    name="Load Model",
    description=(
        "Load NCNN model (.bin and .param files). Theoretically supports any NCNN"
        " Super-Resolution model that doesn't expect non-standard preprocessing."
    ),
    icon="NCNN",
    inputs=[
        ncnn_file_inputs_group(
            ParamFileInput(primary_input=True),
            BinFileInput(primary_input=True),
        )
    ],
    outputs=[
        NcnnModelOutput(kind="tagged").suggest(),
        DirectoryOutput("Directory", of_input=0).with_id(2),
        FileNameOutput("Name", of_input=0).with_id(1),
    ],
    see_also=[
        "chainner:ncnn:load_models",
    ],
    side_effects=True,
)
def load_model_node(
    param_path: Path, bin_path: Path
) -> tuple[NcnnModelWrapper, Path, str]:
    """Load an NCNN model from its .param/.bin file pair.

    Reads the network definition (`param_path`) and weights (`bin_path`),
    runs the NCNN optimizer pass over the loaded graph, and returns the
    wrapped model together with the directory and base file name derived
    from the .param path (used to populate the directory/name outputs).
    """
    loaded = NcnnModel.load_from_file(str(param_path), str(bin_path))
    # Optimize in place before handing the model to downstream nodes.
    NcnnOptimizer(loaded).optimize()
    directory, base_name, _ = split_file_path(param_path)
    return NcnnModelWrapper(loaded), directory, base_name