from ..utils import common_annotator_call, INPUT, define_preprocessor_inputs
import comfy.model_management as model_management


class DensePose_Preprocessor:
    @classmethod
    def INPUT_TYPES(s):
        return define_preprocessor_inputs(
            model=INPUT.COMBO(["densepose_r50_fpn_dl.torchscript", "densepose_r101_fpn_dl.torchscript"]),
            cmap=INPUT.COMBO(["Viridis (MagicAnimate)", "Parula (CivitAI)"]),
            resolution=INPUT.RESOLUTION()
        )

    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "execute"
    CATEGORY = "ControlNet Preprocessors/Faces and Poses Estimators"

    def execute(self, image, model="densepose_r50_fpn_dl.torchscript", cmap="Viridis (MagicAnimate)", resolution=512):
        from custom_controlnet_aux.densepose import DenseposeDetector
        # Load the selected TorchScript DensePose model and move it to the active torch device.
        model = DenseposeDetector \
            .from_pretrained(filename=model) \
            .to(model_management.get_torch_device())
        # Map the UI colormap label to the annotator's internal cmap name.
        return (common_annotator_call(model, image, cmap="viridis" if "Viridis" in cmap else "parula", resolution=resolution), )


NODE_CLASS_MAPPINGS = {
    "DensePosePreprocessor": DensePose_Preprocessor
}
NODE_DISPLAY_NAME_MAPPINGS = {
    "DensePosePreprocessor": "DensePose Estimator"
}
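
The snippet below is a minimal sketch of exercising the node outside the ComfyUI graph, assuming a working ComfyUI environment (so that comfy.model_management imports) and that the model weights are available to from_pretrained. The dummy_image tensor is a hypothetical stand-in that follows the ComfyUI IMAGE convention of a float32 tensor shaped [batch, height, width, channels] with values in [0, 1].

# Hypothetical usage sketch; not part of the node wrapper file.
import torch

node = DensePose_Preprocessor()
dummy_image = torch.rand(1, 512, 512, 3)  # stand-in for a real ComfyUI IMAGE batch

(pose_map,) = node.execute(
    dummy_image,
    model="densepose_r50_fpn_dl.torchscript",
    cmap="Viridis (MagicAnimate)",
    resolution=512,
)
print(pose_map.shape)  # colourised DensePose map returned as an IMAGE tensor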