diff --git a/docs/scripts/make_model_zoo_docs.py b/docs/scripts/make_model_zoo_docs.py index 57d7bf32b9..472e0b31db 100644 --- a/docs/scripts/make_model_zoo_docs.py +++ b/docs/scripts/make_model_zoo_docs.py @@ -147,25 +147,25 @@ dataset.apply_model(model, label_field="auto") session = fo.launch_app(dataset) -{% elif 'segment-anything' in tags and 'video' in tags and 'med-SAM' not in tags %} +{% elif 'med-sam' in name %} model = foz.load_zoo_model("{{ name }}") # Segment inside boxes and propagate to all frames dataset.apply_model( model, - label_field="segmentations", - prompt_field="frames.detections", # can contain Detections or Keypoints + label_field="pred_segmentations", + prompt_field="frames.gt_detections", ) session = fo.launch_app(dataset) -{% elif 'med-sam' in name %} +{% elif 'segment-anything' in tags and 'video' in tags %} model = foz.load_zoo_model("{{ name }}") # Segment inside boxes and propagate to all frames dataset.apply_model( model, - label_field="pred_segmentations", - prompt_field="frames.gt_detections", + label_field="segmentations", + prompt_field="frames.detections", # can contain Detections or Keypoints ) session = fo.launch_app(dataset) @@ -354,7 +354,7 @@ def _render_card_model_content(template, model_name): tags = ",".join(tags) - link = "models.html#%s" % zoo_model.name + link = "models.html#%s" % zoo_model.name.replace(".", "-") description = zoo_model.description diff --git a/fiftyone/zoo/models/manifest-torch.json b/fiftyone/zoo/models/manifest-torch.json index 8d00620325..ee2b3a2caf 100644 --- a/fiftyone/zoo/models/manifest-torch.json +++ b/fiftyone/zoo/models/manifest-torch.json @@ -486,6 +486,285 @@ ], "date_added": "2024-08-17 14:48:00" }, + { + "base_name": "segment-anything-2.1-hiera-tiny-image-torch", + "base_filename": "sam2.1_hiera_tiny_image.pt", + "version": null, + "description": "Segment Anything Model 2 (SAM2) from `SAM2: Segment Anything in Images and Videos <https://arxiv.org/abs/2408.00714>`_", + "source": "https://ai.meta.com/sam2/", + 
"size_bytes": 155906050, + "manager": { + "type": "fiftyone.core.models.ModelManager", + "config": { + "url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_tiny.pt" + } + }, + "default_deployment_config_dict": { + "type": "fiftyone.utils.sam2.SegmentAnything2ImageModel", + "config": { + "entrypoint_fcn": "sam2.build_sam.build_sam2", + "entrypoint_args": { + "model_cfg": "configs/sam2.1/sam2.1_hiera_t.yaml" + }, + "output_processor_cls": "fiftyone.utils.torch.SemanticSegmenterOutputProcessor" + } + }, + "requirements": { + "packages": ["torch", "torchvision"], + "cpu": { + "support": true + }, + "gpu": { + "support": true + } + }, + "tags": ["segment-anything", "torch", "zero-shot"], + "date_added": "2024-08-05 14:38:20" + }, + { + "base_name": "segment-anything-2.1-hiera-small-image-torch", + "base_filename": "sam2.1_hiera_small_image.pt", + "version": null, + "description": "Segment Anything Model 2 (SAM2) from `SAM2: Segment Anything in Images and Videos `_", + "source": "https://ai.meta.com/sam2/", + "size_bytes": 155906050, + "manager": { + "type": "fiftyone.core.models.ModelManager", + "config": { + "url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_small.pt" + } + }, + "default_deployment_config_dict": { + "type": "fiftyone.utils.sam2.SegmentAnything2ImageModel", + "config": { + "entrypoint_fcn": "sam2.build_sam.build_sam2", + "entrypoint_args": { + "model_cfg": "configs/sam2.1/sam2.1_hiera_s.yaml" + }, + "output_processor_cls": "fiftyone.utils.torch.SemanticSegmenterOutputProcessor" + } + }, + "requirements": { + "packages": ["torch", "torchvision"], + "cpu": { + "support": true + }, + "gpu": { + "support": true + } + }, + "tags": ["segment-anything", "torch", "zero-shot"], + "date_added": "2024-08-05 14:38:20" + }, + { + "base_name": "segment-anything-2.1-hiera-base-plus-image-torch", + "base_filename": "sam2.1_hiera_base_plus_image.pt", + "version": null, + "description": "Segment Anything Model 2 (SAM2) 
from `SAM2: Segment Anything in Images and Videos <https://arxiv.org/abs/2408.00714>`_", + "source": "https://ai.meta.com/sam2/", + "size_bytes": 155906050, + "manager": { + "type": "fiftyone.core.models.ModelManager", + "config": { + "url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_base_plus.pt" + } + }, + "default_deployment_config_dict": { + "type": "fiftyone.utils.sam2.SegmentAnything2ImageModel", + "config": { + "entrypoint_fcn": "sam2.build_sam.build_sam2", + "entrypoint_args": { + "model_cfg": "configs/sam2.1/sam2.1_hiera_b+.yaml" + }, + "output_processor_cls": "fiftyone.utils.torch.SemanticSegmenterOutputProcessor" + } + }, + "requirements": { + "packages": ["torch", "torchvision"], + "cpu": { + "support": true + }, + "gpu": { + "support": true + } + }, + "tags": ["segment-anything", "torch", "zero-shot"], + "date_added": "2024-08-05 14:38:20" + }, + { + "base_name": "segment-anything-2.1-hiera-large-image-torch", + "base_filename": "sam2.1_hiera_large_image.pt", + "version": null, + "description": "Segment Anything Model 2 (SAM2) from `SAM2: Segment Anything in Images and Videos <https://arxiv.org/abs/2408.00714>`_", + "source": "https://ai.meta.com/sam2/", + "size_bytes": 155906050, + "manager": { + "type": "fiftyone.core.models.ModelManager", + "config": { + "url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_large.pt" + } + }, + "default_deployment_config_dict": { + "type": "fiftyone.utils.sam2.SegmentAnything2ImageModel", + "config": { + "entrypoint_fcn": "sam2.build_sam.build_sam2", + "entrypoint_args": { + "model_cfg": "configs/sam2.1/sam2.1_hiera_l.yaml" + }, + "output_processor_cls": "fiftyone.utils.torch.SemanticSegmenterOutputProcessor" + } + }, + "requirements": { + "packages": ["torch", "torchvision"], + "cpu": { + "support": true + }, + "gpu": { + "support": true + } + }, + "tags": ["segment-anything", "torch", "zero-shot"], + "date_added": "2024-08-05 14:38:20" + }, + { + "base_name": "segment-anything-2.1-hiera-tiny-video-torch", + "base_filename": 
"sam2.1_hiera_tiny_video.pt", + "version": null, + "description": "Segment Anything Model 2 (SAM2) from `SAM2: Segment Anything in Images and Videos `_", + "source": "https://ai.meta.com/sam2/", + "size_bytes": 155906050, + "manager": { + "type": "fiftyone.core.models.ModelManager", + "config": { + "url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_tiny.pt" + } + }, + "default_deployment_config_dict": { + "type": "fiftyone.utils.sam2.SegmentAnything2VideoModel", + "config": { + "entrypoint_fcn": "sam2.build_sam.build_sam2_video_predictor", + "entrypoint_args": { + "model_cfg": "configs/sam2.1/sam2.1_hiera_t.yaml" + } + } + }, + "requirements": { + "packages": ["torch", "torchvision"], + "cpu": { + "support": true + }, + "gpu": { + "support": true + } + }, + "tags": ["segment-anything", "torch", "zero-shot", "video"], + "date_added": "2024-08-05 14:38:20" + }, + { + "base_name": "segment-anything-2.1-hiera-small-video-torch", + "base_filename": "sam2.1_hiera_small_video.pt", + "version": null, + "description": "Segment Anything Model 2 (SAM2) from `SAM2: Segment Anything in Images and Videos `_", + "source": "https://ai.meta.com/sam2/", + "size_bytes": 155906050, + "manager": { + "type": "fiftyone.core.models.ModelManager", + "config": { + "url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_small.pt" + } + }, + "default_deployment_config_dict": { + "type": "fiftyone.utils.sam2.SegmentAnything2VideoModel", + "config": { + "entrypoint_fcn": "sam2.build_sam.build_sam2_video_predictor", + "entrypoint_args": { + "model_cfg": "configs/sam2.1/sam2.1_hiera_s.yaml" + }, + "output_processor_cls": "fiftyone.utils.torch.SemanticSegmenterOutputProcessor" + } + }, + "requirements": { + "packages": ["torch", "torchvision"], + "cpu": { + "support": true + }, + "gpu": { + "support": true + } + }, + "tags": ["segment-anything", "torch", "zero-shot", "video"], + "date_added": "2024-08-05 14:38:20" + }, + { + "base_name": 
"segment-anything-2.1-hiera-base-plus-video-torch", + "base_filename": "sam2.1_hiera_base_plus_video.pt", + "version": null, + "description": "Segment Anything Model 2 (SAM2) from `SAM2: Segment Anything in Images and Videos `_", + "source": "https://ai.meta.com/sam2/", + "size_bytes": 155906050, + "manager": { + "type": "fiftyone.core.models.ModelManager", + "config": { + "url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_base_plus.pt" + } + }, + "default_deployment_config_dict": { + "type": "fiftyone.utils.sam2.SegmentAnything2VideoModel", + "config": { + "entrypoint_fcn": "sam2.build_sam.build_sam2_video_predictor", + "entrypoint_args": { + "model_cfg": "configs/sam2.1/sam2.1_hiera_b+.yaml" + }, + "output_processor_cls": "fiftyone.utils.torch.SemanticSegmenterOutputProcessor" + } + }, + "requirements": { + "packages": ["torch", "torchvision"], + "cpu": { + "support": true + }, + "gpu": { + "support": true + } + }, + "tags": ["segment-anything", "torch", "zero-shot", "video"], + "date_added": "2024-08-05 14:38:20" + }, + { + "base_name": "segment-anything-2.1-hiera-large-video-torch", + "base_filename": "sam2.1_hiera_large_video.pt", + "version": null, + "description": "Segment Anything Model 2 (SAM2) from `SAM2: Segment Anything in Images and Videos `_", + "source": "https://ai.meta.com/sam2/", + "size_bytes": 155906050, + "manager": { + "type": "fiftyone.core.models.ModelManager", + "config": { + "url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_large.pt" + } + }, + "default_deployment_config_dict": { + "type": "fiftyone.utils.sam2.SegmentAnything2VideoModel", + "config": { + "entrypoint_fcn": "sam2.build_sam.build_sam2_video_predictor", + "entrypoint_args": { + "model_cfg": "configs/sam2.1/sam2.1_hiera_l.yaml" + }, + "output_processor_cls": "fiftyone.utils.torch.SemanticSegmenterOutputProcessor" + } + }, + "requirements": { + "packages": ["torch", "torchvision"], + "cpu": { + "support": true + }, + "gpu": 
{ + "support": true + } + }, + "tags": ["segment-anything", "torch", "zero-shot", "video"], + "date_added": "2024-08-05 14:38:20" + }, { "base_name": "deeplabv3-resnet50-coco-torch", "base_filename": "deeplabv3_resnet50_coco-cd0a2569.pth",