File "C:\Temp\ComfyUI_windows_portable\ComfyUI\execution.py", line 327, in execute
output_data, output_ui, has_subgraph = get_output_data(obj, input_data_all, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\ComfyUI\execution.py", line 202, in get_output_data
return_values = _map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True, execution_block_cb=execution_block_cb, pre_execute_cb=pre_execute_cb)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\ComfyUI\execution.py", line 174, in _map_node_over_list
process_inputs(input_dict, i)
File "C:\Temp\ComfyUI_windows_portable\ComfyUI\execution.py", line 163, in process_inputs
results.append(getattr(obj, func)(**inputs))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI_Florence2SAM2\__init__.py", line 83, in _process_image
annotated_image, mask, masked_image = process_image(torch_device, sam2_model, img, prompt, keep_model_loaded)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI_Florence2SAM2\app.py", line 184, in process_image
annotated_image, mask_list = _process_image(IMAGE_OPEN_VOCABULARY_DETECTION_MODE, image, promt)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\utils\_contextlib.py", line 116, in decorate_context
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\amp\autocast_mode.py", line 44, in decorate_autocast
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI_Florence2SAM2\app.py", line 243, in _process_image
detections = run_sam_inference(SAM_IMAGE_MODEL, image_input, detections)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\ComfyUI\custom_nodes\ComfyUI_Florence2SAM2\utils\sam.py", line 75, in run_sam_inference
mask, score, _ = model.predict(box=detections.xyxy, multimask_output=False)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\python_embeded\Lib\site-packages\sam2\sam2_image_predictor.py", line 269, in predict
masks, iou_predictions, low_res_masks = self._predict(
^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\utils\_contextlib.py", line 116, in decorate_context
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\python_embeded\Lib\site-packages\sam2\sam2_image_predictor.py", line 398, in _predict
low_res_masks, iou_predictions, _, _ = self.model.sam_mask_decoder(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\python_embeded\Lib\site-packages\sam2\modeling\sam\mask_decoder.py", line 136, in forward
masks, iou_pred, mask_tokens_out, object_score_logits = self.predict_masks(
^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\python_embeded\Lib\site-packages\sam2\modeling\sam\mask_decoder.py", line 213, in predict_masks
hs, src = self.transformer(src, pos_src, tokens)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\python_embeded\Lib\site-packages\sam2\modeling\sam\transformer.py", line 100, in forward
queries, keys = layer(
^^^^^^
File "C:\Temp\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\python_embeded\Lib\site-packages\sam2\modeling\sam\transformer.py", line 166, in forward
queries = self.self_attn(q=queries, k=queries, v=queries)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1736, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\python_embeded\Lib\site-packages\torch\nn\modules\module.py", line 1747, in _call_impl
return forward_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Temp\ComfyUI_windows_portable\python_embeded\Lib\site-packages\sam2\modeling\sam\transformer.py", line 250, in forward
out = F.scaled_dot_product_attention(q, k, v, dropout_p=dropout_p)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
I am getting the following error:
Failing node: RdancerFlorence2SAM2GenerateMask
Error message: No available kernel. Aborting execution.
Environment: Windows 11, RTX 4080, NVIDIA driver 566, CUDA 12.4
ComfyUI reports that the nodes are properly installed.
Flash Attention is installed.
ComfyUI Error Report
Error Details
Stack Trace