from __future__ import annotations

from typing import Any

import cv2
import numpy as np

from sahi.logger import logger
from sahi.models.base import DetectionModel
from sahi.prediction import ObjectPrediction
from sahi.utils.compatibility import fix_full_shape_list, fix_shift_amount_list
from sahi.utils.cv import get_coco_segmentation_from_bool_mask
from sahi.utils.import_utils import check_requirements


class UltralyticsDetectionModel(DetectionModel):
    """Detection model for Ultralytics YOLO models.

    Supports both PyTorch (.pt) and ONNX (.onnx) models.
    """

    def __init__(self, *args, **kwargs):
        self.fuse = kwargs.pop("fuse", False)
        existing_packages = getattr(self, "required_packages", None) or []
        self.required_packages = [*list(existing_packages), "ultralytics"]
        super().__init__(*args, **kwargs)

    def load_model(self):
        """Detection model is initialized and set to self.model.

        Supports both PyTorch (.pt) and ONNX (.onnx) models.
        """
        from ultralytics import YOLO

        if self.model_path and ".onnx" in self.model_path:
            check_requirements(["onnx", "onnxruntime"])

        try:
            model = YOLO(self.model_path)
            # ONNX models cannot be moved between devices
            if self.model_path and not self.model_path.endswith(".onnx"):
                model.to(self.device)
            self.set_model(model)
            if self.fuse and hasattr(model, "fuse"):
                model.fuse()
        except Exception as e:
            raise TypeError("model_path is not a valid Ultralytics model path: ", e)

    def set_model(self, model: Any, **kwargs):
        """Sets the underlying Ultralytics model.

        Args:
            model: Any
                A Ultralytics model
        """
        self.model = model

        # set category_mapping
        if not self.category_mapping:
            category_mapping = {str(ind): category_name for ind, category_name in enumerate(self.category_names)}
            self.category_mapping = category_mapping

    def perform_inference(self, image: np.ndarray):
        """Prediction is performed using self.model and the prediction result is set to self._original_predictions.

        Args:
            image: np.ndarray
                A numpy array that contains the image to be predicted. 3 channel image should be in RGB order.
        """
        import torch

        if self.model is None:
            raise ValueError("Model is not loaded, load it by calling .load_model()")

        kwargs = {"cfg": self.config_path, "verbose": False, "conf": self.confidence_threshold, "device": self.device}

        if self.image_size is not None:
            kwargs = {"imgsz": self.image_size, **kwargs}

        prediction_result = self.model(image[:, :, ::-1], **kwargs)  # YOLO expects numpy arrays to have BGR

        if self.has_mask:
            from ultralytics.engine.results import Masks

            if not prediction_result[0].masks:
                # create empty masks so downstream code can rely on .masks being present
                device = self.model.device if hasattr(self.model, "device") else "cpu"
                prediction_result[0].masks = Masks(
                    torch.tensor([], device=device), prediction_result[0].boxes.orig_shape
                )

            # confidence threshold is already applied above, no need to filter again
            prediction_result = [(result.boxes.data, result.masks.data) for result in prediction_result]

        elif self.is_obb:
            # for OBB task, collect (xyxy, conf, cls) rows plus OBB corner points in xyxyxyxy format
            device = getattr(self.model, "device", "cpu")
            prediction_result = [
                (
                    torch.cat(
                        [
                            result.obb.xyxy,  # box coordinates
                            result.obb.conf.unsqueeze(-1),  # confidence scores
                            result.obb.cls.unsqueeze(-1),  # class ids
                        ],
                        dim=1,
                    )
                    if result.obb is not None
                    else torch.empty((0, 6), device=device),
                    # OBB points in (N, 4, 2) format
                    result.obb.xyxyxyxy if result.obb is not None else torch.empty((0, 4, 2), device=device),
                )
                for result in prediction_result
            ]
        else:
            prediction_result = [result.boxes.data for result in prediction_result]

        self._original_predictions = prediction_result
        self._original_shape = image.shape

    @property
    def category_names(self):
        if hasattr(self.model, "names") and self.model.names:
            return self.model.names.values()
        if self.category_mapping:
            return list(self.category_mapping.values())
        raise ValueError("Category names not available. Please provide category_mapping for ONNX models.")

    @property
    def num_categories(self):
        """Returns number of categories."""
        if hasattr(self.model, "names") and self.model.names:
            return len(self.model.names)
        if self.category_mapping:
            return len(self.category_mapping)
        raise ValueError("Cannot determine number of categories. Please provide category_mapping for ONNX models.")

    @property
    def has_mask(self):
        """Returns if model output contains segmentation mask."""
        if hasattr(self.model, "overrides") and "task" in self.model.overrides:
            return self.model.overrides["task"] == "segment"
        if hasattr(self.model, "task"):
            return self.model.task == "segment"
        if self.model_path and isinstance(self.model_path, str):
            return "seg" in self.model_path.lower()
        return False

    @property
    def is_obb(self):
        """Returns if model output contains oriented bounding boxes."""
        if hasattr(self.model, "overrides") and "task" in self.model.overrides:
            return self.model.overrides["task"] == "obb"
        if hasattr(self.model, "task"):
            return self.model.task == "obb"
        if self.model_path and isinstance(self.model_path, str):
            return "obb" in self.model_path.lower()
        return False

    def _create_object_prediction_list_from_original_predictions(
        self,
        shift_amount_list: list[list[int]] | None = [[0, 0]],
        full_shape_list: list[list[int]] | None = None,
    ):
        """self._original_predictions is converted to a list of prediction.ObjectPrediction and set to
        self._object_prediction_list_per_image.

        Args:
            shift_amount_list: list of list
                To shift the box and mask predictions from sliced image to full sized image, should
                be in the form of List[[shift_x, shift_y],[shift_x, shift_y],...]
            full_shape_list: list of list
                Size of the full image after shifting, should be in the form of
                List[[height, width],[height, width],...]
        """
        original_predictions = self._original_predictions

        shift_amount_list = fix_shift_amount_list(shift_amount_list)
        full_shape_list = fix_full_shape_list(full_shape_list)

        # handle all predictions
        object_prediction_list_per_image = []
        for image_ind, image_predictions in enumerate(original_predictions):
            shift_amount = shift_amount_list[image_ind]
            full_shape = None if full_shape_list is None else full_shape_list[image_ind]
            object_prediction_list = []

            if self.has_mask or self.is_obb:
                boxes = image_predictions[0].cpu().detach().numpy()
                masks_or_points = image_predictions[1].cpu().detach().numpy()
            else:
                boxes = image_predictions.data.cpu().detach().numpy()
                masks_or_points = None

            # process predictions
            for pred_ind, prediction in enumerate(boxes):
                bbox = prediction[:4].tolist()
                score = prediction[4]
                category_id = int(prediction[5])
                category_name = self.category_mapping[str(category_id)]

                # fix negative box coords
                bbox = [max(0, coord) for coord in bbox]

                # fix out of image box coords
                if full_shape is not None:
                    bbox[0] = min(full_shape[1], bbox[0])
                    bbox[1] = min(full_shape[0], bbox[1])
                    bbox[2] = min(full_shape[1], bbox[2])
                    bbox[3] = min(full_shape[0], bbox[3])

                # ignore invalid predictions
                if not (bbox[0] < bbox[2] and bbox[1] < bbox[3]):
                    logger.warning(f"ignoring invalid prediction with bbox: {bbox}")
                    continue

                segmentation = None
                if masks_or_points is not None:
                    if self.has_mask:
                        bool_mask = masks_or_points[pred_ind]
                        # resize mask to original image size
                        bool_mask = cv2.resize(
                            bool_mask.astype(np.uint8), (self._original_shape[1], self._original_shape[0])
                        )
                        segmentation = get_coco_segmentation_from_bool_mask(bool_mask)
                    else:  # is_obb
                        obb_points = masks_or_points[pred_ind]
                        segmentation = [obb_points.reshape(-1).tolist()]

                    if len(segmentation) == 0:
                        continue

                object_prediction = ObjectPrediction(
                    bbox=bbox,
                    category_id=category_id,
                    score=score,
                    segmentation=segmentation,
                    category_name=category_name,
                    shift_amount=shift_amount,
                    full_shape=self._original_shape[:2] if full_shape is None else full_shape,  # (height, width)
                )
                object_prediction_list.append(object_prediction)
            object_prediction_list_per_image.append(object_prediction_list)

        self._object_prediction_list_per_image = object_prediction_list_per_image