from __future__ import annotations

import copy
from typing import Any

import numpy as np
from PIL import Image

from sahi.annotation import ObjectAnnotation
from sahi.utils.coco import CocoPrediction
from sahi.utils.cv import read_image_as_pil, visualize_object_predictions
from sahi.utils.file import Path

 Zdd Zdd ZdS )PredictionScorevaluefloat | np.ndarrayc                 C  s&   t |jdkrt| }|| _dS )zP
        Arguments:
            score: prediction score between 0 and 1
        numpyN)type
__module__copydeepcopytolistr   )selfr    r   K/home/jeff/fluffinator/venv/lib/python3.10/site-packages/sahi/prediction.py__init__   s   
zPredictionScore.__init__c                 C  
   | j |kS )z)Check if score is greater than threshold.r   r   	thresholdr   r   r   is_greater_than_threshold   s   
z)PredictionScore.is_greater_than_thresholdc                 C  s
   | j |kS Nr   r   r   r   r   __eq__      
zPredictionScore.__eq__c                 C  r   r   r   r   r   r   r   __gt__"   r   zPredictionScore.__gt__c                 C  s
   | j |k S r   r   r   r   r   r   __lt__%   r   zPredictionScore.__lt__c                 C  s   d| j  dS )NzPredictionScore: <value: >r   r   r   r   r   __repr__(   s   zPredictionScore.__repr__N)r   r   )	__name__r   __qualname__r   r   r   r   r    r#   r   r   r   r   r
      s    
r
class ObjectPrediction(ObjectAnnotation):
    """Class for handling detection model predictions."""

    def __init__(
        self,
        bbox: list[int] | None = None,
        category_id: int | None = None,
        category_name: str | None = None,
        segmentation: list[list[float]] | None = None,
        score: float = 0.0,
        shift_amount: list[int] | None = [0, 0],
        full_shape: list[int] | None = None,
    ):
        """Creates ObjectPrediction from bbox, score, category_id, category_name, segmentation.

        Arguments:
            bbox: list
                [minx, miny, maxx, maxy]
            score: float
                Prediction score between 0 and 1
            category_id: int
                ID of the object category
            category_name: str
                Name of the object category
            segmentation: List[List]
                [
                    [x1, y1, x2, y2, x3, y3, ...],
                    [x1, y1, x2, y2, x3, y3, ...],
                    ...
                ]
            shift_amount: list
                To shift the box and mask predictions from sliced image
                to full sized image, should be in the form of [shift_x, shift_y]
            full_shape: list
                Size of the full image after shifting, should be in
                the form of [height, width]
        """
        self.score = PredictionScore(score)
        super().__init__(
            bbox=bbox,
            category_id=category_id,
            segmentation=segmentation,
            category_name=category_name,
            shift_amount=shift_amount,
            full_shape=full_shape,
        )

    def get_shifted_object_prediction(self):
        """Returns shifted version ObjectPrediction.

        Shifts bbox and mask coords. Used for mapping sliced predictions over full image.
        """
        if self.mask:
            shifted_mask = self.mask.get_shifted_mask()
            return ObjectPrediction(
                bbox=self.bbox.get_shifted_box().to_xyxy(),
                category_id=self.category.id,
                score=self.score.value,
                segmentation=shifted_mask.segmentation,
                category_name=self.category.name,
                shift_amount=[0, 0],
                full_shape=shifted_mask.full_shape,
            )
        else:
            return ObjectPrediction(
                bbox=self.bbox.get_shifted_box().to_xyxy(),
                category_id=self.category.id,
                score=self.score.value,
                segmentation=None,
                category_name=self.category.name,
                shift_amount=[0, 0],
                full_shape=None,
            )

    def to_coco_prediction(self, image_id=None):
        """Returns sahi.utils.coco.CocoPrediction representation of ObjectAnnotation."""
        if self.mask:
            coco_prediction = CocoPrediction.from_coco_segmentation(
                segmentation=self.mask.segmentation,
                category_id=self.category.id,
                category_name=self.category.name,
                score=self.score.value,
                image_id=image_id,
            )
        else:
            coco_prediction = CocoPrediction.from_coco_bbox(
                bbox=self.bbox.to_xywh(),
                category_id=self.category.id,
                category_name=self.category.name,
                score=self.score.value,
                image_id=image_id,
            )
        return coco_prediction

    def to_fiftyone_detection(self, image_height: int, image_width: int):
        """Returns fiftyone.Detection representation of ObjectPrediction."""
        try:
            import fiftyone as fo
        except ImportError:
            raise ImportError(
                'Please run "pip install -U fiftyone" to install fiftyone first for fiftyone conversion.'
            )

        x1, y1, x2, y2 = self.bbox.to_xyxy()
        # fiftyone expects a relative [x, y, w, h] box in the 0-1 range
        rel_box = [x1 / image_width, y1 / image_height, (x2 - x1) / image_width, (y2 - y1) / image_height]
        fiftyone_detection = fo.Detection(label=self.category.name, bounding_box=rel_box, confidence=self.score.value)
        return fiftyone_detection

    def __repr__(self):
        return (
            f"ObjectPrediction<\n"
            f"    bbox: {self.bbox},\n"
            f"    mask: {self.mask},\n"
            f"    score: {self.score},\n"
            f"    category: {self.category}>"
        )

r&   c                   @  sV   e Zd Ze fd!ddZ					
	
	d"d#ddZdd Zd$d%ddZdd Zdd  Z	d	S )&PredictionResultobject_prediction_listlist[ObjectPrediction]imageImage.Image | str | np.ndarraydurations_in_secondsdict[str, Any]c                 C  s*   t || _| jj\| _| _|| _|| _d S r   )r   r[   sizerG   rE   rY   r]   )r   rY   r[   r]   r   r   r   r      s   

zPredictionResult.__init__NFprediction_visual
export_dirstr	text_sizefloat | Nonerect_thr*   hide_labelsbool	hide_conf	file_namec                 C  s>   t |jddd tt| j| j||dd||||dd dS )aI  

        Args:
            export_dir: directory for resulting visualization to be exported
            text_size: size of the category name over box
            rect_th: rectangle thickness
            hide_labels: hide labels
            hide_conf: hide confidence
            file_name: saving name
        Returns:

        T)parentsexist_okNpng)r[   rY   re   rc   text_thcolorrf   rh   
output_dirri   export_format)r	   mkdirr   npascontiguousarrayr[   rY   )r   ra   rc   re   rf   rh   ri   r   r   r   export_visuals   s   

zPredictionResult.export_visualsc                 C  s$   g }| j D ]
}|| j q|S r   rY   appendrD   json)r   coco_annotation_listobject_predictionr   r   r   to_coco_annotations   s   
z$PredictionResult.to_coco_annotationsr?   c                 C  s(   g }| j D ]}||j|dj q|S )N)r?   ru   )r   r?   coco_prediction_listry   r   r   r   to_coco_predictions   s   
z$PredictionResult.to_coco_predictionsc                 C  s"   g }| j D ]	}||  q|S r   )rY   rv   to_imantics_annotation)r   imantics_annotation_listry   r   r   r   to_imantics_annotations   s   
z(PredictionResult.to_imantics_annotationsc                 C  sP   zdd l }W n ty   tdw g }| jD ]}||j| j| jd q|S )Nr   zKPlease run "uv pip install -U fiftyone" to install fiftyone for conversion.)rE   rG   )rK   rL   rY   rv   rU   rE   rG   )r   rN   fiftyone_detection_listry   r   r   r   to_fiftyone_detections   s   
z'PredictionResult.to_fiftyone_detections)rY   rZ   r[   r\   r]   r^   )NNFFr`   )ra   rb   rc   rd   re   r*   rf   rg   rh   rg   ri   rb   r   )r?   r*   )
r$   r   r%   dictr   rt   rz   r|   r   r   r   r   r   r   rX      s    $rX   )
__future__r   r   typingr   r   rr   PILr   sahi.annotationr   sahi.utils.cocor   sahi.utils.cvr   r   sahi.utils.filer	   r
   r&   rX   r   r   r   r   <module>   s    r
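

# A minimal usage sketch (illustration only, not part of the upstream sahi module):
# it builds ObjectPrediction instances by hand with made-up bbox/score values, maps
# one of them from sliced-image to full-image coordinates, and wraps both in a
# PredictionResult backed by a blank numpy image. All literal values below are
# arbitrary example inputs, not defaults taken from sahi.
if __name__ == "__main__":
    # prediction from a slice whose top-left corner sits at (100, 200) in the full image
    sliced_prediction = ObjectPrediction(
        bbox=[10, 20, 110, 170],
        category_id=0,
        category_name="car",
        score=0.87,
        shift_amount=[100, 200],
        full_shape=[1080, 1920],  # [height, width] of the full image
    )
    # shift the bbox coords back onto the full image
    full_image_prediction = sliced_prediction.get_shifted_object_prediction()
    print(full_image_prediction)

    # prediction already expressed in full-image coordinates
    direct_prediction = ObjectPrediction(
        bbox=[300, 400, 380, 520], category_id=1, category_name="person", score=0.55
    )

    # PredictionScore supports comparison against a float threshold
    print(direct_prediction.score.is_greater_than_threshold(0.5))  # True
    print(direct_prediction.score > 0.9)  # False

    # bundle predictions together with the image they belong to
    result = PredictionResult(
        object_prediction_list=[full_image_prediction, direct_prediction],
        image=np.zeros((1080, 1920, 3), dtype=np.uint8),
    )
    print(result.to_coco_predictions(image_id=1))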