
    Owg]                    f   d Z ddlmZ ddlZddlZddlZddlmZmZm	Z	 ddl
Z
ddl
mZ ddlmZ ddlmZ ddlmZ dd	lmZ dd
lmZ ddlmZ ddlmZ ddlmZ ddlZddlmZmZ ddl m!Z! ddl"m#Z# ddl$m%Z%m&Z&m'Z'm(Z(m)Z) erddl*m+Z+m,Z,m-Z-m.Z.m/Z/ d dZ0	 	 	 d!	 	 	 	 	 	 	 	 	 	 	 d"dZ1 G d d      Z2 G d de2      Z3 G d de2      Z4 ee!d         	 	 	 	 	 	 	 d#	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 d$d       Z5 ee!d         dddejl                  ejl                  ddf	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 d%d       Z7y)&z parquet compat     )annotationsN)TYPE_CHECKINGAnyLiteral)catch_warnings)using_pyarrow_string_dtype)_get_option)lib)import_optional_dependencyAbstractMethodError)doc)find_stack_level)check_dtype_backend)	DataFrame
get_option)_shared_docs)arrow_string_types_mapper)	IOHandles
get_handleis_fsspec_urlis_urlstringify_path)DtypeBackendFilePath
ReadBufferStorageOptionsWriteBufferBaseImplc                $   | dk(  rt        d      } | dk(  r,t        t        g}d}|D ]  }	  |       c S  t        d|       | dk(  r
t               S | dk(  r
t               S t        d	      # t        $ r}|dt	        |      z   z  }Y d}~dd}~ww xY w)
zreturn our implementationautozio.parquet.engine z
 - NzUnable to find a usable engine; tried using: 'pyarrow', 'fastparquet'.
A suitable version of pyarrow or fastparquet is required for parquet support.
Trying to import the above resulted in these errors:pyarrowfastparquetz.engine must be one of 'pyarrow', 'fastparquet')r   PyArrowImplFastParquetImplImportErrorstr
ValueError)engineengine_classes
error_msgsengine_classerrs        H/var/www/horilla/myenv/lib/python3.12/site-packages/pandas/io/parquet.py
get_enginer0   3   s    /0%7
* 	1L1#~%	1 C l
 	
 }	=	   
E
FF%  1gC00
1s   A++	B4B

Bstorage_optionsc                ,   t        |       }|t        dd      }t        dd      }|#t        ||j                        r|rOt	        d      |!t        ||j
                  j                        rn!t        dt        |      j                         t        |      rk|i|5t        d      }t        d      }	 |j                  j                  |       \  }}|Mt        d      } |j                  j                  |fi |xs i \  }}n|rt!        |      r|d	k7  rt        d
      d}	|sN|sLt        |t"              r<t$        j&                  j)                  |      st+        ||d|      }	d}|	j,                  }||	|fS # t        |j                  f$ r Y w xY w)zFile handling for PyArrow.Nz
pyarrow.fsignore)errorsfsspecz8storage_options not supported with a pyarrow FileSystem.z9filesystem must be a pyarrow or fsspec FileSystem, not a r#   rbz8storage_options passed with buffer, or non-supported URLFis_textr1   )r   r   
isinstance
FileSystemNotImplementedErrorspecAbstractFileSystemr)   type__name__r   from_uri	TypeErrorArrowInvalidcore	url_to_fsr   r(   ospathisdirr   handle)
rF   fsr1   modeis_dirpath_or_handlepa_fsr5   pahandless
             r/   _get_path_or_handlerP   U   s    $D)N	~*<I+HXFB0@0@!A)N  Jr6;;3Q3Q$Rb**+-  ^$"+I6B.|<E%*%5%5%>%>t%D"N :/9F!6!6!6"#2#8b"B 
&"8DDL STTG~s+n-
 D%
  7B&&7 r/ s   7E; ;FFc                  0    e Zd Zedd       ZddZdddZy)	r   c                :    t        | t              st        d      y )Nz+to_parquet only supports IO with DataFrames)r9   r   r)   )dfs    r/   validate_dataframezBaseImpl.validate_dataframe   s    "i(JKK )    c                    t        |       Nr   )selfrS   rF   compressionkwargss        r/   writezBaseImpl.write       !$''rU   Nc                    t        |       rW   r   )rX   rF   columnsrZ   s       r/   readzBaseImpl.read   r\   rU   )rS   r   returnNone)rS   r   rW   )r`   r   )r?   
__module____qualname__staticmethodrT   r[   r_    rU   r/   r   r      s     L L((rU   c                  z    e Zd ZddZ	 	 	 	 	 d	 	 	 	 	 	 	 	 	 	 	 	 	 ddZdddej                  ddf	 	 	 	 	 	 	 d	dZy)
r%   c                <    t        dd       dd l}dd l}|| _        y )Nr#   z(pyarrow is required for parquet support.extrar   )r   pyarrow.parquet(pandas.core.arrays.arrow.extension_typesapi)rX   r#   pandass      r/   __init__zPyArrowImpl.__init__   s!    "G	
 	 	8rU   Nc                   | j                  |       d|j                  dd       i}	|||	d<    | j                  j                  j                  |fi |	}
|j
                  rNdt        j                  |j
                        i}|
j                  j                  }i ||}|
j                  |      }
t        |||d|d u      \  }}}t        |t        j                        rmt        |d      rat        |j                   t"        t$        f      rAt        |j                   t$              r|j                   j'                         }n|j                   }	 |- | j                  j(                  j*                  |
|f|||d| n+ | j                  j(                  j,                  |
|f||d| ||j/                          y y # ||j/                          w w xY w)	Nschemapreserve_indexPANDAS_ATTRSwb)r1   rJ   rK   name)rY   partition_cols
filesystem)rY   rv   )rT   poprl   Tablefrom_pandasattrsjsondumpsrp   metadatareplace_schema_metadatarP   r9   ioBufferedWriterhasattrrt   r(   bytesdecodeparquetwrite_to_datasetwrite_tableclose)rX   rS   rF   rY   indexr1   ru   rv   rZ   from_pandas_kwargstabledf_metadataexisting_metadatamerged_metadatarL   rO   s                   r/   r[   zPyArrowImpl.write   s    	#.6

8T8R-S38/0***2D1CD88)4::bhh+?@K % 5 5B!2BkBO11/BE.A+!-/
+ ~r'8'89/>..e=.--u5!/!4!4!;!;!=!/!4!4	 )1  11" !,#1)  -  ,," !,)	
  " #w" #s   AF: :GFc                   d|d<   i }	|dk(  rddl m}
  |
       }|j                  |	d<   n0|dk(  rt        j                  |	d<   nt               rt               |	d<   t        dd	      }|d
k(  rd|	d<   t        |||d      \  }}}	  | j                  j                  j                  |f|||d|} |j                  di |	}|d
k(  r|j                  d
d      }|j                  j                  rKd|j                  j                  v r3|j                  j                  d   }t!        j"                  |      |_        |||j'                          S S # ||j'                          w w xY w)NTuse_pandas_metadatanumpy_nullabler   )_arrow_dtype_mappingtypes_mapperr#   zmode.data_manager)silentarraysplit_blocksr6   )r1   rJ   )r^   rv   filtersF)copys   PANDAS_ATTRSre   )pandas.io._utilr   getpd
ArrowDtyper   r   r	   rP   rl   r   
read_table	to_pandas_as_managerrp   r}   r{   loadsrz   r   )rX   rF   r^   r   use_nullable_dtypesdtype_backendr1   rv   rZ   to_pandas_kwargsr   mappingmanagerrL   rO   pa_tableresultr   s                     r/   r_   zPyArrowImpl.read   s    )-$%,,<*,G/6{{^,i'/1}}^,')/H/J^,1$?g/3^,.A+	/
+	 2txx''22%	
 H (X'';*:;F'!++G%+@''"hoo&>&>>"*//":":?"KK#'::k#:FL" #w" #s   B7E E%r`   ra   snappyNNNN)rS   r   rF   zFilePath | WriteBuffer[bytes]rY   
str | Noner   bool | Noner1   StorageOptions | Noneru   list[str] | Noner`   ra   )r   boolr   DtypeBackend | lib.NoDefaultr1   r   r`   r   )r?   rb   rc   rn   r[   r
   
no_defaultr_   re   rU   r/   r%   r%      s    	 #+!15+/@ @  ,@   	@ 
 @  /@  )@  
@ J $)69nn156 
 "6  46  /6  
6 rU   r%   c                  N    e Zd ZddZ	 	 	 	 	 d	 	 	 	 	 	 	 ddZ	 	 	 	 d	 	 	 d	dZy)
r&   c                ,    t        dd      }|| _        y )Nr$   z,fastparquet is required for parquet support.rh   )r   rl   )rX   r$   s     r/   rn   zFastParquetImpl.__init__)  s     1!O
 rU   Nc                  	 | j                  |       d|v r|t        d      d|v r|j                  d      }|d|d<   |t        d      t	        |      }t        |      rt        d      		fd|d<   nrt        d	      t        d
      5   | j                  j                  ||f|||d| d d d        y # 1 sw Y   y xY w)Npartition_onzYCannot use both partition_on and partition_cols. Use partition_cols for partitioning datahivefile_scheme9filesystem is not implemented for the fastparquet engine.r5   c                P     j                   | dfi xs i j                         S )Nrs   )open)rF   _r5   r1   s     r/   <lambda>z'FastParquetImpl.write.<locals>.<lambda>T  s.    +&++d3.4"3df rU   	open_withz?storage_options passed with file object or non-fsspec file pathT)record)rY   write_indexr   )
rT   r)   rw   r;   r   r   r   r   rl   r[   )
rX   rS   rF   rY   r   ru   r1   rv   rZ   r5   s
         `  @r/   r[   zFastParquetImpl.write1  s    	#V#(BK  V##ZZ7N%$*F=!!%K 
 d#/9F#F; Q  4( 	DHHNN (!+ 	 	 	s   #B>>Cc                   i }|j                  dd      }|j                  dt        j                        }	d|d<   |rt        d      |	t        j                  urt        d      |t	        d      t        |      }d }
t        |      r1t        d      } |j                  |d	fi |xs i j                  |d
<   nJt        |t              r:t        j                  j                  |      st        |d	d|      }
|
j                   }	  | j"                  j$                  |fi |} |j&                  d||d||
|
j)                          S S # |
|
j)                          w w xY w)Nr   Fr   pandas_nullszNThe 'use_nullable_dtypes' argument is not supported for the fastparquet enginezHThe 'dtype_backend' argument is not supported for the fastparquet enginer   r5   r6   rI   r7   )r^   r   re   )rw   r
   r   r)   r;   r   r   r   r   rI   r9   r(   rE   rF   rG   r   rH   rl   ParquetFiler   r   )rX   rF   r^   r   r1   rv   rZ   parquet_kwargsr   r   rO   r5   parquet_files                r/   r_   zFastParquetImpl.readf  sm    *,$jj)>F

?CNNC).~&%  .%  !%K  d#/9F#.6;;tT#Uo>SQS#U#X#XN4 c"277==+> !dE?G >>D	 /488//GGL)<))U'7UfU" #w" #s   1E
 
Er   r   )rS   r   rY   z*Literal['snappy', 'gzip', 'brotli'] | Noner1   r   r`   ra   )NNNN)r1   r   r`   r   )r?   rb   rc   rn   r[   r_   re   rU   r/   r&   r&   (  sn     CK1533 @	3 /3 
3p 150 
 /0  
0 rU   r&   )r1   r!   c           	         t        |t              r|g}t        |      }	|t        j                         n|}
 |	j
                  | |
f|||||d| |,t        |
t        j                        sJ |
j                         S y)a	  
    Write a DataFrame to the parquet format.

    Parameters
    ----------
    df : DataFrame
    path : str, path object, file-like object, or None, default None
        String, path object (implementing ``os.PathLike[str]``), or file-like
        object implementing a binary ``write()`` function. If None, the result is
        returned as bytes. If a string, it will be used as Root Directory path
        when writing a partitioned dataset. The engine fastparquet does not
        accept file-like objects.
    engine : {{'auto', 'pyarrow', 'fastparquet'}}, default 'auto'
        Parquet library to use. If 'auto', then the option
        ``io.parquet.engine`` is used. The default ``io.parquet.engine``
        behavior is to try 'pyarrow', falling back to 'fastparquet' if
        'pyarrow' is unavailable.

        When using the ``'pyarrow'`` engine and no storage options are provided
        and a filesystem is implemented by both ``pyarrow.fs`` and ``fsspec``
        (e.g. "s3://"), then the ``pyarrow.fs`` filesystem is attempted first.
        Use the filesystem keyword with an instantiated fsspec filesystem
        if you wish to use its implementation.
    compression : {{'snappy', 'gzip', 'brotli', 'lz4', 'zstd', None}},
        default 'snappy'. Name of the compression to use. Use ``None``
        for no compression.
    index : bool, default None
        If ``True``, include the dataframe's index(es) in the file output. If
        ``False``, they will not be written to the file.
        If ``None``, similar to ``True`` the dataframe's index(es)
        will be saved. However, instead of being saved as values,
        the RangeIndex will be stored as a range in the metadata so it
        doesn't require much space and is faster. Other indexes will
        be included as columns in the file output.
    partition_cols : str or list, optional, default None
        Column names by which to partition the dataset.
        Columns are partitioned in the order they are given.
        Must be None if path is not a string.
    {storage_options}

    filesystem : fsspec or pyarrow filesystem, default None
        Filesystem object to use when reading the parquet file. Only implemented
        for ``engine="pyarrow"``.

        .. versionadded:: 2.1.0

    kwargs
        Additional keyword arguments passed to the engine

    Returns
    -------
    bytes if no path argument is provided else None
    N)rY   r   ru   r1   rv   )r9   r(   r0   r   BytesIOr[   getvalue)rS   rF   r*   rY   r   r1   ru   rv   rZ   implpath_or_bufs              r/   
to_parquetr     s    B .#&()fDAESWKDJJ
	  %'	 	 |+rzz222##%%rU   c           
         t        |      }	|t        j                  ur0d}
|du r|
dz  }
t        j                  |
t
        t                      nd}t        |        |	j                  | f||||||d|S )a  
    Load a parquet object from the file path, returning a DataFrame.

    Parameters
    ----------
    path : str, path object or file-like object
        String, path object (implementing ``os.PathLike[str]``), or file-like
        object implementing a binary ``read()`` function.
        The string could be a URL. Valid URL schemes include http, ftp, s3,
        gs, and file. For file URLs, a host is expected. A local file could be:
        ``file://localhost/path/to/table.parquet``.
        A file URL can also be a path to a directory that contains multiple
        partitioned parquet files. Both pyarrow and fastparquet support
        paths to directories as well as file URLs. A directory path could be:
        ``file://localhost/path/to/tables`` or ``s3://bucket/partition_dir``.
    engine : {{'auto', 'pyarrow', 'fastparquet'}}, default 'auto'
        Parquet library to use. If 'auto', then the option
        ``io.parquet.engine`` is used. The default ``io.parquet.engine``
        behavior is to try 'pyarrow', falling back to 'fastparquet' if
        'pyarrow' is unavailable.

        When using the ``'pyarrow'`` engine and no storage options are provided
        and a filesystem is implemented by both ``pyarrow.fs`` and ``fsspec``
        (e.g. "s3://"), then the ``pyarrow.fs`` filesystem is attempted first.
        Use the filesystem keyword with an instantiated fsspec filesystem
        if you wish to use its implementation.
    columns : list, default=None
        If not None, only these columns will be read from the file.
    {storage_options}

        .. versionadded:: 1.3.0

    use_nullable_dtypes : bool, default False
        If True, use dtypes that use ``pd.NA`` as missing value indicator
        for the resulting DataFrame. (only applicable for the ``pyarrow``
        engine)
        As new dtypes are added that support ``pd.NA`` in the future, the
        output with this option will change to use those dtypes.
        Note: this is an experimental option, and behaviour (e.g. additional
        support dtypes) may change without notice.

        .. deprecated:: 2.0

    dtype_backend : {{'numpy_nullable', 'pyarrow'}}, default 'numpy_nullable'
        Back-end data type applied to the resultant :class:`DataFrame`
        (still experimental). Behaviour is as follows:

        * ``"numpy_nullable"``: returns nullable-dtype-backed :class:`DataFrame`
          (default).
        * ``"pyarrow"``: returns pyarrow-backed nullable :class:`ArrowDtype`
          DataFrame.

        .. versionadded:: 2.0

    filesystem : fsspec or pyarrow filesystem, default None
        Filesystem object to use when reading the parquet file. Only implemented
        for ``engine="pyarrow"``.

        .. versionadded:: 2.1.0

    filters : List[Tuple] or List[List[Tuple]], default None
        To filter out data.
        Filter syntax: [[(column, op, val), ...],...]
        where op is [==, =, >, >=, <, <=, !=, in, not in]
        The innermost tuples are transposed into a set of filters applied
        through an `AND` operation.
        The outer list combines these sets of filters through an `OR`
        operation.
        A single list of tuples can also be used, meaning that no `OR`
        operation between set of filters is to be conducted.

        Using this argument will NOT result in row-wise filtering of the final
        partitions unless ``engine="pyarrow"`` is also specified.  For
        other engines, filtering is only performed at the partition level, that is,
        to prevent the loading of some row-groups and/or files.

        .. versionadded:: 2.1.0

    **kwargs
        Any additional kwargs are passed to the engine.

    Returns
    -------
    DataFrame

    See Also
    --------
    DataFrame.to_parquet : Create a parquet object that serializes a DataFrame.

    Examples
    --------
    >>> original_df = pd.DataFrame(
    ...     {{"foo": range(5), "bar": range(5, 10)}}
    ...    )
    >>> original_df
       foo  bar
    0    0    5
    1    1    6
    2    2    7
    3    3    8
    4    4    9
    >>> df_parquet_bytes = original_df.to_parquet()
    >>> from io import BytesIO
    >>> restored_df = pd.read_parquet(BytesIO(df_parquet_bytes))
    >>> restored_df
       foo  bar
    0    0    5
    1    1    6
    2    2    7
    3    3    8
    4    4    9
    >>> restored_df.equals(original_df)
    True
    >>> restored_bar = pd.read_parquet(BytesIO(df_parquet_bytes), columns=["bar"])
    >>> restored_bar
        bar
    0    5
    1    6
    2    7
    3    8
    4    9
    >>> restored_bar.equals(original_df[['bar']])
    True

    The function uses `kwargs` that are passed directly to the engine.
    In the following example, we use the `filters` argument of the pyarrow
    engine to filter the rows of the DataFrame.

    Since `pyarrow` is the default engine, we can omit the `engine` argument.
    Note that the `filters` argument is implemented by the `pyarrow` engine,
    which can benefit from multithreading and also potentially be more
    economical in terms of memory.

    >>> sel = [("foo", ">", 2)]
    >>> restored_part = pd.read_parquet(BytesIO(df_parquet_bytes), filters=sel)
    >>> restored_part
        foo  bar
    0    3    8
    1    4    9
    zYThe argument 'use_nullable_dtypes' is deprecated and will be removed in a future version.TzFUse dtype_backend='numpy_nullable' instead of use_nullable_dtype=True.)
stacklevelF)r^   r   r1   r   r   rv   )	r0   r
   r   warningswarnFutureWarningr   r   r_   )rF   r*   r^   r1   r   r   rv   r   rZ   r   msgs              r/   read_parquetr     s    r fD#..0# 	 $&XC 	c=5E5GH#&499	'/#	 	 	rU   )r*   r(   r`   r   )Nr6   F)rF   z1FilePath | ReadBuffer[bytes] | WriteBuffer[bytes]rI   r   r1   r   rJ   r(   rK   r   r`   zVtuple[FilePath | ReadBuffer[bytes] | WriteBuffer[bytes], IOHandles[bytes] | None, Any])Nr!   r   NNNN)rS   r   rF   z$FilePath | WriteBuffer[bytes] | Noner*   r(   rY   r   r   r   r1   r   ru   r   rv   r   r`   zbytes | None)rF   zFilePath | ReadBuffer[bytes]r*   r(   r^   r   r1   r   r   zbool | lib.NoDefaultr   r   rv   r   r   z&list[tuple] | list[list[tuple]] | Noner`   r   )8__doc__
__future__r   r   r{   rE   typingr   r   r   r   r   pandas._configr   pandas._config.configr	   pandas._libsr
   pandas.compat._optionalr   pandas.errorsr   pandas.util._decoratorsr   pandas.util._exceptionsr   pandas.util._validatorsr   rm   r   r   r   pandas.core.shared_docsr   r   r   pandas.io.commonr   r   r   r   r   pandas._typingr   r   r   r   r   r0   rP   r   r%   r&   r   r   r   re   rU   r/   <module>r      s    " 	  	 
  # 5 -  > - ' 4 7  1 5   GJ .2<'
;<'<' +<' 	<'
 <'<'~
( 
(D ( D Nn h n b \"345 26&-1'+UU
.U U 	U
 U +U %U U U 6Up \"345  $-10325..6:q
&qq q +	q
 .q 0q q 4q q 6qrU   