
"""
:func:`~pandas.eval` source string parsing functions
"""
from __future__ import annotations

from io import StringIO
from keyword import iskeyword
import token
import tokenize
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from collections.abc import Hashable, Iterator

# Token type used to tag backtick quoted strings; ordinary Python
# tokenization never produces this value.
BACKTICK_QUOTED_STRING = 100


def create_valid_python_identifier(name: str) -> str:
    """
    Create valid Python identifiers from any string.

    Check if name contains any special characters. If it contains any
    special characters, the special characters will be replaced by
    a special string and a prefix is added.

    Raises
    ------
    SyntaxError
        If the returned name is not a valid Python identifier, raise an exception.
        This can happen if there is a hash character (#) in the name, as the
        tokenizer will then terminate and not find the backtick.
        It can also happen for characters that fall outside the range (U+0001..U+007F).
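
    Examples
    --------
    Illustrative only; the exact output follows the replacement table defined
    in this function.

    >>> create_valid_python_identifier("it's")
    'BACKTICK_QUOTED_STRING_it_SINGLEQUOTE_s'
    >>> create_valid_python_identifier("1column")
    'BACKTICK_QUOTED_STRING_1column'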
    __QUESTIONMARK__EXCLAMATIONMARK__DOLLARSIGN_
_EUROSIGN__DEGREESIGN__SINGLEQUOTE__DOUBLEQUOTE_) ?!$u   €   °'" BACKTICK_QUOTED_STRING_zCould not convert 'z' to a valid Python identifier.)isidentifierr   tokenizeEXACT_TOKEN_TYPESitemstokentok_nameupdatejoingetSyntaxError)namechartokvalspecial_characters_replacementss       V/var/www/horilla/myenv/lib/python3.12/site-packages/pandas/core/computation/parsing.pycreate_valid_python_identifierr+      s      9T? &77==?'D& 	%..()++'# ' $**!$   	
  77PTU377dCUVD$TF+D/v5TUVVK5'( Vs   "CCc                \    | \  }}|t         k(  rt        j                  t        |      fS ||fS )a[  
    Clean up a column name if surrounded by backticks.

    Backtick quoted strings are indicated by a certain tokval value. If a string
    is a backtick quoted token it will be processed by
    :func:`create_valid_python_identifier` so that the parser can find this
    string when the query is executed.
    In this case the tok will get the NAME tokval.

    Parameters
    ----------
    tok : tuple of int, str
        ints correspond to the all caps constants in the tokenize module

    Returns
    -------
    tok : Tuple[int, str]
        Either the input token or the replacement values.
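
    Examples
    --------
    Illustrative only; assumes the replacement rules of
    :func:`create_valid_python_identifier` above.

    >>> toknum, tokval = clean_backtick_quoted_toks((BACKTICK_QUOTED_STRING, "my col"))
    >>> toknum == tokenize.NAME
    True
    >>> tokval
    'BACKTICK_QUOTED_STRING_my_col'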
    """
    toknum, tokval = tok
    if toknum == BACKTICK_QUOTED_STRING:
        return tokenize.NAME, create_valid_python_identifier(tokval)
    return toknum, tokval


def clean_column_name(name: Hashable) -> Hashable:
    """
    Function to emulate the cleaning of a backtick quoted name.

    The purpose of this function is to see what happens to the name of an
    identifier if it goes through the process of being parsed as Python code
    inside a backtick quoted string and then being cleaned
    (stripped of any special characters).

    Parameters
    ----------
    name : hashable
        Name to be cleaned.

    Returns
    -------
    name : hashable
        Returns the name after tokenizing and cleaning.

    Notes
    -----
        In some cases, a name cannot be converted to a valid Python identifier.
        In that case :func:`tokenize_string` raises a SyntaxError.
        We then simply return the name unmodified.

        If this name was used in the query string (which would make the query call
        impossible), an error will be raised by :func:`tokenize_backtick_quoted_string`
        instead, which is not caught and propagates to the user level.
    """
    try:
        # Wrap the name in backticks and run it through the tokenizer, then
        # clean the resulting token the same way the query parser would.
        tokenized = tokenize_string(f"`{name}`")
        tokval = next(tokenized)[1]
        return create_valid_python_identifier(tokval)
    except SyntaxError:
        return name


def tokenize_backtick_quoted_string(
    token_generator: Iterator[tokenize.TokenInfo], source: str, string_start: int
) -> tuple[int, str]:
    """
    Creates a token from a backtick quoted string.

    Moves the token_generator forward until right after the next backtick.

    Parameters
    ----------
    token_generator : Iterator[tokenize.TokenInfo]
        The generator that yields the tokens of the source string (Tuple[int, str]).
        The generator is at the first token after the backtick (`)

    source : str
        The Python source code string.

    string_start : int
        This is the start of the backtick quoted string inside the source string.

    Returns
    -------
    tok: Tuple[int, str]
        The token that represents the backtick quoted string.
        The integer is equal to BACKTICK_QUOTED_STRING (100).
    """
    # Consume tokens until the closing backtick; the span in between is the
    # quoted column name.
    for _, tokval, start, _, _ in token_generator:
        if tokval == "`":
            string_end = start[1]
            break

    return BACKTICK_QUOTED_STRING, source[string_start:string_end]


def tokenize_string(source: str) -> Iterator[tuple[int, str]]:
    """
    Tokenize a Python source code string.

    Parameters
    ----------
    source : str
        The Python source code string.

    Returns
    -------
    tok_generator : Iterator[Tuple[int, str]]
        An iterator yielding all tokens with only toknum and tokval (Tuple[int, str]).
    """
    line_reader = StringIO(source).readline
    token_generator = tokenize.generate_tokens(line_reader)

    # Loop over all tokens until a backtick (`) is found.
    # Then, take all tokens until the next backtick to form a backtick quoted string.
    for toknum, tokval, start, _, _ in token_generator:
        if tokval == "`":
            try:
                yield tokenize_backtick_quoted_string(
                    token_generator, source, string_start=start[1] + 1
                )
            except Exception as err:
                raise SyntaxError(f"Failed to parse backticks in '{source}'.") from err
        else:
            yield toknum, tokval
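

# ---------------------------------------------------------------------------
# Illustrative sketch (comments only, not executed): how these helpers compose.
# The query-parsing layer elsewhere in pandas.core.computation is assumed to
# tokenize the query source, map clean_backtick_quoted_toks over the resulting
# (toknum, tokval) pairs, and untokenize them back into a plain expression;
# the exact call site is an assumption here, not defined in this module.
#
#     source = "`my col!` > 10"
#     cleaned = tokenize.untokenize(
#         clean_backtick_quoted_toks(tok) for tok in tokenize_string(source)
#     )
#     # cleaned now references "BACKTICK_QUOTED_STRING_my_col_EXCLAMATIONMARK_",
#     # and clean_column_name maps the original column label to that same
#     # identifier so both sides line up at evaluation time.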