"""
    pygments.lexers.special
    ~~~~~~~~~~~~~~~~~~~~~~~

    Special lexers.

    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import ast
import re

from pygments.lexer import Lexer
from pygments.token import Token, Error, Text
from pygments.util import get_choice_opt

__all__ = ['TextLexer', 'RawTokenLexer']


class TextLexer(Lexer):
    """
    "Null" lexer, doesn't highlight anything.
    """
    name = 'Text only'
    aliases = ['text']
    filenames = ['*.txt']
    mimetypes = ['text/plain']
    priority = 0.01

    def get_tokens_unprocessed(self, text):
        yield 0, Text, text

    def analyse_text(text):
        return TextLexer.priority


_ttype_cache = {}

line_re = re.compile('.*?\n')


class RawTokenLexer(Lexer):
    """
    Recreate a token stream formatted with the `RawTokenFormatter`.

    Additional options accepted:

    `compress`
        If set to ``"gz"`` or ``"bz2"``, decompress the token stream with
        the given compression algorithm before lexing (default: ``""``).
    """
    name = 'Raw token data'
    aliases = []
    filenames = []
    mimetypes = ['application/x-pygments-tokens']

    def __init__(self, **options):
        self.compress = get_choice_opt(options, 'compress',
                                       ['', 'none', 'gz', 'bz2'], '')
        Lexer.__init__(self, **options)

    def get_tokens(self, text):
        if self.compress:
            if isinstance(text, str):
                text = text.encode('latin1')
            try:
                if self.compress == 'gz':
                    import gzip
                    text = gzip.decompress(text)
                elif self.compress == 'bz2':
                    import bz2
                    text = bz2.decompress(text)
            except OSError:
                yield Error, text.decode('latin1')
        if isinstance(text, bytes):
            text = text.decode('latin1')

        text = text.strip('\n') + '\n'
        for i, t, v in self.get_tokens_unprocessed(text):
            yield t, v

    def get_tokens_unprocessed(self, text):
        length = 0
        for match in line_re.finditer(text):
            try:
                ttypestr, val = match.group().rstrip().split('\t', 1)
                ttype = _ttype_cache.get(ttypestr)
                if not ttype:
                    ttype = Token
                    ttypes = ttypestr.split('.')[1:]
                    for ttype_ in ttypes:
                        if not ttype_ or not ttype_[0].isupper():
                            raise ValueError('malformed token name')
                        ttype = getattr(ttype, ttype_)
                    _ttype_cache[ttypestr] = ttype
                val = ast.literal_eval(val)
                if not isinstance(val, str):
                    raise ValueError('expected str')
            except (SyntaxError, ValueError):
                val = match.group()
                ttype = Error
            yield length, ttype, val
            length += len(val)
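

# Usage sketch (illustrative addition, not part of the upstream module):
# round-trip a small snippet through RawTokenFormatter and feed the raw
# token dump back into RawTokenLexer.  Only public pygments APIs are used;
# the sample source string is arbitrary.
if __name__ == '__main__':
    from pygments import highlight
    from pygments.formatters import RawTokenFormatter
    from pygments.lexers.python import PythonLexer

    # RawTokenFormatter emits bytes, one "Token.Type<TAB>repr(value)" pair
    # per line; RawTokenLexer.get_tokens() accepts bytes or str.
    raw = highlight('print("hello")\n', PythonLexer(), RawTokenFormatter())
    for ttype, value in RawTokenLexer().get_tokens(raw):
        print(ttype, repr(value))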