| author | Thomas Kluyver <takowl@gmail.com> | 2018-06-05 19:26:39 +0200 |
|---|---|---|
| committer | Carol Willing <carolcode@willingconsulting.com> | 2018-06-05 10:26:39 -0700 |
| commit | c56b17bd8c7a3fd03859822246633d2c9586f8bd (patch) | |
| tree | 346fb8b3a6614679232792b3f46398b33e5f3c0e /Lib/tokenize.py | |
| parent | bpo-33751: Fix test_file. (GH-7378) (diff) | |
bpo-12486: Document tokenize.generate_tokens() as public API (#6957)
* Document tokenize.generate_tokens()
* Add news file
* Add test for generate_tokens
* Document behaviour around ENCODING token
* Add generate_tokens to __all__
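A minimal usage sketch of the newly documented entry point (the sample source string is an arbitrary example): `generate_tokens()` takes a *readline* callable that returns str, unlike `tokenize()`, which expects bytes.

```python
# Minimal sketch: feed generate_tokens() a str-based readline callable.
import io
import tokenize

source = "x = 1 + 2\n"  # arbitrary sample input
for tok in tokenize.generate_tokens(io.StringIO(source).readline):
    print(tokenize.tok_name[tok.type], repr(tok.string))
```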
Diffstat (limited to 'Lib/tokenize.py')

| -rw-r--r-- | Lib/tokenize.py | 9 |

1 file changed, 6 insertions(+), 3 deletions(-)
```diff
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index 40e6a8b9297..c78d9f7e9ee 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -37,7 +37,7 @@ cookie_re = re.compile(r'^[ \t\f]*#.*?coding[:=][ \t]*([-\w.]+)', re.ASCII)
 blank_re = re.compile(br'^[ \t\f]*(?:[#\r\n]|$)', re.ASCII)
 
 import token
-__all__ = token.__all__ + ["tokenize", "detect_encoding",
+__all__ = token.__all__ + ["tokenize", "generate_tokens", "detect_encoding",
                            "untokenize", "TokenInfo"]
 del token
 
@@ -653,9 +653,12 @@ def _tokenize(readline, encoding):
         yield TokenInfo(ENDMARKER, '', (lnum, 0), (lnum, 0), '')
 
 
-# An undocumented, backwards compatible, API for all the places in the standard
-# library that expect to be able to use tokenize with strings
 def generate_tokens(readline):
+    """Tokenize a source reading Python code as unicode strings.
+
+    This has the same API as tokenize(), except that it expects the *readline*
+    callable to return str objects instead of bytes.
+    """
     return _tokenize(readline, None)
 
 def main():
```
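For context on the "behaviour around ENCODING token" bullet above, a small sketch contrasting the two entry points (the sample source is arbitrary; the asserts reflect the documented behaviour):

```python
# Sketch: tokenize() consumes bytes and yields an ENCODING token first;
# generate_tokens() consumes str and yields no ENCODING token.
import io
import tokenize

source = "x = 1\n"  # arbitrary sample input

byte_toks = list(tokenize.tokenize(io.BytesIO(source.encode("utf-8")).readline))
assert byte_toks[0].type == tokenize.ENCODING  # first token names the encoding

str_toks = list(tokenize.generate_tokens(io.StringIO(source).readline))
assert str_toks[0].type == tokenize.NAME  # no ENCODING token for str input
```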