| author | Victor Stinner <victor.stinner@gmail.com> | 2014-12-05 10:18:30 +0100 |
| --- | --- | --- |
| committer | Victor Stinner <victor.stinner@gmail.com> | 2014-12-05 10:18:30 +0100 |
| commit | 9d279b87d81b7cd9471e8373d022451bb00f094f (patch) | |
| tree | 845d27846df2e3be00de264b2d156691b8be38cd /Lib/tokenize.py | |
| parent | Merge with 3.4. Edit Menus section of Idle doc. (diff) | |
| parent | Issue #22599: Enhance tokenize.open() to be able to call it during Python (diff) | |
(Merge 3.4) Issue #22599: Enhance tokenize.open() so that it can be called during
Python finalization.

Previously, the module kept a reference to the builtins module, but module
attributes are cleared during Python finalization. Instead, keep a direct
reference to the built-in open() function.

This enhancement is not perfect: calling tokenize.open() can still fail if it is
called very late during Python finalization. Usually, the function is called by
the linecache module, which is invoked to display a traceback or emit a warning.
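The pattern is small but general: bind the builtin function to a module-level name at import time, so the reference survives even after module attributes (including those of builtins) are cleared. A minimal sketch of the two approaches, using hypothetical module and function names rather than the actual tokenize.py source:

```python
# sketch_module.py -- illustrative only, not the real tokenize.py.
# The module wants to define its own open(), so the builtin must stay
# reachable under another name.

import builtins

# Fragile variant: looks up builtins.open at call time.  During interpreter
# finalization the attributes of the builtins module are cleared, so this
# lookup can fail at exactly the wrong moment.
def open_fragile(filename):
    return builtins.open(filename, 'rb')

# Robust variant: capture the builtin function object at import time.
# The reference lives in this module's globals and keeps working even
# after builtins' attributes have been cleared.
_builtin_open = open

def open(filename):
    # Shadows the builtin name, like tokenize.open() does.
    return _builtin_open(filename, 'rb')
```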
Diffstat (limited to 'Lib/tokenize.py')
-rw-r--r-- | Lib/tokenize.py | 7 |
1 file changed, 4 insertions, 3 deletions
```diff
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index 742abd19922..51da4847f27 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -24,7 +24,6 @@ __author__ = 'Ka-Ping Yee <ping@lfw.org>'
 __credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '
                'Skip Montanaro, Raymond Hettinger, Trent Nelson, '
                'Michael Foord')
-import builtins
 from codecs import lookup, BOM_UTF8
 import collections
 from io import TextIOWrapper
@@ -431,11 +430,13 @@ def detect_encoding(readline):
     return default, [first, second]
 
 
+_builtin_open = open
+
 def open(filename):
     """Open a file in read only mode using the encoding detected by
     detect_encoding().
     """
-    buffer = builtins.open(filename, 'rb')
+    buffer = _builtin_open(filename, 'rb')
     encoding, lines = detect_encoding(buffer.readline)
     buffer.seek(0)
     text = TextIOWrapper(buffer, encoding, line_buffering=True)
@@ -658,7 +659,7 @@ def main():
     # Tokenize the input
     if args.filename:
         filename = args.filename
-        with builtins.open(filename, 'rb') as f:
+        with _builtin_open(filename, 'rb') as f:
             tokens = list(tokenize(f.readline))
     else:
         filename = "<stdin>"
```
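For reference, this is how tokenize.open() is typically used; the file path below is only a placeholder:

```python
import tokenize

# Open a Python source file using the encoding declared by its coding
# cookie or BOM (as determined by detect_encoding()), defaulting to UTF-8.
with tokenize.open('example.py') as f:   # 'example.py' is a placeholder path
    print(f.encoding)                    # encoding chosen by detect_encoding()
    source = f.read()
```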