Skip to content

Commit 456ec98

Browse files
committed
Add tests for tokenize command-line interface
1 parent 55815a6 commit 456ec98

2 files changed

Lines changed: 86 additions & 3 deletions

File tree

Lib/test/test_tokenize.py

Lines changed: 83 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,8 @@
1+
import contextlib
2+
import itertools
13
import os
24
import re
5+
import tempfile
36
import token
47
import tokenize
58
import unittest
@@ -3178,5 +3181,85 @@ def test_newline_at_the_end_of_buffer(self):
31783181
run_test_script(file_name)
31793182

31803183

3184+
class CommandLineTest(unittest.TestCase):
3185+
def setUp(self):
3186+
self.filename = tempfile.mktemp()
3187+
self.addCleanup(os_helper.unlink, self.filename)
3188+
3189+
@staticmethod
3190+
def text_normalize(string):
3191+
"""Dedent *string* and strip it from its surrounding whitespaces.
3192+
3193+
This method is used by the other utility functions so that any
3194+
string to write or to match against can be freely indented.
3195+
"""
3196+
def normalize_spaces(text):
3197+
return re.sub(r'\s+', ' ', text).strip()
3198+
return normalize_spaces(dedent(string)).strip()
3199+
3200+
def set_source(self, content):
3201+
with open(self.filename, 'w') as fp:
3202+
fp.write(content)
3203+
3204+
def invoke_tokenize(self, *flags):
3205+
output = StringIO()
3206+
with contextlib.redirect_stdout(output):
3207+
tokenize._main(args=[*flags, self.filename])
3208+
return self.text_normalize(output.getvalue())
3209+
3210+
def check_output(self, source, expect, *flags):
3211+
with self.subTest(source=source, flags=flags):
3212+
self.set_source(source)
3213+
res = self.invoke_tokenize(*flags)
3214+
expect = self.text_normalize(expect)
3215+
self.assertListEqual(res.splitlines(), expect.splitlines())
3216+
3217+
def test_invocation(self):
3218+
# test various combinations of parameters
3219+
base_flags = ('-e', '--exact')
3220+
3221+
self.set_source('''
3222+
def f():
3223+
print(x)
3224+
return None
3225+
''')
3226+
3227+
for flag in base_flags:
3228+
with self.subTest(args=flag):
3229+
_ = self.invoke_tokenize(flag)
3230+
3231+
with self.assertRaises(SystemExit):
3232+
# suppress argparse error message
3233+
with contextlib.redirect_stderr(StringIO()):
3234+
_ = self.invoke_tokenize('--unknown')
3235+
3236+
def test_without_flag(self):
3237+
# test 'python -m tokenize source.py'
3238+
source = 'a = 1\n'
3239+
expect = '''
3240+
0,0-0,0: ENCODING 'utf-8'
3241+
1,0-1,1: NAME 'a'
3242+
1,2-1,3: OP '='
3243+
1,4-1,5: NUMBER '1'
3244+
1,5-1,6: NEWLINE '\\n'
3245+
2,0-2,0: ENDMARKER ''
3246+
'''
3247+
self.check_output(source, expect)
3248+
3249+
def test_exact_flag(self):
3250+
# test 'python -m tokenize -e/--exact source.py'
3251+
source = 'a = 1\n'
3252+
expect = '''
3253+
0,0-0,0: ENCODING 'utf-8'
3254+
1,0-1,1: NAME 'a'
3255+
1,2-1,3: EQUAL '='
3256+
1,4-1,5: NUMBER '1'
3257+
1,5-1,6: NEWLINE '\\n'
3258+
2,0-2,0: ENDMARKER ''
3259+
'''
3260+
for flag in ['-e', '--exact']:
3261+
self.check_output(source, expect, flag)
3262+
3263+
31813264
# Standard unittest entry point: run this test module directly.
if __name__ == "__main__":
    unittest.main()

Lib/tokenize.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -499,7 +499,7 @@ def generate_tokens(readline):
499499
"""
500500
return _generate_tokens_from_c_tokenizer(readline, extra_tokens=True)
501501

502-
def main():
502+
def _main(args=None):
503503
import argparse
504504

505505
# Helper error handling routines
@@ -524,7 +524,7 @@ def error(message, filename=None, location=None):
524524
help='the file to tokenize; defaults to stdin')
525525
parser.add_argument('-e', '--exact', dest='exact', action='store_true',
526526
help='display token names using the exact type')
527-
args = parser.parse_args()
527+
args = parser.parse_args(args)
528528

529529
try:
530530
# Tokenize the input
@@ -589,4 +589,4 @@ def _generate_tokens_from_c_tokenizer(source, encoding=None, extra_tokens=False)
589589

590590

591591
# CLI entry point for ``python -m tokenize`` (renamed main -> _main in
# this commit so tests can pass an explicit argv list).
if __name__ == "__main__":
    _main()

0 commit comments

Comments
 (0)