
1# -*- coding: utf-8 -*- 

2#@+leo-ver=5-thin 

3#@+node:ekr.20210902073413.1: * @file ../unittests/core/test_leoAst.py 

4#@@first 

5"""Tests of leoAst.py""" 

6#@+<< leoAst imports >> 

7#@+node:ekr.20210902074548.1: ** << leoAst imports >> 

8import ast 

9import os 

10import sys 

11import textwrap 

12import time 

13import token as token_module 

14from typing import Any, Dict, List 

15import unittest 

16import warnings 

17warnings.simplefilter("ignore") 

18# pylint: disable=import-error 

19# Third-party. 

20try: 

21 import asttokens 

22except Exception: 

23 asttokens = None 

24try: 

25 # Suppress a warning about imp being deprecated. 

26 with warnings.catch_warnings(): 

27 import black 

28except Exception: 

29 black = None 

30 

31# pylint: disable=wrong-import-position 

32from leo.core import leoGlobals as g 

33from leo.core.leoAst import AstNotEqual 

34from leo.core.leoAst import Fstringify, Orange 

35from leo.core.leoAst import Token, TokenOrderGenerator, TokenOrderTraverser 

36from leo.core.leoAst import get_encoding_directive, read_file, strip_BOM 

37from leo.core.leoAst import make_tokens, parse_ast, tokens_to_string 

38from leo.core.leoAst import dump_ast, dump_contents, dump_tokens, dump_tree, _op_names 

39#@-<< leoAst imports >> 

40v1, v2, junk1, junk2, junk3 = sys.version_info 

41py_version = (v1, v2) 

42#@+others 

43#@+node:ekr.20200107114620.1: ** functions: unit testing 

44#@+node:ekr.20191027072126.1: *3* function: compare_asts & helpers 

45def compare_asts(ast1, ast2): 

46 """Compare two ast trees. Return True if they are equal.""" 

47 # Compare the two parse trees. 

48 try: 

49 _compare_asts(ast1, ast2) 

50 except AstNotEqual: 

51 dump_ast(ast1, tag='AST BEFORE') 

52 dump_ast(ast2, tag='AST AFTER') 

53 return False 

54 except Exception: 

55 g.trace("Unexpected exception") 

56 g.es_exception() 

57 return False 

58 return True 

59#@+node:ekr.20191027071653.2: *4* function._compare_asts 

60def _compare_asts(node1, node2): 

61 """ 

62 Compare both nodes, and recursively compare their children. 

63 

64 See also: http://stackoverflow.com/questions/3312989/ 

65 """ 

66 # Compare the nodes themselves. 

67 _compare_nodes(node1, node2) 

68 # Get the list of fields. 

69 fields1 = getattr(node1, "_fields", []) # type:ignore 

70 fields2 = getattr(node2, "_fields", []) # type:ignore 

71 if fields1 != fields2: 

72 raise AstNotEqual( 

73 f"node1._fields: {fields1}\n" f"node2._fields: {fields2}") 

74 # Recursively compare each field. 

75 for field in fields1: 

76 if field not in ('lineno', 'col_offset', 'ctx'): 

77 attr1 = getattr(node1, field, None) 

78 attr2 = getattr(node2, field, None) 

79 if attr1.__class__.__name__ != attr2.__class__.__name__: 

80 raise AstNotEqual(f"attrs1: {attr1},\n" f"attrs2: {attr2}") 

81 _compare_asts(attr1, attr2) 

82#@+node:ekr.20191027071653.3: *4* function._compare_nodes 

83def _compare_nodes(node1, node2): 

84 """ 

85 Compare node1 and node2. 

86 For lists and tuples, compare elements recursively. 

87 Raise AstNotEqual if not equal. 

88 """ 

89 # Class names must always match. 

90 if node1.__class__.__name__ != node2.__class__.__name__: 

91 raise AstNotEqual( 

92 f"node1.__class__.__name__: {node1.__class__.__name__}\n" 

93 f"node2.__class__.__name__: {node2.__class__.__name_}" 

94 ) 

95 # Special cases for strings and None 

96 if node1 is None: 

97 return 

98 if isinstance(node1, str): 

99 if node1 != node2: 

100 raise AstNotEqual(f"node1: {node1!r}\n" f"node2: {node2!r}") 

101 # Special cases for lists and tuples: 

102 if isinstance(node1, (tuple, list)): 

103 if len(node1) != len(node2): 

104 raise AstNotEqual(f"node1: {node1}\n" f"node2: {node2}") 

105 for i, item1 in enumerate(node1): 

106 item2 = node2[i] 

107 if item1.__class__.__name__ != item2.__class__.__name__: 

108 raise AstNotEqual( 

109 f"list item1: {i} {item1}\n" f"list item2: {i} {item2}" 

110 ) 

111 _compare_asts(item1, item2) 

112#@+node:ekr.20191121081439.1: *3* function: compare_lists 

113def compare_lists(list1, list2): 

114 """ 

115 Compare two lists of strings, showing the first mismatch. 

116 

117 Return the index of the first mismatched lines, or None if identical. 

118 """ 

119 import itertools 

120 it = itertools.zip_longest(list1, list2, fillvalue='Missing!') 

121 for i, (s1, s2) in enumerate(it): 

122 if s1 != s2: 

123 return i 

124 return None 

125#@+node:ekr.20191226071135.1: *3* function: get_time 

126def get_time(): 

127 return time.process_time() 

128#@+node:ekr.20210902074155.1: ** Test classes... 

129#@+node:ekr.20191227154302.1: *3* class BaseTest (TestCase) 

130class BaseTest(unittest.TestCase): 

131 """ 

132 The base class of all tests of leoAst.py. 

133 

134 This class contains only helpers. 

135 """ 

136 

137 # Statistics. 

138 counts: Dict[str, int] = {} 

139 times: Dict[str, float] = {} 

140 

141 # Debugging traces & behavior. 

142 # create_links: 'full-traceback' 

143 # make_data: 'contents', 'tokens', 'tree', 

144 # 'post-tokens', 'post-tree', 

145 # 'unit-test' 

146 debug_list: List[str] = [] 
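    # A minimal sketch (not part of the original suite) of how a subclass might
    # enable the traces listed above; the subclass name here is hypothetical:
    #
    #     class MyTOGTest(BaseTest):
    #         debug_list = ['contents', 'post-tokens']
    #
    # TestTOG, later in this file, does exactly this with debug_list = ['unit-test'].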

147 link_error: Exception = None 

148 

149 #@+others 

150 #@+node:ekr.20200110103036.1: *4* BaseTest.adjust_expected 

151 def adjust_expected(self, s): 

152 """Adjust leading indentation in the expected string s.""" 

153 return textwrap.dedent(s.lstrip('\\\n')).rstrip() + '\n' 
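    # Illustrative example (assumed input, not taken from the original tests):
    #     self.adjust_expected('\\\n    if 1:\n        pass\n')
    # returns 'if 1:\n    pass\n' -- the leading backslash and newline are stripped,
    # the block is dedented, and exactly one trailing newline is kept.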

154 #@+node:ekr.20200110092217.1: *4* BaseTest.check_roundtrip 

155 def check_roundtrip(self, contents): 

156 """Check that the tokenizer round-trips the given contents.""" 

157 contents, tokens, tree = self.make_data(contents) 

158 results = tokens_to_string(tokens) 

159 self.assertEqual(contents, results) 

160 #@+node:ekr.20191227054856.1: *4* BaseTest.make_data 

161 def make_data(self, contents, description=None): 

162 """Return (contents, tokens, tree) for the given contents.""" 

163 contents = contents.lstrip('\\\n') 

164 if not contents: 

165 return '', None, None 

166 self.link_error = None 

167 t1 = get_time() 

168 self.update_counts('characters', len(contents)) 

169 # Ensure all tests end in exactly one newline. 

170 contents = textwrap.dedent(contents).rstrip() + '\n' 

171 # Create the TOG instance. 

172 self.tog = TokenOrderGenerator() 

173 self.tog.filename = description or g.callers(2).split(',')[0] 

174 # Pass 0: create the tokens and parse tree 

175 tokens = self.make_tokens(contents) 

176 if not tokens: 

177 self.fail('make_tokens failed') 

178 tree = self.make_tree(contents) 

179 if not tree: 

180 self.fail('make_tree failed') 

181 if 'contents' in self.debug_list: 

182 dump_contents(contents) 

183 if 'ast' in self.debug_list: 

184 if py_version >= (3, 9): 

185 # pylint: disable=unexpected-keyword-arg 

186 g.printObj(ast.dump(tree, indent=2), tag='ast.dump') 

187 else: 

188 g.printObj(ast.dump(tree), tag='ast.dump') 

189 if 'tree' in self.debug_list: # Excellent traces for tracking down mysteries. 

190 dump_ast(tree) 

191 if 'tokens' in self.debug_list: 

192 dump_tokens(tokens) 

193 self.balance_tokens(tokens) 

194 # Pass 1: create the links. 

195 self.create_links(tokens, tree) 

196 if 'post-tree' in self.debug_list: 

197 dump_tree(tokens, tree) 

198 if 'post-tokens' in self.debug_list: 

199 dump_tokens(tokens) 

200 t2 = get_time() 

201 self.update_times('90: TOTAL', t2 - t1) 

202 if self.link_error: 

203 self.fail(self.link_error) 

204 return contents, tokens, tree 

205 #@+node:ekr.20191227103533.1: *4* BaseTest.make_file_data 

206 def make_file_data(self, filename): 

207 """Return (contents, tokens, tree) from the given file.""" 

208 directory = os.path.dirname(__file__) 

209 filename = g.os_path_finalize_join(directory, '..', '..', 'core', filename) 

210 assert os.path.exists(filename), repr(filename) 

211 contents = read_file(filename) 

212 contents, tokens, tree = self.make_data(contents, filename) 

213 return contents, tokens, tree 

214 #@+node:ekr.20191228101601.1: *4* BaseTest: passes... 

215 #@+node:ekr.20191228095945.11: *5* 0.1: BaseTest.make_tokens 

216 def make_tokens(self, contents): 

217 """ 

218 BaseTest.make_tokens. 

219 

220 Make tokens from contents. 

221 """ 

222 t1 = get_time() 

223 # Tokenize. 

224 tokens = make_tokens(contents) 

225 t2 = get_time() 

226 self.update_counts('tokens', len(tokens)) 

227 self.update_times('01: make-tokens', t2 - t1) 

228 return tokens 

229 #@+node:ekr.20191228102101.1: *5* 0.2: BaseTest.make_tree 

230 def make_tree(self, contents): 

231 """ 

232 BaseTest.make_tree. 

233 

234 Return the parse tree for the given contents string. 

235 """ 

236 t1 = get_time() 

237 tree = parse_ast(contents) 

238 t2 = get_time() 

239 self.update_times('02: parse_ast', t2 - t1) 

240 return tree 

241 #@+node:ekr.20191228185201.1: *5* 0.3: BaseTest.balance_tokens 

242 def balance_tokens(self, tokens): 

243 """ 

244 BaseTest.balance_tokens.

245 

246 Insert links between corresponding paren tokens. 

247 """ 

248 t1 = get_time() 

249 count = self.tog.balance_tokens(tokens) 

250 t2 = get_time() 

251 self.update_counts('paren-tokens', count) 

252 self.update_times('03: balance-tokens', t2 - t1) 

253 return count 

254 #@+node:ekr.20191228101437.1: *5* 1.1: BaseTest.create_links 

255 def create_links(self, tokens, tree, filename='unit test'): 

256 """ 

257 BaseTest.create_links. 

258 

259 Insert two-way links between the tokens and ast tree. 

260 """ 

261 tog = self.tog 

262 try: 

263 t1 = get_time() 

264 # Yes, list *is* required here. 

265 list(tog.create_links(tokens, tree)) 

266 t2 = get_time() 

267 self.update_counts('nodes', tog.n_nodes) 

268 self.update_times('11: create-links', t2 - t1) 

269 except Exception as e: 

270 print('\n') 

271 g.trace(g.callers(), '\n') 

272 if 'full-traceback' in self.debug_list: 

273 g.es_exception() 

274 # Weird: calling self.fail creates ugly failures. 

275 self.link_error = e 

276 #@+node:ekr.20191228095945.10: *5* 2.1: BaseTest.fstringify 

277 def fstringify(self, contents, tokens, tree, filename=None, silent=False): 

278 """ 

279 BaseTest.fstringify. 

280 """ 

281 t1 = get_time() 

282 if not filename: 

283 filename = g.callers(1) 

284 fs = Fstringify() 

285 if silent: 

286 fs.silent = True 

287 result_s = fs.fstringify(contents, filename, tokens, tree) 

288 t2 = get_time() 

289 self.update_times('21: fstringify', t2 - t1) 

290 return result_s 

291 #@+node:ekr.20200107175223.1: *5* 2.2: BaseTest.beautify 

292 def beautify(self, contents, tokens, tree, filename=None, max_join_line_length=None, max_split_line_length=None): 

293 """ 

294 BaseTest.beautify. 

295 """ 

296 t1 = get_time() 

297 if not contents: 

298 return '' 

299 if not filename: 

300 filename = g.callers(2).split(',')[0] 

301 orange = Orange() 

302 result_s = orange.beautify(contents, filename, tokens, tree, 

303 max_join_line_length=max_join_line_length, 

304 max_split_line_length=max_split_line_length) 

305 t2 = get_time() 

306 self.update_times('22: beautify', t2 - t1) 

307 self.code_list = orange.code_list 

308 return result_s 

309 #@+node:ekr.20191228095945.1: *4* BaseTest: stats... 

310 # Actions should fail by throwing an exception. 

311 #@+node:ekr.20191228095945.12: *5* BaseTest.dump_stats & helpers 

312 def dump_stats(self): 

313 """Show all calculated statistics.""" 

314 if self.counts or self.times: 

315 print('') 

316 self.dump_counts() 

317 self.dump_times() 

318 print('') 

319 #@+node:ekr.20191228154757.1: *6* BaseTest.dump_counts 

320 def dump_counts(self): 

321 """Show all calculated counts.""" 

322 for key, n in self.counts.items(): 

323 print(f"{key:>16}: {n:>6}") 

324 #@+node:ekr.20191228154801.1: *6* BaseTest.dump_times 

325 def dump_times(self): 

326 """ 

327 Show all calculated times. 

328 

329 Keys should start with a priority (sort order) of the form `[0-9][0-9]:` 

330 """ 

331 for key in sorted(self.times): 

332 t = self.times.get(key) 

333 key2 = key[3:] 

334 print(f"{key2:>16}: {t:6.3f} sec.") 

335 #@+node:ekr.20191228181624.1: *5* BaseTest.update_counts & update_times 

336 def update_counts(self, key, n): 

337 """Update the count statistic given by key, n.""" 

338 old_n = self.counts.get(key, 0) 

339 self.counts[key] = old_n + n 

340 

341 def update_times(self, key, t): 

342 """Update the timing statistic given by key, t.""" 

343 old_t = self.times.get(key, 0.0) 

344 self.times[key] = old_t + t 

345 #@-others 

346#@+node:ekr.20200122161530.1: *3* class Optional_TestFiles (BaseTest) 

347class Optional_TestFiles(BaseTest): 

348 """ 

349 Tests for the TokenOrderGenerator class that act on files. 

350 

351 These are optional tests. They take a long time and are not needed 

352 for 100% coverage. 

353 

354 All of these tests failed at one time. 

355 """ 

356 #@+others 

357 #@+node:ekr.20200726145235.2: *4* TestFiles.test_leoApp 

358 def test_leoApp(self): 

359 

360 self.make_file_data('leoApp.py') 

361 #@+node:ekr.20200726145235.1: *4* TestFiles.test_leoAst 

362 def test_leoAst(self): 

363 

364 self.make_file_data('leoAst.py') 

365 #@+node:ekr.20200726145333.1: *4* TestFiles.test_leoDebugger 

366 def test_leoDebugger(self): 

367 

368 self.make_file_data('leoDebugger.py') 

369 #@+node:ekr.20200726145333.2: *4* TestFiles.test_leoFind 

370 def test_leoFind(self): 

371 

372 self.make_file_data('leoFind.py') 

373 #@+node:ekr.20200726145333.3: *4* TestFiles.test_leoGlobals 

374 def test_leoGlobals(self): 

375 

376 self.make_file_data('leoGlobals.py') 

377 #@+node:ekr.20200726145333.4: *4* TestFiles.test_leoTips 

378 def test_leoTips(self): 

379 

380 self.make_file_data('leoTips.py') 

381 #@+node:ekr.20200726145735.1: *4* TestFiles.test_runLeo 

382 def test_runLeo(self): 

383 

384 self.make_file_data('runLeo.py') 

385 #@+node:ekr.20200115162419.1: *4* TestFiles.compare_tog_vs_asttokens 

386 def compare_tog_vs_asttokens(self): 

387 """Compare asttokens token lists with TOG token lists.""" 

388 if not asttokens: 

389 self.skipTest('requires asttokens') 

390 # Define TestToken class and helper functions. 

391 stack: List[ast.AST] = [] 

392 #@+others 

393 #@+node:ekr.20200124024159.2: *5* class TestToken (internal) 

394 class TestToken: 

395 """A patchable representation of the 5-tuples created by tokenize and used by asttokens.""" 

396 

397 def __init__(self, kind, value): 

398 self.kind = kind 

399 self.value = value 

400 self.node_list: List[ast.AST] = [] 

401 

402 def __str__(self): 

403 tokens_s = ', '.join([z.__class__.__name__ for z in self.node_list]) 

404 return f"{self.kind:14} {self.value:20} {tokens_s!s}" 

405 

406 __repr__ = __str__ 

407 #@+node:ekr.20200124024159.3: *5* function: atok_name 

408 def atok_name(token): 

409 """Return a good looking name for the given 5-tuple""" 

410 return token_module.tok_name[token[0]].lower() # type:ignore 

411 #@+node:ekr.20200124024159.4: *5* function: atok_value 

412 def atok_value(token): 

413 """Print a good looking value for the given 5-tuple""" 

414 return token.string if atok_name(token) == 'string' else repr(token.string) 

415 #@+node:ekr.20200124024159.5: *5* function: dump_token 

416 def dump_token(token): 

417 node_list = list(set(getattr(token, 'node_set', []))) 

418 node_list = sorted([z.__class__.__name__ for z in node_list]) 

419 return f"{token.index:2} {atok_name(token):12} {atok_value(token):20} {node_list}" 

420 #@+node:ekr.20200124024159.6: *5* function: postvisit 

421 def postvisit(node, par_value, value): 

422 nonlocal stack 

423 stack.pop() 

424 return par_value or [] 

425 #@+node:ekr.20200124024159.7: *5* function: previsit 

426 def previsit(node, par_value): 

427 nonlocal stack 

428 if isinstance(node, ast.Module): 

429 stack = [] 

430 if stack: 

431 parent = stack[-1] 

432 children: List[ast.AST] = getattr(parent, 'children', []) 

433 parent.children = children + [node] # type:ignore 

434 node.parent = parent 

435 else: 

436 node.parent = None 

437 node.children = [] 

438 stack.append(node) 

439 return par_value, [] 

440 #@-others 

441 directory = r'c:\leo.repo\leo-editor\leo\core' 

442 filename = 'leoAst.py' 

443 filename = os.path.join(directory, filename) 

444 # A fair comparison omits the read time. 

445 t0 = get_time() 

446 contents = read_file(filename) 

447 t1 = get_time() 

448 # Part 1: TOG. 

449 tog = TokenOrderGenerator() 

450 tog.filename = filename 

451 tokens = make_tokens(contents) 

452 tree = parse_ast(contents) 

453 tog.create_links(tokens, tree) 

454 tog.balance_tokens(tokens) 

455 t2 = get_time() 

456 # Part 2: Create asttokens data. 

457 atok = asttokens.ASTTokens(contents, parse=True, filename=filename) 

458 t3 = get_time() 

459 # Create a patchable list of TestToken objects. 

460 tokens = [TestToken(atok_name(z), atok_value(z)) for z in atok.tokens] # type:ignore 

461 # Inject parent/child links into nodes. 

462 asttokens.util.visit_tree(atok.tree, previsit, postvisit) 

463 # Create token.node_list for each token.

464 for node in asttokens.util.walk(atok.tree): 

465 # Inject node into token.node_list 

466 for ast_token in atok.get_tokens(node, include_extra=True): 

467 i = ast_token.index 

468 token = tokens[i] 

469 token.node_list.append(node) 

470 t4 = get_time() 

471 if 1: 

472 print( 

473 f" read: {t1-t0:5.3f} sec.\n" 

474 f" TOG: {t2-t1:5.3f} sec.\n" 

475 f"asttokens 1: {t3-t2:5.3f} sec.\n" 

476 f"asttokens 2: {t4-t3:5.3f} sec.\n") 

477 if 0: 

478 print('===== asttokens =====\n') 

479 for node in asttokens.util.walk(tree): 

480 print(f"{node.__class__.__name__:>10} {atok.get_text(node)!s}") 

481 #@-others 

482#@+node:ekr.20191229083512.1: *3* class TestFstringify (BaseTest) 

483class TestFstringify(BaseTest): 

484 """Tests for the TokenOrderGenerator class.""" 

485 #@+others 

486 #@+node:ekr.20200111043311.1: *4* Bugs... 

487 #@+node:ekr.20210318054321.1: *5* TestFstringify.test_bug_1851 

488 def test_bug_1851(self): 

489 # leoCheck.py. 

490 contents = """\ 

491 from dataclasses import dataclass 

492 

493 @dataclass(frozen=True) 

494 class TestClass: 

495 value: str 

496 start: int 

497 end: int 

498 

499 f = TestClass('abc', 0, 10) 

500 """ 

501 contents, tokens, tree = self.make_data(contents) 

502 expected = textwrap.dedent(contents).rstrip() + '\n' 

503 results = self.fstringify(contents, tokens, tree) 

504 self.assertEqual(results, expected) 

505 #@+node:ekr.20200111043311.2: *5* TestFstringify.test_crash_1 

506 def test_crash_1(self): 

507 # leoCheck.py. 

508 contents = """return ('error', 'no member %s' % ivar)""" 

509 expected = """return ('error', f"no member {ivar}")\n""" 

510 contents, tokens, tree = self.make_data(contents) 

511 results = self.fstringify(contents, tokens, tree) 

512 self.assertEqual(results, expected) 

513 #@+node:ekr.20200111075114.1: *5* TestFstringify.test_crash_2 

514 def test_crash_2(self): 

515 # leoCheck.py, line 1704. 

516 # format = (

517 # 'files: %s lines: %s chars: %s classes: %s\n' 

518 # 'defs: %s calls: %s undefined calls: %s returns: %s' 

519 # ) 

520 contents = r"""'files: %s\n' 'defs: %s'""" 

521 expected = contents + '\n' 

522 contents, tokens, tree = self.make_data(contents) 

523 results = self.fstringify(contents, tokens, tree) 

524 self.assertEqual(results, expected) 

525 #@+node:ekr.20200214155156.1: *4* TestFstringify.show_message 

526 def show_message(self): 

527 """Separate test of fs.message.""" 

528 fs = Fstringify() 

529 fs.filename = 'test_file.py' 

530 fs.line_number = 42 

531 fs.line = 'The test line\n' 

532 fs.silent = False 

533 # Test message. 

534 fs.message( 

535 "Test:\n" 

536 "< Left align\n" 

537 ":Colon: align\n" 

538 "> Right align\n" 

539 " Default align") 

540 # 

541 # change_quotes... 

542 fs.message("can't create f-fstring: no lt_s!") 

543 lt_s = "lt_s" 

544 delim = 'Delim' 

545 token = Token('Kind', 'Value') 

546 fs.message( 

547 f"unexpected token: {token.kind} {token.value}\n" 

548 f" lt_s: {lt_s!r}") 

549 fs.message( 

550 f"can't create f-fstring: {lt_s!r}\n" 

551 f": conflicting delim: {delim!r}") 

552 fs.message( 

553 f"can't create f-fstring: {lt_s!r}\n" 

554 f":backslash in {{expr}}: {delim!r}") 

555 # Check newlines... 

556 fs.message( 

557 f" can't create f-fstring: {lt_s!r}\n" 

558 f":curly bracket underflow:") 

559 fs.message( 

560 f" can't create f-fstring: {lt_s!r}\n" 

561 f":string contains a backslash:") 

562 fs.message( 

563 f" can't create f-fstring: {lt_s!r}\n" 

564 f":unclosed curly bracket:") 

565 # Make fstring 

566 before, after = 'Before', 'After' 

567 fs.message( 

568 f"trace:\n" 

569 f":from: {before!s}\n" 

570 f": to: {after!s}") 

571 #@+node:ekr.20200106163535.1: *4* TestFstringify.test_braces 

572 def test_braces(self): 

573 

574 # From pr.construct_stylesheet in leoPrinting.py 

575 contents = """'h1 {font-family: %s}' % (family)""" 

576 expected = """f"h1 {{font-family: {family}}}"\n""" 

577 contents, tokens, tree = self.make_data(contents) 

578 results = self.fstringify(contents, tokens, tree) 

579 self.assertEqual(results, expected) 

580 #@+node:ekr.20200217171334.1: *4* TestFstringify.test_backslash_in_expr 

581 def test_backslash_in_expr(self): 

582 # From get_flake8_config. 

583 contents = r"""print('aaa\n%s' % ('\n'.join(dir_table)))""" 

584 expected = contents.rstrip() + '\n' 

585 contents, tokens, tree = self.make_data(contents) 

586 results = self.fstringify(contents, tokens, tree, silent=True) 

587 self.assertEqual(results, expected) 

588 #@+node:ekr.20191230150653.1: *4* TestFstringify.test_call_in_rhs 

589 def test_call_in_rhs(self): 

590 

591 contents = """'%s' % d()""" 

592 expected = """f"{d()}"\n""" 

593 contents, tokens, tree = self.make_data(contents) 

594 results = self.fstringify(contents, tokens, tree) 

595 self.assertEqual(results, expected) 

596 #@+node:ekr.20200104045907.1: *4* TestFstringify.test_call_in_rhs_2 

597 def test_call_in_rhs_2(self): 

598 

599 # From LM.traceSettingsDict 

600 contents = """print('%s' % (len(d.keys())))""" 

601 expected = """print(f"{len(d.keys())}")\n""" 

602 contents, tokens, tree = self.make_data(contents) 

603 results = self.fstringify(contents, tokens, tree) 

604 self.assertEqual(results, expected) 

605 #@+node:ekr.20200105073155.1: *4* TestFstringify.test_call_with_attribute 

606 def test_call_with_attribute(self): 

607 

608 contents = """g.blue('wrote %s' % p.atShadowFileNodeName())""" 

609 expected = """g.blue(f"wrote {p.atShadowFileNodeName()}")\n""" 

610 contents, tokens, tree = self.make_data(contents) 

611 results = self.fstringify(contents, tokens, tree) 

612 self.assertEqual(results, expected) 

613 #@+node:ekr.20200122035055.1: *4* TestFstringify.test_call_with_comments 

614 def test_call_with_comments(self): 

615 

616 contents = """\ 

617 print('%s in %5.2f sec' % ( 

618 "done", # message 

619 2.9, # time 

620 )) # trailing comment""" 

621 

622 expected = """\ 

623 print(f'{"done"} in {2.9:5.2f} sec') # trailing comment 

624 """ 

625 contents, tokens, tree = self.make_data(contents) 

626 expected = textwrap.dedent(expected).rstrip() + '\n' 

627 results = self.fstringify(contents, tokens, tree) 

628 self.assertEqual(results, expected) 

629 #@+node:ekr.20200206173126.1: *4* TestFstringify.test_change_quotes 

630 def test_change_quotes(self): 

631 

632 contents = """ret = '[%s]' % ','.join([show(z) for z in arg])""" 

633 expected = """ret = f"[{','.join([show(z) for z in arg])}]"\n""" 

634 contents, tokens, tree = self.make_data(contents) 

635 results = self.fstringify(contents, tokens, tree) 

636 self.assertEqual(results, expected) 

637 #@+node:ekr.20200101060616.1: *4* TestFstringify.test_complex_rhs 

638 def test_complex_rhs(self): 

639 # From LM.mergeShortcutsDicts. 

640 contents = ( 

641 """g.trace('--trace-binding: %20s binds %s to %s' % (""" 

642 """ c.shortFileName(), binding, d.get(binding) or []))""") 

643 expected = ( 

644 """g.trace(f"--trace-binding: {c.shortFileName():20} """ 

645 """binds {binding} to {d.get(binding) or []}")\n""") 

646 contents, tokens, tree = self.make_data(contents) 

647 results = self.fstringify(contents, tokens, tree) 

648 self.assertEqual(results, expected) 

649 #@+node:ekr.20200206174208.1: *4* TestFstringify.test_function_call 

650 def test_function_call(self): 

651 

652 contents = """mods = ''.join(['%s+' % z.capitalize() for z in self.mods])""" 

653 expected = """mods = ''.join([f"{z.capitalize()}+" for z in self.mods])\n""" 

654 contents, tokens, tree = self.make_data(contents) 

655 results = self.fstringify(contents, tokens, tree) 

656 self.assertEqual(results, expected) 

657 #@+node:ekr.20200106085608.1: *4* TestFstringify.test_ImportFrom 

658 def test_ImportFrom(self): 

659 

660 table = ( 

661 """from .globals import a, b""", 

662 """from ..globals import x, y, z""", 

663 """from . import j""", 

664 ) 

665 for contents in table: 

666 contents, tokens, tree = self.make_data(contents) 

667 results = self.fstringify(contents, tokens, tree) 

668 self.assertEqual(results, contents) 

669 #@+node:ekr.20200106042452.1: *4* TestFstringify.test_ListComp 

670 def test_ListComp(self): 

671 

672 table = ( 

673 """replaces = [L + c + R[1:] for L, R in splits if R for c in letters]""", 

674 """[L for L in x for c in y]""", 

675 """[L for L in x for c in y if L if not c]""", 

676 ) 

677 for contents in table: 

678 contents, tokens, tree = self.make_data(contents) 

679 results = self.fstringify(contents, tokens, tree) 

680 expected = contents 

681 self.assertEqual(results, expected) 

682 #@+node:ekr.20200112163031.1: *4* TestFstringify.test_munge_spec 

683 def test_munge_spec(self): 

684 

685 # !head:tail or :tail 

686 table = ( 

687 ('+1s', '', '+1'), 

688 ('-2s', '', '>2'), 

689 ('3s', '', '3'), 

690 ('4r', 'r', '4'), 

691 ) 

692 for spec, e_head, e_tail in table: 

693 head, tail = Fstringify().munge_spec(spec) 

694 assert (head, tail) == (e_head, e_tail), (

695 f"\n" 

696 f" spec: {spec}\n" 

697 f"expected head: {e_head}\n" 

698 f" got head: {head}\n" 

699 f"expected tail: {e_tail}\n" 

700 f" got tail: {tail}\n") 

701 #@+node:ekr.20200104042705.1: *4* TestFstringify.test_newlines 

702 def test_newlines(self): 

703 

704 contents = r"""\ 

705 print("hello\n") 

706 print('world\n') 

707 print("hello\r\n") 

708 print('world\r\n') 

709 """ 

710 contents, tokens, tree = self.make_data(contents) 

711 expected = contents 

712 results = self.fstringify(contents, tokens, tree) 

713 self.assertEqual(results, expected) 

714 #@+node:ekr.20191230183652.1: *4* TestFstringify.test_parens_in_rhs 

715 def test_parens_in_rhs(self): 

716 

717 contents = """print('%20s' % (ivar), val)""" 

718 expected = """print(f"{ivar:20}", val)\n""" 

719 contents, tokens, tree = self.make_data(contents) 

720 results = self.fstringify(contents, tokens, tree) 

721 self.assertEqual(results, expected) 

722 #@+node:ekr.20200106091740.1: *4* TestFstringify.test_single_quotes 

723 def test_single_quotes(self): 

724 

725 table = ( 

726 # Case 0. 

727 ("""print('%r "default"' % style_name)""", 

728 """print(f'{style_name!r} "default"')\n"""), 

729 # Case 1. 

730 ("""print('%r' % "val")""", 

731 """print(f'{"val"!r}')\n"""), 

732 # Case 2. 

733 ("""print("%r" % "val")""", 

734 """print(f'{"val"!r}')\n"""), 

735 ) 

736 for i, data in enumerate(table): 

737 contents, expected = data 

738 description = f"test_single_quotes: {i}" 

739 contents, tokens, tree = self.make_data(contents, description) 

740 results = self.fstringify(contents, tokens, tree, filename=description) 

741 self.assertEqual(results, expected, msg=i) 

742 #@+node:ekr.20200214094938.1: *4* TestFstringify.test_switch_quotes 

743 def test_switch_quotes(self): 

744 table = ( 

745 ( 

746 """print('%r' % 'style_name')""", 

747 """print(f"{'style_name'!r}")\n""", 

748 ), 

749 ) 

750 for i, data in enumerate(table): 

751 contents, expected = data 

752 description = f"test_single_quotes: {i}" 

753 contents, tokens, tree = self.make_data(contents, description) 

754 results = self.fstringify(contents, tokens, tree, filename=description) 

755 self.assertEqual(results, expected, msg=i) 

756 #@+node:ekr.20200206173725.1: *4* TestFstringify.test_switch_quotes_2 

757 def test_switch_quotes_2(self): 

758 

759 contents = """ 

760 g.es('%s blah blah' % ( 

761 g.angleBrackets('*'))) 

762 """ 

763 expected = """g.es(f"{g.angleBrackets(\'*\')} blah blah")\n""" 

764 contents, tokens, tree = self.make_data(contents) 

765 results = self.fstringify(contents, tokens, tree) 

766 self.assertEqual(results, expected) 

767 #@+node:ekr.20200206173628.1: *4* TestFstringify.test_switch_quotes_3 

768 def test_switch_quotes_3(self): 

769 

770 contents = """print('Test %s' % 'one')""" 

771 expected = """print(f"Test {'one'}")\n""" 

772 contents, tokens, tree = self.make_data(contents) 

773 results = self.fstringify(contents, tokens, tree) 

774 self.assertEqual(results, expected) 

775 #@+node:ekr.20200219125956.1: *4* TestFstringify.test_switch_quotes_fail 

776 def test_switch_quotes_fail(self): 

777 

778 contents = """print('Test %s %s' % ('one', "two"))""" 

779 contents, tokens, tree = self.make_data(contents) 

780 expected = contents 

781 results = self.fstringify(contents, tokens, tree) 

782 self.assertEqual(results, expected) 

783 #@-others 

784#@+node:ekr.20200107174645.1: *3* class TestOrange (BaseTest) 

785class TestOrange(BaseTest): 

786 """ 

787 Tests for the Orange class. 

788 

789 **Important**: All unit tests assume that black_mode is False. 

790 That is, unit tests assume that no blank lines 

791 are ever inserted or deleted. 

792 """ 

793 #@+others 

794 #@+node:ekr.20200115201823.1: *4* TestOrange.blacken 

795 def blacken(self, contents, line_length=None): 

796 """Return the results of running black on contents""" 

797 if not black: 

798 self.skipTest('Can not import black') 

799 # Suppress string normalization! 

800 try: 

801 mode = black.FileMode() 

802 mode.string_normalization = False 

803 if line_length is not None: 

804 mode.line_length = line_length 

805 except TypeError: 

806 self.skipTest('old version of black') 

807 return black.format_str(contents, mode=mode) 
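    # Illustrative expectation (assumes a reasonably recent black release): for
    # the input "print ( 'hi' )\n" this helper should return "print('hi')\n" --
    # black normalizes the spacing, while string_normalization=False preserves
    # the single quotes that these tests rely on.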

808 #@+node:ekr.20200228074455.1: *4* TestOrange.test_bug_1429 

809 def test_bug_1429(self): 

810 

811 contents = r'''\ 

812 def get_semver(tag): 

813 """bug 1429 docstring""" 

814 try: 

815 import semantic_version 

816 version = str(semantic_version.Version.coerce(tag, partial=True)) 

817 # tuple of major, minor, build, pre-release, patch 

818 # 5.6b2 --> 5.6-b2 

819 except(ImportError, ValueError) as err: 

820 print('\n', err) 

821 print("""*** Failed to parse Semantic Version from git tag '{0}'. 

822 Expecting tag name like '5.7b2', 'leo-4.9.12', 'v4.3' for releases. 

823 This version can't be uploaded to PyPi.org.""".format(tag)) 

824 version = tag 

825 return version 

826 ''' 

827 contents, tokens, tree = self.make_data(contents) 

828 expected = contents.rstrip() + '\n' 

829 results = self.beautify(contents, tokens, tree, 

830 max_join_line_length=0, max_split_line_length=0) 

831 self.assertEqual(results, expected) 

832 #@+node:ekr.20210318055702.1: *4* TestOrange.test_bug_1851 

833 def test_bug_1851(self): 

834 

835 contents = r'''\ 

836 def foo(a1): 

837 pass 

838 ''' 

839 contents, tokens, tree = self.make_data(contents) 

840 expected = contents.rstrip() + '\n' 

841 results = self.beautify(contents, tokens, tree, 

842 max_join_line_length=0, max_split_line_length=0) 

843 self.assertEqual(results, expected) 

844 #@+node:ekr.20200219114415.1: *4* TestOrange.test_at_doc_part 

845 def test_at_doc_part(self): 

846 

847 line_length = 40 # For testing. 

848 contents = """\ 

849 #@+at Line 1 

850 # Line 2 

851 #@@c 

852 

853 print('hi') 

854 """ 

855 contents, tokens, tree = self.make_data(contents) 

856 expected = contents.rstrip() + '\n' 

857 results = self.beautify(contents, tokens, tree, 

858 max_join_line_length=line_length, 

859 max_split_line_length=line_length, 

860 ) 

861 self.assertEqual(results, expected) 

862 #@+node:ekr.20200116102345.1: *4* TestOrange.test_backslash_newline 

863 def test_backslash_newline(self): 

864 """ 

865 This test is necessarily different from black, because orange doesn't 

866 delete semicolon tokens. 

867 """ 

868 contents = r""" 

869 print(a);\ 

870 print(b) 

871 print(c); \ 

872 print(d) 

873 """ 

874 contents, tokens, tree = self.make_data(contents) 

875 expected = contents.rstrip() + '\n' 

876 # expected = self.blacken(contents).rstrip() + '\n' 

877 results = self.beautify(contents, tokens, tree) 

878 self.assertEqual(results, expected) 

879 #@+node:ekr.20200219145639.1: *4* TestOrange.test_blank_lines_after_function 

880 def test_blank_lines_after_function(self): 

881 

882 contents = """\ 

883 # Comment line 1. 

884 # Comment line 2. 

885 

886 def spam(): 

887 pass 

888 # Properly indented comment. 

889 

890 # Comment line3. 

891 # Comment line4. 

892 a = 2 

893 """ 

894 contents, tokens, tree = self.make_data(contents) 

895 expected = contents 

896 results = self.beautify(contents, tokens, tree) 

897 self.assertEqual(results, expected) 

898 #@+node:ekr.20200220050758.1: *4* TestOrange.test_blank_lines_after_function_2 

899 def test_blank_lines_after_function_2(self): 

900 

901 contents = """\ 

902 # Leading comment line 1. 

903 # Leading comment lines 2. 

904 

905 def spam(): 

906 pass 

907 

908 # Trailing comment line. 

909 a = 2 

910 """ 

911 contents, tokens, tree = self.make_data(contents) 

912 expected = contents 

913 results = self.beautify(contents, tokens, tree) 

914 self.assertEqual(results, expected) 

915 #@+node:ekr.20200220053212.1: *4* TestOrange.test_blank_lines_after_function_3 

916 def test_blank_lines_after_function_3(self): 

917 

918 # From leoAtFile.py. 

919 contents = r"""\ 

920 def writeAsisNode(self, p): 

921 print('1') 

922 

923 def put(s): 

924 print('2') 

925 

926 # Trailing comment 1. 

927 # Trailing comment 2. 

928 print('3') 

929 """ 

930 contents, tokens, tree = self.make_data(contents) 

931 expected = contents 

932 results = self.beautify(contents, tokens, tree) 

933 self.assertEqual(results, expected) 

934 #@+node:ekr.20200210120455.1: *4* TestOrange.test_decorator 

935 def test_decorator(self): 

936 

937 table = ( 

938 # Case 0. 

939 """\ 

940 @my_decorator(1) 

941 def func(): 

942 pass 

943 """, 

944 # Case 1. 

945 """\ 

946 if 1: 

947 @my_decorator 

948 def func(): 

949 pass 

950 """, 

951 # Case 2. 

952 '''\ 

953 @g.commander_command('promote') 

954 def promote(self, event=None, undoFlag=True): 

955 """Make all children of the selected nodes siblings of the selected node.""" 

956 ''', 

957 ) 

958 for i, contents in enumerate(table): 

959 contents, tokens, tree = self.make_data(contents) 

960 expected = contents 

961 results = self.beautify(contents, tokens, tree) 

962 if results != expected: 

963 g.trace('Fail:', i) 

964 self.assertEqual(results, expected) 

965 #@+node:ekr.20200211094614.1: *4* TestOrange.test_dont_delete_blank_lines 

966 def test_dont_delete_blank_lines(self): 

967 

968 line_length = 40 # For testing. 

969 contents = """\ 

970 class Test: 

971 

972 def test_func(): 

973 

974 pass 

975 

976 a = 2 

977 """ 

978 contents, tokens, tree = self.make_data(contents) 

979 expected = contents.rstrip() + '\n' 

980 results = self.beautify(contents, tokens, tree, 

981 max_join_line_length=line_length, 

982 max_split_line_length=line_length, 

983 ) 

984 self.assertEqual(results, expected) 

985 #@+node:ekr.20200116110652.1: *4* TestOrange.test_function_defs 

986 def test_function_defs(self): 

987 

988 table = ( 

989 # Case 0. 

990 """\ 

991 def f1(a=2 + 5): 

992 pass 

993 """, 

994 # Case 2 

995 """\ 

996 def f1(): 

997 pass 

998 """, 

999 # Case 3. 

1000 """\ 

1001 def f1(): 

1002 pass 

1003 """, 

1004 # Case 4. 

1005 '''\ 

1006 def should_kill_beautify(p): 

1007 """Return True if p.b contains @killbeautify""" 

1008 return 'killbeautify' in g.get_directives_dict(p) 

1009 ''', 

1010 ) 

1011 for i, contents in enumerate(table): 

1012 contents, tokens, tree = self.make_data(contents) 

1013 expected = self.blacken(contents).rstrip() + '\n' 

1014 results = self.beautify(contents, tokens, tree) 

1015 self.assertEqual(results, expected) 

1016 #@+node:ekr.20200209152745.1: *4* TestOrange.test_indented_comment 

1017 def test_indented_comment(self): 

1018 

1019 line_length = 40 # For testing. 

1020 table = ( 

1021 """\ 

1022 if 1: 

1023 pass 

1024 # An indented comment. 

1025 """, 

1026 """\ 

1027 table = ( 

1028 # Indented comment. 

1029 ) 

1030 """ 

1031 ) 

1032 

1033 fails = 0 

1034 for contents in table: 

1035 contents, tokens, tree = self.make_data(contents) 

1036 expected = contents 

1037 if 0: 

1038 dump_contents(contents) 

1039 dump_tokens(tokens) 

1040 # dump_tree(tokens, tree) 

1041 results = self.beautify(contents, tokens, tree, 

1042 max_join_line_length=line_length, 

1043 max_split_line_length=line_length, 

1044 ) 

1045 message = ( 

1046 f"\n" 

1047 f" contents: {contents!r}\n" 

1048 f" expected: {expected!r}\n" 

1049 f" got: {results!r}") 

1050 if results != expected: 

1051 fails += 1 

1052 print(f"Fail: {fails}\n{message}") 

1053 elif 0: 

1054 print(f"Ok:\n{message}") 

1055 assert not fails, fails 

1056 #@+node:ekr.20200116104031.1: *4* TestOrange.test_join_and_strip_condition 

1057 def test_join_and_strip_condition(self): 

1058 

1059 contents = """\ 

1060 if ( 

1061 a == b or 

1062 c == d 

1063 ): 

1064 pass 

1065 """ 

1066 expected = """\ 

1067 if (a == b or c == d): 

1068 pass 

1069 """ 

1070 contents, tokens, tree = self.make_data(contents) 

1071 expected = textwrap.dedent(expected) 

1072 # Black also removes parens, which is beyond our scope at present. 

1073 # expected = self.blacken(contents, line_length=40) 

1074 results = self.beautify(contents, tokens, tree) 

1075 self.assertEqual(results, expected) 

1076 #@+node:ekr.20200208041446.1: *4* TestOrange.test_join_leading_whitespace 

1077 def test_join_leading_whitespace(self): 

1078 

1079 line_length = 40 # For testing. 

1080 table = ( 

1081 #1234567890x1234567890x1234567890x1234567890x 

1082 """\ 

1083 if 1: 

1084 print('4444', 

1085 '5555') 

1086 """, 

1087 """\ 

1088 if 1: 

1089 print('4444', '5555')\n""", 

1090 ) 

1091 fails = 0 

1092 for contents in table: 

1093 contents, tokens, tree = self.make_data(contents) 

1094 if 0: 

1095 dump_contents(contents) 

1096 dump_tokens(tokens) 

1097 # dump_tree(tokens, tree) 

1098 expected = contents 

1099 # expected = self.blacken(contents, line_length=line_length) 

1100 results = self.beautify(contents, tokens, tree, 

1101 max_join_line_length=line_length, 

1102 max_split_line_length=line_length, 

1103 ) 

1104 message = ( 

1105 f"\n" 

1106 f" contents: {contents!r}\n" 

1107 f" expected: {expected!r}\n" 

1108 f" got: {results!r}") 

1109 if results != expected: 

1110 fails += 1 

1111 print(f"Fail: {fails}\n{message}") 

1112 elif 0: 

1113 print(f"Ok:\n{message}") 

1114 assert not fails, fails 

1115 #@+node:ekr.20200121093134.1: *4* TestOrange.test_join_lines 

1116 def test_join_lines(self): 

1117 

1118 # Except where noted, all entries are expected values.... 

1119 line_length = 40 # For testing. 

1120 table = ( 

1121 #1234567890x1234567890x1234567890x1234567890x 

1122 """print('4444',\n '5555')""", 

1123 """print('4444', '5555')\n""", 

1124 ) 

1125 fails = 0 

1126 for contents in table: 

1127 contents, tokens, tree = self.make_data(contents) 

1128 if 0: 

1129 dump_contents(contents) 

1130 dump_tokens(tokens) 

1131 # dump_tree(tokens, tree) 

1132 expected = contents 

1133 results = self.beautify(contents, tokens, tree, 

1134 max_join_line_length=line_length, 

1135 max_split_line_length=line_length, 

1136 ) 

1137 message = ( 

1138 f"\n" 

1139 f" contents: {contents!r}\n" 

1140 f" expected: {expected!r}\n" 

1141 f" orange: {results!r}") 

1142 if results != expected: 

1143 fails += 1 

1144 print(f"Fail: {fails}\n{message}") 

1145 elif 0: 

1146 print(f"Ok:\n{message}") 

1147 self.assertEqual(fails, 0) 

1148 #@+node:ekr.20200210051900.1: *4* TestOrange.test_join_suppression 

1149 def test_join_suppression(self): 

1150 

1151 contents = """\ 

1152 class T: 

1153 a = 1 

1154 print( 

1155 a 

1156 ) 

1157 """ 

1158 expected = """\ 

1159 class T: 

1160 a = 1 

1161 print(a) 

1162 """ 

1163 contents, tokens, tree = self.make_data(contents) 

1164 expected = textwrap.dedent(expected) 

1165 results = self.beautify(contents, tokens, tree) 

1166 self.assertEqual(results, expected) 

1167 #@+node:ekr.20200207093606.1: *4* TestOrange.test_join_too_long_lines 

1168 def test_join_too_long_lines(self): 

1169 

1170 # Except where noted, all entries are expected values.... 

1171 line_length = 40 # For testing. 

1172 table = ( 

1173 #1234567890x1234567890x1234567890x1234567890x 

1174 ( 

1175 """print('aaaaaaaaaaaa',\n 'bbbbbbbbbbbb', 'cccccccccccccccc')""", 

1176 """print('aaaaaaaaaaaa',\n 'bbbbbbbbbbbb', 'cccccccccccccccc')\n""", 

1177 ), 

1178 ) 

1179 fails = 0 

1180 for contents, expected in table: 

1181 contents, tokens, tree = self.make_data(contents) 

1182 if 0: 

1183 dump_contents(contents) 

1184 dump_tokens(tokens) 

1185 # dump_tree(tokens, tree) 

1186 results = self.beautify(contents, tokens, tree, 

1187 max_join_line_length=line_length, 

1188 max_split_line_length=line_length, 

1189 ) 

1190 message = ( 

1191 f"\n" 

1192 f" contents: {contents!r}\n" 

1193 f" expected: {expected!r}\n" 

1194 f" got: {results!r}") 

1195 if results != expected: 

1196 fails += 1 

1197 print(f"Fail: {fails}\n{message}") 

1198 elif 0: 

1199 print(f"Ok:\n{message}") 

1200 assert not fails, fails 

1201 #@+node:ekr.20200108075541.1: *4* TestOrange.test_leo_sentinels 

1202 def test_leo_sentinels_1(self): 

1203 

1204 # Careful: don't put a sentinel into the file directly. 

1205 # That would corrupt leoAst.py. 

1206 sentinel = '#@+node:ekr.20200105143308.54: ** test' 

1207 contents = f"""\ 

1208 {sentinel} 

1209 def spam(): 

1210 pass 

1211 """ 

1212 contents, tokens, tree = self.make_data(contents) 

1213 expected = contents.rstrip() + '\n' 

1214 results = self.beautify(contents, tokens, tree) 

1215 self.assertEqual(results, expected) 

1216 #@+node:ekr.20200209155457.1: *4* TestOrange.test_leo_sentinels_2 

1217 def test_leo_sentinels_2(self): 

1218 

1219 # Careful: don't put a sentinel into the file directly. 

1220 # That would corrupt leoAst.py. 

1221 sentinel = '#@+node:ekr.20200105143308.54: ** test' 

1222 contents = f"""\ 

1223 {sentinel} 

1224 class TestClass: 

1225 pass 

1226 """ 

1227 contents, tokens, tree = self.make_data(contents) 

1228 expected = contents.rstrip() + '\n' 

1229 results = self.beautify(contents, tokens, tree) 

1230 self.assertEqual(results, expected) 

1231 #@+node:ekr.20200108082833.1: *4* TestOrange.test_lines_before_class 

1232 def test_lines_before_class(self): 

1233 

1234 contents = """\ 

1235 a = 2 

1236 class aClass: 

1237 pass 

1238 """ 

1239 contents, tokens, tree = self.make_data(contents) 

1240 expected = contents 

1241 results = self.beautify(contents, tokens, tree) 

1242 self.assertEqual(results, expected) 

1243 #@+node:ekr.20200110014220.86: *4* TestOrange.test_multi_line_pet_peeves 

1244 def test_multi_line_pet_peeves(self): 

1245 

1246 contents = """\ 

1247 if x == 4: pass 

1248 if x == 4 : pass 

1249 print (x, y); x, y = y, x 

1250 print (x , y) ; x , y = y , x 

1251 if(1): 

1252 pass 

1253 elif(2): 

1254 pass 

1255 while(3): 

1256 pass 

1257 """ 

1258 # At present Orange doesn't split lines... 

1259 expected = """\ 

1260 if x == 4: pass 

1261 if x == 4: pass 

1262 print(x, y); x, y = y, x 

1263 print(x, y); x, y = y, x 

1264 if (1): 

1265 pass 

1266 elif (2): 

1267 pass 

1268 while (3): 

1269 pass 

1270 """ 

1271 contents, tokens, tree = self.make_data(contents) 

1272 expected = self.adjust_expected(expected) 

1273 results = self.beautify(contents, tokens, tree) 

1274 self.assertEqual(results, expected) 

1275 #@+node:ekr.20200110014220.95: *4* TestOrange.test_one_line_pet_peeves 

1276 def test_one_line_pet_peeves(self): 

1277 

1278 tag = 'test_one_line_pet_peeves' 

1279 verbose_pass = False 

1280 verbose_fail = True 

1281 # Except where noted, all entries are expected values.... 

1282 if 0: 

1283 # Test fails or recents... 

1284 table = ( 

1285 # """a[: 1 if True else 2 :]""", 

1286 """a[:-1]""", 

1287 ) 

1288 else: 

1289 table = ( 

1290 # Assignments... 

1291 # Slices (colons)... 

1292 """a[:-1]""", 

1293 """a[: 1 if True else 2 :]""", 

1294 """a[1 : 1 + 2]""", 

1295 """a[lower:]""", 

1296 """a[lower::]""", 

1297 """a[:upper]""", 

1298 """a[:upper:]""", 

1299 """a[::step]""", 

1300 """a[lower:upper:]""", 

1301 """a[lower:upper:step]""", 

1302 """a[lower + offset : upper + offset]""", 

1303 """a[: upper_fn(x) :]""", 

1304 """a[: upper_fn(x) : step_fn(x)]""", 

1305 """a[:: step_fn(x)]""", 

1306 """a[: upper_fn(x) :]""", 

1307 """a[: upper_fn(x) : 2 + 1]""", 

1308 """a[:]""", 

1309 """a[::]""", 

1310 """a[1:]""", 

1311 """a[1::]""", 

1312 """a[:2]""", 

1313 """a[:2:]""", 

1314 """a[::3]""", 

1315 """a[1:2]""", 

1316 """a[1:2:]""", 

1317 """a[:2:3]""", 

1318 """a[1:2:3]""", 

1319 # * and **, inside and outside function calls. 

1320 """a = b * c""", 

1321 """a = b ** c""", 

1322 """f(*args)""", 

1323 """f(**kwargs)""", 

1324 """f(*args, **kwargs)""", 

1325 """f(a, *args)""", 

1326 """f(a=2, *args)""", 

1327 # Calls... 

1328 """f(-1)""", 

1329 """f(-1 < 2)""", 

1330 """f(1)""", 

1331 """f(2 * 3)""", 

1332 """f(2 + name)""", 

1333 """f(a)""", 

1334 """f(a.b)""", 

1335 """f(a=2 + 3, b=4 - 5, c= 6 * 7, d=8 / 9, e=10 // 11)""", 

1336 """f(a[1 + 2])""", 

1337 """f({key: 1})""", 

1338 """t = (0,)""", 

1339 """x, y = y, x""", 

1340 # Dicts... 

1341 """d = {key: 1}""", 

1342 """d['key'] = a[i]""", 

1343 # Trailing comments: expect two spaces. 

1344 """whatever # comment""", 

1345 """whatever # comment""", 

1346 """whatever # comment""", 

1347 # Word ops... 

1348 """v1 = v2 and v3 if v3 not in v4 or v5 in v6 else v7""", 

1349 """print(v7 for v8 in v9)""", 

1350 # Unary ops... 

1351 """v = -1 if a < b else -2""", 

1352 # Returns... 

1353 """return -1""", 

1354 ) 

1355 fails = 0 

1356 for i, contents in enumerate(table): 

1357 description = f"{tag} part {i}" 

1358 contents, tokens, tree = self.make_data(contents, description) 

1359 expected = self.blacken(contents) 

1360 results = self.beautify(contents, tokens, tree, filename=description) 

1361 message = ( 

1362 f"\n" 

1363 f" contents: {contents.rstrip()}\n" 

1364 f" black: {expected.rstrip()}\n" 

1365 f" orange: {results.rstrip()}") 

1366 if results != expected: 

1367 fails += 1 

1368 if verbose_fail: 

1369 print(f"Fail: {fails}\n{message}") 

1370 elif verbose_pass: 

1371 print(f"Ok:\n{message}") 

1372 self.assertEqual(fails, 0) 

1373 #@+node:ekr.20200210050646.1: *4* TestOrange.test_return 

1374 def test_return(self): 

1375 

1376 contents = """return []""" 

1377 expected = self.blacken(contents) 

1378 contents, tokens, tree = self.make_data(contents) 

1379 results = self.beautify(contents, tokens, tree) 

1380 self.assertEqual(results, expected) 

1381 #@+node:ekr.20200107174742.1: *4* TestOrange.test_single_quoted_string 

1382 def test_single_quoted_string(self): 

1383 

1384 contents = """print('hi')""" 

1385 # blacken suppresses string normalization. 

1386 expected = self.blacken(contents) 

1387 contents, tokens, tree = self.make_data(contents) 

1388 results = self.beautify(contents, tokens, tree) 

1389 self.assertEqual(results, expected) 

1390 #@+node:ekr.20200117180956.1: *4* TestOrange.test_split_lines 

1391 def test_split_lines(self): 

1392 

1393 line_length = 40 # For testing. 

1394 table = ( 

1395 #1234567890x1234567890x1234567890x1234567890x 

1396 """\ 

1397 if 1: 

1398 print('1111111111', '2222222222', '3333333333') 

1399 """, 

1400 """print('aaaaaaaaaaaaa', 'bbbbbbbbbbbbbb', 'cccccc')""", 

1401 """print('aaaaaaaaaaaaa', 'bbbbbbbbbbbbbb', 'cccccc', 'ddddddddddddddddd')""", 

1402 ) 

1403 fails = 0 

1404 for contents in table: 

1405 contents, tokens, tree = self.make_data(contents) 

1406 if 0: 

1407 dump_tokens(tokens) 

1408 # dump_tree(tokens, tree) 

1409 expected = self.blacken(contents, line_length=line_length) 

1410 results = self.beautify(contents, tokens, tree, 

1411 max_join_line_length=line_length, 

1412 max_split_line_length=line_length, 

1413 ) 

1414 message = ( 

1415 f"\n" 

1416 f" contents: {contents!s}\n" 

1417 f" black: {expected!s}\n" 

1418 f" orange: {results!s}") 

1419 if results != expected: 

1420 fails += 1 

1421 print(f"Fail: {fails}\n{message}") 

1422 elif 0: 

1423 print(f"Ok:\n{message}") 

1424 self.assertEqual(fails, 0) 

1425 #@+node:ekr.20200210073227.1: *4* TestOrange.test_split_lines_2 

1426 def test_split_lines_2(self): 

1427 

1428 line_length = 40 # For testing. 

1429 # Different from how black handles things. 

1430 contents = """\ 

1431 if not any([z.kind == 'lt' for z in line_tokens]): 

1432 return False 

1433 """ 

1434 expected = """\ 

1435 if not any( 

1436 [z.kind == 'lt' for z in line_tokens]): 

1437 return False 

1438 """ 

1439 fails = 0 

1440 contents, tokens, tree = self.make_data(contents) 

1441 # expected = self.blacken(contents, line_length=line_length) 

1442 expected = textwrap.dedent(expected) 

1443 results = self.beautify(contents, tokens, tree, 

1444 max_join_line_length=line_length, 

1445 max_split_line_length=line_length, 

1446 ) 

1447 message = ( 

1448 f"\n" 

1449 f" contents: {contents!r}\n" 

1450 f" expected: {expected!r}\n" 

1451 f" got: {results!r}") 

1452 if results != expected: 

1453 fails += 1 

1454 print(f"Fail: {fails}\n{message}") 

1455 elif 0: 

1456 print(f"Ok:\n{message}") 

1457 self.assertEqual(fails, 0) 

1458 #@+node:ekr.20200219144837.1: *4* TestOrange.test_split_lines_3 

1459 def test_split_lines_3(self): 

1460 

1461 line_length = 40 # For testing. 

1462 # Different from how black handles things. 

1463 contents = """print('eee', ('fffffff, ggggggg', 'hhhhhhhh', 'iiiiiii'), 'jjjjjjj', 'kkkkkk')""" 

1464 # This is a bit different from black, but it's good enough for now. 

1465 expected = """\ 

1466 print( 

1467 'eee', 

1468 ('fffffff, ggggggg', 'hhhhhhhh', 'iiiiiii'), 

1469 'jjjjjjj', 

1470 'kkkkkk', 

1471 ) 

1472 """ 

1473 fails = 0 

1474 contents, tokens, tree = self.make_data(contents) 

1475 # expected = self.blacken(contents, line_length=line_length) 

1476 expected = textwrap.dedent(expected) 

1477 results = self.beautify(contents, tokens, tree, 

1478 max_join_line_length=line_length, 

1479 max_split_line_length=line_length, 

1480 ) 

1481 message = ( 

1482 f"\n" 

1483 f" contents: {contents!r}\n" 

1484 f" expected: {expected!r}\n" 

1485 f" got: {results!r}") 

1486 if results != expected: 

1487 fails += 1 

1488 print(f"Fail: {fails}\n{message}") 

1489 elif 0: 

1490 print(f"Ok:\n{message}") 

1491 self.assertEqual(fails, 0) 

1492 #@+node:ekr.20200119155207.1: *4* TestOrange.test_sync_tokens 

1493 def test_sync_tokens(self): 

1494 

1495 contents = """if x == 4: pass""" 

1496 # At present Orange doesn't split lines... 

1497 expected = """if x == 4: pass""" 

1498 contents, tokens, tree = self.make_data(contents) 

1499 expected = self.adjust_expected(expected) 

1500 results = self.beautify(contents, tokens, tree) 

1501 self.assertEqual(results, expected) 

1502 #@+node:ekr.20200209161226.1: *4* TestOrange.test_ternary 

1503 def test_ternary(self): 

1504 

1505 contents = """print(2 if name == 'class' else 1)""" 

1506 contents, tokens, tree = self.make_data(contents) 

1507 expected = contents 

1508 results = self.beautify(contents, tokens, tree) 

1509 self.assertEqual(results, expected) 

1510 #@+node:ekr.20200211093359.1: *4* TestOrange.test_verbatim 

1511 def test_verbatim(self): 

1512 

1513 line_length = 40 # For testing. 

1514 contents = """\ 

1515 #@@nobeautify 

1516 

1517 def addOptionsToParser(self, parser, trace_m): 

1518 

1519 add = parser.add_option 

1520 

1521 def add_bool(option, help, dest=None): 

1522 add(option, action='store_true', dest=dest, help=help) 

1523 

1524 add_bool('--diff', 'use Leo as an external git diff') 

1525 # add_bool('--dock', 'use a Qt dock') 

1526 add_bool('--fullscreen', 'start fullscreen') 

1527 add_bool('--init-docks', 'put docks in default positions') 

1528 # Multiple bool values. 

1529 add('-v', '--version', action='store_true', 

1530 help='print version number and exit') 

1531 

1532 # From leoAtFile.py 

1533 noDirective = 1 # not an at-directive. 

1534 allDirective = 2 # at-all (4.2) 

1535 docDirective = 3 # @doc. 

1536 

1537 #@@beautify 

1538 """ 

1539 contents, tokens, tree = self.make_data(contents) 

1540 expected = contents 

1541 results = self.beautify(contents, tokens, tree, 

1542 max_join_line_length=line_length, 

1543 max_split_line_length=line_length, 

1544 ) 

1545 self.assertEqual(results, expected, msg=contents) 

1546 #@+node:ekr.20200729083027.1: *4* TestOrange.verbatim2 

1547 def test_verbatim2(self): 

1548 

1549 contents = """\ 

1550 #@@beautify 

1551 #@@nobeautify 

1552 #@+at Starts doc part 

1553 # More doc part. 

1554 # The @c ends the doc part. 

1555 #@@c 

1556 """ 

1557 contents, tokens, tree = self.make_data(contents) 

1558 expected = contents 

1559 results = self.beautify(contents, tokens, tree) 

1560 self.assertEqual(results, expected, msg=contents) 

1561 #@+node:ekr.20200211094209.1: *4* TestOrange.test_verbatim_with_pragma 

1562 def test_verbatim_with_pragma(self): 

1563 

1564 line_length = 40 # For testing. 

1565 contents = """\ 

1566 #pragma: no beautify 

1567 

1568 def addOptionsToParser(self, parser, trace_m): 

1569 

1570 add = parser.add_option 

1571 

1572 def add_bool(option, help, dest=None): 

1573 add(option, action='store_true', dest=dest, help=help) 

1574 

1575 add_bool('--diff', 'use Leo as an external git diff') 

1576 # add_bool('--dock', 'use a Qt dock') 

1577 add_bool('--fullscreen', 'start fullscreen') 

1578 add_other('--window-size', 'initial window size (height x width)', m='SIZE') 

1579 add_other('--window-spot', 'initial window position (top x left)', m='SPOT') 

1580 # Multiple bool values. 

1581 add('-v', '--version', action='store_true', 

1582 help='print version number and exit') 

1583 

1584 # pragma: beautify 

1585 """ 

1586 contents, tokens, tree = self.make_data(contents) 

1587 expected = contents 

1588 results = self.beautify(contents, tokens, tree, 

1589 max_join_line_length=line_length, 

1590 max_split_line_length=line_length, 

1591 ) 

1592 self.assertEqual(results, expected, msg=contents) 

1593 #@-others 

1594#@+node:ekr.20191231130208.1: *3* class TestReassignTokens (BaseTest) 

1595class TestReassignTokens(BaseTest): 

1596 """Test cases for the ReassignTokens class.""" 

1597 #@+others 

1598 #@+node:ekr.20191231130320.1: *4* test_reassign_tokens (to do) 

1599 def test_reassign_tokens(self): 

1600 pass 

1601 #@+node:ekr.20191231130334.1: *4* test_nearest_common_ancestor 

1602 def test_nearest_common_ancestor(self): 

1603 

1604 contents = """name='uninverted %s' % d.name()""" 

1605 self.make_data(contents) 

1606 #@-others 

1607#@+node:ekr.20191227051737.1: *3* class TestTOG (BaseTest) 

1608class TestTOG(BaseTest): 

1609 """ 

1610 Tests for the TokenOrderGenerator class. 

1611 

1612 These tests call BaseTest.make_data, which creates the two-way links 

1613 between tokens and the parse tree. 

1614 

1615 The asserts in tog.sync_tokens suffice to create strong unit tests. 

1616 """ 

1617 

1618 debug_list = ['unit-test'] 

1619 

1620 #@+others 

1621 #@+node:ekr.20210318213945.1: *4* TestTOG.Recent bugs & features 

1622 #@+node:ekr.20210321172902.1: *5* test_bug_1851 

1623 def test_bug_1851(self): 

1624 

1625 contents = r'''\ 

1626 def foo(a1): 

1627 pass 

1628 ''' 

1629 contents, tokens, tree = self.make_data(contents) 

1630 #@+node:ekr.20210914161519.1: *5* test_bug_2171 

1631 def test_bug_2171(self): 

1632 

1633 import sys 

1634 if sys.version_info < (3, 9, 0): 

1635 self.skipTest('Requires Python 3.9') 

1636 

1637 contents = "'HEAD:%s' % g.os_path_join( *(relative_path + [filename]) )" 

1638 contents, tokens, tree = self.make_data(contents) 

1639 #@+node:ekr.20210318213133.1: *5* test_full_grammar 

1640 def test_full_grammar(self): 

1641 # Load py3_test_grammar.py. 

1642 dir_ = os.path.dirname(__file__) 

1643 path = os.path.abspath(os.path.join(dir_, '..', 'py3_test_grammar.py')) 

1644 assert os.path.exists(path), path 

1645 if py_version < (3, 8): 

1646 self.skipTest('Requires Python 3.8 or above') 

1647 # Verify that leoAst can parse the file. 

1648 contents = read_file(path) 

1649 self.make_data(contents) 

1650 #@+node:ekr.20210318214057.1: *5* test_line_315 

1651 def test_line_315(self): 

1652 

1653 # 

1654 # Known bug: positional-only args exist in Python 3.8,

1655 # but there is no easy way of syncing them. 

1656 # This bug will not be fixed. 

1657 # The workaround is to require Python 3.9.

1658 if py_version >= (3, 9): 

1659 contents = '''\ 

1660 f(1, x=2, 

1661 *[3, 4], y=5) 

1662 ''' 

1663 elif 1: # Expected order. 

1664 contents = '''f(1, *[a, 3], x=2, y=5)''' 

1665 else: # Legacy. 

1666 contents = '''f(a, *args, **kwargs)''' 

1667 contents, tokens, tree = self.make_data(contents) 

1668 #@+node:ekr.20210320095504.8: *5* test_line_337 

1669 def test_line_337(self): 

1670 

1671 if py_version >= (3, 8): # Requires neither line_no nor col_offset fields. 

1672 contents = '''def f(a, b:1, c:2, d, e:3=4, f=5, *g:6, h:7, i=8, j:9=10, **k:11) -> 12: pass''' 

1673 else: 

1674 contents = '''def f(a, b, d=4, *arg, **keys): pass''' 

1675 contents, tokens, tree = self.make_data(contents) 

1676 #@+node:ekr.20210320065202.1: *5* test_line_483 

1677 def test_line_483(self): 

1678 

1679 if py_version < (3, 8): 

1680 # Python 3.8: https://bugs.python.org/issue32117 

1681 self.skipTest(f"Python {v1}.{v2} does not support generalized iterable assignment") 

1682 contents = '''def g3(): return 1, *return_list''' 

1683 contents, tokens, tree = self.make_data(contents) 

1684 #@+node:ekr.20210320065344.1: *5* test_line_494 

1685 def test_line_494(self): 

1686 

1687 """ 

1688 https://docs.python.org/3/whatsnew/3.8.html#other-language-changes 

1689 

1690 Generalized iterable unpacking in yield and return statements no longer 

1691 requires enclosing parentheses. This brings the yield and return syntax 

1692 into better agreement with normal assignment syntax. 

1693 """ 

1694 if py_version < (3, 8): 

1695 # Python 3.8: https://bugs.python.org/issue32117 

1696 self.skipTest(f"Python {v1}.{v2} does not support generalized iterable assignment") 

1697 contents = '''def g2(): yield 1, *yield_list''' 

1698 contents, tokens, tree = self.make_data(contents) 

1699 #@+node:ekr.20210319130349.1: *5* test_line_875 

1700 def test_line_875(self): 

1701 

1702 contents = '''list((x, y) for x in 'abcd' for y in 'abcd')''' 

1703 contents, tokens, tree = self.make_data(contents) 

1704 #@+node:ekr.20210319130616.1: *5* test_line_898 

1705 def test_line_898(self): 

1706 

1707 contents = '''g = ((i,j) for i in range(x) if t for j in range(x))''' 

1708 contents, tokens, tree = self.make_data(contents) 

1709 #@+node:ekr.20210320085705.1: *5* test_walrus_operator 

1710 def test_walrus_operator(self): 

1711 

1712 if py_version < (3, 8): 

1713 self.skipTest(f"Python {v1}.{v2} does not support assignment expressions") 

1714 contents = '''if (n := len(a)) > 10: pass''' 

1715 contents, tokens, tree = self.make_data(contents) 

1716 #@+node:ekr.20191227052446.10: *4* TestTOG.Contexts... 

1717 #@+node:ekr.20191227052446.11: *5* test_ClassDef 

1718 def test_ClassDef(self): 

1719 contents = """\ 

1720 class TestClass1: 

1721 pass 

1722 

1723 def decorator(): 

1724 pass 

1725 

1726 @decorator 

1727 class TestClass2: 

1728 pass 

1729 

1730 @decorator 

1731 class TestClass(base1, base2): 

1732 pass 

1733 """ 

1734 self.make_data(contents) 

1735 #@+node:ekr.20191227052446.12: *5* test_ClassDef2 

1736 def test_ClassDef2(self): 

1737 contents = r'''\ 

1738 """ds 1""" 

1739 class TestClass: 

1740 """ds 2""" 

1741 def long_name(a, b=2): 

1742 """ds 3""" 

1743 print('done') 

1744 ''' 

1745 self.make_data(contents) 

1746 #@+node:ekr.20191227052446.13: *5* test_FunctionDef 

1747 def test_FunctionDef(self): 

1748 contents = r"""\ 

1749 def run(fileName=None, pymacs=None): 

1750 pass 

1751 """ 

1752 self.make_data(contents) 

1753 #@+node:ekr.20200111171738.1: *5* test_FunctionDef_with_annotations 

1754 def test_FunctionDef_with_annotations(self): 

1755 contents = r"""\ 

1756 def foo(a: 'x', b: 5 + 6, c: list) -> max(2, 9): 

1757 pass 

1758 """ 

1759 self.make_data(contents) 

1760 # contents, tokens, tree = self.make_data(contents) 

1761 # dump_ast(tree) 

1762 #@+node:ekr.20210802162650.1: *5* test_FunctionDef_with_posonly_args 

1763 def test_FunctionDef_with_posonly_args(self): 

1764 

1765 import sys 

1766 if sys.version_info < (3, 9, 0): 

1767 self.skipTest('Requires Python 3.9') 

1768 

1769 # From PEP 570 

1770 contents = r"""\ 

1771 def pos_only_arg(arg, /): 

1772 pass 

1773 def kwd_only_arg(*, arg): 

1774 pass 

1775 def combined_example(pos_only, /, standard, *, kwd_only): 

1776 pass 

1777 """ 

1778 self.make_data(contents) 

1779 #@+node:ekr.20191227052446.14: *4* TestTOG.Expressions & operators... 

1780 #@+node:ekr.20191227052446.15: *5* test_attribute 

1781 def test_attribute(self): 

1782 contents = r"""\ 

1783 open(os.devnull, "w") 

1784 """ 

1785 self.make_data(contents) 

1786 #@+node:ekr.20191227052446.16: *5* test_CompareOp 

1787 def test_CompareOp(self): 

1788 contents = r"""\ 

1789 if a and not b and c: 

1790 pass 

1791 """ 

1792 self.make_data(contents) 

1793 #@+node:ekr.20191227052446.17: *5* test_Dict_1 

1794 def test_Dict_1(self):

1795 contents = r"""\ 

1796 d = {'a' if x else 'b': True,} 

1797 """ 

1798 self.make_data(contents) 

1799 #@+node:ekr.20200111191153.1: *5* test_Dict_2 

1800 def test_Dict_2(self): 

1801 contents = r"""\ 

1802 d = {} 

1803 """ 

1804 self.make_data(contents) 

1805 #@+node:ekr.20191227052446.18: *5* test_DictComp 

1806 def test_DictComp(self): 

1807 # leoGlobals.py, line 3028. 

1808 contents = r"""\ 

1809 d2 = {val: key for key, val in d} 

1810 """ 

1811 self.make_data(contents) 

1812 #@+node:ekr.20200112042410.1: *5* test_ExtSlice 

1813 def test_ExtSlice(self): 

1814 contents = r"""a [1, 2: 3]""" 

1815 self.make_data(contents) 

1816 #@+node:ekr.20191227052446.19: *5* test_ListComp 

1817 def test_ListComp(self): 

1818 # ListComp and comprehension. 

1819 contents = r"""\ 

1820 any([p2.isDirty() for p2 in p.subtree()]) 

1821 """ 

1822 self.make_data(contents) 

1823 #@+node:ekr.20191227052446.20: *5* test_NameConstant 

1824 def test_NameConstant(self): 

1825 contents = r"""\ 

1826 run(a=None, b=str) 

1827 """ 

1828 self.make_data(contents) 

1829 #@+node:ekr.20191227052446.21: *5* test_Operator: semicolon 

1830 def test_op_semicolon(self): 

1831 contents = r"""\ 

1832 print('c'); 

1833 print('d') 

1834 """ 

1835 self.make_data(contents) 

1836 #@+node:ekr.20191227052446.22: *5* test_Operator: semicolon between statements 

1837 def test_op_semicolon2(self): 

1838 contents = r"""\ 

1839 a = 1 ; b = 2 

1840 print('a') ; print('b') 

1841 """ 

1842 self.make_data(contents) 

1843 #@+node:ekr.20200111194454.1: *5* test_Set 

1844 def test_Set(self): 

1845 contents = """{'a', 'b'}""" 

1846 self.make_data(contents) 

1847 #@+node:ekr.20200111195654.1: *5* test_SetComp 

1848 def test_SetComp(self): 

1849 contents = """aSet = { (x, y) for x in r for y in r if x < y }""" 

1850 self.make_data(contents) 

1851 #@+node:ekr.20191227052446.23: *5* test_UnaryOp 

1852 def test_UnaryOp(self): 

1853 contents = r"""\ 

1854 print(-(2)) 

1855 """ 

1856 self.make_data(contents) 

1857 #@+node:ekr.20191227052446.65: *4* TestTOG.f-strings.... 

1858 #@+node:ekr.20191227052446.66: *5* test_fstring01: complex Call 

1859 def test_fstring1(self): 

1860 # Line 1177, leoApp.py 

1861 contents = r"""\ 

1862 print( 

1863 message = f"line 1: {old_id!r}\n" "line 2\n" 

1864 ) 

1865 print('done') 

1866 """ 

1867 self.make_data(contents) 

1868 #@+node:ekr.20191227052446.67: *5* test_fstring02: Ternary 

1869 def test_fstring2(self): 

1870 contents = r"""\ 

1871 func(f"{b if not cond1 else ''}") 

1872 """ 

1873 self.make_data(contents) 

1874 #@+node:ekr.20191227052446.68: *5* test_fstring03: single f-string 

1875 def test_fstring3(self): 

1876 contents = r"""\ 

1877 print(f'{7.1}') 

1878 print('end') 

1879 """ 

1880 self.make_data(contents) 

1881 #@+node:ekr.20191227052446.69: *5* test_fstring04: f-string + plain 

1882 def test_fstring4(self): 

1883 contents = r"""\ 

1884 print(f'{7.1}' 'p7.2') 

1885 print('end') 

1886 """ 

1887 self.make_data(contents) 

1888 #@+node:ekr.20191227052446.70: *5* test_fstring05: plain + f-string 

1889 def test_fstring5(self): 

1890 contents = r"""\ 

1891 print('p1' f'{f2}') 

1892 'end' 

1893 """ 

1894 self.make_data(contents) 

1895 #@+node:ekr.20191227052446.71: *5* test_fstring06: f-string + fstring 

1896 def test_fstring6(self): 

1897 contents = r"""\ 

1898 print(f'{f1}' f'{f2}') 

1899 'end' 

1900 """ 

1901 self.make_data(contents) 

1902 #@+node:ekr.20191227052446.72: *5* test_fstring07: many 

1903 def test_fstring7(self): 

1904 contents = r"""\ 

1905 print('s1', f'{f2}' f'f3' f'{f4}' 's5') 

1906 'end' 

1907 """ 

1908 self.make_data(contents) 

1909 #@+node:ekr.20191227052446.73: *5* test_fstring08: ternary op 

1910 def test_fstring8(self): 

1911 # leoFind.py line 856 

1912 contents = r"""\ 

1913 a = f"{'a' if x else 'b'}" 

1914 f() 

1915 

1916 # Pass 

1917 # print(f"{'a' if x else 'b'}") 

1918 """ 

1919 self.make_data(contents) 

1920 #@+node:ekr.20191227052446.74: *5* test_fstring09: leoFind.py line 856 

1921 def test_fstring9(self): 

1922 contents = r"""\ 

1923 func( 

1924 "Isearch" 

1925 f"{' Backward' if True else ''}" 

1926 ) 

1927 print('done') 

1928 """ 

1929 self.make_data(contents) 

1930 #@+node:ekr.20191227052446.75: *5* test_fstring10: leoFind.py: line 861 

1931 def test_fstring10(self): 

1932 # leoFind.py: line 861 

1933 contents = r"""\ 

1934 one(f"{'B'}" ": ") 

1935 """ 

1936 self.make_data(contents) 

1937 #@+node:ekr.20191227052446.76: *5* test_fstring11: joins 

1938 def test_fstring11(self): 

1939 contents = r"""\ 

1940 print(f'x3{e3+1}y3' f'x4{e4+2}y4') 

1941 print('done') 

1942 """ 

1943 self.make_data(contents) 

1944 #@+node:ekr.20191227052446.77: *6* more 

1945 # Single f-strings. 

1946 # 'p1' ; 

1947 # f'f1' ; 

1948 # f'x1{e1}y1' ; 

1949 # f'x2{e2+1}y2{e2+2}z2' ; 

1950 

1951 # Concatenated strings...

1952 # 'p2', 'p3' ; 

1953 # f'f2' 'f3' ; 

1954 

1955 # f'x5{e5+1}y5{e5+1}z5' f'x6{e6+1}y6{e6+1}z6' ; 

1956 #@+node:ekr.20191227052446.78: *5* test_fstring12: joins + 1 f-expr 

1957 def test_fstring12(self): 

1958 contents = r"""\ 

1959 print(f'x1{e1}y1', 'p1') 

1960 print(f'x2{e2}y2', f'f2') 

1961 print(f'x3{e3}y3', f'x4{e4}y4') 

1962 print('end') 

1963 """ 

1964 self.make_data(contents) 

1965 #@+node:ekr.20191227052446.79: *5* test_fstring13: joins + 2 f-exprs 

1966 def test_fstring13(self): 

1967 contents = r"""\ 

1968 print(f'x1{e1}y1{e2}z1', 'p1') 

1969 print(f'x2{e3}y2{e3}z2', f'f2') 

1970 print(f'x3{e4}y3{e5}z3', f'x4{e6}y4{e7}z4') 

1971 print('end') 

1972 """ 

1973 self.make_data(contents) 

1974 #@+node:ekr.20191227052446.80: *5* test_fstring14: complex, with commas 

1975 def test_fstring14(self): 

1976 contents = r"""\ 

1977 print(f"{list(z for z in ('a', 'b', 'c') if z != 'b')}") 

1978 """ 

1979 self.make_data(contents) 

1980 #@+node:ekr.20191227052446.81: *5* test_fstring15 

1981 def test_fstring15(self): 

1982 contents = r"""\ 

1983 print(f"test {a}={2}") 

1984 print('done') 

1985 """ 

1986 self.make_data(contents) 

1987 #@+node:ekr.20191227052446.83: *5* test_fstring16: simple 

1988 def test_fstring16(self): 

1989 contents = r"""\ 

1990 'p1' ; 

1991 f'f1' ; 

1992 'done' ; 

1993 """ 

1994 self.make_data(contents) 

1995 #@+node:ekr.20191227052446.82: *5* test_regex_fstring 

1996 def test_regex_fstring(self): 

1997 # Line 7709, leoGlobals.py 

1998 contents = r'''\ 

1999 fr"""{kinds}://[^\s'"]+[\w=/]""" 

2000 ''' 

2001 self.make_data(contents) 

2002 #@+node:ekr.20191227052446.32: *4* TestTOG.If... 

2003 #@+node:ekr.20191227052446.33: *5* test_from leoTips.py 

2004 def test_if1(self): 

2005 # Line 93, leoTips.py 

2006 contents = r"""\ 

2007 self.make_data(contents) 

2008 unseen = [i for i in range(5) if i not in seen] 

2009 for issue in data: 

2010 for a in aList: 

2011 print('a') 

2012 else: 

2013 print('b') 

2014 if b: 

2015 print('c') 

2016 """ 

2017 self.make_data(contents) 

2018 #@+node:ekr.20191227052446.34: *5* test_if + tuple 

2019 def test_if2(self): 

2020 contents = r"""\ 

2021 for i, j in b: 

2022 pass 

2023 """ 

2024 self.make_data(contents) 

2025 #@+node:ekr.20191227052446.35: *5* test_if + unary op 

2026 def test_if3(self): 

2027 contents = r"""\ 

2028 if -(2): 

2029 pass 

2030 """ 

2031 self.make_data(contents) 

2032 #@+node:ekr.20191227052446.36: *5* test_if, elif 

2033 def test_if4(self): 

2034 contents = r"""\ 

2035 if 1: 

2036 print('a') 

2037 elif 2: 

2038 print('b') 

2039 elif 3: 

2040 print('c') 

2041 print('d') 

2042 print('-') 

2043 if 1: 

2044 print('e') 

2045 elif 2: 

2046 print('f') 

2047 print('g') 

2048 """ 

2049 self.make_data(contents) 

2050 #@+node:ekr.20191227052446.37: *5* test_if, elif + 2 

2051 def test_if5(self): 

2052 contents = r"""\ 

2053 if 1: 

2054 pass 

2055 elif 2: 

2056 pass 

2057 pass 

2058 """ 

2059 self.make_data(contents) 

2060 #@+node:ekr.20191227052446.38: *5* test_if, elif, else 

2061 def test_if6(self): 

2062 contents = r"""\ 

2063 if (a): 

2064 print('a1') 

2065 print('a2') 

2066 elif b: 

2067 print('b1') 

2068 print('b2') 

2069 else: 

2070 print('c1') 

2071 print('c2') 

2072 """ 

2073 self.make_data(contents) 

2074 #@+node:ekr.20191227052446.39: *5* test_if, else 

2075 def test_if7(self): 

2076 contents = r"""\ 

2077 if 1: 

2078 print('a') 

2079 else: 

2080 print('b') 

2081 """ 

2082 self.make_data(contents) 

2083 #@+node:ekr.20191227052446.40: *5* test_if, else, if 

2084 def test_if8(self): 

2085 contents = r"""\ 

2086 if 1: 

2087 print('a') 

2088 else: 

2089 if 2: 

2090 print('b') 

2091 """ 

2092 self.make_data(contents) 

2093 #@+node:ekr.20191227052446.41: *5* test_Nested If's 

2094 def test_if9(self): 

2095 contents = r"""\ 

2096 if a: 

2097 if b: 

2098 print('b') 

2099 else: 

2100 if d: 

2101 print('d') 

2102 """ 

2103 self.make_data(contents) 

2104 #@+node:ekr.20191227052446.42: *5* test_ternary + if 

2105 def test_if10(self): 

2106 contents = r"""\ 

2107 if 1: 

2108 a = 'class' if cond else 'def' 

2109 # find_pattern = prefix + ' ' + word 

2110 print('1') 

2111 else: 

2112 print('2') 

2113 """ 

2114 self.make_data(contents) 

2115 #@+node:ekr.20191227145620.1: *4* TestTOG.Miscellaneous... 

2116 #@+node:ekr.20200206041753.1: *5* test_comment_in_set_links 

2117 def test_comment_in_set_links(self): 

2118 contents = """ 

2119 def spam(): 

2120 # comment 

2121 pass 

2122 """ 

2123 self.make_data(contents) 

2124 #@+node:ekr.20200112065944.1: *5* test_ellipsis_1 

2125 def test_ellipsis_1(self): 

2126 contents = """ 

2127 def spam(): 

2128 ... 

2129 """ 

2130 self.make_data(contents) 

2131 #@+node:ekr.20200112070228.1: *5* test_ellipsis_2 

2132 def test_ellipsis_2(self): 

2133 contents = """ 

2134 def partial(func: Callable[..., str], *args): 

2135 pass 

2136 """ 

2137 self.make_data(contents) 

2138 #@+node:ekr.20191227075951.1: *5* test_end_of_line 

2139 def test_end_of_line(self): 

2140 self.make_data("""# Only a comment.""") 

2141 #@+node:ekr.20191227052446.50: *4* TestTOG.Plain Strings... 

2142 #@+node:ekr.20191227052446.52: *5* test_\x and \o escapes 

2143 def test_escapes(self): 

2144 # Line 4609, leoGlobals.py 

2145 contents = r"""\ 

2146 print("\x7e" "\0777") # tilde. 

2147 print('done') 

2148 """ 

2149 self.make_data(contents) 

2150 #@+node:ekr.20191227052446.53: *5* test_backslashes in docstring 

2151 def test_backslashes(self): 

2152 # leoGlobals.py. 

2153 contents = r'''\ 

2154 class SherlockTracer: 

2155 """before\\after""" 

2156 ''' 

2157 self.make_data(contents) 

2158 #@+node:ekr.20191227052446.54: *5* test_bs/nl 

2159 def test_bs_nl(self): 

2160 contents = r"""\ 

2161 print('hello\ 

2162 world') 

2163 """ 

2164 self.make_data(contents) 

2165 #@+node:ekr.20191227052446.55: *5* test_bytes bs-x 

2166 def test_bytes(self): 

2167 # Line 201, leoApp.py 

2168 contents = r"""\ 

2169 print(b'\xfe') 

2170 print('done') 

2171 """ 

2172 self.make_data(contents) 

2173 #@+node:ekr.20191227052446.56: *5* test_empty string 

2174 def test_empty_string(self):

2175 contents = r"""\ 

2176 self.s = '' 

2177 self.i = 0 

2178 """ 

2179 self.make_data(contents) 

2180 #@+node:ekr.20191227052446.57: *5* test_escaped string delims 

2181 def test_escaped_delims(self): 

2182 contents = r"""\ 

2183 print("a\"b") 

2184 """ 

2185 self.make_data(contents) 

2186 #@+node:ekr.20191227052446.58: *5* test_escaped strings 

2187 def test_escaped_strings(self): 

2188 contents = r"""\ 

2189 f1(a='\b', b='\n', t='\t') 

2190 f2(f='\f', r='\r', v='\v') 

2191 f3(bs='\\') 

2192 """ 

2193 self.make_data(contents) 

2194 #@+node:ekr.20191227052446.59: *5* test_f-string join 

2195 def test_fstring_join(self): 

2196 # The first newline causes the failure.

2197 contents = r"""\ 

2198 print(f"a {old_id!r}\n" "b\n") 

2199 print('done') 

2200 """ 

2201 self.make_data(contents) 

2202 #@+node:ekr.20191227052446.64: *5* test_potential_fstring 

2203 def test_potential_fstring(self): 

2204 contents = r"""\ 

2205 print('test %s=%s'%(a, 2)) 

2206 print('done') 

2207 """ 

2208 self.make_data(contents) 

2209 #@+node:ekr.20191227052446.60: *5* test_raw docstring 

2210 def test_raw_docstring(self): 

2211 contents = r'''\ 

2212 # Line 1619 leoFind.py 

2213 print(r"""DS""") 

2214 ''' 

2215 self.make_data(contents) 

2216 #@+node:ekr.20191227052446.61: *5* test_raw escaped strings 

2217 def test_raw_escapes(self): 

2218 contents = r"""\ 

2219 r1(a=r'\b', b=r'\n', t=r'\t') 

2220 r2(f=r'\f', r=r'\r', v=r'\v') 

2221 r3(bs=r'\\') 

2222 """ 

2223 self.make_data(contents) 

2224 #@+node:ekr.20191227052446.62: *5* test_single quote 

2225 def test_single_quote(self): 

2226 # leoGlobals.py line 806. 

2227 contents = r"""\ 

2228 print('"') 

2229 """ 

2230 self.make_data(contents) 

2231 #@+node:ekr.20191227052446.63: *5* test_string concatenation_1 

2232 def test_string_concatenation_1(self):

2233 contents = r"""\ 

2234 print('a' 'b') 

2235 print('c') 

2236 """ 

2237 self.make_data(contents) 

2238 #@+node:ekr.20200111042825.1: *5* test_string_concatenation_2 

2239 def test_string_concatenation_2(self): 

2240 # Crash in leoCheck.py. 

2241 contents = """return self.Type('error', 'no member %s' % ivar)""" 

2242 self.make_data(contents) 

2243 #@+node:ekr.20191227052446.43: *4* TestTOG.Statements... 

2244 #@+node:ekr.20200112075707.1: *5* test_AnnAssign 

2245 def test_AnnAssign(self): 

2246 contents = """x: int = 0""" 

2247 self.make_data(contents) 

2248 #@+node:ekr.20200112071833.1: *5* test_AsyncFor 

2249 def test_AsyncFor(self): 

2250 # This may require Python 3.7. 

2251 contents = """\ 

2252 async def commit(session, data): 

2253 async for z in session.transaction(): 

2254 await z(data) 

2255 else: 

2256 print('oops') 

2257 """ 

2258 self.make_data(contents) 

2259 #@+node:ekr.20200111175043.1: *5* test_AsyncFunctionDef 

2260 def test_AsyncFunctionDef(self): 

2261 contents = """\ 

2262 @my_decorator 

2263 async def count() -> 42: 

2264 print("One") 

2265 await asyncio.sleep(1) 

2266 """ 

2267 self.make_data(contents) 

2268 #@+node:ekr.20200112073151.1: *5* test_AsyncWith 

2269 def test_AsyncWith(self): 

2270 contents = """\ 

2271 async def commit(session, data): 

2272 async with session.transaction(): 

2273 await session.update(data) 

2274 """ 

2275 self.make_data(contents) 

2276 #@+node:ekr.20191227052446.44: *5* test_Call 

2277 def test_Call(self): 

2278 contents = """func(a, b, one='one', two=2, three=4+5, *args, **kwargs)""" 

2279 # contents = """func(*args, **kwargs)""" 

2280 # f1(a,b=2) 

2281 # f2(1 + 2) 

2282 # f3(arg, *args, **kwargs) 

2283 # f4(a='a', *args, **kwargs) 

2284 self.make_data(contents) 

2285 #@+node:ekr.20200206040732.1: *5* test_Delete 

2286 def test_Delete(self): 

2287 

2288 # Coverage test for spaces 

2289 contents = """del x""" 

2290 self.make_data(contents) 

2291 #@+node:ekr.20200111175335.1: *5* test_For 

2292 def test_For(self): 

2293 contents = r"""\ 

2294 for a in b: 

2295 pass 

2296 """ 

2297 self.make_data(contents) 

2298 #@+node:ekr.20191227052446.45: *5* test_Global 

2299 def test_Global(self): 

2300 # Line 1604, leoGlobals.py 

2301 contents = r""" 

2302 def spam(): 

2303 global gg 

2304 print('') 

2305 """ 

2306 self.make_data(contents) 

2307 #@+node:ekr.20200111200424.1: *5* test_ImportFrom 

2308 def test_ImportFrom(self): 

2309 contents = r"""from a import b as c""" 

2310 self.make_data(contents) 

2311 #@+node:ekr.20210318174705.1: *5* test_ImportFromStar 

2312 def test_ImportFromStar(self): 

2313 contents = r"""from sys import *""" 

2314 self.make_data(contents) 

2315 #@+node:ekr.20200206040424.1: *5* test_Lambda 

2316 def test_Lambda(self): 

2317 

2318 # Coverage test for spaces 

2319 contents = """f = lambda x: x""" 

2320 self.make_data(contents) 

2321 #@+node:ekr.20200111200640.1: *5* test_Nonlocal 

2322 def test_Nonlocal(self): 

2323 contents = r"""nonlocal name1, name2""" 

2324 self.make_data(contents) 

2325 #@+node:ekr.20191227052446.46: *5* test_Try 

2326 def test_Try(self): 

2327 contents = r"""\ 

2328 try: 

2329 print('a1') 

2330 print('a2') 

2331 except ImportError: 

2332 print('b1') 

2333 print('b2') 

2334 except SyntaxError: 

2335 print('c1') 

2336 print('c2') 

2337 finally: 

2338 print('d1') 

2339 print('d2') 

2340 """ 

2341 self.make_data(contents) 

2342 #@+node:ekr.20191227052446.47: *5* test_TryExceptElse 

2343 def test_Try2(self): 

2344 # Line 240: leoDebugger.py 

2345 contents = r"""\ 

2346 try: 

2347 print('a') 

2348 except ValueError: 

2349 print('b') 

2350 else: 

2351 print('c') 

2352 """ 

2353 self.make_data(contents) 

2354 #@+node:ekr.20200206041336.1: *5* test_While 

2355 def test_While(self): 

2356 contents = r"""\ 

2357 while f(): 

2358 print('continue') 

2359 else: 

2360 print('done') 

2361 """ 

2362 self.make_data(contents) 

2363 #@+node:ekr.20191227052446.48: *5* test_With 

2364 def test_With(self): 

2365 # leoGlobals.py, line 1785. 

2366 contents = r"""\ 

2367 with open(fn) as f: 

2368 pass 

2369 """ 

2370 self.make_data(contents) 

2371 #@+node:ekr.20200206041611.1: *5* test_Yield 

2372 def test_Yield(self): 

2373 contents = r"""\ 

2374 def gen_test(): 

2375 yield self.gen_token('newline', '\n') 

2376 """ 

2377 self.make_data(contents) 

2378 #@+node:ekr.20191227052446.49: *5* test_YieldFrom 

2379 def test_YieldFrom(self): 

2380 # Line 1046, leoAst.py 

2381 contents = r"""\ 

2382 def gen_test(): 

2383 self.node = tree 

2384 yield from self.gen_token('newline', '\n') 

2385 print('done') 

2386 """ 

2387 self.make_data(contents) 

2388 #@+node:ekr.20191228193740.1: *4* TestTOG.test_aa && zz 

2389 def test_aaa(self): 

2390 """The first test.""" 

2391 g.total_time = get_time() 

2392 

2393 def test_zzz(self): 

2394 """The last test.""" 

2395 t2 = get_time() 

2396 self.update_times('90: TOTAL', t2 - g.total_time) 

2397 # self.dump_stats() 

2398 #@-others 

2399#@+node:ekr.20200110093802.1: *3* class TestTokens (BaseTest) 

2400class TestTokens(BaseTest): 

2401 """Unit tests for tokenizing.""" 

2402 #@+others 

2403 #@+node:ekr.20200122165910.1: *4* TT.show_asttokens_script 

2404 def show_asttokens_script(self): 

2405 """ 

2406 A script showing how asttokens can *easily* do the following: 

2407 - Inject parent/child links into ast nodes. 

2408 - Inject many-to-many links between tokens and ast nodes. 

2409 """ 

2410 # pylint: disable=import-error,reimported 

2411 import ast 

2412 import asttokens 

2413 import token as token_module 

2414 stack: List[ast.AST] = [] 

2415 # Define TestToken class and helper functions. 

2416 #@+others 

2417 #@+node:ekr.20200122170101.3: *5* class TestToken 

2418 class TestToken: 

2419 """A patchable representation of the 5-tuples created by tokenize and used by asttokens.""" 

2420 

2421 def __init__(self, kind, value): 

2422 self.kind = kind 

2423 self.value = value 

2424 self.node_list: List[Any] = [] 

2425 

2426 def __str__(self): 

2427 tokens_s = ', '.join([z.__class__.__name__ for z in self.node_list]) 

2428 return f"{self.kind:12} {self.value:20} {tokens_s!s}" 

2429 

2430 __repr__ = __str__ 

2431 #@+node:ekr.20200122170101.1: *5* function: atok_name 

2432 def atok_name(token): 

2433 """Return a good looking name for the given 5-tuple""" 

2434 return token_module.tok_name[token[0]].lower() # type:ignore 

2435 #@+node:ekr.20200122170101.2: *5* function: atok_value 

2436 def atok_value(token): 

2437 """Print a good looking value for the given 5-tuple""" 

2438 return token.string if atok_name(token) == 'string' else repr(token.string) 

2439 #@+node:ekr.20200122170057.1: *5* function: dump_token 

2440 def dump_token(token): 

2441 node_list = list(set(getattr(token, 'node_set', []))) 

2442 node_list = sorted([z.__class__.__name__ for z in node_list]) 

2443 return f"{token.index:2} {atok_name(token):12} {atok_value(token):20} {node_list}" 

2444 #@+node:ekr.20200122170337.1: *5* function: postvisit 

2445 def postvisit(node, par_value, value): 

2446 nonlocal stack 

2447 stack.pop() 

2448 return par_value or [] 

2449 #@+node:ekr.20200122170101.4: *5* function: previsit 

2450 def previsit(node, par_value): 

2451 nonlocal stack 

2452 if isinstance(node, ast.Module): 

2453 stack = [] 

2454 if stack: 

2455 parent = stack[-1] 

2456 children: List[ast.AST] = getattr(parent, 'children', []) 

2457 parent.children = children + [node] # type:ignore 

2458 node.parent = parent 

2459 else: 

2460 node.parent = None 

2461 node.children = [] 

2462 stack.append(node) 

2463 return par_value, [] 

2464 #@-others 

2465 table = ( 

2466 # """print('%s in %5.2f sec' % ("done", 2.9))\n""", 

2467 """print(a[1:2:3])\n""", 

2468 ) 

2469 for source in table: 

2470 print(f"Source...\n\n{source}") 

2471 atok = asttokens.ASTTokens(source, parse=True) 

2472 # Create a patchable list of Token objects. 

2473 tokens = [TestToken(atok_name(z), atok_value(z)) for z in atok.tokens] 

2474 # Inject parent/child links into nodes. 

2475 asttokens.util.visit_tree(atok.tree, previsit, postvisit) 

2476 # Create token.token_list for each token. 

2477 for node in asttokens.util.walk(atok.tree): 

2478 # Inject node into token.node_list 

2479 for ast_token in atok.get_tokens(node, include_extra=True): 

2480 i = ast_token.index 

2481 token = tokens[i] 

2482 token.node_list.append(node) 

2483 # Print the resulting parent/child links. 

2484 for node in ast.walk(atok.tree): 

2485 if hasattr(node, 'first_token'): 

2486 parent = getattr(node, 'parent', None) 

2487 parent_s = parent.__class__.__name__ if parent else 'None' 

2488 children: List[ast.AST] = getattr(node, 'children', []) 

2489 if children: 

2490 children_s = ', '.join(z.__class__.__name__ for z in children) 

2491 else: 

2492 children_s = 'None' 

2493 print( 

2494 f"\n" 

2495 f" node: {node.__class__.__name__}\n" 

2496 f" parent: {parent_s}\n" 

2497 f"children: {children_s}") 

2498 # Print the resulting tokens. 

2499 g.printObj(tokens, tag='Tokens') 

2500 #@+node:ekr.20200121025938.1: *4* TT.show_example_dump 

2501 def show_example_dump(self): 

2502 

2503 # Will only be run when enabled explicitly. 

2504 

2505 contents = """\ 

2506 print('line 1') 

2507 print('line 2') 

2508 print('line 3') 

2509 """ 

2510 contents, tokens, tree = self.make_data(contents) 

2511 dump_contents(contents) 

2512 dump_tokens(tokens) 

2513 dump_tree(tokens, tree) 
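    # Illustrative note (not part of the original file): the two helpers above
    # are not named test_*, so unittest discovery skips them, as the comment in
    # show_example_dump says. A hypothetical way to run one by hand, assuming
    # BaseTest needs no special setup:
    #
    #   TestTokens('show_example_dump').run()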

2514 #@+node:ekr.20200110015014.6: *4* TT.test_bs_nl_tokens 

2515 def test_bs_nl_tokens(self): 

2516 # Test https://bugs.python.org/issue38663. 

2517 

2518 contents = """\ 

2519 print \ 

2520 ('abc') 

2521 """ 

2522 self.check_roundtrip(contents) 

2523 #@+node:ekr.20200110015014.8: *4* TT.test_continuation_1 

2524 def test_continuation_1(self): 

2525 

2526 contents = """\ 

2527 a = (3,4, 

2528 5,6) 

2529 y = [3, 4, 

2530 5] 

2531 z = {'a': 5, 

2532 'b':15, 'c':True} 

2533 x = len(y) + 5 - a[ 

2534 3] - a[2] + len(z) - z[ 

2535 'b'] 

2536 """ 

2537 self.check_roundtrip(contents) 

2538 #@+node:ekr.20200111085210.1: *4* TT.test_continuation_2 

2539 def test_continuation_2(self): 

2540 # Backslash means line continuation, except for comments 

2541 contents = ( 

2542 'x=1+\\\n 2' 

2543 '# This is a comment\\\n # This also' 

2544 ) 

2545 self.check_roundtrip(contents) 

2546 #@+node:ekr.20200111085211.1: *4* TT.test_continuation_3 

2547 def test_continuation_3(self): 

2548 

2549 contents = """\ 

2550 # Comment \\\n 

2551 x = 0 

2552 """ 

2553 self.check_roundtrip(contents) 

2554 #@+node:ekr.20200110015014.10: *4* TT.test_string_concatenation_1 

2555 def test_string_concatenation_1(self):

2556 # Two *plain* string literals on the same line 

2557 self.check_roundtrip("""'abc' 'xyz'""") 

2558 #@+node:ekr.20200111081801.1: *4* TT.test_string_concatenation_2 

2559 def test_string_concatenation_2(self):

2560 # f-string followed by plain string on the same line 

2561 self.check_roundtrip("""f'abc' 'xyz'""") 

2562 #@+node:ekr.20200111081832.1: *4* TT.test_string_concatenation_3 

2563 def test_string_concatenation_3(self):

2564 # plain string followed by f-string on the same line 

2565 self.check_roundtrip("""'abc' f'xyz'""") 

2566 #@+node:ekr.20160521103254.1: *4* TT.test_visitors_exist 

2567 def test_visitors_exist(self): 

2568 """Ensure that visitors for all ast nodes exist.""" 

2569 import _ast 

2570 # Compute the candidate node names in _ast.

2571 aList = sorted(dir(_ast)) 

2572 remove = [ 

2573 'Interactive', 'Suite', # Not necessary. 

2574 'AST', # The base class.

2575 # Constants... 

2576 'PyCF_ALLOW_TOP_LEVEL_AWAIT', 

2577 'PyCF_ONLY_AST', 

2578 'PyCF_TYPE_COMMENTS', 

2579 # New ast nodes for Python 3.8. 

2580 # We can ignore these nodes because: 

2581 # 1. ast.parse does not generate them by default. 

2582 # 2. The type comments are ordinary comments. 

2583 # They do not need to be specially synced. 

2584 # 3. Tools such as black, orange, and fstringify will 

2585 # only ever handle comments as comments. 

2586 'FunctionType', 'NamedExpr', 'TypeIgnore', 

2587 ] 

2588 aList = [z for z in aList if not z[0].islower()] 

2589 # Remove private names and the names in the remove list.

2590 aList = [z for z in aList 

2591 if not z.startswith('_') and z not in remove]

2592 # Now test them. 

2593 table = ( 

2594 TokenOrderGenerator, 

2595 ) 

2596 for class_ in table: 

2597 traverser = class_() 

2598 errors, nodes, ops = 0, 0, 0 

2599 for z in aList: 

2600 if hasattr(traverser, 'do_' + z): 

2601 nodes += 1 

2602 elif _op_names.get(z): 

2603 ops += 1 

2604 else: 

2605 errors += 1 

2606 print( 

2607 f"Missing visitor: " 

2608 f"{traverser.__class__.__name__}.{z}") 

2609 msg = f"{nodes} node types, {ops} op types, {errors} errors" 

2610 assert not errors, msg 
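    # Illustrative sketch (not part of the original file): the 'do_' +
    # class-name dispatch convention that test_visitors_exist checks for.
    # This is a self-contained toy dispatcher, not TokenOrderGenerator,
    # and the method name is hypothetical.
    def test_sketch_do_dispatch(self):

        class MiniVisitor:

            def do_Pass(self, node):
                return 'pass'

            def visit(self, node):
                # Look up the visitor by the node's class name.
                method = getattr(self, 'do_' + node.__class__.__name__, None)
                return method(node) if method else None

        tree = ast.parse('pass')
        self.assertEqual(MiniVisitor().visit(tree.body[0]), 'pass')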

2611 #@-others 

2612#@+node:ekr.20200107144010.1: *3* class TestTopLevelFunctions (BaseTest) 

2613class TestTopLevelFunctions(BaseTest): 

2614 """Tests for the top-level functions in leoAst.py.""" 

2615 #@+others 

2616 #@+node:ekr.20200107144227.1: *4* test_get_encoding_directive 

2617 def test_get_encoding_directive(self): 

2618 

2619 filename = __file__ 

2620 assert os.path.exists(filename), repr(filename) 

2621 with open(filename, 'rb') as f: 

2622 bb = f.read() 

2623 e = get_encoding_directive(bb) 

2624 self.assertEqual(e.lower(), 'utf-8') 

2625 #@+node:ekr.20200107150857.1: *4* test_strip_BOM 

2626 def test_strip_BOM(self): 

2627 

2628 filename = __file__ 

2629 assert os.path.exists(filename), repr(filename) 

2630 with open(filename, 'rb') as f: 

2631 bb = f.read() 

2632 assert bb, filename 

2633 e, s = strip_BOM(bb) 

2634 assert e is None or e.lower() == 'utf-8', repr(e) 

2635 #@-others 

2636#@+node:ekr.20191227152538.1: *3* class TestTOT (BaseTest) 

2637class TestTOT(BaseTest): 

2638 """Tests for the TokenOrderTraverser class.""" 

2639 #@+others 

2640 #@+node:ekr.20200111115318.1: *4* test_tot.test_traverse 

2641 def test_traverse(self): 

2642 

2643 contents = """\ 

2644 f(1) 

2645 b = 2 + 3 

2646 """ 

2647 # print('%s = %s' % (2+3, 4*5)) 

2648 if 1: 

2649 contents, tokens, tree = self.make_file_data('leoApp.py') 

2650 else: 

2651 contents, tokens, tree = self.make_data(contents) 

2652 tot = TokenOrderTraverser() 

2653 t1 = get_time() 

2654 n_nodes = tot.traverse(tree) 

2655 t2 = get_time() 

2656 self.update_counts('nodes', n_nodes) 

2657 self.update_times('50: TOT.traverse', t2 - t1) 

2658 # self.dump_stats() 

2659 #@-others 

2660#@-others 

2661#@-leo