"""
    test_versioning
    ~~~~~~~~~~~~~~~

    Test the versioning implementation.

    :copyright: Copyright 2007-2021 by the Sphinx team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import pickle

import pytest
from docutils.parsers.rst.directives.html import MetaBody

from sphinx import addnodes
from sphinx.testing.util import SphinxTestApp
from sphinx.versioning import add_uids, get_ratio, merge_doctrees

# Module-level state shared by all tests; populated once by the
# module-scoped autouse fixture below.
app = original = original_uids = None


@pytest.fixture(scope='module', autouse=True)
def setup_module(rootdir, sphinx_test_tempdir):
    """Build the 'test-versioning' project once per module.

    Copies the fixture project into the temp dir (if not already there),
    runs a full Sphinx build while collecting every resolved doctree via
    the ``doctree-resolved`` event, and assigns UIDs to the paragraphs of
    the baseline ('original') document.  The collected state is exposed
    through the module globals ``app``, ``original`` and ``original_uids``.
    """
    global app, original, original_uids
    srcdir = sphinx_test_tempdir / 'test-versioning'
    if not srcdir.exists():
        (rootdir / 'test-versioning').copytree(srcdir)
    app = SphinxTestApp(srcdir=srcdir)
    app.builder.env.app = app
    app.connect('doctree-resolved', on_doctree_resolved)
    app.build()
    original = doctrees['original']
    # add_uids yields the nodes it tagged; keep their uids for comparison
    # against the merged documents in the tests below.
    original_uids = [n.uid for n in add_uids(original, is_paragraph)]
    yield
    app.cleanup()


# Mapping of docname -> resolved doctree, filled during the build.
doctrees = {}


def on_doctree_resolved(app, doctree, docname):
    """``doctree-resolved`` event handler: record each doctree by docname."""
    doctrees[docname] = doctree


def is_paragraph(node):
    """Return True if *node* is a docutils paragraph node.

    Compares by class name (not isinstance) so it works across the
    docutils node classes used here.
    """
    return node.__class__.__name__ == 'paragraph'


def test_get_ratio():
    # Edge case: an empty string on either side must still yield a
    # truthy (non-zero) ratio rather than dividing by zero.
    assert get_ratio('', 'a')
    assert get_ratio('a', '')


def test_add_uids():
    # The 'original' fixture document contains exactly three paragraphs.
    assert len(original_uids) == 3


def test_picklablility():
    # we have to modify the doctree so we can pickle it
    copy = original.copy()
    copy.reporter = None
    copy.transformer = None
    copy.settings.warning_stream = None
    copy.settings.env = None
    copy.settings.record_dependencies = None
    # docutils' MetaBody.meta is not picklable; swap in Sphinx's own
    # meta node class, which is.
    for metanode in copy.traverse(MetaBody.meta):
        metanode.__class__ = addnodes.meta
    loaded = pickle.loads(pickle.dumps(copy, pickle.HIGHEST_PROTOCOL))
    # UIDs must survive the pickle round-trip on every paragraph.
    assert all(getattr(n, 'uid', False) for n in loaded.traverse(is_paragraph))


def test_modified():
    # A document whose paragraphs were only edited keeps all UIDs:
    # nothing is reported as new.
    modified = doctrees['modified']
    new_nodes = list(merge_doctrees(original, modified, is_paragraph))
    uids = [n.uid for n in modified.traverse(is_paragraph)]
    assert not new_nodes
    assert original_uids == uids


def test_added():
    # One paragraph appended at the end: exactly one new node, and the
    # original UIDs are preserved as the prefix.
    added = doctrees['added']
    new_nodes = list(merge_doctrees(original, added, is_paragraph))
    uids = [n.uid for n in added.traverse(is_paragraph)]
    assert len(new_nodes) == 1
    assert original_uids == uids[:-1]


def test_deleted():
    # The middle paragraph was removed: no new nodes, and the survivors
    # keep the UIDs at the even positions of the original.
    deleted = doctrees['deleted']
    new_nodes = list(merge_doctrees(original, deleted, is_paragraph))
    uids = [n.uid for n in deleted.traverse(is_paragraph)]
    assert not new_nodes
    assert original_uids[::2] == uids


def test_deleted_end():
    # The final paragraph was removed: no new nodes, remaining UIDs match
    # the original minus its last entry.
    deleted_end = doctrees['deleted_end']
    new_nodes = list(merge_doctrees(original, deleted_end, is_paragraph))
    uids = [n.uid for n in deleted_end.traverse(is_paragraph)]
    assert not new_nodes
    assert original_uids[:-1] == uids


def test_insert():
    # A paragraph inserted in the middle: one new node at index 1, with
    # the original UIDs unchanged around it.
    insert = doctrees['insert']
    new_nodes = list(merge_doctrees(original, insert, is_paragraph))
    uids = [n.uid for n in insert.traverse(is_paragraph)]
    assert len(new_nodes) == 1
    assert original_uids[0] == uids[0]
    assert original_uids[1:] == uids[2:]


def test_insert_beginning():
    # A paragraph inserted at the start: one new node with a fresh UID;
    # the original three UIDs follow it unchanged.
    insert_beginning = doctrees['insert_beginning']
    new_nodes = list(merge_doctrees(original, insert_beginning, is_paragraph))
    uids = [n.uid for n in insert_beginning.traverse(is_paragraph)]
    assert len(new_nodes) == 1
    assert len(uids) == 4
    assert original_uids == uids[1:]
    assert original_uids[0] != uids[0]


def test_insert_similar():
    # An inserted paragraph that closely resembles an existing one must
    # still be detected as new (not matched to the similar original).
    insert_similar = doctrees['insert_similar']
    new_nodes = list(merge_doctrees(original, insert_similar, is_paragraph))
    uids = [n.uid for n in insert_similar.traverse(is_paragraph)]
    assert len(new_nodes) == 1
    assert new_nodes[0].rawsource == 'Anyway I need more'
    assert original_uids[0] == uids[0]
    assert original_uids[1:] == uids[2:]