test_cache.py

  1. """
  2. Test all things related to the ``jedi.cache`` module.
  3. """
  4. import os
  5. import os.path
  6. import pytest
  7. import time
  8. from parso.cache import (_CACHED_FILE_MAXIMUM_SURVIVAL, _VERSION_TAG,
  9. _get_cache_clear_lock, _get_hashed_path,
  10. _load_from_file_system, _NodeCacheItem,
  11. _remove_cache_and_update_lock, _save_to_file_system,
  12. load_module, parser_cache, try_to_save_module)
  13. from parso._compatibility import is_pypy, PermissionError
  14. from parso import load_grammar
  15. from parso import cache
  16. from parso import file_io
  17. from parso import parse
skip_pypy = pytest.mark.skipif(
    is_pypy,
    reason="pickling in pypy is slow; since we don't pickle, "
           "we never go into the path of auto-collecting garbage"
)

@pytest.fixture()
def isolated_parso_cache(monkeypatch, tmpdir):
    """Set `parso.cache._default_cache_path` to a temporary directory
    during the test."""
    cache_path = str(os.path.join(str(tmpdir), "__parso_cache"))
    monkeypatch.setattr(cache, '_default_cache_path', cache_path)
    monkeypatch.setattr(cache, '_get_default_cache_path',
                        lambda *args, **kwargs: cache_path)
    return cache_path

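# Hedged usage sketch, not part of the original suite: with the fixture above
# active, a module parsed with ``cache=True`` should be pickled underneath the
# temporary ``__parso_cache`` directory, mirroring what ``test_inactive_cache``
# relies on further down. The test name is invented for illustration.
@skip_pypy
def test_isolated_parso_cache_is_used(tmpdir, isolated_parso_cache):
    parser_cache.clear()
    parse('somecode', cache=True, path=os.path.join(str(tmpdir), 'some_module'))
    # Pickles land in a version-tagged subdirectory of the fixture's path.
    assert os.path.exists(os.path.join(isolated_parso_cache, _VERSION_TAG))
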
@pytest.mark.skip("SUBBOTNIK-2721 Disable load cache from disk")
def test_modulepickling_change_cache_dir(tmpdir):
    """
    ParserPickling should not save old cache when cache_directory is changed.

    See: `#168 <https://github.com/davidhalter/jedi/pull/168>`_
    """
    dir_1 = str(tmpdir.mkdir('first'))
    dir_2 = str(tmpdir.mkdir('second'))
    item_1 = _NodeCacheItem('bla', [])
    item_2 = _NodeCacheItem('bla', [])
    path_1 = 'fake path 1'
    path_2 = 'fake path 2'

    hashed_grammar = load_grammar()._hashed
    _save_to_file_system(hashed_grammar, path_1, item_1, cache_path=dir_1)
    parser_cache.clear()
    cached = load_stored_item(hashed_grammar, path_1, item_1, cache_path=dir_1)
    assert cached == item_1.node

    _save_to_file_system(hashed_grammar, path_2, item_2, cache_path=dir_2)
    cached = load_stored_item(hashed_grammar, path_1, item_1, cache_path=dir_2)
    assert cached is None

def load_stored_item(hashed_grammar, path, item, cache_path):
    """Load `item` stored at `path` in `cache`."""
    # Pass a timestamp just below the item's change time so the stored node
    # still counts as up to date and is actually returned.
    item = _load_from_file_system(hashed_grammar, path, item.change_time - 1,
                                  cache_path)
    return item

@pytest.mark.usefixtures("isolated_parso_cache")
def test_modulepickling_simulate_deleted_cache(tmpdir):
    """
    Tests loading from a cache file after it is deleted.
    According to macOS `dev docs`__,

        Note that the system may delete the Caches/ directory to free up disk
        space, so your app must be able to re-create or download these files as
        needed.

    It is possible that other supported platforms treat cache files the same
    way.

    __ https://developer.apple.com/library/content/documentation/FileManagement/Conceptual/FileSystemProgrammingGuide/FileSystemOverview/FileSystemOverview.html
    """
    grammar = load_grammar()
    module = 'fake parser'

    # Create the file
    path = tmpdir.dirname + '/some_path'
    with open(path, 'w'):
        pass
    io = file_io.FileIO(path)

    try_to_save_module(grammar._hashed, io, module, lines=[])
    assert load_module(grammar._hashed, io) == module

    os.unlink(_get_hashed_path(grammar._hashed, path))
    parser_cache.clear()

    cached2 = load_module(grammar._hashed, io)
    assert cached2 is None

@pytest.mark.skip
def test_cache_limit():
    def cache_size():
        return sum(len(v) for v in parser_cache.values())

    try:
        parser_cache.clear()
        future_node_cache_item = _NodeCacheItem('bla', [], change_time=time.time() + 10e6)
        old_node_cache_item = _NodeCacheItem('bla', [], change_time=time.time() - 10e4)
        parser_cache['some_hash_old'] = {
            '/path/%s' % i: old_node_cache_item for i in range(300)
        }
        parser_cache['some_hash_new'] = {
            '/path/%s' % i: future_node_cache_item for i in range(300)
        }
        assert cache_size() == 600
        parse('somecode', cache=True, path='/path/somepath')
        # Caching one more module trips the size limit: the 300 stale items
        # are evicted, the 300 future-dated items survive, and the freshly
        # parsed module is added on top, leaving 301 entries.
        assert cache_size() == 301
    finally:
        parser_cache.clear()

class _FixedTimeFileIO(file_io.KnownContentFileIO):
    """A ``KnownContentFileIO`` that reports a fixed last-modified time."""

    def __init__(self, path, content, last_modified):
        super(_FixedTimeFileIO, self).__init__(path, content)
        self._last_modified = last_modified

    def get_last_modified(self):
        return self._last_modified

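# Hedged smoke test, not in the original suite: the helper above reports
# exactly the timestamp it was constructed with, which is what lets the
# parametrized test below shift a module's apparent modification time.
def test_fixed_time_file_io_reports_pinned_mtime():
    pinned = time.time() - 10
    io = _FixedTimeFileIO('/path/fake', 'somecode', pinned)
    assert io.path == '/path/fake'
    assert io.get_last_modified() == pinned
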
@pytest.mark.skip
@pytest.mark.parametrize('diff_cache', [False, True])
@pytest.mark.parametrize('use_file_io', [False, True])
def test_cache_last_used_update(diff_cache, use_file_io):
    p = '/path/last-used'
    parser_cache.clear()  # Clear, because then it's easier to find stuff.
    parse('somecode', cache=True, path=p)
    node_cache_item = next(iter(parser_cache.values()))[p]
    now = time.time()
    assert node_cache_item.last_used < now

    if use_file_io:
        f = _FixedTimeFileIO(p, 'code', node_cache_item.last_used - 10)
        parse(file_io=f, cache=True, diff_cache=diff_cache)
    else:
        parse('somecode2', cache=True, path=p, diff_cache=diff_cache)

    node_cache_item = next(iter(parser_cache.values()))[p]
    assert now < node_cache_item.last_used < time.time()

@skip_pypy
def test_inactive_cache(tmpdir, isolated_parso_cache):
    parser_cache.clear()
    test_subjects = "abcdef"
    for path in test_subjects:
        parse('somecode', cache=True, path=os.path.join(str(tmpdir), path))
    raw_cache_path = os.path.join(isolated_parso_cache, _VERSION_TAG)
    assert os.path.exists(raw_cache_path)
    paths = os.listdir(raw_cache_path)

    a_while_ago = time.time() - _CACHED_FILE_MAXIMUM_SURVIVAL
    old_paths = set()
    for path in paths[:len(test_subjects) // 2]:  # make a certain number of paths old
        os.utime(os.path.join(raw_cache_path, path), (a_while_ago, a_while_ago))
        old_paths.add(path)
    # Nothing should be cleared while the lock is on.
    assert os.path.exists(_get_cache_clear_lock().path)
    _remove_cache_and_update_lock()  # it shouldn't clear anything
    assert len(os.listdir(raw_cache_path)) == len(test_subjects)
    assert old_paths.issubset(os.listdir(raw_cache_path))

    os.utime(_get_cache_clear_lock().path, (a_while_ago, a_while_ago))
    _remove_cache_and_update_lock()
    assert len(os.listdir(raw_cache_path)) == len(test_subjects) // 2
    assert not old_paths.intersection(os.listdir(raw_cache_path))

@pytest.mark.skip
@skip_pypy
def test_permission_error(monkeypatch):
    def save(*args, **kwargs):
        # Python 2 has no ``nonlocal``, hence the one-element-list workaround
        # (see the ``nonlocal`` sketch below).
        was_called[0] = True
        raise PermissionError

    was_called = [False]

    monkeypatch.setattr(cache, '_save_to_file_system', save)
    with pytest.warns(Warning):
        parse(path=__file__, cache=True, diff_cache=True)
    assert was_called[0]
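

# Hedged sketch, not part of the original suite: the same check as
# ``test_permission_error`` above, rewritten with ``nonlocal`` as the comment
# there suggests. It keeps the original skip markers, since the test it
# mirrors is currently disabled.
@pytest.mark.skip
@skip_pypy
def test_permission_error_nonlocal(monkeypatch):
    was_called = False

    def save(*args, **kwargs):
        nonlocal was_called
        was_called = True
        raise PermissionError

    monkeypatch.setattr(cache, '_save_to_file_system', save)
    with pytest.warns(Warning):
        parse(path=__file__, cache=True, diff_cache=True)
    assert was_called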