pathlib.py

# -*- coding: utf-8 -*-
from __future__ import absolute_import

import atexit
import errno
import fnmatch
import itertools
import operator
import os
import shutil
import sys
import uuid
import warnings
from functools import partial
from functools import reduce
from os.path import expanduser
from os.path import expandvars
from os.path import isabs
from os.path import sep
from posixpath import sep as posix_sep

import six
from six.moves import map

from .compat import PY36
from _pytest.warning_types import PytestWarning

if PY36:
    from pathlib import Path, PurePath
else:
    from pathlib2 import Path, PurePath

__all__ = ["Path", "PurePath"]

LOCK_TIMEOUT = 60 * 60 * 3

get_lock_path = operator.methodcaller("joinpath", ".lock")


def ensure_reset_dir(path):
    """
    ensures the given path is an empty directory
    """
    if path.exists():
        rm_rf(path)
    path.mkdir()


def on_rm_rf_error(func, path, exc, **kwargs):
    """Handles known read-only errors during rmtree.

    The returned value is used only by our own tests.
    """
    start_path = kwargs["start_path"]
    exctype, excvalue = exc[:2]

    # another process removed the file in the middle of the "rm_rf" (xdist for example)
    # more context: https://github.com/pytest-dev/pytest/issues/5974#issuecomment-543799018
    if isinstance(excvalue, OSError) and excvalue.errno == errno.ENOENT:
        return False

    if not isinstance(excvalue, OSError) or excvalue.errno not in (
        errno.EACCES,
        errno.EPERM,
    ):
        warnings.warn(
            PytestWarning(
                "(rm_rf) error removing {}\n{}: {}".format(path, exctype, excvalue)
            )
        )
        return False

    if func not in (os.rmdir, os.remove, os.unlink):
        warnings.warn(
            PytestWarning(
                "(rm_rf) unknown function {} when removing {}:\n{}: {}".format(
                    func, path, exctype, excvalue
                )
            )
        )
        return False

    # Chmod + retry.
    import stat

    def chmod_rw(p):
        mode = os.stat(p).st_mode
        os.chmod(p, mode | stat.S_IRUSR | stat.S_IWUSR)

    # For files, we need to recursively go upwards in the directories to
    # ensure they all are also writable.
    p = Path(path)
    if p.is_file():
        for parent in p.parents:
            chmod_rw(str(parent))
            # stop when we reach the original path passed to rm_rf
            if parent == start_path:
                break
    chmod_rw(str(path))

    func(path)
    return True


def rm_rf(path):
    """Remove the path contents recursively, even if some elements
    are read-only.
    """
    onerror = partial(on_rm_rf_error, start_path=path)
    shutil.rmtree(str(path), onerror=onerror)


def find_prefixed(root, prefix):
    """finds all elements in root that begin with the prefix, case insensitive"""
    l_prefix = prefix.lower()
    for x in root.iterdir():
        if x.name.lower().startswith(l_prefix):
            yield x


def extract_suffixes(iter, prefix):
    """
    :param iter: iterator over path names
    :param prefix: expected prefix of the path names
    :returns: the parts of the paths following the prefix
    """
    p_len = len(prefix)
    for p in iter:
        yield p.name[p_len:]


def find_suffixes(root, prefix):
    """combines find_prefixed and extract_suffixes"""
    return extract_suffixes(find_prefixed(root, prefix), prefix)


def parse_num(maybe_num):
    """parses number path suffixes, returns -1 on error"""
    try:
        return int(maybe_num)
    except ValueError:
        return -1


if six.PY2:

    def _max(iterable, default):
        """needed due to python2.7 lacking the default argument for max"""
        return reduce(max, iterable, default)


else:
    _max = max
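
# Illustrative sketch (not part of the original module): how the prefix/suffix
# helpers above compose.  "root" is a hypothetical Path whose children are
# pytest-0, pytest-1, pytest-7 and pytest-old:
#
#   suffixes = list(find_suffixes(root, "pytest-"))   # ["0", "1", "7", "old"]
#   numbers = [parse_num(s) for s in suffixes]        # [0, 1, 7, -1]
#   _max(numbers, default=-1)                         # 7, so the next dir is "pytest-8"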


def _force_symlink(root, target, link_to):
    """helper to create the current symlink

    it's full of race conditions that are reasonably ok to ignore
    for the context of best-effort linking to the latest test run

    the presumption being that in case of much parallelism
    the inaccuracy is going to be acceptable
    """
    current_symlink = root.joinpath(target)
    try:
        current_symlink.unlink()
    except OSError:
        pass
    try:
        current_symlink.symlink_to(link_to)
    except Exception:
        pass


def make_numbered_dir(root, prefix):
    """create a directory with an increased number as suffix for the given prefix"""
    for i in range(10):
        # try up to 10 times to create the folder
        max_existing = _max(map(parse_num, find_suffixes(root, prefix)), default=-1)
        new_number = max_existing + 1
        new_path = root.joinpath("{}{}".format(prefix, new_number))
        try:
            new_path.mkdir()
        except Exception:
            pass
        else:
            _force_symlink(root, prefix + "current", new_path)
            return new_path
    else:
        raise EnvironmentError(
            "could not create numbered dir with prefix "
            "{prefix} in {root} after 10 tries".format(prefix=prefix, root=root)
        )
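
# Illustrative sketch (not part of the original module): calling
# make_numbered_dir twice on an empty, hypothetical "root" directory:
#
#   first = make_numbered_dir(root, "pytest-")    # creates root/pytest-0
#   second = make_numbered_dir(root, "pytest-")   # creates root/pytest-1
#   # root/pytest-current now points at root/pytest-1 (best-effort symlink,
#   # silently skipped on platforms/filesystems that refuse to create it)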


def create_cleanup_lock(p):
    """creates a lock to prevent premature folder cleanup"""
    lock_path = get_lock_path(p)
    try:
        fd = os.open(str(lock_path), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644)
    except OSError as e:
        if e.errno == errno.EEXIST:
            six.raise_from(
                EnvironmentError("cannot create lockfile in {path}".format(path=p)), e
            )
        else:
            raise
    else:
        pid = os.getpid()
        spid = str(pid)
        if not isinstance(spid, bytes):
            spid = spid.encode("ascii")
        os.write(fd, spid)
        os.close(fd)
        if not lock_path.is_file():
            raise EnvironmentError("lock path got renamed after successful creation")
        return lock_path


def register_cleanup_lock_removal(lock_path, register=atexit.register):
    """registers a cleanup function for removing a lock, by default on atexit"""
    pid = os.getpid()

    def cleanup_on_exit(lock_path=lock_path, original_pid=pid):
        current_pid = os.getpid()
        if current_pid != original_pid:
            # we are in a forked child; leave lock removal to the parent process
            return
        try:
            lock_path.unlink()
        except (OSError, IOError):
            pass

    return register(cleanup_on_exit)
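
# Illustrative sketch (not part of the original module): the lock lifecycle for
# a freshly created numbered dir "p" (a hypothetical Path).  The lock is a
# ".lock" file containing the owning pid; the atexit hook removes it only in
# the process that created it (forked children return early).
#
#   lock = create_cleanup_lock(p)        # p/.lock created exclusively
#   register_cleanup_lock_removal(lock)  # p/.lock unlinked automatically at exit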


def maybe_delete_a_numbered_dir(path):
    """removes a numbered directory if its lock can be obtained and it does not seem to be in use"""
    lock_path = None
    try:
        lock_path = create_cleanup_lock(path)
        parent = path.parent

        garbage = parent.joinpath("garbage-{}".format(uuid.uuid4()))
        path.rename(garbage)
        rm_rf(garbage)
    except (OSError, EnvironmentError):
        # known races:
        # * other process did a cleanup at the same time
        # * deletable folder was found
        # * process cwd (Windows)
        return
    finally:
        # if we created the lock, ensure we remove it even if we failed
        # to properly remove the numbered dir
        if lock_path is not None:
            try:
                lock_path.unlink()
            except (OSError, IOError):
                pass


def ensure_deletable(path, consider_lock_dead_if_created_before):
    """checks if a lock exists and breaks it if it's considered dead"""
    if path.is_symlink():
        return False
    lock = get_lock_path(path)
    if not lock.exists():
        return True
    try:
        lock_time = lock.stat().st_mtime
    except Exception:
        return False
    else:
        if lock_time < consider_lock_dead_if_created_before:
            lock.unlink()
            return True
        else:
            return False


def try_cleanup(path, consider_lock_dead_if_created_before):
    """tries to clean up a folder if we can ensure it's deletable"""
    if ensure_deletable(path, consider_lock_dead_if_created_before):
        maybe_delete_a_numbered_dir(path)


def cleanup_candidates(root, prefix, keep):
    """lists candidates for numbered directories to be removed - follows py.path"""
    max_existing = _max(map(parse_num, find_suffixes(root, prefix)), default=-1)
    max_delete = max_existing - keep
    paths = find_prefixed(root, prefix)
    paths, paths2 = itertools.tee(paths)
    numbers = map(parse_num, extract_suffixes(paths2, prefix))
    for path, number in zip(paths, numbers):
        if number <= max_delete:
            yield path


def cleanup_numbered_dir(root, prefix, keep, consider_lock_dead_if_created_before):
    """cleanup for lock driven numbered directories"""
    for path in cleanup_candidates(root, prefix, keep):
        try_cleanup(path, consider_lock_dead_if_created_before)
    for path in root.glob("garbage-*"):
        try_cleanup(path, consider_lock_dead_if_created_before)


def make_numbered_dir_with_cleanup(root, prefix, keep, lock_timeout):
    """creates a numbered dir with a cleanup lock and removes old ones"""
    e = None
    for i in range(10):
        try:
            p = make_numbered_dir(root, prefix)
            lock_path = create_cleanup_lock(p)
            register_cleanup_lock_removal(lock_path)
        except Exception as exc:
            e = exc
        else:
            consider_lock_dead_if_created_before = p.stat().st_mtime - lock_timeout
            cleanup_numbered_dir(
                root=root,
                prefix=prefix,
                keep=keep,
                consider_lock_dead_if_created_before=consider_lock_dead_if_created_before,
            )
            return p
    assert e is not None
    raise e
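
# Illustrative sketch (not part of the original module): the typical entry point
# for numbered temporary directories.  The base path below is made up for the
# example and would need to exist and be writable.
#
#   base = Path("/tmp/pytest-of-user")   # hypothetical base directory
#   base.mkdir(exist_ok=True)
#   tmp = make_numbered_dir_with_cleanup(
#       root=base, prefix="pytest-", keep=3, lock_timeout=LOCK_TIMEOUT
#   )
#   # creates and locks e.g. base/pytest-8, keeps the newest 3 numbered dirs,
#   # and tries to remove older ones whose cleanup locks are absent or stale.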


def resolve_from_str(input, root):
    assert not isinstance(input, Path), "would break on py2"
    root = Path(root)
    input = expanduser(input)
    input = expandvars(input)
    if isabs(input):
        return Path(input)
    else:
        return root.joinpath(input)


def fnmatch_ex(pattern, path):
    """FNMatcher port from py.path.common which works with PurePath() instances.

    The difference between this algorithm and PurePath.match() is that the latter matches "**" glob expressions
    for each part of the path, while this algorithm uses the whole path instead.

    For example:
        "tests/foo/bar/doc/test_foo.py" matches pattern "tests/**/doc/test*.py" with this algorithm, but not with
        PurePath.match().

    This algorithm was ported to keep backward-compatibility with existing settings which assume paths match according
    to this logic.

    References:
    * https://bugs.python.org/issue29249
    * https://bugs.python.org/issue34731
    """
    path = PurePath(path)
    iswin32 = sys.platform.startswith("win")

    if iswin32 and sep not in pattern and posix_sep in pattern:
        # Running on Windows, the pattern has no Windows path separators,
        # and the pattern has one or more Posix path separators. Replace
        # the Posix path separators with the Windows path separator.
        pattern = pattern.replace(posix_sep, sep)

    if sep not in pattern:
        name = path.name
    else:
        name = six.text_type(path)
    return fnmatch.fnmatch(name, pattern)
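
# Illustrative sketch (not part of the original module): the behavioural
# difference described in the docstring above.
#
#   fnmatch_ex("tests/**/doc/test*.py", "tests/foo/bar/doc/test_foo.py")
#   # -> True: the whole path is matched against the pattern
#   PurePath("tests/foo/bar/doc/test_foo.py").match("tests/**/doc/test*.py")
#   # -> False: "**" does not span multiple path components in PurePath.match()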


def parts(s):
    parts = s.split(sep)
    return {sep.join(parts[: i + 1]) or sep for i in range(len(parts))}
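
# Illustrative sketch (not part of the original module): parts() returns every
# ancestor prefix of a separator-joined string.  On a posix system:
#
#   parts("/a/b/c")  # {"/", "/a", "/a/b", "/a/b/c"}
#   parts("a/b")     # {"a", "a/b"}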