# Dependencies.py

from __future__ import absolute_import, print_function

import cython
from .. import __version__

import collections
import contextlib
import hashlib
import os
import shutil
import subprocess
import re, sys, time
import warnings
from glob import iglob
from io import open as io_open
from os.path import relpath as _relpath
from distutils.extension import Extension
from distutils.util import strtobool
import zipfile

try:
    from collections.abc import Iterable
except ImportError:
    from collections import Iterable

try:
    import gzip
    gzip_open = gzip.open
    gzip_ext = '.gz'
except ImportError:
    gzip_open = open
    gzip_ext = ''

try:
    import zlib
    zipfile_compression_mode = zipfile.ZIP_DEFLATED
except ImportError:
    zipfile_compression_mode = zipfile.ZIP_STORED

try:
    import pythran
except:
    pythran = None

from .. import Utils
from ..Utils import (cached_function, cached_method, path_exists, write_depfile,
                     safe_makedirs, copy_file_to_dir_if_newer, is_package_dir, replace_suffix)
from ..Compiler.Main import Context, CompilationOptions, default_options

join_path = cached_function(os.path.join)
copy_once_if_newer = cached_function(copy_file_to_dir_if_newer)
safe_makedirs_once = cached_function(safe_makedirs)

if sys.version_info[0] < 3:
    # stupid Py2 distutils enforces str type in list of sources
    _fs_encoding = sys.getfilesystemencoding()
    if _fs_encoding is None:
        _fs_encoding = sys.getdefaultencoding()

    def encode_filename_in_py2(filename):
        if not isinstance(filename, bytes):
            return filename.encode(_fs_encoding)
        return filename
else:
    def encode_filename_in_py2(filename):
        return filename
    basestring = str


def _make_relative(file_paths, base=None):
    if not base:
        base = os.getcwd()
    if base[-1] != os.path.sep:
        base += os.path.sep
    return [_relpath(path, base) if path.startswith(base) else path
            for path in file_paths]


def extended_iglob(pattern):
    if '{' in pattern:
        m = re.match('(.*){([^}]+)}(.*)', pattern)
        if m:
            before, switch, after = m.groups()
            for case in switch.split(','):
                for path in extended_iglob(before + case + after):
                    yield path
            return
    if '**/' in pattern:
        seen = set()
        first, rest = pattern.split('**/', 1)
        if first:
            first = iglob(first+'/')
        else:
            first = ['']
        for root in first:
            for path in extended_iglob(join_path(root, rest)):
                if path not in seen:
                    seen.add(path)
                    yield path
            for path in extended_iglob(join_path(root, '*', '**/' + rest)):
                if path not in seen:
                    seen.add(path)
                    yield path
    else:
        for path in iglob(pattern):
            yield path
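
# Illustrative usage of extended_iglob() (not part of the module): besides
# plain glob syntax it supports '{a,b}' alternation and '**/' recursion.
# The file names shown are hypothetical.
#
#     list(extended_iglob("src/{core,util}/*.pyx"))
#     # -> e.g. ['src/core/fast.pyx', 'src/util/misc.pyx']
#     list(extended_iglob("src/**/*.pyx"))
#     # -> every .pyx file under src/, at any depth
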
def nonempty(it, error_msg="expected non-empty iterator"):
    empty = True
    for value in it:
        empty = False
        yield value
    if empty:
        raise ValueError(error_msg)


@cached_function
def file_hash(filename):
    path = os.path.normpath(filename)
    prefix = ('%d:%s' % (len(path), path)).encode("UTF-8")
    m = hashlib.md5(prefix)
    with open(path, 'rb') as f:
        data = f.read(65000)
        while data:
            m.update(data)
            data = f.read(65000)
    return m.hexdigest()


def update_pythran_extension(ext):
    if pythran is None:
        raise RuntimeError("You first need to install Pythran to use the np_pythran directive.")
    try:
        pythran_ext = pythran.config.make_extension(python=True)
    except TypeError:  # older pythran version only
        pythran_ext = pythran.config.make_extension()

    ext.include_dirs.extend(pythran_ext['include_dirs'])
    ext.extra_compile_args.extend(pythran_ext['extra_compile_args'])
    ext.extra_link_args.extend(pythran_ext['extra_link_args'])
    ext.define_macros.extend(pythran_ext['define_macros'])
    ext.undef_macros.extend(pythran_ext['undef_macros'])
    ext.library_dirs.extend(pythran_ext['library_dirs'])
    ext.libraries.extend(pythran_ext['libraries'])
    ext.language = 'c++'

    # These options are not compatible with the way normal Cython extensions work
    for bad_option in ["-fwhole-program", "-fvisibility=hidden"]:
        try:
            ext.extra_compile_args.remove(bad_option)
        except ValueError:
            pass


def parse_list(s):
    """
    >>> parse_list("")
    []
    >>> parse_list("a")
    ['a']
    >>> parse_list("a b c")
    ['a', 'b', 'c']
    >>> parse_list("[a, b, c]")
    ['a', 'b', 'c']
    >>> parse_list('a " " b')
    ['a', ' ', 'b']
    >>> parse_list('[a, ",a", "a,", ",", ]')
    ['a', ',a', 'a,', ',']
    """
    if len(s) >= 2 and s[0] == '[' and s[-1] == ']':
        s = s[1:-1]
        delimiter = ','
    else:
        delimiter = ' '
    s, literals = strip_string_literals(s)
    def unquote(literal):
        literal = literal.strip()
        if literal[0] in "'\"":
            return literals[literal[1:-1]]
        else:
            return literal
    return [unquote(item) for item in s.split(delimiter) if item.strip()]


transitive_str = object()
transitive_list = object()
bool_or = object()

distutils_settings = {
    'name': str,
    'sources': list,
    'define_macros': list,
    'undef_macros': list,
    'libraries': transitive_list,
    'library_dirs': transitive_list,
    'runtime_library_dirs': transitive_list,
    'include_dirs': transitive_list,
    'extra_objects': list,
    'extra_compile_args': transitive_list,
    'extra_link_args': transitive_list,
    'export_symbols': list,
    'depends': transitive_list,
    'language': transitive_str,
    'np_pythran': bool_or
}
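
# Illustrative source header (not part of the module): DistutilsInfo below
# reads "# distutils:" comments from the top of a .pyx/.pxd file and maps
# each key through distutils_settings. Library and path names here are
# hypothetical.
#
#     # distutils: language = c++
#     # distutils: libraries = spam eggs
#     # distutils: include_dirs = ../include
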
@cython.locals(start=cython.Py_ssize_t, end=cython.Py_ssize_t)
def line_iter(source):
    if isinstance(source, basestring):
        start = 0
        while True:
            end = source.find('\n', start)
            if end == -1:
                yield source[start:]
                return
            yield source[start:end]
            start = end+1
    else:
        for line in source:
            yield line


class DistutilsInfo(object):

    def __init__(self, source=None, exn=None):
        self.values = {}
        if source is not None:
            for line in line_iter(source):
                line = line.lstrip()
                if not line:
                    continue
                if line[0] != '#':
                    break
                line = line[1:].lstrip()
                kind = next((k for k in ("distutils:","cython:") if line.startswith(k)), None)
                if kind is not None:
                    key, _, value = [s.strip() for s in line[len(kind):].partition('=')]
                    type = distutils_settings.get(key, None)
                    if line.startswith("cython:") and type is None: continue
                    if type in (list, transitive_list):
                        value = parse_list(value)
                        if key == 'define_macros':
                            value = [tuple(macro.split('=', 1))
                                     if '=' in macro else (macro, None)
                                     for macro in value]
                    if type is bool_or:
                        value = strtobool(value)
                    self.values[key] = value
        elif exn is not None:
            for key in distutils_settings:
                if key in ('name', 'sources','np_pythran'):
                    continue
                value = getattr(exn, key, None)
                if value:
                    self.values[key] = value

    def merge(self, other):
        if other is None:
            return self
        for key, value in other.values.items():
            type = distutils_settings[key]
            if type is transitive_str and key not in self.values:
                self.values[key] = value
            elif type is transitive_list:
                if key in self.values:
                    # Change a *copy* of the list (Trac #845)
                    all = self.values[key][:]
                    for v in value:
                        if v not in all:
                            all.append(v)
                    value = all
                self.values[key] = value
            elif type is bool_or:
                self.values[key] = self.values.get(key, False) | value
        return self

    def subs(self, aliases):
        if aliases is None:
            return self
        resolved = DistutilsInfo()
        for key, value in self.values.items():
            type = distutils_settings[key]
            if type in [list, transitive_list]:
                new_value_list = []
                for v in value:
                    if v in aliases:
                        v = aliases[v]
                    if isinstance(v, list):
                        new_value_list += v
                    else:
                        new_value_list.append(v)
                value = new_value_list
            else:
                if value in aliases:
                    value = aliases[value]
            resolved.values[key] = value
        return resolved

    def apply(self, extension):
        for key, value in self.values.items():
            type = distutils_settings[key]
            if type in [list, transitive_list]:
                value = getattr(extension, key) + list(value)
            setattr(extension, key, value)

@cython.locals(start=cython.Py_ssize_t, q=cython.Py_ssize_t,
               single_q=cython.Py_ssize_t, double_q=cython.Py_ssize_t,
               hash_mark=cython.Py_ssize_t, end=cython.Py_ssize_t,
               k=cython.Py_ssize_t, counter=cython.Py_ssize_t, quote_len=cython.Py_ssize_t)
def strip_string_literals(code, prefix='__Pyx_L'):
    """
    Normalizes every string literal to be of the form '__Pyx_Lxxx',
    returning the normalized code and a mapping of labels to
    string literals.
    """
    new_code = []
    literals = {}
    counter = 0
    start = q = 0
    in_quote = False
    hash_mark = single_q = double_q = -1
    code_len = len(code)
    quote_type = None
    quote_len = -1

    while True:
        if hash_mark < q:
            hash_mark = code.find('#', q)
        if single_q < q:
            single_q = code.find("'", q)
        if double_q < q:
            double_q = code.find('"', q)
        q = min(single_q, double_q)
        if q == -1:
            q = max(single_q, double_q)

        # We're done.
        if q == -1 and hash_mark == -1:
            new_code.append(code[start:])
            break

        # Try to close the quote.
        elif in_quote:
            if code[q-1] == u'\\':
                k = 2
                while q >= k and code[q-k] == u'\\':
                    k += 1
                if k % 2 == 0:
                    q += 1
                    continue
            if code[q] == quote_type and (
                    quote_len == 1 or (code_len > q + 2 and quote_type == code[q+1] == code[q+2])):
                counter += 1
                label = "%s%s_" % (prefix, counter)
                literals[label] = code[start+quote_len:q]
                full_quote = code[q:q+quote_len]
                new_code.append(full_quote)
                new_code.append(label)
                new_code.append(full_quote)
                q += quote_len
                in_quote = False
                start = q
            else:
                q += 1

        # Process comment.
        elif -1 != hash_mark and (hash_mark < q or q == -1):
            new_code.append(code[start:hash_mark+1])
            end = code.find('\n', hash_mark)
            counter += 1
            label = "%s%s_" % (prefix, counter)
            if end == -1:
                end_or_none = None
            else:
                end_or_none = end
            literals[label] = code[hash_mark+1:end_or_none]
            new_code.append(label)
            if end == -1:
                break
            start = q = end

        # Open the quote.
        else:
            if code_len >= q+3 and (code[q] == code[q+1] == code[q+2]):
                quote_len = 3
            else:
                quote_len = 1
            in_quote = True
            quote_type = code[q]
            new_code.append(code[start:q])
            start = q
            q += quote_len

    return "".join(new_code), literals
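
# Illustrative behaviour of strip_string_literals() (not part of the module):
# string literals and comments are replaced by '__Pyx_L<n>_' labels so that
# the dependency regexes below never match inside them.
#
#     code, literals = strip_string_literals("x = 'a#b'  # comment")
#     # code     -> "x = '__Pyx_L1_'  #__Pyx_L2_"
#     # literals -> {'__Pyx_L1_': 'a#b', '__Pyx_L2_': ' comment'}
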
# We need to allow spaces to allow for conditional compilation like
# IF ...:
#     cimport ...
dependency_regex = re.compile(r"(?:^\s*from +([0-9a-zA-Z_.]+) +cimport)|"
                              r"(?:^\s*cimport +([0-9a-zA-Z_.]+(?: *, *[0-9a-zA-Z_.]+)*))|"
                              r"(?:^\s*cdef +extern +from +['\"]([^'\"]+)['\"])|"
                              r"(?:^\s*include +['\"]([^'\"]+)['\"])", re.M)
dependency_after_from_regex = re.compile(
    r"(?:^\s+\(([0-9a-zA-Z_., ]*)\)[#\n])|"
    r"(?:^\s+([0-9a-zA-Z_., ]*)[#\n])",
    re.M)

def normalize_existing(base_path, rel_paths):
    return normalize_existing0(os.path.dirname(base_path), tuple(set(rel_paths)))


@cached_function
def normalize_existing0(base_dir, rel_paths):
    """
    Given some base directory ``base_dir`` and a list of path names
    ``rel_paths``, normalize each relative path name ``rel`` by
    replacing it by ``os.path.join(base, rel)`` if that file exists.

    Return a couple ``(normalized, needed_base)`` where ``normalized``
    is the list of normalized file names and ``needed_base`` is
    ``base_dir`` if we actually needed ``base_dir``. If no paths were
    changed (for example, if all paths were already absolute), then
    ``needed_base`` is ``None``.
    """
    normalized = []
    needed_base = None
    for rel in rel_paths:
        if os.path.isabs(rel):
            normalized.append(rel)
            continue
        path = join_path(base_dir, rel)
        if path_exists(path):
            normalized.append(os.path.normpath(path))
            needed_base = base_dir
        else:
            normalized.append(rel)
    return (normalized, needed_base)

def resolve_depends(depends, include_dirs):
    include_dirs = tuple(include_dirs)
    resolved = []
    for depend in depends:
        path = resolve_depend(depend, include_dirs)
        if path is not None:
            resolved.append(path)
    return resolved


@cached_function
def resolve_depend(depend, include_dirs):
    if depend[0] == '<' and depend[-1] == '>':
        return None
    for dir in include_dirs:
        path = join_path(dir, depend)
        if path_exists(path):
            return os.path.normpath(path)
    return None


@cached_function
def package(filename):
    dir = os.path.dirname(os.path.abspath(str(filename)))
    if dir != filename and is_package_dir(dir):
        return package(dir) + (os.path.basename(dir),)
    else:
        return ()


@cached_function
def fully_qualified_name(filename):
    module = os.path.splitext(os.path.basename(filename))[0]
    return '.'.join(package(filename) + (module,))
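
# Illustrative results of package() / fully_qualified_name() (not part of the
# module). The layout is hypothetical and assumes __init__ files make 'pkg'
# and 'pkg.sub' package directories.
#
#     package("pkg/sub/mod.pyx")               # -> ('pkg', 'sub')
#     fully_qualified_name("pkg/sub/mod.pyx")  # -> 'pkg.sub.mod'
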
@cached_function
def parse_dependencies(source_filename):
    # Actual parsing is way too slow, so we use regular expressions.
    # The only catch is that we must strip comments and string
    # literals ahead of time.
    with Utils.open_source_file(source_filename, error_handling='ignore') as fh:
        source = fh.read()
    distutils_info = DistutilsInfo(source)
    source, literals = strip_string_literals(source)
    source = source.replace('\\\n', ' ').replace('\t', ' ')

    # TODO: pure mode
    cimports = []
    includes = []
    externs = []
    for m in dependency_regex.finditer(source):
        cimport_from, cimport_list, extern, include = m.groups()
        if cimport_from:
            cimports.append(cimport_from)
            m_after_from = dependency_after_from_regex.search(source, pos=m.end())
            if m_after_from:
                multiline, one_line = m_after_from.groups()
                subimports = multiline or one_line
                cimports.extend("{0}.{1}".format(cimport_from, s.strip())
                                for s in subimports.split(','))
        elif cimport_list:
            cimports.extend(x.strip() for x in cimport_list.split(","))
        elif extern:
            externs.append(literals[extern])
        else:
            includes.append(literals[include])
    return cimports, includes, externs, distutils_info
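
# Illustrative result of parse_dependencies() (not part of the module) for a
# hypothetical .pyx file containing:
#
#     cimport numpy
#     from libc.math cimport sqrt
#     cdef extern from "spam.h": pass
#     include "common.pxi"
#
# it would roughly return:
#
#     cimports       -> contains 'numpy' and 'libc.math' (plus any expanded
#                       'libc.math.<name>' entries)
#     includes       -> ['common.pxi']
#     externs        -> ['spam.h']
#     distutils_info -> DistutilsInfo parsed from leading '# distutils:' comments
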
class DependencyTree(object):

    def __init__(self, context, quiet=False):
        self.context = context
        self.quiet = quiet
        self._transitive_cache = {}

    def parse_dependencies(self, source_filename):
        if path_exists(source_filename):
            source_filename = os.path.normpath(source_filename)
        return parse_dependencies(source_filename)

    @cached_method
    def included_files(self, filename):
        # This is messy because included files are textually included, resolving
        # cimports (but not includes) relative to the including file.
        all = set()
        for include in self.parse_dependencies(filename)[1]:
            include_path = join_path(os.path.dirname(filename), include)
            if not path_exists(include_path):
                include_path = self.context.find_include_file(include, None)
            if include_path:
                if '.' + os.path.sep in include_path:
                    include_path = os.path.normpath(include_path)
                all.add(include_path)
                all.update(self.included_files(include_path))
            elif not self.quiet:
                print("Unable to locate '%s' referenced from '%s'" % (include, filename))
        return all

    @cached_method
    def cimports_externs_incdirs(self, filename):
        # This is really ugly. Nested cimports are resolved with respect to the
        # includer, but includes are resolved with respect to the includee.
        cimports, includes, externs = self.parse_dependencies(filename)[:3]
        cimports = set(cimports)
        externs = set(externs)
        incdirs = set()
        for include in self.included_files(filename):
            included_cimports, included_externs, included_incdirs = self.cimports_externs_incdirs(include)
            cimports.update(included_cimports)
            externs.update(included_externs)
            incdirs.update(included_incdirs)
        externs, incdir = normalize_existing(filename, externs)
        if incdir:
            incdirs.add(incdir)
        return tuple(cimports), externs, incdirs

    def cimports(self, filename):
        return self.cimports_externs_incdirs(filename)[0]

    def package(self, filename):
        return package(filename)

    def fully_qualified_name(self, filename):
        return fully_qualified_name(filename)

    @cached_method
    def find_pxd(self, module, filename=None):
        is_relative = module[0] == '.'
        if is_relative and not filename:
            raise NotImplementedError("New relative imports.")
        if filename is not None:
            module_path = module.split('.')
            if is_relative:
                module_path.pop(0)  # just explicitly relative
            package_path = list(self.package(filename))
            while module_path and not module_path[0]:
                try:
                    package_path.pop()
                except IndexError:
                    return None  # FIXME: error?
                module_path.pop(0)
            relative = '.'.join(package_path + module_path)
            pxd = self.context.find_pxd_file(relative, None)
            if pxd:
                return pxd
        if is_relative:
            return None  # FIXME: error?
        return self.context.find_pxd_file(module, None)

    @cached_method
    def cimported_files(self, filename):
        if filename[-4:] == '.pyx' and path_exists(filename[:-4] + '.pxd'):
            pxd_list = [filename[:-4] + '.pxd']
        else:
            pxd_list = []
        # cimports() generates all possible package.module combinations
        # when a module is imported as "from package cimport module".
        for module in self.cimports(filename):
            if module[:7] == 'cython.' or module == 'cython':
                continue
            pxd_file = self.find_pxd(module, filename)
            if pxd_file is not None:
                pxd_list.append(pxd_file)
        return tuple(pxd_list)

    @cached_method
    def immediate_dependencies(self, filename):
        all = set([filename])
        all.update(self.cimported_files(filename))
        all.update(self.included_files(filename))
        return all

    def all_dependencies(self, filename):
        return self.transitive_merge(filename, self.immediate_dependencies, set.union)

    @cached_method
    def timestamp(self, filename):
        return os.path.getmtime(filename)

    def extract_timestamp(self, filename):
        return self.timestamp(filename), filename

    def newest_dependency(self, filename):
        return max([self.extract_timestamp(f) for f in self.all_dependencies(filename)])

    def transitive_fingerprint(self, filename, module, compilation_options):
        r"""
        Return a fingerprint of a cython file that is about to be cythonized.

        Fingerprints are looked up in future compilations. If the fingerprint
        is found, the cythonization can be skipped. The fingerprint must
        incorporate everything that has an influence on the generated code.
        """
        try:
            m = hashlib.md5(__version__.encode('UTF-8'))
            m.update(file_hash(filename).encode('UTF-8'))
            for x in sorted(self.all_dependencies(filename)):
                if os.path.splitext(x)[1] not in ('.c', '.cpp', '.h'):
                    m.update(file_hash(x).encode('UTF-8'))
            # Include the module attributes that change the compilation result
            # in the fingerprint. We do not iterate over module.__dict__ and
            # include almost everything here as users might extend Extension
            # with arbitrary (random) attributes that would lead to cache
            # misses.
            m.update(str((
                module.language,
                getattr(module, 'py_limited_api', False),
                getattr(module, 'np_pythran', False)
            )).encode('UTF-8'))
            m.update(compilation_options.get_fingerprint().encode('UTF-8'))
            return m.hexdigest()
        except IOError:
            return None

    def distutils_info0(self, filename):
        info = self.parse_dependencies(filename)[3]
        kwds = info.values
        cimports, externs, incdirs = self.cimports_externs_incdirs(filename)
        basedir = os.getcwd()
        # Add dependencies on "cdef extern from ..." files
        if externs:
            externs = _make_relative(externs, basedir)
            if 'depends' in kwds:
                kwds['depends'] = list(set(kwds['depends']).union(externs))
            else:
                kwds['depends'] = list(externs)
        # Add include_dirs to ensure that the C compiler will find the
        # "cdef extern from ..." files
        if incdirs:
            include_dirs = list(kwds.get('include_dirs', []))
            for inc in _make_relative(incdirs, basedir):
                if inc not in include_dirs:
                    include_dirs.append(inc)
            kwds['include_dirs'] = include_dirs
        return info

    def distutils_info(self, filename, aliases=None, base=None):
        return (self.transitive_merge(filename, self.distutils_info0, DistutilsInfo.merge)
                .subs(aliases)
                .merge(base))

    def transitive_merge(self, node, extract, merge):
        try:
            seen = self._transitive_cache[extract, merge]
        except KeyError:
            seen = self._transitive_cache[extract, merge] = {}
        return self.transitive_merge_helper(
            node, extract, merge, seen, {}, self.cimported_files)[0]

    def transitive_merge_helper(self, node, extract, merge, seen, stack, outgoing):
        if node in seen:
            return seen[node], None
        deps = extract(node)
        if node in stack:
            return deps, node
        try:
            stack[node] = len(stack)
            loop = None
            for next in outgoing(node):
                sub_deps, sub_loop = self.transitive_merge_helper(next, extract, merge, seen, stack, outgoing)
                if sub_loop is not None:
                    if loop is not None and stack[loop] < stack[sub_loop]:
                        pass
                    else:
                        loop = sub_loop
                deps = merge(deps, sub_deps)
            if loop == node:
                loop = None
            if loop is None:
                seen[node] = deps
            return deps, loop
        finally:
            del stack[node]

_dep_tree = None

def create_dependency_tree(ctx=None, quiet=False):
    global _dep_tree
    if _dep_tree is None:
        if ctx is None:
            ctx = Context(["."], CompilationOptions(default_options))
        _dep_tree = DependencyTree(ctx, quiet=quiet)
    return _dep_tree


# If this changes, change also docs/src/reference/compilation.rst
# which mentions this function
def default_create_extension(template, kwds):
    if 'depends' in kwds:
        include_dirs = kwds.get('include_dirs', []) + ["."]
        depends = resolve_depends(kwds['depends'], include_dirs)
        kwds['depends'] = sorted(set(depends + template.depends))

    t = template.__class__
    ext = t(**kwds)
    metadata = dict(distutils=kwds, module_name=kwds['name'])
    return (ext, metadata)
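
# Illustrative sketch (not part of the module): create_extension_list() below
# looks up ctx.options.create_extension and falls back to
# default_create_extension(). A user-supplied hook could therefore wrap the
# default to adjust the distutils keywords before the Extension is built,
# e.g. (hypothetical library name):
#
#     def my_create_extension(template, kwds):
#         kwds['libraries'] = kwds.get('libraries', []) + ['mylib']
#         return default_create_extension(template, kwds)
#
# assuming the hook is forwarded into the compilation options, e.g. via
# cythonize(..., create_extension=my_create_extension).
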
# This may be useful for advanced users?
def create_extension_list(patterns, exclude=None, ctx=None, aliases=None, quiet=False, language=None,
                          exclude_failures=False):
    if language is not None:
        print('Warning: passing language={0!r} to cythonize() is deprecated. '
              'Instead, put "# distutils: language={0}" in your .pyx or .pxd file(s)'.format(language))
    if exclude is None:
        exclude = []
    if patterns is None:
        return [], {}
    elif isinstance(patterns, basestring) or not isinstance(patterns, Iterable):
        patterns = [patterns]
    explicit_modules = set([m.name for m in patterns if isinstance(m, Extension)])
    seen = set()
    deps = create_dependency_tree(ctx, quiet=quiet)
    to_exclude = set()
    if not isinstance(exclude, list):
        exclude = [exclude]
    for pattern in exclude:
        to_exclude.update(map(os.path.abspath, extended_iglob(pattern)))

    module_list = []
    module_metadata = {}

    # workaround for setuptools
    if 'setuptools' in sys.modules:
        Extension_distutils = sys.modules['setuptools.extension']._Extension
        Extension_setuptools = sys.modules['setuptools'].Extension
    else:
        # dummy class, in case we do not have setuptools
        Extension_distutils = Extension
        class Extension_setuptools(Extension): pass

    # if no create_extension() function is defined, use a simple
    # default function.
    create_extension = ctx.options.create_extension or default_create_extension

    for pattern in patterns:
        if isinstance(pattern, str):
            filepattern = pattern
            template = Extension(pattern, [])  # Fake Extension without sources
            name = '*'
            base = None
            ext_language = language
        elif isinstance(pattern, (Extension_distutils, Extension_setuptools)):
            cython_sources = [s for s in pattern.sources
                              if os.path.splitext(s)[1] in ('.py', '.pyx')]
            if cython_sources:
                filepattern = cython_sources[0]
                if len(cython_sources) > 1:
                    print("Warning: Multiple cython sources found for extension '%s': %s\n"
                          "See http://cython.readthedocs.io/en/latest/src/userguide/sharing_declarations.html "
                          "for sharing declarations among Cython files." % (pattern.name, cython_sources))
            else:
                # ignore non-cython modules
                module_list.append(pattern)
                continue
            template = pattern
            name = template.name
            base = DistutilsInfo(exn=template)
            ext_language = None  # do not override whatever the Extension says
        else:
            msg = str("pattern is not of type str nor subclass of Extension (%s)"
                      " but of type %s and class %s" % (repr(Extension),
                                                        type(pattern),
                                                        pattern.__class__))
            raise TypeError(msg)

        for file in nonempty(sorted(extended_iglob(filepattern)), "'%s' doesn't match any files" % filepattern):
            if os.path.abspath(file) in to_exclude:
                continue
            module_name = deps.fully_qualified_name(file)
            if '*' in name:
                if module_name in explicit_modules:
                    continue
            elif name:
                module_name = name

            Utils.raise_error_if_module_name_forbidden(module_name)

            if module_name not in seen:
                try:
                    kwds = deps.distutils_info(file, aliases, base).values
                except Exception:
                    if exclude_failures:
                        continue
                    raise
                if base is not None:
                    for key, value in base.values.items():
                        if key not in kwds:
                            kwds[key] = value

                kwds['name'] = module_name

                sources = [file] + [m for m in template.sources if m != filepattern]
                if 'sources' in kwds:
                    # allow users to add .c files etc.
                    for source in kwds['sources']:
                        source = encode_filename_in_py2(source)
                        if source not in sources:
                            sources.append(source)
                kwds['sources'] = sources

                if ext_language and 'language' not in kwds:
                    kwds['language'] = ext_language

                np_pythran = kwds.pop('np_pythran', False)

                # Create the new extension
                m, metadata = create_extension(template, kwds)
                m.np_pythran = np_pythran or getattr(m, 'np_pythran', False)
                if m.np_pythran:
                    update_pythran_extension(m)
                module_list.append(m)

                # Store metadata (this will be written as JSON in the
                # generated C file but otherwise has no purpose)
                module_metadata[module_name] = metadata

                if file not in m.sources:
                    # Old setuptools unconditionally replaces .pyx with .c/.cpp
                    target_file = os.path.splitext(file)[0] + ('.cpp' if m.language == 'c++' else '.c')
                    try:
                        m.sources.remove(target_file)
                    except ValueError:
                        # never seen this in the wild, but probably better to warn about this unexpected case
                        print("Warning: Cython source file not found in sources list, adding %s" % file)
                    m.sources.insert(0, file)
                seen.add(name)
    return module_list, module_metadata

# This is the user-exposed entry point.
def cythonize(module_list, exclude=None, nthreads=0, aliases=None, quiet=False, force=None, language=None,
              exclude_failures=False, **options):
    """
    Compile a set of source modules into C/C++ files and return a list of distutils
    Extension objects for them.

    :param module_list: As module list, pass either a glob pattern, a list of glob
                        patterns or a list of Extension objects.  The latter
                        allows you to configure the extensions separately
                        through the normal distutils options.
                        You can also pass Extension objects that have
                        glob patterns as their sources. Then, cythonize
                        will resolve the pattern and create a
                        copy of the Extension for every matching file.

    :param exclude: When passing glob patterns as ``module_list``, you can exclude certain
                    module names explicitly by passing them into the ``exclude`` option.

    :param nthreads: The number of concurrent builds for parallel compilation
                     (requires the ``multiprocessing`` module).

    :param aliases: If you want to use compiler directives like ``# distutils: ...`` but
                    can only know at compile time (when running the ``setup.py``) which values
                    to use, you can use aliases and pass a dictionary mapping those aliases
                    to Python strings when calling :func:`cythonize`. As an example, say you
                    want to use the compiler
                    directive ``# distutils: include_dirs = ../static_libs/include/``
                    but this path isn't always fixed and you want to find it when running
                    the ``setup.py``. You can then do ``# distutils: include_dirs = MY_HEADERS``,
                    find the value of ``MY_HEADERS`` in the ``setup.py``, put it in a python
                    variable called ``foo`` as a string, and then call
                    ``cythonize(..., aliases={'MY_HEADERS': foo})``.

    :param quiet: If True, Cython won't print error, warning, or status messages during the
                  compilation.

    :param force: Forces the recompilation of the Cython modules, even if the timestamps
                  don't indicate that a recompilation is necessary.

    :param language: To globally enable C++ mode, you can pass ``language='c++'``. Otherwise, this
                     will be determined at a per-file level based on compiler directives.  This
                     affects only modules found based on file names.  Extension instances passed
                     into :func:`cythonize` will not be changed. It is recommended to rather
                     use the compiler directive ``# distutils: language = c++`` than this option.

    :param exclude_failures: For a broad 'try to compile' mode that ignores compilation
                             failures and simply excludes the failed extensions,
                             pass ``exclude_failures=True``. Note that this only
                             really makes sense for compiling ``.py`` files which can also
                             be used without compilation.

    :param annotate: If ``True``, will produce an HTML file for each of the ``.pyx`` or ``.py``
                     files compiled. The HTML file gives an indication
                     of how much Python interaction there is in
                     each of the source code lines, compared to plain C code.
                     It also allows you to see the C/C++ code
                     generated for each line of Cython code. This report is invaluable when
                     optimizing a function for speed,
                     and for determining when to :ref:`release the GIL <nogil>`:
                     in general, a ``nogil`` block may contain only "white" code.
                     See examples in :ref:`determining_where_to_add_types` or
                     :ref:`primes`.

    :param compiler_directives: Allows you to set compiler directives in the ``setup.py`` like this:
                                ``compiler_directives={'embedsignature': True}``.
                                See :ref:`compiler-directives`.

    :param depfile: Produce depfiles for the sources if True.
    """
    if exclude is None:
        exclude = []
    if 'include_path' not in options:
        options['include_path'] = ['.']
    if 'common_utility_include_dir' in options:
        safe_makedirs(options['common_utility_include_dir'])

    depfile = options.pop('depfile', None)

    if pythran is None:
        pythran_options = None
    else:
        pythran_options = CompilationOptions(**options)
        pythran_options.cplus = True
        pythran_options.np_pythran = True

    if force is None:
        force = os.environ.get("CYTHON_FORCE_REGEN") == "1"  # allow global overrides for build systems

    c_options = CompilationOptions(**options)
    cpp_options = CompilationOptions(**options); cpp_options.cplus = True
    ctx = c_options.create_context()
    options = c_options
    module_list, module_metadata = create_extension_list(
        module_list,
        exclude=exclude,
        ctx=ctx,
        quiet=quiet,
        exclude_failures=exclude_failures,
        language=language,
        aliases=aliases)
    deps = create_dependency_tree(ctx, quiet=quiet)
    build_dir = getattr(options, 'build_dir', None)

    def copy_to_build_dir(filepath, root=os.getcwd()):
        filepath_abs = os.path.abspath(filepath)
        if os.path.isabs(filepath):
            filepath = filepath_abs
        if filepath_abs.startswith(root):
            # distutils extension depends are relative to cwd
            mod_dir = join_path(build_dir,
                                os.path.dirname(_relpath(filepath, root)))
            copy_once_if_newer(filepath_abs, mod_dir)

    modules_by_cfile = collections.defaultdict(list)
    to_compile = []
    for m in module_list:
        if build_dir:
            for dep in m.depends:
                copy_to_build_dir(dep)

        cy_sources = [
            source for source in m.sources
            if os.path.splitext(source)[1] in ('.pyx', '.py')]
        if len(cy_sources) == 1:
            # normal "special" case: believe the Extension module name to allow user overrides
            full_module_name = m.name
        else:
            # infer FQMN from source files
            full_module_name = None

        new_sources = []
        for source in m.sources:
            base, ext = os.path.splitext(source)
            if ext in ('.pyx', '.py'):
                if m.np_pythran:
                    c_file = base + '.cpp'
                    options = pythran_options
                elif m.language == 'c++':
                    c_file = base + '.cpp'
                    options = cpp_options
                else:
                    c_file = base + '.c'
                    options = c_options

                # setup for out of place build directory if enabled
                if build_dir:
                    if os.path.isabs(c_file):
                        warnings.warn("build_dir has no effect for absolute source paths")
                    c_file = os.path.join(build_dir, c_file)
                    dir = os.path.dirname(c_file)
                    safe_makedirs_once(dir)

                # write out the depfile, if requested
                if depfile:
                    dependencies = deps.all_dependencies(source)
                    write_depfile(c_file, source, dependencies)

                if os.path.exists(c_file):
                    c_timestamp = os.path.getmtime(c_file)
                else:
                    c_timestamp = -1

                # Priority goes first to modified files, second to direct
                # dependents, and finally to indirect dependents.
                if c_timestamp < deps.timestamp(source):
                    dep_timestamp, dep = deps.timestamp(source), source
                    priority = 0
                else:
                    dep_timestamp, dep = deps.newest_dependency(source)
                    priority = 2 - (dep in deps.immediate_dependencies(source))
                if force or c_timestamp < dep_timestamp:
                    if not quiet and not force:
                        if source == dep:
                            print("Compiling %s because it changed." % source)
                        else:
                            print("Compiling %s because it depends on %s." % (source, dep))
                    if not force and options.cache:
                        fingerprint = deps.transitive_fingerprint(source, m, options)
                    else:
                        fingerprint = None
                    to_compile.append((
                        priority, source, c_file, fingerprint, quiet,
                        options, not exclude_failures, module_metadata.get(m.name),
                        full_module_name))
                new_sources.append(c_file)
                modules_by_cfile[c_file].append(m)
            else:
                new_sources.append(source)
                if build_dir:
                    copy_to_build_dir(source)
        m.sources = new_sources

    if options.cache:
        if not os.path.exists(options.cache):
            os.makedirs(options.cache)

    to_compile.sort()
    # Drop "priority" component of "to_compile" entries and add a
    # simple progress indicator.
    N = len(to_compile)
    progress_fmt = "[{0:%d}/{1}] " % len(str(N))
    for i in range(N):
        progress = progress_fmt.format(i+1, N)
        to_compile[i] = to_compile[i][1:] + (progress,)

    if N <= 1:
        nthreads = 0
    if nthreads:
        # Requires multiprocessing (or Python >= 2.6)
        try:
            import multiprocessing
            pool = multiprocessing.Pool(
                nthreads, initializer=_init_multiprocessing_helper)
        except (ImportError, OSError):
            print("multiprocessing required for parallel cythonization")
            nthreads = 0
        else:
            # This is a bit more involved than it should be, because KeyboardInterrupts
            # break the multiprocessing workers when using a normal pool.map().
            # See, for example:
            # http://noswap.com/blog/python-multiprocessing-keyboardinterrupt
            try:
                result = pool.map_async(cythonize_one_helper, to_compile, chunksize=1)
                pool.close()
                while not result.ready():
                    try:
                        result.get(99999)  # seconds
                    except multiprocessing.TimeoutError:
                        pass
            except KeyboardInterrupt:
                pool.terminate()
                raise
            pool.join()
    if not nthreads:
        for args in to_compile:
            cythonize_one(*args)

    if exclude_failures:
        failed_modules = set()
        for c_file, modules in modules_by_cfile.items():
            if not os.path.exists(c_file):
                failed_modules.update(modules)
            elif os.path.getsize(c_file) < 200:
                f = io_open(c_file, 'r', encoding='iso8859-1')
                try:
                    if f.read(len('#error ')) == '#error ':
                        # dead compilation result
                        failed_modules.update(modules)
                finally:
                    f.close()
        if failed_modules:
            for module in failed_modules:
                module_list.remove(module)
            print("Failed compilations: %s" % ', '.join(sorted([
                module.name for module in failed_modules])))

    if options.cache:
        cleanup_cache(options.cache, getattr(options, 'cache_size', 1024 * 1024 * 100))

    # cythonize() is often followed by the (non-Python-buffered)
    # compiler output, flush now to avoid interleaving output.
    sys.stdout.flush()
    return module_list
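
# Illustrative setup.py usage of cythonize() (not part of the module); the
# package layout and option values are hypothetical.
#
#     from setuptools import setup
#     from Cython.Build import cythonize
#
#     setup(
#         name="mypkg",
#         ext_modules=cythonize(
#             "mypkg/**/*.pyx",          # glob patterns expanded by extended_iglob()
#             exclude=["mypkg/broken.pyx"],
#             nthreads=4,                # parallel cythonization via multiprocessing
#             compiler_directives={'language_level': 3},
#         ),
#     )
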
if os.environ.get('XML_RESULTS'):
    compile_result_dir = os.environ['XML_RESULTS']
    def record_results(func):
        def with_record(*args):
            t = time.time()
            success = True
            try:
                try:
                    func(*args)
                except:
                    success = False
            finally:
                t = time.time() - t
                module = fully_qualified_name(args[0])
                name = "cythonize." + module
                failures = 1 - success
                if success:
                    failure_item = ""
                else:
                    failure_item = "failure"
                output = open(os.path.join(compile_result_dir, name + ".xml"), "w")
                output.write("""
                    <?xml version="1.0" ?>
                    <testsuite name="%(name)s" errors="0" failures="%(failures)s" tests="1" time="%(t)s">
                    <testcase classname="%(name)s" name="cythonize">
                    %(failure_item)s
                    </testcase>
                    </testsuite>
                """.strip() % locals())
                output.close()
        return with_record
else:
    def record_results(func):
        return func

# TODO: Share context? Issue: pyx processing leaks into pxd module
@record_results
def cythonize_one(pyx_file, c_file, fingerprint, quiet, options=None,
                  raise_on_failure=True, embedded_metadata=None, full_module_name=None,
                  progress=""):
    from ..Compiler.Main import compile_single, default_options
    from ..Compiler.Errors import CompileError, PyrexError

    if fingerprint:
        if not os.path.exists(options.cache):
            safe_makedirs(options.cache)
        # Cython-generated c files are highly compressible.
        # (E.g. a compression ratio of about 10 for Sage).
        fingerprint_file_base = join_path(
            options.cache, "%s-%s" % (os.path.basename(c_file), fingerprint))
        gz_fingerprint_file = fingerprint_file_base + gzip_ext
        zip_fingerprint_file = fingerprint_file_base + '.zip'
        if os.path.exists(gz_fingerprint_file) or os.path.exists(zip_fingerprint_file):
            if not quiet:
                print("%sFound compiled %s in cache" % (progress, pyx_file))
            if os.path.exists(gz_fingerprint_file):
                os.utime(gz_fingerprint_file, None)
                with contextlib.closing(gzip_open(gz_fingerprint_file, 'rb')) as g:
                    with contextlib.closing(open(c_file, 'wb')) as f:
                        shutil.copyfileobj(g, f)
            else:
                os.utime(zip_fingerprint_file, None)
                dirname = os.path.dirname(c_file)
                with contextlib.closing(zipfile.ZipFile(zip_fingerprint_file)) as z:
                    for artifact in z.namelist():
                        z.extract(artifact, os.path.join(dirname, artifact))
            return

    if not quiet:
        print("%sCythonizing %s" % (progress, pyx_file))
    if options is None:
        options = CompilationOptions(default_options)
    options.output_file = c_file
    options.embedded_metadata = embedded_metadata

    any_failures = 0
    try:
        result = compile_single(pyx_file, options, full_module_name=full_module_name)
        if result.num_errors > 0:
            any_failures = 1
    except (EnvironmentError, PyrexError) as e:
        sys.stderr.write('%s\n' % e)
        any_failures = 1
        # XXX
        import traceback
        traceback.print_exc()
    except Exception:
        if raise_on_failure:
            raise
        import traceback
        traceback.print_exc()
        any_failures = 1
    if any_failures:
        if raise_on_failure:
            raise CompileError(None, pyx_file)
        elif os.path.exists(c_file):
            os.remove(c_file)
    elif fingerprint:
        artifacts = list(filter(None, [
            getattr(result, attr, None)
            for attr in ('c_file', 'h_file', 'api_file', 'i_file')]))
        if len(artifacts) == 1:
            fingerprint_file = gz_fingerprint_file
            with contextlib.closing(open(c_file, 'rb')) as f:
                with contextlib.closing(gzip_open(fingerprint_file + '.tmp', 'wb')) as g:
                    shutil.copyfileobj(f, g)
        else:
            fingerprint_file = zip_fingerprint_file
            with contextlib.closing(zipfile.ZipFile(
                    fingerprint_file + '.tmp', 'w', zipfile_compression_mode)) as zip:
                for artifact in artifacts:
                    zip.write(artifact, os.path.basename(artifact))
        os.rename(fingerprint_file + '.tmp', fingerprint_file)

def cythonize_one_helper(m):
    import traceback
    try:
        return cythonize_one(*m)
    except Exception:
        traceback.print_exc()
        raise


def _init_multiprocessing_helper():
    # KeyboardInterrupt kills workers, so don't let them get it
    import signal
    signal.signal(signal.SIGINT, signal.SIG_IGN)


def cleanup_cache(cache, target_size, ratio=.85):
    try:
        p = subprocess.Popen(['du', '-s', '-k', os.path.abspath(cache)], stdout=subprocess.PIPE)
        res = p.wait()
        if res == 0:
            total_size = 1024 * int(p.stdout.read().strip().split()[0])
            if total_size < target_size:
                return
    except (OSError, ValueError):
        pass
    total_size = 0
    all = []
    for file in os.listdir(cache):
        path = join_path(cache, file)
        s = os.stat(path)
        total_size += s.st_size
        all.append((s.st_atime, s.st_size, path))
    if total_size > target_size:
        for time, size, file in reversed(sorted(all)):
            os.unlink(file)
            total_size -= size
            if total_size < target_size * ratio:
                break