# package_json.py
  1. import json
  2. import logging
  3. import os
  4. from six import iteritems
  5. from .utils import build_pj_path
  6. logger = logging.getLogger(__name__)
class PackageJsonWorkspaceError(RuntimeError):
    """Raised when workspace dependency data in a package.json is invalid
    (non-relative ``workspace:`` specifier or a name mismatch with the
    dependency's own package.json)."""

    pass
  9. class PackageJson(object):
  10. DEP_KEY = "dependencies"
  11. DEV_DEP_KEY = "devDependencies"
  12. PEER_DEP_KEY = "peerDependencies"
  13. OPT_DEP_KEY = "optionalDependencies"
  14. DEP_KEYS = (DEP_KEY, DEV_DEP_KEY, PEER_DEP_KEY, OPT_DEP_KEY)
  15. WORKSPACE_SCHEMA = "workspace:"
  16. @classmethod
  17. def load(cls, path):
  18. """
  19. :param path: package.json path
  20. :type path: str
  21. :rtype: PackageJson
  22. """
  23. pj = cls(path)
  24. pj.read()
  25. return pj
  26. def __init__(self, path):
  27. # type: (str) -> None
  28. if not os.path.isabs(path):
  29. raise TypeError("Absolute path required, given: {}".format(path))
  30. self.path = path
  31. self.data = None
  32. def read(self):
  33. with open(self.path, 'rb') as f:
  34. self.data = json.load(f)
  35. def write(self, path=None):
  36. """
  37. :param path: path to store package.json, defaults to original path
  38. :type path: str
  39. """
  40. if path is None:
  41. path = self.path
  42. directory = os.path.dirname(path)
  43. if not os.path.exists(directory):
  44. os.mkdir(directory)
  45. with open(path, "w") as f:
  46. json.dump(self.data, f, indent=2, ensure_ascii=False)
  47. f.write('\n') # it's better for diff algorithm in arc
  48. logger.debug("Written {}".format(path))
  49. def get_name(self):
  50. # type: () -> str
  51. name = self.data.get("name")
  52. if not name:
  53. name = os.path.dirname(self.path).replace("/", "-").strip("-")
  54. return name
  55. def get_version(self):
  56. return self.data["version"]
  57. def get_description(self):
  58. return self.data.get("description")
  59. def get_use_prebuilder(self):
  60. return self.data.get("usePrebuilder", False)
  61. def get_nodejs_version(self):
  62. return self.data.get("engines", {}).get("node")
  63. def get_dep_specifier(self, dep_name):
  64. for name, spec in self.dependencies_iter():
  65. if dep_name == name:
  66. return spec
  67. return None
  68. def dependencies_iter(self):
  69. for key in self.DEP_KEYS:
  70. deps = self.data.get(key)
  71. if not deps:
  72. continue
  73. for name, spec in iteritems(deps):
  74. yield (name, spec)
  75. def has_dependencies(self):
  76. first_dep = next(self.dependencies_iter(), None)
  77. return first_dep is not None
  78. def bins_iter(self):
  79. bins = self.data.get("bin")
  80. if isinstance(bins, str):
  81. yield bins
  82. elif isinstance(bins, dict):
  83. for bin in bins.values():
  84. yield bin
  85. def get_bin_path(self, bin_name=None):
  86. # type: (str|None) -> str|None
  87. actual_bin_name = bin_name or self.get_name() # type: str
  88. bins = self.data.get("bin")
  89. if isinstance(bins, str):
  90. if bin_name is not None:
  91. logger.warning("bin_name is unused, because 'bin' is a string")
  92. return bins
  93. if isinstance(bins, dict):
  94. for name, path in bins.items():
  95. if name == actual_bin_name:
  96. return path
  97. return None
  98. # TODO: FBP-1254
  99. # def get_workspace_dep_spec_paths(self) -> list[tuple[str, str]]:
  100. def get_workspace_dep_spec_paths(self):
  101. """
  102. Returns names and paths from specifiers of the defined workspace dependencies.
  103. :rtype: list[tuple[str, str]]
  104. """
  105. spec_paths = []
  106. schema = self.WORKSPACE_SCHEMA
  107. schema_len = len(schema)
  108. for name, spec in self.dependencies_iter():
  109. if not spec.startswith(schema):
  110. continue
  111. spec_path = spec[schema_len:]
  112. if not (spec_path.startswith(".") or spec_path.startswith("..")):
  113. raise PackageJsonWorkspaceError(
  114. "Expected relative path specifier for workspace dependency, but got '{}' for {} in {}".format(
  115. spec, name, self.path
  116. )
  117. )
  118. spec_paths.append((name, spec_path))
  119. return spec_paths
  120. def get_workspace_dep_paths(self, base_path=None):
  121. """
  122. Returns paths of the defined workspace dependencies.
  123. :param base_path: base path to resolve relative dep paths
  124. :type base_path: str
  125. :rtype: list of str
  126. """
  127. if base_path is None:
  128. base_path = os.path.dirname(self.path)
  129. return [os.path.normpath(os.path.join(base_path, p)) for _, p in self.get_workspace_dep_spec_paths()]
  130. def get_workspace_deps(self):
  131. """
  132. :rtype: list of PackageJson
  133. """
  134. ws_deps = []
  135. pj_dir = os.path.dirname(self.path)
  136. for name, rel_path in self.get_workspace_dep_spec_paths():
  137. dep_path = os.path.normpath(os.path.join(pj_dir, rel_path))
  138. dep_pj = PackageJson.load(build_pj_path(dep_path))
  139. if name != dep_pj.get_name():
  140. raise PackageJsonWorkspaceError(
  141. "Workspace dependency name mismatch, found '{}' instead of '{}' in {}".format(
  142. name, dep_pj.get_name(), self.path
  143. )
  144. )
  145. ws_deps.append(dep_pj)
  146. return ws_deps
  147. def get_workspace_map(self, ignore_self=False):
  148. """
  149. Returns absolute paths of the workspace dependencies (including transitive) mapped to package.json and depth.
  150. :param ignore_self: whether path of the current module will be excluded
  151. :type ignore_self: bool
  152. :rtype: dict of (PackageJson, int)
  153. """
  154. ws_deps = {}
  155. # list of (pj, depth)
  156. pj_queue = [(self, 0)]
  157. while len(pj_queue):
  158. (pj, depth) = pj_queue.pop()
  159. pj_dir = os.path.dirname(pj.path)
  160. if pj_dir in ws_deps:
  161. continue
  162. if not ignore_self or pj != self:
  163. ws_deps[pj_dir] = (pj, depth)
  164. for dep_pj in pj.get_workspace_deps():
  165. pj_queue.append((dep_pj, depth + 1))
  166. return ws_deps
  167. def get_dep_paths_by_names(self):
  168. """
  169. Returns dict of {dependency_name: dependency_path}
  170. """
  171. ws_map = self.get_workspace_map()
  172. return {pj.get_name(): path for path, (pj, _) in ws_map.items()}
  173. def validate_prebuilds(self, requires_build_packages: list[str]):
  174. pnpm_overrides: dict[str, str] = self.data.get("pnpm", {}).get("overrides", {})
  175. use_prebuild_flags: dict[str, bool] = self.data.get("@yatool/prebuilder", {}).get("usePrebuild", {})
  176. def covered(k: str) -> bool:
  177. if k.startswith("@yandex-prebuild/"):
  178. return True
  179. return k in use_prebuild_flags
  180. not_covered = [key for key in requires_build_packages if not covered(key)]
  181. use_prebuild_keys = [key for key in use_prebuild_flags if use_prebuild_flags[key]]
  182. missing_overrides = [key for key in use_prebuild_keys if key not in pnpm_overrides]
  183. messages = []
  184. if not_covered:
  185. messages.append("These packages possibly have addons but are not checked yet:")
  186. messages.extend([f" - {key}" for key in not_covered])
  187. if missing_overrides:
  188. messages.append("These packages have addons but overrides are not set:")
  189. messages.extend([f" - {key}" for key in missing_overrides])
  190. return (not messages, messages)