# package_json.py
  1. import json
  2. import logging
  3. import os
  4. from six import iteritems
  5. from .utils import build_pj_path
  6. logger = logging.getLogger(__name__)
  7. class PackageJsonWorkspaceError(RuntimeError):
  8. pass
  9. class PackageJson(object):
  10. DEP_KEY = "dependencies"
  11. DEV_DEP_KEY = "devDependencies"
  12. PEER_DEP_KEY = "peerDependencies"
  13. OPT_DEP_KEY = "optionalDependencies"
  14. PNPM_OVERRIDES_KEY = "pnpm.overrides"
  15. DEP_KEYS = (DEP_KEY, DEV_DEP_KEY, PEER_DEP_KEY, OPT_DEP_KEY, PNPM_OVERRIDES_KEY)
  16. WORKSPACE_SCHEMA = "workspace:"
  17. @classmethod
  18. def load(cls, path):
  19. """
  20. :param path: package.json path
  21. :type path: str
  22. :rtype: PackageJson
  23. """
  24. pj = cls(path)
  25. pj.read()
  26. return pj
  27. def __init__(self, path):
  28. # type: (str) -> None
  29. if not os.path.isabs(path):
  30. raise TypeError("Absolute path required, given: {}".format(path))
  31. self.path = path
  32. self.data = None
  33. def read(self):
  34. with open(self.path, 'rb') as f:
  35. self.data = json.load(f)
  36. def write(self, path=None):
  37. """
  38. :param path: path to store package.json, defaults to original path
  39. :type path: str
  40. """
  41. if path is None:
  42. path = self.path
  43. directory = os.path.dirname(path)
  44. if not os.path.exists(directory):
  45. os.mkdir(directory)
  46. with open(path, "w") as f:
  47. json.dump(self.data, f, indent=2, ensure_ascii=False)
  48. f.write('\n') # it's better for diff algorithm in arc
  49. logger.debug("Written {}".format(path))
  50. def get_name(self):
  51. # type: () -> str
  52. name = self.data.get("name")
  53. if not name:
  54. name = os.path.dirname(self.path).replace("/", "-").strip("-")
  55. return name
  56. def get_version(self):
  57. return self.data["version"]
  58. def get_description(self):
  59. return self.data.get("description")
  60. def get_use_prebuilder(self):
  61. return self.data.get("usePrebuilder", False)
  62. def get_nodejs_version(self):
  63. return self.data.get("engines", {}).get("node")
  64. def get_dep_specifier(self, dep_name):
  65. for name, spec in self.dependencies_iter():
  66. if dep_name == name:
  67. return spec
  68. return None
  69. def dependencies_iter(self):
  70. for key in self.DEP_KEYS:
  71. if key == self.PNPM_OVERRIDES_KEY:
  72. deps = self.data.get("pnpm", {}).get("overrides", {})
  73. else:
  74. deps = self.data.get(key)
  75. if not deps:
  76. continue
  77. for name, spec in iteritems(deps):
  78. yield name, spec
  79. def has_dependencies(self):
  80. first_dep = next(self.dependencies_iter(), None)
  81. return first_dep is not None
  82. def bins_iter(self):
  83. bins = self.data.get("bin")
  84. if isinstance(bins, str):
  85. yield bins
  86. elif isinstance(bins, dict):
  87. for bin in bins.values():
  88. yield bin
  89. def get_bin_path(self, bin_name=None):
  90. # type: (str|None) -> str|None
  91. actual_bin_name = bin_name or self.get_name() # type: str
  92. bins = self.data.get("bin")
  93. if isinstance(bins, str):
  94. if bin_name is not None:
  95. logger.warning("bin_name is unused, because 'bin' is a string")
  96. return bins
  97. if isinstance(bins, dict):
  98. for name, path in bins.items():
  99. if name == actual_bin_name:
  100. return path
  101. return None
  102. # TODO: FBP-1254
  103. # def get_workspace_dep_spec_paths(self) -> list[tuple[str, str]]:
  104. def get_workspace_dep_spec_paths(self):
  105. """
  106. Returns names and paths from specifiers of the defined workspace dependencies.
  107. :rtype: list[tuple[str, str]]
  108. """
  109. spec_paths = []
  110. schema = self.WORKSPACE_SCHEMA
  111. schema_len = len(schema)
  112. for name, spec in self.dependencies_iter():
  113. if not spec.startswith(schema):
  114. continue
  115. spec_path = spec[schema_len:]
  116. if not (spec_path.startswith(".") or spec_path.startswith("..")):
  117. raise PackageJsonWorkspaceError(
  118. "Expected relative path specifier for workspace dependency, but got '{}' for {} in {}".format(
  119. spec, name, self.path
  120. )
  121. )
  122. spec_paths.append((name, spec_path))
  123. return spec_paths
  124. def get_workspace_dep_paths(self, base_path=None):
  125. """
  126. Returns paths of the defined workspace dependencies.
  127. :param base_path: base path to resolve relative dep paths
  128. :type base_path: str
  129. :rtype: list of str
  130. """
  131. if base_path is None:
  132. base_path = os.path.dirname(self.path)
  133. return [os.path.normpath(os.path.join(base_path, p)) for _, p in self.get_workspace_dep_spec_paths()]
  134. def get_workspace_deps(self):
  135. """
  136. :rtype: list of PackageJson
  137. """
  138. ws_deps = []
  139. pj_dir = os.path.dirname(self.path)
  140. for name, rel_path in self.get_workspace_dep_spec_paths():
  141. dep_path = os.path.normpath(os.path.join(pj_dir, rel_path))
  142. dep_pj = PackageJson.load(build_pj_path(dep_path))
  143. if name != dep_pj.get_name():
  144. raise PackageJsonWorkspaceError(
  145. "Workspace dependency name mismatch, found '{}' instead of '{}' in {}".format(
  146. name, dep_pj.get_name(), self.path
  147. )
  148. )
  149. ws_deps.append(dep_pj)
  150. return ws_deps
  151. def get_workspace_map(self, ignore_self=False):
  152. """
  153. Returns absolute paths of the workspace dependencies (including transitive) mapped to package.json and depth.
  154. :param ignore_self: whether path of the current module will be excluded
  155. :type ignore_self: bool
  156. :rtype: dict of (PackageJson, int)
  157. """
  158. ws_deps = {}
  159. # list of (pj, depth)
  160. pj_queue = [(self, 0)]
  161. while len(pj_queue):
  162. (pj, depth) = pj_queue.pop()
  163. pj_dir = os.path.dirname(pj.path)
  164. if pj_dir in ws_deps:
  165. continue
  166. if not ignore_self or pj != self:
  167. ws_deps[pj_dir] = (pj, depth)
  168. for dep_pj in pj.get_workspace_deps():
  169. pj_queue.append((dep_pj, depth + 1))
  170. return ws_deps
  171. def get_dep_paths_by_names(self):
  172. """
  173. Returns dict of {dependency_name: dependency_path}
  174. """
  175. ws_map = self.get_workspace_map()
  176. return {pj.get_name(): path for path, (pj, _) in ws_map.items()}
  177. def validate_prebuilds(self, requires_build_packages: list[str]):
  178. pnpm_overrides: dict[str, str] = self.data.get("pnpm", {}).get("overrides", {})
  179. use_prebuild_flags: dict[str, bool] = self.data.get("@yatool/prebuilder", {}).get("usePrebuild", {})
  180. def covered(k: str) -> bool:
  181. if k.startswith("@yandex-prebuild/"):
  182. return True
  183. return k in use_prebuild_flags
  184. not_covered = [key for key in requires_build_packages if not covered(key)]
  185. use_prebuild_keys = [key for key in use_prebuild_flags if use_prebuild_flags[key]]
  186. missing_overrides = [key for key in use_prebuild_keys if key not in pnpm_overrides]
  187. messages = []
  188. if not_covered:
  189. messages.append("These packages possibly have addons but are not checked yet:")
  190. messages.extend([f" - {key}" for key in not_covered])
  191. if missing_overrides:
  192. messages.append("These packages have addons but overrides are not set:")
  193. messages.extend([f" - {key}" for key in missing_overrides])
  194. return (not messages, messages)
  195. def get_pnpm_patched_dependencies(self) -> dict[str, str]:
  196. return self.data.get("pnpm", {}).get("patchedDependencies", {})