# copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import os.path as osp
import importlib
import tempfile
import shutil

from ..utils import logging
from ..utils.download import download_and_extract
from .meta import get_repo_meta, REPO_DOWNLOAD_BASE
from .utils import (
    install_packages_using_pip,
    fetch_repo_using_git,
    reset_repo_using_git,
    uninstall_package_using_pip,
    remove_repo_using_rm,
    check_installation_using_pip,
    build_wheel_using_pip,
    mute,
    switch_working_dir,
    to_dep_spec_pep508,
    env_marker_ast2expr,
    install_external_deps,
)

__all__ = ["build_repo_instance", "build_repo_group_installer"]

def build_repo_instance(repo_name, *args, **kwargs):
    """build_repo_instance"""
    # XXX: Hard-code type
    repo_cls = PPRepository
    repo_instance = repo_cls(repo_name, *args, **kwargs)
    return repo_instance


def build_repo_group_installer(*repos):
    """build_repo_group_installer"""
    return RepositoryGroupInstaller(list(repos))


def build_repo_group_getter(*repos):
    """build_repo_group_getter"""
    return RepositoryGroupGetter(list(repos))

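# Illustrative sketch (assumption, not part of the module logic): the factory
# functions above are presumably driven by the surrounding setup code roughly
# as follows, where `repos_dir` and `pdx_collection_mod` are hypothetical
# stand-ins for the repo parent directory and the PDX collection module passed
# to `PPRepository.__init__`, and "PaddleOCR" is just an example repo name:
#
#     repo = build_repo_instance("PaddleOCR", repos_dir, pdx_collection_mod)
#     getter = build_repo_group_getter(repo)
#     installer = build_repo_group_installer(repo)
#     getter.get(platform="github.com")  # download, then git-fetch and reset the source tree
#     installer.install()                # install dependencies, then the repo package
#     repo.initialize()                  # import the repo's PDX module and register it
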
class PPRepository(object):
    """
    Installation, initialization, and PDX module import handler for a
    PaddlePaddle repository.
    """

    def __init__(self, name, repo_parent_dir, pdx_collection_mod):
        super().__init__()
        self.name = name
        self.repo_parent_dir = repo_parent_dir
        self.root_dir = osp.join(repo_parent_dir, self.name)

        self.meta = get_repo_meta(self.name)
        self.git_path = self.meta["git_path"]
        self.pkg_name = self.meta["pkg_name"]
        self.lib_name = self.meta["lib_name"]
        self.pdx_mod_name = (
            pdx_collection_mod.__name__ + "." + self.meta["pdx_pkg_name"]
        )
        self.main_req_file = self.meta.get("main_req_file", "requirements.txt")

    def initialize(self):
        """initialize"""
        if not self.check_installation(quick_check=True):
            return False
        if "path_env" in self.meta:
            # Set env var
            os.environ[self.meta["path_env"]] = osp.abspath(self.root_dir)
        # NOTE: Calling `self.get_pdx()` actually loads the repo PDX package
        # and performs all the registration.
        self.get_pdx()
        return True

    def check_installation(self, quick_check=False):
        """check_installation"""
        if quick_check:
            lib = self._get_lib(load=False)
            return lib is not None
        else:
            # TODO: Also check if correct dependencies are installed.
            return check_installation_using_pip(self.pkg_name)

    def check_repo_exiting(self, quick_check=False):
        """check_repo_exiting"""
        return os.path.exists(os.path.join(self.root_dir, ".git"))

    def install(self, *args, **kwargs):
        """install"""
        return RepositoryGroupInstaller([self]).install(*args, **kwargs)

    def uninstall(self, *args, **kwargs):
        """uninstall"""
        return RepositoryGroupInstaller([self]).uninstall(*args, **kwargs)

    def install_deps(self, *args, **kwargs):
        """install_deps"""
        return RepositoryGroupInstaller([self]).install_deps(*args, **kwargs)

    def install_package(self, no_deps=False, clean=True, install_extra_only=False):
        """install_package"""
        editable = self.meta.get("editable", True)
        extra_editable = self.meta.get("extra_editable", None)
        if editable:
            logging.warning(f"{self.pkg_name} will be installed in editable mode.")
        with switch_working_dir(self.root_dir):
            if install_extra_only:
                src_requirements = os.path.join(self.root_dir, "requirements.txt")
                paddlex_requirements = os.path.join(
                    self.root_dir, "requirements_paddlex.txt"
                )
                shutil.copy(paddlex_requirements, src_requirements)
            try:
                install_packages_using_pip(["."], editable=editable, no_deps=no_deps)
                install_external_deps(self.name, self.root_dir)
            finally:
                if clean:
                    # Clean build artifacts
                    tmp_build_dir = os.path.join(self.root_dir, "build")
                    if os.path.exists(tmp_build_dir):
                        shutil.rmtree(tmp_build_dir)
        if extra_editable:
            with switch_working_dir(os.path.join(self.root_dir, extra_editable)):
                try:
                    install_packages_using_pip(["."], editable=True, no_deps=no_deps)
                finally:
                    if clean:
                        # Clean build artifacts
                        tmp_build_dir = os.path.join(self.root_dir, "build")
                        if os.path.exists(tmp_build_dir):
                            shutil.rmtree(tmp_build_dir)

    def uninstall_package(self):
        """uninstall_package"""
        uninstall_package_using_pip(self.pkg_name)

    def download(self):
        """download from remote"""
        download_url = f"{REPO_DOWNLOAD_BASE}{self.name}.tar"
        os.makedirs(self.repo_parent_dir, exist_ok=True)
        download_and_extract(download_url, self.repo_parent_dir, self.name)
        # reset_repo_using_git('FETCH_HEAD')

    def remove(self):
        """remove"""
        with switch_working_dir(self.repo_parent_dir):
            remove_repo_using_rm(self.name)

    def update(self, platform=None):
        """update"""
        branch = self.meta.get("branch", None)
        git_url = f"https://{platform}{self.git_path}"
        with switch_working_dir(self.root_dir):
            try:
                fetch_repo_using_git(branch=branch, url=git_url)
                reset_repo_using_git("FETCH_HEAD")
            except Exception as e:
                logging.warning(
                    f"Updating {self.name} from {git_url} failed. Please check your network connection. Error:\n{e}"
                )

    def wheel(self, dst_dir):
        """wheel"""
        with tempfile.TemporaryDirectory() as td:
            tmp_repo_dir = osp.join(td, self.name)
            tmp_dst_dir = osp.join(td, "dist")
            shutil.copytree(self.root_dir, tmp_repo_dir, symlinks=False)
            # NOTE: Installation of the repo relies on `self.main_req_file` in
            # the root directory, so we overwrite its content.
            main_req_file_path = osp.join(tmp_repo_dir, self.main_req_file)
            deps_str = self.get_deps()
            with open(main_req_file_path, "w", encoding="utf-8") as f:
                f.write(deps_str)
            install_packages_using_pip([], req_files=[main_req_file_path])
            with switch_working_dir(tmp_repo_dir):
                build_wheel_using_pip(".", tmp_dst_dir)
            shutil.copytree(tmp_dst_dir, dst_dir)

    def _get_lib(self, load=True):
        """_get_lib"""
        import importlib.util

        importlib.invalidate_caches()
        if load:
            try:
                with mute():
                    return importlib.import_module(self.lib_name)
            except ImportError:
                return None
        else:
            spec = importlib.util.find_spec(self.lib_name)
            if spec is not None and not osp.exists(spec.origin):
                return None
            else:
                return spec

    def get_pdx(self):
        """get_pdx"""
        return importlib.import_module(self.pdx_mod_name)

    def get_deps(self, install_extra_only=False):
        """get_deps"""
        # Merge requirement files
        if install_extra_only:
            req_list = []
        else:
            req_list = [self.main_req_file]
        req_list.extend(self.meta.get("extra_req_files", []))
        deps = []
        for req in req_list:
            with open(osp.join(self.root_dir, req), "r", encoding="utf-8") as f:
                deps.append(f.read())
        for dep in self.meta.get("pdx_pkg_deps", []):
            deps.append(dep)
        deps = "\n".join(deps)
        return deps

    def get_version(self):
        """get_version"""
        version_file = osp.join(self.root_dir, ".pdx_gen.version")
        with open(version_file, "r", encoding="utf-8") as f:
            lines = f.readlines()
        sta_ver = lines[0].rstrip()
        commit = lines[1].rstrip()
        ret = [sta_ver, commit]
        # TODO: Get dynamic version in a subprocess.
        ret.append(None)
        return ret

    def __str__(self):
        return f"({self.name}, {id(self)})"

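# Illustrative sketch (assumption): `PPRepository` only reads its metadata
# through `get_repo_meta(name)`, so judging from the keys accessed above, a
# meta entry is expected to look roughly like the dict below. The concrete
# values are made up for illustration; keys marked "optional" are read with
# `.get()` and may be absent:
#
#     {
#         "git_path": "/PaddlePaddle/PaddleOCR.git",  # appended to "https://{platform}" in update()
#         "pkg_name": "paddleocr",                    # pip distribution name
#         "lib_name": "paddleocr",                    # importable module name
#         "pdx_pkg_name": "PaddleOCR_api",            # submodule of the PDX collection module
#         "main_req_file": "requirements.txt",        # optional, this is the default
#         "path_env": "PADDLE_OCR_BASE_DIR",          # optional, exported in initialize()
#         "branch": "main",                           # optional, used by update()
#         "editable": True,                           # optional, default True
#         "extra_editable": None,                     # optional, extra dir installed editable
#         "extra_req_files": [],                      # optional, merged by get_deps()
#         "pdx_pkg_deps": [],                         # optional, appended by get_deps()
#     }
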
class RepositoryGroupInstaller(object):
    """RepositoryGroupInstaller"""

    def __init__(self, repos):
        super().__init__()
        self.repos = repos

    def install(self, force_reinstall=False, no_deps=False, constraints=None):
        """install"""
        # Rollback on failure is not yet supported. A failed installation
        # could leave a broken environment.
        if force_reinstall:
            self.uninstall()
        ins_flags = []
        repos = self._sort_repos(self.repos, check_missing=True)
        for repo in repos:
            if force_reinstall or not repo.check_installation():
                ins_flags.append(True)
            else:
                ins_flags.append(False)
        if not no_deps:
            # We collect the dependencies and install them all at once
            # such that we can make use of the pip resolver.
            self.install_deps(constraints=constraints)
        # XXX: For historical reasons the repo packages are sequentially
        # installed, and we have no failure rollbacks. Meanwhile, installation
        # failure of one repo package aborts the entire installation process.
        for ins_flag, repo in zip(ins_flags, repos):
            if ins_flag:
                if repo.name in ["PaddleVideo"]:
                    repo.install_package(no_deps=True, install_extra_only=True)
                else:
                    repo.install_package(no_deps=True)

    def uninstall(self):
        """uninstall"""
        repos = self._sort_repos(self.repos, check_missing=False)
        repos = repos[::-1]
        for repo in repos:
            if repo.check_installation():
                # NOTE: Dependencies are not uninstalled.
                repo.uninstall_package()

    def get_deps(self):
        """get_deps"""
        deps_list = []
        repos = self._sort_repos(self.repos, check_missing=True)
        for repo in repos:
            if repo.name in ["PaddleVideo"]:
                deps = repo.get_deps(install_extra_only=True)
            else:
                deps = repo.get_deps()
            deps = self._normalize_deps(deps, headline=f"# {repo.name} dependencies")
            deps_list.append(deps)
        # Add an extra new line to separate dependencies of different repos.
        return "\n\n".join(deps_list)

    def install_deps(self, constraints):
        """install_deps"""
        deps_str = self.get_deps()
        with tempfile.TemporaryDirectory() as td:
            req_file = os.path.join(td, "requirements.txt")
            with open(req_file, "w", encoding="utf-8") as fr:
                fr.write(deps_str)
            if constraints is not None:
                cons_file = os.path.join(td, "constraints.txt")
                with open(cons_file, "w", encoding="utf-8") as fc:
                    fc.write(constraints)
                cons_files = [cons_file]
            else:
                cons_files = []
            install_packages_using_pip([], req_files=[req_file], cons_files=cons_files)

    def _sort_repos(self, repos, check_missing=False):
        # We sort the repos to ensure that each dependency precedes its
        # dependents in the list.
        name_meta_pairs = []
        for repo in repos:
            name_meta_pairs.append((repo.name, repo.meta))

        unique_pairs = []
        hashset = set()
        for name, meta in name_meta_pairs:
            if name in hashset:
                continue
            else:
                unique_pairs.append((name, meta))
                hashset.add(name)

        sorted_repos = []
        missing_names = []
        name2repo = {repo.name: repo for repo in repos}
        for name, meta in unique_pairs:
            if name in name2repo:
                repo = name2repo[name]
                sorted_repos.append(repo)
            else:
                missing_names.append(name)

        if check_missing and len(missing_names) > 0:
            be = "is" if len(missing_names) == 1 else "are"
            raise RuntimeError(f"{missing_names} {be} required in the installation.")
        else:
            assert len(sorted_repos) == len(self.repos)
        return sorted_repos

    def _normalize_deps(self, deps, headline=None):
        repo_pkgs = set(repo.pkg_name for repo in self.repos)
        normed_lines = []
        if headline is not None:
            normed_lines.append(headline)
        for line in deps.splitlines():
            line_s = line.strip()
            if len(line_s) == 0 or line_s.startswith("#"):
                continue
            # If `line` is not a comment, it must be a requirement specifier.
            # Other forms may cause a parse error.
            n, e, v, m = to_dep_spec_pep508(line_s)
            if isinstance(v, str):
                raise RuntimeError("Currently, URL based lookup is not supported.")
            if n in repo_pkgs:
                # Skip repo packages
                continue
            elif check_installation_using_pip(n):
                continue
            else:
                line_n = [n]
                fe = f"[{','.join(e)}]" if e else ""
                if fe:
                    line_n.append(fe)
                fv = []
                for tup in v:
                    fv.append(" ".join(tup))
                fv = ", ".join(fv) if fv else ""
                if fv:
                    line_n.append(fv)
                if m is not None:
                    fm = f"; {env_marker_ast2expr(m)}"
                    line_n.append(fm)
                line_n = " ".join(line_n)
                normed_lines.append(line_n)
        return "\n".join(normed_lines)

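# Illustrative sketch (assumption): `get_deps()` joins the per-repo blocks that
# `_normalize_deps` produces, so the temporary requirements file written by
# `install_deps` should look roughly like the text below. Package names,
# versions, and the marker expression are made up for illustration; each block
# starts with the "# <repo> dependencies" headline, repo packages and already
# installed packages are dropped, and extras, version bounds, and environment
# markers are re-emitted space-separated on one line:
#
#     # PaddleOCR dependencies
#     shapely
#     pyclipper >= 1.2.0
#     imgaug ; python_version >= "3.8"
#
#     # PaddleClas dependencies
#     faiss-cpu
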
class RepositoryGroupGetter(object):
    """RepositoryGroupGetter"""

    def __init__(self, repos):
        super().__init__()
        self.repos = repos

    def get(self, force=False, platform=None):
        """clone"""
        if force:
            self.remove()
        for repo in self.repos:
            repo.download()
            repo.update(platform=platform)

    def remove(self):
        """remove"""
        for repo in self.repos:
            repo.remove()

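# Illustrative sketch (assumption): for a group of repos, the getter and the
# installer are presumably used together, e.g. to force a clean re-fetch from a
# mirror host and to pin dependency versions via a constraints string. The
# repo variables, host, and constraint text below are made up for illustration:
#
#     getter = build_repo_group_getter(ocr_repo, clas_repo)
#     installer = build_repo_group_installer(ocr_repo, clas_repo)
#     getter.get(force=True, platform="gitee.com")  # remove, re-download, and git-update each repo
#     installer.install(constraints="numpy<2\n")    # written to a temporary constraints.txt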