repo.py

# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import os.path as osp
import importlib
import tempfile
import shutil

from ..utils import logging
from .meta import get_repo_meta
from .utils import (install_packages_using_pip, clone_repos_using_git,
                    update_repos_using_git, uninstall_package_using_pip,
                    remove_repos_using_rm, check_installation_using_pip,
                    build_wheel_using_pip, mute, switch_working_dir,
                    to_dep_spec_pep508, env_marker_ast2expr)

__all__ = ['build_repo_instance', 'build_repo_group_installer']

def build_repo_instance(repo_name, *args, **kwargs):
    """ build_repo_instance """
    # XXX: Hard-code type
    repo_cls = PPRepository
    repo_instance = repo_cls(repo_name, *args, **kwargs)
    return repo_instance


def build_repo_group_installer(*repos):
    """ build_repo_group_installer """
    return RepositoryGroupInstaller(list(repos))


def build_repo_group_cloner(*repos):
    """ build_repo_group_cloner """
    return RepositoryGroupCloner(list(repos))

class PPRepository(object):
    """
    Installation, initialization, and PDX module import handler for a
    PaddlePaddle repository.
    """

    def __init__(self, name, repo_parent_dir, pdx_collection_mod):
        super().__init__()
        self.name = name
        self.repo_parent_dir = repo_parent_dir
        self.root_dir = osp.join(repo_parent_dir, self.name)
        self.meta = get_repo_meta(self.name)
        self.repo_url = self.meta['repo_url']
        self.pkg_name = self.meta['pkg_name']
        self.lib_name = self.meta['lib_name']
        self.pdx_mod_name = pdx_collection_mod.__name__ + '.' + self.meta[
            'pdx_pkg_name']
        self.main_req_file = self.meta.get('main_req_file', 'requirements.txt')

    def initialize(self):
        """ initialize """
        if not self.check_installation(quick_check=True):
            return False
        if 'path_env' in self.meta:
            # Set env var
            os.environ[self.meta['path_env']] = osp.abspath(self.root_dir)
        # NOTE: Calling `self.get_pdx()` actually loads the repo PDX package
        # and performs all registration.
        self.get_pdx()
        return True

    def check_installation(self, quick_check=False):
        """ check_installation """
        if quick_check:
            lib = self._get_lib(load=False)
            return lib is not None
        else:
            # TODO: Also check if the correct dependencies are installed.
            return check_installation_using_pip(self.pkg_name)

    def check_repo_exiting(self, quick_check=False):
        """ check_repo_exiting """
        return os.path.exists(os.path.join(self.root_dir, '.git'))

    def install(self, *args, **kwargs):
        """ install """
        return RepositoryGroupInstaller([self]).install(*args, **kwargs)

    def uninstall(self, *args, **kwargs):
        """ uninstall """
        return RepositoryGroupInstaller([self]).uninstall(*args, **kwargs)

    def install_deps(self, *args, **kwargs):
        """ install_deps """
        return RepositoryGroupInstaller([self]).install_deps(*args, **kwargs)

    def install_package(self, no_deps=False, clean=True):
        """ install_package """
        editable = self.meta.get('editable', True)
        extra_editable = self.meta.get('extra_editable', None)
        if editable:
            logging.warning(
                f"{self.pkg_name} will be installed in editable mode.")
        with switch_working_dir(self.root_dir):
            try:
                install_packages_using_pip(
                    ['.'], editable=editable, no_deps=no_deps)
            finally:
                if clean:
                    # Clean build artifacts
                    tmp_build_dir = os.path.join(self.root_dir, 'build')
                    if os.path.exists(tmp_build_dir):
                        shutil.rmtree(tmp_build_dir)
        if extra_editable:
            with switch_working_dir(
                    os.path.join(self.root_dir, extra_editable)):
                try:
                    install_packages_using_pip(
                        ['.'], editable=True, no_deps=no_deps)
                finally:
                    if clean:
                        # Clean build artifacts
                        tmp_build_dir = os.path.join(self.root_dir, 'build')
                        if os.path.exists(tmp_build_dir):
                            shutil.rmtree(tmp_build_dir)
    def uninstall_package(self):
        """ uninstall_package """
        uninstall_package_using_pip(self.pkg_name)

    def clone(self, *args, **kwargs):
        """ clone """
        return RepositoryGroupCloner([self]).clone(*args, **kwargs)

    def remove(self, *args, **kwargs):
        """ remove """
        return RepositoryGroupCloner([self]).remove(*args, **kwargs)

    def clone_repos(self, platform=None):
        """ clone_repos """
        branch = self.meta.get('branch', None)
        repo_url = f'https://{platform}{self.repo_url}'
        os.makedirs(self.repo_parent_dir, exist_ok=True)
        with switch_working_dir(self.repo_parent_dir):
            clone_repos_using_git(repo_url, branch=branch)

    def update_repos(self):
        """ update_repos """
        with switch_working_dir(self.root_dir):
            try:
                update_repos_using_git()
            except Exception:
                logging.warning(
                    f"Pulling {self.name} from {self.repo_url} failed. "
                    "Please check your network connection.")

    def remove_repos(self):
        """ remove_repos """
        with switch_working_dir(self.repo_parent_dir):
            remove_repos_using_rm(self.name)

    def wheel(self, dst_dir):
        """ wheel """
        with tempfile.TemporaryDirectory() as td:
            tmp_repo_dir = osp.join(td, self.name)
            tmp_dst_dir = osp.join(td, 'dist')
            shutil.copytree(self.root_dir, tmp_repo_dir, symlinks=False)
            # NOTE: Installation of the repo relies on `self.main_req_file`
            # in the root directory, so we overwrite its content here.
            main_req_file_path = osp.join(tmp_repo_dir, self.main_req_file)
            deps_str = self.get_deps()
            with open(main_req_file_path, 'w', encoding='utf-8') as f:
                f.write(deps_str)
            install_packages_using_pip([], req_files=[main_req_file_path])
            with switch_working_dir(tmp_repo_dir):
                build_wheel_using_pip('.', tmp_dst_dir)
            shutil.copytree(tmp_dst_dir, dst_dir)
    def _get_lib(self, load=True):
        """ _get_lib """
        import importlib.util
        importlib.invalidate_caches()
        if load:
            try:
                with mute():
                    return importlib.import_module(self.lib_name)
            except ImportError:
                return None
        else:
            spec = importlib.util.find_spec(self.lib_name)
            if spec is not None and not osp.exists(spec.origin):
                return None
            else:
                return spec

    def get_pdx(self):
        """ get_pdx """
        return importlib.import_module(self.pdx_mod_name)

    def get_deps(self):
        """ get_deps """
        # Merge requirement files
        req_list = [self.main_req_file]
        req_list.extend(self.meta.get('extra_req_files', []))
        deps = []
        for req in req_list:
            with open(osp.join(self.root_dir, req), 'r', encoding='utf-8') as f:
                deps.append(f.read())
        for dep in self.meta.get('pdx_pkg_deps', []):
            deps.append(dep)
        deps = '\n'.join(deps)
        return deps

    def get_version(self):
        """ get_version """
        version_file = osp.join(self.root_dir, '.pdx_gen.version')
        with open(version_file, 'r', encoding='utf-8') as f:
            lines = f.readlines()
        sta_ver = lines[0].rstrip()
        commit = lines[1].rstrip()
        ret = [sta_ver, commit]
        # TODO: Get dynamic version in a subprocess.
        ret.append(None)
        return ret

    def __str__(self):
        return f"({self.name}, {id(self)})"
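
# --- Illustrative usage (not part of the original file) ----------------------
# A minimal sketch of how a single PPRepository might be driven end to end
# with the helpers above. The repo name, parent directory, and
# `pdx_collection_mod` are hypothetical placeholders, not values defined in
# this module.
#
#   repo = build_repo_instance('PaddleOCR', './repos', pdx_collection_mod)
#   repo.clone(platform='github.com')   # clones https://github.com<repo_url>
#   repo.install()                      # installs deps, then the repo package
#   if repo.initialize():               # imports the PDX module, registers it
#       print(repo.get_version())
# ------------------------------------------------------------------------------
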
class RepositoryGroupInstaller(object):
    """ RepositoryGroupInstaller """

    def __init__(self, repos):
        super().__init__()
        self.repos = repos

    def install(self, force_reinstall=False, no_deps=False, constraints=None):
        """ install """
        # Rollback on failure is not yet supported. A failed installation
        # could leave a broken environment.
        if force_reinstall:
            self.uninstall()
        ins_flags = []
        repos = self._sort_repos(self.repos, check_missing=True)
        for repo in repos:
            if force_reinstall or not repo.check_installation():
                ins_flags.append(True)
            else:
                ins_flags.append(False)
        if not no_deps:
            # We collect the dependencies and install them all at once
            # so that we can make use of the pip resolver.
            self.install_deps(constraints=constraints)
        # XXX: For historical reasons the repo packages are installed
        # sequentially, and there are no failure rollbacks. Moreover, an
        # installation failure of one repo package aborts the entire
        # installation process.
        for ins_flag, repo in zip(ins_flags, repos):
            if ins_flag:
                repo.install_package(no_deps=True)

    def uninstall(self):
        """ uninstall """
        repos = self._sort_repos(self.repos, check_missing=False)
        repos = repos[::-1]
        for repo in repos:
            if repo.check_installation():
                # NOTE: Dependencies are not uninstalled.
                repo.uninstall_package()

    def update(self):
        """ update """
        for repo in self.repos:
            repo.update_repos()

    def get_deps(self):
        """ get_deps """
        deps_list = []
        repos = self._sort_repos(self.repos, check_missing=True)
        for repo in repos:
            deps = repo.get_deps()
            deps = self._normalize_deps(
                deps, headline=f"# {repo.name} dependencies")
            deps_list.append(deps)
        # Add an extra newline to separate the dependencies of different repos.
        return '\n\n'.join(deps_list)
    def install_deps(self, constraints):
        """ install_deps """
        deps_str = self.get_deps()
        with tempfile.TemporaryDirectory() as td:
            req_file = os.path.join(td, 'requirements.txt')
            with open(req_file, 'w', encoding='utf-8') as fr:
                fr.write(deps_str)
            if constraints is not None:
                cons_file = os.path.join(td, 'constraints.txt')
                with open(cons_file, 'w', encoding='utf-8') as fc:
                    fc.write(constraints)
                cons_files = [cons_file]
            else:
                cons_files = []
            install_packages_using_pip(
                [], req_files=[req_file], cons_files=cons_files)

    def _sort_repos(self, repos, check_missing=False):
        # Sort the repos so that each dependency precedes its dependents
        # in the list.
        def _parse_repo_deps(name, repo_meta):
            ret = []
            for n in repo_meta.get('requires', []):
                ret.extend(_parse_repo_deps(n, get_repo_meta(n)))
            ret.append((name, repo_meta))
            return ret

        name_meta_pairs = []
        for repo in repos:
            name_meta_pairs.extend(_parse_repo_deps(repo.name, repo.meta))

        unique_pairs = []
        hashset = set()
        for name, meta in name_meta_pairs:
            if name in hashset:
                continue
            else:
                unique_pairs.append((name, meta))
                hashset.add(name)

        sorted_repos = []
        missing_names = []
        name2repo = {repo.name: repo for repo in repos}
        for name, meta in unique_pairs:
            if name in name2repo:
                repo = name2repo[name]
                sorted_repos.append(repo)
            else:
                missing_names.append(name)
        if check_missing and len(missing_names) > 0:
            be = 'is' if len(missing_names) == 1 else 'are'
            raise RuntimeError(
                f"{missing_names} {be} required in the installation.")
        else:
            assert len(sorted_repos) == len(self.repos)
        return sorted_repos
    def _normalize_deps(self, deps, headline=None):
        repo_pkgs = set(repo.pkg_name for repo in self.repos)
        normed_lines = []
        if headline is not None:
            normed_lines.append(headline)
        for line in deps.splitlines():
            line_s = line.strip()
            if len(line_s) == 0 or line_s.startswith('#'):
                continue
            # If `line` is not a comment, it must be a requirement specifier.
            # Other forms may cause a parse error.
            n, e, v, m = to_dep_spec_pep508(line_s)
            if isinstance(v, str):
                raise RuntimeError(
                    "Currently, URL-based lookup is not supported.")
            if n in repo_pkgs:
                # Skip repo packages
                continue
            else:
                line_n = [n]
                fe = f"[{','.join(e)}]" if e else ''
                if fe:
                    line_n.append(fe)
                fv = []
                for tup in v:
                    fv.append(' '.join(tup))
                fv = ', '.join(fv) if fv else ''
                if fv:
                    line_n.append(fv)
                if m is not None:
                    fm = f"; {env_marker_ast2expr(m)}"
                    line_n.append(fm)
                line_n = ' '.join(line_n)
                normed_lines.append(line_n)
        return '\n'.join(normed_lines)

class RepositoryGroupCloner(object):
    """ RepositoryGroupCloner """

    def __init__(self, repos):
        super().__init__()
        self.repos = repos

    def clone(self, force_reclone=False, platform=None):
        """ clone """
        if force_reclone:
            self.remove()
        for repo in self.repos:
            repo.clone_repos(platform=platform)

    def remove(self):
        """ remove """
        for repo in self.repos:
            repo.remove_repos()
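
# --- Illustrative usage (not part of the original file) ----------------------
# A minimal sketch of how several repositories might be handled as a group,
# reusing the dependency-sorting and merged-requirements logic above.
# `repo_a` and `repo_b` stand for PPRepository instances built elsewhere
# (e.g. via build_repo_instance); they are hypothetical placeholders.
#
#   cloner = build_repo_group_cloner(repo_a, repo_b)
#   cloner.clone(force_reclone=False, platform='github.com')
#   installer = build_repo_group_installer(repo_a, repo_b)
#   installer.install(force_reinstall=False, no_deps=False, constraints=None)
# ------------------------------------------------------------------------------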