#!/usr/bin/env python3
#
# A script to set up a working tree from abe git submodules

DEBUG = 1
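
# Typical invocation (illustrative only; option names come from parser() below,
# the paths and URL are hypothetical):
#
#   ./abe_setup.py -o work/abe -s ssh://git@example.com/abe.git -t master -j 4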

def main():
    global DEBUG
    p = parser()
    args = p.parse_args()
    DEBUG = args.debug
    DEBUG = DEBUG - (1 if args.quiet else 0)
    print(args) if DEBUG > 1 else None
    count_submodules = 0
    last_parent = None
    jobs = []
    if args.abe_command:
        executeAbeCommand(args.output_dir, args.abe_command)
    else:
        subs = abe_submodules(
            args.output_dir, args.source_url, args.tag,
            recurse_bsps=args.recurse_bsps,
            recurse_bootloaders=args.recurse_bootloaders)
        if args.execute_command:
            from subprocess import run, CalledProcessError
            import sys
            for sub in subs:
                count_submodules += 1
                if DEBUG > 0:
                    print("Count:", count_submodules)
                try:
                    run(args.execute_command, shell=True, check=True, cwd=sub.fullpath)
                except CalledProcessError as err:
                    print(f"Command: \"{args.execute_command}\" in \"{sub.fullpath}\" returned exitcode \"{err.returncode}\"")
                    sys.exit(err.returncode)
        else:
            clone_subs_threaded(subs, jobs=args.jobs,
                                kwargs={'cache_dir': args.cache_dir,
                                        'fetch_origin': (not args.no_fetch_origin)})

def clone_subs_threaded(subs, jobs=0, kwargs={}):
    import multiprocessing
    import time
    if jobs < 1:
        jobs = multiprocessing.cpu_count()
    with multiprocessing.Pool(processes=jobs) as pool:
        jobs = []
        for sub in subs:
            job = pool.apply_async(sub.clone_repo, (), kwargs)
            jobs.append(job)
            if sub.last:
                # The last submodule of a repository has been queued: wait for
                # all outstanding clones, because the subs generator reads the
                # .abe/* files of these repositories as soon as it resumes.
                while not jobs == []:
                    j = jobs.pop(0)
                    j.get()
        while not jobs == []:
            j = jobs.pop(0)
            j.get()
    # Disabled alternative implementation, kept for reference:
    '''
    for j in jobs:
        if not j.is_alive():
            j.join()
            to_remove = j
        if to_remove:
            jobs.remove(to_remove)
        else:
            time.sleep(1)
        if sub.last:
            while len(jobs):
                jobs.pop().join()
    '''

def parser():
    import argparse
    p = argparse.ArgumentParser()
    p.add_argument('-d', '--debug', action='count', default=1)
    p.add_argument('-j', '--jobs',
                   default=1,
                   type=int,
                   help='use threaded clone with n jobs')
    p.add_argument('-q', '--quiet', action='store_true')
    p.add_argument('-o', '--output_dir',
                   default='.',
                   help='local directory to operate on')
    p.add_argument('-c', '--cache_dir',
                   default=None,
                   help='use a separate local directory to cache git repositories')
    p.add_argument('-n', '--no_fetch_origin',
                   action='store_true',
                   help='prevent fetching from origin')
    p.add_argument('-t', '--tag',
                   default='master',
                   help='commit/branch/tag to be checked out')
    p.add_argument('-s', '--source_url',
                   default=None,
                   help='url to be used for origin remote')
    p.add_argument('--recurse_bootloaders',
                   action='store_true',
                   help='also recurse into .abe/bootloaders')
    p.add_argument('--recurse_bsps',
                   action='store_true',
                   help='also recurse into .abe/bsps')
    p.add_argument('-a', '--abe_command',
                   default=None,
                   help="execute an ABE command in output_dir")
    p.add_argument('-x', '--execute_command',
                   default=None,
                   help='execute command in subdirs recursively')
    return p

def executeAbeCommand(repo_path, command_name):
    import os
    from subprocess import run, CalledProcessError
    commandFileRel = os.path.join('.abe', 'commands', command_name + ".cmd")
    commandFile = os.path.join(repo_path, commandFileRel)
    if os.path.isfile(commandFile):
        try:
            proc = run(['bash', '-c', f"set -e;./{commandFileRel}"], check=True, cwd=repo_path)
        except CalledProcessError as err:
            print(commandFileRel, 'in', repo_path, 'returned exitcode', err.returncode)
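
# Hypothetical example: "./abe_setup.py -a build" runs ".abe/commands/build.cmd"
# (via "bash -c 'set -e;...'") inside the output directory; the command name
# "build" is illustrative, not a command this script defines.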

def abe_submodules(path, remote, ref, **kwargs):
    url = sane_origin_url(remote, path)
    sub = AbeSubmodule([path, url.geturl(), ref], AbeSubType.SUBMODULE)
    yield sub
    for s in recurse_abe_submodules(sub, **kwargs):
        yield s


def recurse_abe_submodules(parent, **kwargs):
    subm = get_abe_subtree(parent.fullpath, **kwargs)
    acc = []
    last = None
    for su in subm:
        su.parent = parent
        print(f"Repo: \"{parent.fullpath}\" has Submodule: \"{su.fullpath}\" Type: \"{su.subtype}\" From: \"{su.url.geturl()}\" Ref: \"{su.ref}\"") if DEBUG else None
        acc.append(su)
        # Every submodule except the final one is yielded with last=False; the
        # final one is yielded with last=True so clone_subs_threaded can
        # synchronise before the recursion below reads the cloned trees.
        if last != None:
            last.last = False
            yield last
        last = su
    if last != None:
        last.last = True
        yield last
    for sub in acc:
        for ret in recurse_abe_submodules(sub):
            yield ret

def sane_origin_url(input_url=None, path=None):
    from urllib.parse import urlparse
    print("Source Url:", input_url) if DEBUG > 1 else None
    if input_url:
        if hasattr(input_url, "geturl"):
            return input_url
        if input_url.find('://') < 0 and input_url.find(':') > 0:
            # Rewrite scp-like syntax, e.g. "git@host:path/repo.git", as an
            # ssh:// URL so urlparse can handle it.
            source_url = 'ssh://' + input_url.replace(':', '/', 1)
        else:
            source_url = input_url
    else:
        source_url = origin_url(path)
    return urlparse(source_url)


def origin_url(path):
    from git import Repo
    repo = Repo(path)
    for url in repo.remotes.origin.urls:
        return url

from git import RemoteProgress
import sys


class ProgressPrinter(RemoteProgress):
    def update(self, op_code, cur_count, max_count=None, message=''):
        if DEBUG:
            cur_count_int = round(cur_count)
            if not max_count:
                div = f"{cur_count_int}/" + ''.join(['?' for _ in range(len(str(cur_count_int)))])
                perc = '???'
            else:
                max_count_int = round(max_count)
                div = f"{cur_count_int}/{max_count_int}"
                perc = round(100 * cur_count / (max_count or 100))
            print(f"\033[2K" + div, f"{perc:>3}%", message, end='\r')
            sys.stdout.flush()

def clone_repo_cached(sub, cache_dir=None, bare=False, fetch_origin=True):
    print('Repo:', sub.path) if DEBUG > 1 else None
    from git import Repo
    ref = sub.ref
    url = sub.url.geturl()
    repo = Repo.init(sub.fullpath, bare=bare)
    if 'origin' in repo.remotes:
        origin = repo.remotes['origin']
    else:
        origin = repo.create_remote('origin', url)
    if cache_dir != None:
        # Clone/update a bare copy under cache_dir first, then fetch the
        # origin refs into the working tree from that local cache.
        cache_repo_path = cache_path(cache_dir, origin.urls)
        cache_link = real_relpath(cache_repo_path, sub.fullpath)
        print(f"Caching from: {cache_repo_path}, origin: {url}") if DEBUG else None
        cache_repo = AbeSubmodule((cache_repo_path, url, sub.ref), AbeSubType.SUBMODULE, parent=None)
        clone_repo_cached(
            cache_repo,
            cache_dir=None,
            bare=True,
            fetch_origin=fetch_origin)
        if 'cache' in repo.remotes:
            cache = repo.remotes['cache']
        else:
            print(f"Setting up cache: {cache_link}, for Repo: {sub.fullpath}") if DEBUG > 2 else None
            cache = repo.create_remote('cache', cache_link)
            # cache.set_url("no_push", '--push')
        cache.fetch(refspec="+refs/remotes/origin/*:refs/remotes/origin/*", progress=ProgressPrinter())
        print()
    elif fetch_origin or (
            (ref not in origin.refs) and
            (ref not in repo.tags)):
        origin.fetch(progress=ProgressPrinter())
        print()
    if not bare:
        print('Refs:', origin.refs) if DEBUG > 1 else None
        print('Heads:', repo.heads) if DEBUG > 1 else None
        if ref in repo.tags:
            tracking_branch_name = 'local_tag_branch/' + ref
            tracking_ref = repo.tags[ref]
            if tracking_branch_name in repo.heads:
                active_branch = repo.heads[tracking_branch_name]
            else:
                active_branch = repo.create_head(tracking_branch_name, tracking_ref)
        elif ref in repo.heads:
            tracking_ref = origin.refs[ref]
            active_branch = repo.heads[ref]
            active_branch.set_tracking_branch(tracking_ref)
        elif ref in origin.refs:
            tracking_ref = origin.refs[ref]
            active_branch = repo.create_head(ref, tracking_ref)
            active_branch.set_tracking_branch(tracking_ref)
        else:
            # ref is neither a tag, a local head nor a remote ref: assume it
            # is a commit and create a local branch pointing at it.
            tracking_ref = ref
            tracking_branch_name = 'local_commit_branch/' + ref
            if tracking_branch_name in repo.heads:
                active_branch = repo.heads[tracking_branch_name]
            else:
                try:
                    active_branch = repo.create_head(tracking_branch_name, tracking_ref)
                except Exception:
                    raise Exception(f"Branch/Tag/Commit \"{ref}\" not found")
        print('Active Branch:', active_branch) if DEBUG else None
        print('Tracking Ref:', tracking_ref, '\n') if DEBUG else None
        active_branch.checkout()
        repo.head.reset(tracking_ref, index=True, working_tree=False)
    return repo
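
# Layout of the local cache (hypothetical URL): cache_path() maps e.g.
#   ssh://git@example.com/team/repo.git
# to a bare repository at
#   <cache_dir>/git_example.com/team/repo.git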

def cache_path(cache, remote):
    import os.path
    from urllib.parse import urlparse
    for url in remote:
        sane_url = urlparse(url)
        sane_url = sane_url._replace(
            path=str(sane_url.path).lstrip('/'),
            netloc=str(sane_url.netloc).replace('/', '_').replace('@', '_').replace(':', '_')
        )
        return os.path.join(cache, sane_url.netloc, sane_url.path)


def real_relpath(dest, source='.'):
    import os.path
    real_dest = os.path.realpath(dest)
    real_source = os.path.realpath(source)
    return os.path.relpath(real_dest, real_source)

from enum import Enum


class AbeSubType(Enum):
    SUBMODULE = 1
    BOOTLOADER = 2
    BSP = 3
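
# AbeSubmodule.url resolves a submodule's remote against its parent's URL:
# scheme and netloc are inherited unless the entry provides its own, only the
# path is replaced. Hypothetical example: a parent at
# "ssh://git@example.com/abe/manifest.git" with a child remote of
# "/abe/newlib.git" resolves to "ssh://git@example.com/abe/newlib.git".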

class AbeSubmodule():
    subtype = AbeSubType.SUBMODULE
    last = True

    def __init__(self, sub_info, subtype=None, parent=None):
        if subtype != None:
            self.subtype = subtype
        self.path = sub_info[0]
        self.remote = sub_info[1]
        self.ref = sub_info[2]
        self.parent = parent
        if self.subtype == AbeSubType.BSP:
            self.remote = 'bsp/' + self.remote
        if self.subtype == AbeSubType.BOOTLOADER:
            self.remote = 'bootloader/' + self.remote

    def __repr__(self):
        return f"AbeSubmodule(['{self.path}','{self.remote}','{self.ref}'], {self.subtype}, {self.parent})"

    @property
    def url(self):
        from urllib.parse import urlparse
        if not self.parent:
            return urlparse(self.remote)
        surl = urlparse(self.remote)
        newurl = self.parent.url._replace(path=surl.path)
        if surl.netloc:
            newurl = newurl._replace(netloc=surl.netloc)
        if surl.scheme:
            newurl = newurl._replace(scheme=surl.scheme)
        return newurl

    @property
    def fullpath(self):
        if not self.parent:
            return self.path
        import os.path
        return os.path.normpath(os.path.join(self.parent.fullpath, self.path))

    def clone_repo(self, cache_dir=None, fetch_origin=True):
        return clone_repo_cached(self, cache_dir=cache_dir, fetch_origin=fetch_origin)

def get_abe_subtree(repo_dir, recurse_bsps=False, recurse_bootloaders=False):
    import itertools
    subfile_generators = [
        get_abe_submodules(repo_dir),
    ]
    if recurse_bootloaders:
        subfile_generators.append(get_abe_bootloaders(repo_dir))
    if recurse_bsps:
        subfile_generators.append(get_abe_bsps(repo_dir))
    return itertools.chain(*subfile_generators)


def get_abe_bsps(repo_dir):
    import os
    bspfile_path = os.path.join(repo_dir, '.abe', 'bsps')
    if os.path.isfile(bspfile_path):
        return parse_abe_subfile(bspfile_path, subtype=AbeSubType.BSP)
    return iter(())


def get_abe_bootloaders(repo_dir):
    import os
    bootloaderfile_path = os.path.join(repo_dir, '.abe', 'bootloaders')
    if os.path.isfile(bootloaderfile_path):
        return parse_abe_subfile(bootloaderfile_path, subtype=AbeSubType.BOOTLOADER)
    return iter(())


def get_abe_submodules(repo_dir):
    import os
    subfile_path = os.path.join(repo_dir, '.abe', 'submodules')
    if os.path.isfile(subfile_path):
        return parse_abe_subfile(subfile_path, subtype=AbeSubType.SUBMODULE)
    return iter(())

def parse_abe_subfile(subfile_path, subtype=AbeSubType.SUBMODULE):
    with open(subfile_path) as subfile:
        for line in subfile.readlines():
            strline = line.strip()
            if strline.startswith('#'):
                continue
            sline = strline.split()
            if len(sline) < 3:
                continue
            print('Submodule:', sline) if DEBUG > 1 else None
            sub = AbeSubmodule(sline, subtype)
            yield sub
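
# The .abe/submodules, .abe/bsps and .abe/bootloaders files parsed above hold
# whitespace-separated "<path> <remote> <ref>" entries; lines starting with '#'
# are comments. Hypothetical example entry:
#
#   newlib  /abe/newlib.git  master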

if __name__ == '__main__':
    main()