|
|
@@ -16,8 +16,11 @@ def main():
|
|
|
if args.source_url and args.source_url.find('://') < 0 and args.source_url.find( ':' ) > 0:
|
|
|
args.source_url = 'ssh://' + args.source_url.replace( ':', '/', 1)
|
|
|
|
|
|
- for ret in recurse_abe_submodules( args.output_dir, args.source_url, args.tag, clone_repo_cached(cache_dir=args.cache_dir)):
|
|
|
- pass
|
|
|
+ clone_repo_cached( args.output_dir, args.source_url, args.tag,
|
|
|
+ cache_dir=args.cache_dir, fetch_origin=(not args.no_fetch_origin))
|
|
|
+ for ret in recurse_abe_submodules( args.output_dir, args.source_url, args.tag):
|
|
|
+ clone_repo_cached( ret.path, ret.remote, ret.ref,
|
|
|
+ cache_dir=args.cache_dir, fetch_origin=(not args.no_fetch_origin))
|
|
|
|
|
|
|
|
|
def parser():
|
|
|
@@ -27,27 +30,39 @@ def parser():
|
|
|
p.add_argument('-q', '--quiet', action='store_true')
|
|
|
p.add_argument('-o', '--output_dir', default='.')
|
|
|
p.add_argument('-c', '--cache_dir', default=None)
|
|
|
+ p.add_argument('-n', '--no_fetch_origin', action='store_true')
|
|
|
p.add_argument('-t', '--tag', default='master')
|
|
|
p.add_argument('-s', '--source_url', default=None)
|
|
|
return p
|
|
|
|
|
|
|
|
|
-def recurse_abe_submodules(path, remote, ref=None, func=None):
|
|
|
- if func != None:
|
|
|
- yield func( path, remote, ref)
|
|
|
+def recurse_abe_submodules(path, remote, ref, func=None, parent=None):
|
|
|
+ from urllib.parse import urlparse
|
|
|
+ if remote != None:
|
|
|
+ url = urlparse(remote)
|
|
|
+ else:
|
|
|
+ url = origin_url( path )
|
|
|
|
|
|
- subm = get_abe_submodules( path )
|
|
|
+ sub = AbeSubmodule([path, url.geturl(), ref], AbeSubType.SUBMODULE, parent)
|
|
|
+ yield sub
|
|
|
+
|
|
|
+ subm = get_abe_tree( path )
|
|
|
|
|
|
import os
|
|
|
- import urllib.parse
|
|
|
- url = urllib.parse.urlparse(remote)
|
|
|
for su in subm:
|
|
|
- newpath = os.path.normpath(os.path.join( path, su.subdir))
|
|
|
- newurl = url._replace(path=su.remote).geturl()
|
|
|
- print(f"Repo: \"{path}\" has Submodule: \"{newpath}\" Type: \"{su.subtype}\" From: \"{newurl}\"") if DEBUG else None
|
|
|
- for ret in recurse_abe_submodules( newpath, newurl, su.ref, func):
|
|
|
+ newpath = os.path.normpath(os.path.join( sub.path, su.path))
|
|
|
+ newurl = su.urlparse(url)
|
|
|
+ print(f"Repo: \"{path}\" has Submodule: \"{newpath}\" Type: \"{su.subtype}\" From: \"{newurl.geturl()}\"") if DEBUG else None
|
|
|
+ for ret in recurse_abe_submodules( newpath, newurl.geturl(), su.ref, func, sub):
|
|
|
yield ret
|
|
|
|
|
|
+def origin_url(path):
|
|
|
+ from git import Repo
|
|
|
+ from urllib.parse import urlparse
|
|
|
+ repo = Repo(path)
|
|
|
+ for url in repo.remotes.origin.urls:
|
|
|
+ return urlparse(url)
|
|
|
+
|
|
|
def cache_path(cache, remote):
|
|
|
import os.path
|
|
|
for url in remote:
|
|
|
@@ -67,67 +82,72 @@ class ProgressPrinter(RemoteProgress):
|
|
|
max_count_int = round(max_count or 100)
|
|
|
print(f"\033[2K{cur_count_int}/{max_count_int}", str(round(100*cur_count / (max_count or 100)))+'%', end="\r")
|
|
|
|
|
|
-def clone_repo_cached( cache_dir=None, bare=False):
|
|
|
- def clone_repo(path, remote, ref=None, cache_dir=cache_dir, bare=bare):
|
|
|
- print('Repo:', path) if DEBUG>1 else None
|
|
|
- from git import Repo
|
|
|
- repo = Repo.init(path, bare=bare)
|
|
|
- if 'origin' in repo.remotes:
|
|
|
- origin = repo.remotes['origin']
|
|
|
+def clone_repo_cached( path, remote=None, ref=None,
|
|
|
+ cache_dir=None, bare=False, fetch_origin=True):
|
|
|
+ print('Repo:', path) if DEBUG>1 else None
|
|
|
+ from git import Repo
|
|
|
+ repo = Repo.init(path, bare=bare)
|
|
|
+ if 'origin' in repo.remotes:
|
|
|
+ origin = repo.remotes['origin']
|
|
|
+ else:
|
|
|
+ origin = repo.create_remote('origin', remote)
|
|
|
+
|
|
|
+ if cache_dir != None:
|
|
|
+ cache_repo_path = cache_path(cache_dir, origin.urls)
|
|
|
+        print(f"Caching from: {cache_repo_path}") if DEBUG else None
|
|
|
+ clone_repo_cached(
|
|
|
+ cache_repo_path,
|
|
|
+ remote = remote,
|
|
|
+ ref = ref,
|
|
|
+ cache_dir = None,
|
|
|
+ bare = True,
|
|
|
+ fetch_origin = fetch_origin)
|
|
|
+ if 'cache' in repo.remotes:
|
|
|
+ cache = repo.remotes['cache']
|
|
|
else:
|
|
|
- origin = repo.create_remote('origin', remote)
|
|
|
-
|
|
|
- if cache_dir != None:
|
|
|
- cache_repo_path = cache_path(cache_dir, origin.urls)
|
|
|
- print(f"Cacheing from: {cache_repo_path}") if DEBUG else None
|
|
|
- clone_repo_cached(cache_dir=None, bare=True)( cache_repo_path, remote, ref)
|
|
|
- if 'cache' in repo.remotes:
|
|
|
- cache = repo.remotes['cache']
|
|
|
+ cache = repo.create_remote('cache', real_relpath( cache_repo_path, path))
|
|
|
+ cache.set_url("no_push" , '--push')
|
|
|
+ cache.fetch(refspec="+refs/remotes/origin/*:refs/remotes/origin/*",progress=ProgressPrinter())
|
|
|
+ print()
|
|
|
+ elif fetch_origin:
|
|
|
+ origin.fetch(progress=ProgressPrinter())
|
|
|
+ print()
|
|
|
+
|
|
|
+ if not bare:
|
|
|
+ print('Refs:', origin.refs) if DEBUG>1 else None
|
|
|
+ print('Heads:', repo.heads) if DEBUG>1 else None
|
|
|
+
|
|
|
+ if ref in repo.tags:
|
|
|
+ tracking_branch_name = 'local_tag_branch/'+ref
|
|
|
+ tracking_ref = repo.tags[ref]
|
|
|
+ if tracking_branch_name in repo.heads:
|
|
|
+ active_branch = repo.heads[tracking_branch_name]
|
|
|
else:
|
|
|
- cache = repo.create_remote('cache', real_relpath( cache_repo_path, path))
|
|
|
- cache.set_url("no_push" , '--push')
|
|
|
- cache.fetch(refspec="+refs/remotes/origin/*:refs/remotes/origin/*",progress=ProgressPrinter())
|
|
|
- print()
|
|
|
+ active_branch = repo.create_head('local_tag_branch/'+ref, tracking_ref)
|
|
|
+ elif ref in repo.heads:
|
|
|
+ tracking_ref = origin.refs[ref]
|
|
|
+ active_branch = repo.heads[ref]
|
|
|
+ active_branch.set_tracking_branch(tracking_ref)
|
|
|
+ elif ref in origin.refs:
|
|
|
+ tracking_ref = origin.refs[ref]
|
|
|
+ active_branch = repo.create_head(ref, tracking_ref)
|
|
|
+ active_branch.set_tracking_branch(tracking_ref)
|
|
|
+ elif ref in origin.refs:
|
|
|
+ tracking_ref = origin.refs[ref]
|
|
|
+ active_branch = repo.create_head(ref, tracking_ref)
|
|
|
+ active_branch.set_tracking_branch(tracking_ref)
|
|
|
else:
|
|
|
- origin.fetch(progress=ProgressPrinter())
|
|
|
- print()
|
|
|
-
|
|
|
- if not bare:
|
|
|
- print('Refs:', origin.refs) if DEBUG>1 else None
|
|
|
- print('Heads:', repo.heads) if DEBUG>1 else None
|
|
|
-
|
|
|
- if ref in repo.tags:
|
|
|
- tracking_branch_name = 'local_tag_branch/'+ref
|
|
|
- tracking_ref = repo.tags[ref]
|
|
|
- if tracking_branch_name in repo.heads:
|
|
|
- active_branch = repo.heads[tracking_branch_name]
|
|
|
- else:
|
|
|
- active_branch = repo.create_head('local_tag_branch/'+ref, tracking_ref)
|
|
|
- elif ref in repo.heads:
|
|
|
- tracking_ref = origin.refs[ref]
|
|
|
- active_branch = repo.heads[ref]
|
|
|
- active_branch.set_tracking_branch(tracking_ref)
|
|
|
- elif ref in origin.refs:
|
|
|
- tracking_ref = origin.refs[ref]
|
|
|
- active_branch = repo.create_head(ref, tracking_ref)
|
|
|
- active_branch.set_tracking_branch(tracking_ref)
|
|
|
- elif ref in origin.refs:
|
|
|
- tracking_ref = origin.refs[ref]
|
|
|
- active_branch = repo.create_head(ref, tracking_ref)
|
|
|
- active_branch.set_tracking_branch(tracking_ref)
|
|
|
- else:
|
|
|
- try:
|
|
|
- tracking_ref = ref
|
|
|
- tracking_branch_name = 'local_commit_branch/'+ref
|
|
|
- active_branch = repo.create_head(tracking_branch_name, tracking_ref)
|
|
|
- except Exception:
|
|
|
- raise Exception(f"Branch/Tag/Commit \"{ref}\" not found")
|
|
|
- print('Active Branch:', active_branch) if DEBUG else None
|
|
|
- print('Tracking Ref:', tracking_ref, '\n') if DEBUG else None
|
|
|
- active_branch.checkout()
|
|
|
- repo.head.reset( tracking_ref, index=True, working_tree=False)
|
|
|
- return repo
|
|
|
- return clone_repo
|
|
|
+ try:
|
|
|
+ tracking_ref = ref
|
|
|
+ tracking_branch_name = 'local_commit_branch/'+ref
|
|
|
+ active_branch = repo.create_head(tracking_branch_name, tracking_ref)
|
|
|
+ except Exception:
|
|
|
+ raise Exception(f"Branch/Tag/Commit \"{ref}\" not found")
|
|
|
+ print('Active Branch:', active_branch) if DEBUG else None
|
|
|
+ print('Tracking Ref:', tracking_ref, '\n') if DEBUG else None
|
|
|
+ active_branch.checkout()
|
|
|
+ repo.head.reset( tracking_ref, index=True, working_tree=False)
|
|
|
+ return repo
|
|
|
|
|
|
|
|
|
from enum import Enum
|
|
|
@@ -139,36 +159,66 @@ class AbeSubType(Enum):
|
|
|
|
|
|
class AbeSubmodule():
|
|
|
subtype = AbeSubType.SUBMODULE
|
|
|
- def __init__(self, sub_info, subtype=None):
|
|
|
+ def __init__(self, sub_info, subtype=None, parent=None):
|
|
|
if subtype != None:
|
|
|
self.subtype = subtype
|
|
|
- self.subdir = sub_info[0]
|
|
|
+ self.path = sub_info[0]
|
|
|
self.remote = sub_info[1]
|
|
|
self.ref = sub_info[2]
|
|
|
+ self.parent = parent
|
|
|
if self.subtype == AbeSubType.BSP:
|
|
|
self.remote = 'bsp/' + self.remote
|
|
|
if self.subtype == AbeSubType.BOOTLOADER:
|
|
|
self.remote = 'bootloader/' + self.remote
|
|
|
|
|
|
def __repr__(self):
|
|
|
- return f"AbeSubmodule(['{self.subdir}','{self.remote}','{self.ref}'], {self.subtype})"
|
|
|
+ return f"AbeSubmodule(['{self.path}','{self.remote}','{self.ref}'], {self.subtype}, {self.parent})"
|
|
|
+
|
|
|
+ def urlparse(self, parent_url):
|
|
|
+ from urllib.parse import urlparse
|
|
|
+ suurl = urlparse(self.remote)
|
|
|
+ newurl = parent_url._replace(path=suurl.path)
|
|
|
+ if suurl.netloc:
|
|
|
+ newurl = newurl._replace(netloc=suurl.netloc)
|
|
|
+ if suurl.scheme:
|
|
|
+ newurl = newurl._replace(scheme=suurl.scheme)
|
|
|
+ return newurl
|
|
|
+
|
|
|
+def get_abe_tree(repo_dir):
|
|
|
+ import itertools
|
|
|
+ subfile_generators = [
|
|
|
+ get_abe_bsps(repo_dir),
|
|
|
+ get_abe_bootloaders(repo_dir),
|
|
|
+ get_abe_submodules(repo_dir),
|
|
|
+ ]
|
|
|
+ return itertools.chain(*subfile_generators)
|
|
|
|
|
|
+def get_abe_bsps(repo_dir):
|
|
|
+ import os
|
|
|
+ bspfile_path = os.path.join(repo_dir, '.abe', 'bsps')
|
|
|
+
|
|
|
+ if os.path.isfile(bspfile_path):
|
|
|
+ return parse_abe_subfile(bspfile_path, subtype=AbeSubType.BSP)
|
|
|
+ return iter(())
|
|
|
+
|
|
|
+def get_abe_bootloaders(repo_dir):
|
|
|
+ import os
|
|
|
+ bootloaderfile_path = os.path.join(repo_dir, '.abe', 'bootloaders')
|
|
|
+
|
|
|
+ if os.path.isfile(bootloaderfile_path):
|
|
|
+ return parse_abe_subfile(bootloaderfile_path, subtype=AbeSubType.BOOTLOADER)
|
|
|
+ return iter(())
|
|
|
|
|
|
def get_abe_submodules(repo_dir):
|
|
|
import os
|
|
|
import itertools
|
|
|
subfile_generators = []
|
|
|
subfile_path = os.path.join(repo_dir, '.abe', 'submodules')
|
|
|
+
|
|
|
if os.path.isfile(subfile_path):
|
|
|
- subfile_generators.append(parse_abe_subfile(subfile_path, subtype=AbeSubType.SUBMODULE))
|
|
|
- bspfile_path = os.path.join(repo_dir, '.abe', 'bsps')
|
|
|
- if os.path.isfile(bspfile_path):
|
|
|
- subfile_generators.append(parse_abe_subfile(bspfile_path, subtype=AbeSubType.BSP))
|
|
|
- bootloaderfile_path = os.path.join(repo_dir, '.abe', 'bootloaders')
|
|
|
- if os.path.isfile(bootloaderfile_path):
|
|
|
- subfile_generators.append(parse_abe_subfile(bootloaderfile_path, subtype=AbeSubType.BOOTLOADER))
|
|
|
- return itertools.chain(*subfile_generators)
|
|
|
-
|
|
|
+ return parse_abe_subfile(subfile_path, subtype=AbeSubType.SUBMODULE)
|
|
|
+ return iter(())
|
|
|
+
|
|
|
|
|
|
def parse_abe_subfile(subfile_path, subtype=AbeSubType.SUBMODULE):
|
|
|
with open(subfile_path) as subfile:
|