@@ -13,12 +13,11 @@ def main():
print(args) if DEBUG>1 else None
- if args.source_url and args.source_url.find('://') < 0 and args.source_url.find( ':' ) > 0:
- args.source_url = 'ssh://' + args.source_url.replace( ':', '/', 1)
+ source_url = sane_origin_url( args.source_url, args.output_dir )
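+    # source_url is a parsed URL: the cloner gets the string form (geturl),
+    # the submodule walk below gets the parsed form.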
- clone_repo_cached( args.output_dir, args.source_url, args.tag,
+ clone_repo_cached( args.output_dir, source_url.geturl(), args.tag,
cache_dir=args.cache_dir, fetch_origin=(not args.no_fetch_origin))
- for ret in recurse_abe_submodules( args.output_dir, args.source_url, args.tag):
+ for ret in recurse_abe_submodules( args.output_dir, source_url, args.tag):
clone_repo_cached( ret.path, ret.remote, ret.ref,
cache_dir=args.cache_dir, fetch_origin=(not args.no_fetch_origin))
@@ -36,12 +35,9 @@ def parser():
return p
-def recurse_abe_submodules(path, remote, ref, func=None, parent=None):
+def recurse_abe_submodules(path, remote, ref, parent=None):
from urllib.parse import urlparse
- if remote != None:
- url = urlparse(remote)
- else:
- url = origin_url( path )
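+    # remote may be a plain string, an already-parsed URL, or None; sane_origin_url handles all three.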
+ url = sane_origin_url( remote, path )
sub = AbeSubmodule([path, url.geturl(), ref], AbeSubType.SUBMODULE, parent)
yield sub
@@ -53,15 +49,33 @@ def recurse_abe_submodules(path, remote, ref, func=None, parent=None):
newpath = os.path.normpath(os.path.join( sub.path, su.path))
newurl = su.urlparse(url)
print(f"Repo: \"{path}\" has Submodule: \"{newpath}\" Type: \"{su.subtype}\" From: \"{newurl.geturl()}\"") if DEBUG else None
- for ret in recurse_abe_submodules( newpath, newurl.geturl(), su.ref, func, sub):
+ for ret in recurse_abe_submodules( newpath, newurl, su.ref, sub):
yield ret
+
+def sane_origin_url(input_url = None, path = None):
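+    # Return the remote as a urllib ParseResult, whether it was passed in
+    # already parsed, as a plain string, or not at all.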
+ from urllib.parse import urlparse
+ print("Source Url:", input_url) if DEBUG>1 else None
+
+ if input_url:
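+        # Already parsed (has geturl): hand it back unchanged.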
+ if hasattr(input_url, "geturl"):
+ return input_url
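+        # scp-style "host:path" has no scheme; rewrite it as ssh://host/path so urlparse can split it.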
+ if input_url.find('://') < 0 and input_url.find( ':' ) > 0:
+ source_url = 'ssh://' + input_url.replace( ':', '/', 1)
+ else:
+ source_url = input_url
+ else:
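+        # No URL supplied: fall back to the origin remote of the clone at path.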
+ source_url = origin_url( path )
+ return urlparse(source_url)
+
+
def origin_url(path):
from git import Repo
from urllib.parse import urlparse
repo = Repo(path)
for url in repo.remotes.origin.urls:
- return urlparse(url)
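+        # Return the raw URL string; sane_origin_url does the parsing now.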
+        print("Origin Url:", url) if DEBUG>1 else None
+        return url
def cache_path(cache, remote):
import os.path
@@ -94,7 +108,7 @@ def clone_repo_cached( path, remote=None, ref=None,
if cache_dir != None:
cache_repo_path = cache_path(cache_dir, origin.urls)
- print(f"Cacheing from: {cache_repo_path}") if DEBUG else None
+        print(f"Caching from: {cache_repo_path}, origin: {remote}") if DEBUG else None
clone_repo_cached(
cache_repo_path,
remote = remote,