#!/usr/bin/env python3
"""Mirror every repository advertised by 'ssh <source> info' into a bare local clone."""
import os
import sys
from subprocess import run, Popen, PIPE

gitlab_api_token = "t9cAod1DpruzessGg8bq"  # currently unused
local_base = "./repositories"
COM = sys.argv[0]


def parser():
    from argparse import ArgumentParser
    p = ArgumentParser()
    p.add_argument("source",
                   help="git origin that will be used to fetch repos. Should support 'ssh <source> info'")
    p.add_argument("-b", "--base", default=local_base,
                   help="local basedir for mirror")
    p.add_argument("-t", "--threads", default=4, type=int,
                   help="number of worker threads")
    return p


def main():
    p = parser()
    args = p.parse_args()
    repos = repos_from_lines(git_info(args.source))
    repo_obj = [Repo(args.source, r, args.base) for r in repos]
    # Drain the generator so that every repository gets pulled.
    for _ in map_threaded(
            lambda r: r.pull(),
            repo_obj,
            threads=args.threads,
            debug=lambda cur, tot, arg: print(f"{COM}: {cur}/{tot}: {arg}"),
    ):
        pass


class Repo():
    def __init__(self, remote, path, base):
        self.remote = remote
        self.path = path
        self.base = base

    @property
    def origin(self):
        return f"{self.remote}:{self.path}"

    @property
    def local_path(self):
        return os.path.join(self.base, self.remote, self.path + ".git")

    @property
    def basename(self):
        return self.path_split[-1]

    @property
    def path_split(self):
        return self.path.split("/")

    @property
    def namespace(self):
        return self.path_split[:-1]

    def pull(self):
        # Initialise the bare clone on the first run, then fetch.
        if not os.path.isdir(os.path.join(self.local_path, "refs", "remotes", "origin")):
            self.init()
        self.fetch()
        return True

    def fetch(self):
        proc = run(["git", "fetch", "-p", "origin", "refs/*:refs/*"], cwd=self.local_path)
        if proc.returncode != 0:
            print(f"Error: {proc}, {self.local_path}")
            _rm(self.local_path)

    def init(self):
        _rm(self.local_path)
        run(["mkdir", "-p", self.local_path])
        proc1 = run(["git", "init", "--bare"], cwd=self.local_path)
        proc2 = run(["git", "remote", "add", "origin", self.origin], cwd=self.local_path)
        if proc1.returncode != 0 or proc2.returncode != 0:
            print(f"Error: {proc1}, {proc2}, {self.local_path}")
            _rm(self.local_path)

    def __repr__(self):
        return f"Repo('{self.remote}','{self.path}','{self.base}')"


def git_info(source_server):
    """Yield the lines printed by 'ssh <source_server> info'."""
    proc = Popen(
        ["ssh", source_server, "info"],
        stdout=PIPE,
        stderr=PIPE,
        encoding="utf-8")
    for line in iter(proc.stdout.readline, ""):
        yield line
    proc.wait()
    print("git_info: done")


def repos_from_lines(lines):
    """Extract repository paths (the third field) from 'info' output lines."""
    for line in lines:
        if "gitolite" in line:
            continue
        fields = line.split()
        if len(fields) < 3:
            continue
        yield fields[2]


def map_threaded(target, args=(), threads=4, debug=None):
    """
    :param target: A function to be mapped.
    :param args: An iterable (list or generator) to be mapped.
    :param threads: Number of worker threads.
    :param debug: Optional callback debug(current, estimated_total, arg).
    :return: Generator of function return values, not necessarily in input order.
    """
    import threading
    import queue

    inq = queue.Queue()
    outq = queue.Queue()
    jobs = []
    done = object()  # unique marker a worker emits once it has finished

    def worker():
        while True:
            num, arg = inq.get()     # blocking get instead of busy-waiting
            if num is None:          # stop sentinel from filler()
                outq.put(done)
                break
            if debug:
                # "total" is only an estimate: items handed out so far plus items still queued.
                debug(num, num + inq.qsize(), arg)
            outq.put(target(arg))

    for _ in range(threads):
        th = threading.Thread(target=worker)
        th.start()
        jobs.append(th)

    def filler():
        # Feed the input queue from its own thread so that args may be a slow generator.
        i = 0
        for arg in args:
            i += 1
            inq.put((i, arg))
        for _ in jobs:
            inq.put((None, None))    # one stop sentinel per worker

    fill_thread = threading.Thread(target=filler)
    fill_thread.start()

    # Yield results until every worker has reported completion; each worker puts
    # all of its results before its completion marker, so nothing is lost.
    finished = 0
    while finished < len(jobs):
        item = outq.get()
        if item is done:
            finished += 1
        else:
            yield item

    fill_thread.join()
    for job in jobs:
        job.join()


def _rm(di):
    run(["rm", "-rf", di])


if __name__ == "__main__":
    main()
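
# Illustrative usage of map_threaded: the helper is independent of the git
# mirroring code above, so it can be exercised on simple inputs. The name
# _demo_map_threaded is a hypothetical helper and is never called by main().
def _demo_map_threaded():
    # Results are yielded in completion order, which may differ from input order.
    for square in map_threaded(lambda n: n * n, range(10), threads=2):
        print(square)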