Ver Fonte

tried multiprocessing

Tobias Simetsreiter há 5 anos
pai
commit
5f4b6f27a6
1 ficheiro alterado com 44 adições e 25 exclusões
  1. 44 25
      abe_setup.py

+ 44 - 25
abe_setup.py

@@ -18,18 +18,21 @@ def main():
     last_parent = None
     jobs = []
 
+
     if args.abe_command:
         executeAbeCommand( args.output_dir, args.abe_command )
     else:
-        for sub in abe_submodules(
+        subs = abe_submodules(
             args.output_dir, args.source_url, args.tag,
                 recurse_bsps=args.recurse_bsps,
-                recurse_bootloaders=args.recurse_bootloaders ):
-            count_submodules += 1
-            if DEBUG > 0:
-                print("Count:",count_submodules)
+                recurse_bootloaders=args.recurse_bootloaders )
+
+        if args.execute_command:
+            for sub in subs:
+                count_submodules += 1
+                if DEBUG > 0:
+                    print("Count:",count_submodules)
 
-            if args.execute_command:
                 from subprocess import run, CalledProcessError
                 import sys
                 try:
@@ -38,26 +41,42 @@ def main():
                     print(f"Command: \"{args.execute_command}\" in \"{sub.fullpath}\" returned exitcode \"{err.returncode}\"")
                     sys.exit(err.returncode)
 
+        else:
+            clone_subs_threaded(subs, jobs=args.jobs, kwargs={'cache_dir':args.cache_dir, 'fetch_origin':(not args.no_fetch_origin)})
+
+def clone_subs_threaded(subs, jobs=0, kwargs={}):
+    import multiprocessing
+    import time
+    if jobs < 1:
+        jobs = multiprocessing.cpu_count()
+
+    with multiprocessing.Pool(processes=jobs) as pool:
+        jobs = []
+        for sub in subs:
+            job = pool.apply_async(sub.clone_repo, (), kwargs)
+            jobs.append(job)
+            if sub.last:
+                while not jobs == []:
+                    j = jobs.pop(0)
+                    j.get()
+        while not jobs == []:
+            j = jobs.pop(0)
+            j.get()
+
+'''
+
+            for j in jobs:
+                if not j.is_alive():
+                    j.join()
+                    to_remove = j
+            if to_remove:
+                jobs.remove(to_remove)
             else:
-                from threading import Thread
-                import time
-                job = Thread(target=sub.clone_repo, kwargs={'cache_dir':args.cache_dir, 'fetch_origin':(not args.no_fetch_origin)})
-                job.start()
-                jobs.append(job)
-                while len(jobs) >= args.jobs:
-                    to_remove = None
-                    for j in jobs:
-                        if not j.is_alive():
-                            j.join()
-                            to_remove = j
-                    if to_remove:
-                        jobs.remove(to_remove)
-                    else:
-                        time.sleep(1)
-                if sub.last:
-                    while len(jobs):
-                        jobs.pop().join()
-
+                time.sleep(1)
+        if sub.last:
+            while len(jobs):
+                jobs.pop().join()
+'''
 
 def parser():
     import argparse