@@ -10,12 +10,11 @@ from queue import Queue, Empty
 from sys import stdout, exit as sys_exit
 from os import access, W_OK, SEEK_END
 from pathlib import Path
-from json import dump
+from json import dump, load
 from uuid import uuid4
 from math import ceil
 from datetime import datetime, timezone
 from traceback import format_exc
 
 from webdav3.client import Client
 from webdav3.exceptions import ConnectionException
 
@@ -118,11 +117,36 @@ class Migration:
 
     def start(self, src_path: str, dest_path: str):
         """
-        Start the workers and fill the queue with the first explore task at the root
+        Start the workers with the first explore task at the root
 
         :src_path: Root path for source
        :dest_path: Root path for dest
        """
+        self.queue.put((self.explore, (src_path, dest_path)))
+        return self.start_workers()
+
+    def retry(self, file: Path):
+        """
+        Start the workers with the tasks from the errors file
+
+        :file: JSON file of errors
+        """
+
+        with file.open("r") as datas:
+            errors = load(datas)
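+
+        # Re-queue each recorded task with its original arguments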
+        for err in errors:
+            if "task" not in err or "args" not in err:
+                continue
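+            # Resolve the stored task name back to a bound method of Migration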
+            task = getattr(self, err["task"])
+            self.queue.put((task, err["args"]))
+
+        return self.start_workers()
+
+    def start_workers(self):
+        """
+        Start the workers
+        """
         interrupt = False
         workers = []
 
@@ -131,7 +155,7 @@ class Migration:
                             self.tasks_errors, daemon=True)
             workers.append(worker)
             worker.start()
-        self.queue.put((self.explore, (src_path, dest_path)))
 
         try:
             self.queue.join()
         except KeyboardInterrupt:
@@ -147,7 +171,6 @@ class Migration:
 
         return not interrupt
 
-
     def report(self):
         """
         Report stats and errors
@@ -272,6 +295,8 @@ class Migration:
 
             chunk_count = ceil(float(size) / float(chunk_size))
 
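+            # Assumption: set_dest_mtime stamps the source modification time onto the destination entry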
+            self.set_dest_mtime("upload", src_path)
+
             for chunk_index in range(chunk_count):
                 with BytesIO() as chunk_buff:
                     c_z = chunk_buff.write(buff.read(chunk_size))
@@ -357,6 +382,7 @@ def main():
                         help="The WebDav errors dump")
     parser.add_argument("--no-sync", default=True, action='store_false', dest="sync",
                         help="Avoid copying of existing file with same size and modification date")
+    parser.add_argument("--retry", default=None, help="Retry from errors stored in a JSON file")
 
     try:
         args = parser.parse_args()
@@ -390,13 +416,24 @@ def main():
                 or not access(error_dump.parent, W_OK):
             raise ArgumentTypeError("Errors dump file path invalid")
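+
+        # The retry file must exist and must not be the errors dump itself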
+        if args.retry:
+            retry_file = Path(args.retry)
+            if not retry_file.exists() or not retry_file.is_file() or not access(retry_file, W_OK):
+                raise ArgumentTypeError("Retry file path invalid")
+            if error_dump == retry_file:
+                raise ArgumentTypeError("Errors dump and retry file are the same")
+
     except ArgumentTypeError as exception:
         print(exception)
         sys_exit(1)
 
     migration = Migration(src_client, dest_client, args.workers, args.memory_limit,
                           args.chunk_upload, args.chunk_upload_size, error_dump, args.sync)
-    not_interrupt = migration.start(args.src_path, args.dest_path)
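+    # Either start a fresh migration or replay the tasks recorded in the retry file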
+    if not args.retry:
+        not_interrupt = migration.start(args.src_path, args.dest_path)
+    else:
+        not_interrupt = migration.retry(retry_file)
 
     migration.report()
 
     if not not_interrupt: