# s3-bsync Copyright (c) 2022 Joshua Stockin
# <https://joshstock.in>
# <https://git.joshstock.in/s3-bsync>
#
# This software is licensed and distributed under the terms of the MIT License.
# See the MIT License in the LICENSE file of this project's root folder.
#
# This comment block and its contents, including this disclaimer, MUST be
# preserved in all copies or distributions of this software's source.
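"""Top-level run sequence for s3-bsync: loads syncfile state and dispatches
the PURGE, DUMP, INIT, and OVERWRITE modes requested in the parsed settings."""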

import logging
import sys
import datetime

from . import syncfile
from .classes import sync_managed_bucket

logger = logging.getLogger(__name__)

__all__ = ["run"]


def purge(state):
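    """Delete the syncfile on disk and exit."""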
    logger.debug("Purging syncfile")
    state.purge()
    logger.debug("Success. Exiting...")
    sys.exit(0)


def dump(state):
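    """Echo the deserialized syncfile contents to stdout, then exit."""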
    logger.debug("Running in DUMP mode. Echoing deserialized information to stdout:")
    print("DUMP mode")
    print(
        f'Loaded valid s3sync file "{state.file_path}" (version {state.file_version})'
    )
    print("Metadata")
    print(
        f" Last synced time: {state.last_synced_time} (resolves to {datetime.datetime.fromtimestamp(state.last_synced_time / 1000.0)})"
    )
    print(f" Number of tracked buckets: {len(state.managed_buckets)}")
    print(
        f" Total # of mapped directories: {sum(len(bucket.directory_maps) for bucket in state.managed_buckets)}"
    )
    print(
        f" Total # of tracked fileobjects: {sum(len(bucket.fileobjects) for bucket in state.managed_buckets)}"
    )
    print(f" Filesize: {state.file_size}")

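    # Per-bucket breakdown: mapped directories and tracked fileobjects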
    for bucket in state.managed_buckets:
        print(f'Bucket "{bucket.bucket_name}"')
        print(f" # of mapped directories: {len(bucket.directory_maps)}")
        print(f" # of tracked fileobjects: {len(bucket.fileobjects)}")
        if bucket.directory_maps:
            print(" Mapped directories:")
            for dirmap in bucket.directory_maps:
                print(
                    f' > "{dirmap.local_path}" to "s3://{bucket.bucket_name}/{dirmap.s3_prefix}"'
                )
                print(f" gz_compress {dirmap.gz_compress}")
                print(f" recursive {dirmap.recursive}")
                print(f" gpg_enabled {dirmap.gpg_enabled}")
                print(f' gpg_email "{dirmap.gpg_email}"')

    logger.debug("Finished dump. Exiting...")
    sys.exit(0)


def run(settings):
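    """Entry point: load the syncfile named by settings.syncfile, then
    dispatch the mode flags in settings.mode (PURGE, OVERWRITE, DUMP, INIT)."""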
    logger.debug("Entering run sequence")
    state = syncfile.syncfile(settings.syncfile)

    if "PURGE" in settings.mode:
        purge(state)

    if (
        state.file_exists() and "OVERWRITE" not in settings.mode
    ):  # data will be used, not overwritten
        logger.debug("Syncfile exists. Deserializing...")
        state.deserialize()

    if not state.file_exists() and "INIT" not in settings.mode:
        logger.error("Syncfile does not exist; run in INIT mode to create it")
        sys.exit(1)

    if "DUMP" in settings.mode:
        dump(state)

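    # INIT mode: register any requested directory maps and write the syncfile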
    if "INIT" in settings.mode:
        if hasattr(settings, "dirmaps"):
            for local_path in settings.dirmaps:
                state.map_directory(local_path, settings.dirmaps[local_path])
        state.serialize()