Compare commits


33 Commits

Author SHA1 Message Date
mike cd14dc6158 log tweak 2024-04-21 02:07:40 -07:00
Mike e4d5e1b595 Update pyenv version and relock pipenv 2023-08-18 01:53:51 -07:00
mike 2e4638e448 Typo 2023-05-29 13:25:03 -07:00
mike 916064c153 Bump python version and relock pipenv 2023-05-20 18:24:25 -07:00
root ec894014c6 Uhm whoops 2023-03-27 20:27:35 -07:00
root d66688eb3c Remove annoying log emissions 2023-03-27 20:17:45 -07:00
root 5e30c2f7da Further upgrades to Logging 2023-03-27 20:12:37 -07:00
root c0769ad0b1 Working on logging 2023-03-27 19:59:45 -07:00
root 9d17178012 Adding dedicated Logger and Config classes 2023-03-27 19:40:52 -07:00
root 4f0b29cd3d Start a domain folder 2023-03-27 18:50:11 -07:00
root 9a2efa9f0a When consuming configs inside a dir, ignore nested dirs and files without valid YAML extensions 2023-03-27 18:44:27 -07:00
root 30bb98dff0 Update license info 2023-03-27 18:15:11 -07:00
root 429a4a6712 Hilarious oversights 2023-03-26 03:38:27 -07:00
root acfbb90f91 hooks? 2023-03-26 02:26:02 -07:00
root 9f26c09453 hooks? 2023-03-26 02:25:56 -07:00
root 248f759d96 hooks? 2023-03-26 02:25:27 -07:00
root 0125e92a0a hooks? 2023-03-26 02:24:20 -07:00
root a59c573174 hooks? 2023-03-26 02:24:05 -07:00
root b3687abb62 hooks? 2023-03-26 02:23:07 -07:00
root 91b2b0d98a hooks? 2023-03-26 02:22:15 -07:00
root cf9be50c2a hooks? 2023-03-26 02:21:58 -07:00
root 5ffe16cd31 hooks? 2023-03-26 02:20:39 -07:00
root 8e03950102 Bump python version 2023-03-26 02:17:04 -07:00
mike 9aa66d8e50 Bump python version to 3.10 because old one was segfaulting 2022-07-15 03:26:00 -07:00
mike cfccf4aa70 Uhm specify a pyenv version? 2022-07-10 06:30:55 -07:00
mike e704930c71 Re-lock pipenv using more specific python version 2022-07-10 06:30:33 -07:00
hoth 86aed2d1f1 Bugfix: Bad var name caused wrong files to be deleted in check for max items 2022-02-13 16:06:14 +05:30
hoth effa940e69 Allow enforcement of minimum item count. Improve logging output. 2022-02-01 07:08:30 +05:30
root 5d2e93ca41 Upgrade: Can now delete based on age 2022-02-01 04:05:48 +05:30
root bd25e49582 Tweak output wording 2022-02-01 02:19:36 +05:30
root 8a41635c1f Trying to bump pipenv deps 2022-02-01 02:17:15 +05:30
root 758ec336c1 nop to test hooks 2022-02-01 02:13:44 +05:30
root cb1cc280ed Convert CLI parsing from DIY to argparse 2022-02-01 02:05:08 +05:30
9 changed files with 665 additions and 275 deletions

1
.python-version Normal file

@@ -0,0 +1 @@
3.11.4

BackupRotator.py

@@ -1,251 +0,0 @@
#!/usr/bin/env python3
"""
Mike's Backup Rotator
A simple script to help automatically rotate backup files
Copyright 2019 Mike Peralta; All rights reserved
Released under the GNU GENERAL PUBLIC LICENSE v3 (See LICENSE file for more)
"""
import datetime
import os
import shutil
import sys
import syslog
import yaml
class BackupRotator:
def __init__(self):
self.__dry_run = False
self.__configs = []
self.__config_paths = []
self.__calculated_actions = []
def run(self):
self.log("Begin")
self.consume_arguments()
self.consume_configs(self.__config_paths)
# Rotate once per config
for config_index in range(len(self.__configs)):
#
config = self.__configs[config_index]
#
self.log("Rotating for config " + str(config_index + 1) + " of " + str(len(self.__configs)), config["__path"])
self.do_rotate(config)
@staticmethod
def current_time():
now = datetime.datetime.now()
now_s = now.strftime("%b-%d-%Y %I:%M%p")
return str(now_s)
def log(self, s, o=None):
now = self.current_time()
to_log = "[" + now + "][Backup Rotator] " + str(s)
if o is not None:
to_log += " " + str(o)
syslog.syslog(to_log)
print(to_log)
def consume_arguments(self):
self.__config_paths = []
for i in range(1, len(sys.argv)):
arg = sys.argv[i]
if arg == "--config":
i, one_path = self.consume_argument_companion(i)
self.__config_paths.append(one_path)
self.log("Found config path argument:", one_path)
elif arg == "--dry-run":
self.__dry_run = True
self.log("Activating global dry-run mode")
@staticmethod
def consume_argument_companion(arg_index):
companion_index = arg_index + 1
if companion_index >= len(sys.argv):
raise Exception("Expected argument after", sys.argv[arg_index])
return companion_index, sys.argv[companion_index]
def consume_configs(self, paths: list=None):
if paths is None:
raise Exception("Auto-finding of config file not implemented")
# Use each config path
for path in paths:
# If this is a single path
if os.path.isfile(path):
self.consume_config(path)
# If this is a directory
elif os.path.isdir(path):
# Iterate over each file inside
for file_name in os.listdir(path):
self.consume_config(os.path.join(path, file_name))
def consume_config(self, path: str):
# Open the file
f = open(path)
if not f:
raise Exception("Unable to open config file: " + path)
# Parse
config = yaml.safe_load(f)
# Add its own path
config["__path"] = path
# Consume to internal
self.__configs.append(config)
self.log("Consumed config from path:", path)
def do_rotate(self, config):
self.rotate_paths(config)
def rotate_paths(self, config):
self.log("Begin rotating " + str(len(config["paths"])) + " paths")
for path in config["paths"]:
self.rotate_path(config, path)
def rotate_path(self, config, path):
self.log("Rotating path", path)
if "maximum-items" not in config:
raise Exception("Please provide config key: \"maximum-items\"")
max_items = config["maximum-items"]
if not os.path.isdir(path):
raise Exception("Path should be a directory:" + str(path))
children = self.gather_rotation_candidates(config, path)
# Do we need to rotate anything out?
if len(children) <= max_items:
self.log(
"Path only has " + str(len(children)) + " items,"
+ " but needs " + str(max_items) + " for rotation"
+ "; Won't rotate this path."
)
return
#
purge_count = len(children) - max_items
self.log(
"Need to purge " + str(purge_count) + " items"
)
for purge_index in range(purge_count):
#
item_to_purge = self.pick_item_to_purge(config, children)
children.remove(item_to_purge)
#
if os.path.isfile(item_to_purge):
self.remove_file(config, item_to_purge)
elif os.path.isdir(item_to_purge):
self.remove_directory(config, item_to_purge)
else:
raise Exception("Don't know how to remove this item: " + str(item_to_purge))
@staticmethod
def gather_rotation_candidates(config, path):
candidates = []
if "target-type" not in config.keys():
raise Exception("Please provide the configuration key: target-type")
for item_name in os.listdir(path):
item_path = os.path.join(path, item_name)
if config["target-type"] == "file":
if not os.path.isfile(item_path):
continue
elif config["target-type"] == "directory":
if not os.path.isdir(item_path):
continue
else:
raise Exception("Configuration key \"target-type\" must be \"file\" or \"directory\"")
candidates.append(item_path)
return candidates
@staticmethod
def pick_item_to_purge(config, items):
if "date-detection" not in config.keys():
raise Exception("Please provide config key: \"date-detection\"")
detection = config["date-detection"]
best_item = None
best_ctime = None
for item in items:
if detection == "file":
ctime = os.path.getctime(item)
if best_ctime is None or ctime < best_ctime:
best_ctime = ctime
best_item = item
else:
raise Exception("Invalid value for \"date-detection\": " + str(detection))
return best_item
def remove_file(self, config, file_path):
if not os.path.isfile(file_path):
raise Exception("Tried to remove a file, but this path isn't a file: " + str(file_path))
if self.__dry_run:
self.log("Won't purge file during global-level dry run: ", file_path)
elif "dry-run" in config.keys() and config["dry-run"] is True:
self.log("Won't purge file during config-level dry run: ", file_path)
else:
self.log("Purging file:", file_path)
os.remove(file_path)
def remove_directory(self, config, dir_path):
if not os.path.isdir(dir_path):
raise Exception("Tried to remove a directory, but this path isn't a directory: " + str(dir_path))
if self.__dry_run:
self.log("Won't purge directory during global-level dry run: ", dir_path)
elif "dry-run" in config.keys() and config["dry-run"] is True:
self.log("Won't purge directory during config-level dry run: ", dir_path)
else:
self.log("Purging directory:", dir_path)
shutil.rmtree(dir_path)

Pipfile

@@ -4,10 +4,11 @@ verify_ssl = true
name = "pypi"
[packages]
pyyaml = "*"
pyyaml = ">=5.4"
[dev-packages]
[requires]
python_version = "3"
python_version = "3.11.4"

59
Pipfile.lock generated

@@ -1,11 +1,11 @@
{
"_meta": {
"hash": {
"sha256": "25d0d6f494ae55bc9fa72711e1e8edb7e3d6b7ce72e6254d2ec85bd3b0e637bd"
"sha256": "8c9a360e47ffd3442df22d4110e1463a62ea4dc7c0217a87a57b642b78c3d609"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3"
"python_version": "3.11.4"
},
"sources": [
{
@@ -18,22 +18,49 @@
"default": {
"pyyaml": {
"hashes": [
"sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97",
"sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76",
"sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2",
"sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e",
"sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648",
"sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf",
"sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f",
"sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2",
"sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee",
"sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a",
"sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d",
"sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c",
"sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a"
"sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc",
"sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741",
"sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206",
"sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27",
"sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595",
"sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62",
"sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98",
"sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696",
"sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d",
"sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867",
"sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47",
"sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486",
"sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6",
"sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3",
"sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007",
"sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938",
"sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c",
"sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735",
"sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d",
"sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba",
"sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8",
"sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5",
"sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd",
"sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3",
"sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0",
"sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515",
"sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c",
"sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c",
"sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924",
"sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34",
"sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43",
"sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859",
"sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673",
"sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a",
"sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab",
"sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa",
"sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c",
"sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585",
"sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d",
"sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"
],
"index": "pypi",
"version": "==5.3.1"
"version": "==6.0.1"
}
},
"develop": {}

README.md

@@ -5,6 +5,12 @@ This program functions somewhat similarly to a log rotator. Its purpose is to r
Suppose you have a third party backup program regularly dropping backup files into some directory. You could use this program to limit the number of files that remain in the directory at any given time.
# License
Copyright 2023 Mike Peralta; All rights reserved
Released to the public under the GNU GENERAL PUBLIC LICENSE v3 (See LICENSE file for more)
# Requirements
* Python 3
@@ -78,11 +84,25 @@ Specifies the method used when attempting to determine how old a backup file/dir
Currently, only *file* is supported
### minimum-items < INTEGER >
Specifies the minimum number of backup files/dirs that must be present before rotating can happen. Should be an integer.
This option doesn't specify how much to rotate on its own; it only sets a floor below which no rotation will happen. It is best combined with *maximum-age* (or another rotation key) rather than *maximum-items* alone.
For example, when the *minimum-items* value is set to 5, and *target-type* is *file*, the program will not rotate any files until there are at least 5 in the target directory.
### maximum-items < INTEGER >
Specifies the maximum number of backup files/dirs that are allowed in a path before rotating will happen. Should be an integer.
For example, when the *maximum-items* value is set to 5, and *target-type* is *file*, the program will not rotate any files until there are at least 5 in the target directory.
For example, when the *maximum-items* value is set to 500, and *target-type* is *file*, the program will not rotate any files until there are at least 500 in the target directory.
### maximum-age < INTEGER >
Specifies the maximum age (in days) of backup files/dirs that are allowed in a path before rotating will happen. Should be an integer.
For example, when the *maximum-age* value is set to 30, and *target-type* is *file*, the program will not rotate any files that are newer than 30 days.
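Putting the keys above together (including the *paths* key documented next), a minimal configuration might look like the sketch below; the directory locations are hypothetical, and the file is parsed with `yaml.safe_load`, just as the rotator itself does:

```python
import yaml

# Hypothetical config combining the keys documented above; adjust paths to taste.
EXAMPLE_CONFIG = """
target-type: file
date-detection: file
minimum-items: 5
maximum-items: 500
maximum-age: 30
dry-run: true
paths:
  - /var/backups/nightly
  - /var/backups/weekly
"""

config = yaml.safe_load(EXAMPLE_CONFIG)
print(config["maximum-items"])  # -> 500
```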
### paths < Array of Paths >

377
domain/BackupRotator.py Executable file

@@ -0,0 +1,377 @@
#!/usr/bin/env python3
"""
Mike's Backup Rotator
A simple script to help automatically rotate backup files
Copyright 2023 Mike Peralta; All rights reserved
Released to the public under the GNU GENERAL PUBLIC LICENSE v3 (See LICENSE file for more)
"""
from domain.Logger import Logger
from domain.Config import Config
import datetime
import os
# import pprint
import shutil
import sys
import time
import yaml
class BackupRotator:
def __init__(self, debug:bool = False):
self.__logger = Logger(name=type(self).__name__, debug=debug)
self.__config_helper = Config(logger=self.__logger)
self.__dry_run = False
self.__configs = []
self.__config_paths = []
self.__calculated_actions = []
def run(self, configs, dry_run: bool = False):
self.info("Begin")
self.__dry_run = dry_run
self.__config_paths = configs
self._consume_configs(self.__config_paths)
# Rotate once per config
for config_index in range(len(self.__configs)):
#
config = self.__configs[config_index]
#
self.info(f"Rotating for config {config_index + 1} of {len(self.__configs)} : {config['__path']}")
self._do_rotate(config)
@staticmethod
def current_time():
now = datetime.datetime.now()
now_s = now.strftime("%b-%d-%Y %I:%M%p")
return str(now_s)
def debug(self, s):
self.__logger.debug(s)
def info(self, s):
self.__logger.info(s)
def warn(self, s):
self.__logger.warn(s)
def error(self, s):
self.__logger.error(s)
def _consume_configs(self, paths: list=None):
configs = self.__config_helper.gather_valid_configs(paths=paths)
for config in configs:
self._consume_config(path=config)
def _consume_config(self, path: str):
# Open the file
f = open(path)
if not f:
raise Exception("Unable to open config file: " + path)
# Parse
config = yaml.safe_load(f)
# Add its own path
config["__path"] = path
# Consume to internal
self.__configs.append(config)
self.info(f"Consumed config from path: {path}")
def _do_rotate(self, config):
self._rotate_paths(config)
def _rotate_paths(self, config):
self.info("Begin rotating " + str(len(config["paths"])) + " paths")
for path in config["paths"]:
self._rotate_path(config, path)
def _rotate_path(self, config, path):
assert os.path.isdir(path), "Path should be a directory: {}".format(path)
self.info("Rotating path: {}".format(path))
found_any_rotation_keys = False
if "maximum-items" in config.keys():
found_any_rotation_keys = True
self._rotate_path_for_maximum_items(config=config, path=path, max_items=config["maximum-items"])
if "maximum-age" in config.keys():
found_any_rotation_keys = True
self._rotate_path_for_maximum_age(config=config, path=path, max_age_days=config["maximum-age"])
assert found_any_rotation_keys is True, \
"Config needs at least one of the following keys: \"maximum-items\", \"maximum-age\""
def _rotate_path_for_maximum_items(self, config, path: str, max_items: int):
assert os.path.isdir(path), "Path should be a directory: {}".format(path)
self.info("Rotating path for a maximum of {} items: {}".format(
max_items, path
))
children = self._gather_rotation_candidates(config, path)
minimum_items = self._determine_minimum_items(config)
# Do we need to rotate anything out?
if len(children) < minimum_items:
self.info("Path only has {} items, which does not meet the minimum threshold of {} items. Won't rotate this path.".format(
len(children), minimum_items
))
return
elif len(children) <= max_items:
self.info("Path only has {} items, but needs more than {} for rotation; Won't rotate this path.".format(
len(children), max_items
))
return
self.info("Found {} items to examine".format(len(children)))
#
maximum_purge_count = len(children) - minimum_items
purge_count = len(children) - max_items
self.info("Want to purge {} items".format(purge_count))
if purge_count > maximum_purge_count:
self.info("Reducing purge count from {} to {} items to respect minimum items setting ({})".format(
purge_count, maximum_purge_count, minimum_items
))
purge_count = maximum_purge_count
children_to_purge = []
for purge_index in range(purge_count):
#
item_to_purge, item_ctime, item_age_seconds, item_age = self._pick_oldest_item(config, children)
children.remove(item_to_purge)
self.info("Found next item to purge: ({}) {} ({})".format(
purge_index + 1,
os.path.basename(item_to_purge),
item_age
))
#
children_to_purge.append(item_to_purge)
#
self.info("Removing items")
for child_to_purge in children_to_purge:
child_basename = os.path.basename(child_to_purge)
self._remove_item(config, child_to_purge)
def _rotate_path_for_maximum_age(self, config, path: str, max_age_days: int):
assert os.path.isdir(path), "Path should be a directory: {}".format(path)
self.info("Rotating path for max age of {} days: {}".format(max_age_days, path))
children = self._gather_rotation_candidates(config, path)
minimum_items = self._determine_minimum_items(config)
# Do we need to rotate anything out?
if len(children) < minimum_items:
self.info("Path only has {} items, which does not meet the minimum threshold of {} items. Won't rotate this path.".format(
len(children), minimum_items
))
return
self.info("Examining {} items for deletion".format(len(children)))
children_to_delete = []
for child in children:
age_seconds = self._detect_item_age_seconds(config, child)
age_days = self._detect_item_age_days(config, child)
age_formatted = self.seconds_to_time_string(age_seconds)
child_basename = os.path.basename(child)
if age_days > max_age_days:
self.info("[Old enough ] {} ({})".format(
child_basename, age_formatted
))
children_to_delete.append(child)
else:
self.info("[Not Old enough] {} ({})".format(
child_basename, age_formatted
))
if len(children_to_delete) > 0:
self.info("Removing old items ...")
for child_to_delete in children_to_delete:
basename = os.path.basename(child_to_delete)
self._remove_item(config, child_to_delete)
else:
self.info("No old items to remove")
@staticmethod
def _gather_rotation_candidates(config, path):
candidates = []
if "target-type" not in config.keys():
raise Exception("Please provide the configuration key: target-type")
for item_name in os.listdir(path):
item_path = os.path.join(path, item_name)
if config["target-type"] == "file":
if not os.path.isfile(item_path):
continue
elif config["target-type"] == "directory":
if not os.path.isdir(item_path):
continue
else:
raise Exception("Configuration key \"target-type\" must be \"file\" or \"directory\"")
candidates.append(item_path)
return candidates
def _pick_oldest_item(self, config, items):
best_item = None
best_ctime = None
for item in items:
ctime = self._detect_item_date(config, item)
if best_ctime is None or ctime < best_ctime:
best_ctime = ctime
best_item = item
age_seconds = self._detect_item_age_seconds(config, best_item)
age_string = self.seconds_to_time_string(age_seconds)
return best_item, best_ctime, age_seconds, age_string
@staticmethod
def _detect_item_date(config, item):
assert "date-detection" in config.keys(), "Please provide config key: \"date-detection\""
detection = config["date-detection"]
if detection == "file":
ctime = os.path.getctime(item)
else:
raise AssertionError(f"Invalid value for \"date-detection\"; Should be one of [file]: {detection}")
return ctime
def _detect_item_age_seconds(self, config, item):
now = time.time()
ctime = self._detect_item_date(config, item)
delta = now - ctime
return delta
def _detect_item_age_days(self, config, item):
age_seconds = self._detect_item_age_seconds(config, item)
age_days = int(age_seconds / 86400)
return age_days
def seconds_to_time_string(self, seconds: float):
if isinstance(seconds, float):
pass
elif isinstance(seconds, int):
seconds = float(seconds)
else:
raise AssertionError("Seconds must be an int or float")
# Map
map = {
"year": 31536000.0,
"month": 2592000.0,
"week": 604800.0,
"day": 86400.0,
"hour": 3600.0,
"minute": 60.0,
"second": 1.0
}
s_parts = []
for unit_label in map.keys():
unit_seconds = map[unit_label]
if seconds >= unit_seconds:
unit_count = int(seconds / unit_seconds)
s_parts.append("{} {}{}".format(
unit_count, unit_label,
"" if unit_count == 1 else "s"
))
seconds -= unit_seconds * unit_count
s = ", ".join(s_parts)
return s
def _remove_item(self, config, path):
if os.path.isfile(path):
self._remove_file(config, path)
elif os.path.isdir(path):
self._remove_directory(config, path)
else:
raise AssertionError("Don't know how to remove this item: {}".format(path))
def _remove_file(self, config, file_path):
if not os.path.isfile(file_path):
raise Exception("Tried to remove a file, but this path isn't a file: " + str(file_path))
if self.__dry_run:
self.info(f"Won't purge file during global-level dry run: {file_path}")
elif "dry-run" in config.keys() and config["dry-run"] is True:
self.info(f"Won't purge file during config-level dry run: {file_path}")
else:
self.info(f"Purging file: {file_path}")
os.remove(file_path)
def _remove_directory(self, config, dir_path):
if not os.path.isdir(dir_path):
raise Exception("Tried to remove a directory, but this path isn't a directory: " + str(dir_path))
if self.__dry_run:
self.info(f"Won't purge directory during global-level dry run: {dir_path}")
elif "dry-run" in config.keys() and config["dry-run"] is True:
self.info(f"Won't purge directory during config-level dry run: {dir_path}")
else:
self.info(f"Purging directory: {dir_path}")
shutil.rmtree(dir_path)
def _determine_minimum_items(self, config):
minimum_items = 0
if "minimum-items" in config.keys():
minimum_items = config["minimum-items"]
self.info("Won't delete anything unless a minimum of {} items were found".format(minimum_items))
else:
self.info("No value found for \"minimum-items\"; Will not enforce minimum item constraint.")
return minimum_items
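The interplay of *maximum-items* and *minimum-items* inside `_rotate_path_for_maximum_items` boils down to a small clamp on the purge count. A standalone restatement of that arithmetic (the function name here is illustrative, not part of the module):

```python
def purge_count_for(total_items: int, max_items: int, minimum_items: int) -> int:
    """Illustrative restatement of the clamp in _rotate_path_for_maximum_items."""
    if total_items < minimum_items or total_items <= max_items:
        return 0  # too few items, or nothing over the maximum: no rotation
    maximum_purge_count = total_items - minimum_items
    purge_count = total_items - max_items
    return min(purge_count, maximum_purge_count)

# 12 backups, keep at most 10, never drop below 8 -> purge 2
assert purge_count_for(12, max_items=10, minimum_items=8) == 2
# 12 backups, keep at most 3, never drop below 8 -> clamped to 4
assert purge_count_for(12, max_items=3, minimum_items=8) == 4
```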

113
domain/Config.py Normal file

@@ -0,0 +1,113 @@
from domain.Logger import Logger
import os
class Config:
__DEFAULT_VALID_EXTENSIONS = [
"yaml",
"yml"
]
def __init__(self, logger):
self.__logger = logger
self.__valid_extensions = self.__DEFAULT_VALID_EXTENSIONS
def debug(self, s):
self.__logger.debug(f"[{type(self).__name__}] {s}")
def info(self, s):
self.__logger.info(f"[{type(self).__name__}] {s}")
def warn(self, s):
self.__logger.warn(f"[{type(self).__name__}] {s}")
def error(self, s):
self.__logger.error(f"[{type(self).__name__}] {s}")
@staticmethod
def get_dir_files_recursive(path: str):
files_paths = []
for dir_path, dirnames, filenames in os.walk(path):
for file_name in filenames:
file_path = os.path.join(dir_path, file_name)
files_paths.append(file_path)
# print("Uhm yeah", dir_path, "--", dirnames, "--", file_name)
# print("==>", file_path)
return files_paths
def gather_valid_configs(self, paths: list=None):
assert paths is not None, "Config paths cannot be None"
assert len(paths) > 0, "Must provide at least one config file path"
self.info("Gathering valid configs")
file_paths = []
configs = []
not_configs = []
# First gather all files that are potential configs
for path in paths:
self.info(f"Inspecting path: {path}")
if os.path.isfile(path):
self.debug(f"Path is a file; Adding directly to potential config candidates: {path}")
file_paths.append(path)
elif os.path.isdir(path):
self.debug(f"Path is a dir; Scanning recursively for potential config candidate files: {path}")
for file_path in Config.get_dir_files_recursive(path=path):
self.info(f"> Candidate file: {file_path}")
file_paths.append(file_path)
else:
raise AssertionError(f"Don't know how to handle path that isn't a file or dir: {path}")
# Now, filter for files with valid YAML extensions
for file_path in file_paths:
if self.check_file_extension(file_path=file_path, extensions=None):
configs.append(file_path)
else:
not_configs.append(file_path)
self.info("Filtered out non-config files:")
if len(not_configs) > 0:
for not_config in not_configs:
self.info(f"> {not_config}")
else:
self.info("> [none]")
self.info("Kept config-looking files:")
if len(configs) > 0:
for config in configs:
self.info(f"> {config}")
else:
self.info("> [none]")
return configs
def check_file_extension(self, file_path, extensions: list=None):
if extensions is None:
extensions = self.__valid_extensions
file_name, file_extension = os.path.splitext(file_path)
if len(file_extension) > 0 and file_extension[0] == ".":
file_extension = file_extension[1:]
file_extension = file_extension.lower()
for valid_extension in extensions:
#print(file_name, "---", file_extension, "---", valid_extension)
if file_extension == valid_extension:
return True
return False
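A rough usage sketch of `Config.gather_valid_configs` (the directory path is hypothetical): files are collected recursively from any directory arguments and then filtered down to those with a `.yaml` or `.yml` extension.

```python
from domain.Config import Config
from domain.Logger import Logger

helper = Config(logger=Logger(name="Config", debug=True))
# Hypothetical directory; nested files without a .yaml/.yml extension are filtered out.
config_paths = helper.gather_valid_configs(paths=["/etc/backup-rotator/conf.d"])
for path in config_paths:
    print(path)
```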

64
domain/Logger.py Normal file

@@ -0,0 +1,64 @@
import logging
from logging.handlers import SysLogHandler
import sys
class Logger:
def __init__(self, name: str, debug: bool=False):
self.__name = name
self.__logger = logging.getLogger(self.__name)
if debug:
level = logging.DEBUG
else:
level = logging.INFO
self.__logger.setLevel(level)
formatter = logging.Formatter('[%(name)s][%(levelname)s] %(message)s')
formatter_full = logging.Formatter('[%(asctime)s][%(name)s][%(levelname)s] %(message)s')
# Console output / stream handler (STDOUT)
handler = logging.StreamHandler(
stream=sys.stdout
)
handler.setLevel(level)
handler.addFilter(lambda entry: entry.levelno <= logging.INFO)
handler.setFormatter(formatter_full)
self.__logger.addHandler(handler)
# Console output / stream handler (STDERR)
handler = logging.StreamHandler(
stream=sys.stderr
)
handler.setLevel(logging.WARNING)
handler.setFormatter(formatter_full)
self.__logger.addHandler(handler)
# Syslog handler
handler = SysLogHandler(
address="/dev/log"
)
handler.setLevel(level)
handler.setFormatter(formatter)
self.__logger.addHandler(handler)
# This is annoying inside cron
# self.debug("Test debug log")
# self.info("Test info log")
# self.warn("Test warn log")
# self.error("Test error log")
def debug(self, s):
self.__logger.debug(s)
def info(self, s):
self.__logger.info(s)
def warn(self, s):
self.__logger.warn(s)
def error(self, s):
self.__logger.error(s)
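Given the handler setup above, DEBUG/INFO records land on stdout, WARNING and above land on stderr, and everything is also sent to syslog. A minimal usage sketch:

```python
from domain.Logger import Logger

log = Logger(name="BackupRotator", debug=True)
log.info("goes to stdout and syslog")
log.error("goes to stderr and syslog")
```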

48
main.py

@@ -1,15 +1,53 @@
#!/usr/bin/env python3
from BackupRotator import BackupRotator
from domain.BackupRotator import BackupRotator
import argparse
#
def main():
rotator = BackupRotator()
rotator.run()
parser = argparse.ArgumentParser(
description="Mike's Backup Rotator. Helps automatically remove old backup files or folders."
)
parser.add_argument(
"--debug", "--verbose",
dest="debug",
default=False,
action="store_true",
help="Verbose/Debug logging mode"
)
parser.add_argument(
"--config", "-c",
dest="config_files",
default=[],
action="append",
type=str,
help="Specify a configuration file. Can be called multiple times."
)
parser.add_argument(
"--dry-run", "-d",
dest="dry_run",
default=False,
action="store_true",
help="Only perform an analysis; Don't delete anything."
)
args = parser.parse_args()
rotator = BackupRotator(
debug=args.debug
)
rotator.run(
configs=args.config_files,
dry_run=args.dry_run
)
if __name__ == "__main__":
main()
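The entry point above simply forwards the parsed flags into `BackupRotator.run`. Calling the class directly (for example from a test or a wrapper script) is a thin sketch; the config path is hypothetical:

```python
from domain.BackupRotator import BackupRotator

# Roughly equivalent to: ./main.py --debug --config /etc/backup-rotator/nightly.yaml --dry-run
rotator = BackupRotator(debug=True)
rotator.run(
    configs=["/etc/backup-rotator/nightly.yaml"],  # hypothetical config path
    dry_run=True,
)
```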