More robust log file trimming (#366)

pull/370/head
Tobi 2 years ago committed by GitHub
parent 2c5a4d71f2
commit 4f03a7b484

@@ -266,6 +266,36 @@ def update_verbosity(debug):
         logger.setLevel(logging.INFO)
 
 
+def trim_logfile(log_path):
+    """Keep the logfile short."""
+    if not os.path.exists(log_path):
+        return
+
+    file_size_mb = os.path.getsize(log_path) / 1000 / 1000
+    if file_size_mb > 100:
+        # something went terribly wrong here. The service might timeout because
+        # it takes too long to trim this file. delete it instead. This probably
+        # only happens when doing funny things while in debug mode.
+        logger.warning(
+            "Removing enormous log file of %dMB",
+            file_size_mb,
+        )
+        os.remove(log_path)
+        return
+
+    # the logfile should not be too long to avoid overflowing the storage
+    try:
+        with open(log_path, "rb") as file:
+            binary = file.readlines()[-1000:]
+            content = [line.decode("utf-8", errors="ignore") for line in binary]
+
+        with open(log_path, "w") as file:
+            file.truncate(0)
+            file.writelines(content)
+    except Exception as e:
+        logger.error('Failed to trim logfile: "%s"', str(e))
+
+
 def add_filehandler(log_path=LOG_PATH):
     """Clear the existing logfile and start logging to it."""
     try:
@@ -276,14 +306,7 @@ def add_filehandler(log_path=LOG_PATH):
             # used to be a folder < 0.8.0
             shutil.rmtree(log_path)
 
-        if os.path.exists(log_path):
-            # the logfile should not be too long to avoid overflowing the storage
-            with open(log_path, "r") as file:
-                content = file.readlines()[-1000:]
-
-            with open(log_path, "w") as file:
-                file.truncate(0)
-                file.writelines(content)
+        trim_logfile(log_path)
 
         file_handler = logging.FileHandler(log_path)
         file_handler.setFormatter(ColorfulFormatter())
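
For context, here is a minimal standalone sketch of the behaviour this change introduces, mirroring the trim_logfile added above; the max_mb and keep_lines parameters and the throwaway 5000-line file are illustrative additions for this sketch, not part of the commit:

import logging
import os
import tempfile

logger = logging.getLogger(__name__)


def trim_logfile(log_path, max_mb=100, keep_lines=1000):
    """Keep only the newest lines of the logfile; delete oversized files outright."""
    if not os.path.exists(log_path):
        return

    file_size_mb = os.path.getsize(log_path) / 1000 / 1000
    if file_size_mb > max_mb:
        # trimming a huge file could take too long, so drop it entirely
        logger.warning("Removing enormous log file of %dMB", file_size_mb)
        os.remove(log_path)
        return

    try:
        # read as bytes and ignore undecodable characters so a corrupt line
        # cannot make the trim fail
        with open(log_path, "rb") as file:
            binary = file.readlines()[-keep_lines:]
        content = [line.decode("utf-8", errors="ignore") for line in binary]

        with open(log_path, "w") as file:
            file.writelines(content)
    except Exception as error:
        logger.error('Failed to trim logfile: "%s"', str(error))


# exercise it on a throwaway file with 5000 lines
log_path = os.path.join(tempfile.mkdtemp(), "log")
with open(log_path, "w") as file:
    file.writelines(f"line {i}\n" for i in range(5000))

trim_logfile(log_path)

with open(log_path) as file:
    lines = file.readlines()

print(len(lines))         # 1000
print(lines[-1], end="")  # line 4999

Reading in binary and decoding with errors="ignore" is what makes this more robust than the plain-text read removed above: a single undecodable byte in the log can no longer raise during startup.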

@@ -89,7 +89,7 @@ class TestLogger(unittest.TestCase):
         with open(path, "r") as f:
             # it only keeps the newest information
             content = f.readlines()
-            self.assertLess(len(content), 1100)
+            self.assertLess(abs(len(content) - 1000), 10)
 
             # whatever the logging module decides to log into that file
             self.assertNotIn("aaaa", content[-1])
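
The old assertion only required fewer than 1100 lines, which would also pass if most of the log had been lost; the new one pins the count to within 10 of the 1000 lines that trim_logfile keeps, leaving slack only for whatever the logging module appends after the trim. A rough illustration of that slack (the 3 extra lines are an assumption, not a measured value):

trimmed = 1000  # lines kept by trim_logfile
extra = 3       # lines the logging module writes once the file handler is re-attached

total = trimmed + extra

assert abs(total - 1000) < 10  # new check: roughly the 1000 newest lines
assert total < 1100            # old check: would also pass with far fewer lines kept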
