Log level: logs filled with 'application started'
Is it possible to get rid of info-level messages in stderr? The error log is filled with these lines all the time, and there are far too many of them for no benefit. I think we need to provide an option to set the log level. Thanks.
2024/10/24 18:25:55 [info] 1192115#1192115 "myapp" application started
2024/10/24 18:26:55 [info] 1192254#1192254 "myapp" application started
2024/10/24 18:29:01 [info] 1192431#1192431 "myapp" application started
2024/10/24 18:29:57 [info] 1192503#1192503 "myapp" application started
2024/10/24 18:29:57 [info] 1192504#1192504 "myapp" application started
2024/10/24 18:29:57 [info] 1192505#1192505 "myapp" application started
2024/10/24 18:29:57 [info] 1192506#1192506 "myapp" application started
2024/10/24 18:29:57 [info] 1192507#1192507 "myapp" application started
2024/10/24 18:32:37 [info] 1192696#1192696 "myapp" application started
2024/10/24 18:32:45 [info] 1192703#1192703 "myapp" application started
2024/10/24 18:32:45 [info] 1192704#1192704 "myapp" application started
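Until a log-level option lands, one stopgap is to filter these entries out when reading the log. Below is a minimal sketch in Python that drops info-level lines from whatever is piped into it (for example, the output of tail -f on the log file); the filename filter_info.py and the exact "[info]" marker are assumptions based on the sample lines above:
#!/usr/bin/env python3
"""filter_info.py: read a Unit log on stdin and drop info-level lines."""
import sys

for line in sys.stdin:
    # Keep everything that is not an info-level entry.
    if "[info]" not in line:
        sys.stdout.write(line)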
Any updates on this?
While waiting for an official fix for this issue, I asked an AI to write a script that deletes the routine entries:
clean_logs.py
#!/usr/bin/env python3
"""
Script to clean routine log entries from Unit logs.
Removes entries like:
- "[info] ... application started"
- "[notice] ... exited with code 0"
"""
import re
import os
import argparse
# Patterns to identify routine log entries to remove
ROUTINE_PATTERNS = [
r'\[info\] [0-9]+#[0-9]+ ".*" application started$',
r'\[notice\] [0-9]+#[0-9]+ app process [0-9]+ exited with code 0$'
]
def should_remove_line(line):
"""Check if a line matches any of the routine patterns."""
for pattern in ROUTINE_PATTERNS:
if re.search(pattern, line):
return True
return False
def clean_log_file(file_path, dry_run=False):
"""Clean a single log file by removing routine entries."""
try:
with open(file_path, 'r', encoding='utf-8') as f:
lines = f.readlines()
# Filter out routine log entries
cleaned_lines = [line for line in lines if not should_remove_line(line)]
# Write back to file if not in dry run mode
if not dry_run:
with open(file_path, 'w', encoding='utf-8') as f:
f.writelines(cleaned_lines)
# Return stats
removed_count = len(lines) - len(cleaned_lines)
return len(lines), removed_count
except Exception as e:
print(f"Error processing {file_path}: {e}")
return 0, 0
def main():
parser = argparse.ArgumentParser(description="Clean routine log entries from Unit logs")
parser.add_argument("files", nargs="+", help="Log files to clean")
parser.add_argument("--dry-run", action="store_true",
help="Show what would be removed without actually modifying files")
args = parser.parse_args()
total_lines = 0
total_removed = 0
for file_path in args.files:
if not os.path.exists(file_path):
print(f"File not found: {file_path}")
continue
original_count, removed_count = clean_log_file(file_path, args.dry_run)
total_lines += original_count
total_removed += removed_count
status = "would remove" if args.dry_run else "removed"
print(f"{file_path}: {status} {removed_count} lines")
print(f"\nTotal: processed {total_lines} lines, {total_removed} routine entries removed")
if __name__ == "__main__":
main()
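Example usage: run python3 clean_logs.py --dry-run /var/log/unit.log first to see how many lines would be removed (adjust the path to wherever your Unit log actually lives; the location varies by install), then drop --dry-run to rewrite the file in place.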