CHANGE import of multiple log files

CaffeineFueled 2025-04-15 07:11:30 +02:00
parent 0e3323b7ab
commit a3cff924ba

main.py (98 lines changed)

@@ -136,12 +136,15 @@ async def combined_view(
     # Parse all log files and collect all rows
     for filename in log_files:
         log_path = os.path.join(logs_dir, filename)
+        try:
             columns, rows = parse_log_file(log_path)
             if columns:
                 common_columns.update(columns)
             all_rows.extend(rows)
+        except Exception as e:
+            print(f"Error processing file {filename} in combined view: {e}")

     # Apply gateway filter if specified
     if gateway:
@@ -290,11 +293,14 @@ async def api_all_entries(
     reference_columns = []
     for filename in log_files:
         log_path = os.path.join(logs_dir, filename)
+        try:
             columns, rows = parse_log_file(log_path)
             if columns and not reference_columns:
                 # Save column order from first file with columns
                 reference_columns = columns
             all_rows.extend(rows)
+        except Exception as e:
+            print(f"Error processing file {filename} in api_all_entries: {e}")

     # Apply gateway filter if specified
     if gateway:
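Both hunks above apply the same pattern: each file's parse is isolated in its own try/except so a single unreadable or malformed log cannot abort the whole aggregation. A minimal standalone sketch of that pattern, assuming parse_log_file returns a (columns, rows) tuple as it does in main.py; the helper name collect_rows is illustrative, not from this commit:

import os
from typing import Dict, List, Set, Tuple

def collect_rows(logs_dir: str, log_files: List[str]) -> Tuple[Set[str], List[Dict[str, str]]]:
    """Aggregate rows across many log files, skipping any file that fails to parse."""
    common_columns: Set[str] = set()
    all_rows: List[Dict[str, str]] = []
    for filename in log_files:
        log_path = os.path.join(logs_dir, filename)
        try:
            columns, rows = parse_log_file(log_path)  # from main.py
        except Exception as e:
            # One bad file is logged and skipped, never fatal.
            print(f"Error processing file {filename}: {e}")
            continue
        if columns:
            common_columns.update(columns)
        all_rows.extend(rows)
    return common_columns, all_rows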
@@ -355,13 +361,51 @@ class LogRow(BaseModel):

 @app.get("/view/{filename}", response_class=HTMLResponse)
 async def view_log(request: Request, filename: str):
     log_path = os.path.join(os.getcwd(), "logs", filename)
-    raw_content = ""
+    raw_content = None
     parsed_rows = []
     header_columns = []
     try:
-        with open(log_path, "r") as file:
-            raw_content = file.read()
+        # Read the file in binary mode first to check for encodings
+        with open(log_path, "rb") as file:
+            binary_content = file.read()
+
+        # Check for BOM (Byte Order Mark) at the beginning of the file
+        raw_content = None
+
+        # Check for UTF-16 LE BOM
+        if binary_content.startswith(b'\xff\xfe'):
+            try:
+                raw_content = binary_content.decode('utf-16-le')
+            except UnicodeDecodeError:
+                pass
+
+        # Check for UTF-16 BE BOM
+        if raw_content is None and binary_content.startswith(b'\xfe\xff'):
+            try:
+                raw_content = binary_content.decode('utf-16-be')
+            except UnicodeDecodeError:
+                pass
+
+        # Try UTF-8
+        if raw_content is None:
+            try:
+                raw_content = binary_content.decode('utf-8')
+            except UnicodeDecodeError:
+                pass
+
+        # Try common encodings if we still don't have content
+        if raw_content is None:
+            for encoding in ['utf-16', 'latin1', 'cp1252', 'iso-8859-1']:
+                try:
+                    raw_content = binary_content.decode(encoding)
+                    break
+                except UnicodeDecodeError:
+                    continue
+
+        # If all decodings fail, use latin1 as a fallback with replacement
+        if raw_content is None:
+            raw_content = binary_content.decode('latin1', errors='replace')

         header_columns, parsed_dict_rows = parse_log_file(log_path)
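This decoding cascade reappears verbatim in parse_log_file (last hunk below), with content in place of raw_content. A hedged sketch of how the duplication could be factored into one helper — decode_log_bytes is a hypothetical name, not part of this commit. Note that latin1 maps every byte value, so the loop's latin1 attempt already guarantees a result; the errors='replace' line is a belt-and-braces last resort:

def decode_log_bytes(binary_content: bytes) -> str:
    """Decode log bytes: BOM-indicated encodings first, then UTF-8, then common fallbacks."""
    # UTF-16 BOMs take priority over content sniffing
    if binary_content.startswith(b'\xff\xfe'):
        try:
            return binary_content.decode('utf-16-le')
        except UnicodeDecodeError:
            pass
    if binary_content.startswith(b'\xfe\xff'):
        try:
            return binary_content.decode('utf-16-be')
        except UnicodeDecodeError:
            pass
    # Same attempt order as the commit: UTF-8, then the common-encodings list
    for encoding in ['utf-8', 'utf-16', 'latin1', 'cp1252', 'iso-8859-1']:
        try:
            return binary_content.decode(encoding)
        except UnicodeDecodeError:
            continue
    # Unreachable in practice (latin1 accepts any byte); kept for parity with the commit
    return binary_content.decode('latin1', errors='replace')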
@@ -427,6 +471,7 @@ def get_all_logs() -> List[LogEntry]:
     result = []
     for filename in log_files:
+        try:
             gateway, timestamp = parse_filename(filename)
             if gateway and timestamp:
                 result.append(LogEntry(
@@ -434,6 +479,10 @@ def get_all_logs() -> List[LogEntry]:
                     timestamp=timestamp,
                     filename=filename
                 ))
+            else:
+                print(f"Could not parse filename: {filename}")
+        except Exception as e:
+            print(f"Error processing log file {filename}: {e}")

     # Sort by timestamp descending (newest first)
     result.sort(key=lambda x: x.timestamp, reverse=True)
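The new else branch distinguishes "filename did not match the expected pattern" (parse_filename returns falsy values) from "parsing raised" (the except branch). The real naming convention lives in parse_filename in main.py; the stub below assumes, purely for illustration, names like gw1_2025-04-15.log:

import re
from datetime import datetime
from typing import Optional, Tuple

# Hypothetical stand-in for main.py's parse_filename; the format is an assumption.
def parse_filename_stub(filename: str) -> Tuple[Optional[str], Optional[datetime]]:
    m = re.match(r'(?P<gw>[^_]+)_(?P<date>\d{4}-\d{2}-\d{2})\.log$', filename)
    if not m:
        return None, None  # get_all_logs would print "Could not parse filename: ..."
    return m.group('gw'), datetime.strptime(m.group('date'), '%Y-%m-%d')

print(parse_filename_stub('gw1_2025-04-15.log'))  # ('gw1', datetime.datetime(2025, 4, 15, 0, 0))
print(parse_filename_stub('README.txt'))          # (None, None)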
@@ -459,8 +508,47 @@ def parse_log_file(log_path):

     header_columns = []
     try:
-        with open(log_path, "r") as file:
-            content = file.read()
+        # Read the file in binary mode first to check for encodings
+        with open(log_path, "rb") as file:
+            binary_content = file.read()
+
+        # Check for BOM (Byte Order Mark) at the beginning of the file
+        content = None
+
+        # Check for UTF-16 LE BOM
+        if binary_content.startswith(b'\xff\xfe'):
+            try:
+                content = binary_content.decode('utf-16-le')
+            except UnicodeDecodeError:
+                pass
+
+        # Check for UTF-16 BE BOM
+        if content is None and binary_content.startswith(b'\xfe\xff'):
+            try:
+                content = binary_content.decode('utf-16-be')
+            except UnicodeDecodeError:
+                pass
+
+        # Try UTF-8
+        if content is None:
+            try:
+                content = binary_content.decode('utf-8')
+            except UnicodeDecodeError:
+                pass
+
+        # Try common encodings if we still don't have content
+        if content is None:
+            for encoding in ['utf-16', 'latin1', 'cp1252', 'iso-8859-1']:
+                try:
+                    content = binary_content.decode(encoding)
+                    break
+                except UnicodeDecodeError:
+                    continue
+
+        # If all decodings fail, use latin1 as a fallback with replacement
+        if content is None:
+            content = binary_content.decode('latin1', errors='replace')

         lines = content.splitlines()

         # Find the "SSL-VPN sessions:" section
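A quick way to check the cascade's ordering (not part of the commit; reuses the decode_log_bytes sketch from above). One quirk worth knowing: decoding with utf-16-le does not strip the BOM, so the decoded text starts with U+FEFF — the same holds for the commit's code:

utf16_sample = b'\xff\xfe' + 'user1  10.0.0.5\n'.encode('utf-16-le')  # explicit UTF-16 LE BOM
ascii_sample = b'user2  10.0.0.6\n'                                   # plain ASCII, valid UTF-8

print(repr(decode_log_bytes(utf16_sample)))  # '\ufeffuser1  10.0.0.5\n' -- BOM branch
print(repr(decode_log_bytes(ascii_sample)))  # 'user2  10.0.0.6\n' -- UTF-8 attempt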