commit 0e3323b7abd07bc3e7f5c2090950ac416f03209a Author: CaffeineFueled Date: Thu Apr 10 21:40:30 2025 +0200 init diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..53d5862 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,43 @@ +# Git +.git +.gitignore + +# Python cache files +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +.pytest_cache/ +htmlcov/ +.coverage +.coverage.* +.cache/ + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Docker +Dockerfile +docker-compose.yml +.dockerignore + +# Database +*.db +*.sqlite3 +*.json + +# Logs +*.log + +# VS Code +.vscode/ + +# Local data +uniteddomain.csv \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..7222a10 --- /dev/null +++ b/.gitignore @@ -0,0 +1,98 @@ +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST +*.manifest +*.spec +pip-log.txt +pip-delete-this-directory.txt + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Unit tests / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ +pytestdebug.log + +# Database +*.db +*.sqlite3 +*.sqlite +domains_db.json + +# FastAPI project +domains_db.json +*.csv +!requirements.txt + +# Logs +logs/ +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# IDE specific files +.idea/ +.vscode/ +*.swp +*.swo +*~ +.DS_Store + +# Docker +.docker/ +docker-data/ +data/ + +# Jupyter Notebook +.ipynb_checkpoints + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Node dependencies +node_modules/ + +# Temporary files +tmp/ +temp/ +*.tmp +.temp \ No newline at end of file diff --git a/CONTAINER_INSTRUCTIONS.md b/CONTAINER_INSTRUCTIONS.md new file mode 100644 index 0000000..80a194f --- /dev/null +++ b/CONTAINER_INSTRUCTIONS.md @@ -0,0 +1,227 @@ +# Container Instructions for VPN Session Viewer + +This guide explains how to run the VPN Session Viewer application in a secure rootless container with persistent log storage using Podman or Docker. + +## Prerequisites + +- [Podman](https://podman.io/getting-started/installation) (version 3.0 or higher) or [Docker](https://docs.docker.com/get-docker/) (version 20.10 or higher) + +## Security Features + +This deployment includes the following security features: + +1. **Rootless container**: The application runs as a non-root user (UID 1000) +2. **Dropped capabilities**: All Linux capabilities are dropped +3. **No privilege escalation**: The container cannot gain additional privileges +4. **Minimal base image**: Uses a slim Python image to reduce attack surface +5. **Non-privileged ports**: Uses port 8000 instead of privileged ports (<1024) +6. **Persistent volume**: VPN logs are stored in a volume for persistence + +## Quick Start with Provided Script + +The easiest way to run the container is using the included script: + +```bash +./run_container.sh +``` + +This script will automatically: +1. Detect whether to use Podman or Docker +2. Build the container image +3. Create a logs directory if it doesn't exist +4. Run the container with all necessary security settings + +## Manual Setup with Podman + +### Building the Container + +```bash +podman build -t vpn-session-viewer:latest . 
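+
+# Optional sanity check (an added example, not part of the original instructions):
+# confirm the image was built before moving on
+podman image exists vpn-session-viewer:latest && echo "image built"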
+``` + +### Creating the Logs Directory + +```bash +mkdir -p ./logs +``` + +### Running the Container + +```bash +podman run --name vpn-session-viewer \ + -p 8000:8000 \ + -v ./logs:/home/appuser/app/logs:Z \ + --security-opt no-new-privileges:true \ + --cap-drop ALL \ + --user 1000:1000 \ + -d vpn-session-viewer:latest +``` + +### Checking Container Status + +```bash +podman ps +``` + +### Accessing the Application + +Open your browser to: +``` +http://localhost:8000 +``` + +## Manual Setup with Docker + +### Building the Container + +```bash +docker build -t vpn-session-viewer:latest . +``` + +### Creating the Logs Directory + +```bash +mkdir -p ./logs +``` + +### Running the Container + +```bash +docker run --name vpn-session-viewer \ + -p 8000:8000 \ + -v ./logs:/home/appuser/app/logs \ + --security-opt no-new-privileges:true \ + --cap-drop ALL \ + --user 1000:1000 \ + -d vpn-session-viewer:latest +``` + +### Checking Container Status + +```bash +docker ps +``` + +### Accessing the Application + +Open your browser to: +``` +http://localhost:8000 +``` + +## Working with VPN Logs + +### Log File Format + +Log files should follow this naming convention: +``` +{gateway-name}_{ISO-timestamp}.logs +``` + +Example: `firewall-1_2025-04-10T17:04:51Z.logs` + +### Adding Log Files + +Simply place your VPN log files in the `./logs` directory on your host machine. The container will automatically access them. + +## Maintenance + +### View Logs + +**Podman:** +```bash +podman logs vpn-session-viewer +``` + +**Docker:** +```bash +docker logs vpn-session-viewer +``` + +### Restart the Application + +**Podman:** +```bash +podman restart vpn-session-viewer +``` + +**Docker:** +```bash +docker restart vpn-session-viewer +``` + +### Stop the Application + +**Podman:** +```bash +podman stop vpn-session-viewer +``` + +**Docker:** +```bash +docker stop vpn-session-viewer +``` + +### Remove the Container + +**Podman:** +```bash +podman rm vpn-session-viewer +``` + +**Docker:** +```bash +docker rm vpn-session-viewer +``` + +## Troubleshooting + +### Check Container Status + +**Podman:** +```bash +podman ps -a +``` + +**Docker:** +```bash +docker ps -a +``` + +### Inspect the Container + +**Podman:** +```bash +podman inspect vpn-session-viewer +``` + +**Docker:** +```bash +docker inspect vpn-session-viewer +``` + +### Access Container Shell + +**Podman:** +```bash +podman exec -it vpn-session-viewer bash +``` + +**Docker:** +```bash +docker exec -it vpn-session-viewer bash +``` + +### Check Files in Container + +To verify logs are correctly mounted: + +**Podman:** +```bash +podman exec -it vpn-session-viewer ls -la /home/appuser/app/logs +``` + +**Docker:** +```bash +docker exec -it vpn-session-viewer ls -la /home/appuser/app/logs +``` \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..0241c77 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,41 @@ +# Use Python 3.11 slim image for a smaller footprint +FROM python:3.11-slim + +# Set environment variables +ENV PYTHONDONTWRITEBYTECODE=1 \ + PYTHONUNBUFFERED=1 \ + PIP_NO_CACHE_DIR=1 \ + PIP_DISABLE_PIP_VERSION_CHECK=1 \ + HOME=/home/appuser \ + APP_HOME=/home/appuser/app + +# Create non-root user and setup directories +RUN groupadd -g 1000 appgroup && \ + useradd -m -u 1000 -g appgroup -s /bin/bash -d ${HOME} appuser && \ + mkdir -p ${APP_HOME} && \ + mkdir -p ${APP_HOME}/logs && \ + mkdir -p ${APP_HOME}/templates && \ + chown -R appuser:appgroup ${HOME} + +# Set the working directory +WORKDIR ${APP_HOME} + +# Install 
dependencies
+COPY --chown=appuser:appgroup requirements.txt ${APP_HOME}/
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Copy application code
+COPY --chown=appuser:appgroup main.py ${APP_HOME}/
+COPY --chown=appuser:appgroup templates/ ${APP_HOME}/templates/
+
+# Create a volume for logs
+VOLUME ["${APP_HOME}/logs"]
+
+# Switch to non-root user
+USER appuser
+
+# Expose port
+EXPOSE 8000
+
+# Command to run the application
+CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..9b94743
--- /dev/null
+++ b/README.md
@@ -0,0 +1,152 @@
+# VPN Session Viewer
+
+A simple FastAPI application that displays VPN session logs through a clean HTML interface and a set of API endpoints.
+
+## Features
+
+- Display all VPN session logs in a clean HTML interface
+- Filter logs by gateway name
+- Prioritize the "SSL-VPN sessions:" section of each log file
+- View individual log file contents in a structured table format
+- Parse VPN connection data from CLI-style log files
+- Combined view of all VPN sessions, with filtering by gateway, date/time range precise to the minute, and text search
+- API endpoints for programmatic access to both log metadata and parsed content, with the same filtering options
+
+## Setup
+
+### Option 1: Local Setup
+
+1. Create a virtual environment:
+   ```
+   python -m venv venv
+   source venv/bin/activate  # On Windows: venv\Scripts\activate
+   ```
+
+2. Install dependencies:
+   ```
+   pip install -r requirements.txt
+   ```
+
+3. Run the application:
+   ```
+   python main.py
+   ```
+
+4. Access the web interface at http://localhost:8000
+
+### Option 2: Docker Setup
+
+A Docker/Podman setup is included for easy deployment:
+
+1. Make sure Docker or Podman is installed on your system
+
+2. Run the container setup script:
+   ```
+   ./run_container.sh
+   ```
+
+3. Access the web interface at http://localhost:8000
+
+4. Place your VPN log files in the `./logs` directory; the container picks them up automatically
+
+5.
To stop the container: + ``` + docker stop vpn-session-viewer # If using Docker + podman stop vpn-session-viewer # If using Podman + ``` + +## Log File Format + +Log files should follow this naming convention: +``` +{gateway-name}_{ISO-timestamp}.logs +``` + +Example: `firewall-1_2025-04-10T17:04:51Z.logs` + +## API Endpoints + +- `GET /api/logs` - Get all logs +- `GET /api/logs?gateway={name}` - Filter logs by gateway name +- `GET /api/gateways` - Get a list of all gateway names +- `GET /api/log-content/{filename}` - Get parsed log content in structured format +- `GET /api/all-entries` - Get all parsed entries from all log files +- `GET /api/all-entries?gateway={name}` - Filter combined entries by gateway +- `GET /api/all-entries?start_date={datetime}` - Filter entries by start date/time (ISO format) +- `GET /api/all-entries?end_date={datetime}` - Filter entries by end date/time (ISO format) +- `GET /api/all-entries?use_default_time` - Show only entries from last 30 minutes +- `GET /api/all-entries?search={term}` - Search across all entries +- `GET /api/all-entries?gateway={name}&start_date={date}&end_date={date}&search={term}` - Combined filters + +## Example API Responses + +**GET /api/logs** +```json +[ + { + "gateway": "firewall-1", + "timestamp": "2025-04-10T17:10:51+00:00", + "filename": "firewall-1_2025-04-10T17:10:51Z.logs" + }, + { + "gateway": "firewall-1", + "timestamp": "2025-04-10T17:04:51+00:00", + "filename": "firewall-1_2025-04-10T17:04:51Z.logs" + }, + { + "gateway": "device-1", + "timestamp": "2025-04-10T17:04:51+00:00", + "filename": "device-1_2025-04-10T17:04:51Z.logs" + } +] +``` + +**GET /api/gateways** +```json +[ + "device-1", + "firewall-1" +] +``` + +**GET /api/log-content/device-1_2025-04-10T17:04:51Z.logs** +```json +[ + { + "Index": "0", + "User": "Norbert.Hoeller@example.com", + "Group": "g_VPN_SAP_Service_SSO", + "Auth Type": "256(1)", + "Timeout": "105900", + "Auth-Timeout": "105900", + "From": "78.35.118.145", + "HTTP in/out": "0/0", + "HTTPS in/out": "0/0", + "Two-factor Auth": "0" + }, + { + "Index": "1", + "User": "r_Andreini.M@example.onmicrosoft.com", + "Group": "G_VPN_EXTERN_EID_SSO", + "Auth Type": "256(1)", + "Timeout": "172503", + "Auth-Timeout": "172503", + "From": "195.72.210.237", + "HTTP in/out": "0/0", + "HTTPS in/out": "0/0", + "Two-factor Auth": "0" + }, + { + "Index": "2", + "User": "Waldemar.Roth@example.com", + "Group": "g_VPN_Controlling_SSO", + "Auth Type": "256(1)", + "Timeout": "172439", + "Auth-Timeout": "172439", + "From": "87.151.79.111", + "HTTP in/out": "0/0", + "HTTPS in/out": "0/0", + "Two-factor Auth": "0" + } +] +``` \ No newline at end of file diff --git a/logs/.gitignore b/logs/.gitignore new file mode 100644 index 0000000..5e7d273 --- /dev/null +++ b/logs/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +!.gitignore diff --git a/main.py b/main.py new file mode 100644 index 0000000..9831d45 --- /dev/null +++ b/main.py @@ -0,0 +1,528 @@ +from fastapi import FastAPI, Request, Query, HTTPException +from fastapi.responses import HTMLResponse +from fastapi.templating import Jinja2Templates +from fastapi.staticfiles import StaticFiles +from typing import List, Optional +import os +import re +from datetime import datetime, timedelta +from pydantic import BaseModel + +app = FastAPI(title="VPN Session Viewer") + +templates = Jinja2Templates(directory="templates") + +# Model for log entries +class LogEntry(BaseModel): + gateway: str + timestamp: datetime + filename: str + +@app.get("/", 
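+         # Home page: renders templates/index.html with the log list, optionally filtered by ?gateway=<name>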
+          response_class=HTMLResponse)
+async def home(request: Request, gateway: Optional[str] = None):
+    all_logs = get_all_logs()
+    logs = all_logs
+    
+    if gateway:
+        logs = [log for log in all_logs if log.gateway == gateway]
+    
+    # Build the dropdown from the unfiltered list so every gateway stays selectable
+    gateways = sorted(set(log.gateway for log in all_logs))
+    
+    return templates.TemplateResponse("index.html", {
+        "request": request,
+        "logs": logs,
+        "gateways": gateways,
+        "selected_gateway": gateway
+    })
+
+@app.get("/api/logs", response_model=List[LogEntry])
+async def api_logs(gateway: Optional[str] = None):
+    """Get all logs or filter by gateway name"""
+    logs = get_all_logs()
+    
+    if gateway:
+        logs = [log for log in logs if log.gateway == gateway]
+    
+    return logs
+
+@app.get("/api/gateways", response_model=List[str])
+async def api_gateways():
+    """Get list of unique gateway names"""
+    logs = get_all_logs()
+    gateways = set(log.gateway for log in logs)
+    return sorted(gateways)
+
+@app.get("/api/log-content/{filename}", response_model=List[dict])
+async def api_log_content(filename: str):
+    """Get parsed log content for a specific file"""
+    log_path = os.path.join(os.getcwd(), "logs", filename)
+    
+    # parse_log_file() swallows exceptions internally, so check for the file
+    # explicitly instead of relying on FileNotFoundError propagating
+    if not os.path.isfile(log_path):
+        raise HTTPException(status_code=404, detail=f"Log file {filename} not found")
+    
+    _, parsed_rows = parse_log_file(log_path)
+    return parsed_rows
+
+@app.get("/combined", response_class=HTMLResponse)
+async def combined_view(
+    request: Request,
+    gateway: Optional[str] = None,
+    search: Optional[str] = None,
+    start_date: Optional[str] = None,
+    end_date: Optional[str] = None,
+    use_default_time: Optional[str] = None
+):
+    """Combined view of all logs with filtering and search"""
+    # An empty form value still counts as "enabled"; only absence disables it
+    use_default_time_bool = use_default_time is not None
+    logs_dir = os.path.join(os.getcwd(), "logs")
+    all_rows = []
+    common_columns = set()
+    
+    # Parse date strings into datetime objects if provided,
+    # or set defaults if none are given and use_default_time is set
+    start_datetime = None
+    end_datetime = None
+    
+    if not start_date and not end_date and use_default_time_bool:
+        # Naive UTC "now" so comparisons match the naive-UTC filename timestamps
+        end_datetime = datetime.utcnow()
+        # Default window: the last 30 minutes
+        start_datetime = end_datetime - timedelta(minutes=30)
+    else:
+        # Process provided dates
+        if start_date:
+            try:
+                # Handle both ISO format and datetime-local input format
+                if 'T' in start_date:
+                    # Make sure we have seconds if not provided
+                    if len(start_date.split('T')[1].split(':')) == 2:
+                        start_date = f"{start_date}:00"
+                    # Add timezone if missing
+                    if not start_date.endswith('Z') and '+' not in start_date:
+                        start_date = f"{start_date}Z"
+                else:
+                    # If only a date without a time, set time to start of day
+                    start_date = f"{start_date}T00:00:00Z"
+                
+                # Parse and remove timezone for consistent comparisons
+                start_datetime = datetime.fromisoformat(start_date.replace('Z', '+00:00')).replace(tzinfo=None)
+            except ValueError as e:
+                print(f"Error parsing start_date: {e}")
+        
+        if end_date:
+            try:
+                # Handle both ISO format and datetime-local input format
+                if 'T' in end_date:
+                    # Make sure we have seconds if not provided
+                    if len(end_date.split('T')[1].split(':')) == 2:
+                        end_date = f"{end_date}:00"
+                    # Add timezone if missing
+                    if not end_date.endswith('Z') and '+' not in end_date:
+                        end_date = f"{end_date}Z"
+                else:
+                    # If only a date without a time, set time to end of day
+                    end_date = f"{end_date}T23:59:59Z"
+                
+                # Parse and remove timezone for consistent comparisons
+                end_datetime = datetime.fromisoformat(end_date.replace('Z', '+00:00')).replace(tzinfo=None)
+            except ValueError as e:
+                print(f"Error parsing end_date: {e}")
+    
+    # Get all log files
+    log_files = [f for f in os.listdir(logs_dir) if f.endswith(".logs")]
+    
+    # Parse every log file once, collecting rows, the full column set, and the
+    # column order of the first file that has a header (the display reference order)
+    reference_columns = []
+    for filename in log_files:
+        log_path = os.path.join(logs_dir, filename)
+        columns, rows = parse_log_file(log_path)
+        
+        if columns:
+            common_columns.update(columns)
+            if not reference_columns:
+                reference_columns = columns
+        
+        all_rows.extend(rows)
+    
+    # Collect gateway names for the filter dropdown before filtering,
+    # so every gateway stays selectable
+    gateways = sorted(set(row.get("_gateway") for row in all_rows if row.get("_gateway")))
+    
+    # Apply gateway filter if specified
+    if gateway:
+        all_rows = [row for row in all_rows if row.get("_gateway") == gateway]
+    
+    # Apply date range filter if specified
+    if start_datetime or end_datetime:
+        filtered_rows = []
+        for row in all_rows:
+            timestamp = row.get("_timestamp")
+            if timestamp:
+                if start_datetime and timestamp < start_datetime:
+                    continue
+                if end_datetime and timestamp > end_datetime:
+                    continue
+                filtered_rows.append(row)
+        all_rows = filtered_rows
+    
+    # Apply search filter if specified
+    if search and search.strip():
+        search_term = search.lower()
+        filtered_rows = []
+        
+        for row in all_rows:
+            for key, value in row.items():
+                if isinstance(value, str) and search_term in value.lower():
+                    filtered_rows.append(row)
+                    break
+        
+        all_rows = filtered_rows
+    
+    # Sort by timestamp descending (newest first); "or datetime.min" keeps rows
+    # with a missing timestamp sortable instead of raising a TypeError
+    all_rows.sort(key=lambda x: x.get("_timestamp") or datetime.min, reverse=True)
+    
+    # Ensure all common columns are included while preserving original order where possible
+    display_columns = []
+    # First add columns in the reference order
+    for col in reference_columns:
+        if col in common_columns:
+            display_columns.append(col)
+            common_columns.remove(col)
+    
+    # Add any remaining columns
+    display_columns.extend(sorted(common_columns))
+    
+    # Add metadata columns last
+    meta_columns = ["_gateway", "_timestamp", "_source_file"]
+    final_columns = display_columns + meta_columns
+    
+    # Format dates for display in datetime-local form fields
+    formatted_start_date = start_datetime.strftime('%Y-%m-%dT%H:%M') if start_datetime else ""
+    formatted_end_date = end_datetime.strftime('%Y-%m-%dT%H:%M') if end_datetime else ""
+    
+    return templates.TemplateResponse("combined.html", {
+        "request": request,
+        "rows": all_rows,
+        "columns": final_columns,
+        "gateways": gateways,
+        "selected_gateway": gateway,
+        "search_term": search,
+        "start_date": formatted_start_date,
+        "end_date": formatted_end_date
+    })
+
+@app.get("/api/all-entries", response_model=List[dict])
+async def api_all_entries(
+    gateway: Optional[str] = None,
+    search: Optional[str] = None,
+    start_date: Optional[str] = None,
+    end_date: Optional[str] = None,
+    use_default_time: Optional[str] = None
+):
+    """Get all log entries from all files with optional filtering"""
+    # An empty query value still counts as "enabled"; only absence disables it
+    use_default_time_bool = use_default_time is not None
+    logs_dir = os.path.join(os.getcwd(), "logs")
+    all_rows = []
+    
+    # Parse date strings into datetime objects if provided,
+    # or set defaults if none are given and use_default_time is set
+    start_datetime = None
+    end_datetime = None
+    
+    if not start_date and not end_date and use_default_time_bool:
+        # Naive UTC "now" so comparisons match the naive-UTC filename timestamps
+        end_datetime = datetime.utcnow()
+        # Default window: the last 30 minutes
+        start_datetime = end_datetime - timedelta(minutes=30)
+    else:
+        if start_date:
+            try:
+                # Handle both ISO format and datetime-local input format
+                if 'T' in start_date:
+                    # Make sure we have seconds if not provided
+                    if len(start_date.split('T')[1].split(':')) == 2:
+                        start_date = f"{start_date}:00"
+                    # Add timezone if missing
+                    if not start_date.endswith('Z') and '+' not in start_date:
+                        start_date = f"{start_date}Z"
+                else:
+                    # If only a date without a time, set time to start of day
+                    start_date = f"{start_date}T00:00:00Z"
+                
+                # Parse and remove timezone for consistent comparisons
+                start_datetime = datetime.fromisoformat(start_date.replace('Z', '+00:00')).replace(tzinfo=None)
+            except ValueError as e:
+                print(f"Error parsing start_date: {e}")
+        
+        if end_date:
+            try:
+                # Handle both ISO format and datetime-local input format
+                if 'T' in end_date:
+                    # Make sure we have seconds if not provided
+                    if len(end_date.split('T')[1].split(':')) == 2:
+                        end_date = f"{end_date}:00"
+                    # Add timezone if missing
+                    if not end_date.endswith('Z') and '+' not in end_date:
+                        end_date = f"{end_date}Z"
+                else:
+                    # If only a date without a time, set time to end of day
+                    end_date = f"{end_date}T23:59:59Z"
+                
+                # Parse and remove timezone for consistent comparisons
+                end_datetime = datetime.fromisoformat(end_date.replace('Z', '+00:00')).replace(tzinfo=None)
+            except ValueError as e:
+                print(f"Error parsing end_date: {e}")
+    
+    # Get all log files
+    log_files = [f for f in os.listdir(logs_dir) if f.endswith(".logs")]
+    
+    # Parse all log files and collect their rows
+    for filename in log_files:
+        log_path = os.path.join(logs_dir, filename)
+        _, rows = parse_log_file(log_path)
+        all_rows.extend(rows)
+    
+    # Apply gateway filter if specified
+    if gateway:
+        all_rows = [row for row in all_rows if row.get("_gateway") == gateway]
+    
+    # Apply date range filter if specified
+    if start_datetime or end_datetime:
+        filtered_rows = []
+        for row in all_rows:
+            timestamp = row.get("_timestamp")
+            if timestamp:
+                if start_datetime and timestamp < start_datetime:
+                    continue
+                if end_datetime and timestamp > end_datetime:
+                    continue
+                filtered_rows.append(row)
+        all_rows = filtered_rows
+    
+    # Apply search filter if specified
+    if search and search.strip():
+        search_term = search.lower()
+        filtered_rows = []
+        
+        for row in all_rows:
+            for key, value in row.items():
+                if isinstance(value, str) and search_term in value.lower():
+                    filtered_rows.append(row)
+                    break
+        
+        all_rows = filtered_rows
+    
+    # Sort by timestamp descending (newest first); "or datetime.min" keeps rows
+    # with a missing timestamp sortable instead of raising a TypeError
+    all_rows.sort(key=lambda x: x.get("_timestamp") or datetime.min, reverse=True)
+    
+    return all_rows
+
+class LogRow(BaseModel):
+    """Model for a parsed log row"""
+    index: Optional[int] = None
+    user: Optional[str] = None
+    group: Optional[str] = None
+    # Fields for Login Users section
+    auth_type: Optional[str] = None
+    timeout: Optional[str] = None
+    auth_timeout: Optional[str] = None
+    from_ip: Optional[str] = None
+    http: Optional[str] = None
+    https:
Optional[str] = None + two_factor: Optional[str] = None + # Fields for Sessions section + source_ip: Optional[str] = None + duration: Optional[str] = None + io_bytes: Optional[str] = None + tunnel_dest_ip: Optional[str] = None + # Generic field for raw line + raw_line: str + +@app.get("/view/{filename}", response_class=HTMLResponse) +async def view_log(request: Request, filename: str): + log_path = os.path.join(os.getcwd(), "logs", filename) + raw_content = "" + parsed_rows = [] + header_columns = [] + + try: + with open(log_path, "r") as file: + raw_content = file.read() + + header_columns, parsed_dict_rows = parse_log_file(log_path) + + # Convert dictionary rows to LogRow objects for backward compatibility with the template + for row_dict in parsed_dict_rows: + row = LogRow(raw_line="") + + # Common fields + if "Index" in row_dict and row_dict["Index"].isdigit(): + row.index = int(row_dict["Index"]) + if "User" in row_dict: + row.user = row_dict["User"] + if "Group" in row_dict: + row.group = row_dict["Group"] + + # Login Users fields + if "Auth Type" in row_dict: + row.auth_type = row_dict["Auth Type"] + if "Timeout" in row_dict: + row.timeout = row_dict["Timeout"] + if "Auth-Timeout" in row_dict: + row.auth_timeout = row_dict["Auth-Timeout"] + if "From" in row_dict: + row.from_ip = row_dict["From"] + if "HTTP in/out" in row_dict: + row.http = row_dict["HTTP in/out"] + if "HTTPS in/out" in row_dict: + row.https = row_dict["HTTPS in/out"] + if "Two-factor Auth" in row_dict: + row.two_factor = row_dict["Two-factor Auth"] + + # VPN Sessions fields + if "Source IP" in row_dict: + row.source_ip = row_dict["Source IP"] + if "Duration" in row_dict: + row.duration = row_dict["Duration"] + if "I/O Bytes" in row_dict: + row.io_bytes = row_dict["I/O Bytes"] + if "Tunnel/Dest IP" in row_dict: + row.tunnel_dest_ip = row_dict["Tunnel/Dest IP"] + + parsed_rows.append(row) + + except FileNotFoundError: + raw_content = f"Log file {filename} not found" + + gateway, timestamp = parse_filename(filename) + + return templates.TemplateResponse("view.html", { + "request": request, + "filename": filename, + "gateway": gateway, + "timestamp": timestamp, + "raw_content": raw_content, + "parsed_rows": parsed_rows, + "columns": header_columns + }) + +def get_all_logs() -> List[LogEntry]: + """Get all log files in the logs directory""" + logs_dir = os.path.join(os.getcwd(), "logs") + log_files = [f for f in os.listdir(logs_dir) if f.endswith(".logs")] + result = [] + + for filename in log_files: + gateway, timestamp = parse_filename(filename) + if gateway and timestamp: + result.append(LogEntry( + gateway=gateway, + timestamp=timestamp, + filename=filename + )) + + # Sort by timestamp descending (newest first) + result.sort(key=lambda x: x.timestamp, reverse=True) + return result + +def parse_filename(filename: str): + """Parse gateway name and timestamp from filename""" + pattern = r"^(.+)_(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z)\.logs$" + match = re.match(pattern, filename) + + if match: + gateway = match.group(1) + timestamp_str = match.group(2) + # Parse timestamp but remove timezone info for consistent comparisons + timestamp = datetime.fromisoformat(timestamp_str.replace('Z', '+00:00')).replace(tzinfo=None) + return gateway, timestamp + + return None, None + +def parse_log_file(log_path): + """Parse a log file and return header columns and rows""" + parsed_rows = [] + header_columns = [] + + try: + with open(log_path, "r") as file: + content = file.read() + lines = content.splitlines() + + # Find the "SSL-VPN 
sessions:" section + session_section_start = None + for i, line in enumerate(lines): + if "SSL-VPN sessions:" in line: + session_section_start = i + break + + if session_section_start is None: + # If SSL-VPN sessions section not found, fall back to the login users section + for i, line in enumerate(lines): + if "SSL-VPN Login Users:" in line: + session_section_start = i + break + + if session_section_start is None: + # No recognized sections found + return header_columns, parsed_rows + + # Find header line with column names (it should be right after the section title) + header_line_idx = session_section_start + 1 + if header_line_idx < len(lines): + header_line = lines[header_line_idx] + if "Index" in header_line and "User" in header_line and "Group" in header_line: + # Preserve exact order of columns from file + header_columns = [col.strip() for col in header_line.split("\t") if col.strip()] + + # Parse data rows + for line in lines[header_line_idx+1:]: + # Stop parsing when we hit an empty line or a new section + if not line.strip() or line.strip().endswith("#"): + break + + if line.strip() and not line.startswith("FBI-HQ-SSLVPN #"): + columns = [col.strip() for col in line.split("\t") if col] + row_data = {} + + # Map columns to dictionary in original order with extra whitespace handling + for i, col in enumerate(columns): + if i < len(header_columns): + column_name = header_columns[i] + # Triple strip to ensure all possible whitespace is removed + clean_value = col.strip() if col else "" + # Special handling for Tunnel/Dest IP which may have extra spaces + if column_name == "Tunnel/Dest IP": + clean_value = clean_value.strip() + row_data[column_name] = clean_value + + # Add source filename metadata + filename = os.path.basename(log_path) + gateway, timestamp = parse_filename(filename) + row_data["_source_file"] = filename + row_data["_gateway"] = gateway + row_data["_timestamp"] = timestamp + + parsed_rows.append(row_data) + except Exception as e: + print(f"Error parsing log file {log_path}: {e}") + + return header_columns, parsed_rows + +if __name__ == "__main__": + import uvicorn + uvicorn.run(app, host="0.0.0.0", port=8000) diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..6f91dc3 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,4 @@ +fastapi==0.104.1 +uvicorn==0.23.2 +jinja2==3.1.2 +pydantic==2.4.2 \ No newline at end of file diff --git a/run_container.sh b/run_container.sh new file mode 100755 index 0000000..0f972a9 --- /dev/null +++ b/run_container.sh @@ -0,0 +1,73 @@ +#!/bin/bash + +# Function to check if a command exists +command_exists() { + command -v "$1" >/dev/null 2>&1 +} + +# Set container name +CONTAINER_NAME="vpn-session-viewer" + +# Determine if we use podman or docker +if command_exists podman; then + CONTAINER_CMD="podman" + VOLUME_FLAG=":Z" + echo "Using Podman for container management." +elif command_exists docker; then + CONTAINER_CMD="docker" + VOLUME_FLAG="" + echo "Using Docker for container management." +else + echo "Error: Neither Podman nor Docker found. Please install one of them first." + exit 1 +fi + +# Stop and remove container if it exists +echo "Checking for existing container..." +if $CONTAINER_CMD ps -a --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then + echo "Stopping and removing existing ${CONTAINER_NAME} container..." + $CONTAINER_CMD stop ${CONTAINER_NAME} + $CONTAINER_CMD rm ${CONTAINER_NAME} +fi + +# Create volume if it doesn't exist +echo "Creating volume for VPN logs storage..." 
+$CONTAINER_CMD volume create vpn-logs + +# Build the container image +echo "Building container image..." +$CONTAINER_CMD build -t ${CONTAINER_NAME}:latest . + +# Set up local logs directory to mount +LOGS_DIR="./logs" +if [ ! -d "$LOGS_DIR" ]; then + echo "Creating logs directory..." + mkdir -p "$LOGS_DIR" +fi + +# Run the container +echo "Starting container..." +$CONTAINER_CMD run --name ${CONTAINER_NAME} \ + -p 8000:8000 \ + -v "$LOGS_DIR":/home/appuser/app/logs${VOLUME_FLAG} \ + --security-opt no-new-privileges:true \ + --cap-drop ALL \ + --user 1000:1000 \ + -d ${CONTAINER_NAME}:latest + +# Check if container started successfully +if [ $? -eq 0 ]; then + echo "Container started successfully!" + echo "VPN Session Viewer is available at: http://localhost:8000" + echo "" + echo "Container logs:" + $CONTAINER_CMD logs ${CONTAINER_NAME} + + echo "" + echo "Note: Log files should be placed in the ./logs directory." + echo " The format should be: {gateway-name}_{ISO-timestamp}.logs" + echo " Example: firewall-1_2025-04-10T17:04:51Z.logs" +else + echo "Failed to start container." + exit 1 +fi \ No newline at end of file diff --git a/templates/base.html b/templates/base.html new file mode 100644 index 0000000..e838e6e --- /dev/null +++ b/templates/base.html @@ -0,0 +1,170 @@ + + + + + + VPN Log Viewer + + + +
+

VPN Log Viewer

+
+
+ {% block content %}{% endblock %} +
+ + + \ No newline at end of file diff --git a/templates/combined.html b/templates/combined.html new file mode 100644 index 0000000..8389b49 --- /dev/null +++ b/templates/combined.html @@ -0,0 +1,133 @@ +{% extends "base.html" %} + +{% block content %} + +

Combined VPN Sessions View

+ + + +
+

API Endpoints

+

Get combined data via API: /api/all-entries

+

Filter by gateway: /api/all-entries?gateway={{ selected_gateway }}

+

Filter by date range: /api/all-entries?start_date={{ start_date }}&end_date={{ end_date }}

+

Use default time (last 30 min): /api/all-entries?use_default_time

+

Search: /api/all-entries?search={{ search_term }}

+

Combined filters: /api/all-entries?gateway={{ selected_gateway }}&start_date={{ start_date }}&end_date={{ end_date }}&search={{ search_term }}

+

Note: For API calls, date/time must be in ISO format (YYYY-MM-DDThh:mm:ss)

+
+ +
+
+
+
+ + +
+ +
+ + +
+ +
+ + +
+ +
+ + +
+ +
+ +
+ +
+ + Reset +
+
+
+
+ +
+ + + + {% for col in columns %} + + {% endfor %} + + + + {% for row in rows %} + + {% for col in columns %} + + {% endfor %} + + {% else %} + + + + {% endfor %} + +
+ {% if col.startswith('_') %} + {{ col[1:] | capitalize }} + {% else %} + {{ col }} + {% endif %} +
+ {% if col == '_source_file' %} + {{ row[col] }} + {% elif col == '_timestamp' and row[col] %} + {{ row[col].strftime('%Y-%m-%d %H:%M:%S UTC') }} + {% else %} + {% if row[col] is string %} + {{ row[col].strip() }} + {% elif row[col] is none %} + + {% else %} + {{ row[col] }} + {% endif %} + {% endif %} +
No matching logs found
+
+ +{% endblock %} \ No newline at end of file diff --git a/templates/index.html b/templates/index.html new file mode 100644 index 0000000..22e6ee5 --- /dev/null +++ b/templates/index.html @@ -0,0 +1,53 @@ +{% extends "base.html" %} + +{% block content %} +

VPN Session Logs

+ + + +
+

API Endpoints

+

Get all logs: /api/logs

+

Filter logs by gateway: /api/logs?gateway={{ selected_gateway }}

+

Get all gateways: /api/gateways

+

Get log content: /api/log-content/{filename}

+
+ +
+
+ + +
+
+ + + + + + + + + + + {% for log in logs %} + + + + + + {% else %} + + + + {% endfor %} + +
GatewayTimestampActions
{{ log.gateway }}{{ log.timestamp.strftime('%Y-%m-%d %H:%M:%S UTC') }}View
No logs found
+ +{% endblock %} \ No newline at end of file diff --git a/templates/view.html b/templates/view.html new file mode 100644 index 0000000..a22462f --- /dev/null +++ b/templates/view.html @@ -0,0 +1,99 @@ +{% extends "base.html" %} + +{% block content %} +

VPN Session Details

+ +

← Back to all logs

+ +
+

API Endpoints

+

Get log content via API: /api/log-content/{{ filename }}

+
+ +
+

Gateway: {{ gateway }}

+

Timestamp: {{ timestamp.strftime('%Y-%m-%d %H:%M:%S UTC') if timestamp else 'Unknown' }}

+

Filename: {{ filename }}

+
+ +

Log Content

+ +{% if parsed_rows %} +
+ + + + {% for col in columns %} + + {% endfor %} + + + + {% for row in parsed_rows %} + + {% if 'Index' in columns %} + + {% endif %} + + {% if 'User' in columns %} + + {% endif %} + + {% if 'Group' in columns %} + + {% endif %} + + {# VPN Login Users fields #} + {% if 'Auth Type' in columns %} + + {% endif %} + + {% if 'Timeout' in columns %} + + {% endif %} + + {% if 'Auth-Timeout' in columns %} + + {% endif %} + + {% if 'From' in columns %} + + {% endif %} + + {% if 'HTTP in/out' in columns %} + + {% endif %} + + {% if 'HTTPS in/out' in columns %} + + {% endif %} + + {% if 'Two-factor Auth' in columns %} + + {% endif %} + + {# VPN Sessions fields #} + {% if 'Source IP' in columns %} + + {% endif %} + + {% if 'Duration' in columns %} + + {% endif %} + + {% if 'I/O Bytes' in columns %} + + {% endif %} + + {% if 'Tunnel/Dest IP' in columns %} + + {% endif %} + + {% endfor %} + +
{{ col }}
{{ row.index }}{{ row.user.strip() if row.user else "" }}{{ row.group.strip() if row.group else "" }}{{ row.auth_type.strip() if row.auth_type else "" }}{{ row.timeout.strip() if row.timeout else "" }}{{ row.auth_timeout.strip() if row.auth_timeout else "" }}{{ row.from_ip.strip() if row.from_ip else "" }}{{ row.http.strip() if row.http else "" }}{{ row.https.strip() if row.https else "" }}{{ row.two_factor.strip() if row.two_factor else "" }}{{ row.source_ip.strip() if row.source_ip else "" }}{{ row.duration.strip() if row.duration else "" }}{{ row.io_bytes.strip() if row.io_bytes else "" }}{{ (row.tunnel_dest_ip.strip() if row.tunnel_dest_ip else "") }}
+
+{% else %} +
{{ raw_content }}
+{% endif %} +{% endblock %} \ No newline at end of file
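
Once the container from `run_container.sh` is up, the API endpoints documented in README.md can be exercised from a shell. A minimal sketch, assuming the service is reachable at localhost:8000; the gateway name and filename are the sample values from the README:

```bash
# List log metadata and known gateways
curl -s http://localhost:8000/api/logs
curl -s http://localhost:8000/api/gateways

# Parsed content of a single file (sample filename from the README)
curl -s "http://localhost:8000/api/log-content/firewall-1_2025-04-10T17:04:51Z.logs"

# Combined entries: one gateway, an ISO date/time window, and a text search
curl -s "http://localhost:8000/api/all-entries?gateway=firewall-1&start_date=2025-04-10T17:00:00&end_date=2025-04-10T18:00:00&search=example.com"

# Only entries from the last 30 minutes
curl -s "http://localhost:8000/api/all-entries?use_default_time"
```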