Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 40 additions & 3 deletions insightlog.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
import re
import calendar
from datetime import datetime
import csv
import json
import os

# Service settings
DEFAULT_NGINX = {
Expand Down Expand Up @@ -123,8 +126,8 @@ def get_date_filter(settings, minute=datetime.now().minute, hour=datetime.now().
def check_match(line, filter_pattern, is_regex=False, is_casesensitive=True, is_reverse=False):
"""Check if line contains/matches filter pattern"""
if is_regex:
check_result = re.match(filter_pattern, line) if is_casesensitive \
else re.match(filter_pattern, line, re.IGNORECASE)
check_result = re.search(filter_pattern, line) if is_casesensitive \
else re.search(filter_pattern, line, re.IGNORECASE) #THE BUG FIXED re.match-->re.search
else:
check_result = (filter_pattern in line) if is_casesensitive else (filter_pattern.lower() in line.lower())
if is_reverse:
Expand Down Expand Up @@ -295,6 +298,34 @@ def get_requests(service, data=None, filepath=None, filters=None):
return None


def export_results(requests, output_path="output"):
    """
    Export parsed requests to both CSV and JSON in the current working directory.

    Args:
        requests: List of dicts, one per parsed log record. Keys may differ
            between rows; the CSV header is the union of all keys seen,
            in first-seen order, and rows missing a column get an empty cell.
        output_path: Base filename. Any directory component or extension is
            stripped so the files always land in the current folder;
            falls back to "output" if nothing remains after stripping.

    Side effects:
        Writes '<base>.json' and '<base>.csv' and prints a one-line summary.
        Prints "Nothing to export." and returns early when requests is empty.
    """
    if not requests:
        print("Nothing to export.")
        return

    # Extract just the base name to ensure files save in the current folder
    base_name = os.path.splitext(os.path.basename(output_path))[0]
    if not base_name:
        base_name = "output"

    # Export to JSON. default=str covers non-JSON-serializable values
    # (e.g. datetime objects); ensure_ascii=False keeps non-ASCII log
    # text human-readable instead of \uXXXX-escaped.
    json_path = f"{base_name}.json"
    with open(json_path, 'w', encoding='utf-8') as f:
        json.dump(requests, f, indent=2, default=str, ensure_ascii=False)

    # Export to CSV.
    # BUG FIX: the original used requests[0].keys() as the header, so any
    # later row carrying a key absent from the first row made DictWriter
    # raise ValueError (extrasaction='raise' is the default). Build the
    # header from the union of all rows' keys instead, preserving
    # first-seen order via dict insertion ordering.
    fieldnames = {}
    for row in requests:
        for key in row:
            fieldnames[key] = None

    csv_path = f"{base_name}.csv"
    with open(csv_path, 'w', newline='', encoding='utf-8') as f:
        writer = csv.DictWriter(f, fieldnames=list(fieldnames))
        writer.writeheader()
        writer.writerows(requests)

    print(f"Exported {len(requests)} records to both '{json_path}' and '{csv_path}'.")

# CLI entry point
if __name__ == '__main__':
import argparse
Expand All @@ -303,6 +334,8 @@ def get_requests(service, data=None, filepath=None, filters=None):
parser.add_argument('--service', required=True, choices=['nginx', 'apache2', 'auth'], help='Type of log to analyze')
parser.add_argument('--logfile', required=True, help='Path to the log file')
parser.add_argument('--filter', required=False, default=None, help='String to filter log lines')
# Added the --export argument here
parser.add_argument('--export', required=False, default=None, help='Base filename for export (creates both .json and .csv)')
args = parser.parse_args()

filters = []
Expand All @@ -313,4 +346,8 @@ def get_requests(service, data=None, filepath=None, filters=None):
if requests:
for req in requests:
print(req)


# Added the call to your new export function here
if args.export:
export_results(requests, output_path=args.export)
#print(check_match(line="abc123def", filter_pattern=r"\d+", is_regex=True))
Loading