#!/bin/bash
# moved here: https://github.com/ParkWardRR/bmw-bm3-knock-analyzer/tree/main
# BMW BM3 Knock Analyzer v4.0 (macOS, robust 0-knock handling)
# bmw-bootmod3-knock-event-context-extractor.sh
# This script processes BM3 log CSVs for the BMW N20 engine to analyze knock events.
# It collects knock event data, adds context rows, generates checksums for validation,
# and produces a summary report. All output is timestamped and organized for easy review.
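#
# Example output layout for one run (illustrative; the timestamp and random IDs will differ):
#   <parent of INPUT_DIR>/Output/20250101-120000/
#     knock-analysis-1234.csv      # knock events + context rows, one file per input log
#     combined_knocks.csv          # all knock-event rows merged across logs
#     all_logs_concatenated.csv    # every row from every input log
#     checksums.txt, validation.txt, term_output.txt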
# --- NOTES ---
# - Edit INPUT_DIR to match your BM3 log folder.
# - BUFFER_PERCENT controls how many rows before/after a knock event you want to see for context (despite the name, it is a fixed row count, not a percentage).
# - The script is robust for 0-knock logs (it won't crash or output empty files).
# - All output is timestamped for easy tracking.
# - Checksums help ensure no files were corrupted or changed accidentally (see the example check after these notes).
# - For N20 tuning, you can adjust which columns to extract by editing the ENGINE-SPECIFIC PARAMETERS section.
# - No code logic has been changed; just comments and explanations added for clarity.
# - NEW: The script now also outputs a concatenated CSV of all logs, regardless of knock events.
#
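# Example (illustrative): to spot-check a log against checksums.txt later, recompute its MD5 with the
# same macOS `md5` tool the script uses and compare the values ("some-log.csv" below is a placeholder name):
#   md5 -q "/Users/twesh/Desktop/BM3/Input/some-log.csv"
#   grep "some-log.csv" checksums.txt
#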
# --- USER EDITABLE SECTION ---
# Set these to match your environment and engine/log file structure.
INPUT_DIR="/Users/twesh/Desktop/BM3/Input" # <--- Directory containing your BM3 log CSVs
OUTPUT_PREFIX="knock-analysis" # <--- Prefix for output knock event files
BUFFER_PERCENT=10 # <--- Number of rows before/after a knock event to include for context
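# (Illustrative: with BUFFER_PERCENT=10, a knock flagged at row 500 pulls rows 490-510 into the knock-event file.)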
MERGE_FINAL="combined_knocks.csv" # <--- Name for the merged knock events file
MERGE_ALL="all_logs_concatenated.csv" # <--- Name for the concatenated all-logs file (NEW)
# --- ENGINE-SPECIFIC PARAMETERS ---
# !!! IMPORTANT: Edit these if you are using logs from a different engine or logging software !!!
# These must match the exact column headers in your CSV log files.
KNOCK_COLUMN_NAME="Knock Detected[0/1]" # Column name for knock detection (0 = no knock, 1 = knock)
THROTTLE_COLUMN_NAME="Accel. Pedal[%]" # Column name for throttle pedal position (%)
IAT_COLUMN_NAME="IAT[F]" # Column name for Intake Air Temperature (°F)
# For other engines, update the above variables to match your log file's column headers.
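# Example header row (illustrative only; an actual BM3 export may include more columns or order them differently):
#   Time[s],RPM,Accel. Pedal[%],IAT[F],Knock Detected[0/1]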
# --- END USER EDITABLE SECTION ---
# Get the parent directory of the input folder (one level up)
PARENT_DIR=$(dirname "$INPUT_DIR")
OUTPUT_BASE="${PARENT_DIR}/Output" # Output directory for results
# Create a timestamp for unique output folders (prevents overwriting)
TIMESTAMP=$(date +"%Y%m%d-%H%M%S")
FINAL_OUTPUT="${OUTPUT_BASE}/${TIMESTAMP}" # Final output folder for this run
# Output file paths for checksums, validation, and terminal output
CHECKSUM_FILE="${FINAL_OUTPUT}/checksums.txt"
VALIDATION_FILE="${FINAL_OUTPUT}/validation.txt"
TERM_OUTPUT="${FINAL_OUTPUT}/term_output.txt"
# Create the output directory if it doesn't exist
mkdir -p "$FINAL_OUTPUT"
# Initialize (truncate) the checksum and validation files
> "$CHECKSUM_FILE"
> "$VALIDATION_FILE"
# Initialize counters for total knock events and files processed
total_knocks=0
total_files=0
# Print header info to terminal
echo "πŸ”§ BMW N20 Knock Analysis Suite"
echo "==============================="
echo "πŸ“‚ Input: ${INPUT_DIR}"
echo "πŸ“ Output: ${FINAL_OUTPUT}"
echo ""
echo "βš™οΈ Engine-specific log columns:"
echo " Knock: ${KNOCK_COLUMN_NAME}"
echo " Throttle: ${THROTTLE_COLUMN_NAME}"
echo " IAT: ${IAT_COLUMN_NAME}"
echo ""
# --- CONCATENATE ALL CSVs (regardless of knock) ---
# This will create a single CSV containing all rows from all input logs.
# Only the header from the first file is kept to avoid duplicates.
first=1
for file in "${INPUT_DIR}"/*.csv; do
  if [[ $first -eq 1 ]]; then
    cat "$file" > "${FINAL_OUTPUT}/${MERGE_ALL}"
    first=0
  else
    tail -n +2 "$file" >> "${FINAL_OUTPUT}/${MERGE_ALL}"
  fi
done
# Main processing loop: for each CSV in the input directory
for file in "${INPUT_DIR}"/*.csv; do
  filename=$(basename "$file") # Get the file name only
  RAND_ID=$(jot -r 1 1000 9999) # Generate random ID for output file (avoids name collisions)
  KNOCK_FILE="${FINAL_OUTPUT}/${OUTPUT_PREFIX}-${RAND_ID}.csv" # Output file for knock events in this log
  checksum=$(md5 -q "$file") # Calculate MD5 checksum for file integrity
  echo "${filename} ${checksum}" >> "$CHECKSUM_FILE" # Log the checksum
  # --- COLUMN DETECTION ---
  # Read the header row to find which columns contain the needed data
  header=$(head -1 "$file")
  IFS=',' read -r -a cols <<< "$header"
  unset knock_col throttle_col iat_col
  for i in "${!cols[@]}"; do
    [[ "${cols[$i]}" == "$KNOCK_COLUMN_NAME" ]] && knock_col=$((i+1))
    [[ "${cols[$i]}" == "$THROTTLE_COLUMN_NAME" ]] && throttle_col=$((i+1))
    [[ "${cols[$i]}" == "$IAT_COLUMN_NAME" ]] && iat_col=$((i+1))
  done
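  # Example (illustrative): for a header like "Time[s],RPM,Accel. Pedal[%],IAT[F],Knock Detected[0/1]",
  # knock_col would be 5, throttle_col 3, and iat_col 4 (awk fields are 1-based, hence i+1)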
  # --- SAFETY CHECK ---
  # If any required columns are missing, skip this file and log the issue
  if [[ -z $knock_col || -z $throttle_col || -z $iat_col ]]; then
    echo "⚠️ Skipping $filename (missing expected columns)" | tee -a "$VALIDATION_FILE"
    echo " (Expected: '$KNOCK_COLUMN_NAME', '$THROTTLE_COLUMN_NAME', '$IAT_COLUMN_NAME')" | tee -a "$VALIDATION_FILE"
    continue
  fi
  # --- KNOCK EVENT EXTRACTION ---
  # Use awk to scan the file and extract knock events with context
  awk -F',' -v buffer="$BUFFER_PERCENT" -v outfile="$KNOCK_FILE" \
    -v knock_col="$knock_col" -v throttle_col="$throttle_col" -v iat_col="$iat_col" '
    NR==1 { header=$0; print header > outfile; next } # Print header to output file, skip to next row
    {
      knocks[NR]=$knock_col > 0 ? $knock_col : 0 # Store knock flag (0 or 1)
      throttles[NR]=$throttle_col+0              # Store throttle value (numeric)
      iats[NR]=$iat_col+0                        # Store IAT value (numeric)
      lines[NR]=$0                               # Store the full line for later output
      if ($knock_col == 1) { knock_lines[NR]=1; knock_count++ }        # Track rows with knock events
      if ($throttle_col+0 > max_throttle) max_throttle=$throttle_col+0 # Track max throttle
      if ($iat_col+0 > max_iat) max_iat=$iat_col+0                     # Track max IAT
    }
    END {
      # Output rows around each knock event (buffer before/after)
      if (knock_count > 0) {
        for (k in knock_lines) {
          start = k - buffer > 1 ? k - buffer : 1
          end = k + buffer < NR ? k + buffer : NR
          for (i=start; i<=end; i++) if (!printed[i]++) print lines[i] >> outfile
        }
      }
      # Write stats for this file to validation file
      if (knock_count > 0)
        print "🔥 Knock Events: " knock_count " in " FILENAME
      else
        print "✅ Confirmed: 0 knock events found in " FILENAME
      print "🌡️ Max IAT: " (max_iat ? max_iat : "N/A") "°F in " FILENAME
      print "🎚️ Max Throttle: " (max_throttle ? max_throttle : "N/A") "% in " FILENAME
      print "-----------------"
    }' "$file" >> "$VALIDATION_FILE"
  # --- KNOCK EVENT COUNTING ---
  # Count the knock rows in this log directly and add them to the running totals
  file_knocks=$(awk -F',' -v kc="$knock_col" 'NR > 1 && $kc == 1 { c++ } END { print c + 0 }' "$file")
  (( total_knocks += file_knocks ))
  (( total_files++ ))
done
# --- MERGE ALL KNOCK FILES ---
# Only the per-log knock-event files are merged here, so the all-logs CSV is not swept into the result
echo "🔗 Merging knock files..."
awk '
FNR == 1 && NR != 1 { next } # Skip header row for all but first file
{ print }' "${FINAL_OUTPUT}/${OUTPUT_PREFIX}"-*.csv > "${FINAL_OUTPUT}/${MERGE_FINAL}"
# --- CHECKSUM FOR MERGED FILE ---
merged_checksum=$(md5 -q "${FINAL_OUTPUT}/${MERGE_FINAL}")
echo "Merged_File ${merged_checksum}" >> "$CHECKSUM_FILE"
# --- CHECKSUM FOR CONCATENATED ALL-LOGS FILE ---
all_logs_checksum=$(md5 -q "${FINAL_OUTPUT}/${MERGE_ALL}")
echo "All_Logs_File ${all_logs_checksum}" >> "$CHECKSUM_FILE"
# --- OUTPUT SUMMARY TO TERMINAL AND FILE ---
{
  echo ""
  echo "✅ Processing Complete"
  echo "---------------------"
  echo "📊 Results Summary:"
  echo "- Log files processed: $total_files"
  echo "- Total knock events: $total_knocks"
  echo "- Merged knock event CSV: ${FINAL_OUTPUT}/${MERGE_FINAL}"
  echo "- All logs concatenated CSV: ${FINAL_OUTPUT}/${MERGE_ALL}"
  echo "- Timestamp: ${TIMESTAMP}"
  echo ""
  echo "Validation Report:"
  cat "$VALIDATION_FILE"
  echo -e "\nChecksum Verification:"
  cat "$CHECKSUM_FILE"
  if [[ $total_knocks -eq 0 ]]; then
    echo "✅ Confirmed: 0 knock events found in all input files."
  fi
} | tee "$TERM_OUTPUT"
# --- END OF SCRIPT ---
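#
# --- USAGE EXAMPLE (illustrative) ---
# With INPUT_DIR pointing at your BM3 CSV exports:
#   chmod +x bmw-bootmod3-knock-event-context-extractor.sh
#   ./bmw-bootmod3-knock-event-context-extractor.sh
# Results are written to a timestamped folder next to the input directory,
# e.g. /Users/twesh/Desktop/BM3/Output/20250101-120000/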
