diff --git a/tools/sentinel-shell/readme.md b/tools/sentinel-shell/readme.md new file mode 100644 index 0000000..6ac882e --- /dev/null +++ b/tools/sentinel-shell/readme.md @@ -0,0 +1,60 @@ +# Sentinel Shell Scripts + +Sentinel scripts are part of the FAIR Forge Toolchain, and act as an early detection system for checking received packages. Sentinel will flag anomalies that need closer inspection or present critical issues. + +## 1. Sentinel Write Check + +Starting from a given target directory, `sentinel-write-check.sh` will recursively check files and directories for anything with world-writeable permissions; i.e., "write" permissions are enabled in the final position of the octal file permissions. An optional flag will unset this, reducing the octal number by two, so `662` becomes `660`, `757` becomes `755`, and so on. + +The script should work in most modern shells, and is hardened against some basic attacks. For example, it will not `chmod` a symlink that could point to `/somewhere-vital/`. The script's output should not reveal anything about the current environment, so does not output an absolute path, only the relative path from the stated target directory. + +### Usage + +`./sentinel-write-check.sh targetdir` + +Optional: remove the offending world-writeable setting with the `--fix` flag. + + +### Explanation + +In Unix/Linux systems, octal file permissions are numerical representations of file permissions, and are a clever bulletproof way of turning nine letters into three digits, elegantly storing them in only 9 bits. For the record, nine literal letters (`rwxrwxrwx`) in ASCII would use 72 bits. Unix is old enough that those bits mattered. Learn how it works at https://www.redhat.com/en/blog/linux-file-permissions-explained and https://linuxvox.com/blog/linux-octal-permissions/ or just skip to https://chmod-calculator.com/ if you must. + +## 2. Sentinel File Stats + +The file stats script gathers a quick statistical overview of what's in a directory. 
After scanning, it will report the number of files, number of lines, and file size for each file type, such as `.php`, `.js`, image types, and project documentation (`.txt` or `.md`). + + While scanning, the script will specifically check for indicators of common external sources for updates, such as a `.gitignore` file. In addition to Git, Composer, Node.js, and PyPI sources are flagged. These may not be important, but may indicate either that the package is installing software from external sources or that the package wasn't cleaned up before being distributed. + +The script will also check for three specific files, `readme`, `security`, and `license`, which should be included as `.md` or `.txt` files as a matter of best practice. If present but with an improbably-small file size, it'll flag that too. (A `touch` is not enough for a best-practice indicator.) + + +### Usage + +`./sentinel-file-stats.sh targetdir` + +Optional flags: + +`-s` silent, no output to terminal +`-j` write results to a `.json` file + + +## 3. Sentinel File Integrity + +The file integrity script will recursively scan a target directory, doing a deeper check for anomalies embedded in the file structure. It will report any mime-type mismatches encountered, such as a file named `file.jpg` with a mime-type suggesting it may contain executable code. It will check for and count binary files or archives, hidden directories, SUID/SGID files, and other basic spoofing attempts like double-extension file names. (This will yield false positives, but names like `file.txt.exe` likely indicate something malicious.) For good measure, it'll also check if there's any `__MACOSX` cruft left behind. + +This script is more resource-intense than the first two, as it does a deeper inspection of each file. 
+ +### Usage + +`./sentinel-integrity.sh targetdir` + +Optional flags: + +`-s` silent, no output to terminal +`-j` write results to a `.json` file + + + +## License: MIT + + diff --git a/tools/sentinel-shell/sentinel-file-stats.sh b/tools/sentinel-shell/sentinel-file-stats.sh new file mode 100644 index 0000000..b881245 --- /dev/null +++ b/tools/sentinel-shell/sentinel-file-stats.sh @@ -0,0 +1,262 @@ +#!/bin/bash + +# +# create a quick overview of the files with statistical summary +# highlight any immediate issues +# + + +# +# enviro stuff +# +set -u +export LC_ALL=C +export PATH="/usr/bin:/bin:/usr/sbin:/sbin" +IFS=$' \t\n' + + +# +# do we have the tools we need here? +# +for tool in find grep du wc file realpath basename cut date; do + if ! type -p "$tool" >/dev/null; then + echo "Error: Required tool '$tool' not found." >&2; exit 1 + fi +done + + +# +# check option flags for json file output or run silent +# +JSON_OUT=false; SILENT=false +while getopts "sj" opt; do + case $opt in + s) SILENT=true ;; + j) JSON_OUT=true ;; + *) echo "Usage: $0 [-s] [-j] [target_dir] (use -sj for silent JSON)" >&2; exit 1 ;; + esac +done +shift $((OPTIND-1)) + +# must have target directory +TARGET_DIR="${1:-.}" +[ ! -d "$TARGET_DIR" ] && { echo "Error: Directory not found." 
>&2; exit 1; } + +DISPLAY_NAME=$(basename "$(realpath "$TARGET_DIR")") +JSON_FILE="${DISPLAY_NAME}_file-stats.json" + +# maybe silent +log() { [ "$SILENT" = false ] && echo -e "$1"; } + + +# +# make file sizes human-readable +# +format_bytes() { + local bytes=$1 + if [[ $bytes -lt 1024 ]]; then echo "${bytes} B" + elif [[ $bytes -lt 1048576 ]]; then echo "$(( (bytes + 512) / 1024 )) KB" + else echo "$(( (bytes + 524288) / 1048576 )) MB"; fi +} + + +# +# benchmark measures +# +FRESH_LIMIT=129600 +SCORE=100 +CRIT_COUNT=0; WARN_COUNT=0; QUAL_COUNT=0 +FINDINGS_JSON="" + + +add_finding() { + local type="$1" sev="$2" msg="$3" + local entry="{\"type\": \"$type\", \"severity\": \"$sev\", \"message\": \"$msg\"}" + if [ -z "$FINDINGS_JSON" ]; then FINDINGS_JSON="$entry"; else FINDINGS_JSON="$FINDINGS_JSON, $entry"; fi +} + +declare -A NAMES=( + ["php"]="PHP" ["js"]="JavaScript" ["py"]="Python" ["rb"]="Ruby" + ["go"]="Go" ["c"]="C Source" ["cpp"]="C++ Source" ["sh"]="Shell Script" + ["html"]="HTML" ["htm"]="HTML" ["shtml"]="HTML" ["xml"]="XML" ["svg"]="SVG Vector" + ["css"]="CSS" ["scss"]="SCSS" ["less"]="LESS" ["md"]="Markdown" ["txt"]="Plain Text" + ["json"]="JSON" ["yml"]="YAML" ["yaml"]="YAML" ["sql"]="SQL" + ["no_ext"]="No Extension" ["minified"]="Minified Code" + ["docs"]="Project Docs" +) +declare -A L B C +EXTENSIONS=() + +log "" +log "Sentinel Code Stats for [$DISPLAY_NAME]" +log "------------------------------------------------------------" + + +# +# loop to check & count the things... +# +FILE_COUNT=0 +while IFS='|' read -r f_size f_path; do + [ -z "$f_path" ] && continue + ((FILE_COUNT++)) + rel_f_path=$(realpath --relative-to="$TARGET_DIR" "$f_path") + filename=$(basename "$f_path") + fname_lower=$(echo "$filename" | tr '[:upper:]' '[:lower:]') + ext="${filename##*.}" + ext_lower=$(echo "$ext" | tr '[:upper:]' '[:lower:]') + + if [[ "$ext_lower" != "php" ]]; then + if grep -q "/dev/null; then + ((CRIT_COUNT++)) + log " [!! ALERT !!] 
Hidden PHP found in: ./$rel_f_path" + add_finding "hidden_php" "CRITICAL" "./$rel_f_path" + fi + if grep -qE "eval\(|exec\(|password[[:space:]]*=|API_KEY|SECRET_KEY" "$f_path" 2>/dev/null; then + ((WARN_COUNT++)) + add_finding "sensitive_string" "WARNING" "./$rel_f_path" + fi + fi + + if [[ "$ext_lower" =~ ^(sh|py|pl|rb)$ ]]; then + if [[ ! "$(head -n 1 "$f_path" 2>/dev/null)" =~ ^#! ]]; then + ((WARN_COUNT++)) + add_finding "missing_shebang" "WARNING" "./$rel_f_path" + fi + fi + + if [[ "$fname_lower" =~ ^(readme|security|license)\.(md|txt)$ ]]; then + cat="docs" + elif [[ "$fname_lower" == *.min.* ]]; then cat="minified" + else [[ "$filename" == "$ext" ]] && ext_lower="no_ext"; cat="$ext_lower"; fi + + if [[ -z ${C[$cat]:-} ]]; then C[$cat]=0; L[$cat]=0; B[$cat]=0; EXTENSIONS+=("$cat"); fi + ((C[$cat]++)); B[$cat]=$((B[$cat] + f_size)) + [[ "$cat" != "minified" ]] && L[$cat]=$((L[$cat] + $(wc -l < "$f_path" 2>/dev/null || echo 0))) + +done < <(find "$TARGET_DIR" -mount -type f ! -path "*/vendor/*" ! -path "*/node_modules/*" ! -path "*/venv/*" ! 
-path "*/.venv/*" \( \ + -name "*.php" -o -name "*.js" -o -name "*.py" -o \ + -name "*.css" -o -name "*.scss" -o -name "*.html" -o \ + -name "*.md" -o -name "*.txt" -o \ + -iname "readme*" -o -iname "security*" -o -iname "license*" \ + \) -printf "%s|%p\n" 2>/dev/null) + + +# +# report what we found +# +[ "$SILENT" = false ] && printf "%-25s %-10s %-12s %-12s\n" "File Type" "Files" "Lines" "Size" +GRAND_L=0; GRAND_B=0; EXT_JSON="" +for cat in $(printf "%s\n" "${EXTENSIONS[@]}" | sort); do + [[ "$cat" != "minified" ]] && { GRAND_L=$((GRAND_L + L[$cat])); GRAND_B=$((GRAND_B + B[$cat])); } + E_ENTRY="\"$cat\": {\"files\": ${C[$cat]}, \"lines\": ${L[$cat]:-0}, \"bytes\": ${B[$cat]}}" + [ -z "$EXT_JSON" ] && EXT_JSON="$E_ENTRY" || EXT_JSON="$EXT_JSON, $E_ENTRY" + if [ "$SILENT" = false ]; then + label=".$cat"; [[ -n ${NAMES[$cat]:-} ]] && label=".$cat (${NAMES[$cat]})" + [[ "$cat" == "docs" ]] && label="Project Documentation" + [[ "$cat" == "minified" ]] && label="Minified (Omitted)" + printf "%-25s %-10d %-12s %-12s\n" " $label" "${C[$cat]}" "${L[$cat]:-N/A}" "$(format_bytes ${B[$cat]})" + fi +done +log "------------------------------------------------------------" +[ "$SILENT" = false ] && printf "%-25s %-10s %-12d %-12s\n" "Grand Totals:" "$FILE_COUNT" "$GRAND_L" "$(format_bytes $GRAND_B)" +log "------------------------------------------------------------" + +# +# review external software sources +# +log "External Software Sources:" + +GITIGNORE="$TARGET_DIR/.gitignore"; HAS_STALE=0 +is_ignored() { [ -f "$GITIGNORE" ] && grep -qE "^/?$1(/|$)" "$GITIGNORE"; } + + +# --- .gitignore --- +if [ -f "$GITIGNORE" ]; then + log " [FOUND] .gitignore configuration" + if [ -f "$TARGET_DIR/.env" ]; then + if grep -qE "^\.env($|[[:space:]])" "$GITIGNORE"; then log " [SAFE] .env is ignored" + else log " [CRITICAL] .env NOT in .gitignore!"; ((CRIT_COUNT++)); add_finding "gitignore_missing" "CRITICAL" ".env"; fi + fi +else log " [NOT FOUND] .gitignore"; fi + + +# --- Composer --- +if 
[ -d "$TARGET_DIR/vendor" ] || [ -f "$TARGET_DIR/composer.json" ]; then + log " [DETECTED] PHP (Composer)" + is_ignored "vendor" || { log " [CRITICAL] /vendor NOT in .gitignore!"; ((CRIT_COUNT++)); add_finding "gitignore_missing" "CRITICAL" "vendor/"; } + if [ -f "$TARGET_DIR/composer.lock" ]; then + if [ -n "$(find "$TARGET_DIR/composer.lock" -mmin +"$FRESH_LIMIT" -print)" ]; then + log " [STALE] ./composer.lock (>90d)"; HAS_STALE=1; ((WARN_COUNT++)); add_finding "stale_lock" "WARNING" "composer.lock" + else log " [FRESH] ./composer.lock is current."; fi + else log " [CRITICAL] composer.lock MISSING!"; ((CRIT_COUNT++)); add_finding "missing_lock" "CRITICAL" "composer.lock"; fi +else log " [NOT FOUND] PHP (Composer) sources"; fi + + +# --- Node.js (npm) --- +if [ -d "$TARGET_DIR/node_modules" ] || [ -f "$TARGET_DIR/package.json" ]; then + log " [DETECTED] Node.js (npm)" + is_ignored "node_modules" || { log " [CRITICAL] /node_modules NOT in .gitignore!"; ((CRIT_COUNT++)); add_finding "gitignore_missing" "CRITICAL" "node_modules/"; } + if [ -f "$TARGET_DIR/package-lock.json" ]; then + if [ -n "$(find "$TARGET_DIR/package-lock.json" -mmin +"$FRESH_LIMIT" -print)" ]; then + log " [STALE] ./package-lock.json (>90d)"; HAS_STALE=1; ((WARN_COUNT++)); add_finding "stale_lock" "WARNING" "package-lock.json" + else log " [FRESH] ./package-lock.json is current."; fi + else log " [WARNING] package-lock.json MISSING!"; ((WARN_COUNT++)); add_finding "missing_lock" "WARNING" "package-lock.json"; fi +else log " [NOT FOUND] Node.js (npm) sources"; fi + + +# --- Python --- +if [ -d "$TARGET_DIR/venv" ] || [ -d "$TARGET_DIR/.venv" ] || [ -f "$TARGET_DIR/requirements.txt" ]; then + log " [DETECTED] Python (PyPI)" + if [ -d "$TARGET_DIR/venv" ] || [ -d "$TARGET_DIR/.venv" ]; then + (is_ignored "venv" || is_ignored ".venv") || { log " [CRITICAL] VirtualEnv NOT in .gitignore!"; ((CRIT_COUNT++)); add_finding "gitignore_missing" "CRITICAL" "venv/"; } + fi +else log " [NOT FOUND] Python 
(PyPI) sources"; fi
+
+[ "$HAS_STALE" -eq 1 ] && log " [NOTICE] Stale lockfiles detected. Verify against SBOM."
+
+
+#
+# Best-practice file check: README/LICENSE/SECURITY should exist as .md or
+# .txt and be big enough to carry real content (a bare `touch` doesn't count)
+#
+log "Best-Practice md/txt Files:"
+for doc in "README" "LICENSE" "SECURITY"; do
+    d_path=$(find "$TARGET_DIR" -maxdepth 1 -type f \( -iname "$doc.md" -o -iname "$doc.txt" \) -printf "%p" -quit 2>/dev/null)
+    if [[ -z "$d_path" ]]; then log " [MISSING] $doc file"; ((QUAL_COUNT++)); add_finding "missing_doc" "QUALITY" "$doc";
+    else
+        d_size=$(stat -c%s "$d_path")
+        d_name=$(basename "$d_path")
+        if [[ $d_size -lt 300 ]]; then log " [FOUND] $d_name ($d_size B) [!! TOO SMALL !!]"; ((QUAL_COUNT++)); add_finding "small_doc" "QUALITY" "$d_name";
+        else log " [FOUND] $d_name ($(format_bytes $d_size)) [OK]"; fi
+    fi
+done
+
+
+#
+# Calculate a risk/health score
+# removed this, it's too rudimentary
+#
+# SCORE=$(( 100 - (CRIT_COUNT * 15) - (WARN_COUNT * 5) - (QUAL_COUNT * 2) ))
+# [ $SCORE -lt 0 ] && SCORE=0   # clamp disabled along with the score itself
+log "------------------------------------------------------------"
+# log "Sentinel Health Score: $SCORE/100"
+
+
+#
+# maybe write to json file
+# purposely removed from json: "health_score": $SCORE,
+# numeric fields are emitted unquoted so consumers get real JSON numbers,
+# consistent with sentinel-integrity.sh
+#
+if [ "$JSON_OUT" = true ]; then
+    TIMESTAMP=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
+    RAW_JSON="{\"project\":\"$DISPLAY_NAME\",\"timestamp\":\"$TIMESTAMP\",\"audit_type\":\"file_statistics\",\"metrics\":{\"total_files\":$FILE_COUNT,\"total_lines\":$GRAND_L,\"total_size_bytes\":$GRAND_B,\"by_extension\":{$EXT_JSON}},\"counts\":{\"critical\":$CRIT_COUNT,\"warnings\":$WARN_COUNT,\"quality\":$QUAL_COUNT},\"findings\":[$FINDINGS_JSON]}"
+    # NB: debug 'echo $RAW_JSON' removed -- it dumped raw JSON to stdout even in -s mode
+    # check if jq is in the path; if not, use python to pretty-print the json
+    if type jq >/dev/null 2>&1; then
+        echo "$RAW_JSON" | jq . 
> "$JSON_FILE"
+    else
+        echo "$RAW_JSON" | python3 -m json.tool > "$JSON_FILE"
+    fi
+fi
+
+# only announce the report when one was actually written (-j given);
+# previously this printed even without -j, claiming a file that didn't exist
+[ "$JSON_OUT" = true ] && [ "$SILENT" = false ] && echo "JSON report generated: $JSON_FILE"
+log ""
+
diff --git a/tools/sentinel-shell/sentinel-integrity.sh b/tools/sentinel-shell/sentinel-integrity.sh
new file mode 100644
index 0000000..10fe853
--- /dev/null
+++ b/tools/sentinel-shell/sentinel-integrity.sh
@@ -0,0 +1,174 @@
+#!/bin/bash
+
+#
+# set enviro
+#
+set -u
+export LC_ALL=C
+export PATH="/usr/bin:/bin:/usr/sbin:/sbin"
+IFS=$' \t\n'
+
+# default options
+JSON_OUT=false; SILENT=false
+while getopts "sj" opt; do
+    case $opt in
+        s) SILENT=true ;;
+        j) JSON_OUT=true ;;
+        *) echo "Usage: $0 [-s] [-j] [target_dir]" >&2; exit 1 ;;
+    esac
+done
+shift $((OPTIND-1))
+
+# must specify a directory to recurse from
+TARGET_DIR="${1:-.}"
+[ ! -d "$TARGET_DIR" ] && { echo "Error: Directory not found." >&2; exit 1; }
+
+# drop any path indicators from output (silence cues about the environment)
+DISPLAY_NAME=$(basename "$(realpath "$TARGET_DIR")")
+JSON_FILE="${DISPLAY_NAME}_integrity.json"
+
+# report human-readable file sizes
+log() { [ "$SILENT" = false ] && echo -e "$1"; }
+format_bytes() {
+    local bytes=$1
+    if [[ $bytes -lt 1024 ]]; then echo "${bytes} B"
+    elif [[ $bytes -lt 1048576 ]]; then echo "$(( (bytes + 512) / 1024 )) KB"
+    else echo "$(( (bytes + 524288) / 1048576 )) MB"; fi
+}
+
+#
+# set up json output
+#
+FINDINGS_JSON=""
+add_finding() {
+    local type="$1" sev="$2" f_path="$3" sz="$4" mime="$5"
+    local entry="{\"type\": \"$type\", \"severity\": \"$sev\", \"file\": \"$f_path\", \"size_bytes\": $sz, \"mime\": \"$mime\"}"
+    if [ -z "$FINDINGS_JSON" ]; then FINDINGS_JSON="$entry"; else FINDINGS_JSON="$FINDINGS_JSON, $entry"; fi
+}
+
+# init counters
+MACOS_CRUFT=0; IMG_COUNT=0; IMG_SIZE=0; BIN_COUNT=0; BIN_SIZE=0; MISMATCH_COUNT=0; EXEC_COUNT=0; SUID_COUNT=0; DUAL_EXT_COUNT=0; HIDDEN_DIRS=0
+
+log ""
+log "Sentinel File Integrity Check: [$DISPLAY_NAME]"
+log 
"------------------------------------------------------------"
+log "SCAN FOUND:"
+log "-----------"
+
+#
+# Loop: look for things and count 'em up
+# (directories are handled first now, so they never skew the binary tallies)
+#
+while IFS='|' read -r f_size f_mode f_path; do
+    [ -z "$f_path" ] && continue
+    rel_path=$(realpath --relative-to="$TARGET_DIR" "$f_path")
+    full_display_path="$DISPLAY_NAME/$rel_path"
+    filename=$(basename "$f_path")
+
+    # did MACOS leave stupid footprints that weren't removed?
+    if [[ "$f_path" == *"__MACOSX"* || "$filename" == ".DS_Store" ]]; then
+        ((MACOS_CRUFT++)); add_finding "macos_metadata" "INFO" "$full_display_path" "$f_size" "inode/directory"
+        continue
+    fi
+
+    # directories: only hidden ones (excl. current/parent) are worth flagging;
+    # previously dirs fell through to the binary tally and inflated BIN_COUNT
+    if [ -d "$f_path" ]; then
+        if [[ "$filename" == .* && "$filename" != "." && "$filename" != ".." ]]; then
+            ((HIDDEN_DIRS++))
+            add_finding "hidden_directory" "WARNING" "$full_display_path" "0" "inode/directory"
+            log "[!! HIDDEN DIR !!] $full_display_path"
+        fi
+        continue
+    fi
+
+    #
+    # validate file extensions
+    #
+
+    # get MIME types
+    mime=$(file --mime-type -b "$f_path")
+    ext_lower=$(echo "${f_path##*.}" | tr '[:upper:]' '[:lower:]')
+
+    #
+    # this part will catch a lot of false positives like version-4.2.1.zip but we're looking
+    # for stuff like file.jpg.php trying to hide executable code as a known file type
+    is_dual=false; [[ "$(echo "$filename" | tr -cd '.' | wc -c)" -gt 1 ]] && is_dual=true
+    #
+    # is this an executable?
+    is_exec=false; [[ "$mime" == "application/x-executable" || "$mime" == "application/x-sharedlib" || "$mime" == "application/x-dosexec" ]] && is_exec=true
+
+    #
+    # validate some common file types (could add to these)
+    # refer: https://mimetype.io/all-types
+    # and https://developer.mozilla.org/en-US/docs/Web/HTTP/Guides/MIME_types/Common_types
+    #
+    is_valid=true
+    case "$ext_lower" in
+        zip) [[ "$mime" != "application/zip" ]] && is_valid=false ;;
+        # accept x-gzip too: Windows/Mac tools set that nonstandard MIME type
+        # (the old second gz|tgz arm was dead code -- case stops at the first match)
+        gz|tgz) [[ "$mime" != "application/gzip" && "$mime" != "application/x-gzip" ]] && is_valid=false ;;
+        pdf) [[ "$mime" != "application/pdf" ]] && is_valid=false ;;
+        png) [[ "$mime" != "image/png" ]] && is_valid=false ;;
+        jpg|jpeg) [[ "$mime" != "image/jpeg" ]] && is_valid=false ;;
+        gif) [[ "$mime" != "image/gif" ]] && is_valid=false ;;
+        avif) [[ "$mime" != "image/avif" ]] && is_valid=false ;;
+        webp) [[ "$mime" != "image/webp" ]] && is_valid=false ;;
+        ico) [[ "$mime" != "image/vnd.microsoft.icon" ]] && is_valid=false ;;
+    esac
+
+    # tally image stats
+    if [ "$is_valid" = true ] && [ "$is_dual" = false ] && [[ "$mime" =~ ^image/ ]]; then
+        ((IMG_COUNT++)); IMG_SIZE=$((IMG_SIZE + f_size)); continue
+    fi
+
+    # tally binaries
+    ((BIN_COUNT++)); BIN_SIZE=$((BIN_SIZE + f_size))
+    status=" [OK]"; ftype="binary_ok"; sev="INFO"
+
+    # spoof warnings; ftype is a clean machine-readable token for the JSON
+    # (previously derived by mangling the display string)
+    if [ "$is_valid" = false ]; then status="[!! MIME MISMATCH !!]"; ftype="mime_mismatch"; sev="CRITICAL"; ((MISMATCH_COUNT++));
+    elif [ "$is_dual" = true ]; then status="[!! DUAL EXT !!] "; ftype="dual_extension"; sev="WARNING"; ((DUAL_EXT_COUNT++));
+    elif [[ "$f_mode" =~ ^[4-7]...$ ]]; then status="[!! SUID !!] "; ftype="suid_file"; sev="CRITICAL"; ((SUID_COUNT++));
+    elif [ "$is_exec" = true ]; then status="[!! EXECUTABLE !!] "; ftype="executable"; sev="WARNING"; ((EXEC_COUNT++)); fi
+
+    add_finding "$ftype" "$sev" "$full_display_path" "$f_size" "$mime"
+    log "$status $(format_bytes $f_size) ($mime) $full_display_path"
+
+done < <(find "$TARGET_DIR" -mount -type f,d ! \( -name "*.php" -o -name "*.js" -o -name "*.py" -o -name "*.css" -o -name "*.scss" -o -name "*.html" -o -name "*.md" -o -name "*.txt" -o -name "*.xml" -o -name "*.json" -o -name "*.yml" -o -name "*.yaml" -o -name "*.sql" \) -printf "%s|%a|%p\n" 2>/dev/null)
+#
+# thus ends the loop
+# done with checks
+#
+
+
+
+#
+# report
+#
+log "------------------------------------------------------------"
+log "File Integrity Summary"
+log "----------------------"
+log "FOUND:"
+log " Verified Web Images : $IMG_COUNT files ($(format_bytes $IMG_SIZE))"
+log " MacOS Metadata Cruft : $MACOS_CRUFT files"
+log " Other Binaries/Archives : $BIN_COUNT files ($(format_bytes $BIN_SIZE))"
+log " Hidden Directories : $HIDDEN_DIRS"
+log ""
+log "CRITICAL WARNINGS:"
+log " Mime-Type Mismatches : $MISMATCH_COUNT"
+log " Dual-Extension Files : $DUAL_EXT_COUNT"
+log " SUID/SGID Files : $SUID_COUNT"
+log " Binary Executables : $EXEC_COUNT"
+log "------------------------------------------------------------"
+log ""
+
+
+#
+# maybe write to json file
+#
+if [ "$JSON_OUT" = true ]; then
+    RAW_JSON="{\"project\":\"$DISPLAY_NAME\",\"audit_type\":\"file_integrity\",\"timestamp\":\"$(date -u +"%Y-%m-%dT%H:%M:%SZ")\",\"summary\":{\"web_images\":{\"count\":$IMG_COUNT,\"size_bytes\":$IMG_SIZE},\"macos_cruft\":{\"count\":$MACOS_CRUFT},\"hidden_directories\":{\"count\":$HIDDEN_DIRS},\"other_binaries\":{\"count\":$BIN_COUNT,\"size_bytes\":$BIN_SIZE},\"anomalies\":{\"mime_mismatches\":$MISMATCH_COUNT,\"dual_extensions\":$DUAL_EXT_COUNT,\"suid_files\":$SUID_COUNT,\"executables\":$EXEC_COUNT}},\"findings\":[$FINDINGS_JSON]}"
+    # check
if jq is available in the path, else use python to pretty-print the json + if type jq >/dev/null 2>&1; then echo "$RAW_JSON" | jq . > "$JSON_FILE" + else echo "$RAW_JSON" | python3 -m json.tool > "$JSON_FILE"; fi +fi + + diff --git a/tools/sentinel-shell/sentinel-write-check.sh b/tools/sentinel-shell/sentinel-write-check.sh new file mode 100644 index 0000000..e9d52ba --- /dev/null +++ b/tools/sentinel-shell/sentinel-write-check.sh @@ -0,0 +1,106 @@ +#!/bin/bash + +## +# recursively check files & directories for any world-writeable permissions +# optionally remove the offending write permission with --fix flag +# script output should reveal nothing about the current environment (e.g., absolute path) +# script should resist most direct attacks (e.g. don't chmod a symlink pointing /somewhere) +## + +# enviro: exit on undefined vars, maybe prevent unintended consequences +# sanitize environment (ish); should work in most current shells +set -u +export LC_ALL=C +export PATH="/usr/bin:/bin:/usr/sbin:/sbin" +IFS=$' \t\n' + +# init vars +FIXIT=false +TARGET_DIR="" +WRITABLE_FILES=0 +WRITABLE_DIRS=0 + +# +# parse what we're given +# +for arg in "$@"; do + case "$arg" in + --fix) FIXIT=true ;; + -*) echo "Unknown option: $arg" >&2; exit 1 ;; + *) TARGET_DIR="$arg" ;; + esac +done + +[ "$FIXIT" = true ] && FIXFLAG="Fixed" || FIXFLAG="Flagged" + +TARGET_DIR="${TARGET_DIR:-.}" + +if [ ! -d "$TARGET_DIR" ]; then + echo "Error: Directory '$TARGET_DIR' not found." >&2 + exit 1 +fi + +# echo "Scanning: $TARGET_DIR" +echo "" +[ "$FIXIT" = true ] && echo "File permission fixes for $TARGET_DIR" || echo "File permission scan for $TARGET_DIR" +echo "------------------------------------------------------------" + +# +# what's writeable that shouldn't be? 
tally what we find in an array
+# avoid tmp files & pipe-related exit code issues
+# (-t strips the NUL delimiters so entries are clean paths)
+#
+mapfile -t -d '' WRITABLE_ITEMS < <(find "$TARGET_DIR" -perm -002 -print0 2>/dev/null)
+
+# count 'em all up for future reference
+ALL_TOTAL_FILES=$(find "$TARGET_DIR" -type f 2>/dev/null | wc -l)
+ALL_TOTAL_DIRS=$(find "$TARGET_DIR" -type d 2>/dev/null | wc -l)
+
+#
+# loop through files ("everything is a file")
+# the ${arr[@]+...} expansion keeps 'set -u' happy on bash < 4.4 when the
+# array is empty (i.e. nothing world-writeable was found)
+#
+for item in ${WRITABLE_ITEMS[@]+"${WRITABLE_ITEMS[@]}"}; do
+    [ -z "$item" ] && continue
+
+    # make sure item actually exists (handle race conditions)
+    [ ! -e "$item" ] && [ ! -L "$item" ] && continue
+
+    # get octal perms (NB: stat follows symlinks, so a link reports its target's mode)
+    FOUND_PERM=$(stat -c "%a" "$item" 2>/dev/null || echo "???")
+
+    if [ -d "$item" ]; then
+        ((WRITABLE_DIRS++))
+    else
+        ((WRITABLE_FILES++))
+    fi
+
+    #
+    # maybe fix permissions, change 1 bit
+    # never chmod a symlink -- it could point somewhere vital
+    #
+    if [ "$FIXIT" = true ]; then
+        if [ -L "$item" ]; then
+            echo "[SKIPPED-LINK] $FOUND_PERM : $item"
+        else
+            if chmod o-w "$item" 2>/dev/null; then
+                FIXED_PERM=$(stat -c "%a" "$item" 2>/dev/null || echo "???")
+                echo "[FIXED] $FOUND_PERM -> $FIXED_PERM : $item"
+            else
+                echo "[FAILED] $FOUND_PERM : $item"
+            fi
+        fi
+    else
+        echo "[FOUND] $FOUND_PERM : $item"
+    fi
+done
+
+#
+# summary report
+#
+PASSED_FILES=$((ALL_TOTAL_FILES - WRITABLE_FILES))
+PASSED_DIRS=$((ALL_TOTAL_DIRS - WRITABLE_DIRS))
+
+echo "------------------------------------------------------------"
+echo "$ALL_TOTAL_DIRS Directories Checked | $PASSED_DIRS Passed | $WRITABLE_DIRS $FIXFLAG"
+echo "$ALL_TOTAL_FILES Files Checked | $PASSED_FILES Passed | $WRITABLE_FILES $FIXFLAG"
+[ "$FIXIT" = true ] && echo "Done. World-writeable permissions have been removed." || echo "Done, with no changes made. Use --fix to correct world-writeable permissions."
+echo ""