Tutorial

New updates and improvements to Macfleet.

File Discovery and Search Management on macOS

Discover and manage files across your MacFleet devices using advanced file search and discovery systems. This tutorial covers file path finding, extension-based searches, content analysis, and comprehensive file lifecycle management.

Understanding macOS File Discovery

macOS provides several file discovery and search capabilities:

  • find - Command-line file and directory search utility
  • File Path Resolution - Locate specific files by name and extension
  • Extension Filtering - Search for files by file type and format
  • Content Search - Find files based on content and metadata
  • Spotlight Integration - Leverage macOS Spotlight indexing (e.g. via the mdfind command) for fast searches

Basic File Discovery Operations

Find File Path

#!/bin/bash

# Basic file path finding
# Replace 'file name with extension' with the actual filename (e.g. report.pdf).
# Searching from / walks the entire disk; 2>/dev/null hides the permission
# errors an unprivileged scan of system paths will produce.
find / -name 'file name with extension' -print 2>/dev/null

echo "File path search completed"

Enhanced File Discovery

#!/bin/bash

# Comprehensive file discovery with multiple search methods.
#
# Arguments:
#   $1 - file pattern (required; find-style glob for name searches,
#        plain grep pattern for content searches)
#   $2 - directory scope to search (default: /)
#   $3 - search type: name | iname | type | content (default: name)
# Returns: 0 on success, 1 on invalid arguments or unknown search type.
discover_file_paths() {
    local file_pattern="$1"
    local search_scope="${2:-/}"
    local search_type="${3:-name}"
    local file_path match

    echo "=== File Discovery and Path Resolution ==="
    echo "Search Pattern: $file_pattern"
    echo "Search Scope: $search_scope"
    echo "Search Type: $search_type"

    # Validate search parameters
    if [[ -z "$file_pattern" ]]; then
        echo "❌ File pattern is required"
        return 1
    fi

    if [[ ! -d "$search_scope" ]]; then
        echo "❌ Search scope directory not found: $search_scope"
        return 1
    fi

    # Perform search based on type
    echo "Starting file discovery..."
    local results_count=0

    # Process substitution (done < <(...)) keeps each while loop in the
    # current shell; the previous pipe-based form ran the loop body in a
    # subshell, so the results_count increments were silently discarded.
    case "$search_type" in
        "name")
            echo "Searching by filename..."
            while IFS= read -r file_path; do
                echo "Found: $file_path"
                results_count=$((results_count + 1))
            done < <(find "$search_scope" -name "$file_pattern" -print 2>/dev/null)
            ;;
        "iname")
            echo "Searching by filename (case-insensitive)..."
            while IFS= read -r file_path; do
                echo "Found: $file_path"
                results_count=$((results_count + 1))
            done < <(find "$search_scope" -iname "$file_pattern" -print 2>/dev/null)
            ;;
        "type")
            echo "Searching by file type..."
            while IFS= read -r file_path; do
                echo "Found: $file_path"
                results_count=$((results_count + 1))
            done < <(find "$search_scope" -type f -name "$file_pattern" -print 2>/dev/null)
            ;;
        "content")
            echo "Searching by content..."
            while IFS= read -r match; do
                echo "Content match: $match"
                results_count=$((results_count + 1))
            done < <(grep -r "$file_pattern" "$search_scope" 2>/dev/null)
            ;;
        *)
            echo "❌ Unknown search type: $search_type"
            return 1
            ;;
    esac

    echo "Matches Found: $results_count"
    echo "✅ File discovery completed"

    # log_discovery_action is not defined in this file; only invoke it when a
    # sourced helper provides it, instead of emitting "command not found".
    if declare -f log_discovery_action >/dev/null 2>&1; then
        log_discovery_action "$file_pattern" "$search_scope" "$search_type" "success"
    fi

    return 0
}

# Example usage
# discover_file_paths "document.txt" "/" "name"

List Files by Extension

#!/bin/bash

# Enhanced file extension search
find_files_by_extension() {
    local extension="$1"
    local count="${2:-10}"
    local search_path="${3:-/}"
    
    echo "=== Files by Extension Search ==="
    echo "Extension: .$extension"
    echo "Result Limit: $count"
    echo "Search Path: $search_path"
    
    # Basic extension search
    echo "Searching for .$extension files..."
    find "$search_path" -name "*.$extension" -print 2>/dev/null | head -n "$count"
    
    echo "✅ Extension search completed"
}

# Example usage
# find_files_by_extension "txt" 10 "/"

File Discovery Categories

File Type Classifications

#!/bin/bash

# File discovery categories for different file types and purposes
# The same keys index all three maps below - keep them in sync, and keep
# them in sync with any code that iterates the categories.
declare -A FILE_CATEGORIES=(
    ["system_config"]="System configuration files and preferences"
    ["application_data"]="Application data files and user preferences"
    ["media_content"]="Images, videos, audio files and multimedia content"
    ["document_files"]="Text documents, spreadsheets, presentations"
    ["development_code"]="Source code, scripts, development projects"
    ["archive_compressed"]="Compressed archives and backup files"
    ["security_certificates"]="Security certificates, keys, and credentials"
    ["log_diagnostic"]="System logs, diagnostic files, and troubleshooting data"
    ["cache_temporary"]="Cache files, temporary data, and disposable content"
    ["database_structured"]="Database files and structured data storage"
)

# File extensions for each category
# Comma-separated, without leading dots.
# NOTE(review): "dmg" appears under both application_data and
# archive_compressed - confirm the duplication is intended.
declare -A CATEGORY_EXTENSIONS=(
    ["system_config"]="plist,conf,cfg,ini,settings,preferences"
    ["application_data"]="app,dmg,pkg,bundle,framework"
    ["media_content"]="jpg,jpeg,png,gif,mp4,mov,mp3,wav,m4a"
    ["document_files"]="pdf,doc,docx,xls,xlsx,ppt,pptx,txt,rtf"
    ["development_code"]="swift,m,h,py,js,ts,php,java,cpp,c"
    ["archive_compressed"]="zip,tar,gz,bz2,rar,7z,dmg"
    ["security_certificates"]="p12,pem,crt,key,keychain,cer"
    ["log_diagnostic"]="log,crash,diag,report,trace"
    ["cache_temporary"]="cache,tmp,temp,bak,swp"
    ["database_structured"]="db,sqlite,sql,json,xml,csv"
)

# Search priorities for different categories
# One of: critical | high | medium | low.
declare -A SEARCH_PRIORITIES=(
    ["system_config"]="high"
    ["application_data"]="medium"
    ["media_content"]="low"
    ["document_files"]="high"
    ["development_code"]="medium"
    ["archive_compressed"]="low"
    ["security_certificates"]="critical"
    ["log_diagnostic"]="medium"
    ["cache_temporary"]="low"
    ["database_structured"]="high"
)

# Print every discovery category with its description, extension list and
# search priority, one stanza per category (associative-array order).
print_file_categories() {
    echo "=== File Discovery Categories ==="
    local category
    for category in "${!FILE_CATEGORIES[@]}"; do
        printf 'Category: %s\n' "$category"
        printf '  Description: %s\n' "${FILE_CATEGORIES[$category]}"
        printf '  Extensions: %s\n' "${CATEGORY_EXTENSIONS[$category]}"
        printf '  Priority: %s\n' "${SEARCH_PRIORITIES[$category]}"
        printf '\n'
    done
}

# Display available categories
print_file_categories

File Discovery Policies

Search and Discovery Policies

#!/bin/bash

# File discovery policies for different organizational requirements
# Every key listed here must have a matching case label in
# get_discovery_policy, and vice versa (the show_policies command iterates
# this full set).
declare -A DISCOVERY_POLICIES=(
    ["security_audit"]="Security-focused file discovery with comprehensive scanning"
    ["compliance_gdpr"]="GDPR compliance file discovery for personal data identification"
    ["asset_inventory"]="Complete asset inventory and file cataloging"
    ["performance_cleanup"]="Performance optimization through file cleanup identification"
    ["backup_verification"]="Backup verification and data integrity checking"
    ["forensic_investigation"]="Forensic file discovery for incident investigation"
)

# Get discovery policy configuration
# Prints the JSON configuration for the named policy to stdout.
# Now covers all six policies declared in DISCOVERY_POLICIES; previously
# performance_cleanup and backup_verification fell through to the error arm
# even though show_policies iterates them.
# Returns 1 (error message on stderr, so redirected stdout stays clean)
# for unknown policy names.
get_discovery_policy() {
    local policy_type="$1"

    case "$policy_type" in
        "security_audit")
            cat << EOF
{
    "discovery_enabled": true,
    "search_scope": "comprehensive",
    "file_types_priority": ["security_certificates", "system_config", "log_diagnostic"],
    "content_analysis": true,
    "metadata_collection": true,
    "permission_analysis": true,
    "hash_verification": true,
    "access_tracking": true,
    "encryption_detection": true,
    "suspicious_patterns": ["password", "key", "token", "secret"],
    "reporting_level": "detailed",
    "audit_logging": "comprehensive",
    "real_time_monitoring": true
}
EOF
            ;;
        "compliance_gdpr")
            cat << EOF
{
    "discovery_enabled": true,
    "search_scope": "user_data_focused",
    "file_types_priority": ["document_files", "database_structured", "application_data"],
    "content_analysis": true,
    "metadata_collection": true,
    "personal_data_detection": true,
    "data_classification": true,
    "retention_analysis": true,
    "consent_tracking": false,
    "data_subject_identification": true,
    "cross_border_analysis": true,
    "privacy_patterns": ["email", "phone", "ssn", "passport", "address"],
    "reporting_level": "privacy_compliant",
    "audit_logging": "gdpr_compliant",
    "data_mapping": true
}
EOF
            ;;
        "asset_inventory")
            cat << EOF
{
    "discovery_enabled": true,
    "search_scope": "complete_system",
    "file_types_priority": ["application_data", "system_config", "document_files"],
    "content_analysis": false,
    "metadata_collection": true,
    "size_analysis": true,
    "modification_tracking": true,
    "ownership_analysis": true,
    "version_detection": true,
    "license_compliance": true,
    "duplicate_detection": true,
    "storage_optimization": true,
    "reporting_level": "inventory_focused",
    "audit_logging": "standard",
    "cataloging": true
}
EOF
            ;;
        "performance_cleanup")
            cat << EOF
{
    "discovery_enabled": true,
    "search_scope": "cleanup_targets",
    "file_types_priority": ["cache_temporary", "log_diagnostic", "archive_compressed"],
    "content_analysis": false,
    "metadata_collection": true,
    "size_analysis": true,
    "age_analysis": true,
    "duplicate_detection": true,
    "orphaned_file_detection": true,
    "storage_optimization": true,
    "safe_removal_candidates": true,
    "reporting_level": "cleanup_focused",
    "audit_logging": "standard",
    "scheduling_support": true
}
EOF
            ;;
        "backup_verification")
            cat << EOF
{
    "discovery_enabled": true,
    "search_scope": "backup_locations",
    "file_types_priority": ["archive_compressed", "database_structured", "document_files"],
    "content_analysis": false,
    "metadata_collection": true,
    "hash_verification": true,
    "integrity_checking": true,
    "completeness_analysis": true,
    "restoration_testing": false,
    "retention_compliance": true,
    "reporting_level": "integrity_focused",
    "audit_logging": "standard",
    "verification_scheduling": true
}
EOF
            ;;
        "forensic_investigation")
            cat << EOF
{
    "discovery_enabled": true,
    "search_scope": "forensic_complete",
    "file_types_priority": ["all_types"],
    "content_analysis": true,
    "metadata_collection": true,
    "timeline_analysis": true,
    "hash_verification": true,
    "signature_analysis": true,
    "deleted_file_recovery": true,
    "steganography_detection": true,
    "network_artifact_analysis": true,
    "chain_of_custody": true,
    "evidence_preservation": true,
    "reporting_level": "forensic_detailed",
    "audit_logging": "forensic_compliant",
    "legal_hold": true
}
EOF
            ;;
        *)
            # Error goes to stderr so 'get_discovery_policy x > file' cannot
            # leave the error text masquerading as policy JSON.
            echo "Unknown discovery policy: $policy_type" >&2
            return 1
            ;;
    esac
}

# Apply discovery policy
# Materializes the named policy JSON into /tmp/discovery_policy.json and
# prints a short summary of its key settings.
# Returns 1 when the policy name is unknown.
apply_discovery_policy() {
    local policy="$1"
    local config_file="/tmp/discovery_policy.json"

    echo "Applying file discovery policy: $policy"

    # Check the generator's exit status: the old code redirected output
    # unconditionally, so an "Unknown discovery policy" message ended up
    # inside the config file and was later parsed as JSON.
    if ! get_discovery_policy "$policy" > "$config_file"; then
        echo "❌ Failed to generate policy configuration"
        rm -f "$config_file"
        return 1
    fi

    echo "✅ File discovery policy applied successfully"
    echo "Configuration: $config_file"

    # Display key policy settings (the summary needs jq; skip it gracefully
    # on hosts where jq is not installed)
    if command -v jq >/dev/null 2>&1; then
        echo "=== Policy Summary ==="
        echo "Discovery Enabled: $(jq -r '.discovery_enabled' "$config_file")"
        echo "Search Scope: $(jq -r '.search_scope' "$config_file")"
        echo "Content Analysis: $(jq -r '.content_analysis' "$config_file")"
        echo "Metadata Collection: $(jq -r '.metadata_collection' "$config_file")"
        echo "Reporting Level: $(jq -r '.reporting_level' "$config_file")"
    else
        echo "⚠️ jq not installed - skipping policy summary"
    fi

    return 0
}

Advanced File Discovery and Analysis

Comprehensive File Discovery System

#!/bin/bash

# Advanced file discovery with content analysis and metadata extraction
#
# $1 - search criteria (substring matched against filenames; also used as a
#      grep pattern for content search at the "comprehensive" level)
# $2 - analysis level: standard | comprehensive (default: standard)
# $3 - output format: text | json | summary (default: text)
#
# NOTE(review): both while loops below read from a pipeline, so they run in
# subshells - the files_found increments are lost and "Files Found" always
# prints 0. Consider process substitution (done < <(find ...)).
# NOTE(review): find / and grep -r / scan the whole disk; expect this to be
# very slow and noisy on large volumes.
advanced_file_discovery() {
    local search_criteria="$1"
    local analysis_level="${2:-standard}"
    local output_format="${3:-text}"
    
    echo "=== Advanced File Discovery System ==="
    echo "Search Criteria: $search_criteria"
    echo "Analysis Level: $analysis_level"
    echo "Output Format: $output_format"
    
    local discovery_report="/tmp/file_discovery_$(date +%Y%m%d_%H%M%S).json"
    
    # Initialize discovery report (skeleton JSON; sections are filled in by
    # the helpers below)
    cat > "$discovery_report" << EOF
{
    "discovery_session": {
        "search_criteria": "$search_criteria",
        "analysis_level": "$analysis_level",
        "timestamp": "$(date -Iseconds)",
        "hostname": "$(hostname)",
        "operator": "$(whoami)"
    },
    "discovered_files": [],
    "file_statistics": {},
    "content_analysis": {},
    "security_findings": {}
}
EOF
    
    # Perform comprehensive file discovery
    echo "Performing comprehensive file search..."
    
    # Search by multiple criteria
    local files_found=0
    
    # Basic filename search (substring match anywhere in the name)
    echo "  - Searching by filename patterns..."
    find / -name "*$search_criteria*" -type f -print 2>/dev/null | while read -r file_path; do
        analyze_discovered_file "$file_path" "$analysis_level" "$discovery_report"
        ((files_found++))
    done
    
    # Content-based search (comprehensive level only; capped at 100 matches)
    if [[ "$analysis_level" == "comprehensive" ]]; then
        echo "  - Performing content analysis..."
        grep -r "$search_criteria" / 2>/dev/null | head -100 | while read -r match; do
            local file_path
            # grep -r output is "path:match"; take the path portion
            file_path=$(echo "$match" | cut -d':' -f1)
            analyze_discovered_file "$file_path" "$analysis_level" "$discovery_report"
        done
    fi
    
    # Generate file statistics
    generate_file_statistics "$discovery_report"
    
    # Display results
    echo ""
    echo "Discovery Results:"
    echo "  Files Found: $files_found"
    echo "  Discovery Report: $discovery_report"
    
    # Format output ("summary" requires jq)
    case "$output_format" in
        "json")
            cat "$discovery_report"
            ;;
        "summary")
            jq -r '.discovery_session, .file_statistics' "$discovery_report"
            ;;
        *)
            echo "Discovery completed - see report file for details"
            ;;
    esac
    
    return 0
}

# Analyze discovered file
# Collects metadata for one file and forwards it to log_discovered_file.
# $1 = file path, $2 = analysis level, $3 = report file (currently unused
# here beyond being accepted - TODO confirm whether it should be written).
# NOTE: the stat -f%z/-f%A/-f%Su/-f%Sm forms are the BSD/macOS flavor; on
# GNU systems they fail and the "unknown"/"0" fallbacks kick in.
analyze_discovered_file() {
    local file_path="$1"
    local analysis_level="$2"
    local report_file="$3"
    
    if [[ ! -f "$file_path" ]]; then
        return 1
    fi
    
    # Basic file information
    local file_size
    file_size=$(stat -f%z "$file_path" 2>/dev/null || echo "0")
    local file_permissions
    file_permissions=$(stat -f%A "$file_path" 2>/dev/null || echo "unknown")
    local file_owner
    file_owner=$(stat -f%Su "$file_path" 2>/dev/null || echo "unknown")
    local modification_time
    modification_time=$(stat -f%Sm "$file_path" 2>/dev/null || echo "unknown")
    
    # Enhanced analysis for comprehensive level
    local file_type="unknown"
    local content_preview=""
    local security_flags=""
    
    if [[ "$analysis_level" == "comprehensive" ]]; then
        file_type=$(file -b "$file_path" 2>/dev/null || echo "unknown")
        
        # Safe content preview for text files (first 200 bytes, newlines
        # flattened so it stays a single log field)
        if file "$file_path" | grep -q "text"; then
            content_preview=$(head -c 200 "$file_path" 2>/dev/null | tr '\n' ' ')
        fi
        
        # Security analysis
        # NOTE(review): '7.*' matches a '7' anywhere in the permission
        # string (e.g. 0674), not just the owner digit - confirm intent.
        if [[ "$file_permissions" =~ 7.* ]]; then
            security_flags="executable_by_owner"
        fi
        
        # NOTE(review): when the first flag is absent this produces a
        # leading comma (",potential_security_credential").
        if [[ "$file_path" =~ \.key$|\.pem$|\.p12$ ]]; then
            security_flags="${security_flags},potential_security_credential"
        fi
    fi
    
    # Log file discovery (owner/mtime/preview are gathered above but not
    # currently logged - TODO confirm whether they should be)
    log_discovered_file "$file_path" "$file_size" "$file_type" "$security_flags"
}

# Generate file statistics
# Injects a statistics object into the discovery report JSON in place.
# NOTE: the counts are still placeholder zeros - real aggregation over the
# discovered files is not implemented yet.
generate_file_statistics() {
    local report_file="$1"

    # Placeholder statistics (see note above)
    local total_files_found=0
    local total_size=0

    # The JSON rewrite needs jq; degrade gracefully when it is missing
    if ! command -v jq >/dev/null 2>&1; then
        echo "⚠️ jq not available - skipping statistics update" >&2
        return 0
    fi

    # Pass the timestamp via --arg rather than splicing $(date) into the jq
    # program text, where unexpected quoting would break the filter
    jq --argjson total_files "$total_files_found" \
       --argjson total_size "$total_size" \
       --arg ts "$(date -Iseconds)" \
       '.file_statistics = {
          "total_files_found": $total_files,
          "total_size_bytes": $total_size,
          "analysis_timestamp": $ts
        }' "$report_file" > "${report_file}.tmp" && mv "${report_file}.tmp" "$report_file"
}

# Log discovered file
# Appends one audit line per discovered file to the fleet discovery log
# (path is fixed; writing to /var/log generally requires root).
log_discovered_file() {
    local file_path="$1" file_size="$2" file_type="$3" security_flags="$4"

    printf '%s - File Discovered: %s (Size: %s, Type: %s, Security: %s)\n' \
        "$(date '+%Y-%m-%d %H:%M:%S')" "$file_path" "$file_size" \
        "$file_type" "$security_flags" >> "/var/log/macfleet_file_discovery.log"
}

File Discovery Management System

#!/bin/bash

# MacFleet File Discovery and Search Management System
# Comprehensive file discovery, cataloging, and management

# Configuration
# These live under /etc, /var/log and /var - create_directories below uses
# sudo to create them, so the script is intended to run with admin rights.
CONFIG_DIR="/etc/macfleet/discovery"
LOG_FILE="/var/log/macfleet_file_discovery.log"
DATA_DIR="/var/data/macfleet/discovery"
REPORTS_DIR="/var/reports/macfleet/discovery"
AUDIT_LOG="/var/log/macfleet_discovery_audit.log"
INDEX_DIR="/var/index/macfleet/discovery"

# Create required directories
# Ensures every runtime directory exists with 0755 permissions; directory
# creation goes through sudo (see configuration notes above).
create_directories() {
    local dir
    for dir in "$CONFIG_DIR" "$DATA_DIR" "$REPORTS_DIR" "$INDEX_DIR"; do
        if [[ ! -d "$dir" ]]; then
            sudo mkdir -p "$dir"
            sudo chmod 755 "$dir"
        fi
    done
}

# Logging functions
# log_action: timestamped INFO entry, appended to $LOG_FILE and echoed to stdout.
log_action() {
    printf '%s [INFO] %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$1" | tee -a "$LOG_FILE"
}

# log_error: timestamped ERROR entry, appended to $LOG_FILE and echoed to stderr.
log_error() {
    printf '%s [ERROR] %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$1" | tee -a "$LOG_FILE" >&2
}

# audit_log: timestamped AUDIT entry, appended to $AUDIT_LOG and echoed to stdout.
audit_log() {
    printf '%s [AUDIT] %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$1" | tee -a "$AUDIT_LOG"
}

# File system indexing
# Builds a timestamped index of every regular file under the given scope.
# Arguments:
#   $1 - directory scope to index (default: /Users)
#   $2 - detail level: basic | standard | comprehensive (default: standard)
# Returns: 0 on success, 1 for an unknown index level.
create_file_index() {
    local index_scope="${1:-/Users}"
    local index_level="${2:-standard}"

    log_action "Creating file system index: $index_scope (Level: $index_level)"

    echo "=== File System Indexing ==="
    echo "Index Scope: $index_scope"
    echo "Index Level: $index_level"

    local index_file="$INDEX_DIR/file_index_$(date +%Y%m%d_%H%M%S).json"

    # Initialize index metadata skeleton
    cat > "$index_file" << EOF
{
    "index_metadata": {
        "scope": "$index_scope",
        "level": "$index_level",
        "created": "$(date -Iseconds)",
        "hostname": "$(hostname)",
        "total_files": 0,
        "total_size": 0
    },
    "file_entries": []
}
EOF

    echo "Starting file system indexing..."
    local file_count=0
    local indexer progress_interval

    # Validate the level up front and map it to its per-file handler and
    # progress frequency; this replaces three near-identical loops.
    case "$index_level" in
        "basic")         indexer=index_file_basic;         progress_interval=1000 ;;
        "standard")      indexer=index_file_standard;      progress_interval=500 ;;
        "comprehensive") indexer=index_file_comprehensive; progress_interval=100 ;;
        *)
            echo "❌ Unknown index level: $index_level"
            return 1
            ;;
    esac

    # Process substitution keeps the loop in the current shell so file_count
    # survives it; the previous pipe-based form ran the loop in a subshell
    # and always reported "Files Indexed: 0".
    local file_path
    while IFS= read -r file_path; do
        "$indexer" "$file_path" "$index_file"
        file_count=$((file_count + 1))

        if (( file_count % progress_interval == 0 )); then
            echo "  Indexed $file_count files..."
        fi
    done < <(find "$index_scope" -type f -print 2>/dev/null)

    echo "✅ File system indexing completed"
    echo "  Files Indexed: $file_count"
    echo "  Index File: $index_file"

    audit_log "File system index created: $index_scope ($file_count files)"

    return 0
}

# Index file with basic information
# Appends a one-line record per file to the basic index log.
# Deliberately does no stat() work so basic-level indexing stays cheap; the
# previous version ran two stat(1) calls whose results were never used.
# Returns 1 when the path is not a regular file.
index_file_basic() {
    local file_path="$1"
    local index_file="$2"

    if [[ ! -f "$file_path" ]]; then
        return 1
    fi

    # Basic indexing (minimal overhead)
    echo "Indexed: $file_path" >> "${index_file}.basic_log"
}

# Index file with standard information
# Appends one CSV row per file: timestamp,path,size,permissions,owner,extension.
# The stat -f forms are the macOS (BSD) flavor; when they fail the
# "0"/"unknown" fallbacks are recorded instead.
index_file_standard() {
    local file_path="$1"
    local index_file="$2"

    [[ -f "$file_path" ]] || return 1

    local size perms owner ext
    size=$(stat -f%z "$file_path" 2>/dev/null || echo "0")
    perms=$(stat -f%A "$file_path" 2>/dev/null || echo "unknown")
    owner=$(stat -f%Su "$file_path" 2>/dev/null || echo "unknown")
    ext="${file_path##*.}"

    printf '%s,%s,%s,%s,%s,%s\n' "$(date -Iseconds)" "$file_path" "$size" \
        "$perms" "$owner" "$ext" >> "${index_file}.csv"
}

# Index file with comprehensive information
# Delegates the analysis to analyze_file_comprehensive and appends the
# resulting record to the detailed log next to the index file.
index_file_comprehensive() {
    local file_path="$1"
    local index_file="$2"

    [[ -f "$file_path" ]] || return 1

    local record
    record=$(analyze_file_comprehensive "$file_path")
    echo "$record" >> "${index_file}.detailed_log"
}

# Comprehensive file analysis
# Emits one pipe-delimited record: Path|Size|Type|Hash|Timestamp.
# stat -f%z is the macOS (BSD) form; on other platforms it fails and the
# "0" fallback is used.
analyze_file_comprehensive() {
    local file_path="$1"

    local file_size
    file_size=$(stat -f%z "$file_path" 2>/dev/null || echo "0")
    local file_type
    file_type=$(file -b "$file_path" 2>/dev/null || echo "unknown")

    # The old '|| echo unknown' bound to cut, which succeeds even when
    # shasum fails - leaving an empty hash. Test the result instead.
    local file_hash
    file_hash=$(shasum -a 256 "$file_path" 2>/dev/null | cut -d' ' -f1)
    [[ -n "$file_hash" ]] || file_hash="unknown"

    echo "Path:$file_path|Size:$file_size|Type:$file_type|Hash:$file_hash|Timestamp:$(date -Iseconds)"
}

# Search indexed files
# Arguments:
#   $1 - search term
#   $2 - search type: name | extension | size (default: name)
#   $3 - index scope: "latest" or an index filename inside $INDEX_DIR
#        (default: latest)
# Returns: 1 when no index exists or the search type is unknown.
search_file_index() {
    local search_term="$1"
    local search_type="${2:-name}"
    local index_scope="${3:-latest}"

    log_action "Searching file index: $search_term (Type: $search_type)"

    echo "=== Indexed File Search ==="
    echo "Search Term: $search_term"
    echo "Search Type: $search_type"
    echo "Index Scope: $index_scope"

    # Find the appropriate index file
    local index_file=""
    if [[ "$index_scope" == "latest" ]]; then
        # Pick the newest index by mtime using a glob and -nt comparisons
        # instead of parsing 'ls -t' output, which breaks on filenames
        # containing whitespace or newlines.
        local candidate
        for candidate in "$INDEX_DIR"/file_index_*.json; do
            [[ -e "$candidate" ]] || continue
            if [[ -z "$index_file" || "$candidate" -nt "$index_file" ]]; then
                index_file="$candidate"
            fi
        done
    else
        index_file="$INDEX_DIR/$index_scope"
    fi

    if [[ ! -f "$index_file" ]]; then
        echo "❌ No index file found"
        return 1
    fi

    echo "Using index: $index_file"

    # Perform search based on type (the CSV companion file is produced by
    # standard-level indexing); '--' protects terms starting with '-'
    case "$search_type" in
        "name")
            grep -i -- "$search_term" "${index_file}.csv" 2>/dev/null | head -20
            ;;
        "extension")
            # extension is the last CSV column, hence the anchored match
            grep -- ",$search_term$" "${index_file}.csv" 2>/dev/null | head -20
            ;;
        "size")
            # Search by file size range
            echo "Size-based search not implemented in basic version"
            ;;
        *)
            echo "❌ Unknown search type: $search_type"
            return 1
            ;;
    esac

    audit_log "Index search completed: $search_term ($search_type)"

    return 0
}

# Generate discovery report
# Routes to a type-specific generator and writes the report under
# $REPORTS_DIR with a timestamped filename.
# $1 = report type: summary | detailed | security | compliance
# $2 = scope (directory to analyze, default "system")
# NOTE(review): only generate_summary_report is defined in this portion of
# the file; generate_detailed_report, generate_security_report and
# generate_compliance_report are presumably provided elsewhere - confirm
# before relying on those report types.
generate_discovery_report() {
    local report_type="$1"
    local scope="${2:-system}"
    
    log_action "Generating discovery report: $report_type (Scope: $scope)"
    
    echo "=== File Discovery Report Generation ==="
    echo "Report Type: $report_type"
    echo "Scope: $scope"
    
    local report_file="$REPORTS_DIR/discovery_report_${report_type}_$(date +%Y%m%d_%H%M%S).json"
    
    case "$report_type" in
        "summary")
            generate_summary_report "$scope" "$report_file"
            ;;
        "detailed")
            generate_detailed_report "$scope" "$report_file"
            ;;
        "security")
            generate_security_report "$scope" "$report_file"
            ;;
        "compliance")
            generate_compliance_report "$scope" "$report_file"
            ;;
        *)
            echo "❌ Unknown report type: $report_type"
            return 1
            ;;
    esac
    
    echo "✅ Discovery report generated: $report_file"
    audit_log "Discovery report generated: $report_type for $scope"
    
    return 0
}

# Generate summary report
# Writes a minimal JSON summary (file/directory counts) for the scope
# directory to the given report path.
generate_summary_report() {
    local scope="$1"
    local report_file="$2"

    # Headline counts for the report body; errors from unreadable
    # subtrees are ignored so the counts cover what is accessible.
    local file_total dir_total
    file_total=$(find "$scope" -type f 2>/dev/null | wc -l | tr -d ' ')
    dir_total=$(find "$scope" -type d 2>/dev/null | wc -l | tr -d ' ')

    cat > "$report_file" << EOF
{
    "report_type": "summary",
    "scope": "$scope",
    "generated": "$(date -Iseconds)",
    "hostname": "$(hostname)",
    "statistics": {
        "total_files": $file_total,
        "total_directories": $dir_total
    }
}
EOF
}

# Main function with command routing
# Dispatches the first CLI argument to the matching handler and forwards the
# remaining arguments untouched. Unknown or missing commands print usage.
main() {
    local command="$1"
    shift
    
    # Initialize
    create_directories
    
    case "$command" in
        "find_file")
            # Find specific file by name
            discover_file_paths "$@"
            ;;
        "find_extension")
            # Find files by extension
            find_files_by_extension "$@"
            ;;
        "advanced_search")
            # Advanced file discovery
            advanced_file_discovery "$@"
            ;;
        "create_index")
            # Create file system index
            create_file_index "$@"
            ;;
        "search_index")
            # Search existing index
            search_file_index "$@"
            ;;
        "apply_policy")
            # Apply discovery policy
            apply_discovery_policy "$@"
            ;;
        "generate_report")
            # Generate discovery report
            generate_discovery_report "$@"
            ;;
        "show_categories")
            # Show file categories
            print_file_categories
            ;;
        "show_policies")
            # Show available policies (requires jq for pretty-printing);
            # this list must stay in sync with get_discovery_policy's cases
            for policy in security_audit compliance_gdpr asset_inventory performance_cleanup backup_verification forensic_investigation; do
                echo "Policy: $policy"
                get_discovery_policy "$policy" | jq .
                echo ""
            done
            ;;
        *)
            # Usage summary (also shown when no command is given)
            echo "MacFleet File Discovery and Search Management System"
            echo "Usage: $0 <command> [options]"
            echo ""
            echo "Commands:"
            echo "  find_file <pattern> [scope] [type]                - Find specific files by pattern"
            echo "  find_extension <ext> [count] [path]               - Find files by extension"
            echo "  advanced_search <criteria> [level] [format]      - Advanced file discovery"
            echo "  create_index [scope] [level]                     - Create file system index"
            echo "  search_index <term> [type] [scope]               - Search existing index"
            echo "  apply_policy <policy>                            - Apply discovery policy"
            echo "  generate_report <type> [scope]                   - Generate discovery report"
            echo "  show_categories                                  - Show file categories"
            echo "  show_policies                                    - Show discovery policies"
            echo ""
            echo "Examples:"
            echo "  $0 find_file \"document.txt\" \"/Users\" \"name\""
            echo "  $0 find_extension \"pdf\" 20 \"/Users\""
            echo "  $0 advanced_search \"config\" \"comprehensive\" \"json\""
            echo "  $0 create_index \"/Users\" \"standard\""
            echo "  $0 search_index \"report\" \"name\" \"latest\""
            echo "  $0 apply_policy \"security_audit\""
            echo "  $0 generate_report \"summary\" \"/Users\""
            ;;
    esac
}

# Execute main function with all arguments
main "$@"

Security Considerations

File Discovery Security

  • Access Controls - Respect file permissions and access restrictions
  • Sensitive Data Protection - Avoid exposing sensitive file contents in logs
  • Performance Impact - Monitor system performance during large-scale discovery
  • Privacy Compliance - Ensure discovery practices comply with privacy regulations
  • Audit Trails - Maintain comprehensive logs of discovery activities

Compliance Framework

  • Data Classification - Properly classify discovered files based on sensitivity
  • Retention Policies - Implement appropriate data retention and disposal
  • Access Logging - Log all file access and discovery activities
  • Privacy Protection - Protect personal and sensitive information during discovery
  • Regulatory Compliance - Meet industry-specific discovery and cataloging requirements

Troubleshooting Guide

Common Issues

Permission Denied Errors

  • Run discovery with appropriate privileges: sudo for system-wide searches
  • Check file and directory permissions: ls -la
  • Verify search scope accessibility

Slow Search Performance

  • Limit search scope to specific directories
  • Use indexed searches for frequently accessed data
  • Consider search criteria specificity

Too Many Results

  • Use more specific search patterns
  • Implement result limits with head -n <count>
  • Filter by file type or date ranges

Diagnostic Commands

# Test basic find functionality
find /Users -name "*.txt" -print 2>/dev/null | head -5

# Check file system permissions
ls -la /path/to/search/directory

# Monitor search performance
time find /Users -name "pattern" -print 2>/dev/null

# Check available disk space for indexing
df -h

Important Notes

  • File Path Escaping - Use quotes or backslashes for paths with spaces: "New Folder/file.txt" or New\ Folder/file.txt
  • Search Scope - Broader searches take longer but provide more complete results
  • System Impact - Large-scale file discovery can impact system performance
  • Result Limits - Use appropriate limits to prevent overwhelming output
  • Index Maintenance - Regularly update file indexes for accuracy
  • Security Awareness - Be cautious when searching for sensitive files and credentials

File and Folder Deletion Management for macOS

Implement enterprise-grade file and folder deletion management across your MacFleet deployment with secure data removal, audit trails, backup protection, compliance management, and automated cleanup operations. This tutorial provides solutions for maintaining data hygiene while ensuring security, compliance, and data protection.

Understanding macOS File Deletion Management

macOS provides several methods for file and folder deletion:

  • rm - Remove files and directories from command line
  • rm -r - Recursive deletion for directories with contents
  • rm -f - Force deletion without confirmation prompts
  • Finder - GUI-based trash/delete operations
  • srm - Secure deletion with data overwriting (legacy; removed from macOS in 10.12 Sierra)

Basic File Deletion Operations

Simple File/Folder Deletion

#!/bin/bash

# Basic file deletion
# Quote paths containing spaces; rm deletes immediately and does NOT use the Trash.
rm '/path to the file or folder'

Recursive Directory Deletion

#!/bin/bash

# Recursive folder deletion
# -r descends into directories, -f suppresses prompts - there is no undo.
rm -rf /Users/username/Desktop/My\ Files

Delete Folder Contents Only

#!/bin/bash

# Delete folder contents
# The glob must be OUTSIDE the quotes: rm '/path/*' looks for a file
# literally named '*' and deletes nothing else. Quote only the directory
# part so the shell can expand the wildcard.
rm -- "/path to the folder"/*

Multiple File Deletion

#!/bin/bash

# Delete multiple specific files
rm Desktop/1.png Desktop/2.png

# Delete files by pattern
# Brace expansion expands to: Desktop/1.png Desktop/2.png
rm Desktop/{1,2}.png

# Delete all files of type
rm Desktop/*.png

Enterprise File Deletion Management System

#!/bin/bash

# MacFleet Enterprise File and Folder Deletion Management Tool
# Secure data removal with compliance and audit capabilities

# Configuration
# CONFIG_FILE    - policy toggles sourced into this script below
# LOG_FILE       - operational log
# BACKUP_DIR     - pre-deletion backups are kept here
# AUDIT_LOG      - append-only audit trail
# QUARANTINE_DIR - presumably holds files set aside instead of deleted
#                  (TODO confirm against the quarantine logic)
CONFIG_FILE="/etc/macfleet/file_deletion_policy.conf"
LOG_FILE="/var/log/macfleet_file_deletion.log"
BACKUP_DIR="/Library/MacFleet/DeletedFiles"
AUDIT_LOG="/var/log/macfleet_deletion_audit.log"
QUARANTINE_DIR="/Library/MacFleet/QuarantinedFiles"

# Create directories
# Requires write access to /etc, /var/log and /Library (run as root).
mkdir -p "$(dirname "$CONFIG_FILE")" "$(dirname "$LOG_FILE")" "$BACKUP_DIR" "$(dirname "$AUDIT_LOG")" "$QUARANTINE_DIR"

# Default file deletion management policy
# Only write the shipped defaults when no policy file exists yet: the
# previous unconditional 'cat >' overwrote administrator customizations on
# every run of the script.
if [[ ! -f "$CONFIG_FILE" ]]; then
    cat > "$CONFIG_FILE" 2>/dev/null << 'EOF' || true
# MacFleet Enterprise File Deletion Management Policy
# Version: 2.0

# Deletion Policy Enforcement
ENFORCE_DELETION_POLICIES=true
REQUIRE_BACKUP_BEFORE_DELETE=true
SECURE_DELETION_ENABLED=true
PREVENT_SYSTEM_FILE_DELETION=true
BUSINESS_HOURS_PROTECTION=true

# Security and Data Protection
REQUIRE_ADMIN_APPROVAL=false
VALIDATE_FILE_PERMISSIONS=true
PROTECTED_DIRECTORIES_ENFORCEMENT=true
MALWARE_QUARANTINE_CHECK=true
SENSITIVE_DATA_DETECTION=true

# Backup and Recovery
AUTOMATIC_BACKUP_ENABLED=true
BACKUP_RETENTION_DAYS=30
COMPRESSED_BACKUPS=true
ENCRYPTED_BACKUPS=true
RECOVERY_VALIDATION=true

# Compliance and Audit
AUDIT_ALL_DELETIONS=true
COMPLIANCE_REPORTING=true
INCIDENT_DOCUMENTATION=true
GDPR_COMPLIANCE=true
DATA_CLASSIFICATION_AWARE=true

# User Experience
USER_NOTIFICATION_ENABLED=true
DELETION_CONFIRMATION=true
PROGRESS_MONITORING=true
BATCH_OPERATION_SUPPORT=true
ROLLBACK_CAPABILITY=true

# Performance and Safety
DELETION_TIMEOUT=300
MAX_CONCURRENT_DELETIONS=5
SIZE_LIMIT_CHECK=true
FREE_SPACE_MONITORING=true
SYSTEM_LOAD_MONITORING=true
EOF
fi

# Source configuration
source "$CONFIG_FILE" 2>/dev/null || true

# Logging function
# Appends a timestamped entry to the tool log and echoes it to stdout.
# Arguments: $1 - message text
# Globals:   LOG_FILE (append target)
log_action() {
    local stamp
    stamp=$(date '+%Y-%m-%d %H:%M:%S')
    printf '%s - %s\n' "$stamp" "$1" | tee -a "$LOG_FILE"
}

# Audit logging function
# Writes one structured entry to the audit trail (not echoed to stdout).
# Arguments: $1 - action, $2 - file path, $3 - result, $4 - details
# Globals:   AUDIT_LOG (append target); uses get_file_size for the SIZE field
audit_log() {
    local action="$1" file_path="$2" result="$3" details="$4"
    local stamp size
    stamp=$(date '+%Y-%m-%d %H:%M:%S')
    size=$(get_file_size "$file_path")
    printf '%s - ACTION:%s FILE:%s RESULT:%s DETAILS:%s USER:%s SIZE:%s\n' \
        "$stamp" "$action" "$file_path" "$result" "$details" "$(whoami)" "$size" \
        >> "$AUDIT_LOG"
}

# Get file size for audit
# Prints the human-readable size (du -sh) of a file or directory.
# Prints "0" when the path does not exist; "unknown" if the size pipeline fails.
get_file_size() {
    local target="$1"
    if [[ -e "$target" ]]; then
        du -sh "$target" 2>/dev/null | awk '{print $1}' || echo "unknown"
    else
        echo "0"
    fi
}

# Validate file deletion safety
# Policy gate for a deletion request. Checks, in order: existence, protected
# system paths, protected directories, business-hours protection for user
# data, write permission, and sensitive-data content. Every rejection is
# recorded in the audit log.
# Arguments: $1 - file or directory path
#            $2 - "true" to bypass the force-overridable checks (protected
#                 directories, business hours, sensitive data); default "false"
# Globals (read): PREVENT_SYSTEM_FILE_DELETION, PROTECTED_DIRECTORIES_ENFORCEMENT,
#            BUSINESS_HOURS_PROTECTION, VALIDATE_FILE_PERMISSIONS,
#            SENSITIVE_DATA_DETECTION (sourced from CONFIG_FILE)
# Returns: 0 when deletion may proceed, 1 when any check rejects it
validate_deletion_safety() {
    local file_path="$1"
    local force="${2:-false}"
    
    echo "=== File Deletion Safety Validation ==="
    
    # Check if file/directory exists
    if [[ ! -e "$file_path" ]]; then
        echo "❌ File or directory not found: $file_path"
        audit_log "VALIDATION" "$file_path" "NOT_FOUND" "File or directory does not exist"
        return 1
    fi
    
    echo "✅ File/directory found: $file_path"
    
    # Prevent system file deletion (never force-overridable)
    if [[ "$PREVENT_SYSTEM_FILE_DELETION" == "true" ]]; then
        local protected_paths=(
            "/System"
            "/Library/System"
            "/usr/bin"
            "/usr/sbin"
            "/bin"
            "/sbin"
            "/var/db"
            "/private/var/db"
            "/Applications/Utilities"
        )
        
        local protected_path
        for protected_path in "${protected_paths[@]}"; do
            # Prefix match: anything at or below a protected root is refused
            if [[ "$file_path" == "$protected_path"* ]]; then
                echo "❌ Cannot delete protected system path: $file_path"
                audit_log "VALIDATION" "$file_path" "PROTECTED_PATH" "System file deletion blocked"
                return 1
            fi
        done
    fi
    
    # Check protected directories (force-overridable)
    if [[ "$PROTECTED_DIRECTORIES_ENFORCEMENT" == "true" && "$force" != "true" ]]; then
        local protected_dirs=(
            "/Users/*/Documents/Important"
            "/Users/*/Desktop/Critical"
            "/Library/MacFleet/Config"
            "/etc"
            "/Applications"
        )
        
        local protected_dir
        for protected_dir in "${protected_dirs[@]}"; do
            # RHS deliberately unquoted so the "/Users/*" entries glob-match
            if [[ "$file_path" == $protected_dir* ]]; then
                echo "❌ File in protected directory: $file_path"
                audit_log "VALIDATION" "$file_path" "PROTECTED_DIR" "Protected directory access blocked"
                return 1
            fi
        done
    fi
    
    # Business hours protection for user data (Mon-Fri, 09:00-17:59)
    if [[ "$BUSINESS_HOURS_PROTECTION" == "true" && "$force" != "true" ]]; then
        local current_hour
        # Force base-10: `date +%H` zero-pads (08, 09), which bash integer
        # comparison would otherwise reject as an invalid octal constant,
        # breaking this check between 08:00 and 09:59.
        current_hour=$((10#$(date +%H)))
        local current_day
        current_day=$(date +%u)  # 1=Monday, 7=Sunday
        
        if [[ $current_day -ge 1 && $current_day -le 5 && $current_hour -ge 9 && $current_hour -lt 18 ]]; then
            if [[ "$file_path" == "/Users/"* ]]; then
                echo "⚠️  Business hours protection: User data deletion restricted"
                audit_log "VALIDATION" "$file_path" "BUSINESS_HOURS" "User data deletion blocked during business hours"
                return 1
            fi
        fi
    fi
    
    # Check file permissions: deletion needs write access to the entry
    # itself or to its containing directory
    if [[ "$VALIDATE_FILE_PERMISSIONS" == "true" ]]; then
        if [[ ! -w "$file_path" && ! -w "$(dirname "$file_path")" ]]; then
            echo "❌ Insufficient permissions to delete: $file_path"
            audit_log "VALIDATION" "$file_path" "NO_PERMISSION" "Insufficient permissions for deletion"
            return 1
        fi
    fi
    
    # Sensitive data detection (force-overridable)
    if [[ "$SENSITIVE_DATA_DETECTION" == "true" ]]; then
        if detect_sensitive_data "$file_path"; then
            echo "⚠️  Sensitive data detected in: $file_path"
            if [[ "$force" != "true" ]]; then
                audit_log "VALIDATION" "$file_path" "SENSITIVE_DATA" "Sensitive data deletion requires force flag"
                return 1
            fi
        fi
    fi
    
    audit_log "VALIDATION" "$file_path" "PASSED" "All safety checks passed"
    return 0
}

# Detect sensitive data in files
# Scans a regular file for common sensitive-data indicators: SSN-like and
# credit-card-like digit groups plus keyword markers such as "password".
# Arguments: $1 - path to the file to scan
# Returns: 0 if a sensitive pattern is found, 1 otherwise (directories and
#          non-existent paths are never scanned and return 1)
detect_sensitive_data() {
    local file_path="$1"
    
    if [[ -f "$file_path" ]]; then
        # Patterns are POSIX ERE for `grep -E`: the original `\d`/`\b` PCRE
        # shorthands are not supported by grep's default BRE engine, so the
        # digit-group patterns never matched anything.
        local sensitive_patterns=(
            "[0-9]{3}-[0-9]{2}-[0-9]{4}"                       # SSN pattern
            "[0-9]{4}[- ]?[0-9]{4}[- ]?[0-9]{4}[- ]?[0-9]{4}"  # Credit card pattern
            "password"
            "secret"
            "private.*key"
            "confidential"
        )
        
        local pattern
        for pattern in "${sensitive_patterns[@]}"; do
            if grep -Eqi -- "$pattern" "$file_path" 2>/dev/null; then
                return 0  # Sensitive data found
            fi
        done
    fi
    
    return 1  # No sensitive data found
}

# Create backup before deletion
# Copies (or archives) the target into BACKUP_DIR before it is deleted,
# optionally compressing and encrypting it, and records the backup in the
# manifest log. No-op (success) when AUTOMATIC_BACKUP_ENABLED is not "true".
# Arguments: $1 - path to back up
#            $2 - reason tag appended to the backup name (default "deletion")
# Globals (read): AUTOMATIC_BACKUP_ENABLED, COMPRESSED_BACKUPS,
#            ENCRYPTED_BACKUPS, BACKUP_DIR
# Returns: 0 on success (or when backups are disabled), 1 on copy failure
create_backup() {
    local file_path="$1"
    local backup_reason="${2:-deletion}"
    
    if [[ "$AUTOMATIC_BACKUP_ENABLED" != "true" ]]; then
        return 0
    fi
    
    echo "=== Creating Backup ==="
    
    local backup_timestamp
    backup_timestamp=$(date +%Y%m%d_%H%M%S)
    local backup_name
    backup_name="$(basename "$file_path")_${backup_timestamp}_${backup_reason}"
    local backup_path="$BACKUP_DIR/$backup_name"
    
    # Create backup directory structure
    mkdir -p "$BACKUP_DIR"
    
    # Capture the copy/archive exit status explicitly: in the original code
    # the backup_path reassignment after tar/gzip was the last command in the
    # branch, so `$?` tested the assignment (always 0) and compression
    # failures were silently reported as successful backups.
    local copy_rc=0
    
    # Copy file/directory to backup location
    if [[ -d "$file_path" ]]; then
        # Directory backup
        if [[ "$COMPRESSED_BACKUPS" == "true" ]]; then
            tar -czf "${backup_path}.tar.gz" -C "$(dirname "$file_path")" "$(basename "$file_path")" 2>/dev/null
            copy_rc=$?
            backup_path="${backup_path}.tar.gz"
        else
            cp -R "$file_path" "$backup_path" 2>/dev/null
            copy_rc=$?
        fi
    else
        # File backup: compress only files larger than 1 MiB
        # (`stat -f%z` is the BSD/macOS size flag; elsewhere it falls back to 0)
        if [[ "$COMPRESSED_BACKUPS" == "true" && $(stat -f%z "$file_path" 2>/dev/null || echo 0) -gt 1048576 ]]; then
            gzip -c "$file_path" > "${backup_path}.gz" 2>/dev/null
            copy_rc=$?
            backup_path="${backup_path}.gz"
        else
            cp "$file_path" "$backup_path" 2>/dev/null
            copy_rc=$?
        fi
    fi
    
    if [[ $copy_rc -eq 0 ]]; then
        echo "✅ Backup created: $backup_path"
        
        # Encrypt backup if enabled
        if [[ "$ENCRYPTED_BACKUPS" == "true" ]]; then
            encrypt_backup "$backup_path"
        fi
        
        # Record backup in metadata
        echo "$(date -u +%Y-%m-%dT%H:%M:%SZ)|$file_path|$backup_path|$backup_reason" >> "$BACKUP_DIR/backup_manifest.log"
        
        log_action "Backup created for $file_path at $backup_path"
        audit_log "BACKUP" "$file_path" "CREATED" "Backup path: $backup_path"
        
        return 0
    else
        echo "❌ Failed to create backup for: $file_path"
        log_action "FAILED: Backup creation failed for $file_path"
        audit_log "BACKUP" "$file_path" "FAILED" "Backup creation error"
        return 1
    fi
}

# Encrypt backup file
# Encrypts the backup in place (replacing the plaintext copy with a .enc
# file) using AES-256-CBC when openssl is available; silently does nothing
# otherwise.
# Arguments: $1 - path of the backup file to encrypt
# WARNING(review): the passphrase is hardcoded and derived from the current
# year, so anyone with this script can decrypt every backup — replace with
# proper enterprise key management before production use.
encrypt_backup() {
    local backup_path="$1"
    
    # Simple encryption using built-in tools (in production, use proper enterprise encryption)
    if command -v openssl >/dev/null 2>&1; then
        openssl enc -aes-256-cbc -salt -in "$backup_path" -out "${backup_path}.enc" -k "MacFleet_Backup_Key_$(date +%Y)" 2>/dev/null
        if [[ $? -eq 0 ]]; then
            # Remove the plaintext copy only after a successful encryption
            rm "$backup_path"
            echo "🔐 Backup encrypted: ${backup_path}.enc"
            log_action "Backup encrypted: ${backup_path}.enc"
        fi
    fi
}

# Secure file deletion
# For regular files (when SECURE_DELETION_ENABLED=true) overwrites the
# content in place — N passes of random data plus a final zero pass — before
# unlinking; directories are removed with rm -rf without overwriting.
# Arguments: $1 - path to delete, $2 - number of overwrite passes (default 3)
# Globals (read): SECURE_DELETION_ENABLED gates the overwrite phase
# Returns: 0 on successful removal, 1 if the path is missing or rm fails
# NOTE(review): in-place overwrites are generally ineffective on SSDs and
# copy-on-write filesystems (APFS) — confirm this satisfies the intended
# data-destruction requirement.
secure_delete() {
    local file_path="$1"
    local secure_passes="${2:-3}"
    
    echo "=== Secure Deletion Process ==="
    
    if [[ ! -e "$file_path" ]]; then
        echo "❌ File not found for secure deletion: $file_path"
        return 1
    fi
    
    # Captured before the overwrites so the audit entry reports the
    # original size
    local file_size
    file_size=$(get_file_size "$file_path")
    
    # For files, perform secure overwrite before deletion
    if [[ -f "$file_path" && "$SECURE_DELETION_ENABLED" == "true" ]]; then
        echo "🔐 Performing secure deletion with $secure_passes passes..."
        
        # Multiple pass overwrite; the block count is taken from the file's
        # current on-disk size (du -k) and dd errors are tolerated (|| true)
        for ((pass=1; pass<=secure_passes; pass++)); do
            echo "Pass $pass/$secure_passes: Overwriting with random data..."
            dd if=/dev/urandom of="$file_path" bs=1024 count=$(du -k "$file_path" 2>/dev/null | cut -f1) 2>/dev/null || true
            sync
        done
        
        # Final zero pass
        dd if=/dev/zero of="$file_path" bs=1024 count=$(du -k "$file_path" 2>/dev/null | cut -f1) 2>/dev/null || true
        sync
        
        echo "🔐 Secure overwrite completed"
        audit_log "SECURE_DELETE" "$file_path" "OVERWRITTEN" "Secure overwrite with $secure_passes passes"
    fi
    
    # Perform final deletion
    if [[ -d "$file_path" ]]; then
        # Directory deletion
        rm -rf "$file_path" 2>/dev/null
    else
        # File deletion
        rm -f "$file_path" 2>/dev/null
    fi
    
    # $? here is the status of whichever rm ran above
    local deletion_result=$?
    
    if [[ $deletion_result -eq 0 ]]; then
        echo "✅ File securely deleted: $file_path (Size: $file_size)"
        audit_log "SECURE_DELETE" "$file_path" "SUCCESS" "Secure deletion completed (Size: $file_size)"
        return 0
    else
        echo "❌ Failed to delete: $file_path"
        audit_log "SECURE_DELETE" "$file_path" "FAILED" "Deletion failed"
        return 1
    fi
}

# Enterprise file deletion with comprehensive management
# Orchestrates a single deletion end-to-end: safety validation, optional
# pre-deletion backup, resource check, the (secure or standard) delete
# itself, user notification, stats update and related-file cleanup.
# Arguments: $1 - path to delete
#            $2 - force  ("true" bypasses overridable safety checks; default "false")
#            $3 - secure ("true" = overwrite-then-delete via secure_delete; default "true")
#            $4 - backup ("true" = back up before deleting; default "true")
# Globals (read): REQUIRE_BACKUP_BEFORE_DELETE, DELETION_CONFIRMATION
# Returns: 0 on successful deletion, 1 on validation/backup/delete failure
enterprise_file_delete() {
    local file_path="$1"
    local force="${2:-false}"
    local secure="${3:-true}"
    local backup="${4:-true}"
    
    echo "=== Enterprise File Deletion ==="
    
    if [[ -z "$file_path" ]]; then
        echo "❌ File path required"
        return 1
    fi
    
    log_action "ENTERPRISE FILE DELETE: Starting deletion of $file_path"
    
    # Pre-deletion validation
    if ! validate_deletion_safety "$file_path" "$force"; then
        log_action "FAILED: File deletion safety validation failed for $file_path"
        return 1
    fi
    
    # Create backup if required (policy can force it even when $4 is "false")
    if [[ "$backup" == "true" || "$REQUIRE_BACKUP_BEFORE_DELETE" == "true" ]]; then
        if ! create_backup "$file_path" "pre_deletion"; then
            if [[ "$force" != "true" ]]; then
                echo "❌ Backup failed and force mode not enabled"
                return 1
            else
                echo "⚠️  Proceeding without backup (force mode)"
            fi
        fi
    fi
    
    # Check system resources (advisory only — never blocks the deletion)
    check_system_resources
    
    # User confirmation if enabled
    # NOTE(review): this only logs the requirement; no interactive prompt or
    # approval workflow is actually wired up yet.
    if [[ "$DELETION_CONFIRMATION" == "true" && "$force" != "true" ]]; then
        echo "⚠️  Deletion confirmation required for: $file_path"
        # In automated scripts, this would integrate with approval workflows
        log_action "Deletion confirmation required for $file_path"
    fi
    
    # Perform deletion
    local start_time
    start_time=$(date +%s)
    
    if [[ "$secure" == "true" ]]; then
        secure_delete "$file_path"
    else
        # Standard deletion
        if [[ -d "$file_path" ]]; then
            rm -rf "$file_path" 2>/dev/null
        else
            rm -f "$file_path" 2>/dev/null
        fi
    fi
    
    # $? is the status of secure_delete or rm (last command executed above)
    local deletion_result=$?
    local end_time
    end_time=$(date +%s)
    local duration=$((end_time - start_time))
    
    # Send notifications
    send_deletion_notification "$file_path" "$deletion_result" "$duration"
    
    # Cleanup and verification
    # NOTE(review): update_deletion_stats and cleanup_related_files are not
    # defined in this section of the file — confirm they exist elsewhere.
    if [[ $deletion_result -eq 0 ]]; then
        echo "✅ File deletion completed successfully: $file_path"
        log_action "SUCCESS: File deleted successfully: $file_path (Duration: ${duration}s)"
        
        # Update usage statistics
        update_deletion_stats "$file_path"
        
        # Cleanup related files
        cleanup_related_files "$file_path"
        
        return 0
    else
        echo "❌ File deletion failed: $file_path"
        log_action "FAILED: Could not delete file: $file_path"
        return 1
    fi
}

# Send deletion notification
# Notifies every logged-in user about a deletion outcome via osascript.
# Arguments: $1 - file path, $2 - deletion exit code, $3 - duration (seconds)
# Globals:   USER_NOTIFICATION_ENABLED gates the whole routine
send_deletion_notification() {
    local file_path="$1"
    local result="$2"
    local duration="$3"

    [[ "$USER_NOTIFICATION_ENABLED" == "true" ]] || return 0

    echo "=== Sending Deletion Notifications ==="

    local notification_title="MacFleet File Management"
    local result_text
    if [[ $result -eq 0 ]]; then
        result_text="success"
    else
        result_text="failed"
    fi

    local notification_message
    notification_message="File deletion $result_text: $(basename "$file_path") (${duration}s)"

    # Deliver to each distinct logged-in user; osascript must run as that
    # user to reach their notification center, and failures are tolerated.
    local user
    while IFS= read -r user; do
        [[ -n "$user" ]] || continue
        sudo -u "$user" osascript -e "display notification \"$notification_message\" with title \"$notification_title\"" 2>/dev/null || true
        echo "📱 Notification sent to user: $user"
    done < <(who | awk '{print $1}' | sort -u)

    log_action "Deletion notifications sent for $file_path ($result_text)"
}

# Bulk file deletion operations
# Deletes many targets in one run. $1 is either a path to a list file (one
# target per line, '#' comments and blank lines skipped) or a shell glob
# pattern expanded in place. Each target goes through enterprise_file_delete
# with the given force/secure flags.
# Arguments: $1 - list file path or glob pattern
#            $2 - force (default "false"), $3 - secure (default "true")
# Returns: 0; per-target results are summarized on stdout, log and audit trail
bulk_file_deletion() {
    local file_list="$1"
    local force="${2:-false}"
    local secure="${3:-true}"
    
    echo "=== Bulk File Deletion Operations ==="
    
    local success_count=0
    local failure_count=0
    local total_count=0
    # NOTE(review): total_size is never accumulated — currently unused
    local total_size=0
    
    # Handle different input types
    if [[ -f "$file_list" ]]; then
        # File list from file; the loop reads via redirection (not a pipe),
        # so the counters updated inside persist after the loop ends
        while IFS= read -r file_path; do
            # Skip empty lines and comments
            if [[ -z "$file_path" || "$file_path" == \#* ]]; then
                continue
            fi
            
            ((total_count++))
            echo "Processing ($total_count): $file_path"
            
            if enterprise_file_delete "$file_path" "$force" "$secure"; then
                ((success_count++))
            else
                ((failure_count++))
            fi
            
            echo "---"
            sleep 1  # Brief pause between operations
        done < "$file_list"
    else
        # Pattern-based deletion
        echo "Pattern-based deletion: $file_list"
        
        # Expand pattern and process each file
        # ($file_list is deliberately unquoted so the shell glob expands)
        for file_path in $file_list; do
            if [[ -e "$file_path" ]]; then
                ((total_count++))
                echo "Processing ($total_count): $file_path"
                
                if enterprise_file_delete "$file_path" "$force" "$secure"; then
                    ((success_count++))
                else
                    ((failure_count++))
                fi
            fi
        done
    fi
    
    echo "=== Bulk Deletion Summary ==="
    echo "Total files processed: $total_count"
    echo "Successful deletions: $success_count"
    echo "Failed deletions: $failure_count"
    
    log_action "Bulk deletion completed: $success_count/$total_count successful"
    audit_log "BULK_DELETE" "MULTIPLE" "COMPLETED" "Success: $success_count Failed: $failure_count Total: $total_count"
}

# Cleanup old backups based on retention policy
# Removes backup files older than the retention window and reports the count.
# Globals: BACKUP_DIR (scanned), BACKUP_RETENTION_DAYS (default 30)
cleanup_old_backups() {
    echo "=== Backup Retention Cleanup ==="

    if [[ ! -d "$BACKUP_DIR" ]]; then
        echo "No backup directory found"
        return 0
    fi

    local retention_days="${BACKUP_RETENTION_DAYS:-30}"
    local cleanup_count=0
    local stale_file

    # find -mtime +N selects files not modified within the last N*24 hours
    while IFS= read -r stale_file; do
        [[ -n "$stale_file" ]] || continue
        rm -f "$stale_file"
        cleanup_count=$((cleanup_count + 1))
        echo "Removed old backup: $stale_file"
    done < <(find "$BACKUP_DIR" -type f -mtime +"$retention_days" 2>/dev/null)

    echo "Cleaned up $cleanup_count old backups (older than $retention_days days)"
    log_action "Backup cleanup: $cleanup_count old backups removed"
}

# Check system resources before deletion
# Reports free disk space on / and the 1-minute load average, and logs
# warnings when policy thresholds are crossed. Advisory only: always
# returns 0 and never blocks a deletion.
# Globals (read): FREE_SPACE_MONITORING, SYSTEM_LOAD_MONITORING
check_system_resources() {
    echo "=== System Resource Check ==="
    
    # Use POSIX `df -P` with 1K blocks instead of parsing human-readable
    # `df -h`, whose unit suffix (G/Gi/T/...) made the old string-stripping
    # arithmetic unreliable; integer math also removes the bc dependency.
    local available_kb available_gb
    available_kb=$(df -Pk / | awk 'NR==2 {print $4}')
    available_gb=$(( ${available_kb:-0} / 1048576 ))
    
    echo "Available Space: ${available_gb}GB"
    
    if [[ "$FREE_SPACE_MONITORING" == "true" ]] && (( available_gb < 5 )); then
        echo "⚠️  Low disk space warning: ${available_gb}GB available"
        log_action "Low disk space during deletion operation: ${available_gb}GB"
    fi
    
    # 1-minute load average. Splitting on "load average" (no colon) also
    # handles macOS, where uptime prints "load averages:"; tr strips the
    # leftover ':'/'s'/',' characters.
    local load_average
    load_average=$(uptime | awk -F'load average' '{print $2}' | tr -d ':s,' | awk '{print $1}')
    
    echo "System Load: $load_average"
    
    # Float comparison via awk (bc may not be installed on managed hosts)
    if [[ "$SYSTEM_LOAD_MONITORING" == "true" ]] && awk -v l="${load_average:-0}" 'BEGIN { exit !(l > 5.0) }'; then
        echo "⚠️  High system load: $load_average"
        log_action "High system load during deletion operation: $load_average"
    fi
    
    return 0
}

# Generate file deletion report
# Writes a JSON summary of recent deletion activity, backup usage and the
# active policy flags.
# Arguments: $1 - (optional) output directory; defaults to the original
#            /Library/MacFleet/Reports location, so existing callers are
#            unaffected
# Globals (read): AUDIT_LOG, BACKUP_DIR, plus the policy flags embedded in
#            the JSON (unset flags fall back to defaults to keep the JSON valid)
generate_deletion_report() {
    local report_dir="${1:-/Library/MacFleet/Reports}"
    local report_file="$report_dir/file_deletion_report_$(date +%Y%m%d_%H%M%S).json"
    
    echo "=== Generating File Deletion Report ==="
    
    mkdir -p "$report_dir"
    
    # Count recent operations from the audit log.
    # grep -c always prints a count but exits non-zero when the count is 0,
    # so the original `$(grep -c ... || echo 0)` produced "0\n0" and
    # corrupted the JSON numbers. Fall back to 0 only when grep produced no
    # output at all (missing/unreadable log).
    local recent_deletions=""
    local successful_deletions=""
    local failed_deletions=""
    local total_backup_size="0"
    
    if [[ -f "$AUDIT_LOG" ]]; then
        recent_deletions=$(grep -Ec "ACTION:SECURE_DELETE|ACTION:BULK_DELETE" "$AUDIT_LOG" 2>/dev/null) || true
        successful_deletions=$(grep -c "RESULT:SUCCESS" "$AUDIT_LOG" 2>/dev/null) || true
        failed_deletions=$(grep -c "RESULT:FAILED" "$AUDIT_LOG" 2>/dev/null) || true
    fi
    recent_deletions=${recent_deletions:-0}
    successful_deletions=${successful_deletions:-0}
    failed_deletions=${failed_deletions:-0}
    
    if [[ -d "$BACKUP_DIR" ]]; then
        total_backup_size=$(du -sh "$BACKUP_DIR" 2>/dev/null | awk '{print $1}')
        total_backup_size=${total_backup_size:-0}
    fi
    
    # Success rate via awk (avoids a hard dependency on bc); the "+ 1" in
    # the denominator is kept from the original to avoid division by zero
    local success_rate
    success_rate=$(awk -v s="$successful_deletions" -v f="$failed_deletions" \
        'BEGIN { printf "%.2f", s * 100 / (s + f + 1) }')
    
    cat > "$report_file" << EOF
{
  "report_type": "file_deletion_management",
  "timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
  "device_info": {
    "hostname": "$(hostname)",
    "os_version": "$(sw_vers -productVersion 2>/dev/null || echo "unknown")",
    "available_space": "$(df -h / | tail -1 | awk '{print $4}')"
  },
  "deletion_statistics": {
    "recent_deletions": $recent_deletions,
    "successful_deletions": $successful_deletions,
    "failed_deletions": $failed_deletions,
    "success_rate": "${success_rate}%"
  },
  "backup_statistics": {
    "total_backup_size": "$total_backup_size",
    "backup_retention_days": ${BACKUP_RETENTION_DAYS:-30},
    "encrypted_backups": ${ENCRYPTED_BACKUPS:-false}
  },
  "policy_configuration": {
    "secure_deletion": ${SECURE_DELETION_ENABLED:-false},
    "automatic_backup": ${AUTOMATIC_BACKUP_ENABLED:-false},
    "business_hours_protection": ${BUSINESS_HOURS_PROTECTION:-false},
    "sensitive_data_detection": ${SENSITIVE_DATA_DETECTION:-false}
  },
  "security_status": {
    "system_file_protection": ${PREVENT_SYSTEM_FILE_DELETION:-false},
    "protected_directories": ${PROTECTED_DIRECTORIES_ENFORCEMENT:-false},
    "audit_enabled": ${AUDIT_ALL_DELETIONS:-false}
  }
}
EOF
    
    echo "File deletion report saved to: $report_file"
    log_action "Deletion report generated: $report_file"
}

# Main function with argument handling
# Dispatches the requested subcommand; anything unrecognized (including the
# implicit "help" default) prints the usage text.
main() {
    log_action "=== MacFleet File Deletion Management Tool Started ==="

    local command="${1:-help}"
    case "$command" in
        delete)
            enterprise_file_delete "$2" "$3" "$4" "$5"
            ;;
        bulk)
            bulk_file_deletion "$2" "$3" "$4"
            ;;
        cleanup-backups)
            cleanup_old_backups
            ;;
        report)
            generate_deletion_report
            ;;
        *)
            # Usage text kept identical to the original echo sequence
            cat << EOF
MacFleet Enterprise File and Folder Deletion Management Tool
Usage: $0 [command] [options]

Commands:
  delete [file_path] [force] [secure] [backup]  - Delete file with enterprise controls
  bulk [list_file/pattern] [force] [secure]     - Bulk deletion from file list or pattern
  cleanup-backups                              - Remove old backups per retention policy
  report                                       - Generate deletion management report

Options:
  force    - true/false (bypass safety checks)
  secure   - true/false (enable secure deletion)
  backup   - true/false (create backup before deletion)

Examples:
  $0 delete "/Users/user/temp.txt"            - Safe delete with backup
  $0 delete "/tmp/folder" true true false     - Force secure delete without backup
  $0 bulk "*.log" false true                 - Bulk secure delete log files
  $0 bulk file_list.txt true                   - Bulk force delete from list
  $0 cleanup-backups                           - Clean old backups
EOF
            ;;
    esac

    log_action "=== File deletion management operation completed ==="
}

# Execute main function
main "$@"

Important Configuration Notes

macOS File Deletion Commands

  • rm - Basic file removal
  • rm -r - Recursive directory deletion
  • rm -f - Force deletion without prompts
  • rm -rf - Recursive force deletion
  • Shell expansion - *.txt globbing and {1,2}.png brace expansion for bulk operations

Enterprise Integration Points

  • Data Loss Prevention (DLP) - Integration with DLP systems
  • Backup and Recovery Systems - Automated backup integration
  • Compliance Platforms - GDPR, HIPAA compliance reporting
  • Security Information and Event Management (SIEM) - Deletion audit logging

Best Practices for Enterprise File Deletion

  1. Security and Data Protection

    • Always create backups before deletion
    • Use secure deletion for sensitive data
    • Validate file permissions before deletion
    • Implement business hours protection
  2. Compliance and Governance

    • Maintain comprehensive audit trails
    • Implement data classification awareness
    • Support regulatory compliance requirements
    • Document all deletion operations
  3. Safety and Recovery

    • Prevent system file deletion
    • Implement rollback capabilities
    • Monitor system resources during operations
    • Provide clear notification systems

Remember to test file deletion procedures thoroughly in a controlled environment before implementing across your entire MacFleet to ensure data safety and compliance with organizational policies.

File and Folder Creation Management on macOS

Efficiently manage file and folder creation operations across your MacFleet deployment with enterprise-grade template management, permission control, and comprehensive audit capabilities. This tutorial transforms basic touch and mkdir commands into robust filesystem provisioning solutions.

Understanding Enterprise File Creation Operations

Enterprise file creation requires more than basic filesystem operations, demanding:

  • Template management for standardized file and folder structures
  • Permission control to enforce security policies
  • Content validation for created files and directories
  • Audit logging for compliance tracking
  • Automated provisioning for user onboarding
  • Policy enforcement for naming conventions and structure

Core Creation Operations

Basic File Creation

#!/bin/bash

# Simple file creation with validation
# Creates an (optionally pre-filled) file after confirming that the parent
# directory already exists.
# Arguments: $1 - target file path, $2 - optional initial content
# Returns: 0 on success, 1 on validation or creation failure
create_file() {
    local target="$1"
    local initial_content="${2:-}"
    local parent_dir

    # Validate parent directory exists
    parent_dir=$(dirname "$target")
    if [[ ! -d "$parent_dir" ]]; then
        echo "Error: Parent directory '$parent_dir' does not exist"
        return 1
    fi

    # Create file
    if ! touch "$target"; then
        echo "Failed to create file '$target'"
        return 1
    fi

    # Seed the file with content only when the caller supplied some
    [[ -z "$initial_content" ]] || echo "$initial_content" > "$target"

    echo "Successfully created file '$target'"
    return 0
}

# Usage example
# create_file "/Users/admin/document.txt" "Initial content"

# Usage example
# create_file "/Users/admin/document.txt" "Initial content"

Basic Directory Creation

#!/bin/bash

# Directory creation with validation
# Creates the directory (including missing parents) and applies the
# requested mode.
# Arguments: $1 - directory path, $2 - optional chmod mode (default 755)
# Returns: 0 on success, 1 when mkdir fails
create_directory() {
    local target_dir="$1"
    local mode="${2:-755}"

    # Create directory with parents if needed
    if ! mkdir -p "$target_dir"; then
        echo "Failed to create directory '$target_dir'"
        return 1
    fi

    # Set permissions
    chmod "$mode" "$target_dir"
    echo "Successfully created directory '$target_dir'"
    return 0
}

# Usage example
# create_directory "/Users/admin/project" "755"

Enterprise Creation Management System

#!/bin/bash

# MacFleet Enterprise File Creation Management System
# Comprehensive file and folder creation with enterprise features

# Configuration
SCRIPT_NAME="MacFleet Creation Manager"
VERSION="1.0.0"
LOG_FILE="/var/log/macfleet_creation_operations.log"    # operations log (log_operation)
TEMPLATE_DIR="/etc/macfleet/templates"                  # *.template files + structures/*.json
POLICY_DIR="/etc/macfleet/policies"                     # reserved for policy definitions
TEMP_DIR="/tmp/macfleet_creation"                       # scratch space
DEFAULT_FILE_PERMISSIONS="644"                          # octal mode for new files
DEFAULT_DIR_PERMISSIONS="755"                           # octal mode for new directories
RESTRICTED_PATHS=("/System" "/usr/bin" "/usr/sbin" "/private/var" "/Library/LaunchDaemons")   # creation refused under these prefixes
ALLOWED_EXTENSIONS=(".txt" ".md" ".pdf" ".docx" ".xlsx" ".pptx" ".csv" ".json" ".xml" ".log") # extension allow-list for new files
MAX_FILENAME_LENGTH=255                                 # max characters per name component
MAX_PATH_DEPTH=20                                       # max '/'-separated components per path
BUSINESS_HOURS_START=9                                  # 24h clock, inclusive
BUSINESS_HOURS_END=17                                   # 24h clock, exclusive

# Create necessary directories
# Requires write access to /etc and /var/log — i.e. run as root
mkdir -p "$TEMP_DIR"
mkdir -p "$TEMPLATE_DIR"
mkdir -p "$POLICY_DIR"
mkdir -p "$(dirname "$LOG_FILE")"

# Logging function
# Writes a leveled, timestamped, user-attributed entry to the operations log
# and echoes it to stdout.
# Arguments: $1 - level (INFO/WARNING/ERROR/SECURITY/...), $2 - message
# Globals:   LOG_FILE (append target)
log_operation() {
    local level="$1"
    local message="$2"
    local stamp user
    stamp=$(date '+%Y-%m-%d %H:%M:%S')
    user=$(whoami)
    printf '[%s] [%s] [%s] %s\n' "$stamp" "$level" "$user" "$message" | tee -a "$LOG_FILE"
}

# Check if current time is within business hours
# Compares the current hour with BUSINESS_HOURS_START/END (24h clock,
# end-exclusive).
# Returns: 0 inside business hours, 1 outside
is_business_hours() {
    local current_hour
    # Force base-10: `date +%H` zero-pads (08, 09), which bash arithmetic
    # would otherwise reject as an invalid octal constant, making this
    # function error out between 08:00 and 09:59.
    current_hour=$((10#$(date +%H)))
    if (( current_hour >= BUSINESS_HOURS_START && current_hour < BUSINESS_HOURS_END )); then
        return 0
    else
        return 1
    fi
}

# Validate file extension
# Succeeds when the file's extension (with leading dot) appears in the
# ALLOWED_EXTENSIONS allow-list.
# Note: a name without any dot yields ".<whole name>", which will not match.
is_allowed_extension() {
    local candidate="$1"
    local ext=".${candidate##*.}"

    local allowed
    for allowed in "${ALLOWED_EXTENSIONS[@]}"; do
        [[ "$ext" == "$allowed" ]] && return 0
    done
    return 1
}

# Check if path is restricted
# Succeeds when the path lies at or below any prefix in RESTRICTED_PATHS.
is_restricted_path() {
    local candidate="$1"
    local prefix
    for prefix in "${RESTRICTED_PATHS[@]}"; do
        case "$candidate" in
            "$prefix"*) return 0 ;;
        esac
    done
    return 1
}

# Validate naming conventions
# Enforces a maximum length and rejects Windows-reserved device names; names
# containing special characters only trigger a logged warning, not a failure.
# Arguments: $1 - file or directory name, $2 - "file" or "directory" (informational)
# Returns: 0 when the name is acceptable, 1 when it violates policy
validate_naming_convention() {
    local name="$1"
    local type="$2"  # "file" or "directory"

    # Length gate
    if (( ${#name} > MAX_FILENAME_LENGTH )); then
        log_operation "ERROR" "Name too long (${#name} > $MAX_FILENAME_LENGTH): $name"
        return 1
    fi

    # Non-portable characters are tolerated but flagged
    if [[ "$name" =~ [^a-zA-Z0-9._-] ]]; then
        log_operation "WARNING" "Name contains special characters: $name"
    fi

    # Windows-reserved device names are disallowed for cross-platform safety
    local -r reserved_names=("CON" "PRN" "AUX" "NUL" "COM1" "COM2" "LPT1" "LPT2")
    local base_name reserved
    base_name=$(basename "$name" | tr '[:lower:]' '[:upper:]')

    for reserved in "${reserved_names[@]}"; do
        if [[ "$base_name" == "$reserved" ]]; then
            log_operation "ERROR" "Reserved name not allowed: $name"
            return 1
        fi
    done

    return 0
}

# Check path depth
# Rejects paths whose '/'-separated component count exceeds MAX_PATH_DEPTH.
validate_path_depth() {
    local path="$1"
    local depth
    # Count '/'-separated fields; a leading '/' contributes an empty first
    # field, matching the original tr|wc line-count behavior.
    depth=$(awk -F'/' '{print NF}' <<< "$path")

    if (( depth > MAX_PATH_DEPTH )); then
        log_operation "ERROR" "Path depth too deep ($depth > $MAX_PATH_DEPTH): $path"
        return 1
    fi

    return 0
}

# Load file template
# Prints the named template's contents (from TEMPLATE_DIR) to stdout.
# Arguments: $1 - template name (".template" suffix appended automatically)
# Returns: 0 when the template exists, 1 (with a logged warning) otherwise
load_file_template() {
    local template_name="$1"
    local template_file="$TEMPLATE_DIR/$template_name.template"

    if [[ ! -f "$template_file" ]]; then
        log_operation "WARNING" "Template not found: $template_name"
        return 1
    fi

    cat "$template_file"
    return 0
}

# Apply variable substitution to template
# Expands the built-in placeholders {{DATE}}, {{TIME}}, {{USER}} and
# {{HOSTNAME}} in the template text and prints the result to stdout.
# Arguments: $1 - template content
#            $2 - optional JSON string with custom variables (placeholder;
#                 not parsed yet)
apply_template_variables() {
    local content="$1"
    local variables="$2"  # JSON string with variables
    
    # Use bash parameter expansion instead of piping through sed: the
    # substituted values (user name, hostname) could contain characters that
    # are special to sed, and this avoids four subshell+sed invocations.
    content=${content//'{{DATE}}'/$(date '+%Y-%m-%d')}
    content=${content//'{{TIME}}'/$(date '+%H:%M:%S')}
    content=${content//'{{USER}}'/$(whoami)}
    content=${content//'{{HOSTNAME}}'/$(hostname)}
    
    # Custom variables from JSON (simplified)
    if [[ -n "$variables" ]]; then
        # This would be enhanced with proper JSON parsing in production
        log_operation "INFO" "Applying custom template variables"
    fi
    
    # printf is safe even if the content starts with "-" or has backslashes
    printf '%s\n' "$content"
}

# Create directory structure from template
# Looks up a JSON structure definition under TEMPLATE_DIR/structures and
# provisions the base path. (The JSON parsing itself is a simplified
# placeholder: currently only the base directory is created.)
# Arguments: $1 - template name, $2 - base path, $3 - optional variables JSON
# Returns: 0 on success, 1 when the structure template is missing
create_structure_from_template() {
    local template_name="$1"
    local base_path="$2"
    local variables="${3:-}"
    local structure_file="$TEMPLATE_DIR/structures/$template_name.json"

    [[ -f "$structure_file" ]] || {
        log_operation "ERROR" "Structure template not found: $template_name"
        return 1
    }

    log_operation "INFO" "Creating structure from template: $template_name at $base_path"

    # This would parse JSON structure and create directories/files
    # Simplified implementation for demonstration
    mkdir -p "$base_path"

    return 0
}

# Enhanced file creation with enterprise features
# Validates naming, path depth, restricted-path and extension policy, then
# creates the file (optionally seeded from a template), applies permissions,
# and — when running as root — inherits ownership from the parent directory.
# Arguments: $1 - file path to create
#            $2 - optional template name (resolved under TEMPLATE_DIR)
#            $3 - octal permissions (default: DEFAULT_FILE_PERMISSIONS)
#            $4 - optional template variables (JSON string, passed through)
# Returns: 0 on success; 1 on any validation failure, a pre-existing file,
#          or a creation error
enterprise_create_file() {
    local file_path="$1"
    local template_name="${2:-}"
    local permissions="${3:-$DEFAULT_FILE_PERMISSIONS}"
    local variables="${4:-}"
    
    # Seconds-resolution id used only to correlate the log lines of this call
    local operation_id=$(date +%s)
    log_operation "INFO" "Starting file creation operation [$operation_id]: $file_path"
    
    # Pre-flight validations
    local filename=$(basename "$file_path")
    local parent_dir=$(dirname "$file_path")
    
    # Validate naming conventions
    if ! validate_naming_convention "$filename" "file"; then
        log_operation "ERROR" "Naming convention validation failed: $filename"
        return 1
    fi
    
    # Validate path depth
    if ! validate_path_depth "$file_path"; then
        return 1
    fi
    
    # Check if path is restricted
    if is_restricted_path "$file_path"; then
        log_operation "SECURITY" "Blocked creation in restricted path: $file_path"
        return 1
    fi
    
    # Validate file extension
    if ! is_allowed_extension "$file_path"; then
        log_operation "SECURITY" "Blocked file with unauthorized extension: $file_path"
        return 1
    fi
    
    # Check if file already exists (never overwrite silently)
    if [[ -f "$file_path" ]]; then
        log_operation "WARNING" "File already exists: $file_path"
        return 1
    fi
    
    # Create parent directory if it doesn't exist
    if [[ ! -d "$parent_dir" ]]; then
        if ! mkdir -p "$parent_dir"; then
            log_operation "ERROR" "Failed to create parent directory: $parent_dir"
            return 1
        fi
        log_operation "INFO" "Created parent directory: $parent_dir"
    fi
    
    # Load template content if specified; a missing template degrades to an
    # empty file rather than failing the whole operation
    local content=""
    if [[ -n "$template_name" ]]; then
        content=$(load_file_template "$template_name")
        if [[ $? -eq 0 ]]; then
            content=$(apply_template_variables "$content" "$variables")
            log_operation "INFO" "Applied template: $template_name"
        else
            log_operation "WARNING" "Failed to load template, creating empty file"
        fi
    fi
    
    # Create the file
    if touch "$file_path"; then
        # Add content if available
        if [[ -n "$content" ]]; then
            echo "$content" > "$file_path"
        fi
        
        # Set permissions
        chmod "$permissions" "$file_path"
        
        # Set ownership (if running as root): inherit the parent directory's
        # owner/group so root-created files stay usable by the target user
        # (`stat -f` is the BSD/macOS form)
        if [[ $EUID -eq 0 ]]; then
            local target_user=$(stat -f "%Su" "$parent_dir")
            local target_group=$(stat -f "%Sg" "$parent_dir")
            chown "$target_user:$target_group" "$file_path"
        fi
        
        log_operation "SUCCESS" "File created successfully [$operation_id]: $file_path"
        log_operation "AUDIT" "File created: $file_path (permissions: $permissions)"
        
        return 0
    else
        log_operation "ERROR" "Failed to create file [$operation_id]: $file_path"
        return 1
    fi
}

# Enhanced directory creation with enterprise features
# Create a directory with enterprise guard rails: naming-convention, path-depth
# and restricted-path validation, permission/ownership handling, and optional
# template-driven substructure.
# Arguments:
#   $1 directory path, $2 structure template name (optional),
#   $3 permissions (default $DEFAULT_DIR_PERMISSIONS),
#   $4 create_parents "true"/"false" (default "true")
# Returns 0 on success, 1 on any validation or creation failure.
enterprise_create_directory() {
    local dir_path="$1"
    local template_name="${2:-}"
    local permissions="${3:-$DEFAULT_DIR_PERMISSIONS}"
    local create_parents="${4:-true}"
    
    local operation_id=$(date +%s)
    log_operation "INFO" "Starting directory creation operation [$operation_id]: $dir_path"
    
    # Pre-flight validations
    local dirname=$(basename "$dir_path")
    
    # Validate naming conventions
    if ! validate_naming_convention "$dirname" "directory"; then
        log_operation "ERROR" "Naming convention validation failed: $dirname"
        return 1
    fi
    
    # Validate path depth
    if ! validate_path_depth "$dir_path"; then
        return 1
    fi
    
    # Check if path is restricted
    if is_restricted_path "$dir_path"; then
        log_operation "SECURITY" "Blocked creation in restricted path: $dir_path"
        return 1
    fi
    
    # Check if directory already exists
    if [[ -d "$dir_path" ]]; then
        log_operation "WARNING" "Directory already exists: $dir_path"
        return 1
    fi
    
    # Build mkdir arguments as an array: an empty array expands to zero words,
    # whereas the previous unquoted scalar was subject to word-splitting and
    # globbing (ShellCheck SC2086).
    local -a mkdir_args=()
    if [[ "$create_parents" == "true" ]]; then
        mkdir_args+=("-p")
    fi
    
    if mkdir "${mkdir_args[@]}" "$dir_path"; then
        # Set permissions
        chmod "$permissions" "$dir_path"
        
        # Match ownership to the parent directory when running as root
        # (BSD/macOS stat flags: %Su = owner name, %Sg = group name)
        if [[ $EUID -eq 0 ]]; then
            local parent_dir=$(dirname "$dir_path")
            if [[ -d "$parent_dir" ]]; then
                local target_user=$(stat -f "%Su" "$parent_dir")
                local target_group=$(stat -f "%Sg" "$parent_dir")
                chown "$target_user:$target_group" "$dir_path"
            fi
        fi
        
        # Apply template structure if specified
        if [[ -n "$template_name" ]]; then
            create_structure_from_template "$template_name" "$dir_path"
        fi
        
        log_operation "SUCCESS" "Directory created successfully [$operation_id]: $dir_path"
        log_operation "AUDIT" "Directory created: $dir_path (permissions: $permissions)"
        
        return 0
    else
        log_operation "ERROR" "Failed to create directory [$operation_id]: $dir_path"
        return 1
    fi
}

# Bulk creation operations
# Process a pipe-delimited specification file ("path|permissions|variables",
# one entry per line; '#' comments and blank lines ignored) and create each
# item as a file or directory.
# Arguments: $1 "file"|"directory", $2 spec file path, $3 template (optional)
# Returns the number of failed items (note: shell return codes cap at 255).
bulk_create_operation() {
    local operation_type="$1"  # "file" or "directory"
    local items_file="$2"      # File containing creation specifications
    local template_name="${3:-}"
    
    if [[ ! -f "$items_file" ]]; then
        log_operation "ERROR" "Items file not found: $items_file"
        return 1
    fi
    
    # Use ERE (-E): the previous BRE '\|' alternation is a GNU extension that
    # BSD grep on macOS does not support. Also skip whitespace-only lines.
    local total_items
    total_items=$(grep -cEv '^[[:space:]]*(#|$)' "$items_file")
    local current_item=0
    local success_count=0
    local failure_count=0
    
    log_operation "INFO" "Starting bulk creation operation - Total items: $total_items"
    
    while IFS='|' read -r path permissions variables; do
        # Trim whitespace first so indented comments/blank lines are skipped too
        path=$(echo "$path" | xargs)
        permissions=$(echo "$permissions" | xargs)
        variables=$(echo "$variables" | xargs)
        
        # Skip empty lines and comments
        [[ -z "$path" || "$path" == \#* ]] && continue
        
        ((current_item++))
        
        echo "Processing [$current_item/$total_items]: $(basename "$path")"
        
        case "$operation_type" in
            "file")
                if enterprise_create_file "$path" "$template_name" "$permissions" "$variables"; then
                    ((success_count++))
                else
                    ((failure_count++))
                fi
                ;;
            "directory")
                if enterprise_create_directory "$path" "$template_name" "$permissions"; then
                    ((success_count++))
                else
                    ((failure_count++))
                fi
                ;;
        esac
        
        # Progress update (guard against an empty spec file: avoid div-by-zero)
        if (( total_items > 0 )); then
            local progress=$((current_item * 100 / total_items))
            echo "Progress: $progress% ($success_count successful, $failure_count failed)"
        fi
        
    done < "$items_file"
    
    log_operation "SUCCESS" "Bulk creation completed - Success: $success_count, Failed: $failure_count"
    return $failure_count
}

# User onboarding automation
# Provision the standard home-folder layout for a new user, plus role-specific
# directories and starter files.
# Arguments: $1 username, $2 user type: standard|developer|admin (default standard)
create_user_workspace() {
    local username="$1"
    local user_type="${2:-standard}"  # standard, admin, developer
    local home="/Users/$username"
    
    log_operation "INFO" "Creating workspace for user: $username (type: $user_type)"
    
    # Baseline macOS home-folder layout
    local -a workspace_dirs=(
        "$home/Documents"
        "$home/Desktop"
        "$home/Downloads"
        "$home/Pictures"
        "$home/Movies"
        "$home/Music"
    )
    
    # Role-specific additions
    if [[ "$user_type" == "developer" ]]; then
        workspace_dirs+=("$home/Projects" "$home/Scripts" "$home/Tools")
    elif [[ "$user_type" == "admin" ]]; then
        workspace_dirs+=("$home/Admin" "$home/Logs" "$home/Configs")
    fi
    
    # Create each directory, logging only the successful ones
    local dir
    for dir in "${workspace_dirs[@]}"; do
        if enterprise_create_directory "$dir" "" "755"; then
            log_operation "INFO" "Created user directory: $dir"
        fi
    done
    
    # Starter files, encoded as "path|template|permissions"
    local -a starter_files=(
        "$home/Desktop/Welcome.txt|welcome|644"
        "$home/Documents/README.txt|readme|644"
    )
    
    local spec target tpl mode
    for spec in "${starter_files[@]}"; do
        IFS='|' read -r target tpl mode <<< "$spec"
        if enterprise_create_file "$target" "$tpl" "$mode"; then
            log_operation "INFO" "Created user file: $target"
        fi
    done
    
    log_operation "SUCCESS" "User workspace created for: $username"
}

# Generate creation report
# Write a human-readable summary of recent creation activity, statistics
# gathered from $LOG_FILE, available templates and disk space to a
# timestamped file under /tmp, and print its location.
generate_creation_report() {
    local report_file="/tmp/macfleet_creation_report_$(date +%Y%m%d_%H%M%S).txt"
    
    {
        echo "MacFleet Creation Operations Report"
        echo "Generated: $(date)"
        echo "Hostname: $(hostname)"
        echo "User: $(whoami)"
        echo "================================="
        echo ""
        
        echo "Recent Creation Operations (Last 24 hours):"
        if [[ -f "$LOG_FILE" ]]; then
            # 'date -v-1d' is the BSD/macOS spelling (GNU date uses -d '1 day ago')
            local yesterday=$(date -v-1d '+%Y-%m-%d')
            # -E alternation instead of the BRE '\|' GNU extension, which BSD
            # grep on macOS does not support
            grep -E "$yesterday|$(date '+%Y-%m-%d')" "$LOG_FILE" | grep -E "(AUDIT|SUCCESS)" | tail -50
        else
            echo "No log file found"
        fi
        
        echo ""
        echo "Creation Statistics:"
        if [[ -f "$LOG_FILE" ]]; then
            # 'grep -c' always prints a count (including 0) even when it exits
            # non-zero, so the previous '|| echo "0"' fallback emitted a
            # duplicated "0" on a second line whenever there were no matches.
            echo "Total Operations: $(grep -c "creation operation" "$LOG_FILE" 2>/dev/null)"
            echo "Files Created: $(grep -c "File created:" "$LOG_FILE" 2>/dev/null)"
            echo "Directories Created: $(grep -c "Directory created:" "$LOG_FILE" 2>/dev/null)"
            echo "Failed Operations: $(grep -c "ERROR.*Failed to create" "$LOG_FILE" 2>/dev/null)"
        fi
        
        echo ""
        echo "Template Usage:"
        if [[ -d "$TEMPLATE_DIR" ]]; then
            echo "Available Templates:"
            find "$TEMPLATE_DIR" -name "*.template" -exec basename {} .template \; | sort
        fi
        
        echo ""
        echo "System Information:"
        echo "Available Space:"
        df -h | grep -E "^/dev/"
        
    } > "$report_file"
    
    echo "Creation operations report saved to: $report_file"
    log_operation "INFO" "Creation report generated: $report_file"
}

# Template management
# Manage reusable content templates stored as "$TEMPLATE_DIR/<name>.template".
# Actions: create <name> <content> | list | show <name> | delete <name>
# Returns 0 on success, 1 on bad usage, unknown action or missing template.
manage_templates() {
    local action="$1"
    local template_name="$2"
    local template_content="$3"
    
    case "$action" in
        "create")
            if [[ -z "$template_name" || -z "$template_content" ]]; then
                echo "Usage: manage_templates create <name> <content>"
                return 1
            fi
            
            # Ensure the template store exists before writing into it
            mkdir -p "$TEMPLATE_DIR"
            
            local template_file="$TEMPLATE_DIR/$template_name.template"
            echo "$template_content" > "$template_file"
            log_operation "INFO" "Template created: $template_name"
            ;;
        "list")
            echo "Available Templates:"
            find "$TEMPLATE_DIR" -name "*.template" -exec basename {} .template \;
            ;;
        "show")
            if [[ -z "$template_name" ]]; then
                echo "Usage: manage_templates show <name>"
                return 1
            fi
            
            local template_file="$TEMPLATE_DIR/$template_name.template"
            if [[ -f "$template_file" ]]; then
                cat "$template_file"
            else
                echo "Template not found: $template_name"
                return 1
            fi
            ;;
        "delete")
            if [[ -z "$template_name" ]]; then
                echo "Usage: manage_templates delete <name>"
                return 1
            fi
            
            local template_file="$TEMPLATE_DIR/$template_name.template"
            if [[ -f "$template_file" ]]; then
                rm "$template_file"
                log_operation "INFO" "Template deleted: $template_name"
            else
                echo "Template not found: $template_name"
                return 1
            fi
            ;;
        *)
            # Unknown actions previously fell through silently; fail loudly instead
            echo "Unknown template action: $action"
            echo "Usage: manage_templates {create|list|show|delete} [name] [content]"
            return 1
            ;;
    esac
}

# Main creation management function
# CLI dispatcher: routes the first argument to the matching management
# routine, validating required parameters before delegating. Unknown or
# missing actions print the help text.
main() {
    local command="${1:-help}"
    
    case "$command" in
        "create-file")
            local target_file="$2"
            local tpl_name="$3"
            local file_mode="${4:-$DEFAULT_FILE_PERMISSIONS}"
            local tpl_vars="$5"
            
            if [[ -z "$target_file" ]]; then
                echo "Usage: $0 create-file <file_path> [template_name] [permissions] [variables]"
                exit 1
            fi
            
            enterprise_create_file "$target_file" "$tpl_name" "$file_mode" "$tpl_vars"
            ;;
        "create-directory")
            local target_dir="$2"
            local tpl_name="$3"
            local dir_mode="${4:-$DEFAULT_DIR_PERMISSIONS}"
            local with_parents="${5:-true}"
            
            if [[ -z "$target_dir" ]]; then
                echo "Usage: $0 create-directory <dir_path> [template_name] [permissions] [create_parents]"
                exit 1
            fi
            
            enterprise_create_directory "$target_dir" "$tpl_name" "$dir_mode" "$with_parents"
            ;;
        "bulk-files")
            local spec_file="$2"
            local tpl_name="$3"
            
            if [[ -z "$spec_file" ]]; then
                echo "Usage: $0 bulk-files <items_file> [template_name]"
                exit 1
            fi
            
            bulk_create_operation "file" "$spec_file" "$tpl_name"
            ;;
        "bulk-directories")
            local spec_file="$2"
            local tpl_name="$3"
            
            if [[ -z "$spec_file" ]]; then
                echo "Usage: $0 bulk-directories <items_file> [template_name]"
                exit 1
            fi
            
            bulk_create_operation "directory" "$spec_file" "$tpl_name"
            ;;
        "user-workspace")
            local target_user="$2"
            local workspace_type="${3:-standard}"
            
            if [[ -z "$target_user" ]]; then
                echo "Usage: $0 user-workspace <username> [user_type]"
                exit 1
            fi
            
            create_user_workspace "$target_user" "$workspace_type"
            ;;
        "template")
            manage_templates "$2" "$3" "$4"
            ;;
        "report")
            generate_creation_report
            ;;
        "help"|*)
            # One here-doc instead of repeated echo calls; $SCRIPT_NAME,
            # $VERSION and $0 still expand as before.
            cat <<EOF
$SCRIPT_NAME v$VERSION
Enterprise File and Folder Creation Management

Usage: $0 <action> [options]

Actions:
  create-file <path> [template] [permissions] [variables]     - Create single file
  create-directory <path> [template] [permissions] [parents] - Create directory
  bulk-files <items_file> [template]                         - Create multiple files
  bulk-directories <items_file> [template]                   - Create multiple directories
  user-workspace <username> [user_type]                      - Create user workspace
  template <action> [name] [content]                         - Manage templates
  report                                                      - Generate operations report
  help                                                        - Show this help message

Template Actions:
  create <name> <content>  - Create new template
  list                     - List available templates
  show <name>              - Show template content
  delete <name>            - Delete template

User Types:
  standard    - Basic user workspace
  developer   - Developer workspace with project directories
  admin       - Administrative workspace with system directories

Features:
  • Template-based file and directory creation
  • Permission and ownership management
  • Naming convention validation
  • Path depth and security validation
  • Bulk operations with progress monitoring
  • User workspace automation
  • Comprehensive audit logging
EOF
            ;;
    esac
}

# Entry point: forward every CLI argument, unchanged, to the dispatcher
main "$@"

Quick Reference Commands

Single File Operations

# Create empty file
./creation_manager.sh create-file "/Users/admin/document.txt"

# Create file from template
./creation_manager.sh create-file "/Users/admin/report.txt" "report_template"

# Create file with specific permissions
./creation_manager.sh create-file "/Users/admin/script.sh" "" "755"

# Create file with template variables
./creation_manager.sh create-file "/Users/admin/config.json" "config_template" "644" '{"project":"MyApp"}'

Directory Operations

# Create directory
./creation_manager.sh create-directory "/Users/admin/project"

# Create directory with specific permissions
./creation_manager.sh create-directory "/Users/admin/secure" "" "700"

# Create directory structure from template
./creation_manager.sh create-directory "/Users/admin/webapp" "webapp_structure"

Bulk Operations

# Create bulk files specification
cat > /tmp/files_to_create.txt << EOF
/Users/admin/doc1.txt|644|
/Users/admin/doc2.txt|644|
/Users/admin/config.json|600|{"env":"prod"}
EOF

# Execute bulk file creation
./creation_manager.sh bulk-files "/tmp/files_to_create.txt" "default_template"

# Create bulk directories specification
cat > /tmp/dirs_to_create.txt << EOF
/Users/admin/projects|755|
/Users/admin/scripts|755|
/Users/admin/logs|750|
EOF

# Execute bulk directory creation
./creation_manager.sh bulk-directories "/tmp/dirs_to_create.txt"

Template Management

# Create a new template
./creation_manager.sh template create "readme" "# {{PROJECT_NAME}}

Created by: {{USER}}
Date: {{DATE}}
Time: {{TIME}}

## Description
This is a README file for the project.
"

# List available templates
./creation_manager.sh template list

# Show template content
./creation_manager.sh template show "readme"

# Delete template
./creation_manager.sh template delete "old_template"

User Workspace Creation

# Create standard user workspace
./creation_manager.sh user-workspace "john.doe" "standard"

# Create developer workspace
./creation_manager.sh user-workspace "jane.smith" "developer"

# Create admin workspace
./creation_manager.sh user-workspace "admin.user" "admin"

Integration Examples

JAMF Pro Integration

#!/bin/bash

# JAMF Pro script for enterprise file creation
# Parameters: $4 = operation_type, $5 = path, $6 = template_name, $7 = permissions

OPERATION_TYPE="$4"
# Do NOT name this variable PATH: assigning to PATH clobbers the shell's
# command search path and would break the curl/chmod lookups below.
TARGET_PATH="$5"
TEMPLATE_NAME="$6"
PERMISSIONS="$7"

# Download creation manager if not present
if [[ ! -f "/usr/local/bin/macfleet_creation_manager.sh" ]]; then
    curl -o "/usr/local/bin/macfleet_creation_manager.sh" "https://scripts.macfleet.com/creation_manager.sh"
    chmod +x "/usr/local/bin/macfleet_creation_manager.sh"
fi

# Execute creation operation
case "$OPERATION_TYPE" in
    "file")
        /usr/local/bin/macfleet_creation_manager.sh create-file "$TARGET_PATH" "$TEMPLATE_NAME" "$PERMISSIONS"
        ;;
    "directory")
        /usr/local/bin/macfleet_creation_manager.sh create-directory "$TARGET_PATH" "$TEMPLATE_NAME" "$PERMISSIONS"
        ;;
    "workspace")
        # 'local' is only valid inside a function; use a plain assignment here
        username=$(basename "$TARGET_PATH")
        /usr/local/bin/macfleet_creation_manager.sh user-workspace "$username" "$TEMPLATE_NAME"
        ;;
    *)
        echo "Invalid operation type: $OPERATION_TYPE"
        exit 1
        ;;
esac

# Generate report
/usr/local/bin/macfleet_creation_manager.sh report

exit $?

Configuration Profile for Creation Policies

<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>PayloadContent</key>
    <array>
        <dict>
            <key>PayloadType</key>
            <string>com.macfleet.creation.policy</string>
            <key>PayloadIdentifier</key>
            <string>com.macfleet.creation.policy.main</string>
            <key>PayloadDisplayName</key>
            <string>MacFleet Creation Policy</string>
            <key>AllowedExtensions</key>
            <array>
                <string>.txt</string>
                <string>.md</string>
                <string>.pdf</string>
                <string>.docx</string>
            </array>
            <key>DefaultFilePermissions</key>
            <string>644</string>
            <key>DefaultDirectoryPermissions</key>
            <string>755</string>
            <key>MaxFilenameLength</key>
            <integer>255</integer>
            <key>MaxPathDepth</key>
            <integer>20</integer>
            <key>EnforceNamingConventions</key>
            <true/>
        </dict>
    </array>
</dict>
</plist>

Template Examples

Project Structure Template

{
  "name": "webapp_structure",
  "type": "directory_structure",
  "structure": {
    "src": {
      "type": "directory",
      "permissions": "755",
      "files": {
        "index.js": {
          "template": "javascript_main",
          "permissions": "644"
        },
        "package.json": {
          "template": "package_json",
          "permissions": "644"
        }
      },
      "subdirectories": {
        "components": {
          "type": "directory",
          "permissions": "755"
        },
        "utils": {
          "type": "directory",
          "permissions": "755"
        }
      }
    },
    "docs": {
      "type": "directory",
      "permissions": "755",
      "files": {
        "README.md": {
          "template": "readme",
          "permissions": "644"
        }
      }
    }
  }
}

File Templates

# Create default templates
./creation_manager.sh template create "readme" "# {{PROJECT_NAME}}

## Overview
This project was created on {{DATE}} by {{USER}}.

## Getting Started
1. Read this documentation
2. Review the code structure
3. Start development

## Contact
For questions, contact {{USER}}@company.com
"

./creation_manager.sh template create "gitignore" "# Logs
logs
*.log
npm-debug.log*

# Runtime data
pids
*.pid
*.seed

# Directory for instrumented libs
lib-cov

# Coverage directory
coverage

# Dependency directories
node_modules/

# Optional npm cache directory
.npm

# Optional REPL history
.node_repl_history
"

./creation_manager.sh template create "config_json" "{
  \"name\": \"{{PROJECT_NAME}}\",
  \"version\": \"1.0.0\",
  \"description\": \"Project created on {{DATE}}\",
  \"author\": \"{{USER}}\",
  \"created\": \"{{DATE}} {{TIME}}\",
  \"environment\": \"development\"
}
"

Security and Compliance Features

Permission Management

# Set secure permissions for sensitive files
# Create a file that only its owner can read or write, then fill it with
# the supplied content.
# Arguments: $1 file path, $2 content
# Returns non-zero (without writing content) if creation is refused, so that
# secret data is never written to a path that failed policy validation.
create_secure_file() {
    local file_path="$1"
    local content="$2"
    
    # Create file with restrictive permissions; abort on failure — the
    # original fell through and wrote the content even when creation failed
    enterprise_create_file "$file_path" "" "600" || return 1
    
    # Add content securely
    echo "$content" > "$file_path"
    
    # Re-assert owner-only access
    chmod 600 "$file_path"
}

Compliance Reporting

# Generate compliance report for auditors
# Export AUDIT creation entries from $LOG_FILE as a dated CSV for auditors.
# Output: /var/reports/creation_compliance_YYYYMMDD.csv
generate_compliance_report() {
    local report_file="/var/reports/creation_compliance_$(date +%Y%m%d).csv"
    
    # Ensure the output directory exists before writing
    mkdir -p "$(dirname "$report_file")"
    
    echo "Timestamp,User,Type,Path,Permissions,Template,Status" > "$report_file"
    
    # Parse log file for creation operations.
    # IFS= and -r keep leading whitespace and backslashes intact (plain
    # 'read line' would mangle both).
    grep "AUDIT.*created:" "$LOG_FILE" | while IFS= read -r line; do
        # Extract relevant information and format as CSV
        echo "$line" | awk -F' - ' '{print $1","$2}' >> "$report_file"
    done
    
    echo "Compliance report generated: $report_file"
}

Troubleshooting

Common Issues and Solutions

| Issue | Cause | Solution |
|---|---|---|
| Permission denied | Insufficient privileges | Check parent directory permissions |
| File already exists | Duplicate creation attempt | Use unique names or check existence first |
| Invalid characters | Special characters in name | Use alphanumeric characters only |
| Path too deep | Exceeds maximum depth | Reduce directory nesting |
| Template not found | Missing template file | Create template or use different name |

Log Analysis

# View recent creation operations
tail -f /var/log/macfleet_creation_operations.log

# Search for failed operations
grep "ERROR" /var/log/macfleet_creation_operations.log

# Count operations by type
grep -c "File created" /var/log/macfleet_creation_operations.log
grep -c "Directory created" /var/log/macfleet_creation_operations.log

Best Practices

  1. Use templates consistently for standardized structures
  2. Set appropriate permissions based on file sensitivity
  3. Validate naming conventions before creation
  4. Monitor creation logs regularly for policy violations
  5. Test templates thoroughly before fleet deployment
  6. Use bulk operations for efficiency at scale
  7. Implement user workspace automation for onboarding
  8. Regular template updates to maintain standards

This enterprise creation management system provides comprehensive template management, security validation, and automation capabilities while maintaining the simplicity of basic file and directory creation operations for effective fleet management.

File and Folder Copy Management on macOS

Efficiently manage file and folder copying operations across your MacFleet deployment with enterprise-grade safety features, integrity verification, and comprehensive audit capabilities. This tutorial transforms basic cp commands into robust data distribution solutions.

Understanding Enterprise File Copy Operations

Enterprise file copying goes beyond basic duplication, requiring:

  • Safety validation to prevent accidental overwrites
  • Integrity verification to ensure data consistency
  • Permission preservation for security compliance
  • Progress monitoring for large operations
  • Rollback capabilities for failed transfers
  • Audit logging for compliance requirements

Core Copy Operations

Basic File Copy

#!/bin/bash

# Simple file copy with validation
# Copy one file, creating the destination's parent directory on demand.
# Arguments: $1 source file, $2 destination path
# Prints a status message and returns 0 on success, 1 on failure.
copy_file() {
    local src="$1"
    local dst="$2"
    
    # The source must exist before anything else happens
    if [[ ! -f "$src" ]]; then
        echo "Error: Source file '$src' not found"
        return 1
    fi
    
    # Make sure the destination directory is in place
    mkdir -p "$(dirname "$dst")"
    
    # Perform the copy, reporting the outcome
    if ! cp "$src" "$dst"; then
        echo "Failed to copy '$src' to '$dst'"
        return 1
    fi
    
    echo "Successfully copied '$src' to '$dst'"
    return 0
}

# Usage example
# copy_file "/Users/admin/document.pdf" "/Users/shared/documents/document.pdf"

Basic Directory Copy

#!/bin/bash

# Recursive directory copy with verification
# Recursively copy a directory tree.
# Arguments: $1 source directory, $2 destination path
# Prints a status message and returns 0 on success, 1 on failure.
# Note: cp -R follows the usual trailing-slash semantics — copying into an
# existing destination nests the source underneath it.
copy_directory() {
    local src_dir="$1"
    local dst_dir="$2"
    
    # Refuse to proceed without a real source directory
    if [[ ! -d "$src_dir" ]]; then
        echo "Error: Source directory '$src_dir' not found"
        return 1
    fi
    
    # Recursive copy, reporting the outcome
    if ! cp -R "$src_dir" "$dst_dir"; then
        echo "Failed to copy directory '$src_dir' to '$dst_dir'"
        return 1
    fi
    
    echo "Successfully copied directory '$src_dir' to '$dst_dir'"
    return 0
}

# Usage example
# copy_directory "/Users/admin/project" "/Users/shared/projects/"

Enterprise Copy Management System

#!/bin/bash

# MacFleet Enterprise File Copy Management System
# Comprehensive file and folder copying with enterprise features

# Configuration
# Identity strings used in log lines and reports
SCRIPT_NAME="MacFleet Copy Manager"
VERSION="1.0.0"
# Central audit log; its parent directory is created below (needs write access)
LOG_FILE="/var/log/macfleet_copy_operations.log"
# Scratch space for copy operations
TEMP_DIR="/tmp/macfleet_copy"
# Per-file size ceiling; parsed downstream as a "<n>G" string
MAX_FILE_SIZE="10G"
# Extensions the copy policy recognizes (others only trigger a logged warning)
ALLOWED_EXTENSIONS=(".pdf" ".docx" ".xlsx" ".pptx" ".txt" ".png" ".jpg" ".gif" ".mp4" ".mov")
# Path prefixes that must never be a copy source or destination
RESTRICTED_PATHS=("/System" "/usr/bin" "/usr/sbin" "/private/var")
# 24h-clock window used to warn about large copies outside working hours
BUSINESS_HOURS_START=9
BUSINESS_HOURS_END=17

# Create necessary directories
mkdir -p "$TEMP_DIR"
mkdir -p "$(dirname "$LOG_FILE")"

# Logging function
# Append a timestamped, leveled entry to $LOG_FILE and echo it to stdout.
# Arguments: $1 level (INFO/WARNING/ERROR/...), $2 message
log_operation() {
    local level="$1" message="$2"
    local stamp
    stamp=$(date '+%Y-%m-%d %H:%M:%S')
    printf '[%s] [%s] %s\n' "$stamp" "$level" "$message" | tee -a "$LOG_FILE"
}

# Check if current time is within business hours
# Return 0 when the current local hour falls inside the configured
# [BUSINESS_HOURS_START, BUSINESS_HOURS_END) window, 1 otherwise.
is_business_hours() {
    # Force base-10: 'date +%H' is zero-padded ("08"), and bash arithmetic
    # treats a leading zero as octal — "08"/"09" are invalid octal literals,
    # so the original comparison aborted with "value too great for base"
    # every day between 08:00 and 09:59.
    local current_hour=$((10#$(date +%H)))
    if (( current_hour >= BUSINESS_HOURS_START && current_hour < BUSINESS_HOURS_END )); then
        return 0
    else
        return 1
    fi
}

# Validate file extension
# Return 0 when the file's extension (including the dot) appears in
# ALLOWED_EXTENSIONS, 1 otherwise. A name with no dot is tested as
# ".<name>", matching the original behavior.
is_allowed_extension() {
    local candidate=".${1##*.}"
    local ext
    for ext in "${ALLOWED_EXTENSIONS[@]}"; do
        [[ "$candidate" == "$ext" ]] && return 0
    done
    return 1
}

# Check if path is restricted
# Return 0 when the path starts with any prefix in RESTRICTED_PATHS,
# 1 otherwise (prefix match, same as the original glob comparison).
is_restricted_path() {
    local candidate="$1" prefix
    for prefix in "${RESTRICTED_PATHS[@]}"; do
        case "$candidate" in
            "$prefix"*) return 0 ;;
        esac
    done
    return 1
}

# Get the size of a path in BYTES (despite the pairing with format_size,
# this returns a raw byte count, not a human-readable string).
# Files: BSD/macOS 'stat -f%z' — GNU stat on Linux would need '-c%s' instead.
# Directories: 'du -sk' (1 KB blocks) scaled by 1024 — this measures disk
# usage rounded to whole KB, an approximation of the content size.
# Missing paths: prints "0".
get_file_size() {
    local file="$1"
    if [[ -f "$file" ]]; then
        stat -f%z "$file" 2>/dev/null
    elif [[ -d "$file" ]]; then
        du -sk "$file" 2>/dev/null | awk '{print $1 * 1024}'
    else
        echo "0"
    fi
}

# Convert bytes to human readable format
# Convert a byte count to a compact human-readable string, e.g. 2048 -> "2KB".
# Integer division truncates (1536 -> "1KB"), matching the original intent.
format_size() {
    local bytes="$1"
    local sizes=("B" "KB" "MB" "GB" "TB")
    local unit=0
    
    # Use -ge so exactly 1024 of a unit rolls over to the next one
    # (the original -gt printed "1024B" instead of "1KB").
    while [[ $bytes -ge 1024 && $unit -lt 4 ]]; do
        bytes=$((bytes / 1024))
        unit=$((unit + 1))
    done
    
    echo "${bytes}${sizes[$unit]}"
}

# Calculate checksum for integrity verification
calculate_checksum() {
    local file="$1"
    if [[ -f "$file" ]]; then
        shasum -a 256 "$file" 2>/dev/null | awk '{print $1}'
    else
        echo ""
    fi
}

# Verify copy integrity
# Confirm that a completed copy matches its source.
# Files: SHA-256 checksums must agree.
# Directories: compares only the number of contained files — a lightweight
# heuristic, not byte-level content equality.
# Returns 0 when verification passes, 1 otherwise (including missing paths).
verify_copy_integrity() {
    local src="$1"
    local dst="$2"
    
    if [[ -f "$src" && -f "$dst" ]]; then
        if [[ "$(calculate_checksum "$src")" == "$(calculate_checksum "$dst")" ]]; then
            log_operation "INFO" "Integrity verification passed for: $dst"
            return 0
        fi
        log_operation "ERROR" "Integrity verification failed for: $dst"
        return 1
    fi
    
    if [[ -d "$src" && -d "$dst" ]]; then
        local src_files dst_files
        src_files=$(find "$src" -type f | wc -l)
        dst_files=$(find "$dst" -type f | wc -l)
        
        if [[ $src_files -eq $dst_files ]]; then
            log_operation "INFO" "Directory integrity verification passed for: $dst"
            return 0
        fi
        log_operation "ERROR" "Directory integrity verification failed for: $dst"
        return 1
    fi
    
    log_operation "ERROR" "Cannot verify integrity - source or destination missing"
    return 1
}

# Create backup before overwrite
# Snapshot an existing path into the backup store before it is overwritten.
# Prints ONLY the backup path on stdout (callers capture it with $(...)),
# so log output is routed to stderr.
# Returns 0 on success (or silently when $1 does not exist), 1 on copy failure.
create_backup() {
    local file="$1"
    local backup_dir="/var/backups/macfleet/copy_operations"
    local timestamp=$(date '+%Y%m%d_%H%M%S')
    
    mkdir -p "$backup_dir"
    
    if [[ -e "$file" ]]; then
        local filename=$(basename "$file")
        # Fixed: the backup name now uses the source basename — the previous
        # "$(unknown)" token was corrupted and would have executed a
        # nonexistent command.
        local backup_file="$backup_dir/${filename}_backup_$timestamp"
        
        if cp -R "$file" "$backup_file"; then
            # Log to stderr: log_operation tees to stdout, and a caller doing
            # backup_file=$(create_backup ...) must receive only the path.
            log_operation "INFO" "Backup created: $backup_file" >&2
            echo "$backup_file"
            return 0
        else
            log_operation "ERROR" "Failed to create backup for: $file" >&2
            return 1
        fi
    fi
}

# Enhanced file copy with enterprise features
# Copy a single file with enterprise guard rails: restricted-path and
# extension policy checks, a size ceiling, optional backup of an existing
# destination, optional permission preservation (-p), and post-copy checksum
# verification with automatic rollback to the backup on failure.
# Arguments:
#   $1 source file, $2 destination path,
#   $3 preserve_permissions (default "true"),
#   $4 verify_integrity (default "true"),
#   $5 create_backup_flag (default "true")
# Returns 0 on verified success, 1 on any validation or copy failure.
enterprise_copy_file() {
    local source="$1"
    local destination="$2"
    local preserve_permissions="${3:-true}"
    local verify_integrity="${4:-true}"
    local create_backup_flag="${5:-true}"
    
    log_operation "INFO" "Starting file copy operation: $source -> $destination"
    
    # Pre-flight validations
    if [[ ! -f "$source" ]]; then
        log_operation "ERROR" "Source file not found: $source"
        return 1
    fi
    
    # Check if source is restricted
    if is_restricted_path "$source"; then
        log_operation "ERROR" "Source path is restricted: $source"
        return 1
    fi
    
    # Check if destination is restricted
    if is_restricted_path "$destination"; then
        log_operation "ERROR" "Destination path is restricted: $destination"
        return 1
    fi
    
    # Validate file extension — advisory only: logs a warning, does not block
    if ! is_allowed_extension "$source"; then
        log_operation "WARNING" "File extension not in allowed list: $source"
    fi
    
    # Check file size
    local file_size=$(get_file_size "$source")
    # NOTE(review): this parse assumes MAX_FILE_SIZE always ends in "G";
    # another suffix (e.g. "500M") would convert incorrectly — confirm.
    local max_size_bytes=$(echo "$MAX_FILE_SIZE" | sed 's/G//' | awk '{print $1 * 1024 * 1024 * 1024}')
    
    if [[ $file_size -gt $max_size_bytes ]]; then
        log_operation "ERROR" "File too large: $(format_size $file_size) > $MAX_FILE_SIZE"
        return 1
    fi
    
    # Business hours check for large files (>100MB) — warns but does not block
    if [[ $file_size -gt 104857600 ]] && ! is_business_hours; then
        log_operation "WARNING" "Large file copy outside business hours: $(format_size $file_size)"
    fi
    
    # Create destination directory
    local dest_dir=$(dirname "$destination")
    if ! mkdir -p "$dest_dir"; then
        log_operation "ERROR" "Failed to create destination directory: $dest_dir"
        return 1
    fi
    
    # Create backup if destination exists
    local backup_file=""
    if [[ -f "$destination" && "$create_backup_flag" == "true" ]]; then
        # NOTE(review): this capture expects create_backup to emit ONLY the
        # backup path on stdout; if its logger also tees to stdout, the
        # captured value will be polluted — verify.
        backup_file=$(create_backup "$destination")
        if [[ $? -ne 0 ]]; then
            log_operation "ERROR" "Failed to create backup for existing file: $destination"
            return 1
        fi
    fi
    
    # Perform the copy operation.
    # $copy_options is deliberately left unquoted below so an empty value
    # expands to no argument at all (it is only ever "" or "-p").
    local copy_options=""
    if [[ "$preserve_permissions" == "true" ]]; then
        copy_options="-p"
    fi
    
    if cp $copy_options "$source" "$destination"; then
        log_operation "INFO" "File copied successfully: $(format_size $file_size)"
        
        # Verify integrity if requested
        if [[ "$verify_integrity" == "true" ]]; then
            if ! verify_copy_integrity "$source" "$destination"; then
                # Verification failed: discard the bad copy and roll back
                log_operation "ERROR" "Copy integrity verification failed, removing destination"
                rm -f "$destination"
                
                # Restore backup if it exists
                if [[ -n "$backup_file" && -f "$backup_file" ]]; then
                    cp "$backup_file" "$destination"
                    log_operation "INFO" "Restored backup file: $destination"
                fi
                return 1
            fi
        fi
        
        # Log successful operation
        log_operation "SUCCESS" "File copy completed: $source -> $destination"
        return 0
    else
        log_operation "ERROR" "Copy operation failed: $source -> $destination"
        
        # Restore backup if it exists
        if [[ -n "$backup_file" && -f "$backup_file" ]]; then
            cp "$backup_file" "$destination"
            log_operation "INFO" "Restored backup file: $destination"
        fi
        return 1
    fi
}

# Enhanced directory copy with enterprise features
#
# Recursively copies a directory with policy validation, optional permission
# preservation, post-copy integrity verification, and automatic backup /
# rollback of an existing destination.
#
# Arguments:
#   $1 - source directory path
#   $2 - destination directory path
#   $3 - preserve permissions ("true"/"false", default "true")
#   $4 - verify integrity after copy ("true"/"false", default "true")
#   $5 - back up an existing destination ("true"/"false", default "true")
# Returns: 0 on success, 1 on any validation or copy failure.
enterprise_copy_directory() {
    local source="$1"
    local destination="$2"
    local preserve_permissions="${3:-true}"
    local verify_integrity="${4:-true}"
    local create_backup_flag="${5:-true}"
    
    log_operation "INFO" "Starting directory copy operation: $source -> $destination"
    
    # Pre-flight validations
    if [[ ! -d "$source" ]]; then
        log_operation "ERROR" "Source directory not found: $source"
        return 1
    fi
    
    # Check if source is restricted
    if is_restricted_path "$source"; then
        log_operation "ERROR" "Source path is restricted: $source"
        return 1
    fi
    
    # Check if destination is restricted
    if is_restricted_path "$destination"; then
        log_operation "ERROR" "Destination path is restricted: $destination"
        return 1
    fi
    
    # Calculate directory size and file count for logging / policy checks
    local dir_size file_count
    dir_size=$(get_file_size "$source")
    file_count=$(find "$source" -type f | wc -l)
    
    log_operation "INFO" "Directory stats - Size: $(format_size $dir_size), Files: $file_count"
    
    # Business hours check for large directories (>1GB or >1000 files) — warn only
    if [[ $dir_size -gt 1073741824 || $file_count -gt 1000 ]] && ! is_business_hours; then
        log_operation "WARNING" "Large directory copy outside business hours"
    fi
    
    # Create backup if destination exists (capture path and status together
    # instead of testing $? after the assignment)
    local backup_dir=""
    if [[ -d "$destination" && "$create_backup_flag" == "true" ]]; then
        if ! backup_dir=$(create_backup "$destination"); then
            log_operation "ERROR" "Failed to create backup for existing directory: $destination"
            return 1
        fi
    fi
    
    # Build cp options as an array; "--" protects against leading-dash names.
    local copy_options=(-R)
    if [[ "$preserve_permissions" == "true" ]]; then
        copy_options=(-Rp)
    fi
    
    if cp "${copy_options[@]}" -- "$source" "$destination"; then
        log_operation "INFO" "Directory copied successfully: $(format_size $dir_size)"
        
        # Verify integrity if requested; on mismatch remove the bad copy and
        # roll back to the backup when one was taken.
        if [[ "$verify_integrity" == "true" ]]; then
            if ! verify_copy_integrity "$source" "$destination"; then
                log_operation "ERROR" "Directory copy integrity verification failed, removing destination"
                rm -rf "$destination"
                
                # Restore backup if it exists
                if [[ -n "$backup_dir" && -d "$backup_dir" ]]; then
                    cp -R "$backup_dir" "$destination"
                    log_operation "INFO" "Restored backup directory: $destination"
                fi
                return 1
            fi
        fi
        
        # Log successful operation
        log_operation "SUCCESS" "Directory copy completed: $source -> $destination"
        return 0
    else
        log_operation "ERROR" "Directory copy operation failed: $source -> $destination"
        
        # Restore backup if it exists
        if [[ -n "$backup_dir" && -d "$backup_dir" ]]; then
            cp -R "$backup_dir" "$destination"
            log_operation "INFO" "Restored backup directory: $destination"
        fi
        return 1
    fi
}

# Bulk copy operations with progress monitoring
#
# Copies every path listed in a file into a common destination directory,
# reporting per-item progress and a final success/failure summary.
#
# Arguments:
#   $1 - operation type: "file" or "directory"
#   $2 - path to a file with one source path per line (# starts a comment)
#   $3 - base destination directory
# Returns: number of failed items (0 means everything succeeded).
bulk_copy_operation() {
    local operation_type="$1"  # "file" or "directory"
    local source_list="$2"     # File containing source paths
    local destination_base="$3" # Base destination directory
    
    if [[ ! -f "$source_list" ]]; then
        log_operation "ERROR" "Source list file not found: $source_list"
        return 1
    fi
    
    local total_items
    total_items=$(wc -l < "$source_list")
    
    # Guard against an empty list so the progress math below never divides by zero.
    if [[ $total_items -eq 0 ]]; then
        log_operation "WARNING" "Source list is empty: $source_list"
        return 0
    fi
    
    local current_item=0
    local success_count=0
    local failure_count=0
    
    log_operation "INFO" "Starting bulk copy operation - Total items: $total_items"
    
    # "|| [[ -n ... ]]" also processes a last line without a trailing newline.
    # Plain arithmetic assignment is used instead of ((var++)), whose exit
    # status is 1 when the pre-increment value is 0 (a trap under set -e).
    while IFS= read -r source_path || [[ -n "$source_path" ]]; do
        current_item=$((current_item + 1))
        
        # Skip empty lines and comments
        [[ -z "$source_path" || "$source_path" =~ ^#.* ]] && continue
        
        local filename destination
        filename=$(basename "$source_path")
        destination="$destination_base/$filename"
        
        echo "Processing [$current_item/$total_items]: $filename"
        
        if [[ "$operation_type" == "file" ]]; then
            if enterprise_copy_file "$source_path" "$destination"; then
                success_count=$((success_count + 1))
            else
                failure_count=$((failure_count + 1))
            fi
        elif [[ "$operation_type" == "directory" ]]; then
            if enterprise_copy_directory "$source_path" "$destination"; then
                success_count=$((success_count + 1))
            else
                failure_count=$((failure_count + 1))
            fi
        fi
        
        # Progress update
        local progress=$((current_item * 100 / total_items))
        echo "Progress: $progress% ($success_count successful, $failure_count failed)"
        
    done < "$source_list"
    
    log_operation "SUCCESS" "Bulk copy completed - Success: $success_count, Failed: $failure_count"
    return $failure_count
}

# Generate copy operation report
#
# Writes a timestamped plain-text report (recent operations, disk space,
# operation statistics) to /tmp and logs its location.
# Globals (read): LOG_FILE
generate_copy_report() {
    local report_file="/tmp/macfleet_copy_report_$(date +%Y%m%d_%H%M%S).txt"
    
    {
        echo "MacFleet Copy Operations Report"
        echo "Generated: $(date)"
        echo "Hostname: $(hostname)"
        echo "=============================="
        echo ""
        
        echo "Recent Copy Operations (Last 24 hours):"
        if [[ -f "$LOG_FILE" ]]; then
            # grep -E is required: the BSD grep shipped with macOS does not
            # support \| alternation in basic regular expressions.
            local yesterday
            yesterday=$(date -v-1d '+%Y-%m-%d')
            grep -E "$yesterday|$(date '+%Y-%m-%d')" "$LOG_FILE" | tail -50
        else
            echo "No log file found"
        fi
        
        echo ""
        echo "System Information:"
        echo "Available Space:"
        df -h | grep -E "^/dev/"
        
        echo ""
        echo "Copy Operation Statistics:"
        if [[ -f "$LOG_FILE" ]]; then
            echo "Total Operations: $(grep -c "copy operation" "$LOG_FILE" 2>/dev/null || echo "0")"
            echo "Successful: $(grep -c "SUCCESS.*copy completed" "$LOG_FILE" 2>/dev/null || echo "0")"
            echo "Failed: $(grep -c "ERROR.*copy.*failed" "$LOG_FILE" 2>/dev/null || echo "0")"
        fi
        
    } > "$report_file"
    
    echo "Copy operations report saved to: $report_file"
    log_operation "INFO" "Copy report generated: $report_file"
}

# Main copy management function
# Dispatches the first CLI argument to the matching operation, validating
# the remaining arguments before delegating to the worker functions.
main() {
    local action="${1:-help}"
    
    case "$action" in
        "copy-file")
            local src="$2"
            local dst="$3"
            local keep_perms="${4:-true}"
            local check="${5:-true}"
            local make_backup="${6:-true}"
            
            if [[ -z "$src" ]] || [[ -z "$dst" ]]; then
                echo "Usage: $0 copy-file <source> <destination> [preserve_permissions] [verify_integrity] [create_backup]"
                exit 1
            fi
            
            enterprise_copy_file "$src" "$dst" "$keep_perms" "$check" "$make_backup"
            ;;
        "copy-directory")
            local src="$2"
            local dst="$3"
            local keep_perms="${4:-true}"
            local check="${5:-true}"
            local make_backup="${6:-true}"
            
            if [[ -z "$src" ]] || [[ -z "$dst" ]]; then
                echo "Usage: $0 copy-directory <source> <destination> [preserve_permissions] [verify_integrity] [create_backup]"
                exit 1
            fi
            
            enterprise_copy_directory "$src" "$dst" "$keep_perms" "$check" "$make_backup"
            ;;
        "bulk-files")
            local list_file="$2"
            local dest_dir="$3"
            
            if [[ -z "$list_file" ]] || [[ -z "$dest_dir" ]]; then
                echo "Usage: $0 bulk-files <source_list_file> <destination_directory>"
                exit 1
            fi
            
            bulk_copy_operation "file" "$list_file" "$dest_dir"
            ;;
        "bulk-directories")
            local list_file="$2"
            local dest_dir="$3"
            
            if [[ -z "$list_file" ]] || [[ -z "$dest_dir" ]]; then
                echo "Usage: $0 bulk-directories <source_list_file> <destination_directory>"
                exit 1
            fi
            
            bulk_copy_operation "directory" "$list_file" "$dest_dir"
            ;;
        "report")
            generate_copy_report
            ;;
        "help"|*)
            # Default branch: print usage for any unrecognized action.
            echo "$SCRIPT_NAME v$VERSION"
            echo "Enterprise File and Folder Copy Management"
            echo ""
            echo "Usage: $0 <action> [options]"
            echo ""
            echo "Actions:"
            echo "  copy-file <source> <destination>       - Copy single file"
            echo "  copy-directory <source> <destination>  - Copy directory recursively"
            echo "  bulk-files <list_file> <destination>   - Bulk copy files from list"
            echo "  bulk-directories <list_file> <dest>    - Bulk copy directories from list"
            echo "  report                                  - Generate operations report"
            echo "  help                                    - Show this help message"
            echo ""
            echo "Features:"
            echo "  • Safety validation and integrity verification"
            echo "  • Automatic backup creation before overwrite"
            echo "  • Business hours compliance checking"
            echo "  • Comprehensive audit logging"
            echo "  • Permission preservation"
            echo "  • Progress monitoring for bulk operations"
            ;;
    esac
}

# Entry point: forward all CLI arguments to the dispatcher above.
main "$@"

Quick Reference Commands

Single File Operations

# Copy file with full enterprise features
./copy_manager.sh copy-file "/Users/admin/document.pdf" "/Users/shared/documents/document.pdf"

# Copy file without backup creation
./copy_manager.sh copy-file "/source/file.txt" "/destination/file.txt" true true false

# Copy file without integrity verification (faster)
./copy_manager.sh copy-file "/source/file.txt" "/destination/file.txt" true false true

Directory Operations

# Copy directory with all subdirectories
./copy_manager.sh copy-directory "/Users/admin/project" "/Users/shared/projects/"

# Copy directory preserving permissions
./copy_manager.sh copy-directory "/source/folder" "/destination/" true true true

Bulk Operations

# Create file list for bulk operations
echo "/Users/admin/doc1.pdf" > /tmp/files_to_copy.txt
echo "/Users/admin/doc2.pdf" >> /tmp/files_to_copy.txt
echo "/Users/admin/doc3.pdf" >> /tmp/files_to_copy.txt

# Execute bulk file copy
./copy_manager.sh bulk-files "/tmp/files_to_copy.txt" "/Users/shared/documents/"

# Execute bulk directory copy
./copy_manager.sh bulk-directories "/tmp/dirs_to_copy.txt" "/Users/shared/projects/"

Integration Examples

JAMF Pro Integration

#!/bin/bash

# JAMF Pro script for enterprise file distribution
# Parameters: $4 = source_path, $5 = destination_path, $6 = operation_type

SOURCE_PATH="$4"
DESTINATION_PATH="$5"
OPERATION_TYPE="$6"

COPY_MANAGER="/usr/local/bin/macfleet_copy_manager.sh"

# Download copy manager if not present. Abort on download failure so we
# never attempt to execute a missing or truncated script (-f fails on HTTP
# errors, -sS stays quiet except for errors, -L follows redirects).
if [[ ! -f "$COPY_MANAGER" ]]; then
    if ! curl -fsSL -o "$COPY_MANAGER" "https://scripts.macfleet.com/copy_manager.sh"; then
        echo "Failed to download copy manager" >&2
        exit 1
    fi
    chmod +x "$COPY_MANAGER"
fi

# Execute copy operation and remember its status — the report step below
# must not mask a copy failure (the original 'exit $?' returned the
# report's status instead of the copy's).
case "$OPERATION_TYPE" in
    "file")
        "$COPY_MANAGER" copy-file "$SOURCE_PATH" "$DESTINATION_PATH"
        copy_status=$?
        ;;
    "directory")
        "$COPY_MANAGER" copy-directory "$SOURCE_PATH" "$DESTINATION_PATH"
        copy_status=$?
        ;;
    *)
        echo "Invalid operation type: $OPERATION_TYPE"
        exit 1
        ;;
esac

# Generate report (best effort; does not affect the exit status)
"$COPY_MANAGER" report

exit $copy_status

Configuration Profile for Copy Policies

<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>PayloadContent</key>
    <array>
        <dict>
            <key>PayloadType</key>
            <string>com.macfleet.copy.policy</string>
            <key>PayloadIdentifier</key>
            <string>com.macfleet.copy.policy.main</string>
            <key>PayloadDisplayName</key>
            <string>MacFleet Copy Policy</string>
            <key>MaxFileSize</key>
            <string>10G</string>
            <key>AllowedExtensions</key>
            <array>
                <string>.pdf</string>
                <string>.docx</string>
                <string>.xlsx</string>
                <string>.pptx</string>
            </array>
            <key>BusinessHoursOnly</key>
            <true/>
            <key>RequireIntegrityCheck</key>
            <true/>
            <key>CreateBackups</key>
            <true/>
        </dict>
    </array>
</dict>
</plist>

Security and Compliance Features

File System Monitoring

# Monitor copy operations with FSEvents (requires the fswatch utility).
# IFS= and -r keep event paths with spaces or backslashes intact.
fswatch -r /Users/shared/ | while IFS= read -r event; do
    if [[ "$event" =~ "CREATED" ]]; then
        log_operation "MONITOR" "File created: $event"
    fi
done

Compliance Reporting

# Generate compliance report for auditors
# Extracts completed/failed copy entries from LOG_FILE into a CSV report.
# Globals (read): LOG_FILE
generate_compliance_report() {
    local report_file="/var/reports/copy_compliance_$(date +%Y%m%d).csv"
    
    # Ensure the report directory exists before writing into it.
    mkdir -p "$(dirname "$report_file")"
    
    echo "Timestamp,User,Source,Destination,Size,Status,Checksum" > "$report_file"
    
    # Parse log file for copy operations.
    # grep -E is required: macOS BSD grep does not support \| alternation in
    # basic regular expressions. read -r preserves backslashes in log lines.
    grep -E "copy completed|copy.*failed" "$LOG_FILE" | while IFS= read -r line; do
        # Extract relevant information and format as CSV
        echo "$line" | awk -F' - ' '{print $1","$2}' >> "$report_file"
    done
    
    echo "Compliance report generated: $report_file"
}

Troubleshooting

Common Issues and Solutions

| Issue | Cause | Solution |
| --- | --- | --- |
| Permission denied | Insufficient privileges | Run with sudo or check file permissions |
| Disk space error | Destination full | Check available space with df -h |
| Integrity check failed | Corrupt copy | Re-run copy operation, check disk health |
| Source not found | Incorrect path | Verify source path exists |
| Operation timeout | Large file/slow disk | Increase timeout or split operation |

Log Analysis

# View recent copy operations
tail -f /var/log/macfleet_copy_operations.log

# Search for failed operations
grep "ERROR" /var/log/macfleet_copy_operations.log

# Count operations by status
grep -c "SUCCESS" /var/log/macfleet_copy_operations.log
grep -c "ERROR" /var/log/macfleet_copy_operations.log

Best Practices

  1. Always test on single device before fleet deployment
  2. Monitor disk space before large copy operations
  3. Use business hours restrictions for resource-intensive operations
  4. Enable integrity verification for critical data
  5. Maintain regular backups of important destinations
  6. Review logs regularly for failed operations
  7. Set appropriate file size limits based on network capacity
  8. Use bulk operations for efficiency at scale

This enterprise copy management system provides comprehensive safety, monitoring, and compliance features while maintaining the simplicity of basic copy operations for day-to-day fleet management tasks.

Fetch Device Logs on macOS

macOS provides a powerful unified logging system that captures detailed information about system processes, applications, and user activities. This tutorial shows how to effectively collect and analyze device logs across your MacFleet for troubleshooting, security monitoring, and performance analysis.

Understanding macOS Logging

macOS uses the unified logging system introduced in macOS 10.12, which consolidates various log sources:

  • System logs - Kernel, system services, and daemons
  • Application logs - User applications and background processes
  • Security logs - Authentication, authorization, and security events
  • Network logs - Network activity and connectivity

Basic Log Commands

View Recent System Logs

#!/bin/bash

# Show logs from the last minute
sudo log show --last 1m

# Show logs from the last hour
sudo log show --last 1h

# Show logs from the last day
sudo log show --last 1d

Live Log Streaming

#!/bin/bash

# Stream live logs
sudo log stream

# Stream with timestamp
sudo log stream --info --debug

Application-Specific Logs

Get Logs for Specific Application

#!/bin/bash

# Basic app log query
log show --predicate 'processImagePath CONTAINS[c] "safari"'

# Query with time range
log show --predicate 'processImagePath CONTAINS[c] "Finder"' --last 30m

# Save app logs to file
log show --predicate 'processImagePath CONTAINS[c] "Mail"' --last 1h > ~/Desktop/mail_logs.txt

Enterprise Log Collection Script

#!/bin/bash

# Device Log Collection Script for MacFleet
# Compatible with macOS 10.14+

# Configuration
LOG_DIR="/var/log/macfleet_logs"                     # root directory for all collected logs
TIMESTAMP=$(date '+%Y%m%d_%H%M%S')                   # run timestamp, embedded in the names below
HOSTNAME=$(hostname -s)                              # short hostname identifies the device
OUTPUT_DIR="$LOG_DIR/${HOSTNAME}_${TIMESTAMP}"       # per-run staging directory (removed after archiving)
ARCHIVE_NAME="${HOSTNAME}_logs_${TIMESTAMP}.tar.gz"  # final archive file name

# Emit a timestamped message to stdout and append it to the collection log.
log_message() {
    local stamp
    stamp=$(date '+%Y-%m-%d %H:%M:%S')
    printf '%s - %s\n' "$stamp" "$1" | tee -a "$LOG_DIR/collection.log"
}

# Create the per-run staging directory; log the outcome and return it.
setup_directories() {
    if ! mkdir -p "$OUTPUT_DIR"; then
        log_message "✗ Failed to create output directory"
        return 1
    fi
    log_message "✓ Created output directory: $OUTPUT_DIR"
    return 0
}

# Gather general, error-level, and kernel logs from the unified logging
# system into the staging directory, logging each step's outcome.
collect_system_logs() {
    log_message "Collecting system logs..."
    
    # Recent system logs (last 24 hours)
    if ! log show --last 24h > "$OUTPUT_DIR/system_logs_24h.log"; then
        log_message "✗ Failed to collect system logs"
    else
        log_message "✓ System logs collected (24h)"
    fi
    
    # Critical and error logs only (messageType 16 and 17)
    if ! log show --last 24h --predicate 'messageType == 16 OR messageType == 17' > "$OUTPUT_DIR/critical_errors.log"; then
        log_message "✗ Failed to collect critical logs"
    else
        log_message "✓ Critical/Error logs collected"
    fi
    
    # Boot logs
    if ! log show --predicate 'process == "kernel"' --last 7d > "$OUTPUT_DIR/kernel_logs.log"; then
        log_message "✗ Failed to collect kernel logs"
    else
        log_message "✓ Kernel logs collected"
    fi
}

# Function to collect application logs
# Collects per-app unified logs for a fixed list of common applications.
# Empty result files are removed so the archive only contains useful data.
collect_app_logs() {
    log_message "Collecting application logs..."
    
    # Common applications to monitor
    local apps=("Safari" "Finder" "Mail" "Calendar" "Contacts" "Notes" "Spotlight" "WindowServer")
    
    local app lc_app output_file
    for app in "${apps[@]}"; do
        # Lowercase via tr: the ${var,,} expansion requires bash 4+, but the
        # /bin/bash shipped with macOS is 3.2, where it is a syntax error.
        lc_app=$(printf '%s' "$app" | tr '[:upper:]' '[:lower:]')
        output_file="$OUTPUT_DIR/${lc_app}_logs.log"
        if log show --predicate "processImagePath CONTAINS[c] \"$app\"" --last 24h > "$output_file" 2>/dev/null; then
            # Only keep file if it has content
            if [[ -s "$output_file" ]]; then
                log_message "✓ $app logs collected"
            else
                rm -f "$output_file"
                log_message "! No logs found for $app"
            fi
        else
            log_message "✗ Failed to collect $app logs"
        fi
    done
}

# Pull authentication activity and login/logout events from the last
# 7 days into the staging directory, logging each step's outcome.
collect_security_logs() {
    log_message "Collecting security logs..."
    
    # Authentication logs
    if ! log show --predicate 'category == "authorization" OR process == "authd"' --last 7d > "$OUTPUT_DIR/auth_logs.log"; then
        log_message "✗ Failed to collect auth logs"
    else
        log_message "✓ Authentication logs collected"
    fi
    
    # Login/logout events
    if ! log show --predicate 'eventMessage CONTAINS "login" OR eventMessage CONTAINS "logout"' --last 7d > "$OUTPUT_DIR/login_events.log"; then
        log_message "✗ Failed to collect login events"
    else
        log_message "✓ Login events collected"
    fi
}

# Function to collect system information
# Writes a single human-readable snapshot (OS version, hardware, disk,
# memory, top of the process list) to system_info.txt in the staging
# directory. Uses macOS-specific utilities (sw_vers, system_profiler,
# vm_stat).
collect_system_info() {
    log_message "Collecting system information..."
    
    # The whole grouped block is redirected once into the report file.
    {
        echo "=== System Information ==="
        echo "Hostname: $(hostname)"
        echo "macOS Version: $(sw_vers -productVersion)"
        echo "Build: $(sw_vers -buildVersion)"
        echo "Uptime: $(uptime)"
        echo "Date: $(date)"
        echo ""
        
        echo "=== Hardware Information ==="
        system_profiler SPHardwareDataType
        echo ""
        
        echo "=== Disk Usage ==="
        df -h
        echo ""
        
        echo "=== Memory Usage ==="
        vm_stat
        echo ""
        
        echo "=== Running Processes ==="
        # First 20 lines only — a snapshot, not a full process dump.
        ps aux | head -20
        
    } > "$OUTPUT_DIR/system_info.txt"
    
    log_message "✓ System information collected"
}

# Function to create archive
# Packs the staging directory into $LOG_DIR/$ARCHIVE_NAME, removes the
# staging directory on success, and logs the resulting archive size.
# Returns: 0 on success, 1 if tar fails.
create_archive() {
    log_message "Creating archive..."
    
    # Use tar -C instead of cd so the caller's working directory is left
    # untouched (the original cd leaked into the rest of the script).
    if tar -C "$LOG_DIR" -czf "$LOG_DIR/$ARCHIVE_NAME" "$(basename "$OUTPUT_DIR")"; then
        log_message "✓ Archive created: $LOG_DIR/$ARCHIVE_NAME"
        
        # Clean up temporary directory
        rm -rf "$OUTPUT_DIR"
        log_message "✓ Temporary files cleaned up"
        
        # Show archive size
        local size
        size=$(du -h "$LOG_DIR/$ARCHIVE_NAME" | cut -f1)
        log_message "Archive size: $size"
        
        return 0
    else
        log_message "✗ Failed to create archive"
        return 1
    fi
}

# Hand off the finished archive (placeholder for enterprise integration).
# Replace the commented commands with your organization's upload mechanism.
upload_archive() {
    local archive="$LOG_DIR/$ARCHIVE_NAME"
    
    # Example: Upload to central logging server
    # Replace with your actual upload mechanism
    log_message "Archive ready for collection: $archive"
    
    # Placeholder for upload logic
    # scp "$archive" user@logserver:/logs/
    # curl -F "file=@$archive" https://logserver.company.com/upload
    
    return 0
}

# Main execution function: set up the staging area, run every collector,
# archive the results, and hand the archive off for upload.
main() {
    log_message "=== Starting MacFleet log collection ==="
    
    # Setup — bail out immediately if the staging directory cannot be made.
    setup_directories || exit 1
    
    # Collect logs
    collect_system_logs
    collect_app_logs
    collect_security_logs
    collect_system_info
    
    # Create archive; a failure here is fatal.
    if ! create_archive; then
        log_message "=== Log collection failed ==="
        exit 1
    fi
    
    upload_archive
    log_message "=== Log collection completed successfully ==="
    exit 0
}

# Entry point: run the full collection pipeline.
main "$@"

Quick Log Collection Scripts

Collect Logs for Specific Time Period

#!/bin/bash

# Collect logs between specific dates
START_DATE="2025-01-01"
END_DATE="2025-01-02"
# Use $HOME, not ~ — tilde is NOT expanded inside double quotes, so the
# original literally wrote to a directory named "~".
OUTPUT_FILE="$HOME/Desktop/logs_${START_DATE}_to_${END_DATE}.txt"

log show --start "$START_DATE" --end "$END_DATE" > "$OUTPUT_FILE"
echo "Logs saved to: $OUTPUT_FILE"

Collect Application Crash Logs

#!/bin/bash

# Collect crash reports from the last 7 days
# Use $HOME, not ~ — tilde is NOT expanded inside double quotes, so find
# and mkdir would have operated on literal "~/..." paths.
CRASH_DIR="$HOME/Library/Logs/DiagnosticReports"
OUTPUT_DIR="$HOME/Desktop/crash_logs"

mkdir -p "$OUTPUT_DIR"
find "$CRASH_DIR" -name "*.crash" -mtime -7 -exec cp {} "$OUTPUT_DIR/" \;

echo "Recent crash logs copied to: $OUTPUT_DIR"

Monitor Specific Events

#!/bin/bash

# Monitor USB device connections
log stream --predicate 'eventMessage CONTAINS "USB"' --info

# Monitor network changes
log stream --predicate 'subsystem == "com.apple.network"' --info

# Monitor file system events
log stream --predicate 'subsystem == "com.apple.filesystem"' --debug

Log Analysis Scripts

Parse Authentication Events

#!/bin/bash

# Extract failed login attempts
log show --predicate 'eventMessage CONTAINS "authentication failure"' --last 7d \
    | grep -E "(authentication failure|failed)" \
    | sort | uniq -c | sort -nr > ~/Desktop/failed_logins.txt

echo "Failed login analysis saved to ~/Desktop/failed_logins.txt"

Application Usage Statistics

#!/bin/bash

# Analyze application launches
log show --predicate 'process == "launchd" AND eventMessage CONTAINS "spawn"' --last 24h \
    | grep -oE '"[^"]*\.app"' \
    | sort | uniq -c | sort -nr > ~/Desktop/app_usage.txt

echo "Application usage statistics saved to ~/Desktop/app_usage.txt"

Log Archive Management

Create System Log Archive

#!/bin/bash

# Create complete system log archive
# Use $HOME, not ~ — tilde is NOT expanded inside double quotes, so the
# archive would have been created under a literal "~" directory.
ARCHIVE_PATH="$HOME/Desktop/SystemLogs_$(date +%Y%m%d_%H%M%S).logarchive"

log collect --output "$ARCHIVE_PATH" --last 24h

echo "System log archive created: $ARCHIVE_PATH"

View Archived Logs

#!/bin/bash

# View logs from archive
# Use $HOME, not ~ — tilde is NOT expanded inside double quotes, so the
# existence check always failed on the literal "~/..." path.
ARCHIVE_PATH="$HOME/Desktop/SystemLogs.logarchive"

# -e instead of -f: a .logarchive produced by `log collect` is a directory
# bundle, so a plain-file test would reject a valid archive.
if [[ -e "$ARCHIVE_PATH" ]]; then
    log show --archive "$ARCHIVE_PATH" --last 1h
else
    echo "Archive not found: $ARCHIVE_PATH"
fi

Troubleshooting Common Issues

| Issue | Solution |
| --- | --- |
| Permission denied | Run with sudo for system logs |
| No logs found | Check date range and predicates |
| Large log files | Use time filters and specific predicates |
| Script timeouts | Reduce time range or use async collection |
| Archive corruption | Verify disk space and permissions |

Log Filtering Examples

Filter by Log Level

# Error messages only
log show --predicate 'messageType == 16' --last 1h

# Info and debug messages
log show --predicate 'messageType >= 1' --last 30m

# Critical errors only
log show --predicate 'messageType == 17' --last 24h

Filter by Process

# System processes
log show --predicate 'process == "kernel" OR process == "launchd"' --last 1h

# User processes
log show --predicate 'processImagePath BEGINSWITH "/Applications"' --last 1h

# Background daemons
log show --predicate 'processImagePath CONTAINS "daemon"' --last 1h

Security Considerations

  • Privacy - Be mindful of user data in logs
  • Retention - Implement log rotation and cleanup
  • Access Control - Restrict log access to authorized personnel
  • Encryption - Encrypt log archives before transmission
  • Compliance - Follow data retention policies

Performance Tips

  • Use specific time ranges to limit data volume
  • Filter by process or category to reduce noise
  • Stream logs in real-time for immediate analysis
  • Compress archives to save storage space
  • Schedule regular log collection during off-hours

Important Notes

  • System logs may contain sensitive information
  • Large time ranges can generate massive log files
  • Some logs require admin privileges to access
  • Log collection can impact system performance
  • Test scripts on individual devices before fleet deployment