using BenchmarkTools
using ClusterManagers
using CSV
using DataFrames
using Distributed
using Statistics

include("../TUG/src/TUG.jl")
using .TUG

# 1. Environment Setup

"""
    setup_environment(num_procs::Int) -> Vector{Int}

Add `num_procs` worker processes and load the TUG module on each of them.
Workers are spawned through SLURM when the script runs inside a SLURM job
(detected via the `SLURM_JOB_ID` environment variable), locally otherwise.

Returns the pids of the workers that were added; an empty vector when
`num_procs <= 0`. (The original returned `nothing` in that case, which was
type-unstable and forced callers to guess which pids they owned.)
"""
function setup_environment(num_procs::Int)
    added_procs = Int[]
    if num_procs > 0
        if haskey(ENV, "SLURM_JOB_ID")
            # Running inside a SLURM allocation: let the scheduler place workers.
            added_procs = addprocs(SlurmManager(num_procs), exclusive="")
        else
            # Plain local worker processes.
            added_procs = addprocs(num_procs)
        end
        # Make the TUG module available on every newly added worker.
        for proc in added_procs
            remotecall_wait(include, proc, "../TUG/src/TUG.jl")
            remotecall_wait(eval, proc, :(using .TUG))
        end
    end
    return added_procs
end

# 2. Test Case Definitions

"""
    testBTCS100() -> Tuple{Grid,Boundary}

Build a 1024x1000 grid with piecewise-constant diffusivities, a uniform
background concentration of 0.5, four high-concentration point sources near
the corners, and fully closed boundaries. Intended for a 100-step BTCS run.
"""
function testBTCS100()::Tuple{Grid,Boundary}
    rows::Int = 1024
    cols::Int = 1000

    # Banded diffusivity fields: lower values in the first row/column bands.
    alphaX = fill(1.25, rows, cols)
    alphaY = fill(1.1, rows, cols)
    alphaX[1:100, :] .= 0.5
    alphaX[101:200, :] .= 0.8
    alphaY[:, 1:200] .= 0.6
    alphaY[:, 201:400] .= 0.9

    grid::Grid = Grid{Float64}(rows, cols, alphaX, alphaY)

    # Uniform background with point sources near each corner.
    concentrations = fill(0.5, rows, cols)
    concentrations[11, 11] = 15000
    concentrations[1015, 991] = 7500
    concentrations[11, 991] = 7500
    concentrations[1015, 11] = 7500
    setConcentrations!(grid, concentrations)

    bc::Boundary = Boundary(grid)
    setBoundarySideClosed!(bc, LEFT)
    setBoundarySideClosed!(bc, RIGHT)
    setBoundarySideClosed!(bc, TOP)
    setBoundarySideClosed!(bc, BOTTOM)

    return grid, bc
end

"""
    testBTCS200() -> Tuple{Grid,Boundary}

Build a 2027x1999 grid with smoothly varying (sinusoidal) diffusivities, a
linearly growing concentration field with four corner point sources, and
mixed boundary conditions (closed left/top, constant right/bottom).
Intended for a 200-step BTCS run.
"""
function testBTCS200()::Tuple{Grid,Boundary}
    rows::Int = 2027
    cols::Int = 1999

    # Smooth, spatially varying diffusivity fields.
    alphaX = [sin(i / 100) * cos(j / 100) for i in 1:rows, j in 1:cols]
    alphaY = [cos(i / 100) * sin(j / 100) for i in 1:rows, j in 1:cols]

    grid::Grid = Grid{Float64}(rows, cols, alphaX, alphaY)

    concentrations = [i * j / 1e2 for i in 1:rows, j in 1:cols]
    concentrations[11, 11] = 15000
    concentrations[2021, 1995] = 7500
    concentrations[11, 1995] = 7500
    concentrations[2021, 11] = 7500
    setConcentrations!(grid, concentrations)

    bc::Boundary = Boundary(grid)
    setBoundarySideClosed!(bc, LEFT)
    setBoundarySideConstant!(bc, RIGHT, 1.5)
    setBoundarySideClosed!(bc, TOP)
    setBoundarySideConstant!(bc, BOTTOM, 0.75)

    return grid, bc
end

"""
    testFTCS500() -> Tuple{Grid,Boundary}

Build a 2000x2000 grid with strictly positive sinusoidal diffusivities
(offset by +1), a weak gradient concentration field with a single strong
central source, and closed boundaries except a constant bottom side.
Intended for a 500-step FTCS run.
"""
function testFTCS500()::Tuple{Grid,Boundary}
    rows::Int = 2000
    cols::Int = 2000

    # +1 offset keeps the diffusivities positive everywhere.
    alphaX = [sin(i / 100) * cos(j / 100) + 1 for i in 1:rows, j in 1:cols]
    alphaY = [cos(i / 100) * sin(j / 100) + 1 for i in 1:rows, j in 1:cols]

    grid::Grid = Grid{Float64}(rows, cols, alphaX, alphaY)

    concentrations = [(i * j) / 1e6 for i in 1:rows, j in 1:cols]
    concentrations[1001, 1001] = 2000
    setConcentrations!(grid, concentrations)

    bc::Boundary = Boundary(grid)
    setBoundarySideClosed!(bc, LEFT)
    setBoundarySideClosed!(bc, RIGHT)
    setBoundarySideClosed!(bc, TOP)
    setBoundarySideConstant!(bc, BOTTOM, 0.75)

    return grid, bc
end

# 3. Simulation Runners

"""
    run_static_simulation(grid, bc, method, steps, dt)

Run a fixed-length TUG `Simulation` with console and CSV output disabled.
"""
function run_static_simulation(grid::Grid, bc::Boundary, method, steps::Int, dt::Float64)
    simulation = Simulation(grid, bc, method, steps, dt, CONSOLE_OUTPUT_OFF, CSV_OUTPUT_OFF)
    TUG.run(simulation)
end

"""
    run_dynamic_simulation(grid, bc, method, steps, dt, num_procs)

Run a TUG `DynamicSimulation` step-by-step, optionally adding `num_procs`
worker processes for the duration of the run. Only the workers added by this
call are removed afterwards; pre-existing workers are left untouched (the
previous implementation removed ALL workers via `rmprocs(workers())`).
"""
function run_dynamic_simulation(grid::Grid, bc::Boundary, method, steps::Int, dt::Float64, num_procs::Int)
    added_procs = setup_environment(num_procs)
    simulation = DynamicSimulation(grid, bc, method, dt)
    createGrid(simulation)
    for _ in 1:steps
        next(simulation)
    end
    # Tear down only what this run created.
    if !isempty(added_procs)
        rmprocs(added_procs)
    end
end

# 4. Performance Metrics

"""
    measure_performance(test_case_function, method, steps, dt, num_procs, dynamic=false)

Benchmark one test case with BenchmarkTools and return a named tuple with
the mean wall time in seconds and mean allocated memory in MB.

Note: for `dynamic=true` the measured time includes worker setup/teardown,
since those happen inside `run_dynamic_simulation`.
"""
function measure_performance(test_case_function, method, steps::Int, dt::Float64,
                             num_procs::Int, dynamic::Bool=false)
    grid, bc = test_case_function()
    if dynamic
        simulation_run = @benchmark run_dynamic_simulation($grid, $bc, $method, $steps, $dt, $num_procs)
    else
        simulation_run = @benchmark run_static_simulation($grid, $bc, $method, $steps, $dt)
    end
    time_measurement = mean(simulation_run).time / 1e9     # ns -> s
    memory_measurement = mean(simulation_run).memory / 1e6 # bytes -> MB
    return (time=time_measurement, memory=memory_measurement)
end

# 5. Benchmarking and Comparison

"""
    benchmark_test_case(test_case_function, method, steps, dt, is_distributed, num_procs)

Thin wrapper around [`measure_performance`](@ref) that maps the configuration
tuple used in `main` onto its argument order.
"""
function benchmark_test_case(test_case_function, method, steps::Int, dt::Float64,
                             is_distributed::Bool, num_procs::Int)
    return measure_performance(test_case_function, method, steps, dt, num_procs, is_distributed)
end
# 6. Reporting

"""
    report_results(results)

Print one summary line per benchmark result. Each `result` is a named tuple
with fields `test_case`, `is_distributed`, `num_procs`, `time` (seconds) and
`memory` (MB); `num_procs` is only shown for distributed runs.
"""
function report_results(results)
    for result in results
        println("Test Case: $(result.test_case)$(result.is_distributed ? ", Procs: $(result.num_procs)" : ""), Time: $(result.time) s, Memory: $(result.memory) MB")
    end
end

# 7. Cleanup

"""
    cleanup()

Delete all `.csv` files in the current working directory (leftover simulation
output). Uses `endswith` so only true `.csv` files are removed — the previous
`occursin(".csv", name)` also matched names like `results.csv.bak`.
"""
function cleanup()
    for filename in readdir()
        if endswith(filename, ".csv")
            rm(filename, force=true)
        end
    end
end

# Main Function

"""
    main()

Run every test case under every process configuration, clean up CSV output,
and print a summary report.
"""
function main()
    # (test-case builder, solver method, number of steps, time step)
    test_cases = [(testBTCS100, BTCS, 100, 0.01),
                  (testBTCS200, BTCS, 200, 0.005),
                  (testFTCS500, FTCS, 500, 0.005)]
    # (is_distributed, number of additional worker processes)
    configurations = [(false, 0), (true, 0), (true, 1), (true, 2)]

    # Concrete eltype instead of `[]` (Vector{Any}) for type-stable iteration.
    results = NamedTuple[]
    for (test_case, method, steps, dt) in test_cases
        for (is_distributed, num_procs) in configurations
            performance_metrics = benchmark_test_case(test_case, method, steps, dt,
                                                      is_distributed, num_procs)
            push!(results, (test_case=test_case, method=method,
                            is_distributed=is_distributed, num_procs=num_procs,
                            time=performance_metrics.time,
                            memory=performance_metrics.memory))
        end
    end
    cleanup()
    report_results(results)
end

main()