diff --git a/.github/workflows/Docs.yml b/.github/workflows/Docs.yml index 6323a40ab13..ac2c956c71d 100644 --- a/.github/workflows/Docs.yml +++ b/.github/workflows/Docs.yml @@ -37,7 +37,7 @@ jobs: - name: Install Julia uses: julia-actions/setup-julia@v2 with: - version: '1' + version: '1.10' - uses: julia-actions/cache@v2 - name: Build and deploy docs env: diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index cadea1e2436..c436f842b33 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -25,7 +25,7 @@ jobs: matrix: version: - '1.6' - - '1' # automatically expands to the latest stable 1.x release of Julia + - '1.10' os: - ubuntu-20.04 arch: diff --git a/.github/workflows/compilation-benchmark.yaml b/.github/workflows/compilation-benchmark.yaml index 6df32a3184f..ba6db56d44d 100644 --- a/.github/workflows/compilation-benchmark.yaml +++ b/.github/workflows/compilation-benchmark.yaml @@ -28,7 +28,7 @@ jobs: run: sudo apt-get update && sudo apt-get install -y xorg-dev mesa-utils xvfb libgl1 freeglut3-dev libxrandr-dev libxinerama-dev libxcursor-dev libxi-dev libxext-dev - uses: julia-actions/setup-julia@v2 with: - version: '1' + version: '1.10' arch: x64 - uses: julia-actions/cache@v2 - name: Benchmark diff --git a/.github/workflows/reference_tests.yml b/.github/workflows/reference_tests.yml index 07b60c28037..86003d3b61f 100644 --- a/.github/workflows/reference_tests.yml +++ b/.github/workflows/reference_tests.yml @@ -25,7 +25,7 @@ jobs: matrix: version: - '1.6' - - '1' # automatically expands to the latest stable 1.x release of Julia + - '1.10' os: - ubuntu-20.04 arch: @@ -74,7 +74,7 @@ jobs: matrix: version: - '1.6' - - '1' # automatically expands to the latest stable 1.x release of Julia + - '1.10' os: - ubuntu-20.04 arch: @@ -124,7 +124,7 @@ jobs: matrix: version: - '1.6' - - '1' # automatically expands to the latest stable 1.x release of Julia + - '1.10' os: - ubuntu-20.04 arch: @@ -172,15 +172,15 @@ jobs: steps: - uses: 
actions/download-artifact@v4 with: - name: ReferenceImages_WGLMakie_1 + name: ReferenceImages_WGLMakie_1.10 path: ./ReferenceImages/WGLMakie - uses: actions/download-artifact@v4 with: - name: ReferenceImages_CairoMakie_1 + name: ReferenceImages_CairoMakie_1.10 path: ./ReferenceImages/CairoMakie - uses: actions/download-artifact@v4 with: - name: ReferenceImages_GLMakie_1 + name: ReferenceImages_GLMakie_1.10 path: ./ReferenceImages/GLMakie - name: Consolidate reference image folders run: | @@ -198,9 +198,10 @@ jobs: # Loop through the directories and concatenate the files, and copy recorded folders for dir in WGLMakie CairoMakie GLMakie; do - # Concatenate scores.tsv and new_files.txt + # Concatenate scores.tsv, new_files.txt and missing_files.txt cat "${baseDir}/${dir}/scores.tsv" >> "./ReferenceImagesCombined/scores.tsv" cat "${baseDir}/${dir}/new_files.txt" >> "./ReferenceImagesCombined/new_files.txt" + cat "${baseDir}/${dir}/missing_files.txt" >> "./ReferenceImagesCombined/missing_files.txt" # Copy recorded folder mkdir -p "./ReferenceImagesCombined/recorded/${dir}/" diff --git a/.github/workflows/relocatability.yml b/.github/workflows/relocatability.yml index 483ecb2e552..d3e62f5c33f 100644 --- a/.github/workflows/relocatability.yml +++ b/.github/workflows/relocatability.yml @@ -26,7 +26,7 @@ jobs: fail-fast: false matrix: version: - - '1.10' # automatically expands to the latest stable 1.x release of Julia + - '1.10' os: - ubuntu-20.04 arch: diff --git a/.github/workflows/rprmakie.yaml b/.github/workflows/rprmakie.yaml index d52cc61a470..f8b5ae8c842 100644 --- a/.github/workflows/rprmakie.yaml +++ b/.github/workflows/rprmakie.yaml @@ -24,7 +24,7 @@ jobs: fail-fast: false matrix: version: - - '1' # automatically expands to the latest stable 1.x release of Julia + - '1.10' os: - ubuntu-20.04 arch: diff --git a/CairoMakie/test/runtests.jl b/CairoMakie/test/runtests.jl index 2b0528ca431..b131a1dff32 100644 --- a/CairoMakie/test/runtests.jl +++ 
b/CairoMakie/test/runtests.jl @@ -193,7 +193,7 @@ functions = [:volume, :volume!, :uv_mesh] CairoMakie.activate!(type = "png", px_per_unit = 1) ReferenceTests.mark_broken_tests(excludes, functions=functions) recorded_files, recording_dir = @include_reference_tests CairoMakie "refimages.jl" - missing_images, scores = ReferenceTests.record_comparison(recording_dir) + missing_images, scores = ReferenceTests.record_comparison(recording_dir, "CairoMakie") ReferenceTests.test_comparison(scores; threshold = 0.05) end diff --git a/GLMakie/test/runtests.jl b/GLMakie/test/runtests.jl index 9bb97615171..2aa9d1d5765 100644 --- a/GLMakie/test/runtests.jl +++ b/GLMakie/test/runtests.jl @@ -30,7 +30,7 @@ include("unit_tests.jl") @testset "refimages" begin ReferenceTests.mark_broken_tests() recorded_files, recording_dir = @include_reference_tests GLMakie "refimages.jl" joinpath(@__DIR__, "glmakie_refimages.jl") - missing_images, scores = ReferenceTests.record_comparison(recording_dir) + missing_images, scores = ReferenceTests.record_comparison(recording_dir, "GLMakie") ReferenceTests.test_comparison(scores; threshold = 0.05) end diff --git a/ReferenceTests/src/database.jl b/ReferenceTests/src/database.jl index 49c5bafdbf0..6bb2b835fd7 100644 --- a/ReferenceTests/src/database.jl +++ b/ReferenceTests/src/database.jl @@ -17,6 +17,7 @@ const RECORDING_DIR = Base.RefValue{String}() const SKIP_TITLES = Set{String}() const SKIP_FUNCTIONS = Set{Symbol}() const COUNTER = Ref(0) +const SKIPPED_NAMES = Set{String}() # names skipped due to title exclusion or function exclusion """ @reference_test(name, code) @@ -32,6 +33,7 @@ macro reference_test(name, code) @testset $(title) begin if $skip @test_broken false + mark_skipped!($title) else t1 = time() if $title in $REGISTERED_TESTS @@ -84,10 +86,13 @@ end function mark_broken_tests(title_excludes = []; functions=[]) empty!(SKIP_TITLES) empty!(SKIP_FUNCTIONS) + empty!(SKIPPED_NAMES) union!(SKIP_TITLES, title_excludes) union!(SKIP_FUNCTIONS, 
functions) end +mark_skipped!(name::String) = push!(SKIPPED_NAMES, name) + macro include_reference_tests(backend::Symbol, path, paths...) toplevel_folder = dirname(string(__source__.file)) return esc(quote diff --git a/ReferenceTests/src/runtests.jl b/ReferenceTests/src/runtests.jl index cb0d0672322..52a689289d0 100644 --- a/ReferenceTests/src/runtests.jl +++ b/ReferenceTests/src/runtests.jl @@ -81,7 +81,7 @@ function get_all_relative_filepaths_recursively(dir) end end -function record_comparison(base_folder::String; record_folder_name="recorded", tag=last_major_version()) +function record_comparison(base_folder::String, backend::String; record_folder_name="recorded", tag=last_major_version()) record_folder = joinpath(base_folder, record_folder_name) @info "Downloading reference images" reference_folder = download_refimages(tag) @@ -99,6 +99,19 @@ function record_comparison(base_folder::String; record_folder_name="recorded", t println(file, path) end end + + open(joinpath(base_folder, "missing_files.txt"), "w") do file + backend_ref_dir = joinpath(reference_folder, backend) + recorded_paths = mapreduce(vcat, walkdir(backend_ref_dir)) do (root, dirs, files) + relpath.(joinpath.(root, files), reference_folder) + end + skipped = Set([joinpath(backend, "$name.png") for name in SKIPPED_NAMES]) + missing_recordings = setdiff(Set(recorded_paths), Set(testimage_paths), skipped) + + for path in missing_recordings + println(file, path) + end + end open(joinpath(base_folder, "scores.tsv"), "w") do file paths_scores = sort(collect(pairs(scores)), by = last, rev = true) @@ -120,7 +133,12 @@ function test_comparison(scores; threshold) end end -function compare(relative_test_paths::Vector{String}, reference_dir::String, record_dir; o_refdir=reference_dir, missing_refimages=String[], scores=Dict{String,Float64}()) +function compare( + relative_test_paths::Vector{String}, reference_dir::String, record_dir; + o_refdir = reference_dir, missing_refimages = String[], + scores = 
Dict{String,Float64}() + ) + for relative_test_path in relative_test_paths ref_path = joinpath(reference_dir, relative_test_path) rec_path = joinpath(record_dir, relative_test_path) diff --git a/ReferenceUpdater/Project.toml b/ReferenceUpdater/Project.toml index a8d2097449d..59313979845 100644 --- a/ReferenceUpdater/Project.toml +++ b/ReferenceUpdater/Project.toml @@ -4,6 +4,7 @@ authors = ["Julius Krumbiegel "] version = "0.1.0" [deps] +Dates = "ade2ca70-3891-5945-98fb-dc099432e06a" Downloads = "f43a241f-c20a-4ad4-852c-f6b1247861c6" HTTP = "cd3eb016-35fb-5094-929b-558a96fad6f3" JSON3 = "0f8b85d8-7281-11e9-16c2-39a750bddbf1" diff --git a/ReferenceUpdater/src/ReferenceUpdater.jl b/ReferenceUpdater/src/ReferenceUpdater.jl index 6acf2060e64..bca0ba7513a 100644 --- a/ReferenceUpdater/src/ReferenceUpdater.jl +++ b/ReferenceUpdater/src/ReferenceUpdater.jl @@ -8,6 +8,7 @@ import JSON3 import ZipFile import REPL import TOML +using Dates function github_token() get(ENV, "GITHUB_TOKEN") do @@ -29,4 +30,4 @@ function __init__() atexit(wipe_cache!) 
end -end \ No newline at end of file +end diff --git a/ReferenceUpdater/src/image_download.jl b/ReferenceUpdater/src/image_download.jl index 063edf9860b..bc3098a0faa 100644 --- a/ReferenceUpdater/src/image_download.jl +++ b/ReferenceUpdater/src/image_download.jl @@ -18,3 +18,13 @@ function upload_reference_images(path=basedir("recorded"), tag=last_major_versio upload_release("MakieOrg", "Makie.jl", github_token(), tag, tarfile) end end + +function download_refimages(tag=last_major_version()) + url = "https://github.com/MakieOrg/Makie.jl/releases/download/$(tag)/reference_images.tar" + images_tar = Downloads.download(url) + images = tempname() + isdir(images) && rm(images, recursive=true, force=true) + Tar.extract(images_tar, images) + rm(images_tar) + return images +end diff --git a/ReferenceUpdater/src/local_server.jl b/ReferenceUpdater/src/local_server.jl index 44fb0dbaf3d..4496911129d 100644 --- a/ReferenceUpdater/src/local_server.jl +++ b/ReferenceUpdater/src/local_server.jl @@ -12,24 +12,27 @@ function serve_update_page_from_dir(folder) folder = realpath(folder) @assert isdir(folder) "$folder is not a valid directory." 
- split_scores(folder) - + group_scores(folder) + group_files(folder, "new_files.txt", "new_files_grouped.txt") + group_files(folder, "missing_files.txt", "missing_files_grouped.txt") + router = HTTP.Router() function receive_update(req) data = JSON3.read(req.body) - images = data["images"] + images_to_update = data["images_to_update"] + images_to_delete = data["images_to_delete"] tag = data["tag"] - tempdir = tempname() recorded_folder = joinpath(folder, "recorded") - reference_folder = joinpath(folder, "reference") - @info "Copying reference folder to \"$tempdir\"" - cp(reference_folder, tempdir) + @info "Downloading latest reference folder for $tag" + tempdir = download_refimages(tag) + + @info "Updating files in $tempdir" - for image in images - @info "Overwriting \"$image\" in new reference folder" + for image in images_to_update + @info "Overwriting or adding $image" copy_filepath = joinpath(tempdir, image) copy_dir = splitdir(copy_filepath)[1] # make the path in case a new refimage is in a not yet existing folder @@ -37,6 +40,16 @@ function serve_update_page_from_dir(folder) cp(joinpath(recorded_folder, image), copy_filepath, force = true) end + for image in images_to_delete + @info "Deleting $image" + copy_filepath = joinpath(tempdir, image) + if isfile(copy_filepath) + rm(copy_filepath, recursive = true) + else + @warn "Cannot delete $image - it has already been deleted." 
+ end + end + @info "Uploading updated reference images under tag \"$tag\"" try upload_reference_images(tempdir, tag) @@ -107,7 +120,7 @@ function serve_update_page(; commit = nothing, pr = nothing) checkruns = filter(checksinfo["check_runs"]) do checkrun name = checkrun["name"] id = checkrun["id"] - + if name == "Merge artifacts" job = JSON3.read(authget("https://api.github.com/repos/MakieOrg/Makie.jl/actions/jobs/$(id)").body) run = JSON3.read(authget(job["run_url"]).body) @@ -121,14 +134,20 @@ function serve_update_page(; commit = nothing, pr = nothing) return false end end + if isempty(checkruns) error("\"Merge artifacts\" run is not available.") end if length(checkruns) > 1 - error("Found multiple checkruns for \"Merge artifacts\", this is unexpected.") - end - + datetimes = map(checkruns) do checkrun + DateTime(checkrun["completed_at"], dateformat"y-m-dTH:M:SZ") + end + datetime, idx = findmax(datetimes) + @warn("Found multiple checkruns for \"Merge artifacts\". Using latest with timestamp: $datetime") + check = checkruns[idx] + else check = only(checkruns) + end job = JSON3.read(authget("https://api.github.com/repos/MakieOrg/Makie.jl/actions/jobs/$(check["id"])").body) run = JSON3.read(authget(job["run_url"]).body) @@ -185,7 +204,7 @@ function unzip(file, exdir = "") end -function split_scores(path) +function group_scores(path) isfile(joinpath(path, "scores_table.tsv")) && return # Load all refimg scores into a Dict @@ -210,7 +229,7 @@ function split_scores(path) end end end - + # sort by max score across all backends so problem come first data_vec = collect(pairs(data)) sort!(data_vec, by = x -> maximum(x[2]), rev = true) @@ -231,3 +250,42 @@ function split_scores(path) return end + +function group_files(path, input_filename, output_filename) + isfile(joinpath(path, output_filename)) && return + + # Group files in new_files/missing_files into a table like layout: + # GLMakie CairoMakie WGLMakie + + # collect refimg names and which backends they exist for + 
data = Dict{String, Vector{Bool}}() + open(joinpath(path, input_filename), "r") do file + for filepath in eachline(file) + pieces = split(filepath, '/') + backend = pieces[1] + if !(backend in ("GLMakie", "CairoMakie", "WGLMakie")) + error("Failed to parse backend in \"$filepath\", got \"$backend\"") + end + + filename = join(pieces[2:end], '/') + exists = get!(data, filename, [false, false, false]) + + exists[1] |= backend == "GLMakie" + exists[2] |= backend == "CairoMakie" + exists[3] |= backend == "WGLMakie" + end + end + + # generate new structured file + open(joinpath(path, output_filename), "w") do file + for (filename, valid) in data + println(file, + ifelse(valid[1], "GLMakie/$filename", "INVALID"), '\t', + ifelse(valid[2], "CairoMakie/$filename", "INVALID"), '\t', + ifelse(valid[3], "WGLMakie/$filename", "INVALID") + ) + end + end + + return +end diff --git a/ReferenceUpdater/src/reference_images.html b/ReferenceUpdater/src/reference_images.html index 39181a9aef3..fc58c9739a8 100644 --- a/ReferenceUpdater/src/reference_images.html +++ b/ReferenceUpdater/src/reference_images.html @@ -34,9 +34,31 @@

Reference images

+

+ +

New images without references

+ The selected CI run produced an image for which no reference image exists. + Selected images will be added as new reference images. +

+ Toggle All
+ +

Old reference images without recordings

+ The selected CI run did not produce an image, but a reference image exists. + This implies that a reference test was deleted or renamed. + Selected images will be deleted from the reference images. +

+ Toggle All
+
+

Images with references

+ This is the normal case where the selected CI run produced an image and the reference image exists. + Each row shows one image per backend from the same reference image test, which can be compared with its reference image. + Rows are sorted based on the maximum row score (bigger = more different). + Red cells fail CI (assuming the thresholds are up to date), yellow cells may but likely don't have significant visual difference and gray cells are visually equivalent. +

@@ -111,53 +133,137 @@

Images with references

}) }) - fetch('new_files.txt') + fetch('new_files_grouped.txt') .then(response => response.text()) .then(data => { di = document.querySelector("#new-images-list") - data.split(/\r?\n/).forEach(path => { - if (path == ""){ + data.split(/\r?\n/).forEach(line => { + if (line == ""){ return } + parts = line.split('\t') - div = document.createElement("div") - di.append(div) - div.innerHTML = ` - - ${path} - ` - if (path.endsWith(".png")){ - div.innerHTML += `` - } else if (path.endsWith(".mp4")){ - div.innerHTML += `