Merge pull request #2149 from nulib/deploy/staging
Deploy v1.6.0 to production
kdid authored Apr 16, 2021
2 parents 067071a + f69a9b6 commit 8e357f8
Showing 4 changed files with 45 additions and 30 deletions.
lib/meadow/data/preservation_check_writer.ex (22 changes: 18 additions & 4 deletions)
@@ -110,8 +110,8 @@ defmodule Meadow.Data.PreservationCheckWriter do
file_set.id,
file_set.metadata.label,
file_set.role.id,
- Map.get(file_set.metadata.digests, "sha256"),
- Map.get(file_set.metadata.digests, "sha1"),
+ get_if_map(result.digests, "sha256"),
+ get_if_map(result.digests, "sha1"),
file_set.metadata.location,
Map.fetch!(result, :preservation),
FileSets.pyramid_uri_for(file_set),
@@ -120,19 +120,33 @@
end
end

+ defp get_if_map(map, key) when is_map(map) do
+ Map.get(map, key, "MISSING")
+ end
+
+ defp get_if_map(_, _), do: "MISSING"
+
defp check_files(file_set) do
%{
+ :digests => file_set.metadata |> Map.get(:digests),
:preservation => validate_preservation_file(file_set.metadata.location),
:pyramid => validate_pyramid_present(file_set)
}
end

- defp record_invalid_file_set(%{:preservation => false, :pyramid => _}, cache_key),
+ defp record_invalid_file_set(%{preservation: false}, cache_key),
do: record_invalid_file_set(cache_key)

- defp record_invalid_file_set(%{:pyramid => false, :preservation => _}, cache_key),
+ defp record_invalid_file_set(%{pyramid: false}, cache_key),
do: record_invalid_file_set(cache_key)

+ defp record_invalid_file_set(%{digests: digests}, cache_key) do
+ case digests do
+ %{"sha256" => <<_sha256::binary-size(64)>>, "sha1" => <<_sha1::binary-size(40)>>} -> :noop
+ _ -> record_invalid_file_set(cache_key)
+ end
+ end
+
defp record_invalid_file_set(_, _cache_key), do: :noop

defp record_invalid_file_set(cache_key) do
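The new clauses above do two things: tolerate a missing or non-map digests value when writing a report row (get_if_map/2 falls back to "MISSING"), and flag a file set whose digests are absent or not hex strings of the expected length. A minimal standalone sketch of that logic for readers skimming the diff; the module name and the digests_valid?/1 helper are illustrative, not part of Meadow:

    # Illustration only: mirrors the private clauses added above in a throwaway module.
    defmodule DigestCheckSketch do
      # Report writer helper: fall back to "MISSING" when digests is nil or not a map.
      def get_if_map(map, key) when is_map(map), do: Map.get(map, key, "MISSING")
      def get_if_map(_, _), do: "MISSING"

      # A hex-encoded sha256 is 64 bytes and a hex-encoded sha1 is 40 bytes;
      # anything else should count as invalid.
      def digests_valid?(%{"sha256" => <<_::binary-size(64)>>, "sha1" => <<_::binary-size(40)>>}), do: true
      def digests_valid?(_), do: false
    end

    DigestCheckSketch.get_if_map(nil, "sha256")                                #=> "MISSING"
    DigestCheckSketch.get_if_map(%{"sha1" => "abc"}, "sha256")                 #=> "MISSING"
    DigestCheckSketch.digests_valid?(%{"sha256" => "badsha", "sha1" => "abc"}) #=> false

The <<_::binary-size(64)>> and <<_::binary-size(40)>> patterns match the hex-encoded lengths of sha256 and sha1 digests, which is why a short or missing value falls through to the branch that calls record_invalid_file_set/1 in the real code.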
mix.exs (2 changes: 1 addition & 1 deletion)
@@ -1,7 +1,7 @@
defmodule Meadow.MixProject do
use Mix.Project

@app_version "1.5.2"
@app_version "1.6.0"

def project do
[
priv/elasticsearch/meadow.json (7 changes: 5 additions & 2 deletions)
@@ -60,7 +60,10 @@
{
"ids": {
"path_match": "*.id",
"mapping": { "type": "keyword" }
"mapping": {
"type": "keyword",
"copy_to": ["all_controlled_terms"]
}
}
},
{
@@ -97,7 +100,7 @@
"fields": {
"keyword": { "type": "keyword" }
},
"copy_to": ["full_text"]
"copy_to": ["full_text", "all_controlled_labels"]
}
}
},
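Taken together, the two mapping tweaks route data into aggregate fields: every dynamic "*.id" keyword is now also copied into all_controlled_terms, and the text field in the second hunk, which already copied into full_text, now also copies into all_controlled_labels. The sketch below shows the kind of single-field query this enables, written as an Elixir map for consistency with the rest of the diff; the placeholder values, and the assumption that all_controlled_terms is keyword-typed and all_controlled_labels text-typed, are not taken from the repo.

    # Sketch only: placeholder values; assumes all_controlled_terms is keyword-typed
    # and all_controlled_labels is text-typed in the index.
    query_body = %{
      query: %{
        bool: %{
          should: [
            # exact match on any controlled term id copied in by the "*.id" template
            %{term: %{"all_controlled_terms" => "placeholder-term-id"}},
            # full-text match on any controlled term label
            %{match: %{"all_controlled_labels" => "placeholder label"}}
          ]
        }
      }
    }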
test/meadow/data/preservation_check_writer_test.exs (44 changes: 21 additions & 23 deletions)
@@ -64,22 +64,21 @@ defmodule Meadow.Data.PreservationCheckWriterTest do
end

describe "generate_report/1" do
- setup do
+ setup %{file_set_1: file_set_1, file_set_2: file_set_2} do
on_exit(fn ->
delete_object(@preservation_check_bucket, @report_filename)
+ delete_object(@pyramid_bucket, Pairtree.pyramid_path(file_set_1.id))
+ delete_object(@pyramid_bucket, Pairtree.pyramid_path(file_set_2.id))
end)
+
+ :ok
end

@describetag s3: [@preservation_fixture]
test "generates and uploads a preservation check report", %{
file_set_1: file_set_1,
file_set_2: file_set_2
} do
- on_exit(fn ->
- delete_object(@pyramid_bucket, Pairtree.pyramid_path(file_set_1.id))
- delete_object(@pyramid_bucket, Pairtree.pyramid_path(file_set_2.id))
- end)
-
CreatePyramidTiff.process(%{file_set_id: file_set_1.id}, %{})
CreatePyramidTiff.process(%{file_set_id: file_set_2.id}, %{})

@@ -94,11 +93,6 @@ defmodule Meadow.Data.PreservationCheckWriterTest do
file_set_1: file_set_1,
file_set_2: file_set_2
} do
- on_exit(fn ->
- delete_object(@pyramid_bucket, Pairtree.pyramid_path(file_set_1.id))
- delete_object(@pyramid_bucket, Pairtree.pyramid_path(file_set_2.id))
- end)
-
CreatePyramidTiff.process(%{file_set_id: file_set_1.id}, %{})
CreatePyramidTiff.process(%{file_set_id: file_set_2.id}, %{})

@@ -120,11 +114,6 @@ defmodule Meadow.Data.PreservationCheckWriterTest do
file_set_1: file_set_1,
file_set_2: file_set_2
} do
- on_exit(fn ->
- delete_object(@pyramid_bucket, Pairtree.pyramid_path(file_set_1.id))
- delete_object(@pyramid_bucket, Pairtree.pyramid_path(file_set_2.id))
- end)
-
CreatePyramidTiff.process(%{file_set_id: file_set_1.id}, %{})
CreatePyramidTiff.process(%{file_set_id: file_set_2.id}, %{})

@@ -142,20 +131,29 @@ defmodule Meadow.Data.PreservationCheckWriterTest do

@describetag s3: [@preservation_fixture]
test "records an error if pyramid file not found in expected location", %{
- file_set_1: file_set_1,
- file_set_2: file_set_2
+ file_set_1: file_set_1
} do
- on_exit(fn ->
- delete_object(@pyramid_bucket, Pairtree.pyramid_path(file_set_1.id))
- delete_object(@pyramid_bucket, Pairtree.pyramid_path(file_set_2.id))
- end)
-
CreatePyramidTiff.process(%{file_set_id: file_set_1.id}, %{})

assert {:ok, "s3://test-preservation-checks/pres_check.csv", 1} =
PreservationCheckWriter.generate_report(@report_filename)

assert object_exists?(@preservation_check_bucket, @report_filename)
end
+
+ @describetag s3: [@preservation_fixture]
+ test "records an error if file set digests are missing", %{
+ file_set_1: file_set_1,
+ file_set_2: file_set_2
+ } do
+ FileSets.update_file_set(file_set_1, %{metadata: %{digests: nil}})
+ FileSets.update_file_set(file_set_2, %{metadata: %{digests: %{"sha256" => "badsha"}}})
+
+ CreatePyramidTiff.process(%{file_set_id: file_set_1.id}, %{})
+ CreatePyramidTiff.process(%{file_set_id: file_set_2.id}, %{})
+
+ assert {:ok, "s3://test-preservation-checks/pres_check.csv", 2} =
+ PreservationCheckWriter.generate_report(@report_filename)
+ end
end
end
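The test changes consolidate per-test pyramid cleanup into the shared setup block, which now pattern-matches file_set_1 and file_set_2 out of the test context and registers a single on_exit callback for all of them. A generic sketch of that ExUnit pattern, assuming nothing about Meadow beyond what the diff shows; the module, fixture, and test names are hypothetical:

    # Hypothetical example of the setup-with-context pattern used above.
    defmodule SetupContextSketchTest do
      use ExUnit.Case, async: true

      # First setup puts a fixture into the test context.
      setup do
        {:ok, resource: "tmp-resource"}
      end

      # Second setup receives that context and registers cleanup once,
      # instead of repeating on_exit in every test.
      setup %{resource: resource} do
        on_exit(fn -> IO.puts("cleaning up #{resource}") end)
        :ok
      end

      test "fixture is available in the test", %{resource: resource} do
        assert resource == "tmp-resource"
      end
    end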
