|
defmodule Transport.Test.Transport.Jobs.GTFSImportStopsTest do
  use ExUnit.Case, async: true
  use Oban.Testing, repo: DB.Repo
  import DB.Factory
  import Mox
  import Ecto.Query

  setup :verify_on_exit!

  setup do
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(DB.Repo)
  end

  # All `data_import` ids currently stored, oldest first.
  def data_import_ids do
    DB.DataImport
    |> select([di], di.id)
    |> order_by([di], asc: di.id)
    |> DB.Repo.all()
  end

  # Expects exactly one S3 unzip call for `zip_filename` and serves the
  # content of a local fixture archive instead of reaching the bucket.
  # NOTE: it will be possible to reuse common code from Transport.Unzip.S3 in there
  def setup_mox(zip_filename) do
    Transport.Unzip.S3.Mock
    |> expect(:get_file_stream, fn requested_file, requested_zip, requested_bucket ->
      # the zip name must match the resource history payload
      assert requested_zip == zip_filename
      # the bucket name comes from the application config
      assert requested_bucket == "transport-data-gouv-fr-resource-history-test"

      # stream the requested entry out of a local fixture file
      fixture = Unzip.LocalFile.open("#{__DIR__}/../../fixture/files/gtfs_import.zip")
      {:ok, unzip} = Unzip.new(fixture)
      Unzip.file_stream!(unzip, requested_file)
    end)
  end

  test "import stops" do
    %{id: dataset_id} = insert(:dataset, %{datagouv_id: "xxx", datagouv_title: "coucou"})
    %{id: resource_id} = insert(:resource, dataset_id: dataset_id)

    %{id: resource_history_id} =
      insert(:resource_history, %{resource_id: resource_id, payload: %{"filename" => "some-file.zip"}})

    # a first import creates a single data_import row
    setup_mox("some-file.zip")
    assert data_import_ids() == []
    import_1 = Transport.GTFSImportStops.import_stops_and_remove_previous(resource_history_id)
    assert data_import_ids() == [import_1]

    # re-importing the same resource_history_id replaces the previous import
    setup_mox("some-file.zip")
    import_2 = Transport.GTFSImportStops.import_stops_and_remove_previous(resource_history_id)
    assert data_import_ids() == [import_2]

    # importing a newer resource_history of the same resource also wipes older imports
    %{id: new_resource_history_id} =
      insert(:resource_history, %{resource_id: resource_id, payload: %{"filename" => "some-new-file.zip"}})

    setup_mox("some-new-file.zip")
    import_3 = Transport.GTFSImportStops.import_stops_and_remove_previous(new_resource_history_id)
    assert data_import_ids() == [import_3]

    # imports belonging to a different resource are left untouched
    setup_mox("some-other-file.zip")
    %{id: other_dataset_id} = insert(:dataset, %{datagouv_id: "yyy"})
    %{id: other_resource_id} = insert(:resource, dataset_id: other_dataset_id)

    %{id: other_resource_history_id} =
      insert(:resource_history, %{resource_id: other_resource_id, payload: %{"filename" => "some-other-file.zip"}})

    other_import = Transport.GTFSImportStops.import_stops_and_remove_previous(other_resource_history_id)

    assert data_import_ids() == [import_3, other_import]

    # one more import on the first resource: only its own previous import is removed
    %{id: new_resource_history_id} =
      insert(:resource_history, %{resource_id: resource_id, payload: %{"filename" => "some-new-file.zip"}})

    setup_mox("some-new-file.zip")
    import_4 = Transport.GTFSImportStops.import_stops_and_remove_previous(new_resource_history_id)
    assert data_import_ids() == [other_import, import_4]
  end
end
0 commit comments