From 3ce85bce7ddcc33e2c1fa146e51c620449f822f6 Mon Sep 17 00:00:00 2001
From: maslowalex
Date: Mon, 1 Mar 2021 16:52:48 +0200
Subject: [PATCH] Initial

---
 lib/crawly_ui/queries/log.ex            | 20 ++++++++++
 .../live/available_spider_live.ex       |  2 +-
 test/crawly_ui/queries/log_test.exs     | 37 +++++++++++++++++++
 .../helpers/pagination_helpers_test.exs |  8 ++++
 4 files changed, 66 insertions(+), 1 deletion(-)
 create mode 100644 test/crawly_ui/queries/log_test.exs

diff --git a/lib/crawly_ui/queries/log.ex b/lib/crawly_ui/queries/log.ex
index c46398f..d06ca63 100644
--- a/lib/crawly_ui/queries/log.ex
+++ b/lib/crawly_ui/queries/log.ex
@@ -30,6 +30,26 @@ defmodule CrawlyUI.Queries.Log do
     |> Repo.all()
   end
 
+  def paginate(%Log{id: id, inserted_at: inserted_at}, job_id, opts \\ [items_per_page: 5, filter: "all"]) do
+    maybe_extra_filter =
+      case opts[:filter] do
+        "all" -> ""
+        f -> "and category = '#{f}'"
+      end
+
+    %{columns: columns, rows: rows} =
+      CrawlyUI.Repo.query!("""
+      select * from logs
+      where job_id = #{job_id}
+      and (inserted_at, id) < ('#{inserted_at}'::timestamp, #{id})
+      #{maybe_extra_filter}
+      order by inserted_at desc, id desc
+      fetch first #{opts[:items_per_page]} rows only
+      """)
+
+    Enum.map(rows, fn row -> CrawlyUI.Repo.load(Log, Enum.zip(columns, row)) end)
+  end
+
   defp maybe_apply_category_filters(query, "all"), do: query
 
   defp maybe_apply_category_filters(query, filter) do
diff --git a/lib/crawly_ui_web/live/available_spider_live.ex b/lib/crawly_ui_web/live/available_spider_live.ex
index 6cbf130..8b8b76c 100644
--- a/lib/crawly_ui_web/live/available_spider_live.ex
+++ b/lib/crawly_ui_web/live/available_spider_live.ex
@@ -1,5 +1,5 @@
 defmodule CrawlyUIWeb.AvailableSpiderLive do
-  @moduledoc """ 
+  @moduledoc """
   Live view module to display a list of coded (opposite to visual) spiders from all nodes.
   """
   use Phoenix.LiveView, layout: {CrawlyUIWeb.LayoutView, "live.html"}
diff --git a/test/crawly_ui/queries/log_test.exs b/test/crawly_ui/queries/log_test.exs
new file mode 100644
index 0000000..8374e33
--- /dev/null
+++ b/test/crawly_ui/queries/log_test.exs
@@ -0,0 +1,37 @@
+defmodule CrawlyUI.Queries.LogTest do
+  use CrawlyUI.DataCase
+
+  import Ecto.Query, warn: false
+
+  alias CrawlyUI.Repo
+
+  describe "#paginate" do
+    setup do
+      [job: insert_job()]
+    end
+
+    test "with inserted_at returns logs next to specified", %{job: job} do
+      log1 = insert_log(job.id)
+      log2 = insert_log(job.id)
+      log3 = insert_log(job.id)
+      log4 = insert_log(job.id)
+      log5 = insert_log(job.id)
+      log6 = insert_log(job.id)
+      log7 = insert_log(job.id)
+      log8 = insert_log(job.id)
+      log9 = insert_log(job.id)
+      log10 = insert_log(job.id)
+
+      assert CrawlyUI.Queries.Log.paginate(log10, job.id) == [log9, log8, log7, log6, log5]
+      assert CrawlyUI.Queries.Log.paginate(log5, job.id) == [log4, log3, log2, log1]
+    end
+
+    test "it accepts rows argument", %{job: _job} do
+
+    end
+
+    test "it accepts filters argument", %{job: _job} do
+
+    end
+  end
+end
diff --git a/test/crawly_ui_web/helpers/pagination_helpers_test.exs b/test/crawly_ui_web/helpers/pagination_helpers_test.exs
index ea5ea9e..d46fc02 100644
--- a/test/crawly_ui_web/helpers/pagination_helpers_test.exs
+++ b/test/crawly_ui_web/helpers/pagination_helpers_test.exs
@@ -54,4 +54,12 @@ defmodule CrawlyUIWeb.PaginationHelpersTest do
   defp match_page(pagination, page) do
     assert String.contains?(pagination, Integer.to_string(page))
   end
+
+  test "id_based pagination" do
+    pagination =
+      PaginationHelpers.id_based(last_id: 1, first_id: 50, previous_page: false, next_page: true)
+
+    assert String.contains?(pagination, "?last_id=50")
+    assert String.contains?(pagination, "?first_id=1")
+  end
 end