Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 20 additions & 0 deletions lib/crawly_ui/queries/log.ex
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,26 @@ defmodule CrawlyUI.Queries.Log do
|> Repo.all()
end

@doc """
Keyset-paginates the `logs` table for `job_id`, returning the page of logs
strictly older than the given `%Log{}` cursor.

Results are ordered `inserted_at desc, id desc`, so the keyset condition
compares the tuple `(inserted_at, id)` — matching the sort order — against
the cursor's values.

## Options

  * `:items_per_page` — page size (default `5`)
  * `:filter` — log category to restrict to; `"all"` disables filtering
    (default `"all"`)

Returns a list of `%Log{}` structs loaded via `Repo.load/2`.
"""
def paginate(%Log{id: id, inserted_at: inserted_at}, job_id, opts \\ []) do
  # Merge rather than replace defaults, so a caller passing only `filter:`
  # still gets a sane page size (the old default-list approach nil'd it out).
  opts = Keyword.merge([items_per_page: 5, filter: "all"], opts)

  # Build the optional category predicate and the bind-parameter list
  # together; all values go through placeholders — never interpolate user
  # input into SQL (the previous version was injectable and also produced
  # invalid SQL via doubled single quotes).
  {category_sql, params} =
    case opts[:filter] do
      "all" -> {"", [job_id, inserted_at, id, opts[:items_per_page]]}
      f -> {"and category = $5", [job_id, inserted_at, id, opts[:items_per_page], f]}
    end

  %{columns: columns, rows: rows} =
    CrawlyUI.Repo.query!(
      """
      select * from logs
      where job_id = $1
      and (inserted_at, id) < ($2::timestamp, $3)
      #{category_sql}
      order by inserted_at desc, id desc
      fetch first $4 rows only
      """,
      params
    )

  # Raw rows come back positionally; zip with column names so Repo.load/2
  # can hydrate proper %Log{} structs.
  Enum.map(rows, fn row -> CrawlyUI.Repo.load(Log, Enum.zip(columns, row)) end)
end

# "all" means no category restriction — pass the query through unchanged.
defp maybe_apply_category_filters(query, "all"), do: query

defp maybe_apply_category_filters(query, filter) do
Expand Down
2 changes: 1 addition & 1 deletion lib/crawly_ui_web/live/available_spider_live.ex
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
defmodule CrawlyUIWeb.AvailableSpiderLive do
@moduledoc """
@moduledoc """
Live view module to display a list of coded (opposite to visual) spiders from all nodes.
"""
use Phoenix.LiveView, layout: {CrawlyUIWeb.LayoutView, "live.html"}
Expand Down
37 changes: 37 additions & 0 deletions test/crawly_ui/queries/log_test.exs
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
defmodule CrawlyUI.Queries.LogTest do
  use CrawlyUI.DataCase

  import Ecto.Query, warn: false

  alias CrawlyUI.Repo

  describe "#paginate" do
    setup do
      # Every test in this describe block gets a freshly inserted job.
      [job: insert_job()]
    end

    test "with inserted_at returns logs next to specified", %{job: job} do
      # Ten logs inserted in order; insertion order determines both id and
      # inserted_at, so pagination from a cursor walks backwards through them.
      log1 = insert_log(job.id)
      log2 = insert_log(job.id)
      log3 = insert_log(job.id)
      log4 = insert_log(job.id)
      log5 = insert_log(job.id)
      log6 = insert_log(job.id)
      log7 = insert_log(job.id)
      log8 = insert_log(job.id)
      log9 = insert_log(job.id)
      log10 = insert_log(job.id)

      # Default page size is 5: a full page from the newest cursor...
      assert CrawlyUI.Queries.Log.paginate(log10, job.id) == [log9, log8, log7, log6, log5]
      # ...and a short final page when fewer than 5 remain.
      assert CrawlyUI.Queries.Log.paginate(log5, job.id) == [log4, log3, log2, log1]
    end

    # TODO(review): test body not written yet — currently passes vacuously.
    # Unused context binding is underscored to avoid a compiler warning.
    test "it accepts rows argument", %{job: _job} do
    end

    # TODO(review): test body not written yet — currently passes vacuously.
    test "it accepts filters argument", %{job: _job} do
    end
  end
end
8 changes: 8 additions & 0 deletions test/crawly_ui_web/helpers/pagination_helpers_test.exs
Original file line number Diff line number Diff line change
Expand Up @@ -54,4 +54,12 @@ defmodule CrawlyUIWeb.PaginationHelpersTest do
# Asserts that the rendered pagination markup contains the page number.
# `=~` with a string right-hand side is a substring check, and gives
# ExUnit a richer failure diff than a bare `String.contains?/2` boolean.
defp match_page(pagination, page) do
  assert pagination =~ Integer.to_string(page)
end

test "id_based pagination" do
  # Render id-keyset pagination links for a page spanning ids 1..50,
  # where only a "next" link should be emitted.
  # NOTE(review): assumes id_based/1 builds query strings from the first/last
  # ids of the current page — confirm against the helper's implementation.
  pagination =
    PaginationHelpers.id_based(last_id: 1, first_id: 50, previous_page: false, next_page: true)

  assert String.contains?(pagination, "?last_id=50")
  # The original test asserted this exact line twice; the duplicate is removed.
  assert String.contains?(pagination, "?first_id=1")
end
end