
Commit f22a2ef

Add tests for info, removing Char
1 parent 7ba0b5d commit f22a2ef

File tree: 3 files changed, +28 -17 lines

lib/tokenizers/decode_stream.ex (6 additions, 16 deletions)

@@ -28,30 +28,20 @@ defmodule Tokenizers.DecodeStream do
   Returns `{:error, reason}` if an error occurs during decoding.
   """
   def step(%__MODULE__{} = decode_stream, tokenizer, id) when is_integer(id) do
-    case Tokenizers.Native.decoder_stream_step(decode_stream, tokenizer, id) do
-      {:ok, result} -> {:ok, result}
-      {:error, reason} -> {:error, reason}
-    end
+    Tokenizers.Native.decoder_stream_step(decode_stream, tokenizer, id)
   end

   @doc """
   Returns information about the decode stream state.
   """
-  def info(%__MODULE__{} = decode_stream) do
-    Tokenizers.Native.decoder_stream_info(decode_stream)
-  end
+  defdelegate info(decode_stream), to: Tokenizers.Native, as: :decoder_stream_info

   defimpl Inspect do
-    def inspect(decode_stream, _opts) do
-      info = Tokenizers.DecodeStream.info(decode_stream)
-      "#Tokenizers.DecodeStream<#{inspect(info)}>"
-    end
-  end
+    import Inspect.Algebra
+    alias Tokenizers.DecodeStream

-  defimpl String.Chars do
-    def to_string(decode_stream) do
-      info = Tokenizers.DecodeStream.info(decode_stream)
-      "#Tokenizers.DecodeStream<#{inspect(info)}>"
+    def inspect(decode_stream, opts) do
+      "#Tokenizers.DecodeStream<#{to_doc(DecodeStream.info(decode_stream), opts)}>"
     end
   end
 end
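
A quick usage sketch of the refactored module, not part of the commit; new/1 and the
returned info map come from the tests added further down in this commit:

    # Sketch only. new/1 takes the skip_special_tokens flag, as in the new tests.
    ds = Tokenizers.DecodeStream.new(true)

    # info/1 now delegates straight to the NIF via defdelegate.
    Tokenizers.DecodeStream.info(ds)
    #=> %{"skip_special_tokens" => true}

    # With the String.Chars implementation removed, interpolating a decode stream
    # ("#{ds}") now raises Protocol.UndefinedError; use inspect/1 instead.

Dropping String.Chars removes to_string/1 support for decode streams entirely, which
appears to be the "removing Char" part of the commit message.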

lib/tokenizers/native.ex (2 additions, 1 deletion)

@@ -10,7 +10,8 @@ defmodule Tokenizers.Native do
     crate: "ex_tokenizers",
     version: version,
     base_url: "#{github_url}/releases/download/v#{version}",
-    force_build: System.get_env("TOKENIZERS_BUILD") in ["1", "true"]
+    # force_build: System.get_env("TOKENIZERS_BUILD") in ["1", "true"]
+    force_build: true

   # Added tokens
   def added_token_new(_token, _opts), do: err()
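
In RustlerPrecompiled, force_build: true compiles the Rust crate from source on every
build instead of downloading a precompiled NIF. The env-var toggle this commit comments
out is the usual pattern; a minimal sketch of that form is below, with placeholder
module name, version, and URL (none of these values come from the commit itself):

    defmodule Example.Native do
      # Sketch only: placeholder version and URL. It mirrors the env-var toggle that
      # this commit comments out, so TOKENIZERS_BUILD=1 forces a source build.
      version = "0.0.0"
      github_url = "https://github.com/example/tokenizers"

      use RustlerPrecompiled,
        otp_app: :tokenizers,
        crate: "ex_tokenizers",
        version: version,
        base_url: "#{github_url}/releases/download/v#{version}",
        force_build: System.get_env("TOKENIZERS_BUILD") in ["1", "true"]
    end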

test/tokenizers/decode_stream_test.exs (20 additions, 0 deletions)

@@ -64,4 +64,24 @@ defmodule Tokenizers.DecodeStreamTest do
       {:ok, " This"} = Tokenizers.DecodeStream.step(ds, tk, 1)
     end
   end
+
+  describe "DecodeStream info" do
+    test "skip_special_tokens false" do
+      assert Tokenizers.DecodeStream.info(Tokenizers.DecodeStream.new(false)) == %{
+               "skip_special_tokens" => false
+             }
+    end
+
+    test "skip_special_tokens true" do
+      assert Tokenizers.DecodeStream.info(Tokenizers.DecodeStream.new(true)) == %{
+               "skip_special_tokens" => true
+             }
+    end
+
+    test "default DecodeStream" do
+      assert Tokenizers.DecodeStream.info(Tokenizers.DecodeStream.new()) == %{
+               "skip_special_tokens" => false
+             }
+    end
+  end
 end
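
The new tests exercise info/1 only. Below is a rough sketch of how step/3 and the info
map fit together in a decoding loop; it assumes the library's
Tokenizers.Tokenizer.from_pretrained/1, and the tokenizer name, token ids, and the
non-binary {:ok, _} clause are placeholders or assumptions, not taken from this commit:

    # Placeholder tokenizer and ids; only the DecodeStream calls mirror this commit.
    {:ok, tokenizer} = Tokenizers.Tokenizer.from_pretrained("bert-base-cased")
    ds = Tokenizers.DecodeStream.new()

    ids = [101, 2023, 2003, 1037, 3231, 102]

    decoded =
      Enum.reduce(ids, "", fn id, acc ->
        case Tokenizers.DecodeStream.step(ds, tokenizer, id) do
          # A produced chunk is appended to the running string.
          {:ok, chunk} when is_binary(chunk) -> acc <> chunk
          # Assumption: any other :ok value means no text was emitted for this id yet.
          {:ok, _none_yet} -> acc
          {:error, reason} -> raise "decode failed: #{inspect(reason)}"
        end
      end)

    IO.puts(decoded)

    # The default stream keeps special tokens, as asserted in the new tests.
    %{"skip_special_tokens" => false} = Tokenizers.DecodeStream.info(ds)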

0 commit comments
