Mirror of https://github.com/elastic/logstash.git (synced 2025-04-23 22:27:21 -04:00)

Parent: c09481897c
Commit: 42830bd09b

1 changed file with 18 additions and 25 deletions
@@ -4,35 +4,28 @@ require "logstash/util/buftok"
 describe FileWatch::BufferedTokenizer do
+  context "test" do
+    subject { FileWatch::BufferedTokenizer.new }
 
-  it "should tokenize a single token" do
-    t = FileWatch::BufferedTokenizer.new
-    expect(t.extract("foo\n")).to eq(["foo"])
-  end
+    it "should tokenize a single token" do
+      expect(subject.extract("foo\n")).to eq(["foo"])
+    end
 
-  it "should merge multiple token" do
-    t = FileWatch::BufferedTokenizer.new
-    expect(t.extract("foo")).to eq([])
-    expect(t.extract("bar\n")).to eq(["foobar"])
-  end
+    it "should merge multiple token" do
+      expect(subject.extract("foo")).to eq([])
+      expect(subject.extract("bar\n")).to eq(["foobar"])
+    end
 
-  it "should tokenize multiple token" do
-    t = FileWatch::BufferedTokenizer.new
-    expect(t.extract("foo\nbar\n")).to eq(["foo", "bar"])
-  end
+    it "should tokenize multiple token" do
+      expect(subject.extract("foo\nbar\n")).to eq(["foo", "bar"])
+    end
 
-  it "should ignore empty payload" do
-    t = FileWatch::BufferedTokenizer.new
-    expect(t.extract("")).to eq([])
-    expect(t.extract("foo\nbar")).to eq(["foo"])
-  end
-
-  it "should tokenize empty payload with newline" do
-    t = FileWatch::BufferedTokenizer.new
-    expect(t.extract("\n")).to eq([""])
-    expect(t.extract("\n\n\n")).to eq(["", "", ""])
-  end
+    it "should ignore empty payload" do
+      expect(subject.extract("")).to eq([])
+      expect(subject.extract("foo\nbar")).to eq(["foo"])
+    end
+
+    it "should tokenize empty payload with newline" do
+      expect(subject.extract("\n")).to eq([""])
+      expect(subject.extract("\n\n\n")).to eq(["", "", ""])
+    end
+  end
 end
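For context, a minimal usage sketch of the line-buffering behavior these specs exercise; the calls and return values mirror the assertions above, and the variable name `tokenizer` is just for illustration:

require "logstash/util/buftok"

# Partial input is buffered until a newline arrives; each extract call
# returns only the tokens completed by that chunk.
tokenizer = FileWatch::BufferedTokenizer.new
tokenizer.extract("foo")        #=> []                 (no newline yet, data is buffered)
tokenizer.extract("bar\n")      #=> ["foobar"]         (buffered data plus chunk form one token)
tokenizer.extract("foo\nbar\n") #=> ["foo", "bar"]     (two complete tokens in one chunk)
tokenizer.extract("\n")         #=> [""]               (a bare newline yields an empty token)

The change itself swaps the per-example FileWatch::BufferedTokenizer.new setup for an RSpec subject block, removing the duplicated instantiation while still giving each example a fresh tokenizer.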