summary refs log tree commit diff
path: root/src/tokenizer_test.go
diff options
context:
space:
mode:
author    Junegunn Choi <junegunn.c@gmail.com>  2015-01-02 04:49:30 +0900
committer Junegunn Choi <junegunn.c@gmail.com>  2015-01-04 00:37:29 +0900
commit    f3177305d5572b26f135fc045481358b4eb1bf69 (patch)
tree      d59fd9587e44e998581a131875bf45e243df6c6e /src/tokenizer_test.go
parent    7ba93d9f8351be64b37c65ae04d594ee261d5d26 (diff)
download  fzf-f3177305d5572b26f135fc045481358b4eb1bf69.tar.gz
Rewrite fzf in Go
Diffstat (limited to 'src/tokenizer_test.go')
-rw-r--r--  src/tokenizer_test.go  97
1 files changed, 97 insertions, 0 deletions
diff --git a/src/tokenizer_test.go b/src/tokenizer_test.go
new file mode 100644
index 00000000..ed77efe9
--- /dev/null
+++ b/src/tokenizer_test.go
@@ -0,0 +1,97 @@
+package fzf
+
+import "testing"
+
+func TestParseRange(t *testing.T) {
+ {
+ i := ".."
+ r, _ := ParseRange(&i)
+ if r.begin != RANGE_ELLIPSIS || r.end != RANGE_ELLIPSIS {
+ t.Errorf("%s", r)
+ }
+ }
+ {
+ i := "3.."
+ r, _ := ParseRange(&i)
+ if r.begin != 3 || r.end != RANGE_ELLIPSIS {
+ t.Errorf("%s", r)
+ }
+ }
+ {
+ i := "3..5"
+ r, _ := ParseRange(&i)
+ if r.begin != 3 || r.end != 5 {
+ t.Errorf("%s", r)
+ }
+ }
+ {
+ i := "-3..-5"
+ r, _ := ParseRange(&i)
+ if r.begin != -3 || r.end != -5 {
+ t.Errorf("%s", r)
+ }
+ }
+ {
+ i := "3"
+ r, _ := ParseRange(&i)
+ if r.begin != 3 || r.end != 3 {
+ t.Errorf("%s", r)
+ }
+ }
+}
+
+func TestTokenize(t *testing.T) {
+ // AWK-style
+ input := " abc: def: ghi "
+ tokens := Tokenize(&input, nil)
+ if *tokens[0].text != "abc: " || tokens[0].prefixLength != 2 {
+ t.Errorf("%s", tokens)
+ }
+
+ // With delimiter
+ tokens = Tokenize(&input, delimiterRegexp(":"))
+ if *tokens[0].text != " abc:" || tokens[0].prefixLength != 0 {
+ t.Errorf("%s", tokens)
+ }
+}
+
+func TestTransform(t *testing.T) {
+ input := " abc: def: ghi: jkl"
+ {
+ tokens := Tokenize(&input, nil)
+ {
+ ranges := splitNth("1,2,3")
+ tx := Transform(tokens, ranges)
+ if *tx.whole != "abc: def: ghi: " {
+ t.Errorf("%s", *tx)
+ }
+ }
+ {
+ ranges := splitNth("1..2,3,2..,1")
+ tx := Transform(tokens, ranges)
+ if *tx.whole != "abc: def: ghi: def: ghi: jklabc: " ||
+ len(tx.parts) != 4 ||
+ *tx.parts[0].text != "abc: def: " || tx.parts[0].prefixLength != 2 ||
+ *tx.parts[1].text != "ghi: " || tx.parts[1].prefixLength != 14 ||
+ *tx.parts[2].text != "def: ghi: jkl" || tx.parts[2].prefixLength != 8 ||
+ *tx.parts[3].text != "abc: " || tx.parts[3].prefixLength != 2 {
+ t.Errorf("%s", *tx)
+ }
+ }
+ }
+ {
+ tokens := Tokenize(&input, delimiterRegexp(":"))
+ {
+ ranges := splitNth("1..2,3,2..,1")
+ tx := Transform(tokens, ranges)
+ if *tx.whole != " abc: def: ghi: def: ghi: jkl abc:" ||
+ len(tx.parts) != 4 ||
+ *tx.parts[0].text != " abc: def:" || tx.parts[0].prefixLength != 0 ||
+ *tx.parts[1].text != " ghi:" || tx.parts[1].prefixLength != 12 ||
+ *tx.parts[2].text != " def: ghi: jkl" || tx.parts[2].prefixLength != 6 ||
+ *tx.parts[3].text != " abc:" || tx.parts[3].prefixLength != 0 {
+ t.Errorf("%s", *tx)
+ }
+ }
+ }
+}