mirror of https://github.com/antonmedv/fx
synced 2024-11-11 07:10:28 +00:00
114 lines · 3.1 KiB · Go
|
/*
Copyright 2012 Google Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

	http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package shlex
|
||
|
|
||
|
import (
|
||
|
"strings"
|
||
|
"testing"
|
||
|
|
||
|
"github.com/stretchr/testify/require"
|
||
|
)
|
||
|
|
||
|
var (
	// testString is the shared input for the tokenizer, lexer, and Split
	// tests below. Unescaped, the first line reads:
	// one two "three four" "five \"six\"" seven#eight # nine # ten
	// eleven 'twelve\'
	// The trailing thirteen=13 and fourteen/14 cover '=' and '/' inside words.
	testString = "one two \"three four\" \"five \\\"six\\\"\" seven#eight # nine # ten\n eleven 'twelve\\' thirteen=13 fourteen/14"
)
|
||
|
|
||
|
func TestClassifier(t *testing.T) {
|
||
|
classifier := newDefaultClassifier()
|
||
|
tests := map[rune]runeTokenClass{
|
||
|
' ': spaceRuneClass,
|
||
|
'"': escapingQuoteRuneClass,
|
||
|
'\'': nonEscapingQuoteRuneClass,
|
||
|
'#': commentRuneClass}
|
||
|
for runeChar, want := range tests {
|
||
|
got := classifier.ClassifyRune(runeChar)
|
||
|
if got != want {
|
||
|
t.Errorf("ClassifyRune(%v) -> %v. Want: %v", runeChar, got, want)
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
func TestTokenizer(t *testing.T) {
|
||
|
testInput := strings.NewReader(testString)
|
||
|
expectedTokens := []*Token{
|
||
|
&Token{WordToken, "one"},
|
||
|
&Token{WordToken, "two"},
|
||
|
&Token{WordToken, "three four"},
|
||
|
&Token{WordToken, "five \"six\""},
|
||
|
&Token{WordToken, "seven#eight"},
|
||
|
&Token{CommentToken, " nine # ten"},
|
||
|
&Token{WordToken, "eleven"},
|
||
|
&Token{WordToken, "twelve\\"},
|
||
|
&Token{WordToken, "thirteen=13"},
|
||
|
&Token{WordToken, "fourteen/14"}}
|
||
|
|
||
|
tokenizer := NewTokenizer(testInput)
|
||
|
for i, want := range expectedTokens {
|
||
|
got, err := tokenizer.Next()
|
||
|
if err != nil {
|
||
|
t.Error(err)
|
||
|
}
|
||
|
if !got.Equal(want) {
|
||
|
t.Errorf("Tokenizer.Next()[%v] of %q -> %v. Want: %v", i, testString, got, want)
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
func TestLexer(t *testing.T) {
|
||
|
testInput := strings.NewReader(testString)
|
||
|
expectedStrings := []string{"one", "two", "three four", "five \"six\"", "seven#eight", "eleven", "twelve\\", "thirteen=13", "fourteen/14"}
|
||
|
|
||
|
lexer := NewLexer(testInput)
|
||
|
for i, want := range expectedStrings {
|
||
|
got, err := lexer.Next()
|
||
|
if err != nil {
|
||
|
t.Error(err)
|
||
|
}
|
||
|
if got != want {
|
||
|
t.Errorf("Lexer.Next()[%v] of %q -> %v. Want: %v", i, testString, got, want)
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
func TestSplit(t *testing.T) {
|
||
|
want := []string{"one", "two", "three four", "five \"six\"", "seven#eight", "eleven", "twelve\\", "thirteen=13", "fourteen/14"}
|
||
|
got, err := Split(testString)
|
||
|
if err != nil {
|
||
|
t.Error(err)
|
||
|
}
|
||
|
if len(want) != len(got) {
|
||
|
t.Errorf("Split(%q) -> %v. Want: %v", testString, got, want)
|
||
|
}
|
||
|
for i := range got {
|
||
|
if got[i] != want[i] {
|
||
|
t.Errorf("Split(%q)[%v] -> %v. Want: %v", testString, i, got[i], want[i])
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
func TestSplit_unfinished(t *testing.T) {
|
||
|
got, err := Split("one 'two")
|
||
|
require.NoError(t, err)
|
||
|
require.Equal(t, []string{"one", "two"}, got)
|
||
|
|
||
|
got, err = Split("one \"two")
|
||
|
require.NoError(t, err)
|
||
|
require.Equal(t, []string{"one", "two"}, got)
|
||
|
}
|