Skip to content

Commit

Permalink
lexer: fix prefix
Browse files Browse the repository at this point in the history
  • Loading branch information
rsteube committed Jul 29, 2023
1 parent 791af79 commit f18e5a3
Show file tree
Hide file tree
Showing 7 changed files with 135 additions and 83 deletions.
11 changes: 7 additions & 4 deletions action.go
Original file line number Diff line number Diff line change
Expand Up @@ -188,16 +188,19 @@ func (a Action) split(pipelines bool) Action {
c.Value = tokenset.Tokens[len(tokenset.Tokens)-1]
invoked := a.Invoke(c)
for index, value := range invoked.rawValues {
if !invoked.meta.Nospace.Matches(value.Value) {
if !invoked.meta.Nospace.Matches(value.Value) || strings.Contains(value.Value, " ") { // TODO special characters
switch tokenset.State {
case lexer.OPEN_DOUBLE:
invoked.rawValues[index].Value = fmt.Sprintf(`"%v" `, strings.Replace(value.Value, `"`, `\"`, -1))
invoked.rawValues[index].Value = fmt.Sprintf(`"%v"`, strings.Replace(value.Value, `"`, `\"`, -1))
case lexer.OPEN_SINGLE:
invoked.rawValues[index].Value = fmt.Sprintf(`'%v' `, strings.Replace(value.Value, `'`, `'"'"'`, -1))
invoked.rawValues[index].Value = fmt.Sprintf(`'%v'`, strings.Replace(value.Value, `'`, `'"'"'`, -1))
default:
invoked.rawValues[index].Value = strings.Replace(value.Value, ` `, `\ `, -1) + ` `
invoked.rawValues[index].Value = strings.Replace(value.Value, ` `, `\ `, -1)
}
}
if !invoked.meta.Nospace.Matches(value.Value) {
invoked.rawValues[index].Value += " "
}
}
invoked.Prefix(tokenset.Prefix)
return invoked.ToA().NoSpace()
Expand Down
4 changes: 2 additions & 2 deletions example/cmd/modifier.go
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,7 @@ func init() {
cmd.Flags().StringP("string", "s", "", "string flag")

carapace.Gen(cmd).FlagCompletion(carapace.ActionMap{
"string": carapace.ActionValues("one", "two", "three"),
"string": carapace.ActionValues("one", "two", "three with space"),
})

carapace.Gen(cmd).PositionalCompletion(
Expand All @@ -149,7 +149,7 @@ func init() {
cmd.Flags().StringP("string", "s", "", "string flag")

carapace.Gen(cmd).FlagCompletion(carapace.ActionMap{
"string": carapace.ActionValues("one", "two", "three"),
"string": carapace.ActionValues("one", "two", "three with space"),
})

carapace.Gen(cmd).PositionalCompletion(
Expand Down
41 changes: 21 additions & 20 deletions example/cmd/modifier_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -181,26 +181,6 @@ func TestSplit(t *testing.T) {
Usage("Split()").
Tag("files"))

s.Run("modifier", "--split", "pos1 \"").
Expect(carapace.ActionValues(
"subdir/",
).StyleF(style.ForPathExt).
Prefix("pos1 ").
Suffix("\"").
NoSpace('*').
Usage("Split()").
Tag("files"))

s.Run("modifier", "--split", "pos1 '").
Expect(carapace.ActionValues(
"subdir/",
).StyleF(style.ForPathExt).
Prefix("pos1 ").
Suffix("'").
NoSpace('*').
Usage("Split()").
Tag("files"))

s.Run("modifier", "--split", "pos1 --").
Expect(carapace.ActionStyledValuesDescribed(
"--bool", "bool flag", style.Default,
Expand Down Expand Up @@ -237,5 +217,26 @@ func TestSplit(t *testing.T) {
Suffix("' ").
NoSpace('*').
Usage("bool flag"))

t.Skip("skipping test that don't work yet") // TODO these need to work
s.Run("modifier", "--split", "pos1 \"").
Expect(carapace.ActionValues(
"subdir/",
).StyleF(style.ForPathExt).
Prefix("pos1 \"").
Suffix("\"").
NoSpace('*').
Usage("Split()").
Tag("files"))

s.Run("modifier", "--split", "pos1 '").
Expect(carapace.ActionValues(
"subdir/",
).StyleF(style.ForPathExt).
Prefix("pos1 '").
Suffix("'").
NoSpace('*').
Usage("Split()").
Tag("files"))
})
}
28 changes: 12 additions & 16 deletions internal/lexer/lexer.go
Original file line number Diff line number Diff line change
Expand Up @@ -23,32 +23,22 @@ type Tokenset struct {
func Split(s string, pipelines bool) (*Tokenset, error) {
tokenset, err := split(s, pipelines)
if err != nil && err.Error() == "EOF found when expecting closing quote" {
tokenset, err = split(s+`_"`, pipelines)
tokenset, err = split(s+`"`, pipelines)
if err == nil {
last := tokenset.Tokens[len(tokenset.Tokens)-1]
tokenset.Tokens[len(tokenset.Tokens)-1] = last[:len(last)-1]
tokenset.Prefix = tokenset.Prefix[:len(tokenset.Prefix)-1]
tokenset.State = OPEN_DOUBLE
}
}
if err != nil && err.Error() == "EOF found when expecting closing quote" {
tokenset, err = split(s+`_'`, pipelines)
tokenset, err = split(s+`'`, pipelines)
if err == nil {
last := tokenset.Tokens[len(tokenset.Tokens)-1]
tokenset.Tokens[len(tokenset.Tokens)-1] = last[:len(last)-1]
tokenset.Prefix = tokenset.Prefix[:len(tokenset.Prefix)-1]
tokenset.State = OPEN_SINGLE
}
}
return tokenset, err
}

func split(s string, pipelines bool) (*Tokenset, error) {
f := shlex.Split
if pipelines {
f = shlex.SplitP
}
splitted, err := f(s)
splitted, prefix, err := shlex.SplitP(s, pipelines)
if strings.HasSuffix(s, " ") {
splitted = append(splitted, "")
}
Expand All @@ -59,8 +49,14 @@ func split(s string, pipelines bool) (*Tokenset, error) {
if len(splitted) == 0 {
splitted = []string{""}
}
return &Tokenset{

if len(splitted[len(splitted)-1]) == 0 {
prefix = s
}

t := &Tokenset{
Tokens: splitted,
Prefix: s[:strings.LastIndex(s, splitted[len(splitted)-1])],
}, nil
Prefix: prefix,
}
return t, nil
}
41 changes: 41 additions & 0 deletions internal/lexer/lexer_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,16 @@ func TestSplit(t *testing.T) {
Prefix: ` `,
})

_test(`example `, Tokenset{
Tokens: []string{"example", ""},
Prefix: `example `,
})

_test(` example `, Tokenset{
Tokens: []string{"example", ""},
Prefix: ` example `,
})

_test(`"example`, Tokenset{
Tokens: []string{"example"},
State: OPEN_DOUBLE,
Expand Down Expand Up @@ -121,4 +131,35 @@ func TestSplit(t *testing.T) {
Tokens: []string{"echo", ""},
Prefix: `example 'action' -- & echo `,
})

_test(`example 'single with space`, Tokenset{
Tokens: []string{"example", "single with space"},
Prefix: `example `,
State: OPEN_SINGLE,
})

_test(`example "double with space`, Tokenset{
Tokens: []string{"example", "double with space"},
Prefix: `example `,
State: OPEN_DOUBLE,
})

_test(`example "double with \"space`, Tokenset{
Tokens: []string{"example", "double with \"space"},
Prefix: `example `,
State: OPEN_DOUBLE,
})

t.Skip("skipping test that don't work yet") // TODO these need to work
_test(`example "`, Tokenset{
Tokens: []string{"example", ""},
Prefix: `example `,
State: OPEN_DOUBLE,
})

_test(`example '`, Tokenset{
Tokens: []string{"example", ""},
Prefix: `example `,
State: OPEN_SINGLE,
})
}
Loading

0 comments on commit f18e5a3

Please sign in to comment.