Skip to content

Commit

Permalink
Add --accept-nth option to transform the output
Browse files Browse the repository at this point in the history
This option can be used to replace a sed or awk command in the post-processing step.

  ps -ef | fzf --multi --header-lines 1 | awk '{print $2}'
  ps -ef | fzf --multi --header-lines 1 --accept-nth 2

This may not be a very "Unix-y" thing to do, so I've always felt that fzf
shouldn't have such an option, but I've finally changed my mind because:

* fzf can be configured with a custom delimiter that is a fixed string
  or a regular expression.
* In such cases, you'd need to repeat the delimiter again in the
  post-processing step.
* Also, tools like awk or sed may interpret a regular expression
  differently, causing mismatches.

You can still use sed, cut, or awk if you prefer.

Close #3987
Close #1323
  • Loading branch information
junegunn committed Feb 9, 2025
1 parent a1994ff commit 2b58458
Show file tree
Hide file tree
Showing 9 changed files with 113 additions and 10 deletions.
4 changes: 4 additions & 0 deletions man/man1/fzf.1
Original file line number Diff line number Diff line change
Expand Up @@ -120,6 +120,10 @@ fields.
.BI "\-\-with\-nth=" "N[,..]"
Transform the presentation of each line using field index expressions
.TP
.BI "\-\-accept\-nth=" "N[,..]"
Define which fields to print on accept. The last delimiter is stripped from the
output.
.TP
.B "+s, \-\-no\-sort"
Do not sort the result
.TP
Expand Down
2 changes: 1 addition & 1 deletion src/core.go
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,7 @@ func Run(opts *Options) (int, error) {
}
}
trans := Transform(tokens, opts.WithNth)
transformed := joinTokens(trans)
transformed := JoinTokens(trans)
if len(header) < opts.HeaderLines {
header = append(header, transformed)
eventBox.Set(EvtHeader, header)
Expand Down
11 changes: 11 additions & 0 deletions src/options.go
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ Usage: fzf [options]
integer or a range expression ([BEGIN]..[END]).
--with-nth=N[,..] Transform the presentation of each line using
field index expressions
--accept-nth=N[,..] Define which fields to print on accept
-d, --delimiter=STR Field delimiter regex (default: AWK-style)
+s, --no-sort Do not sort the result
--literal Do not normalize latin script letters
Expand Down Expand Up @@ -544,6 +545,7 @@ type Options struct {
Normalize bool
Nth []Range
WithNth []Range
AcceptNth []Range
Delimiter Delimiter
Sort int
Track trackOption
Expand Down Expand Up @@ -666,6 +668,7 @@ func defaultOptions() *Options {
Normalize: true,
Nth: make([]Range, 0),
WithNth: make([]Range, 0),
AcceptNth: make([]Range, 0),
Delimiter: Delimiter{},
Sort: 1000,
Track: trackDisabled,
Expand Down Expand Up @@ -2383,6 +2386,14 @@ func parseOptions(index *int, opts *Options, allArgs []string) error {
if opts.WithNth, err = splitNth(str); err != nil {
return err
}
case "--accept-nth":
str, err := nextString("nth expression required")
if err != nil {
return err
}
if opts.AcceptNth, err = splitNth(str); err != nil {
return err
}
case "-s", "--sort":
if opts.Sort, err = optionalNumeric(1); err != nil {
return err
Expand Down
2 changes: 2 additions & 0 deletions src/pattern.go
Original file line number Diff line number Diff line change
Expand Up @@ -403,6 +403,8 @@ func (p *Pattern) transformInput(item *Item) []Token {

tokens := Tokenize(item.text.ToString(), p.delimiter)
ret := Transform(tokens, p.nth)
// TODO: We could apply StripLastDelimiter to exclude the last delimiter from
// the search allowing suffix match with a string or a regex delimiter.
item.transformed = &transformed{p.revision, ret}
return ret
}
Expand Down
18 changes: 14 additions & 4 deletions src/terminal.go
Original file line number Diff line number Diff line change
Expand Up @@ -305,6 +305,7 @@ type Terminal struct {
nthAttr tui.Attr
nth []Range
nthCurrent []Range
acceptNth []Range
tabstop int
margin [4]sizeSpec
padding [4]sizeSpec
Expand Down Expand Up @@ -914,6 +915,7 @@ func NewTerminal(opts *Options, eventBox *util.EventBox, executor *util.Executor
nthAttr: opts.Theme.Nth.Attr,
nth: opts.Nth,
nthCurrent: opts.Nth,
acceptNth: opts.AcceptNth,
tabstop: opts.Tabstop,
hasStartActions: false,
hasResultActions: false,
Expand Down Expand Up @@ -1561,16 +1563,24 @@ func (t *Terminal) output() bool {
for _, s := range t.printQueue {
t.printer(s)
}
transform := func(item *Item) string {
return item.AsString(t.ansi)
}
if len(t.acceptNth) > 0 {
transform = func(item *Item) string {
return JoinTokens(StripLastDelimiter(Transform(Tokenize(item.AsString(t.ansi), t.delimiter), t.acceptNth), t.delimiter))
}
}
found := len(t.selected) > 0
if !found {
current := t.currentItem()
if current != nil {
t.printer(current.AsString(t.ansi))
t.printer(transform(current))
found = true
}
} else {
for _, sel := range t.sortSelected() {
t.printer(sel.item.AsString(t.ansi))
t.printer(transform(sel.item))
}
}
return found
Expand Down Expand Up @@ -3847,7 +3857,7 @@ func replacePlaceholder(params replacePlaceholderParams) (string, []string) {
elems, prefixLength := awkTokenizer(params.query)
tokens := withPrefixLengths(elems, prefixLength)
trans := Transform(tokens, nth)
result := joinTokens(trans)
result := JoinTokens(trans)
if !flags.preserveSpace {
result = strings.TrimSpace(result)
}
Expand Down Expand Up @@ -3897,7 +3907,7 @@ func replacePlaceholder(params replacePlaceholderParams) (string, []string) {
replace = func(item *Item) string {
tokens := Tokenize(item.AsString(params.stripAnsi), params.delimiter)
trans := Transform(tokens, ranges)
str := joinTokens(trans)
str := JoinTokens(trans)

// trim the last delimiter
if params.delimiter.str != nil {
Expand Down
33 changes: 31 additions & 2 deletions src/tokenizer.go
Original file line number Diff line number Diff line change
Expand Up @@ -211,7 +211,36 @@ func Tokenize(text string, delimiter Delimiter) []Token {
return withPrefixLengths(tokens, 0)
}

func joinTokens(tokens []Token) string {
// StripLastDelimiter removes the trailing delimiter and any trailing
// whitespace from the last token, so the joined output does not end with a
// dangling separator.
func StripLastDelimiter(tokens []Token, delimiter Delimiter) []Token {
	if len(tokens) == 0 {
		return tokens
	}

	last := tokens[len(tokens)-1]
	switch {
	case delimiter.str != nil:
		// Fixed-string delimiter: drop it if the token ends with it.
		last.text.TrimSuffix([]rune(*delimiter.str))
	case delimiter.regex != nil:
		// Regex delimiter: cut the token at the start of the last match.
		// Tokenize keeps the delimiter at the end of each token, so the
		// final match (if any) is the trailing delimiter.
		str := last.text.ToString()
		if locs := delimiter.regex.FindAllStringIndex(str, -1); len(locs) > 0 {
			last.text.SliceRight(locs[len(locs)-1][0])
		}
	}
	// Default (AWK-style) tokenization leaves trailing whitespace on the
	// token; the explicit-delimiter paths may also expose it after the cut.
	last.text.TrimTrailingWhitespaces()
	return tokens
}

// JoinTokens concatenates the tokens into a single string
func JoinTokens(tokens []Token) string {
var output bytes.Buffer
for _, token := range tokens {
output.WriteString(token.text.ToString())
Expand All @@ -229,7 +258,7 @@ func Transform(tokens []Token, withNth []Range) []Token {
if r.begin == r.end {
idx := r.begin
if idx == rangeEllipsis {
chars := util.ToChars(stringBytes(joinTokens(tokens)))
chars := util.ToChars(stringBytes(JoinTokens(tokens)))
parts = append(parts, &chars)
} else {
if idx < 0 {
Expand Down
6 changes: 3 additions & 3 deletions src/tokenizer_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -85,14 +85,14 @@ func TestTransform(t *testing.T) {
{
ranges, _ := splitNth("1,2,3")
tx := Transform(tokens, ranges)
if joinTokens(tx) != "abc: def: ghi: " {
if JoinTokens(tx) != "abc: def: ghi: " {
t.Errorf("%s", tx)
}
}
{
ranges, _ := splitNth("1..2,3,2..,1")
tx := Transform(tokens, ranges)
if string(joinTokens(tx)) != "abc: def: ghi: def: ghi: jklabc: " ||
if string(JoinTokens(tx)) != "abc: def: ghi: def: ghi: jklabc: " ||
len(tx) != 4 ||
tx[0].text.ToString() != "abc: def: " || tx[0].prefixLength != 2 ||
tx[1].text.ToString() != "ghi: " || tx[1].prefixLength != 14 ||
Expand All @@ -107,7 +107,7 @@ func TestTransform(t *testing.T) {
{
ranges, _ := splitNth("1..2,3,2..,1")
tx := Transform(tokens, ranges)
if joinTokens(tx) != " abc: def: ghi: def: ghi: jkl abc:" ||
if JoinTokens(tx) != " abc: def: ghi: def: ghi: jkl abc:" ||
len(tx) != 4 ||
tx[0].text.ToString() != " abc: def:" || tx[0].prefixLength != 0 ||
tx[1].text.ToString() != " ghi:" || tx[1].prefixLength != 12 ||
Expand Down
21 changes: 21 additions & 0 deletions src/util/chars.go
Original file line number Diff line number Diff line change
Expand Up @@ -189,6 +189,27 @@ func (chars *Chars) TrimTrailingWhitespaces() {
chars.slice = chars.slice[0 : len(chars.slice)-whitespaces]
}

// TrimSuffix removes the given rune sequence from the end of chars, but only
// when chars ends with that exact sequence; otherwise it is a no-op.
func (chars *Chars) TrimSuffix(runes []rune) {
	start := len(chars.slice) - len(runes)
	if start < 0 {
		// Shorter than the suffix; nothing to trim.
		return
	}

	for offset, r := range runes {
		if chars.Get(start+offset) != r {
			return
		}
	}

	chars.slice = chars.slice[:start]
}

// SliceRight truncates chars in place, keeping only the first 'last'
// characters. NOTE(review): no bounds check — assumes 0 <= last <= len;
// callers (e.g. StripLastDelimiter) pass indices obtained from the same
// content, so this holds, but confirm before adding new call sites.
func (chars *Chars) SliceRight(last int) {
	chars.slice = chars.slice[:last]
}

func (chars *Chars) ToString() string {
if runes := chars.optionalRunes(); runes != nil {
return string(runes)
Expand Down
26 changes: 26 additions & 0 deletions test/test_core.rb
Original file line number Diff line number Diff line change
Expand Up @@ -1665,4 +1665,30 @@ def test_abort_action_chain
assert_equal '', File.read(tempname).chomp
end
end

# --accept-nth with the default (AWK-style) delimiter: each accepted line is
# reduced to the listed fields ("2,2" prints field 2 twice); the trailing
# whitespace left by the last field is stripped from the output.
def test_accept_nth
  tmux.send_keys %((echo "foo bar baz"; echo "bar baz foo") | #{FZF} --multi --accept-nth 2,2 --sync --bind start:select-all+accept > #{tempname}), :Enter
  wait do
    assert_path_exists tempname
    assert_equal ['bar bar', 'baz baz'], File.readlines(tempname, chomp: true)
  end
end

# --accept-nth with a fixed-string delimiter (-d,): fields keep their
# delimiter except the very last printed field, whose trailing delimiter
# and whitespace are stripped (hence "foo ," -> "foo" at the end).
def test_accept_nth_string_delimiter
  tmux.send_keys %(echo "foo ,bar,baz" | #{FZF} -d, --accept-nth 2,2,1,3,1 --sync --bind start:accept > #{tempname}), :Enter
  wait do
    assert_path_exists tempname
    # Last delimiter and the whitespaces are removed
    assert_equal ['bar,bar,foo ,bazfoo'], File.readlines(tempname, chomp: true)
  end
end

# --accept-nth with a regex delimiter ('[:,]+'): the last regex match in the
# final printed field is stripped along with trailing whitespace, while
# delimiters inside earlier fields are preserved verbatim ("foo :,:").
def test_accept_nth_regex_delimiter
  tmux.send_keys %(echo "foo :,:bar,baz" | #{FZF} --delimiter='[:,]+' --accept-nth 2,2,1,3,1 --sync --bind start:accept > #{tempname}), :Enter
  wait do
    assert_path_exists tempname
    # Last delimiter and the whitespaces are removed
    assert_equal ['bar,bar,foo :,:bazfoo'], File.readlines(tempname, chomp: true)
  end
end
end

0 comments on commit 2b58458

Please sign in to comment.