package participle

import (
	"errors"
	"io"
	"strconv"
	"strings"

	"github.com/alecthomas/participle/lexer"
)

12type mapperByToken struct {
13	symbols []string
14	mapper  Mapper
15}
16
// DropToken can be returned by a Mapper to remove a token from the stream.
var DropToken = errors.New("drop token") // nolint: golint

20// Mapper function for mutating tokens before being applied to the AST.
21//
22// If the Mapper func returns an error of DropToken, the token will be removed from the stream.
23type Mapper func(token lexer.Token) (lexer.Token, error)
24
25// Map is an Option that configures the Parser to apply a mapping function to each Token from the lexer.
26//
27// This can be useful to eg. upper-case all tokens of a certain type, or dequote strings.
28//
29// "symbols" specifies the token symbols that the Mapper will be applied to. If empty, all tokens will be mapped.
30func Map(mapper Mapper, symbols ...string) Option {
31	return func(p *Parser) error {
32		p.mappers = append(p.mappers, mapperByToken{
33			mapper:  mapper,
34			symbols: symbols,
35		})
36		return nil
37	}
38}
39
40// Unquote applies strconv.Unquote() to tokens of the given types.
41//
42// Tokens of type "String" will be unquoted if no other types are provided.
43func Unquote(types ...string) Option {
44	if len(types) == 0 {
45		types = []string{"String"}
46	}
47	return Map(func(t lexer.Token) (lexer.Token, error) {
48		value, err := unquote(t.Value)
49		if err != nil {
50			return t, lexer.Errorf(t.Pos, "invalid quoted string %q: %s", t.Value, err.Error())
51		}
52		t.Value = value
53		return t, nil
54	}, types...)
55}
56
// unquote strips the surrounding quote characters from s and decodes any
// escape sequences in the interior, using the opening quote character to
// select the quoting rules (as strconv.UnquoteChar does).
func unquote(s string) (string, error) {
	// Guard against inputs too short to carry a pair of quotes; indexing
	// s[0] and slicing s[1:len(s)-1] would otherwise panic.
	if len(s) < 2 {
		return "", strconv.ErrSyntax
	}
	quote := s[0]
	s = s[1 : len(s)-1]
	// strings.Builder avoids the quadratic cost of repeated string
	// concatenation in the decode loop.
	var out strings.Builder
	for s != "" {
		value, _, tail, err := strconv.UnquoteChar(s, quote)
		if err != nil {
			return "", err
		}
		s = tail
		out.WriteRune(value)
	}
	return out.String(), nil
}

72// Upper is an Option that upper-cases all tokens of the given type. Useful for case normalisation.
73func Upper(types ...string) Option {
74	return Map(func(token lexer.Token) (lexer.Token, error) {
75		token.Value = strings.ToUpper(token.Value)
76		return token, nil
77	}, types...)
78}
79
80// Elide drops tokens of the specified types.
81func Elide(types ...string) Option {
82	return Map(func(token lexer.Token) (lexer.Token, error) {
83		return lexer.Token{}, DropToken
84	}, types...)
85}
86
87// Apply a Mapping to all tokens coming out of a Lexer.
88type mappingLexerDef struct {
89	lexer.Definition
90	mapper Mapper
91}
92
93func (m *mappingLexerDef) Lex(r io.Reader) (lexer.Lexer, error) {
94	lexer, err := m.Definition.Lex(r)
95	if err != nil {
96		return nil, err
97	}
98	return &mappingLexer{lexer, m.mapper}, nil
99}
100
101type mappingLexer struct {
102	lexer.Lexer
103	mapper Mapper
104}
105
106func (m *mappingLexer) Next() (lexer.Token, error) {
107	for {
108		t, err := m.Lexer.Next()
109		if err != nil {
110			return t, err
111		}
112		t, err = m.mapper(t)
113		if err == DropToken {
114			continue
115		}
116		return t, err
117	}
118}
119