http.go

package h

import (
	"strings"

	. "github.com/alecthomas/chroma" // nolint
	"github.com/alecthomas/chroma/lexers/internal"
)

// HTTP lexer.
var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLexer(
	&Config{
		Name:         "HTTP",
		Aliases:      []string{"http"},
		Filenames:    []string{},
		MimeTypes:    []string{},
		NotMultiline: true,
		DotAll:       true,
	},
	Rules{
		"root": {
			{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)(1\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")},
			{`(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")},
		},
		"headers": {
			{`([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpHeaderBlock), nil},
			{`([\t ]+)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpContinuousHeaderBlock), nil},
			{`\r?\n`, Text, Push("content")},
		},
		"content": {
			{`.+`, EmitterFunc(httpContentBlock), nil},
		},
	},
)))

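// httpContentBlock emits the message body as a single Generic token; the
// httpBodyContentTyper wrapper below re-lexes it according to the
// Content-Type header.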
func httpContentBlock(groups []string, lexer Lexer) Iterator {
	tokens := []Token{
		{Generic, groups[0]},
	}
	return Literator(tokens...)
}

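// httpHeaderBlock tokenises a "Name: value" header line: header name,
// optional whitespace, colon, optional whitespace, value and line ending.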
func httpHeaderBlock(groups []string, lexer Lexer) Iterator {
	tokens := []Token{
		{Name, groups[1]},
		{Text, groups[2]},
		{Operator, groups[3]},
		{Text, groups[4]},
		{Literal, groups[5]},
		{Text, groups[6]},
	}
	return Literator(tokens...)
}

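// httpContinuousHeaderBlock tokenises a folded header line, i.e. a header
// value continued onto the next line behind leading whitespace.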
func httpContinuousHeaderBlock(groups []string, lexer Lexer) Iterator {
	tokens := []Token{
		{Text, groups[1]},
		{Literal, groups[2]},
		{Text, groups[3]},
	}
	return Literator(tokens...)
}

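// httpBodyContentTypeLexer wraps the base HTTP lexer so that the message
// body can be re-lexed with a lexer matching the Content-Type header.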
func httpBodyContentTypeLexer(lexer Lexer) Lexer { return &httpBodyContentTyper{lexer} }

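// httpBodyContentTyper delegates to the wrapped Lexer but post-processes its
// token stream in Tokenise.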
type httpBodyContentTyper struct{ Lexer }

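// Tokenise watches the header tokens for a Content-Type value and, when the
// body (a Generic token) arrives, re-lexes it with a lexer resolved via
// internal.MatchMimeType, falling back to plain Text when no lexer matches.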
func (d *httpBodyContentTyper) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { // nolint: gocognit
	var contentType string
	var isContentType bool
	var subIterator Iterator

	it, err := d.Lexer.Tokenise(options, text)
	if err != nil {
		return nil, err
	}

	return func() Token {
		token := it()

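		// When the wrapped lexer is exhausted, drain the body sub-iterator
		// (if one was started) before reporting EOF ourselves.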
		if token == EOF {
			if subIterator != nil {
				return subIterator()
			}
			return EOF
		}

		switch {
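		// A "Content-Type" header name: the next Literal token is its value.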
		case token.Type == Name && strings.ToLower(token.Value) == "content-type":
			{
				isContentType = true
			}
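		// Record the media type, dropping parameters such as "; charset=utf-8".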
		case token.Type == Literal && isContentType:
			{
				isContentType = false
				contentType = strings.TrimSpace(token.Value)
				pos := strings.Index(contentType, ";")
				if pos > 0 {
					contentType = strings.TrimSpace(contentType[:pos])
				}
			}
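		// The body: look up a lexer for the recorded content type and re-lex it.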
		case token.Type == Generic && contentType != "":
			{
				lexer := internal.MatchMimeType(contentType)

				// application/calendar+xml can be treated as application/xml
				// if there's not a better match.
				if lexer == nil && strings.Contains(contentType, "+") {
					slashPos := strings.Index(contentType, "/")
					plusPos := strings.LastIndex(contentType, "+")
					contentType = contentType[:slashPos+1] + contentType[plusPos+1:]
					lexer = internal.MatchMimeType(contentType)
				}

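				// With no matching lexer, emit the body as plain text; otherwise
				// start the sub-lexer and return EOF so the closure above
				// switches over to subIterator on the next call.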
				if lexer == nil {
					token.Type = Text
				} else {
					subIterator, err = lexer.Tokenise(nil, token.Value)
					if err != nil {
						panic(err)
					}
					return EOF
				}
			}
		}
		return token
	}, nil
}