1
0
Fork 0
mirror of https://github.com/Luzifer/badge-gen.git synced 2024-11-10 06:10:03 +00:00
badge-gen/vendor/github.com/tdewolff/parse/css
2016-06-28 19:38:58 +02:00
..
hash.go Update Godeps 2016-06-28 19:38:58 +02:00
lex.go Update Godeps 2016-06-28 19:38:58 +02:00
parse.go Update Godeps 2016-06-28 19:38:58 +02:00
README.md Update Godeps 2016-06-28 19:38:58 +02:00
util.go Update Godeps 2016-06-28 19:38:58 +02:00

CSS GoDoc GoCover

This package is a CSS3 lexer and parser written in Go. Both follow the specification at CSS Syntax Module Level 3. The lexer takes an io.Reader and converts it into tokens until the EOF. The parser returns a parse tree of the full io.Reader input stream, but the low-level Next function can be used for stream parsing to return grammar units until the EOF.

Installation

Run the following command

go get github.com/tdewolff/parse/css

or add the following import and run the project with go get

import "github.com/tdewolff/parse/css"

Lexer

Usage

The following initializes a new Lexer with io.Reader r:

l := css.NewLexer(r)

To tokenize until EOF or an error occurs, use:

for {
	tt, text := l.Next()
	switch tt {
	case css.ErrorToken:
		// error or EOF set in l.Err()
		return
	// ...
	}
}

All tokens (see CSS Syntax Module Level 3):

ErrorToken			// non-official token, returned when errors occur
IdentToken
FunctionToken		// rgb( rgba( ...
AtKeywordToken		// @abc
HashToken			// #abc
StringToken
BadStringToken
UrlToken			// url(
BadUrlToken
DelimToken			// any unmatched character
NumberToken			// 5
PercentageToken		// 5%
DimensionToken		// 5em
UnicodeRangeToken
IncludeMatchToken	// ~=
DashMatchToken		// |=
PrefixMatchToken	// ^=
SuffixMatchToken	// $=
SubstringMatchToken // *=
ColumnToken			// ||
WhitespaceToken
CDOToken 			// <!--
CDCToken 			// -->
ColonToken
SemicolonToken
CommaToken
BracketToken 		// ( ) [ ] { }, all bracket tokens use this, Data() can distinguish between the brackets
CommentToken		// non-official token

Examples

package main

import (
	"fmt"
	"io"
	"os"

	"github.com/tdewolff/parse/css"
)

// Tokenize CSS3 from stdin.
func main() {
	l := css.NewLexer(os.Stdin)
	for {
		tt, text := l.Next()
		switch tt {
		case css.ErrorToken:
			if l.Err() != io.EOF {
				fmt.Println("Error on line", l.Line(), ":", l.Err())
			}
			return
		case css.IdentToken:
			fmt.Println("Identifier", string(text))
		case css.NumberToken:
			fmt.Println("Number", string(text))
		// ...
		}
	}
}

Parser

Usage

The following creates a new Parser.

// false because this is the content of an inline style attribute
p := css.NewParser(bytes.NewBufferString("color: red;"), false)

To iterate over the stylesheet, use:

for {
    gt, _, data := p.Next()
    if gt == css.ErrorGrammar {
        break
    }
    // ...
}

All grammar units returned by Next:

ErrorGrammar
AtRuleGrammar
BeginAtRuleGrammar
EndAtRuleGrammar
BeginRulesetGrammar
EndRulesetGrammar
DeclarationGrammar
TokenGrammar

Examples

package main

import (
	"bytes"
	"fmt"

	"github.com/tdewolff/parse/css"
)

func main() {
	// false because this is the content of an inline style attribute
	p := css.NewParser(bytes.NewBufferString("color: red;"), false)
	out := ""
	for {
		gt, _, data := p.Next()
		if gt == css.ErrorGrammar {
			break
		} else if gt == css.AtRuleGrammar || gt == css.BeginAtRuleGrammar || gt == css.BeginRulesetGrammar || gt == css.DeclarationGrammar {
			out += string(data)
			if gt == css.DeclarationGrammar {
				out += ":"
			}
			for _, val := range p.Values() {
				out += string(val.Data)
			}
			if gt == css.BeginAtRuleGrammar || gt == css.BeginRulesetGrammar {
				out += "{"
			} else if gt == css.AtRuleGrammar || gt == css.DeclarationGrammar {
				out += ";"
			}
		} else {
			out += string(data)
		}
	}
	fmt.Println(out)
}

License

Released under the MIT license.