Updated transitive dependencies

Seednode 2024-03-12 08:18:49 -05:00
parent 1b2c61176f
commit 43a8d43f0c
25 changed files with 838 additions and 164 deletions

go.mod

@@ -3,7 +3,7 @@ module seedno.de/seednode/roulette
go 1.22
require (
-	github.com/alecthomas/chroma/v2 v2.12.0
+	github.com/alecthomas/chroma/v2 v2.13.0
	github.com/julienschmidt/httprouter v1.3.0
	github.com/klauspost/compress v1.17.7
	github.com/spf13/cobra v1.8.0

go.sum

@@ -1,9 +1,9 @@
-github.com/alecthomas/assert/v2 v2.2.1 h1:XivOgYcduV98QCahG8T5XTezV5bylXe+lBxLG2K2ink=
-github.com/alecthomas/assert/v2 v2.2.1/go.mod h1:pXcQ2Asjp247dahGEmsZ6ru0UVwnkhktn7S0bBDLxvQ=
-github.com/alecthomas/chroma/v2 v2.12.0 h1:Wh8qLEgMMsN7mgyG8/qIpegky2Hvzr4By6gEF7cmWgw=
-github.com/alecthomas/chroma/v2 v2.12.0/go.mod h1:4TQu7gdfuPjSh76j78ietmqh9LiurGF0EpseFXdKMBw=
-github.com/alecthomas/repr v0.2.0 h1:HAzS41CIzNW5syS8Mf9UwXhNH1J9aix/BvDRf1Ml2Yk=
-github.com/alecthomas/repr v0.2.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
+github.com/alecthomas/assert/v2 v2.6.0 h1:o3WJwILtexrEUk3cUVal3oiQY2tfgr/FHWiz/v2n4FU=
+github.com/alecthomas/assert/v2 v2.6.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
+github.com/alecthomas/chroma/v2 v2.13.0 h1:VP72+99Fb2zEcYM0MeaWJmV+xQvz5v5cxRHd+ooU1lI=
+github.com/alecthomas/chroma/v2 v2.13.0/go.mod h1:BUGjjsD+ndS6eX37YgTchSEG+Jg9Jv1GiZs9sqPqztk=
+github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc=
+github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI=
github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=


@@ -11,3 +11,7 @@ insert_final_newline = true
indent_style = space
indent_size = 2
insert_final_newline = false
+[*.yml]
+indent_style = space
+indent_size = 2


@@ -49,6 +49,8 @@ linters:
    - nosnakecase
    - testableexamples
    - musttag
+   - depguard
+   - goconst
linters-settings:
  govet:


@@ -8,75 +8,72 @@ highlighted HTML, ANSI-coloured text, etc.
Chroma is based heavily on [Pygments](http://pygments.org/), and includes
translators for Pygments lexers and styles.
-<a id="markdown-table-of-contents" name="table-of-contents"></a>
## Table of Contents
<!-- TOC -->
-1. [Table of Contents](#table-of-contents)
-2. [Supported languages](#supported-languages)
-3. [Try it](#try-it)
-4. [Using the library](#using-the-library)
+1. [Supported languages](#supported-languages)
+2. [Try it](#try-it)
+3. [Using the library](#using-the-library)
   1. [Quick start](#quick-start)
   2. [Identifying the language](#identifying-the-language)
   3. [Formatting the output](#formatting-the-output)
   4. [The HTML formatter](#the-html-formatter)
-5. [More detail](#more-detail)
+4. [More detail](#more-detail)
   1. [Lexers](#lexers)
   2. [Formatters](#formatters)
   3. [Styles](#styles)
-6. [Command-line interface](#command-line-interface)
-7. [Testing lexers](#testing-lexers)
-8. [What's missing compared to Pygments?](#whats-missing-compared-to-pygments)
+5. [Command-line interface](#command-line-interface)
+6. [Testing lexers](#testing-lexers)
+7. [What's missing compared to Pygments?](#whats-missing-compared-to-pygments)
<!-- /TOC -->
-<a id="markdown-supported-languages" name="supported-languages"></a>
## Supported languages
| Prefix | Language |
| :----: | -------- |
| A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Agda, AL, Alloy, Angular2, ANTLR, ApacheConf, APL, AppleScript, ArangoDB AQL, Arduino, ArmAsm, AutoHotkey, AutoIt, Awk |
| B | Ballerina, Bash, Bash Session, Batchfile, BibTeX, Bicep, BlitzBasic, BNF, BQN, Brainfuck |
| C | C, C#, C++, Caddyfile, Caddyfile Directives, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Chapel, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython |
-| D | D, Dart, Dax, Diff, Django/Jinja, dns, Docker, DTD, Dylan |
+| D | D, Dart, Dax, Desktop Entry, Diff, Django/Jinja, dns, Docker, DTD, Dylan |
| E | EBNF, Elixir, Elm, EmacsLisp, Erlang |
| F | Factor, Fennel, Fish, Forth, Fortran, FortranFixed, FSharp |
| G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, Gherkin, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groff, Groovy |
| H | Handlebars, Hare, Haskell, Haxe, HCL, Hexdump, HLB, HLSL, HolyC, HTML, HTTP, Hy |
| I | Idris, Igor, INI, Io, ISCdhcpd |
| J | J, Java, JavaScript, JSON, Julia, Jungle |
| K | Kotlin |
| L | Lighttpd configuration file, LLVM, Lua |
-| M | Makefile, Mako, markdown, Mason, Mathematica, Matlab, mcfunction, Meson, Metal, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL |
+| M | Makefile, Mako, markdown, Mason, Materialize SQL dialect, Mathematica, Matlab, mcfunction, Meson, Metal, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL |
| N | NASM, Natural, Newspeak, Nginx configuration file, Nim, Nix |
| O | Objective-C, OCaml, Octave, Odin, OnesEnterprise, OpenEdge ABL, OpenSCAD, Org Mode |
-| P | PacmanConf, Perl, PHP, PHTML, Pig, PkgConfig, PL/pgSQL, plaintext, Plutus Core, Pony, PostgreSQL SQL dialect, PostScript, POVRay, PowerQuery, PowerShell, Prolog, PromQL, properties, Protocol Buffer, PRQL, PSL, Puppet, Python, Python 2 |
+| P | PacmanConf, Perl, PHP, PHTML, Pig, PkgConfig, PL/pgSQL, plaintext, Plutus Core, Pony, PostgreSQL SQL dialect, PostScript, POVRay, PowerQuery, PowerShell, Prolog, PromQL, Promela, properties, Protocol Buffer, PRQL, PSL, Puppet, Python, Python 2 |
| Q | QBasic, QML |
-| R | R, Racket, Ragel, Raku, react, ReasonML, reg, reStructuredText, Rexx, Ruby, Rust |
+| R | R, Racket, Ragel, Raku, react, ReasonML, reg, Rego, reStructuredText, Rexx, RPMSpec, Ruby, Rust |
| S | SAS, Sass, Scala, Scheme, Scilab, SCSS, Sed, Sieve, Smali, Smalltalk, Smarty, Snobol, Solidity, SourcePawn, SPARQL, SQL, SquidConf, Standard ML, stas, Stylus, Svelte, Swift, SYSTEMD, systemverilog |
| T | TableGen, Tal, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData |
| V | V, V shell, Vala, VB.net, verilog, VHDL, VHS, VimL, vue |
| W | WDTE, WebGPU Shading Language, Whiley |
| X | XML, Xorg |
| Y | YAML, YANG |
| Z | Z80 Assembly, Zed, Zig |
_I will attempt to keep this section up to date, but an authoritative list can be
displayed with `chroma --list`._
-<a id="markdown-try-it" name="try-it"></a>
## Try it
Try out various languages and styles on the [Chroma Playground](https://swapoff.org/chroma/playground/).
-<a id="markdown-using-the-library" name="using-the-library"></a>
## Using the library
+This is version 2 of Chroma, use the import path:
+```go
+import "github.com/alecthomas/chroma/v2"
+```
Chroma, like Pygments, has the concepts of
[lexers](https://github.com/alecthomas/chroma/tree/master/lexers),
[formatters](https://github.com/alecthomas/chroma/tree/master/formatters) and
@@ -95,8 +92,6 @@ In all cases, if a lexer, formatter or style can not be determined, `nil` will
be returned. In this situation you may want to default to the `Fallback`
value in each respective package, which provides sane defaults.
-<a id="markdown-quick-start" name="quick-start"></a>
### Quick start
A convenience function exists that can be used to simply format some source
@@ -106,8 +101,6 @@ text, without any effort:
err := quick.Highlight(os.Stdout, someSourceCode, "go", "html", "monokai")
```
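As an aside, a minimal, self-contained sketch of the `quick.Highlight` call shown in the hunk above (the sample source string is illustrative):

```go
package main

import (
	"log"
	"os"

	"github.com/alecthomas/chroma/v2/quick"
)

func main() {
	someSourceCode := "package main\n\nfunc main() {\n\tprintln(\"hello\")\n}\n"

	// Lex as Go, format as HTML, colour with the monokai style, write to stdout.
	if err := quick.Highlight(os.Stdout, someSourceCode, "go", "html", "monokai"); err != nil {
		log.Fatal(err)
	}
}
```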
-<a id="markdown-identifying-the-language" name="identifying-the-language"></a>
### Identifying the language
To highlight code, you'll first have to identify what language the code is
@@ -147,8 +140,6 @@ token types into a single token:
lexer = chroma.Coalesce(lexer)
```
-<a id="markdown-formatting-the-output" name="formatting-the-output"></a>
### Formatting the output
Once a language is identified you will need to pick a formatter and a style (theme).
@@ -177,8 +168,6 @@ And finally, format the tokens from the iterator:
err := formatter.Format(w, style, iterator)
```
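Pulling the `Coalesce` and `Format` context lines above together, a hedged sketch of the full lexer → style → formatter pipeline this README describes (names such as "go", "monokai" and "html" are illustrative):

```go
package main

import (
	"log"
	"os"

	"github.com/alecthomas/chroma/v2"
	"github.com/alecthomas/chroma/v2/formatters"
	"github.com/alecthomas/chroma/v2/lexers"
	"github.com/alecthomas/chroma/v2/styles"
)

func main() {
	source := "contents := []string{}\n"

	// Identify the language, falling back to plaintext when unknown.
	lexer := lexers.Get("go")
	if lexer == nil {
		lexer = lexers.Fallback
	}
	// Coalesce runs of identical token types into single tokens.
	lexer = chroma.Coalesce(lexer)

	// Pick a style (theme) and a formatter, with sane fallbacks.
	style := styles.Get("monokai")
	if style == nil {
		style = styles.Fallback
	}
	formatter := formatters.Get("html")
	if formatter == nil {
		formatter = formatters.Fallback
	}

	// Tokenise the source and format the resulting token stream.
	iterator, err := lexer.Tokenise(nil, source)
	if err != nil {
		log.Fatal(err)
	}
	if err := formatter.Format(os.Stdout, style, iterator); err != nil {
		log.Fatal(err)
	}
}
```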
-<a id="markdown-the-html-formatter" name="the-html-formatter"></a>
### The HTML formatter
By default the `html` registered formatter generates standalone HTML with
@@ -203,12 +192,8 @@ formatter := html.New(html.WithClasses(true))
err := formatter.WriteCSS(w, style)
```
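For the class-based output this hunk touches, a small sketch (assuming the usual chroma v2 package layout) of emitting an external stylesheet with `WithClasses` and then formatting against it:

```go
package main

import (
	"log"
	"os"

	"github.com/alecthomas/chroma/v2/formatters/html"
	"github.com/alecthomas/chroma/v2/lexers"
	"github.com/alecthomas/chroma/v2/styles"
)

func main() {
	style := styles.Get("monokai")
	// Emit CSS classes instead of inline style attributes.
	formatter := html.New(html.WithClasses(true), html.WithLineNumbers(true))

	// Write the stylesheet for the chosen style once...
	if err := formatter.WriteCSS(os.Stdout, style); err != nil {
		log.Fatal(err)
	}

	// ...then format code as usual; the generated markup references those classes.
	iterator, err := lexers.Get("go").Tokenise(nil, "println(\"hi\")\n")
	if err != nil {
		log.Fatal(err)
	}
	if err := formatter.Format(os.Stdout, style, iterator); err != nil {
		log.Fatal(err)
	}
}
```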
-<a id="markdown-more-detail" name="more-detail"></a>
## More detail
-<a id="markdown-lexers" name="lexers"></a>
### Lexers
See the [Pygments documentation](http://pygments.org/docs/lexerdevelopment/)
@@ -228,8 +213,6 @@ python3 _tools/pygments2chroma_xml.py \
See notes in [pygments-lexers.txt](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt)
for a list of lexers, and notes on some of the issues importing them.
-<a id="markdown-formatters" name="formatters"></a>
### Formatters
Chroma supports HTML output, as well as terminal output in 8 colour, 256 colour, and true-colour.
@@ -237,8 +220,6 @@ Chroma supports HTML output, as well as terminal output in 8 colour, 256 colour,
A `noop` formatter is included that outputs the token text only, and a `tokens`
formatter outputs raw tokens. The latter is useful for debugging lexers.
-<a id="markdown-styles" name="styles"></a>
### Styles
Chroma styles are defined in XML. The style entries use the
@@ -262,8 +243,6 @@ Also, token types in a style file are hierarchical. For instance, when `CommentS
For a quick overview of the available styles and how they look, check out the [Chroma Style Gallery](https://xyproto.github.io/splash/docs/).
-<a id="markdown-command-line-interface" name="command-line-interface"></a>
## Command-line interface
A command-line interface to Chroma is included.
@@ -288,10 +267,6 @@ on under the hood for easy integration with [lesspipe shipping with
Debian and derivatives](https://manpages.debian.org/lesspipe#USER_DEFINED_FILTERS);
for that setup the `chroma` executable can be just symlinked to `~/.lessfilter`.
-<a id="markdown-whats-missing-compared-to-pygments" name="whats-missing-compared-to-pygments"></a>
-<a id="markdown-testing-lexers" name="testing-lexers"></a>
## Testing lexers
If you edit some lexers and want to try it, open a shell in `cmd/chromad` and run:


@@ -5,7 +5,9 @@ import (
	"html"
	"io"
	"sort"
+	"strconv"
	"strings"
+	"sync"
	"github.com/alecthomas/chroma/v2"
)
@@ -132,6 +134,7 @@ func New(options ...Option) *Formatter {
		baseLineNumber: 1,
		preWrapper: defaultPreWrapper,
	}
+	f.styleCache = newStyleCache(f)
	for _, option := range options {
		option(f)
	}
@@ -188,6 +191,7 @@ var (
// Formatter that generates HTML.
type Formatter struct {
+	styleCache *styleCache
	standalone bool
	prefix string
	Classes bool // Exported field to detect when classes are being used
@@ -220,12 +224,7 @@ func (f *Formatter) Format(w io.Writer, style *chroma.Style, iterator chroma.Ite
//
// OTOH we need to be super careful about correct escaping...
func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.Token) (err error) { // nolint: gocyclo
-	css := f.styleToCSS(style)
-	if !f.Classes {
-		for t, style := range css {
-			css[t] = compressStyle(style)
-		}
-	}
+	css := f.styleCache.get(style, true)
	if f.standalone {
		fmt.Fprint(w, "<html>\n")
		if f.Classes {
@@ -243,7 +242,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
	wrapInTable := f.lineNumbers && f.lineNumbersInTable
	lines := chroma.SplitTokensIntoLines(tokens)
-	lineDigits := len(fmt.Sprintf("%d", f.baseLineNumber+len(lines)-1))
+	lineDigits := len(strconv.Itoa(f.baseLineNumber + len(lines) - 1))
	highlightIndex := 0
	if wrapInTable {
@@ -251,7 +250,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
	fmt.Fprintf(w, "<div%s>\n", f.styleAttr(css, chroma.PreWrapper))
	fmt.Fprintf(w, "<table%s><tr>", f.styleAttr(css, chroma.LineTable))
	fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD))
-	fmt.Fprintf(w, f.preWrapper.Start(false, f.styleAttr(css, chroma.PreWrapper)))
+	fmt.Fprintf(w, "%s", f.preWrapper.Start(false, f.styleAttr(css, chroma.PreWrapper)))
	for index := range lines {
		line := f.baseLineNumber + index
		highlight, next := f.shouldHighlight(highlightIndex, line)
@@ -273,7 +272,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
		fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD, "width:100%"))
	}
-	fmt.Fprintf(w, f.preWrapper.Start(true, f.styleAttr(css, chroma.PreWrapper)))
+	fmt.Fprintf(w, "%s", f.preWrapper.Start(true, f.styleAttr(css, chroma.PreWrapper)))
	highlightIndex = 0
	for index, tokens := range lines {
@@ -323,7 +322,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
			fmt.Fprint(w, `</span>`) // End of Line
		}
	}
-	fmt.Fprintf(w, f.preWrapper.End(true))
+	fmt.Fprintf(w, "%s", f.preWrapper.End(true))
	if wrapInTable {
		fmt.Fprint(w, "</td></tr></table>\n")
@@ -419,7 +418,7 @@ func (f *Formatter) tabWidthStyle() string {
// WriteCSS writes CSS style definitions (without any surrounding HTML).
func (f *Formatter) WriteCSS(w io.Writer, style *chroma.Style) error {
-	css := f.styleToCSS(style)
+	css := f.styleCache.get(style, false)
	// Special-case background as it is mapped to the outer ".chroma" class.
	if _, err := fmt.Fprintf(w, "/* %s */ .%sbg { %s }\n", chroma.Background, f.prefix, css[chroma.Background]); err != nil {
		return err
@@ -562,3 +561,63 @@ func compressStyle(s string) string {
	}
	return strings.Join(out, ";")
}
const styleCacheLimit = 32
type styleCacheEntry struct {
style *chroma.Style
compressed bool
cache map[chroma.TokenType]string
}
type styleCache struct {
mu sync.Mutex
// LRU cache of compiled (and possibly compressed) styles. This is a slice
// because the cache size is small, and a slice is sufficiently fast for
// small N.
cache []styleCacheEntry
f *Formatter
}
func newStyleCache(f *Formatter) *styleCache {
return &styleCache{f: f}
}
func (l *styleCache) get(style *chroma.Style, compress bool) map[chroma.TokenType]string {
l.mu.Lock()
defer l.mu.Unlock()
// Look for an existing entry.
for i := len(l.cache) - 1; i >= 0; i-- {
entry := l.cache[i]
if entry.style == style && entry.compressed == compress {
// Top of the cache, no need to adjust the order.
if i == len(l.cache)-1 {
return entry.cache
}
// Move this entry to the end of the LRU
copy(l.cache[i:], l.cache[i+1:])
l.cache[len(l.cache)-1] = entry
return entry.cache
}
}
// No entry, create one.
cached := l.f.styleToCSS(style)
if !l.f.Classes {
for t, style := range cached {
cached[t] = compressStyle(style)
}
}
if compress {
for t, style := range cached {
cached[t] = compressStyle(style)
}
}
// Evict the oldest entry.
if len(l.cache) >= styleCacheLimit {
l.cache = l.cache[0:copy(l.cache, l.cache[1:])]
}
l.cache = append(l.cache, styleCacheEntry{style: style, cache: cached, compressed: compress})
return cached
}
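The block added above gives the HTML formatter a small slice-backed LRU keyed on the (*chroma.Style, compressed) pair, so a style is compiled to CSS at most once per formatter. A hedged sketch of the call pattern this optimises (the style and lexer names are illustrative):

```go
package main

import (
	"io"
	"log"

	"github.com/alecthomas/chroma/v2/formatters/html"
	"github.com/alecthomas/chroma/v2/lexers"
	"github.com/alecthomas/chroma/v2/styles"
)

func main() {
	formatter := html.New() // inline styles: Format asks the cache for compressed CSS
	style := styles.Get("monokai")
	lexer := lexers.Get("go")

	snippets := []string{"a := 1\n", "b := 2\n", "c := a + b\n"}
	for _, src := range snippets {
		it, err := lexer.Tokenise(nil, src)
		if err != nil {
			log.Fatal(err)
		}
		// The first iteration compiles and caches the CSS for this style;
		// later iterations reuse the cached entry because the same
		// *chroma.Style pointer and compress flag are passed.
		if err := formatter.Format(io.Discard, style, it); err != nil {
			log.Fatal(err)
		}
	}
}
```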


@@ -4,52 +4,82 @@ import (
	. "github.com/alecthomas/chroma/v2" // nolint
)
+// Matcher token stub for docs, or
+// Named matcher: @name, or
+// Path matcher: /foo, or
+// Wildcard path matcher: *
+// nolint: gosec
+var caddyfileMatcherTokenRegexp = `(\[\<matcher\>\]|@[^\s]+|/[^\s]+|\*)`
+// Comment at start of line, or
+// Comment preceded by whitespace
+var caddyfileCommentRegexp = `(^|\s+)#.*\n`
// caddyfileCommon are the rules common to both of the lexer variants
func caddyfileCommonRules() Rules {
	return Rules{
		"site_block_common": {
+			Include("site_body"),
+			// Any other directive
+			{`[^\s#]+`, Keyword, Push("directive")},
+			Include("base"),
+		},
+		"site_body": {
			// Import keyword
-			{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
+			{`\b(import|invoke)\b( [^\s#]+)`, ByGroups(Keyword, Text), Push("subdirective")},
			// Matcher definition
			{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
			// Matcher token stub for docs
			{`\[\<matcher\>\]`, NameDecorator, Push("matcher")},
			// These cannot have matchers but may have things that look like
			// matchers in their arguments, so we just parse as a subdirective.
-			{`try_files`, Keyword, Push("subdirective")},
+			{`\b(try_files|tls|log|bind)\b`, Keyword, Push("subdirective")},
			// These are special, they can nest more directives
-			{`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")},
+			{`\b(handle_errors|handle_path|handle_response|replace_status|handle|route)\b`, Keyword, Push("nested_directive")},
-			// Any other directive
-			{`[^\s#]+`, Keyword, Push("directive")},
-			Include("base"),
+			// uri directive has special syntax
+			{`\b(uri)\b`, Keyword, Push("uri_directive")},
		},
		"matcher": {
			{`\{`, Punctuation, Push("block")},
			// Not can be one-liner
			{`not`, Keyword, Push("deep_not_matcher")},
+			// Heredoc for CEL expression
+			Include("heredoc"),
+			// Backtick for CEL expression
+			{"`", StringBacktick, Push("backticks")},
			// Any other same-line matcher
			{`[^\s#]+`, Keyword, Push("arguments")},
			// Terminators
-			{`\n`, Text, Pop(1)},
+			{`\s*\n`, Text, Pop(1)},
			{`\}`, Punctuation, Pop(1)},
			Include("base"),
		},
		"block": {
			{`\}`, Punctuation, Pop(2)},
+			// Using double quotes doesn't stop at spaces
+			{`"`, StringDouble, Push("double_quotes")},
+			// Using backticks doesn't stop at spaces
+			{"`", StringBacktick, Push("backticks")},
			// Not can be one-liner
			{`not`, Keyword, Push("not_matcher")},
-			// Any other subdirective
+			// Directives & matcher definitions
+			Include("site_body"),
+			// Any directive
			{`[^\s#]+`, Keyword, Push("subdirective")},
			Include("base"),
		},
		"nested_block": {
			{`\}`, Punctuation, Pop(2)},
-			// Matcher definition
-			{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
-			// Something that starts with literally < is probably a docs stub
-			{`\<[^#]+\>`, Keyword, Push("nested_directive")},
-			// Any other directive
-			{`[^\s#]+`, Keyword, Push("nested_directive")},
+			// Using double quotes doesn't stop at spaces
+			{`"`, StringDouble, Push("double_quotes")},
+			// Using backticks doesn't stop at spaces
+			{"`", StringBacktick, Push("backticks")},
+			// Not can be one-liner
+			{`not`, Keyword, Push("not_matcher")},
+			// Directives & matcher definitions
+			Include("site_body"),
+			// Any other subdirective
+			{`[^\s#]+`, Keyword, Push("directive")},
			Include("base"),
		},
		"not_matcher": {
@@ -66,69 +96,97 @@ func caddyfileCommonRules() Rules {
		},
		"directive": {
			{`\{(?=\s)`, Punctuation, Push("block")},
-			Include("matcher_token"),
-			Include("comments_pop_1"),
-			{`\n`, Text, Pop(1)},
+			{caddyfileMatcherTokenRegexp, NameDecorator, Push("arguments")},
+			{caddyfileCommentRegexp, CommentSingle, Pop(1)},
+			{`\s*\n`, Text, Pop(1)},
			Include("base"),
		},
		"nested_directive": {
			{`\{(?=\s)`, Punctuation, Push("nested_block")},
-			Include("matcher_token"),
-			Include("comments_pop_1"),
-			{`\n`, Text, Pop(1)},
+			{caddyfileMatcherTokenRegexp, NameDecorator, Push("nested_arguments")},
+			{caddyfileCommentRegexp, CommentSingle, Pop(1)},
+			{`\s*\n`, Text, Pop(1)},
			Include("base"),
		},
		"subdirective": {
			{`\{(?=\s)`, Punctuation, Push("block")},
-			Include("comments_pop_1"),
-			{`\n`, Text, Pop(1)},
+			{caddyfileCommentRegexp, CommentSingle, Pop(1)},
+			{`\s*\n`, Text, Pop(1)},
			Include("base"),
		},
		"arguments": {
			{`\{(?=\s)`, Punctuation, Push("block")},
-			Include("comments_pop_2"),
+			{caddyfileCommentRegexp, CommentSingle, Pop(2)},
			{`\\\n`, Text, nil}, // Skip escaped newlines
-			{`\n`, Text, Pop(2)},
+			{`\s*\n`, Text, Pop(2)},
+			Include("base"),
+		},
+		"nested_arguments": {
+			{`\{(?=\s)`, Punctuation, Push("nested_block")},
+			{caddyfileCommentRegexp, CommentSingle, Pop(2)},
+			{`\\\n`, Text, nil}, // Skip escaped newlines
+			{`\s*\n`, Text, Pop(2)},
			Include("base"),
		},
		"deep_subdirective": {
			{`\{(?=\s)`, Punctuation, Push("block")},
-			Include("comments_pop_3"),
-			{`\n`, Text, Pop(3)},
+			{caddyfileCommentRegexp, CommentSingle, Pop(3)},
+			{`\s*\n`, Text, Pop(3)},
			Include("base"),
		},
-		"matcher_token": {
-			{`@[^\s]+`, NameDecorator, Push("arguments")}, // Named matcher
-			{`/[^\s]+`, NameDecorator, Push("arguments")}, // Path matcher
-			{`\*`, NameDecorator, Push("arguments")}, // Wildcard path matcher
-			{`\[\<matcher\>\]`, NameDecorator, Push("arguments")}, // Matcher token stub for docs
+		"uri_directive": {
+			{`\{(?=\s)`, Punctuation, Push("block")},
+			{caddyfileMatcherTokenRegexp, NameDecorator, nil},
+			{`(strip_prefix|strip_suffix|replace|path_regexp)`, NameConstant, Push("arguments")},
+			{caddyfileCommentRegexp, CommentSingle, Pop(1)},
+			{`\s*\n`, Text, Pop(1)},
+			Include("base"),
		},
"comments": { "double_quotes": {
{`^#.*\n`, CommentSingle, nil}, // Comment at start of line Include("placeholder"),
{`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace {`\\"`, StringDouble, nil},
{`[^"]`, StringDouble, nil},
{`"`, StringDouble, Pop(1)},
}, },
"comments_pop_1": { "backticks": {
{`^#.*\n`, CommentSingle, Pop(1)}, // Comment at start of line Include("placeholder"),
{`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace {"\\\\`", StringBacktick, nil},
{"[^`]", StringBacktick, nil},
{"`", StringBacktick, Pop(1)},
}, },
"comments_pop_2": { "optional": {
{`^#.*\n`, CommentSingle, Pop(2)}, // Comment at start of line // Docs syntax for showing optional parts with [ ]
{`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace {`\[`, Punctuation, Push("optional")},
Include("name_constants"),
{`\|`, Punctuation, nil},
{`[^\[\]\|]+`, String, nil},
{`\]`, Punctuation, Pop(1)},
}, },
"comments_pop_3": { "heredoc": {
{`^#.*\n`, CommentSingle, Pop(3)}, // Comment at start of line {`(<<([a-zA-Z0-9_-]+))(\n(.*|\n)*)(\s*)(\2)`, ByGroups(StringHeredoc, nil, String, String, String, StringHeredoc), nil},
{`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace },
"name_constants": {
{`\b(most_recently_modified|largest_size|smallest_size|first_exist|internal|disable_redirects|ignore_loaded_certs|disable_certs|private_ranges|first|last|before|after|on|off)\b(\||(?=\]|\s|$))`, ByGroups(NameConstant, Punctuation), nil},
},
"placeholder": {
// Placeholder with dots, colon for default value, brackets for args[0:]
{`\{[\w+.\[\]\:\$-]+\}`, StringEscape, nil},
// Handle opening brackets with no matching closing one
{`\{[^\}\s]*\b`, String, nil},
}, },
"base": { "base": {
Include("comments"), {caddyfileCommentRegexp, CommentSingle, nil},
{`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil}, {`\[\<matcher\>\]`, NameDecorator, nil},
{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil}, Include("name_constants"),
{`[a-z-]+/[a-z-+]+`, LiteralString, nil}, Include("heredoc"),
{`[0-9]+[km]?\b`, LiteralNumberInteger, nil}, {`(https?://)?([a-z0-9.-]+)(:)([0-9]+)([^\s]*)`, ByGroups(Name, Name, Punctuation, NumberInteger, Name), nil},
{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder {`\[`, Punctuation, Push("optional")},
{`\[(?=[^#{}$]+\])`, Punctuation, nil}, {"`", StringBacktick, Push("backticks")},
{`\]|\|`, Punctuation, nil}, {`"`, StringDouble, Push("double_quotes")},
{`[^\s#{}$\]]+`, LiteralString, nil}, Include("placeholder"),
{`[a-z-]+/[a-z-+]+`, String, nil},
{`[0-9]+([smhdk]|ns|us|µs|ms)?\b`, NumberInteger, nil},
{`[^\s\n#\{]+`, String, nil},
{`/[^\s#]*`, Name, nil}, {`/[^\s#]*`, Name, nil},
{`\s+`, Text, nil}, {`\s+`, Text, nil},
}, },
@@ -149,27 +207,29 @@ var Caddyfile = Register(MustNewLexer(
func caddyfileRules() Rules {
	return Rules{
		"root": {
-			Include("comments"),
+			{caddyfileCommentRegexp, CommentSingle, nil},
			// Global options block
			{`^\s*(\{)\s*$`, ByGroups(Punctuation), Push("globals")},
+			// Top level import
+			{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
			// Snippets
-			{`(\([^\s#]+\))(\s*)(\{)`, ByGroups(NameVariableAnonymous, Text, Punctuation), Push("snippet")},
+			{`(&?\([^\s#]+\))(\s*)(\{)`, ByGroups(NameVariableAnonymous, Text, Punctuation), Push("snippet")},
			// Site label
			{`[^#{(\s,]+`, GenericHeading, Push("label")},
			// Site label with placeholder
-			{`\{[\w+.\$-]+\}`, LiteralStringEscape, Push("label")},
+			{`\{[\w+.\[\]\:\$-]+\}`, StringEscape, Push("label")},
			{`\s+`, Text, nil},
		},
		"globals": {
			{`\}`, Punctuation, Pop(1)},
-			{`[^\s#]+`, Keyword, Push("directive")},
+			// Global options are parsed as subdirectives (no matcher)
+			{`[^\s#]+`, Keyword, Push("subdirective")},
			Include("base"),
		},
		"snippet": {
			{`\}`, Punctuation, Pop(1)},
-			// Matcher definition
-			{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
-			// Any directive
+			Include("site_body"),
+			// Any other directive
			{`[^\s#]+`, Keyword, Push("directive")},
			Include("base"),
		},
@@ -179,7 +239,7 @@ func caddyfileRules() Rules {
			{`,\s*\n?`, Text, nil},
			{` `, Text, nil},
			// Site label with placeholder
-			{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil},
+			Include("placeholder"),
			// Site label
			{`[^#{(\s,]+`, GenericHeading, nil},
			// Comment after non-block label (hack because comments end in \n)
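To see what these Caddyfile rule changes act on, here is a hedged sketch that feeds a small (illustrative) Caddyfile through the registered lexer and dumps the token stream:

```go
package main

import (
	"fmt"
	"log"

	"github.com/alecthomas/chroma/v2/lexers"
)

func main() {
	// A tiny Caddyfile exercising a named matcher, a nested directive and a placeholder.
	src := "example.com {\n\t@static path /assets/*\n\thandle @static {\n\t\tfile_server\n\t}\n\trespond \"hello {host}\"\n}\n"

	lexer := lexers.Get("Caddyfile")
	if lexer == nil {
		log.Fatal("Caddyfile lexer not registered")
	}
	iterator, err := lexer.Tokenise(nil, src)
	if err != nil {
		log.Fatal(err)
	}
	for _, tok := range iterator.Tokens() {
		fmt.Printf("%-25s %q\n", tok.Type, tok.Value)
	}
}
```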


@@ -19,10 +19,10 @@
<rule pattern="\\\n">
  <token type="Text"/>
</rule>
-<rule pattern="///[^\n\r]+">
+<rule pattern="///[^\n\r]*">
  <token type="CommentSpecial"/>
</rule>
-<rule pattern="//[^\n\r]+">
+<rule pattern="//[^\n\r]*">
  <token type="CommentSingle"/>
</rule>
<rule pattern="/[*].*?[*]/">


@@ -49,7 +49,7 @@
<rule pattern="(true|false|null|_)\b">
  <token type="KeywordConstant"/>
</rule>
-<rule pattern="[_a-zA-Z]\w*">
+<rule pattern="#?[_a-zA-Z$]\w*">
  <token type="Name"/>
</rule>
</state>


@@ -0,0 +1,17 @@
<lexer>
<config>
<name>Desktop file</name>
<alias>desktop</alias>
<alias>desktop_entry</alias>
<filename>*.desktop</filename>
<mime_type>application/x-desktop</mime_type>
</config>
<rules>
<state name="root">
<rule pattern="^[ \t]*\n"><token type="TextWhitespace"/></rule>
<rule pattern="^(#.*)(\n)"><bygroups><token type="CommentSingle"/><token type="TextWhitespace"/></bygroups></rule>
<rule pattern="(\[[^\]\n]+\])(\n)"><bygroups><token type="Keyword"/><token type="TextWhitespace"/></bygroups></rule>
<rule pattern="([-A-Za-z0-9]+)(\[[^\] \t=]+\])?([ \t]*)(=)([ \t]*)([^\n]*)([ \t\n]*\n)"><bygroups><token type="NameAttribute"/><token type="NameNamespace"/><token type="TextWhitespace"/><token type="Operator"/><token type="TextWhitespace"/><token type="LiteralString"/><token type="TextWhitespace"/></bygroups></rule>
</state>
</rules>
</lexer>


@@ -86,7 +86,7 @@
<rule pattern="\\(?![:!#$%&amp;*+.\\/&lt;=&gt;?@^|~-]+)">
  <token type="NameFunction"/>
</rule>
-<rule pattern="(&lt;-|::|-&gt;|=&gt;|=)(?![:!#$%&amp;*+.\\/&lt;=&gt;?@^|~-]+)">
+<rule pattern="(&lt;-|::|-&gt;|=&gt;|=|'([:!#$%&amp;*+.\\/&lt;=&gt;?@^|~-]+))(?![:!#$%&amp;*+.\\/&lt;=&gt;?@^|~-]+)">
  <token type="OperatorWord"/>
</rule>
<rule pattern=":[:!#$%&amp;*+.\\/&lt;=&gt;?@^|~-]*">


@@ -3,6 +3,7 @@
<name>JSON</name>
<alias>json</alias>
<filename>*.json</filename>
+<filename>*.avsc</filename>
<mime_type>application/json</mime_type>
<dot_all>true</dot_all>
<not_multiline>true</not_multiline>


@@ -0,0 +1,155 @@
<lexer>
<config>
<name>Materialize SQL dialect</name>
<alias>materialize</alias>
<alias>mzsql</alias>
<mime_type>text/x-materializesql</mime_type>
<case_insensitive>true</case_insensitive>
<not_multiline>true</not_multiline>
</config>
<rules>
<state name="root">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="--.*\n?">
<token type="CommentSingle"/>
</rule>
<rule pattern="/\*">
<token type="CommentMultiline"/>
<push state="multiline-comments"/>
</rule>
<rule pattern="(bigint|bigserial|bit|bit\s+varying|bool|boolean|box|bytea|char|character|character\s+varying|cidr|circle|date|decimal|double\s+precision|float4|float8|inet|int|int2|int4|int8|integer|interval|json|jsonb|line|lseg|macaddr|money|numeric|path|pg_lsn|point|polygon|real|serial|serial2|serial4|serial8|smallint|smallserial|text|time|timestamp|timestamptz|timetz|tsquery|tsvector|txid_snapshot|uuid|varbit|varchar|with\s+time\s+zone|without\s+time\s+zone|xml|anyarray|anyelement|anyenum|anynonarray|anyrange|cstring|fdw_handler|internal|language_handler|opaque|record|void)\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="(?s)(DO)(\s+)(?:(LANGUAGE)?(\s+)(&#39;?)(\w+)?(&#39;?)(\s+))?(\$)([^$]*)(\$)(.*?)(\$)(\10)(\$)">
<usingbygroup>
<sublexer_name_group>6</sublexer_name_group>
<code_group>12</code_group>
<emitters>
<token type="Keyword"/>
<token type="Text"/>
<token type="Keyword"/>
<token type="Text"/>
<token type="LiteralStringSingle"/>
<token type="LiteralStringSingle"/>
<token type="LiteralStringSingle"/>
<token type="Text"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
</emitters>
</usingbygroup>
</rule>
<rule pattern="(ACCESS|ACKS|ADD|ADDRESSES|AGGREGATE|ALL|ALTER|AND|ANY|ARN|ARRANGEMENT|ARRAY|AS|ASC|ASSERT|AT|AUCTION|AUTHORITY|AVAILABILITY|AVRO|AWS|BEGIN|BETWEEN|BIGINT|BILLED|BODY|BOOLEAN|BOTH|BPCHAR|BROKEN|BROKER|BROKERS|BY|BYTES|CARDINALITY|CASCADE|CASE|CAST|CERTIFICATE|CHAIN|CHAR|CHARACTER|CHARACTERISTICS|CHECK|CLIENT|CLOSE|CLUSTER|CLUSTERS|COALESCE|COLLATE|COLUMN|COLUMNS|COMMENT|COMMIT|COMMITTED|COMPACTION|COMPRESSION|COMPUTE|COMPUTECTL|CONFLUENT|CONNECTION|CONNECTIONS|CONSTRAINT|COPY|COUNT|COUNTER|CREATE|CREATECLUSTER|CREATEDB|CREATEROLE|CROSS|CSV|CURRENT|CURSOR|DATABASE|DATABASES|DATUMS|DAY|DAYS|DEALLOCATE|DEBEZIUM|DEBUG|DEBUGGING|DEC|DECIMAL|DECLARE|DECORRELATED|DEFAULT|DEFAULTS|DELETE|DELIMITED|DELIMITER|DESC|DETAILS|DISCARD|DISK|DISTINCT|DOC|DOT|DOUBLE|DROP|EFFORT|ELEMENT|ELSE|ENABLE|END|ENDPOINT|ENFORCED|ENVELOPE|ERROR|ESCAPE|EXCEPT|EXECUTE|EXISTS|EXPECTED|EXPLAIN|EXPOSE|EXTRACT|FACTOR|FALSE|FETCH|FIELDS|FILTER|FIRST|FLOAT|FOLLOWING|FOR|FOREIGN|FORMAT|FORWARD|FROM|FULL|FULLNAME|FUNCTION|GENERATOR|GRANT|GREATEST|GROUP|GROUPS|HAVING|HEADER|HEADERS|HOLD|HOST|HOUR|HOURS|ID|IDEMPOTENCE|IDLE|IF|IGNORE|ILIKE|IN|INCLUDE|INDEX|INDEXES|INFO|INHERIT|INLINE|INNER|INPUT|INSERT|INSPECT|INT|INTEGER|INTERNAL|INTERSECT|INTERVAL|INTO|INTROSPECTION|IS|ISNULL|ISOLATION|JOIN|JSON|KAFKA|KEY|KEYS|LAST|LATERAL|LATEST|LEADING|LEAST|LEFT|LEVEL|LIKE|LIMIT|LIST|LOAD|LOCAL|LOG|LOGICAL|LOGIN|MANAGED|MAP|MARKETING|MATERIALIZE|MATERIALIZED|MAX|MECHANISMS|MEMBERSHIP|MERGE|MESSAGE|METADATA|MINUTE|MINUTES|MODE|MONTH|MONTHS|MS|MUTUALLY|NAME|NAMES|NATURAL|NEXT|NO|NOCREATECLUSTER|NOCREATEDB|NOCREATEROLE|NOINHERIT|NOLOGIN|NONE|NOSUPERUSER|NOT|NOTICE|NULL|NULLIF|NULLS|OBJECTS|OF|OFFSET|ON|ONLY|OPERATOR|OPTIMIZED|OPTIMIZER|OPTIONS|OR|ORDER|ORDINALITY|OUTER|OVER|OWNED|OWNER|PARTITION|PASSWORD|PHYSICAL|PLAN|PLANS|PORT|POSITION|POSTGRES|PRECEDING|PRECISION|PREFIX|PREPARE|PRIMARY|PRIVATELINK|PRIVILEGES|PROGRESS|PROTOBUF|PROTOCOL|PUBLICATION|QUERY|QUOTE|RAISE|RANGE|RAW|READ|REAL|REASSIGN|RECURSION|RECURSIVE|REFERENCES|REFRESH|REGEX|REGION|REGISTRY|RENAME|REPEATABLE|REPLACE|REPLICA|REPLICAS|REPLICATION|RESET|RESPECT|RESTRICT|RETENTION|RETURN|RETURNING|REVOKE|RIGHT|ROLE|ROLES|ROLLBACK|ROTATE|ROW|ROWS|SASL|SCALE|SCHEMA|SCHEMAS|SCRIPT|SECOND|SECONDS|SECRET|SECRETS|SECURITY|SEED|SELECT|SEQUENCES|SERIALIZABLE|SERVICE|SESSION|SET|SHARD|SHOW|SINK|SINKS|SIZE|SMALLINT|SNAPSHOT|SOME|SOURCE|SOURCES|SSH|SSL|START|STDIN|STDOUT|STORAGE|STORAGECTL|STRATEGY|STRICT|STRING|SUBSCRIBE|SUBSOURCE|SUBSOURCES|SUBSTRING|SUPERUSER|SWAP|SYSTEM|TABLE|TABLES|TAIL|TEMP|TEMPORARY|TEST|TEXT|THEN|TICK|TIES|TIME|TIMELINE|TIMEOUT|TIMESTAMP|TIMESTAMPTZ|TO|TOKEN|TOPIC|TPCH|TRACE|TRAILING|TRANSACTION|TRIM|TRUE|TUNNEL|TYPE|TYPES|UNBOUNDED|UNCOMMITTED|UNION|UNIQUE|UNKNOWN|UP|UPDATE|UPSERT|URL|USAGE|USER|USERNAME|USERS|USING|VALIDATE|VALUE|VALUES|VARCHAR|VARYING|VIEW|VIEWS|WARNING|WEBHOOK|WHEN|WHERE|WINDOW|WIRE|WITH|WITHIN|WITHOUT|WORK|WORKERS|WRITE|YEAR|YEARS|ZONE|ZONES)\b">
<token type="Keyword"/>
</rule>
<rule pattern="[+*/&lt;&gt;=~!@#%^&amp;|`?-]+">
<token type="Operator"/>
</rule>
<rule pattern="::">
<token type="Operator"/>
</rule>
<rule pattern="\$\d+">
<token type="NameVariable"/>
</rule>
<rule pattern="([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="[0-9]+">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="((?:E|U&amp;)?)(&#39;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralStringSingle"/>
</bygroups>
<push state="string"/>
</rule>
<rule pattern="((?:U&amp;)?)(&#34;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralStringName"/>
</bygroups>
<push state="quoted-ident"/>
</rule>
<rule pattern="(?s)(\$)([^$]*)(\$)(.*?)(\$)(\2)(\$)(\s+)(LANGUAGE)?(\s+)(&#39;?)(\w+)?(&#39;?)">
<usingbygroup>
<sublexer_name_group>12</sublexer_name_group>
<code_group>4</code_group>
<emitters>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="Text"/>
<token type="Keyword"/>
<token type="Text"/>
<token type="LiteralStringSingle"/>
<token type="LiteralStringSingle"/>
<token type="LiteralStringSingle"/>
</emitters>
</usingbygroup>
</rule>
<rule pattern="(?s)(\$)([^$]*)(\$)(.*?)(\$)(\2)(\$)">
<token type="LiteralStringHeredoc"/>
</rule>
<rule pattern="[a-z_]\w*">
<token type="Name"/>
</rule>
<rule pattern=":([&#39;&#34;]?)[a-z]\w*\b\1">
<token type="NameVariable"/>
</rule>
<rule pattern="[;:()\[\]{},.]">
<token type="Punctuation"/>
</rule>
</state>
<state name="multiline-comments">
<rule pattern="/\*">
<token type="CommentMultiline"/>
<push state="multiline-comments"/>
</rule>
<rule pattern="\*/">
<token type="CommentMultiline"/>
<pop depth="1"/>
</rule>
<rule pattern="[^/*]+">
<token type="CommentMultiline"/>
</rule>
<rule pattern="[/*]">
<token type="CommentMultiline"/>
</rule>
</state>
<state name="string">
<rule pattern="[^&#39;]+">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="&#39;&#39;">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="&#39;">
<token type="LiteralStringSingle"/>
<pop depth="1"/>
</rule>
</state>
<state name="quoted-ident">
<rule pattern="[^&#34;]+">
<token type="LiteralStringName"/>
</rule>
<rule pattern="&#34;&#34;">
<token type="LiteralStringName"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralStringName"/>
<pop depth="1"/>
</rule>
</state>
</rules>
</lexer>


@@ -0,0 +1,123 @@
<lexer>
<config>
<name>NDISASM</name>
<alias>ndisasm</alias>
<mime_type>text/x-disasm</mime_type>
<case_insensitive>true</case_insensitive>
<priority>0.5</priority> <!-- Lower than NASM -->
</config>
<rules>
<state name="root">
<rule pattern="^[0-9A-Za-z]+">
<token type="CommentSpecial"/>
<push state="offset"/>
</rule>
</state>
<state name="offset">
<rule pattern="[0-9A-Za-z]+">
<token type="CommentSpecial"/>
<push state="assembly"/>
</rule>
<rule>
<include state="whitespace"/>
</rule>
</state>
<state name="punctuation">
<rule pattern="[,():\[\]]+">
<token type="Punctuation"/>
</rule>
<rule pattern="[&amp;|^&lt;&gt;+*/%~-]+">
<token type="Operator"/>
</rule>
<rule pattern="[$]+">
<token type="KeywordConstant"/>
</rule>
<rule pattern="seg|wrt|strict">
<token type="OperatorWord"/>
</rule>
<rule pattern="byte|[dq]?word">
<token type="KeywordType"/>
</rule>
</state>
<state name="assembly">
<rule>
<include state="whitespace"/>
</rule>
<rule pattern="[a-z$._?][\w$.?#@~]*:">
<token type="NameLabel"/>
</rule>
<rule pattern="([a-z$._?][\w$.?#@~]*)(\s+)(equ)">
<bygroups>
<token type="NameConstant"/>
<token type="KeywordDeclaration"/>
<token type="KeywordDeclaration"/>
</bygroups>
<push state="instruction-args"/>
</rule>
<rule pattern="BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|ORG|ALIGN|STRUC|ENDSTRUC|COMMON|CPU|GROUP|UPPERCASE|IMPORT|EXPORT|LIBRARY|MODULE">
<token type="Keyword"/>
<push state="instruction-args"/>
</rule>
<rule pattern="(?:res|d)[bwdqt]|times">
<token type="KeywordDeclaration"/>
<push state="instruction-args"/>
</rule>
<rule pattern="[a-z$._?][\w$.?#@~]*">
<token type="NameFunction"/>
<push state="instruction-args"/>
</rule>
<rule pattern="[\r\n]+">
<token type="Text"/>
<pop depth="2"/>
</rule>
</state>
<state name="instruction-args">
<rule pattern="&#34;(\\&#34;|[^&#34;\n])*&#34;|&#39;(\\&#39;|[^&#39;\n])*&#39;|`(\\`|[^`\n])*`">
<token type="LiteralString"/>
</rule>
<rule pattern="(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="[0-7]+q">
<token type="LiteralNumberOct"/>
</rule>
<rule pattern="[01]+b">
<token type="LiteralNumberBin"/>
</rule>
<rule pattern="[0-9]+\.e?[0-9]+">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="[0-9]+">
<token type="LiteralNumberInteger"/>
</rule>
<rule>
<include state="punctuation"/>
</rule>
<rule pattern="r[0-9][0-5]?[bwd]|[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]">
<token type="NameBuiltin"/>
</rule>
<rule pattern="[a-z$._?][\w$.?#@~]*">
<token type="NameVariable"/>
</rule>
<rule pattern="[\r\n]+">
<token type="Text"/>
<pop depth="3"/>
</rule>
<rule>
<include state="whitespace"/>
</rule>
</state>
<state name="whitespace">
<rule pattern="\n">
<token type="Text"/>
<pop depth="2"/>
</rule>
<rule pattern="[ \t]+">
<token type="Text"/>
</rule>
<rule pattern=";.*">
<token type="CommentSingle"/>
</rule>
</state>
</rules>
</lexer>


@@ -0,0 +1,119 @@
<lexer>
<config>
<name>Promela</name>
<alias>promela</alias>
<filename>*.pml</filename>
<filename>*.prom</filename>
<filename>*.prm</filename>
<filename>*.promela</filename>
<filename>*.pr</filename>
<filename>*.pm</filename>
<mime_type>text/x-promela</mime_type>
</config>
<rules>
<state name="statements">
<rule pattern="(\[\]|&lt;&gt;|/\\|\\/)|(U|W|V)\b"><token type="Operator"/></rule>
<rule pattern="@"><token type="Punctuation"/></rule>
<rule pattern="(\.)([a-zA-Z_]\w*)"><bygroups><token type="Operator"/><token type="NameAttribute"/></bygroups></rule>
<rule><include state="keywords"/></rule>
<rule><include state="types"/></rule>
<rule pattern="([LuU]|u8)?(&quot;)"><bygroups><token type="LiteralStringAffix"/><token type="LiteralString"/></bygroups><push state="string"/></rule>
<rule pattern="([LuU]|u8)?(&#x27;)(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\&#x27;\n])(&#x27;)"><bygroups><token type="LiteralStringAffix"/><token type="LiteralStringChar"/><token type="LiteralStringChar"/><token type="LiteralStringChar"/></bygroups></rule>
<rule pattern="0[xX]([0-9a-fA-F](\&#x27;?[0-9a-fA-F])*\.[0-9a-fA-F](\&#x27;?[0-9a-fA-F])*|\.[0-9a-fA-F](\&#x27;?[0-9a-fA-F])*|[0-9a-fA-F](\&#x27;?[0-9a-fA-F])*)[pP][+-]?[0-9a-fA-F](\&#x27;?[0-9a-fA-F])*[lL]?"><token type="LiteralNumberFloat"/></rule>
<rule pattern="(-)?(\d(\&#x27;?\d)*\.\d(\&#x27;?\d)*|\.\d(\&#x27;?\d)*|\d(\&#x27;?\d)*)[eE][+-]?\d(\&#x27;?\d)*[fFlL]?"><token type="LiteralNumberFloat"/></rule>
<rule pattern="(-)?((\d(\&#x27;?\d)*\.(\d(\&#x27;?\d)*)?|\.\d(\&#x27;?\d)*)[fFlL]?)|(\d(\&#x27;?\d)*[fFlL])"><token type="LiteralNumberFloat"/></rule>
<rule pattern="(-)?0[xX][0-9a-fA-F](\&#x27;?[0-9a-fA-F])*(([uU][lL]{0,2})|[lL]{1,2}[uU]?)?"><token type="LiteralNumberHex"/></rule>
<rule pattern="(-)?0[bB][01](\&#x27;?[01])*(([uU][lL]{0,2})|[lL]{1,2}[uU]?)?"><token type="LiteralNumberBin"/></rule>
<rule pattern="(-)?0(\&#x27;?[0-7])+(([uU][lL]{0,2})|[lL]{1,2}[uU]?)?"><token type="LiteralNumberOct"/></rule>
<rule pattern="(-)?\d(\&#x27;?\d)*(([uU][lL]{0,2})|[lL]{1,2}[uU]?)?"><token type="LiteralNumberInteger"/></rule>
<rule pattern="[~!%^&amp;*+=|?:&lt;&gt;/-]"><token type="Operator"/></rule>
<rule pattern="[()\[\],.]"><token type="Punctuation"/></rule>
<rule pattern="(true|false|NULL)\b"><token type="NameBuiltin"/></rule>
<rule pattern="(?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})+"><token type="Name"/></rule>
</state>
<state name="types">
<rule pattern="(bit|bool|byte|pid|short|int|unsigned)\b"><token type="KeywordType"/></rule>
</state>
<state name="keywords">
<rule pattern="(atomic|break|d_step|do|od|for|in|goto|if|fi|unless)\b"><token type="Keyword"/></rule>
<rule pattern="(assert|get_priority|printf|printm|set_priority)\b"><token type="NameFunction"/></rule>
<rule pattern="(c_code|c_decl|c_expr|c_state|c_track)\b"><token type="Keyword"/></rule>
<rule pattern="(_|_last|_nr_pr|_pid|_priority|else|np_|STDIN)\b"><token type="NameBuiltin"/></rule>
<rule pattern="(empty|enabled|eval|full|len|nempty|nfull|pc_value)\b"><token type="NameFunction"/></rule>
<rule pattern="run\b"><token type="OperatorWord"/></rule>
<rule pattern="(active|chan|D_proctype|hidden|init|local|mtype|never|notrace|proctype|show|trace|typedef|xr|xs)\b"><token type="KeywordDeclaration"/></rule>
<rule pattern="(priority|provided)\b"><token type="Keyword"/></rule>
<rule pattern="(inline|ltl|select)\b"><token type="KeywordDeclaration"/></rule>
<rule pattern="skip\b"><token type="Keyword"/></rule>
</state>
<state name="whitespace">
<rule pattern="^#if\s+0"><token type="CommentPreproc"/><push state="if0"/></rule>
<rule pattern="^#"><token type="CommentPreproc"/><push state="macro"/></rule>
<rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)"><bygroups><usingself state="root"/><token type="CommentPreproc"/></bygroups><push state="if0"/></rule>
<rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#)"><bygroups><usingself state="root"/><token type="CommentPreproc"/></bygroups><push state="macro"/></rule>
<rule pattern="(^[ \t]*)(?!(?:public|private|protected|default)\b)((?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})+)(\s*)(:)(?!:)"><bygroups><token type="TextWhitespace"/><token type="NameLabel"/><token type="TextWhitespace"/><token type="Punctuation"/></bygroups></rule>
<rule pattern="\n"><token type="TextWhitespace"/></rule>
<rule pattern="[^\S\n]+"><token type="TextWhitespace"/></rule>
<rule pattern="\\\n"><token type="Text"/></rule>
<rule pattern="//(?:.|(?&lt;=\\)\n)*\n"><token type="CommentSingle"/></rule>
<rule pattern="/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/"><token type="CommentMultiline"/></rule>
<rule pattern="/(\\\n)?[*][\w\W]*"><token type="CommentMultiline"/></rule>
</state>
<state name="root">
<rule><include state="whitespace"/></rule>
<rule><include state="keywords"/></rule>
<rule pattern="((?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|::)+(?:[&amp;*\s])+)(\s*(?:(?:(?://(?:.|(?&lt;=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)((?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|::)+)(\s*(?:(?:(?://(?:.|(?&lt;=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)(\([^;&quot;\&#x27;)]*?\))(\s*(?:(?:(?://(?:.|(?&lt;=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)([^;{/&quot;\&#x27;]*)(\{)"><bygroups><usingself state="root"/><usingself state="whitespace"/><token type="NameFunction"/><usingself state="whitespace"/><usingself state="root"/><usingself state="whitespace"/><usingself state="root"/><token type="Punctuation"/></bygroups><push state="function"/></rule>
<rule pattern="((?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|::)+(?:[&amp;*\s])+)(\s*(?:(?:(?://(?:.|(?&lt;=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)((?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|::)+)(\s*(?:(?:(?://(?:.|(?&lt;=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)(\([^;&quot;\&#x27;)]*?\))(\s*(?:(?:(?://(?:.|(?&lt;=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)([^;/&quot;\&#x27;]*)(;)"><bygroups><usingself state="root"/><usingself state="whitespace"/><token type="NameFunction"/><usingself state="whitespace"/><usingself state="root"/><usingself state="whitespace"/><usingself state="root"/><token type="Punctuation"/></bygroups></rule>
<rule><include state="types"/></rule>
<rule><push state="statement"/></rule>
</state>
<state name="statement">
<rule><include state="whitespace"/></rule>
<rule><include state="statements"/></rule>
<rule pattern="\}"><token type="Punctuation"/></rule>
<rule pattern="[{;]"><token type="Punctuation"/><pop depth="1"/></rule>
</state>
<state name="function">
<rule><include state="whitespace"/></rule>
<rule><include state="statements"/></rule>
<rule pattern=";"><token type="Punctuation"/></rule>
<rule pattern="\{"><token type="Punctuation"/><push/></rule>
<rule pattern="\}"><token type="Punctuation"/><pop depth="1"/></rule>
</state>
<state name="string">
<rule pattern="&quot;"><token type="LiteralString"/><pop depth="1"/></rule>
<rule pattern="\\([\\abfnrtv&quot;\&#x27;]|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})"><token type="LiteralStringEscape"/></rule>
<rule pattern="[^\\&quot;\n]+"><token type="LiteralString"/></rule>
<rule pattern="\\\n"><token type="LiteralString"/></rule>
<rule pattern="\\"><token type="LiteralString"/></rule>
</state>
<state name="macro">
<rule pattern="(\s*(?:/[*].*?[*]/\s*)?)(include)(\s*(?:/[*].*?[*]/\s*)?)(&quot;[^&quot;]+&quot;)([^\n]*)"><bygroups><usingself state="root"/><token type="CommentPreproc"/><usingself state="root"/><token type="CommentPreprocFile"/><token type="CommentSingle"/></bygroups></rule>
<rule pattern="(\s*(?:/[*].*?[*]/\s*)?)(include)(\s*(?:/[*].*?[*]/\s*)?)(&lt;[^&gt;]+&gt;)([^\n]*)"><bygroups><usingself state="root"/><token type="CommentPreproc"/><usingself state="root"/><token type="CommentPreprocFile"/><token type="CommentSingle"/></bygroups></rule>
<rule pattern="[^/\n]+"><token type="CommentPreproc"/></rule>
<rule pattern="/[*](.|\n)*?[*]/"><token type="CommentMultiline"/></rule>
<rule pattern="//.*?\n"><token type="CommentSingle"/><pop depth="1"/></rule>
<rule pattern="/"><token type="CommentPreproc"/></rule>
<rule pattern="(?&lt;=\\)\n"><token type="CommentPreproc"/></rule>
<rule pattern="\n"><token type="CommentPreproc"/><pop depth="1"/></rule>
</state>
<state name="if0">
<rule pattern="^\s*#if.*?(?&lt;!\\)\n"><token type="CommentPreproc"/><push/></rule>
<rule pattern="^\s*#el(?:se|if).*\n"><token type="CommentPreproc"/><pop depth="1"/></rule>
<rule pattern="^\s*#endif.*?(?&lt;!\\)\n"><token type="CommentPreproc"/><pop depth="1"/></rule>
<rule pattern=".*?\n"><token type="Comment"/></rule>
</state>
<state name="classname">
<rule pattern="(?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})+"><token type="NameClass"/><pop depth="1"/></rule>
<rule pattern="\s*(?=&gt;)"><token type="Text"/><pop depth="1"/></rule>
<rule><pop depth="1"/></rule>
</state>
<state name="case-value">
<rule pattern="(?&lt;!:)(:)(?!:)"><token type="Punctuation"/><pop depth="1"/></rule>
<rule pattern="(?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})+"><token type="NameConstant"/></rule>
<rule><include state="whitespace"/></rule>
<rule><include state="statements"/></rule>
</state>
</rules>
</lexer>


@@ -0,0 +1,94 @@
<lexer>
<config>
<name>Rego</name>
<alias>rego</alias>
<filename>*.rego</filename>
</config>
<rules>
<state name="root">
<rule pattern="(package|import|as|not|with|default|else|some|in|if|contains)\b">
<token type="KeywordDeclaration"/>
</rule>
<!-- importing keywords should then show up as keywords -->
<rule pattern="(import)( future.keywords.)(\w+)">
<bygroups>
<token type="KeywordDeclaration"/>
<token type="Text"/>
<token type="KeywordDeclaration"/>
</bygroups>
</rule>
<rule pattern="#[^\r\n]*">
<token type="Comment"/>
</rule>
<rule pattern="(FIXME|TODO|XXX)\b( .*)$">
<bygroups>
<token type="Error"/>
<token type="CommentSpecial"/>
</bygroups>
</rule>
<rule pattern="(true|false|null)\b">
<token type="KeywordConstant"/>
</rule>
<rule pattern="\d+i">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\d+\.\d*([Ee][-+]\d+)?i">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\.\d+([Ee][-+]\d+)?i">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\d+[Ee][-+]\d+i">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="\.\d+([eE][+\-]?\d+)?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="(0|[1-9][0-9]*)">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="&#34;&#34;&#34;.*?&#34;&#34;&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="&#34;(\\\\|\\&#34;|[^&#34;])*&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="\$/((?!/\$).)*/\$">
<token type="LiteralString"/>
</rule>
<rule pattern="/(\\\\|\\&#34;|[^/])*/">
<token type="LiteralString"/>
</rule>
<rule pattern="^(\w+)">
<token type="Name"/>
</rule>
<rule pattern="[a-z_-][\w-]*(?=\()">
<token type="NameFunction"/>
</rule>
<rule pattern="[\r\n\s]+">
<token type="TextWhitespace"/>
</rule>
<rule pattern="(package|import)(\s+)">
<bygroups>
<token type="KeywordDeclaration"/>
<token type="Text"/>
</bygroups>
</rule>
<rule pattern="[=&lt;&gt;!+-/*&amp;|]">
<token type="Operator"/>
</rule>
<rule pattern=":=">
<token type="Operator"/>
</rule>
<rule pattern="[[\]{}():;]+">
<token type="Punctuation"/>
</rule>
<rule pattern="[$a-zA-Z_]\w*">
<token type="NameOther"/>
</rule>
</state>
</rules>
</lexer>
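
The new Rego lexer is registered under the `rego` alias and the `*.rego` filename glob, so it can be used like any other built-in lexer. A minimal sketch using the `quick` helper (the formatter and style names are arbitrary choices for illustration):

```go
package main

import (
	"log"
	"os"

	"github.com/alecthomas/chroma/v2/quick"
)

func main() {
	src := `package authz

import future.keywords.if

default allow := false

allow if input.user == "admin"
`
	// "rego" resolves to the lexer defined above.
	if err := quick.Highlight(os.Stdout, src, "rego", "terminal256", "github-dark"); err != nil {
		log.Fatal(err)
	}
}
```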

View File

@ -0,0 +1,58 @@
<lexer>
<config>
<name>RPMSpec</name>
<alias>spec</alias>
<filename>*.spec</filename>
<mime_type>text/x-rpm-spec</mime_type>
</config>
<rules>
<state name="root">
<rule pattern="#.*$"><token type="Comment"/></rule>
<rule><include state="basic"/></rule>
</state>
<state name="description">
<rule pattern="^(%(?:package|prep|build|install|clean|check|pre[a-z]*|post[a-z]*|trigger[a-z]*|files))(.*)$"><bygroups><token type="NameDecorator"/><token type="Text"/></bygroups><pop depth="1"/></rule>
<rule pattern="\s+"><token type="TextWhitespace"/></rule>
<rule pattern="."><token type="Text"/></rule>
</state>
<state name="changelog">
<rule pattern="\*.*$"><token type="GenericSubheading"/></rule>
<rule pattern="^(%(?:package|prep|build|install|clean|check|pre[a-z]*|post[a-z]*|trigger[a-z]*|files))(.*)$"><bygroups><token type="NameDecorator"/><token type="Text"/></bygroups><pop depth="1"/></rule>
<rule pattern="\s+"><token type="TextWhitespace"/></rule>
<rule pattern="."><token type="Text"/></rule>
</state>
<state name="string">
<rule pattern="&quot;"><token type="LiteralStringDouble"/><pop depth="1"/></rule>
<rule pattern="\\([\\abfnrtv&quot;\&#x27;]|x[a-fA-F0-9]{2,4}|[0-7]{1,3})"><token type="LiteralStringEscape"/></rule>
<rule><include state="interpol"/></rule>
<rule pattern="."><token type="LiteralStringDouble"/></rule>
</state>
<state name="basic">
<rule><include state="macro"/></rule>
<rule pattern="(?i)^(Name|Version|Release|Epoch|Summary|Group|License|Packager|Vendor|Icon|URL|Distribution|Prefix|Patch[0-9]*|Source[0-9]*|Requires\(?[a-z]*\)?|[a-z]+Req|Obsoletes|Suggests|Provides|Conflicts|Build[a-z]+|[a-z]+Arch|Auto[a-z]+)(:)(.*)$"><bygroups><token type="GenericHeading"/><token type="Punctuation"/><usingself state="root"/></bygroups></rule>
<rule pattern="^%description"><token type="NameDecorator"/><push state="description"/></rule>
<rule pattern="^%changelog"><token type="NameDecorator"/><push state="changelog"/></rule>
<rule pattern="^(%(?:package|prep|build|install|clean|check|pre[a-z]*|post[a-z]*|trigger[a-z]*|files))(.*)$"><bygroups><token type="NameDecorator"/><token type="Text"/></bygroups></rule>
<rule pattern="%(attr|defattr|dir|doc(?:dir)?|setup|config(?:ure)?|make(?:install)|ghost|patch[0-9]+|find_lang|exclude|verify)"><token type="Keyword"/></rule>
<rule><include state="interpol"/></rule>
<rule pattern="&#x27;.*?&#x27;"><token type="LiteralStringSingle"/></rule>
<rule pattern="&quot;"><token type="LiteralStringDouble"/><push state="string"/></rule>
<rule pattern="\s+"><token type="TextWhitespace"/></rule>
<rule pattern="."><token type="Text"/></rule>
</state>
<state name="macro">
<rule pattern="%define.*$"><token type="CommentPreproc"/></rule>
<rule pattern="%\{\!\?.*%define.*\}"><token type="CommentPreproc"/></rule>
<rule pattern="(%(?:if(?:n?arch)?|else(?:if)?|endif))(.*)$"><bygroups><token type="CommentPreproc"/><token type="Text"/></bygroups></rule>
</state>
<state name="interpol">
<rule pattern="%\{?__[a-z_]+\}?"><token type="NameFunction"/></rule>
<rule pattern="%\{?_([a-z_]+dir|[a-z_]+path|prefix)\}?"><token type="KeywordPseudo"/></rule>
<rule pattern="%\{\?\w+\}"><token type="NameVariable"/></rule>
<rule pattern="\$\{?RPM_[A-Z0-9_]+\}?"><token type="NameVariableGlobal"/></rule>
<rule pattern="%\{[a-zA-Z]\w+\}"><token type="KeywordConstant"/></rule>
</state>
</rules>
</lexer>
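
Because the RPMSpec lexer declares a `*.spec` filename glob and a `text/x-rpm-spec` MIME type, it can also be selected by filename rather than by alias. A small sketch (the spec snippet is illustrative):

```go
package main

import (
	"fmt"

	"github.com/alecthomas/chroma/v2"
	"github.com/alecthomas/chroma/v2/lexers"
)

func main() {
	// Filename-based lookup matches the *.spec glob in the lexer config.
	lexer := lexers.Match("example.spec")
	if lexer == nil {
		lexer = lexers.Fallback
	}

	spec := "Name: hello\nVersion: 1.0\n%description\nA trivial package.\n"
	it, err := lexer.Tokenise(nil, spec)
	if err != nil {
		panic(err)
	}
	for token := it(); token != chroma.EOF; token = it() {
		fmt.Printf("%-20s %q\n", token.Type, token.Value)
	}
}
```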

View File

@ -55,7 +55,7 @@ func goRules() Rules {
{`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, {`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
{`(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\||<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])`, Operator, nil}, {`(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\||<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])`, Operator, nil},
{`([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(NameFunction, UsingSelf("root"), Punctuation), nil}, {`([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(NameFunction, UsingSelf("root"), Punctuation), nil},
{`[|^<>=!()\[\]{}.,;:]`, Punctuation, nil}, {`[|^<>=!()\[\]{}.,;:~]`, Punctuation, nil},
{`[^\W\d]\w*`, NameOther, nil}, {`[^\W\d]\w*`, NameOther, nil},
}, },
} }
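
The Go lexer change above adds `~` to the punctuation character class, so the tilde used in generic type constraints is tokenised as punctuation instead of falling through to later rules. A quick check, as a sketch (the sample interface is arbitrary):

```go
package main

import (
	"fmt"

	"github.com/alecthomas/chroma/v2"
	"github.com/alecthomas/chroma/v2/lexers"
)

func main() {
	lexer := lexers.Get("go")
	if lexer == nil {
		lexer = lexers.Fallback
	}

	// Each ~ in the constraint should now come out as a Punctuation token.
	it, err := lexer.Tokenise(nil, "type Number interface { ~int | ~float64 }")
	if err != nil {
		panic(err)
	}
	for token := it(); token != chroma.EOF; token = it() {
		if token.Value == "~" {
			fmt.Println("tilde token type:", token.Type)
		}
	}
}
```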

View File

@ -1,11 +1,18 @@
{ {
$schema: "https://docs.renovatebot.com/renovate-schema.json", $schema: "https://docs.renovatebot.com/renovate-schema.json",
extends: [ extends: [
"config:recommended", "config:recommended",
":semanticCommits", ":semanticCommits",
":semanticCommitTypeAll(chore)", ":semanticCommitTypeAll(chore)",
":semanticCommitScope(deps)", ":semanticCommitScope(deps)",
"group:allNonMajor", "group:allNonMajor",
"schedule:earlyMondays", // Run once a week. "schedule:earlyMondays", // Run once a week.
], ],
packageRules: [
{
matchPackageNames: ["golangci-lint"],
matchManagers: ["hermit"],
enabled: false,
},
],
} }

View File

@ -5,7 +5,7 @@
<entry type="Other" style="#c6d0f5"/> <entry type="Other" style="#c6d0f5"/>
<entry type="LineTableTD" style=""/> <entry type="LineTableTD" style=""/>
<entry type="LineTable" style=""/> <entry type="LineTable" style=""/>
<entry type="LineHighlight" style="#51576d"/> <entry type="LineHighlight" style="bg:#51576d"/>
<entry type="LineNumbersTable" style="#838ba7"/> <entry type="LineNumbersTable" style="#838ba7"/>
<entry type="LineNumbers" style="#838ba7"/> <entry type="LineNumbers" style="#838ba7"/>
<entry type="Keyword" style="#ca9ee6"/> <entry type="Keyword" style="#ca9ee6"/>

View File

@ -5,7 +5,7 @@
<entry type="Other" style="#4c4f69"/> <entry type="Other" style="#4c4f69"/>
<entry type="LineTableTD" style=""/> <entry type="LineTableTD" style=""/>
<entry type="LineTable" style=""/> <entry type="LineTable" style=""/>
<entry type="LineHighlight" style="#bcc0cc"/> <entry type="LineHighlight" style="bg:#bcc0cc"/>
<entry type="LineNumbersTable" style="#8c8fa1"/> <entry type="LineNumbersTable" style="#8c8fa1"/>
<entry type="LineNumbers" style="#8c8fa1"/> <entry type="LineNumbers" style="#8c8fa1"/>
<entry type="Keyword" style="#8839ef"/> <entry type="Keyword" style="#8839ef"/>

View File

@ -5,7 +5,7 @@
<entry type="Other" style="#cad3f5"/> <entry type="Other" style="#cad3f5"/>
<entry type="LineTableTD" style=""/> <entry type="LineTableTD" style=""/>
<entry type="LineTable" style=""/> <entry type="LineTable" style=""/>
<entry type="LineHighlight" style="#494d64"/> <entry type="LineHighlight" style="bg:#494d64"/>
<entry type="LineNumbersTable" style="#8087a2"/> <entry type="LineNumbersTable" style="#8087a2"/>
<entry type="LineNumbers" style="#8087a2"/> <entry type="LineNumbers" style="#8087a2"/>
<entry type="Keyword" style="#c6a0f6"/> <entry type="Keyword" style="#c6a0f6"/>

View File

@ -5,7 +5,7 @@
<entry type="Other" style="#cdd6f4"/> <entry type="Other" style="#cdd6f4"/>
<entry type="LineTableTD" style=""/> <entry type="LineTableTD" style=""/>
<entry type="LineTable" style=""/> <entry type="LineTable" style=""/>
<entry type="LineHighlight" style="#45475a"/> <entry type="LineHighlight" style="bg:#45475a"/>
<entry type="LineNumbersTable" style="#7f849c"/> <entry type="LineNumbersTable" style="#7f849c"/>
<entry type="LineNumbers" style="#7f849c"/> <entry type="LineNumbers" style="#7f849c"/>
<entry type="Keyword" style="#cba6f7"/> <entry type="Keyword" style="#cba6f7"/>

View File

@ -1,6 +1,6 @@
<style name="github-dark"> <style name="github-dark">
<entry type="Error" style="#f85149"/> <entry type="Error" style="#f85149"/>
<entry type="LineHighlight" style="#6e7681"/> <entry type="LineHighlight" style="bg:#6e7681"/>
<entry type="LineNumbers" style="#6e7681"/> <entry type="LineNumbers" style="#6e7681"/>
<entry type="Background" style="#e6edf3 bg:#0d1117"/> <entry type="Background" style="#e6edf3 bg:#0d1117"/>
<entry type="Keyword" style="#ff7b72"/> <entry type="Keyword" style="#ff7b72"/>

2
vendor/modules.txt vendored
View File

@ -1,4 +1,4 @@
# github.com/alecthomas/chroma/v2 v2.12.0 # github.com/alecthomas/chroma/v2 v2.13.0
## explicit; go 1.19 ## explicit; go 1.19
github.com/alecthomas/chroma/v2 github.com/alecthomas/chroma/v2
github.com/alecthomas/chroma/v2/formatters/html github.com/alecthomas/chroma/v2/formatters/html