Updated transitive dependencies

Seednode 2024-03-12 08:18:49 -05:00
parent 1b2c61176f
commit 43a8d43f0c
25 changed files with 838 additions and 164 deletions

2
go.mod

@ -3,7 +3,7 @@ module seedno.de/seednode/roulette
go 1.22
require (
github.com/alecthomas/chroma/v2 v2.12.0
github.com/alecthomas/chroma/v2 v2.13.0
github.com/julienschmidt/httprouter v1.3.0
github.com/klauspost/compress v1.17.7
github.com/spf13/cobra v1.8.0

12
go.sum

@ -1,9 +1,9 @@
github.com/alecthomas/assert/v2 v2.2.1 h1:XivOgYcduV98QCahG8T5XTezV5bylXe+lBxLG2K2ink=
github.com/alecthomas/assert/v2 v2.2.1/go.mod h1:pXcQ2Asjp247dahGEmsZ6ru0UVwnkhktn7S0bBDLxvQ=
github.com/alecthomas/chroma/v2 v2.12.0 h1:Wh8qLEgMMsN7mgyG8/qIpegky2Hvzr4By6gEF7cmWgw=
github.com/alecthomas/chroma/v2 v2.12.0/go.mod h1:4TQu7gdfuPjSh76j78ietmqh9LiurGF0EpseFXdKMBw=
github.com/alecthomas/repr v0.2.0 h1:HAzS41CIzNW5syS8Mf9UwXhNH1J9aix/BvDRf1Ml2Yk=
github.com/alecthomas/repr v0.2.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
github.com/alecthomas/assert/v2 v2.6.0 h1:o3WJwILtexrEUk3cUVal3oiQY2tfgr/FHWiz/v2n4FU=
github.com/alecthomas/assert/v2 v2.6.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
github.com/alecthomas/chroma/v2 v2.13.0 h1:VP72+99Fb2zEcYM0MeaWJmV+xQvz5v5cxRHd+ooU1lI=
github.com/alecthomas/chroma/v2 v2.13.0/go.mod h1:BUGjjsD+ndS6eX37YgTchSEG+Jg9Jv1GiZs9sqPqztk=
github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc=
github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI=
github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=


@ -11,3 +11,7 @@ insert_final_newline = true
indent_style = space
indent_size = 2
insert_final_newline = false
[*.yml]
indent_style = space
indent_size = 2


@ -49,6 +49,8 @@ linters:
- nosnakecase
- testableexamples
- musttag
- depguard
- goconst
linters-settings:
govet:


@ -8,75 +8,72 @@ highlighted HTML, ANSI-coloured text, etc.
Chroma is based heavily on [Pygments](http://pygments.org/), and includes
translators for Pygments lexers and styles.
<a id="markdown-table-of-contents" name="table-of-contents"></a>
## Table of Contents
<!-- TOC -->
1. [Table of Contents](#table-of-contents)
2. [Supported languages](#supported-languages)
3. [Try it](#try-it)
4. [Using the library](#using-the-library)
1. [Supported languages](#supported-languages)
2. [Try it](#try-it)
3. [Using the library](#using-the-library)
1. [Quick start](#quick-start)
2. [Identifying the language](#identifying-the-language)
3. [Formatting the output](#formatting-the-output)
4. [The HTML formatter](#the-html-formatter)
5. [More detail](#more-detail)
4. [More detail](#more-detail)
1. [Lexers](#lexers)
2. [Formatters](#formatters)
3. [Styles](#styles)
6. [Command-line interface](#command-line-interface)
7. [Testing lexers](#testing-lexers)
8. [What's missing compared to Pygments?](#whats-missing-compared-to-pygments)
5. [Command-line interface](#command-line-interface)
6. [Testing lexers](#testing-lexers)
7. [What's missing compared to Pygments?](#whats-missing-compared-to-pygments)
<!-- /TOC -->
<a id="markdown-supported-languages" name="supported-languages"></a>
## Supported languages
| Prefix | Language |
| :----: | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
| A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Agda, AL, Alloy, Angular2, ANTLR, ApacheConf, APL, AppleScript, ArangoDB AQL, Arduino, ArmAsm, AutoHotkey, AutoIt, Awk |
| B | Ballerina, Bash, Bash Session, Batchfile, BibTeX, Bicep, BlitzBasic, BNF, BQN, Brainfuck |
| C | C, C#, C++, Caddyfile, Caddyfile Directives, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Chapel, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython |
| D | D, Dart, Dax, Diff, Django/Jinja, dns, Docker, DTD, Dylan |
| E | EBNF, Elixir, Elm, EmacsLisp, Erlang |
| F | Factor, Fennel, Fish, Forth, Fortran, FortranFixed, FSharp |
| G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, Gherkin, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groff, Groovy |
| H | Handlebars, Hare, Haskell, Haxe, HCL, Hexdump, HLB, HLSL, HolyC, HTML, HTTP, Hy |
| I | Idris, Igor, INI, Io, ISCdhcpd |
| J | J, Java, JavaScript, JSON, Julia, Jungle |
| K | Kotlin |
| L | Lighttpd configuration file, LLVM, Lua |
| M | Makefile, Mako, markdown, Mason, Mathematica, Matlab, mcfunction, Meson, Metal, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL |
| N | NASM, Natural, Newspeak, Nginx configuration file, Nim, Nix |
| O | Objective-C, OCaml, Octave, Odin, OnesEnterprise, OpenEdge ABL, OpenSCAD, Org Mode |
| P | PacmanConf, Perl, PHP, PHTML, Pig, PkgConfig, PL/pgSQL, plaintext, Plutus Core, Pony, PostgreSQL SQL dialect, PostScript, POVRay, PowerQuery, PowerShell, Prolog, PromQL, properties, Protocol Buffer, PRQL, PSL, Puppet, Python, Python 2 |
| Q | QBasic, QML |
| R | R, Racket, Ragel, Raku, react, ReasonML, reg, reStructuredText, Rexx, Ruby, Rust |
| S | SAS, Sass, Scala, Scheme, Scilab, SCSS, Sed, Sieve, Smali, Smalltalk, Smarty, Snobol, Solidity, SourcePawn, SPARQL, SQL, SquidConf, Standard ML, stas, Stylus, Svelte, Swift, SYSTEMD, systemverilog |
| T | TableGen, Tal, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData |
| V | V, V shell, Vala, VB.net, verilog, VHDL, VHS, VimL, vue |
| W | WDTE, WebGPU Shading Language, Whiley |
| X | XML, Xorg |
| Y | YAML, YANG |
| Z | Z80 Assembly, Zed, Zig |
| Prefix | Language |
| :----: | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Agda, AL, Alloy, Angular2, ANTLR, ApacheConf, APL, AppleScript, ArangoDB AQL, Arduino, ArmAsm, AutoHotkey, AutoIt, Awk |
| B | Ballerina, Bash, Bash Session, Batchfile, BibTeX, Bicep, BlitzBasic, BNF, BQN, Brainfuck |
| C | C, C#, C++, Caddyfile, Caddyfile Directives, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Chapel, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython |
| D | D, Dart, Dax, Desktop Entry, Diff, Django/Jinja, dns, Docker, DTD, Dylan |
| E | EBNF, Elixir, Elm, EmacsLisp, Erlang |
| F | Factor, Fennel, Fish, Forth, Fortran, FortranFixed, FSharp |
| G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, Gherkin, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groff, Groovy |
| H | Handlebars, Hare, Haskell, Haxe, HCL, Hexdump, HLB, HLSL, HolyC, HTML, HTTP, Hy |
| I | Idris, Igor, INI, Io, ISCdhcpd |
| J | J, Java, JavaScript, JSON, Julia, Jungle |
| K | Kotlin |
| L | Lighttpd configuration file, LLVM, Lua |
| M | Makefile, Mako, markdown, Mason, Materialize SQL dialect, Mathematica, Matlab, mcfunction, Meson, Metal, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL |
| N | NASM, Natural, Newspeak, Nginx configuration file, Nim, Nix |
| O | Objective-C, OCaml, Octave, Odin, OnesEnterprise, OpenEdge ABL, OpenSCAD, Org Mode |
| P | PacmanConf, Perl, PHP, PHTML, Pig, PkgConfig, PL/pgSQL, plaintext, Plutus Core, Pony, PostgreSQL SQL dialect, PostScript, POVRay, PowerQuery, PowerShell, Prolog, PromQL, Promela, properties, Protocol Buffer, PRQL, PSL, Puppet, Python, Python 2 |
| Q | QBasic, QML |
| R | R, Racket, Ragel, Raku, react, ReasonML, reg, Rego, reStructuredText, Rexx, RPMSpec, Ruby, Rust |
| S | SAS, Sass, Scala, Scheme, Scilab, SCSS, Sed, Sieve, Smali, Smalltalk, Smarty, Snobol, Solidity, SourcePawn, SPARQL, SQL, SquidConf, Standard ML, stas, Stylus, Svelte, Swift, SYSTEMD, systemverilog |
| T | TableGen, Tal, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData |
| V | V, V shell, Vala, VB.net, verilog, VHDL, VHS, VimL, vue |
| W | WDTE, WebGPU Shading Language, Whiley |
| X | XML, Xorg |
| Y | YAML, YANG |
| Z | Z80 Assembly, Zed, Zig |
_I will attempt to keep this section up to date, but an authoritative list can be
displayed with `chroma --list`._
<a id="markdown-try-it" name="try-it"></a>
## Try it
Try out various languages and styles on the [Chroma Playground](https://swapoff.org/chroma/playground/).
<a id="markdown-using-the-library" name="using-the-library"></a>
## Using the library
This is version 2 of Chroma, use the import path:
```go
import "github.com/alecthomas/chroma/v2"
```
Chroma, like Pygments, has the concepts of
[lexers](https://github.com/alecthomas/chroma/tree/master/lexers),
[formatters](https://github.com/alecthomas/chroma/tree/master/formatters) and
@ -95,8 +92,6 @@ In all cases, if a lexer, formatter or style can not be determined, `nil` will
be returned. In this situation you may want to default to the `Fallback`
value in each respective package, which provides sane defaults.
<a id="markdown-quick-start" name="quick-start"></a>
### Quick start
A convenience function exists that can be used to simply format some source
@ -106,8 +101,6 @@ text, without any effort:
err := quick.Highlight(os.Stdout, someSourceCode, "go", "html", "monokai")
```
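For reference, the one-liner above drops into a complete program as follows; this is only an illustrative sketch, with the argument order (writer, source, lexer, formatter, style) taken from the call shown in the excerpt.

```go
package main

import (
	"log"
	"os"

	"github.com/alecthomas/chroma/v2/quick"
)

func main() {
	someSourceCode := "package main\n\nfunc main() {\n\tprintln(\"hello\")\n}\n"
	// Highlight Go source as HTML using the monokai style, writing to stdout.
	if err := quick.Highlight(os.Stdout, someSourceCode, "go", "html", "monokai"); err != nil {
		log.Fatal(err)
	}
}
```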
<a id="markdown-identifying-the-language" name="identifying-the-language"></a>
### Identifying the language
To highlight code, you'll first have to identify what language the code is
@ -147,8 +140,6 @@ token types into a single token:
lexer = chroma.Coalesce(lexer)
```
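Putting the identification steps together, a minimal sketch: look the lexer up by name, then by filename, then by content analysis, default to the `Fallback` plaintext lexer as the README suggests, and finally coalesce runs of identical token types. The helper names (`lexers.Get`, `lexers.Match`, `lexers.Analyse`, `lexers.Fallback`) come from the `lexers` package.

```go
package example

import (
	"github.com/alecthomas/chroma/v2"
	"github.com/alecthomas/chroma/v2/lexers"
)

// pickLexer tries lookup by name, then filename glob, then content
// analysis, and falls back to plaintext before coalescing token runs.
func pickLexer(name, filename, contents string) chroma.Lexer {
	lexer := lexers.Get(name) // e.g. "go", "caddyfile", "desktop"
	if lexer == nil {
		lexer = lexers.Match(filename) // match registered filename globs
	}
	if lexer == nil {
		lexer = lexers.Analyse(contents) // best-effort content sniffing
	}
	if lexer == nil {
		lexer = lexers.Fallback // plaintext
	}
	return chroma.Coalesce(lexer)
}
```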
<a id="markdown-formatting-the-output" name="formatting-the-output"></a>
### Formatting the output
Once a language is identified you will need to pick a formatter and a style (theme).
@ -177,8 +168,6 @@ And finally, format the tokens from the iterator:
err := formatter.Format(w, style, iterator)
```
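The formatting half, sketched end to end under the same assumptions: pick a style and a formatter from their registries, default to each package's `Fallback` when lookup returns nil, tokenise, then format.

```go
package main

import (
	"log"
	"os"

	"github.com/alecthomas/chroma/v2/formatters"
	"github.com/alecthomas/chroma/v2/lexers"
	"github.com/alecthomas/chroma/v2/styles"
)

func main() {
	contents := "package main\n\nfunc main() {}\n"

	lexer := lexers.Get("go")
	if lexer == nil {
		lexer = lexers.Fallback
	}
	style := styles.Get("monokai")
	if style == nil {
		style = styles.Fallback
	}
	formatter := formatters.Get("html")
	if formatter == nil {
		formatter = formatters.Fallback
	}

	// Tokenise the source, then hand the iterator to the formatter.
	iterator, err := lexer.Tokenise(nil, contents)
	if err != nil {
		log.Fatal(err)
	}
	if err := formatter.Format(os.Stdout, style, iterator); err != nil {
		log.Fatal(err)
	}
}
```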
<a id="markdown-the-html-formatter" name="the-html-formatter"></a>
### The HTML formatter
By default the `html` registered formatter generates standalone HTML with
@ -203,12 +192,8 @@ formatter := html.New(html.WithClasses(true))
err := formatter.WriteCSS(w, style)
```
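When classes are enabled, the stylesheet has to be emitted separately from the markup; a small sketch using the two calls shown above (option and method names as in the excerpt, the style name is just an example):

```go
package example

import (
	"os"

	"github.com/alecthomas/chroma/v2/formatters/html"
	"github.com/alecthomas/chroma/v2/styles"
)

// writeStylesheet emits class-based CSS for a style; the highlighted
// markup then references those classes instead of inline styles.
func writeStylesheet() error {
	formatter := html.New(html.WithClasses(true))
	return formatter.WriteCSS(os.Stdout, styles.Get("monokai"))
}
```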
<a id="markdown-more-detail" name="more-detail"></a>
## More detail
<a id="markdown-lexers" name="lexers"></a>
### Lexers
See the [Pygments documentation](http://pygments.org/docs/lexerdevelopment/)
@ -228,8 +213,6 @@ python3 _tools/pygments2chroma_xml.py \
See notes in [pygments-lexers.txt](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt)
for a list of lexers, and notes on some of the issues importing them.
<a id="markdown-formatters" name="formatters"></a>
### Formatters
Chroma supports HTML output, as well as terminal output in 8 colour, 256 colour, and true-colour.
@ -237,8 +220,6 @@ Chroma supports HTML output, as well as terminal output in 8 colour, 256 colour,
A `noop` formatter is included that outputs the token text only, and a `tokens`
formatter outputs raw tokens. The latter is useful for debugging lexers.
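A quick way to use the `tokens` formatter mentioned above when debugging a lexer; a hedged sketch, assuming the registry names match the excerpt (the Caddyfile input is only an example):

```go
package example

import (
	"os"

	"github.com/alecthomas/chroma/v2/formatters"
	"github.com/alecthomas/chroma/v2/lexers"
	"github.com/alecthomas/chroma/v2/styles"
)

// dumpTokens prints the raw token stream for a snippet, which is handy
// when checking what a lexer actually produces.
func dumpTokens(source string) error {
	lexer := lexers.Get("caddyfile")
	if lexer == nil {
		lexer = lexers.Fallback
	}
	iterator, err := lexer.Tokenise(nil, source)
	if err != nil {
		return err
	}
	return formatters.Get("tokens").Format(os.Stdout, styles.Fallback, iterator)
}
```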
<a id="markdown-styles" name="styles"></a>
### Styles
Chroma styles are defined in XML. The style entries use the
@ -262,8 +243,6 @@ Also, token types in a style file are hierarchical. For instance, when `CommentS
For a quick overview of the available styles and how they look, check out the [Chroma Style Gallery](https://xyproto.github.io/splash/docs/).
<a id="markdown-command-line-interface" name="command-line-interface"></a>
## Command-line interface
A command-line interface to Chroma is included.
@ -288,10 +267,6 @@ on under the hood for easy integration with [lesspipe shipping with
Debian and derivatives](https://manpages.debian.org/lesspipe#USER_DEFINED_FILTERS);
for that setup the `chroma` executable can be just symlinked to `~/.lessfilter`.
<a id="markdown-whats-missing-compared-to-pygments" name="whats-missing-compared-to-pygments"></a>
<a id="markdown-testing-lexers" name="testing-lexers"></a>
## Testing lexers
If you edit some lexers and want to try it, open a shell in `cmd/chromad` and run:


@ -5,7 +5,9 @@ import (
"html"
"io"
"sort"
"strconv"
"strings"
"sync"
"github.com/alecthomas/chroma/v2"
)
@ -132,6 +134,7 @@ func New(options ...Option) *Formatter {
baseLineNumber: 1,
preWrapper: defaultPreWrapper,
}
f.styleCache = newStyleCache(f)
for _, option := range options {
option(f)
}
@ -188,6 +191,7 @@ var (
// Formatter that generates HTML.
type Formatter struct {
styleCache *styleCache
standalone bool
prefix string
Classes bool // Exported field to detect when classes are being used
@ -220,12 +224,7 @@ func (f *Formatter) Format(w io.Writer, style *chroma.Style, iterator chroma.Ite
//
// OTOH we need to be super careful about correct escaping...
func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.Token) (err error) { // nolint: gocyclo
css := f.styleToCSS(style)
if !f.Classes {
for t, style := range css {
css[t] = compressStyle(style)
}
}
css := f.styleCache.get(style, true)
if f.standalone {
fmt.Fprint(w, "<html>\n")
if f.Classes {
@ -243,7 +242,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
wrapInTable := f.lineNumbers && f.lineNumbersInTable
lines := chroma.SplitTokensIntoLines(tokens)
lineDigits := len(fmt.Sprintf("%d", f.baseLineNumber+len(lines)-1))
lineDigits := len(strconv.Itoa(f.baseLineNumber + len(lines) - 1))
highlightIndex := 0
if wrapInTable {
@ -251,7 +250,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
fmt.Fprintf(w, "<div%s>\n", f.styleAttr(css, chroma.PreWrapper))
fmt.Fprintf(w, "<table%s><tr>", f.styleAttr(css, chroma.LineTable))
fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD))
fmt.Fprintf(w, f.preWrapper.Start(false, f.styleAttr(css, chroma.PreWrapper)))
fmt.Fprintf(w, "%s", f.preWrapper.Start(false, f.styleAttr(css, chroma.PreWrapper)))
for index := range lines {
line := f.baseLineNumber + index
highlight, next := f.shouldHighlight(highlightIndex, line)
@ -273,7 +272,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD, "width:100%"))
}
fmt.Fprintf(w, f.preWrapper.Start(true, f.styleAttr(css, chroma.PreWrapper)))
fmt.Fprintf(w, "%s", f.preWrapper.Start(true, f.styleAttr(css, chroma.PreWrapper)))
highlightIndex = 0
for index, tokens := range lines {
@ -323,7 +322,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.
fmt.Fprint(w, `</span>`) // End of Line
}
}
fmt.Fprintf(w, f.preWrapper.End(true))
fmt.Fprintf(w, "%s", f.preWrapper.End(true))
if wrapInTable {
fmt.Fprint(w, "</td></tr></table>\n")
@ -419,7 +418,7 @@ func (f *Formatter) tabWidthStyle() string {
// WriteCSS writes CSS style definitions (without any surrounding HTML).
func (f *Formatter) WriteCSS(w io.Writer, style *chroma.Style) error {
css := f.styleToCSS(style)
css := f.styleCache.get(style, false)
// Special-case background as it is mapped to the outer ".chroma" class.
if _, err := fmt.Fprintf(w, "/* %s */ .%sbg { %s }\n", chroma.Background, f.prefix, css[chroma.Background]); err != nil {
return err
@ -562,3 +561,63 @@ func compressStyle(s string) string {
}
return strings.Join(out, ";")
}
const styleCacheLimit = 32
type styleCacheEntry struct {
style *chroma.Style
compressed bool
cache map[chroma.TokenType]string
}
type styleCache struct {
mu sync.Mutex
// LRU cache of compiled (and possibly compressed) styles. This is a slice
// because the cache size is small, and a slice is sufficiently fast for
// small N.
cache []styleCacheEntry
f *Formatter
}
func newStyleCache(f *Formatter) *styleCache {
return &styleCache{f: f}
}
func (l *styleCache) get(style *chroma.Style, compress bool) map[chroma.TokenType]string {
l.mu.Lock()
defer l.mu.Unlock()
// Look for an existing entry.
for i := len(l.cache) - 1; i >= 0; i-- {
entry := l.cache[i]
if entry.style == style && entry.compressed == compress {
// Top of the cache, no need to adjust the order.
if i == len(l.cache)-1 {
return entry.cache
}
// Move this entry to the end of the LRU
copy(l.cache[i:], l.cache[i+1:])
l.cache[len(l.cache)-1] = entry
return entry.cache
}
}
// No entry, create one.
cached := l.f.styleToCSS(style)
if !l.f.Classes {
for t, style := range cached {
cached[t] = compressStyle(style)
}
}
if compress {
for t, style := range cached {
cached[t] = compressStyle(style)
}
}
// Evict the oldest entry.
if len(l.cache) >= styleCacheLimit {
l.cache = l.cache[0:copy(l.cache, l.cache[1:])]
}
l.cache = append(l.cache, styleCacheEntry{style: style, cache: cached, compressed: compress})
return cached
}
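The cache keys on the (style, compressed) pair: `writeHTML` asks for the compressed form and `WriteCSS` for the uncompressed one, and the cache lives on the individual `Formatter` created in `New`. Reusing a single formatter across many calls is therefore what lets repeated highlights skip the `styleToCSS` work; a hedged sketch of that usage pattern (style and lexer names are illustrative):

```go
package example

import (
	"io"

	"github.com/alecthomas/chroma/v2/formatters/html"
	"github.com/alecthomas/chroma/v2/lexers"
	"github.com/alecthomas/chroma/v2/styles"
)

// highlightAll reuses one Formatter for many inputs; after the first
// call per style, the styleCache returns the precompiled CSS map.
func highlightAll(w io.Writer, sources []string) error {
	formatter := html.New(html.WithLineNumbers(true))
	style := styles.Get("github-dark")
	lexer := lexers.Get("go")
	if lexer == nil {
		lexer = lexers.Fallback
	}
	for _, src := range sources {
		iterator, err := lexer.Tokenise(nil, src)
		if err != nil {
			return err
		}
		if err := formatter.Format(w, style, iterator); err != nil {
			return err
		}
	}
	return nil
}
```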


@ -4,52 +4,82 @@ import (
. "github.com/alecthomas/chroma/v2" // nolint
)
// Matcher token stub for docs, or
// Named matcher: @name, or
// Path matcher: /foo, or
// Wildcard path matcher: *
// nolint: gosec
var caddyfileMatcherTokenRegexp = `(\[\<matcher\>\]|@[^\s]+|/[^\s]+|\*)`
// Comment at start of line, or
// Comment preceded by whitespace
var caddyfileCommentRegexp = `(^|\s+)#.*\n`
// caddyfileCommon are the rules common to both of the lexer variants
func caddyfileCommonRules() Rules {
return Rules{
"site_block_common": {
Include("site_body"),
// Any other directive
{`[^\s#]+`, Keyword, Push("directive")},
Include("base"),
},
"site_body": {
// Import keyword
{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
{`\b(import|invoke)\b( [^\s#]+)`, ByGroups(Keyword, Text), Push("subdirective")},
// Matcher definition
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
// Matcher token stub for docs
{`\[\<matcher\>\]`, NameDecorator, Push("matcher")},
// These cannot have matchers but may have things that look like
// matchers in their arguments, so we just parse as a subdirective.
{`try_files`, Keyword, Push("subdirective")},
{`\b(try_files|tls|log|bind)\b`, Keyword, Push("subdirective")},
// These are special, they can nest more directives
{`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")},
// Any other directive
{`[^\s#]+`, Keyword, Push("directive")},
Include("base"),
{`\b(handle_errors|handle_path|handle_response|replace_status|handle|route)\b`, Keyword, Push("nested_directive")},
// uri directive has special syntax
{`\b(uri)\b`, Keyword, Push("uri_directive")},
},
"matcher": {
{`\{`, Punctuation, Push("block")},
// Not can be one-liner
{`not`, Keyword, Push("deep_not_matcher")},
// Heredoc for CEL expression
Include("heredoc"),
// Backtick for CEL expression
{"`", StringBacktick, Push("backticks")},
// Any other same-line matcher
{`[^\s#]+`, Keyword, Push("arguments")},
// Terminators
{`\n`, Text, Pop(1)},
{`\s*\n`, Text, Pop(1)},
{`\}`, Punctuation, Pop(1)},
Include("base"),
},
"block": {
{`\}`, Punctuation, Pop(2)},
// Using double quotes doesn't stop at spaces
{`"`, StringDouble, Push("double_quotes")},
// Using backticks doesn't stop at spaces
{"`", StringBacktick, Push("backticks")},
// Not can be one-liner
{`not`, Keyword, Push("not_matcher")},
// Any other subdirective
// Directives & matcher definitions
Include("site_body"),
// Any directive
{`[^\s#]+`, Keyword, Push("subdirective")},
Include("base"),
},
"nested_block": {
{`\}`, Punctuation, Pop(2)},
// Matcher definition
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
// Something that starts with literally < is probably a docs stub
{`\<[^#]+\>`, Keyword, Push("nested_directive")},
// Any other directive
{`[^\s#]+`, Keyword, Push("nested_directive")},
// Using double quotes doesn't stop at spaces
{`"`, StringDouble, Push("double_quotes")},
// Using backticks doesn't stop at spaces
{"`", StringBacktick, Push("backticks")},
// Not can be one-liner
{`not`, Keyword, Push("not_matcher")},
// Directives & matcher definitions
Include("site_body"),
// Any other subdirective
{`[^\s#]+`, Keyword, Push("directive")},
Include("base"),
},
"not_matcher": {
@ -66,69 +96,97 @@ func caddyfileCommonRules() Rules {
},
"directive": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("matcher_token"),
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
{caddyfileMatcherTokenRegexp, NameDecorator, Push("arguments")},
{caddyfileCommentRegexp, CommentSingle, Pop(1)},
{`\s*\n`, Text, Pop(1)},
Include("base"),
},
"nested_directive": {
{`\{(?=\s)`, Punctuation, Push("nested_block")},
Include("matcher_token"),
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
{caddyfileMatcherTokenRegexp, NameDecorator, Push("nested_arguments")},
{caddyfileCommentRegexp, CommentSingle, Pop(1)},
{`\s*\n`, Text, Pop(1)},
Include("base"),
},
"subdirective": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
{caddyfileCommentRegexp, CommentSingle, Pop(1)},
{`\s*\n`, Text, Pop(1)},
Include("base"),
},
"arguments": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_2"),
{caddyfileCommentRegexp, CommentSingle, Pop(2)},
{`\\\n`, Text, nil}, // Skip escaped newlines
{`\n`, Text, Pop(2)},
{`\s*\n`, Text, Pop(2)},
Include("base"),
},
"nested_arguments": {
{`\{(?=\s)`, Punctuation, Push("nested_block")},
{caddyfileCommentRegexp, CommentSingle, Pop(2)},
{`\\\n`, Text, nil}, // Skip escaped newlines
{`\s*\n`, Text, Pop(2)},
Include("base"),
},
"deep_subdirective": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_3"),
{`\n`, Text, Pop(3)},
{caddyfileCommentRegexp, CommentSingle, Pop(3)},
{`\s*\n`, Text, Pop(3)},
Include("base"),
},
"matcher_token": {
{`@[^\s]+`, NameDecorator, Push("arguments")}, // Named matcher
{`/[^\s]+`, NameDecorator, Push("arguments")}, // Path matcher
{`\*`, NameDecorator, Push("arguments")}, // Wildcard path matcher
{`\[\<matcher\>\]`, NameDecorator, Push("arguments")}, // Matcher token stub for docs
"uri_directive": {
{`\{(?=\s)`, Punctuation, Push("block")},
{caddyfileMatcherTokenRegexp, NameDecorator, nil},
{`(strip_prefix|strip_suffix|replace|path_regexp)`, NameConstant, Push("arguments")},
{caddyfileCommentRegexp, CommentSingle, Pop(1)},
{`\s*\n`, Text, Pop(1)},
Include("base"),
},
"comments": {
{`^#.*\n`, CommentSingle, nil}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace
"double_quotes": {
Include("placeholder"),
{`\\"`, StringDouble, nil},
{`[^"]`, StringDouble, nil},
{`"`, StringDouble, Pop(1)},
},
"comments_pop_1": {
{`^#.*\n`, CommentSingle, Pop(1)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace
"backticks": {
Include("placeholder"),
{"\\\\`", StringBacktick, nil},
{"[^`]", StringBacktick, nil},
{"`", StringBacktick, Pop(1)},
},
"comments_pop_2": {
{`^#.*\n`, CommentSingle, Pop(2)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace
"optional": {
// Docs syntax for showing optional parts with [ ]
{`\[`, Punctuation, Push("optional")},
Include("name_constants"),
{`\|`, Punctuation, nil},
{`[^\[\]\|]+`, String, nil},
{`\]`, Punctuation, Pop(1)},
},
"comments_pop_3": {
{`^#.*\n`, CommentSingle, Pop(3)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace
"heredoc": {
{`(<<([a-zA-Z0-9_-]+))(\n(.*|\n)*)(\s*)(\2)`, ByGroups(StringHeredoc, nil, String, String, String, StringHeredoc), nil},
},
"name_constants": {
{`\b(most_recently_modified|largest_size|smallest_size|first_exist|internal|disable_redirects|ignore_loaded_certs|disable_certs|private_ranges|first|last|before|after|on|off)\b(\||(?=\]|\s|$))`, ByGroups(NameConstant, Punctuation), nil},
},
"placeholder": {
// Placeholder with dots, colon for default value, brackets for args[0:]
{`\{[\w+.\[\]\:\$-]+\}`, StringEscape, nil},
// Handle opening brackets with no matching closing one
{`\{[^\}\s]*\b`, String, nil},
},
"base": {
Include("comments"),
{`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil},
{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil},
{`[a-z-]+/[a-z-+]+`, LiteralString, nil},
{`[0-9]+[km]?\b`, LiteralNumberInteger, nil},
{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder
{`\[(?=[^#{}$]+\])`, Punctuation, nil},
{`\]|\|`, Punctuation, nil},
{`[^\s#{}$\]]+`, LiteralString, nil},
{caddyfileCommentRegexp, CommentSingle, nil},
{`\[\<matcher\>\]`, NameDecorator, nil},
Include("name_constants"),
Include("heredoc"),
{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)([^\s]*)`, ByGroups(Name, Name, Punctuation, NumberInteger, Name), nil},
{`\[`, Punctuation, Push("optional")},
{"`", StringBacktick, Push("backticks")},
{`"`, StringDouble, Push("double_quotes")},
Include("placeholder"),
{`[a-z-]+/[a-z-+]+`, String, nil},
{`[0-9]+([smhdk]|ns|us|µs|ms)?\b`, NumberInteger, nil},
{`[^\s\n#\{]+`, String, nil},
{`/[^\s#]*`, Name, nil},
{`\s+`, Text, nil},
},
@ -149,27 +207,29 @@ var Caddyfile = Register(MustNewLexer(
func caddyfileRules() Rules {
return Rules{
"root": {
Include("comments"),
{caddyfileCommentRegexp, CommentSingle, nil},
// Global options block
{`^\s*(\{)\s*$`, ByGroups(Punctuation), Push("globals")},
// Top level import
{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
// Snippets
{`(\([^\s#]+\))(\s*)(\{)`, ByGroups(NameVariableAnonymous, Text, Punctuation), Push("snippet")},
{`(&?\([^\s#]+\))(\s*)(\{)`, ByGroups(NameVariableAnonymous, Text, Punctuation), Push("snippet")},
// Site label
{`[^#{(\s,]+`, GenericHeading, Push("label")},
// Site label with placeholder
{`\{[\w+.\$-]+\}`, LiteralStringEscape, Push("label")},
{`\{[\w+.\[\]\:\$-]+\}`, StringEscape, Push("label")},
{`\s+`, Text, nil},
},
"globals": {
{`\}`, Punctuation, Pop(1)},
{`[^\s#]+`, Keyword, Push("directive")},
// Global options are parsed as subdirectives (no matcher)
{`[^\s#]+`, Keyword, Push("subdirective")},
Include("base"),
},
"snippet": {
{`\}`, Punctuation, Pop(1)},
// Matcher definition
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
// Any directive
Include("site_body"),
// Any other directive
{`[^\s#]+`, Keyword, Push("directive")},
Include("base"),
},
@ -179,7 +239,7 @@ func caddyfileRules() Rules {
{`,\s*\n?`, Text, nil},
{` `, Text, nil},
// Site label with placeholder
{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil},
Include("placeholder"),
// Site label
{`[^#{(\s,]+`, GenericHeading, nil},
// Comment after non-block label (hack because comments end in \n)


@ -19,10 +19,10 @@
<rule pattern="\\\n">
<token type="Text"/>
</rule>
<rule pattern="///[^\n\r]+">
<rule pattern="///[^\n\r]*">
<token type="CommentSpecial"/>
</rule>
<rule pattern="//[^\n\r]+">
<rule pattern="//[^\n\r]*">
<token type="CommentSingle"/>
</rule>
<rule pattern="/[*].*?[*]/">


@ -49,7 +49,7 @@
<rule pattern="(true|false|null|_)\b">
<token type="KeywordConstant"/>
</rule>
<rule pattern="[_a-zA-Z]\w*">
<rule pattern="#?[_a-zA-Z$]\w*">
<token type="Name"/>
</rule>
</state>


@ -0,0 +1,17 @@
<lexer>
<config>
<name>Desktop file</name>
<alias>desktop</alias>
<alias>desktop_entry</alias>
<filename>*.desktop</filename>
<mime_type>application/x-desktop</mime_type>
</config>
<rules>
<state name="root">
<rule pattern="^[ \t]*\n"><token type="TextWhitespace"/></rule>
<rule pattern="^(#.*)(\n)"><bygroups><token type="CommentSingle"/><token type="TextWhitespace"/></bygroups></rule>
<rule pattern="(\[[^\]\n]+\])(\n)"><bygroups><token type="Keyword"/><token type="TextWhitespace"/></bygroups></rule>
<rule pattern="([-A-Za-z0-9]+)(\[[^\] \t=]+\])?([ \t]*)(=)([ \t]*)([^\n]*)([ \t\n]*\n)"><bygroups><token type="NameAttribute"/><token type="NameNamespace"/><token type="TextWhitespace"/><token type="Operator"/><token type="TextWhitespace"/><token type="LiteralString"/><token type="TextWhitespace"/></bygroups></rule>
</state>
</rules>
</lexer>


@ -86,7 +86,7 @@
<rule pattern="\\(?![:!#$%&amp;*+.\\/&lt;=&gt;?@^|~-]+)">
<token type="NameFunction"/>
</rule>
<rule pattern="(&lt;-|::|-&gt;|=&gt;|=)(?![:!#$%&amp;*+.\\/&lt;=&gt;?@^|~-]+)">
<rule pattern="(&lt;-|::|-&gt;|=&gt;|=|'([:!#$%&amp;*+.\\/&lt;=&gt;?@^|~-]+))(?![:!#$%&amp;*+.\\/&lt;=&gt;?@^|~-]+)">
<token type="OperatorWord"/>
</rule>
<rule pattern=":[:!#$%&amp;*+.\\/&lt;=&gt;?@^|~-]*">


@ -3,6 +3,7 @@
<name>JSON</name>
<alias>json</alias>
<filename>*.json</filename>
<filename>*.avsc</filename>
<mime_type>application/json</mime_type>
<dot_all>true</dot_all>
<not_multiline>true</not_multiline>


@ -0,0 +1,155 @@
<lexer>
<config>
<name>Materialize SQL dialect</name>
<alias>materialize</alias>
<alias>mzsql</alias>
<mime_type>text/x-materializesql</mime_type>
<case_insensitive>true</case_insensitive>
<not_multiline>true</not_multiline>
</config>
<rules>
<state name="root">
<rule pattern="\s+">
<token type="Text"/>
</rule>
<rule pattern="--.*\n?">
<token type="CommentSingle"/>
</rule>
<rule pattern="/\*">
<token type="CommentMultiline"/>
<push state="multiline-comments"/>
</rule>
<rule pattern="(bigint|bigserial|bit|bit\s+varying|bool|boolean|box|bytea|char|character|character\s+varying|cidr|circle|date|decimal|double\s+precision|float4|float8|inet|int|int2|int4|int8|integer|interval|json|jsonb|line|lseg|macaddr|money|numeric|path|pg_lsn|point|polygon|real|serial|serial2|serial4|serial8|smallint|smallserial|text|time|timestamp|timestamptz|timetz|tsquery|tsvector|txid_snapshot|uuid|varbit|varchar|with\s+time\s+zone|without\s+time\s+zone|xml|anyarray|anyelement|anyenum|anynonarray|anyrange|cstring|fdw_handler|internal|language_handler|opaque|record|void)\b">
<token type="NameBuiltin"/>
</rule>
<rule pattern="(?s)(DO)(\s+)(?:(LANGUAGE)?(\s+)(&#39;?)(\w+)?(&#39;?)(\s+))?(\$)([^$]*)(\$)(.*?)(\$)(\10)(\$)">
<usingbygroup>
<sublexer_name_group>6</sublexer_name_group>
<code_group>12</code_group>
<emitters>
<token type="Keyword"/>
<token type="Text"/>
<token type="Keyword"/>
<token type="Text"/>
<token type="LiteralStringSingle"/>
<token type="LiteralStringSingle"/>
<token type="LiteralStringSingle"/>
<token type="Text"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
</emitters>
</usingbygroup>
</rule>
<rule pattern="(ACCESS|ACKS|ADD|ADDRESSES|AGGREGATE|ALL|ALTER|AND|ANY|ARN|ARRANGEMENT|ARRAY|AS|ASC|ASSERT|AT|AUCTION|AUTHORITY|AVAILABILITY|AVRO|AWS|BEGIN|BETWEEN|BIGINT|BILLED|BODY|BOOLEAN|BOTH|BPCHAR|BROKEN|BROKER|BROKERS|BY|BYTES|CARDINALITY|CASCADE|CASE|CAST|CERTIFICATE|CHAIN|CHAR|CHARACTER|CHARACTERISTICS|CHECK|CLIENT|CLOSE|CLUSTER|CLUSTERS|COALESCE|COLLATE|COLUMN|COLUMNS|COMMENT|COMMIT|COMMITTED|COMPACTION|COMPRESSION|COMPUTE|COMPUTECTL|CONFLUENT|CONNECTION|CONNECTIONS|CONSTRAINT|COPY|COUNT|COUNTER|CREATE|CREATECLUSTER|CREATEDB|CREATEROLE|CROSS|CSV|CURRENT|CURSOR|DATABASE|DATABASES|DATUMS|DAY|DAYS|DEALLOCATE|DEBEZIUM|DEBUG|DEBUGGING|DEC|DECIMAL|DECLARE|DECORRELATED|DEFAULT|DEFAULTS|DELETE|DELIMITED|DELIMITER|DESC|DETAILS|DISCARD|DISK|DISTINCT|DOC|DOT|DOUBLE|DROP|EFFORT|ELEMENT|ELSE|ENABLE|END|ENDPOINT|ENFORCED|ENVELOPE|ERROR|ESCAPE|EXCEPT|EXECUTE|EXISTS|EXPECTED|EXPLAIN|EXPOSE|EXTRACT|FACTOR|FALSE|FETCH|FIELDS|FILTER|FIRST|FLOAT|FOLLOWING|FOR|FOREIGN|FORMAT|FORWARD|FROM|FULL|FULLNAME|FUNCTION|GENERATOR|GRANT|GREATEST|GROUP|GROUPS|HAVING|HEADER|HEADERS|HOLD|HOST|HOUR|HOURS|ID|IDEMPOTENCE|IDLE|IF|IGNORE|ILIKE|IN|INCLUDE|INDEX|INDEXES|INFO|INHERIT|INLINE|INNER|INPUT|INSERT|INSPECT|INT|INTEGER|INTERNAL|INTERSECT|INTERVAL|INTO|INTROSPECTION|IS|ISNULL|ISOLATION|JOIN|JSON|KAFKA|KEY|KEYS|LAST|LATERAL|LATEST|LEADING|LEAST|LEFT|LEVEL|LIKE|LIMIT|LIST|LOAD|LOCAL|LOG|LOGICAL|LOGIN|MANAGED|MAP|MARKETING|MATERIALIZE|MATERIALIZED|MAX|MECHANISMS|MEMBERSHIP|MERGE|MESSAGE|METADATA|MINUTE|MINUTES|MODE|MONTH|MONTHS|MS|MUTUALLY|NAME|NAMES|NATURAL|NEXT|NO|NOCREATECLUSTER|NOCREATEDB|NOCREATEROLE|NOINHERIT|NOLOGIN|NONE|NOSUPERUSER|NOT|NOTICE|NULL|NULLIF|NULLS|OBJECTS|OF|OFFSET|ON|ONLY|OPERATOR|OPTIMIZED|OPTIMIZER|OPTIONS|OR|ORDER|ORDINALITY|OUTER|OVER|OWNED|OWNER|PARTITION|PASSWORD|PHYSICAL|PLAN|PLANS|PORT|POSITION|POSTGRES|PRECEDING|PRECISION|PREFIX|PREPARE|PRIMARY|PRIVATELINK|PRIVILEGES|PROGRESS|PROTOBUF|PROTOCOL|PUBLICATION|QUERY|QUOTE|RAISE|RANGE|RAW|READ|REAL|REASSIGN|RECURSION|RECURSIVE|REFERENCES|REFRESH|REGEX|REGION|REGISTRY|RENAME|REPEATABLE|REPLACE|REPLICA|REPLICAS|REPLICATION|RESET|RESPECT|RESTRICT|RETENTION|RETURN|RETURNING|REVOKE|RIGHT|ROLE|ROLES|ROLLBACK|ROTATE|ROW|ROWS|SASL|SCALE|SCHEMA|SCHEMAS|SCRIPT|SECOND|SECONDS|SECRET|SECRETS|SECURITY|SEED|SELECT|SEQUENCES|SERIALIZABLE|SERVICE|SESSION|SET|SHARD|SHOW|SINK|SINKS|SIZE|SMALLINT|SNAPSHOT|SOME|SOURCE|SOURCES|SSH|SSL|START|STDIN|STDOUT|STORAGE|STORAGECTL|STRATEGY|STRICT|STRING|SUBSCRIBE|SUBSOURCE|SUBSOURCES|SUBSTRING|SUPERUSER|SWAP|SYSTEM|TABLE|TABLES|TAIL|TEMP|TEMPORARY|TEST|TEXT|THEN|TICK|TIES|TIME|TIMELINE|TIMEOUT|TIMESTAMP|TIMESTAMPTZ|TO|TOKEN|TOPIC|TPCH|TRACE|TRAILING|TRANSACTION|TRIM|TRUE|TUNNEL|TYPE|TYPES|UNBOUNDED|UNCOMMITTED|UNION|UNIQUE|UNKNOWN|UP|UPDATE|UPSERT|URL|USAGE|USER|USERNAME|USERS|USING|VALIDATE|VALUE|VALUES|VARCHAR|VARYING|VIEW|VIEWS|WARNING|WEBHOOK|WHEN|WHERE|WINDOW|WIRE|WITH|WITHIN|WITHOUT|WORK|WORKERS|WRITE|YEAR|YEARS|ZONE|ZONES)\b">
<token type="Keyword"/>
</rule>
<rule pattern="[+*/&lt;&gt;=~!@#%^&amp;|`?-]+">
<token type="Operator"/>
</rule>
<rule pattern="::">
<token type="Operator"/>
</rule>
<rule pattern="\$\d+">
<token type="NameVariable"/>
</rule>
<rule pattern="([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="[0-9]+">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="((?:E|U&amp;)?)(&#39;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralStringSingle"/>
</bygroups>
<push state="string"/>
</rule>
<rule pattern="((?:U&amp;)?)(&#34;)">
<bygroups>
<token type="LiteralStringAffix"/>
<token type="LiteralStringName"/>
</bygroups>
<push state="quoted-ident"/>
</rule>
<rule pattern="(?s)(\$)([^$]*)(\$)(.*?)(\$)(\2)(\$)(\s+)(LANGUAGE)?(\s+)(&#39;?)(\w+)?(&#39;?)">
<usingbygroup>
<sublexer_name_group>12</sublexer_name_group>
<code_group>4</code_group>
<emitters>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="LiteralStringHeredoc"/>
<token type="Text"/>
<token type="Keyword"/>
<token type="Text"/>
<token type="LiteralStringSingle"/>
<token type="LiteralStringSingle"/>
<token type="LiteralStringSingle"/>
</emitters>
</usingbygroup>
</rule>
<rule pattern="(?s)(\$)([^$]*)(\$)(.*?)(\$)(\2)(\$)">
<token type="LiteralStringHeredoc"/>
</rule>
<rule pattern="[a-z_]\w*">
<token type="Name"/>
</rule>
<rule pattern=":([&#39;&#34;]?)[a-z]\w*\b\1">
<token type="NameVariable"/>
</rule>
<rule pattern="[;:()\[\]{},.]">
<token type="Punctuation"/>
</rule>
</state>
<state name="multiline-comments">
<rule pattern="/\*">
<token type="CommentMultiline"/>
<push state="multiline-comments"/>
</rule>
<rule pattern="\*/">
<token type="CommentMultiline"/>
<pop depth="1"/>
</rule>
<rule pattern="[^/*]+">
<token type="CommentMultiline"/>
</rule>
<rule pattern="[/*]">
<token type="CommentMultiline"/>
</rule>
</state>
<state name="string">
<rule pattern="[^&#39;]+">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="&#39;&#39;">
<token type="LiteralStringSingle"/>
</rule>
<rule pattern="&#39;">
<token type="LiteralStringSingle"/>
<pop depth="1"/>
</rule>
</state>
<state name="quoted-ident">
<rule pattern="[^&#34;]+">
<token type="LiteralStringName"/>
</rule>
<rule pattern="&#34;&#34;">
<token type="LiteralStringName"/>
</rule>
<rule pattern="&#34;">
<token type="LiteralStringName"/>
<pop depth="1"/>
</rule>
</state>
</rules>
</lexer>

View File

@ -0,0 +1,123 @@
<lexer>
<config>
<name>NDISASM</name>
<alias>ndisasm</alias>
<mime_type>text/x-disasm</mime_type>
<case_insensitive>true</case_insensitive>
<priority>0.5</priority> <!-- Lower than NASM -->
</config>
<rules>
<state name="root">
<rule pattern="^[0-9A-Za-z]+">
<token type="CommentSpecial"/>
<push state="offset"/>
</rule>
</state>
<state name="offset">
<rule pattern="[0-9A-Za-z]+">
<token type="CommentSpecial"/>
<push state="assembly"/>
</rule>
<rule>
<include state="whitespace"/>
</rule>
</state>
<state name="punctuation">
<rule pattern="[,():\[\]]+">
<token type="Punctuation"/>
</rule>
<rule pattern="[&amp;|^&lt;&gt;+*/%~-]+">
<token type="Operator"/>
</rule>
<rule pattern="[$]+">
<token type="KeywordConstant"/>
</rule>
<rule pattern="seg|wrt|strict">
<token type="OperatorWord"/>
</rule>
<rule pattern="byte|[dq]?word">
<token type="KeywordType"/>
</rule>
</state>
<state name="assembly">
<rule>
<include state="whitespace"/>
</rule>
<rule pattern="[a-z$._?][\w$.?#@~]*:">
<token type="NameLabel"/>
</rule>
<rule pattern="([a-z$._?][\w$.?#@~]*)(\s+)(equ)">
<bygroups>
<token type="NameConstant"/>
<token type="KeywordDeclaration"/>
<token type="KeywordDeclaration"/>
</bygroups>
<push state="instruction-args"/>
</rule>
<rule pattern="BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|ORG|ALIGN|STRUC|ENDSTRUC|COMMON|CPU|GROUP|UPPERCASE|IMPORT|EXPORT|LIBRARY|MODULE">
<token type="Keyword"/>
<push state="instruction-args"/>
</rule>
<rule pattern="(?:res|d)[bwdqt]|times">
<token type="KeywordDeclaration"/>
<push state="instruction-args"/>
</rule>
<rule pattern="[a-z$._?][\w$.?#@~]*">
<token type="NameFunction"/>
<push state="instruction-args"/>
</rule>
<rule pattern="[\r\n]+">
<token type="Text"/>
<pop depth="2"/>
</rule>
</state>
<state name="instruction-args">
<rule pattern="&#34;(\\&#34;|[^&#34;\n])*&#34;|&#39;(\\&#39;|[^&#39;\n])*&#39;|`(\\`|[^`\n])*`">
<token type="LiteralString"/>
</rule>
<rule pattern="(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)">
<token type="LiteralNumberHex"/>
</rule>
<rule pattern="[0-7]+q">
<token type="LiteralNumberOct"/>
</rule>
<rule pattern="[01]+b">
<token type="LiteralNumberBin"/>
</rule>
<rule pattern="[0-9]+\.e?[0-9]+">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="[0-9]+">
<token type="LiteralNumberInteger"/>
</rule>
<rule>
<include state="punctuation"/>
</rule>
<rule pattern="r[0-9][0-5]?[bwd]|[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]">
<token type="NameBuiltin"/>
</rule>
<rule pattern="[a-z$._?][\w$.?#@~]*">
<token type="NameVariable"/>
</rule>
<rule pattern="[\r\n]+">
<token type="Text"/>
<pop depth="3"/>
</rule>
<rule>
<include state="whitespace"/>
</rule>
</state>
<state name="whitespace">
<rule pattern="\n">
<token type="Text"/>
<pop depth="2"/>
</rule>
<rule pattern="[ \t]+">
<token type="Text"/>
</rule>
<rule pattern=";.*">
<token type="CommentSingle"/>
</rule>
</state>
</rules>
</lexer>


@ -0,0 +1,119 @@
<lexer>
<config>
<name>Promela</name>
<alias>promela</alias>
<filename>*.pml</filename>
<filename>*.prom</filename>
<filename>*.prm</filename>
<filename>*.promela</filename>
<filename>*.pr</filename>
<filename>*.pm</filename>
<mime_type>text/x-promela</mime_type>
</config>
<rules>
<state name="statements">
<rule pattern="(\[\]|&lt;&gt;|/\\|\\/)|(U|W|V)\b"><token type="Operator"/></rule>
<rule pattern="@"><token type="Punctuation"/></rule>
<rule pattern="(\.)([a-zA-Z_]\w*)"><bygroups><token type="Operator"/><token type="NameAttribute"/></bygroups></rule>
<rule><include state="keywords"/></rule>
<rule><include state="types"/></rule>
<rule pattern="([LuU]|u8)?(&quot;)"><bygroups><token type="LiteralStringAffix"/><token type="LiteralString"/></bygroups><push state="string"/></rule>
<rule pattern="([LuU]|u8)?(&#x27;)(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\&#x27;\n])(&#x27;)"><bygroups><token type="LiteralStringAffix"/><token type="LiteralStringChar"/><token type="LiteralStringChar"/><token type="LiteralStringChar"/></bygroups></rule>
<rule pattern="0[xX]([0-9a-fA-F](\&#x27;?[0-9a-fA-F])*\.[0-9a-fA-F](\&#x27;?[0-9a-fA-F])*|\.[0-9a-fA-F](\&#x27;?[0-9a-fA-F])*|[0-9a-fA-F](\&#x27;?[0-9a-fA-F])*)[pP][+-]?[0-9a-fA-F](\&#x27;?[0-9a-fA-F])*[lL]?"><token type="LiteralNumberFloat"/></rule>
<rule pattern="(-)?(\d(\&#x27;?\d)*\.\d(\&#x27;?\d)*|\.\d(\&#x27;?\d)*|\d(\&#x27;?\d)*)[eE][+-]?\d(\&#x27;?\d)*[fFlL]?"><token type="LiteralNumberFloat"/></rule>
<rule pattern="(-)?((\d(\&#x27;?\d)*\.(\d(\&#x27;?\d)*)?|\.\d(\&#x27;?\d)*)[fFlL]?)|(\d(\&#x27;?\d)*[fFlL])"><token type="LiteralNumberFloat"/></rule>
<rule pattern="(-)?0[xX][0-9a-fA-F](\&#x27;?[0-9a-fA-F])*(([uU][lL]{0,2})|[lL]{1,2}[uU]?)?"><token type="LiteralNumberHex"/></rule>
<rule pattern="(-)?0[bB][01](\&#x27;?[01])*(([uU][lL]{0,2})|[lL]{1,2}[uU]?)?"><token type="LiteralNumberBin"/></rule>
<rule pattern="(-)?0(\&#x27;?[0-7])+(([uU][lL]{0,2})|[lL]{1,2}[uU]?)?"><token type="LiteralNumberOct"/></rule>
<rule pattern="(-)?\d(\&#x27;?\d)*(([uU][lL]{0,2})|[lL]{1,2}[uU]?)?"><token type="LiteralNumberInteger"/></rule>
<rule pattern="[~!%^&amp;*+=|?:&lt;&gt;/-]"><token type="Operator"/></rule>
<rule pattern="[()\[\],.]"><token type="Punctuation"/></rule>
<rule pattern="(true|false|NULL)\b"><token type="NameBuiltin"/></rule>
<rule pattern="(?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})+"><token type="Name"/></rule>
</state>
<state name="types">
<rule pattern="(bit|bool|byte|pid|short|int|unsigned)\b"><token type="KeywordType"/></rule>
</state>
<state name="keywords">
<rule pattern="(atomic|break|d_step|do|od|for|in|goto|if|fi|unless)\b"><token type="Keyword"/></rule>
<rule pattern="(assert|get_priority|printf|printm|set_priority)\b"><token type="NameFunction"/></rule>
<rule pattern="(c_code|c_decl|c_expr|c_state|c_track)\b"><token type="Keyword"/></rule>
<rule pattern="(_|_last|_nr_pr|_pid|_priority|else|np_|STDIN)\b"><token type="NameBuiltin"/></rule>
<rule pattern="(empty|enabled|eval|full|len|nempty|nfull|pc_value)\b"><token type="NameFunction"/></rule>
<rule pattern="run\b"><token type="OperatorWord"/></rule>
<rule pattern="(active|chan|D_proctype|hidden|init|local|mtype|never|notrace|proctype|show|trace|typedef|xr|xs)\b"><token type="KeywordDeclaration"/></rule>
<rule pattern="(priority|provided)\b"><token type="Keyword"/></rule>
<rule pattern="(inline|ltl|select)\b"><token type="KeywordDeclaration"/></rule>
<rule pattern="skip\b"><token type="Keyword"/></rule>
</state>
<state name="whitespace">
<rule pattern="^#if\s+0"><token type="CommentPreproc"/><push state="if0"/></rule>
<rule pattern="^#"><token type="CommentPreproc"/><push state="macro"/></rule>
<rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)"><bygroups><usingself state="root"/><token type="CommentPreproc"/></bygroups><push state="if0"/></rule>
<rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#)"><bygroups><usingself state="root"/><token type="CommentPreproc"/></bygroups><push state="macro"/></rule>
<rule pattern="(^[ \t]*)(?!(?:public|private|protected|default)\b)((?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})+)(\s*)(:)(?!:)"><bygroups><token type="TextWhitespace"/><token type="NameLabel"/><token type="TextWhitespace"/><token type="Punctuation"/></bygroups></rule>
<rule pattern="\n"><token type="TextWhitespace"/></rule>
<rule pattern="[^\S\n]+"><token type="TextWhitespace"/></rule>
<rule pattern="\\\n"><token type="Text"/></rule>
<rule pattern="//(?:.|(?&lt;=\\)\n)*\n"><token type="CommentSingle"/></rule>
<rule pattern="/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/"><token type="CommentMultiline"/></rule>
<rule pattern="/(\\\n)?[*][\w\W]*"><token type="CommentMultiline"/></rule>
</state>
<state name="root">
<rule><include state="whitespace"/></rule>
<rule><include state="keywords"/></rule>
<rule pattern="((?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|::)+(?:[&amp;*\s])+)(\s*(?:(?:(?://(?:.|(?&lt;=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)((?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|::)+)(\s*(?:(?:(?://(?:.|(?&lt;=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)(\([^;&quot;\&#x27;)]*?\))(\s*(?:(?:(?://(?:.|(?&lt;=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)([^;{/&quot;\&#x27;]*)(\{)"><bygroups><usingself state="root"/><usingself state="whitespace"/><token type="NameFunction"/><usingself state="whitespace"/><usingself state="root"/><usingself state="whitespace"/><usingself state="root"/><token type="Punctuation"/></bygroups><push state="function"/></rule>
<rule pattern="((?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|::)+(?:[&amp;*\s])+)(\s*(?:(?:(?://(?:.|(?&lt;=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)((?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|::)+)(\s*(?:(?:(?://(?:.|(?&lt;=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)(\([^;&quot;\&#x27;)]*?\))(\s*(?:(?:(?://(?:.|(?&lt;=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)([^;/&quot;\&#x27;]*)(;)"><bygroups><usingself state="root"/><usingself state="whitespace"/><token type="NameFunction"/><usingself state="whitespace"/><usingself state="root"/><usingself state="whitespace"/><usingself state="root"/><token type="Punctuation"/></bygroups></rule>
<rule><include state="types"/></rule>
<rule><push state="statement"/></rule>
</state>
<state name="statement">
<rule><include state="whitespace"/></rule>
<rule><include state="statements"/></rule>
<rule pattern="\}"><token type="Punctuation"/></rule>
<rule pattern="[{;]"><token type="Punctuation"/><pop depth="1"/></rule>
</state>
<state name="function">
<rule><include state="whitespace"/></rule>
<rule><include state="statements"/></rule>
<rule pattern=";"><token type="Punctuation"/></rule>
<rule pattern="\{"><token type="Punctuation"/><push/></rule>
<rule pattern="\}"><token type="Punctuation"/><pop depth="1"/></rule>
</state>
<state name="string">
<rule pattern="&quot;"><token type="LiteralString"/><pop depth="1"/></rule>
<rule pattern="\\([\\abfnrtv&quot;\&#x27;]|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})"><token type="LiteralStringEscape"/></rule>
<rule pattern="[^\\&quot;\n]+"><token type="LiteralString"/></rule>
<rule pattern="\\\n"><token type="LiteralString"/></rule>
<rule pattern="\\"><token type="LiteralString"/></rule>
</state>
<state name="macro">
<rule pattern="(\s*(?:/[*].*?[*]/\s*)?)(include)(\s*(?:/[*].*?[*]/\s*)?)(&quot;[^&quot;]+&quot;)([^\n]*)"><bygroups><usingself state="root"/><token type="CommentPreproc"/><usingself state="root"/><token type="CommentPreprocFile"/><token type="CommentSingle"/></bygroups></rule>
<rule pattern="(\s*(?:/[*].*?[*]/\s*)?)(include)(\s*(?:/[*].*?[*]/\s*)?)(&lt;[^&gt;]+&gt;)([^\n]*)"><bygroups><usingself state="root"/><token type="CommentPreproc"/><usingself state="root"/><token type="CommentPreprocFile"/><token type="CommentSingle"/></bygroups></rule>
<rule pattern="[^/\n]+"><token type="CommentPreproc"/></rule>
<rule pattern="/[*](.|\n)*?[*]/"><token type="CommentMultiline"/></rule>
<rule pattern="//.*?\n"><token type="CommentSingle"/><pop depth="1"/></rule>
<rule pattern="/"><token type="CommentPreproc"/></rule>
<rule pattern="(?&lt;=\\)\n"><token type="CommentPreproc"/></rule>
<rule pattern="\n"><token type="CommentPreproc"/><pop depth="1"/></rule>
</state>
<state name="if0">
<rule pattern="^\s*#if.*?(?&lt;!\\)\n"><token type="CommentPreproc"/><push/></rule>
<rule pattern="^\s*#el(?:se|if).*\n"><token type="CommentPreproc"/><pop depth="1"/></rule>
<rule pattern="^\s*#endif.*?(?&lt;!\\)\n"><token type="CommentPreproc"/><pop depth="1"/></rule>
<rule pattern=".*?\n"><token type="Comment"/></rule>
</state>
<state name="classname">
<rule pattern="(?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})+"><token type="NameClass"/><pop depth="1"/></rule>
<rule pattern="\s*(?=&gt;)"><token type="Text"/><pop depth="1"/></rule>
<rule><pop depth="1"/></rule>
</state>
<state name="case-value">
<rule pattern="(?&lt;!:)(:)(?!:)"><token type="Punctuation"/><pop depth="1"/></rule>
<rule pattern="(?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})+"><token type="NameConstant"/></rule>
<rule><include state="whitespace"/></rule>
<rule><include state="statements"/></rule>
</state>
</rules>
</lexer>


@ -0,0 +1,94 @@
<lexer>
<config>
<name>Rego</name>
<alias>rego</alias>
<filename>*.rego</filename>
</config>
<rules>
<state name="root">
<rule pattern="(package|import|as|not|with|default|else|some|in|if|contains)\b">
<token type="KeywordDeclaration"/>
</rule>
<!-- importing keywords should then show up as keywords -->
<rule pattern="(import)( future.keywords.)(\w+)">
<bygroups>
<token type="KeywordDeclaration"/>
<token type="Text"/>
<token type="KeywordDeclaration"/>
</bygroups>
</rule>
<rule pattern="#[^\r\n]*">
<token type="Comment"/>
</rule>
<rule pattern="(FIXME|TODO|XXX)\b( .*)$">
<bygroups>
<token type="Error"/>
<token type="CommentSpecial"/>
</bygroups>
</rule>
<rule pattern="(true|false|null)\b">
<token type="KeywordConstant"/>
</rule>
<rule pattern="\d+i">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\d+\.\d*([Ee][-+]\d+)?i">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\.\d+([Ee][-+]\d+)?i">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\d+[Ee][-+]\d+i">
<token type="LiteralNumber"/>
</rule>
<rule pattern="\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="\.\d+([eE][+\-]?\d+)?">
<token type="LiteralNumberFloat"/>
</rule>
<rule pattern="(0|[1-9][0-9]*)">
<token type="LiteralNumberInteger"/>
</rule>
<rule pattern="&#34;&#34;&#34;.*?&#34;&#34;&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="&#34;(\\\\|\\&#34;|[^&#34;])*&#34;">
<token type="LiteralStringDouble"/>
</rule>
<rule pattern="\$/((?!/\$).)*/\$">
<token type="LiteralString"/>
</rule>
<rule pattern="/(\\\\|\\&#34;|[^/])*/">
<token type="LiteralString"/>
</rule>
<rule pattern="^(\w+)">
<token type="Name"/>
</rule>
<rule pattern="[a-z_-][\w-]*(?=\()">
<token type="NameFunction"/>
</rule>
<rule pattern="[\r\n\s]+">
<token type="TextWhitespace"/>
</rule>
<rule pattern="(package|import)(\s+)">
<bygroups>
<token type="KeywordDeclaration"/>
<token type="Text"/>
</bygroups>
</rule>
<rule pattern="[=&lt;&gt;!+-/*&amp;|]">
<token type="Operator"/>
</rule>
<rule pattern=":=">
<token type="Operator"/>
</rule>
<rule pattern="[[\]{}():;]+">
<token type="Punctuation"/>
</rule>
<rule pattern="[$a-zA-Z_]\w*">
<token type="NameOther"/>
</rule>
</state>
</rules>
</lexer>


@ -0,0 +1,58 @@
<lexer>
<config>
<name>RPMSpec</name>
<alias>spec</alias>
<filename>*.spec</filename>
<mime_type>text/x-rpm-spec</mime_type>
</config>
<rules>
<state name="root">
<rule pattern="#.*$"><token type="Comment"/></rule>
<rule><include state="basic"/></rule>
</state>
<state name="description">
<rule pattern="^(%(?:package|prep|build|install|clean|check|pre[a-z]*|post[a-z]*|trigger[a-z]*|files))(.*)$"><bygroups><token type="NameDecorator"/><token type="Text"/></bygroups><pop depth="1"/></rule>
<rule pattern="\s+"><token type="TextWhitespace"/></rule>
<rule pattern="."><token type="Text"/></rule>
</state>
<state name="changelog">
<rule pattern="\*.*$"><token type="GenericSubheading"/></rule>
<rule pattern="^(%(?:package|prep|build|install|clean|check|pre[a-z]*|post[a-z]*|trigger[a-z]*|files))(.*)$"><bygroups><token type="NameDecorator"/><token type="Text"/></bygroups><pop depth="1"/></rule>
<rule pattern="\s+"><token type="TextWhitespace"/></rule>
<rule pattern="."><token type="Text"/></rule>
</state>
<state name="string">
<rule pattern="&quot;"><token type="LiteralStringDouble"/><pop depth="1"/></rule>
<rule pattern="\\([\\abfnrtv&quot;\&#x27;]|x[a-fA-F0-9]{2,4}|[0-7]{1,3})"><token type="LiteralStringEscape"/></rule>
<rule><include state="interpol"/></rule>
<rule pattern="."><token type="LiteralStringDouble"/></rule>
</state>
<state name="basic">
<rule><include state="macro"/></rule>
<rule pattern="(?i)^(Name|Version|Release|Epoch|Summary|Group|License|Packager|Vendor|Icon|URL|Distribution|Prefix|Patch[0-9]*|Source[0-9]*|Requires\(?[a-z]*\)?|[a-z]+Req|Obsoletes|Suggests|Provides|Conflicts|Build[a-z]+|[a-z]+Arch|Auto[a-z]+)(:)(.*)$"><bygroups><token type="GenericHeading"/><token type="Punctuation"/><usingself state="root"/></bygroups></rule>
<rule pattern="^%description"><token type="NameDecorator"/><push state="description"/></rule>
<rule pattern="^%changelog"><token type="NameDecorator"/><push state="changelog"/></rule>
<rule pattern="^(%(?:package|prep|build|install|clean|check|pre[a-z]*|post[a-z]*|trigger[a-z]*|files))(.*)$"><bygroups><token type="NameDecorator"/><token type="Text"/></bygroups></rule>
<rule pattern="%(attr|defattr|dir|doc(?:dir)?|setup|config(?:ure)?|make(?:install)|ghost|patch[0-9]+|find_lang|exclude|verify)"><token type="Keyword"/></rule>
<rule><include state="interpol"/></rule>
<rule pattern="&#x27;.*?&#x27;"><token type="LiteralStringSingle"/></rule>
<rule pattern="&quot;"><token type="LiteralStringDouble"/><push state="string"/></rule>
<rule pattern="\s+"><token type="TextWhitespace"/></rule>
<rule pattern="."><token type="Text"/></rule>
</state>
<state name="macro">
<rule pattern="%define.*$"><token type="CommentPreproc"/></rule>
<rule pattern="%\{\!\?.*%define.*\}"><token type="CommentPreproc"/></rule>
<rule pattern="(%(?:if(?:n?arch)?|else(?:if)?|endif))(.*)$"><bygroups><token type="CommentPreproc"/><token type="Text"/></bygroups></rule>
</state>
<state name="interpol">
<rule pattern="%\{?__[a-z_]+\}?"><token type="NameFunction"/></rule>
<rule pattern="%\{?_([a-z_]+dir|[a-z_]+path|prefix)\}?"><token type="KeywordPseudo"/></rule>
<rule pattern="%\{\?\w+\}"><token type="NameVariable"/></rule>
<rule pattern="\$\{?RPM_[A-Z0-9_]+\}?"><token type="NameVariableGlobal"/></rule>
<rule pattern="%\{[a-zA-Z]\w+\}"><token type="KeywordConstant"/></rule>
</state>
</rules>
</lexer>


@ -55,7 +55,7 @@ func goRules() Rules {
{`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
{`(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\||<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])`, Operator, nil},
{`([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(NameFunction, UsingSelf("root"), Punctuation), nil},
{`[|^<>=!()\[\]{}.,;:]`, Punctuation, nil},
{`[|^<>=!()\[\]{}.,;:~]`, Punctuation, nil},
{`[^\W\d]\w*`, NameOther, nil},
},
}


@ -1,11 +1,18 @@
{
$schema: "https://docs.renovatebot.com/renovate-schema.json",
extends: [
"config:recommended",
":semanticCommits",
":semanticCommitTypeAll(chore)",
":semanticCommitScope(deps)",
"group:allNonMajor",
"schedule:earlyMondays", // Run once a week.
],
$schema: "https://docs.renovatebot.com/renovate-schema.json",
extends: [
"config:recommended",
":semanticCommits",
":semanticCommitTypeAll(chore)",
":semanticCommitScope(deps)",
"group:allNonMajor",
"schedule:earlyMondays", // Run once a week.
],
packageRules: [
{
matchPackageNames: ["golangci-lint"],
matchManagers: ["hermit"],
enabled: false,
},
],
}


@ -5,7 +5,7 @@
<entry type="Other" style="#c6d0f5"/>
<entry type="LineTableTD" style=""/>
<entry type="LineTable" style=""/>
<entry type="LineHighlight" style="#51576d"/>
<entry type="LineHighlight" style="bg:#51576d"/>
<entry type="LineNumbersTable" style="#838ba7"/>
<entry type="LineNumbers" style="#838ba7"/>
<entry type="Keyword" style="#ca9ee6"/>


@ -5,7 +5,7 @@
<entry type="Other" style="#4c4f69"/>
<entry type="LineTableTD" style=""/>
<entry type="LineTable" style=""/>
<entry type="LineHighlight" style="#bcc0cc"/>
<entry type="LineHighlight" style="bg:#bcc0cc"/>
<entry type="LineNumbersTable" style="#8c8fa1"/>
<entry type="LineNumbers" style="#8c8fa1"/>
<entry type="Keyword" style="#8839ef"/>


@ -5,7 +5,7 @@
<entry type="Other" style="#cad3f5"/>
<entry type="LineTableTD" style=""/>
<entry type="LineTable" style=""/>
<entry type="LineHighlight" style="#494d64"/>
<entry type="LineHighlight" style="bg:#494d64"/>
<entry type="LineNumbersTable" style="#8087a2"/>
<entry type="LineNumbers" style="#8087a2"/>
<entry type="Keyword" style="#c6a0f6"/>


@ -5,7 +5,7 @@
<entry type="Other" style="#cdd6f4"/>
<entry type="LineTableTD" style=""/>
<entry type="LineTable" style=""/>
<entry type="LineHighlight" style="#45475a"/>
<entry type="LineHighlight" style="bg:#45475a"/>
<entry type="LineNumbersTable" style="#7f849c"/>
<entry type="LineNumbers" style="#7f849c"/>
<entry type="Keyword" style="#cba6f7"/>


@ -1,6 +1,6 @@
<style name="github-dark">
<entry type="Error" style="#f85149"/>
<entry type="LineHighlight" style="#6e7681"/>
<entry type="LineHighlight" style="bg:#6e7681"/>
<entry type="LineNumbers" style="#6e7681"/>
<entry type="Background" style="#e6edf3 bg:#0d1117"/>
<entry type="Keyword" style="#ff7b72"/>
@ -42,4 +42,4 @@
<entry type="GenericTraceback" style="#ff7b72"/>
<entry type="GenericUnderline" style="underline"/>
<entry type="TextWhitespace" style="#6e7681"/>
</style>
</style>

2
vendor/modules.txt vendored

@ -1,4 +1,4 @@
# github.com/alecthomas/chroma/v2 v2.12.0
# github.com/alecthomas/chroma/v2 v2.13.0
## explicit; go 1.19
github.com/alecthomas/chroma/v2
github.com/alecthomas/chroma/v2/formatters/html