diff --git a/go.mod b/go.mod index e07defe79..3b83aced9 100755 --- a/go.mod +++ b/go.mod @@ -22,6 +22,7 @@ require ( github.com/PuerkitoBio/goquery v1.5.0 github.com/RichardKnop/machinery v1.6.9 github.com/RoaringBitmap/roaring v0.4.23 // indirect + github.com/alecthomas/chroma v0.10.0 github.com/alibabacloud-go/darabonba-openapi v0.1.18 github.com/alibabacloud-go/dysmsapi-20170525/v2 v2.0.9 github.com/alibabacloud-go/tea v1.1.17 @@ -121,7 +122,8 @@ require ( github.com/xanzy/go-gitlab v0.31.0 github.com/yohcop/openid-go v1.0.0 github.com/yuin/goldmark v1.4.13 - github.com/yuin/goldmark-meta v0.0.0-20191126180153-f0638e958b60 + github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594 + github.com/yuin/goldmark-meta v1.1.0 golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37 golang.org/x/mod v0.3.0 // indirect golang.org/x/net v0.0.0-20200513185701-a91f0712d120 @@ -138,7 +140,7 @@ require ( gopkg.in/ldap.v3 v3.0.2 gopkg.in/macaron.v1 v1.3.9 // indirect gopkg.in/testfixtures.v2 v2.5.0 - gopkg.in/yaml.v2 v2.2.8 + gopkg.in/yaml.v2 v2.3.0 mvdan.cc/xurls/v2 v2.1.0 strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251 xorm.io/builder v0.3.7 diff --git a/go.sum b/go.sum index bd231b321..6735a1938 100755 --- a/go.sum +++ b/go.sum @@ -76,6 +76,8 @@ github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMx github.com/Unknwon/com v0.0.0-20190321035513-0fed4efef755/go.mod h1:voKvFVpXBJxdIPeqjoJuLK+UVcRlo/JLjeToGxPYu68= github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7 h1:uSoVVbwJiQipAclBbw+8quDsfcvFjOpI5iCf4p/cqCs= github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs= +github.com/alecthomas/chroma v0.10.0 h1:7XDcGkCQopCNKjZHfYrNLraA+M7e0fMiJ/Mfikbfjek= +github.com/alecthomas/chroma v0.10.0/go.mod h1:jtJATyUxlIORhUOFNA9NZDWGAQ8wpxQQqNSB4rjA/1s= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alibabacloud-go/alibabacloud-gateway-spi v0.0.2/go.mod h1:sCavSAvdzOjul4cEqeVtvlSaSScfNsTQ+46HwlTL1hc= @@ -203,6 +205,8 @@ github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZm github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c= github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4= +github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E= +github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo= @@ -804,10 +808,16 @@ github.com/yuin/goldmark v1.1.27 h1:nqDD4MMMQA0lmWq03Z2/myGPYLQoXtmi0rGVs95ntbo= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.30 h1:j4d4Lw3zqZelDhBksEo3BnWg9xhXRQGJPPSL6OApZjI= github.com/yuin/goldmark v1.1.30/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.5/go.mod h1:rmuwmfZ0+bvzB24eSC//bk1R1Zp3hM0OXYv/G2LIilg= 
+github.com/yuin/goldmark v1.4.6/go.mod h1:rmuwmfZ0+bvzB24eSC//bk1R1Zp3hM0OXYv/G2LIilg= github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594 h1:yHfZyN55+5dp1wG7wDKv8HQ044moxkyGq12KFFMFDxg= +github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594/go.mod h1:U9ihbh+1ZN7fR5Se3daSPoz1CGF9IYtSvWwVQtnzGHU= github.com/yuin/goldmark-meta v0.0.0-20191126180153-f0638e958b60 h1:gZucqLjL1eDzVWrXj4uiWeMbAopJlBR2mKQAsTGdPwo= github.com/yuin/goldmark-meta v0.0.0-20191126180153-f0638e958b60/go.mod h1:i9VhcIHN2PxXMbQrKqXNueok6QNONoPjNMoj9MygVL0= +github.com/yuin/goldmark-meta v1.1.0 h1:pWw+JLHGZe8Rk0EGsMVssiNb/AaPMHfSRszZeUeiOUc= +github.com/yuin/goldmark-meta v1.1.0/go.mod h1:U4spWENafuA7Zyg+Lj5RqK/MF+ovMYtBvXi1lBb2VP0= github.com/ziutek/mymysql v1.5.4 h1:GB0qdRGsTwQSBVYuVShFBKaXSnSnYYC2d9knnE1LHFs= github.com/ziutek/mymysql v1.5.4/go.mod h1:LMSpPZ6DbqWFxNCHW77HeMg9I646SAhApZ/wKdgO/C0= go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= @@ -1088,6 +1098,8 @@ gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= grpc.go4.org v0.0.0-20170609214715-11d0a25b4919/go.mod h1:77eQGdRu53HpSqPFJFmuJdjuHRquDANNeA4x7B8WQ9o= diff --git a/modules/markup/markdown/markdown.go b/modules/markup/markdown/markdown.go index e50301ffe..407339461 100644 --- a/modules/markup/markdown/markdown.go +++ b/modules/markup/markdown/markdown.go @@ -7,6 +7,7 @@ package markdown import ( "bytes" + "strings" "sync" "code.gitea.io/gitea/modules/log" @@ -14,6 +15,8 @@ import ( "code.gitea.io/gitea/modules/markup/common" "code.gitea.io/gitea/modules/setting" giteautil "code.gitea.io/gitea/modules/util" + chromahtml "github.com/alecthomas/chroma/formatters/html" + highlighting "github.com/yuin/goldmark-highlighting" "github.com/yuin/goldmark" meta "github.com/yuin/goldmark-meta" @@ -42,16 +45,48 @@ func NewGiteaParseContext(urlPrefix string, isWiki bool) parser.Context { func RenderRaw(body []byte, urlPrefix string, wikiMarkdown bool) []byte { once.Do(func() { converter = goldmark.New( - goldmark.WithExtensions(extension.Table, + goldmark.WithExtensions( + extension.NewTable( + extension.WithTableCellAlignMethod(extension.TableCellAlignAttribute)), extension.Strikethrough, extension.TaskList, extension.DefinitionList, common.FootnoteExtension, - extension.NewTypographer( - extension.WithTypographicSubstitutions(extension.TypographicSubstitutions{ - extension.EnDash: nil, - extension.EmDash: nil, - extension.Ellipsis: nil, + highlighting.NewHighlighting( + highlighting.WithFormatOptions( + chromahtml.WithClasses(true), + chromahtml.PreventSurroundingPre(true), + ), + highlighting.WithWrapperRenderer(func(w util.BufWriter, c highlighting.CodeBlockContext, entering bool) { + if entering { + language, _ := 
c.Language() + if language == nil { + language = []byte("text") + } + + languageStr := string(language) + + preClasses := []string{"code-block"} + if languageStr == "mermaid" { + preClasses = append(preClasses, "is-loading") + } + + _, err := w.WriteString(`
<pre class="` + strings.Join(preClasses, " ") + `">`)
+							if err != nil {
+								return
+							}
+
+							// include language-x class as part of commonmark spec
+							_, err = w.WriteString(`<code class="chroma language-` + string(language) + `">`)
+							if err != nil {
+								return
+							}
+						} else {
+							_, err := w.WriteString("</code></pre>
") + if err != nil { + return + } + } }), ), meta.Meta, diff --git a/vendor/github.com/alecthomas/chroma/.gitignore b/vendor/github.com/alecthomas/chroma/.gitignore new file mode 100644 index 000000000..ccacd12e9 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/.gitignore @@ -0,0 +1,19 @@ +# Binaries for programs and plugins +*.exe +*.dll +*.so +*.dylib +/cmd/chroma/chroma + +# Test binary, build with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736 +.glide/ + +_models/ + +_examples/ diff --git a/vendor/github.com/alecthomas/chroma/.golangci.yml b/vendor/github.com/alecthomas/chroma/.golangci.yml new file mode 100644 index 000000000..ba7557339 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/.golangci.yml @@ -0,0 +1,76 @@ +run: + tests: true + skip-dirs: + - _examples + +output: + print-issued-lines: false + +linters: + enable-all: true + disable: + - maligned + - megacheck + - lll + - gocyclo + - dupl + - gochecknoglobals + - funlen + - godox + - wsl + - gomnd + - gocognit + - goerr113 + - nolintlint + - testpackage + - godot + - nestif + - paralleltest + - nlreturn + - cyclop + - exhaustivestruct + - gci + - gofumpt + - errorlint + - exhaustive + - ifshort + - wrapcheck + - stylecheck + +linters-settings: + govet: + check-shadowing: true + gocyclo: + min-complexity: 10 + dupl: + threshold: 100 + goconst: + min-len: 8 + min-occurrences: 3 + forbidigo: + forbid: + - (Must)?NewLexer + exclude_godoc_examples: false + + +issues: + max-per-linter: 0 + max-same: 0 + exclude-use-default: false + exclude: + # Captured by errcheck. + - '^(G104|G204):' + # Very commonly not checked. + - 'Error return value of .(.*\.Help|.*\.MarkFlagRequired|(os\.)?std(out|err)\..*|.*Close|.*Flush|os\.Remove(All)?|.*printf?|os\.(Un)?Setenv). is not checked' + - 'exported method (.*\.MarshalJSON|.*\.UnmarshalJSON|.*\.EntityURN|.*\.GoString|.*\.Pos) should have comment or be unexported' + - 'composite literal uses unkeyed fields' + - 'declaration of "err" shadows declaration' + - 'should not use dot imports' + - 'Potential file inclusion via variable' + - 'should have comment or be unexported' + - 'comment on exported var .* should be of the form' + - 'at least one file in a package should have a package comment' + - 'string literal contains the Unicode' + - 'methods on the same type should have the same receiver name' + - '_TokenType_name should be _TokenTypeName' + - '`_TokenType_map` should be `_TokenTypeMap`' diff --git a/vendor/github.com/alecthomas/chroma/.goreleaser.yml b/vendor/github.com/alecthomas/chroma/.goreleaser.yml new file mode 100644 index 000000000..8cd7592d3 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/.goreleaser.yml @@ -0,0 +1,37 @@ +project_name: chroma +release: + github: + owner: alecthomas + name: chroma +brews: + - + install: bin.install "chroma" +env: + - CGO_ENABLED=0 +builds: +- goos: + - linux + - darwin + - windows + goarch: + - arm64 + - amd64 + - "386" + goarm: + - "6" + dir: ./cmd/chroma + main: . 
+ ldflags: -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.Date}} + binary: chroma +archives: + - + format: tar.gz + name_template: '{{ .Binary }}-{{ .Version }}-{{ .Os }}-{{ .Arch }}{{ if .Arm }}v{{ + .Arm }}{{ end }}' + files: + - COPYING + - README* +snapshot: + name_template: SNAPSHOT-{{ .Commit }} +checksum: + name_template: '{{ .ProjectName }}-{{ .Version }}-checksums.txt' diff --git a/vendor/github.com/alecthomas/chroma/COPYING b/vendor/github.com/alecthomas/chroma/COPYING new file mode 100644 index 000000000..92dc39f70 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/COPYING @@ -0,0 +1,19 @@ +Copyright (C) 2017 Alec Thomas + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/alecthomas/chroma/Makefile b/vendor/github.com/alecthomas/chroma/Makefile new file mode 100644 index 000000000..34e3c41c6 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/Makefile @@ -0,0 +1,19 @@ +.PHONY: chromad upload all + +VERSION ?= $(shell git describe --tags --dirty --always) + +all: README.md tokentype_string.go + +README.md: lexers/*/*.go + ./table.py + +tokentype_string.go: types.go + go generate + +chromad: + rm -f chromad + (export CGOENABLED=0 GOOS=linux GOARCH=amd64; cd ./cmd/chromad && go build -ldflags="-X 'main.version=$(VERSION)'" -o ../../chromad .) + +upload: chromad + scp chromad root@swapoff.org: && \ + ssh root@swapoff.org 'install -m755 ./chromad /srv/http/swapoff.org/bin && service chromad restart' diff --git a/vendor/github.com/alecthomas/chroma/README.md b/vendor/github.com/alecthomas/chroma/README.md new file mode 100644 index 000000000..b62b84768 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/README.md @@ -0,0 +1,285 @@ +# Chroma — A general purpose syntax highlighter in pure Go +[![Golang Documentation](https://godoc.org/github.com/alecthomas/chroma?status.svg)](https://godoc.org/github.com/alecthomas/chroma) [![CI](https://github.com/alecthomas/chroma/actions/workflows/ci.yml/badge.svg)](https://github.com/alecthomas/chroma/actions/workflows/ci.yml) [![Slack chat](https://img.shields.io/static/v1?logo=slack&style=flat&label=slack&color=green&message=gophers)](https://invite.slack.golangbridge.org/) + +> **NOTE:** As Chroma has just been released, its API is still in flux. That said, the high-level interface should not change significantly. + +Chroma takes source code and other structured text and converts it into syntax +highlighted HTML, ANSI-coloured text, etc. 
+ +Chroma is based heavily on [Pygments](http://pygments.org/), and includes +translators for Pygments lexers and styles. + + +## Table of Contents + + + +1. [Table of Contents](#table-of-contents) +2. [Supported languages](#supported-languages) +3. [Try it](#try-it) +4. [Using the library](#using-the-library) + 1. [Quick start](#quick-start) + 2. [Identifying the language](#identifying-the-language) + 3. [Formatting the output](#formatting-the-output) + 4. [The HTML formatter](#the-html-formatter) +5. [More detail](#more-detail) + 1. [Lexers](#lexers) + 2. [Formatters](#formatters) + 3. [Styles](#styles) +6. [Command-line interface](#command-line-interface) +7. [What's missing compared to Pygments?](#whats-missing-compared-to-pygments) + + + + +## Supported languages + +Prefix | Language +:----: | -------- +A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Angular2, ANTLR, ApacheConf, APL, AppleScript, Arduino, Awk +B | Ballerina, Base Makefile, Bash, Batchfile, BibTeX, Bicep, BlitzBasic, BNF, Brainfuck +C | C, C#, C++, Caddyfile, Caddyfile Directives, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython +D | D, Dart, Diff, Django/Jinja, Docker, DTD, Dylan +E | EBNF, Elixir, Elm, EmacsLisp, Erlang +F | Factor, Fish, Forth, Fortran, FSharp +G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, Gherkin, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groff, Groovy +H | Handlebars, Haskell, Haxe, HCL, Hexdump, HLB, HTML, HTTP, Hy +I | Idris, Igor, INI, Io +J | J, Java, JavaScript, JSON, Julia, Jungle +K | Kotlin +L | Lighttpd configuration file, LLVM, Lua +M | Mako, markdown, Mason, Mathematica, Matlab, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL +N | NASM, Newspeak, Nginx configuration file, Nim, Nix +O | Objective-C, OCaml, Octave, OnesEnterprise, OpenEdge ABL, OpenSCAD, Org Mode +P | PacmanConf, Perl, PHP, PHTML, Pig, PkgConfig, PL/pgSQL, plaintext, Pony, PostgreSQL SQL dialect, PostScript, POVRay, PowerShell, Prolog, PromQL, Protocol Buffer, Puppet, Python 2, Python +Q | QBasic +R | R, Racket, Ragel, Raku, react, ReasonML, reg, reStructuredText, Rexx, Ruby, Rust +S | SAS, Sass, Scala, Scheme, Scilab, SCSS, Smalltalk, Smarty, Snobol, Solidity, SPARQL, SQL, SquidConf, Standard ML, Stylus, Svelte, Swift, SYSTEMD, systemverilog +T | TableGen, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData +V | VB.net, verilog, VHDL, VimL, vue +W | WDTE +X | XML, Xorg +Y | YAML, YANG +Z | Zig + + +_I will attempt to keep this section up to date, but an authoritative list can be +displayed with `chroma --list`._ + + +## Try it + +Try out various languages and styles on the [Chroma Playground](https://swapoff.org/chroma/playground/). + + +## Using the library + +Chroma, like Pygments, has the concepts of +[lexers](https://github.com/alecthomas/chroma/tree/master/lexers), +[formatters](https://github.com/alecthomas/chroma/tree/master/formatters) and +[styles](https://github.com/alecthomas/chroma/tree/master/styles). + +Lexers convert source text into a stream of tokens, styles specify how token +types are mapped to colours, and formatters convert tokens and styles into +formatted output. + +A package exists for each of these, containing a global `Registry` variable +with all of the registered implementations. 
There are also helper functions +for using the registry in each package, such as looking up lexers by name or +matching filenames, etc. + +In all cases, if a lexer, formatter or style can not be determined, `nil` will +be returned. In this situation you may want to default to the `Fallback` +value in each respective package, which provides sane defaults. + + +### Quick start + +A convenience function exists that can be used to simply format some source +text, without any effort: + +```go +err := quick.Highlight(os.Stdout, someSourceCode, "go", "html", "monokai") +``` + + +### Identifying the language + +To highlight code, you'll first have to identify what language the code is +written in. There are three primary ways to do that: + +1. Detect the language from its filename. + + ```go + lexer := lexers.Match("foo.go") + ``` + +3. Explicitly specify the language by its Chroma syntax ID (a full list is available from `lexers.Names()`). + + ```go + lexer := lexers.Get("go") + ``` + +3. Detect the language from its content. + + ```go + lexer := lexers.Analyse("package main\n\nfunc main()\n{\n}\n") + ``` + +In all cases, `nil` will be returned if the language can not be identified. + +```go +if lexer == nil { + lexer = lexers.Fallback +} +``` + +At this point, it should be noted that some lexers can be extremely chatty. To +mitigate this, you can use the coalescing lexer to coalesce runs of identical +token types into a single token: + +```go +lexer = chroma.Coalesce(lexer) +``` + + +### Formatting the output + +Once a language is identified you will need to pick a formatter and a style (theme). + +```go +style := styles.Get("swapoff") +if style == nil { + style = styles.Fallback +} +formatter := formatters.Get("html") +if formatter == nil { + formatter = formatters.Fallback +} +``` + +Then obtain an iterator over the tokens: + +```go +contents, err := ioutil.ReadAll(r) +iterator, err := lexer.Tokenise(nil, string(contents)) +``` + +And finally, format the tokens from the iterator: + +```go +err := formatter.Format(w, style, iterator) +``` + + +### The HTML formatter + +By default the `html` registered formatter generates standalone HTML with +embedded CSS. More flexibility is available through the `formatters/html` package. + +Firstly, the output generated by the formatter can be customised with the +following constructor options: + +- `Standalone()` - generate standalone HTML with embedded CSS. +- `WithClasses()` - use classes rather than inlined style attributes. +- `ClassPrefix(prefix)` - prefix each generated CSS class. +- `TabWidth(width)` - Set the rendered tab width, in characters. +- `WithLineNumbers()` - Render line numbers (style with `LineNumbers`). +- `LinkableLineNumbers()` - Make the line numbers linkable and be a link to themselves. +- `HighlightLines(ranges)` - Highlight lines in these ranges (style with `LineHighlight`). +- `LineNumbersInTable()` - Use a table for formatting line numbers and code, rather than spans. + +If `WithClasses()` is used, the corresponding CSS can be obtained from the formatter with: + +```go +formatter := html.New(html.WithClasses()) +err := formatter.WriteCSS(w, style) +``` + + +## More detail + + +### Lexers + +See the [Pygments documentation](http://pygments.org/docs/lexerdevelopment/) +for details on implementing lexers. Most concepts apply directly to Chroma, +but see existing lexer implementations for real examples. 
+ +In many cases lexers can be automatically converted directly from Pygments by +using the included Python 3 script `pygments2chroma.py`. I use something like +the following: + +```sh +python3 _tools/pygments2chroma.py \ + pygments.lexers.jvm.KotlinLexer \ + > lexers/k/kotlin.go \ + && gofmt -s -w lexers/k/kotlin.go +``` + +See notes in [pygments-lexers.txt](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt) +for a list of lexers, and notes on some of the issues importing them. + + +### Formatters + +Chroma supports HTML output, as well as terminal output in 8 colour, 256 colour, and true-colour. + +A `noop` formatter is included that outputs the token text only, and a `tokens` +formatter outputs raw tokens. The latter is useful for debugging lexers. + + +### Styles + +Chroma styles use the [same syntax](http://pygments.org/docs/styles/) as Pygments. + +All Pygments styles have been converted to Chroma using the `_tools/style.py` script. + +When you work with one of [Chroma's styles](https://github.com/alecthomas/chroma/tree/master/styles), know that the `chroma.Background` token type provides the default style for tokens. It does so by defining a foreground color and background color. + +For example, this gives each token name not defined in the style a default color of `#f8f8f8` and uses `#000000` for the highlighted code block's background: + +~~~go +chroma.Background: "#f8f8f2 bg:#000000", +~~~ + +Also, token types in a style file are hierarchical. For instance, when `CommentSpecial` is not defined, Chroma uses the token style from `Comment`. So when several comment tokens use the same color, you'll only need to define `Comment` and override the one that has a different color. + +For a quick overview of the available styles and how they look, check out the [Chroma Style Gallery](https://xyproto.github.io/splash/docs/). + + +## Command-line interface + +A command-line interface to Chroma is included. + +Binaries are available to install from [the releases page](https://github.com/alecthomas/chroma/releases). + +The CLI can be used as a preprocessor to colorise output of `less(1)`, +see documentation for the `LESSOPEN` environment variable. + +The `--fail` flag can be used to suppress output and return with exit status +1 to facilitate falling back to some other preprocessor in case chroma +does not resolve a specific lexer to use for the given file. For example: + +```shell +export LESSOPEN='| p() { chroma --fail "$1" || cat "$1"; }; p "%s"' +``` + +Replace `cat` with your favourite fallback preprocessor. + +When invoked as `.lessfilter`, the `--fail` flag is automatically turned +on under the hood for easy integration with [lesspipe shipping with +Debian and derivatives](https://manpages.debian.org/lesspipe#USER_DEFINED_FILTERS); +for that setup the `chroma` executable can be just symlinked to `~/.lessfilter`. + + +## What's missing compared to Pygments? + +- Quite a few lexers, for various reasons (pull-requests welcome): + - Pygments lexers for complex languages often include custom code to + handle certain aspects, such as Raku's ability to nest code inside + regular expressions. These require time and effort to convert. + - I mostly only converted languages I had heard of, to reduce the porting cost. +- Some more esoteric features of Pygments are omitted for simplicity. +- Though the Chroma API supports content detection, very few languages support them. + I have plans to implement a statistical analyser at some point, but not enough time. 
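For context, here is a minimal sketch (not part of the patch) of how the goldmark-highlighting and chroma modules added to go.mod above are typically wired together with class-based output. The standalone `main` package, the sample input, and the `monokai` style choice are illustrative assumptions; the real integration lives in modules/markup/markdown/markdown.go above.

```go
package main

import (
	"bytes"
	"os"

	chromahtml "github.com/alecthomas/chroma/formatters/html"
	"github.com/alecthomas/chroma/styles"
	"github.com/yuin/goldmark"
	highlighting "github.com/yuin/goldmark-highlighting"
)

func main() {
	// WithClasses(true) makes chroma emit CSS classes instead of inline styles.
	// The markdown.go hunk above additionally sets PreventSurroundingPre(true)
	// and a custom wrapper renderer so Gitea can write its own
	// <pre class="code-block"><code class="chroma language-x"> wrapper.
	md := goldmark.New(
		goldmark.WithExtensions(
			highlighting.NewHighlighting(
				highlighting.WithFormatOptions(
					chromahtml.WithClasses(true),
				),
			),
		),
	)

	src := []byte("~~~go\nfmt.Println(\"hi\")\n~~~\n")
	var buf bytes.Buffer
	if err := md.Convert(src, &buf); err != nil {
		panic(err)
	}
	os.Stdout.Write(buf.Bytes())

	// The matching stylesheet is generated once from any chroma style;
	// with classes enabled the highlighted markup itself is style-independent.
	formatter := chromahtml.New(chromahtml.WithClasses(true))
	_ = formatter.WriteCSS(os.Stdout, styles.Get("monokai"))
}
```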
diff --git a/vendor/github.com/alecthomas/chroma/coalesce.go b/vendor/github.com/alecthomas/chroma/coalesce.go new file mode 100644 index 000000000..f5048951a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/coalesce.go @@ -0,0 +1,35 @@ +package chroma + +// Coalesce is a Lexer interceptor that collapses runs of common types into a single token. +func Coalesce(lexer Lexer) Lexer { return &coalescer{lexer} } + +type coalescer struct{ Lexer } + +func (d *coalescer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { + var prev Token + it, err := d.Lexer.Tokenise(options, text) + if err != nil { + return nil, err + } + return func() Token { + for token := it(); token != (EOF); token = it() { + if len(token.Value) == 0 { + continue + } + if prev == EOF { + prev = token + } else { + if prev.Type == token.Type && len(prev.Value) < 8192 { + prev.Value += token.Value + } else { + out := prev + prev = token + return out + } + } + } + out := prev + prev = EOF + return out + }, nil +} diff --git a/vendor/github.com/alecthomas/chroma/colour.go b/vendor/github.com/alecthomas/chroma/colour.go new file mode 100644 index 000000000..15d794ce2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/colour.go @@ -0,0 +1,164 @@ +package chroma + +import ( + "fmt" + "math" + "strconv" + "strings" +) + +// ANSI2RGB maps ANSI colour names, as supported by Chroma, to hex RGB values. +var ANSI2RGB = map[string]string{ + "#ansiblack": "000000", + "#ansidarkred": "7f0000", + "#ansidarkgreen": "007f00", + "#ansibrown": "7f7fe0", + "#ansidarkblue": "00007f", + "#ansipurple": "7f007f", + "#ansiteal": "007f7f", + "#ansilightgray": "e5e5e5", + // Normal + "#ansidarkgray": "555555", + "#ansired": "ff0000", + "#ansigreen": "00ff00", + "#ansiyellow": "ffff00", + "#ansiblue": "0000ff", + "#ansifuchsia": "ff00ff", + "#ansiturquoise": "00ffff", + "#ansiwhite": "ffffff", + + // Aliases without the "ansi" prefix, because...why? + "#black": "000000", + "#darkred": "7f0000", + "#darkgreen": "007f00", + "#brown": "7f7fe0", + "#darkblue": "00007f", + "#purple": "7f007f", + "#teal": "007f7f", + "#lightgray": "e5e5e5", + // Normal + "#darkgray": "555555", + "#red": "ff0000", + "#green": "00ff00", + "#yellow": "ffff00", + "#blue": "0000ff", + "#fuchsia": "ff00ff", + "#turquoise": "00ffff", + "#white": "ffffff", +} + +// Colour represents an RGB colour. +type Colour int32 + +// NewColour creates a Colour directly from RGB values. +func NewColour(r, g, b uint8) Colour { + return ParseColour(fmt.Sprintf("%02x%02x%02x", r, g, b)) +} + +// Distance between this colour and another. +// +// This uses the approach described here (https://www.compuphase.com/cmetric.htm). +// This is not as accurate as LAB, et. al. but is *vastly* simpler and sufficient for our needs. +func (c Colour) Distance(e2 Colour) float64 { + ar, ag, ab := int64(c.Red()), int64(c.Green()), int64(c.Blue()) + br, bg, bb := int64(e2.Red()), int64(e2.Green()), int64(e2.Blue()) + rmean := (ar + br) / 2 + r := ar - br + g := ag - bg + b := ab - bb + return math.Sqrt(float64((((512 + rmean) * r * r) >> 8) + 4*g*g + (((767 - rmean) * b * b) >> 8))) +} + +// Brighten returns a copy of this colour with its brightness adjusted. +// +// If factor is negative, the colour is darkened. +// +// Uses approach described here (http://www.pvladov.com/2012/09/make-color-lighter-or-darker.html). 
+func (c Colour) Brighten(factor float64) Colour { + r := float64(c.Red()) + g := float64(c.Green()) + b := float64(c.Blue()) + + if factor < 0 { + factor++ + r *= factor + g *= factor + b *= factor + } else { + r = (255-r)*factor + r + g = (255-g)*factor + g + b = (255-b)*factor + b + } + return NewColour(uint8(r), uint8(g), uint8(b)) +} + +// BrightenOrDarken brightens a colour if it is < 0.5 brighteness or darkens if > 0.5 brightness. +func (c Colour) BrightenOrDarken(factor float64) Colour { + if c.Brightness() < 0.5 { + return c.Brighten(factor) + } + return c.Brighten(-factor) +} + +// Brightness of the colour (roughly) in the range 0.0 to 1.0 +func (c Colour) Brightness() float64 { + return (float64(c.Red()) + float64(c.Green()) + float64(c.Blue())) / 255.0 / 3.0 +} + +// ParseColour in the forms #rgb, #rrggbb, #ansi, or #. +// Will return an "unset" colour if invalid. +func ParseColour(colour string) Colour { + colour = normaliseColour(colour) + n, err := strconv.ParseUint(colour, 16, 32) + if err != nil { + return 0 + } + return Colour(n + 1) +} + +// MustParseColour is like ParseColour except it panics if the colour is invalid. +// +// Will panic if colour is in an invalid format. +func MustParseColour(colour string) Colour { + parsed := ParseColour(colour) + if !parsed.IsSet() { + panic(fmt.Errorf("invalid colour %q", colour)) + } + return parsed +} + +// IsSet returns true if the colour is set. +func (c Colour) IsSet() bool { return c != 0 } + +func (c Colour) String() string { return fmt.Sprintf("#%06x", int(c-1)) } +func (c Colour) GoString() string { return fmt.Sprintf("Colour(0x%06x)", int(c-1)) } + +// Red component of colour. +func (c Colour) Red() uint8 { return uint8(((c - 1) >> 16) & 0xff) } + +// Green component of colour. +func (c Colour) Green() uint8 { return uint8(((c - 1) >> 8) & 0xff) } + +// Blue component of colour. +func (c Colour) Blue() uint8 { return uint8((c - 1) & 0xff) } + +// Colours is an orderable set of colours. +type Colours []Colour + +func (c Colours) Len() int { return len(c) } +func (c Colours) Swap(i, j int) { c[i], c[j] = c[j], c[i] } +func (c Colours) Less(i, j int) bool { return c[i] < c[j] } + +// Convert colours to #rrggbb. +func normaliseColour(colour string) string { + if ansi, ok := ANSI2RGB[colour]; ok { + return ansi + } + if strings.HasPrefix(colour, "#") { + colour = colour[1:] + if len(colour) == 3 { + return colour[0:1] + colour[0:1] + colour[1:2] + colour[1:2] + colour[2:3] + colour[2:3] + } + } + return colour +} diff --git a/vendor/github.com/alecthomas/chroma/delegate.go b/vendor/github.com/alecthomas/chroma/delegate.go new file mode 100644 index 000000000..5cef01bd7 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/delegate.go @@ -0,0 +1,137 @@ +package chroma + +import ( + "bytes" +) + +type delegatingLexer struct { + root Lexer + language Lexer +} + +// DelegatingLexer combines two lexers to handle the common case of a language embedded inside another, such as PHP +// inside HTML or PHP inside plain text. +// +// It takes two lexer as arguments: a root lexer and a language lexer. First everything is scanned using the language +// lexer, which must return "Other" for unrecognised tokens. Then all "Other" tokens are lexed using the root lexer. +// Finally, these two sets of tokens are merged. +// +// The lexers from the template lexer package use this base lexer. 
+func DelegatingLexer(root Lexer, language Lexer) Lexer { + return &delegatingLexer{ + root: root, + language: language, + } +} + +func (d *delegatingLexer) Config() *Config { + return d.language.Config() +} + +// An insertion is the character range where language tokens should be inserted. +type insertion struct { + start, end int + tokens []Token +} + +func (d *delegatingLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { // nolint: gocognit + tokens, err := Tokenise(Coalesce(d.language), options, text) + if err != nil { + return nil, err + } + // Compute insertions and gather "Other" tokens. + others := &bytes.Buffer{} + insertions := []*insertion{} + var insert *insertion + offset := 0 + var last Token + for _, t := range tokens { + if t.Type == Other { + if last != EOF && insert != nil && last.Type != Other { + insert.end = offset + } + others.WriteString(t.Value) + } else { + if last == EOF || last.Type == Other { + insert = &insertion{start: offset} + insertions = append(insertions, insert) + } + insert.tokens = append(insert.tokens, t) + } + last = t + offset += len(t.Value) + } + + if len(insertions) == 0 { + return d.root.Tokenise(options, text) + } + + // Lex the other tokens. + rootTokens, err := Tokenise(Coalesce(d.root), options, others.String()) + if err != nil { + return nil, err + } + + // Interleave the two sets of tokens. + var out []Token + offset = 0 // Offset into text. + tokenIndex := 0 + nextToken := func() Token { + if tokenIndex >= len(rootTokens) { + return EOF + } + t := rootTokens[tokenIndex] + tokenIndex++ + return t + } + insertionIndex := 0 + nextInsertion := func() *insertion { + if insertionIndex >= len(insertions) { + return nil + } + i := insertions[insertionIndex] + insertionIndex++ + return i + } + t := nextToken() + i := nextInsertion() + for t != EOF || i != nil { + // fmt.Printf("%d->%d:%q %d->%d:%q\n", offset, offset+len(t.Value), t.Value, i.start, i.end, Stringify(i.tokens...)) + if t == EOF || (i != nil && i.start < offset+len(t.Value)) { + var l Token + l, t = splitToken(t, i.start-offset) + if l != EOF { + out = append(out, l) + offset += len(l.Value) + } + out = append(out, i.tokens...) + offset += i.end - i.start + if t == EOF { + t = nextToken() + } + i = nextInsertion() + } else { + out = append(out, t) + offset += len(t.Value) + t = nextToken() + } + } + return Literator(out...), nil +} + +func splitToken(t Token, offset int) (l Token, r Token) { + if t == EOF { + return EOF, EOF + } + if offset == 0 { + return EOF, t + } + if offset == len(t.Value) { + return t, EOF + } + l = t.Clone() + r = t.Clone() + l.Value = l.Value[:offset] + r.Value = r.Value[offset:] + return +} diff --git a/vendor/github.com/alecthomas/chroma/doc.go b/vendor/github.com/alecthomas/chroma/doc.go new file mode 100644 index 000000000..4dde77c81 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/doc.go @@ -0,0 +1,7 @@ +// Package chroma takes source code and other structured text and converts it into syntax highlighted HTML, ANSI- +// coloured text, etc. +// +// Chroma is based heavily on Pygments, and includes translators for Pygments lexers and styles. 
+// +// For more information, go here: https://github.com/alecthomas/chroma +package chroma diff --git a/vendor/github.com/alecthomas/chroma/formatter.go b/vendor/github.com/alecthomas/chroma/formatter.go new file mode 100644 index 000000000..00dd5d8df --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/formatter.go @@ -0,0 +1,43 @@ +package chroma + +import ( + "io" +) + +// A Formatter for Chroma lexers. +type Formatter interface { + // Format returns a formatting function for tokens. + // + // If the iterator panics, the Formatter should recover. + Format(w io.Writer, style *Style, iterator Iterator) error +} + +// A FormatterFunc is a Formatter implemented as a function. +// +// Guards against iterator panics. +type FormatterFunc func(w io.Writer, style *Style, iterator Iterator) error + +func (f FormatterFunc) Format(w io.Writer, s *Style, it Iterator) (err error) { // nolint + defer func() { + if perr := recover(); perr != nil { + err = perr.(error) + } + }() + return f(w, s, it) +} + +type recoveringFormatter struct { + Formatter +} + +func (r recoveringFormatter) Format(w io.Writer, s *Style, it Iterator) (err error) { + defer func() { + if perr := recover(); perr != nil { + err = perr.(error) + } + }() + return r.Formatter.Format(w, s, it) +} + +// RecoveringFormatter wraps a formatter with panic recovery. +func RecoveringFormatter(formatter Formatter) Formatter { return recoveringFormatter{formatter} } diff --git a/vendor/github.com/alecthomas/chroma/formatters/html/html.go b/vendor/github.com/alecthomas/chroma/formatters/html/html.go new file mode 100644 index 000000000..164c842d2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/formatters/html/html.go @@ -0,0 +1,500 @@ +package html + +import ( + "fmt" + "html" + "io" + "sort" + "strings" + + "github.com/alecthomas/chroma" +) + +// Option sets an option of the HTML formatter. +type Option func(f *Formatter) + +// Standalone configures the HTML formatter for generating a standalone HTML document. +func Standalone(b bool) Option { return func(f *Formatter) { f.standalone = b } } + +// ClassPrefix sets the CSS class prefix. +func ClassPrefix(prefix string) Option { return func(f *Formatter) { f.prefix = prefix } } + +// WithClasses emits HTML using CSS classes, rather than inline styles. +func WithClasses(b bool) Option { return func(f *Formatter) { f.Classes = b } } + +// WithAllClasses disables an optimisation that omits redundant CSS classes. +func WithAllClasses(b bool) Option { return func(f *Formatter) { f.allClasses = b } } + +// TabWidth sets the number of characters for a tab. Defaults to 8. +func TabWidth(width int) Option { return func(f *Formatter) { f.tabWidth = width } } + +// PreventSurroundingPre prevents the surrounding pre tags around the generated code. +func PreventSurroundingPre(b bool) Option { + return func(f *Formatter) { + if b { + f.preWrapper = nopPreWrapper + } else { + f.preWrapper = defaultPreWrapper + } + } +} + +// WithPreWrapper allows control of the surrounding pre tags. +func WithPreWrapper(wrapper PreWrapper) Option { + return func(f *Formatter) { + f.preWrapper = wrapper + } +} + +// WrapLongLines wraps long lines. +func WrapLongLines(b bool) Option { + return func(f *Formatter) { + f.wrapLongLines = b + } +} + +// WithLineNumbers formats output with line numbers. 
+func WithLineNumbers(b bool) Option { + return func(f *Formatter) { + f.lineNumbers = b + } +} + +// LineNumbersInTable will, when combined with WithLineNumbers, separate the line numbers +// and code in table td's, which make them copy-and-paste friendly. +func LineNumbersInTable(b bool) Option { + return func(f *Formatter) { + f.lineNumbersInTable = b + } +} + +// LinkableLineNumbers decorates the line numbers HTML elements with an "id" +// attribute so they can be linked. +func LinkableLineNumbers(b bool, prefix string) Option { + return func(f *Formatter) { + f.linkableLineNumbers = b + f.lineNumbersIDPrefix = prefix + } +} + +// HighlightLines higlights the given line ranges with the Highlight style. +// +// A range is the beginning and ending of a range as 1-based line numbers, inclusive. +func HighlightLines(ranges [][2]int) Option { + return func(f *Formatter) { + f.highlightRanges = ranges + sort.Sort(f.highlightRanges) + } +} + +// BaseLineNumber sets the initial number to start line numbering at. Defaults to 1. +func BaseLineNumber(n int) Option { + return func(f *Formatter) { + f.baseLineNumber = n + } +} + +// New HTML formatter. +func New(options ...Option) *Formatter { + f := &Formatter{ + baseLineNumber: 1, + preWrapper: defaultPreWrapper, + } + for _, option := range options { + option(f) + } + return f +} + +// PreWrapper defines the operations supported in WithPreWrapper. +type PreWrapper interface { + // Start is called to write a start
<pre> element.
+	// The code flag tells whether this block surrounds
+	// highlighted code. This will be false when surrounding
+	// line numbers.
+	Start(code bool, styleAttr string) string
+
+	// End is called to write the end </pre>
element. + End(code bool) string +} + +type preWrapper struct { + start func(code bool, styleAttr string) string + end func(code bool) string +} + +func (p preWrapper) Start(code bool, styleAttr string) string { + return p.start(code, styleAttr) +} + +func (p preWrapper) End(code bool) string { + return p.end(code) +} + +var ( + nopPreWrapper = preWrapper{ + start: func(code bool, styleAttr string) string { return "" }, + end: func(code bool) string { return "" }, + } + defaultPreWrapper = preWrapper{ + start: func(code bool, styleAttr string) string { + if code { + return fmt.Sprintf(`
<pre tabindex="0"%s><code>`, styleAttr)
+			}
+
+			return fmt.Sprintf(`
<pre tabindex="0"%s>`, styleAttr)
+		},
+		end: func(code bool) string {
+			if code {
+				return `
</code></pre>` + } + + return `</pre>
` + }, + } +) + +// Formatter that generates HTML. +type Formatter struct { + standalone bool + prefix string + Classes bool // Exported field to detect when classes are being used + allClasses bool + preWrapper PreWrapper + tabWidth int + wrapLongLines bool + lineNumbers bool + lineNumbersInTable bool + linkableLineNumbers bool + lineNumbersIDPrefix string + highlightRanges highlightRanges + baseLineNumber int +} + +type highlightRanges [][2]int + +func (h highlightRanges) Len() int { return len(h) } +func (h highlightRanges) Swap(i, j int) { h[i], h[j] = h[j], h[i] } +func (h highlightRanges) Less(i, j int) bool { return h[i][0] < h[j][0] } + +func (f *Formatter) Format(w io.Writer, style *chroma.Style, iterator chroma.Iterator) (err error) { + return f.writeHTML(w, style, iterator.Tokens()) +} + +// We deliberately don't use html/template here because it is two orders of magnitude slower (benchmarked). +// +// OTOH we need to be super careful about correct escaping... +func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.Token) (err error) { // nolint: gocyclo + css := f.styleToCSS(style) + if !f.Classes { + for t, style := range css { + css[t] = compressStyle(style) + } + } + if f.standalone { + fmt.Fprint(w, "\n") + if f.Classes { + fmt.Fprint(w, "") + } + fmt.Fprintf(w, "\n", f.styleAttr(css, chroma.Background)) + } + + wrapInTable := f.lineNumbers && f.lineNumbersInTable + + lines := chroma.SplitTokensIntoLines(tokens) + lineDigits := len(fmt.Sprintf("%d", f.baseLineNumber+len(lines)-1)) + highlightIndex := 0 + + if wrapInTable { + // List line numbers in its own + fmt.Fprintf(w, "\n", f.styleAttr(css, chroma.PreWrapper)) + fmt.Fprintf(w, "", f.styleAttr(css, chroma.LineTable)) + fmt.Fprintf(w, "\n", f.styleAttr(css, chroma.LineTableTD)) + fmt.Fprintf(w, f.preWrapper.Start(false, f.styleAttr(css, chroma.PreWrapper))) + for index := range lines { + line := f.baseLineNumber + index + highlight, next := f.shouldHighlight(highlightIndex, line) + if next { + highlightIndex++ + } + if highlight { + fmt.Fprintf(w, "", f.styleAttr(css, chroma.LineHighlight)) + } + + fmt.Fprintf(w, "%s\n", f.styleAttr(css, chroma.LineNumbersTable), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(lineDigits, line)) + + if highlight { + fmt.Fprintf(w, "") + } + } + fmt.Fprint(w, f.preWrapper.End(false)) + fmt.Fprint(w, "\n") + fmt.Fprintf(w, "\n", f.styleAttr(css, chroma.LineTableTD, "width:100%")) + } + + fmt.Fprintf(w, f.preWrapper.Start(true, f.styleAttr(css, chroma.PreWrapper))) + + highlightIndex = 0 + for index, tokens := range lines { + // 1-based line number. 
+ line := f.baseLineNumber + index + highlight, next := f.shouldHighlight(highlightIndex, line) + if next { + highlightIndex++ + } + + // Start of Line + fmt.Fprint(w, ``) + } else { + fmt.Fprintf(w, "%s>", f.styleAttr(css, chroma.Line)) + } + + // Line number + if f.lineNumbers && !wrapInTable { + fmt.Fprintf(w, "%s", f.styleAttr(css, chroma.LineNumbers), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(lineDigits, line)) + } + + fmt.Fprintf(w, ``, f.styleAttr(css, chroma.CodeLine)) + + for _, token := range tokens { + html := html.EscapeString(token.String()) + attr := f.styleAttr(css, token.Type) + if attr != "" { + html = fmt.Sprintf("%s", attr, html) + } + fmt.Fprint(w, html) + } + + fmt.Fprint(w, ``) // End of CodeLine + + fmt.Fprint(w, ``) // End of Line + } + + fmt.Fprintf(w, f.preWrapper.End(true)) + + if wrapInTable { + fmt.Fprint(w, "\n") + fmt.Fprint(w, "\n") + } + + if f.standalone { + fmt.Fprint(w, "\n\n") + fmt.Fprint(w, "\n") + } + + return nil +} + +func (f *Formatter) lineIDAttribute(line int) string { + if !f.linkableLineNumbers { + return "" + } + return fmt.Sprintf(" id=\"%s\"", f.lineID(line)) +} + +func (f *Formatter) lineTitleWithLinkIfNeeded(lineDigits, line int) string { + title := fmt.Sprintf("%*d", lineDigits, line) + if !f.linkableLineNumbers { + return title + } + return fmt.Sprintf("%s", f.lineID(line), title) +} + +func (f *Formatter) lineID(line int) string { + return fmt.Sprintf("%s%d", f.lineNumbersIDPrefix, line) +} + +func (f *Formatter) shouldHighlight(highlightIndex, line int) (bool, bool) { + next := false + for highlightIndex < len(f.highlightRanges) && line > f.highlightRanges[highlightIndex][1] { + highlightIndex++ + next = true + } + if highlightIndex < len(f.highlightRanges) { + hrange := f.highlightRanges[highlightIndex] + if line >= hrange[0] && line <= hrange[1] { + return true, next + } + } + return false, next +} + +func (f *Formatter) class(t chroma.TokenType) string { + for t != 0 { + if cls, ok := chroma.StandardTypes[t]; ok { + if cls != "" { + return f.prefix + cls + } + return "" + } + t = t.Parent() + } + if cls := chroma.StandardTypes[t]; cls != "" { + return f.prefix + cls + } + return "" +} + +func (f *Formatter) styleAttr(styles map[chroma.TokenType]string, tt chroma.TokenType, extraCSS ...string) string { + if f.Classes { + cls := f.class(tt) + if cls == "" { + return "" + } + return fmt.Sprintf(` class="%s"`, cls) + } + if _, ok := styles[tt]; !ok { + tt = tt.SubCategory() + if _, ok := styles[tt]; !ok { + tt = tt.Category() + if _, ok := styles[tt]; !ok { + return "" + } + } + } + css := []string{styles[tt]} + css = append(css, extraCSS...) + return fmt.Sprintf(` style="%s"`, strings.Join(css, ";")) +} + +func (f *Formatter) tabWidthStyle() string { + if f.tabWidth != 0 && f.tabWidth != 8 { + return fmt.Sprintf("; -moz-tab-size: %[1]d; -o-tab-size: %[1]d; tab-size: %[1]d", f.tabWidth) + } + return "" +} + +// WriteCSS writes CSS style definitions (without any surrounding HTML). +func (f *Formatter) WriteCSS(w io.Writer, style *chroma.Style) error { + css := f.styleToCSS(style) + // Special-case background as it is mapped to the outer ".chroma" class. + if _, err := fmt.Fprintf(w, "/* %s */ .%sbg { %s }\n", chroma.Background, f.prefix, css[chroma.Background]); err != nil { + return err + } + // Special-case PreWrapper as it is the ".chroma" class. 
+ if _, err := fmt.Fprintf(w, "/* %s */ .%schroma { %s }\n", chroma.PreWrapper, f.prefix, css[chroma.PreWrapper]); err != nil { + return err + } + // Special-case code column of table to expand width. + if f.lineNumbers && f.lineNumbersInTable { + if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s:last-child { width: 100%%; }", + chroma.LineTableTD, f.prefix, f.class(chroma.LineTableTD)); err != nil { + return err + } + } + // Special-case line number highlighting when targeted. + if f.lineNumbers || f.lineNumbersInTable { + targetedLineCSS := StyleEntryToCSS(style.Get(chroma.LineHighlight)) + for _, tt := range []chroma.TokenType{chroma.LineNumbers, chroma.LineNumbersTable} { + fmt.Fprintf(w, "/* %s targeted by URL anchor */ .%schroma .%s:target { %s }\n", tt, f.prefix, f.class(tt), targetedLineCSS) + } + } + tts := []int{} + for tt := range css { + tts = append(tts, int(tt)) + } + sort.Ints(tts) + for _, ti := range tts { + tt := chroma.TokenType(ti) + switch tt { + case chroma.Background, chroma.PreWrapper: + continue + } + class := f.class(tt) + if class == "" { + continue + } + styles := css[tt] + if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s { %s }\n", tt, f.prefix, class, styles); err != nil { + return err + } + } + return nil +} + +func (f *Formatter) styleToCSS(style *chroma.Style) map[chroma.TokenType]string { + classes := map[chroma.TokenType]string{} + bg := style.Get(chroma.Background) + // Convert the style. + for t := range chroma.StandardTypes { + entry := style.Get(t) + if t != chroma.Background { + entry = entry.Sub(bg) + } + if !f.allClasses && entry.IsZero() { + continue + } + classes[t] = StyleEntryToCSS(entry) + } + classes[chroma.Background] += f.tabWidthStyle() + classes[chroma.PreWrapper] += classes[chroma.Background] + `;` + // Make PreWrapper a grid to show highlight style with full width. + if len(f.highlightRanges) > 0 { + classes[chroma.PreWrapper] += `display: grid;` + } + // Make PreWrapper wrap long lines. + if f.wrapLongLines { + classes[chroma.PreWrapper] += `white-space: pre-wrap; word-break: break-word;` + } + lineNumbersStyle := `white-space: pre; user-select: none; margin-right: 0.4em; padding: 0 0.4em 0 0.4em;` + // All rules begin with default rules followed by user provided rules + classes[chroma.Line] = `display: flex;` + classes[chroma.Line] + classes[chroma.LineNumbers] = lineNumbersStyle + classes[chroma.LineNumbers] + classes[chroma.LineNumbersTable] = lineNumbersStyle + classes[chroma.LineNumbersTable] + classes[chroma.LineTable] = "border-spacing: 0; padding: 0; margin: 0; border: 0;" + classes[chroma.LineTable] + classes[chroma.LineTableTD] = "vertical-align: top; padding: 0; margin: 0; border: 0;" + classes[chroma.LineTableTD] + return classes +} + +// StyleEntryToCSS converts a chroma.StyleEntry to CSS attributes. +func StyleEntryToCSS(e chroma.StyleEntry) string { + styles := []string{} + if e.Colour.IsSet() { + styles = append(styles, "color: "+e.Colour.String()) + } + if e.Background.IsSet() { + styles = append(styles, "background-color: "+e.Background.String()) + } + if e.Bold == chroma.Yes { + styles = append(styles, "font-weight: bold") + } + if e.Italic == chroma.Yes { + styles = append(styles, "font-style: italic") + } + if e.Underline == chroma.Yes { + styles = append(styles, "text-decoration: underline") + } + return strings.Join(styles, "; ") +} + +// Compress CSS attributes - remove spaces, transform 6-digit colours to 3. 
+func compressStyle(s string) string { + parts := strings.Split(s, ";") + out := []string{} + for _, p := range parts { + p = strings.Join(strings.Fields(p), " ") + p = strings.Replace(p, ": ", ":", 1) + if strings.Contains(p, "#") { + c := p[len(p)-6:] + if c[0] == c[1] && c[2] == c[3] && c[4] == c[5] { + p = p[:len(p)-6] + c[0:1] + c[2:3] + c[4:5] + } + } + out = append(out, p) + } + return strings.Join(out, ";") +} diff --git a/vendor/github.com/alecthomas/chroma/go.mod b/vendor/github.com/alecthomas/chroma/go.mod new file mode 100644 index 000000000..c406562f2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/go.mod @@ -0,0 +1,9 @@ +module github.com/alecthomas/chroma + +go 1.13 + +require ( + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/dlclark/regexp2 v1.4.0 + github.com/stretchr/testify v1.7.0 +) diff --git a/vendor/github.com/alecthomas/chroma/go.sum b/vendor/github.com/alecthomas/chroma/go.sum new file mode 100644 index 000000000..734f796c8 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/go.sum @@ -0,0 +1,14 @@ +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E= +github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/vendor/github.com/alecthomas/chroma/iterator.go b/vendor/github.com/alecthomas/chroma/iterator.go new file mode 100644 index 000000000..d5175de89 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/iterator.go @@ -0,0 +1,76 @@ +package chroma + +import "strings" + +// An Iterator across tokens. +// +// EOF will be returned at the end of the Token stream. +// +// If an error occurs within an Iterator, it may propagate this in a panic. Formatters should recover. +type Iterator func() Token + +// Tokens consumes all tokens from the iterator and returns them as a slice. +func (i Iterator) Tokens() []Token { + var out []Token + for t := i(); t != EOF; t = i() { + out = append(out, t) + } + return out +} + +// Concaterator concatenates tokens from a series of iterators. +func Concaterator(iterators ...Iterator) Iterator { + return func() Token { + for len(iterators) > 0 { + t := iterators[0]() + if t != EOF { + return t + } + iterators = iterators[1:] + } + return EOF + } +} + +// Literator converts a sequence of literal Tokens into an Iterator. 
+func Literator(tokens ...Token) Iterator { + return func() Token { + if len(tokens) == 0 { + return EOF + } + token := tokens[0] + tokens = tokens[1:] + return token + } +} + +// SplitTokensIntoLines splits tokens containing newlines in two. +func SplitTokensIntoLines(tokens []Token) (out [][]Token) { + var line []Token // nolint: prealloc + for _, token := range tokens { + for strings.Contains(token.Value, "\n") { + parts := strings.SplitAfterN(token.Value, "\n", 2) + // Token becomes the tail. + token.Value = parts[1] + + // Append the head to the line and flush the line. + clone := token.Clone() + clone.Value = parts[0] + line = append(line, clone) + out = append(out, line) + line = nil + } + line = append(line, token) + } + if len(line) > 0 { + out = append(out, line) + } + // Strip empty trailing token line. + if len(out) > 0 { + last := out[len(out)-1] + if len(last) == 1 && last[0].Value == "" { + out = out[:len(out)-1] + } + } + return +} diff --git a/vendor/github.com/alecthomas/chroma/lexer.go b/vendor/github.com/alecthomas/chroma/lexer.go new file mode 100644 index 000000000..fe625198e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexer.go @@ -0,0 +1,128 @@ +package chroma + +import ( + "fmt" + "strings" +) + +var ( + defaultOptions = &TokeniseOptions{ + State: "root", + EnsureLF: true, + } +) + +// Config for a lexer. +type Config struct { + // Name of the lexer. + Name string + + // Shortcuts for the lexer + Aliases []string + + // File name globs + Filenames []string + + // Secondary file name globs + AliasFilenames []string + + // MIME types + MimeTypes []string + + // Regex matching is case-insensitive. + CaseInsensitive bool + + // Regex matches all characters. + DotAll bool + + // Regex does not match across lines ($ matches EOL). + // + // Defaults to multiline. + NotMultiline bool + + // Don't strip leading and trailing newlines from the input. + // DontStripNL bool + + // Strip all leading and trailing whitespace from the input + // StripAll bool + + // Make sure that the input ends with a newline. This + // is required for some lexers that consume input linewise. + EnsureNL bool + + // If given and greater than 0, expand tabs in the input. + // TabSize int + + // Priority of lexer. + // + // If this is 0 it will be treated as a default of 1. + Priority float32 +} + +// Token output to formatter. +type Token struct { + Type TokenType `json:"type"` + Value string `json:"value"` +} + +func (t *Token) String() string { return t.Value } +func (t *Token) GoString() string { return fmt.Sprintf("&Token{%s, %q}", t.Type, t.Value) } + +// Clone returns a clone of the Token. +func (t *Token) Clone() Token { + return *t +} + +// EOF is returned by lexers at the end of input. +var EOF Token + +// TokeniseOptions contains options for tokenisers. +type TokeniseOptions struct { + // State to start tokenisation in. Defaults to "root". + State string + // Nested tokenisation. + Nested bool + + // If true, all EOLs are converted into LF + // by replacing CRLF and CR + EnsureLF bool +} + +// A Lexer for tokenising source code. +type Lexer interface { + // Config describing the features of the Lexer. + Config() *Config + // Tokenise returns an Iterator over tokens in text. + Tokenise(options *TokeniseOptions, text string) (Iterator, error) +} + +// Lexers is a slice of lexers sortable by name. 
+type Lexers []Lexer + +func (l Lexers) Len() int { return len(l) } +func (l Lexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] } +func (l Lexers) Less(i, j int) bool { + return strings.ToLower(l[i].Config().Name) < strings.ToLower(l[j].Config().Name) +} + +// PrioritisedLexers is a slice of lexers sortable by priority. +type PrioritisedLexers []Lexer + +func (l PrioritisedLexers) Len() int { return len(l) } +func (l PrioritisedLexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] } +func (l PrioritisedLexers) Less(i, j int) bool { + ip := l[i].Config().Priority + if ip == 0 { + ip = 1 + } + jp := l[j].Config().Priority + if jp == 0 { + jp = 1 + } + return ip > jp +} + +// Analyser determines how appropriate this lexer is for the given text. +type Analyser interface { + AnalyseText(text string) float32 +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/README.md b/vendor/github.com/alecthomas/chroma/lexers/README.md new file mode 100644 index 000000000..b4ed292b1 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/README.md @@ -0,0 +1,40 @@ +# Lexer tests + +The tests in this directory feed a known input `testdata/.actual` into the parser for `` and check +that its output matches `.exported`. + +It is also possible to perform several tests on a same parser ``, by placing know inputs `*.actual` into a +directory `testdata//`. + +## Running the tests + +Run the tests as normal: +```go +go test ./lexers +``` + +## Update existing tests +When you add a new test data file (`*.actual`), you need to regenerate all tests. That's how Chroma creates the `*.expected` test file based on the corresponding lexer. + +To regenerate all tests, type in your terminal: + +```go +RECORD=true go test ./lexers +``` + +This first sets the `RECORD` environment variable to `true`. Then it runs `go test` on the `./lexers` directory of the Chroma project. + +(That environment variable tells Chroma it needs to output test data. After running `go test ./lexers` you can remove or reset that variable.) + +### Windows users +Windows users will find that the `RECORD=true go test ./lexers` command fails in both the standard command prompt terminal and in PowerShell. + +Instead we have to perform both steps separately: + +- Set the `RECORD` environment variable to `true`. + + In the regular command prompt window, the `set` command sets an environment variable for the current session: `set RECORD=true`. See [this page](https://superuser.com/questions/212150/how-to-set-env-variable-in-windows-cmd-line) for more. + + In PowerShell, you can use the `$env:RECORD = 'true'` command for that. See [this article](https://mcpmag.com/articles/2019/03/28/environment-variables-in-powershell.aspx) for more. + + You can also make a persistent environment variable by hand in the Windows computer settings. See [this article](https://www.computerhope.com/issues/ch000549.htm) for how. +- When the environment variable is set, run `go tests ./lexers`. + +Chroma will now regenerate the test files and print its results to the console window. diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/abap.go b/vendor/github.com/alecthomas/chroma/lexers/a/abap.go new file mode 100644 index 000000000..268aa6a59 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/abap.go @@ -0,0 +1,60 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// ABAP lexer. 
+var Abap = internal.Register(MustNewLazyLexer( + &Config{ + Name: "ABAP", + Aliases: []string{"abap"}, + Filenames: []string{"*.abap", "*.ABAP"}, + MimeTypes: []string{"text/x-abap"}, + CaseInsensitive: true, + }, + abapRules, +)) + +func abapRules() Rules { + return Rules{ + "common": { + {`\s+`, Text, nil}, + {`^\*.*$`, CommentSingle, nil}, + {`\".*?\n`, CommentSingle, nil}, + {`##\w+`, CommentSpecial, nil}, + }, + "variable-names": { + {`<\S+>`, NameVariable, nil}, + {`\w[\w~]*(?:(\[\])|->\*)?`, NameVariable, nil}, + }, + "root": { + Include("common"), + {`CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION)`, Keyword, nil}, + {`(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|TRANSACTION|TRANSFORMATION))\b`, Keyword, nil}, + {`(FORM|PERFORM)(\s+)(\w+)`, ByGroups(Keyword, Text, NameFunction), nil}, + {`(PERFORM)(\s+)(\()(\w+)(\))`, ByGroups(Keyword, Text, Punctuation, NameVariable, Punctuation), nil}, + {`(MODULE)(\s+)(\S+)(\s+)(INPUT|OUTPUT)`, ByGroups(Keyword, Text, NameFunction, Text, Keyword), nil}, + {`(METHOD)(\s+)([\w~]+)`, ByGroups(Keyword, Text, NameFunction), nil}, + {`(\s+)([\w\-]+)([=\-]>)([\w\-~]+)`, ByGroups(Text, NameVariable, Operator, NameFunction), nil}, + {`(?<=(=|-)>)([\w\-~]+)(?=\()`, NameFunction, nil}, + {`(TEXT)(-)(\d{3})`, ByGroups(Keyword, Punctuation, LiteralNumberInteger), nil}, + {`(TEXT)(-)(\w{3})`, ByGroups(Keyword, Punctuation, NameVariable), nil}, + {`(ADD-CORRESPONDING|AUTHORITY-CHECK|CLASS-DATA|CLASS-EVENTS|CLASS-METHODS|CLASS-POOL|DELETE-ADJACENT|DIVIDE-CORRESPONDING|EDITOR-CALL|ENHANCEMENT-POINT|ENHANCEMENT-SECTION|EXIT-COMMAND|FIELD-GROUPS|FIELD-SYMBOLS|FUNCTION-POOL|INTERFACE-POOL|INVERTED-DATE|LOAD-OF-PROGRAM|LOG-POINT|MESSAGE-ID|MOVE-CORRESPONDING|MULTIPLY-CORRESPONDING|NEW-LINE|NEW-PAGE|NEW-SECTION|NO-EXTENSION|OUTPUT-LENGTH|PRINT-CONTROL|SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|SYNTAX-CHECK|SYSTEM-EXCEPTIONS|TYPE-POOL|TYPE-POOLS|NO-DISPLAY)\b`, Keyword, nil}, + {`(?])(CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|(PUBLIC|PRIVATE|PROTECTED)\s+SECTION|(TYPE|LIKE)\s+((LINE\s+OF|REF\s+TO|(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|(GROUP|ORDER) BY|HAVING|SEPARATED BY|GET\s+(BADI|BIT|CURSOR|DATASET|LOCALE|PARAMETER|PF-STATUS|(PROPERTY|REFERENCE)\s+OF|RUN\s+TIME|TIME\s+(STAMP)?)?|SET\s+(BIT|BLANK\s+LINES|COUNTRY|CURSOR|DATASET|EXTENDED\s+CHECK|HANDLER|HOLD\s+DATA|LANGUAGE|LEFT\s+SCROLL-BOUNDARY|LOCALE|MARGIN|PARAMETER|PF-STATUS|PROPERTY\s+OF|RUN\s+TIME\s+(ANALYZER|CLOCK\s+RESOLUTION)|SCREEN|TITLEBAR|UPADTE\s+TASK\s+LOCAL|USER-COMMAND)|CONVERT\s+((INVERTED-)?DATE|TIME|TIME\s+STAMP|TEXT)|(CLOSE|OPEN)\s+(DATASET|CURSOR)|(TO|FROM)\s+(DATA BUFFER|INTERNAL TABLE|MEMORY ID|DATABASE|SHARED\s+(MEMORY|BUFFER))|DESCRIBE\s+(DISTANCE\s+BETWEEN|FIELD|LIST|TABLE)|FREE\s(MEMORY|OBJECT)?|PROCESS\s+(BEFORE\s+OUTPUT|AFTER\s+INPUT|ON\s+(VALUE-REQUEST|HELP-REQUEST))|AT\s+(LINE-SELECTION|USER-COMMAND|END\s+OF|NEW)|AT\s+SELECTION-SCREEN(\s+(ON(\s+(BLOCK|(HELP|VALUE)-REQUEST\s+FOR|END\s+OF|RADIOBUTTON\s+GROUP))?|OUTPUT))?|SELECTION-SCREEN:?\s+((BEGIN|END)\s+OF\s+((TABBED\s+)?BLOCK|LINE|SCREEN)|COMMENT|FUNCTION\s+KEY|INCLUDE\s+BLOCKS|POSITION|PUSHBUTTON|SKIP|ULINE)|LEAVE\s+(LIST-PROCESSING|PROGRAM|SCREEN|TO LIST-PROCESSING|TO 
TRANSACTION)(ENDING|STARTING)\s+AT|FORMAT\s+(COLOR|INTENSIFIED|INVERSE|HOTSPOT|INPUT|FRAMES|RESET)|AS\s+(CHECKBOX|SUBSCREEN|WINDOW)|WITH\s+(((NON-)?UNIQUE)?\s+KEY|FRAME)|(BEGIN|END)\s+OF|DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|COMPARING(\s+ALL\s+FIELDS)?|(INSERT|APPEND)(\s+INITIAL\s+LINE\s+(IN)?TO|\s+LINES\s+OF)?|IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|END-OF-(DEFINITION|PAGE|SELECTION)|WITH\s+FRAME(\s+TITLE)|(REPLACE|FIND)\s+((FIRST|ALL)\s+OCCURRENCES?\s+OF\s+)?(SUBSTRING|REGEX)?|MATCH\s+(LENGTH|COUNT|LINE|OFFSET)|(RESPECTING|IGNORING)\s+CASE|IN\s+UPDATE\s+TASK|(SOURCE|RESULT)\s+(XML)?|REFERENCE\s+INTO|AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|IF\s+FOUND|FOR\s+EVENT|INHERITING\s+FROM|LEAVE\s+TO\s+SCREEN|LOOP\s+AT\s+(SCREEN)?|LOWER\s+CASE|MATCHCODE\s+OBJECT|MODIF\s+ID|MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE|COMMON\s+PART)\b`, Keyword, nil}, + {`(^|(?<=(\s|\.)))(ABBREVIATED|ABSTRACT|ADD|ALIASES|ALIGN|ALPHA|ASSERT|AS|ASSIGN(ING)?|AT(\s+FIRST)?|BACK|BLOCK|BREAK-POINT|CASE|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|COUNTRY|CURRENCY|DATA|DATE|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|DETAIL|DIRECTORY|DIVIDE|DO|DUMMY|ELSE(IF)?|ENDAT|ENDCASE|ENDCATCH|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|ENDIF|ENDINTERFACE|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|ENDWHILE|ENHANCEMENT|EVENTS|EXACT|EXCEPTIONS?|EXIT|EXPONENT|EXPORT|EXPORTING|EXTRACT|FETCH|FIELDS?|FOR|FORM|FORMAT|FREE|FROM|FUNCTION|HIDE|ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|LANGUAGE|LEAVE|LENGTH|LINES|LOAD|LOCAL|JOIN|KEY|NEXT|MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFIER|MODIFY|MOVE|MULTIPLY|NODES|NUMBER|OBLIGATORY|OBJECT|OF|OFF|ON|OTHERS|OVERLAY|PACK|PAD|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|PF\d\d|RAISE|RAISING|RANGES?|READ|RECEIVE|REDEFINITION|REFRESH|REJECT|REPORT|RESERVE|RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|REPLACE|SCROLL|SEARCH|SELECT|SHIFT|SIGN|SINGLE|SIZE|SKIP|SORT|SPLIT|STATICS|STOP|STYLE|SUBMATCHES|SUBMIT|SUBTRACT|SUM(?!\()|SUMMARY|SUMMING|SUPPLY|TABLE|TABLES|TIMESTAMP|TIMES?|TIMEZONE|TITLE|\??TO|TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|ULINE|UNDER|UNPACK|UPDATE|USING|VALUE|VALUES|VIA|VARYING|VARY|WAIT|WHEN|WHERE|WIDTH|WHILE|WITH|WINDOW|WRITE|XSD|ZERO)\b`, Keyword, nil}, + {`(abs|acos|asin|atan|boolc|boolx|bit_set|char_off|charlen|ceil|cmax|cmin|condense|contains|contains_any_of|contains_any_not_of|concat_lines_of|cos|cosh|count|count_any_of|count_any_not_of|dbmaxlen|distance|escape|exp|find|find_end|find_any_of|find_any_not_of|floor|frac|from_mixed|insert|lines|log|log10|match|matches|nmax|nmin|numofchar|repeat|replace|rescale|reverse|round|segment|shift_left|shift_right|sign|sin|sinh|sqrt|strlen|substring|substring_after|substring_from|substring_before|substring_to|tan|tanh|to_upper|to_lower|to_mixed|translate|trunc|xstrlen)(\()\b`, ByGroups(NameBuiltin, Punctuation), nil}, + {`&[0-9]`, Name, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`(?<=(\s|.))(AND|OR|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b`, OperatorWord, nil}, + Include("variable-names"), + {`[?*<>=\-+&]`, Operator, nil}, + {`'(''|[^'])*'`, 
LiteralStringSingle, nil}, + {"`([^`])*`", LiteralStringSingle, nil}, + {`([|}])([^{}|]*?)([|{])`, ByGroups(Punctuation, LiteralStringSingle, Punctuation), nil}, + {`[/;:()\[\],.]`, Punctuation, nil}, + {`(!)(\w+)`, ByGroups(Operator, Name), nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/abnf.go b/vendor/github.com/alecthomas/chroma/lexers/a/abnf.go new file mode 100644 index 000000000..85c47afce --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/abnf.go @@ -0,0 +1,42 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Abnf lexer. +var Abnf = internal.Register(MustNewLazyLexer( + &Config{ + Name: "ABNF", + Aliases: []string{"abnf"}, + Filenames: []string{"*.abnf"}, + MimeTypes: []string{"text/x-abnf"}, + }, + abnfRules, +)) + +func abnfRules() Rules { + return Rules{ + "root": { + {`;.*$`, CommentSingle, nil}, + {`(%[si])?"[^"]*"`, Literal, nil}, + {`%b[01]+\-[01]+\b`, Literal, nil}, + {`%b[01]+(\.[01]+)*\b`, Literal, nil}, + {`%d[0-9]+\-[0-9]+\b`, Literal, nil}, + {`%d[0-9]+(\.[0-9]+)*\b`, Literal, nil}, + {`%x[0-9a-fA-F]+\-[0-9a-fA-F]+\b`, Literal, nil}, + {`%x[0-9a-fA-F]+(\.[0-9a-fA-F]+)*\b`, Literal, nil}, + {`\b[0-9]+\*[0-9]+`, Operator, nil}, + {`\b[0-9]+\*`, Operator, nil}, + {`\b[0-9]+`, Operator, nil}, + {`\*`, Operator, nil}, + {Words(``, `\b`, `ALPHA`, `BIT`, `CHAR`, `CR`, `CRLF`, `CTL`, `DIGIT`, `DQUOTE`, `HEXDIG`, `HTAB`, `LF`, `LWSP`, `OCTET`, `SP`, `VCHAR`, `WSP`), Keyword, nil}, + {`[a-zA-Z][a-zA-Z0-9-]+\b`, NameClass, nil}, + {`(=/|=|/)`, Operator, nil}, + {`[\[\]()]`, Punctuation, nil}, + {`\s+`, Text, nil}, + {`.`, Text, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/actionscript.go b/vendor/github.com/alecthomas/chroma/lexers/a/actionscript.go new file mode 100644 index 000000000..df55d6dac --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/actionscript.go @@ -0,0 +1,43 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Actionscript lexer. 
+var Actionscript = internal.Register(MustNewLazyLexer( + &Config{ + Name: "ActionScript", + Aliases: []string{"as", "actionscript"}, + Filenames: []string{"*.as"}, + MimeTypes: []string{"application/x-actionscript", "text/x-actionscript", "text/actionscript"}, + NotMultiline: true, + DotAll: true, + }, + actionscriptRules, +)) + +func actionscriptRules() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`//.*?\n`, CommentSingle, nil}, + {`/\*.*?\*/`, CommentMultiline, nil}, + {`/(\\\\|\\/|[^/\n])*/[gim]*`, LiteralStringRegex, nil}, + {`[~^*!%&<>|+=:;,/?\\-]+`, Operator, nil}, + {`[{}\[\]();.]+`, Punctuation, nil}, + {Words(``, `\b`, `case`, `default`, `for`, `each`, `in`, `while`, `do`, `break`, `return`, `continue`, `if`, `else`, `throw`, `try`, `catch`, `var`, `with`, `new`, `typeof`, `arguments`, `instanceof`, `this`, `switch`), Keyword, nil}, + {Words(``, `\b`, `class`, `public`, `final`, `internal`, `native`, `override`, `private`, `protected`, `static`, `import`, `extends`, `implements`, `interface`, `intrinsic`, `return`, `super`, `dynamic`, `function`, `const`, `get`, `namespace`, `package`, `set`), KeywordDeclaration, nil}, + {`(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b`, KeywordConstant, nil}, + {Words(``, `\b`, `Accessibility`, `AccessibilityProperties`, `ActionScriptVersion`, `ActivityEvent`, `AntiAliasType`, `ApplicationDomain`, `AsBroadcaster`, `Array`, `AsyncErrorEvent`, `AVM1Movie`, `BevelFilter`, `Bitmap`, `BitmapData`, `BitmapDataChannel`, `BitmapFilter`, `BitmapFilterQuality`, `BitmapFilterType`, `BlendMode`, `BlurFilter`, `Boolean`, `ByteArray`, `Camera`, `Capabilities`, `CapsStyle`, `Class`, `Color`, `ColorMatrixFilter`, `ColorTransform`, `ContextMenu`, `ContextMenuBuiltInItems`, `ContextMenuEvent`, `ContextMenuItem`, `ConvultionFilter`, `CSMSettings`, `DataEvent`, `Date`, `DefinitionError`, `DeleteObjectSample`, `Dictionary`, `DisplacmentMapFilter`, `DisplayObject`, `DisplacmentMapFilterMode`, `DisplayObjectContainer`, `DropShadowFilter`, `Endian`, `EOFError`, `Error`, `ErrorEvent`, `EvalError`, `Event`, `EventDispatcher`, `EventPhase`, `ExternalInterface`, `FileFilter`, `FileReference`, `FileReferenceList`, `FocusDirection`, `FocusEvent`, `Font`, `FontStyle`, `FontType`, `FrameLabel`, `FullScreenEvent`, `Function`, `GlowFilter`, `GradientBevelFilter`, `GradientGlowFilter`, `GradientType`, `Graphics`, `GridFitType`, `HTTPStatusEvent`, `IBitmapDrawable`, `ID3Info`, `IDataInput`, `IDataOutput`, `IDynamicPropertyOutputIDynamicPropertyWriter`, `IEventDispatcher`, `IExternalizable`, `IllegalOperationError`, `IME`, `IMEConversionMode`, `IMEEvent`, `int`, `InteractiveObject`, `InterpolationMethod`, `InvalidSWFError`, `InvokeEvent`, `IOError`, `IOErrorEvent`, `JointStyle`, `Key`, `Keyboard`, `KeyboardEvent`, `KeyLocation`, `LineScaleMode`, `Loader`, `LoaderContext`, `LoaderInfo`, `LoadVars`, `LocalConnection`, `Locale`, `Math`, `Matrix`, `MemoryError`, `Microphone`, `MorphShape`, `Mouse`, `MouseEvent`, `MovieClip`, `MovieClipLoader`, `Namespace`, `NetConnection`, `NetStatusEvent`, `NetStream`, `NewObjectSample`, `Number`, `Object`, `ObjectEncoding`, `PixelSnapping`, `Point`, `PrintJob`, `PrintJobOptions`, `PrintJobOrientation`, `ProgressEvent`, `Proxy`, `QName`, `RangeError`, `Rectangle`, `ReferenceError`, `RegExp`, `Responder`, `Sample`, `Scene`, `ScriptTimeoutError`, `Security`, `SecurityDomain`, `SecurityError`, `SecurityErrorEvent`, `SecurityPanel`, `Selection`, `Shape`, `SharedObject`, `SharedObjectFlushStatus`, `SimpleButton`, `Socket`, 
`Sound`, `SoundChannel`, `SoundLoaderContext`, `SoundMixer`, `SoundTransform`, `SpreadMethod`, `Sprite`, `StackFrame`, `StackOverflowError`, `Stage`, `StageAlign`, `StageDisplayState`, `StageQuality`, `StageScaleMode`, `StaticText`, `StatusEvent`, `String`, `StyleSheet`, `SWFVersion`, `SyncEvent`, `SyntaxError`, `System`, `TextColorType`, `TextField`, `TextFieldAutoSize`, `TextFieldType`, `TextFormat`, `TextFormatAlign`, `TextLineMetrics`, `TextRenderer`, `TextSnapshot`, `Timer`, `TimerEvent`, `Transform`, `TypeError`, `uint`, `URIError`, `URLLoader`, `URLLoaderDataFormat`, `URLRequest`, `URLRequestHeader`, `URLRequestMethod`, `URLStream`, `URLVariabeles`, `VerifyError`, `Video`, `XML`, `XMLDocument`, `XMLList`, `XMLNode`, `XMLNodeType`, `XMLSocket`, `XMLUI`), NameBuiltin, nil}, + {Words(``, `\b`, `decodeURI`, `decodeURIComponent`, `encodeURI`, `escape`, `eval`, `isFinite`, `isNaN`, `isXMLName`, `clearInterval`, `fscommand`, `getTimer`, `getURL`, `getVersion`, `parseFloat`, `parseInt`, `setInterval`, `trace`, `updateAfterEvent`, `unescape`), NameFunction, nil}, + {`[$a-zA-Z_]\w*`, NameOther, nil}, + {`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil}, + {`0x[0-9a-f]+`, LiteralNumberHex, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/actionscript3.go b/vendor/github.com/alecthomas/chroma/lexers/a/actionscript3.go new file mode 100644 index 000000000..45596dcc1 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/actionscript3.go @@ -0,0 +1,60 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Actionscript 3 lexer. 
+var Actionscript3 = internal.Register(MustNewLazyLexer( + &Config{ + Name: "ActionScript 3", + Aliases: []string{"as3", "actionscript3"}, + Filenames: []string{"*.as"}, + MimeTypes: []string{"application/x-actionscript3", "text/x-actionscript3", "text/actionscript3"}, + DotAll: true, + }, + actionscript3Rules, +)) + +func actionscript3Rules() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`(function\s+)([$a-zA-Z_]\w*)(\s*)(\()`, ByGroups(KeywordDeclaration, NameFunction, Text, Operator), Push("funcparams")}, + {`(var|const)(\s+)([$a-zA-Z_]\w*)(\s*)(:)(\s*)([$a-zA-Z_]\w*(?:\.<\w+>)?)`, ByGroups(KeywordDeclaration, Text, Name, Text, Punctuation, Text, KeywordType), nil}, + {`(import|package)(\s+)((?:[$a-zA-Z_]\w*|\.)+)(\s*)`, ByGroups(Keyword, Text, NameNamespace, Text), nil}, + {`(new)(\s+)([$a-zA-Z_]\w*(?:\.<\w+>)?)(\s*)(\()`, ByGroups(Keyword, Text, KeywordType, Text, Operator), nil}, + {`//.*?\n`, CommentSingle, nil}, + {`/\*.*?\*/`, CommentMultiline, nil}, + {`/(\\\\|\\/|[^\n])*/[gisx]*`, LiteralStringRegex, nil}, + {`(\.)([$a-zA-Z_]\w*)`, ByGroups(Operator, NameAttribute), nil}, + {`(case|default|for|each|in|while|do|break|return|continue|if|else|throw|try|catch|with|new|typeof|arguments|instanceof|this|switch|import|include|as|is)\b`, Keyword, nil}, + {`(class|public|final|internal|native|override|private|protected|static|import|extends|implements|interface|intrinsic|return|super|dynamic|function|const|get|namespace|package|set)\b`, KeywordDeclaration, nil}, + {`(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b`, KeywordConstant, nil}, + {`(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|unescape)\b`, NameFunction, nil}, + {`[$a-zA-Z_]\w*`, Name, nil}, + {`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil}, + {`0x[0-9a-f]+`, LiteralNumberHex, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + {`[~^*!%&<>|+=:;,/?\\{}\[\]().-]+`, Operator, nil}, + }, + "funcparams": { + {`\s+`, Text, nil}, + {`(\s*)(\.\.\.)?([$a-zA-Z_]\w*)(\s*)(:)(\s*)([$a-zA-Z_]\w*(?:\.<\w+>)?|\*)(\s*)`, ByGroups(Text, Punctuation, Name, Text, Operator, Text, KeywordType, Text), Push("defval")}, + {`\)`, Operator, Push("type")}, + }, + "type": { + {`(\s*)(:)(\s*)([$a-zA-Z_]\w*(?:\.<\w+>)?|\*)`, ByGroups(Text, Operator, Text, KeywordType), Pop(2)}, + {`\s+`, Text, Pop(2)}, + Default(Pop(2)), + }, + "defval": { + {`(=)(\s*)([^(),]+)(\s*)(,?)`, ByGroups(Operator, Text, UsingSelf("root"), Text, Operator), Pop(1)}, + {`,`, Operator, Pop(1)}, + Default(Pop(1)), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/ada.go b/vendor/github.com/alecthomas/chroma/lexers/a/ada.go new file mode 100644 index 000000000..916727124 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/ada.go @@ -0,0 +1,118 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Ada lexer. 
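The Ada lexer defined next makes heavy use of the rule DSL that recurs throughout these files: `Push` enters a state, `Pop` leaves it, `Include` splices in another state's rules, and `Default` applies a mutator when nothing else matches. The rule set below is a hypothetical illustration of those combinators only; none of these states exist in chroma itself.

```go
package a

import (
	. "github.com/alecthomas/chroma" // nolint
)

// miniRules is a hypothetical rule set, shown only to illustrate the
// Push/Pop/Include/Default combinators used by the vendored lexers.
func miniRules() Rules {
	return Rules{
		"root": {
			{`"`, LiteralString, Push("string")}, // enter the "string" state
			{`\d+`, LiteralNumberInteger, nil},   // emit a token, stay in "root"
			Include("whitespace"),                // splice in shared rules
			Default(Push("unknown")),             // fallthrough when nothing matched
		},
		"string": {
			{`[^"]+`, LiteralString, nil},
			{`"`, LiteralString, Pop(1)}, // leave the "string" state
		},
		"whitespace": {
			{`\s+`, Text, nil},
		},
		"unknown": {
			{`.`, Error, Pop(1)},
		},
	}
}
```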
+var Ada = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Ada", + Aliases: []string{"ada", "ada95", "ada2005"}, + Filenames: []string{"*.adb", "*.ads", "*.ada"}, + MimeTypes: []string{"text/x-ada"}, + CaseInsensitive: true, + }, + adaRules, +)) + +func adaRules() Rules { + return Rules{ + "root": { + {`[^\S\n]+`, Text, nil}, + {`--.*?\n`, CommentSingle, nil}, + {`[^\S\n]+`, Text, nil}, + {`function|procedure|entry`, KeywordDeclaration, Push("subprogram")}, + {`(subtype|type)(\s+)(\w+)`, ByGroups(KeywordDeclaration, Text, KeywordType), Push("type_def")}, + {`task|protected`, KeywordDeclaration, nil}, + {`(subtype)(\s+)`, ByGroups(KeywordDeclaration, Text), nil}, + {`(end)(\s+)`, ByGroups(KeywordReserved, Text), Push("end")}, + {`(pragma)(\s+)(\w+)`, ByGroups(KeywordReserved, Text, CommentPreproc), nil}, + {`(true|false|null)\b`, KeywordConstant, nil}, + {Words(``, `\b`, `Address`, `Byte`, `Boolean`, `Character`, `Controlled`, `Count`, `Cursor`, `Duration`, `File_Mode`, `File_Type`, `Float`, `Generator`, `Integer`, `Long_Float`, `Long_Integer`, `Long_Long_Float`, `Long_Long_Integer`, `Natural`, `Positive`, `Reference_Type`, `Short_Float`, `Short_Integer`, `Short_Short_Float`, `Short_Short_Integer`, `String`, `Wide_Character`, `Wide_String`), KeywordType, nil}, + {`(and(\s+then)?|in|mod|not|or(\s+else)|rem)\b`, OperatorWord, nil}, + {`generic|private`, KeywordDeclaration, nil}, + {`package`, KeywordDeclaration, Push("package")}, + {`array\b`, KeywordReserved, Push("array_def")}, + {`(with|use)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")}, + {`(\w+)(\s*)(:)(\s*)(constant)`, ByGroups(NameConstant, Text, Punctuation, Text, KeywordReserved), nil}, + {`<<\w+>>`, NameLabel, nil}, + {`(\w+)(\s*)(:)(\s*)(declare|begin|loop|for|while)`, ByGroups(NameLabel, Text, Punctuation, Text, KeywordReserved), nil}, + {Words(`\b`, `\b`, `abort`, `abs`, `abstract`, `accept`, `access`, `aliased`, `all`, `array`, `at`, `begin`, `body`, `case`, `constant`, `declare`, `delay`, `delta`, `digits`, `do`, `else`, `elsif`, `end`, `entry`, `exception`, `exit`, `interface`, `for`, `goto`, `if`, `is`, `limited`, `loop`, `new`, `null`, `of`, `or`, `others`, `out`, `overriding`, `pragma`, `protected`, `raise`, `range`, `record`, `renames`, `requeue`, `return`, `reverse`, `select`, `separate`, `subtype`, `synchronized`, `task`, `tagged`, `terminate`, `then`, `type`, `until`, `when`, `while`, `xor`), KeywordReserved, nil}, + {`"[^"]*"`, LiteralString, nil}, + Include("attribute"), + Include("numbers"), + {`'[^']'`, LiteralStringChar, nil}, + {`(\w+)(\s*|[(,])`, ByGroups(Name, UsingSelf("root")), nil}, + {`(<>|=>|:=|[()|:;,.'])`, Punctuation, nil}, + {`[*<>+=/&-]`, Operator, nil}, + {`\n+`, Text, nil}, + }, + "numbers": { + {`[0-9_]+#[0-9a-f]+#`, LiteralNumberHex, nil}, + {`[0-9_]+\.[0-9_]*`, LiteralNumberFloat, nil}, + {`[0-9_]+`, LiteralNumberInteger, nil}, + }, + "attribute": { + {`(')(\w+)`, ByGroups(Punctuation, NameAttribute), nil}, + }, + "subprogram": { + {`\(`, Punctuation, Push("#pop", "formal_part")}, + {`;`, Punctuation, Pop(1)}, + {`is\b`, KeywordReserved, Pop(1)}, + {`"[^"]+"|\w+`, NameFunction, nil}, + Include("root"), + }, + "end": { + {`(if|case|record|loop|select)`, KeywordReserved, nil}, + {`"[^"]+"|[\w.]+`, NameFunction, nil}, + {`\s+`, Text, nil}, + {`;`, Punctuation, Pop(1)}, + }, + "type_def": { + {`;`, Punctuation, Pop(1)}, + {`\(`, Punctuation, Push("formal_part")}, + {`with|and|use`, KeywordReserved, nil}, + {`array\b`, KeywordReserved, Push("#pop", "array_def")}, + {`record\b`, 
KeywordReserved, Push("record_def")}, + {`(null record)(;)`, ByGroups(KeywordReserved, Punctuation), Pop(1)}, + Include("root"), + }, + "array_def": { + {`;`, Punctuation, Pop(1)}, + {`(\w+)(\s+)(range)`, ByGroups(KeywordType, Text, KeywordReserved), nil}, + Include("root"), + }, + "record_def": { + {`end record`, KeywordReserved, Pop(1)}, + Include("root"), + }, + "import": { + {`[\w.]+`, NameNamespace, Pop(1)}, + Default(Pop(1)), + }, + "formal_part": { + {`\)`, Punctuation, Pop(1)}, + {`\w+`, NameVariable, nil}, + {`,|:[^=]`, Punctuation, nil}, + {`(in|not|null|out|access)\b`, KeywordReserved, nil}, + Include("root"), + }, + "package": { + {`body`, KeywordDeclaration, nil}, + {`is\s+new|renames`, KeywordReserved, nil}, + {`is`, KeywordReserved, Pop(1)}, + {`;`, Punctuation, Pop(1)}, + {`\(`, Punctuation, Push("package_instantiation")}, + {`([\w.]+)`, NameClass, nil}, + Include("root"), + }, + "package_instantiation": { + {`("[^"]+"|\w+)(\s+)(=>)`, ByGroups(NameVariable, Text, Punctuation), nil}, + {`[\w.\'"]`, Text, nil}, + {`\)`, Punctuation, Pop(1)}, + Include("root"), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/al.go b/vendor/github.com/alecthomas/chroma/lexers/a/al.go new file mode 100644 index 000000000..4055f3651 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/al.go @@ -0,0 +1,47 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Al lexer. +var Al = internal.Register(MustNewLazyLexer( + &Config{ + Name: "AL", + Aliases: []string{"al"}, + Filenames: []string{"*.al", "*.dal"}, + MimeTypes: []string{"text/x-al"}, + DotAll: true, + CaseInsensitive: true, + }, + alRules, +)) + +// https://github.com/microsoft/AL/blob/master/grammar/alsyntax.tmlanguage +func alRules() Rules { + return Rules{ + "root": { + {`\s+`, TextWhitespace, nil}, + {`(?s)\/\*.*?\\*\*\/`, CommentMultiline, nil}, + {`(?s)//.*?\n`, CommentSingle, nil}, + {`\"([^\"])*\"`, Text, nil}, + {`'([^'])*'`, LiteralString, nil}, + {`\b(?i:(ARRAY|ASSERTERROR|BEGIN|BREAK|CASE|DO|DOWNTO|ELSE|END|EVENT|EXIT|FOR|FOREACH|FUNCTION|IF|IMPLEMENTS|IN|INDATASET|INTERFACE|INTERNAL|LOCAL|OF|PROCEDURE|PROGRAM|PROTECTED|REPEAT|RUNONCLIENT|SECURITYFILTERING|SUPPRESSDISPOSE|TEMPORARY|THEN|TO|TRIGGER|UNTIL|VAR|WHILE|WITH|WITHEVENTS))\b`, Keyword, nil}, + {`\b(?i:(AND|DIV|MOD|NOT|OR|XOR))\b`, OperatorWord, nil}, + {`\b(?i:(AVERAGE|CONST|COUNT|EXIST|FIELD|FILTER|LOOKUP|MAX|MIN|ORDER|SORTING|SUM|TABLEDATA|UPPERLIMIT|WHERE|ASCENDING|DESCENDING))\b`, Keyword, nil}, + {`\b(?i:(CODEUNIT|PAGE|PAGEEXTENSION|PAGECUSTOMIZATION|DOTNET|ENUM|ENUMEXTENSION|VALUE|QUERY|REPORT|TABLE|TABLEEXTENSION|XMLPORT|PROFILE|CONTROLADDIN|REPORTEXTENSION|INTERFACE|PERMISSIONSET|PERMISSIONSETEXTENSION|ENTITLEMENT))\b`, Keyword, nil}, + 
{`\b(?i:(Action|Array|Automation|BigInteger|BigText|Blob|Boolean|Byte|Char|ClientType|Code|Codeunit|CompletionTriggerErrorLevel|ConnectionType|Database|DataClassification|DataScope|Date|DateFormula|DateTime|Decimal|DefaultLayout|Dialog|Dictionary|DotNet|DotNetAssembly|DotNetTypeDeclaration|Duration|Enum|ErrorInfo|ErrorType|ExecutionContext|ExecutionMode|FieldClass|FieldRef|FieldType|File|FilterPageBuilder|Guid|InStream|Integer|Joker|KeyRef|List|ModuleDependencyInfo|ModuleInfo|None|Notification|NotificationScope|ObjectType|Option|OutStream|Page|PageResult|Query|Record|RecordId|RecordRef|Report|ReportFormat|SecurityFilter|SecurityFiltering|Table|TableConnectionType|TableFilter|TestAction|TestField|TestFilterField|TestPage|TestPermissions|TestRequestPage|Text|TextBuilder|TextConst|TextEncoding|Time|TransactionModel|TransactionType|Variant|Verbosity|Version|XmlPort|HttpContent|HttpHeaders|HttpClient|HttpRequestMessage|HttpResponseMessage|JsonToken|JsonValue|JsonArray|JsonObject|View|Views|XmlAttribute|XmlAttributeCollection|XmlComment|XmlCData|XmlDeclaration|XmlDocument|XmlDocumentType|XmlElement|XmlNamespaceManager|XmlNameTable|XmlNode|XmlNodeList|XmlProcessingInstruction|XmlReadOptions|XmlText|XmlWriteOptions|WebServiceActionContext|WebServiceActionResultCode|SessionSettings))\b`, Keyword, nil}, + {`\b([<>]=|<>|<|>)\b?`, Operator, nil}, + {`\b(\-|\+|\/|\*)\b`, Operator, nil}, + {`\s*(\:=|\+=|-=|\/=|\*=)\s*?`, Operator, nil}, + {`\b(?i:(ADD|ADDFIRST|ADDLAST|ADDAFTER|ADDBEFORE|ACTION|ACTIONS|AREA|ASSEMBLY|CHARTPART|CUEGROUP|CUSTOMIZES|COLUMN|DATAITEM|DATASET|ELEMENTS|EXTENDS|FIELD|FIELDGROUP|FIELDATTRIBUTE|FIELDELEMENT|FIELDGROUPS|FIELDS|FILTER|FIXED|GRID|GROUP|MOVEAFTER|MOVEBEFORE|KEY|KEYS|LABEL|LABELS|LAYOUT|MODIFY|MOVEFIRST|MOVELAST|MOVEBEFORE|MOVEAFTER|PART|REPEATER|USERCONTROL|REQUESTPAGE|SCHEMA|SEPARATOR|SYSTEMPART|TABLEELEMENT|TEXTATTRIBUTE|TEXTELEMENT|TYPE))\b`, Keyword, nil}, + {`\s*[(\.\.)&\|]\s*`, Operator, nil}, + {`\b((0(x|X)[0-9a-fA-F]*)|(([0-9]+\.?[0-9]*)|(\.[0-9]+))((e|E)(\+|-)?[0-9]+)?)(L|l|UL|ul|u|U|F|f|ll|LL|ull|ULL)?\b`, LiteralNumber, nil}, + {`[;:,]`, Punctuation, nil}, + {`#[ \t]*(if|else|elif|endif|define|undef|region|endregion|pragma)\b.*?\n`, CommentPreproc, nil}, + {`\w+`, Text, nil}, + {`.`, Text, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/angular2.go b/vendor/github.com/alecthomas/chroma/lexers/a/angular2.go new file mode 100644 index 000000000..a947edad6 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/angular2.go @@ -0,0 +1,46 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Angular2 lexer. 
+var Angular2 = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Angular2", + Aliases: []string{"ng2"}, + Filenames: []string{}, + MimeTypes: []string{}, + }, + angular2Rules, +)) + +func angular2Rules() Rules { + return Rules{ + "root": { + {`[^{([*#]+`, Other, nil}, + {`(\{\{)(\s*)`, ByGroups(CommentPreproc, Text), Push("ngExpression")}, + {`([([]+)([\w:.-]+)([\])]+)(\s*)(=)(\s*)`, ByGroups(Punctuation, NameAttribute, Punctuation, Text, Operator, Text), Push("attr")}, + {`([([]+)([\w:.-]+)([\])]+)(\s*)`, ByGroups(Punctuation, NameAttribute, Punctuation, Text), nil}, + {`([*#])([\w:.-]+)(\s*)(=)(\s*)`, ByGroups(Punctuation, NameAttribute, Punctuation, Operator), Push("attr")}, + {`([*#])([\w:.-]+)(\s*)`, ByGroups(Punctuation, NameAttribute, Punctuation), nil}, + }, + "ngExpression": { + {`\s+(\|\s+)?`, Text, nil}, + {`\}\}`, CommentPreproc, Pop(1)}, + {`:?(true|false)`, LiteralStringBoolean, nil}, + {`:?"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`:?'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + {`[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil}, + {`[a-zA-Z][\w-]*(\(.*\))?`, NameVariable, nil}, + {`\.[\w-]+(\(.*\))?`, NameVariable, nil}, + {`(\?)(\s*)([^}\s]+)(\s*)(:)(\s*)([^}\s]+)(\s*)`, ByGroups(Operator, Text, LiteralString, Text, Operator, Text, LiteralString, Text), nil}, + }, + "attr": { + {`".*?"`, LiteralString, Pop(1)}, + {`'.*?'`, LiteralString, Pop(1)}, + {`[^\s>]+`, LiteralString, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/antlr.go b/vendor/github.com/alecthomas/chroma/lexers/a/antlr.go new file mode 100644 index 000000000..c744353f1 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/antlr.go @@ -0,0 +1,105 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// ANTLR lexer. 
+var ANTLR = internal.Register(MustNewLazyLexer( + &Config{ + Name: "ANTLR", + Aliases: []string{"antlr"}, + Filenames: []string{}, + MimeTypes: []string{}, + }, + antlrRules, +)) + +func antlrRules() Rules { + return Rules{ + "whitespace": { + {`\s+`, TextWhitespace, nil}, + }, + "comments": { + {`//.*$`, Comment, nil}, + {`/\*(.|\n)*?\*/`, Comment, nil}, + }, + "root": { + Include("whitespace"), + Include("comments"), + {`(lexer|parser|tree)?(\s*)(grammar\b)(\s*)([A-Za-z]\w*)(;)`, ByGroups(Keyword, TextWhitespace, Keyword, TextWhitespace, NameClass, Punctuation), nil}, + {`options\b`, Keyword, Push("options")}, + {`tokens\b`, Keyword, Push("tokens")}, + {`(scope)(\s*)([A-Za-z]\w*)(\s*)(\{)`, ByGroups(Keyword, TextWhitespace, NameVariable, TextWhitespace, Punctuation), Push("action")}, + {`(catch|finally)\b`, Keyword, Push("exception")}, + {`(@[A-Za-z]\w*)(\s*)(::)?(\s*)([A-Za-z]\w*)(\s*)(\{)`, ByGroups(NameLabel, TextWhitespace, Punctuation, TextWhitespace, NameLabel, TextWhitespace, Punctuation), Push("action")}, + {`((?:protected|private|public|fragment)\b)?(\s*)([A-Za-z]\w*)(!)?`, ByGroups(Keyword, TextWhitespace, NameLabel, Punctuation), Push("rule-alts", "rule-prelims")}, + }, + "exception": { + {`\n`, TextWhitespace, Pop(1)}, + {`\s`, TextWhitespace, nil}, + Include("comments"), + {`\[`, Punctuation, Push("nested-arg-action")}, + {`\{`, Punctuation, Push("action")}, + }, + "rule-prelims": { + Include("whitespace"), + Include("comments"), + {`returns\b`, Keyword, nil}, + {`\[`, Punctuation, Push("nested-arg-action")}, + {`\{`, Punctuation, Push("action")}, + {`(throws)(\s+)([A-Za-z]\w*)`, ByGroups(Keyword, TextWhitespace, NameLabel), nil}, + {`(,)(\s*)([A-Za-z]\w*)`, ByGroups(Punctuation, TextWhitespace, NameLabel), nil}, + {`options\b`, Keyword, Push("options")}, + {`(scope)(\s+)(\{)`, ByGroups(Keyword, TextWhitespace, Punctuation), Push("action")}, + {`(scope)(\s+)([A-Za-z]\w*)(\s*)(;)`, ByGroups(Keyword, TextWhitespace, NameLabel, TextWhitespace, Punctuation), nil}, + {`(@[A-Za-z]\w*)(\s*)(\{)`, ByGroups(NameLabel, TextWhitespace, Punctuation), Push("action")}, + {`:`, Punctuation, Pop(1)}, + }, + "rule-alts": { + Include("whitespace"), + Include("comments"), + {`options\b`, Keyword, Push("options")}, + {`:`, Punctuation, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralString, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`<<([^>]|>[^>])>>`, LiteralString, nil}, + {`\$?[A-Z_]\w*`, NameConstant, nil}, + {`\$?[a-z_]\w*`, NameVariable, nil}, + {`(\+|\||->|=>|=|\(|\)|\.\.|\.|\?|\*|\^|!|\#|~)`, Operator, nil}, + {`,`, Punctuation, nil}, + {`\[`, Punctuation, Push("nested-arg-action")}, + {`\{`, Punctuation, Push("action")}, + {`;`, Punctuation, Pop(1)}, + }, + "tokens": { + Include("whitespace"), + Include("comments"), + {`\{`, Punctuation, nil}, + {`([A-Z]\w*)(\s*)(=)?(\s*)(\'(?:\\\\|\\\'|[^\']*)\')?(\s*)(;)`, ByGroups(NameLabel, TextWhitespace, Punctuation, TextWhitespace, LiteralString, TextWhitespace, Punctuation), nil}, + {`\}`, Punctuation, Pop(1)}, + }, + "options": { + Include("whitespace"), + Include("comments"), + {`\{`, Punctuation, nil}, + {`([A-Za-z]\w*)(\s*)(=)(\s*)([A-Za-z]\w*|\'(?:\\\\|\\\'|[^\']*)\'|[0-9]+|\*)(\s*)(;)`, ByGroups(NameVariable, TextWhitespace, Punctuation, TextWhitespace, Text, TextWhitespace, Punctuation), nil}, + {`\}`, Punctuation, Pop(1)}, + }, + "action": { + {`([^${}\'"/\\]+|"(\\\\|\\"|[^"])*"|'(\\\\|\\'|[^'])*'|//.*$\n?|/\*(.|\n)*?\*/|/(?!\*)(\\\\|\\/|[^/])*/|\\(?!%)|/)+`, Other, nil}, + {`(\\)(%)`, ByGroups(Punctuation, Other), nil}, + 
{`(\$[a-zA-Z]+)(\.?)(text|value)?`, ByGroups(NameVariable, Punctuation, NameProperty), nil}, + {`\{`, Punctuation, Push()}, + {`\}`, Punctuation, Pop(1)}, + }, + "nested-arg-action": { + {`([^$\[\]\'"/]+|"(\\\\|\\"|[^"])*"|'(\\\\|\\'|[^'])*'|//.*$\n?|/\*(.|\n)*?\*/|/(?!\*)(\\\\|\\/|[^/])*/|/)+`, Other, nil}, + {`\[`, Punctuation, Push()}, + {`\]`, Punctuation, Pop(1)}, + {`(\$[a-zA-Z]+)(\.?)(text|value)?`, ByGroups(NameVariable, Punctuation, NameProperty), nil}, + {`(\\\\|\\\]|\\\[|[^\[\]])+`, Other, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/apache.go b/vendor/github.com/alecthomas/chroma/lexers/a/apache.go new file mode 100644 index 000000000..5685eb15c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/apache.go @@ -0,0 +1,42 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Apacheconf lexer. +var Apacheconf = internal.Register(MustNewLazyLexer( + &Config{ + Name: "ApacheConf", + Aliases: []string{"apacheconf", "aconf", "apache"}, + Filenames: []string{".htaccess", "apache.conf", "apache2.conf"}, + MimeTypes: []string{"text/x-apacheconf"}, + CaseInsensitive: true, + }, + apacheconfRules, +)) + +func apacheconfRules() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`(#.*?)$`, Comment, nil}, + {`(<[^\s>]+)(?:(\s+)(.*?))?(>)`, ByGroups(NameTag, Text, LiteralString, NameTag), nil}, + {`([a-z]\w*)(\s+)`, ByGroups(NameBuiltin, Text), Push("value")}, + {`\.+`, Text, nil}, + }, + "value": { + {`\\\n`, Text, nil}, + {`$`, Text, Pop(1)}, + {`\\`, Text, nil}, + {`[^\S\n]+`, Text, nil}, + {`\d+\.\d+\.\d+\.\d+(?:/\d+)?`, LiteralNumber, nil}, + {`\d+`, LiteralNumber, nil}, + {`/([a-z0-9][\w./-]+)`, LiteralStringOther, nil}, + {`(on|off|none|any|all|double|email|dns|min|minimal|os|productonly|full|emerg|alert|crit|error|warn|notice|info|debug|registry|script|inetd|standalone|user|group)\b`, Keyword, nil}, + {`"([^"\\]*(?:\\.[^"\\]*)*)"`, LiteralStringDouble, nil}, + {`[^\s"\\]+`, Text, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/apl.go b/vendor/github.com/alecthomas/chroma/lexers/a/apl.go new file mode 100644 index 000000000..f929d2e2b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/apl.go @@ -0,0 +1,40 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Apl lexer. 
+var Apl = internal.Register(MustNewLazyLexer( + &Config{ + Name: "APL", + Aliases: []string{"apl"}, + Filenames: []string{"*.apl"}, + MimeTypes: []string{}, + }, + aplRules, +)) + +func aplRules() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`[⍝#].*$`, CommentSingle, nil}, + {`\'((\'\')|[^\'])*\'`, LiteralStringSingle, nil}, + {`"(("")|[^"])*"`, LiteralStringDouble, nil}, + {`[⋄◇()]`, Punctuation, nil}, + {`[\[\];]`, LiteralStringRegex, nil}, + {`⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*`, NameFunction, nil}, + {`[A-Za-zΔ∆⍙_][A-Za-zΔ∆⍙_¯0-9]*`, NameVariable, nil}, + {`¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?`, LiteralNumber, nil}, + {`[\.\\/⌿⍀¨⍣⍨⍠⍤∘⍥@⌺⌶⍢]`, NameAttribute, nil}, + {`[+\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗⊆⍸]`, Operator, nil}, + {`⍬`, NameConstant, nil}, + {`[⎕⍞]`, NameVariableGlobal, nil}, + {`[←→]`, KeywordDeclaration, nil}, + {`[⍺⍵⍶⍹∇:]`, NameBuiltinPseudo, nil}, + {`[{}]`, KeywordType, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/applescript.go b/vendor/github.com/alecthomas/chroma/lexers/a/applescript.go new file mode 100644 index 000000000..b6a53c5ff --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/applescript.go @@ -0,0 +1,59 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Applescript lexer. +var Applescript = internal.Register(MustNewLazyLexer( + &Config{ + Name: "AppleScript", + Aliases: []string{"applescript"}, + Filenames: []string{"*.applescript"}, + MimeTypes: []string{}, + DotAll: true, + }, + applescriptRules, +)) + +func applescriptRules() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`¬\n`, LiteralStringEscape, nil}, + {`'s\s+`, Text, nil}, + {`(--|#).*?$`, Comment, nil}, + {`\(\*`, CommentMultiline, Push("comment")}, + {`[(){}!,.:]`, Punctuation, nil}, + {`(«)([^»]+)(»)`, ByGroups(Text, NameBuiltin, Text), nil}, + {`\b((?:considering|ignoring)\s*)(application responses|case|diacriticals|hyphens|numeric strings|punctuation|white space)`, ByGroups(Keyword, NameBuiltin), nil}, + {`(-|\*|\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\^)`, Operator, nil}, + {`\b(and|or|is equal|equals|(is )?equal to|is not|isn't|isn't equal( to)?|is not equal( to)?|doesn't equal|does not equal|(is )?greater than|comes after|is not less than or equal( to)?|isn't less than or equal( to)?|(is )?less than|comes before|is not greater than or equal( to)?|isn't greater than or equal( to)?|(is )?greater than or equal( to)?|is not less than|isn't less than|does not come before|doesn't come before|(is )?less than or equal( to)?|is not greater than|isn't greater than|does not come after|doesn't come after|starts? with|begins? with|ends? 
with|contains?|does not contain|doesn't contain|is in|is contained by|is not in|is not contained by|isn't contained by|div|mod|not|(a )?(ref( to)?|reference to)|is|does)\b`, OperatorWord, nil}, + {`^(\s*(?:on|end)\s+)(zoomed|write to file|will zoom|will show|will select tab view item|will resize( sub views)?|will resign active|will quit|will pop up|will open|will move|will miniaturize|will hide|will finish launching|will display outline cell|will display item cell|will display cell|will display browser cell|will dismiss|will close|will become active|was miniaturized|was hidden|update toolbar item|update parameters|update menu item|shown|should zoom|should selection change|should select tab view item|should select row|should select item|should select column|should quit( after last window closed)?|should open( untitled)?|should expand item|should end editing|should collapse item|should close|should begin editing|selection changing|selection changed|selected tab view item|scroll wheel|rows changed|right mouse up|right mouse dragged|right mouse down|resized( sub views)?|resigned main|resigned key|resigned active|read from file|prepare table drop|prepare table drag|prepare outline drop|prepare outline drag|prepare drop|plugin loaded|parameters updated|panel ended|opened|open untitled|number of rows|number of items|number of browser rows|moved|mouse up|mouse moved|mouse exited|mouse entered|mouse dragged|mouse down|miniaturized|load data representation|launched|keyboard up|keyboard down|items changed|item value changed|item value|item expandable|idle|exposed|end editing|drop|drag( (entered|exited|updated))?|double clicked|document nib name|dialog ended|deminiaturized|data representation|conclude drop|column resized|column moved|column clicked|closed|clicked toolbar item|clicked|choose menu item|child of item|changed|change item value|change cell value|cell value changed|cell value|bounds changed|begin editing|became main|became key|awake from nib|alert ended|activated|action|accept table drop|accept outline drop)`, ByGroups(Keyword, NameFunction), nil}, + {`^(\s*)(in|on|script|to)(\s+)`, ByGroups(Text, Keyword, Text), nil}, + {`\b(as )(alias |application |boolean |class |constant |date |file |integer |list |number |POSIX file |real |record |reference |RGB color |script |text |unit types|(?:Unicode )?text|string)\b`, ByGroups(Keyword, NameClass), nil}, + {`\b(AppleScript|current application|false|linefeed|missing value|pi|quote|result|return|space|tab|text item delimiters|true|version)\b`, NameConstant, nil}, + {`\b(ASCII (character|number)|activate|beep|choose URL|choose application|choose color|choose file( name)?|choose folder|choose from list|choose remote application|clipboard info|close( access)?|copy|count|current date|delay|delete|display (alert|dialog)|do shell script|duplicate|exists|get eof|get volume settings|info for|launch|list (disks|folder)|load script|log|make|mount volume|new|offset|open( (for access|location))?|path to|print|quit|random number|read|round|run( script)?|say|scripting components|set (eof|the clipboard to|volume)|store script|summarize|system attribute|system info|the clipboard|time to GMT|write|quoted form)\b`, NameBuiltin, nil}, + {`\b(considering|else|error|exit|from|if|ignoring|in|repeat|tell|then|times|to|try|until|using terms from|while|with|with timeout( of)?|with transaction|by|continue|end|its?|me|my|return|of|as)\b`, Keyword, nil}, + {`\b(global|local|prop(erty)?|set|get)\b`, Keyword, nil}, + {`\b(but|put|returning|the)\b`, NameBuiltin, nil}, + 
{`\b(attachment|attribute run|character|day|month|paragraph|word|year)s?\b`, NameBuiltin, nil}, + {`\b(about|above|against|apart from|around|aside from|at|below|beneath|beside|between|for|given|instead of|on|onto|out of|over|since)\b`, NameBuiltin, nil}, + {`\b(accepts arrow key|action method|active|alignment|allowed identifiers|allows branch selection|allows column reordering|allows column resizing|allows column selection|allows customization|allows editing text attributes|allows empty selection|allows mixed state|allows multiple selection|allows reordering|allows undo|alpha( value)?|alternate image|alternate increment value|alternate title|animation delay|associated file name|associated object|auto completes|auto display|auto enables items|auto repeat|auto resizes( outline column)?|auto save expanded items|auto save name|auto save table columns|auto saves configuration|auto scroll|auto sizes all columns to fit|auto sizes cells|background color|bezel state|bezel style|bezeled|border rect|border type|bordered|bounds( rotation)?|box type|button returned|button type|can choose directories|can choose files|can draw|can hide|cell( (background color|size|type))?|characters|class|click count|clicked( data)? column|clicked data item|clicked( data)? row|closeable|collating|color( (mode|panel))|command key down|configuration|content(s| (size|view( margins)?))?|context|continuous|control key down|control size|control tint|control view|controller visible|coordinate system|copies( on scroll)?|corner view|current cell|current column|current( field)? editor|current( menu)? item|current row|current tab view item|data source|default identifiers|delta (x|y|z)|destination window|directory|display mode|displayed cell|document( (edited|rect|view))?|double value|dragged column|dragged distance|dragged items|draws( cell)? background|draws grid|dynamically scrolls|echos bullets|edge|editable|edited( data)? column|edited data item|edited( data)? row|enabled|enclosing scroll view|ending page|error handling|event number|event type|excluded from windows menu|executable path|expanded|fax number|field editor|file kind|file name|file type|first responder|first visible column|flipped|floating|font( panel)?|formatter|frameworks path|frontmost|gave up|grid color|has data items|has horizontal ruler|has horizontal scroller|has parent data item|has resize indicator|has shadow|has sub menu|has vertical ruler|has vertical scroller|header cell|header view|hidden|hides when deactivated|highlights by|horizontal line scroll|horizontal page scroll|horizontal ruler view|horizontally resizable|icon image|id|identifier|ignores multiple clicks|image( (alignment|dims when disabled|frame style|scaling))?|imports graphics|increment value|indentation per level|indeterminate|index|integer value|intercell spacing|item height|key( (code|equivalent( modifier)?|window))?|knob thickness|label|last( visible)? column|leading offset|leaf|level|line scroll|loaded|localized sort|location|loop mode|main( (bunde|menu|window))?|marker follows cell|matrix mode|maximum( content)? size|maximum visible columns|menu( form representation)?|miniaturizable|miniaturized|minimized image|minimized title|minimum column width|minimum( content)? 
size|modal|modified|mouse down state|movie( (controller|file|rect))?|muted|name|needs display|next state|next text|number of tick marks|only tick mark values|opaque|open panel|option key down|outline table column|page scroll|pages across|pages down|palette label|pane splitter|parent data item|parent window|pasteboard|path( (names|separator))?|playing|plays every frame|plays selection only|position|preferred edge|preferred type|pressure|previous text|prompt|properties|prototype cell|pulls down|rate|released when closed|repeated|requested print time|required file type|resizable|resized column|resource path|returns records|reuses columns|rich text|roll over|row height|rulers visible|save panel|scripts path|scrollable|selectable( identifiers)?|selected cell|selected( data)? columns?|selected data items?|selected( data)? rows?|selected item identifier|selection by rect|send action on arrow key|sends action when done editing|separates columns|separator item|sequence number|services menu|shared frameworks path|shared support path|sheet|shift key down|shows alpha|shows state by|size( mode)?|smart insert delete enabled|sort case sensitivity|sort column|sort order|sort type|sorted( data rows)?|sound|source( mask)?|spell checking enabled|starting page|state|string value|sub menu|super menu|super view|tab key traverses cells|tab state|tab type|tab view|table view|tag|target( printer)?|text color|text container insert|text container origin|text returned|tick mark position|time stamp|title(d| (cell|font|height|position|rect))?|tool tip|toolbar|trailing offset|transparent|treat packages as directories|truncated labels|types|unmodified characters|update views|use sort indicator|user defaults|uses data source|uses ruler|uses threaded animation|uses title from previous column|value wraps|version|vertical( (line scroll|page scroll|ruler view))?|vertically resizable|view|visible( document rect)?|volume|width|window|windows menu|wraps|zoomable|zoomed)\b`, NameAttribute, nil}, + {`\b(action cell|alert reply|application|box|browser( cell)?|bundle|button( cell)?|cell|clip view|color well|color-panel|combo box( item)?|control|data( (cell|column|item|row|source))?|default entry|dialog reply|document|drag info|drawer|event|font(-panel)?|formatter|image( (cell|view))?|matrix|menu( item)?|item|movie( view)?|open-panel|outline view|panel|pasteboard|plugin|popup button|progress indicator|responder|save-panel|scroll view|secure text field( cell)?|slider|sound|split view|stepper|tab view( item)?|table( (column|header cell|header view|view))|text( (field( cell)?|view))?|toolbar( item)?|user-defaults|view|window)s?\b`, NameBuiltin, nil}, + {`\b(animate|append|call method|center|close drawer|close panel|display|display alert|display dialog|display panel|go|hide|highlight|increment|item for|load image|load movie|load nib|load panel|load sound|localized string|lock focus|log|open drawer|path for|pause|perform action|play|register|resume|scroll|select( all)?|show|size to fit|start|step back|step forward|stop|synchronize|unlock focus|update)\b`, NameBuiltin, nil}, + {`\b((in )?back of|(in )?front of|[0-9]+(st|nd|rd|th)|first|second|third|fourth|fifth|sixth|seventh|eighth|ninth|tenth|after|back|before|behind|every|front|index|last|middle|some|that|through|thru|where|whose)\b`, NameBuiltin, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`\b([a-zA-Z]\w*)\b`, NameVariable, nil}, + {`[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?`, LiteralNumberFloat, nil}, + {`[-+]?\d+`, LiteralNumberInteger, nil}, + }, + "comment": { + 
{`\(\*`, CommentMultiline, Push()}, + {`\*\)`, CommentMultiline, Pop(1)}, + {`[^*(]+`, CommentMultiline, nil}, + {`[*(]`, CommentMultiline, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/arduino.go b/vendor/github.com/alecthomas/chroma/lexers/a/arduino.go new file mode 100644 index 000000000..0edbe3f93 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/arduino.go @@ -0,0 +1,114 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Arduino lexer. +var Arduino = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Arduino", + Aliases: []string{"arduino"}, + Filenames: []string{"*.ino"}, + MimeTypes: []string{"text/x-arduino"}, + EnsureNL: true, + }, + arduinoRules, +)) + +func arduinoRules() Rules { + return Rules{ + "statements": { + {Words(``, `\b`, `catch`, `const_cast`, `delete`, `dynamic_cast`, `explicit`, `export`, `friend`, `mutable`, `namespace`, `new`, `operator`, `private`, `protected`, `public`, `reinterpret_cast`, `restrict`, `static_cast`, `template`, `this`, `throw`, `throws`, `try`, `typeid`, `typename`, `using`, `virtual`, `constexpr`, `nullptr`, `decltype`, `thread_local`, `alignas`, `alignof`, `static_assert`, `noexcept`, `override`, `final`), Keyword, nil}, + {`char(16_t|32_t)\b`, KeywordType, nil}, + {`(class)\b`, ByGroups(Keyword, Text), Push("classname")}, + {`(R)(")([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(")`, ByGroups(LiteralStringAffix, LiteralString, LiteralStringDelimiter, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, LiteralString), nil}, + {`(u8|u|U)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")}, + {`(L?)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")}, + {`(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')`, ByGroups(LiteralStringAffix, LiteralStringChar, LiteralStringChar, LiteralStringChar), nil}, + {`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*`, LiteralNumberFloat, nil}, + {`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, + {`0x[0-9a-fA-F]+[LlUu]*`, LiteralNumberHex, nil}, + {`0[0-7]+[LlUu]*`, LiteralNumberOct, nil}, + {`\d+[LlUu]*`, LiteralNumberInteger, nil}, + {`\*/`, Error, nil}, + {`[~!%^&*+=|?:<>/-]`, Operator, nil}, + {`[()\[\],.]`, Punctuation, nil}, + {Words(``, `\b`, `asm`, `auto`, `break`, `case`, `const`, `continue`, `default`, `do`, `else`, `enum`, `extern`, `for`, `goto`, `if`, `register`, `restricted`, `return`, `sizeof`, `static`, `struct`, `switch`, `typedef`, `union`, `volatile`, `while`), Keyword, nil}, + {`(_Bool|_Complex|_Imaginary|array|atomic_bool|atomic_char|atomic_int|atomic_llong|atomic_long|atomic_schar|atomic_short|atomic_uchar|atomic_uint|atomic_ullong|atomic_ulong|atomic_ushort|auto|bool|boolean|BooleanVariables|Byte|byte|Char|char|char16_t|char32_t|class|complex|Const|const|const_cast|delete|double|dynamic_cast|enum|explicit|extern|Float|float|friend|inline|Int|int|int16_t|int32_t|int64_t|int8_t|Long|long|new|NULL|null|operator|private|PROGMEM|protected|public|register|reinterpret_cast|short|signed|sizeof|Static|static|static_cast|String|struct|typedef|uint16_t|uint32_t|uint64_t|uint8_t|union|unsigned|virtual|Void|void|Volatile|volatile|word)\b`, KeywordType, nil}, + // Start of: Arduino-specific syntax + {`(and|final|If|Loop|loop|not|or|override|setup|Setup|throw|try|xor)\b`, Keyword, nil}, // Addition to keywords already defined by C++ + 
{`(ANALOG_MESSAGE|BIN|CHANGE|DEC|DEFAULT|DIGITAL_MESSAGE|EXTERNAL|FALLING|FIRMATA_STRING|HALF_PI|HEX|HIGH|INPUT|INPUT_PULLUP|INTERNAL|INTERNAL1V1|INTERNAL1V1|INTERNAL2V56|INTERNAL2V56|LED_BUILTIN|LED_BUILTIN_RX|LED_BUILTIN_TX|LOW|LSBFIRST|MSBFIRST|OCT|OUTPUT|PI|REPORT_ANALOG|REPORT_DIGITAL|RISING|SET_PIN_MODE|SYSEX_START|SYSTEM_RESET|TWO_PI)\b`, KeywordConstant, nil}, + {`(boolean|const|byte|word|string|String|array)\b`, NameVariable, nil}, + {`(Keyboard|KeyboardController|MouseController|SoftwareSerial|EthernetServer|EthernetClient|LiquidCrystal|RobotControl|GSMVoiceCall|EthernetUDP|EsploraTFT|HttpClient|RobotMotor|WiFiClient|GSMScanner|FileSystem|Scheduler|GSMServer|YunClient|YunServer|IPAddress|GSMClient|GSMModem|Keyboard|Ethernet|Console|GSMBand|Esplora|Stepper|Process|WiFiUDP|GSM_SMS|Mailbox|USBHost|Firmata|PImage|Client|Server|GSMPIN|FileIO|Bridge|Serial|EEPROM|Stream|Mouse|Audio|Servo|File|Task|GPRS|WiFi|Wire|TFT|GSM|SPI|SD)\b`, NameClass, nil}, + {`(abs|Abs|accept|ACos|acos|acosf|addParameter|analogRead|AnalogRead|analogReadResolution|AnalogReadResolution|analogReference|AnalogReference|analogWrite|AnalogWrite|analogWriteResolution|AnalogWriteResolution|answerCall|asin|ASin|asinf|atan|ATan|atan2|ATan2|atan2f|atanf|attach|attached|attachGPRS|attachInterrupt|AttachInterrupt|autoscroll|available|availableForWrite|background|beep|begin|beginPacket|beginSD|beginSMS|beginSpeaker|beginTFT|beginTransmission|beginWrite|bit|Bit|BitClear|bitClear|bitRead|BitRead|bitSet|BitSet|BitWrite|bitWrite|blink|blinkVersion|BSSID|buffer|byte|cbrt|cbrtf|Ceil|ceil|ceilf|changePIN|char|charAt|checkPIN|checkPUK|checkReg|circle|cityNameRead|cityNameWrite|clear|clearScreen|click|close|compareTo|compassRead|concat|config|connect|connected|constrain|Constrain|copysign|copysignf|cos|Cos|cosf|cosh|coshf|countryNameRead|countryNameWrite|createChar|cursor|debugPrint|degrees|Delay|delay|DelayMicroseconds|delayMicroseconds|detach|DetachInterrupt|detachInterrupt|DigitalPinToInterrupt|digitalPinToInterrupt|DigitalRead|digitalRead|DigitalWrite|digitalWrite|disconnect|display|displayLogos|drawBMP|drawCompass|encryptionType|end|endPacket|endSMS|endsWith|endTransmission|endWrite|equals|equalsIgnoreCase|exists|exitValue|Exp|exp|expf|fabs|fabsf|fdim|fdimf|fill|find|findUntil|float|floor|Floor|floorf|flush|fma|fmaf|fmax|fmaxf|fmin|fminf|fmod|fmodf|gatewayIP|get|getAsynchronously|getBand|getButton|getBytes|getCurrentCarrier|getIMEI|getKey|getModifiers|getOemKey|getPINUsed|getResult|getSignalStrength|getSocket|getVoiceCallStatus|getXChange|getYChange|hangCall|height|highByte|HighByte|home|hypot|hypotf|image|indexOf|int|interrupts|IPAddress|IRread|isActionDone|isAlpha|isAlphaNumeric|isAscii|isControl|isDigit|isDirectory|isfinite|isGraph|isHexadecimalDigit|isinf|isListening|isLowerCase|isnan|isPIN|isPressed|isPrintable|isPunct|isSpace|isUpperCase|isValid|isWhitespace|keyboardRead|keyPressed|keyReleased|knobRead|lastIndexOf|ldexp|ldexpf|leftToRight|length|line|lineFollowConfig|listen|listenOnLocalhost|loadImage|localIP|log|Log|log10|log10f|logf|long|lowByte|LowByte|lrint|lrintf|lround|lroundf|macAddress|maintain|map|Map|Max|max|messageAvailable|Micros|micros|millis|Millis|Min|min|mkdir|motorsStop|motorsWrite|mouseDragged|mouseMoved|mousePressed|mouseReleased|move|noAutoscroll|noBlink|noBuffer|noCursor|noDisplay|noFill|noInterrupts|NoInterrupts|noListenOnLocalhost|noStroke|noTone|NoTone|onReceive|onRequest|open|openNextFile|overflow|parseCommand|parseFloat|parseInt|parsePacket|pauseMode|peek|PinMode|pinMode|playFile|playMelody|point|
pointTo|position|Pow|pow|powf|prepare|press|print|printFirmwareVersion|println|printVersion|process|processInput|PulseIn|pulseIn|pulseInLong|PulseInLong|put|radians|random|Random|randomSeed|RandomSeed|read|readAccelerometer|readBlue|readButton|readBytes|readBytesUntil|readGreen|readJoystickButton|readJoystickSwitch|readJoystickX|readJoystickY|readLightSensor|readMessage|readMicrophone|readNetworks|readRed|readSlider|readString|readStringUntil|readTemperature|ready|rect|release|releaseAll|remoteIP|remoteNumber|remotePort|remove|replace|requestFrom|retrieveCallingNumber|rewindDirectory|rightToLeft|rmdir|robotNameRead|robotNameWrite|round|roundf|RSSI|run|runAsynchronously|running|runShellCommand|runShellCommandAsynchronously|scanNetworks|scrollDisplayLeft|scrollDisplayRight|seek|sendAnalog|sendDigitalPortPair|sendDigitalPorts|sendString|sendSysex|Serial_Available|Serial_Begin|Serial_End|Serial_Flush|Serial_Peek|Serial_Print|Serial_Println|Serial_Read|serialEvent|setBand|setBitOrder|setCharAt|setClockDivider|setCursor|setDataMode|setDNS|setFirmwareVersion|setMode|setPINUsed|setSpeed|setTextSize|setTimeout|ShiftIn|shiftIn|ShiftOut|shiftOut|shutdown|signbit|sin|Sin|sinf|sinh|sinhf|size|sizeof|Sq|sq|Sqrt|sqrt|sqrtf|SSID|startLoop|startsWith|step|stop|stroke|subnetMask|substring|switchPIN|tan|Tan|tanf|tanh|tanhf|tempoWrite|text|toCharArray|toInt|toLowerCase|tone|Tone|toUpperCase|transfer|trim|trunc|truncf|tuneWrite|turn|updateIR|userNameRead|userNameWrite|voiceCall|waitContinue|width|WiFiServer|word|write|writeBlue|writeGreen|writeJSON|writeMessage|writeMicroseconds|writeRed|writeRGB|yield|Yield)\b`, NameFunction, nil}, + // End of: Arduino-specific syntax + {Words(``, `\b`, `inline`, `_inline`, `__inline`, `naked`, `restrict`, `thread`, `typename`), KeywordReserved, nil}, + {`(__m(128i|128d|128|64))\b`, KeywordReserved, nil}, + {Words(`__`, `\b`, `asm`, `int8`, `based`, `except`, `int16`, `stdcall`, `cdecl`, `fastcall`, `int32`, `declspec`, `finally`, `int64`, `try`, `leave`, `wchar_t`, `w64`, `unaligned`, `raise`, `noop`, `identifier`, `forceinline`, `assume`), KeywordReserved, nil}, + {`(true|false|NULL)\b`, NameBuiltin, nil}, + {`([a-zA-Z_]\w*)(\s*)(:)(?!:)`, ByGroups(NameLabel, Text, Punctuation), nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + }, + "root": { + Include("whitespace"), + {`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), Push("function")}, + {`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), nil}, + Default(Push("statement")), + {Words(`__`, `\b`, `virtual_inheritance`, `uuidof`, `super`, `single_inheritance`, `multiple_inheritance`, `interface`, `event`), KeywordReserved, nil}, + {`__(offload|blockingoffload|outer)\b`, KeywordPseudo, nil}, + }, + "classname": { + {`[a-zA-Z_]\w*`, NameClass, Pop(1)}, + {`\s*(?=>)`, Text, Pop(1)}, + }, + "whitespace": { + {`^#if\s+0`, CommentPreproc, Push("if0")}, + {`^#`, CommentPreproc, Push("macro")}, + {`^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("if0")}, + {`^(\s*(?:/[*].*?[*]/\s*)?)(#)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("macro")}, + {`\n`, Text, nil}, + {`\s+`, Text, nil}, + {`\\\n`, Text, nil}, + {`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil}, + {`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil}, + {`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil}, + }, + 
"statement": { + Include("whitespace"), + Include("statements"), + {`[{}]`, Punctuation, nil}, + {`;`, Punctuation, Pop(1)}, + }, + "function": { + Include("whitespace"), + Include("statements"), + {`;`, Punctuation, nil}, + {`\{`, Punctuation, Push()}, + {`\}`, Punctuation, Pop(1)}, + }, + "string": { + {`"`, LiteralString, Pop(1)}, + {`\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})`, LiteralStringEscape, nil}, + {`[^\\"\n]+`, LiteralString, nil}, + {`\\\n`, LiteralString, nil}, + {`\\`, LiteralString, nil}, + }, + "macro": { + {`(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)`, ByGroups(CommentPreproc, Text, CommentPreprocFile), nil}, + {`[^/\n]+`, CommentPreproc, nil}, + {`/[*](.|\n)*?[*]/`, CommentMultiline, nil}, + {`//.*?\n`, CommentSingle, Pop(1)}, + {`/`, CommentPreproc, nil}, + {`(?<=\\)\n`, CommentPreproc, nil}, + {`\n`, CommentPreproc, Pop(1)}, + }, + "if0": { + {`^\s*#if.*?(?+*%\^/!=|])=?`, Operator, Push("slashstartsregex")}, + {`[{(\[;,]`, Punctuation, Push("slashstartsregex")}, + {`[})\].]`, Punctuation, nil}, + {`(break|continue|do|while|exit|for|if|else|return|switch|case|default)\b`, Keyword, Push("slashstartsregex")}, + {`function\b`, KeywordDeclaration, Push("slashstartsregex")}, + {`(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|length|match|split|patsplit|sprintf|sub|substr|tolower|toupper|close|fflush|getline|next(file)|print|printf|strftime|systime|mktime|delete|system|strtonum|and|compl|lshift|or|rshift|asorti?|isarray|bindtextdomain|dcn?gettext|@(include|load|namespace))\b`, KeywordReserved, nil}, + {`(ARGC|ARGIND|ARGV|BEGIN(FILE)?|BINMODE|CONVFMT|ENVIRON|END(FILE)?|ERRNO|FIELDWIDTHS|FILENAME|FNR|FPAT|FS|IGNORECASE|LINT|NF|NR|OFMT|OFS|ORS|PROCINFO|RLENGTH|RS|RSTART|RT|SUBSEP|TEXTDOMAIN)\b`, NameBuiltin, nil}, + {`[@$a-zA-Z_]\w*`, NameOther, nil}, + {`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/ballerina.go b/vendor/github.com/alecthomas/chroma/lexers/b/ballerina.go new file mode 100644 index 000000000..d8916d0e6 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/b/ballerina.go @@ -0,0 +1,50 @@ +package b + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Ballerina lexer. 
+var Ballerina = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Ballerina", + Aliases: []string{"ballerina"}, + Filenames: []string{"*.bal"}, + MimeTypes: []string{"text/x-ballerina"}, + DotAll: true, + }, + ballerinaRules, +)) + +func ballerinaRules() Rules { + return Rules{ + "root": { + {`[^\S\n]+`, Text, nil}, + {`//.*?\n`, CommentSingle, nil}, + {`/\*.*?\*/`, CommentMultiline, nil}, + {`(break|catch|continue|done|else|finally|foreach|forever|fork|if|lock|match|return|throw|transaction|try|while)\b`, Keyword, nil}, + {`((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)((?:[^\W\d]|\$)[\w$]*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil}, + {`@[^\W\d][\w.]*`, NameDecorator, nil}, + {`(annotation|bind|but|endpoint|error|function|object|private|public|returns|service|type|var|with|worker)\b`, KeywordDeclaration, nil}, + {`(boolean|byte|decimal|float|int|json|map|nil|record|string|table|xml)\b`, KeywordType, nil}, + {`(true|false|null)\b`, KeywordConstant, nil}, + {`(import)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'`, LiteralStringChar, nil}, + {`(\.)((?:[^\W\d]|\$)[\w$]*)`, ByGroups(Operator, NameAttribute), nil}, + {`^\s*([^\W\d]|\$)[\w$]*:`, NameLabel, nil}, + {`([^\W\d]|\$)[\w$]*`, Name, nil}, + {`([0-9][0-9_]*\.([0-9][0-9_]*)?|\.[0-9][0-9_]*)([eE][+\-]?[0-9][0-9_]*)?[fFdD]?|[0-9][eE][+\-]?[0-9][0-9_]*[fFdD]?|[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFdD]|0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)[pP][+\-]?[0-9][0-9_]*[fFdD]?`, LiteralNumberFloat, nil}, + {`0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?`, LiteralNumberHex, nil}, + {`0[bB][01][01_]*[lL]?`, LiteralNumberBin, nil}, + {`0[0-7_]+[lL]?`, LiteralNumberOct, nil}, + {`0|[1-9][0-9_]*[lL]?`, LiteralNumberInteger, nil}, + {`[~^*!%&\[\](){}<>|+=:;,./?-]`, Operator, nil}, + {`\n`, Text, nil}, + }, + "import": { + {`[\w.]+`, NameNamespace, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/bash.go b/vendor/github.com/alecthomas/chroma/lexers/b/bash.go new file mode 100644 index 000000000..34b1e222d --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/b/bash.go @@ -0,0 +1,100 @@ +package b + +import ( + "regexp" + + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// TODO(moorereason): can this be factored away? +var bashAnalyserRe = regexp.MustCompile(`(?m)^#!.*/bin/(?:env |)(?:bash|zsh|sh|ksh)`) + +// Bash lexer. 
+var Bash = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Bash", + Aliases: []string{"bash", "sh", "ksh", "zsh", "shell"}, + Filenames: []string{"*.sh", "*.ksh", "*.bash", "*.ebuild", "*.eclass", ".env", "*.env", "*.exheres-0", "*.exlib", "*.zsh", "*.zshrc", ".bashrc", "bashrc", ".bash_*", "bash_*", "zshrc", ".zshrc", "PKGBUILD"}, + MimeTypes: []string{"application/x-sh", "application/x-shellscript"}, + }, + bashRules, +).SetAnalyser(func(text string) float32 { + if bashAnalyserRe.FindString(text) != "" { + return 1.0 + } + return 0.0 +})) + +func bashRules() Rules { + return Rules{ + "root": { + Include("basic"), + {"`", LiteralStringBacktick, Push("backticks")}, + Include("data"), + Include("interp"), + }, + "interp": { + {`\$\(\(`, Keyword, Push("math")}, + {`\$\(`, Keyword, Push("paren")}, + {`\$\{#?`, LiteralStringInterpol, Push("curly")}, + {`\$[a-zA-Z_]\w*`, NameVariable, nil}, + {`\$(?:\d+|[#$?!_*@-])`, NameVariable, nil}, + {`\$`, Text, nil}, + }, + "basic": { + {`\b(if|fi|else|while|do|done|for|then|return|function|case|select|continue|until|esac|elif)(\s*)\b`, ByGroups(Keyword, Text), nil}, + {"\\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|complete|declare|dirs|disown|echo|enable|eval|exec|exit|export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|shopt|source|suspend|test|time|times|trap|true|type|typeset|ulimit|umask|unalias|unset|wait)(?=[\\s)`])", NameBuiltin, nil}, + {`\A#!.+\n`, CommentPreproc, nil}, + {`#.*(\S|$)`, CommentSingle, nil}, + {`\\[\w\W]`, LiteralStringEscape, nil}, + {`(\b\w+)(\s*)(\+?=)`, ByGroups(NameVariable, Text, Operator), nil}, + {`[\[\]{}()=]`, Operator, nil}, + {`<<<`, Operator, nil}, + {`<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2`, LiteralString, nil}, + {`&&|\|\|`, Operator, nil}, + }, + "data": { + {`(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Push("string")}, + {`(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'`, LiteralStringSingle, nil}, + {`(?s)'.*?'`, LiteralStringSingle, nil}, + {`;`, Punctuation, nil}, + {`&`, Punctuation, nil}, + {`\|`, Punctuation, nil}, + {`\s+`, Text, nil}, + {`\d+(?= |$)`, LiteralNumber, nil}, + {"[^=\\s\\[\\]{}()$\"\\'`\\\\<&|;]+", Text, nil}, + {`<`, Text, nil}, + }, + "string": { + {`"`, LiteralStringDouble, Pop(1)}, + {`(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+`, LiteralStringDouble, nil}, + Include("interp"), + }, + "curly": { + {`\}`, LiteralStringInterpol, Pop(1)}, + {`:-`, Keyword, nil}, + {`\w+`, NameVariable, nil}, + {"[^}:\"\\'`$\\\\]+", Punctuation, nil}, + {`:`, Punctuation, nil}, + Include("root"), + }, + "paren": { + {`\)`, Keyword, Pop(1)}, + Include("root"), + }, + "math": { + {`\)\)`, Keyword, Pop(1)}, + {`[-+*/%^|&]|\*\*|\|\|`, Operator, nil}, + {`\d+#\d+`, LiteralNumber, nil}, + {`\d+#(?! )`, LiteralNumber, nil}, + {`\d+`, LiteralNumber, nil}, + Include("root"), + }, + "backticks": { + {"`", LiteralStringBacktick, Pop(1)}, + Include("root"), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/bashsession.go b/vendor/github.com/alecthomas/chroma/lexers/b/bashsession.go new file mode 100644 index 000000000..055cbb37a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/b/bashsession.go @@ -0,0 +1,27 @@ +package b + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// BashSession lexer. 
+var BashSession = internal.Register(MustNewLazyLexer( + &Config{ + Name: "BashSession", + Aliases: []string{"bash-session", "console", "shell-session"}, + Filenames: []string{".sh-session"}, + MimeTypes: []string{"text/x-sh"}, + EnsureNL: true, + }, + bashsessionRules, +)) + +func bashsessionRules() Rules { + return Rules{ + "root": { + {`^((?:\[[^]]+@[^]]+\]\s?)?[#$%>])(\s*)(.*\n?)`, ByGroups(GenericPrompt, Text, Using(Bash)), nil}, + {`^.+\n?`, GenericOutput, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/batch.go b/vendor/github.com/alecthomas/chroma/lexers/b/batch.go new file mode 100644 index 000000000..f7a86776e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/b/batch.go @@ -0,0 +1,198 @@ +package b + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Batchfile lexer. +var Batchfile = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Batchfile", + Aliases: []string{"bat", "batch", "dosbatch", "winbatch"}, + Filenames: []string{"*.bat", "*.cmd"}, + MimeTypes: []string{"application/x-dos-batch"}, + CaseInsensitive: true, + }, + batchfileRules, +)) + +func batchfileRules() Rules { + return Rules{ + "root": { + {`\)((?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?:(?:[^\n\x1a^]|\^[\n\x1a]?[\w\W])*)`, CommentSingle, nil}, + {`(?=((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:))`, Text, Push("follow")}, + {`(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)`, UsingSelf("text"), nil}, + Include("redirect"), + {`[\n\x1a]+`, Text, nil}, + {`\(`, Punctuation, Push("root/compound")}, + {`@+`, Punctuation, nil}, + {`((?:for|if|rem)(?:(?=(?:\^[\n\x1a]?)?/)|(?:(?!\^)|(?<=m))(?:(?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+)?(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?)`, ByGroups(Keyword, UsingSelf("text")), Push("follow")}, + {`(goto(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))((?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"%\n\x1a&<>|])*(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"%\n\x1a&<>|])*)`, ByGroups(Keyword, UsingSelf("text")), Push("follow")}, + {Words(``, `(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])`, `assoc`, `break`, `cd`, `chdir`, `cls`, `color`, `copy`, `date`, `del`, `dir`, `dpath`, `echo`, `endlocal`, `erase`, `exit`, `ftype`, `keys`, `md`, `mkdir`, `mklink`, `move`, `path`, `pause`, `popd`, `prompt`, `pushd`, `rd`, `ren`, `rename`, `rmdir`, `setlocal`, `shift`, `start`, `time`, `title`, `type`, `ver`, `verify`, `vol`), Keyword, Push("follow")}, + {`(call)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:)`, ByGroups(Keyword, UsingSelf("text"), Punctuation), Push("call")}, + {`call(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])`, Keyword, nil}, + {`(for(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r 
,;=\xa0])+))(/f(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("for/f", "for")}, + {`(for(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/l(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("for/l", "for")}, + {`for(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])(?!\^)`, Keyword, Push("for2", "for")}, + {`(goto(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:?)`, ByGroups(Keyword, UsingSelf("text"), Punctuation), Push("label")}, + {`(if(?:(?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:/i(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:not(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)`, ByGroups(Keyword, UsingSelf("text"), Keyword, UsingSelf("text"), Keyword, UsingSelf("text")), Push("(?", "if")}, + {`rem(((?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+)?.*|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])(?:(?:[^\n\x1a^]|\^[\n\x1a]?[\w\W])*))`, CommentSingle, Push("follow")}, + {`(set(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))((?:(?:\^[\n\x1a]?)?[^\S\n])*)(/a)`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("arithmetic")}, + {`(set(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:/p)?)((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|^=]|\^[\n\x1a]?[^"=])+)?)((?:(?:\^[\n\x1a]?)?=)?)`, ByGroups(Keyword, UsingSelf("text"), Keyword, UsingSelf("text"), UsingSelf("variable"), Punctuation), Push("follow")}, + Default(Push("follow")), + }, + "follow": { + {`((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:)([\t\v\f\r ,;=\xa0]*)((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^]|\^[\n\x1a]?[\w\W])*))(.*)`, ByGroups(Text, Punctuation, Text, NameLabel, CommentSingle), nil}, + Include("redirect"), + {`(?=[\n\x1a])`, Text, Pop(1)}, + {`\|\|?|&&?`, Punctuation, Pop(1)}, + Include("text"), + }, + "arithmetic": { + {`0[0-7]+`, LiteralNumberOct, nil}, + {`0x[\da-f]+`, LiteralNumberHex, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`[(),]+`, Punctuation, nil}, + {`([=+\-*/!~]|%|\^\^)+`, Operator, nil}, + {`((?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(\^[\n\x1a]?)?[^()=+\-*/!~%^"\n\x1a&<>|\t\v\f\r ,;=\xa0]|\^[\n\x1a\t\v\f\r ,;=\xa0]?[\w\W])+`, UsingSelf("variable"), nil}, + {`(?=[\x00|&])`, Text, Pop(1)}, + Include("follow"), + }, + "call": { + {`(:?)((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^]|\^[\n\x1a]?[\w\W])*))`, ByGroups(Punctuation, NameLabel), Pop(1)}, + }, + "label": { + {`((?:(?:[^\n\x1a&<>|\t\v\f\r 
,;=\xa0+:^]|\^[\n\x1a]?[\w\W])*)?)((?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|\^[\n\x1a]?[\w\W]|[^"%^\n\x1a&<>|])*)`, ByGroups(NameLabel, CommentSingle), Pop(1)}, + }, + "redirect": { + {`((?:(?<=[\n\x1a\t\v\f\r ,;=\xa0])\d)?)(>>?&|<&)([\n\x1a\t\v\f\r ,;=\xa0]*)(\d)`, ByGroups(LiteralNumberInteger, Punctuation, Text, LiteralNumberInteger), nil}, + {`((?:(?<=[\n\x1a\t\v\f\r ,;=\xa0])(?>?|<)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))`, ByGroups(LiteralNumberInteger, Punctuation, UsingSelf("text")), nil}, + }, + "root/compound": { + {`\)`, Punctuation, Pop(1)}, + {`(?=((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:))`, Text, Push("follow/compound")}, + {`(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)`, UsingSelf("text"), nil}, + Include("redirect/compound"), + {`[\n\x1a]+`, Text, nil}, + {`\(`, Punctuation, Push("root/compound")}, + {`@+`, Punctuation, nil}, + {`((?:for|if|rem)(?:(?=(?:\^[\n\x1a]?)?/)|(?:(?!\^)|(?<=m))(?:(?=\()|(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0)])+)?(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?)`, ByGroups(Keyword, UsingSelf("text")), Push("follow/compound")}, + {`(goto(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])))((?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"%\n\x1a&<>|)])*(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"%\n\x1a&<>|)])*)`, ByGroups(Keyword, UsingSelf("text")), Push("follow/compound")}, + {Words(``, `(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))`, `assoc`, `break`, `cd`, `chdir`, `cls`, `color`, `copy`, `date`, `del`, `dir`, `dpath`, `echo`, `endlocal`, `erase`, `exit`, `ftype`, `keys`, `md`, `mkdir`, `mklink`, `move`, `path`, `pause`, `popd`, `prompt`, `pushd`, `rd`, `ren`, `rename`, `rmdir`, `setlocal`, `shift`, `start`, `time`, `title`, `type`, `ver`, `verify`, `vol`), Keyword, Push("follow/compound")}, + {`(call)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:)`, ByGroups(Keyword, UsingSelf("text"), Punctuation), Push("call/compound")}, + {`call(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))`, Keyword, nil}, + {`(for(?:(?=\))|(?=\^?[\t\v\f\r 
,;=\xa0]|[&<>|\n\x1a]))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/f(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("for/f", "for")}, + {`(for(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/l(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("for/l", "for")}, + {`for(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?!\^)`, Keyword, Push("for2", "for")}, + {`(goto(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:?)`, ByGroups(Keyword, UsingSelf("text"), Punctuation), Push("label/compound")}, + {`(if(?:(?=\()|(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:/i(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:not(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)`, ByGroups(Keyword, UsingSelf("text"), Keyword, UsingSelf("text"), Keyword, UsingSelf("text")), Push("(?", "if")}, + {`rem(((?=\()|(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+)?.*|(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))(?:(?:[^\n\x1a^)]|\^[\n\x1a]?[^)])*))`, CommentSingle, Push("follow/compound")}, + {`(set(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])))((?:(?:\^[\n\x1a]?)?[^\S\n])*)(/a)`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("arithmetic/compound")}, + {`(set(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])))((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:/p)?)((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|^=)]|\^[\n\x1a]?[^"=])+)?)((?:(?:\^[\n\x1a]?)?=)?)`, ByGroups(Keyword, UsingSelf("text"), Keyword, UsingSelf("text"), UsingSelf("variable"), Punctuation), Push("follow/compound")}, + Default(Push("follow/compound")), + }, + "follow/compound": { + {`(?=\))`, Text, Pop(1)}, + {`((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:)([\t\v\f\r ,;=\xa0]*)((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^)]|\^[\n\x1a]?[^)])*))(.*)`, ByGroups(Text, Punctuation, Text, NameLabel, CommentSingle), nil}, + Include("redirect/compound"), + {`(?=[\n\x1a])`, Text, Pop(1)}, + {`\|\|?|&&?`, Punctuation, Pop(1)}, + Include("text"), + }, + "arithmetic/compound": { + {`(?=\))`, Text, Pop(1)}, + {`0[0-7]+`, LiteralNumberOct, nil}, + {`0x[\da-f]+`, LiteralNumberHex, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`[(),]+`, Punctuation, nil}, + {`([=+\-*/!~]|%|\^\^)+`, Operator, nil}, + {`((?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(\^[\n\x1a]?)?[^()=+\-*/!~%^"\n\x1a&<>|\t\v\f\r ,;=\xa0]|\^[\n\x1a\t\v\f\r ,;=\xa0]?[^)])+`, UsingSelf("variable"), nil}, + 
{`(?=[\x00|&])`, Text, Pop(1)}, + Include("follow"), + }, + "call/compound": { + {`(?=\))`, Text, Pop(1)}, + {`(:?)((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^)]|\^[\n\x1a]?[^)])*))`, ByGroups(Punctuation, NameLabel), Pop(1)}, + }, + "label/compound": { + {`(?=\))`, Text, Pop(1)}, + {`((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^)]|\^[\n\x1a]?[^)])*)?)((?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|\^[\n\x1a]?[^)]|[^"%^\n\x1a&<>|)])*)`, ByGroups(NameLabel, CommentSingle), Pop(1)}, + }, + "redirect/compound": { + {`((?:(?<=[\n\x1a\t\v\f\r ,;=\xa0])\d)?)(>>?&|<&)([\n\x1a\t\v\f\r ,;=\xa0]*)(\d)`, ByGroups(LiteralNumberInteger, Punctuation, Text, LiteralNumberInteger), nil}, + {`((?:(?<=[\n\x1a\t\v\f\r ,;=\xa0])(?>?|<)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0)])+))+))`, ByGroups(LiteralNumberInteger, Punctuation, UsingSelf("text")), nil}, + }, + "variable-or-escape": { + {`(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))`, NameVariable, nil}, + {`%%|\^[\n\x1a]?(\^!|[\w\W])`, LiteralStringEscape, nil}, + }, + "string": { + {`"`, LiteralStringDouble, Pop(1)}, + {`(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))`, NameVariable, nil}, + {`\^!|%%`, LiteralStringEscape, nil}, + {`[^"%^\n\x1a]+|[%^]`, LiteralStringDouble, nil}, + Default(Pop(1)), + }, + "sqstring": { + Include("variable-or-escape"), + {`[^%]+|%`, LiteralStringSingle, nil}, + }, + "bqstring": { + Include("variable-or-escape"), + {`[^%]+|%`, LiteralStringBacktick, nil}, + }, + "text": { + {`"`, LiteralStringDouble, Push("string")}, + Include("variable-or-escape"), + {`[^"%^\n\x1a&<>|\t\v\f\r ,;=\xa0\d)]+|.`, Text, nil}, + }, + "variable": { + {`"`, LiteralStringDouble, Push("string")}, + Include("variable-or-escape"), + {`[^"%^\n\x1a]+|.`, NameVariable, nil}, + }, + "for": { + {`((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(in)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(\()`, ByGroups(UsingSelf("text"), Keyword, UsingSelf("text"), Punctuation), Pop(1)}, + Include("follow"), + }, + "for2": { + {`\)`, Punctuation, nil}, + {`((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(do(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))`, ByGroups(UsingSelf("text"), Keyword), Pop(1)}, + {`[\n\x1a]+`, Text, nil}, + Include("follow"), + }, + "for/f": { + 
{`(")((?:(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"])*?")([\n\x1a\t\v\f\r ,;=\xa0]*)(\))`, ByGroups(LiteralStringDouble, UsingSelf("string"), Text, Punctuation), nil}, + {`"`, LiteralStringDouble, Push("#pop", "for2", "string")}, + {`('(?:%%|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[\w\W])*?')([\n\x1a\t\v\f\r ,;=\xa0]*)(\))`, ByGroups(UsingSelf("sqstring"), Text, Punctuation), nil}, + {"(`(?:%%|(?:(?:%(?:\\*|(?:~[a-z]*(?:\\$[^:]+:)?)?\\d|[^%:\\n\\x1a]+(?::(?:~(?:-?\\d+)?(?:,(?:-?\\d+)?)?|(?:[^%\\n\\x1a^]|\\^[^%\\n\\x1a])[^=\\n\\x1a]*=(?:[^%\\n\\x1a^]|\\^[^%\\n\\x1a])*)?)?%))|(?:\\^?![^!:\\n\\x1a]+(?::(?:~(?:-?\\d+)?(?:,(?:-?\\d+)?)?|(?:[^!\\n\\x1a^]|\\^[^!\\n\\x1a])[^=\\n\\x1a]*=(?:[^!\\n\\x1a^]|\\^[^!\\n\\x1a])*)?)?\\^?!))|[\\w\\W])*?`)([\\n\\x1a\\t\\v\\f\\r ,;=\\xa0]*)(\\))", ByGroups(UsingSelf("bqstring"), Text, Punctuation), nil}, + Include("for2"), + }, + "for/l": { + {`-?\d+`, LiteralNumberInteger, nil}, + Include("for2"), + }, + "if": { + {`((?:cmdextversion|errorlevel)(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(\d+)`, ByGroups(Keyword, UsingSelf("text"), LiteralNumberInteger), Pop(1)}, + {`(defined(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))((?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))`, ByGroups(Keyword, UsingSelf("text"), UsingSelf("variable")), Pop(1)}, + {`(exist(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))`, ByGroups(Keyword, UsingSelf("text")), Pop(1)}, + {`((?:-?(?:0[0-7]+|0x[\da-f]+|\d+)(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))((?:equ|geq|gtr|leq|lss|neq))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)(?:-?(?:0[0-7]+|0x[\da-f]+|\d+)(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))`, ByGroups(UsingSelf("arithmetic"), OperatorWord, UsingSelf("arithmetic")), Pop(1)}, + {`(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+)`, 
UsingSelf("text"), Push("#pop", "if2")}, + }, + "if2": { + {`((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(==)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))`, ByGroups(UsingSelf("text"), Operator, UsingSelf("text")), Pop(1)}, + {`((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))((?:equ|geq|gtr|leq|lss|neq))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))`, ByGroups(UsingSelf("text"), OperatorWord, UsingSelf("text")), Pop(1)}, + }, + "(?": { + {`(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)`, UsingSelf("text"), nil}, + {`\(`, Punctuation, Push("#pop", "else?", "root/compound")}, + Default(Pop(1)), + }, + "else?": { + {`(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)`, UsingSelf("text"), nil}, + {`else(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])`, Keyword, Pop(1)}, + Default(Pop(1)), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/bibtex.go b/vendor/github.com/alecthomas/chroma/lexers/b/bibtex.go new file mode 100644 index 000000000..d6a0ae334 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/b/bibtex.go @@ -0,0 +1,80 @@ +package b + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Bibtex lexer. 
+var Bibtex = internal.Register(MustNewLazyLexer( + &Config{ + Name: "BibTeX", + Aliases: []string{"bib", "bibtex"}, + Filenames: []string{"*.bib"}, + MimeTypes: []string{"text/x-bibtex"}, + NotMultiline: true, + CaseInsensitive: true, + }, + bibtexRules, +)) + +func bibtexRules() Rules { + return Rules{ + "root": { + Include("whitespace"), + {`@comment`, Comment, nil}, + {`@preamble`, NameClass, Push("closing-brace", "value", "opening-brace")}, + {`@string`, NameClass, Push("closing-brace", "field", "opening-brace")}, + {"@[a-z_@!$&*+\\-./:;<>?\\[\\\\\\]^`|~][\\w@!$&*+\\-./:;<>?\\[\\\\\\]^`|~]*", NameClass, Push("closing-brace", "command-body", "opening-brace")}, + {`.+`, Comment, nil}, + }, + "opening-brace": { + Include("whitespace"), + {`[{(]`, Punctuation, Pop(1)}, + }, + "closing-brace": { + Include("whitespace"), + {`[})]`, Punctuation, Pop(1)}, + }, + "command-body": { + Include("whitespace"), + {`[^\s\,\}]+`, NameLabel, Push("#pop", "fields")}, + }, + "fields": { + Include("whitespace"), + {`,`, Punctuation, Push("field")}, + Default(Pop(1)), + }, + "field": { + Include("whitespace"), + {"[a-z_@!$&*+\\-./:;<>?\\[\\\\\\]^`|~][\\w@!$&*+\\-./:;<>?\\[\\\\\\]^`|~]*", NameAttribute, Push("value", "=")}, + Default(Pop(1)), + }, + "=": { + Include("whitespace"), + {`=`, Punctuation, Pop(1)}, + }, + "value": { + Include("whitespace"), + {"[a-z_@!$&*+\\-./:;<>?\\[\\\\\\]^`|~][\\w@!$&*+\\-./:;<>?\\[\\\\\\]^`|~]*", NameVariable, nil}, + {`"`, LiteralString, Push("quoted-string")}, + {`\{`, LiteralString, Push("braced-string")}, + {`[\d]+`, LiteralNumber, nil}, + {`#`, Punctuation, nil}, + Default(Pop(1)), + }, + "quoted-string": { + {`\{`, LiteralString, Push("braced-string")}, + {`"`, LiteralString, Pop(1)}, + {`[^\{\"]+`, LiteralString, nil}, + }, + "braced-string": { + {`\{`, LiteralString, Push()}, + {`\}`, LiteralString, Pop(1)}, + {`[^\{\}]+`, LiteralString, nil}, + }, + "whitespace": { + {`\s+`, Text, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/bicep.go b/vendor/github.com/alecthomas/chroma/lexers/b/bicep.go new file mode 100644 index 000000000..ec136fbc5 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/b/bicep.go @@ -0,0 +1,112 @@ +package b + +import ( + "strings" + + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Bicep lexer. 
+var Bicep = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Bicep", + Aliases: []string{"bicep"}, + Filenames: []string{"*.bicep"}, + }, + bicepRules, +)) + +func bicepRules() Rules { + bicepFunctions := []string{ + "any", + "array", + "concat", + "contains", + "empty", + "first", + "intersection", + "items", + "last", + "length", + "min", + "max", + "range", + "skip", + "take", + "union", + "dateTimeAdd", + "utcNow", + "deployment", + "environment", + "loadFileAsBase64", + "loadTextContent", + "int", + "json", + "extensionResourceId", + "getSecret", + "list", + "listKeys", + "listKeyValue", + "listAccountSas", + "listSecrets", + "pickZones", + "reference", + "resourceId", + "subscriptionResourceId", + "tenantResourceId", + "managementGroup", + "resourceGroup", + "subscription", + "tenant", + "base64", + "base64ToJson", + "base64ToString", + "dataUri", + "dataUriToString", + "endsWith", + "format", + "guid", + "indexOf", + "lastIndexOf", + "length", + "newGuid", + "padLeft", + "replace", + "split", + "startsWith", + "string", + "substring", + "toLower", + "toUpper", + "trim", + "uniqueString", + "uri", + "uriComponent", + "uriComponentToString", + } + + return Rules{ + "root": { + {`//[^\n\r]+`, CommentSingle, nil}, + {`/\*.*?\*/`, CommentMultiline, nil}, + {`([']?\w+[']?)(:)`, ByGroups(NameProperty, Punctuation), nil}, + {`\b('(resourceGroup|subscription|managementGroup|tenant)')\b`, KeywordNamespace, nil}, + {`'[\w\$\{\(\)\}\.]{1,}?'`, LiteralStringInterpol, nil}, + {`('''|').*?('''|')`, LiteralString, nil}, + {`\b(allowed|batchSize|description|maxLength|maxValue|metadata|minLength|minValue|secure)\b`, NameDecorator, nil}, + {`\b(az|sys)\.`, NameNamespace, nil}, + {`\b(` + strings.Join(bicepFunctions, "|") + `)\b`, NameFunction, nil}, + // https://docs.microsoft.com/en-us/azure/azure-resource-manager/bicep/bicep-functions-logical + {`\b(bool)(\()`, ByGroups(NameFunction, Punctuation), nil}, + {`\b(for|if|in)\b`, Keyword, nil}, + {`\b(module|output|param|resource|var)\b`, KeywordDeclaration, nil}, + {`\b(array|bool|int|object|string)\b`, KeywordType, nil}, + // https://docs.microsoft.com/en-us/azure/azure-resource-manager/bicep/operators + {`(>=|>|<=|<|==|!=|=~|!~|::|&&|\?\?|!|-|%|\*|\/|\+)`, Operator, nil}, + {`[\(\)\[\]\.:\?{}@=]`, Punctuation, nil}, + {`[\w_-]+`, Text, nil}, + {`\s+`, TextWhitespace, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/blitz.go b/vendor/github.com/alecthomas/chroma/lexers/b/blitz.go new file mode 100644 index 000000000..119f96b09 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/b/blitz.go @@ -0,0 +1,52 @@ +package b + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Blitzbasic lexer. 
+var Blitzbasic = internal.Register(MustNewLazyLexer( + &Config{ + Name: "BlitzBasic", + Aliases: []string{"blitzbasic", "b3d", "bplus"}, + Filenames: []string{"*.bb", "*.decls"}, + MimeTypes: []string{"text/x-bb"}, + CaseInsensitive: true, + }, + blitzbasicRules, +)) + +func blitzbasicRules() Rules { + return Rules{ + "root": { + {`[ \t]+`, Text, nil}, + {`;.*?\n`, CommentSingle, nil}, + {`"`, LiteralStringDouble, Push("string")}, + {`[0-9]+\.[0-9]*(?!\.)`, LiteralNumberFloat, nil}, + {`\.[0-9]+(?!\.)`, LiteralNumberFloat, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`\$[0-9a-f]+`, LiteralNumberHex, nil}, + {`\%[10]+`, LiteralNumberBin, nil}, + {Words(`\b`, `\b`, `Shl`, `Shr`, `Sar`, `Mod`, `Or`, `And`, `Not`, `Abs`, `Sgn`, `Handle`, `Int`, `Float`, `Str`, `First`, `Last`, `Before`, `After`), Operator, nil}, + {`([+\-*/~=<>^])`, Operator, nil}, + {`[(),:\[\]\\]`, Punctuation, nil}, + {`\.([ \t]*)([a-z]\w*)`, NameLabel, nil}, + {`\b(New)\b([ \t]+)([a-z]\w*)`, ByGroups(KeywordReserved, Text, NameClass), nil}, + {`\b(Gosub|Goto)\b([ \t]+)([a-z]\w*)`, ByGroups(KeywordReserved, Text, NameLabel), nil}, + {`\b(Object)\b([ \t]*)([.])([ \t]*)([a-z]\w*)\b`, ByGroups(Operator, Text, Punctuation, Text, NameClass), nil}, + {`\b([a-z]\w*)(?:([ \t]*)(@{1,2}|[#$%])|([ \t]*)([.])([ \t]*)(?:([a-z]\w*)))?\b([ \t]*)(\()`, ByGroups(NameFunction, Text, KeywordType, Text, Punctuation, Text, NameClass, Text, Punctuation), nil}, + {`\b(Function)\b([ \t]+)([a-z]\w*)(?:([ \t]*)(@{1,2}|[#$%])|([ \t]*)([.])([ \t]*)(?:([a-z]\w*)))?`, ByGroups(KeywordReserved, Text, NameFunction, Text, KeywordType, Text, Punctuation, Text, NameClass), nil}, + {`\b(Type)([ \t]+)([a-z]\w*)`, ByGroups(KeywordReserved, Text, NameClass), nil}, + {`\b(Pi|True|False|Null)\b`, KeywordConstant, nil}, + {`\b(Local|Global|Const|Field|Dim)\b`, KeywordDeclaration, nil}, + {Words(`\b`, `\b`, `End`, `Return`, `Exit`, `Chr`, `Len`, `Asc`, `New`, `Delete`, `Insert`, `Include`, `Function`, `Type`, `If`, `Then`, `Else`, `ElseIf`, `EndIf`, `For`, `To`, `Next`, `Step`, `Each`, `While`, `Wend`, `Repeat`, `Until`, `Forever`, `Select`, `Case`, `Default`, `Goto`, `Gosub`, `Data`, `Read`, `Restore`), KeywordReserved, nil}, + {`([a-z]\w*)(?:([ \t]*)(@{1,2}|[#$%])|([ \t]*)([.])([ \t]*)(?:([a-z]\w*)))?`, ByGroups(NameVariable, Text, KeywordType, Text, Punctuation, Text, NameClass), nil}, + }, + "string": { + {`""`, LiteralStringDouble, nil}, + {`"C?`, LiteralStringDouble, Pop(1)}, + {`[^"]+`, LiteralStringDouble, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/bnf.go b/vendor/github.com/alecthomas/chroma/lexers/b/bnf.go new file mode 100644 index 000000000..dcf7360c1 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/b/bnf.go @@ -0,0 +1,28 @@ +package b + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Bnf lexer. 
+var Bnf = internal.Register(MustNewLazyLexer( + &Config{ + Name: "BNF", + Aliases: []string{"bnf"}, + Filenames: []string{"*.bnf"}, + MimeTypes: []string{"text/x-bnf"}, + }, + bnfRules, +)) + +func bnfRules() Rules { + return Rules{ + "root": { + {`(<)([ -;=?-~]+)(>)`, ByGroups(Punctuation, NameClass, Punctuation), nil}, + {`::=`, Operator, nil}, + {`[^<>:]+`, Text, nil}, + {`.`, Text, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/brainfuck.go b/vendor/github.com/alecthomas/chroma/lexers/b/brainfuck.go new file mode 100644 index 000000000..d35e9c6db --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/b/brainfuck.go @@ -0,0 +1,38 @@ +package b + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Brainfuck lexer. +var Brainfuck = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Brainfuck", + Aliases: []string{"brainfuck", "bf"}, + Filenames: []string{"*.bf", "*.b"}, + MimeTypes: []string{"application/x-brainfuck"}, + }, + brainfuckRules, +)) + +func brainfuckRules() Rules { + return Rules{ + "common": { + {`[.,]+`, NameTag, nil}, + {`[+-]+`, NameBuiltin, nil}, + {`[<>]+`, NameVariable, nil}, + {`[^.,+\-<>\[\]]+`, Comment, nil}, + }, + "root": { + {`\[`, Keyword, Push("loop")}, + {`\]`, Error, nil}, + Include("common"), + }, + "loop": { + {`\[`, Keyword, Push()}, + {`\]`, Keyword, Pop(1)}, + Include("common"), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/c.go b/vendor/github.com/alecthomas/chroma/lexers/c/c.go new file mode 100644 index 000000000..3698c4f29 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/c.go @@ -0,0 +1,96 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// C lexer. 
+var C = internal.Register(MustNewLazyLexer( + &Config{ + Name: "C", + Aliases: []string{"c"}, + Filenames: []string{"*.c", "*.h", "*.idc", "*.x[bp]m"}, + MimeTypes: []string{"text/x-chdr", "text/x-csrc", "image/x-xbitmap", "image/x-xpixmap"}, + EnsureNL: true, + }, + cRules, +)) + +func cRules() Rules { + return Rules{ + "whitespace": { + {`^#if\s+0`, CommentPreproc, Push("if0")}, + {`^#`, CommentPreproc, Push("macro")}, + {`^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("if0")}, + {`^(\s*(?:/[*].*?[*]/\s*)?)(#)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("macro")}, + {`\n`, Text, nil}, + {`\s+`, Text, nil}, + {`\\\n`, Text, nil}, + {`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil}, + {`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil}, + {`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil}, + }, + "statements": { + {`(L?)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")}, + {`(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')`, ByGroups(LiteralStringAffix, LiteralStringChar, LiteralStringChar, LiteralStringChar), nil}, + {`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*`, LiteralNumberFloat, nil}, + {`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, + {`0x[0-9a-fA-F]+[LlUu]*`, LiteralNumberHex, nil}, + {`0[0-7]+[LlUu]*`, LiteralNumberOct, nil}, + {`\d+[LlUu]*`, LiteralNumberInteger, nil}, + {`\*/`, Error, nil}, + {`[~!%^&*+=|?:<>/-]`, Operator, nil}, + {`[()\[\],.]`, Punctuation, nil}, + {Words(``, `\b`, `asm`, `auto`, `break`, `case`, `const`, `continue`, `default`, `do`, `else`, `enum`, `extern`, `for`, `goto`, `if`, `register`, `restricted`, `return`, `sizeof`, `static`, `struct`, `switch`, `typedef`, `union`, `volatile`, `while`), Keyword, nil}, + {`(bool|int|long|float|short|double|char((8|16|32)_t)?|unsigned|signed|void|u?int(_fast|_least|)(8|16|32|64)_t)\b`, KeywordType, nil}, + {Words(``, `\b`, `inline`, `_inline`, `__inline`, `naked`, `restrict`, `thread`, `typename`), KeywordReserved, nil}, + {`(__m(128i|128d|128|64))\b`, KeywordReserved, nil}, + {Words(`__`, `\b`, `asm`, `int8`, `based`, `except`, `int16`, `stdcall`, `cdecl`, `fastcall`, `int32`, `declspec`, `finally`, `int64`, `try`, `leave`, `wchar_t`, `w64`, `unaligned`, `raise`, `noop`, `identifier`, `forceinline`, `assume`), KeywordReserved, nil}, + {`(true|false|NULL)\b`, NameBuiltin, nil}, + {`([a-zA-Z_]\w*)(\s*)(:)(?!:)`, ByGroups(NameLabel, Text, Punctuation), nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + }, + "root": { + Include("whitespace"), + {`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), Push("function")}, + {`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), nil}, + Default(Push("statement")), + }, + "statement": { + Include("whitespace"), + Include("statements"), + {`[{}]`, Punctuation, nil}, + {`;`, Punctuation, Pop(1)}, + }, + "function": { + Include("whitespace"), + Include("statements"), + {`;`, Punctuation, nil}, + {`\{`, Punctuation, Push()}, + {`\}`, Punctuation, Pop(1)}, + }, + "string": { + {`"`, LiteralString, Pop(1)}, + {`\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})`, LiteralStringEscape, nil}, + {`[^\\"\n]+`, LiteralString, nil}, + {`\\\n`, LiteralString, nil}, + {`\\`, LiteralString, nil}, + }, + "macro": { + {`(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)`, ByGroups(CommentPreproc, 
Text, CommentPreprocFile), nil}, + {`[^/\n]+`, CommentPreproc, nil}, + {`/[*](.|\n)*?[*]/`, CommentMultiline, nil}, + {`//.*?\n`, CommentSingle, Pop(1)}, + {`/`, CommentPreproc, nil}, + {`(?<=\\)\n`, CommentPreproc, nil}, + {`\n`, CommentPreproc, Pop(1)}, + }, + "if0": { + {`^\s*#if.*?(?\]`, NameDecorator, Push("matcher")}, + // These cannot have matchers but may have things that look like + // matchers in their arguments, so we just parse as a subdirective. + {`try_files`, Keyword, Push("subdirective")}, + // These are special, they can nest more directives + {`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")}, + // Any other directive + {`[^\s#]+`, Keyword, Push("directive")}, + Include("base"), + }, + "matcher": { + {`\{`, Punctuation, Push("block")}, + // Not can be one-liner + {`not`, Keyword, Push("deep_not_matcher")}, + // Any other same-line matcher + {`[^\s#]+`, Keyword, Push("arguments")}, + // Terminators + {`\n`, Text, Pop(1)}, + {`\}`, Punctuation, Pop(1)}, + Include("base"), + }, + "block": { + {`\}`, Punctuation, Pop(2)}, + // Not can be one-liner + {`not`, Keyword, Push("not_matcher")}, + // Any other subdirective + {`[^\s#]+`, Keyword, Push("subdirective")}, + Include("base"), + }, + "nested_block": { + {`\}`, Punctuation, Pop(2)}, + // Matcher definition + {`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")}, + // Something that starts with literally < is probably a docs stub + {`\<[^#]+\>`, Keyword, Push("nested_directive")}, + // Any other directive + {`[^\s#]+`, Keyword, Push("nested_directive")}, + Include("base"), + }, + "not_matcher": { + {`\}`, Punctuation, Pop(2)}, + {`\{(?=\s)`, Punctuation, Push("block")}, + {`[^\s#]+`, Keyword, Push("arguments")}, + {`\s+`, Text, nil}, + }, + "deep_not_matcher": { + {`\}`, Punctuation, Pop(2)}, + {`\{(?=\s)`, Punctuation, Push("block")}, + {`[^\s#]+`, Keyword, Push("deep_subdirective")}, + {`\s+`, Text, nil}, + }, + "directive": { + {`\{(?=\s)`, Punctuation, Push("block")}, + Include("matcher_token"), + Include("comments_pop_1"), + {`\n`, Text, Pop(1)}, + Include("base"), + }, + "nested_directive": { + {`\{(?=\s)`, Punctuation, Push("nested_block")}, + Include("matcher_token"), + Include("comments_pop_1"), + {`\n`, Text, Pop(1)}, + Include("base"), + }, + "subdirective": { + {`\{(?=\s)`, Punctuation, Push("block")}, + Include("comments_pop_1"), + {`\n`, Text, Pop(1)}, + Include("base"), + }, + "arguments": { + {`\{(?=\s)`, Punctuation, Push("block")}, + Include("comments_pop_2"), + {`\\\n`, Text, nil}, // Skip escaped newlines + {`\n`, Text, Pop(2)}, + Include("base"), + }, + "deep_subdirective": { + {`\{(?=\s)`, Punctuation, Push("block")}, + Include("comments_pop_3"), + {`\n`, Text, Pop(3)}, + Include("base"), + }, + "matcher_token": { + {`@[^\s]+`, NameDecorator, Push("arguments")}, // Named matcher + {`/[^\s]+`, NameDecorator, Push("arguments")}, // Path matcher + {`\*`, NameDecorator, Push("arguments")}, // Wildcard path matcher + {`\[\\]`, NameDecorator, Push("arguments")}, // Matcher token stub for docs + }, + "comments": { + {`^#.*\n`, CommentSingle, nil}, // Comment at start of line + {`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace + }, + "comments_pop_1": { + {`^#.*\n`, CommentSingle, Pop(1)}, // Comment at start of line + {`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace + }, + "comments_pop_2": { + {`^#.*\n`, CommentSingle, Pop(2)}, // Comment at start of line + {`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace + }, + 
"comments_pop_3": { + {`^#.*\n`, CommentSingle, Pop(3)}, // Comment at start of line + {`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace + }, + "base": { + Include("comments"), + {`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil}, + {`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil}, + {`[a-z-]+/[a-z-+]+`, LiteralString, nil}, + {`[0-9]+[km]?\b`, LiteralNumberInteger, nil}, + {`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder + {`\[(?=[^#{}$]+\])`, Punctuation, nil}, + {`\]|\|`, Punctuation, nil}, + {`[^\s#{}$\]]+`, LiteralString, nil}, + {`/[^\s#]*`, Name, nil}, + {`\s+`, Text, nil}, + }, + } +} + +// Caddyfile lexer. +var Caddyfile = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Caddyfile", + Aliases: []string{"caddyfile", "caddy"}, + Filenames: []string{"Caddyfile*"}, + MimeTypes: []string{}, + }, + caddyfileRules, +)) + +func caddyfileRules() Rules { + return Rules{ + "root": { + Include("comments"), + // Global options block + {`^\s*(\{)\s*$`, ByGroups(Punctuation), Push("globals")}, + // Snippets + {`(\([^\s#]+\))(\s*)(\{)`, ByGroups(NameVariableAnonymous, Text, Punctuation), Push("snippet")}, + // Site label + {`[^#{(\s,]+`, GenericHeading, Push("label")}, + // Site label with placeholder + {`\{[\w+.\$-]+\}`, LiteralStringEscape, Push("label")}, + {`\s+`, Text, nil}, + }, + "globals": { + {`\}`, Punctuation, Pop(1)}, + {`[^\s#]+`, Keyword, Push("directive")}, + Include("base"), + }, + "snippet": { + {`\}`, Punctuation, Pop(1)}, + // Matcher definition + {`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")}, + // Any directive + {`[^\s#]+`, Keyword, Push("directive")}, + Include("base"), + }, + "label": { + // Allow multiple labels, comma separated, newlines after + // a comma means another label is coming + {`,\s*\n?`, Text, nil}, + {` `, Text, nil}, + // Site label with placeholder + {`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, + // Site label + {`[^#{(\s,]+`, GenericHeading, nil}, + // Comment after non-block label (hack because comments end in \n) + {`#.*\n`, CommentSingle, Push("site_block")}, + // Note: if \n, we'll never pop out of the site_block, it's valid + {`\{(?=\s)|\n`, Punctuation, Push("site_block")}, + }, + "site_block": { + {`\}`, Punctuation, Pop(2)}, + Include("site_block_common"), + }, + }.Merge(caddyfileCommonRules()) +} + +// Caddyfile directive-only lexer. +var CaddyfileDirectives = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Caddyfile Directives", + Aliases: []string{"caddyfile-directives", "caddyfile-d", "caddy-d"}, + Filenames: []string{}, + MimeTypes: []string{}, + }, + caddyfileDirectivesRules, +)) + +func caddyfileDirectivesRules() Rules { + return Rules{ + // Same as "site_block" in Caddyfile + "root": { + Include("site_block_common"), + }, + }.Merge(caddyfileCommonRules()) +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/capnproto.go b/vendor/github.com/alecthomas/chroma/lexers/c/capnproto.go new file mode 100644 index 000000000..ec0d892c0 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/capnproto.go @@ -0,0 +1,65 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Cap'N'Proto Proto lexer. 
+var CapNProto = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Cap'n Proto", + Aliases: []string{"capnp"}, + Filenames: []string{"*.capnp"}, + MimeTypes: []string{}, + }, + capNProtoRules, +)) + +func capNProtoRules() Rules { + return Rules{ + "root": { + {`#.*?$`, CommentSingle, nil}, + {`@[0-9a-zA-Z]*`, NameDecorator, nil}, + {`=`, Literal, Push("expression")}, + {`:`, NameClass, Push("type")}, + {`\$`, NameAttribute, Push("annotation")}, + {`(struct|enum|interface|union|import|using|const|annotation|extends|in|of|on|as|with|from|fixed)\b`, Keyword, nil}, + {`[\w.]+`, Name, nil}, + {`[^#@=:$\w]+`, Text, nil}, + }, + "type": { + {`[^][=;,(){}$]+`, NameClass, nil}, + {`[[(]`, NameClass, Push("parentype")}, + Default(Pop(1)), + }, + "parentype": { + {`[^][;()]+`, NameClass, nil}, + {`[[(]`, NameClass, Push()}, + {`[])]`, NameClass, Pop(1)}, + Default(Pop(1)), + }, + "expression": { + {`[^][;,(){}$]+`, Literal, nil}, + {`[[(]`, Literal, Push("parenexp")}, + Default(Pop(1)), + }, + "parenexp": { + {`[^][;()]+`, Literal, nil}, + {`[[(]`, Literal, Push()}, + {`[])]`, Literal, Pop(1)}, + Default(Pop(1)), + }, + "annotation": { + {`[^][;,(){}=:]+`, NameAttribute, nil}, + {`[[(]`, NameAttribute, Push("annexp")}, + Default(Pop(1)), + }, + "annexp": { + {`[^][;()]+`, NameAttribute, nil}, + {`[[(]`, NameAttribute, Push()}, + {`[])]`, NameAttribute, Pop(1)}, + Default(Pop(1)), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/ceylon.go b/vendor/github.com/alecthomas/chroma/lexers/c/ceylon.go new file mode 100644 index 000000000..9d424d4b2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/ceylon.go @@ -0,0 +1,67 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Ceylon lexer. 
+var Ceylon = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Ceylon", + Aliases: []string{"ceylon"}, + Filenames: []string{"*.ceylon"}, + MimeTypes: []string{"text/x-ceylon"}, + DotAll: true, + }, + ceylonRules, +)) + +func ceylonRules() Rules { + return Rules{ + "root": { + {`^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil}, + {`[^\S\n]+`, Text, nil}, + {`//.*?\n`, CommentSingle, nil}, + {`/\*`, CommentMultiline, Push("comment")}, + {`(shared|abstract|formal|default|actual|variable|deprecated|small|late|literal|doc|by|see|throws|optional|license|tagged|final|native|annotation|sealed)\b`, NameDecorator, nil}, + {`(break|case|catch|continue|else|finally|for|in|if|return|switch|this|throw|try|while|is|exists|dynamic|nonempty|then|outer|assert|let)\b`, Keyword, nil}, + {`(abstracts|extends|satisfies|super|given|of|out|assign)\b`, KeywordDeclaration, nil}, + {`(function|value|void|new)\b`, KeywordType, nil}, + {`(assembly|module|package)(\s+)`, ByGroups(KeywordNamespace, Text), nil}, + {`(true|false|null)\b`, KeywordConstant, nil}, + {`(class|interface|object|alias)(\s+)`, ByGroups(KeywordDeclaration, Text), Push("class")}, + {`(import)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`'\\.'|'[^\\]'|'\\\{#[0-9a-fA-F]{4}\}'`, LiteralStringChar, nil}, + {"\".*``.*``.*\"", LiteralStringInterpol, nil}, + {`(\.)([a-z_]\w*)`, ByGroups(Operator, NameAttribute), nil}, + {`[a-zA-Z_]\w*:`, NameLabel, nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + {`[~^*!%&\[\](){}<>|+=:;,./?-]`, Operator, nil}, + {`\d{1,3}(_\d{3})+\.\d{1,3}(_\d{3})+[kMGTPmunpf]?`, LiteralNumberFloat, nil}, + {`\d{1,3}(_\d{3})+\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?`, LiteralNumberFloat, nil}, + {`[0-9][0-9]*\.\d{1,3}(_\d{3})+[kMGTPmunpf]?`, LiteralNumberFloat, nil}, + {`[0-9][0-9]*\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?`, LiteralNumberFloat, nil}, + {`#([0-9a-fA-F]{4})(_[0-9a-fA-F]{4})+`, LiteralNumberHex, nil}, + {`#[0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`\$([01]{4})(_[01]{4})+`, LiteralNumberBin, nil}, + {`\$[01]+`, LiteralNumberBin, nil}, + {`\d{1,3}(_\d{3})+[kMGTP]?`, LiteralNumberInteger, nil}, + {`[0-9]+[kMGTP]?`, LiteralNumberInteger, nil}, + {`\n`, Text, nil}, + }, + "class": { + {`[A-Za-z_]\w*`, NameClass, Pop(1)}, + }, + "import": { + {`[a-z][\w.]*`, NameNamespace, Pop(1)}, + }, + "comment": { + {`[^*/]`, CommentMultiline, nil}, + {`/\*`, CommentMultiline, Push()}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[*/]`, CommentMultiline, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/cfengine3.go b/vendor/github.com/alecthomas/chroma/lexers/c/cfengine3.go new file mode 100644 index 000000000..f3050346f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/cfengine3.go @@ -0,0 +1,60 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Cfengine3 lexer. 
+var Cfengine3 = internal.Register(MustNewLazyLexer( + &Config{ + Name: "CFEngine3", + Aliases: []string{"cfengine3", "cf3"}, + Filenames: []string{"*.cf"}, + MimeTypes: []string{}, + }, + cfengine3Rules, +)) + +func cfengine3Rules() Rules { + return Rules{ + "root": { + {`#.*?\n`, Comment, nil}, + {`(body)(\s+)(\S+)(\s+)(control)`, ByGroups(Keyword, Text, Keyword, Text, Keyword), nil}, + {`(body|bundle)(\s+)(\S+)(\s+)(\w+)(\()`, ByGroups(Keyword, Text, Keyword, Text, NameFunction, Punctuation), Push("arglist")}, + {`(body|bundle)(\s+)(\S+)(\s+)(\w+)`, ByGroups(Keyword, Text, Keyword, Text, NameFunction), nil}, + {`(")([^"]+)(")(\s+)(string|slist|int|real)(\s*)(=>)(\s*)`, ByGroups(Punctuation, NameVariable, Punctuation, Text, KeywordType, Text, Operator, Text), nil}, + {`(\S+)(\s*)(=>)(\s*)`, ByGroups(KeywordReserved, Text, Operator, Text), nil}, + {`"`, LiteralString, Push("string")}, + {`(\w+)(\()`, ByGroups(NameFunction, Punctuation), nil}, + {`([\w.!&|()]+)(::)`, ByGroups(NameClass, Punctuation), nil}, + {`(\w+)(:)`, ByGroups(KeywordDeclaration, Punctuation), nil}, + {`@[{(][^)}]+[})]`, NameVariable, nil}, + {`[(){},;]`, Punctuation, nil}, + {`=>`, Operator, nil}, + {`->`, Operator, nil}, + {`\d+\.\d+`, LiteralNumberFloat, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`\w+`, NameFunction, nil}, + {`\s+`, Text, nil}, + }, + "string": { + {`\$[{(]`, LiteralStringInterpol, Push("interpol")}, + {`\\.`, LiteralStringEscape, nil}, + {`"`, LiteralString, Pop(1)}, + {`\n`, LiteralString, nil}, + {`.`, LiteralString, nil}, + }, + "interpol": { + {`\$[{(]`, LiteralStringInterpol, Push()}, + {`[})]`, LiteralStringInterpol, Pop(1)}, + {`[^${()}]+`, LiteralStringInterpol, nil}, + }, + "arglist": { + {`\)`, Punctuation, Pop(1)}, + {`,`, Punctuation, nil}, + {`\w+`, NameVariable, nil}, + {`\s+`, Text, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/chaiscript.go b/vendor/github.com/alecthomas/chroma/lexers/c/chaiscript.go new file mode 100644 index 000000000..58db9aac9 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/chaiscript.go @@ -0,0 +1,67 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Chaiscript lexer. 
+var Chaiscript = internal.Register(MustNewLazyLexer( + &Config{ + Name: "ChaiScript", + Aliases: []string{"chai", "chaiscript"}, + Filenames: []string{"*.chai"}, + MimeTypes: []string{"text/x-chaiscript", "application/x-chaiscript"}, + DotAll: true, + }, + chaiscriptRules, +)) + +func chaiscriptRules() Rules { + return Rules{ + "commentsandwhitespace": { + {`\s+`, Text, nil}, + {`//.*?\n`, CommentSingle, nil}, + {`/\*.*?\*/`, CommentMultiline, nil}, + {`^\#.*?\n`, CommentSingle, nil}, + }, + "slashstartsregex": { + Include("commentsandwhitespace"), + {`/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/([gim]+\b|\B)`, LiteralStringRegex, Pop(1)}, + {`(?=/)`, Text, Push("#pop", "badregex")}, + Default(Pop(1)), + }, + "badregex": { + {`\n`, Text, Pop(1)}, + }, + "root": { + Include("commentsandwhitespace"), + {`\n`, Text, nil}, + {`[^\S\n]+`, Text, nil}, + {`\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|\.\.(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?`, Operator, Push("slashstartsregex")}, + {`[{(\[;,]`, Punctuation, Push("slashstartsregex")}, + {`[})\].]`, Punctuation, nil}, + {`[=+\-*/]`, Operator, nil}, + {`(for|in|while|do|break|return|continue|if|else|throw|try|catch)\b`, Keyword, Push("slashstartsregex")}, + {`(var)\b`, KeywordDeclaration, Push("slashstartsregex")}, + {`(attr|def|fun)\b`, KeywordReserved, nil}, + {`(true|false)\b`, KeywordConstant, nil}, + {`(eval|throw)\b`, NameBuiltin, nil}, + {"`\\S+`", NameBuiltin, nil}, + {`[$a-zA-Z_]\w*`, NameOther, nil}, + {`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`"`, LiteralStringDouble, Push("dqstring")}, + {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + }, + "dqstring": { + {`\$\{[^"}]+?\}`, LiteralStringInterpol, nil}, + {`\$`, LiteralStringDouble, nil}, + {`\\\\`, LiteralStringDouble, nil}, + {`\\"`, LiteralStringDouble, nil}, + {`[^\\"$]+`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/cheetah.go b/vendor/github.com/alecthomas/chroma/lexers/c/cheetah.go new file mode 100644 index 000000000..bd5fb9d9c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/cheetah.go @@ -0,0 +1,41 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" + . "github.com/alecthomas/chroma/lexers/p" // nolint +) + +// Cheetah lexer. +var Cheetah = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Cheetah", + Aliases: []string{"cheetah", "spitfire"}, + Filenames: []string{"*.tmpl", "*.spt"}, + MimeTypes: []string{"application/x-cheetah", "application/x-spitfire"}, + }, + cheetahRules, +)) + +func cheetahRules() Rules { + return Rules{ + "root": { + {`(##[^\n]*)$`, ByGroups(Comment), nil}, + {`#[*](.|\n)*?[*]#`, Comment, nil}, + {`#end[^#\n]*(?:#|$)`, CommentPreproc, nil}, + {`#slurp$`, CommentPreproc, nil}, + {`(#[a-zA-Z]+)([^#\n]*)(#|$)`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil}, + {`(\$)([a-zA-Z_][\w.]*\w)`, ByGroups(CommentPreproc, Using(Python)), nil}, + {`(\$\{!?)(.*?)(\})(?s)`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil}, + {`(?sx) + (.+?) 
# anything, followed by: + (?: + (?=\#[#a-zA-Z]*) | # an eval comment + (?=\$[a-zA-Z_{]) | # a substitution + \Z # end of string + ) + `, Other, nil}, + {`\s+`, Text, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/cl.go b/vendor/github.com/alecthomas/chroma/lexers/c/cl.go new file mode 100644 index 000000000..bb162734c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/cl.go @@ -0,0 +1,310 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +var ( + clBuiltinFunctions = []string{ + "<", "<=", "=", ">", ">=", "-", "/", "/=", "*", "+", "1-", "1+", + "abort", "abs", "acons", "acos", "acosh", "add-method", "adjoin", + "adjustable-array-p", "adjust-array", "allocate-instance", + "alpha-char-p", "alphanumericp", "append", "apply", "apropos", + "apropos-list", "aref", "arithmetic-error-operands", + "arithmetic-error-operation", "array-dimension", "array-dimensions", + "array-displacement", "array-element-type", "array-has-fill-pointer-p", + "array-in-bounds-p", "arrayp", "array-rank", "array-row-major-index", + "array-total-size", "ash", "asin", "asinh", "assoc", "assoc-if", + "assoc-if-not", "atan", "atanh", "atom", "bit", "bit-and", "bit-andc1", + "bit-andc2", "bit-eqv", "bit-ior", "bit-nand", "bit-nor", "bit-not", + "bit-orc1", "bit-orc2", "bit-vector-p", "bit-xor", "boole", + "both-case-p", "boundp", "break", "broadcast-stream-streams", + "butlast", "byte", "byte-position", "byte-size", "caaaar", "caaadr", + "caaar", "caadar", "caaddr", "caadr", "caar", "cadaar", "cadadr", + "cadar", "caddar", "cadddr", "caddr", "cadr", "call-next-method", "car", + "cdaaar", "cdaadr", "cdaar", "cdadar", "cdaddr", "cdadr", "cdar", + "cddaar", "cddadr", "cddar", "cdddar", "cddddr", "cdddr", "cddr", "cdr", + "ceiling", "cell-error-name", "cerror", "change-class", "char", "char<", + "char<=", "char=", "char>", "char>=", "char/=", "character", + "characterp", "char-code", "char-downcase", "char-equal", + "char-greaterp", "char-int", "char-lessp", "char-name", + "char-not-equal", "char-not-greaterp", "char-not-lessp", "char-upcase", + "cis", "class-name", "class-of", "clear-input", "clear-output", + "close", "clrhash", "code-char", "coerce", "compile", + "compiled-function-p", "compile-file", "compile-file-pathname", + "compiler-macro-function", "complement", "complex", "complexp", + "compute-applicable-methods", "compute-restarts", "concatenate", + "concatenated-stream-streams", "conjugate", "cons", "consp", + "constantly", "constantp", "continue", "copy-alist", "copy-list", + "copy-pprint-dispatch", "copy-readtable", "copy-seq", "copy-structure", + "copy-symbol", "copy-tree", "cos", "cosh", "count", "count-if", + "count-if-not", "decode-float", "decode-universal-time", "delete", + "delete-duplicates", "delete-file", "delete-if", "delete-if-not", + "delete-package", "denominator", "deposit-field", "describe", + "describe-object", "digit-char", "digit-char-p", "directory", + "directory-namestring", "disassemble", "documentation", "dpb", + "dribble", "echo-stream-input-stream", "echo-stream-output-stream", + "ed", "eighth", "elt", "encode-universal-time", "endp", + "enough-namestring", "ensure-directories-exist", + "ensure-generic-function", "eq", "eql", "equal", "equalp", "error", + "eval", "evenp", "every", "exp", "export", "expt", "fboundp", + "fceiling", "fdefinition", "ffloor", "fifth", "file-author", + "file-error-pathname", "file-length", "file-namestring", + "file-position", 
"file-string-length", "file-write-date", + "fill", "fill-pointer", "find", "find-all-symbols", "find-class", + "find-if", "find-if-not", "find-method", "find-package", "find-restart", + "find-symbol", "finish-output", "first", "float", "float-digits", + "floatp", "float-precision", "float-radix", "float-sign", "floor", + "fmakunbound", "force-output", "format", "fourth", "fresh-line", + "fround", "ftruncate", "funcall", "function-keywords", + "function-lambda-expression", "functionp", "gcd", "gensym", "gentemp", + "get", "get-decoded-time", "get-dispatch-macro-character", "getf", + "gethash", "get-internal-real-time", "get-internal-run-time", + "get-macro-character", "get-output-stream-string", "get-properties", + "get-setf-expansion", "get-universal-time", "graphic-char-p", + "hash-table-count", "hash-table-p", "hash-table-rehash-size", + "hash-table-rehash-threshold", "hash-table-size", "hash-table-test", + "host-namestring", "identity", "imagpart", "import", + "initialize-instance", "input-stream-p", "inspect", + "integer-decode-float", "integer-length", "integerp", + "interactive-stream-p", "intern", "intersection", + "invalid-method-error", "invoke-debugger", "invoke-restart", + "invoke-restart-interactively", "isqrt", "keywordp", "last", "lcm", + "ldb", "ldb-test", "ldiff", "length", "lisp-implementation-type", + "lisp-implementation-version", "list", "list*", "list-all-packages", + "listen", "list-length", "listp", "load", + "load-logical-pathname-translations", "log", "logand", "logandc1", + "logandc2", "logbitp", "logcount", "logeqv", "logical-pathname", + "logical-pathname-translations", "logior", "lognand", "lognor", + "lognot", "logorc1", "logorc2", "logtest", "logxor", "long-site-name", + "lower-case-p", "machine-instance", "machine-type", "machine-version", + "macroexpand", "macroexpand-1", "macro-function", "make-array", + "make-broadcast-stream", "make-concatenated-stream", "make-condition", + "make-dispatch-macro-character", "make-echo-stream", "make-hash-table", + "make-instance", "make-instances-obsolete", "make-list", + "make-load-form", "make-load-form-saving-slots", "make-package", + "make-pathname", "make-random-state", "make-sequence", "make-string", + "make-string-input-stream", "make-string-output-stream", "make-symbol", + "make-synonym-stream", "make-two-way-stream", "makunbound", "map", + "mapc", "mapcan", "mapcar", "mapcon", "maphash", "map-into", "mapl", + "maplist", "mask-field", "max", "member", "member-if", "member-if-not", + "merge", "merge-pathnames", "method-combination-error", + "method-qualifiers", "min", "minusp", "mismatch", "mod", + "muffle-warning", "name-char", "namestring", "nbutlast", "nconc", + "next-method-p", "nintersection", "ninth", "no-applicable-method", + "no-next-method", "not", "notany", "notevery", "nreconc", "nreverse", + "nset-difference", "nset-exclusive-or", "nstring-capitalize", + "nstring-downcase", "nstring-upcase", "nsublis", "nsubst", "nsubst-if", + "nsubst-if-not", "nsubstitute", "nsubstitute-if", "nsubstitute-if-not", + "nth", "nthcdr", "null", "numberp", "numerator", "nunion", "oddp", + "open", "open-stream-p", "output-stream-p", "package-error-package", + "package-name", "package-nicknames", "packagep", + "package-shadowing-symbols", "package-used-by-list", "package-use-list", + "pairlis", "parse-integer", "parse-namestring", "pathname", + "pathname-device", "pathname-directory", "pathname-host", + "pathname-match-p", "pathname-name", "pathnamep", "pathname-type", + "pathname-version", "peek-char", "phase", "plusp", 
"position", + "position-if", "position-if-not", "pprint", "pprint-dispatch", + "pprint-fill", "pprint-indent", "pprint-linear", "pprint-newline", + "pprint-tab", "pprint-tabular", "prin1", "prin1-to-string", "princ", + "princ-to-string", "print", "print-object", "probe-file", "proclaim", + "provide", "random", "random-state-p", "rassoc", "rassoc-if", + "rassoc-if-not", "rational", "rationalize", "rationalp", "read", + "read-byte", "read-char", "read-char-no-hang", "read-delimited-list", + "read-from-string", "read-line", "read-preserving-whitespace", + "read-sequence", "readtable-case", "readtablep", "realp", "realpart", + "reduce", "reinitialize-instance", "rem", "remhash", "remove", + "remove-duplicates", "remove-if", "remove-if-not", "remove-method", + "remprop", "rename-file", "rename-package", "replace", "require", + "rest", "restart-name", "revappend", "reverse", "room", "round", + "row-major-aref", "rplaca", "rplacd", "sbit", "scale-float", "schar", + "search", "second", "set", "set-difference", + "set-dispatch-macro-character", "set-exclusive-or", + "set-macro-character", "set-pprint-dispatch", "set-syntax-from-char", + "seventh", "shadow", "shadowing-import", "shared-initialize", + "short-site-name", "signal", "signum", "simple-bit-vector-p", + "simple-condition-format-arguments", "simple-condition-format-control", + "simple-string-p", "simple-vector-p", "sin", "sinh", "sixth", "sleep", + "slot-boundp", "slot-exists-p", "slot-makunbound", "slot-missing", + "slot-unbound", "slot-value", "software-type", "software-version", + "some", "sort", "special-operator-p", "sqrt", "stable-sort", + "standard-char-p", "store-value", "stream-element-type", + "stream-error-stream", "stream-external-format", "streamp", "string", + "string<", "string<=", "string=", "string>", "string>=", "string/=", + "string-capitalize", "string-downcase", "string-equal", + "string-greaterp", "string-left-trim", "string-lessp", + "string-not-equal", "string-not-greaterp", "string-not-lessp", + "stringp", "string-right-trim", "string-trim", "string-upcase", + "sublis", "subseq", "subsetp", "subst", "subst-if", "subst-if-not", + "substitute", "substitute-if", "substitute-if-not", "subtypep", "svref", + "sxhash", "symbol-function", "symbol-name", "symbolp", "symbol-package", + "symbol-plist", "symbol-value", "synonym-stream-symbol", "syntax:", + "tailp", "tan", "tanh", "tenth", "terpri", "third", + "translate-logical-pathname", "translate-pathname", "tree-equal", + "truename", "truncate", "two-way-stream-input-stream", + "two-way-stream-output-stream", "type-error-datum", + "type-error-expected-type", "type-of", "typep", "unbound-slot-instance", + "unexport", "unintern", "union", "unread-char", "unuse-package", + "update-instance-for-different-class", + "update-instance-for-redefined-class", "upgraded-array-element-type", + "upgraded-complex-part-type", "upper-case-p", "use-package", + "user-homedir-pathname", "use-value", "values", "values-list", "vector", + "vectorp", "vector-pop", "vector-push", "vector-push-extend", "warn", + "wild-pathname-p", "write", "write-byte", "write-char", "write-line", + "write-sequence", "write-string", "write-to-string", "yes-or-no-p", + "y-or-n-p", "zerop", + } + + clSpecialForms = []string{ + "block", "catch", "declare", "eval-when", "flet", "function", "go", "if", + "labels", "lambda", "let", "let*", "load-time-value", "locally", "macrolet", + "multiple-value-call", "multiple-value-prog1", "progn", "progv", "quote", + "return-from", "setq", "symbol-macrolet", "tagbody", "the", 
"throw", + "unwind-protect", + } + + clMacros = []string{ + "and", "assert", "call-method", "case", "ccase", "check-type", "cond", + "ctypecase", "decf", "declaim", "defclass", "defconstant", "defgeneric", + "define-compiler-macro", "define-condition", "define-method-combination", + "define-modify-macro", "define-setf-expander", "define-symbol-macro", + "defmacro", "defmethod", "defpackage", "defparameter", "defsetf", + "defstruct", "deftype", "defun", "defvar", "destructuring-bind", "do", + "do*", "do-all-symbols", "do-external-symbols", "dolist", "do-symbols", + "dotimes", "ecase", "etypecase", "formatter", "handler-bind", + "handler-case", "ignore-errors", "incf", "in-package", "lambda", "loop", + "loop-finish", "make-method", "multiple-value-bind", "multiple-value-list", + "multiple-value-setq", "nth-value", "or", "pop", + "pprint-exit-if-list-exhausted", "pprint-logical-block", "pprint-pop", + "print-unreadable-object", "prog", "prog*", "prog1", "prog2", "psetf", + "psetq", "push", "pushnew", "remf", "restart-bind", "restart-case", + "return", "rotatef", "setf", "shiftf", "step", "time", "trace", "typecase", + "unless", "untrace", "when", "with-accessors", "with-compilation-unit", + "with-condition-restarts", "with-hash-table-iterator", + "with-input-from-string", "with-open-file", "with-open-stream", + "with-output-to-string", "with-package-iterator", "with-simple-restart", + "with-slots", "with-standard-io-syntax", + } + + clLambdaListKeywords = []string{ + "&allow-other-keys", "&aux", "&body", "&environment", "&key", "&optional", + "&rest", "&whole", + } + + clDeclarations = []string{ + "dynamic-extent", "ignore", "optimize", "ftype", "inline", "special", + "ignorable", "notinline", "type", + } + + clBuiltinTypes = []string{ + "atom", "boolean", "base-char", "base-string", "bignum", "bit", + "compiled-function", "extended-char", "fixnum", "keyword", "nil", + "signed-byte", "short-float", "single-float", "double-float", "long-float", + "simple-array", "simple-base-string", "simple-bit-vector", "simple-string", + "simple-vector", "standard-char", "unsigned-byte", + + // Condition Types + "arithmetic-error", "cell-error", "condition", "control-error", + "division-by-zero", "end-of-file", "error", "file-error", + "floating-point-inexact", "floating-point-overflow", + "floating-point-underflow", "floating-point-invalid-operation", + "parse-error", "package-error", "print-not-readable", "program-error", + "reader-error", "serious-condition", "simple-condition", "simple-error", + "simple-type-error", "simple-warning", "stream-error", "storage-condition", + "style-warning", "type-error", "unbound-variable", "unbound-slot", + "undefined-function", "warning", + } + + clBuiltinClasses = []string{ + "array", "broadcast-stream", "bit-vector", "built-in-class", "character", + "class", "complex", "concatenated-stream", "cons", "echo-stream", + "file-stream", "float", "function", "generic-function", "hash-table", + "integer", "list", "logical-pathname", "method-combination", "method", + "null", "number", "package", "pathname", "ratio", "rational", "readtable", + "real", "random-state", "restart", "sequence", "standard-class", + "standard-generic-function", "standard-method", "standard-object", + "string-stream", "stream", "string", "structure-class", "structure-object", + "symbol", "synonym-stream", "t", "two-way-stream", "vector", + } +) + +// Common Lisp lexer. 
+var CommonLisp = internal.Register(TypeRemappingLexer(MustNewLazyLexer( + &Config{ + Name: "Common Lisp", + Aliases: []string{"common-lisp", "cl", "lisp"}, + Filenames: []string{"*.cl", "*.lisp"}, + MimeTypes: []string{"text/x-common-lisp"}, + CaseInsensitive: true, + }, + commonLispRules, +), TypeMapping{ + {NameVariable, NameFunction, clBuiltinFunctions}, + {NameVariable, Keyword, clSpecialForms}, + {NameVariable, NameBuiltin, clMacros}, + {NameVariable, Keyword, clLambdaListKeywords}, + {NameVariable, Keyword, clDeclarations}, + {NameVariable, KeywordType, clBuiltinTypes}, + {NameVariable, NameClass, clBuiltinClasses}, +})) + +func commonLispRules() Rules { + return Rules{ + "root": { + Default(Push("body")), + }, + "multiline-comment": { + {`#\|`, CommentMultiline, Push()}, + {`\|#`, CommentMultiline, Pop(1)}, + {`[^|#]+`, CommentMultiline, nil}, + {`[|#]`, CommentMultiline, nil}, + }, + "commented-form": { + {`\(`, CommentPreproc, Push()}, + {`\)`, CommentPreproc, Pop(1)}, + {`[^()]+`, CommentPreproc, nil}, + }, + "body": { + {`\s+`, Text, nil}, + {`;.*$`, CommentSingle, nil}, + {`#\|`, CommentMultiline, Push("multiline-comment")}, + {`#\d*Y.*$`, CommentSpecial, nil}, + {`"(\\.|\\\n|[^"\\])*"`, LiteralString, nil}, + {`:(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringSymbol, nil}, + {`::(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringSymbol, nil}, + {`:#(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringSymbol, nil}, + {`'(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringSymbol, nil}, + {`'`, Operator, nil}, + {"`", Operator, nil}, + {"[-+]?\\d+\\.?(?=[ \"()\\'\\n,;`])", LiteralNumberInteger, nil}, + {"[-+]?\\d+/\\d+(?=[ \"()\\'\\n,;`])", LiteralNumber, nil}, + {"[-+]?(\\d*\\.\\d+([defls][-+]?\\d+)?|\\d+(\\.\\d*)?[defls][-+]?\\d+)(?=[ \"()\\'\\n,;`])", LiteralNumberFloat, nil}, + {"#\\\\.(?=[ \"()\\'\\n,;`])", LiteralStringChar, nil}, + {`#\\(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringChar, nil}, + {`#\(`, Operator, Push("body")}, + {`#\d*\*[01]*`, LiteralOther, nil}, + {`#:(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringSymbol, nil}, + {`#[.,]`, Operator, nil}, + {`#\'`, NameFunction, nil}, + {`#b[+-]?[01]+(/[01]+)?`, LiteralNumberBin, nil}, + {`#o[+-]?[0-7]+(/[0-7]+)?`, LiteralNumberOct, nil}, + {`#x[+-]?[0-9a-f]+(/[0-9a-f]+)?`, LiteralNumberHex, nil}, + {`#\d+r[+-]?[0-9a-z]+(/[0-9a-z]+)?`, LiteralNumber, nil}, + {`(#c)(\()`, ByGroups(LiteralNumber, Punctuation), Push("body")}, + {`(#\d+a)(\()`, ByGroups(LiteralOther, Punctuation), Push("body")}, + {`(#s)(\()`, ByGroups(LiteralOther, Punctuation), Push("body")}, + {`#p?"(\\.|[^"])*"`, LiteralOther, nil}, + {`#\d+=`, Operator, nil}, + {`#\d+#`, Operator, nil}, + {"#+nil(?=[ \"()\\'\\n,;`])\\s*\\(", CommentPreproc, Push("commented-form")}, + {`#[+-]`, Operator, nil}, + {`(,@|,|\.)`, Operator, nil}, + {"(t|nil)(?=[ \"()\\'\\n,;`])", NameConstant, nil}, + {`\*(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)\*`, NameVariableGlobal, nil}, + {`(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, NameVariable, nil}, + {`\(`, Punctuation, Push("body")}, + {`\)`, Punctuation, Pop(1)}, + }, + } +} diff --git 
a/vendor/github.com/alecthomas/chroma/lexers/c/clojure.go b/vendor/github.com/alecthomas/chroma/lexers/c/clojure.go new file mode 100644 index 000000000..f99f906c2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/clojure.go @@ -0,0 +1,42 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Clojure lexer. +var Clojure = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Clojure", + Aliases: []string{"clojure", "clj"}, + Filenames: []string{"*.clj"}, + MimeTypes: []string{"text/x-clojure", "application/x-clojure"}, + }, + clojureRules, +)) + +func clojureRules() Rules { + return Rules{ + "root": { + {`;.*$`, CommentSingle, nil}, + {`[,\s]+`, Text, nil}, + {`-?\d+\.\d+`, LiteralNumberFloat, nil}, + {`-?\d+`, LiteralNumberInteger, nil}, + {`0x-?[abcdef\d]+`, LiteralNumberHex, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`'(?!#)[\w!$%*+<=>?/.#-]+`, LiteralStringSymbol, nil}, + {`\\(.|[a-z]+)`, LiteralStringChar, nil}, + {`::?#?(?!#)[\w!$%*+<=>?/.#-]+`, LiteralStringSymbol, nil}, + {"~@|[`\\'#^~&@]", Operator, nil}, + {Words(``, ` `, `.`, `def`, `do`, `fn`, `if`, `let`, `new`, `quote`, `var`, `loop`), Keyword, nil}, + {Words(``, ` `, `def-`, `defn`, `defn-`, `defmacro`, `defmulti`, `defmethod`, `defstruct`, `defonce`, `declare`, `definline`, `definterface`, `defprotocol`, `defrecord`, `deftype`, `defproject`, `ns`), KeywordDeclaration, nil}, + {Words(``, ` `, `*`, `+`, `-`, `->`, `/`, `<`, `<=`, `=`, `==`, `>`, `>=`, `..`, `accessor`, `agent`, `agent-errors`, `aget`, `alength`, `all-ns`, `alter`, `and`, `append-child`, `apply`, `array-map`, `aset`, `aset-boolean`, `aset-byte`, `aset-char`, `aset-double`, `aset-float`, `aset-int`, `aset-long`, `aset-short`, `assert`, `assoc`, `await`, `await-for`, `bean`, `binding`, `bit-and`, `bit-not`, `bit-or`, `bit-shift-left`, `bit-shift-right`, `bit-xor`, `boolean`, `branch?`, `butlast`, `byte`, `cast`, `char`, `children`, `class`, `clear-agent-errors`, `comment`, `commute`, `comp`, `comparator`, `complement`, `concat`, `conj`, `cons`, `constantly`, `cond`, `if-not`, `construct-proxy`, `contains?`, `count`, `create-ns`, `create-struct`, `cycle`, `dec`, `deref`, `difference`, `disj`, `dissoc`, `distinct`, `doall`, `doc`, `dorun`, `doseq`, `dosync`, `dotimes`, `doto`, `double`, `down`, `drop`, `drop-while`, `edit`, `end?`, `ensure`, `eval`, `every?`, `false?`, `ffirst`, `file-seq`, `filter`, `find`, `find-doc`, `find-ns`, `find-var`, `first`, `float`, `flush`, `for`, `fnseq`, `frest`, `gensym`, `get-proxy-class`, `get`, `hash-map`, `hash-set`, `identical?`, `identity`, `if-let`, `import`, `in-ns`, `inc`, `index`, `insert-child`, `insert-left`, `insert-right`, `inspect-table`, `inspect-tree`, `instance?`, `int`, `interleave`, `intersection`, `into`, `into-array`, `iterate`, `join`, `key`, `keys`, `keyword`, `keyword?`, `last`, `lazy-cat`, `lazy-cons`, `left`, `lefts`, `line-seq`, `list*`, `list`, `load`, `load-file`, `locking`, `long`, `loop`, `macroexpand`, `macroexpand-1`, `make-array`, `make-node`, `map`, `map-invert`, `map?`, `mapcat`, `max`, `max-key`, `memfn`, `merge`, `merge-with`, `meta`, `min`, `min-key`, `name`, `namespace`, `neg?`, `new`, `newline`, `next`, `nil?`, `node`, `not`, `not-any?`, `not-every?`, `not=`, `ns-imports`, `ns-interns`, `ns-map`, `ns-name`, `ns-publics`, `ns-refers`, `ns-resolve`, `ns-unmap`, `nth`, `nthrest`, `or`, `parse`, `partial`, `path`, `peek`, `pop`, `pos?`, `pr`, `pr-str`, `print`, `print-str`, `println`, 
`println-str`, `prn`, `prn-str`, `project`, `proxy`, `proxy-mappings`, `quot`, `rand`, `rand-int`, `range`, `re-find`, `re-groups`, `re-matcher`, `re-matches`, `re-pattern`, `re-seq`, `read`, `read-line`, `reduce`, `ref`, `ref-set`, `refer`, `rem`, `remove`, `remove-method`, `remove-ns`, `rename`, `rename-keys`, `repeat`, `replace`, `replicate`, `resolve`, `rest`, `resultset-seq`, `reverse`, `rfirst`, `right`, `rights`, `root`, `rrest`, `rseq`, `second`, `select`, `select-keys`, `send`, `send-off`, `seq`, `seq-zip`, `seq?`, `set`, `short`, `slurp`, `some`, `sort`, `sort-by`, `sorted-map`, `sorted-map-by`, `sorted-set`, `special-symbol?`, `split-at`, `split-with`, `str`, `string?`, `struct`, `struct-map`, `subs`, `subvec`, `symbol`, `symbol?`, `sync`, `take`, `take-nth`, `take-while`, `test`, `time`, `to-array`, `to-array-2d`, `tree-seq`, `true?`, `union`, `up`, `update-proxy`, `val`, `vals`, `var-get`, `var-set`, `var?`, `vector`, `vector-zip`, `vector?`, `when`, `when-first`, `when-let`, `when-not`, `with-local-vars`, `with-meta`, `with-open`, `with-out-str`, `xml-seq`, `xml-zip`, `zero?`, `zipmap`, `zipper`), NameBuiltin, nil}, + {`(?<=\()(?!#)[\w!$%*+<=>?/.#-]+`, NameFunction, nil}, + {`(?!#)[\w!$%*+<=>?/.#-]+`, NameVariable, nil}, + {`(\[|\])`, Punctuation, nil}, + {`(\{|\})`, Punctuation, nil}, + {`(\(|\))`, Punctuation, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/cmake.go b/vendor/github.com/alecthomas/chroma/lexers/c/cmake.go new file mode 100644 index 000000000..0e0708db4 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/cmake.go @@ -0,0 +1,48 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Cmake lexer. +var Cmake = internal.Register(MustNewLazyLexer( + &Config{ + Name: "CMake", + Aliases: []string{"cmake"}, + Filenames: []string{"*.cmake", "CMakeLists.txt"}, + MimeTypes: []string{"text/x-cmake"}, + }, + cmakeRules, +)) + +func cmakeRules() Rules { + return Rules{ + "root": { + {`\b(\w+)([ \t]*)(\()`, ByGroups(NameBuiltin, Text, Punctuation), Push("args")}, + Include("keywords"), + Include("ws"), + }, + "args": { + {`\(`, Punctuation, Push()}, + {`\)`, Punctuation, Pop(1)}, + {`(\$\{)(.+?)(\})`, ByGroups(Operator, NameVariable, Operator), nil}, + {`(\$ENV\{)(.+?)(\})`, ByGroups(Operator, NameVariable, Operator), nil}, + {`(\$<)(.+?)(>)`, ByGroups(Operator, NameVariable, Operator), nil}, + {`(?s)".*?"`, LiteralStringDouble, nil}, + {`\\\S+`, LiteralString, nil}, + {`[^)$"# \t\n]+`, LiteralString, nil}, + {`\n`, Text, nil}, + Include("keywords"), + Include("ws"), + }, + "string": {}, + "keywords": { + {`\b(WIN32|UNIX|APPLE|CYGWIN|BORLAND|MINGW|MSVC|MSVC_IDE|MSVC60|MSVC70|MSVC71|MSVC80|MSVC90)\b`, Keyword, nil}, + }, + "ws": { + {`[ \t]+`, Text, nil}, + {`#.*\n`, Comment, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/cobol.go b/vendor/github.com/alecthomas/chroma/lexers/c/cobol.go new file mode 100644 index 000000000..8b2f6d9bc --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/cobol.go @@ -0,0 +1,55 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Cobol lexer. 
+var Cobol = internal.Register(MustNewLazyLexer( + &Config{ + Name: "COBOL", + Aliases: []string{"cobol"}, + Filenames: []string{"*.cob", "*.COB", "*.cpy", "*.CPY"}, + MimeTypes: []string{"text/x-cobol"}, + CaseInsensitive: true, + }, + cobolRules, +)) + +func cobolRules() Rules { + return Rules{ + "root": { + Include("comment"), + Include("strings"), + Include("core"), + Include("nums"), + {`[a-z0-9]([\w\-]*[a-z0-9]+)?`, NameVariable, nil}, + {`[ \t]+`, Text, nil}, + }, + "comment": { + {`(^.{6}[*/].*\n|^.{6}|\*>.*\n)`, Comment, nil}, + }, + "core": { + {`(^|(?<=[^\w\-]))(ALL\s+)?((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)\s*($|(?=[^\w\-]))`, NameConstant, nil}, + {Words(`(^|(?<=[^\w\-]))`, `\s*($|(?=[^\w\-]))`, `ACCEPT`, `ADD`, `ALLOCATE`, `CALL`, `CANCEL`, `CLOSE`, `COMPUTE`, `CONFIGURATION`, `CONTINUE`, `DATA`, `DELETE`, `DISPLAY`, `DIVIDE`, `DIVISION`, `ELSE`, `END`, `END-ACCEPT`, `END-ADD`, `END-CALL`, `END-COMPUTE`, `END-DELETE`, `END-DISPLAY`, `END-DIVIDE`, `END-EVALUATE`, `END-IF`, `END-MULTIPLY`, `END-OF-PAGE`, `END-PERFORM`, `END-READ`, `END-RETURN`, `END-REWRITE`, `END-SEARCH`, `END-START`, `END-STRING`, `END-SUBTRACT`, `END-UNSTRING`, `END-WRITE`, `ENVIRONMENT`, `EVALUATE`, `EXIT`, `FD`, `FILE`, `FILE-CONTROL`, `FOREVER`, `FREE`, `GENERATE`, `GO`, `GOBACK`, `IDENTIFICATION`, `IF`, `INITIALIZE`, `INITIATE`, `INPUT-OUTPUT`, `INSPECT`, `INVOKE`, `I-O-CONTROL`, `LINKAGE`, `LOCAL-STORAGE`, `MERGE`, `MOVE`, `MULTIPLY`, `OPEN`, `PERFORM`, `PROCEDURE`, `PROGRAM-ID`, `RAISE`, `READ`, `RELEASE`, `RESUME`, `RETURN`, `REWRITE`, `SCREEN`, `SD`, `SEARCH`, `SECTION`, `SET`, `SORT`, `START`, `STOP`, `STRING`, `SUBTRACT`, `SUPPRESS`, `TERMINATE`, `THEN`, `UNLOCK`, `UNSTRING`, `USE`, `VALIDATE`, `WORKING-STORAGE`, `WRITE`), KeywordReserved, nil}, + {Words(`(^|(?<=[^\w\-]))`, `\s*($|(?=[^\w\-]))`, `ACCESS`, `ADDRESS`, `ADVANCING`, `AFTER`, `ALL`, `ALPHABET`, `ALPHABETIC`, `ALPHABETIC-LOWER`, `ALPHABETIC-UPPER`, `ALPHANUMERIC`, `ALPHANUMERIC-EDITED`, `ALSO`, `ALTER`, `ALTERNATEANY`, `ARE`, `AREA`, `AREAS`, `ARGUMENT-NUMBER`, `ARGUMENT-VALUE`, `AS`, `ASCENDING`, `ASSIGN`, `AT`, `AUTO`, `AUTO-SKIP`, `AUTOMATIC`, `AUTOTERMINATE`, `BACKGROUND-COLOR`, `BASED`, `BEEP`, `BEFORE`, `BELL`, `BLANK`, `BLINK`, `BLOCK`, `BOTTOM`, `BY`, `BYTE-LENGTH`, `CHAINING`, `CHARACTER`, `CHARACTERS`, `CLASS`, `CODE`, `CODE-SET`, `COL`, `COLLATING`, `COLS`, `COLUMN`, `COLUMNS`, `COMMA`, `COMMAND-LINE`, `COMMIT`, `COMMON`, `CONSTANT`, `CONTAINS`, `CONTENT`, `CONTROL`, `CONTROLS`, `CONVERTING`, `COPY`, `CORR`, `CORRESPONDING`, `COUNT`, `CRT`, `CURRENCY`, `CURSOR`, `CYCLE`, `DATE`, `DAY`, `DAY-OF-WEEK`, `DE`, `DEBUGGING`, `DECIMAL-POINT`, `DECLARATIVES`, `DEFAULT`, `DELIMITED`, `DELIMITER`, `DEPENDING`, `DESCENDING`, `DETAIL`, `DISK`, `DOWN`, `DUPLICATES`, `DYNAMIC`, `EBCDIC`, `ENTRY`, `ENVIRONMENT-NAME`, `ENVIRONMENT-VALUE`, `EOL`, `EOP`, `EOS`, `ERASE`, `ERROR`, `ESCAPE`, `EXCEPTION`, `EXCLUSIVE`, `EXTEND`, `EXTERNAL`, `FILE-ID`, `FILLER`, `FINAL`, `FIRST`, `FIXED`, `FLOAT-LONG`, `FLOAT-SHORT`, `FOOTING`, `FOR`, `FOREGROUND-COLOR`, `FORMAT`, `FROM`, `FULL`, `FUNCTION`, `FUNCTION-ID`, `GIVING`, `GLOBAL`, `GROUP`, `HEADING`, `HIGHLIGHT`, `I-O`, `ID`, `IGNORE`, `IGNORING`, `IN`, `INDEX`, `INDEXED`, `INDICATE`, `INITIAL`, `INITIALIZED`, `INPUT`, `INTO`, `INTRINSIC`, `INVALID`, `IS`, `JUST`, `JUSTIFIED`, `KEY`, `LABEL`, `LAST`, `LEADING`, `LEFT`, `LENGTH`, `LIMIT`, `LIMITS`, `LINAGE`, `LINAGE-COUNTER`, `LINE`, `LINES`, `LOCALE`, `LOCK`, `LOWLIGHT`, `MANUAL`, `MEMORY`, `MINUS`, `MODE`, `MULTIPLE`, `NATIONAL`, 
`NATIONAL-EDITED`, `NATIVE`, `NEGATIVE`, `NEXT`, `NO`, `NULL`, `NULLS`, `NUMBER`, `NUMBERS`, `NUMERIC`, `NUMERIC-EDITED`, `OBJECT-COMPUTER`, `OCCURS`, `OF`, `OFF`, `OMITTED`, `ON`, `ONLY`, `OPTIONAL`, `ORDER`, `ORGANIZATION`, `OTHER`, `OUTPUT`, `OVERFLOW`, `OVERLINE`, `PACKED-DECIMAL`, `PADDING`, `PAGE`, `PARAGRAPH`, `PLUS`, `POINTER`, `POSITION`, `POSITIVE`, `PRESENT`, `PREVIOUS`, `PRINTER`, `PRINTING`, `PROCEDURE-POINTER`, `PROCEDURES`, `PROCEED`, `PROGRAM`, `PROGRAM-POINTER`, `PROMPT`, `QUOTE`, `QUOTES`, `RANDOM`, `RD`, `RECORD`, `RECORDING`, `RECORDS`, `RECURSIVE`, `REDEFINES`, `REEL`, `REFERENCE`, `RELATIVE`, `REMAINDER`, `REMOVAL`, `RENAMES`, `REPLACING`, `REPORT`, `REPORTING`, `REPORTS`, `REPOSITORY`, `REQUIRED`, `RESERVE`, `RETURNING`, `REVERSE-VIDEO`, `REWIND`, `RIGHT`, `ROLLBACK`, `ROUNDED`, `RUN`, `SAME`, `SCROLL`, `SECURE`, `SEGMENT-LIMIT`, `SELECT`, `SENTENCE`, `SEPARATE`, `SEQUENCE`, `SEQUENTIAL`, `SHARING`, `SIGN`, `SIGNED`, `SIGNED-INT`, `SIGNED-LONG`, `SIGNED-SHORT`, `SIZE`, `SORT-MERGE`, `SOURCE`, `SOURCE-COMPUTER`, `SPECIAL-NAMES`, `STANDARD`, `STANDARD-1`, `STANDARD-2`, `STATUS`, `SUM`, `SYMBOLIC`, `SYNC`, `SYNCHRONIZED`, `TALLYING`, `TAPE`, `TEST`, `THROUGH`, `THRU`, `TIME`, `TIMES`, `TO`, `TOP`, `TRAILING`, `TRANSFORM`, `TYPE`, `UNDERLINE`, `UNIT`, `UNSIGNED`, `UNSIGNED-INT`, `UNSIGNED-LONG`, `UNSIGNED-SHORT`, `UNTIL`, `UP`, `UPDATE`, `UPON`, `USAGE`, `USING`, `VALUE`, `VALUES`, `VARYING`, `WAIT`, `WHEN`, `WITH`, `WORDS`, `YYYYDDD`, `YYYYMMDD`), KeywordPseudo, nil}, + {Words(`(^|(?<=[^\w\-]))`, `\s*($|(?=[^\w\-]))`, `ACTIVE-CLASS`, `ALIGNED`, `ANYCASE`, `ARITHMETIC`, `ATTRIBUTE`, `B-AND`, `B-NOT`, `B-OR`, `B-XOR`, `BIT`, `BOOLEAN`, `CD`, `CENTER`, `CF`, `CH`, `CHAIN`, `CLASS-ID`, `CLASSIFICATION`, `COMMUNICATION`, `CONDITION`, `DATA-POINTER`, `DESTINATION`, `DISABLE`, `EC`, `EGI`, `EMI`, `ENABLE`, `END-RECEIVE`, `ENTRY-CONVENTION`, `EO`, `ESI`, `EXCEPTION-OBJECT`, `EXPANDS`, `FACTORY`, `FLOAT-BINARY-16`, `FLOAT-BINARY-34`, `FLOAT-BINARY-7`, `FLOAT-DECIMAL-16`, `FLOAT-DECIMAL-34`, `FLOAT-EXTENDED`, `FORMAT`, `FUNCTION-POINTER`, `GET`, `GROUP-USAGE`, `IMPLEMENTS`, `INFINITY`, `INHERITS`, `INTERFACE`, `INTERFACE-ID`, `INVOKE`, `LC_ALL`, `LC_COLLATE`, `LC_CTYPE`, `LC_MESSAGES`, `LC_MONETARY`, `LC_NUMERIC`, `LC_TIME`, `LINE-COUNTER`, `MESSAGE`, `METHOD`, `METHOD-ID`, `NESTED`, `NONE`, `NORMAL`, `OBJECT`, `OBJECT-REFERENCE`, `OPTIONS`, `OVERRIDE`, `PAGE-COUNTER`, `PF`, `PH`, `PROPERTY`, `PROTOTYPE`, `PURGE`, `QUEUE`, `RAISE`, `RAISING`, `RECEIVE`, `RELATION`, `REPLACE`, `REPRESENTS-NOT-A-NUMBER`, `RESET`, `RESUME`, `RETRY`, `RF`, `RH`, `SECONDS`, `SEGMENT`, `SELF`, `SEND`, `SOURCES`, `STATEMENT`, `STEP`, `STRONG`, `SUB-QUEUE-1`, `SUB-QUEUE-2`, `SUB-QUEUE-3`, `SUPER`, `SYMBOL`, `SYSTEM-DEFAULT`, `TABLE`, `TERMINAL`, `TEXT`, `TYPEDEF`, `UCS-4`, `UNIVERSAL`, `USER-DEFAULT`, `UTF-16`, `UTF-8`, `VAL-STATUS`, `VALID`, `VALIDATE`, `VALIDATE-STATUS`), Error, nil}, + {`(^|(?<=[^\w\-]))(PIC\s+.+?(?=(\s|\.\s))|PICTURE\s+.+?(?=(\s|\.\s))|(COMPUTATIONAL)(-[1-5X])?|(COMP)(-[1-5X])?|BINARY-C-LONG|BINARY-CHAR|BINARY-DOUBLE|BINARY-LONG|BINARY-SHORT|BINARY)\s*($|(?=[^\w\-]))`, KeywordType, nil}, + {`(\*\*|\*|\+|-|/|<=|>=|<|>|==|/=|=)`, Operator, nil}, + {`([(),;:&%.])`, Punctuation, nil}, + 
{`(^|(?<=[^\w\-]))(ABS|ACOS|ANNUITY|ASIN|ATAN|BYTE-LENGTH|CHAR|COMBINED-DATETIME|CONCATENATE|COS|CURRENT-DATE|DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|FACTORIAL|FRACTION-PART|INTEGER-OF-(?:DATE|DAY|PART)|INTEGER|LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG(?:10)?|LOWER-CASE|MAX|MEAN|MEDIAN|MIDRANGE|MIN|MOD|NUMVAL(?:-C)?|ORD(?:-MAX|-MIN)?|PI|PRESENT-VALUE|RANDOM|RANGE|REM|REVERSE|SECONDS-FROM-FORMATTED-TIME|SECONDS-PAST-MIDNIGHT|SIGN|SIN|SQRT|STANDARD-DEVIATION|STORED-CHAR-LENGTH|SUBSTITUTE(?:-CASE)?|SUM|TAN|TEST-DATE-YYYYMMDD|TEST-DAY-YYYYDDD|TRIM|UPPER-CASE|VARIANCE|WHEN-COMPILED|YEAR-TO-YYYY)\s*($|(?=[^\w\-]))`, NameFunction, nil}, + {`(^|(?<=[^\w\-]))(true|false)\s*($|(?=[^\w\-]))`, NameBuiltin, nil}, + {`(^|(?<=[^\w\-]))(equal|equals|ne|lt|le|gt|ge|greater|less|than|not|and|or)\s*($|(?=[^\w\-]))`, OperatorWord, nil}, + }, + "strings": { + {`"[^"\n]*("|\n)`, LiteralStringDouble, nil}, + {`'[^'\n]*('|\n)`, LiteralStringSingle, nil}, + }, + "nums": { + {`\d+(\s*|\.$|$)`, LiteralNumberInteger, nil}, + {`[+-]?\d*\.\d+(E[-+]?\d+)?`, LiteralNumberFloat, nil}, + {`[+-]?\d+\.\d*(E[-+]?\d+)?`, LiteralNumberFloat, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/coffee.go b/vendor/github.com/alecthomas/chroma/lexers/c/coffee.go new file mode 100644 index 000000000..381a8fed8 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/coffee.go @@ -0,0 +1,95 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Coffeescript lexer. +var Coffeescript = internal.Register(MustNewLazyLexer( + &Config{ + Name: "CoffeeScript", + Aliases: []string{"coffee-script", "coffeescript", "coffee"}, + Filenames: []string{"*.coffee"}, + MimeTypes: []string{"text/coffeescript"}, + NotMultiline: true, + DotAll: true, + }, + coffeescriptRules, +)) + +func coffeescriptRules() Rules { + return Rules{ + "commentsandwhitespace": { + {`\s+`, Text, nil}, + {`###[^#].*?###`, CommentMultiline, nil}, + {`#(?!##[^#]).*?\n`, CommentSingle, nil}, + }, + "multilineregex": { + {`[^/#]+`, LiteralStringRegex, nil}, + {`///([gim]+\b|\B)`, LiteralStringRegex, Pop(1)}, + {`#\{`, LiteralStringInterpol, Push("interpoling_string")}, + {`[/#]`, LiteralStringRegex, nil}, + }, + "slashstartsregex": { + Include("commentsandwhitespace"), + {`///`, LiteralStringRegex, Push("#pop", "multilineregex")}, + {`/(?! 
)(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/([gim]+\b|\B)`, LiteralStringRegex, Pop(1)}, + {`/`, Operator, nil}, + Default(Pop(1)), + }, + "root": { + Include("commentsandwhitespace"), + {`^(?=\s|/)`, Text, Push("slashstartsregex")}, + {"\\+\\+|~|&&|\\band\\b|\\bor\\b|\\bis\\b|\\bisnt\\b|\\bnot\\b|\\?|:|\\|\\||\\\\(?=\\n)|(<<|>>>?|==?(?!>)|!=?|=(?!>)|-(?!>)|[<>+*`%&\\|\\^/])=?", Operator, Push("slashstartsregex")}, + {`(?:\([^()]*\))?\s*[=-]>`, NameFunction, Push("slashstartsregex")}, + {`[{(\[;,]`, Punctuation, Push("slashstartsregex")}, + {`[})\].]`, Punctuation, nil}, + {`(?=|<|>|==`, Operator, nil}, + {`mod\b`, Operator, nil}, + {`(eq|lt|gt|lte|gte|not|is|and|or)\b`, Operator, nil}, + {`\|\||&&`, Operator, nil}, + {`\?`, Operator, nil}, + {`"`, LiteralStringDouble, Push("string")}, + {`'.*?'`, LiteralStringSingle, nil}, + {`\d+`, LiteralNumber, nil}, + {`(if|else|len|var|xml|default|break|switch|component|property|function|do|try|catch|in|continue|for|return|while|required|any|array|binary|boolean|component|date|guid|numeric|query|string|struct|uuid|case)\b`, Keyword, nil}, + {`(true|false|null)\b`, KeywordConstant, nil}, + {`(application|session|client|cookie|super|this|variables|arguments)\b`, NameConstant, nil}, + {`([a-z_$][\w.]*)(\s*)(\()`, ByGroups(NameFunction, Text, Punctuation), nil}, + {`[a-z_$][\w.]*`, NameVariable, nil}, + {`[()\[\]{};:,.\\]`, Punctuation, nil}, + {`\s+`, Text, nil}, + }, + "string": { + {`""`, LiteralStringDouble, nil}, + {`#.+?#`, LiteralStringInterpol, nil}, + {`[^"#]+`, LiteralStringDouble, nil}, + {`#`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/coq.go b/vendor/github.com/alecthomas/chroma/lexers/c/coq.go new file mode 100644 index 000000000..e0103ef66 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/coq.go @@ -0,0 +1,67 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Coq lexer. 
+var Coq = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Coq", + Aliases: []string{"coq"}, + Filenames: []string{"*.v"}, + MimeTypes: []string{"text/x-coq"}, + }, + coqRules, +)) + +func coqRules() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`false|true|\(\)|\[\]`, NameBuiltinPseudo, nil}, + {`\(\*`, Comment, Push("comment")}, + {Words(`\b`, `\b`, `Section`, `Module`, `End`, `Require`, `Import`, `Export`, `Variable`, `Variables`, `Parameter`, `Parameters`, `Axiom`, `Hypothesis`, `Hypotheses`, `Notation`, `Local`, `Tactic`, `Reserved`, `Scope`, `Open`, `Close`, `Bind`, `Delimit`, `Definition`, `Let`, `Ltac`, `Fixpoint`, `CoFixpoint`, `Morphism`, `Relation`, `Implicit`, `Arguments`, `Set`, `Unset`, `Contextual`, `Strict`, `Prenex`, `Implicits`, `Inductive`, `CoInductive`, `Record`, `Structure`, `Canonical`, `Coercion`, `Theorem`, `Lemma`, `Corollary`, `Proposition`, `Fact`, `Remark`, `Example`, `Proof`, `Goal`, `Save`, `Qed`, `Defined`, `Hint`, `Resolve`, `Rewrite`, `View`, `Search`, `Show`, `Print`, `Printing`, `All`, `Graph`, `Projections`, `inside`, `outside`, `Check`, `Global`, `Instance`, `Class`, `Existing`, `Universe`, `Polymorphic`, `Monomorphic`, `Context`), KeywordNamespace, nil}, + {Words(`\b`, `\b`, `forall`, `exists`, `exists2`, `fun`, `fix`, `cofix`, `struct`, `match`, `end`, `in`, `return`, `let`, `if`, `is`, `then`, `else`, `for`, `of`, `nosimpl`, `with`, `as`), Keyword, nil}, + {Words(`\b`, `\b`, `Type`, `Prop`), KeywordType, nil}, + {Words(`\b`, `\b`, `pose`, `set`, `move`, `case`, `elim`, `apply`, `clear`, `hnf`, `intro`, `intros`, `generalize`, `rename`, `pattern`, `after`, `destruct`, `induction`, `using`, `refine`, `inversion`, `injection`, `rewrite`, `congr`, `unlock`, `compute`, `ring`, `field`, `replace`, `fold`, `unfold`, `change`, `cutrewrite`, `simpl`, `have`, `suff`, `wlog`, `suffices`, `without`, `loss`, `nat_norm`, `assert`, `cut`, `trivial`, `revert`, `bool_congr`, `nat_congr`, `symmetry`, `transitivity`, `auto`, `split`, `left`, `right`, `autorewrite`, `tauto`, `setoid_rewrite`, `intuition`, `eauto`, `eapply`, `econstructor`, `etransitivity`, `constructor`, `erewrite`, `red`, `cbv`, `lazy`, `vm_compute`, `native_compute`, `subst`), Keyword, nil}, + {Words(`\b`, `\b`, `by`, `done`, `exact`, `reflexivity`, `tauto`, `romega`, `omega`, `assumption`, `solve`, `contradiction`, `discriminate`, `congruence`), KeywordPseudo, nil}, + {Words(`\b`, `\b`, `do`, `last`, `first`, `try`, `idtac`, `repeat`), KeywordReserved, nil}, + {`\b([A-Z][\w\']*)`, Name, nil}, + {"(\u03bb|\u03a0|\\|\\}|\\{\\||\\\\/|/\\\\|=>|~|\\}|\\|]|\\||\\{<|\\{|`|_|]|\\[\\||\\[>|\\[<|\\[|\\?\\?|\\?|>\\}|>]|>|=|<->|<-|<|;;|;|:>|:=|::|:|\\.\\.|\\.|->|-\\.|-|,|\\+|\\*|\\)|\\(|&&|&|#|!=)", Operator, nil}, + {`([=<>@^|&+\*/$%-]|[!?~])?[!$%&*+\./:<=>?@^|~-]`, Operator, nil}, + {`\b(unit|nat|bool|string|ascii|list)\b`, KeywordType, nil}, + {`[^\W\d][\w']*`, Name, nil}, + {`\d[\d_]*`, LiteralNumberInteger, nil}, + {`0[xX][\da-fA-F][\da-fA-F_]*`, LiteralNumberHex, nil}, + {`0[oO][0-7][0-7_]*`, LiteralNumberOct, nil}, + {`0[bB][01][01_]*`, LiteralNumberBin, nil}, + {`-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)`, LiteralNumberFloat, nil}, + {`'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'`, LiteralStringChar, nil}, + {`'.'`, LiteralStringChar, nil}, + {`'`, Keyword, nil}, + {`"`, LiteralStringDouble, Push("string")}, + {`[~?][a-z][\w\']*:`, Name, nil}, + }, + "comment": { + {`[^(*)]+`, Comment, nil}, + {`\(\*`, Comment, Push()}, + {`\*\)`, Comment, Pop(1)}, + {`[(*)]`, Comment, 
nil}, + }, + "string": { + {`[^"]+`, LiteralStringDouble, nil}, + {`""`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + "dotted": { + {`\s+`, Text, nil}, + {`\.`, Punctuation, nil}, + {`[A-Z][\w\']*(?=\s*\.)`, NameNamespace, nil}, + {`[A-Z][\w\']*`, NameClass, Pop(1)}, + {`[a-z][a-z0-9_\']*`, Name, Pop(1)}, + Default(Pop(1)), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/cpp.go b/vendor/github.com/alecthomas/chroma/lexers/c/cpp.go new file mode 100644 index 000000000..3f4a1fde0 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/cpp.go @@ -0,0 +1,110 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// CPP lexer. +var CPP = internal.Register(MustNewLazyLexer( + &Config{ + Name: "C++", + Aliases: []string{"cpp", "c++"}, + Filenames: []string{"*.cpp", "*.hpp", "*.c++", "*.h++", "*.cc", "*.hh", "*.cxx", "*.hxx", "*.C", "*.H", "*.cp", "*.CPP"}, + MimeTypes: []string{"text/x-c++hdr", "text/x-c++src"}, + EnsureNL: true, + }, + cppRules, +)) + +func cppRules() Rules { + return Rules{ + "statements": { + {Words(``, `\b`, `catch`, `const_cast`, `delete`, `dynamic_cast`, `explicit`, `export`, `friend`, `mutable`, `namespace`, `new`, `operator`, `private`, `protected`, `public`, `reinterpret_cast`, `restrict`, `static_cast`, `template`, `this`, `throw`, `throws`, `try`, `typeid`, `typename`, `using`, `virtual`, `constexpr`, `nullptr`, `decltype`, `thread_local`, `alignas`, `alignof`, `static_assert`, `noexcept`, `override`, `final`, `concept`, `requires`, `consteval`, `co_await`, `co_return`, `co_yield`), Keyword, nil}, + {`(enum)\b(\s+)(class)\b(\s*)`, ByGroups(Keyword, Text, Keyword, Text), Push("classname")}, + {`(class|struct|enum|union)\b(\s*)`, ByGroups(Keyword, Text), Push("classname")}, + {`\[\[.+\]\]`, NameAttribute, nil}, + {`(R)(")([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(")`, ByGroups(LiteralStringAffix, LiteralString, LiteralStringDelimiter, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, LiteralString), nil}, + {`(u8|u|U)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")}, + {`(L?)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")}, + {`(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')`, ByGroups(LiteralStringAffix, LiteralStringChar, LiteralStringChar, LiteralStringChar), nil}, + {`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*`, LiteralNumberFloat, nil}, + {`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, + {`0[xX]([0-9A-Fa-f]('?[0-9A-Fa-f]+)*)[LlUu]*`, LiteralNumberHex, nil}, + {`0('?[0-7]+)+[LlUu]*`, LiteralNumberOct, nil}, + {`0[Bb][01]('?[01]+)*[LlUu]*`, LiteralNumberBin, nil}, + {`[0-9]('?[0-9]+)*[LlUu]*`, LiteralNumberInteger, nil}, + {`\*/`, Error, nil}, + {`[~!%^&*+=|?:<>/-]`, Operator, nil}, + {`[()\[\],.]`, Punctuation, nil}, + {Words(``, `\b`, `asm`, `auto`, `break`, `case`, `const`, `continue`, `default`, `do`, `else`, `enum`, `extern`, `for`, `goto`, `if`, `register`, `restricted`, `return`, `sizeof`, `static`, `struct`, `switch`, `typedef`, `union`, `volatile`, `while`), Keyword, nil}, + {`(bool|int|long|float|short|double|char((8|16|32)_t)?|wchar_t|unsigned|signed|void|u?int(_fast|_least|)(8|16|32|64)_t)\b`, KeywordType, nil}, + {Words(``, `\b`, `inline`, `_inline`, `__inline`, `naked`, `restrict`, `thread`, `typename`), KeywordReserved, nil}, + {`(__m(128i|128d|128|64))\b`, KeywordReserved, nil}, + {Words(`__`, `\b`, `asm`, `int8`, `based`, `except`, `int16`, `stdcall`, `cdecl`, 
`fastcall`, `int32`, `declspec`, `finally`, `int64`, `try`, `leave`, `w64`, `unaligned`, `raise`, `noop`, `identifier`, `forceinline`, `assume`), KeywordReserved, nil}, + {`(true|false|NULL)\b`, NameBuiltin, nil}, + {`([a-zA-Z_]\w*)(\s*)(:)(?!:)`, ByGroups(NameLabel, Text, Punctuation), nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + }, + "root": { + Include("whitespace"), + {`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), Push("function")}, + {`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), nil}, + Default(Push("statement")), + {Words(`__`, `\b`, `virtual_inheritance`, `uuidof`, `super`, `single_inheritance`, `multiple_inheritance`, `interface`, `event`), KeywordReserved, nil}, + {`__(offload|blockingoffload|outer)\b`, KeywordPseudo, nil}, + }, + "classname": { + {`(\[\[.+\]\])(\s*)`, ByGroups(NameAttribute, Text), nil}, + {`[a-zA-Z_]\w*`, NameClass, Pop(1)}, + {`\s*(?=[>{])`, Text, Pop(1)}, + }, + "whitespace": { + {`^#if\s+0`, CommentPreproc, Push("if0")}, + {`^#`, CommentPreproc, Push("macro")}, + {`^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("if0")}, + {`^(\s*(?:/[*].*?[*]/\s*)?)(#)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("macro")}, + {`\n`, Text, nil}, + {`\s+`, Text, nil}, + {`\\\n`, Text, nil}, + {`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil}, + {`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil}, + {`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil}, + }, + "statement": { + Include("whitespace"), + Include("statements"), + {`[{]`, Punctuation, Push("root")}, + {`[;}]`, Punctuation, Pop(1)}, + }, + "function": { + Include("whitespace"), + Include("statements"), + {`;`, Punctuation, nil}, + {`\{`, Punctuation, Push()}, + {`\}`, Punctuation, Pop(1)}, + }, + "string": { + {`"`, LiteralString, Pop(1)}, + {`\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})`, LiteralStringEscape, nil}, + {`[^\\"\n]+`, LiteralString, nil}, + {`\\\n`, LiteralString, nil}, + {`\\`, LiteralString, nil}, + }, + "macro": { + {`(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)`, ByGroups(CommentPreproc, Text, CommentPreprocFile), nil}, + {`[^/\n]+`, CommentPreproc, nil}, + {`/[*](.|\n)*?[*]/`, CommentMultiline, nil}, + {`//.*?\n`, CommentSingle, Pop(1)}, + {`/`, CommentPreproc, nil}, + {`(?<=\\)\n`, CommentPreproc, nil}, + {`\n`, CommentPreproc, Pop(1)}, + }, + "if0": { + {`^\s*#if.*?(?=~!@#%^&|`?-]+", Operator, nil}, + { + `(?s)(java|javascript)(\s+)(AS)(\s+)('|\$\$)(.*?)(\5)`, + UsingByGroup( + internal.Get, + 1, 6, + NameBuiltin, TextWhitespace, Keyword, TextWhitespace, + LiteralStringHeredoc, LiteralStringHeredoc, LiteralStringHeredoc, + ), + nil, + }, + {`(true|false|null)\b`, KeywordConstant, nil}, + {`0x[0-9a-f]+`, LiteralNumberHex, nil}, + {`[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}`, LiteralNumberHex, nil}, + {`\.[0-9]+(e[+-]?[0-9]+)?`, Error, nil}, + {`-?[0-9]+(\.[0-9])?(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`'`, LiteralStringSingle, Push("string")}, + {`"`, LiteralStringName, Push("quoted-ident")}, + {`\$\$`, LiteralStringHeredoc, Push("dollar-string")}, + {`[a-z_]\w*`, Name, nil}, + {`:(['"]?)[a-z]\w*\b\1`, NameVariable, nil}, + {`[;:()\[\]\{\},.]`, Punctuation, nil}, + }, + "multiline-comments": { + {`/\*`, CommentMultiline, 
Push("multiline-comments")}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[^/*]+`, CommentMultiline, nil}, + {`[/*]`, CommentMultiline, nil}, + }, + "string": { + {`[^']+`, LiteralStringSingle, nil}, + {`''`, LiteralStringSingle, nil}, + {`'`, LiteralStringSingle, Pop(1)}, + }, + "quoted-ident": { + {`[^"]+`, LiteralStringName, nil}, + {`""`, LiteralStringName, nil}, + {`"`, LiteralStringName, Pop(1)}, + }, + "dollar-string": { + {`[^\$]+`, LiteralStringHeredoc, nil}, + {`\$\$`, LiteralStringHeredoc, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/crystal.go b/vendor/github.com/alecthomas/chroma/lexers/c/crystal.go new file mode 100644 index 000000000..f06830d29 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/crystal.go @@ -0,0 +1,266 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Crystal lexer. +var Crystal = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Crystal", + Aliases: []string{"cr", "crystal"}, + Filenames: []string{"*.cr"}, + MimeTypes: []string{"text/x-crystal"}, + DotAll: true, + }, + crystalRules, +)) + +func crystalRules() Rules { + return Rules{ + "root": { + {`#.*?$`, CommentSingle, nil}, + {Words(``, `\b`, `abstract`, `asm`, `as`, `begin`, `break`, `case`, `do`, `else`, `elsif`, `end`, `ensure`, `extend`, `ifdef`, `if`, `include`, `instance_sizeof`, `next`, `of`, `pointerof`, `private`, `protected`, `rescue`, `return`, `require`, `sizeof`, `super`, `then`, `typeof`, `unless`, `until`, `when`, `while`, `with`, `yield`), Keyword, nil}, + {Words(``, `\b`, `true`, `false`, `nil`), KeywordConstant, nil}, + {`(module|lib)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)`, ByGroups(Keyword, Text, NameNamespace), nil}, + {`(def|fun|macro)(\s+)((?:[a-zA-Z_]\w*::)*)`, ByGroups(Keyword, Text, NameNamespace), Push("funcname")}, + {"def(?=[*%&^`~+-/\\[<>=])", Keyword, Push("funcname")}, + {`(class|struct|union|type|alias|enum)(\s+)((?:[a-zA-Z_]\w*::)*)`, ByGroups(Keyword, Text, NameNamespace), Push("classname")}, + {`(self|out|uninitialized)\b|(is_a|responds_to)\?`, KeywordPseudo, nil}, + {Words(``, `\b`, `debugger`, `record`, `pp`, `assert_responds_to`, `spawn`, `parallel`, `getter`, `setter`, `property`, `delegate`, `def_hash`, `def_equals`, `def_equals_and_hash`, `forward_missing_to`), NameBuiltinPseudo, nil}, + {`getter[!?]|property[!?]|__(DIR|FILE|LINE)__\b`, NameBuiltinPseudo, nil}, + {Words(`(?~!:])|(?<=(?:\s|;)when\s)|(?<=(?:\s|;)or\s)|(?<=(?:\s|;)and\s)|(?<=\.index\s)|(?<=\.scan\s)|(?<=\.sub\s)|(?<=\.sub!\s)|(?<=\.gsub\s)|(?<=\.gsub!\s)|(?<=\.match\s)|(?<=(?:\s|;)if\s)|(?<=(?:\s|;)elsif\s)|(?<=^when\s)|(?<=^index\s)|(?<=^scan\s)|(?<=^sub\s)|(?<=^gsub\s)|(?<=^sub!\s)|(?<=^gsub!\s)|(?<=^match\s)|(?<=^if\s)|(?<=^elsif\s))(\s*)(/)`, ByGroups(Text, LiteralStringRegex), Push("multiline-regex")}, + {`(?<=\(|,|\[)/`, LiteralStringRegex, Push("multiline-regex")}, + {`(\s+)(/)(?![\s=])`, ByGroups(Text, LiteralStringRegex), Push("multiline-regex")}, + {`(0o[0-7]+(?:_[0-7]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?`, ByGroups(LiteralNumberOct, Text, Operator), nil}, + {`(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?`, ByGroups(LiteralNumberHex, Text, Operator), nil}, + {`(0b[01]+(?:_[01]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?`, ByGroups(LiteralNumberBin, Text, Operator), nil}, + {`((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)(?:e[+-]?[0-9]+)?(?:_?f[0-9]+)?)(\s*)([/?])?`, ByGroups(LiteralNumberFloat, Text, Operator), nil}, + 
{`((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)(?:_?f[0-9]+)?)(\s*)([/?])?`, ByGroups(LiteralNumberFloat, Text, Operator), nil}, + {`((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)?(?:_?f[0-9]+))(\s*)([/?])?`, ByGroups(LiteralNumberFloat, Text, Operator), nil}, + {`(0\b|[1-9][\d]*(?:_\d+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?`, ByGroups(LiteralNumberInteger, Text, Operator), nil}, + {`@@[a-zA-Z_]\w*`, NameVariableClass, nil}, + {`@[a-zA-Z_]\w*`, NameVariableInstance, nil}, + {`\$\w+`, NameVariableGlobal, nil}, + {"\\$[!@&`\\'+~=/\\\\,;.<>_*$?:\"^-]", NameVariableGlobal, nil}, + {`\$-[0adFiIlpvw]`, NameVariableGlobal, nil}, + {`::`, Operator, nil}, + Include("strings"), + {`\?(\\[MC]-)*(\\([\\befnrtv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)(?!\w)`, LiteralStringChar, nil}, + {`[A-Z][A-Z_]+\b`, NameConstant, nil}, + {`\{%`, LiteralStringInterpol, Push("in-macro-control")}, + {`\{\{`, LiteralStringInterpol, Push("in-macro-expr")}, + {`(@\[)(\s*)([A-Z]\w*)`, ByGroups(Operator, Text, NameDecorator), Push("in-attr")}, + {Words(`(\.|::)`, ``, `!=`, `!~`, `!`, `%`, `&&`, `&`, `**`, `*`, `+`, `-`, `/`, `<=>`, `<<`, `<=`, `<`, `===`, `==`, `=~`, `=`, `>=`, `>>`, `>`, `[]=`, `[]?`, `[]`, `^`, `||`, `|`, `~`), ByGroups(Operator, NameOperator), nil}, + {"(\\.|::)([a-zA-Z_]\\w*[!?]?|[*%&^`~+\\-/\\[<>=])", ByGroups(Operator, Name), nil}, + {`[a-zA-Z_]\w*(?:[!?](?!=))?`, Name, nil}, + {`(\[|\]\??|\*\*|<=>?|>=|<>?|=~|===|!~|&&?|\|\||\.{1,3})`, Operator, nil}, + {`[-+/*%=<>&!^|~]=?`, Operator, nil}, + {`[(){};,/?:\\]`, Punctuation, nil}, + {`\s+`, Text, nil}, + }, + "funcname": { + {"(?:([a-zA-Z_]\\w*)(\\.))?([a-zA-Z_]\\w*[!?]?|\\*\\*?|[-+]@?|[/%&|^`~]|\\[\\]=?|<<|>>|<=?>|>=?|===?)", ByGroups(NameClass, Operator, NameFunction), Pop(1)}, + Default(Pop(1)), + }, + "classname": { + {`[A-Z_]\w*`, NameClass, nil}, + {`(\()(\s*)([A-Z_]\w*)(\s*)(\))`, ByGroups(Punctuation, Text, NameClass, Text, Punctuation), nil}, + Default(Pop(1)), + }, + "in-intp": { + {`\{`, LiteralStringInterpol, Push()}, + {`\}`, LiteralStringInterpol, Pop(1)}, + Include("root"), + }, + "string-intp": { + {`#\{`, LiteralStringInterpol, Push("in-intp")}, + }, + "string-escaped": { + {`\\([\\befnstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})`, LiteralStringEscape, nil}, + }, + "string-intp-escaped": { + Include("string-intp"), + Include("string-escaped"), + }, + "interpolated-regex": { + Include("string-intp"), + {`[\\#]`, LiteralStringRegex, nil}, + {`[^\\#]+`, LiteralStringRegex, nil}, + }, + "interpolated-string": { + Include("string-intp"), + {`[\\#]`, LiteralStringOther, nil}, + {`[^\\#]+`, LiteralStringOther, nil}, + }, + "multiline-regex": { + Include("string-intp"), + {`\\\\`, LiteralStringRegex, nil}, + {`\\/`, LiteralStringRegex, nil}, + {`[\\#]`, LiteralStringRegex, nil}, + {`[^\\/#]+`, LiteralStringRegex, nil}, + {`/[imsx]*`, LiteralStringRegex, Pop(1)}, + }, + "end-part": { + {`.+`, CommentPreproc, Pop(1)}, + }, + "in-macro-control": { + {`\{%`, LiteralStringInterpol, Push()}, + {`%\}`, LiteralStringInterpol, Pop(1)}, + {`for\b|in\b`, Keyword, nil}, + Include("root"), + }, + "in-macro-expr": { + {`\{\{`, LiteralStringInterpol, Push()}, + {`\}\}`, LiteralStringInterpol, Pop(1)}, + Include("root"), + }, + "in-attr": { + {`\[`, Operator, Push()}, + {`\]`, Operator, Pop(1)}, + Include("root"), + }, + "strings": { + {`\:@{0,2}[a-zA-Z_]\w*[!?]?`, LiteralStringSymbol, nil}, + {Words(`\:@{0,2}`, ``, `!=`, `!~`, `!`, `%`, `&&`, `&`, `**`, `*`, `+`, `-`, `/`, `<=>`, `<<`, `<=`, `<`, `===`, `==`, `=~`, `=`, `>=`, `>>`, `>`, `[]=`, 
`[]?`, `[]`, `^`, `||`, `|`, `~`), LiteralStringSymbol, nil}, + {`:'(\\\\|\\'|[^'])*'`, LiteralStringSymbol, nil}, + {`'(\\\\|\\'|[^']|\\[^'\\]+)'`, LiteralStringChar, nil}, + {`:"`, LiteralStringSymbol, Push("simple-sym")}, + {`([a-zA-Z_]\w*)(:)(?!:)`, ByGroups(LiteralStringSymbol, Punctuation), nil}, + {`"`, LiteralStringDouble, Push("simple-string")}, + {"(?&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)`, ByGroups(Text, LiteralStringOther, None), nil}, + {`^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)`, ByGroups(Text, LiteralStringOther, None), nil}, + {`(%([\[{(<]))((?:\\\2|(?!\2).)*)(\2)`, String, nil}, + }, + "simple-string": { + Include("string-intp-escaped"), + {`[^\\"#]+`, LiteralStringDouble, nil}, + {`[\\#]`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + "simple-sym": { + Include("string-escaped"), + {`[^\\"#]+`, LiteralStringSymbol, nil}, + {`[\\#]`, LiteralStringSymbol, nil}, + {`"`, LiteralStringSymbol, Pop(1)}, + }, + "simple-backtick": { + Include("string-intp-escaped"), + {"[^\\\\`#]+", LiteralStringBacktick, nil}, + {`[\\#]`, LiteralStringBacktick, nil}, + {"`", LiteralStringBacktick, Pop(1)}, + }, + "cb-intp-string": { + {`\\[\{]`, LiteralStringOther, nil}, + {`\{`, LiteralStringOther, Push()}, + {`\}`, LiteralStringOther, Pop(1)}, + Include("string-intp-escaped"), + {`[\\#{}]`, LiteralStringOther, nil}, + {`[^\\#{}]+`, LiteralStringOther, nil}, + }, + "cb-string": { + {`\\[\\{}]`, LiteralStringOther, nil}, + {`\{`, LiteralStringOther, Push()}, + {`\}`, LiteralStringOther, Pop(1)}, + {`[\\#{}]`, LiteralStringOther, nil}, + {`[^\\#{}]+`, LiteralStringOther, nil}, + }, + "cb-regex": { + {`\\[\\{}]`, LiteralStringRegex, nil}, + {`\{`, LiteralStringRegex, Push()}, + {`\}[imsx]*`, LiteralStringRegex, Pop(1)}, + Include("string-intp"), + {`[\\#{}]`, LiteralStringRegex, nil}, + {`[^\\#{}]+`, LiteralStringRegex, nil}, + }, + "sb-intp-string": { + {`\\[\[]`, LiteralStringOther, nil}, + {`\[`, LiteralStringOther, Push()}, + {`\]`, LiteralStringOther, Pop(1)}, + Include("string-intp-escaped"), + {`[\\#\[\]]`, LiteralStringOther, nil}, + {`[^\\#\[\]]+`, LiteralStringOther, nil}, + }, + "sb-string": { + {`\\[\\\[\]]`, LiteralStringOther, nil}, + {`\[`, LiteralStringOther, Push()}, + {`\]`, LiteralStringOther, Pop(1)}, + {`[\\#\[\]]`, LiteralStringOther, nil}, + {`[^\\#\[\]]+`, LiteralStringOther, nil}, + }, + "sb-regex": { + {`\\[\\\[\]]`, LiteralStringRegex, nil}, + {`\[`, LiteralStringRegex, Push()}, + {`\][imsx]*`, LiteralStringRegex, Pop(1)}, + Include("string-intp"), + {`[\\#\[\]]`, LiteralStringRegex, nil}, + {`[^\\#\[\]]+`, LiteralStringRegex, nil}, + }, + "pa-intp-string": { + {`\\[\(]`, LiteralStringOther, nil}, + {`\(`, LiteralStringOther, Push()}, + {`\)`, LiteralStringOther, Pop(1)}, + Include("string-intp-escaped"), + {`[\\#()]`, LiteralStringOther, nil}, + {`[^\\#()]+`, LiteralStringOther, nil}, + }, + "pa-string": { + {`\\[\\()]`, LiteralStringOther, nil}, + {`\(`, LiteralStringOther, Push()}, + {`\)`, LiteralStringOther, Pop(1)}, + {`[\\#()]`, LiteralStringOther, nil}, + {`[^\\#()]+`, LiteralStringOther, nil}, + }, + "pa-regex": { + {`\\[\\()]`, LiteralStringRegex, nil}, + {`\(`, LiteralStringRegex, Push()}, + {`\)[imsx]*`, LiteralStringRegex, Pop(1)}, + Include("string-intp"), + {`[\\#()]`, LiteralStringRegex, nil}, + {`[^\\#()]+`, LiteralStringRegex, nil}, + }, + "ab-intp-string": { + {`\\[<]`, LiteralStringOther, nil}, + {`<`, LiteralStringOther, Push()}, + {`>`, LiteralStringOther, Pop(1)}, + Include("string-intp-escaped"), + {`[\\#<>]`, 
LiteralStringOther, nil}, + {`[^\\#<>]+`, LiteralStringOther, nil}, + }, + "ab-string": { + {`\\[\\<>]`, LiteralStringOther, nil}, + {`<`, LiteralStringOther, Push()}, + {`>`, LiteralStringOther, Pop(1)}, + {`[\\#<>]`, LiteralStringOther, nil}, + {`[^\\#<>]+`, LiteralStringOther, nil}, + }, + "ab-regex": { + {`\\[\\<>]`, LiteralStringRegex, nil}, + {`<`, LiteralStringRegex, Push()}, + {`>[imsx]*`, LiteralStringRegex, Pop(1)}, + Include("string-intp"), + {`[\\#<>]`, LiteralStringRegex, nil}, + {`[^\\#<>]+`, LiteralStringRegex, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/csharp.go b/vendor/github.com/alecthomas/chroma/lexers/c/csharp.go new file mode 100644 index 000000000..4776864db --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/csharp.go @@ -0,0 +1,56 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// CSharp lexer. +var CSharp = internal.Register(MustNewLazyLexer( + &Config{ + Name: "C#", + Aliases: []string{"csharp", "c#"}, + Filenames: []string{"*.cs"}, + MimeTypes: []string{"text/x-csharp"}, + DotAll: true, + EnsureNL: true, + }, + cSharpRules, +)) + +func cSharpRules() Rules { + return Rules{ + "root": { + {`^\s*\[.*?\]`, NameAttribute, nil}, + {`[^\S\n]+`, Text, nil}, + {`\\\n`, Text, nil}, + {`///[^\n\r]+`, CommentSpecial, nil}, + {`//[^\n\r]+`, CommentSingle, nil}, + {`/[*].*?[*]/`, CommentMultiline, nil}, + {`\n`, Text, nil}, + {`[~!%^&*()+=|\[\]:;,.<>/?-]`, Punctuation, nil}, + {`[{}]`, Punctuation, nil}, + {`@"(""|[^"])*"`, LiteralString, nil}, + {`\$@?"(""|[^"])*"`, LiteralString, nil}, + {`"(\\\\|\\"|[^"\n])*["\n]`, LiteralString, nil}, + {`'\\.'|'[^\\]'`, LiteralStringChar, nil}, + {`0[xX][0-9a-fA-F]+[Ll]?|[0-9_](\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?`, LiteralNumber, nil}, + {`#[ \t]*(if|endif|else|elif|define|undef|line|error|warning|region|endregion|pragma|nullable)\b[^\n\r]+`, CommentPreproc, nil}, + {`\b(extern)(\s+)(alias)\b`, ByGroups(Keyword, Text, Keyword), nil}, + {`(abstract|as|async|await|base|break|by|case|catch|checked|const|continue|default|delegate|do|else|enum|event|explicit|extern|false|finally|fixed|for|foreach|goto|if|implicit|in|init|internal|is|let|lock|new|null|on|operator|out|override|params|private|protected|public|readonly|ref|return|sealed|sizeof|stackalloc|static|switch|this|throw|true|try|typeof|unchecked|unsafe|virtual|void|while|get|set|new|partial|yield|add|remove|value|alias|ascending|descending|from|group|into|orderby|select|thenby|where|join|equals)\b`, Keyword, nil}, + {`(global)(::)`, ByGroups(Keyword, Punctuation), nil}, + {`(bool|byte|char|decimal|double|dynamic|float|int|long|object|sbyte|short|string|uint|ulong|ushort|var)\b\??`, KeywordType, nil}, + {`(class|struct|record|interface)(\s+)`, ByGroups(Keyword, Text), Push("class")}, + {`(namespace|using)(\s+)`, ByGroups(Keyword, Text), Push("namespace")}, + {`@?[_a-zA-Z]\w*`, Name, nil}, + }, + "class": { + {`@?[_a-zA-Z]\w*`, NameClass, Pop(1)}, + Default(Pop(1)), + }, + "namespace": { + {`(?=\()`, Text, Pop(1)}, + {`(@?[_a-zA-Z]\w*|\.)+`, NameNamespace, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/css.go b/vendor/github.com/alecthomas/chroma/lexers/c/css.go new file mode 100644 index 000000000..9f3a01d54 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/css.go @@ -0,0 +1,121 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// CSS lexer. 
+var CSS = internal.Register(MustNewLazyLexer( + &Config{ + Name: "CSS", + Aliases: []string{"css"}, + Filenames: []string{"*.css"}, + MimeTypes: []string{"text/css"}, + }, + cssRules, +)) + +func cssRules() Rules { + return Rules{ + "root": { + Include("basics"), + }, + "basics": { + {`\s+`, Text, nil}, + {`/\*(?:.|\n)*?\*/`, Comment, nil}, + {`\{`, Punctuation, Push("content")}, + {`(\:{1,2})([\w-]+)`, ByGroups(Punctuation, NameDecorator), nil}, + {`(\.)([\w-]+)`, ByGroups(Punctuation, NameClass), nil}, + {`(\#)([\w-]+)`, ByGroups(Punctuation, NameNamespace), nil}, + {`(@)([\w-]+)`, ByGroups(Punctuation, Keyword), Push("atrule")}, + {`[\w-]+`, NameTag, nil}, + {`[~^*!%&$\[\]()<>|+=@:;,./?-]`, Operator, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + }, + "atrule": { + {`\{`, Punctuation, Push("atcontent")}, + {`;`, Punctuation, Pop(1)}, + Include("basics"), + }, + "atcontent": { + Include("basics"), + {`\}`, Punctuation, Pop(2)}, + }, + "atparenthesis": { + Include("common-values"), + {`/\*(?:.|\n)*?\*/`, Comment, nil}, + Include("numeric-values"), + {`[*+/-]`, Operator, nil}, + {`[,]`, Punctuation, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + {`[a-zA-Z_-]\w*`, Name, nil}, + {`\(`, Punctuation, Push("atparenthesis")}, + {`\)`, Punctuation, Pop(1)}, + }, + "content": { + {`\s+`, Text, nil}, + {`\}`, Punctuation, Pop(1)}, + {`;`, Punctuation, nil}, + {`^@.*?$`, CommentPreproc, nil}, + {Words(``, ``, `-ms-`, `mso-`, `-moz-`, `-o-`, `-xv-`, `-atsc-`, `-wap-`, `-khtml-`, `-webkit-`, `prince-`, `-ah-`, `-hp-`, `-ro-`, `-rim-`, `-tc-`), KeywordPseudo, nil}, + {`(align-content|align-items|align-self|alignment-baseline|all|animation|animation-delay|animation-direction|animation-duration|animation-fill-mode|animation-iteration-count|animation-name|animation-play-state|animation-timing-function|appearance|azimuth|backface-visibility|background|background-attachment|background-blend-mode|background-clip|background-color|background-image|background-origin|background-position|background-repeat|background-size|baseline-shift|bookmark-label|bookmark-level|bookmark-state|border|border-bottom|border-bottom-color|border-bottom-left-radius|border-bottom-right-radius|border-bottom-style|border-bottom-width|border-boundary|border-collapse|border-color|border-image|border-image-outset|border-image-repeat|border-image-slice|border-image-source|border-image-width|border-left|border-left-color|border-left-style|border-left-width|border-radius|border-right|border-right-color|border-right-style|border-right-width|border-spacing|border-style|border-top|border-top-color|border-top-left-radius|border-top-right-radius|border-top-style|border-top-width|border-width|bottom|box-decoration-break|box-shadow|box-sizing|box-snap|box-suppress|break-after|break-before|break-inside|caption-side|caret|caret-animation|caret-color|caret-shape|chains|clear|clip|clip-path|clip-rule|color|color-interpolation-filters|column-count|column-fill|column-gap|column-rule|column-rule-color|column-rule-style|column-rule-width|column-span|column-width|columns|content|counter-increment|counter-reset|counter-set|crop|cue|cue-after|cue-before|cursor|direction|display|dominant-baseline|elevation|empty-cells|filter|flex|flex-basis|flex-direction|flex-flow|flex-grow|flex-shrink|flex-wrap|float|float-defer|float-offset|float-reference|flood-color|flood-opacity|flow|flow-from|flow-into|font|font-family|font-feature-settings|f
ont-kerning|font-language-override|font-size|font-size-adjust|font-stretch|font-style|font-synthesis|font-variant|font-variant-alternates|font-variant-caps|font-variant-east-asian|font-variant-ligatures|font-variant-numeric|font-variant-position|font-weight|footnote-display|footnote-policy|glyph-orientation-vertical|grid|grid-area|grid-auto-columns|grid-auto-flow|grid-auto-rows|grid-column|grid-column-end|grid-column-gap|grid-column-start|grid-gap|grid-row|grid-row-end|grid-row-gap|grid-row-start|grid-template|grid-template-areas|grid-template-columns|grid-template-rows|hanging-punctuation|height|hyphenate-character|hyphenate-limit-chars|hyphenate-limit-last|hyphenate-limit-lines|hyphenate-limit-zone|hyphens|image-orientation|image-resolution|initial-letter|initial-letter-align|initial-letter-wrap|isolation|justify-content|justify-items|justify-self|left|letter-spacing|lighting-color|line-break|line-grid|line-height|line-snap|list-style|list-style-image|list-style-position|list-style-type|margin|margin-bottom|margin-left|margin-right|margin-top|marker-side|marquee-direction|marquee-loop|marquee-speed|marquee-style|mask|mask-border|mask-border-mode|mask-border-outset|mask-border-repeat|mask-border-slice|mask-border-source|mask-border-width|mask-clip|mask-composite|mask-image|mask-mode|mask-origin|mask-position|mask-repeat|mask-size|mask-type|max-height|max-lines|max-width|min-height|min-width|mix-blend-mode|motion|motion-offset|motion-path|motion-rotation|move-to|nav-down|nav-left|nav-right|nav-up|object-fit|object-position|offset-after|offset-before|offset-end|offset-start|opacity|order|orphans|outline|outline-color|outline-offset|outline-style|outline-width|overflow|overflow-style|overflow-wrap|overflow-x|overflow-y|padding|padding-bottom|padding-left|padding-right|padding-top|page|page-break-after|page-break-before|page-break-inside|page-policy|pause|pause-after|pause-before|perspective|perspective-origin|pitch|pitch-range|play-during|polar-angle|polar-distance|position|presentation-level|quotes|region-fragment|resize|rest|rest-after|rest-before|richness|right|rotation|rotation-point|ruby-align|ruby-merge|ruby-position|running|scroll-snap-coordinate|scroll-snap-destination|scroll-snap-points-x|scroll-snap-points-y|scroll-snap-type|shape-image-threshold|shape-inside|shape-margin|shape-outside|size|speak|speak-as|speak-header|speak-numeral|speak-punctuation|speech-rate|stress|string-set|tab-size|table-layout|text-align|text-align-last|text-combine-upright|text-decoration|text-decoration-color|text-decoration-line|text-decoration-skip|text-decoration-style|text-emphasis|text-emphasis-color|text-emphasis-position|text-emphasis-style|text-indent|text-justify|text-orientation|text-overflow|text-shadow|text-space-collapse|text-space-trim|text-spacing|text-transform|text-underline-position|text-wrap|top|transform|transform-origin|transform-style|transition|transition-delay|transition-duration|transition-property|transition-timing-function|unicode-bidi|user-select|vertical-align|visibility|voice-balance|voice-duration|voice-family|voice-pitch|voice-range|voice-rate|voice-stress|voice-volume|volume|white-space|widows|width|will-change|word-break|word-spacing|word-wrap|wrap-after|wrap-before|wrap-flow|wrap-inside|wrap-through|writing-mode|z-index)(\s*)(\:)`, ByGroups(Keyword, Text, Punctuation), Push("value-start")}, + {`(--[a-zA-Z_][\w-]*)(\s*)(\:)`, ByGroups(NameVariable, Text, Punctuation), Push("value-start")}, + {`([a-zA-Z_][\w-]*)(\s*)(\:)`, ByGroups(Name, Text, Punctuation), 
Push("value-start")}, + {`/\*(?:.|\n)*?\*/`, Comment, nil}, + }, + "value-start": { + Include("common-values"), + {Words(``, `\b`, `align-content`, `align-items`, `align-self`, `alignment-baseline`, `all`, `animation`, `animation-delay`, `animation-direction`, `animation-duration`, `animation-fill-mode`, `animation-iteration-count`, `animation-name`, `animation-play-state`, `animation-timing-function`, `appearance`, `azimuth`, `backface-visibility`, `background`, `background-attachment`, `background-blend-mode`, `background-clip`, `background-color`, `background-image`, `background-origin`, `background-position`, `background-repeat`, `background-size`, `baseline-shift`, `bookmark-label`, `bookmark-level`, `bookmark-state`, `border`, `border-bottom`, `border-bottom-color`, `border-bottom-left-radius`, `border-bottom-right-radius`, `border-bottom-style`, `border-bottom-width`, `border-boundary`, `border-collapse`, `border-color`, `border-image`, `border-image-outset`, `border-image-repeat`, `border-image-slice`, `border-image-source`, `border-image-width`, `border-left`, `border-left-color`, `border-left-style`, `border-left-width`, `border-radius`, `border-right`, `border-right-color`, `border-right-style`, `border-right-width`, `border-spacing`, `border-style`, `border-top`, `border-top-color`, `border-top-left-radius`, `border-top-right-radius`, `border-top-style`, `border-top-width`, `border-width`, `bottom`, `box-decoration-break`, `box-shadow`, `box-sizing`, `box-snap`, `box-suppress`, `break-after`, `break-before`, `break-inside`, `caption-side`, `caret`, `caret-animation`, `caret-color`, `caret-shape`, `chains`, `clear`, `clip`, `clip-path`, `clip-rule`, `color`, `color-interpolation-filters`, `column-count`, `column-fill`, `column-gap`, `column-rule`, `column-rule-color`, `column-rule-style`, `column-rule-width`, `column-span`, `column-width`, `columns`, `content`, `counter-increment`, `counter-reset`, `counter-set`, `crop`, `cue`, `cue-after`, `cue-before`, `cursor`, `direction`, `display`, `dominant-baseline`, `elevation`, `empty-cells`, `filter`, `flex`, `flex-basis`, `flex-direction`, `flex-flow`, `flex-grow`, `flex-shrink`, `flex-wrap`, `float`, `float-defer`, `float-offset`, `float-reference`, `flood-color`, `flood-opacity`, `flow`, `flow-from`, `flow-into`, `font`, `font-family`, `font-feature-settings`, `font-kerning`, `font-language-override`, `font-size`, `font-size-adjust`, `font-stretch`, `font-style`, `font-synthesis`, `font-variant`, `font-variant-alternates`, `font-variant-caps`, `font-variant-east-asian`, `font-variant-ligatures`, `font-variant-numeric`, `font-variant-position`, `font-weight`, `footnote-display`, `footnote-policy`, `glyph-orientation-vertical`, `grid`, `grid-area`, `grid-auto-columns`, `grid-auto-flow`, `grid-auto-rows`, `grid-column`, `grid-column-end`, `grid-column-gap`, `grid-column-start`, `grid-gap`, `grid-row`, `grid-row-end`, `grid-row-gap`, `grid-row-start`, `grid-template`, `grid-template-areas`, `grid-template-columns`, `grid-template-rows`, `hanging-punctuation`, `height`, `hyphenate-character`, `hyphenate-limit-chars`, `hyphenate-limit-last`, `hyphenate-limit-lines`, `hyphenate-limit-zone`, `hyphens`, `image-orientation`, `image-resolution`, `initial-letter`, `initial-letter-align`, `initial-letter-wrap`, `isolation`, `justify-content`, `justify-items`, `justify-self`, `left`, `letter-spacing`, `lighting-color`, `line-break`, `line-grid`, `line-height`, `line-snap`, `list-style`, `list-style-image`, `list-style-position`, 
`list-style-type`, `margin`, `margin-bottom`, `margin-left`, `margin-right`, `margin-top`, `marker-side`, `marquee-direction`, `marquee-loop`, `marquee-speed`, `marquee-style`, `mask`, `mask-border`, `mask-border-mode`, `mask-border-outset`, `mask-border-repeat`, `mask-border-slice`, `mask-border-source`, `mask-border-width`, `mask-clip`, `mask-composite`, `mask-image`, `mask-mode`, `mask-origin`, `mask-position`, `mask-repeat`, `mask-size`, `mask-type`, `max-height`, `max-lines`, `max-width`, `min-height`, `min-width`, `mix-blend-mode`, `motion`, `motion-offset`, `motion-path`, `motion-rotation`, `move-to`, `nav-down`, `nav-left`, `nav-right`, `nav-up`, `object-fit`, `object-position`, `offset-after`, `offset-before`, `offset-end`, `offset-start`, `opacity`, `order`, `orphans`, `outline`, `outline-color`, `outline-offset`, `outline-style`, `outline-width`, `overflow`, `overflow-style`, `overflow-wrap`, `overflow-x`, `overflow-y`, `padding`, `padding-bottom`, `padding-left`, `padding-right`, `padding-top`, `page`, `page-break-after`, `page-break-before`, `page-break-inside`, `page-policy`, `pause`, `pause-after`, `pause-before`, `perspective`, `perspective-origin`, `pitch`, `pitch-range`, `play-during`, `polar-angle`, `polar-distance`, `position`, `presentation-level`, `quotes`, `region-fragment`, `resize`, `rest`, `rest-after`, `rest-before`, `richness`, `right`, `rotation`, `rotation-point`, `ruby-align`, `ruby-merge`, `ruby-position`, `running`, `scroll-snap-coordinate`, `scroll-snap-destination`, `scroll-snap-points-x`, `scroll-snap-points-y`, `scroll-snap-type`, `shape-image-threshold`, `shape-inside`, `shape-margin`, `shape-outside`, `size`, `speak`, `speak-as`, `speak-header`, `speak-numeral`, `speak-punctuation`, `speech-rate`, `stress`, `string-set`, `tab-size`, `table-layout`, `text-align`, `text-align-last`, `text-combine-upright`, `text-decoration`, `text-decoration-color`, `text-decoration-line`, `text-decoration-skip`, `text-decoration-style`, `text-emphasis`, `text-emphasis-color`, `text-emphasis-position`, `text-emphasis-style`, `text-indent`, `text-justify`, `text-orientation`, `text-overflow`, `text-shadow`, `text-space-collapse`, `text-space-trim`, `text-spacing`, `text-transform`, `text-underline-position`, `text-wrap`, `top`, `transform`, `transform-origin`, `transform-style`, `transition`, `transition-delay`, `transition-duration`, `transition-property`, `transition-timing-function`, `unicode-bidi`, `user-select`, `vertical-align`, `visibility`, `voice-balance`, `voice-duration`, `voice-family`, `voice-pitch`, `voice-range`, `voice-rate`, `voice-stress`, `voice-volume`, `volume`, `white-space`, `widows`, `width`, `will-change`, `word-break`, `word-spacing`, `word-wrap`, `wrap-after`, `wrap-before`, `wrap-flow`, `wrap-inside`, `wrap-through`, `writing-mode`, `z-index`), Keyword, nil}, + {`\!important`, CommentPreproc, nil}, + {`/\*(?:.|\n)*?\*/`, Comment, nil}, + Include("numeric-values"), + {`[~^*!%&<>|+=@:./?-]+`, Operator, nil}, + {`[\[\](),]+`, Punctuation, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + {`[a-zA-Z_][\w-]*`, Name, nil}, + {`;`, Punctuation, Pop(1)}, + {`\}`, Punctuation, Pop(2)}, + }, + "function-start": { + Include("common-values"), + {`/\*(?:.|\n)*?\*/`, Comment, nil}, + Include("numeric-values"), + {`[*+/-]`, Operator, nil}, + {`[,]`, Punctuation, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + {`[a-zA-Z_-]\w*`, Name, 
nil}, + {`\(`, Punctuation, Push("atparenthesis")}, + {`\)`, Punctuation, Pop(1)}, + }, + "common-values": { + {`\s+`, Text, nil}, + {Words(``, ``, `-ms-`, `mso-`, `-moz-`, `-o-`, `-xv-`, `-atsc-`, `-wap-`, `-khtml-`, `-webkit-`, `prince-`, `-ah-`, `-hp-`, `-ro-`, `-rim-`, `-tc-`), KeywordPseudo, nil}, + Include("urls"), + {`(attr|blackness|blend|blenda|blur|brightness|calc|circle|color-mod|contrast|counter|cubic-bezier|device-cmyk|drop-shadow|ellipse|gray|grayscale|hsl|hsla|hue|hue-rotate|hwb|image|inset|invert|lightness|linear-gradient|matrix|matrix3d|opacity|perspective|polygon|radial-gradient|rect|repeating-linear-gradient|repeating-radial-gradient|rgb|rgba|rotate|rotate3d|rotateX|rotateY|rotateZ|saturate|saturation|scale|scale3d|scaleX|scaleY|scaleZ|sepia|shade|skewX|skewY|steps|tint|toggle|translate|translate3d|translateX|translateY|translateZ|whiteness)(\()`, ByGroups(NameBuiltin, Punctuation), Push("function-start")}, + {`([a-zA-Z_][\w-]+)(\()`, ByGroups(NameFunction, Punctuation), Push("function-start")}, + {Words(``, `\b`, `absolute`, `alias`, `all`, `all-petite-caps`, `all-scroll`, `all-small-caps`, `allow-end`, `alpha`, `alternate`, `alternate-reverse`, `always`, `armenian`, `auto`, `avoid`, `avoid-column`, `avoid-page`, `backwards`, `balance`, `baseline`, `below`, `blink`, `block`, `bold`, `bolder`, `border-box`, `both`, `bottom`, `box-decoration`, `break-word`, `capitalize`, `cell`, `center`, `circle`, `clip`, `clone`, `close-quote`, `col-resize`, `collapse`, `color`, `color-burn`, `color-dodge`, `column`, `column-reverse`, `compact`, `condensed`, `contain`, `container`, `content-box`, `context-menu`, `copy`, `cover`, `crisp-edges`, `crosshair`, `currentColor`, `cursive`, `darken`, `dashed`, `decimal`, `decimal-leading-zero`, `default`, `descendants`, `difference`, `digits`, `disc`, `distribute`, `dot`, `dotted`, `double`, `double-circle`, `e-resize`, `each-line`, `ease`, `ease-in`, `ease-in-out`, `ease-out`, `edges`, `ellipsis`, `end`, `ew-resize`, `exclusion`, `expanded`, `extra-condensed`, `extra-expanded`, `fantasy`, `fill`, `fill-box`, `filled`, `first`, `fixed`, `flat`, `flex`, `flex-end`, `flex-start`, `flip`, `force-end`, `forwards`, `from-image`, `full-width`, `geometricPrecision`, `georgian`, `groove`, `hanging`, `hard-light`, `help`, `hidden`, `hide`, `horizontal`, `hue`, `icon`, `infinite`, `inherit`, `initial`, `ink`, `inline`, `inline-block`, `inline-flex`, `inline-table`, `inset`, `inside`, `inter-word`, `invert`, `isolate`, `italic`, `justify`, `large`, `larger`, `last`, `left`, `lighten`, `lighter`, `line-through`, `linear`, `list-item`, `local`, `loose`, `lower-alpha`, `lower-greek`, `lower-latin`, `lower-roman`, `lowercase`, `ltr`, `luminance`, `luminosity`, `mandatory`, `manipulation`, `manual`, `margin-box`, `match-parent`, `medium`, `mixed`, `monospace`, `move`, `multiply`, `n-resize`, `ne-resize`, `nesw-resize`, `no-close-quote`, `no-drop`, `no-open-quote`, `no-repeat`, `none`, `normal`, `not-allowed`, `nowrap`, `ns-resize`, `nw-resize`, `nwse-resize`, `objects`, `oblique`, `off`, `on`, `open`, `open-quote`, `optimizeLegibility`, `optimizeSpeed`, `outset`, `outside`, `over`, `overlay`, `overline`, `padding-box`, `page`, `pan-down`, `pan-left`, `pan-right`, `pan-up`, `pan-x`, `pan-y`, `paused`, `petite-caps`, `pixelated`, `pointer`, `preserve-3d`, `progress`, `proximity`, `relative`, `repeat`, `repeat no-repeat`, `repeat-x`, `repeat-y`, `reverse`, `ridge`, `right`, `round`, `row`, `row-resize`, `row-reverse`, `rtl`, `ruby`, `ruby-base`, 
`ruby-base-container`, `ruby-text`, `ruby-text-container`, `run-in`, `running`, `s-resize`, `sans-serif`, `saturation`, `scale-down`, `screen`, `scroll`, `se-resize`, `semi-condensed`, `semi-expanded`, `separate`, `serif`, `sesame`, `show`, `sideways`, `sideways-left`, `sideways-right`, `slice`, `small`, `small-caps`, `smaller`, `smooth`, `snap`, `soft-light`, `solid`, `space`, `space-around`, `space-between`, `spaces`, `square`, `start`, `static`, `step-end`, `step-start`, `sticky`, `stretch`, `strict`, `stroke-box`, `style`, `sw-resize`, `table`, `table-caption`, `table-cell`, `table-column`, `table-column-group`, `table-footer-group`, `table-header-group`, `table-row`, `table-row-group`, `text`, `thick`, `thin`, `titling-caps`, `to`, `top`, `triangle`, `ultra-condensed`, `ultra-expanded`, `under`, `underline`, `unicase`, `unset`, `upper-alpha`, `upper-latin`, `upper-roman`, `uppercase`, `upright`, `use-glyph-orientation`, `vertical`, `vertical-text`, `view-box`, `visible`, `w-resize`, `wait`, `wavy`, `weight`, `weight style`, `wrap`, `wrap-reverse`, `x-large`, `x-small`, `xx-large`, `xx-small`, `zoom-in`, `zoom-out`), KeywordConstant, nil}, + {Words(``, `\b`, `above`, `aural`, `behind`, `bidi-override`, `center-left`, `center-right`, `cjk-ideographic`, `continuous`, `crop`, `cross`, `embed`, `far-left`, `far-right`, `fast`, `faster`, `hebrew`, `high`, `higher`, `hiragana`, `hiragana-iroha`, `katakana`, `katakana-iroha`, `landscape`, `left-side`, `leftwards`, `level`, `loud`, `low`, `lower`, `message-box`, `middle`, `mix`, `narrower`, `once`, `portrait`, `right-side`, `rightwards`, `silent`, `slow`, `slower`, `small-caption`, `soft`, `spell-out`, `status-bar`, `super`, `text-bottom`, `text-top`, `wider`, `x-fast`, `x-high`, `x-loud`, `x-low`, `x-soft`, `yes`, `pre`, `pre-wrap`, `pre-line`), KeywordConstant, nil}, + {Words(``, `\b`, `aliceblue`, `antiquewhite`, `aqua`, `aquamarine`, `azure`, `beige`, `bisque`, `black`, `blanchedalmond`, `blue`, `blueviolet`, `brown`, `burlywood`, `cadetblue`, `chartreuse`, `chocolate`, `coral`, `cornflowerblue`, `cornsilk`, `crimson`, `cyan`, `darkblue`, `darkcyan`, `darkgoldenrod`, `darkgray`, `darkgreen`, `darkgrey`, `darkkhaki`, `darkmagenta`, `darkolivegreen`, `darkorange`, `darkorchid`, `darkred`, `darksalmon`, `darkseagreen`, `darkslateblue`, `darkslategray`, `darkslategrey`, `darkturquoise`, `darkviolet`, `deeppink`, `deepskyblue`, `dimgray`, `dimgrey`, `dodgerblue`, `firebrick`, `floralwhite`, `forestgreen`, `fuchsia`, `gainsboro`, `ghostwhite`, `gold`, `goldenrod`, `gray`, `green`, `greenyellow`, `grey`, `honeydew`, `hotpink`, `indianred`, `indigo`, `ivory`, `khaki`, `lavender`, `lavenderblush`, `lawngreen`, `lemonchiffon`, `lightblue`, `lightcoral`, `lightcyan`, `lightgoldenrodyellow`, `lightgray`, `lightgreen`, `lightgrey`, `lightpink`, `lightsalmon`, `lightseagreen`, `lightskyblue`, `lightslategray`, `lightslategrey`, `lightsteelblue`, `lightyellow`, `lime`, `limegreen`, `linen`, `magenta`, `maroon`, `mediumaquamarine`, `mediumblue`, `mediumorchid`, `mediumpurple`, `mediumseagreen`, `mediumslateblue`, `mediumspringgreen`, `mediumturquoise`, `mediumvioletred`, `midnightblue`, `mintcream`, `mistyrose`, `moccasin`, `navajowhite`, `navy`, `oldlace`, `olive`, `olivedrab`, `orange`, `orangered`, `orchid`, `palegoldenrod`, `palegreen`, `paleturquoise`, `palevioletred`, `papayawhip`, `peachpuff`, `peru`, `pink`, `plum`, `powderblue`, `purple`, `rebeccapurple`, `red`, `rosybrown`, `royalblue`, `saddlebrown`, `salmon`, `sandybrown`, `seagreen`, 
`seashell`, `sienna`, `silver`, `skyblue`, `slateblue`, `slategray`, `slategrey`, `snow`, `springgreen`, `steelblue`, `tan`, `teal`, `thistle`, `tomato`, `turquoise`, `violet`, `wheat`, `white`, `whitesmoke`, `yellow`, `yellowgreen`, `transparent`), KeywordConstant, nil}, + }, + "urls": { + {`(url)(\()(".*?")(\))`, ByGroups(NameBuiltin, Punctuation, LiteralStringDouble, Punctuation), nil}, + {`(url)(\()('.*?')(\))`, ByGroups(NameBuiltin, Punctuation, LiteralStringSingle, Punctuation), nil}, + {`(url)(\()(.*?)(\))`, ByGroups(NameBuiltin, Punctuation, LiteralStringOther, Punctuation), nil}, + }, + "numeric-values": { + {`\#[a-zA-Z0-9]{1,6}`, LiteralNumberHex, nil}, + {`[+\-]?[0-9]*[.][0-9]+`, LiteralNumberFloat, Push("numeric-end")}, + {`[+\-]?[0-9]+`, LiteralNumberInteger, Push("numeric-end")}, + }, + "numeric-end": { + {Words(``, `\b`, `deg`, `grad`, `rad`, `turn`, `Hz`, `kHz`, `em`, `ex`, `ch`, `rem`, `vh`, `vw`, `vmin`, `vmax`, `px`, `mm`, `cm`, `in`, `pt`, `pc`, `q`, `dpi`, `dpcm`, `dppx`, `s`, `ms`), KeywordType, nil}, + {`%`, KeywordType, nil}, + Default(Pop(1)), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/cython.go b/vendor/github.com/alecthomas/chroma/lexers/c/cython.go new file mode 100644 index 000000000..0cce20410 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/cython.go @@ -0,0 +1,139 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Cython lexer. +var Cython = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Cython", + Aliases: []string{"cython", "pyx", "pyrex"}, + Filenames: []string{"*.pyx", "*.pxd", "*.pxi"}, + MimeTypes: []string{"text/x-cython", "application/x-cython"}, + }, + cythonRules, +)) + +func cythonRules() Rules { + return Rules{ + "root": { + {`\n`, Text, nil}, + {`^(\s*)("""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringDoc), nil}, + {`^(\s*)('''(?:.|\n)*?''')`, ByGroups(Text, LiteralStringDoc), nil}, + {`[^\S\n]+`, Text, nil}, + {`#.*$`, Comment, nil}, + {`[]{}:(),;[]`, Punctuation, nil}, + {`\\\n`, Text, nil}, + {`\\`, Text, nil}, + {`(in|is|and|or|not)\b`, OperatorWord, nil}, + {`(<)([a-zA-Z0-9.?]+)(>)`, ByGroups(Punctuation, KeywordType, Punctuation), nil}, + {`!=|==|<<|>>|[-~+/*%=<>&^|.?]`, Operator, nil}, + {`(from)(\d+)(<=)(\s+)(<)(\d+)(:)`, ByGroups(Keyword, LiteralNumberInteger, Operator, Name, Operator, Name, Punctuation), nil}, + Include("keywords"), + {`(def|property)(\s+)`, ByGroups(Keyword, Text), Push("funcname")}, + {`(cp?def)(\s+)`, ByGroups(Keyword, Text), Push("cdef")}, + {`(cdef)(:)`, ByGroups(Keyword, Punctuation), nil}, + {`(class|struct)(\s+)`, ByGroups(Keyword, Text), Push("classname")}, + {`(from)(\s+)`, ByGroups(Keyword, Text), Push("fromimport")}, + {`(c?import)(\s+)`, ByGroups(Keyword, Text), Push("import")}, + Include("builtins"), + Include("backtick"), + {`(?:[rR]|[uU][rR]|[rR][uU])"""`, LiteralString, Push("tdqs")}, + {`(?:[rR]|[uU][rR]|[rR][uU])'''`, LiteralString, Push("tsqs")}, + {`(?:[rR]|[uU][rR]|[rR][uU])"`, LiteralString, Push("dqs")}, + {`(?:[rR]|[uU][rR]|[rR][uU])'`, LiteralString, Push("sqs")}, + {`[uU]?"""`, LiteralString, Combined("stringescape", "tdqs")}, + {`[uU]?'''`, LiteralString, Combined("stringescape", "tsqs")}, + {`[uU]?"`, LiteralString, Combined("stringescape", "dqs")}, + {`[uU]?'`, LiteralString, Combined("stringescape", "sqs")}, + Include("name"), + Include("numbers"), + }, + "keywords": { + {Words(``, `\b`, `assert`, `break`, `by`, `continue`, `ctypedef`, `del`, `elif`, `else`, 
`except`, `except?`, `exec`, `finally`, `for`, `fused`, `gil`, `global`, `if`, `include`, `lambda`, `nogil`, `pass`, `print`, `raise`, `return`, `try`, `while`, `yield`, `as`, `with`), Keyword, nil}, + {`(DEF|IF|ELIF|ELSE)\b`, CommentPreproc, nil}, + }, + "builtins": { + {Words(`(?`, CommentPreproc, Pop(1)}, + {`(<<<)([\'"]?)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)(\2\n.*?\n\s*)(\3)(;?)(\n)`, ByGroups(LiteralString, LiteralString, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, Punctuation, Text), nil}, + {`\s+`, Text, nil}, + {`#.*?\n`, CommentSingle, nil}, + {`//.*?\n`, CommentSingle, nil}, + {`/\*\*/`, CommentMultiline, nil}, + {`/\*\*.*?\*/`, LiteralStringDoc, nil}, + {`/\*.*?\*/`, CommentMultiline, nil}, + {`(->|::)(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Operator, Text, NameAttribute), nil}, + {`[~!%^&*+=|:.<>/@-]+`, Operator, nil}, + {`\?`, Operator, nil}, + {`[\[\]{}();,]+`, Punctuation, nil}, + {`(class)(\s+)`, ByGroups(Keyword, Text), Push("classname")}, + {`(function)(\s*)(?=\()`, ByGroups(Keyword, Text), nil}, + {`(function)(\s+)(&?)(\s*)`, ByGroups(Keyword, Text, Operator, Text), Push("functionname")}, + {`(const)(\s+)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Keyword, Text, NameConstant), nil}, + {`(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|FALSE|print|for|require|continue|foreach|require_once|declare|return|default|static|do|switch|die|stdClass|echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|virtual|endfor|include_once|while|endforeach|global|endif|list|endswitch|new|endwhile|not|array|E_ALL|NULL|final|php_user_filter|interface|implements|public|private|protected|abstract|clone|try|catch|throw|this|use|namespace|trait|yield|finally)\b`, Keyword, nil}, + {`(true|false|null)\b`, KeywordConstant, nil}, + Include("magicconstants"), + {`\$\{\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*\}`, NameVariable, nil}, + {`\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameVariable, nil}, + {`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameOther, nil}, + {`(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`\d+e[+-]?[0-9]+`, LiteralNumberFloat, nil}, + {`0[0-7]+`, LiteralNumberOct, nil}, + {`0x[a-f0-9_]+`, LiteralNumberHex, nil}, + {`\d[\d_]*`, LiteralNumberInteger, nil}, + {`0b[01]+`, LiteralNumberBin, nil}, + {`'([^'\\]*(?:\\.[^'\\]*)*)'`, LiteralStringSingle, nil}, + {"`([^`\\\\]*(?:\\\\.[^`\\\\]*)*)`", LiteralStringBacktick, nil}, + {`"`, LiteralStringDouble, Push("string")}, + }, + "magicfuncs": { + {Words(``, `\b`, `__construct`, `__destruct`, `__call`, `__callStatic`, `__get`, `__set`, `__isset`, `__unset`, `__sleep`, `__wakeup`, `__toString`, `__invoke`, `__set_state`, `__clone`, `__debugInfo`), NameFunctionMagic, nil}, + }, + "magicconstants": { + {Words(``, `\b`, `__LINE__`, `__FILE__`, `__DIR__`, `__FUNCTION__`, `__CLASS__`, `__TRAIT__`, `__METHOD__`, `__NAMESPACE__`), NameConstant, nil}, + }, + "classname": { + {`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameClass, Pop(1)}, + }, + "functionname": { + Include("magicfuncs"), + {`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameFunction, Pop(1)}, + Default(Pop(1)), + }, + "string": { + {`"`, LiteralStringDouble, Pop(1)}, + {`[^{$"\\]+`, LiteralStringDouble, nil}, + {`\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})`, LiteralStringEscape, nil}, + 
{`\$(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*(\[\S+?\]|->(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)?`, LiteralStringInterpol, nil}, + {`(\{\$\{)(.*?)(\}\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil}, + {`(\{)(\$.*?)(\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil}, + {`(\$\{)(\S+)(\})`, ByGroups(LiteralStringInterpol, NameVariable, LiteralStringInterpol), nil}, + {`[${\\]`, LiteralStringDouble, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/circular/phtml.go b/vendor/github.com/alecthomas/chroma/lexers/circular/phtml.go new file mode 100644 index 000000000..fc2e2eaf9 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/circular/phtml.go @@ -0,0 +1,39 @@ +package circular + +import ( + "strings" + + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/h" + "github.com/alecthomas/chroma/lexers/internal" +) + +// PHTML lexer is PHP in HTML. +var PHTML = internal.Register(DelegatingLexer(h.HTML, MustNewLazyLexer( + &Config{ + Name: "PHTML", + Aliases: []string{"phtml"}, + Filenames: []string{"*.phtml", "*.php", "*.php[345]", "*.inc"}, + MimeTypes: []string{"application/x-php", "application/x-httpd-php", "application/x-httpd-php3", "application/x-httpd-php4", "application/x-httpd-php5", "text/x-php"}, + DotAll: true, + CaseInsensitive: true, + EnsureNL: true, + Priority: 2, + }, + phtmlRules, +).SetAnalyser(func(text string) float32 { + if strings.Contains(text, "]*\s+)+?)((?:[^\W\d]|\$)[\w$]*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil}, + + // https://dlang.org/spec/attribute.html#uda + {`@[\w.]*`, NameDecorator, nil}, + {`(abstract|auto|alias|align|const|delegate|enum|export|final|function|inout|lazy|nothrow|override|package|private|protected|public|pure|static|synchronized|template|volatile|__gshared)\b`, KeywordDeclaration, nil}, + + // https://dlang.org/spec/type.html#basic-data-types + {`(void|bool|byte|ubyte|short|ushort|int|uint|long|ulong|cent|ucent|float|double|real|ifloat|idouble|ireal|cfloat|cdouble|creal|char|wchar|dchar|string|wstring|dstring)\b`, KeywordType, nil}, + {`(module)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")}, + {`(true|false|null)\b`, KeywordConstant, nil}, + {`(class|interface|struct|template|union)(\s+)`, ByGroups(KeywordDeclaration, Text), Push("class")}, + {`(import)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")}, + + // https://dlang.org/spec/lex.html#string_literals + // TODO support delimited strings + {`[qr]?"(\\\\|\\"|[^"])*"[cwd]?`, LiteralString, nil}, + {"(`)([^`]*)(`)[cwd]?", LiteralString, nil}, + {`'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'`, LiteralStringChar, nil}, + {`(\.)((?:[^\W\d]|\$)[\w$]*)`, ByGroups(Operator, NameAttribute), nil}, + {`^\s*([^\W\d]|\$)[\w$]*:`, NameLabel, nil}, + + // https://dlang.org/spec/lex.html#floatliteral + {`([0-9][0-9_]*\.([0-9][0-9_]*)?|\.[0-9][0-9_]*)([eE][+\-]?[0-9][0-9_]*)?[fFL]?i?|[0-9][eE][+\-]?[0-9][0-9_]*[fFL]?|[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFL]|0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)[pP][+\-]?[0-9][0-9_]*[fFL]?`, LiteralNumberFloat, nil}, + // https://dlang.org/spec/lex.html#integerliteral + {`0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?`, LiteralNumberHex, nil}, + {`0[bB][01][01_]*[lL]?`, LiteralNumberBin, nil}, + {`0[0-7_]+[lL]?`, LiteralNumberOct, nil}, + {`0|[1-9][0-9_]*[lL]?`, LiteralNumberInteger, nil}, + {`([~^*!%&\[\](){}<>|+=:;,./?-]|q{)`, Operator, nil}, + 
{`([^\W\d]|\$)[\w$]*`, Name, nil}, + {`\n`, Text, nil}, + }, + "class": { + {`([^\W\d]|\$)[\w$]*`, NameClass, Pop(1)}, + }, + "import": { + {`[\w.]+\*?`, NameNamespace, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/d/dart.go b/vendor/github.com/alecthomas/chroma/lexers/d/dart.go new file mode 100644 index 000000000..c1dbb5cd5 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/d/dart.go @@ -0,0 +1,95 @@ +package d + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Dart lexer. +var Dart = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Dart", + Aliases: []string{"dart"}, + Filenames: []string{"*.dart"}, + MimeTypes: []string{"text/x-dart"}, + DotAll: true, + }, + dartRules, +)) + +func dartRules() Rules { + return Rules{ + "root": { + Include("string_literal"), + {`#!(.*?)$`, CommentPreproc, nil}, + {`\b(import|export)\b`, Keyword, Push("import_decl")}, + {`\b(library|source|part of|part)\b`, Keyword, nil}, + {`[^\S\n]+`, Text, nil}, + {`//.*?\n`, CommentSingle, nil}, + {`/\*.*?\*/`, CommentMultiline, nil}, + {`\b(class)\b(\s+)`, ByGroups(KeywordDeclaration, Text), Push("class")}, + {`\b(assert|break|case|catch|continue|default|do|else|finally|for|if|in|is|new|return|super|switch|this|throw|try|while)\b`, Keyword, nil}, + {`\b(abstract|async|await|const|extends|factory|final|get|implements|native|operator|set|static|sync|typedef|var|with|yield)\b`, KeywordDeclaration, nil}, + {`\b(bool|double|dynamic|int|num|Object|String|void)\b`, KeywordType, nil}, + {`\b(false|null|true)\b`, KeywordConstant, nil}, + {`[~!%^&*+=|?:<>/-]|as\b`, Operator, nil}, + {`[a-zA-Z_$]\w*:`, NameLabel, nil}, + {`[a-zA-Z_$]\w*`, Name, nil}, + {`[(){}\[\],.;]`, Punctuation, nil}, + {`0[xX][0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`\d+(\.\d*)?([eE][+-]?\d+)?`, LiteralNumber, nil}, + {`\.\d+([eE][+-]?\d+)?`, LiteralNumber, nil}, + {`\n`, Text, nil}, + }, + "class": { + {`[a-zA-Z_$]\w*`, NameClass, Pop(1)}, + }, + "import_decl": { + Include("string_literal"), + {`\s+`, Text, nil}, + {`\b(as|show|hide)\b`, Keyword, nil}, + {`[a-zA-Z_$]\w*`, Name, nil}, + {`\,`, Punctuation, nil}, + {`\;`, Punctuation, Pop(1)}, + }, + "string_literal": { + {`r"""([\w\W]*?)"""`, LiteralStringDouble, nil}, + {`r'''([\w\W]*?)'''`, LiteralStringSingle, nil}, + {`r"(.*?)"`, LiteralStringDouble, nil}, + {`r'(.*?)'`, LiteralStringSingle, nil}, + {`"""`, LiteralStringDouble, Push("string_double_multiline")}, + {`'''`, LiteralStringSingle, Push("string_single_multiline")}, + {`"`, LiteralStringDouble, Push("string_double")}, + {`'`, LiteralStringSingle, Push("string_single")}, + }, + "string_common": { + {`\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|u\{[0-9A-Fa-f]*\}|[a-z'\"$\\])`, LiteralStringEscape, nil}, + {`(\$)([a-zA-Z_]\w*)`, ByGroups(LiteralStringInterpol, Name), nil}, + {`(\$\{)(.*?)(\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil}, + }, + "string_double": { + {`"`, LiteralStringDouble, Pop(1)}, + {`[^"$\\\n]+`, LiteralStringDouble, nil}, + Include("string_common"), + {`\$+`, LiteralStringDouble, nil}, + }, + "string_double_multiline": { + {`"""`, LiteralStringDouble, Pop(1)}, + {`[^"$\\]+`, LiteralStringDouble, nil}, + Include("string_common"), + {`(\$|\")+`, LiteralStringDouble, nil}, + }, + "string_single": { + {`'`, LiteralStringSingle, Pop(1)}, + {`[^'$\\\n]+`, LiteralStringSingle, nil}, + Include("string_common"), + {`\$+`, LiteralStringSingle, nil}, + }, + "string_single_multiline": { + {`'''`, 
LiteralStringSingle, Pop(1)}, + {`[^\'$\\]+`, LiteralStringSingle, nil}, + Include("string_common"), + {`(\$|\')+`, LiteralStringSingle, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/d/diff.go b/vendor/github.com/alecthomas/chroma/lexers/d/diff.go new file mode 100644 index 000000000..d4d6db450 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/d/diff.go @@ -0,0 +1,33 @@ +package d + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Diff lexer. +var Diff = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Diff", + Aliases: []string{"diff", "udiff"}, + EnsureNL: true, + Filenames: []string{"*.diff", "*.patch"}, + MimeTypes: []string{"text/x-diff", "text/x-patch"}, + }, + diffRules, +)) + +func diffRules() Rules { + return Rules{ + "root": { + {` .*\n`, Text, nil}, + {`\+.*\n`, GenericInserted, nil}, + {`-.*\n`, GenericDeleted, nil}, + {`!.*\n`, GenericStrong, nil}, + {`@.*\n`, GenericSubheading, nil}, + {`([Ii]ndex|diff).*\n`, GenericHeading, nil}, + {`=.*\n`, GenericHeading, nil}, + {`.*\n`, Text, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/d/django.go b/vendor/github.com/alecthomas/chroma/lexers/d/django.go new file mode 100644 index 000000000..5d3baa916 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/d/django.go @@ -0,0 +1,57 @@ +package d + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Django/Jinja lexer. +var DjangoJinja = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Django/Jinja", + Aliases: []string{"django", "jinja"}, + Filenames: []string{}, + MimeTypes: []string{"application/x-django-templating", "application/x-jinja"}, + DotAll: true, + }, + djangoJinjaRules, +)) + +func djangoJinjaRules() Rules { + return Rules{ + "root": { + {`[^{]+`, Other, nil}, + {`\{\{`, CommentPreproc, Push("var")}, + {`\{[*#].*?[*#]\}`, Comment, nil}, + {`(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)(\{%)(-?\s*)(endcomment)(\s*-?)(%\})`, ByGroups(CommentPreproc, Text, Keyword, Text, CommentPreproc, Comment, CommentPreproc, Text, Keyword, Text, CommentPreproc), nil}, + {`(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)(\{%)(-?\s*)(endraw)(\s*-?)(%\})`, ByGroups(CommentPreproc, Text, Keyword, Text, CommentPreproc, Text, CommentPreproc, Text, Keyword, Text, CommentPreproc), nil}, + {`(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_]\w*)`, ByGroups(CommentPreproc, Text, Keyword, Text, NameFunction), Push("block")}, + {`(\{%)(-?\s*)([a-zA-Z_]\w*)`, ByGroups(CommentPreproc, Text, Keyword), Push("block")}, + {`\{`, Other, nil}, + }, + "varnames": { + {`(\|)(\s*)([a-zA-Z_]\w*)`, ByGroups(Operator, Text, NameFunction), nil}, + {`(is)(\s+)(not)?(\s+)?([a-zA-Z_]\w*)`, ByGroups(Keyword, Text, Keyword, Text, NameFunction), nil}, + {`(_|true|false|none|True|False|None)\b`, KeywordPseudo, nil}, + {`(in|as|reversed|recursive|not|and|or|is|if|else|import|with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b`, Keyword, nil}, + {`(loop|block|super|forloop)\b`, NameBuiltin, nil}, + {`[a-zA-Z_][\w-]*`, NameVariable, nil}, + {`\.\w+`, NameVariable, nil}, + {`:?"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`:?'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + {`([{}()\[\]+\-*/,:~]|[><=]=?)`, Operator, nil}, + {`[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil}, + }, + "var": { + {`\s+`, Text, nil}, + {`(-?)(\}\})`, ByGroups(Text, CommentPreproc), Pop(1)}, + Include("varnames"), + }, + "block": { 
+ {`\s+`, Text, nil}, + {`(-?)(%\})`, ByGroups(Text, CommentPreproc), Pop(1)}, + Include("varnames"), + {`.`, Punctuation, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/d/docker.go b/vendor/github.com/alecthomas/chroma/lexers/d/docker.go new file mode 100644 index 000000000..ea808f047 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/d/docker.go @@ -0,0 +1,35 @@ +package d + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/b" + "github.com/alecthomas/chroma/lexers/internal" + "github.com/alecthomas/chroma/lexers/j" +) + +// Docker lexer. +var Docker = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Docker", + Aliases: []string{"docker", "dockerfile"}, + Filenames: []string{"Dockerfile", "*.docker"}, + MimeTypes: []string{"text/x-dockerfile-config"}, + CaseInsensitive: true, + }, + dockerRules, +)) + +func dockerRules() Rules { + return Rules{ + "root": { + {`#.*`, Comment, nil}, + {`(ONBUILD)((?:\s*\\?\s*))`, ByGroups(Keyword, Using(b.Bash)), nil}, + {`(HEALTHCHECK)(((?:\s*\\?\s*)--\w+=\w+(?:\s*\\?\s*))*)`, ByGroups(Keyword, Using(b.Bash)), nil}, + {`(VOLUME|ENTRYPOINT|CMD|SHELL)((?:\s*\\?\s*))(\[.*?\])`, ByGroups(Keyword, Using(b.Bash), Using(j.JSON)), nil}, + {`(LABEL|ENV|ARG)((?:(?:\s*\\?\s*)\w+=\w+(?:\s*\\?\s*))*)`, ByGroups(Keyword, Using(b.Bash)), nil}, + {`((?:FROM|MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)|VOLUME)\b(.*)`, ByGroups(Keyword, LiteralString), nil}, + {`((?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY))`, Keyword, nil}, + {`(.*\\\n)*.+`, Using(b.Bash), nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/d/dtd.go b/vendor/github.com/alecthomas/chroma/lexers/d/dtd.go new file mode 100644 index 000000000..aeec6b1d9 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/d/dtd.go @@ -0,0 +1,73 @@ +package d + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Dtd lexer. 
+var Dtd = internal.Register(MustNewLazyLexer( + &Config{ + Name: "DTD", + Aliases: []string{"dtd"}, + Filenames: []string{"*.dtd"}, + MimeTypes: []string{"application/xml-dtd"}, + DotAll: true, + }, + dtdRules, +)) + +func dtdRules() Rules { + return Rules{ + "root": { + Include("common"), + {`(\s]+)`, ByGroups(Keyword, Text, NameTag), nil}, + {`PUBLIC|SYSTEM`, KeywordConstant, nil}, + {`[\[\]>]`, Keyword, nil}, + }, + "common": { + {`\s+`, Text, nil}, + {`(%|&)[^;]*;`, NameEntity, nil}, + {``, Comment, Pop(1)}, + {`-`, Comment, nil}, + }, + "element": { + Include("common"), + {`EMPTY|ANY|#PCDATA`, KeywordConstant, nil}, + {`[^>\s|()?+*,]+`, NameTag, nil}, + {`>`, Keyword, Pop(1)}, + }, + "attlist": { + Include("common"), + {`CDATA|IDREFS|IDREF|ID|NMTOKENS|NMTOKEN|ENTITIES|ENTITY|NOTATION`, KeywordConstant, nil}, + {`#REQUIRED|#IMPLIED|#FIXED`, KeywordConstant, nil}, + {`xml:space|xml:lang`, KeywordReserved, nil}, + {`[^>\s|()?+*,]+`, NameAttribute, nil}, + {`>`, Keyword, Pop(1)}, + }, + "entity": { + Include("common"), + {`SYSTEM|PUBLIC|NDATA`, KeywordConstant, nil}, + {`[^>\s|()?+*,]+`, NameEntity, nil}, + {`>`, Keyword, Pop(1)}, + }, + "notation": { + Include("common"), + {`SYSTEM|PUBLIC`, KeywordConstant, nil}, + {`[^>\s|()?+*,]+`, NameAttribute, nil}, + {`>`, Keyword, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/d/dylan.go b/vendor/github.com/alecthomas/chroma/lexers/d/dylan.go new file mode 100644 index 000000000..feda74870 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/d/dylan.go @@ -0,0 +1,76 @@ +package d + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Dylan lexer. +var Dylan = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Dylan", + Aliases: []string{"dylan"}, + Filenames: []string{"*.dylan", "*.dyl", "*.intr"}, + MimeTypes: []string{"text/x-dylan"}, + CaseInsensitive: true, + }, + func() Rules { + return Rules{ + "root": { + {`\s+`, Whitespace, nil}, + {`//.*?\n`, CommentSingle, nil}, + {`([a-z0-9-]+:)([ \t]*)(.*(?:\n[ \t].+)*)`, ByGroups(NameAttribute, Whitespace, LiteralString), nil}, + Default(Push("code")), + }, + "code": { + {`\s+`, Whitespace, nil}, + {`//.*?\n`, CommentSingle, nil}, + {`/\*`, CommentMultiline, Push("comment")}, + {`"`, LiteralString, Push("string")}, + {`'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'`, LiteralStringChar, nil}, + {`#b[01]+`, LiteralNumberBin, nil}, + {`#o[0-7]+`, LiteralNumberOct, nil}, + {`[-+]?(\d*\.\d+([ed][-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)`, LiteralNumberFloat, nil}, + {`[-+]?\d+`, LiteralNumberInteger, nil}, + {`#x[0-9a-f]+`, LiteralNumberHex, nil}, + + {`(\?\\?)([\w!&*<>|^$%@+~?/=-]+)(:)(token|name|variable|expression|body|case-body|\*)`, + ByGroups(Operator, NameVariable, Operator, NameBuiltin), nil}, + {`(\?)(:)(token|name|variable|expression|body|case-body|\*)`, + ByGroups(Operator, Operator, NameVariable), nil}, + {`(\?\\?)([\w!&*<>|^$%@+~?/=-]+)`, ByGroups(Operator, NameVariable), nil}, + + {`(=>|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],.;])`, Punctuation, nil}, + {`:=`, Operator, nil}, + {`#[tf]`, Literal, nil}, + {`#"`, LiteralStringSymbol, Push("symbol")}, + {`#[a-z0-9-]+`, Keyword, nil}, + {`#(all-keys|include|key|next|rest)`, Keyword, nil}, + {`[\w!&*<>|^$%@+~?/=-]+:`, KeywordConstant, nil}, + {`<[\w!&*<>|^$%@+~?/=-]+>`, NameClass, nil}, + {`\*[\w!&*<>|^$%@+~?/=-]+\*`, NameVariableGlobal, nil}, + {`\$[\w!&*<>|^$%@+~?/=-]+`, NameConstant, nil}, + {`(let|method|function)([ \t]+)([\w!&*<>|^$%@+~?/=-]+)`, 
ByGroups(NameBuiltin, Whitespace, NameVariable), nil}, + {`(error|signal|return|break)`, NameException, nil}, + {`(\\?)([\w!&*<>|^$%@+~?/=-]+)`, ByGroups(Operator, Name), nil}, + }, + "comment": { + {`[^*/]`, CommentMultiline, nil}, + {`/\*`, CommentMultiline, Push()}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[*/]`, CommentMultiline, nil}, + }, + "symbol": { + {`"`, LiteralStringSymbol, Pop(1)}, + {`[^\\"]+`, LiteralStringSymbol, nil}, + }, + "string": { + {`"`, LiteralString, Pop(1)}, + {`\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})`, LiteralStringEscape, nil}, + {`[^\\"\n]+`, LiteralString, nil}, + {`\\\n`, LiteralString, nil}, + {`\\`, LiteralString, nil}, + }, + } + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/e/ebnf.go b/vendor/github.com/alecthomas/chroma/lexers/e/ebnf.go new file mode 100644 index 000000000..5ccdd38ea --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/e/ebnf.go @@ -0,0 +1,55 @@ +package e + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Ebnf lexer. +var Ebnf = internal.Register(MustNewLazyLexer( + &Config{ + Name: "EBNF", + Aliases: []string{"ebnf"}, + Filenames: []string{"*.ebnf"}, + MimeTypes: []string{"text/x-ebnf"}, + }, + ebnfRules, +)) + +func ebnfRules() Rules { + return Rules{ + "root": { + Include("whitespace"), + Include("comment_start"), + Include("identifier"), + {`=`, Operator, Push("production")}, + }, + "production": { + Include("whitespace"), + Include("comment_start"), + Include("identifier"), + {`"[^"]*"`, LiteralStringDouble, nil}, + {`'[^']*'`, LiteralStringSingle, nil}, + {`(\?[^?]*\?)`, NameEntity, nil}, + {`[\[\]{}(),|]`, Punctuation, nil}, + {`-`, Operator, nil}, + {`;`, Punctuation, Pop(1)}, + {`\.`, Punctuation, Pop(1)}, + }, + "whitespace": { + {`\s+`, Text, nil}, + }, + "comment_start": { + {`\(\*`, CommentMultiline, Push("comment")}, + }, + "comment": { + {`[^*)]`, CommentMultiline, nil}, + Include("comment_start"), + {`\*\)`, CommentMultiline, Pop(1)}, + {`[*)]`, CommentMultiline, nil}, + }, + "identifier": { + {`([a-zA-Z][\w \-]*)`, Keyword, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/e/elixir.go b/vendor/github.com/alecthomas/chroma/lexers/e/elixir.go new file mode 100644 index 000000000..f283f846b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/e/elixir.go @@ -0,0 +1,281 @@ +package e + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Elixir lexer. 
+var Elixir = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Elixir", + Aliases: []string{"elixir", "ex", "exs"}, + Filenames: []string{"*.ex", "*.exs"}, + MimeTypes: []string{"text/x-elixir"}, + }, + elixirRules, +)) + +func elixirRules() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`#.*$`, CommentSingle, nil}, + {`(\?)(\\x\{)([\da-fA-F]+)(\})`, ByGroups(LiteralStringChar, LiteralStringEscape, LiteralNumberHex, LiteralStringEscape), nil}, + {`(\?)(\\x[\da-fA-F]{1,2})`, ByGroups(LiteralStringChar, LiteralStringEscape), nil}, + {`(\?)(\\[abdefnrstv])`, ByGroups(LiteralStringChar, LiteralStringEscape), nil}, + {`\?\\?.`, LiteralStringChar, nil}, + {`:::`, LiteralStringSymbol, nil}, + {`::`, Operator, nil}, + {`:(?:\.\.\.|<<>>|%\{\}|%|\{\})`, LiteralStringSymbol, nil}, + {`:(?:(?:\.\.\.|[a-z_]\w*[!?]?)|[A-Z]\w*(?:\.[A-Z]\w*)*|(?:\<\<\<|\>\>\>|\|\|\||\&\&\&|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\>\>|\<\~\>|\|\~\>|\<\|\>|\=\=|\!\=|\<\=|\>\=|\&\&|\|\||\<\>|\+\+|\-\-|\|\>|\=\~|\-\>|\<\-|\||\.|\=|\~\>|\<\~|\<|\>|\+|\-|\*|\/|\!|\^|\&))`, LiteralStringSymbol, nil}, + {`:"`, LiteralStringSymbol, Push("string_double_atom")}, + {`:'`, LiteralStringSymbol, Push("string_single_atom")}, + {`((?:\.\.\.|<<>>|%\{\}|%|\{\})|(?:(?:\.\.\.|[a-z_]\w*[!?]?)|[A-Z]\w*(?:\.[A-Z]\w*)*|(?:\<\<\<|\>\>\>|\|\|\||\&\&\&|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\>\>|\<\~\>|\|\~\>|\<\|\>|\=\=|\!\=|\<\=|\>\=|\&\&|\|\||\<\>|\+\+|\-\-|\|\>|\=\~|\-\>|\<\-|\||\.|\=|\~\>|\<\~|\<|\>|\+|\-|\*|\/|\!|\^|\&)))(:)(?=\s|\n)`, ByGroups(LiteralStringSymbol, Punctuation), nil}, + {`(fn|do|end|after|else|rescue|catch)\b`, Keyword, nil}, + {`(not|and|or|when|in)\b`, OperatorWord, nil}, + {`(case|cond|for|if|unless|try|receive|raise|quote|unquote|unquote_splicing|throw|super|while)\b`, Keyword, nil}, + {`(def|defp|defmodule|defprotocol|defmacro|defmacrop|defdelegate|defexception|defstruct|defimpl|defcallback)\b`, KeywordDeclaration, nil}, + {`(import|require|use|alias)\b`, KeywordNamespace, nil}, + {`(nil|true|false)\b`, NameConstant, nil}, + {`(_|__MODULE__|__DIR__|__ENV__|__CALLER__)\b`, NamePseudo, nil}, + {`@(?:\.\.\.|[a-z_]\w*[!?]?)`, NameAttribute, nil}, + {`(?:\.\.\.|[a-z_]\w*[!?]?)`, Name, nil}, + {`(%?)([A-Z]\w*(?:\.[A-Z]\w*)*)`, ByGroups(Punctuation, NameClass), nil}, + {`\<\<\<|\>\>\>|\|\|\||\&\&\&|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\>\>|\<\~\>|\|\~\>|\<\|\>`, Operator, nil}, + {`\=\=|\!\=|\<\=|\>\=|\&\&|\|\||\<\>|\+\+|\-\-|\|\>|\=\~|\-\>|\<\-|\||\.|\=|\~\>|\<\~`, Operator, nil}, + {`\\\\|\<\<|\>\>|\=\>|\(|\)|\:|\;|\,|\[|\]`, Punctuation, nil}, + {`&\d`, NameEntity, nil}, + {`\<|\>|\+|\-|\*|\/|\!|\^|\&`, Operator, nil}, + {`0b[01](_?[01])*`, LiteralNumberBin, nil}, + {`0o[0-7](_?[0-7])*`, LiteralNumberOct, nil}, + {`0x[\da-fA-F](_?[\dA-Fa-f])*`, LiteralNumberHex, nil}, + {`\d(_?\d)*\.\d(_?\d)*([eE][-+]?\d(_?\d)*)?`, LiteralNumberFloat, nil}, + {`\d(_?\d)*`, LiteralNumberInteger, nil}, + {`"""\s*`, LiteralStringHeredoc, Push("heredoc_double")}, + {`'''\s*$`, LiteralStringHeredoc, Push("heredoc_single")}, + {`"`, LiteralStringDouble, Push("string_double")}, + {`'`, LiteralStringSingle, Push("string_single")}, + Include("sigils"), + {`%\{`, Punctuation, Push("map_key")}, + {`\{`, Punctuation, Push("tuple")}, + }, + "heredoc_double": { + {`^\s*"""`, LiteralStringHeredoc, Pop(1)}, + Include("heredoc_interpol"), + }, + "heredoc_single": { + {`^\s*'''`, LiteralStringHeredoc, Pop(1)}, + Include("heredoc_interpol"), + }, + "heredoc_interpol": { + {`[^#\\\n]+`, LiteralStringHeredoc, nil}, + Include("escapes"), + {`\\.`, LiteralStringHeredoc, 
nil}, + {`\n+`, LiteralStringHeredoc, nil}, + Include("interpol"), + }, + "heredoc_no_interpol": { + {`[^\\\n]+`, LiteralStringHeredoc, nil}, + {`\\.`, LiteralStringHeredoc, nil}, + {`\n+`, LiteralStringHeredoc, nil}, + }, + "escapes": { + {`(\\x\{)([\da-fA-F]+)(\})`, ByGroups(LiteralStringEscape, LiteralNumberHex, LiteralStringEscape), nil}, + {`(\\x[\da-fA-F]{1,2})`, LiteralStringEscape, nil}, + {`(\\[abdefnrstv])`, LiteralStringEscape, nil}, + }, + "interpol": { + {`#\{`, LiteralStringInterpol, Push("interpol_string")}, + }, + "interpol_string": { + {`\}`, LiteralStringInterpol, Pop(1)}, + Include("root"), + }, + "map_key": { + Include("root"), + {`:`, Punctuation, Push("map_val")}, + {`=>`, Punctuation, Push("map_val")}, + {`\}`, Punctuation, Pop(1)}, + }, + "map_val": { + Include("root"), + {`,`, Punctuation, Pop(1)}, + {`(?=\})`, Punctuation, Pop(1)}, + }, + "tuple": { + Include("root"), + {`\}`, Punctuation, Pop(1)}, + }, + "string_double": { + {`[^#"\\]+`, LiteralStringDouble, nil}, + Include("escapes"), + {`\\.`, LiteralStringDouble, nil}, + {`(")`, ByGroups(LiteralStringDouble), Pop(1)}, + Include("interpol"), + }, + "string_single": { + {`[^#'\\]+`, LiteralStringSingle, nil}, + Include("escapes"), + {`\\.`, LiteralStringSingle, nil}, + {`(')`, ByGroups(LiteralStringSingle), Pop(1)}, + Include("interpol"), + }, + "string_double_atom": { + {`[^#"\\]+`, LiteralStringSymbol, nil}, + Include("escapes"), + {`\\.`, LiteralStringSymbol, nil}, + {`(")`, ByGroups(LiteralStringSymbol), Pop(1)}, + Include("interpol"), + }, + "string_single_atom": { + {`[^#'\\]+`, LiteralStringSymbol, nil}, + Include("escapes"), + {`\\.`, LiteralStringSymbol, nil}, + {`(')`, ByGroups(LiteralStringSymbol), Pop(1)}, + Include("interpol"), + }, + "sigils": { + {`(~[a-z])(""")`, ByGroups(LiteralStringOther, LiteralStringHeredoc), Push("triquot-end", "triquot-intp")}, + {`(~[A-Z])(""")`, ByGroups(LiteralStringOther, LiteralStringHeredoc), Push("triquot-end", "triquot-no-intp")}, + {`(~[a-z])(''')`, ByGroups(LiteralStringOther, LiteralStringHeredoc), Push("triapos-end", "triapos-intp")}, + {`(~[A-Z])(''')`, ByGroups(LiteralStringOther, LiteralStringHeredoc), Push("triapos-end", "triapos-no-intp")}, + {`~[a-z]\{`, LiteralStringOther, Push("cb-intp")}, + {`~[A-Z]\{`, LiteralStringOther, Push("cb-no-intp")}, + {`~[a-z]\[`, LiteralStringOther, Push("sb-intp")}, + {`~[A-Z]\[`, LiteralStringOther, Push("sb-no-intp")}, + {`~[a-z]\(`, LiteralStringOther, Push("pa-intp")}, + {`~[A-Z]\(`, LiteralStringOther, Push("pa-no-intp")}, + {`~[a-z]<`, LiteralStringOther, Push("ab-intp")}, + {`~[A-Z]<`, LiteralStringOther, Push("ab-no-intp")}, + {`~[a-z]/`, LiteralStringOther, Push("slas-intp")}, + {`~[A-Z]/`, LiteralStringOther, Push("slas-no-intp")}, + {`~[a-z]\|`, LiteralStringOther, Push("pipe-intp")}, + {`~[A-Z]\|`, LiteralStringOther, Push("pipe-no-intp")}, + {`~[a-z]"`, LiteralStringOther, Push("quot-intp")}, + {`~[A-Z]"`, LiteralStringOther, Push("quot-no-intp")}, + {`~[a-z]'`, LiteralStringOther, Push("apos-intp")}, + {`~[A-Z]'`, LiteralStringOther, Push("apos-no-intp")}, + }, + "triquot-end": { + {`[a-zA-Z]+`, LiteralStringOther, Pop(1)}, + Default(Pop(1)), + }, + "triquot-intp": { + {`^\s*"""`, LiteralStringHeredoc, Pop(1)}, + Include("heredoc_interpol"), + }, + "triquot-no-intp": { + {`^\s*"""`, LiteralStringHeredoc, Pop(1)}, + Include("heredoc_no_interpol"), + }, + "triapos-end": { + {`[a-zA-Z]+`, LiteralStringOther, Pop(1)}, + Default(Pop(1)), + }, + "triapos-intp": { + {`^\s*'''`, LiteralStringHeredoc, Pop(1)}, + 
Include("heredoc_interpol"), + }, + "triapos-no-intp": { + {`^\s*'''`, LiteralStringHeredoc, Pop(1)}, + Include("heredoc_no_interpol"), + }, + "cb-intp": { + {`[^#\}\\]+`, LiteralStringOther, nil}, + Include("escapes"), + {`\\.`, LiteralStringOther, nil}, + {`\}[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + Include("interpol"), + }, + "cb-no-intp": { + {`[^\}\\]+`, LiteralStringOther, nil}, + {`\\.`, LiteralStringOther, nil}, + {`\}[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + }, + "sb-intp": { + {`[^#\]\\]+`, LiteralStringOther, nil}, + Include("escapes"), + {`\\.`, LiteralStringOther, nil}, + {`\][a-zA-Z]*`, LiteralStringOther, Pop(1)}, + Include("interpol"), + }, + "sb-no-intp": { + {`[^\]\\]+`, LiteralStringOther, nil}, + {`\\.`, LiteralStringOther, nil}, + {`\][a-zA-Z]*`, LiteralStringOther, Pop(1)}, + }, + "pa-intp": { + {`[^#\)\\]+`, LiteralStringOther, nil}, + Include("escapes"), + {`\\.`, LiteralStringOther, nil}, + {`\)[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + Include("interpol"), + }, + "pa-no-intp": { + {`[^\)\\]+`, LiteralStringOther, nil}, + {`\\.`, LiteralStringOther, nil}, + {`\)[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + }, + "ab-intp": { + {`[^#>\\]+`, LiteralStringOther, nil}, + Include("escapes"), + {`\\.`, LiteralStringOther, nil}, + {`>[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + Include("interpol"), + }, + "ab-no-intp": { + {`[^>\\]+`, LiteralStringOther, nil}, + {`\\.`, LiteralStringOther, nil}, + {`>[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + }, + "slas-intp": { + {`[^#/\\]+`, LiteralStringOther, nil}, + Include("escapes"), + {`\\.`, LiteralStringOther, nil}, + {`/[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + Include("interpol"), + }, + "slas-no-intp": { + {`[^/\\]+`, LiteralStringOther, nil}, + {`\\.`, LiteralStringOther, nil}, + {`/[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + }, + "pipe-intp": { + {`[^#\|\\]+`, LiteralStringOther, nil}, + Include("escapes"), + {`\\.`, LiteralStringOther, nil}, + {`\|[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + Include("interpol"), + }, + "pipe-no-intp": { + {`[^\|\\]+`, LiteralStringOther, nil}, + {`\\.`, LiteralStringOther, nil}, + {`\|[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + }, + "quot-intp": { + {`[^#"\\]+`, LiteralStringOther, nil}, + Include("escapes"), + {`\\.`, LiteralStringOther, nil}, + {`"[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + Include("interpol"), + }, + "quot-no-intp": { + {`[^"\\]+`, LiteralStringOther, nil}, + {`\\.`, LiteralStringOther, nil}, + {`"[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + }, + "apos-intp": { + {`[^#'\\]+`, LiteralStringOther, nil}, + Include("escapes"), + {`\\.`, LiteralStringOther, nil}, + {`'[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + Include("interpol"), + }, + "apos-no-intp": { + {`[^'\\]+`, LiteralStringOther, nil}, + {`\\.`, LiteralStringOther, nil}, + {`'[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/e/elm.go b/vendor/github.com/alecthomas/chroma/lexers/e/elm.go new file mode 100644 index 000000000..0fb668953 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/e/elm.go @@ -0,0 +1,63 @@ +package e + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Elm lexer. 
+var Elm = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Elm", + Aliases: []string{"elm"}, + Filenames: []string{"*.elm"}, + MimeTypes: []string{"text/x-elm"}, + }, + elmRules, +)) + +func elmRules() Rules { + return Rules{ + "root": { + {`\{-`, CommentMultiline, Push("comment")}, + {`--.*`, CommentSingle, nil}, + {`\s+`, Text, nil}, + {`"`, LiteralString, Push("doublequote")}, + {`^\s*module\s*`, KeywordNamespace, Push("imports")}, + {`^\s*import\s*`, KeywordNamespace, Push("imports")}, + {`\[glsl\|.*`, NameEntity, Push("shader")}, + {Words(``, `\b`, `alias`, `as`, `case`, `else`, `if`, `import`, `in`, `let`, `module`, `of`, `port`, `then`, `type`, `where`), KeywordReserved, nil}, + {`[A-Z]\w*`, KeywordType, nil}, + {`^main `, KeywordReserved, nil}, + {Words(`\(`, `\)`, `~`, `||`, `|>`, `|`, "`", `^`, `\`, `'`, `>>`, `>=`, `>`, `==`, `=`, `<~`, `<|`, `<=`, `<<`, `<-`, `<`, `::`, `:`, `/=`, `//`, `/`, `..`, `.`, `->`, `-`, `++`, `+`, `*`, `&&`, `%`), NameFunction, nil}, + {Words(``, ``, `~`, `||`, `|>`, `|`, "`", `^`, `\`, `'`, `>>`, `>=`, `>`, `==`, `=`, `<~`, `<|`, `<=`, `<<`, `<-`, `<`, `::`, `:`, `/=`, `//`, `/`, `..`, `.`, `->`, `-`, `++`, `+`, `*`, `&&`, `%`), NameFunction, nil}, + Include("numbers"), + {`[a-z_][a-zA-Z_\']*`, NameVariable, nil}, + {`[,()\[\]{}]`, Punctuation, nil}, + }, + "comment": { + {`-(?!\})`, CommentMultiline, nil}, + {`\{-`, CommentMultiline, Push("comment")}, + {`[^-}]`, CommentMultiline, nil}, + {`-\}`, CommentMultiline, Pop(1)}, + }, + "doublequote": { + {`\\u[0-9a-fA-F]{4}`, LiteralStringEscape, nil}, + {`\\[nrfvb\\"]`, LiteralStringEscape, nil}, + {`[^"]`, LiteralString, nil}, + {`"`, LiteralString, Pop(1)}, + }, + "imports": { + {`\w+(\.\w+)*`, NameClass, Pop(1)}, + }, + "numbers": { + {`_?\d+\.(?=\d+)`, LiteralNumberFloat, nil}, + {`_?\d+`, LiteralNumberInteger, nil}, + }, + "shader": { + {`\|(?!\])`, NameEntity, nil}, + {`\|\]`, NameEntity, Pop(1)}, + {`.*\n`, NameEntity, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/e/emacs.go b/vendor/github.com/alecthomas/chroma/lexers/e/emacs.go new file mode 100644 index 000000000..51c49101d --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/e/emacs.go @@ -0,0 +1,586 @@ +package e + +import ( + . 
"github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +var ( + emacsMacros = []string{ + "atomic-change-group", "case", "block", "cl-block", "cl-callf", "cl-callf2", + "cl-case", "cl-decf", "cl-declaim", "cl-declare", + "cl-define-compiler-macro", "cl-defmacro", "cl-defstruct", + "cl-defsubst", "cl-deftype", "cl-defun", "cl-destructuring-bind", + "cl-do", "cl-do*", "cl-do-all-symbols", "cl-do-symbols", "cl-dolist", + "cl-dotimes", "cl-ecase", "cl-etypecase", "eval-when", "cl-eval-when", "cl-flet", + "cl-flet*", "cl-function", "cl-incf", "cl-labels", "cl-letf", + "cl-letf*", "cl-load-time-value", "cl-locally", "cl-loop", + "cl-macrolet", "cl-multiple-value-bind", "cl-multiple-value-setq", + "cl-progv", "cl-psetf", "cl-psetq", "cl-pushnew", "cl-remf", + "cl-return", "cl-return-from", "cl-rotatef", "cl-shiftf", + "cl-symbol-macrolet", "cl-tagbody", "cl-the", "cl-typecase", + "combine-after-change-calls", "condition-case-unless-debug", "decf", + "declaim", "declare", "declare-function", "def-edebug-spec", + "defadvice", "defclass", "defcustom", "defface", "defgeneric", + "defgroup", "define-advice", "define-alternatives", + "define-compiler-macro", "define-derived-mode", "define-generic-mode", + "define-global-minor-mode", "define-globalized-minor-mode", + "define-minor-mode", "define-modify-macro", + "define-obsolete-face-alias", "define-obsolete-function-alias", + "define-obsolete-variable-alias", "define-setf-expander", + "define-skeleton", "defmacro", "defmethod", "defsetf", "defstruct", + "defsubst", "deftheme", "deftype", "defun", "defvar-local", + "delay-mode-hooks", "destructuring-bind", "do", "do*", + "do-all-symbols", "do-symbols", "dolist", "dont-compile", "dotimes", + "dotimes-with-progress-reporter", "ecase", "ert-deftest", "etypecase", + "eval-and-compile", "eval-when-compile", "flet", "ignore-errors", + "incf", "labels", "lambda", "letrec", "lexical-let", "lexical-let*", + "loop", "multiple-value-bind", "multiple-value-setq", "noreturn", + "oref", "oref-default", "oset", "oset-default", "pcase", + "pcase-defmacro", "pcase-dolist", "pcase-exhaustive", "pcase-let", + "pcase-let*", "pop", "psetf", "psetq", "push", "pushnew", "remf", + "return", "rotatef", "rx", "save-match-data", "save-selected-window", + "save-window-excursion", "setf", "setq-local", "shiftf", + "track-mouse", "typecase", "unless", "use-package", "when", + "while-no-input", "with-case-table", "with-category-table", + "with-coding-priority", "with-current-buffer", "with-demoted-errors", + "with-eval-after-load", "with-file-modes", "with-local-quit", + "with-output-to-string", "with-output-to-temp-buffer", + "with-parsed-tramp-file-name", "with-selected-frame", + "with-selected-window", "with-silent-modifications", "with-slots", + "with-syntax-table", "with-temp-buffer", "with-temp-file", + "with-temp-message", "with-timeout", "with-tramp-connection-property", + "with-tramp-file-property", "with-tramp-progress-reporter", + "with-wrapper-hook", "load-time-value", "locally", "macrolet", "progv", + "return-from", + } + + emacsSpecialForms = []string{ + "and", "catch", "cond", "condition-case", "defconst", "defvar", + "function", "if", "interactive", "let", "let*", "or", "prog1", + "prog2", "progn", "quote", "save-current-buffer", "save-excursion", + "save-restriction", "setq", "setq-default", "subr-arity", + "unwind-protect", "while", + } + + emacsBuiltinFunction = []string{ + "%", "*", "+", "-", "/", "/=", "1+", "1-", "<", "<=", "=", ">", ">=", + "Snarf-documentation", 
"abort-recursive-edit", "abs", + "accept-process-output", "access-file", "accessible-keymaps", "acos", + "active-minibuffer-window", "add-face-text-property", + "add-name-to-file", "add-text-properties", "all-completions", + "append", "apply", "apropos-internal", "aref", "arrayp", "aset", + "ash", "asin", "assoc", "assoc-string", "assq", "atan", "atom", + "autoload", "autoload-do-load", "backtrace", "backtrace--locals", + "backtrace-debug", "backtrace-eval", "backtrace-frame", + "backward-char", "backward-prefix-chars", "barf-if-buffer-read-only", + "base64-decode-region", "base64-decode-string", + "base64-encode-region", "base64-encode-string", "beginning-of-line", + "bidi-find-overridden-directionality", "bidi-resolved-levels", + "bitmap-spec-p", "bobp", "bolp", "bool-vector", + "bool-vector-count-consecutive", "bool-vector-count-population", + "bool-vector-exclusive-or", "bool-vector-intersection", + "bool-vector-not", "bool-vector-p", "bool-vector-set-difference", + "bool-vector-subsetp", "bool-vector-union", "boundp", + "buffer-base-buffer", "buffer-chars-modified-tick", + "buffer-enable-undo", "buffer-file-name", "buffer-has-markers-at", + "buffer-list", "buffer-live-p", "buffer-local-value", + "buffer-local-variables", "buffer-modified-p", "buffer-modified-tick", + "buffer-name", "buffer-size", "buffer-string", "buffer-substring", + "buffer-substring-no-properties", "buffer-swap-text", "bufferp", + "bury-buffer-internal", "byte-code", "byte-code-function-p", + "byte-to-position", "byte-to-string", "byteorder", + "call-interactively", "call-last-kbd-macro", "call-process", + "call-process-region", "cancel-kbd-macro-events", "capitalize", + "capitalize-region", "capitalize-word", "car", "car-less-than-car", + "car-safe", "case-table-p", "category-docstring", + "category-set-mnemonics", "category-table", "category-table-p", + "ccl-execute", "ccl-execute-on-string", "ccl-program-p", "cdr", + "cdr-safe", "ceiling", "char-after", "char-before", + "char-category-set", "char-charset", "char-equal", "char-or-string-p", + "char-resolve-modifiers", "char-syntax", "char-table-extra-slot", + "char-table-p", "char-table-parent", "char-table-range", + "char-table-subtype", "char-to-string", "char-width", "characterp", + "charset-after", "charset-id-internal", "charset-plist", + "charset-priority-list", "charsetp", "check-coding-system", + "check-coding-systems-region", "clear-buffer-auto-save-failure", + "clear-charset-maps", "clear-face-cache", "clear-font-cache", + "clear-image-cache", "clear-string", "clear-this-command-keys", + "close-font", "clrhash", "coding-system-aliases", + "coding-system-base", "coding-system-eol-type", "coding-system-p", + "coding-system-plist", "coding-system-priority-list", + "coding-system-put", "color-distance", "color-gray-p", + "color-supported-p", "combine-after-change-execute", + "command-error-default-function", "command-remapping", "commandp", + "compare-buffer-substrings", "compare-strings", + "compare-window-configurations", "completing-read", + "compose-region-internal", "compose-string-internal", + "composition-get-gstring", "compute-motion", "concat", "cons", + "consp", "constrain-to-field", "continue-process", + "controlling-tty-p", "coordinates-in-window-p", "copy-alist", + "copy-category-table", "copy-file", "copy-hash-table", "copy-keymap", + "copy-marker", "copy-sequence", "copy-syntax-table", "copysign", + "cos", "current-active-maps", "current-bidi-paragraph-direction", + "current-buffer", "current-case-table", "current-column", + 
"current-global-map", "current-idle-time", "current-indentation", + "current-input-mode", "current-local-map", "current-message", + "current-minor-mode-maps", "current-time", "current-time-string", + "current-time-zone", "current-window-configuration", + "cygwin-convert-file-name-from-windows", + "cygwin-convert-file-name-to-windows", "daemon-initialized", + "daemonp", "dbus--init-bus", "dbus-get-unique-name", + "dbus-message-internal", "debug-timer-check", "declare-equiv-charset", + "decode-big5-char", "decode-char", "decode-coding-region", + "decode-coding-string", "decode-sjis-char", "decode-time", + "default-boundp", "default-file-modes", "default-printer-name", + "default-toplevel-value", "default-value", "define-category", + "define-charset-alias", "define-charset-internal", + "define-coding-system-alias", "define-coding-system-internal", + "define-fringe-bitmap", "define-hash-table-test", "define-key", + "define-prefix-command", "delete", + "delete-all-overlays", "delete-and-extract-region", "delete-char", + "delete-directory-internal", "delete-field", "delete-file", + "delete-frame", "delete-other-windows-internal", "delete-overlay", + "delete-process", "delete-region", "delete-terminal", + "delete-window-internal", "delq", "describe-buffer-bindings", + "describe-vector", "destroy-fringe-bitmap", "detect-coding-region", + "detect-coding-string", "ding", "directory-file-name", + "directory-files", "directory-files-and-attributes", "discard-input", + "display-supports-face-attributes-p", "do-auto-save", "documentation", + "documentation-property", "downcase", "downcase-region", + "downcase-word", "draw-string", "dump-colors", "dump-emacs", + "dump-face", "dump-frame-glyph-matrix", "dump-glyph-matrix", + "dump-glyph-row", "dump-redisplay-history", "dump-tool-bar-row", + "elt", "emacs-pid", "encode-big5-char", "encode-char", + "encode-coding-region", "encode-coding-string", "encode-sjis-char", + "encode-time", "end-kbd-macro", "end-of-line", "eobp", "eolp", "eq", + "eql", "equal", "equal-including-properties", "erase-buffer", + "error-message-string", "eval", "eval-buffer", "eval-region", + "event-convert-list", "execute-kbd-macro", "exit-recursive-edit", + "exp", "expand-file-name", "expt", "external-debugging-output", + "face-attribute-relative-p", "face-attributes-as-vector", "face-font", + "fboundp", "fceiling", "fetch-bytecode", "ffloor", + "field-beginning", "field-end", "field-string", + "field-string-no-properties", "file-accessible-directory-p", + "file-acl", "file-attributes", "file-attributes-lessp", + "file-directory-p", "file-executable-p", "file-exists-p", + "file-locked-p", "file-modes", "file-name-absolute-p", + "file-name-all-completions", "file-name-as-directory", + "file-name-completion", "file-name-directory", + "file-name-nondirectory", "file-newer-than-file-p", "file-readable-p", + "file-regular-p", "file-selinux-context", "file-symlink-p", + "file-system-info", "file-system-info", "file-writable-p", + "fillarray", "find-charset-region", "find-charset-string", + "find-coding-systems-region-internal", "find-composition-internal", + "find-file-name-handler", "find-font", "find-operation-coding-system", + "float", "float-time", "floatp", "floor", "fmakunbound", + "following-char", "font-at", "font-drive-otf", "font-face-attributes", + "font-family-list", "font-get", "font-get-glyphs", + "font-get-system-font", "font-get-system-normal-font", "font-info", + "font-match-p", "font-otf-alternates", "font-put", + "font-shape-gstring", "font-spec", "font-variation-glyphs", 
+ "font-xlfd-name", "fontp", "fontset-font", "fontset-info", + "fontset-list", "fontset-list-all", "force-mode-line-update", + "force-window-update", "format", "format-mode-line", + "format-network-address", "format-time-string", "forward-char", + "forward-comment", "forward-line", "forward-word", + "frame-border-width", "frame-bottom-divider-width", + "frame-can-run-window-configuration-change-hook", "frame-char-height", + "frame-char-width", "frame-face-alist", "frame-first-window", + "frame-focus", "frame-font-cache", "frame-fringe-width", "frame-list", + "frame-live-p", "frame-or-buffer-changed-p", "frame-parameter", + "frame-parameters", "frame-pixel-height", "frame-pixel-width", + "frame-pointer-visible-p", "frame-right-divider-width", + "frame-root-window", "frame-scroll-bar-height", + "frame-scroll-bar-width", "frame-selected-window", "frame-terminal", + "frame-text-cols", "frame-text-height", "frame-text-lines", + "frame-text-width", "frame-total-cols", "frame-total-lines", + "frame-visible-p", "framep", "frexp", "fringe-bitmaps-at-pos", + "fround", "fset", "ftruncate", "funcall", "funcall-interactively", + "function-equal", "functionp", "gap-position", "gap-size", + "garbage-collect", "gc-status", "generate-new-buffer-name", "get", + "get-buffer", "get-buffer-create", "get-buffer-process", + "get-buffer-window", "get-byte", "get-char-property", + "get-char-property-and-overlay", "get-file-buffer", "get-file-char", + "get-internal-run-time", "get-load-suffixes", "get-pos-property", + "get-process", "get-screen-color", "get-text-property", + "get-unicode-property-internal", "get-unused-category", + "get-unused-iso-final-char", "getenv-internal", "gethash", + "gfile-add-watch", "gfile-rm-watch", "global-key-binding", + "gnutls-available-p", "gnutls-boot", "gnutls-bye", "gnutls-deinit", + "gnutls-error-fatalp", "gnutls-error-string", "gnutls-errorp", + "gnutls-get-initstage", "gnutls-peer-status", + "gnutls-peer-status-warning-describe", "goto-char", "gpm-mouse-start", + "gpm-mouse-stop", "group-gid", "group-real-gid", + "handle-save-session", "handle-switch-frame", "hash-table-count", + "hash-table-p", "hash-table-rehash-size", + "hash-table-rehash-threshold", "hash-table-size", "hash-table-test", + "hash-table-weakness", "iconify-frame", "identity", "image-flush", + "image-mask-p", "image-metadata", "image-size", "imagemagick-types", + "imagep", "indent-to", "indirect-function", "indirect-variable", + "init-image-library", "inotify-add-watch", "inotify-rm-watch", + "input-pending-p", "insert", "insert-and-inherit", + "insert-before-markers", "insert-before-markers-and-inherit", + "insert-buffer-substring", "insert-byte", "insert-char", + "insert-file-contents", "insert-startup-screen", "int86", + "integer-or-marker-p", "integerp", "interactive-form", "intern", + "intern-soft", "internal--track-mouse", "internal-char-font", + "internal-complete-buffer", "internal-copy-lisp-face", + "internal-default-process-filter", + "internal-default-process-sentinel", "internal-describe-syntax-value", + "internal-event-symbol-parse-modifiers", + "internal-face-x-get-resource", "internal-get-lisp-face-attribute", + "internal-lisp-face-attribute-values", "internal-lisp-face-empty-p", + "internal-lisp-face-equal-p", "internal-lisp-face-p", + "internal-make-lisp-face", "internal-make-var-non-special", + "internal-merge-in-global-face", + "internal-set-alternative-font-family-alist", + "internal-set-alternative-font-registry-alist", + "internal-set-font-selection-order", + 
"internal-set-lisp-face-attribute", + "internal-set-lisp-face-attribute-from-resource", + "internal-show-cursor", "internal-show-cursor-p", "interrupt-process", + "invisible-p", "invocation-directory", "invocation-name", "isnan", + "iso-charset", "key-binding", "key-description", + "keyboard-coding-system", "keymap-parent", "keymap-prompt", "keymapp", + "keywordp", "kill-all-local-variables", "kill-buffer", "kill-emacs", + "kill-local-variable", "kill-process", "last-nonminibuffer-frame", + "lax-plist-get", "lax-plist-put", "ldexp", "length", + "libxml-parse-html-region", "libxml-parse-xml-region", + "line-beginning-position", "line-end-position", "line-pixel-height", + "list", "list-fonts", "list-system-processes", "listp", "load", + "load-average", "local-key-binding", "local-variable-if-set-p", + "local-variable-p", "locale-info", "locate-file-internal", + "lock-buffer", "log", "logand", "logb", "logior", "lognot", "logxor", + "looking-at", "lookup-image", "lookup-image-map", "lookup-key", + "lower-frame", "lsh", "macroexpand", "make-bool-vector", + "make-byte-code", "make-category-set", "make-category-table", + "make-char", "make-char-table", "make-directory-internal", + "make-frame-invisible", "make-frame-visible", "make-hash-table", + "make-indirect-buffer", "make-keymap", "make-list", + "make-local-variable", "make-marker", "make-network-process", + "make-overlay", "make-serial-process", "make-sparse-keymap", + "make-string", "make-symbol", "make-symbolic-link", "make-temp-name", + "make-terminal-frame", "make-variable-buffer-local", + "make-variable-frame-local", "make-vector", "makunbound", + "map-char-table", "map-charset-chars", "map-keymap", + "map-keymap-internal", "mapatoms", "mapc", "mapcar", "mapconcat", + "maphash", "mark-marker", "marker-buffer", "marker-insertion-type", + "marker-position", "markerp", "match-beginning", "match-data", + "match-end", "matching-paren", "max", "max-char", "md5", "member", + "memory-info", "memory-limit", "memory-use-counts", "memq", "memql", + "menu-bar-menu-at-x-y", "menu-or-popup-active-p", + "menu-or-popup-active-p", "merge-face-attribute", "message", + "message-box", "message-or-box", "min", + "minibuffer-completion-contents", "minibuffer-contents", + "minibuffer-contents-no-properties", "minibuffer-depth", + "minibuffer-prompt", "minibuffer-prompt-end", + "minibuffer-selected-window", "minibuffer-window", "minibufferp", + "minor-mode-key-binding", "mod", "modify-category-entry", + "modify-frame-parameters", "modify-syntax-entry", + "mouse-pixel-position", "mouse-position", "move-overlay", + "move-point-visually", "move-to-column", "move-to-window-line", + "msdos-downcase-filename", "msdos-long-file-names", "msdos-memget", + "msdos-memput", "msdos-mouse-disable", "msdos-mouse-enable", + "msdos-mouse-init", "msdos-mouse-p", "msdos-remember-default-colors", + "msdos-set-keyboard", "msdos-set-mouse-buttons", + "multibyte-char-to-unibyte", "multibyte-string-p", "narrow-to-region", + "natnump", "nconc", "network-interface-info", + "network-interface-list", "new-fontset", "newline-cache-check", + "next-char-property-change", "next-frame", "next-overlay-change", + "next-property-change", "next-read-file-uses-dialog-p", + "next-single-char-property-change", "next-single-property-change", + "next-window", "nlistp", "nreverse", "nth", "nthcdr", "null", + "number-or-marker-p", "number-to-string", "numberp", + "open-dribble-file", "open-font", "open-termscript", + "optimize-char-table", "other-buffer", "other-window-for-scrolling", + 
"overlay-buffer", "overlay-end", "overlay-get", "overlay-lists", + "overlay-properties", "overlay-put", "overlay-recenter", + "overlay-start", "overlayp", "overlays-at", "overlays-in", + "parse-partial-sexp", "play-sound-internal", "plist-get", + "plist-member", "plist-put", "point", "point-marker", "point-max", + "point-max-marker", "point-min", "point-min-marker", + "pos-visible-in-window-p", "position-bytes", "posix-looking-at", + "posix-search-backward", "posix-search-forward", "posix-string-match", + "posn-at-point", "posn-at-x-y", "preceding-char", + "prefix-numeric-value", "previous-char-property-change", + "previous-frame", "previous-overlay-change", + "previous-property-change", "previous-single-char-property-change", + "previous-single-property-change", "previous-window", "prin1", + "prin1-to-string", "princ", "print", "process-attributes", + "process-buffer", "process-coding-system", "process-command", + "process-connection", "process-contact", "process-datagram-address", + "process-exit-status", "process-filter", "process-filter-multibyte-p", + "process-id", "process-inherit-coding-system-flag", "process-list", + "process-mark", "process-name", "process-plist", + "process-query-on-exit-flag", "process-running-child-p", + "process-send-eof", "process-send-region", "process-send-string", + "process-sentinel", "process-status", "process-tty-name", + "process-type", "processp", "profiler-cpu-log", + "profiler-cpu-running-p", "profiler-cpu-start", "profiler-cpu-stop", + "profiler-memory-log", "profiler-memory-running-p", + "profiler-memory-start", "profiler-memory-stop", "propertize", + "purecopy", "put", "put-text-property", + "put-unicode-property-internal", "puthash", "query-font", + "query-fontset", "quit-process", "raise-frame", "random", "rassoc", + "rassq", "re-search-backward", "re-search-forward", "read", + "read-buffer", "read-char", "read-char-exclusive", + "read-coding-system", "read-command", "read-event", + "read-from-minibuffer", "read-from-string", "read-function", + "read-key-sequence", "read-key-sequence-vector", + "read-no-blanks-input", "read-non-nil-coding-system", "read-string", + "read-variable", "recent-auto-save-p", "recent-doskeys", + "recent-keys", "recenter", "recursion-depth", "recursive-edit", + "redirect-debugging-output", "redirect-frame-focus", "redisplay", + "redraw-display", "redraw-frame", "regexp-quote", "region-beginning", + "region-end", "register-ccl-program", "register-code-conversion-map", + "remhash", "remove-list-of-text-properties", "remove-text-properties", + "rename-buffer", "rename-file", "replace-match", + "reset-this-command-lengths", "resize-mini-window-internal", + "restore-buffer-modified-p", "resume-tty", "reverse", "round", + "run-hook-with-args", "run-hook-with-args-until-failure", + "run-hook-with-args-until-success", "run-hook-wrapped", "run-hooks", + "run-window-configuration-change-hook", "run-window-scroll-functions", + "safe-length", "scan-lists", "scan-sexps", "scroll-down", + "scroll-left", "scroll-other-window", "scroll-right", "scroll-up", + "search-backward", "search-forward", "secure-hash", "select-frame", + "select-window", "selected-frame", "selected-window", + "self-insert-command", "send-string-to-terminal", "sequencep", + "serial-process-configure", "set", "set-buffer", + "set-buffer-auto-saved", "set-buffer-major-mode", + "set-buffer-modified-p", "set-buffer-multibyte", "set-case-table", + "set-category-table", "set-char-table-extra-slot", + "set-char-table-parent", "set-char-table-range", "set-charset-plist", 
+ "set-charset-priority", "set-coding-system-priority", + "set-cursor-size", "set-default", "set-default-file-modes", + "set-default-toplevel-value", "set-file-acl", "set-file-modes", + "set-file-selinux-context", "set-file-times", "set-fontset-font", + "set-frame-height", "set-frame-position", "set-frame-selected-window", + "set-frame-size", "set-frame-width", "set-fringe-bitmap-face", + "set-input-interrupt-mode", "set-input-meta-mode", "set-input-mode", + "set-keyboard-coding-system-internal", "set-keymap-parent", + "set-marker", "set-marker-insertion-type", "set-match-data", + "set-message-beep", "set-minibuffer-window", + "set-mouse-pixel-position", "set-mouse-position", + "set-network-process-option", "set-output-flow-control", + "set-process-buffer", "set-process-coding-system", + "set-process-datagram-address", "set-process-filter", + "set-process-filter-multibyte", + "set-process-inherit-coding-system-flag", "set-process-plist", + "set-process-query-on-exit-flag", "set-process-sentinel", + "set-process-window-size", "set-quit-char", + "set-safe-terminal-coding-system-internal", "set-screen-color", + "set-standard-case-table", "set-syntax-table", + "set-terminal-coding-system-internal", "set-terminal-local-value", + "set-terminal-parameter", "set-text-properties", "set-time-zone-rule", + "set-visited-file-modtime", "set-window-buffer", + "set-window-combination-limit", "set-window-configuration", + "set-window-dedicated-p", "set-window-display-table", + "set-window-fringes", "set-window-hscroll", "set-window-margins", + "set-window-new-normal", "set-window-new-pixel", + "set-window-new-total", "set-window-next-buffers", + "set-window-parameter", "set-window-point", "set-window-prev-buffers", + "set-window-redisplay-end-trigger", "set-window-scroll-bars", + "set-window-start", "set-window-vscroll", "setcar", "setcdr", + "setplist", "show-face-resources", "signal", "signal-process", "sin", + "single-key-description", "skip-chars-backward", "skip-chars-forward", + "skip-syntax-backward", "skip-syntax-forward", "sleep-for", "sort", + "sort-charsets", "special-variable-p", "split-char", + "split-window-internal", "sqrt", "standard-case-table", + "standard-category-table", "standard-syntax-table", "start-kbd-macro", + "start-process", "stop-process", "store-kbd-macro-event", "string", + "string-as-multibyte", "string-as-unibyte", "string-bytes", + "string-collate-equalp", "string-collate-lessp", "string-equal", + "string-lessp", "string-make-multibyte", "string-make-unibyte", + "string-match", "string-to-char", "string-to-multibyte", + "string-to-number", "string-to-syntax", "string-to-unibyte", + "string-width", "stringp", "subr-name", "subrp", + "subst-char-in-region", "substitute-command-keys", + "substitute-in-file-name", "substring", "substring-no-properties", + "suspend-emacs", "suspend-tty", "suspicious-object", "sxhash", + "symbol-function", "symbol-name", "symbol-plist", "symbol-value", + "symbolp", "syntax-table", "syntax-table-p", "system-groups", + "system-move-file-to-trash", "system-name", "system-users", "tan", + "terminal-coding-system", "terminal-list", "terminal-live-p", + "terminal-local-value", "terminal-name", "terminal-parameter", + "terminal-parameters", "terpri", "test-completion", + "text-char-description", "text-properties-at", "text-property-any", + "text-property-not-all", "this-command-keys", + "this-command-keys-vector", "this-single-command-keys", + "this-single-command-raw-keys", "time-add", "time-less-p", + "time-subtract", "tool-bar-get-system-style", 
"tool-bar-height", + "tool-bar-pixel-width", "top-level", "trace-redisplay", + "trace-to-stderr", "translate-region-internal", "transpose-regions", + "truncate", "try-completion", "tty-display-color-cells", + "tty-display-color-p", "tty-no-underline", + "tty-suppress-bold-inverse-default-colors", "tty-top-frame", + "tty-type", "type-of", "undo-boundary", "unencodable-char-position", + "unhandled-file-name-directory", "unibyte-char-to-multibyte", + "unibyte-string", "unicode-property-table-internal", "unify-charset", + "unintern", "unix-sync", "unlock-buffer", "upcase", "upcase-initials", + "upcase-initials-region", "upcase-region", "upcase-word", + "use-global-map", "use-local-map", "user-full-name", + "user-login-name", "user-real-login-name", "user-real-uid", + "user-uid", "variable-binding-locus", "vconcat", "vector", + "vector-or-char-table-p", "vectorp", "verify-visited-file-modtime", + "vertical-motion", "visible-frame-list", "visited-file-modtime", + "w16-get-clipboard-data", "w16-selection-exists-p", + "w16-set-clipboard-data", "w32-battery-status", + "w32-default-color-map", "w32-define-rgb-color", + "w32-display-monitor-attributes-list", "w32-frame-menu-bar-size", + "w32-frame-rect", "w32-get-clipboard-data", + "w32-get-codepage-charset", "w32-get-console-codepage", + "w32-get-console-output-codepage", "w32-get-current-locale-id", + "w32-get-default-locale-id", "w32-get-keyboard-layout", + "w32-get-locale-info", "w32-get-valid-codepages", + "w32-get-valid-keyboard-layouts", "w32-get-valid-locale-ids", + "w32-has-winsock", "w32-long-file-name", "w32-reconstruct-hot-key", + "w32-register-hot-key", "w32-registered-hot-keys", + "w32-selection-exists-p", "w32-send-sys-command", + "w32-set-clipboard-data", "w32-set-console-codepage", + "w32-set-console-output-codepage", "w32-set-current-locale", + "w32-set-keyboard-layout", "w32-set-process-priority", + "w32-shell-execute", "w32-short-file-name", "w32-toggle-lock-key", + "w32-unload-winsock", "w32-unregister-hot-key", "w32-window-exists-p", + "w32notify-add-watch", "w32notify-rm-watch", + "waiting-for-user-input-p", "where-is-internal", "widen", + "widget-apply", "widget-get", "widget-put", + "window-absolute-pixel-edges", "window-at", "window-body-height", + "window-body-width", "window-bottom-divider-width", "window-buffer", + "window-combination-limit", "window-configuration-frame", + "window-configuration-p", "window-dedicated-p", + "window-display-table", "window-edges", "window-end", "window-frame", + "window-fringes", "window-header-line-height", "window-hscroll", + "window-inside-absolute-pixel-edges", "window-inside-edges", + "window-inside-pixel-edges", "window-left-child", + "window-left-column", "window-line-height", "window-list", + "window-list-1", "window-live-p", "window-margins", + "window-minibuffer-p", "window-mode-line-height", "window-new-normal", + "window-new-pixel", "window-new-total", "window-next-buffers", + "window-next-sibling", "window-normal-size", "window-old-point", + "window-parameter", "window-parameters", "window-parent", + "window-pixel-edges", "window-pixel-height", "window-pixel-left", + "window-pixel-top", "window-pixel-width", "window-point", + "window-prev-buffers", "window-prev-sibling", + "window-redisplay-end-trigger", "window-resize-apply", + "window-resize-apply-total", "window-right-divider-width", + "window-scroll-bar-height", "window-scroll-bar-width", + "window-scroll-bars", "window-start", "window-system", + "window-text-height", "window-text-pixel-size", "window-text-width", + 
"window-top-child", "window-top-line", "window-total-height", + "window-total-width", "window-use-time", "window-valid-p", + "window-vscroll", "windowp", "write-char", "write-region", + "x-backspace-delete-keys-p", "x-change-window-property", + "x-change-window-property", "x-close-connection", + "x-close-connection", "x-create-frame", "x-create-frame", + "x-delete-window-property", "x-delete-window-property", + "x-disown-selection-internal", "x-display-backing-store", + "x-display-backing-store", "x-display-color-cells", + "x-display-color-cells", "x-display-grayscale-p", + "x-display-grayscale-p", "x-display-list", "x-display-list", + "x-display-mm-height", "x-display-mm-height", "x-display-mm-width", + "x-display-mm-width", "x-display-monitor-attributes-list", + "x-display-pixel-height", "x-display-pixel-height", + "x-display-pixel-width", "x-display-pixel-width", "x-display-planes", + "x-display-planes", "x-display-save-under", "x-display-save-under", + "x-display-screens", "x-display-screens", "x-display-visual-class", + "x-display-visual-class", "x-family-fonts", "x-file-dialog", + "x-file-dialog", "x-file-dialog", "x-focus-frame", "x-frame-geometry", + "x-frame-geometry", "x-get-atom-name", "x-get-resource", + "x-get-selection-internal", "x-hide-tip", "x-hide-tip", + "x-list-fonts", "x-load-color-file", "x-menu-bar-open-internal", + "x-menu-bar-open-internal", "x-open-connection", "x-open-connection", + "x-own-selection-internal", "x-parse-geometry", "x-popup-dialog", + "x-popup-menu", "x-register-dnd-atom", "x-select-font", + "x-select-font", "x-selection-exists-p", "x-selection-owner-p", + "x-send-client-message", "x-server-max-request-size", + "x-server-max-request-size", "x-server-vendor", "x-server-vendor", + "x-server-version", "x-server-version", "x-show-tip", "x-show-tip", + "x-synchronize", "x-synchronize", "x-uses-old-gtk-dialog", + "x-window-property", "x-window-property", "x-wm-set-size-hint", + "xw-color-defined-p", "xw-color-defined-p", "xw-color-values", + "xw-color-values", "xw-display-color-p", "xw-display-color-p", + "yes-or-no-p", "zlib-available-p", "zlib-decompress-region", + "forward-point", + } + + emacsBuiltinFunctionHighlighted = []string{ + "defvaralias", "provide", "require", + "with-no-warnings", "define-widget", "with-electric-help", + "throw", "defalias", "featurep", + } + + emacsLambdaListKeywords = []string{ + "&allow-other-keys", "&aux", "&body", "&environment", "&key", "&optional", + "&rest", "&whole", + } + + emacsErrorKeywords = []string{ + "cl-assert", "cl-check-type", "error", "signal", + "user-error", "warn", + } +) + +// EmacsLisp lexer. 
+var EmacsLisp = internal.Register(TypeRemappingLexer(MustNewLazyLexer( + &Config{ + Name: "EmacsLisp", + Aliases: []string{"emacs", "elisp", "emacs-lisp"}, + Filenames: []string{"*.el"}, + MimeTypes: []string{"text/x-elisp", "application/x-elisp"}, + }, + emacsLispRules, +), TypeMapping{ + {NameVariable, NameFunction, emacsBuiltinFunction}, + {NameVariable, NameBuiltin, emacsSpecialForms}, + {NameVariable, NameException, emacsErrorKeywords}, + {NameVariable, NameBuiltin, append(emacsBuiltinFunctionHighlighted, emacsMacros...)}, + {NameVariable, KeywordPseudo, emacsLambdaListKeywords}, +})) + +func emacsLispRules() Rules { + return Rules{ + "root": { + Default(Push("body")), + }, + "body": { + {`\s+`, Text, nil}, + {`;.*$`, CommentSingle, nil}, + {`"`, LiteralString, Push("string")}, + {`\?([^\\]|\\.)`, LiteralStringChar, nil}, + {`:((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)`, NameBuiltin, nil}, + {`::((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)`, LiteralStringSymbol, nil}, + {`'((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)`, LiteralStringSymbol, nil}, + {`'`, Operator, nil}, + {"`", Operator, nil}, + {"[-+]?\\d+\\.?(?=[ \"()\\]\\'\\n,;`])", LiteralNumberInteger, nil}, + {"[-+]?\\d+/\\d+(?=[ \"()\\]\\'\\n,;`])", LiteralNumber, nil}, + {"[-+]?(\\d*\\.\\d+([defls][-+]?\\d+)?|\\d+(\\.\\d*)?[defls][-+]?\\d+)(?=[ \"()\\]\\'\\n,;`])", LiteralNumberFloat, nil}, + {`\[|\]`, Punctuation, nil}, + {`#:((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)`, LiteralStringSymbol, nil}, + {`#\^\^?`, Operator, nil}, + {`#\'`, NameFunction, nil}, + {`#[bB][+-]?[01]+(/[01]+)?`, LiteralNumberBin, nil}, + {`#[oO][+-]?[0-7]+(/[0-7]+)?`, LiteralNumberOct, nil}, + {`#[xX][+-]?[0-9a-fA-F]+(/[0-9a-fA-F]+)?`, LiteralNumberHex, nil}, + {`#\d+r[+-]?[0-9a-zA-Z]+(/[0-9a-zA-Z]+)?`, LiteralNumber, nil}, + {`#\d+=`, Operator, nil}, + {`#\d+#`, Operator, nil}, + {`(,@|,|\.|:)`, Operator, nil}, + {"(t|nil)(?=[ \"()\\]\\'\\n,;`])", NameConstant, nil}, + {`\*((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)\*`, NameVariableGlobal, nil}, + {`((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)`, NameVariable, nil}, + {`#\(`, Operator, Push("body")}, + {`\(`, Punctuation, Push("body")}, + {`\)`, Punctuation, Pop(1)}, + }, + "string": { + {"[^\"\\\\`]+", LiteralString, nil}, + {"`((?:\\\\.|[\\w!$%&*+-/<=>?@^{}~|])(?:\\\\.|[\\w!$%&*+-/<=>?@^{}~|]|[#.:])*)\\'", LiteralStringSymbol, nil}, + {"`", LiteralString, nil}, + {`\\.`, LiteralString, nil}, + {`\\\n`, LiteralString, nil}, + {`"`, LiteralString, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/e/erlang.go b/vendor/github.com/alecthomas/chroma/lexers/e/erlang.go new file mode 100644 index 000000000..5f5c9caad --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/e/erlang.go @@ -0,0 +1,70 @@ +package e + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Erlang lexer. 
+var Erlang = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Erlang", + Aliases: []string{"erlang"}, + Filenames: []string{"*.erl", "*.hrl", "*.es", "*.escript"}, + MimeTypes: []string{"text/x-erlang"}, + }, + erlangRules, +)) + +func erlangRules() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`%.*\n`, Comment, nil}, + {Words(``, `\b`, `after`, `begin`, `case`, `catch`, `cond`, `end`, `fun`, `if`, `let`, `of`, `query`, `receive`, `try`, `when`), Keyword, nil}, + {Words(``, `\b`, `abs`, `append_element`, `apply`, `atom_to_list`, `binary_to_list`, `bitstring_to_list`, `binary_to_term`, `bit_size`, `bump_reductions`, `byte_size`, `cancel_timer`, `check_process_code`, `delete_module`, `demonitor`, `disconnect_node`, `display`, `element`, `erase`, `exit`, `float`, `float_to_list`, `fun_info`, `fun_to_list`, `function_exported`, `garbage_collect`, `get`, `get_keys`, `group_leader`, `hash`, `hd`, `integer_to_list`, `iolist_to_binary`, `iolist_size`, `is_atom`, `is_binary`, `is_bitstring`, `is_boolean`, `is_builtin`, `is_float`, `is_function`, `is_integer`, `is_list`, `is_number`, `is_pid`, `is_port`, `is_process_alive`, `is_record`, `is_reference`, `is_tuple`, `length`, `link`, `list_to_atom`, `list_to_binary`, `list_to_bitstring`, `list_to_existing_atom`, `list_to_float`, `list_to_integer`, `list_to_pid`, `list_to_tuple`, `load_module`, `localtime_to_universaltime`, `make_tuple`, `md5`, `md5_final`, `md5_update`, `memory`, `module_loaded`, `monitor`, `monitor_node`, `node`, `nodes`, `open_port`, `phash`, `phash2`, `pid_to_list`, `port_close`, `port_command`, `port_connect`, `port_control`, `port_call`, `port_info`, `port_to_list`, `process_display`, `process_flag`, `process_info`, `purge_module`, `put`, `read_timer`, `ref_to_list`, `register`, `resume_process`, `round`, `send`, `send_after`, `send_nosuspend`, `set_cookie`, `setelement`, `size`, `spawn`, `spawn_link`, `spawn_monitor`, `spawn_opt`, `split_binary`, `start_timer`, `statistics`, `suspend_process`, `system_flag`, `system_info`, `system_monitor`, `system_profile`, `term_to_binary`, `tl`, `trace`, `trace_delivered`, `trace_info`, `trace_pattern`, `trunc`, `tuple_size`, `tuple_to_list`, `universaltime_to_localtime`, `unlink`, `unregister`, `whereis`), NameBuiltin, nil}, + {Words(``, `\b`, `and`, `andalso`, `band`, `bnot`, `bor`, `bsl`, `bsr`, `bxor`, `div`, `not`, `or`, `orelse`, `rem`, `xor`), OperatorWord, nil}, + {`^-`, Punctuation, Push("directive")}, + {`(\+\+?|--?|\*|/|<|>|/=|=:=|=/=|=<|>=|==?|<-|!|\?)`, Operator, nil}, + {`"`, LiteralString, Push("string")}, + {`<<`, NameLabel, nil}, + {`>>`, NameLabel, nil}, + {`((?:[a-z]\w*|'[^\n']*[^\\]'))(:)`, ByGroups(NameNamespace, Punctuation), nil}, + {`(?:^|(?<=:))((?:[a-z]\w*|'[^\n']*[^\\]'))(\s*)(\()`, ByGroups(NameFunction, Text, Punctuation), nil}, + {`[+-]?(?:[2-9]|[12][0-9]|3[0-6])#[0-9a-zA-Z]+`, LiteralNumberInteger, nil}, + {`[+-]?\d+`, LiteralNumberInteger, nil}, + {`[+-]?\d+.\d+`, LiteralNumberFloat, nil}, + {`[]\[:_@\".{}()|;,]`, Punctuation, nil}, + {`(?:[A-Z_]\w*)`, NameVariable, nil}, + {`(?:[a-z]\w*|'[^\n']*[^\\]')`, Name, nil}, + {`\?(?:(?:[A-Z_]\w*)|(?:[a-z]\w*|'[^\n']*[^\\]'))`, NameConstant, nil}, + {`\$(?:(?:\\(?:[bdefnrstv\'"\\]|[0-7][0-7]?[0-7]?|(?:x[0-9a-fA-F]{2}|x\{[0-9a-fA-F]+\})|\^[a-zA-Z]))|\\[ %]|[^\\])`, LiteralStringChar, nil}, + {`#(?:[a-z]\w*|'[^\n']*[^\\]')(:?\.(?:[a-z]\w*|'[^\n']*[^\\]'))?`, NameLabel, nil}, + {`\A#!.+\n`, CommentHashbang, nil}, + {`#\{`, Punctuation, Push("map_key")}, + }, + "string": { + 
{`(?:\\(?:[bdefnrstv\'"\\]|[0-7][0-7]?[0-7]?|(?:x[0-9a-fA-F]{2}|x\{[0-9a-fA-F]+\})|\^[a-zA-Z]))`, LiteralStringEscape, nil}, + {`"`, LiteralString, Pop(1)}, + {`~[0-9.*]*[~#+BPWXb-ginpswx]`, LiteralStringInterpol, nil}, + {`[^"\\~]+`, LiteralString, nil}, + {`~`, LiteralString, nil}, + }, + "directive": { + {`(define)(\s*)(\()((?:(?:[A-Z_]\w*)|(?:[a-z]\w*|'[^\n']*[^\\]')))`, ByGroups(NameEntity, Text, Punctuation, NameConstant), Pop(1)}, + {`(record)(\s*)(\()((?:(?:[A-Z_]\w*)|(?:[a-z]\w*|'[^\n']*[^\\]')))`, ByGroups(NameEntity, Text, Punctuation, NameLabel), Pop(1)}, + {`(?:[a-z]\w*|'[^\n']*[^\\]')`, NameEntity, Pop(1)}, + }, + "map_key": { + Include("root"), + {`=>`, Punctuation, Push("map_val")}, + {`:=`, Punctuation, Push("map_val")}, + {`\}`, Punctuation, Pop(1)}, + }, + "map_val": { + Include("root"), + {`,`, Punctuation, Pop(1)}, + {`(?=\})`, Punctuation, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/f/factor.go b/vendor/github.com/alecthomas/chroma/lexers/f/factor.go new file mode 100644 index 000000000..d88beb231 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/f/factor.go @@ -0,0 +1,119 @@ +package f + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Factor lexer. +var Factor = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Factor", + Aliases: []string{"factor"}, + Filenames: []string{"*.factor"}, + MimeTypes: []string{"text/x-factor"}, + }, + factorRules, +)) + +func factorRules() Rules { + return Rules{ + "root": { + {`#!.*$`, CommentPreproc, nil}, + Default(Push("base")), + }, + "base": { + {`\s+`, Text, nil}, + {`((?:MACRO|MEMO|TYPED)?:[:]?)(\s+)(\S+)`, ByGroups(Keyword, Text, NameFunction), nil}, + {`(M:[:]?)(\s+)(\S+)(\s+)(\S+)`, ByGroups(Keyword, Text, NameClass, Text, NameFunction), nil}, + {`(C:)(\s+)(\S+)(\s+)(\S+)`, ByGroups(Keyword, Text, NameFunction, Text, NameClass), nil}, + {`(GENERIC:)(\s+)(\S+)`, ByGroups(Keyword, Text, NameFunction), nil}, + {`(HOOK:|GENERIC#)(\s+)(\S+)(\s+)(\S+)`, ByGroups(Keyword, Text, NameFunction, Text, NameFunction), nil}, + {`\(\s`, NameFunction, Push("stackeffect")}, + {`;\s`, Keyword, nil}, + {`(USING:)(\s+)`, ByGroups(KeywordNamespace, Text), Push("vocabs")}, + {`(USE:|UNUSE:|IN:|QUALIFIED:)(\s+)(\S+)`, ByGroups(KeywordNamespace, Text, NameNamespace), nil}, + {`(QUALIFIED-WITH:)(\s+)(\S+)(\s+)(\S+)`, ByGroups(KeywordNamespace, Text, NameNamespace, Text, NameNamespace), nil}, + {`(FROM:|EXCLUDE:)(\s+)(\S+)(\s+=>\s)`, ByGroups(KeywordNamespace, Text, NameNamespace, Text), Push("words")}, + {`(RENAME:)(\s+)(\S+)(\s+)(\S+)(\s+=>\s+)(\S+)`, ByGroups(KeywordNamespace, Text, NameFunction, Text, NameNamespace, Text, NameFunction), nil}, + {`(ALIAS:|TYPEDEF:)(\s+)(\S+)(\s+)(\S+)`, ByGroups(KeywordNamespace, Text, NameFunction, Text, NameFunction), nil}, + {`(DEFER:|FORGET:|POSTPONE:)(\s+)(\S+)`, ByGroups(KeywordNamespace, Text, NameFunction), nil}, + {`(TUPLE:|ERROR:)(\s+)(\S+)(\s+<\s+)(\S+)`, ByGroups(Keyword, Text, NameClass, Text, NameClass), Push("slots")}, + {`(TUPLE:|ERROR:|BUILTIN:)(\s+)(\S+)`, ByGroups(Keyword, Text, NameClass), Push("slots")}, + {`(MIXIN:|UNION:|INTERSECTION:)(\s+)(\S+)`, ByGroups(Keyword, Text, NameClass), nil}, + {`(PREDICATE:)(\s+)(\S+)(\s+<\s+)(\S+)`, ByGroups(Keyword, Text, NameClass, Text, NameClass), nil}, + {`(C:)(\s+)(\S+)(\s+)(\S+)`, ByGroups(Keyword, Text, NameFunction, Text, NameClass), nil}, + {`(INSTANCE:)(\s+)(\S+)(\s+)(\S+)`, ByGroups(Keyword, Text, NameClass, Text, NameClass), nil}, + 
{`(SLOT:)(\s+)(\S+)`, ByGroups(Keyword, Text, NameFunction), nil}, + {`(SINGLETON:)(\s+)(\S+)`, ByGroups(Keyword, Text, NameClass), nil}, + {`SINGLETONS:`, Keyword, Push("classes")}, + {`(CONSTANT:|SYMBOL:|MAIN:|HELP:)(\s+)(\S+)`, ByGroups(Keyword, Text, NameFunction), nil}, + {`SYMBOLS:\s`, Keyword, Push("words")}, + {`SYNTAX:\s`, Keyword, nil}, + {`ALIEN:\s`, Keyword, nil}, + {`(STRUCT:)(\s+)(\S+)`, ByGroups(Keyword, Text, NameClass), nil}, + {`(FUNCTION:)(\s+\S+\s+)(\S+)(\s+\(\s+[^)]+\)\s)`, ByGroups(KeywordNamespace, Text, NameFunction, Text), nil}, + {`(FUNCTION-ALIAS:)(\s+)(\S+)(\s+\S+\s+)(\S+)(\s+\(\s+[^)]+\)\s)`, ByGroups(KeywordNamespace, Text, NameFunction, Text, NameFunction, Text), nil}, + {`(?:)\s`, KeywordNamespace, nil}, + {`"""\s+(?:.|\n)*?\s+"""`, LiteralString, nil}, + {`"(?:\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`\S+"\s+(?:\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`CHAR:\s+(?:\\[\\abfnrstv]|[^\\]\S*)\s`, LiteralStringChar, nil}, + {`!\s+.*$`, Comment, nil}, + {`#!\s+.*$`, Comment, nil}, + {`/\*\s+(?:.|\n)*?\s\*/\s`, Comment, nil}, + {`[tf]\s`, NameConstant, nil}, + {`[\\$]\s+\S+`, NameConstant, nil}, + {`M\\\s+\S+\s+\S+`, NameConstant, nil}, + {`[+-]?(?:[\d,]*\d)?\.(?:\d([\d,]*\d)?)?(?:[eE][+-]?\d+)?\s`, LiteralNumber, nil}, + {`[+-]?\d(?:[\d,]*\d)?(?:[eE][+-]?\d+)?\s`, LiteralNumber, nil}, + {`0x[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s`, LiteralNumber, nil}, + {`NAN:\s+[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s`, LiteralNumber, nil}, + {`0b[01]+\s`, LiteralNumberBin, nil}, + {`0o[0-7]+\s`, LiteralNumberOct, nil}, + {`(?:\d([\d,]*\d)?)?\+\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s`, LiteralNumber, nil}, + {`(?:\-\d([\d,]*\d)?)?\-\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s`, LiteralNumber, nil}, + {`(?:deprecated|final|foldable|flushable|inline|recursive)\s`, Keyword, nil}, + {Words(``, `\s`, `-rot`, `2bi`, `2bi@`, `2bi*`, `2curry`, `2dip`, `2drop`, `2dup`, `2keep`, `2nip`, `2over`, `2tri`, `2tri@`, `2tri*`, `3bi`, `3curry`, `3dip`, `3drop`, `3dup`, `3keep`, `3tri`, `4dip`, `4drop`, `4dup`, `4keep`, ``, `=`, `>boolean`, `clone`, `?`, `?execute`, `?if`, `and`, `assert`, `assert=`, `assert?`, `bi`, `bi-curry`, `bi-curry@`, `bi-curry*`, `bi@`, `bi*`, `boa`, `boolean`, `boolean?`, `both?`, `build`, `call`, `callstack`, `callstack>array`, `callstack?`, `clear`, `(clone)`, `compose`, `compose?`, `curry`, `curry?`, `datastack`, `die`, `dip`, `do`, `drop`, `dup`, `dupd`, `either?`, `eq?`, `equal?`, `execute`, `hashcode`, `hashcode*`, `identity-hashcode`, `identity-tuple`, `identity-tuple?`, `if`, `if*`, `keep`, `loop`, `most`, `new`, `nip`, `not`, `null`, `object`, `or`, `over`, `pick`, `prepose`, `retainstack`, `rot`, `same?`, `swap`, `swapd`, `throw`, `tri`, `tri-curry`, `tri-curry@`, `tri-curry*`, `tri@`, `tri*`, `tuple`, `tuple?`, `unless`, `unless*`, `until`, `when`, `when*`, `while`, `with`, `wrapper`, `wrapper?`, `xor`), NameBuiltin, nil}, + {Words(``, `\s`, `2cache`, ``, `>alist`, `?at`, `?of`, `assoc`, `assoc-all?`, `assoc-any?`, `assoc-clone-like`, `assoc-combine`, `assoc-diff`, `assoc-diff!`, `assoc-differ`, `assoc-each`, `assoc-empty?`, `assoc-filter`, `assoc-filter!`, `assoc-filter-as`, `assoc-find`, `assoc-hashcode`, `assoc-intersect`, `assoc-like`, `assoc-map`, `assoc-map-as`, `assoc-partition`, `assoc-refine`, `assoc-size`, `assoc-stack`, `assoc-subset?`, `assoc-union`, `assoc-union!`, `assoc=`, `assoc>map`, `assoc?`, `at`, `at+`, `at*`, `cache`, `change-at`, `clear-assoc`, `delete-at`, `delete-at*`, `enum`, `enum?`, `extract-keys`, `inc-at`, 
`key?`, `keys`, `map>assoc`, `maybe-set-at`, `new-assoc`, `of`, `push-at`, `rename-at`, `set-at`, `sift-keys`, `sift-values`, `substitute`, `unzip`, `value-at`, `value-at*`, `value?`, `values`, `zip`), NameBuiltin, nil}, + {Words(``, `\s`, `2cleave`, `2cleave>quot`, `3cleave`, `3cleave>quot`, `4cleave`, `4cleave>quot`, `alist>quot`, `call-effect`, `case`, `case-find`, `case>quot`, `cleave`, `cleave>quot`, `cond`, `cond>quot`, `deep-spread>quot`, `execute-effect`, `linear-case-quot`, `no-case`, `no-case?`, `no-cond`, `no-cond?`, `recursive-hashcode`, `shallow-spread>quot`, `spread`, `to-fixed-point`, `wrong-values`, `wrong-values?`), NameBuiltin, nil}, + {Words(``, `\s`, `-`, `/`, `/f`, `/i`, `/mod`, `2/`, `2^`, `<`, `<=`, ``, `>`, `>=`, `>bignum`, `>fixnum`, `>float`, `>integer`, `(all-integers?)`, `(each-integer)`, `(find-integer)`, `*`, `+`, `?1+`, `abs`, `align`, `all-integers?`, `bignum`, `bignum?`, `bit?`, `bitand`, `bitnot`, `bitor`, `bits>double`, `bits>float`, `bitxor`, `complex`, `complex?`, `denominator`, `double>bits`, `each-integer`, `even?`, `find-integer`, `find-last-integer`, `fixnum`, `fixnum?`, `float`, `float>bits`, `float?`, `fp-bitwise=`, `fp-infinity?`, `fp-nan-payload`, `fp-nan?`, `fp-qnan?`, `fp-sign`, `fp-snan?`, `fp-special?`, `if-zero`, `imaginary-part`, `integer`, `integer>fixnum`, `integer>fixnum-strict`, `integer?`, `log2`, `log2-expects-positive`, `log2-expects-positive?`, `mod`, `neg`, `neg?`, `next-float`, `next-power-of-2`, `number`, `number=`, `number?`, `numerator`, `odd?`, `out-of-fixnum-range`, `out-of-fixnum-range?`, `power-of-2?`, `prev-float`, `ratio`, `ratio?`, `rational`, `rational?`, `real`, `real-part`, `real?`, `recip`, `rem`, `sgn`, `shift`, `sq`, `times`, `u<`, `u<=`, `u>`, `u>=`, `unless-zero`, `unordered?`, `when-zero`, `zero?`), NameBuiltin, nil}, + {Words(``, `\s`, `1sequence`, `2all?`, `2each`, `2map`, `2map-as`, `2map-reduce`, `2reduce`, `2selector`, `2sequence`, `3append`, `3append-as`, `3each`, `3map`, `3map-as`, `3sequence`, `4sequence`, ``, ``, ``, `?first`, `?last`, `?nth`, `?second`, `?set-nth`, `accumulate`, `accumulate!`, `accumulate-as`, `all?`, `any?`, `append`, `append!`, `append-as`, `assert-sequence`, `assert-sequence=`, `assert-sequence?`, `binary-reduce`, `bounds-check`, `bounds-check?`, `bounds-error`, `bounds-error?`, `but-last`, `but-last-slice`, `cartesian-each`, `cartesian-map`, `cartesian-product`, `change-nth`, `check-slice`, `check-slice-error`, `clone-like`, `collapse-slice`, `collector`, `collector-for`, `concat`, `concat-as`, `copy`, `count`, `cut`, `cut-slice`, `cut*`, `delete-all`, `delete-slice`, `drop-prefix`, `each`, `each-from`, `each-index`, `empty?`, `exchange`, `filter`, `filter!`, `filter-as`, `find`, `find-from`, `find-index`, `find-index-from`, `find-last`, `find-last-from`, `first`, `first2`, `first3`, `first4`, `flip`, `follow`, `fourth`, `glue`, `halves`, `harvest`, `head`, `head-slice`, `head-slice*`, `head*`, `head?`, `if-empty`, `immutable`, `immutable-sequence`, `immutable-sequence?`, `immutable?`, `index`, `index-from`, `indices`, `infimum`, `infimum-by`, `insert-nth`, `interleave`, `iota`, `iota-tuple`, `iota-tuple?`, `join`, `join-as`, `last`, `last-index`, `last-index-from`, `length`, `lengthen`, `like`, `longer`, `longer?`, `longest`, `map`, `map!`, `map-as`, `map-find`, `map-find-last`, `map-index`, `map-integers`, `map-reduce`, `map-sum`, `max-length`, `member-eq?`, `member?`, `midpoint@`, `min-length`, `mismatch`, `move`, `new-like`, `new-resizable`, `new-sequence`, 
`non-negative-integer-expected`, `non-negative-integer-expected?`, `nth`, `nths`, `pad-head`, `pad-tail`, `padding`, `partition`, `pop`, `pop*`, `prefix`, `prepend`, `prepend-as`, `produce`, `produce-as`, `product`, `push`, `push-all`, `push-either`, `push-if`, `reduce`, `reduce-index`, `remove`, `remove!`, `remove-eq`, `remove-eq!`, `remove-nth`, `remove-nth!`, `repetition`, `repetition?`, `replace-slice`, `replicate`, `replicate-as`, `rest`, `rest-slice`, `reverse`, `reverse!`, `reversed`, `reversed?`, `second`, `selector`, `selector-for`, `sequence`, `sequence-hashcode`, `sequence=`, `sequence?`, `set-first`, `set-fourth`, `set-last`, `set-length`, `set-nth`, `set-second`, `set-third`, `short`, `shorten`, `shorter`, `shorter?`, `shortest`, `sift`, `slice`, `slice-error`, `slice-error?`, `slice?`, `snip`, `snip-slice`, `start`, `start*`, `subseq`, `subseq?`, `suffix`, `suffix!`, `sum`, `sum-lengths`, `supremum`, `supremum-by`, `surround`, `tail`, `tail-slice`, `tail-slice*`, `tail*`, `tail?`, `third`, `trim`, `trim-head`, `trim-head-slice`, `trim-slice`, `trim-tail`, `trim-tail-slice`, `unclip`, `unclip-last`, `unclip-last-slice`, `unclip-slice`, `unless-empty`, `virtual-exemplar`, `virtual-sequence`, `virtual-sequence?`, `virtual@`, `when-empty`), NameBuiltin, nil}, + {Words(``, `\s`, `+@`, `change`, `change-global`, `counter`, `dec`, `get`, `get-global`, `global`, `inc`, `init-namespaces`, `initialize`, `is-global`, `make-assoc`, `namespace`, `namestack`, `off`, `on`, `set`, `set-global`, `set-namestack`, `toggle`, `with-global`, `with-scope`, `with-variable`, `with-variables`), NameBuiltin, nil}, + {Words(``, `\s`, `1array`, `2array`, `3array`, `4array`, ``, `>array`, `array`, `array?`, `pair`, `pair?`, `resize-array`), NameBuiltin, nil}, + {Words(``, `\s`, `(each-stream-block-slice)`, `(each-stream-block)`, `(stream-contents-by-block)`, `(stream-contents-by-element)`, `(stream-contents-by-length-or-block)`, `(stream-contents-by-length)`, `+byte+`, `+character+`, `bad-seek-type`, `bad-seek-type?`, `bl`, `contents`, `each-block`, `each-block-size`, `each-block-slice`, `each-line`, `each-morsel`, `each-stream-block`, `each-stream-block-slice`, `each-stream-line`, `error-stream`, `flush`, `input-stream`, `input-stream?`, `invalid-read-buffer`, `invalid-read-buffer?`, `lines`, `nl`, `output-stream`, `output-stream?`, `print`, `read`, `read-into`, `read-partial`, `read-partial-into`, `read-until`, `read1`, `readln`, `seek-absolute`, `seek-absolute?`, `seek-end`, `seek-end?`, `seek-input`, `seek-output`, `seek-relative`, `seek-relative?`, `stream-bl`, `stream-contents`, `stream-contents*`, `stream-copy`, `stream-copy*`, `stream-element-type`, `stream-flush`, `stream-length`, `stream-lines`, `stream-nl`, `stream-print`, `stream-read`, `stream-read-into`, `stream-read-partial`, `stream-read-partial-into`, `stream-read-partial-unsafe`, `stream-read-unsafe`, `stream-read-until`, `stream-read1`, `stream-readln`, `stream-seek`, `stream-seekable?`, `stream-tell`, `stream-write`, `stream-write1`, `tell-input`, `tell-output`, `with-error-stream`, `with-error-stream*`, `with-error>output`, `with-input-output+error-streams`, `with-input-output+error-streams*`, `with-input-stream`, `with-input-stream*`, `with-output-stream`, `with-output-stream*`, `with-output>error`, `with-output+error-stream`, `with-output+error-stream*`, `with-streams`, `with-streams*`, `write`, `write1`), NameBuiltin, nil}, + {Words(``, `\s`, `1string`, ``, `>string`, `resize-string`, `string`, `string?`), NameBuiltin, nil}, + 
{Words(``, `\s`, `1vector`, ``, `>vector`, `?push`, `vector`, `vector?`), NameBuiltin, nil}, + {Words(``, `\s`, ``, ``, ``, `attempt-all`, `attempt-all-error`, `attempt-all-error?`, `callback-error-hook`, `callcc0`, `callcc1`, `cleanup`, `compute-restarts`, `condition`, `condition?`, `continuation`, `continuation?`, `continue`, `continue-restart`, `continue-with`, `current-continuation`, `error`, `error-continuation`, `error-in-thread`, `error-thread`, `ifcc`, `ignore-errors`, `in-callback?`, `original-error`, `recover`, `restart`, `restart?`, `restarts`, `rethrow`, `rethrow-restarts`, `return`, `return-continuation`, `thread-error-hook`, `throw-continue`, `throw-restarts`, `with-datastack`, `with-return`), NameBuiltin, nil}, + {`\S+`, Text, nil}, + }, + "stackeffect": { + {`\s+`, Text, nil}, + {`\(\s+`, NameFunction, Push("stackeffect")}, + {`\)\s`, NameFunction, Pop(1)}, + {`--\s`, NameFunction, nil}, + {`\S+`, NameVariable, nil}, + }, + "slots": { + {`\s+`, Text, nil}, + {`;\s`, Keyword, Pop(1)}, + {`(\{\s+)(\S+)(\s+[^}]+\s+\}\s)`, ByGroups(Text, NameVariable, Text), nil}, + {`\S+`, NameVariable, nil}, + }, + "vocabs": { + {`\s+`, Text, nil}, + {`;\s`, Keyword, Pop(1)}, + {`\S+`, NameNamespace, nil}, + }, + "classes": { + {`\s+`, Text, nil}, + {`;\s`, Keyword, Pop(1)}, + {`\S+`, NameClass, nil}, + }, + "words": { + {`\s+`, Text, nil}, + {`;\s`, Keyword, Pop(1)}, + {`\S+`, NameFunction, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/f/fennel.go b/vendor/github.com/alecthomas/chroma/lexers/f/fennel.go new file mode 100644 index 000000000..23134d4eb --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/f/fennel.go @@ -0,0 +1,66 @@ +package f + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Fennel lexer. +var Fennel = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Fennel", + Aliases: []string{"fennel", "fnl"}, + Filenames: []string{"*.fennel"}, + MimeTypes: []string{"text/x-fennel", "application/x-fennel"}, + }, + fennelRules, +)) + +// Here's some Fennel code used to generate the lists of keywords: +// (local fennel (require :fennel)) +// +// (fn member? [t x] (each [_ y (ipairs t)] (when (= y x) (lua "return true")))) +// +// (local declarations [:fn :lambda :λ :local :var :global :macro :macros]) +// (local keywords []) +// (local globals []) +// +// (each [name data (pairs (fennel.syntax))] +// (if (member? declarations name) nil ; already populated +// data.special? (table.insert keywords name) +// data.macro? (table.insert keywords name) +// data.global? 
(table.insert globals name))) +// +// (fn quoted [tbl] +// (table.sort tbl) +// (table.concat (icollect [_ k (ipairs tbl)] +// (string.format "`%s`" k)) ", ")) +// +// (print :Keyword (quoted keywords)) +// (print :KeywordDeclaration (quoted declarations)) +// (print :NameBuiltin (quoted globals)) + +func fennelRules() Rules { + return Rules{ + "root": { + {`;.*$`, CommentSingle, nil}, + {`\s+`, Whitespace, nil}, + {`-?\d+\.\d+`, LiteralNumberFloat, nil}, + {`-?\d+`, LiteralNumberInteger, nil}, + {`0x-?[abcdef\d]+`, LiteralNumberHex, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`'(?!#)[\w!$%*+<=>?/.#-]+`, LiteralStringSymbol, nil}, + {`\\(.|[a-z]+)`, LiteralStringChar, nil}, + {`::?#?(?!#)[\w!$%*+<=>?/.#-]+`, LiteralStringSymbol, nil}, + {"~@|[`\\'#^~&@]", Operator, nil}, + {Words(``, ` `, `#`, `%`, `*`, `+`, `-`, `->`, `->>`, `-?>`, `-?>>`, `.`, `..`, `/`, `//`, `:`, `<`, `<=`, `=`, `>`, `>=`, `?.`, `^`, `accumulate`, `and`, `band`, `bnot`, `bor`, `bxor`, `collect`, `comment`, `do`, `doc`, `doto`, `each`, `eval-compiler`, `for`, `hashfn`, `icollect`, `if`, `import-macros`, `include`, `length`, `let`, `lshift`, `lua`, `macrodebug`, `match`, `not`, `not=`, `or`, `partial`, `pick-args`, `pick-values`, `quote`, `require-macros`, `rshift`, `set`, `set-forcibly!`, `tset`, `values`, `when`, `while`, `with-open`, `~=`), Keyword, nil}, + {Words(``, ` `, `fn`, `global`, `lambda`, `local`, `macro`, `macros`, `var`, `λ`), KeywordDeclaration, nil}, + {Words(``, ` `, `_G`, `arg`, `assert`, `bit32`, `bit32.arshift`, `bit32.band`, `bit32.bnot`, `bit32.bor`, `bit32.btest`, `bit32.bxor`, `bit32.extract`, `bit32.lrotate`, `bit32.lshift`, `bit32.replace`, `bit32.rrotate`, `bit32.rshift`, `collectgarbage`, `coroutine`, `coroutine.create`, `coroutine.resume`, `coroutine.running`, `coroutine.status`, `coroutine.wrap`, `coroutine.yield`, `debug`, `debug.debug`, `debug.gethook`, `debug.getinfo`, `debug.getlocal`, `debug.getmetatable`, `debug.getregistry`, `debug.getupvalue`, `debug.getuservalue`, `debug.sethook`, `debug.setlocal`, `debug.setmetatable`, `debug.setupvalue`, `debug.setuservalue`, `debug.traceback`, `debug.upvalueid`, `debug.upvaluejoin`, `dofile`, `error`, `getmetatable`, `io`, `io.close`, `io.flush`, `io.input`, `io.lines`, `io.open`, `io.output`, `io.popen`, `io.read`, `io.tmpfile`, `io.type`, `io.write`, `ipairs`, `load`, `loadfile`, `loadstring`, `math`, `math.abs`, `math.acos`, `math.asin`, `math.atan`, `math.atan2`, `math.ceil`, `math.cos`, `math.cosh`, `math.deg`, `math.exp`, `math.floor`, `math.fmod`, `math.frexp`, `math.ldexp`, `math.log`, `math.log10`, `math.max`, `math.min`, `math.modf`, `math.pow`, `math.rad`, `math.random`, `math.randomseed`, `math.sin`, `math.sinh`, `math.sqrt`, `math.tan`, `math.tanh`, `module`, `next`, `os`, `os.clock`, `os.date`, `os.difftime`, `os.execute`, `os.exit`, `os.getenv`, `os.remove`, `os.rename`, `os.setlocale`, `os.time`, `os.tmpname`, `package`, `package.loadlib`, `package.searchpath`, `package.seeall`, `pairs`, `pcall`, `print`, `rawequal`, `rawget`, `rawlen`, `rawset`, `require`, `select`, `setmetatable`, `string`, `string.byte`, `string.char`, `string.dump`, `string.find`, `string.format`, `string.gmatch`, `string.gsub`, `string.len`, `string.lower`, `string.match`, `string.rep`, `string.reverse`, `string.sub`, `string.upper`, `table`, `table.concat`, `table.insert`, `table.maxn`, `table.pack`, `table.remove`, `table.sort`, `table.unpack`, `tonumber`, `tostring`, `type`, `unpack`, `xpcall`), NameBuiltin, nil}, + 
{`(?<=\()(?!#)[\w!$%*+<=>?/.#-]+`, NameFunction, nil}, + {`(?!#)[\w!$%*+<=>?/.#-]+`, NameVariable, nil}, + {`(\[|\])`, Punctuation, nil}, + {`(\{|\})`, Punctuation, nil}, + {`(\(|\))`, Punctuation, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/f/fish.go b/vendor/github.com/alecthomas/chroma/lexers/f/fish.go new file mode 100644 index 000000000..29d5028d5 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/f/fish.go @@ -0,0 +1,98 @@ +package f + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Fish lexer. +var Fish = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Fish", + Aliases: []string{"fish", "fishshell"}, + Filenames: []string{"*.fish", "*.load"}, + MimeTypes: []string{"application/x-fish"}, + }, + fishRules, +)) + +func fishRules() Rules { + keywords := []string{ + `begin`, `end`, `if`, `else`, `while`, `break`, `for`, `return`, `function`, `block`, + `case`, `continue`, `switch`, `not`, `and`, `or`, `set`, `echo`, `exit`, `pwd`, `true`, + `false`, `cd`, `cdh`, `count`, `test`, + } + keywordsPattern := Words(`\b`, `\b`, keywords...) + + builtins := []string{ + `alias`, `bg`, `bind`, `breakpoint`, `builtin`, `argparse`, `abbr`, `string`, `command`, + `commandline`, `complete`, `contains`, `dirh`, `dirs`, `disown`, `emit`, `eval`, `exec`, + `fg`, `fish`, `fish_add_path`, `fish_breakpoint_prompt`, `fish_command_not_found`, + `fish_config`, `fish_git_prompt`, `fish_greeting`, `fish_hg_prompt`, `fish_indent`, + `fish_is_root_user`, `fish_key_reader`, `fish_mode_prompt`, `fish_opt`, `fish_pager`, + `fish_prompt`, `fish_right_prompt`, `fish_status_to_signal`, `fish_svn_prompt`, + `fish_title`, `fish_update_completions`, `fish_vcs_prompt`, `fishd`, `funced`, + `funcsave`, `functions`, `help`, `history`, `isatty`, `jobs`, `math`, `mimedb`, `nextd`, + `open`, `prompt_pwd`, `realpath`, `popd`, `prevd`, `psub`, `pushd`, `random`, `read`, + `set_color`, `source`, `status`, `suspend`, `trap`, `type`, `ulimit`, `umask`, `vared`, + `fc`, `getopts`, `hash`, `kill`, `printf`, `time`, `wait`, + } + + return Rules{ + "root": { + Include("basic"), + Include("interp"), + Include("data"), + }, + "interp": { + {`\$\(\(`, Keyword, Push("math")}, + {`\(`, Keyword, Push("paren")}, + {`\$#?(\w+|.)`, NameVariable, nil}, + }, + "basic": { + {Words(`(?<=(?:^|\A|;|&&|\|\||\||`+keywordsPattern+`)\s*)`, `(?=;?\b)`, keywords...), Keyword, nil}, + {`(?<=for\s+\S+\s+)in\b`, Keyword, nil}, + {Words(`\b`, `\s*\b(?!\.)`, builtins...), NameBuiltin, nil}, + {`#!.*\n`, CommentHashbang, nil}, + {`#.*\n`, Comment, nil}, + {`\\[\w\W]`, LiteralStringEscape, nil}, + {`(\b\w+)(\s*)(=)`, ByGroups(NameVariable, Text, Operator), nil}, + {`[\[\]()={}]`, Operator, nil}, + {`(?<=\[[^\]]+)\.\.|-(?=[^\[]+\])`, Operator, nil}, + {`<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2`, LiteralString, nil}, + {`(?<=set\s+(?:--?[^\d\W][\w-]*\s+)?)\w+`, NameVariable, nil}, + {`(?<=for\s+)\w[\w-]*(?=\s+in)`, NameVariable, nil}, + {`(?<=function\s+)\w(?:[^\n])*?(?= *[-\n])`, NameFunction, nil}, + {`(?<=(?:^|\b(?:and|or|sudo)\b|;|\|\||&&|\||\(|(?:\b\w+\s*=\S+\s)) *)\w[\w-]*`, NameFunction, nil}, + }, + "data": { + {`(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Push("string")}, + {`(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'`, LiteralStringSingle, nil}, + {`(?s)'.*?'`, LiteralStringSingle, nil}, + {`;`, Punctuation, nil}, + {`&&|\|\||&|\||\^|<|>`, Operator, nil}, + {`\s+`, Text, nil}, + {`\b\d+\b`, LiteralNumber, 
nil}, + {`(?<=\s+)--?[^\d][\w-]*`, NameAttribute, nil}, + {".+?", Text, nil}, + }, + "string": { + {`"`, LiteralStringDouble, Pop(1)}, + {`(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+`, LiteralStringDouble, nil}, + Include("interp"), + }, + "paren": { + {`\)`, Keyword, Pop(1)}, + Include("root"), + }, + "math": { + {`\)\)`, Keyword, Pop(1)}, + {`[-+*/%^|&]|\*\*|\|\|`, Operator, nil}, + {`\d+#\d+`, LiteralNumber, nil}, + {`\d+#(?! )`, LiteralNumber, nil}, + {`\d+`, LiteralNumber, nil}, + Include("root"), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/f/forth.go b/vendor/github.com/alecthomas/chroma/lexers/f/forth.go new file mode 100644 index 000000000..8d66708dc --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/f/forth.go @@ -0,0 +1,44 @@ +package f + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Forth lexer. +var Forth = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Forth", + Aliases: []string{"forth"}, + Filenames: []string{"*.frt", "*.fth", "*.fs"}, + MimeTypes: []string{"application/x-forth"}, + CaseInsensitive: true, + }, + forthRules, +)) + +func forthRules() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`\\.*?\n`, CommentSingle, nil}, + {`\([\s].*?\)`, CommentSingle, nil}, + {`(:|variable|constant|value|buffer:)(\s+)`, ByGroups(KeywordNamespace, Text), Push("worddef")}, + {`([.sc]")(\s+?)`, ByGroups(LiteralString, Text), Push("stringdef")}, + {`(blk|block|buffer|evaluate|flush|load|save-buffers|update|empty-buffers|list|refill|scr|thru|\#s|\*\/mod|\+loop|\/mod|0<|0=|1\+|1-|2!|2\*|2\/|2@|2drop|2dup|2over|2swap|>body|>in|>number|>r|\?dup|abort|abort\"|abs|accept|align|aligned|allot|and|base|begin|bl|c!|c,|c@|cell\+|cells|char|char\+|chars|constant|count|cr|create|decimal|depth|do|does>|drop|dup|else|emit|environment\?|evaluate|execute|exit|fill|find|fm\/mod|here|hold|i|if|immediate|invert|j|key|leave|literal|loop|lshift|m\*|max|min|mod|move|negate|or|over|postpone|quit|r>|r@|recurse|repeat|rot|rshift|s\"|s>d|sign|sm\/rem|source|space|spaces|state|swap|then|type|u\.|u\<|um\*|um\/mod|unloop|until|variable|while|word|xor|\[char\]|\[\'\]|@|!|\#|<\#|\#>|:|;|\+|-|\*|\/|,|<|>|\|1\+|1-|\.|\.r|0<>|0>|2>r|2r>|2r@|:noname|\?do|again|c\"|case|compile,|endcase|endof|erase|false|hex|marker|nip|of|pad|parse|pick|refill|restore-input|roll|save-input|source-id|to|true|tuck|u\.r|u>|unused|value|within|\[compile\]|\#tib|convert|expect|query|span|tib|2constant|2literal|2variable|d\+|d-|d\.|d\.r|d0<|d0=|d2\*|d2\/|d<|d=|d>s|dabs|dmax|dmin|dnegate|m\*\/|m\+|2rot|du<|catch|throw|abort|abort\"|at-xy|key\?|page|ekey|ekey>char|ekey\?|emit\?|ms|time&date|BIN|CLOSE-FILE|CREATE-FILE|DELETE-FILE|FILE-POSITION|FILE-SIZE|INCLUDE-FILE|INCLUDED|OPEN-FILE|R\/O|R\/W|READ-FILE|READ-LINE|REPOSITION-FILE|RESIZE-FILE|S\"|SOURCE-ID|W/O|WRITE-FILE|WRITE-LINE|FILE-STATUS|FLUSH-FILE|REFILL|RENAME-FILE|>float|d>f|f!|f\*|f\+|f-|f\/|f0<|f0=|f<|f>d|f@|falign|faligned|fconstant|fdepth|fdrop|fdup|fliteral|float\+|floats|floor|fmax|fmin|fnegate|fover|frot|fround|fswap|fvariable|represent|df!|df@|dfalign|dfaligned|dfloat\+|dfloats|f\*\*|f\.|fabs|facos|facosh|falog|fasin|fasinh|fatan|fatan2|fatanh|fcos|fcosh|fe\.|fexp|fexpm1|fln|flnp1|flog|fs\.|fsin|fsincos|fsinh|fsqrt|ftan|ftanh|f~|precision|set-precision|sf!|sf@|sfalign|sfaligned|sfloat\+|sfloats|\(local\)|to|locals\||allocate|free|resize|definitions|find|forth-wordlist|get-current|get-order|search-wordlist|set-current|set-order|wordlist|also|forth|only|order|previous|-trail
ing|\/string|blank|cmove|cmove>|compare|search|sliteral|.s|dump|see|words|;code|ahead|assembler|bye|code|cs-pick|cs-roll|editor|state|\[else\]|\[if\]|\[then\]|forget|defer|defer@|defer!|action-of|begin-structure|field:|buffer:|parse-name|buffer:|traverse-wordlist|n>r|nr>|2value|fvalue|name>interpret|name>compile|name>string|cfield:|end-structure)\s`, Keyword, nil}, + {`(\$[0-9A-F]+)`, LiteralNumberHex, nil}, + {`(\#|%|&|\-|\+)?[0-9]+`, LiteralNumberInteger, nil}, + {`(\#|%|&|\-|\+)?[0-9.]+`, KeywordType, nil}, + {`(@i|!i|@e|!e|pause|noop|turnkey|sleep|itype|icompare|sp@|sp!|rp@|rp!|up@|up!|>a|a>|a@|a!|a@+|a@-|>b|b>|b@|b!|b@+|b@-|find-name|1ms|sp0|rp0|\(evaluate\)|int-trap|int!)\s`, NameConstant, nil}, + {`(do-recognizer|r:fail|recognizer:|get-recognizers|set-recognizers|r:float|r>comp|r>int|r>post|r:name|r:word|r:dnum|r:num|recognizer|forth-recognizer|rec:num|rec:float|rec:word)\s`, NameDecorator, nil}, + {`(Evalue|Rvalue|Uvalue|Edefer|Rdefer|Udefer)(\s+)`, ByGroups(KeywordNamespace, Text), Push("worddef")}, + {`[^\s]+(?=[\s])`, NameFunction, nil}, + }, + "worddef": { + {`\S+`, NameClass, Pop(1)}, + }, + "stringdef": { + {`[^"]+`, LiteralString, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/f/fortran.go b/vendor/github.com/alecthomas/chroma/lexers/f/fortran.go new file mode 100644 index 000000000..af4a969ce --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/f/fortran.go @@ -0,0 +1,51 @@ +package f + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Fortran lexer. +var Fortran = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Fortran", + Aliases: []string{"fortran"}, + Filenames: []string{"*.f03", "*.f90", "*.F03", "*.F90"}, + MimeTypes: []string{"text/x-fortran"}, + CaseInsensitive: true, + }, + fortranRules, +)) + +func fortranRules() Rules { + return Rules{ + "root": { + {`^#.*\n`, CommentPreproc, nil}, + {`!.*\n`, Comment, nil}, + Include("strings"), + Include("core"), + {`[a-z][\w$]*`, Name, nil}, + Include("nums"), + {`[\s]+`, Text, nil}, + }, + "core": { + {Words(`\b`, `\s*\b`, `ABSTRACT`, `ACCEPT`, `ALL`, `ALLSTOP`, `ALLOCATABLE`, `ALLOCATE`, `ARRAY`, `ASSIGN`, `ASSOCIATE`, `ASYNCHRONOUS`, `BACKSPACE`, `BIND`, `BLOCK`, `BLOCKDATA`, `BYTE`, `CALL`, `CASE`, `CLASS`, `CLOSE`, `CODIMENSION`, `COMMON`, `CONCURRRENT`, `CONTIGUOUS`, `CONTAINS`, `CONTINUE`, `CRITICAL`, `CYCLE`, `DATA`, `DEALLOCATE`, `DECODE`, `DEFERRED`, `DIMENSION`, `DO`, `ELEMENTAL`, `ELSE`, `ENCODE`, `END`, `ENTRY`, `ENUM`, `ENUMERATOR`, `EQUIVALENCE`, `EXIT`, `EXTENDS`, `EXTERNAL`, `EXTRINSIC`, `FILE`, `FINAL`, `FORALL`, `FORMAT`, `FUNCTION`, `GENERIC`, `GOTO`, `IF`, `IMAGES`, `IMPLICIT`, `IMPORT`, `IMPURE`, `INCLUDE`, `INQUIRE`, `INTENT`, `INTERFACE`, `INTRINSIC`, `IS`, `LOCK`, `MEMORY`, `MODULE`, `NAMELIST`, `NULLIFY`, `NONE`, `NON_INTRINSIC`, `NON_OVERRIDABLE`, `NOPASS`, `OPEN`, `OPTIONAL`, `OPTIONS`, `PARAMETER`, `PASS`, `PAUSE`, `POINTER`, `PRINT`, `PRIVATE`, `PROGRAM`, `PROCEDURE`, `PROTECTED`, `PUBLIC`, `PURE`, `READ`, `RECURSIVE`, `RESULT`, `RETURN`, `REWIND`, `SAVE`, `SELECT`, `SEQUENCE`, `STOP`, `SUBMODULE`, `SUBROUTINE`, `SYNC`, `SYNCALL`, `SYNCIMAGES`, `SYNCMEMORY`, `TARGET`, `THEN`, `TYPE`, `UNLOCK`, `USE`, `VALUE`, `VOLATILE`, `WHERE`, `WRITE`, `WHILE`), Keyword, nil}, + {Words(`\b`, `\s*\b`, `CHARACTER`, `COMPLEX`, `DOUBLE PRECISION`, `DOUBLE COMPLEX`, `INTEGER`, `LOGICAL`, `REAL`, `C_INT`, `C_SHORT`, `C_LONG`, `C_LONG_LONG`, `C_SIGNED_CHAR`, `C_SIZE_T`, `C_INT8_T`, `C_INT16_T`, `C_INT32_T`, 
`C_INT64_T`, `C_INT_LEAST8_T`, `C_INT_LEAST16_T`, `C_INT_LEAST32_T`, `C_INT_LEAST64_T`, `C_INT_FAST8_T`, `C_INT_FAST16_T`, `C_INT_FAST32_T`, `C_INT_FAST64_T`, `C_INTMAX_T`, `C_INTPTR_T`, `C_FLOAT`, `C_DOUBLE`, `C_LONG_DOUBLE`, `C_FLOAT_COMPLEX`, `C_DOUBLE_COMPLEX`, `C_LONG_DOUBLE_COMPLEX`, `C_BOOL`, `C_CHAR`, `C_PTR`, `C_FUNPTR`), KeywordType, nil}, + {`(\*\*|\*|\+|-|\/|<|>|<=|>=|==|\/=|=)`, Operator, nil}, + {`(::)`, KeywordDeclaration, nil}, + {`[()\[\],:&%;.]`, Punctuation, nil}, + {Words(`\b`, `\s*\b`, `Abort`, `Abs`, `Access`, `AChar`, `ACos`, `ACosH`, `AdjustL`, `AdjustR`, `AImag`, `AInt`, `Alarm`, `All`, `Allocated`, `ALog`, `AMax`, `AMin`, `AMod`, `And`, `ANInt`, `Any`, `ASin`, `ASinH`, `Associated`, `ATan`, `ATanH`, `Atomic_Define`, `Atomic_Ref`, `BesJ`, `BesJN`, `Bessel_J0`, `Bessel_J1`, `Bessel_JN`, `Bessel_Y0`, `Bessel_Y1`, `Bessel_YN`, `BesY`, `BesYN`, `BGE`, `BGT`, `BLE`, `BLT`, `Bit_Size`, `BTest`, `CAbs`, `CCos`, `Ceiling`, `CExp`, `Char`, `ChDir`, `ChMod`, `CLog`, `Cmplx`, `Command_Argument_Count`, `Complex`, `Conjg`, `Cos`, `CosH`, `Count`, `CPU_Time`, `CShift`, `CSin`, `CSqRt`, `CTime`, `C_Loc`, `C_Associated`, `C_Null_Ptr`, `C_Null_Funptr`, `C_F_Pointer`, `C_F_ProcPointer`, `C_Null_Char`, `C_Alert`, `C_Backspace`, `C_Form_Feed`, `C_FunLoc`, `C_Sizeof`, `C_New_Line`, `C_Carriage_Return`, `C_Horizontal_Tab`, `C_Vertical_Tab`, `DAbs`, `DACos`, `DASin`, `DATan`, `Date_and_Time`, `DbesJ`, `DbesJN`, `DbesY`, `DbesYN`, `Dble`, `DCos`, `DCosH`, `DDiM`, `DErF`, `DErFC`, `DExp`, `Digits`, `DiM`, `DInt`, `DLog`, `DMax`, `DMin`, `DMod`, `DNInt`, `Dot_Product`, `DProd`, `DSign`, `DSinH`, `DShiftL`, `DShiftR`, `DSin`, `DSqRt`, `DTanH`, `DTan`, `DTime`, `EOShift`, `Epsilon`, `ErF`, `ErFC`, `ErFC_Scaled`, `ETime`, `Execute_Command_Line`, `Exit`, `Exp`, `Exponent`, `Extends_Type_Of`, `FDate`, `FGet`, `FGetC`, `FindLoc`, `Float`, `Floor`, `Flush`, `FNum`, `FPutC`, `FPut`, `Fraction`, `FSeek`, `FStat`, `FTell`, `Gamma`, `GError`, `GetArg`, `Get_Command`, `Get_Command_Argument`, `Get_Environment_Variable`, `GetCWD`, `GetEnv`, `GetGId`, `GetLog`, `GetPId`, `GetUId`, `GMTime`, `HostNm`, `Huge`, `Hypot`, `IAbs`, `IAChar`, `IAll`, `IAnd`, `IAny`, `IArgC`, `IBClr`, `IBits`, `IBSet`, `IChar`, `IDate`, `IDiM`, `IDInt`, `IDNInt`, `IEOr`, `IErrNo`, `IFix`, `Imag`, `ImagPart`, `Image_Index`, `Index`, `Int`, `IOr`, `IParity`, `IRand`, `IsaTty`, `IShft`, `IShftC`, `ISign`, `Iso_C_Binding`, `Is_Contiguous`, `Is_Iostat_End`, `Is_Iostat_Eor`, `ITime`, `Kill`, `Kind`, `LBound`, `LCoBound`, `Len`, `Len_Trim`, `LGe`, `LGt`, `Link`, `LLe`, `LLt`, `LnBlnk`, `Loc`, `Log`, `Log_Gamma`, `Logical`, `Long`, `LShift`, `LStat`, `LTime`, `MaskL`, `MaskR`, `MatMul`, `Max`, `MaxExponent`, `MaxLoc`, `MaxVal`, `MClock`, `Merge`, `Merge_Bits`, `Move_Alloc`, `Min`, `MinExponent`, `MinLoc`, `MinVal`, `Mod`, `Modulo`, `MvBits`, `Nearest`, `New_Line`, `NInt`, `Norm2`, `Not`, `Null`, `Num_Images`, `Or`, `Pack`, `Parity`, `PError`, `Precision`, `Present`, `Product`, `Radix`, `Rand`, `Random_Number`, `Random_Seed`, `Range`, `Real`, `RealPart`, `Rename`, `Repeat`, `Reshape`, `RRSpacing`, `RShift`, `Same_Type_As`, `Scale`, `Scan`, `Second`, `Selected_Char_Kind`, `Selected_Int_Kind`, `Selected_Real_Kind`, `Set_Exponent`, `Shape`, `ShiftA`, `ShiftL`, `ShiftR`, `Short`, `Sign`, `Signal`, `SinH`, `Sin`, `Sleep`, `Sngl`, `Spacing`, `Spread`, `SqRt`, `SRand`, `Stat`, `Storage_Size`, `Sum`, `SymLnk`, `System`, `System_Clock`, `Tan`, `TanH`, `Time`, `This_Image`, `Tiny`, `TrailZ`, `Transfer`, `Transpose`, `Trim`, `TtyNam`, `UBound`, 
`UCoBound`, `UMask`, `Unlink`, `Unpack`, `Verify`, `XOr`, `ZAbs`, `ZCos`, `ZExp`, `ZLog`, `ZSin`, `ZSqRt`), NameBuiltin, nil}, + {`\.(true|false)\.`, NameBuiltin, nil}, + {`\.(eq|ne|lt|le|gt|ge|not|and|or|eqv|neqv)\.`, OperatorWord, nil}, + }, + "strings": { + {`(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"`, LiteralStringDouble, nil}, + {`(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'`, LiteralStringSingle, nil}, + }, + "nums": { + {`\d+(?![.e])(_[a-z]\w+)?`, LiteralNumberInteger, nil}, + {`[+-]?\d*\.\d+([ed][-+]?\d+)?(_[a-z]\w+)?`, LiteralNumberFloat, nil}, + {`[+-]?\d+\.\d*([ed][-+]?\d+)?(_[a-z]\w+)?`, LiteralNumberFloat, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/f/fortran_fixed.go b/vendor/github.com/alecthomas/chroma/lexers/f/fortran_fixed.go new file mode 100644 index 000000000..7c646386d --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/f/fortran_fixed.go @@ -0,0 +1,39 @@ +package f + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// FortranFixed lexer. +var FortranFixed = internal.Register(MustNewLazyLexer( + &Config{ + Name: "FortranFixed", + Aliases: []string{"fortranfixed"}, + Filenames: []string{"*.f", "*.F"}, + MimeTypes: []string{"text/x-fortran"}, + NotMultiline: true, + CaseInsensitive: true, + }, + func() Rules { + return Rules{ + "root": { + {`[C*].*\n`, Comment, nil}, + {`#.*\n`, CommentPreproc, nil}, + {`[\t ]*!.*\n`, Comment, nil}, + {`(.{5})`, NameLabel, Push("cont-char")}, + {`.*\n`, Using(Fortran), nil}, + }, + "cont-char": { + {` `, Text, Push("code")}, + {`0`, Comment, Push("code")}, + {`.`, GenericStrong, Push("code")}, + }, + "code": { + {`(.{66})(.*)(\n)`, ByGroups(Using(Fortran), Comment, Text), Push("root")}, + {`.*\n`, Using(Fortran), Push("root")}, + Default(Push("root")), + }, + } + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/f/fsharp.go b/vendor/github.com/alecthomas/chroma/lexers/f/fsharp.go new file mode 100644 index 000000000..44fced4d5 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/f/fsharp.go @@ -0,0 +1,98 @@ +package f + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Fsharp lexer. 
+var Fsharp = internal.Register(MustNewLazyLexer( + &Config{ + Name: "FSharp", + Aliases: []string{"fsharp"}, + Filenames: []string{"*.fs", "*.fsi"}, + MimeTypes: []string{"text/x-fsharp"}, + }, + fsharpRules, +)) + +func fsharpRules() Rules { + return Rules{ + "escape-sequence": { + {`\\[\\"\'ntbrafv]`, LiteralStringEscape, nil}, + {`\\[0-9]{3}`, LiteralStringEscape, nil}, + {`\\u[0-9a-fA-F]{4}`, LiteralStringEscape, nil}, + {`\\U[0-9a-fA-F]{8}`, LiteralStringEscape, nil}, + }, + "root": { + {`\s+`, Text, nil}, + {`\(\)|\[\]`, NameBuiltinPseudo, nil}, + {`\b(?|-|\\.\\.|\\.|::|:=|:>|:|;;|;|<-|<\\]|<|>\\]|>|\\?\\?|\\?|\\[<|\\[\\||\\[|\\]|_|`|\\{|\\|\\]|\\||\\}|~|<@@|<@|=|@>|@@>)", Operator, nil}, + {`([=<>@^|&+\*/$%-]|[!?~])?[!$%&*+\./:<=>?@^|~-]`, Operator, nil}, + {`\b(and|or|not)\b`, OperatorWord, nil}, + {`\b(sbyte|byte|char|nativeint|unativeint|float32|single|float|double|int8|uint8|int16|uint16|int32|uint32|int64|uint64|decimal|unit|bool|string|list|exn|obj|enum)\b`, KeywordType, nil}, + {`#[ \t]*(if|endif|else|line|nowarn|light|\d+)\b.*?\n`, CommentPreproc, nil}, + {`[^\W\d][\w']*`, Name, nil}, + {`\d[\d_]*[uU]?[yslLnQRZINGmM]?`, LiteralNumberInteger, nil}, + {`0[xX][\da-fA-F][\da-fA-F_]*[uU]?[yslLn]?[fF]?`, LiteralNumberHex, nil}, + {`0[oO][0-7][0-7_]*[uU]?[yslLn]?`, LiteralNumberOct, nil}, + {`0[bB][01][01_]*[uU]?[yslLn]?`, LiteralNumberBin, nil}, + {`-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)[fFmM]?`, LiteralNumberFloat, nil}, + {`'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'B?`, LiteralStringChar, nil}, + {`'.'`, LiteralStringChar, nil}, + {`'`, Keyword, nil}, + {`@?"`, LiteralStringDouble, Push("string")}, + {`[~?][a-z][\w\']*:`, NameVariable, nil}, + }, + "dotted": { + {`\s+`, Text, nil}, + {`\.`, Punctuation, nil}, + {`[A-Z][\w\']*(?=\s*\.)`, NameNamespace, nil}, + {`[A-Z][\w\']*`, Name, Pop(1)}, + {`[a-z_][\w\']*`, Name, Pop(1)}, + Default(Pop(1)), + }, + "comment": { + {`[^(*)@"]+`, Comment, nil}, + {`\(\*`, Comment, Push()}, + {`\*\)`, Comment, Pop(1)}, + {`@"`, LiteralString, Push("lstring")}, + {`"""`, LiteralString, Push("tqs")}, + {`"`, LiteralString, Push("string")}, + {`[(*)@]`, Comment, nil}, + }, + "string": { + {`[^\\"]+`, LiteralString, nil}, + Include("escape-sequence"), + {`\\\n`, LiteralString, nil}, + {`\n`, LiteralString, nil}, + {`"B?`, LiteralString, Pop(1)}, + }, + "lstring": { + {`[^"]+`, LiteralString, nil}, + {`\n`, LiteralString, nil}, + {`""`, LiteralString, nil}, + {`"B?`, LiteralString, Pop(1)}, + }, + "tqs": { + {`[^"]+`, LiteralString, nil}, + {`\n`, LiteralString, nil}, + {`"""B?`, LiteralString, Pop(1)}, + {`"`, LiteralString, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/g/gas.go b/vendor/github.com/alecthomas/chroma/lexers/g/gas.go new file mode 100644 index 000000000..1f733f6ac --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/g/gas.go @@ -0,0 +1,59 @@ +package g + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Gas lexer. 
+var Gas = internal.Register(MustNewLazyLexer( + &Config{ + Name: "GAS", + Aliases: []string{"gas", "asm"}, + Filenames: []string{"*.s", "*.S"}, + MimeTypes: []string{"text/x-gas"}, + }, + gasRules, +)) + +func gasRules() Rules { + return Rules{ + "root": { + Include("whitespace"), + {`(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+):`, NameLabel, nil}, + {`\.(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)`, NameAttribute, Push("directive-args")}, + {`lock|rep(n?z)?|data\d+`, NameAttribute, nil}, + {`(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)`, NameFunction, Push("instruction-args")}, + {`[\r\n]+`, Text, nil}, + }, + "directive-args": { + {`(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)`, NameConstant, nil}, + {`"(\\"|[^"])*"`, LiteralString, nil}, + {`@(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)`, NameAttribute, nil}, + {`(?:0[xX][a-zA-Z0-9]+|\d+)`, LiteralNumberInteger, nil}, + {`[\r\n]+`, Text, Pop(1)}, + Include("punctuation"), + Include("whitespace"), + }, + "instruction-args": { + {`([a-z0-9]+)( )(<)((?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+))(>)`, ByGroups(LiteralNumberHex, Text, Punctuation, NameConstant, Punctuation), nil}, + {`([a-z0-9]+)( )(<)((?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+))([-+])((?:0[xX][a-zA-Z0-9]+|\d+))(>)`, ByGroups(LiteralNumberHex, Text, Punctuation, NameConstant, Punctuation, LiteralNumberInteger, Punctuation), nil}, + {`(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)`, NameConstant, nil}, + {`(?:0[xX][a-zA-Z0-9]+|\d+)`, LiteralNumberInteger, nil}, + {`%(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)`, NameVariable, nil}, + {`$(?:0[xX][a-zA-Z0-9]+|\d+)`, LiteralNumberInteger, nil}, + {`$'(.|\\')'`, LiteralStringChar, nil}, + {`[\r\n]+`, Text, Pop(1)}, + Include("punctuation"), + Include("whitespace"), + }, + "whitespace": { + {`\n`, Text, nil}, + {`\s+`, Text, nil}, + {`[;#].*?\n`, Comment, nil}, + }, + "punctuation": { + {`[-*,.()\[\]!:]+`, Punctuation, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/g/gdscript.go b/vendor/github.com/alecthomas/chroma/lexers/g/gdscript.go new file mode 100644 index 000000000..2b6af973f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/g/gdscript.go @@ -0,0 +1,128 @@ +package g + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// GDScript lexer. 
+var GDScript = internal.Register(MustNewLazyLexer( + &Config{ + Name: "GDScript", + Aliases: []string{"gdscript", "gd"}, + Filenames: []string{"*.gd"}, + MimeTypes: []string{"text/x-gdscript", "application/x-gdscript"}, + }, + gdscriptRules, +)) + +func gdscriptRules() Rules { + return Rules{ + "root": { + {`\n`, Text, nil}, + {`^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringAffix, LiteralStringDoc), nil}, + {`^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')`, ByGroups(Text, LiteralStringAffix, LiteralStringDoc), nil}, + {`[^\S\n]+`, Text, nil}, + {`#.*$`, CommentSingle, nil}, + {`[]{}:(),;[]`, Punctuation, nil}, + {`\\\n`, Text, nil}, + {`\\`, Text, nil}, + {`(in|and|or|not)\b`, OperatorWord, nil}, + {`!=|==|<<|>>|&&|\+=|-=|\*=|/=|%=|&=|\|=|\|\||[-~+/*%=<>&^.!|$]`, Operator, nil}, + Include("keywords"), + {`(def)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("funcname")}, + {`(class)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("classname")}, + Include("builtins"), + {`([rR]|[uUbB][rR]|[rR][uUbB])(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("tdqs")}, + {`([rR]|[uUbB][rR]|[rR][uUbB])(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("tsqs")}, + {`([rR]|[uUbB][rR]|[rR][uUbB])(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("dqs")}, + {`([rR]|[uUbB][rR]|[rR][uUbB])(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("sqs")}, + {`([uUbB]?)(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("stringescape", "tdqs")}, + {`([uUbB]?)(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "tsqs")}, + {`([uUbB]?)(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("stringescape", "dqs")}, + {`([uUbB]?)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "sqs")}, + Include("name"), + Include("numbers"), + }, + "keywords": { + {Words(``, `\b`, + `if`, `elif`, `else`, `for`, `do`, + `while`, `switch`, `case`, `break`, `continue`, + `pass`, `return`, `class`, `extends`, `tool`, + `signal`, `func`, `static`, `const`, `enum`, + `var`, `onready`, `export`, `setget`, `breakpoint`), Keyword, nil}, + }, + "builtins": { + {Words(`(?)`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil}, + {`<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>`, Other, nil}, + {`<\s*py:[a-zA-Z0-9]+`, NameTag, Push("pytag")}, + {`<\s*[a-zA-Z0-9:.]+`, NameTag, Push("tag")}, + Include("variable"), + {`[<$]`, Other, nil}, + }, + "pytag": { + {`\s+`, Text, nil}, + {`[\w:-]+\s*=`, NameAttribute, Push("pyattr")}, + {`/?\s*>`, NameTag, Pop(1)}, + }, + "pyattr": { + {`(")(.*?)(")`, ByGroups(LiteralString, Using(Python), LiteralString), Pop(1)}, + {`(')(.*?)(')`, ByGroups(LiteralString, Using(Python), LiteralString), Pop(1)}, + {`[^\s>]+`, LiteralString, Pop(1)}, + }, + "tag": { + {`\s+`, Text, nil}, + {`py:[\w-]+\s*=`, NameAttribute, Push("pyattr")}, + {`[\w:-]+\s*=`, NameAttribute, Push("attr")}, + {`/?\s*>`, NameTag, Pop(1)}, + }, + "attr": { + {`"`, LiteralString, Push("attr-dstring")}, + {`'`, LiteralString, Push("attr-sstring")}, + {`[^\s>]*`, LiteralString, Pop(1)}, + }, + "attr-dstring": { + {`"`, LiteralString, Pop(1)}, + Include("strings"), + {`'`, LiteralString, nil}, + }, + "attr-sstring": { + {`'`, LiteralString, Pop(1)}, + Include("strings"), + {`'`, LiteralString, nil}, + }, + "strings": { + {`[^"'$]+`, LiteralString, nil}, + Include("variable"), + }, + "variable": { + {`(?]+>)`, NameVariable, nil}, + }, + "numbers": { + {`(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?`, 
LiteralString, nil}, + }, + "string": { + Include("tableVars"), + {`(\s|.)`, LiteralString, nil}, + }, + "pyString": { + {`"""`, Keyword, Pop(1)}, + Include("string"), + }, + "stepContentRoot": { + {`$`, Keyword, Pop(1)}, + Include("stepContent"), + }, + "stepContentStack": { + {`$`, Keyword, Pop(2)}, + Include("stepContent"), + }, + "stepContent": { + {`"`, NameFunction, Push("doubleString")}, + Include("tableVars"), + Include("numbers"), + Include("comments"), + {`(\s|.)`, NameFunction, nil}, + }, + "tableContent": { + {`\s+\|\s*$`, Keyword, Pop(1)}, + Include("comments"), + {`\\\|`, LiteralString, nil}, + {`\s*\|`, Keyword, nil}, + {`"`, LiteralString, Push("doubleStringTable")}, + Include("string"), + }, + "doubleString": { + {`"`, NameFunction, Pop(1)}, + Include("string"), + }, + "doubleStringTable": { + {`"`, LiteralString, Pop(1)}, + Include("string"), + }, + "root": { + {`\n`, NameFunction, nil}, + Include("comments"), + {`"""`, Keyword, Push("pyString")}, + {`\s+\|`, Keyword, Push("tableContent")}, + {`"`, NameFunction, Push("doubleString")}, + Include("tableVars"), + Include("numbers"), + {`(\s*)(@[^@\r\n\t ]+)`, ByGroups(NameFunction, NameTag), nil}, + {stepKeywords, ByGroups(NameFunction, Keyword), Push("stepContentRoot")}, + {featureKeywords, ByGroups(Keyword, Keyword, NameFunction), Push("narrative")}, + {featureElementKeywords, ByGroups(NameFunction, Keyword, Keyword, NameFunction), Push("featureElements")}, + {examplesKeywords, ByGroups(NameFunction, Keyword, Keyword, NameFunction), Push("examplesTable")}, + {`(\s|.)`, NameFunction, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/g/glsl.go b/vendor/github.com/alecthomas/chroma/lexers/g/glsl.go new file mode 100644 index 000000000..14aa58db3 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/g/glsl.go @@ -0,0 +1,41 @@ +package g + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// GLSL lexer. 
+var GLSL = internal.Register(MustNewLazyLexer( + &Config{ + Name: "GLSL", + Aliases: []string{"glsl"}, + Filenames: []string{"*.vert", "*.frag", "*.geo"}, + MimeTypes: []string{"text/x-glslsrc"}, + }, + glslRules, +)) + +func glslRules() Rules { + return Rules{ + "root": { + {`^#.*`, CommentPreproc, nil}, + {`//.*`, CommentSingle, nil}, + {`/(\\\n)?[*](.|\n)*?[*](\\\n)?/`, CommentMultiline, nil}, + {`\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?`, Operator, nil}, + {`[?:]`, Operator, nil}, + {`\bdefined\b`, Operator, nil}, + {`[;{}(),\[\]]`, Punctuation, nil}, + {`[+-]?\d*\.\d+([eE][-+]?\d+)?`, LiteralNumberFloat, nil}, + {`[+-]?\d+\.\d*([eE][-+]?\d+)?`, LiteralNumberFloat, nil}, + {`0[xX][0-9a-fA-F]*`, LiteralNumberHex, nil}, + {`0[0-7]*`, LiteralNumberOct, nil}, + {`[1-9][0-9]*`, LiteralNumberInteger, nil}, + {Words(`\b`, `\b`, `attribute`, `const`, `uniform`, `varying`, `centroid`, `break`, `continue`, `do`, `for`, `while`, `if`, `else`, `in`, `out`, `inout`, `float`, `int`, `void`, `bool`, `true`, `false`, `invariant`, `discard`, `return`, `mat2`, `mat3mat4`, `mat2x2`, `mat3x2`, `mat4x2`, `mat2x3`, `mat3x3`, `mat4x3`, `mat2x4`, `mat3x4`, `mat4x4`, `vec2`, `vec3`, `vec4`, `ivec2`, `ivec3`, `ivec4`, `bvec2`, `bvec3`, `bvec4`, `sampler1D`, `sampler2D`, `sampler3DsamplerCube`, `sampler1DShadow`, `sampler2DShadow`, `struct`), Keyword, nil}, + {Words(`\b`, `\b`, `asm`, `class`, `union`, `enum`, `typedef`, `template`, `this`, `packed`, `goto`, `switch`, `default`, `inline`, `noinline`, `volatile`, `public`, `static`, `extern`, `external`, `interface`, `long`, `short`, `double`, `half`, `fixed`, `unsigned`, `lowp`, `mediump`, `highp`, `precision`, `input`, `output`, `hvec2`, `hvec3`, `hvec4`, `dvec2`, `dvec3`, `dvec4`, `fvec2`, `fvec3`, `fvec4`, `sampler2DRect`, `sampler3DRect`, `sampler2DRectShadow`, `sizeof`, `cast`, `namespace`, `using`), Keyword, nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + {`\.`, Punctuation, nil}, + {`\s+`, Text, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/g/gnuplot.go b/vendor/github.com/alecthomas/chroma/lexers/g/gnuplot.go new file mode 100644 index 000000000..a7a250941 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/g/gnuplot.go @@ -0,0 +1,121 @@ +package g + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Gnuplot lexer. 
+var Gnuplot = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Gnuplot", + Aliases: []string{"gnuplot"}, + Filenames: []string{"*.plot", "*.plt"}, + MimeTypes: []string{"text/x-gnuplot"}, + }, + gnuplotRules, +)) + +func gnuplotRules() Rules { + return Rules{ + "root": { + Include("whitespace"), + {`bind\b|bin\b|bi\b`, Keyword, Push("bind")}, + {`exit\b|exi\b|ex\b|quit\b|qui\b|qu\b|q\b`, Keyword, Push("quit")}, + {`fit\b|fi\b|f\b`, Keyword, Push("fit")}, + {`(if)(\s*)(\()`, ByGroups(Keyword, Text, Punctuation), Push("if")}, + {`else\b`, Keyword, nil}, + {`pause\b|paus\b|pau\b|pa\b`, Keyword, Push("pause")}, + {`plot\b|plo\b|pl\b|p\b|replot\b|replo\b|repl\b|rep\b|splot\b|splo\b|spl\b|sp\b`, Keyword, Push("plot")}, + {`save\b|sav\b|sa\b`, Keyword, Push("save")}, + {`set\b|se\b`, Keyword, Push("genericargs", "optionarg")}, + {`show\b|sho\b|sh\b|unset\b|unse\b|uns\b`, Keyword, Push("noargs", "optionarg")}, + {`lower\b|lowe\b|low\b|raise\b|rais\b|rai\b|ra\b|call\b|cal\b|ca\b|cd\b|clear\b|clea\b|cle\b|cl\b|help\b|hel\b|he\b|h\b|\?\b|history\b|histor\b|histo\b|hist\b|his\b|hi\b|load\b|loa\b|lo\b|l\b|print\b|prin\b|pri\b|pr\b|pwd\b|reread\b|rerea\b|rere\b|rer\b|re\b|reset\b|rese\b|res\b|screendump\b|screendum\b|screendu\b|screend\b|screen\b|scree\b|scre\b|scr\b|shell\b|shel\b|she\b|system\b|syste\b|syst\b|sys\b|sy\b|update\b|updat\b|upda\b|upd\b|up\b`, Keyword, Push("genericargs")}, + {`pwd\b|reread\b|rerea\b|rere\b|rer\b|re\b|reset\b|rese\b|res\b|screendump\b|screendum\b|screendu\b|screend\b|screen\b|scree\b|scre\b|scr\b|shell\b|shel\b|she\b|test\b`, Keyword, Push("noargs")}, + {`([a-zA-Z_]\w*)(\s*)(=)`, ByGroups(NameVariable, Text, Operator), Push("genericargs")}, + {`([a-zA-Z_]\w*)(\s*\(.*?\)\s*)(=)`, ByGroups(NameFunction, Text, Operator), Push("genericargs")}, + {`@[a-zA-Z_]\w*`, NameConstant, nil}, + {`;`, Keyword, nil}, + }, + "comment": { + {`[^\\\n]`, Comment, nil}, + {`\\\n`, Comment, nil}, + {`\\`, Comment, nil}, + Default(Pop(1)), + }, + "whitespace": { + {`#`, Comment, Push("comment")}, + {`[ \t\v\f]+`, Text, nil}, + }, + "noargs": { + Include("whitespace"), + {`;`, Punctuation, Pop(1)}, + {`\n`, Text, Pop(1)}, + }, + "dqstring": { + {`"`, LiteralString, Pop(1)}, + {`\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})`, LiteralStringEscape, nil}, + {`[^\\"\n]+`, LiteralString, nil}, + {`\\\n`, LiteralString, nil}, + {`\\`, LiteralString, nil}, + {`\n`, LiteralString, Pop(1)}, + }, + "sqstring": { + {`''`, LiteralString, nil}, + {`'`, LiteralString, Pop(1)}, + {`[^\\'\n]+`, LiteralString, nil}, + {`\\\n`, LiteralString, nil}, + {`\\`, LiteralString, nil}, + {`\n`, LiteralString, Pop(1)}, + }, + "genericargs": { + Include("noargs"), + {`"`, LiteralString, Push("dqstring")}, + {`'`, LiteralString, Push("sqstring")}, + {`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+`, LiteralNumberFloat, nil}, + {`(\d+\.\d*|\.\d+)`, LiteralNumberFloat, nil}, + {`-?\d+`, LiteralNumberInteger, nil}, + {`[,.~!%^&*+=|?:<>/-]`, Operator, nil}, + {`[{}()\[\]]`, Punctuation, nil}, + {`(eq|ne)\b`, OperatorWord, nil}, + {`([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(NameFunction, Text, Punctuation), nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + {`@[a-zA-Z_]\w*`, NameConstant, nil}, + {`\\\n`, Text, nil}, + }, + "optionarg": { + Include("whitespace"), + 
{`all\b|al\b|a\b|angles\b|angle\b|angl\b|ang\b|an\b|arrow\b|arro\b|arr\b|ar\b|autoscale\b|autoscal\b|autosca\b|autosc\b|autos\b|auto\b|aut\b|au\b|bars\b|bar\b|ba\b|b\b|border\b|borde\b|bord\b|bor\b|boxwidth\b|boxwidt\b|boxwid\b|boxwi\b|boxw\b|box\b|clabel\b|clabe\b|clab\b|cla\b|cl\b|clip\b|cli\b|cl\b|c\b|cntrparam\b|cntrpara\b|cntrpar\b|cntrpa\b|cntrp\b|cntr\b|cnt\b|cn\b|contour\b|contou\b|conto\b|cont\b|con\b|co\b|data\b|dat\b|da\b|datafile\b|datafil\b|datafi\b|dataf\b|data\b|dgrid3d\b|dgrid3\b|dgrid\b|dgri\b|dgr\b|dg\b|dummy\b|dumm\b|dum\b|du\b|encoding\b|encodin\b|encodi\b|encod\b|enco\b|enc\b|decimalsign\b|decimalsig\b|decimalsi\b|decimals\b|decimal\b|decima\b|decim\b|deci\b|dec\b|fit\b|fontpath\b|fontpat\b|fontpa\b|fontp\b|font\b|format\b|forma\b|form\b|for\b|fo\b|function\b|functio\b|functi\b|funct\b|func\b|fun\b|fu\b|functions\b|function\b|functio\b|functi\b|funct\b|func\b|fun\b|fu\b|grid\b|gri\b|gr\b|g\b|hidden3d\b|hidden3\b|hidden\b|hidde\b|hidd\b|hid\b|historysize\b|historysiz\b|historysi\b|historys\b|history\b|histor\b|histo\b|hist\b|his\b|isosamples\b|isosample\b|isosampl\b|isosamp\b|isosam\b|isosa\b|isos\b|iso\b|is\b|key\b|ke\b|k\b|keytitle\b|keytitl\b|keytit\b|keyti\b|keyt\b|label\b|labe\b|lab\b|la\b|linestyle\b|linestyl\b|linesty\b|linest\b|lines\b|line\b|lin\b|li\b|ls\b|loadpath\b|loadpat\b|loadpa\b|loadp\b|load\b|loa\b|locale\b|local\b|loca\b|loc\b|logscale\b|logscal\b|logsca\b|logsc\b|logs\b|log\b|macros\b|macro\b|macr\b|mac\b|mapping\b|mappin\b|mappi\b|mapp\b|map\b|mapping3d\b|mapping3\b|mapping\b|mappin\b|mappi\b|mapp\b|map\b|margin\b|margi\b|marg\b|mar\b|lmargin\b|lmargi\b|lmarg\b|lmar\b|rmargin\b|rmargi\b|rmarg\b|rmar\b|tmargin\b|tmargi\b|tmarg\b|tmar\b|bmargin\b|bmargi\b|bmarg\b|bmar\b|mouse\b|mous\b|mou\b|mo\b|multiplot\b|multiplo\b|multipl\b|multip\b|multi\b|mxtics\b|mxtic\b|mxti\b|mxt\b|nomxtics\b|nomxtic\b|nomxti\b|nomxt\b|mx2tics\b|mx2tic\b|mx2ti\b|mx2t\b|nomx2tics\b|nomx2tic\b|nomx2ti\b|nomx2t\b|mytics\b|mytic\b|myti\b|myt\b|nomytics\b|nomytic\b|nomyti\b|nomyt\b|my2tics\b|my2tic\b|my2ti\b|my2t\b|nomy2tics\b|nomy2tic\b|nomy2ti\b|nomy2t\b|mztics\b|mztic\b|mzti\b|mzt\b|nomztics\b|nomztic\b|nomzti\b|nomzt\b|mcbtics\b|mcbtic\b|mcbti\b|mcbt\b|nomcbtics\b|nomcbtic\b|nomcbti\b|nomcbt\b|offsets\b|offset\b|offse\b|offs\b|off\b|of\b|origin\b|origi\b|orig\b|ori\b|or\b|output\b|outpu\b|outp\b|out\b|ou\b|o\b|parametric\b|parametri\b|parametr\b|paramet\b|parame\b|param\b|para\b|par\b|pa\b|pm3d\b|pm3\b|pm\b|palette\b|palett\b|palet\b|pale\b|pal\b|colorbox\b|colorbo\b|colorb\b|plot\b|plo\b|pl\b|p\b|pointsize\b|pointsiz\b|pointsi\b|points\b|point\b|poin\b|poi\b|polar\b|pola\b|pol\b|print\b|prin\b|pri\b|pr\b|object\b|objec\b|obje\b|obj\b|samples\b|sample\b|sampl\b|samp\b|sam\b|sa\b|size\b|siz\b|si\b|style\b|styl\b|sty\b|st\b|surface\b|surfac\b|surfa\b|surf\b|sur\b|su\b|table\b|terminal\b|termina\b|termin\b|termi\b|term\b|ter\b|te\b|t\b|termoptions\b|termoption\b|termoptio\b|termopti\b|termopt\b|termop\b|termo\b|tics\b|tic\b|ti\b|ticscale\b|ticscal\b|ticsca\b|ticsc\b|ticslevel\b|ticsleve\b|ticslev\b|ticsle\b|ticsl\b|timefmt\b|timefm\b|timef\b|timestamp\b|timestam\b|timesta\b|timest\b|times\b|time\b|tim\b|title\b|titl\b|tit\b|variables\b|variable\b|variabl\b|variab\b|varia\b|vari\b|var\b|va\b|v\b|version\b|versio\b|versi\b|vers\b|ver\b|ve\b|view\b|vie\b|vi\b|xyplane\b|xyplan\b|xypla\b|xypl\b|xyp\b|xdata\b|xdat\b|xda\b|x2data\b|x2dat\b|x2da\b|ydata\b|ydat\b|yda\b|y2data\b|y2dat\b|y2da\b|zdata\b|zdat\b|zda\b|cbdata\b|cbdat\b|cbda\b|xlabel\b|xlabe\b|xlab\b|xla\b|xl\b|x2label\b|x2labe\b
|x2lab\b|x2la\b|x2l\b|ylabel\b|ylabe\b|ylab\b|yla\b|yl\b|y2label\b|y2labe\b|y2lab\b|y2la\b|y2l\b|zlabel\b|zlabe\b|zlab\b|zla\b|zl\b|cblabel\b|cblabe\b|cblab\b|cbla\b|cbl\b|xtics\b|xtic\b|xti\b|noxtics\b|noxtic\b|noxti\b|x2tics\b|x2tic\b|x2ti\b|nox2tics\b|nox2tic\b|nox2ti\b|ytics\b|ytic\b|yti\b|noytics\b|noytic\b|noyti\b|y2tics\b|y2tic\b|y2ti\b|noy2tics\b|noy2tic\b|noy2ti\b|ztics\b|ztic\b|zti\b|noztics\b|noztic\b|nozti\b|cbtics\b|cbtic\b|cbti\b|nocbtics\b|nocbtic\b|nocbti\b|xdtics\b|xdtic\b|xdti\b|noxdtics\b|noxdtic\b|noxdti\b|x2dtics\b|x2dtic\b|x2dti\b|nox2dtics\b|nox2dtic\b|nox2dti\b|ydtics\b|ydtic\b|ydti\b|noydtics\b|noydtic\b|noydti\b|y2dtics\b|y2dtic\b|y2dti\b|noy2dtics\b|noy2dtic\b|noy2dti\b|zdtics\b|zdtic\b|zdti\b|nozdtics\b|nozdtic\b|nozdti\b|cbdtics\b|cbdtic\b|cbdti\b|nocbdtics\b|nocbdtic\b|nocbdti\b|xmtics\b|xmtic\b|xmti\b|noxmtics\b|noxmtic\b|noxmti\b|x2mtics\b|x2mtic\b|x2mti\b|nox2mtics\b|nox2mtic\b|nox2mti\b|ymtics\b|ymtic\b|ymti\b|noymtics\b|noymtic\b|noymti\b|y2mtics\b|y2mtic\b|y2mti\b|noy2mtics\b|noy2mtic\b|noy2mti\b|zmtics\b|zmtic\b|zmti\b|nozmtics\b|nozmtic\b|nozmti\b|cbmtics\b|cbmtic\b|cbmti\b|nocbmtics\b|nocbmtic\b|nocbmti\b|xrange\b|xrang\b|xran\b|xra\b|xr\b|x2range\b|x2rang\b|x2ran\b|x2ra\b|x2r\b|yrange\b|yrang\b|yran\b|yra\b|yr\b|y2range\b|y2rang\b|y2ran\b|y2ra\b|y2r\b|zrange\b|zrang\b|zran\b|zra\b|zr\b|cbrange\b|cbrang\b|cbran\b|cbra\b|cbr\b|rrange\b|rrang\b|rran\b|rra\b|rr\b|trange\b|trang\b|tran\b|tra\b|tr\b|urange\b|urang\b|uran\b|ura\b|ur\b|vrange\b|vrang\b|vran\b|vra\b|vr\b|xzeroaxis\b|xzeroaxi\b|xzeroax\b|xzeroa\b|x2zeroaxis\b|x2zeroaxi\b|x2zeroax\b|x2zeroa\b|yzeroaxis\b|yzeroaxi\b|yzeroax\b|yzeroa\b|y2zeroaxis\b|y2zeroaxi\b|y2zeroax\b|y2zeroa\b|zzeroaxis\b|zzeroaxi\b|zzeroax\b|zzeroa\b|zeroaxis\b|zeroaxi\b|zeroax\b|zeroa\b|zero\b|zer\b|ze\b|z\b`, NameBuiltin, Pop(1)}, + }, + "bind": { + {`!`, Keyword, Pop(1)}, + {`allwindows\b|allwindow\b|allwindo\b|allwind\b|allwin\b|allwi\b|allw\b|all\b`, NameBuiltin, nil}, + Include("genericargs"), + }, + "quit": { + {`gnuplot\b`, Keyword, nil}, + Include("noargs"), + }, + "fit": { + {`via\b`, NameBuiltin, nil}, + Include("plot"), + }, + "if": { + {`\)`, Punctuation, Pop(1)}, + Include("genericargs"), + }, + "pause": { + {`(mouse|any|button1|button2|button3)\b`, NameBuiltin, nil}, + {`keypress\b|keypres\b|keypre\b|keypr\b|keyp\b|key\b`, NameBuiltin, nil}, + Include("genericargs"), + }, + "plot": { + {`axes\b|axe\b|ax\b|axis\b|axi\b|binary\b|binar\b|bina\b|bin\b|every\b|ever\b|eve\b|ev\b|index\b|inde\b|ind\b|in\b|i\b|matrix\b|matri\b|matr\b|mat\b|smooth\b|smoot\b|smoo\b|smo\b|sm\b|s\b|thru\b|title\b|titl\b|tit\b|ti\b|t\b|notitle\b|notitl\b|notit\b|noti\b|not\b|using\b|usin\b|usi\b|us\b|u\b|with\b|wit\b|wi\b|w\b`, NameBuiltin, nil}, + Include("genericargs"), + }, + "save": { + {`functions\b|function\b|functio\b|functi\b|funct\b|func\b|fun\b|fu\b|f\b|set\b|se\b|s\b|terminal\b|termina\b|termin\b|termi\b|term\b|ter\b|te\b|t\b|variables\b|variable\b|variabl\b|variab\b|varia\b|vari\b|var\b|va\b|v\b`, NameBuiltin, nil}, + Include("genericargs"), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/g/go.go b/vendor/github.com/alecthomas/chroma/lexers/g/go.go new file mode 100644 index 000000000..9dc4075bf --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/g/go.go @@ -0,0 +1,123 @@ +package g + +import ( + "strings" + + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/h" + "github.com/alecthomas/chroma/lexers/internal" +) + +// Go lexer. 
+var Go = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Go", + Aliases: []string{"go", "golang"}, + Filenames: []string{"*.go"}, + MimeTypes: []string{"text/x-gosrc"}, + EnsureNL: true, + }, + goRules, +).SetAnalyser(func(text string) float32 { + if strings.Contains(text, "fmt.") && strings.Contains(text, "package ") { + return 0.5 + } + if strings.Contains(text, "package ") { + return 0.1 + } + return 0.0 +})) + +func goRules() Rules { + return Rules{ + "root": { + {`\n`, Text, nil}, + {`\s+`, Text, nil}, + {`\\\n`, Text, nil}, + {`//(.*?)\n`, CommentSingle, nil}, + {`/(\\\n)?[*](.|\n)*?[*](\\\n)?/`, CommentMultiline, nil}, + {`(import|package)\b`, KeywordNamespace, nil}, + {`(var|func|struct|map|chan|type|interface|const)\b`, KeywordDeclaration, nil}, + {Words(``, `\b`, `break`, `default`, `select`, `case`, `defer`, `go`, `else`, `goto`, `switch`, `fallthrough`, `if`, `range`, `continue`, `for`, `return`), Keyword, nil}, + {`(true|false|iota|nil)\b`, KeywordConstant, nil}, + {Words(``, `\b(\()`, `uint`, `uint8`, `uint16`, `uint32`, `uint64`, `int`, `int8`, `int16`, `int32`, `int64`, `float`, `float32`, `float64`, `complex64`, `complex128`, `byte`, `rune`, `string`, `bool`, `error`, `uintptr`, `print`, `println`, `panic`, `recover`, `close`, `complex`, `real`, `imag`, `len`, `cap`, `append`, `copy`, `delete`, `new`, `make`), ByGroups(NameBuiltin, Punctuation), nil}, + {Words(``, `\b`, `uint`, `uint8`, `uint16`, `uint32`, `uint64`, `int`, `int8`, `int16`, `int32`, `int64`, `float`, `float32`, `float64`, `complex64`, `complex128`, `byte`, `rune`, `string`, `bool`, `error`, `uintptr`), KeywordType, nil}, + {`\d+i`, LiteralNumber, nil}, + {`\d+\.\d*([Ee][-+]\d+)?i`, LiteralNumber, nil}, + {`\.\d+([Ee][-+]\d+)?i`, LiteralNumber, nil}, + {`\d+[Ee][-+]\d+i`, LiteralNumber, nil}, + {`\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)`, LiteralNumberFloat, nil}, + {`\.\d+([eE][+\-]?\d+)?`, LiteralNumberFloat, nil}, + {`0[0-7]+`, LiteralNumberOct, nil}, + {`0[xX][0-9a-fA-F_]+`, LiteralNumberHex, nil}, + {`0b[01_]+`, LiteralNumberBin, nil}, + {`(0|[1-9][0-9_]*)`, LiteralNumberInteger, nil}, + {`'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'`, LiteralStringChar, nil}, + {"(`)([^`]*)(`)", ByGroups(LiteralString, Using(TypeRemappingLexer(GoTextTemplate, TypeMapping{{Other, LiteralString, nil}})), LiteralString), nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\||<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])`, Operator, nil}, + {`([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(NameFunction, UsingSelf("root"), Punctuation), nil}, + {`[|^<>=!()\[\]{}.,;:]`, Punctuation, nil}, + {`[^\W\d]\w*`, NameOther, nil}, + }, + } +} + +func goTemplateRules() Rules { + return Rules{ + "root": { + {`{{(- )?/\*(.|\n)*?\*/( -)?}}`, CommentMultiline, nil}, + {`{{[-]?`, CommentPreproc, Push("template")}, + {`[^{]+`, Other, nil}, + {`{`, Other, nil}, + }, + "template": { + {`[-]?}}`, CommentPreproc, Pop(1)}, + {`(?=}})`, CommentPreproc, Pop(1)}, // Terminate the pipeline + {`\(`, Operator, Push("subexpression")}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + Include("expression"), + }, + "subexpression": { + {`\)`, Operator, Pop(1)}, + Include("expression"), + }, + "expression": { + {`\s+`, Whitespace, nil}, + {`\(`, Operator, Push("subexpression")}, + {`(range|if|else|while|with|template|end|true|false|nil|and|call|html|index|js|len|not|or|print|printf|println|urlquery|eq|ne|lt|le|gt|ge)\b`, Keyword, nil}, + {`\||:?=|,`, 
Operator, nil}, + {`[$]?[^\W\d]\w*`, NameOther, nil}, + {`\$|[$]?\.(?:[^\W\d]\w*)?`, NameAttribute, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`-?\d+i`, LiteralNumber, nil}, + {`-?\d+\.\d*([Ee][-+]\d+)?i`, LiteralNumber, nil}, + {`\.\d+([Ee][-+]\d+)?i`, LiteralNumber, nil}, + {`-?\d+[Ee][-+]\d+i`, LiteralNumber, nil}, + {`-?\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)`, LiteralNumberFloat, nil}, + {`-?\.\d+([eE][+\-]?\d+)?`, LiteralNumberFloat, nil}, + {`-?0[0-7]+`, LiteralNumberOct, nil}, + {`-?0[xX][0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`-?0b[01_]+`, LiteralNumberBin, nil}, + {`-?(0|[1-9][0-9]*)`, LiteralNumberInteger, nil}, + {`'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'`, LiteralStringChar, nil}, + {"`[^`]*`", LiteralString, nil}, + }, + } +} + +var GoHTMLTemplate = internal.Register(DelegatingLexer(h.HTML, MustNewLazyLexer( + &Config{ + Name: "Go HTML Template", + Aliases: []string{"go-html-template"}, + }, + goTemplateRules, +))) + +var GoTextTemplate = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Go Text Template", + Aliases: []string{"go-text-template"}, + }, + goTemplateRules, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/g/graphql.go b/vendor/github.com/alecthomas/chroma/lexers/g/graphql.go new file mode 100644 index 000000000..7d465cd4c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/g/graphql.go @@ -0,0 +1,49 @@ +package g + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Go lexer. +var Graphql = internal.Register(MustNewLazyLexer( + &Config{ + Name: "GraphQL", + Aliases: []string{"graphql", "graphqls", "gql"}, + Filenames: []string{"*.graphql", "*.graphqls"}, + }, + graphqlRules, +)) + +func graphqlRules() Rules { + return Rules{ + "root": { + {`(query|mutation|subscription|fragment|scalar|implements|interface|union|enum|input|type)`, KeywordDeclaration, Push("type")}, + {`(on|extend|schema|directive|\.\.\.)`, KeywordDeclaration, nil}, + {`(QUERY|MUTATION|SUBSCRIPTION|FIELD|FRAGMENT_DEFINITION|FRAGMENT_SPREAD|INLINE_FRAGMENT|SCHEMA|SCALAR|OBJECT|FIELD_DEFINITION|ARGUMENT_DEFINITION|INTERFACE|UNION|ENUM|ENUM_VALUE|INPUT_OBJECT|INPUT_FIELD_DEFINITION)\b`, KeywordConstant, nil}, + {`[^\W\d]\w*`, NameProperty, nil}, + {`\@\w+`, NameDecorator, nil}, + {`:`, Punctuation, Push("type")}, + {`[\(\)\{\}\[\],!\|=]`, Punctuation, nil}, + {`\$\w+`, NameVariable, nil}, + {`\d+i`, LiteralNumber, nil}, + {`\d+\.\d*([Ee][-+]\d+)?i`, LiteralNumber, nil}, + {`\.\d+([Ee][-+]\d+)?i`, LiteralNumber, nil}, + {`\d+[Ee][-+]\d+i`, LiteralNumber, nil}, + {`\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)`, LiteralNumberFloat, nil}, + {`\.\d+([eE][+\-]?\d+)?`, LiteralNumberFloat, nil}, + {`(0|[1-9][0-9]*)`, LiteralNumberInteger, nil}, + {`"""[\x00-\x7F]*?"""`, LiteralString, nil}, + {`"(\\["\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])"`, LiteralStringChar, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`"(true|false|null)*"`, Literal, nil}, + {`[\r\n\s]+`, Whitespace, nil}, + {`#[^\r\n]*`, Comment, nil}, + }, + // Treats the next word as a class, default rules it would be a property + "type": { + {`[^\W\d]\w*`, NameClass, Pop(1)}, + Include("root"), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/g/groff.go b/vendor/github.com/alecthomas/chroma/lexers/g/groff.go new file mode 100644 index 000000000..65a70934f --- /dev/null +++ 
b/vendor/github.com/alecthomas/chroma/lexers/g/groff.go @@ -0,0 +1,47 @@ +package g + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Groff lexer. +var Groff = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Groff", + Aliases: []string{"groff", "nroff", "man"}, + Filenames: []string{"*.[1-9]", "*.1p", "*.3pm", "*.man"}, + MimeTypes: []string{"application/x-troff", "text/troff"}, + }, + func() Rules { + return Rules{ + "root": { + {`(\.)(\w+)`, ByGroups(Text, Keyword), Push("request")}, + {`\.`, Punctuation, Push("request")}, + {`[^\\\n]+`, Text, Push("textline")}, + Default(Push("textline")), + }, + "textline": { + Include("escapes"), + {`[^\\\n]+`, Text, nil}, + {`\n`, Text, Pop(1)}, + }, + "escapes": { + {`\\"[^\n]*`, Comment, nil}, + {`\\[fn]\w`, LiteralStringEscape, nil}, + {`\\\(.{2}`, LiteralStringEscape, nil}, + {`\\.\[.*\]`, LiteralStringEscape, nil}, + {`\\.`, LiteralStringEscape, nil}, + {`\\\n`, Text, Push("request")}, + }, + "request": { + {`\n`, Text, Pop(1)}, + Include("escapes"), + {`"[^\n"]+"`, LiteralStringDouble, nil}, + {`\d+`, LiteralNumber, nil}, + {`\S+`, LiteralString, nil}, + {`\s+`, Text, nil}, + }, + } + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/g/groovy.go b/vendor/github.com/alecthomas/chroma/lexers/g/groovy.go new file mode 100644 index 000000000..8d37bd87e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/g/groovy.go @@ -0,0 +1,62 @@ +package g + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Groovy lexer. +var Groovy = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Groovy", + Aliases: []string{"groovy"}, + Filenames: []string{"*.groovy", "*.gradle"}, + MimeTypes: []string{"text/x-groovy"}, + DotAll: true, + }, + groovyRules, +)) + +func groovyRules() Rules { + return Rules{ + "root": { + {`#!(.*?)$`, CommentPreproc, Push("base")}, + Default(Push("base")), + }, + "base": { + {`^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil}, + {`[^\S\n]+`, Text, nil}, + {`//.*?\n`, CommentSingle, nil}, + {`/\*.*?\*/`, CommentMultiline, nil}, + {`@[a-zA-Z_][\w.]*`, NameDecorator, nil}, + {`(as|assert|break|case|catch|continue|default|do|else|finally|for|if|in|goto|instanceof|new|return|switch|this|throw|try|while|in|as)\b`, Keyword, nil}, + {`(abstract|const|enum|extends|final|implements|native|private|protected|public|static|strictfp|super|synchronized|throws|transient|volatile)\b`, KeywordDeclaration, nil}, + {`(def|boolean|byte|char|double|float|int|long|short|void)\b`, KeywordType, nil}, + {`(package)(\s+)`, ByGroups(KeywordNamespace, Text), nil}, + {`(true|false|null)\b`, KeywordConstant, nil}, + {`(class|interface)(\s+)`, ByGroups(KeywordDeclaration, Text), Push("class")}, + {`(import)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")}, + {`""".*?"""`, LiteralStringDouble, nil}, + {`'''.*?'''`, LiteralStringSingle, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + {`\$/((?!/\$).)*/\$`, LiteralString, nil}, + {`/(\\\\|\\"|[^/])*/`, LiteralString, nil}, + {`'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'`, LiteralStringChar, nil}, + {`(\.)([a-zA-Z_]\w*)`, ByGroups(Operator, NameAttribute), nil}, + {`[a-zA-Z_]\w*:`, NameLabel, nil}, + {`[a-zA-Z_$]\w*`, Name, nil}, + {`[~^*!%&\[\](){}<>|+=:;,./?-]`, Operator, nil}, + {`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, 
LiteralNumberFloat, nil}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`[0-9]+L?`, LiteralNumberInteger, nil}, + {`\n`, Text, nil}, + }, + "class": { + {`[a-zA-Z_]\w*`, NameClass, Pop(1)}, + }, + "import": { + {`[\w.]+\*?`, NameNamespace, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/handlebars.go b/vendor/github.com/alecthomas/chroma/lexers/h/handlebars.go new file mode 100644 index 000000000..d34ef3a98 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/h/handlebars.go @@ -0,0 +1,60 @@ +package h + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Handlebars lexer. +var Handlebars = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Handlebars", + Aliases: []string{"handlebars", "hbs"}, + Filenames: []string{"*.handlebars", "*.hbs"}, + MimeTypes: []string{}, + }, + handlebarsRules, +)) + +func handlebarsRules() Rules { + return Rules{ + "root": { + {`[^{]+`, Other, nil}, + {`\{\{!.*\}\}`, Comment, nil}, + {`(\{\{\{)(\s*)`, ByGroups(CommentSpecial, Text), Push("tag")}, + {`(\{\{)(\s*)`, ByGroups(CommentPreproc, Text), Push("tag")}, + }, + "tag": { + {`\s+`, Text, nil}, + {`\}\}\}`, CommentSpecial, Pop(1)}, + {`\}\}`, CommentPreproc, Pop(1)}, + {`([#/]*)(each|if|unless|else|with|log|in(?:line)?)`, ByGroups(Keyword, Keyword), nil}, + {`#\*inline`, Keyword, nil}, + {`([#/])([\w-]+)`, ByGroups(NameFunction, NameFunction), nil}, + {`([\w-]+)(=)`, ByGroups(NameAttribute, Operator), nil}, + {`(>)(\s*)(@partial-block)`, ByGroups(Keyword, Text, Keyword), nil}, + {`(#?>)(\s*)([\w-]+)`, ByGroups(Keyword, Text, NameVariable), nil}, + {`(>)(\s*)(\()`, ByGroups(Keyword, Text, Punctuation), Push("dynamic-partial")}, + Include("generic"), + }, + "dynamic-partial": { + {`\s+`, Text, nil}, + {`\)`, Punctuation, Pop(1)}, + {`(lookup)(\s+)(\.|this)(\s+)`, ByGroups(Keyword, Text, NameVariable, Text), nil}, + {`(lookup)(\s+)(\S+)`, ByGroups(Keyword, Text, UsingSelf("variable")), nil}, + {`[\w-]+`, NameFunction, nil}, + Include("generic"), + }, + "variable": { + {`[a-zA-Z][\w-]*`, NameVariable, nil}, + {`\.[\w-]+`, NameVariable, nil}, + {`(this\/|\.\/|(\.\.\/)+)[\w-]+`, NameVariable, nil}, + }, + "generic": { + Include("variable"), + {`:?"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`:?'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + {`[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/haskell.go b/vendor/github.com/alecthomas/chroma/lexers/h/haskell.go new file mode 100644 index 000000000..e34f03ef1 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/h/haskell.go @@ -0,0 +1,103 @@ +package h + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Haskell lexer. 
+var Haskell = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Haskell", + Aliases: []string{"haskell", "hs"}, + Filenames: []string{"*.hs"}, + MimeTypes: []string{"text/x-haskell"}, + }, + haskellRules, +)) + +func haskellRules() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`--(?![!#$%&*+./<=>?@^|_~:\\]).*?$`, CommentSingle, nil}, + {`\{-`, CommentMultiline, Push("comment")}, + {`\bimport\b`, KeywordReserved, Push("import")}, + {`\bmodule\b`, KeywordReserved, Push("module")}, + {`\berror\b`, NameException, nil}, + {`\b(case|class|data|default|deriving|do|else|family|if|in|infix[lr]?|instance|let|newtype|of|then|type|where|_)(?!\')\b`, KeywordReserved, nil}, + {`'[^\\]'`, LiteralStringChar, nil}, + {`^[_\p{Ll}][\w\']*`, NameFunction, nil}, + {`'?[_\p{Ll}][\w']*`, Name, nil}, + {`('')?[\p{Lu}][\w\']*`, KeywordType, nil}, + {`(')[\p{Lu}][\w\']*`, KeywordType, nil}, + {`(')\[[^\]]*\]`, KeywordType, nil}, + {`(')\([^)]*\)`, KeywordType, nil}, + {`\\(?![:!#$%&*+.\\/<=>?@^|~-]+)`, NameFunction, nil}, + {`(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)`, OperatorWord, nil}, + {`:[:!#$%&*+.\\/<=>?@^|~-]*`, KeywordType, nil}, + {`[:!#$%&*+.\\/<=>?@^|~-]+`, Operator, nil}, + {`\d+[eE][+-]?\d+`, LiteralNumberFloat, nil}, + {`\d+\.\d+([eE][+-]?\d+)?`, LiteralNumberFloat, nil}, + {`0[oO][0-7]+`, LiteralNumberOct, nil}, + {`0[xX][\da-fA-F]+`, LiteralNumberHex, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`'`, LiteralStringChar, Push("character")}, + {`"`, LiteralString, Push("string")}, + {`\[\]`, KeywordType, nil}, + {`\(\)`, NameBuiltin, nil}, + {"[][(),;`{}]", Punctuation, nil}, + }, + "import": { + {`\s+`, Text, nil}, + {`"`, LiteralString, Push("string")}, + {`\)`, Punctuation, Pop(1)}, + {`qualified\b`, Keyword, nil}, + {`([\p{Lu}][\w.]*)(\s+)(as)(\s+)([\p{Lu}][\w.]*)`, ByGroups(NameNamespace, Text, Keyword, Text, Name), Pop(1)}, + {`([\p{Lu}][\w.]*)(\s+)(hiding)(\s+)(\()`, ByGroups(NameNamespace, Text, Keyword, Text, Punctuation), Push("funclist")}, + {`([\p{Lu}][\w.]*)(\s+)(\()`, ByGroups(NameNamespace, Text, Punctuation), Push("funclist")}, + {`[\w.]+`, NameNamespace, Pop(1)}, + }, + "module": { + {`\s+`, Text, nil}, + {`([\p{Lu}][\w.]*)(\s+)(\()`, ByGroups(NameNamespace, Text, Punctuation), Push("funclist")}, + {`[\p{Lu}][\w.]*`, NameNamespace, Pop(1)}, + }, + "funclist": { + {`\s+`, Text, nil}, + {`[\p{Lu}]\w*`, KeywordType, nil}, + {`(_[\w\']+|[\p{Ll}][\w\']*)`, NameFunction, nil}, + {`--(?![!#$%&*+./<=>?@^|_~:\\]).*?$`, CommentSingle, nil}, + {`\{-`, CommentMultiline, Push("comment")}, + {`,`, Punctuation, nil}, + {`[:!#$%&*+.\\/<=>?@^|~-]+`, Operator, nil}, + {`\(`, Punctuation, Push("funclist", "funclist")}, + {`\)`, Punctuation, Pop(2)}, + }, + "comment": { + {`[^-{}]+`, CommentMultiline, nil}, + {`\{-`, CommentMultiline, Push()}, + {`-\}`, CommentMultiline, Pop(1)}, + {`[-{}]`, CommentMultiline, nil}, + }, + "character": { + {`[^\\']'`, LiteralStringChar, Pop(1)}, + {`\\`, LiteralStringEscape, Push("escape")}, + {`'`, LiteralStringChar, Pop(1)}, + }, + "string": { + {`[^\\"]+`, LiteralString, nil}, + {`\\`, LiteralStringEscape, Push("escape")}, + {`"`, LiteralString, Pop(1)}, + }, + "escape": { + {`[abfnrtv"\'&\\]`, LiteralStringEscape, Pop(1)}, + {`\^[][\p{Lu}@^_]`, LiteralStringEscape, Pop(1)}, + {`NUL|SOH|[SE]TX|EOT|ENQ|ACK|BEL|BS|HT|LF|VT|FF|CR|S[OI]|DLE|DC[1-4]|NAK|SYN|ETB|CAN|EM|SUB|ESC|[FGRU]S|SP|DEL`, LiteralStringEscape, Pop(1)}, + {`o[0-7]+`, LiteralStringEscape, Pop(1)}, + {`x[\da-fA-F]+`, LiteralStringEscape, Pop(1)}, + {`\d+`, LiteralStringEscape, 
Pop(1)}, + {`\s+\\`, LiteralStringEscape, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/haxe.go b/vendor/github.com/alecthomas/chroma/lexers/h/haxe.go new file mode 100644 index 000000000..cc8c693ef --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/h/haxe.go @@ -0,0 +1,646 @@ +package h + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Haxe lexer. +var Haxe = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Haxe", + Aliases: []string{"hx", "haxe", "hxsl"}, + Filenames: []string{"*.hx", "*.hxsl"}, + MimeTypes: []string{"text/haxe", "text/x-haxe", "text/x-hx"}, + DotAll: true, + }, + haxeRules, +)) + +func haxeRules() Rules { + return Rules{ + "root": { + Include("spaces"), + Include("meta"), + {`(?:package)\b`, KeywordNamespace, Push("semicolon", "package")}, + {`(?:import)\b`, KeywordNamespace, Push("semicolon", "import")}, + {`(?:using)\b`, KeywordNamespace, Push("semicolon", "using")}, + {`(?:extern|private)\b`, KeywordDeclaration, nil}, + {`(?:abstract)\b`, KeywordDeclaration, Push("abstract")}, + {`(?:class|interface)\b`, KeywordDeclaration, Push("class")}, + {`(?:enum)\b`, KeywordDeclaration, Push("enum")}, + {`(?:typedef)\b`, KeywordDeclaration, Push("typedef")}, + {`(?=.)`, Text, Push("expr-statement")}, + }, + "spaces": { + {`\s+`, Text, nil}, + {`//[^\n\r]*`, CommentSingle, nil}, + {`/\*.*?\*/`, CommentMultiline, nil}, + {`(#)(if|elseif|else|end|error)\b`, CommentPreproc, MutatorFunc(haxePreProcMutator)}, + }, + "string-single-interpol": { + {`\$\{`, LiteralStringInterpol, Push("string-interpol-close", "expr")}, + {`\$\$`, LiteralStringEscape, nil}, + {`\$(?=(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+))`, LiteralStringInterpol, Push("ident")}, + Include("string-single"), + }, + "string-single": { + {`'`, LiteralStringSingle, Pop(1)}, + {`\\.`, LiteralStringEscape, nil}, + {`.`, LiteralStringSingle, nil}, + }, + "string-double": { + {`"`, LiteralStringDouble, Pop(1)}, + {`\\.`, LiteralStringEscape, nil}, + {`.`, LiteralStringDouble, nil}, + }, + "string-interpol-close": { + {`\$(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, LiteralStringInterpol, nil}, + {`\}`, LiteralStringInterpol, Pop(1)}, + }, + "package": { + Include("spaces"), + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameNamespace, nil}, + {`\.`, Punctuation, Push("import-ident")}, + Default(Pop(1)), + }, + "import": { + Include("spaces"), + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameNamespace, nil}, + {`\*`, Keyword, nil}, + {`\.`, Punctuation, Push("import-ident")}, + {`in`, KeywordNamespace, Push("ident")}, + Default(Pop(1)), + }, + "import-ident": { + Include("spaces"), + {`\*`, Keyword, Pop(1)}, + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameNamespace, Pop(1)}, + }, + "using": { + Include("spaces"), + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameNamespace, nil}, + {`\.`, Punctuation, Push("import-ident")}, + Default(Pop(1)), + }, + "preproc-error": { + {`\s+`, CommentPreproc, nil}, + {`'`, LiteralStringSingle, Push("#pop", "string-single")}, + {`"`, LiteralStringDouble, Push("#pop", "string-double")}, + Default(Pop(1)), + }, + "preproc-expr": { + {`\s+`, CommentPreproc, nil}, + {`\!`, CommentPreproc, nil}, + {`\(`, CommentPreproc, Push("#pop", "preproc-parenthesis")}, + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, CommentPreproc, Pop(1)}, + {`\.[0-9]+`, LiteralNumberFloat, nil}, + {`[0-9]+[eE][+\-]?[0-9]+`, LiteralNumberFloat, nil}, + 
{`[0-9]+\.[0-9]*[eE][+\-]?[0-9]+`, LiteralNumberFloat, nil}, + {`[0-9]+\.[0-9]+`, LiteralNumberFloat, nil}, + {`[0-9]+\.(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)|\.\.)`, LiteralNumberFloat, nil}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`'`, LiteralStringSingle, Push("#pop", "string-single")}, + {`"`, LiteralStringDouble, Push("#pop", "string-double")}, + }, + "preproc-parenthesis": { + {`\s+`, CommentPreproc, nil}, + {`\)`, CommentPreproc, Pop(1)}, + Default(Push("preproc-expr-in-parenthesis")), + }, + "preproc-expr-chain": { + {`\s+`, CommentPreproc, nil}, + {`(?:%=|&=|\|=|\^=|\+=|\-=|\*=|/=|<<=|>\s*>\s*=|>\s*>\s*>\s*=|==|!=|<=|>\s*=|&&|\|\||<<|>>>|>\s*>|\.\.\.|<|>|%|&|\||\^|\+|\*|/|\-|=>|=)`, CommentPreproc, Push("#pop", "preproc-expr-in-parenthesis")}, + Default(Pop(1)), + }, + "preproc-expr-in-parenthesis": { + {`\s+`, CommentPreproc, nil}, + {`\!`, CommentPreproc, nil}, + {`\(`, CommentPreproc, Push("#pop", "preproc-expr-chain", "preproc-parenthesis")}, + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, CommentPreproc, Push("#pop", "preproc-expr-chain")}, + {`\.[0-9]+`, LiteralNumberFloat, Push("#pop", "preproc-expr-chain")}, + {`[0-9]+[eE][+\-]?[0-9]+`, LiteralNumberFloat, Push("#pop", "preproc-expr-chain")}, + {`[0-9]+\.[0-9]*[eE][+\-]?[0-9]+`, LiteralNumberFloat, Push("#pop", "preproc-expr-chain")}, + {`[0-9]+\.[0-9]+`, LiteralNumberFloat, Push("#pop", "preproc-expr-chain")}, + {`[0-9]+\.(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)|\.\.)`, LiteralNumberFloat, Push("#pop", "preproc-expr-chain")}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, Push("#pop", "preproc-expr-chain")}, + {`[0-9]+`, LiteralNumberInteger, Push("#pop", "preproc-expr-chain")}, + {`'`, LiteralStringSingle, Push("#pop", "preproc-expr-chain", "string-single")}, + {`"`, LiteralStringDouble, Push("#pop", "preproc-expr-chain", "string-double")}, + }, + "abstract": { + Include("spaces"), + Default(Pop(1), Push("abstract-body"), Push("abstract-relation"), Push("abstract-opaque"), Push("type-param-constraint"), Push("type-name")), + }, + "abstract-body": { + Include("spaces"), + {`\{`, Punctuation, Push("#pop", "class-body")}, + }, + "abstract-opaque": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "parenthesis-close", "type")}, + Default(Pop(1)), + }, + "abstract-relation": { + Include("spaces"), + {`(?:to|from)`, KeywordDeclaration, Push("type")}, + {`,`, Punctuation, nil}, + Default(Pop(1)), + }, + "meta": { + Include("spaces"), + {`@`, NameDecorator, Push("meta-body", "meta-ident", "meta-colon")}, + }, + "meta-colon": { + Include("spaces"), + {`:`, NameDecorator, Pop(1)}, + Default(Pop(1)), + }, + "meta-ident": { + Include("spaces"), + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameDecorator, Pop(1)}, + }, + "meta-body": { + Include("spaces"), + {`\(`, NameDecorator, Push("#pop", "meta-call")}, + Default(Pop(1)), + }, + "meta-call": { + Include("spaces"), + {`\)`, NameDecorator, Pop(1)}, + Default(Pop(1), Push("meta-call-sep"), Push("expr")), + }, + "meta-call-sep": { + Include("spaces"), + {`\)`, NameDecorator, Pop(1)}, + {`,`, Punctuation, Push("#pop", "meta-call")}, + }, + "typedef": { + Include("spaces"), + Default(Pop(1), Push("typedef-body"), Push("type-param-constraint"), Push("type-name")), + }, + "typedef-body": { + Include("spaces"), + {`=`, Operator, Push("#pop", "optional-semicolon", "type")}, + }, + "enum": { + Include("spaces"), + Default(Pop(1), Push("enum-body"), Push("bracket-open"), Push("type-param-constraint"), 
Push("type-name")), + }, + "enum-body": { + Include("spaces"), + Include("meta"), + {`\}`, Punctuation, Pop(1)}, + {`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("enum-member", "type-param-constraint")}, + }, + "enum-member": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "semicolon", "flag", "function-param")}, + Default(Pop(1), Push("semicolon"), Push("flag")), + }, + "class": { + Include("spaces"), + Default(Pop(1), Push("class-body"), Push("bracket-open"), Push("extends"), Push("type-param-constraint"), Push("type-name")), + }, + "extends": { + Include("spaces"), + {`(?:extends|implements)\b`, KeywordDeclaration, Push("type")}, + {`,`, Punctuation, nil}, + Default(Pop(1)), + }, + "bracket-open": { + Include("spaces"), + {`\{`, Punctuation, Pop(1)}, + }, + "bracket-close": { + Include("spaces"), + {`\}`, Punctuation, Pop(1)}, + }, + "class-body": { + Include("spaces"), + Include("meta"), + {`\}`, Punctuation, Pop(1)}, + {`(?:static|public|private|override|dynamic|inline|macro)\b`, KeywordDeclaration, nil}, + Default(Push("class-member")), + }, + "class-member": { + Include("spaces"), + {`(var)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "var")}, + {`(function)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "class-method")}, + }, + "function-local": { + Include("spaces"), + {`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameFunction, Push("#pop", "optional-expr", "flag", "function-param", "parenthesis-open", "type-param-constraint")}, + Default(Pop(1), Push("optional-expr"), Push("flag"), Push("function-param"), Push("parenthesis-open"), Push("type-param-constraint")), + }, + "optional-expr": { + Include("spaces"), + Include("expr"), + Default(Pop(1)), + }, + "class-method": { + Include("spaces"), + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameFunction, Push("#pop", "optional-expr", "flag", "function-param", "parenthesis-open", "type-param-constraint")}, + }, + "function-param": { + Include("spaces"), + {`\)`, Punctuation, Pop(1)}, + {`\?`, Punctuation, nil}, + {`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("#pop", "function-param-sep", "assign", "flag")}, + }, + "function-param-sep": { + Include("spaces"), + {`\)`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "function-param")}, + }, + "prop-get-set": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "parenthesis-close", "prop-get-set-opt", "comma", "prop-get-set-opt")}, + Default(Pop(1)), + }, + "prop-get-set-opt": { + Include("spaces"), + {`(?:default|null|never|dynamic|get|set)\b`, Keyword, Pop(1)}, + 
{`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Text, Pop(1)}, + }, + "expr-statement": { + Include("spaces"), + Default(Pop(1), Push("optional-semicolon"), Push("expr")), + }, + "expr": { + Include("spaces"), + {`@`, NameDecorator, Push("#pop", "optional-expr", "meta-body", "meta-ident", "meta-colon")}, + {`(?:\+\+|\-\-|~(?!/)|!|\-)`, Operator, nil}, + {`\(`, Punctuation, Push("#pop", "expr-chain", "parenthesis")}, + {`(?:static|public|private|override|dynamic|inline)\b`, KeywordDeclaration, nil}, + {`(?:function)\b`, KeywordDeclaration, Push("#pop", "expr-chain", "function-local")}, + {`\{`, Punctuation, Push("#pop", "expr-chain", "bracket")}, + {`(?:true|false|null)\b`, KeywordConstant, Push("#pop", "expr-chain")}, + {`(?:this)\b`, Keyword, Push("#pop", "expr-chain")}, + {`(?:cast)\b`, Keyword, Push("#pop", "expr-chain", "cast")}, + {`(?:try)\b`, Keyword, Push("#pop", "catch", "expr")}, + {`(?:var)\b`, KeywordDeclaration, Push("#pop", "var")}, + {`(?:new)\b`, Keyword, Push("#pop", "expr-chain", "new")}, + {`(?:switch)\b`, Keyword, Push("#pop", "switch")}, + {`(?:if)\b`, Keyword, Push("#pop", "if")}, + {`(?:do)\b`, Keyword, Push("#pop", "do")}, + {`(?:while)\b`, Keyword, Push("#pop", "while")}, + {`(?:for)\b`, Keyword, Push("#pop", "for")}, + {`(?:untyped|throw)\b`, Keyword, nil}, + {`(?:return)\b`, Keyword, Push("#pop", "optional-expr")}, + {`(?:macro)\b`, Keyword, Push("#pop", "macro")}, + {`(?:continue|break)\b`, Keyword, Pop(1)}, + {`(?:\$\s*[a-z]\b|\$(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)))`, Name, Push("#pop", "dollar")}, + {`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("#pop", "expr-chain")}, + {`\.[0-9]+`, LiteralNumberFloat, Push("#pop", "expr-chain")}, + {`[0-9]+[eE][+\-]?[0-9]+`, LiteralNumberFloat, Push("#pop", "expr-chain")}, + {`[0-9]+\.[0-9]*[eE][+\-]?[0-9]+`, LiteralNumberFloat, Push("#pop", "expr-chain")}, + {`[0-9]+\.[0-9]+`, LiteralNumberFloat, Push("#pop", "expr-chain")}, + {`[0-9]+\.(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)|\.\.)`, LiteralNumberFloat, Push("#pop", "expr-chain")}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, Push("#pop", "expr-chain")}, + {`[0-9]+`, LiteralNumberInteger, Push("#pop", "expr-chain")}, + {`'`, LiteralStringSingle, Push("#pop", "expr-chain", "string-single-interpol")}, + {`"`, LiteralStringDouble, Push("#pop", "expr-chain", "string-double")}, + {`~/(\\\\|\\/|[^/\n])*/[gimsu]*`, LiteralStringRegex, Push("#pop", "expr-chain")}, + {`\[`, Punctuation, Push("#pop", "expr-chain", "array-decl")}, + }, + "expr-chain": { + Include("spaces"), + {`(?:\+\+|\-\-)`, Operator, nil}, + {`(?:%=|&=|\|=|\^=|\+=|\-=|\*=|/=|<<=|>\s*>\s*=|>\s*>\s*>\s*=|==|!=|<=|>\s*=|&&|\|\||<<|>>>|>\s*>|\.\.\.|<|>|%|&|\||\^|\+|\*|/|\-|=>|=)`, Operator, Push("#pop", "expr")}, + {`(?:in)\b`, Keyword, Push("#pop", "expr")}, + {`\?`, Operator, Push("#pop", "expr", "ternary", "expr")}, + 
{`(\.)((?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+))`, ByGroups(Punctuation, Name), nil}, + {`\[`, Punctuation, Push("array-access")}, + {`\(`, Punctuation, Push("call")}, + Default(Pop(1)), + }, + "macro": { + Include("spaces"), + Include("meta"), + {`:`, Punctuation, Push("#pop", "type")}, + {`(?:extern|private)\b`, KeywordDeclaration, nil}, + {`(?:abstract)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "abstract")}, + {`(?:class|interface)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "macro-class")}, + {`(?:enum)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "enum")}, + {`(?:typedef)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "typedef")}, + Default(Pop(1), Push("expr")), + }, + "macro-class": { + {`\{`, Punctuation, Push("#pop", "class-body")}, + Include("class"), + }, + "cast": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "parenthesis-close", "cast-type", "expr")}, + Default(Pop(1), Push("expr")), + }, + "cast-type": { + Include("spaces"), + {`,`, Punctuation, Push("#pop", "type")}, + Default(Pop(1)), + }, + "catch": { + Include("spaces"), + {`(?:catch)\b`, Keyword, Push("expr", "function-param", "parenthesis-open")}, + Default(Pop(1)), + }, + "do": { + Include("spaces"), + Default(Pop(1), Push("do-while"), Push("expr")), + }, + "do-while": { + Include("spaces"), + {`(?:while)\b`, Keyword, Push("#pop", "parenthesis", "parenthesis-open")}, + }, + "while": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "expr", "parenthesis")}, + }, + "for": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "expr", "parenthesis")}, + }, + "if": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "else", "optional-semicolon", "expr", "parenthesis")}, + }, + "else": { + Include("spaces"), + {`(?:else)\b`, Keyword, Push("#pop", "expr")}, + Default(Pop(1)), + }, + "switch": { + Include("spaces"), + Default(Pop(1), Push("switch-body"), Push("bracket-open"), Push("expr")), + }, + "switch-body": { + Include("spaces"), + {`(?:case|default)\b`, Keyword, Push("case-block", "case")}, + {`\}`, Punctuation, Pop(1)}, + }, + "case": { + Include("spaces"), + {`:`, Punctuation, Pop(1)}, + Default(Pop(1), Push("case-sep"), Push("case-guard"), Push("expr")), + }, + "case-sep": { + Include("spaces"), + {`:`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "case")}, + }, + "case-guard": { + Include("spaces"), + {`(?:if)\b`, Keyword, Push("#pop", "parenthesis", "parenthesis-open")}, + Default(Pop(1)), + }, + "case-block": { + Include("spaces"), + {`(?!(?:case|default)\b|\})`, Keyword, Push("expr-statement")}, + Default(Pop(1)), + }, + "new": { + Include("spaces"), + Default(Pop(1), Push("call"), Push("parenthesis-open"), Push("type")), + }, + "array-decl": { + Include("spaces"), + {`\]`, Punctuation, Pop(1)}, + Default(Pop(1), Push("array-decl-sep"), Push("expr")), + }, + "array-decl-sep": { + Include("spaces"), + {`\]`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "array-decl")}, + }, + "array-access": { + Include("spaces"), + Default(Pop(1), Push("array-access-close"), Push("expr")), + }, + "array-access-close": { + Include("spaces"), + {`\]`, Punctuation, Pop(1)}, + }, + "comma": { + Include("spaces"), + {`,`, Punctuation, Pop(1)}, + }, + 
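// Each rule below is {regex, emitter, mutator}: Push and Pop manipulate the
+ // lexer's state stack, Include splices in another state's rules, and
+ // Default fires without consuming input when no other rule matches. The
+ // single-token helper states that follow ("colon", "semicolon", "ident", ...)
+ // exist so composite states can sequence them through multi-state Push calls.
+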
"colon": { + Include("spaces"), + {`:`, Punctuation, Pop(1)}, + }, + "semicolon": { + Include("spaces"), + {`;`, Punctuation, Pop(1)}, + }, + "optional-semicolon": { + Include("spaces"), + {`;`, Punctuation, Pop(1)}, + Default(Pop(1)), + }, + "ident": { + Include("spaces"), + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Pop(1)}, + }, + "dollar": { + Include("spaces"), + {`\{`, Punctuation, Push("#pop", "expr-chain", "bracket-close", "expr")}, + Default(Pop(1), Push("expr-chain")), + }, + "type-name": { + Include("spaces"), + {`_*[A-Z]\w*`, Name, Pop(1)}, + }, + "type-full-name": { + Include("spaces"), + {`\.`, Punctuation, Push("ident")}, + Default(Pop(1)), + }, + "type": { + Include("spaces"), + {`\?`, Punctuation, nil}, + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("#pop", "type-check", "type-full-name")}, + {`\{`, Punctuation, Push("#pop", "type-check", "type-struct")}, + {`\(`, Punctuation, Push("#pop", "type-check", "type-parenthesis")}, + }, + "type-parenthesis": { + Include("spaces"), + Default(Pop(1), Push("parenthesis-close"), Push("type")), + }, + "type-check": { + Include("spaces"), + {`->`, Punctuation, Push("#pop", "type")}, + {`<(?!=)`, Punctuation, Push("type-param")}, + Default(Pop(1)), + }, + "type-struct": { + Include("spaces"), + {`\}`, Punctuation, Pop(1)}, + {`\?`, Punctuation, nil}, + {`>`, Punctuation, Push("comma", "type")}, + {`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("#pop", "type-struct-sep", "type", "colon")}, + Include("class-body"), + }, + "type-struct-sep": { + Include("spaces"), + {`\}`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "type-struct")}, + }, + "type-param-type": { + {`\.[0-9]+`, LiteralNumberFloat, Pop(1)}, + {`[0-9]+[eE][+\-]?[0-9]+`, LiteralNumberFloat, Pop(1)}, + {`[0-9]+\.[0-9]*[eE][+\-]?[0-9]+`, LiteralNumberFloat, Pop(1)}, + {`[0-9]+\.[0-9]+`, LiteralNumberFloat, Pop(1)}, + {`[0-9]+\.(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)|\.\.)`, LiteralNumberFloat, Pop(1)}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, Pop(1)}, + {`[0-9]+`, LiteralNumberInteger, Pop(1)}, + {`'`, LiteralStringSingle, Push("#pop", "string-single")}, + {`"`, LiteralStringDouble, Push("#pop", "string-double")}, + {`~/(\\\\|\\/|[^/\n])*/[gim]*`, LiteralStringRegex, Pop(1)}, + {`\[`, Operator, Push("#pop", "array-decl")}, + Include("type"), + }, + "type-param": { + Include("spaces"), + Default(Pop(1), Push("type-param-sep"), Push("type-param-type")), + }, + "type-param-sep": { + Include("spaces"), + {`>`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "type-param")}, + }, + "type-param-constraint": { + Include("spaces"), + {`<(?!=)`, Punctuation, Push("#pop", "type-param-constraint-sep", "type-param-constraint-flag", "type-name")}, + Default(Pop(1)), + }, + "type-param-constraint-sep": { + Include("spaces"), + {`>`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "type-param-constraint-sep", "type-param-constraint-flag", "type-name")}, + }, + "type-param-constraint-flag": { + Include("spaces"), + {`:`, Punctuation, Push("#pop", "type-param-constraint-flag-type")}, + Default(Pop(1)), + }, + "type-param-constraint-flag-type": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "type-param-constraint-flag-type-sep", "type")}, + 
Default(Pop(1), Push("type")), + }, + "type-param-constraint-flag-type-sep": { + Include("spaces"), + {`\)`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("type")}, + }, + "parenthesis": { + Include("spaces"), + Default(Pop(1), Push("parenthesis-close"), Push("flag"), Push("expr")), + }, + "parenthesis-open": { + Include("spaces"), + {`\(`, Punctuation, Pop(1)}, + }, + "parenthesis-close": { + Include("spaces"), + {`\)`, Punctuation, Pop(1)}, + }, + "var": { + Include("spaces"), + {`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Text, Push("#pop", "var-sep", "assign", "flag", "prop-get-set")}, + }, + "var-sep": { + Include("spaces"), + {`,`, Punctuation, Push("#pop", "var")}, + Default(Pop(1)), + }, + "assign": { + Include("spaces"), + {`=`, Operator, Push("#pop", "expr")}, + Default(Pop(1)), + }, + "flag": { + Include("spaces"), + {`:`, Punctuation, Push("#pop", "type")}, + Default(Pop(1)), + }, + "ternary": { + Include("spaces"), + {`:`, Operator, Pop(1)}, + }, + "call": { + Include("spaces"), + {`\)`, Punctuation, Pop(1)}, + Default(Pop(1), Push("call-sep"), Push("expr")), + }, + "call-sep": { + Include("spaces"), + {`\)`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "call")}, + }, + "bracket": { + Include("spaces"), + {`(?!(?:\$\s*[a-z]\b|\$(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+))))(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("#pop", "bracket-check")}, + {`'`, LiteralStringSingle, Push("#pop", "bracket-check", "string-single")}, + {`"`, LiteralStringDouble, Push("#pop", "bracket-check", "string-double")}, + Default(Pop(1), Push("block")), + }, + "bracket-check": { + Include("spaces"), + {`:`, Punctuation, Push("#pop", "object-sep", "expr")}, + Default(Pop(1), Push("block"), Push("optional-semicolon"), Push("expr-chain")), + }, + "block": { + Include("spaces"), + {`\}`, Punctuation, Pop(1)}, + Default(Push("expr-statement")), + }, + "object": { + Include("spaces"), + {`\}`, Punctuation, Pop(1)}, + Default(Pop(1), Push("object-sep"), Push("expr"), Push("colon"), Push("ident-or-string")), + }, + "ident-or-string": { + Include("spaces"), + {`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Pop(1)}, + {`'`, LiteralStringSingle, Push("#pop", "string-single")}, + {`"`, LiteralStringDouble, Push("#pop", "string-double")}, + }, + "object-sep": { + Include("spaces"), + {`\}`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "object")}, + }, + } +} + +func haxePreProcMutator(state *LexerState) error { + stack, ok := state.Get("haxe-pre-proc").([][]string) + if !ok { + stack = [][]string{} + } + + proc := state.Groups[2] + switch proc { + case "if": + stack = append(stack, state.Stack) + case "else", "elseif": + if len(stack) > 0 { + state.Stack 
= stack[len(stack)-1] + } + case "end": + stack = stack[:len(stack)-1] + } + + if proc == "if" || proc == "elseif" { + state.Stack = append(state.Stack, "preproc-expr") + } + + if proc == "error" { + state.Stack = append(state.Stack, "preproc-error") + } + state.Set("haxe-pre-proc", stack) + return nil +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/hcl.go b/vendor/github.com/alecthomas/chroma/lexers/h/hcl.go new file mode 100644 index 000000000..7206fba08 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/h/hcl.go @@ -0,0 +1,73 @@ +package h + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// HCL lexer. +var HCL = internal.Register(MustNewLazyLexer( + &Config{ + Name: "HCL", + Aliases: []string{"hcl"}, + Filenames: []string{"*.hcl"}, + MimeTypes: []string{"application/x-hcl"}, + }, + hclRules, +)) + +func hclRules() Rules { + return Rules{ + "root": { + Include("string"), + Include("punctuation"), + Include("curly"), + Include("basic"), + Include("whitespace"), + {`[0-9]+`, LiteralNumber, nil}, + }, + "basic": { + {Words(`\b`, `\b`, `true`, `false`), KeywordType, nil}, + {`\s*/\*`, CommentMultiline, Push("comment")}, + {`\s*#.*\n`, CommentSingle, nil}, + {`(.*?)(\s*)(=)`, ByGroups(Name, Text, Operator), nil}, + {`\d+`, Number, nil}, + {`\b\w+\b`, Keyword, nil}, + {`\$\{`, LiteralStringInterpol, Push("var_builtin")}, + }, + "function": { + {`(\s+)(".*")(\s+)`, ByGroups(Text, LiteralString, Text), nil}, + Include("punctuation"), + Include("curly"), + }, + "var_builtin": { + {`\$\{`, LiteralStringInterpol, Push()}, + {Words(`\b`, `\b`, `concat`, `file`, `join`, `lookup`, `element`), NameBuiltin, nil}, + Include("string"), + Include("punctuation"), + {`\s+`, Text, nil}, + {`\}`, LiteralStringInterpol, Pop(1)}, + }, + "string": { + {`(".*")`, ByGroups(LiteralStringDouble), nil}, + }, + "punctuation": { + {`[\[\](),.]`, Punctuation, nil}, + }, + "curly": { + {`\{`, TextPunctuation, nil}, + {`\}`, TextPunctuation, nil}, + }, + "comment": { + {`[^*/]`, CommentMultiline, nil}, + {`/\*`, CommentMultiline, Push()}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[*/]`, CommentMultiline, nil}, + }, + "whitespace": { + {`\n`, Text, nil}, + {`\s+`, Text, nil}, + {`\\\n`, Text, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/hexdump.go b/vendor/github.com/alecthomas/chroma/lexers/h/hexdump.go new file mode 100644 index 000000000..089353749 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/h/hexdump.go @@ -0,0 +1,71 @@ +package h + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Hexdump lexer. 
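+// The states below recognise offset labels, hex byte pairs (optionally
+// dash-separated) and the trailing ASCII column; seeing a full-width column
+// switches into "piped-strings", "bracket-strings" or "nonpiped-strings" so
+// later lines are read with the same layout.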
+var Hexdump = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Hexdump", + Aliases: []string{"hexdump"}, + Filenames: []string{}, + MimeTypes: []string{}, + }, + hexdumpRules, +)) + +func hexdumpRules() Rules { + return Rules{ + "root": { + {`\n`, Text, nil}, + Include("offset"), + {`([0-9A-Ha-h]{2})(\-)([0-9A-Ha-h]{2})`, ByGroups(LiteralNumberHex, Punctuation, LiteralNumberHex), nil}, + {`[0-9A-Ha-h]{2}`, LiteralNumberHex, nil}, + {`(\s{2,3})(\>)(.{16})(\<)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), Push("bracket-strings")}, + {`(\s{2,3})(\|)(.{16})(\|)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), Push("piped-strings")}, + {`(\s{2,3})(\>)(.{1,15})(\<)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), nil}, + {`(\s{2,3})(\|)(.{1,15})(\|)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), nil}, + {`(\s{2,3})(.{1,15})$`, ByGroups(Text, LiteralString), nil}, + {`(\s{2,3})(.{16}|.{20})$`, ByGroups(Text, LiteralString), Push("nonpiped-strings")}, + {`\s`, Text, nil}, + {`^\*`, Punctuation, nil}, + }, + "offset": { + {`^([0-9A-Ha-h]+)(:)`, ByGroups(NameLabel, Punctuation), Push("offset-mode")}, + {`^[0-9A-Ha-h]+`, NameLabel, nil}, + }, + "offset-mode": { + {`\s`, Text, Pop(1)}, + {`[0-9A-Ha-h]+`, NameLabel, nil}, + {`:`, Punctuation, nil}, + }, + "piped-strings": { + {`\n`, Text, nil}, + Include("offset"), + {`[0-9A-Ha-h]{2}`, LiteralNumberHex, nil}, + {`(\s{2,3})(\|)(.{1,16})(\|)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), nil}, + {`\s`, Text, nil}, + {`^\*`, Punctuation, nil}, + }, + "bracket-strings": { + {`\n`, Text, nil}, + Include("offset"), + {`[0-9A-Ha-h]{2}`, LiteralNumberHex, nil}, + {`(\s{2,3})(\>)(.{1,16})(\<)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), nil}, + {`\s`, Text, nil}, + {`^\*`, Punctuation, nil}, + }, + "nonpiped-strings": { + {`\n`, Text, nil}, + Include("offset"), + {`([0-9A-Ha-h]{2})(\-)([0-9A-Ha-h]{2})`, ByGroups(LiteralNumberHex, Punctuation, LiteralNumberHex), nil}, + {`[0-9A-Ha-h]{2}`, LiteralNumberHex, nil}, + {`(\s{19,})(.{1,20}?)$`, ByGroups(Text, LiteralString), nil}, + {`(\s{2,3})(.{1,20})$`, ByGroups(Text, LiteralString), nil}, + {`\s`, Text, nil}, + {`^\*`, Punctuation, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/hlb.go b/vendor/github.com/alecthomas/chroma/lexers/h/hlb.go new file mode 100644 index 000000000..6c5f63771 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/h/hlb.go @@ -0,0 +1,58 @@ +package h + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// HLB lexer. 
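+// The grammar is block-structured: "(" and "{" push the "params" and "block"
+// states, and identifiers that are not one of the known built-in names fall
+// through to NameOther.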
+var HLB = internal.Register(MustNewLazyLexer( + &Config{ + Name: "HLB", + Aliases: []string{"hlb"}, + Filenames: []string{"*.hlb"}, + MimeTypes: []string{}, + }, + hlbRules, +)) + +func hlbRules() Rules { + return Rules{ + "root": { + {`(#.*)`, ByGroups(CommentSingle), nil}, + {`((\b(0(b|B|o|O|x|X)[a-fA-F0-9]+)\b)|(\b(0|[1-9][0-9]*)\b))`, ByGroups(LiteralNumber), nil}, + {`((\b(true|false)\b))`, ByGroups(NameBuiltin), nil}, + {`(\bstring\b|\bint\b|\bbool\b|\bfs\b|\boption\b)`, ByGroups(KeywordType), nil}, + {`(\b[a-zA-Z_][a-zA-Z0-9]*\b)(\()`, ByGroups(NameFunction, Punctuation), Push("params")}, + {`(\{)`, ByGroups(Punctuation), Push("block")}, + {`(\n|\r|\r\n)`, Text, nil}, + {`.`, Text, nil}, + }, + "string": { + {`"`, LiteralString, Pop(1)}, + {`\\"`, LiteralString, nil}, + {`[^\\"]+`, LiteralString, nil}, + }, + "block": { + {`(\})`, ByGroups(Punctuation), Pop(1)}, + {`(#.*)`, ByGroups(CommentSingle), nil}, + {`((\b(0(b|B|o|O|x|X)[a-fA-F0-9]+)\b)|(\b(0|[1-9][0-9]*)\b))`, ByGroups(LiteralNumber), nil}, + {`((\b(true|false)\b))`, ByGroups(KeywordConstant), nil}, + {`"`, LiteralString, Push("string")}, + {`(with)`, ByGroups(KeywordReserved), nil}, + {`(as)([\t ]+)(\b[a-zA-Z_][a-zA-Z0-9]*\b)`, ByGroups(KeywordReserved, Text, NameFunction), nil}, + {`(\bstring\b|\bint\b|\bbool\b|\bfs\b|\boption\b)([\t ]+)(\{)`, ByGroups(KeywordType, Text, Punctuation), Push("block")}, + {`(?!\b(?:scratch|image|resolve|http|checksum|chmod|filename|git|keepGitDir|local|includePatterns|excludePatterns|followPaths|generate|frontendInput|shell|run|readonlyRootfs|env|dir|user|network|security|host|ssh|secret|mount|target|localPath|uid|gid|mode|readonly|tmpfs|sourcePath|cache|mkdir|createParents|chown|createdTime|mkfile|rm|allowNotFound|allowWildcards|copy|followSymlinks|contentsOnly|unpack|createDestPath)\b)(\b[a-zA-Z_][a-zA-Z0-9]*\b)`, ByGroups(NameOther), nil}, + {`(\n|\r|\r\n)`, Text, nil}, + {`.`, Text, nil}, + }, + "params": { + {`(\))`, ByGroups(Punctuation), Pop(1)}, + {`(variadic)`, ByGroups(Keyword), nil}, + {`(\bstring\b|\bint\b|\bbool\b|\bfs\b|\boption\b)`, ByGroups(KeywordType), nil}, + {`(\b[a-zA-Z_][a-zA-Z0-9]*\b)`, ByGroups(NameOther), nil}, + {`(\n|\r|\r\n)`, Text, nil}, + {`.`, Text, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/html.go b/vendor/github.com/alecthomas/chroma/lexers/h/html.go new file mode 100644 index 000000000..b9ca1e1d6 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/h/html.go @@ -0,0 +1,63 @@ +package h + +import ( + . "github.com/alecthomas/chroma" // nolint + . "github.com/alecthomas/chroma/lexers/c" // nolint + "github.com/alecthomas/chroma/lexers/internal" + . "github.com/alecthomas/chroma/lexers/j" // nolint +) + +// HTML lexer. 
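+// <script> and <style> bodies are delegated to the embedded Javascript and
+// CSS lexers via Using(...) in the "script-content" and "style-content"
+// states; everything else is plain tag/attribute tokenisation.
+//
+// A minimal usage sketch, assuming the public chroma packages that ship with
+// this vendor drop (lexers, formatters/html imported as chromahtml, and
+// styles), not anything defined in this file:
+//
+//	it, _ := lexers.Get("html").Tokenise(nil, "<p class=\"note\">hi</p>")
+//	_ = chromahtml.New(chromahtml.WithClasses(true)).Format(os.Stdout, styles.Fallback, it)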
+var HTML = internal.Register(MustNewLazyLexer( + &Config{ + Name: "HTML", + Aliases: []string{"html"}, + Filenames: []string{"*.html", "*.htm", "*.xhtml", "*.xslt"}, + MimeTypes: []string{"text/html", "application/xhtml+xml"}, + NotMultiline: true, + DotAll: true, + CaseInsensitive: true, + }, + htmlRules, +)) + +func htmlRules() Rules { + return Rules{ + "root": { + {`[^<&]+`, Text, nil}, + {`&\S*?;`, NameEntity, nil}, + {`\<\!\[CDATA\[.*?\]\]\>`, CommentPreproc, nil}, + {`<!--`, Comment, Push("comment")}, + {`<\?.*?\?>`, CommentPreproc, nil}, + {`<![^>]*>`, CommentPreproc, nil}, + {`(<)(\s*)(script)(\s*)`, ByGroups(Punctuation, Text, NameTag, Text), Push("script-content", "tag")}, + {`(<)(\s*)(style)(\s*)`, ByGroups(Punctuation, Text, NameTag, Text), Push("style-content", "tag")}, + {`(<)(\s*)([\w:.-]+)`, ByGroups(Punctuation, Text, NameTag), Push("tag")}, + {`(<)(\s*)(/)(\s*)([\w:.-]+)(\s*)(>)`, ByGroups(Punctuation, Text, Punctuation, Text, NameTag, Text, Punctuation), nil}, + }, + "comment": { + {`[^-]+`, Comment, nil}, + {`-->`, Comment, Pop(1)}, + {`-`, Comment, nil}, + }, + "tag": { + {`\s+`, Text, nil}, + {`([\w:-]+\s*)(=)(\s*)`, ByGroups(NameAttribute, Operator, Text), Push("attr")}, + {`[\w:-]+`, NameAttribute, nil}, + {`(/?)(\s*)(>)`, ByGroups(Punctuation, Text, Punctuation), Pop(1)}, + }, + "script-content": { + {`(<)(\s*)(/)(\s*)(script)(\s*)(>)`, ByGroups(Punctuation, Text, Punctuation, Text, NameTag, Text, Punctuation), Pop(1)}, + {`.+?(?=<\s*/\s*script\s*>)`, Using(Javascript), nil}, + }, + "style-content": { + {`(<)(\s*)(/)(\s*)(style)(\s*)(>)`, ByGroups(Punctuation, Text, Punctuation, Text, NameTag, Text, Punctuation), Pop(1)}, + {`.+?(?=<\s*/\s*style\s*>)`, Using(CSS), nil}, + }, + "attr": { + {`".*?"`, LiteralString, Pop(1)}, + {`'.*?'`, LiteralString, Pop(1)}, + {`[^\s>]+`, LiteralString, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/http.go b/vendor/github.com/alecthomas/chroma/lexers/h/http.go new file mode 100644 index 000000000..c515ed4c1 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/h/http.go @@ -0,0 +1,132 @@ +package h + +import ( + "strings" + + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// HTTP lexer. +var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLazyLexer( + &Config{ + Name: "HTTP", + Aliases: []string{"http"}, + Filenames: []string{}, + MimeTypes: []string{}, + NotMultiline: true, + DotAll: true, + }, + httpRules, +))) + +func httpRules() Rules { + return Rules{ + "root": { + {`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)([12]\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")}, + {`(HTTP)(/)([12]\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")}, + }, + "headers": { + {`([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpHeaderBlock), nil}, + {`([\t ]+)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpContinuousHeaderBlock), nil}, + {`\r?\n`, Text, Push("content")}, + }, + "content": { + {`.+`, EmitterFunc(httpContentBlock), nil}, + }, + } +} + +func httpContentBlock(groups []string, state *LexerState) Iterator { + tokens := []Token{ + {Generic, groups[0]}, + } + return Literator(tokens...) +} + +func httpHeaderBlock(groups []string, state *LexerState) Iterator { + tokens := []Token{ + {Name, groups[1]}, + {Text, groups[2]}, + {Operator, groups[3]}, + {Text, groups[4]}, + {Literal, groups[5]}, + {Text, groups[6]}, + } + return Literator(tokens...) +} + +func httpContinuousHeaderBlock(groups []string, state *LexerState) Iterator { + tokens := []Token{ + {Text, groups[1]}, + {Literal, groups[2]}, + {Text, groups[3]}, + } + return Literator(tokens...)
+} + +func httpBodyContentTypeLexer(lexer Lexer) Lexer { return &httpBodyContentTyper{lexer} } + +type httpBodyContentTyper struct{ Lexer } + +func (d *httpBodyContentTyper) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { // nolint: gocognit + var contentType string + var isContentType bool + var subIterator Iterator + + it, err := d.Lexer.Tokenise(options, text) + if err != nil { + return nil, err + } + + return func() Token { + token := it() + + if token == EOF { + if subIterator != nil { + return subIterator() + } + return EOF + } + + switch { + case token.Type == Name && strings.ToLower(token.Value) == "content-type": + { + isContentType = true + } + case token.Type == Literal && isContentType: + { + isContentType = false + contentType = strings.TrimSpace(token.Value) + pos := strings.Index(contentType, ";") + if pos > 0 { + contentType = strings.TrimSpace(contentType[:pos]) + } + } + case token.Type == Generic && contentType != "": + { + lexer := internal.MatchMimeType(contentType) + + // application/calendar+xml can be treated as application/xml + // if there's not a better match. + if lexer == nil && strings.Contains(contentType, "+") { + slashPos := strings.Index(contentType, "/") + plusPos := strings.LastIndex(contentType, "+") + contentType = contentType[:slashPos+1] + contentType[plusPos+1:] + lexer = internal.MatchMimeType(contentType) + } + + if lexer == nil { + token.Type = Text + } else { + subIterator, err = lexer.Tokenise(nil, token.Value) + if err != nil { + panic(err) + } + return EOF + } + } + } + return token + }, nil +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/hy.go b/vendor/github.com/alecthomas/chroma/lexers/h/hy.go new file mode 100644 index 000000000..7a0789799 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/h/hy.go @@ -0,0 +1,55 @@ +package h + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Hy lexer. 
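+// Hy is a Lisp that compiles to Python, so alongside the Lisp-style symbol,
+// keyword and paren rules the root state includes the shared "py-keywords"
+// and "py-builtins" word lists.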
+var Hy = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Hy", + Aliases: []string{"hylang"}, + Filenames: []string{"*.hy"}, + MimeTypes: []string{"text/x-hy", "application/x-hy"}, + }, + hyRules, +)) + +func hyRules() Rules { + return Rules{ + "root": { + {`;.*$`, CommentSingle, nil}, + {`[,\s]+`, Text, nil}, + {`-?\d+\.\d+`, LiteralNumberFloat, nil}, + {`-?\d+`, LiteralNumberInteger, nil}, + {`0[0-7]+j?`, LiteralNumberOct, nil}, + {`0[xX][a-fA-F0-9]+`, LiteralNumberHex, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`'(?!#)[\w!$%*+<=>?/.#-]+`, LiteralStringSymbol, nil}, + {`\\(.|[a-z]+)`, LiteralStringChar, nil}, + {`^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringDoc), nil}, + {`^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')`, ByGroups(Text, LiteralStringDoc), nil}, + {`::?(?!#)[\w!$%*+<=>?/.#-]+`, LiteralStringSymbol, nil}, + {"~@|[`\\'#^~&@]", Operator, nil}, + Include("py-keywords"), + Include("py-builtins"), + {Words(``, ` `, `cond`, `for`, `->`, `->>`, `car`, `cdr`, `first`, `rest`, `let`, `when`, `unless`, `import`, `do`, `progn`, `get`, `slice`, `assoc`, `with-decorator`, `,`, `list_comp`, `kwapply`, `~`, `is`, `in`, `is-not`, `not-in`, `quasiquote`, `unquote`, `unquote-splice`, `quote`, `|`, `<<=`, `>>=`, `foreach`, `while`, `eval-and-compile`, `eval-when-compile`), Keyword, nil}, + {Words(``, ` `, `def`, `defn`, `defun`, `defmacro`, `defclass`, `lambda`, `fn`, `setv`), KeywordDeclaration, nil}, + {Words(``, ` `, `cycle`, `dec`, `distinct`, `drop`, `even?`, `filter`, `inc`, `instance?`, `iterable?`, `iterate`, `iterator?`, `neg?`, `none?`, `nth`, `numeric?`, `odd?`, `pos?`, `remove`, `repeat`, `repeatedly`, `take`, `take_nth`, `take_while`, `zero?`), NameBuiltin, nil}, + {`(?<=\()(?!#)[\w!$%*+<=>?/.#-]+`, NameFunction, nil}, + {`(?!#)[\w!$%*+<=>?/.#-]+`, NameVariable, nil}, + {`(\[|\])`, Punctuation, nil}, + {`(\{|\})`, Punctuation, nil}, + {`(\(|\))`, Punctuation, nil}, + }, + "py-keywords": { + {Words(``, `\b`, `assert`, `break`, `continue`, `del`, `elif`, `else`, `except`, `exec`, `finally`, `for`, `global`, `if`, `lambda`, `pass`, `print`, `raise`, `return`, `try`, `while`, `yield`, `yield from`, `as`, `with`), Keyword, nil}, + }, + "py-builtins": { + {Words(`(??@^|_~:\\]).*?)$`, ByGroups(Text, CommentSingle), nil}, + {`(\s*)(\|{3}.*?)$`, ByGroups(Text, CommentSingle), nil}, + {`(\s*)(\{-)`, ByGroups(Text, CommentMultiline), Push("comment")}, + {`^(\s*)([^\s(){}]+)(\s*)(:)(\s*)`, ByGroups(Text, NameFunction, Text, OperatorWord, Text), nil}, + {`\b(case|class|data|default|using|do|else|if|in|infix[lr]?|instance|rewrite|auto|namespace|codata|mutual|private|public|abstract|total|partial|let|proof|of|then|static|where|_|with|pattern|term|syntax|prefix|postulate|parameters|record|dsl|impossible|implicit|tactics|intros|intro|compute|refine|exact|trivial)(?!\')\b`, KeywordReserved, nil}, + {`(import|module)(\s+)`, ByGroups(KeywordReserved, Text), Push("module")}, + {`('')?[A-Z][\w\']*`, KeywordType, nil}, + {`[a-z][\w\']*`, Text, nil}, + {`(<-|::|->|=>|=)`, OperatorWord, nil}, + {`([(){}\[\]:!#$%&*+.\\/<=>?@^|~-]+)`, OperatorWord, nil}, + {`\d+[eE][+-]?\d+`, LiteralNumberFloat, nil}, + {`\d+\.\d+([eE][+-]?\d+)?`, LiteralNumberFloat, nil}, + {`0[xX][\da-fA-F]+`, LiteralNumberHex, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`'`, LiteralStringChar, Push("character")}, + {`"`, LiteralString, Push("string")}, + {`[^\s(){}]+`, Text, nil}, + {`\s+?`, Text, nil}, + }, + "module": { + {`\s+`, Text, nil}, + {`([A-Z][\w.]*)(\s+)(\()`, ByGroups(NameNamespace, Text, 
Punctuation), Push("funclist")}, + {`[A-Z][\w.]*`, NameNamespace, Pop(1)}, + }, + "funclist": { + {`\s+`, Text, nil}, + {`[A-Z]\w*`, KeywordType, nil}, + {`(_[\w\']+|[a-z][\w\']*)`, NameFunction, nil}, + {`--.*$`, CommentSingle, nil}, + {`\{-`, CommentMultiline, Push("comment")}, + {`,`, Punctuation, nil}, + {`[:!#$%&*+.\\/<=>?@^|~-]+`, Operator, nil}, + {`\(`, Punctuation, Push("funclist", "funclist")}, + {`\)`, Punctuation, Pop(2)}, + }, + "comment": { + {`[^-{}]+`, CommentMultiline, nil}, + {`\{-`, CommentMultiline, Push()}, + {`-\}`, CommentMultiline, Pop(1)}, + {`[-{}]`, CommentMultiline, nil}, + }, + "character": { + {`[^\\']`, LiteralStringChar, nil}, + {`\\`, LiteralStringEscape, Push("escape")}, + {`'`, LiteralStringChar, Pop(1)}, + }, + "string": { + {`[^\\"]+`, LiteralString, nil}, + {`\\`, LiteralStringEscape, Push("escape")}, + {`"`, LiteralString, Pop(1)}, + }, + "escape": { + {`[abfnrtv"\'&\\]`, LiteralStringEscape, Pop(1)}, + {`\^[][A-Z@^_]`, LiteralStringEscape, Pop(1)}, + {`NUL|SOH|[SE]TX|EOT|ENQ|ACK|BEL|BS|HT|LF|VT|FF|CR|S[OI]|DLE|DC[1-4]|NAK|SYN|ETB|CAN|EM|SUB|ESC|[FGRU]S|SP|DEL`, LiteralStringEscape, Pop(1)}, + {`o[0-7]+`, LiteralStringEscape, Pop(1)}, + {`x[\da-fA-F]+`, LiteralStringEscape, Pop(1)}, + {`\d+`, LiteralStringEscape, Pop(1)}, + {`\s+\\`, LiteralStringEscape, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/i/igor.go b/vendor/github.com/alecthomas/chroma/lexers/i/igor.go new file mode 100644 index 000000000..bbb1d7230 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/i/igor.go @@ -0,0 +1,36 @@ +package i + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Igor lexer. +var Igor = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Igor", + Aliases: []string{"igor", "igorpro"}, + Filenames: []string{"*.ipf"}, + MimeTypes: []string{"text/ipf"}, + CaseInsensitive: true, + }, + igorRules, +)) + +func igorRules() Rules { + return Rules{ + "root": { + {`//.*$`, CommentSingle, nil}, + {`"([^"\\]|\\.)*"`, LiteralString, nil}, + {Words(`\b`, `\b`, `if`, `else`, `elseif`, `endif`, `for`, `endfor`, `strswitch`, `switch`, `case`, `default`, `endswitch`, `do`, `while`, `try`, `catch`, `endtry`, `break`, `continue`, `return`, `AbortOnRTE`, `AbortOnValue`), Keyword, nil}, + {Words(`\b`, `\b`, `variable`, `string`, `constant`, `strconstant`, `NVAR`, `SVAR`, `WAVE`, `STRUCT`, `dfref`, `funcref`, `char`, `uchar`, `int16`, `uint16`, `int32`, `uint32`, `int64`, `uint64`, `float`, `double`), KeywordType, nil}, + {Words(`\b`, `\b`, `override`, `ThreadSafe`, `MultiThread`, `static`, `Proc`, `Picture`, `Prompt`, `DoPrompt`, `macro`, `window`, `function`, `end`, `Structure`, `EndStructure`, `EndMacro`, `Menu`, `SubMenu`), KeywordReserved, nil}, + {Words(`\b`, `\b`, `Abort`, `AddFIFOData`, `AddFIFOVectData`, `AddMovieAudio`, `AddMovieFrame`, `AddWavesToBoxPlot`, `AddWavesToViolinPlot`, `AdoptFiles`, `APMath`, `Append`, `AppendBoxPlot`, `AppendImage`, `AppendLayoutObject`, `AppendMatrixContour`, `AppendText`, `AppendToGizmo`, `AppendToGraph`, `AppendToLayout`, `AppendToTable`, `AppendViolinPlot`, `AppendXYZContour`, `AutoPositionWindow`, `AxonTelegraphFindServers`, `BackgroundInfo`, `Beep`, `BoundingBall`, `BoxSmooth`, `BrowseURL`, `BuildMenu`, `Button`, `cd`, `Chart`, `CheckBox`, `CheckDisplayed`, `ChooseColor`, `Close`, `CloseHelp`, `CloseMovie`, `CloseProc`, `ColorScale`, `ColorTab2Wave`, `Concatenate`, `ControlBar`, `ControlInfo`, `ControlUpdate`, 
`ConvertGlobalStringTextEncoding`, `ConvexHull`, `Convolve`, `CopyDimLabels`, `CopyFile`, `CopyFolder`, `CopyScales`, `Correlate`, `CreateAliasShortcut`, `CreateBrowser`, `Cross`, `CtrlBackground`, `CtrlFIFO`, `CtrlNamedBackground`, `Cursor`, `CurveFit`, `CustomControl`, `CWT`, `DAQmx_AI_SetupReader`, `DAQmx_AO_SetOutputs`, `DAQmx_CTR_CountEdges`, `DAQmx_CTR_OutputPulse`, `DAQmx_CTR_Period`, `DAQmx_CTR_PulseWidth`, `DAQmx_DIO_Config`, `DAQmx_DIO_WriteNewData`, `DAQmx_Scan`, `DAQmx_WaveformGen`, `Debugger`, `DebuggerOptions`, `DefaultFont`, `DefaultGuiControls`, `DefaultGuiFont`, `DefaultTextEncoding`, `DefineGuide`, `DelayUpdate`, `DeleteAnnotations`, `DeleteFile`, `DeleteFolder`, `DeletePoints`, `Differentiate`, `dir`, `Display`, `DisplayHelpTopic`, `DisplayProcedure`, `DoAlert`, `DoIgorMenu`, `DoUpdate`, `DoWindow`, `DoXOPIdle`, `DPSS`, `DrawAction`, `DrawArc`, `DrawBezier`, `DrawLine`, `DrawOval`, `DrawPICT`, `DrawPoly`, `DrawRect`, `DrawRRect`, `DrawText`, `DrawUserShape`, `DSPDetrend`, `DSPPeriodogram`, `Duplicate`, `DuplicateDataFolder`, `DWT`, `EdgeStats`, `Edit`, `ErrorBars`, `EstimatePeakSizes`, `Execute`, `ExecuteScriptText`, `ExperimentInfo`, `ExperimentModified`, `ExportGizmo`, `Extract`, `FastGaussTransform`, `FastOp`, `FBinRead`, `FBinWrite`, `FFT`, `FGetPos`, `FIFOStatus`, `FIFO2Wave`, `FilterFIR`, `FilterIIR`, `FindAPeak`, `FindContour`, `FindDuplicates`, `FindLevel`, `FindLevels`, `FindPeak`, `FindPointsInPoly`, `FindRoots`, `FindSequence`, `FindValue`, `FMaxFlat`, `FPClustering`, `fprintf`, `FReadLine`, `FSetPos`, `FStatus`, `FTPCreateDirectory`, `FTPDelete`, `FTPDownload`, `FTPUpload`, `FuncFit`, `FuncFitMD`, `GBLoadWave`, `GetAxis`, `GetCamera`, `GetFileFolderInfo`, `GetGizmo`, `GetLastUserMenuInfo`, `GetMarquee`, `GetMouse`, `GetSelection`, `GetWindow`, `GISCreateVectorLayer`, `GISGetRasterInfo`, `GISGetRegisteredFileInfo`, `GISGetVectorLayerInfo`, `GISLoadRasterData`, `GISLoadVectorData`, `GISRasterizeVectorData`, `GISRegisterFile`, `GISTransformCoords`, `GISUnRegisterFile`, `GISWriteFieldData`, `GISWriteGeometryData`, `GISWriteRaster`, `GPIBReadBinaryWave2`, `GPIBReadBinary2`, `GPIBReadWave2`, `GPIBRead2`, `GPIBWriteBinaryWave2`, `GPIBWriteBinary2`, `GPIBWriteWave2`, `GPIBWrite2`, `GPIB2`, `GraphNormal`, `GraphWaveDraw`, `GraphWaveEdit`, `Grep`, `GroupBox`, `Hanning`, `HDFInfo`, `HDFReadImage`, `HDFReadSDS`, `HDFReadVset`, `HDF5CloseFile`, `HDF5CloseGroup`, `HDF5ConvertColors`, `HDF5CreateFile`, `HDF5CreateGroup`, `HDF5CreateLink`, `HDF5Dump`, `HDF5DumpErrors`, `HDF5DumpState`, `HDF5FlushFile`, `HDF5ListAttributes`, `HDF5ListGroup`, `HDF5LoadData`, `HDF5LoadGroup`, `HDF5LoadImage`, `HDF5OpenFile`, `HDF5OpenGroup`, `HDF5SaveData`, `HDF5SaveGroup`, `HDF5SaveImage`, `HDF5TestOperation`, `HDF5UnlinkObject`, `HideIgorMenus`, `HideInfo`, `HideProcedures`, `HideTools`, `HilbertTransform`, `Histogram`, `ICA`, `IFFT`, `ImageAnalyzeParticles`, `ImageBlend`, `ImageBoundaryToMask`, `ImageComposite`, `ImageEdgeDetection`, `ImageFileInfo`, `ImageFilter`, `ImageFocus`, `ImageFromXYZ`, `ImageGenerateROIMask`, `ImageGLCM`, `ImageHistModification`, `ImageHistogram`, `ImageInterpolate`, `ImageLineProfile`, `ImageLoad`, `ImageMorphology`, `ImageRegistration`, `ImageRemoveBackground`, `ImageRestore`, `ImageRotate`, `ImageSave`, `ImageSeedFill`, `ImageSkeleton3d`, `ImageSnake`, `ImageStats`, `ImageThreshold`, `ImageTransform`, `ImageUnwrapPhase`, `ImageWindow`, `IndexSort`, `InsertPoints`, `Integrate`, `IntegrateODE`, `Integrate2D`, `Interpolate2`, `Interpolate3D`, `Interp3DPath`, 
`ITCCloseAll2`, `ITCCloseDevice2`, `ITCConfigAllChannels2`, `ITCConfigChannelReset2`, `ITCConfigChannelUpload2`, `ITCConfigChannel2`, `ITCFIFOAvailableAll2`, `ITCFIFOAvailable2`, `ITCGetAllChannelsConfig2`, `ITCGetChannelConfig2`, `ITCGetCurrentDevice2`, `ITCGetDeviceInfo2`, `ITCGetDevices2`, `ITCGetErrorString2`, `ITCGetSerialNumber2`, `ITCGetState2`, `ITCGetVersions2`, `ITCInitialize2`, `ITCOpenDevice2`, `ITCReadADC2`, `ITCReadDigital2`, `ITCReadTimer2`, `ITCSelectDevice2`, `ITCSetDAC2`, `ITCSetGlobals2`, `ITCSetModes2`, `ITCSetState2`, `ITCStartAcq2`, `ITCStopAcq2`, `ITCUpdateFIFOPositionAll2`, `ITCUpdateFIFOPosition2`, `ITCWriteDigital2`, `JCAMPLoadWave`, `JointHistogram`, `KillBackground`, `KillControl`, `KillDataFolder`, `KillFIFO`, `KillFreeAxis`, `KillPath`, `KillPICTs`, `KillStrings`, `KillVariables`, `KillWaves`, `KillWindow`, `KMeans`, `Label`, `Layout`, `LayoutPageAction`, `LayoutSlideShow`, `Legend`, `LinearFeedbackShiftRegister`, `ListBox`, `LoadData`, `LoadPackagePreferences`, `LoadPICT`, `LoadWave`, `Loess`, `LombPeriodogram`, `Make`, `MakeIndex`, `MarkPerfTestTime`, `MatrixConvolve`, `MatrixCorr`, `MatrixEigenV`, `MatrixFilter`, `MatrixGaussJ`, `MatrixGLM`, `MatrixInverse`, `MatrixLinearSolve`, `MatrixLinearSolveTD`, `MatrixLLS`, `MatrixLUBkSub`, `MatrixLUD`, `MatrixLUDTD`, `MatrixMultiply`, `MatrixOP`, `MatrixSchur`, `MatrixSolve`, `MatrixSVBkSub`, `MatrixSVD`, `MatrixTranspose`, `MCC_FindServers`, `MeasureStyledText`, `MFR_CheckForNewBricklets`, `MFR_CloseResultFile`, `MFR_CreateOverviewTable`, `MFR_GetBrickletCount`, `MFR_GetBrickletData`, `MFR_GetBrickletDeployData`, `MFR_GetBrickletMetaData`, `MFR_GetBrickletRawData`, `MFR_GetReportTemplate`, `MFR_GetResultFileMetaData`, `MFR_GetResultFileName`, `MFR_GetVernissageVersion`, `MFR_GetVersion`, `MFR_GetXOPErrorMessage`, `MFR_OpenResultFile`, `MLLoadWave`, `Modify`, `ModifyBoxPlot`, `ModifyBrowser`, `ModifyCamera`, `ModifyContour`, `ModifyControl`, `ModifyControlList`, `ModifyFreeAxis`, `ModifyGizmo`, `ModifyGraph`, `ModifyImage`, `ModifyLayout`, `ModifyPanel`, `ModifyTable`, `ModifyViolinPlot`, `ModifyWaterfall`, `MoveDataFolder`, `MoveFile`, `MoveFolder`, `MoveString`, `MoveSubwindow`, `MoveVariable`, `MoveWave`, `MoveWindow`, `MultiTaperPSD`, `MultiThreadingControl`, `NC_CloseFile`, `NC_DumpErrors`, `NC_Inquire`, `NC_ListAttributes`, `NC_ListObjects`, `NC_LoadData`, `NC_OpenFile`, `NeuralNetworkRun`, `NeuralNetworkTrain`, `NewCamera`, `NewDataFolder`, `NewFIFO`, `NewFIFOChan`, `NewFreeAxis`, `NewGizmo`, `NewImage`, `NewLayout`, `NewMovie`, `NewNotebook`, `NewPanel`, `NewPath`, `NewWaterfall`, `NILoadWave`, `NI4882`, `Note`, `Notebook`, `NotebookAction`, `Open`, `OpenHelp`, `OpenNotebook`, `Optimize`, `ParseOperationTemplate`, `PathInfo`, `PauseForUser`, `PauseUpdate`, `PCA`, `PlayMovie`, `PlayMovieAction`, `PlaySound`, `PopupContextualMenu`, `PopupMenu`, `Preferences`, `PrimeFactors`, `Print`, `printf`, `PrintGraphs`, `PrintLayout`, `PrintNotebook`, `PrintSettings`, `PrintTable`, `Project`, `PulseStats`, `PutScrapText`, `pwd`, `Quit`, `RatioFromNumber`, `Redimension`, `Remez`, `Remove`, `RemoveContour`, `RemoveFromGizmo`, `RemoveFromGraph`, `RemoveFromLayout`, `RemoveFromTable`, `RemoveImage`, `RemoveLayoutObjects`, `RemovePath`, `Rename`, `RenameDataFolder`, `RenamePath`, `RenamePICT`, `RenameWindow`, `ReorderImages`, `ReorderTraces`, `ReplaceText`, `ReplaceWave`, `Resample`, `ResumeUpdate`, `Reverse`, `Rotate`, `Save`, `SaveData`, `SaveExperiment`, `SaveGizmoCopy`, `SaveGraphCopy`, `SaveNotebook`, 
`SavePackagePreferences`, `SavePICT`, `SaveTableCopy`, `SetActiveSubwindow`, `SetAxis`, `SetBackground`, `SetDashPattern`, `SetDataFolder`, `SetDimLabel`, `SetDrawEnv`, `SetDrawLayer`, `SetFileFolderInfo`, `SetFormula`, `SetIdlePeriod`, `SetIgorHook`, `SetIgorMenuMode`, `SetIgorOption`, `SetMarquee`, `SetProcessSleep`, `SetRandomSeed`, `SetScale`, `SetVariable`, `SetWaveLock`, `SetWaveTextEncoding`, `SetWindow`, `ShowIgorMenus`, `ShowInfo`, `ShowTools`, `Silent`, `Sleep`, `Slider`, `Smooth`, `SmoothCustom`, `Sort`, `SortColumns`, `SoundInRecord`, `SoundInSet`, `SoundInStartChart`, `SoundInStatus`, `SoundInStopChart`, `SoundLoadWave`, `SoundSaveWave`, `SphericalInterpolate`, `SphericalTriangulate`, `SplitString`, `SplitWave`, `sprintf`, `SQLHighLevelOp`, `sscanf`, `Stack`, `StackWindows`, `StatsAngularDistanceTest`, `StatsANOVA1Test`, `StatsANOVA2NRTest`, `StatsANOVA2RMTest`, `StatsANOVA2Test`, `StatsChiTest`, `StatsCircularCorrelationTest`, `StatsCircularMeans`, `StatsCircularMoments`, `StatsCircularTwoSampleTest`, `StatsCochranTest`, `StatsContingencyTable`, `StatsDIPTest`, `StatsDunnettTest`, `StatsFriedmanTest`, `StatsFTest`, `StatsHodgesAjneTest`, `StatsJBTest`, `StatsKDE`, `StatsKendallTauTest`, `StatsKSTest`, `StatsKWTest`, `StatsLinearCorrelationTest`, `StatsLinearRegression`, `StatsMultiCorrelationTest`, `StatsNPMCTest`, `StatsNPNominalSRTest`, `StatsQuantiles`, `StatsRankCorrelationTest`, `StatsResample`, `StatsSample`, `StatsScheffeTest`, `StatsShapiroWilkTest`, `StatsSignTest`, `StatsSRTest`, `StatsTTest`, `StatsTukeyTest`, `StatsVariancesTest`, `StatsWatsonUSquaredTest`, `StatsWatsonWilliamsTest`, `StatsWheelerWatsonTest`, `StatsWilcoxonRankTest`, `StatsWRCorrelationTest`, `STFT`, `String`, `StructFill`, `StructGet`, `StructPut`, `SumDimension`, `SumSeries`, `TabControl`, `Tag`, `TDMLoadData`, `TDMSaveData`, `TextBox`, `ThreadGroupPutDF`, `ThreadStart`, `TickWavesFromAxis`, `Tile`, `TileWindows`, `TitleBox`, `ToCommandLine`, `ToolsGrid`, `Triangulate3d`, `Unwrap`, `URLRequest`, `ValDisplay`, `Variable`, `VDTClosePort2`, `VDTGetPortList2`, `VDTGetStatus2`, `VDTOpenPort2`, `VDTOperationsPort2`, `VDTReadBinaryWave2`, `VDTReadBinary2`, `VDTReadHexWave2`, `VDTReadHex2`, `VDTReadWave2`, `VDTRead2`, `VDTTerminalPort2`, `VDTWriteBinaryWave2`, `VDTWriteBinary2`, `VDTWriteHexWave2`, `VDTWriteHex2`, `VDTWriteWave2`, `VDTWrite2`, `VDT2`, `VISAControl`, `VISARead`, `VISAReadBinary`, `VISAReadBinaryWave`, `VISAReadWave`, `VISAWrite`, `VISAWriteBinary`, `VISAWriteBinaryWave`, `VISAWriteWave`, `WaveMeanStdv`, `WaveStats`, `WaveTransform`, `wfprintf`, `WignerTransform`, `WindowFunction`, `XLLoadWave`), NameClass, nil}, + {Words(`\b`, `\b`, `abs`, `acos`, `acosh`, `AddListItem`, `AiryA`, `AiryAD`, `AiryB`, `AiryBD`, `alog`, `AnnotationInfo`, `AnnotationList`, `area`, `areaXY`, `asin`, `asinh`, `atan`, `atanh`, `atan2`, `AxisInfo`, `AxisList`, `AxisValFromPixel`, `AxonTelegraphAGetDataNum`, `AxonTelegraphAGetDataString`, `AxonTelegraphAGetDataStruct`, `AxonTelegraphGetDataNum`, `AxonTelegraphGetDataString`, `AxonTelegraphGetDataStruct`, `AxonTelegraphGetTimeoutMs`, `AxonTelegraphSetTimeoutMs`, `Base64Decode`, `Base64Encode`, `Besseli`, `Besselj`, `Besselk`, `Bessely`, `beta`, `betai`, `BinarySearch`, `BinarySearchInterp`, `binomial`, `binomialln`, `binomialNoise`, `cabs`, `CaptureHistory`, `CaptureHistoryStart`, `ceil`, `cequal`, `char2num`, `chebyshev`, `chebyshevU`, `CheckName`, `ChildWindowList`, `CleanupName`, `cmplx`, `cmpstr`, `conj`, `ContourInfo`, `ContourNameList`, 
`ContourNameToWaveRef`, `ContourZ`, `ControlNameList`, `ConvertTextEncoding`, `cos`, `cosh`, `cosIntegral`, `cot`, `coth`, `CountObjects`, `CountObjectsDFR`, `cpowi`, `CreationDate`, `csc`, `csch`, `CsrInfo`, `CsrWave`, `CsrWaveRef`, `CsrXWave`, `CsrXWaveRef`, `CTabList`, `DataFolderDir`, `DataFolderExists`, `DataFolderRefsEqual`, `DataFolderRefStatus`, `date`, `datetime`, `DateToJulian`, `date2secs`, `Dawson`, `defined`, `deltax`, `digamma`, `dilogarithm`, `DimDelta`, `DimOffset`, `DimSize`, `ei`, `enoise`, `equalWaves`, `erf`, `erfc`, `erfcw`, `exists`, `exp`, `expInt`, `expIntegralE1`, `expNoise`, `factorial`, `Faddeeva`, `fakedata`, `faverage`, `faverageXY`, `fDAQmx_AI_GetReader`, `fDAQmx_AO_UpdateOutputs`, `fDAQmx_ConnectTerminals`, `fDAQmx_CTR_Finished`, `fDAQmx_CTR_IsFinished`, `fDAQmx_CTR_IsPulseFinished`, `fDAQmx_CTR_ReadCounter`, `fDAQmx_CTR_ReadWithOptions`, `fDAQmx_CTR_SetPulseFrequency`, `fDAQmx_CTR_Start`, `fDAQmx_DeviceNames`, `fDAQmx_DIO_Finished`, `fDAQmx_DIO_PortWidth`, `fDAQmx_DIO_Read`, `fDAQmx_DIO_Write`, `fDAQmx_DisconnectTerminals`, `fDAQmx_ErrorString`, `fDAQmx_ExternalCalDate`, `fDAQmx_NumAnalogInputs`, `fDAQmx_NumAnalogOutputs`, `fDAQmx_NumCounters`, `fDAQmx_NumDIOPorts`, `fDAQmx_ReadChan`, `fDAQmx_ReadNamedChan`, `fDAQmx_ResetDevice`, `fDAQmx_ScanGetAvailable`, `fDAQmx_ScanGetNextIndex`, `fDAQmx_ScanStart`, `fDAQmx_ScanStop`, `fDAQmx_ScanWait`, `fDAQmx_ScanWaitWithTimeout`, `fDAQmx_SelfCalDate`, `fDAQmx_SelfCalibration`, `fDAQmx_WaveformStart`, `fDAQmx_WaveformStop`, `fDAQmx_WF_IsFinished`, `fDAQmx_WF_WaitUntilFinished`, `fDAQmx_WriteChan`, `FetchURL`, `FindDimLabel`, `FindListItem`, `floor`, `FontList`, `FontSizeHeight`, `FontSizeStringWidth`, `FresnelCos`, `FresnelSin`, `FuncRefInfo`, `FunctionInfo`, `FunctionList`, `FunctionPath`, `gamma`, `gammaEuler`, `gammaInc`, `gammaNoise`, `gammln`, `gammp`, `gammq`, `Gauss`, `Gauss1D`, `Gauss2D`, `gcd`, `GetBrowserLine`, `GetBrowserSelection`, `GetDataFolder`, `GetDataFolderDFR`, `GetDefaultFont`, `GetDefaultFontSize`, `GetDefaultFontStyle`, `GetDimLabel`, `GetEnvironmentVariable`, `GetErrMessage`, `GetFormula`, `GetIndependentModuleName`, `GetIndexedObjName`, `GetIndexedObjNameDFR`, `GetKeyState`, `GetRTErrMessage`, `GetRTError`, `GetRTLocation`, `GetRTLocInfo`, `GetRTStackInfo`, `GetScrapText`, `GetUserData`, `GetWavesDataFolder`, `GetWavesDataFolderDFR`, `GISGetAllFileFormats`, `GISSRefsAreEqual`, `GizmoInfo`, `GizmoScale`, `gnoise`, `GrepList`, `GrepString`, `GuideInfo`, `GuideNameList`, `Hash`, `hcsr`, `HDF5AttributeInfo`, `HDF5DatasetInfo`, `HDF5LibraryInfo`, `HDF5TypeInfo`, `hermite`, `hermiteGauss`, `HyperGNoise`, `HyperGPFQ`, `HyperG0F1`, `HyperG1F1`, `HyperG2F1`, `IgorInfo`, `IgorVersion`, `imag`, `ImageInfo`, `ImageNameList`, `ImageNameToWaveRef`, `IndependentModuleList`, `IndexedDir`, `IndexedFile`, `IndexToScale`, `Inf`, `Integrate1D`, `interp`, `Interp2D`, `Interp3D`, `inverseERF`, `inverseERFC`, `ItemsInList`, `JacobiCn`, `JacobiSn`, `JulianToDate`, `Laguerre`, `LaguerreA`, `LaguerreGauss`, `LambertW`, `LayoutInfo`, `leftx`, `LegendreA`, `limit`, `ListMatch`, `ListToTextWave`, `ListToWaveRefWave`, `ln`, `log`, `logNormalNoise`, `lorentzianNoise`, `LowerStr`, `MacroList`, `magsqr`, `MandelbrotPoint`, `MarcumQ`, `MatrixCondition`, `MatrixDet`, `MatrixDot`, `MatrixRank`, `MatrixTrace`, `max`, `MCC_AutoBridgeBal`, `MCC_AutoFastComp`, `MCC_AutoPipetteOffset`, `MCC_AutoSlowComp`, `MCC_AutoWholeCellComp`, `MCC_GetBridgeBalEnable`, `MCC_GetBridgeBalResist`, `MCC_GetFastCompCap`, `MCC_GetFastCompTau`, 
`MCC_GetHolding`, `MCC_GetHoldingEnable`, `MCC_GetMode`, `MCC_GetNeutralizationCap`, `MCC_GetNeutralizationEnable`, `MCC_GetOscKillerEnable`, `MCC_GetPipetteOffset`, `MCC_GetPrimarySignalGain`, `MCC_GetPrimarySignalHPF`, `MCC_GetPrimarySignalLPF`, `MCC_GetRsCompBandwidth`, `MCC_GetRsCompCorrection`, `MCC_GetRsCompEnable`, `MCC_GetRsCompPrediction`, `MCC_GetSecondarySignalGain`, `MCC_GetSecondarySignalLPF`, `MCC_GetSlowCompCap`, `MCC_GetSlowCompTau`, `MCC_GetSlowCompTauX20Enable`, `MCC_GetSlowCurrentInjEnable`, `MCC_GetSlowCurrentInjLevel`, `MCC_GetSlowCurrentInjSetlTime`, `MCC_GetWholeCellCompCap`, `MCC_GetWholeCellCompEnable`, `MCC_GetWholeCellCompResist`, `MCC_SelectMultiClamp700B`, `MCC_SetBridgeBalEnable`, `MCC_SetBridgeBalResist`, `MCC_SetFastCompCap`, `MCC_SetFastCompTau`, `MCC_SetHolding`, `MCC_SetHoldingEnable`, `MCC_SetMode`, `MCC_SetNeutralizationCap`, `MCC_SetNeutralizationEnable`, `MCC_SetOscKillerEnable`, `MCC_SetPipetteOffset`, `MCC_SetPrimarySignalGain`, `MCC_SetPrimarySignalHPF`, `MCC_SetPrimarySignalLPF`, `MCC_SetRsCompBandwidth`, `MCC_SetRsCompCorrection`, `MCC_SetRsCompEnable`, `MCC_SetRsCompPrediction`, `MCC_SetSecondarySignalGain`, `MCC_SetSecondarySignalLPF`, `MCC_SetSlowCompCap`, `MCC_SetSlowCompTau`, `MCC_SetSlowCompTauX20Enable`, `MCC_SetSlowCurrentInjEnable`, `MCC_SetSlowCurrentInjLevel`, `MCC_SetSlowCurrentInjSetlTime`, `MCC_SetTimeoutMs`, `MCC_SetWholeCellCompCap`, `MCC_SetWholeCellCompEnable`, `MCC_SetWholeCellCompResist`, `mean`, `median`, `min`, `mod`, `ModDate`, `MPFXEMGPeak`, `MPFXExpConvExpPeak`, `MPFXGaussPeak`, `MPFXLorenzianPeak`, `MPFXVoigtPeak`, `NameOfWave`, `NaN`, `NewFreeDataFolder`, `NewFreeWave`, `norm`, `NormalizeUnicode`, `note`, `NumberByKey`, `numpnts`, `numtype`, `NumVarOrDefault`, `num2char`, `num2istr`, `num2str`, `NVAR_Exists`, `OperationList`, `PadString`, `PanelResolution`, `ParamIsDefault`, `ParseFilePath`, `PathList`, `pcsr`, `Pi`, `PICTInfo`, `PICTList`, `PixelFromAxisVal`, `pnt2x`, `poissonNoise`, `poly`, `PolygonArea`, `poly2D`, `PossiblyQuoteName`, `ProcedureText`, `p2rect`, `qcsr`, `real`, `RemoveByKey`, `RemoveEnding`, `RemoveFromList`, `RemoveListItem`, `ReplaceNumberByKey`, `ReplaceString`, `ReplaceStringByKey`, `rightx`, `round`, `r2polar`, `sawtooth`, `scaleToIndex`, `ScreenResolution`, `sec`, `sech`, `Secs2Date`, `Secs2Time`, `SelectNumber`, `SelectString`, `SetEnvironmentVariable`, `sign`, `sin`, `sinc`, `sinh`, `sinIntegral`, `SortList`, `SpecialCharacterInfo`, `SpecialCharacterList`, `SpecialDirPath`, `SphericalBessJ`, `SphericalBessJD`, `SphericalBessY`, `SphericalBessYD`, `SphericalHarmonics`, `SQLAllocHandle`, `SQLAllocStmt`, `SQLBinaryWavesToTextWave`, `SQLBindCol`, `SQLBindParameter`, `SQLBrowseConnect`, `SQLBulkOperations`, `SQLCancel`, `SQLCloseCursor`, `SQLColAttributeNum`, `SQLColAttributeStr`, `SQLColumnPrivileges`, `SQLColumns`, `SQLConnect`, `SQLDataSources`, `SQLDescribeCol`, `SQLDescribeParam`, `SQLDisconnect`, `SQLDriverConnect`, `SQLDrivers`, `SQLEndTran`, `SQLError`, `SQLExecDirect`, `SQLExecute`, `SQLFetch`, `SQLFetchScroll`, `SQLForeignKeys`, `SQLFreeConnect`, `SQLFreeEnv`, `SQLFreeHandle`, `SQLFreeStmt`, `SQLGetConnectAttrNum`, `SQLGetConnectAttrStr`, `SQLGetCursorName`, `SQLGetDataNum`, `SQLGetDataStr`, `SQLGetDescFieldNum`, `SQLGetDescFieldStr`, `SQLGetDescRec`, `SQLGetDiagFieldNum`, `SQLGetDiagFieldStr`, `SQLGetDiagRec`, `SQLGetEnvAttrNum`, `SQLGetEnvAttrStr`, `SQLGetFunctions`, `SQLGetInfoNum`, `SQLGetInfoStr`, `SQLGetStmtAttrNum`, `SQLGetStmtAttrStr`, `SQLGetTypeInfo`, `SQLMoreResults`, 
`SQLNativeSql`, `SQLNumParams`, `SQLNumResultCols`, `SQLNumResultRowsIfKnown`, `SQLNumRowsFetched`, `SQLParamData`, `SQLPrepare`, `SQLPrimaryKeys`, `SQLProcedureColumns`, `SQLProcedures`, `SQLPutData`, `SQLReinitialize`, `SQLRowCount`, `SQLSetConnectAttrNum`, `SQLSetConnectAttrStr`, `SQLSetCursorName`, `SQLSetDescFieldNum`, `SQLSetDescFieldStr`, `SQLSetDescRec`, `SQLSetEnvAttrNum`, `SQLSetEnvAttrStr`, `SQLSetPos`, `SQLSetStmtAttrNum`, `SQLSetStmtAttrStr`, `SQLSpecialColumns`, `SQLStatistics`, `SQLTablePrivileges`, `SQLTables`, `SQLTextWaveToBinaryWaves`, `SQLTextWaveTo2DBinaryWave`, `SQLUpdateBoundValues`, `SQLXOPCheckState`, `SQL2DBinaryWaveToTextWave`, `sqrt`, `StartMSTimer`, `StatsBetaCDF`, `StatsBetaPDF`, `StatsBinomialCDF`, `StatsBinomialPDF`, `StatsCauchyCDF`, `StatsCauchyPDF`, `StatsChiCDF`, `StatsChiPDF`, `StatsCMSSDCDF`, `StatsCorrelation`, `StatsDExpCDF`, `StatsDExpPDF`, `StatsErlangCDF`, `StatsErlangPDF`, `StatsErrorPDF`, `StatsEValueCDF`, `StatsEValuePDF`, `StatsExpCDF`, `StatsExpPDF`, `StatsFCDF`, `StatsFPDF`, `StatsFriedmanCDF`, `StatsGammaCDF`, `StatsGammaPDF`, `StatsGeometricCDF`, `StatsGeometricPDF`, `StatsGEVCDF`, `StatsGEVPDF`, `StatsHyperGCDF`, `StatsHyperGPDF`, `StatsInvBetaCDF`, `StatsInvBinomialCDF`, `StatsInvCauchyCDF`, `StatsInvChiCDF`, `StatsInvCMSSDCDF`, `StatsInvDExpCDF`, `StatsInvEValueCDF`, `StatsInvExpCDF`, `StatsInvFCDF`, `StatsInvFriedmanCDF`, `StatsInvGammaCDF`, `StatsInvGeometricCDF`, `StatsInvKuiperCDF`, `StatsInvLogisticCDF`, `StatsInvLogNormalCDF`, `StatsInvMaxwellCDF`, `StatsInvMooreCDF`, `StatsInvNBinomialCDF`, `StatsInvNCChiCDF`, `StatsInvNCFCDF`, `StatsInvNormalCDF`, `StatsInvParetoCDF`, `StatsInvPoissonCDF`, `StatsInvPowerCDF`, `StatsInvQCDF`, `StatsInvQpCDF`, `StatsInvRayleighCDF`, `StatsInvRectangularCDF`, `StatsInvSpearmanCDF`, `StatsInvStudentCDF`, `StatsInvTopDownCDF`, `StatsInvTriangularCDF`, `StatsInvUsquaredCDF`, `StatsInvVonMisesCDF`, `StatsInvWeibullCDF`, `StatsKuiperCDF`, `StatsLogisticCDF`, `StatsLogisticPDF`, `StatsLogNormalCDF`, `StatsLogNormalPDF`, `StatsMaxwellCDF`, `StatsMaxwellPDF`, `StatsMedian`, `StatsMooreCDF`, `StatsNBinomialCDF`, `StatsNBinomialPDF`, `StatsNCChiCDF`, `StatsNCChiPDF`, `StatsNCFCDF`, `StatsNCFPDF`, `StatsNCTCDF`, `StatsNCTPDF`, `StatsNormalCDF`, `StatsNormalPDF`, `StatsParetoCDF`, `StatsParetoPDF`, `StatsPermute`, `StatsPoissonCDF`, `StatsPoissonPDF`, `StatsPowerCDF`, `StatsPowerNoise`, `StatsPowerPDF`, `StatsQCDF`, `StatsQpCDF`, `StatsRayleighCDF`, `StatsRayleighPDF`, `StatsRectangularCDF`, `StatsRectangularPDF`, `StatsRunsCDF`, `StatsSpearmanRhoCDF`, `StatsStudentCDF`, `StatsStudentPDF`, `StatsTopDownCDF`, `StatsTriangularCDF`, `StatsTriangularPDF`, `StatsTrimmedMean`, `StatsUSquaredCDF`, `StatsVonMisesCDF`, `StatsVonMisesNoise`, `StatsVonMisesPDF`, `StatsWaldCDF`, `StatsWaldPDF`, `StatsWeibullCDF`, `StatsWeibullPDF`, `StopMSTimer`, `StringByKey`, `stringCRC`, `StringFromList`, `StringList`, `stringmatch`, `strlen`, `strsearch`, `StrVarOrDefault`, `str2num`, `StudentA`, `StudentT`, `sum`, `SVAR_Exists`, `TableInfo`, `TagVal`, `TagWaveRef`, `tan`, `tango_close_device`, `tango_command_inout`, `tango_compute_image_proj`, `tango_get_dev_attr_list`, `tango_get_dev_black_box`, `tango_get_dev_cmd_list`, `tango_get_dev_status`, `tango_get_dev_timeout`, `tango_get_error_stack`, `tango_open_device`, `tango_ping_device`, `tango_read_attribute`, `tango_read_attributes`, `tango_reload_dev_interface`, `tango_resume_attr_monitor`, `tango_set_attr_monitor_period`, `tango_set_dev_timeout`, `tango_start_attr_monitor`, 
`tango_stop_attr_monitor`, `tango_suspend_attr_monitor`, `tango_write_attribute`, `tango_write_attributes`, `tanh`, `TDMAddChannel`, `TDMAddGroup`, `TDMAppendDataValues`, `TDMAppendDataValuesTime`, `TDMChannelPropertyExists`, `TDMCloseChannel`, `TDMCloseFile`, `TDMCloseGroup`, `TDMCreateChannelProperty`, `TDMCreateFile`, `TDMCreateFileProperty`, `TDMCreateGroupProperty`, `TDMFilePropertyExists`, `TDMGetChannelPropertyNames`, `TDMGetChannelPropertyNum`, `TDMGetChannelPropertyStr`, `TDMGetChannelPropertyTime`, `TDMGetChannelPropertyType`, `TDMGetChannels`, `TDMGetChannelStringPropertyLen`, `TDMGetDataType`, `TDMGetDataValues`, `TDMGetDataValuesTime`, `TDMGetFilePropertyNames`, `TDMGetFilePropertyNum`, `TDMGetFilePropertyStr`, `TDMGetFilePropertyTime`, `TDMGetFilePropertyType`, `TDMGetFileStringPropertyLen`, `TDMGetGroupPropertyNames`, `TDMGetGroupPropertyNum`, `TDMGetGroupPropertyStr`, `TDMGetGroupPropertyTime`, `TDMGetGroupPropertyType`, `TDMGetGroups`, `TDMGetGroupStringPropertyLen`, `TDMGetLibraryErrorDescription`, `TDMGetNumChannelProperties`, `TDMGetNumChannels`, `TDMGetNumDataValues`, `TDMGetNumFileProperties`, `TDMGetNumGroupProperties`, `TDMGetNumGroups`, `TDMGroupPropertyExists`, `TDMOpenFile`, `TDMOpenFileEx`, `TDMRemoveChannel`, `TDMRemoveGroup`, `TDMReplaceDataValues`, `TDMReplaceDataValuesTime`, `TDMSaveFile`, `TDMSetChannelPropertyNum`, `TDMSetChannelPropertyStr`, `TDMSetChannelPropertyTime`, `TDMSetDataValues`, `TDMSetDataValuesTime`, `TDMSetFilePropertyNum`, `TDMSetFilePropertyStr`, `TDMSetFilePropertyTime`, `TDMSetGroupPropertyNum`, `TDMSetGroupPropertyStr`, `TDMSetGroupPropertyTime`, `TextEncodingCode`, `TextEncodingName`, `TextFile`, `ThreadGroupCreate`, `ThreadGroupGetDF`, `ThreadGroupGetDFR`, `ThreadGroupRelease`, `ThreadGroupWait`, `ThreadProcessorCount`, `ThreadReturnValue`, `ticks`, `time`, `TraceFromPixel`, `TraceInfo`, `TraceNameList`, `TraceNameToWaveRef`, `TrimString`, `trunc`, `UniqueName`, `UnPadString`, `UnsetEnvironmentVariable`, `UpperStr`, `URLDecode`, `URLEncode`, `VariableList`, `Variance`, `vcsr`, `viAssertIntrSignal`, `viAssertTrigger`, `viAssertUtilSignal`, `viClear`, `viClose`, `viDisableEvent`, `viDiscardEvents`, `viEnableEvent`, `viFindNext`, `viFindRsrc`, `viGetAttribute`, `viGetAttributeString`, `viGpibCommand`, `viGpibControlATN`, `viGpibControlREN`, `viGpibPassControl`, `viGpibSendIFC`, `viIn8`, `viIn16`, `viIn32`, `viLock`, `viMapAddress`, `viMapTrigger`, `viMemAlloc`, `viMemFree`, `viMoveIn8`, `viMoveIn16`, `viMoveIn32`, `viMoveOut8`, `viMoveOut16`, `viMoveOut32`, `viOpen`, `viOpenDefaultRM`, `viOut8`, `viOut16`, `viOut32`, `viPeek8`, `viPeek16`, `viPeek32`, `viPoke8`, `viPoke16`, `viPoke32`, `viRead`, `viReadSTB`, `viSetAttribute`, `viSetAttributeString`, `viStatusDesc`, `viTerminate`, `viUnlock`, `viUnmapAddress`, `viUnmapTrigger`, `viUsbControlIn`, `viUsbControlOut`, `viVxiCommandQuery`, `viWaitOnEvent`, `viWrite`, `VoigtFunc`, `VoigtPeak`, `WaveCRC`, `WaveDims`, `WaveExists`, `WaveHash`, `WaveInfo`, `WaveList`, `WaveMax`, `WaveMin`, `WaveName`, `WaveRefIndexed`, `WaveRefIndexedDFR`, `WaveRefsEqual`, `WaveRefWaveToList`, `WaveTextEncoding`, `WaveType`, `WaveUnits`, `WhichListItem`, `WinList`, `WinName`, `WinRecreation`, `WinType`, `wnoise`, `xcsr`, `XWaveName`, `XWaveRefFromTrace`, `x2pnt`, `zcsr`, `ZernikeR`, `zeromq_client_connect`, `zeromq_client_connect`, `zeromq_client_recv`, `zeromq_client_recv`, `zeromq_client_send`, `zeromq_client_send`, `zeromq_handler_start`, `zeromq_handler_start`, `zeromq_handler_stop`, `zeromq_handler_stop`, 
`zeromq_server_bind`, `zeromq_server_bind`, `zeromq_server_recv`, `zeromq_server_recv`, `zeromq_server_send`, `zeromq_server_send`, `zeromq_set`, `zeromq_set`, `zeromq_stop`, `zeromq_stop`, `zeromq_test_callfunction`, `zeromq_test_callfunction`, `zeromq_test_serializeWave`, `zeromq_test_serializeWave`, `zeta`), NameFunction, nil}, + {`^#(include|pragma|define|undef|ifdef|ifndef|if|elif|else|endif)`, NameDecorator, nil}, + {`[^a-z"/]+$`, Text, nil}, + {`.`, Text, nil}, + {`\n|\r`, Text, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/i/ini.go b/vendor/github.com/alecthomas/chroma/lexers/i/ini.go new file mode 100644 index 000000000..46b2ce238 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/i/ini.go @@ -0,0 +1,29 @@ +package i + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Ini lexer. +var Ini = internal.Register(MustNewLazyLexer( + &Config{ + Name: "INI", + Aliases: []string{"ini", "cfg", "dosini"}, + Filenames: []string{"*.ini", "*.cfg", "*.inf", ".gitconfig", ".editorconfig"}, + MimeTypes: []string{"text/x-ini", "text/inf"}, + }, + iniRules, +)) + +func iniRules() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`[;#].*`, CommentSingle, nil}, + {`\[.*?\]$`, Keyword, nil}, + {`(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)`, ByGroups(NameAttribute, Text, Operator, Text, LiteralString), nil}, + {`(.+?)$`, NameAttribute, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/i/io.go b/vendor/github.com/alecthomas/chroma/lexers/i/io.go new file mode 100644 index 000000000..8b2e53a07 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/i/io.go @@ -0,0 +1,44 @@ +package i + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Io lexer. +var Io = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Io", + Aliases: []string{"io"}, + Filenames: []string{"*.io"}, + MimeTypes: []string{"text/x-iosrc"}, + }, + ioRules, +)) + +func ioRules() Rules { + return Rules{ + "root": { + {`\n`, Text, nil}, + {`\s+`, Text, nil}, + {`//(.*?)\n`, CommentSingle, nil}, + {`#(.*?)\n`, CommentSingle, nil}, + {`/(\\\n)?[*](.|\n)*?[*](\\\n)?/`, CommentMultiline, nil}, + {`/\+`, CommentMultiline, Push("nestedcomment")}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`::=|:=|=|\(|\)|;|,|\*|-|\+|>|<|@|!|/|\||\^|\.|%|&|\[|\]|\{|\}`, Operator, nil}, + {`(clone|do|doFile|doString|method|for|if|else|elseif|then)\b`, Keyword, nil}, + {`(nil|false|true)\b`, NameConstant, nil}, + {`(Object|list|List|Map|args|Sequence|Coroutine|File)\b`, NameBuiltin, nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + {`(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`\d+`, LiteralNumberInteger, nil}, + }, + "nestedcomment": { + {`[^+/]+`, CommentMultiline, nil}, + {`/\+`, CommentMultiline, Push()}, + {`\+/`, CommentMultiline, Pop(1)}, + {`[+/]`, CommentMultiline, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/internal/api.go b/vendor/github.com/alecthomas/chroma/lexers/internal/api.go new file mode 100644 index 000000000..12fa45ff8 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/internal/api.go @@ -0,0 +1,201 @@ +// Package internal contains common API functions and structures shared between lexer packages. 
+package internal + +import ( + "path/filepath" + "sort" + "strings" + + "github.com/alecthomas/chroma" +) + +var ( + ignoredSuffixes = [...]string{ + // Editor backups + "~", ".bak", ".old", ".orig", + // Debian and derivatives apt/dpkg/ucf backups + ".dpkg-dist", ".dpkg-old", ".ucf-dist", ".ucf-new", ".ucf-old", + // Red Hat and derivatives rpm backups + ".rpmnew", ".rpmorig", ".rpmsave", + // Build system input/template files + ".in", + } +) + +// Registry of Lexers. +var Registry = struct { + Lexers chroma.Lexers + byName map[string]chroma.Lexer + byAlias map[string]chroma.Lexer +}{ + byName: map[string]chroma.Lexer{}, + byAlias: map[string]chroma.Lexer{}, +} + +// Names of all lexers, optionally including aliases. +func Names(withAliases bool) []string { + out := []string{} + for _, lexer := range Registry.Lexers { + config := lexer.Config() + out = append(out, config.Name) + if withAliases { + out = append(out, config.Aliases...) + } + } + sort.Strings(out) + return out +} + +// Get a Lexer by name, alias or file extension. +func Get(name string) chroma.Lexer { + if lexer := Registry.byName[name]; lexer != nil { + return lexer + } + if lexer := Registry.byAlias[name]; lexer != nil { + return lexer + } + if lexer := Registry.byName[strings.ToLower(name)]; lexer != nil { + return lexer + } + if lexer := Registry.byAlias[strings.ToLower(name)]; lexer != nil { + return lexer + } + + candidates := chroma.PrioritisedLexers{} + // Try file extension. + if lexer := Match("filename." + name); lexer != nil { + candidates = append(candidates, lexer) + } + // Try exact filename. + if lexer := Match(name); lexer != nil { + candidates = append(candidates, lexer) + } + if len(candidates) == 0 { + return nil + } + sort.Sort(candidates) + return candidates[0] +} + +// MatchMimeType attempts to find a lexer for the given MIME type. +func MatchMimeType(mimeType string) chroma.Lexer { + matched := chroma.PrioritisedLexers{} + for _, l := range Registry.Lexers { + for _, lmt := range l.Config().MimeTypes { + if mimeType == lmt { + matched = append(matched, l) + } + } + } + if len(matched) != 0 { + sort.Sort(matched) + return matched[0] + } + return nil +} + +// Match returns the first lexer matching filename. +func Match(filename string) chroma.Lexer { + filename = filepath.Base(filename) + matched := chroma.PrioritisedLexers{} + // First, try primary filename matches. + for _, lexer := range Registry.Lexers { + config := lexer.Config() + for _, glob := range config.Filenames { + ok, err := filepath.Match(glob, filename) + if err != nil { // nolint + panic(err) + } else if ok { + matched = append(matched, lexer) + } else { + for _, suf := range &ignoredSuffixes { + ok, err := filepath.Match(glob+suf, filename) + if err != nil { + panic(err) + } else if ok { + matched = append(matched, lexer) + break + } + } + } + } + } + if len(matched) > 0 { + sort.Sort(matched) + return matched[0] + } + matched = nil + // Next, try filename aliases. 
+ for _, lexer := range Registry.Lexers { + config := lexer.Config() + for _, glob := range config.AliasFilenames { + ok, err := filepath.Match(glob, filename) + if err != nil { // nolint + panic(err) + } else if ok { + matched = append(matched, lexer) + } else { + for _, suf := range &ignoredSuffixes { + ok, err := filepath.Match(glob+suf, filename) + if err != nil { + panic(err) + } else if ok { + matched = append(matched, lexer) + break + } + } + } + } + } + if len(matched) > 0 { + sort.Sort(matched) + return matched[0] + } + return nil +} + +// Analyse text content and return the "best" lexer.. +func Analyse(text string) chroma.Lexer { + var picked chroma.Lexer + highest := float32(0.0) + for _, lexer := range Registry.Lexers { + if analyser, ok := lexer.(chroma.Analyser); ok { + weight := analyser.AnalyseText(text) + if weight > highest { + picked = lexer + highest = weight + } + } + } + return picked +} + +// Register a Lexer with the global registry. +func Register(lexer chroma.Lexer) chroma.Lexer { + config := lexer.Config() + Registry.byName[config.Name] = lexer + Registry.byName[strings.ToLower(config.Name)] = lexer + for _, alias := range config.Aliases { + Registry.byAlias[alias] = lexer + Registry.byAlias[strings.ToLower(alias)] = lexer + } + Registry.Lexers = append(Registry.Lexers, lexer) + return lexer +} + +// PlaintextRules is used for the fallback lexer as well as the explicit +// plaintext lexer. +func PlaintextRules() chroma.Rules { + return chroma.Rules{ + "root": []chroma.Rule{ + {`.+`, chroma.Text, nil}, + {`\n`, chroma.Text, nil}, + }, + } +} + +// Fallback lexer if no other is found. +var Fallback chroma.Lexer = chroma.MustNewLazyLexer(&chroma.Config{ + Name: "fallback", + Filenames: []string{"*"}, +}, PlaintextRules) diff --git a/vendor/github.com/alecthomas/chroma/lexers/j/j.go b/vendor/github.com/alecthomas/chroma/lexers/j/j.go new file mode 100644 index 000000000..9a2a4e3c0 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/j/j.go @@ -0,0 +1,77 @@ +package j + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// J lexer. 
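The registry in lexers/internal/api.go above is the piece everything else hangs off: Get resolves a lexer by name, alias, or file extension, Match walks filename globs (including common backup suffixes), Analyse scores lexers against the content itself, and Fallback is the plaintext lexer used when nothing else matches. A minimal sketch of how a caller might chain those lookups, written against the public lexers facade added later in this diff — the resolveLexer helper and its ordering are illustrative, not part of the change:

package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
	"github.com/alecthomas/chroma/lexers"
)

// resolveLexer mirrors the lookup order the registry supports: explicit
// name or alias first, then filename globs, then content analysis, and
// finally the plaintext fallback. (Illustrative helper, not part of the diff.)
func resolveLexer(lang, filename, source string) chroma.Lexer {
	if lang != "" {
		if l := lexers.Get(lang); l != nil {
			return l
		}
	}
	if filename != "" {
		if l := lexers.Match(filename); l != nil {
			return l
		}
	}
	if l := lexers.Analyse(source); l != nil {
		return l
	}
	return lexers.Fallback
}

func main() {
	l := resolveLexer("", "config.ini", "[core]\nbare = false\n")
	fmt.Println(l.Config().Name) // typically "INI", matched via the *.ini filename glob
}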
+var J = internal.Register(MustNewLazyLexer( + &Config{ + Name: "J", + Aliases: []string{"j"}, + Filenames: []string{"*.ijs"}, + MimeTypes: []string{"text/x-j"}, + }, + jRules, +)) + +func jRules() Rules { + return Rules{ + "root": { + {`#!.*$`, CommentPreproc, nil}, + {`NB\..*`, CommentSingle, nil}, + {`\n+\s*Note`, CommentMultiline, Push("comment")}, + {`\s*Note.*`, CommentSingle, nil}, + {`\s+`, Text, nil}, + {`'`, LiteralString, Push("singlequote")}, + {`0\s+:\s*0|noun\s+define\s*$`, NameEntity, Push("nounDefinition")}, + {`(([1-4]|13)\s+:\s*0|(adverb|conjunction|dyad|monad|verb)\s+define)\b`, NameFunction, Push("explicitDefinition")}, + {Words(``, `\b[a-zA-Z]\w*\.`, `for_`, `goto_`, `label_`), NameLabel, nil}, + {Words(``, `\.`, `assert`, `break`, `case`, `catch`, `catchd`, `catcht`, `continue`, `do`, `else`, `elseif`, `end`, `fcase`, `for`, `if`, `return`, `select`, `throw`, `try`, `while`, `whilst`), NameLabel, nil}, + {`\b[a-zA-Z]\w*`, NameVariable, nil}, + {Words(``, ``, `ARGV`, `CR`, `CRLF`, `DEL`, `Debug`, `EAV`, `EMPTY`, `FF`, `JVERSION`, `LF`, `LF2`, `Note`, `TAB`, `alpha17`, `alpha27`, `apply`, `bind`, `boxopen`, `boxxopen`, `bx`, `clear`, `cutLF`, `cutopen`, `datatype`, `def`, `dfh`, `drop`, `each`, `echo`, `empty`, `erase`, `every`, `evtloop`, `exit`, `expand`, `fetch`, `file2url`, `fixdotdot`, `fliprgb`, `getargs`, `getenv`, `hfd`, `inv`, `inverse`, `iospath`, `isatty`, `isutf8`, `items`, `leaf`, `list`, `nameclass`, `namelist`, `names`, `nc`, `nl`, `on`, `pick`, `rows`, `script`, `scriptd`, `sign`, `sminfo`, `smoutput`, `sort`, `split`, `stderr`, `stdin`, `stdout`, `table`, `take`, `timespacex`, `timex`, `tmoutput`, `toCRLF`, `toHOST`, `toJ`, `tolower`, `toupper`, `type`, `ucp`, `ucpcount`, `usleep`, `utf8`, `uucp`), NameFunction, nil}, + {`=[.:]`, Operator, nil}, + {"[-=+*#$%@!~`^&\";:.,<>{}\\[\\]\\\\|/]", Operator, nil}, + {`[abCdDeEfHiIjLMoprtT]\.`, KeywordReserved, nil}, + {`[aDiLpqsStux]\:`, KeywordReserved, nil}, + {`(_[0-9])\:`, KeywordConstant, nil}, + {`\(`, Punctuation, Push("parentheses")}, + Include("numbers"), + }, + "comment": { + {`[^)]`, CommentMultiline, nil}, + {`^\)`, CommentMultiline, Pop(1)}, + {`[)]`, CommentMultiline, nil}, + }, + "explicitDefinition": { + {`\b[nmuvxy]\b`, NameDecorator, nil}, + Include("root"), + {`[^)]`, Name, nil}, + {`^\)`, NameLabel, Pop(1)}, + {`[)]`, Name, nil}, + }, + "numbers": { + {`\b_{1,2}\b`, LiteralNumber, nil}, + {`_?\d+(\.\d+)?(\s*[ejr]\s*)_?\d+(\.?=\d+)?`, LiteralNumber, nil}, + {`_?\d+\.(?=\d+)`, LiteralNumberFloat, nil}, + {`_?\d+x`, LiteralNumberIntegerLong, nil}, + {`_?\d+`, LiteralNumberInteger, nil}, + }, + "nounDefinition": { + {`[^)]`, LiteralString, nil}, + {`^\)`, NameLabel, Pop(1)}, + {`[)]`, LiteralString, nil}, + }, + "parentheses": { + {`\)`, Punctuation, Pop(1)}, + Include("explicitDefinition"), + Include("root"), + }, + "singlequote": { + {`[^']`, LiteralString, nil}, + {`''`, LiteralString, nil}, + {`'`, LiteralString, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/j/java.go b/vendor/github.com/alecthomas/chroma/lexers/j/java.go new file mode 100644 index 000000000..48a9d9fee --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/j/java.go @@ -0,0 +1,56 @@ +package j + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Java lexer. 
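Each lexer file vendored here has the same shape, visible in the J lexer above and repeated for Java, Kotlin, LLVM, Lua and the rest: a Config (name, aliases, filename globs, MIME types) plus a rules function, wrapped in MustNewLazyLexer so rule compilation is deferred until the lexer is first used, then handed to internal.Register. A hypothetical minimal lexer written to the same pattern — the Example name, aliases and rules are made up purely for illustration:

package e

import (
	. "github.com/alecthomas/chroma" // nolint
	"github.com/alecthomas/chroma/lexers/internal"
)

// Example lexer (illustrative only, not part of this diff).
var Example = internal.Register(MustNewLazyLexer(
	&Config{
		Name:      "Example",
		Aliases:   []string{"example"},
		Filenames: []string{"*.example"},
		MimeTypes: []string{"text/x-example"},
	},
	exampleRules,
))

// exampleRules is only invoked (and its regexes compiled) on first use.
func exampleRules() Rules {
	return Rules{
		"root": {
			{`#.*`, CommentSingle, nil},
			{`"[^"]*"`, LiteralString, nil},
			{`\d+`, LiteralNumberInteger, nil},
			{`[a-zA-Z_]\w*`, Name, nil},
			{`\s+`, Text, nil},
			{`.`, Punctuation, nil},
		},
	}
}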
+var Java = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Java", + Aliases: []string{"java"}, + Filenames: []string{"*.java"}, + MimeTypes: []string{"text/x-java"}, + DotAll: true, + EnsureNL: true, + }, + javaRules, +)) + +func javaRules() Rules { + return Rules{ + "root": { + {`[^\S\n]+`, Text, nil}, + {`//.*?\n`, CommentSingle, nil}, + {`/\*.*?\*/`, CommentMultiline, nil}, + {`(assert|break|case|catch|continue|default|do|else|finally|for|if|goto|instanceof|new|return|switch|this|throw|try|while)\b`, Keyword, nil}, + {`((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)((?:[^\W\d]|\$)[\w$]*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil}, + {`@[^\W\d][\w.]*`, NameDecorator, nil}, + {`(abstract|const|enum|extends|final|implements|native|private|protected|public|static|strictfp|super|synchronized|throws|transient|volatile)\b`, KeywordDeclaration, nil}, + {`(boolean|byte|char|double|float|int|long|short|void)\b`, KeywordType, nil}, + {`(package)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")}, + {`(true|false|null)\b`, KeywordConstant, nil}, + {`(class|interface)(\s+)`, ByGroups(KeywordDeclaration, Text), Push("class")}, + {`(import(?:\s+static)?)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'`, LiteralStringChar, nil}, + {`(\.)((?:[^\W\d]|\$)[\w$]*)`, ByGroups(Operator, NameAttribute), nil}, + {`^\s*([^\W\d]|\$)[\w$]*:`, NameLabel, nil}, + {`([^\W\d]|\$)[\w$]*`, Name, nil}, + {`([0-9][0-9_]*\.([0-9][0-9_]*)?|\.[0-9][0-9_]*)([eE][+\-]?[0-9][0-9_]*)?[fFdD]?|[0-9][eE][+\-]?[0-9][0-9_]*[fFdD]?|[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFdD]|0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)[pP][+\-]?[0-9][0-9_]*[fFdD]?`, LiteralNumberFloat, nil}, + {`0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?`, LiteralNumberHex, nil}, + {`0[bB][01][01_]*[lL]?`, LiteralNumberBin, nil}, + {`0[0-7_]+[lL]?`, LiteralNumberOct, nil}, + {`0|[1-9][0-9_]*[lL]?`, LiteralNumberInteger, nil}, + {`[~^*!%&\[\](){}<>|+=:;,./?-]`, Operator, nil}, + {`\n`, Text, nil}, + }, + "class": { + {`([^\W\d]|\$)[\w$]*`, NameClass, Pop(1)}, + }, + "import": { + {`[\w.]+\*?`, NameNamespace, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/j/javascript.go b/vendor/github.com/alecthomas/chroma/lexers/j/javascript.go new file mode 100644 index 000000000..5c6b93794 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/j/javascript.go @@ -0,0 +1,74 @@ +package j + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Javascript lexer. 
+var JavascriptRules = Rules{ + "commentsandwhitespace": { + {`\s+`, Text, nil}, + {``, `<--`, `<-->`, `>`, `<`, `>=`, `≥`, `<=`, `≤`, `==`, `===`, `≡`, `!=`, `≠`, `!==`, `≢`, `∈`, `∉`, `∋`, `∌`, `⊆`, `⊈`, `⊂`, `⊄`, `⊊`, `∝`, `∊`, `∍`, `∥`, `∦`, `∷`, `∺`, `∻`, `∽`, `∾`, `≁`, `≃`, `≂`, `≄`, `≅`, `≆`, `≇`, `≈`, `≉`, `≊`, `≋`, `≌`, `≍`, `≎`, `≐`, `≑`, `≒`, `≓`, `≖`, `≗`, `≘`, `≙`, `≚`, `≛`, `≜`, `≝`, `≞`, `≟`, `≣`, `≦`, `≧`, `≨`, `≩`, `≪`, `≫`, `≬`, `≭`, `≮`, `≯`, `≰`, `≱`, `≲`, `≳`, `≴`, `≵`, `≶`, `≷`, `≸`, `≹`, `≺`, `≻`, `≼`, `≽`, `≾`, `≿`, `⊀`, `⊁`, `⊃`, `⊅`, `⊇`, `⊉`, `⊋`, `⊏`, `⊐`, `⊑`, `⊒`, `⊜`, `⊩`, `⊬`, `⊮`, `⊰`, `⊱`, `⊲`, `⊳`, `⊴`, `⊵`, `⊶`, `⊷`, `⋍`, `⋐`, `⋑`, `⋕`, `⋖`, `⋗`, `⋘`, `⋙`, `⋚`, `⋛`, `⋜`, `⋝`, `⋞`, `⋟`, `⋠`, `⋡`, `⋢`, `⋣`, `⋤`, `⋥`, `⋦`, `⋧`, `⋨`, `⋩`, `⋪`, `⋫`, `⋬`, `⋭`, `⋲`, `⋳`, `⋴`, `⋵`, `⋶`, `⋷`, `⋸`, `⋹`, `⋺`, `⋻`, `⋼`, `⋽`, `⋾`, `⋿`, `⟈`, `⟉`, `⟒`, `⦷`, `⧀`, `⧁`, `⧡`, `⧣`, `⧤`, `⧥`, `⩦`, `⩧`, `⩪`, `⩫`, `⩬`, `⩭`, `⩮`, `⩯`, `⩰`, `⩱`, `⩲`, `⩳`, `⩵`, `⩶`, `⩷`, `⩸`, `⩹`, `⩺`, `⩻`, `⩼`, `⩽`, `⩾`, `⩿`, `⪀`, `⪁`, `⪂`, `⪃`, `⪄`, `⪅`, `⪆`, `⪇`, `⪈`, `⪉`, `⪊`, `⪋`, `⪌`, `⪍`, `⪎`, `⪏`, `⪐`, `⪑`, `⪒`, `⪓`, `⪔`, `⪕`, `⪖`, `⪗`, `⪘`, `⪙`, `⪚`, `⪛`, `⪜`, `⪝`, `⪞`, `⪟`, `⪠`, `⪡`, `⪢`, `⪣`, `⪤`, `⪥`, `⪦`, `⪧`, `⪨`, `⪩`, `⪪`, `⪫`, `⪬`, `⪭`, `⪮`, `⪯`, `⪰`, `⪱`, `⪲`, `⪳`, `⪴`, `⪵`, `⪶`, `⪷`, `⪸`, `⪹`, `⪺`, `⪻`, `⪼`, `⪽`, `⪾`, `⪿`, `⫀`, `⫁`, `⫂`, `⫃`, `⫄`, `⫅`, `⫆`, `⫇`, `⫈`, `⫉`, `⫊`, `⫋`, `⫌`, `⫍`, `⫎`, `⫏`, `⫐`, `⫑`, `⫒`, `⫓`, `⫔`, `⫕`, `⫖`, `⫗`, `⫘`, `⫙`, `⫷`, `⫸`, `⫹`, `⫺`, `⊢`, `⊣`, `⟂`, `<:`, `>:`, `<|`, `|>`, `…`, `⁝`, `⋮`, `⋱`, `⋰`, `⋯`, `+`, `-`, `¦`, `|`, `⊕`, `⊖`, `⊞`, `⊟`, `++`, `∪`, `∨`, `⊔`, `±`, `∓`, `∔`, `∸`, `≏`, `⊎`, `⊻`, `⊽`, `⋎`, `⋓`, `⧺`, `⧻`, `⨈`, `⨢`, `⨣`, `⨤`, `⨥`, `⨦`, `⨧`, `⨨`, `⨩`, `⨪`, `⨫`, `⨬`, `⨭`, `⨮`, `⨹`, `⨺`, `⩁`, `⩂`, `⩅`, `⩊`, `⩌`, `⩏`, `⩐`, `⩒`, `⩔`, `⩖`, `⩗`, `⩛`, `⩝`, `⩡`, `⩢`, `⩣`, `*`, `/`, `⌿`, `÷`, `%`, `&`, `⋅`, `∘`, `×`, `\`, `∩`, `∧`, `⊗`, `⊘`, `⊙`, `⊚`, `⊛`, `⊠`, `⊡`, `⊓`, `∗`, `∙`, `∤`, `⅋`, `≀`, `⊼`, `⋄`, `⋆`, `⋇`, `⋉`, `⋊`, `⋋`, `⋌`, `⋏`, `⋒`, `⟑`, `⦸`, `⦼`, `⦾`, `⦿`, `⧶`, `⧷`, `⨇`, `⨰`, `⨱`, `⨲`, `⨳`, `⨴`, `⨵`, `⨶`, `⨷`, `⨸`, `⨻`, `⨼`, `⨽`, `⩀`, `⩃`, `⩄`, `⩋`, `⩍`, `⩎`, `⩑`, `⩓`, `⩕`, `⩘`, `⩚`, `⩜`, `⩞`, `⩟`, `⩠`, `⫛`, `⊍`, `▷`, `⨝`, `⟕`, `⟖`, `⟗`, `⨟`, `//`, `>>`, `<<`, `>>>`, `^`, `↑`, `↓`, `⇵`, `⟰`, `⟱`, `⤈`, `⤉`, `⤊`, `⤋`, `⤒`, `⤓`, `⥉`, `⥌`, `⥍`, `⥏`, `⥑`, `⥔`, `⥕`, `⥘`, `⥙`, `⥜`, `⥝`, `⥠`, `⥡`, `⥣`, `⥥`, `⥮`, `⥯`, `↑`, `↓`, `!`, `¬`, `√`, `∛`, `∜`), Operator, nil}, + {Words(``, `[²³¹ʰʲʳʷʸˡˢˣᴬᴮᴰᴱᴳᴴᴵᴶᴷᴸᴹᴺᴼᴾᴿᵀᵁᵂᵃᵇᵈᵉᵍᵏᵐᵒᵖᵗᵘᵛᵝᵞᵟᵠᵡᵢᵣᵤᵥᵦᵧᵨᵩᵪᶜᶠᶥᶦᶫᶰᶸᶻᶿ′″‴‵‶‷⁗⁰ⁱ⁴⁵⁶⁷⁸⁹⁺⁻⁼⁽⁾ⁿ₀₁₂₃₄₅₆₇₈₉₊₋₌₍₎ₐₑₒₓₕₖₗₘₙₚₛₜⱼⱽ]*`, `.=`, `.+=`, `.-=`, `.*=`, `./=`, `.//=`, `.\=`, `.^=`, `.÷=`, `.%=`, `.<<=`, `.>>=`, `.>>>=`, `.|=`, `.&=`, `.⊻=`, `.≔`, `.⩴`, `.≕'`, `.~`, `.=>`, `.→`, `.↔`, `.↚`, `.↛`, `.↞`, `.↠`, `.↢`, `.↣`, `.↦`, `.↤`, `.↮`, `.⇎`, `.⇍`, `.⇏`, `.⇐`, `.⇒`, `.⇔`, `.⇴`, `.⇶`, `.⇷`, `.⇸`, `.⇹`, `.⇺`, `.⇻`, `.⇼`, `.⇽`, `.⇾`, `.⇿`, `.⟵`, `.⟶`, `.⟷`, `.⟹`, `.⟺`, `.⟻`, `.⟼`, `.⟽`, `.⟾`, `.⟿`, `.⤀`, `.⤁`, `.⤂`, `.⤃`, `.⤄`, `.⤅`, `.⤆`, `.⤇`, `.⤌`, `.⤍`, `.⤎`, `.⤏`, `.⤐`, `.⤑`, `.⤔`, `.⤕`, `.⤖`, `.⤗`, `.⤘`, `.⤝`, `.⤞`, `.⤟`, `.⤠`, `.⥄`, `.⥅`, `.⥆`, `.⥇`, `.⥈`, `.⥊`, `.⥋`, `.⥎`, `.⥐`, `.⥒`, `.⥓`, `.⥖`, `.⥗`, `.⥚`, `.⥛`, `.⥞`, `.⥟`, `.⥢`, `.⥤`, `.⥦`, `.⥧`, `.⥨`, `.⥩`, `.⥪`, `.⥫`, `.⥬`, `.⥭`, `.⥰`, `.⧴`, `.⬱`, `.⬰`, `.⬲`, `.⬳`, `.⬴`, `.⬵`, `.⬶`, `.⬷`, `.⬸`, `.⬹`, `.⬺`, `.⬻`, `.⬼`, `.⬽`, `.⬾`, `.⬿`, `.⭀`, `.⭁`, `.⭂`, `.⭃`, `.⭄`, `.⭇`, `.⭈`, `.⭉`, `.⭊`, `.⭋`, `.⭌`, `.←`, `.→`, `.⇜`, `.⇝`, `.↜`, `.↝`, `.↩`, `.↪`, `.↫`, `.↬`, `.↼`, `.↽`, `.⇀`, `.⇁`, `.⇄`, `.⇆`, `.⇇`, 
`.⇉`, `.⇋`, `.⇌`, `.⇚`, `.⇛`, `.⇠`, `.⇢`, `.↷`, `.↶`, `.↺`, `.↻`, `.-->`, `.<--`, `.<-->`, `.>`, `.<`, `.>=`, `.≥`, `.<=`, `.≤`, `.==`, `.===`, `.≡`, `.!=`, `.≠`, `.!==`, `.≢`, `.∈`, `.∉`, `.∋`, `.∌`, `.⊆`, `.⊈`, `.⊂`, `.⊄`, `.⊊`, `.∝`, `.∊`, `.∍`, `.∥`, `.∦`, `.∷`, `.∺`, `.∻`, `.∽`, `.∾`, `.≁`, `.≃`, `.≂`, `.≄`, `.≅`, `.≆`, `.≇`, `.≈`, `.≉`, `.≊`, `.≋`, `.≌`, `.≍`, `.≎`, `.≐`, `.≑`, `.≒`, `.≓`, `.≖`, `.≗`, `.≘`, `.≙`, `.≚`, `.≛`, `.≜`, `.≝`, `.≞`, `.≟`, `.≣`, `.≦`, `.≧`, `.≨`, `.≩`, `.≪`, `.≫`, `.≬`, `.≭`, `.≮`, `.≯`, `.≰`, `.≱`, `.≲`, `.≳`, `.≴`, `.≵`, `.≶`, `.≷`, `.≸`, `.≹`, `.≺`, `.≻`, `.≼`, `.≽`, `.≾`, `.≿`, `.⊀`, `.⊁`, `.⊃`, `.⊅`, `.⊇`, `.⊉`, `.⊋`, `.⊏`, `.⊐`, `.⊑`, `.⊒`, `.⊜`, `.⊩`, `.⊬`, `.⊮`, `.⊰`, `.⊱`, `.⊲`, `.⊳`, `.⊴`, `.⊵`, `.⊶`, `.⊷`, `.⋍`, `.⋐`, `.⋑`, `.⋕`, `.⋖`, `.⋗`, `.⋘`, `.⋙`, `.⋚`, `.⋛`, `.⋜`, `.⋝`, `.⋞`, `.⋟`, `.⋠`, `.⋡`, `.⋢`, `.⋣`, `.⋤`, `.⋥`, `.⋦`, `.⋧`, `.⋨`, `.⋩`, `.⋪`, `.⋫`, `.⋬`, `.⋭`, `.⋲`, `.⋳`, `.⋴`, `.⋵`, `.⋶`, `.⋷`, `.⋸`, `.⋹`, `.⋺`, `.⋻`, `.⋼`, `.⋽`, `.⋾`, `.⋿`, `.⟈`, `.⟉`, `.⟒`, `.⦷`, `.⧀`, `.⧁`, `.⧡`, `.⧣`, `.⧤`, `.⧥`, `.⩦`, `.⩧`, `.⩪`, `.⩫`, `.⩬`, `.⩭`, `.⩮`, `.⩯`, `.⩰`, `.⩱`, `.⩲`, `.⩳`, `.⩵`, `.⩶`, `.⩷`, `.⩸`, `.⩹`, `.⩺`, `.⩻`, `.⩼`, `.⩽`, `.⩾`, `.⩿`, `.⪀`, `.⪁`, `.⪂`, `.⪃`, `.⪄`, `.⪅`, `.⪆`, `.⪇`, `.⪈`, `.⪉`, `.⪊`, `.⪋`, `.⪌`, `.⪍`, `.⪎`, `.⪏`, `.⪐`, `.⪑`, `.⪒`, `.⪓`, `.⪔`, `.⪕`, `.⪖`, `.⪗`, `.⪘`, `.⪙`, `.⪚`, `.⪛`, `.⪜`, `.⪝`, `.⪞`, `.⪟`, `.⪠`, `.⪡`, `.⪢`, `.⪣`, `.⪤`, `.⪥`, `.⪦`, `.⪧`, `.⪨`, `.⪩`, `.⪪`, `.⪫`, `.⪬`, `.⪭`, `.⪮`, `.⪯`, `.⪰`, `.⪱`, `.⪲`, `.⪳`, `.⪴`, `.⪵`, `.⪶`, `.⪷`, `.⪸`, `.⪹`, `.⪺`, `.⪻`, `.⪼`, `.⪽`, `.⪾`, `.⪿`, `.⫀`, `.⫁`, `.⫂`, `.⫃`, `.⫄`, `.⫅`, `.⫆`, `.⫇`, `.⫈`, `.⫉`, `.⫊`, `.⫋`, `.⫌`, `.⫍`, `.⫎`, `.⫏`, `.⫐`, `.⫑`, `.⫒`, `.⫓`, `.⫔`, `.⫕`, `.⫖`, `.⫗`, `.⫘`, `.⫙`, `.⫷`, `.⫸`, `.⫹`, `.⫺`, `.⊢`, `.⊣`, `.⟂`, `.<:`, `.>:`, `.<|`, `.|>`, `.…`, `.⁝`, `.⋮`, `.⋱`, `.⋰`, `.⋯`, `.+`, `.-`, `.¦`, `.|`, `.⊕`, `.⊖`, `.⊞`, `.⊟`, `.++`, `.∪`, `.∨`, `.⊔`, `.±`, `.∓`, `.∔`, `.∸`, `.≏`, `.⊎`, `.⊻`, `.⊽`, `.⋎`, `.⋓`, `.⧺`, `.⧻`, `.⨈`, `.⨢`, `.⨣`, `.⨤`, `.⨥`, `.⨦`, `.⨧`, `.⨨`, `.⨩`, `.⨪`, `.⨫`, `.⨬`, `.⨭`, `.⨮`, `.⨹`, `.⨺`, `.⩁`, `.⩂`, `.⩅`, `.⩊`, `.⩌`, `.⩏`, `.⩐`, `.⩒`, `.⩔`, `.⩖`, `.⩗`, `.⩛`, `.⩝`, `.⩡`, `.⩢`, `.⩣`, `.*`, `./`, `.⌿`, `.÷`, `.%`, `.&`, `.⋅`, `.∘`, `.×`, `.\`, `.∩`, `.∧`, `.⊗`, `.⊘`, `.⊙`, `.⊚`, `.⊛`, `.⊠`, `.⊡`, `.⊓`, `.∗`, `.∙`, `.∤`, `.⅋`, `.≀`, `.⊼`, `.⋄`, `.⋆`, `.⋇`, `.⋉`, `.⋊`, `.⋋`, `.⋌`, `.⋏`, `.⋒`, `.⟑`, `.⦸`, `.⦼`, `.⦾`, `.⦿`, `.⧶`, `.⧷`, `.⨇`, `.⨰`, `.⨱`, `.⨲`, `.⨳`, `.⨴`, `.⨵`, `.⨶`, `.⨷`, `.⨸`, `.⨻`, `.⨼`, `.⨽`, `.⩀`, `.⩃`, `.⩄`, `.⩋`, `.⩍`, `.⩎`, `.⩑`, `.⩓`, `.⩕`, `.⩘`, `.⩚`, `.⩜`, `.⩞`, `.⩟`, `.⩠`, `.⫛`, `.⊍`, `.▷`, `.⨝`, `.⟕`, `.⟖`, `.⟗`, `.⨟`, `.//`, `.>>`, `.<<`, `.>>>`, `.^`, `.↑`, `.↓`, `.⇵`, `.⟰`, `.⟱`, `.⤈`, `.⤉`, `.⤊`, `.⤋`, `.⤒`, `.⤓`, `.⥉`, `.⥌`, `.⥍`, `.⥏`, `.⥑`, `.⥔`, `.⥕`, `.⥘`, `.⥙`, `.⥜`, `.⥝`, `.⥠`, `.⥡`, `.⥣`, `.⥥`, `.⥮`, `.⥯`, `.↑`, `.↓`, `.!`, `.¬`, `.√`, `.∛`, `.∜`), Operator, nil}, + {Words(``, ``, `...`, `..`), Operator, nil}, + {`'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|\\U[a-fA-F0-9]{1,6}|[^\\\'\n])'`, LiteralStringChar, nil}, + {`(?<=[.\w)\]])(\'[²³¹ʰʲʳʷʸˡˢˣᴬᴮᴰᴱᴳᴴᴵᴶᴷᴸᴹᴺᴼᴾᴿᵀᵁᵂᵃᵇᵈᵉᵍᵏᵐᵒᵖᵗᵘᵛᵝᵞᵟᵠᵡᵢᵣᵤᵥᵦᵧᵨᵩᵪᶜᶠᶥᶦᶫᶰᶸᶻᶿ′″‴‵‶‷⁗⁰ⁱ⁴⁵⁶⁷⁸⁹⁺⁻⁼⁽⁾ⁿ₀₁₂₃₄₅₆₇₈₉₊₋₌₍₎ₐₑₒₓₕₖₗₘₙₚₛₜⱼⱽ]*)+`, Operator, nil}, + {`(raw)(""")`, ByGroups(LiteralStringAffix, LiteralString), Push("tqrawstring")}, + {`(raw)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("rawstring")}, + {`(r)(""")`, ByGroups(LiteralStringAffix, LiteralStringRegex), Push("tqregex")}, + {`(r)(")`, ByGroups(LiteralStringAffix, 
LiteralStringRegex), Push("regex")}, + {`((?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*))?(""")`, ByGroups(LiteralStringAffix, LiteralString), Push("tqstring")}, + {`((?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*))?(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")}, + {"((?:[a-zA-Z_\u00a1-\U0010ffff][a-zA-Z_0-9!\u00a1-\U0010ffff]*))?(```)", ByGroups(LiteralStringAffix, LiteralStringBacktick), Push("tqcommand")}, + {"((?:[a-zA-Z_\u00a1-\U0010ffff][a-zA-Z_0-9!\u00a1-\U0010ffff]*))?(`)", ByGroups(LiteralStringAffix, LiteralStringBacktick), Push("command")}, + {`((?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*))(\{)`, ByGroups(KeywordType, Punctuation), Push("curly")}, + {`(where)(\s+)((?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*))`, ByGroups(Keyword, Text, KeywordType), nil}, + {`(\{)`, Punctuation, Push("curly")}, + {`(abstract[ \t]+type|primitive[ \t]+type|mutable[ \t]+struct|struct)([\s()]+)((?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*))`, ByGroups(Keyword, Text, KeywordType), nil}, + {`@(?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*)`, NameDecorator, nil}, + {Words(`@`, `[²³¹ʰʲʳʷʸˡˢˣᴬᴮᴰᴱᴳᴴᴵᴶᴷᴸᴹᴺᴼᴾᴿᵀᵁᵂᵃᵇᵈᵉᵍᵏᵐᵒᵖᵗᵘᵛᵝᵞᵟᵠᵡᵢᵣᵤᵥᵦᵧᵨᵩᵪᶜᶠᶥᶦᶫᶰᶸᶻᶿ′″‴‵‶‷⁗⁰ⁱ⁴⁵⁶⁷⁸⁹⁺⁻⁼⁽⁾ⁿ₀₁₂₃₄₅₆₇₈₉₊₋₌₍₎ₐₑₒₓₕₖₗₘₙₚₛₜⱼⱽ]*`, `->`, `:=`, `$=`, `?`, `||`, `&&`, `:`, `$`, `::`, `..`, `.`, `=`, `+=`, `-=`, `*=`, `/=`, `//=`, `\=`, `^=`, `÷=`, `%=`, `<<=`, `>>=`, `>>>=`, `|=`, `&=`, `⊻=`, `≔`, `⩴`, `≕'`, `~`, `=>`, `→`, `↔`, `↚`, `↛`, `↞`, `↠`, `↢`, `↣`, `↦`, `↤`, `↮`, `⇎`, `⇍`, `⇏`, `⇐`, `⇒`, `⇔`, `⇴`, `⇶`, `⇷`, `⇸`, `⇹`, `⇺`, `⇻`, `⇼`, `⇽`, `⇾`, `⇿`, `⟵`, `⟶`, `⟷`, `⟹`, `⟺`, `⟻`, `⟼`, `⟽`, `⟾`, `⟿`, `⤀`, `⤁`, `⤂`, `⤃`, `⤄`, `⤅`, `⤆`, `⤇`, `⤌`, `⤍`, `⤎`, `⤏`, `⤐`, `⤑`, `⤔`, `⤕`, `⤖`, `⤗`, `⤘`, `⤝`, `⤞`, `⤟`, `⤠`, `⥄`, `⥅`, `⥆`, `⥇`, `⥈`, `⥊`, `⥋`, `⥎`, `⥐`, `⥒`, `⥓`, `⥖`, `⥗`, `⥚`, `⥛`, `⥞`, `⥟`, `⥢`, `⥤`, `⥦`, `⥧`, `⥨`, `⥩`, `⥪`, `⥫`, `⥬`, `⥭`, `⥰`, `⧴`, `⬱`, `⬰`, `⬲`, `⬳`, `⬴`, `⬵`, `⬶`, `⬷`, `⬸`, `⬹`, `⬺`, `⬻`, `⬼`, `⬽`, `⬾`, `⬿`, `⭀`, `⭁`, `⭂`, `⭃`, `⭄`, `⭇`, `⭈`, `⭉`, `⭊`, `⭋`, `⭌`, `←`, `→`, `⇜`, `⇝`, `↜`, `↝`, `↩`, `↪`, `↫`, `↬`, `↼`, `↽`, `⇀`, `⇁`, `⇄`, `⇆`, `⇇`, `⇉`, `⇋`, `⇌`, `⇚`, `⇛`, `⇠`, `⇢`, `↷`, `↶`, `↺`, `↻`, `-->`, `<--`, `<-->`, `>`, `<`, `>=`, `≥`, `<=`, `≤`, `==`, `===`, `≡`, `!=`, `≠`, `!==`, `≢`, `∈`, `∉`, `∋`, `∌`, `⊆`, `⊈`, `⊂`, `⊄`, `⊊`, `∝`, `∊`, `∍`, `∥`, `∦`, `∷`, `∺`, `∻`, `∽`, `∾`, `≁`, `≃`, `≂`, `≄`, `≅`, `≆`, `≇`, `≈`, `≉`, `≊`, `≋`, `≌`, `≍`, `≎`, `≐`, `≑`, `≒`, `≓`, `≖`, `≗`, `≘`, `≙`, `≚`, `≛`, `≜`, `≝`, `≞`, `≟`, `≣`, `≦`, `≧`, `≨`, `≩`, `≪`, `≫`, `≬`, `≭`, `≮`, `≯`, `≰`, `≱`, `≲`, `≳`, `≴`, `≵`, `≶`, `≷`, `≸`, `≹`, `≺`, `≻`, `≼`, `≽`, `≾`, `≿`, `⊀`, `⊁`, `⊃`, `⊅`, `⊇`, `⊉`, `⊋`, `⊏`, `⊐`, `⊑`, `⊒`, `⊜`, `⊩`, `⊬`, `⊮`, `⊰`, `⊱`, `⊲`, `⊳`, `⊴`, `⊵`, `⊶`, `⊷`, `⋍`, `⋐`, `⋑`, `⋕`, `⋖`, `⋗`, `⋘`, `⋙`, `⋚`, `⋛`, `⋜`, `⋝`, `⋞`, `⋟`, `⋠`, `⋡`, `⋢`, `⋣`, `⋤`, `⋥`, `⋦`, `⋧`, `⋨`, `⋩`, `⋪`, `⋫`, `⋬`, `⋭`, `⋲`, `⋳`, `⋴`, `⋵`, `⋶`, `⋷`, `⋸`, `⋹`, `⋺`, `⋻`, `⋼`, `⋽`, `⋾`, `⋿`, `⟈`, `⟉`, `⟒`, `⦷`, `⧀`, `⧁`, `⧡`, `⧣`, `⧤`, `⧥`, `⩦`, `⩧`, `⩪`, `⩫`, `⩬`, `⩭`, `⩮`, `⩯`, `⩰`, `⩱`, `⩲`, `⩳`, `⩵`, `⩶`, `⩷`, `⩸`, `⩹`, `⩺`, `⩻`, `⩼`, `⩽`, `⩾`, `⩿`, `⪀`, `⪁`, `⪂`, `⪃`, `⪄`, `⪅`, `⪆`, `⪇`, `⪈`, `⪉`, `⪊`, `⪋`, `⪌`, `⪍`, `⪎`, `⪏`, `⪐`, `⪑`, `⪒`, `⪓`, `⪔`, `⪕`, `⪖`, `⪗`, `⪘`, `⪙`, `⪚`, `⪛`, `⪜`, `⪝`, `⪞`, `⪟`, `⪠`, `⪡`, `⪢`, `⪣`, `⪤`, `⪥`, `⪦`, `⪧`, `⪨`, `⪩`, `⪪`, `⪫`, `⪬`, `⪭`, `⪮`, `⪯`, `⪰`, `⪱`, `⪲`, `⪳`, `⪴`, `⪵`, `⪶`, `⪷`, `⪸`, `⪹`, `⪺`, `⪻`, `⪼`, `⪽`, `⪾`, `⪿`, `⫀`, `⫁`, `⫂`, `⫃`, `⫄`, `⫅`, `⫆`, `⫇`, `⫈`, `⫉`, `⫊`, `⫋`, `⫌`, `⫍`, `⫎`, `⫏`, `⫐`, `⫑`, `⫒`, `⫓`, `⫔`, `⫕`, `⫖`, `⫗`, `⫘`, `⫙`, `⫷`, `⫸`, `⫹`, `⫺`, `⊢`, `⊣`, `⟂`, `<:`, `>:`, `<|`, `|>`, `…`, `⁝`, `⋮`, `⋱`, `⋰`, `⋯`, 
`+`, `-`, `¦`, `|`, `⊕`, `⊖`, `⊞`, `⊟`, `++`, `∪`, `∨`, `⊔`, `±`, `∓`, `∔`, `∸`, `≏`, `⊎`, `⊻`, `⊽`, `⋎`, `⋓`, `⧺`, `⧻`, `⨈`, `⨢`, `⨣`, `⨤`, `⨥`, `⨦`, `⨧`, `⨨`, `⨩`, `⨪`, `⨫`, `⨬`, `⨭`, `⨮`, `⨹`, `⨺`, `⩁`, `⩂`, `⩅`, `⩊`, `⩌`, `⩏`, `⩐`, `⩒`, `⩔`, `⩖`, `⩗`, `⩛`, `⩝`, `⩡`, `⩢`, `⩣`, `*`, `/`, `⌿`, `÷`, `%`, `&`, `⋅`, `∘`, `×`, `\`, `∩`, `∧`, `⊗`, `⊘`, `⊙`, `⊚`, `⊛`, `⊠`, `⊡`, `⊓`, `∗`, `∙`, `∤`, `⅋`, `≀`, `⊼`, `⋄`, `⋆`, `⋇`, `⋉`, `⋊`, `⋋`, `⋌`, `⋏`, `⋒`, `⟑`, `⦸`, `⦼`, `⦾`, `⦿`, `⧶`, `⧷`, `⨇`, `⨰`, `⨱`, `⨲`, `⨳`, `⨴`, `⨵`, `⨶`, `⨷`, `⨸`, `⨻`, `⨼`, `⨽`, `⩀`, `⩃`, `⩄`, `⩋`, `⩍`, `⩎`, `⩑`, `⩓`, `⩕`, `⩘`, `⩚`, `⩜`, `⩞`, `⩟`, `⩠`, `⫛`, `⊍`, `▷`, `⨝`, `⟕`, `⟖`, `⟗`, `⨟`, `//`, `>>`, `<<`, `>>>`, `^`, `↑`, `↓`, `⇵`, `⟰`, `⟱`, `⤈`, `⤉`, `⤊`, `⤋`, `⤒`, `⤓`, `⥉`, `⥌`, `⥍`, `⥏`, `⥑`, `⥔`, `⥕`, `⥘`, `⥙`, `⥜`, `⥝`, `⥠`, `⥡`, `⥣`, `⥥`, `⥮`, `⥯`, `↑`, `↓`, `!`, `¬`, `√`, `∛`, `∜`), NameDecorator, nil}, + {Words(``, `\b`, `baremodule`, `begin`, `break`, `catch`, `ccall`, `const`, `continue`, `do`, `else`, `elseif`, `end`, `export`, `finally`, `for`, `function`, `global`, `if`, `import`, `in`, `isa`, `let`, `local`, `macro`, `module`, `quote`, `return`, `try`, `using`, `where`, `while`), Keyword, nil}, + {Words(``, `\b`, `AbstractArray`, `AbstractChannel`, `AbstractChar`, `AbstractDict`, `AbstractDisplay`, `AbstractFloat`, `AbstractIrrational`, `AbstractMatch`, `AbstractMatrix`, `AbstractPattern`, `AbstractRange`, `AbstractSet`, `AbstractString`, `AbstractUnitRange`, `AbstractVecOrMat`, `AbstractVector`, `Any`, `ArgumentError`, `Array`, `AssertionError`, `BigFloat`, `BigInt`, `BitArray`, `BitMatrix`, `BitSet`, `BitVector`, `Bool`, `BoundsError`, `CapturedException`, `CartesianIndex`, `CartesianIndices`, `Cchar`, `Cdouble`, `Cfloat`, `Channel`, `Char`, `Cint`, `Cintmax_t`, `Clong`, `Clonglong`, `Cmd`, `Colon`, `Complex`, `ComplexF16`, `ComplexF32`, `ComplexF64`, `ComposedFunction`, `CompositeException`, `Condition`, `Cptrdiff_t`, `Cshort`, `Csize_t`, `Cssize_t`, `Cstring`, `Cuchar`, `Cuint`, `Cuintmax_t`, `Culong`, `Culonglong`, `Cushort`, `Cvoid`, `Cwchar_t`, `Cwstring`, `DataType`, `DenseArray`, `DenseMatrix`, `DenseVecOrMat`, `DenseVector`, `Dict`, `DimensionMismatch`, `Dims`, `DivideError`, `DomainError`, `EOFError`, `Enum`, `ErrorException`, `Exception`, `ExponentialBackOff`, `Expr`, `Float16`, `Float32`, `Float64`, `Function`, `GlobalRef`, `HTML`, `IO`, `IOBuffer`, `IOContext`, `IOStream`, `IdDict`, `IndexCartesian`, `IndexLinear`, `IndexStyle`, `InexactError`, `InitError`, `Int`, `Int128`, `Int16`, `Int32`, `Int64`, `Int8`, `Integer`, `InterruptException`, `InvalidStateException`, `Irrational`, `KeyError`, `LinRange`, `LineNumberNode`, `LinearIndices`, `LoadError`, `MIME`, `Matrix`, `Method`, `MethodError`, `Missing`, `MissingException`, `Module`, `NTuple`, `NamedTuple`, `Nothing`, `Number`, `OrdinalRange`, `OutOfMemoryError`, `OverflowError`, `Pair`, `PartialQuickSort`, `PermutedDimsArray`, `Pipe`, `ProcessFailedException`, `Ptr`, `QuoteNode`, `Rational`, `RawFD`, `ReadOnlyMemoryError`, `Real`, `ReentrantLock`, `Ref`, `Regex`, `RegexMatch`, `RoundingMode`, `SegmentationFault`, `Set`, `Signed`, `Some`, `StackOverflowError`, `StepRange`, `StepRangeLen`, `StridedArray`, `StridedMatrix`, `StridedVecOrMat`, `StridedVector`, `String`, `StringIndexError`, `SubArray`, `SubString`, `SubstitutionString`, `Symbol`, `SystemError`, `Task`, `TaskFailedException`, `Text`, `TextDisplay`, `Timer`, `Tuple`, `Type`, `TypeError`, `TypeVar`, `UInt`, `UInt128`, `UInt16`, `UInt32`, `UInt64`, `UInt8`, `UndefInitializer`, 
`UndefKeywordError`, `UndefRefError`, `UndefVarError`, `Union`, `UnionAll`, `UnitRange`, `Unsigned`, `Val`, `Vararg`, `VecElement`, `VecOrMat`, `Vector`, `VersionNumber`, `WeakKeyDict`, `WeakRef`), KeywordType, nil}, + {Words(``, `\b`, `ARGS`, `C_NULL`, `DEPOT_PATH`, `ENDIAN_BOM`, `ENV`, `Inf`, `Inf16`, `Inf32`, `Inf64`, `InsertionSort`, `LOAD_PATH`, `MergeSort`, `NaN`, `NaN16`, `NaN32`, `NaN64`, `PROGRAM_FILE`, `QuickSort`, `RoundDown`, `RoundFromZero`, `RoundNearest`, `RoundNearestTiesAway`, `RoundNearestTiesUp`, `RoundToZero`, `RoundUp`, `VERSION`, `devnull`, `false`, `im`, `missing`, `nothing`, `pi`, `stderr`, `stdin`, `stdout`, `true`, `undef`, `π`, `ℯ`), NameBuiltin, nil}, + {`(?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*)`, Name, nil}, + {`(\d+((_\d+)+)?\.(?!\.)(\d+((_\d+)+)?)?|\.\d+((_\d+)+)?)([eEf][+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`\d+((_\d+)+)?[eEf][+-]?[0-9]+`, LiteralNumberFloat, nil}, + {`0x[a-fA-F0-9]+((_[a-fA-F0-9]+)+)?(\.([a-fA-F0-9]+((_[a-fA-F0-9]+)+)?)?)?p[+-]?\d+`, LiteralNumberFloat, nil}, + {`0b[01]+((_[01]+)+)?`, LiteralNumberBin, nil}, + {`0o[0-7]+((_[0-7]+)+)?`, LiteralNumberOct, nil}, + {`0x[a-fA-F0-9]+((_[a-fA-F0-9]+)+)?`, LiteralNumberHex, nil}, + {`\d+((_\d+)+)?`, LiteralNumberInteger, nil}, + {Words(``, ``, `.`), Operator, nil}, + }, + "blockcomment": { + {`[^=#]`, CommentMultiline, nil}, + {`#=`, CommentMultiline, Push()}, + {`=#`, CommentMultiline, Pop(1)}, + {`[=#]`, CommentMultiline, nil}, + }, + "curly": { + {`\{`, Punctuation, Push()}, + {`\}`, Punctuation, Pop(1)}, + {`(?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*)`, KeywordType, nil}, + Include("root"), + }, + "tqrawstring": { + {`"""`, LiteralString, Pop(1)}, + {`([^"]|"[^"][^"])+`, LiteralString, nil}, + }, + "rawstring": { + {`"`, LiteralString, Pop(1)}, + {`\\"`, LiteralStringEscape, nil}, + {`([^"\\]|\\[^"])+`, LiteralString, nil}, + }, + "interp": { + {`\$(?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*)`, LiteralStringInterpol, nil}, + {`(\$)(\()`, ByGroups(LiteralStringInterpol, Punctuation), Push("in-intp")}, + }, + "in-intp": { + {`\(`, Punctuation, Push()}, + {`\)`, Punctuation, Pop(1)}, + Include("root"), + }, + "string": { + {`(")((?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*)|\d+)?`, ByGroups(LiteralString, LiteralStringAffix), Pop(1)}, + {`\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)`, LiteralStringEscape, nil}, + Include("interp"), + {`%[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]`, LiteralStringInterpol, nil}, + {`[^"$%\\]+`, LiteralString, nil}, + {`.`, LiteralString, nil}, + }, + "tqstring": { + {`(""")((?:[a-zA-Z_¡-􏿿][a-zA-Z_0-9!¡-􏿿]*)|\d+)?`, ByGroups(LiteralString, LiteralStringAffix), Pop(1)}, + {`\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)`, LiteralStringEscape, nil}, + Include("interp"), + {`[^"$%\\]+`, LiteralString, nil}, + {`.`, LiteralString, nil}, + }, + "regex": { + {`(")([imsxa]*)?`, ByGroups(LiteralStringRegex, LiteralStringAffix), Pop(1)}, + {`\\"`, LiteralStringRegex, nil}, + {`[^\\"]+`, LiteralStringRegex, nil}, + }, + "tqregex": { + {`(""")([imsxa]*)?`, ByGroups(LiteralStringRegex, LiteralStringAffix), Pop(1)}, + {`[^"]+`, LiteralStringRegex, nil}, + }, + "command": { + {"(`)((?:[a-zA-Z_\u00a1-\U0010ffff][a-zA-Z_0-9!\u00a1-\U0010ffff]*)|\\d+)?", ByGroups(LiteralStringBacktick, LiteralStringAffix), Pop(1)}, + {"\\\\[`$]", LiteralStringEscape, nil}, + Include("interp"), + {"[^\\\\`$]+", LiteralStringBacktick, nil}, + {`.`, LiteralStringBacktick, nil}, + }, + "tqcommand": { + {"(```)((?:[a-zA-Z_\u00a1-\U0010ffff][a-zA-Z_0-9!\u00a1-\U0010ffff]*)|\\d+)?", ByGroups(LiteralStringBacktick, 
LiteralStringAffix), Pop(1)}, + {`\\\$`, LiteralStringEscape, nil}, + Include("interp"), + {"[^\\\\`$]+", LiteralStringBacktick, nil}, + {`.`, LiteralStringBacktick, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/j/jungle.go b/vendor/github.com/alecthomas/chroma/lexers/j/jungle.go new file mode 100644 index 000000000..5dbda9fba --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/j/jungle.go @@ -0,0 +1,54 @@ +package j + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +var Jungle = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Jungle", + Aliases: []string{"jungle"}, + Filenames: []string{"*.jungle"}, + MimeTypes: []string{"text/x-jungle"}, + }, + jungleRules, +)) + +func jungleRules() Rules { + return Rules{ + "root": { + {`[^\S\n]+`, Text, nil}, + {`\n`, Text, nil}, + {`#(\n|[\w\W]*?[^#]\n)`, CommentSingle, nil}, + {`^(?=\S)`, None, Push("instruction")}, + {`[\.;\[\]\(\)\$]`, Punctuation, nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + }, + "instruction": { + {`[^\S\n]+`, Text, nil}, + {`=`, Operator, Push("value")}, + {`(?=\S)`, None, Push("var")}, + Default(Pop(1)), + }, + "value": { + {`[^\S\n]+`, Text, nil}, + {`\$\(`, Punctuation, Push("var")}, + {`[;\[\]\(\)\$]`, Punctuation, nil}, + {`#(\n|[\w\W]*?[^#]\n)`, CommentSingle, nil}, + {`[\w_\-\.\/\\]+`, Text, nil}, + Default(Pop(1)), + }, + "var": { + {`[^\S\n]+`, Text, nil}, + {`\b(((re)?source|barrel)Path|excludeAnnotations|annotations|lang)\b`, NameBuiltin, nil}, + {`\bbase\b`, NameConstant, nil}, + {`\b(ind|zsm|hrv|ces|dan|dut|eng|fin|fre|deu|gre|hun|ita|nob|po[lr]|rus|sl[ov]|spa|swe|ara|heb|zh[st]|jpn|kor|tha|vie|bul|tur)`, NameConstant, nil}, + {`\b((semi)?round|rectangle)(-\d+x\d+)?\b`, NameConstant, nil}, + {`[\.;\[\]\(\$]`, Punctuation, nil}, + {`\)`, Punctuation, Pop(1)}, + {`[a-zA-Z_]\w*`, Name, nil}, + Default(Pop(1)), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/k/kotlin.go b/vendor/github.com/alecthomas/chroma/lexers/k/kotlin.go new file mode 100644 index 000000000..deb182e9b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/k/kotlin.go @@ -0,0 +1,98 @@ +package k + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Kotlin lexer. 
+var Kotlin = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Kotlin", + Aliases: []string{"kotlin"}, + Filenames: []string{"*.kt"}, + MimeTypes: []string{"text/x-kotlin"}, + DotAll: true, + }, + kotlinRules, +)) + +func kotlinRules() Rules { + const kotlinIdentifier = "(?:[_\\p{L}][\\p{L}\\p{N}]*|`@?[_\\p{L}][\\p{L}\\p{N}]+`)" + + return Rules{ + "root": { + {`^\s*\[.*?\]`, NameAttribute, nil}, + {`[^\S\n]+`, Text, nil}, + {`\\\n`, Text, nil}, + {`//[^\n]*\n?`, CommentSingle, nil}, + {`/[*].*?[*]/`, CommentMultiline, nil}, + {`\n`, Text, nil}, + {`!==|!in|!is|===`, Operator, nil}, + {`%=|&&|\*=|\+\+|\+=|--|-=|->|\.\.|\/=|::|<=|==|>=|!!|!=|\|\||\?[:.]`, Operator, nil}, + {`[~!%^&*()+=|\[\]:;,.<>\/?-]`, Punctuation, nil}, + {`[{}]`, Punctuation, nil}, + {`"""`, LiteralString, Push("rawstring")}, + {`"`, LiteralStringDouble, Push("string")}, + {`(')(\\u[0-9a-fA-F]{4})(')`, ByGroups(LiteralStringChar, LiteralStringEscape, LiteralStringChar), nil}, + {`'\\.'|'[^\\]'`, LiteralStringChar, nil}, + {`0[xX][0-9a-fA-F]+[Uu]?[Ll]?|[0-9]+(\.[0-9]*)?([eE][+-][0-9]+)?[fF]?[Uu]?[Ll]?`, LiteralNumber, nil}, + {`(companion)(\s+)(object)`, ByGroups(Keyword, Text, Keyword), nil}, + {`(class|interface|object)(\s+)`, ByGroups(Keyword, Text), Push("class")}, + {`(package|import)(\s+)`, ByGroups(Keyword, Text), Push("package")}, + {`(val|var)(\s+)`, ByGroups(Keyword, Text), Push("property")}, + {`(fun)(\s+)`, ByGroups(Keyword, Text), Push("function")}, + {`(abstract|actual|annotation|as|as\?|break|by|catch|class|companion|const|constructor|continue|crossinline|data|delegate|do|dynamic|else|enum|expect|external|false|field|file|final|finally|for|fun|get|if|import|in|infix|init|inline|inner|interface|internal|is|it|lateinit|noinline|null|object|open|operator|out|override|package|param|private|property|protected|public|receiver|reified|return|sealed|set|setparam|super|suspend|tailrec|this|throw|true|try|typealias|typeof|val|var|vararg|when|where|while)\b`, Keyword, nil}, + {`@` + kotlinIdentifier, NameDecorator, nil}, + {kotlinIdentifier, Name, nil}, + }, + "package": { + {`\S+`, NameNamespace, Pop(1)}, + }, + "class": { + // \x60 is the back tick character (`) + {`\x60[^\x60]+?\x60`, NameClass, Pop(1)}, + {kotlinIdentifier, NameClass, Pop(1)}, + }, + "property": { + {`\x60[^\x60]+?\x60`, NameProperty, Pop(1)}, + {kotlinIdentifier, NameProperty, Pop(1)}, + }, + "generics-specification": { + {`<`, Punctuation, Push("generics-specification")}, // required for generics inside generics e.g. 
> + {`>`, Punctuation, Pop(1)}, + {`[,:*?]`, Punctuation, nil}, + {`(in|out|reified)`, Keyword, nil}, + {`\x60[^\x60]+?\x60`, NameClass, nil}, + {kotlinIdentifier, NameClass, nil}, + {`\s+`, Text, nil}, + }, + "function": { + {`<`, Punctuation, Push("generics-specification")}, + {`\x60[^\x60]+?\x60`, NameFunction, Pop(1)}, + {kotlinIdentifier, NameFunction, Pop(1)}, + {`\s+`, Text, nil}, + }, + "rawstring": { + // raw strings don't allow character escaping + {`"""`, LiteralString, Pop(1)}, + {`(?:[^$"]+|\"{1,2}[^"])+`, LiteralString, nil}, + Include("string-interpol"), + // remaining dollar signs are just a string + {`\$`, LiteralString, nil}, + }, + "string": { + {`\\[tbnr'"\\\$]`, LiteralStringEscape, nil}, + {`\\u[0-9a-fA-F]{4}`, LiteralStringEscape, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + Include("string-interpol"), + {`[^\n\\"$]+`, LiteralStringDouble, nil}, + // remaining dollar signs are just a string + {`\$`, LiteralStringDouble, nil}, + }, + "string-interpol": { + {`\$` + kotlinIdentifier, LiteralStringInterpol, nil}, + {`\${[^}\n]*}`, LiteralStringInterpol, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/l/lighttpd.go b/vendor/github.com/alecthomas/chroma/lexers/l/lighttpd.go new file mode 100644 index 000000000..6d83298b6 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/l/lighttpd.go @@ -0,0 +1,34 @@ +package l + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Lighttpd Configuration File lexer. +var Lighttpd = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Lighttpd configuration file", + Aliases: []string{"lighty", "lighttpd"}, + Filenames: []string{}, + MimeTypes: []string{"text/x-lighttpd-conf"}, + }, + lighttpdRules, +)) + +func lighttpdRules() Rules { + return Rules{ + "root": { + {`#.*\n`, CommentSingle, nil}, + {`/\S*`, Name, nil}, + {`[a-zA-Z._-]+`, Keyword, nil}, + {`\d+\.\d+\.\d+\.\d+(?:/\d+)?`, LiteralNumber, nil}, + {`[0-9]+`, LiteralNumber, nil}, + {`=>|=~|\+=|==|=|\+`, Operator, nil}, + {`\$[A-Z]+`, NameBuiltin, nil}, + {`[(){}\[\],]`, Punctuation, nil}, + {`"([^"\\]*(?:\\.[^"\\]*)*)"`, LiteralStringDouble, nil}, + {`\s+`, Text, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/l/llvm.go b/vendor/github.com/alecthomas/chroma/lexers/l/llvm.go new file mode 100644 index 000000000..e9ea319e3 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/l/llvm.go @@ -0,0 +1,47 @@ +package l + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Llvm lexer. 
+var Llvm = internal.Register(MustNewLazyLexer( + &Config{ + Name: "LLVM", + Aliases: []string{"llvm"}, + Filenames: []string{"*.ll"}, + MimeTypes: []string{"text/x-llvm"}, + }, + llvmRules, +)) + +func llvmRules() Rules { + return Rules{ + "root": { + Include("whitespace"), + {`([-a-zA-Z$._][\w\-$.]*|"[^"]*?")\s*:`, NameLabel, nil}, + Include("keyword"), + {`%([-a-zA-Z$._][\w\-$.]*|"[^"]*?")`, NameVariable, nil}, + {`@([-a-zA-Z$._][\w\-$.]*|"[^"]*?")`, NameVariableGlobal, nil}, + {`%\d+`, NameVariableAnonymous, nil}, + {`@\d+`, NameVariableGlobal, nil}, + {`#\d+`, NameVariableGlobal, nil}, + {`!([-a-zA-Z$._][\w\-$.]*|"[^"]*?")`, NameVariable, nil}, + {`!\d+`, NameVariableAnonymous, nil}, + {`c?"[^"]*?"`, LiteralString, nil}, + {`0[xX][a-fA-F0-9]+`, LiteralNumber, nil}, + {`-?\d+(?:[.]\d+)?(?:[eE][-+]?\d+(?:[.]\d+)?)?`, LiteralNumber, nil}, + {`[=<>{}\[\]()*.,!]|x\b`, Punctuation, nil}, + }, + "whitespace": { + {`(\n|\s)+`, Text, nil}, + {`;.*?\n`, Comment, nil}, + }, + "keyword": { + {Words(``, `\b`, `begin`, `end`, `true`, `false`, `declare`, `define`, `global`, `constant`, `private`, `linker_private`, `internal`, `available_externally`, `linkonce`, `linkonce_odr`, `weak`, `weak_odr`, `appending`, `dllimport`, `dllexport`, `common`, `default`, `hidden`, `protected`, `extern_weak`, `external`, `thread_local`, `zeroinitializer`, `undef`, `null`, `to`, `tail`, `target`, `triple`, `datalayout`, `volatile`, `nuw`, `nsw`, `nnan`, `ninf`, `nsz`, `arcp`, `fast`, `exact`, `inbounds`, `align`, `addrspace`, `section`, `alias`, `module`, `asm`, `sideeffect`, `gc`, `dbg`, `linker_private_weak`, `attributes`, `blockaddress`, `initialexec`, `localdynamic`, `localexec`, `prefix`, `unnamed_addr`, `ccc`, `fastcc`, `coldcc`, `x86_stdcallcc`, `x86_fastcallcc`, `arm_apcscc`, `arm_aapcscc`, `arm_aapcs_vfpcc`, `ptx_device`, `ptx_kernel`, `intel_ocl_bicc`, `msp430_intrcc`, `spir_func`, `spir_kernel`, `x86_64_sysvcc`, `x86_64_win64cc`, `x86_thiscallcc`, `cc`, `c`, `signext`, `zeroext`, `inreg`, `sret`, `nounwind`, `noreturn`, `noalias`, `nocapture`, `byval`, `nest`, `readnone`, `readonly`, `inlinehint`, `noinline`, `alwaysinline`, `optsize`, `ssp`, `sspreq`, `noredzone`, `noimplicitfloat`, `naked`, `builtin`, `cold`, `nobuiltin`, `noduplicate`, `nonlazybind`, `optnone`, `returns_twice`, `sanitize_address`, `sanitize_memory`, `sanitize_thread`, `sspstrong`, `uwtable`, `returned`, `type`, `opaque`, `eq`, `ne`, `slt`, `sgt`, `sle`, `sge`, `ult`, `ugt`, `ule`, `uge`, `oeq`, `one`, `olt`, `ogt`, `ole`, `oge`, `ord`, `uno`, `ueq`, `une`, `x`, `acq_rel`, `acquire`, `alignstack`, `atomic`, `catch`, `cleanup`, `filter`, `inteldialect`, `max`, `min`, `monotonic`, `nand`, `personality`, `release`, `seq_cst`, `singlethread`, `umax`, `umin`, `unordered`, `xchg`, `add`, `fadd`, `sub`, `fsub`, `mul`, `fmul`, `udiv`, `sdiv`, `fdiv`, `urem`, `srem`, `frem`, `shl`, `lshr`, `ashr`, `and`, `or`, `xor`, `icmp`, `fcmp`, `phi`, `call`, `trunc`, `zext`, `sext`, `fptrunc`, `fpext`, `uitofp`, `sitofp`, `fptoui`, `fptosi`, `inttoptr`, `ptrtoint`, `bitcast`, `addrspacecast`, `select`, `va_arg`, `ret`, `br`, `switch`, `invoke`, `unwind`, `unreachable`, `indirectbr`, `landingpad`, `resume`, `malloc`, `alloca`, `free`, `load`, `store`, `getelementptr`, `extractelement`, `insertelement`, `shufflevector`, `getresult`, `extractvalue`, `insertvalue`, `atomicrmw`, `cmpxchg`, `fence`, `allocsize`, `amdgpu_cs`, `amdgpu_gs`, `amdgpu_kernel`, `amdgpu_ps`, `amdgpu_vs`, `any`, `anyregcc`, `argmemonly`, `avr_intrcc`, `avr_signalcc`, `caller`, `catchpad`, 
`catchret`, `catchswitch`, `cleanuppad`, `cleanupret`, `comdat`, `convergent`, `cxx_fast_tlscc`, `deplibs`, `dereferenceable`, `dereferenceable_or_null`, `distinct`, `exactmatch`, `externally_initialized`, `from`, `ghccc`, `hhvm_ccc`, `hhvmcc`, `ifunc`, `inaccessiblemem_or_argmemonly`, `inaccessiblememonly`, `inalloca`, `jumptable`, `largest`, `local_unnamed_addr`, `minsize`, `musttail`, `noduplicates`, `none`, `nonnull`, `norecurse`, `notail`, `preserve_allcc`, `preserve_mostcc`, `prologue`, `safestack`, `samesize`, `source_filename`, `swiftcc`, `swifterror`, `swiftself`, `webkit_jscc`, `within`, `writeonly`, `x86_intrcc`, `x86_vectorcallcc`), Keyword, nil}, + {Words(``, ``, `void`, `half`, `float`, `double`, `x86_fp80`, `fp128`, `ppc_fp128`, `label`, `metadata`, `token`), KeywordType, nil}, + {`i[1-9]\d*`, Keyword, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/l/lua.go b/vendor/github.com/alecthomas/chroma/lexers/l/lua.go new file mode 100644 index 000000000..db574a1b9 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/l/lua.go @@ -0,0 +1,79 @@ +package l + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Lua lexer. +var Lua = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Lua", + Aliases: []string{"lua"}, + Filenames: []string{"*.lua", "*.wlua"}, + MimeTypes: []string{"text/x-lua", "application/x-lua"}, + }, + luaRules, +)) + +func luaRules() Rules { + return Rules{ + "root": { + {`#!.*`, CommentPreproc, nil}, + Default(Push("base")), + }, + "ws": { + {`(?:--\[(=*)\[[\w\W]*?\](\1)\])`, CommentMultiline, nil}, + {`(?:--.*$)`, CommentSingle, nil}, + {`(?:\s+)`, Text, nil}, + }, + "base": { + Include("ws"), + {`(?i)0x[\da-f]*(\.[\da-f]*)?(p[+-]?\d+)?`, LiteralNumberHex, nil}, + {`(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?`, LiteralNumberFloat, nil}, + {`(?i)\d+e[+-]?\d+`, LiteralNumberFloat, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`(?s)\[(=*)\[.*?\]\1\]`, LiteralString, nil}, + {`::`, Punctuation, Push("label")}, + {`\.{3}`, Punctuation, nil}, + {`[=<>|~&+\-*/%#^]+|\.\.`, Operator, nil}, + {`[\[\]{}().,:;]`, Punctuation, nil}, + {`(and|or|not)\b`, OperatorWord, nil}, + {`(break|do|else|elseif|end|for|if|in|repeat|return|then|until|while)\b`, KeywordReserved, nil}, + {`goto\b`, KeywordReserved, Push("goto")}, + {`(local)\b`, KeywordDeclaration, nil}, + {`(true|false|nil)\b`, KeywordConstant, nil}, + {`(function)\b`, KeywordReserved, Push("funcname")}, + {`[A-Za-z_]\w*(\.[A-Za-z_]\w*)?`, Name, nil}, + {`'`, LiteralStringSingle, Combined("stringescape", "sqs")}, + {`"`, LiteralStringDouble, Combined("stringescape", "dqs")}, + }, + "funcname": { + Include("ws"), + {`[.:]`, Punctuation, nil}, + {`(?:[^\W\d]\w*)(?=(?:(?:--\[(=*)\[[\w\W]*?\](\2)\])|(?:--.*$)|(?:\s+))*[.:])`, NameClass, nil}, + {`(?:[^\W\d]\w*)`, NameFunction, Pop(1)}, + {`\(`, Punctuation, Pop(1)}, + }, + "goto": { + Include("ws"), + {`(?:[^\W\d]\w*)`, NameLabel, Pop(1)}, + }, + "label": { + Include("ws"), + {`::`, Punctuation, Pop(1)}, + {`(?:[^\W\d]\w*)`, NameLabel, nil}, + }, + "stringescape": { + {`\\([abfnrtv\\"\']|[\r\n]{1,2}|z\s*|x[0-9a-fA-F]{2}|\d{1,3}|u\{[0-9a-fA-F]+\})`, LiteralStringEscape, nil}, + }, + "sqs": { + {`'`, LiteralStringSingle, Pop(1)}, + {`[^\\']+`, LiteralStringSingle, nil}, + }, + "dqs": { + {`"`, LiteralStringDouble, Pop(1)}, + {`[^\\"]+`, LiteralStringDouble, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/lexers.go 
b/vendor/github.com/alecthomas/chroma/lexers/lexers.go new file mode 100644 index 000000000..2b429212e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/lexers.go @@ -0,0 +1,60 @@ +// Package lexers contains the registry of all lexers. +// +// Sub-packages contain lexer implementations. +package lexers + +// nolint +import ( + "github.com/alecthomas/chroma" + _ "github.com/alecthomas/chroma/lexers/a" + _ "github.com/alecthomas/chroma/lexers/b" + _ "github.com/alecthomas/chroma/lexers/c" + _ "github.com/alecthomas/chroma/lexers/circular" + _ "github.com/alecthomas/chroma/lexers/d" + _ "github.com/alecthomas/chroma/lexers/e" + _ "github.com/alecthomas/chroma/lexers/f" + _ "github.com/alecthomas/chroma/lexers/g" + _ "github.com/alecthomas/chroma/lexers/h" + _ "github.com/alecthomas/chroma/lexers/i" + "github.com/alecthomas/chroma/lexers/internal" + _ "github.com/alecthomas/chroma/lexers/j" + _ "github.com/alecthomas/chroma/lexers/k" + _ "github.com/alecthomas/chroma/lexers/l" + _ "github.com/alecthomas/chroma/lexers/m" + _ "github.com/alecthomas/chroma/lexers/n" + _ "github.com/alecthomas/chroma/lexers/o" + _ "github.com/alecthomas/chroma/lexers/p" + _ "github.com/alecthomas/chroma/lexers/q" + _ "github.com/alecthomas/chroma/lexers/r" + _ "github.com/alecthomas/chroma/lexers/s" + _ "github.com/alecthomas/chroma/lexers/t" + _ "github.com/alecthomas/chroma/lexers/v" + _ "github.com/alecthomas/chroma/lexers/w" + _ "github.com/alecthomas/chroma/lexers/x" + _ "github.com/alecthomas/chroma/lexers/y" + _ "github.com/alecthomas/chroma/lexers/z" +) + +// Registry of Lexers. +var Registry = internal.Registry + +// Names of all lexers, optionally including aliases. +func Names(withAliases bool) []string { return internal.Names(withAliases) } + +// Get a Lexer by name, alias or file extension. +func Get(name string) chroma.Lexer { return internal.Get(name) } + +// MatchMimeType attempts to find a lexer for the given MIME type. +func MatchMimeType(mimeType string) chroma.Lexer { return internal.MatchMimeType(mimeType) } + +// Match returns the first lexer matching filename. +func Match(filename string) chroma.Lexer { return internal.Match(filename) } + +// Analyse text content and return the "best" lexer.. +func Analyse(text string) chroma.Lexer { return internal.Analyse(text) } + +// Register a Lexer with the global registry. +func Register(lexer chroma.Lexer) chroma.Lexer { return internal.Register(lexer) } + +// Fallback lexer if no other is found. +var Fallback = internal.Fallback diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/make.go b/vendor/github.com/alecthomas/chroma/lexers/m/make.go new file mode 100644 index 000000000..905491a64 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/m/make.go @@ -0,0 +1,58 @@ +package m + +import ( + . "github.com/alecthomas/chroma" // nolint + . "github.com/alecthomas/chroma/lexers/b" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Makefile lexer. 
+var Makefile = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Base Makefile", + Aliases: []string{"make", "makefile", "mf", "bsdmake"}, + Filenames: []string{"*.mak", "*.mk", "Makefile", "makefile", "Makefile.*", "GNUmakefile"}, + MimeTypes: []string{"text/x-makefile"}, + EnsureNL: true, + }, + makefileRules, +)) + +func makefileRules() Rules { + return Rules{ + "root": { + {`^(?:[\t ]+.*\n|\n)+`, Using(Bash), nil}, + {`\$[<@$+%?|*]`, Keyword, nil}, + {`\s+`, Text, nil}, + {`#.*?\n`, Comment, nil}, + {`(export)(\s+)(?=[\w${}\t -]+\n)`, ByGroups(Keyword, Text), Push("export")}, + {`export\s+`, Keyword, nil}, + {`([\w${}().-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)`, ByGroups(NameVariable, Text, Operator, Text, Using(Bash)), nil}, + {`(?s)"(\\\\|\\.|[^"\\])*"`, LiteralStringDouble, nil}, + {`(?s)'(\\\\|\\.|[^'\\])*'`, LiteralStringSingle, nil}, + {`([^\n:]+)(:+)([ \t]*)`, ByGroups(NameFunction, Operator, Text), Push("block-header")}, + {`\$\(`, Keyword, Push("expansion")}, + }, + "expansion": { + {`[^$a-zA-Z_()]+`, Text, nil}, + {`[a-zA-Z_]+`, NameVariable, nil}, + {`\$`, Keyword, nil}, + {`\(`, Keyword, Push()}, + {`\)`, Keyword, Pop(1)}, + }, + "export": { + {`[\w${}-]+`, NameVariable, nil}, + {`\n`, Text, Pop(1)}, + {`\s+`, Text, nil}, + }, + "block-header": { + {`[,|]`, Punctuation, nil}, + {`#.*?\n`, Comment, Pop(1)}, + {`\\\n`, Text, nil}, + {`\$\(`, Keyword, Push("expansion")}, + {`[a-zA-Z_]+`, Name, nil}, + {`\n`, Text, Pop(1)}, + {`.`, Text, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/mako.go b/vendor/github.com/alecthomas/chroma/lexers/m/mako.go new file mode 100644 index 000000000..6f777dd58 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/m/mako.go @@ -0,0 +1,64 @@ +package m + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" + . "github.com/alecthomas/chroma/lexers/p" // nolint +) + +// Mako lexer. +var Mako = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Mako", + Aliases: []string{"mako"}, + Filenames: []string{"*.mao"}, + MimeTypes: []string{"application/x-mako"}, + }, + makoRules, +)) + +func makoRules() Rules { + return Rules{ + "root": { + {`(\s*)(%)(\s*end(?:\w+))(\n|\Z)`, ByGroups(Text, CommentPreproc, Keyword, Other), nil}, + {`(\s*)(%)([^\n]*)(\n|\Z)`, ByGroups(Text, CommentPreproc, Using(Python), Other), nil}, + {`(\s*)(##[^\n]*)(\n|\Z)`, ByGroups(Text, CommentPreproc, Other), nil}, + {`(?s)<%doc>.*?</%doc>`, CommentPreproc, nil}, + {`(<%)([\w.:]+)`, ByGroups(CommentPreproc, NameBuiltin), Push("tag")}, + {`(</%)([\w.:]+)(>)`, ByGroups(CommentPreproc, NameBuiltin, CommentPreproc), nil}, + {`<%(?=([\w.:]+))`, CommentPreproc, Push("ondeftags")}, + {`(<%(?:!?))(.*?)(%>)(?s)`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil}, + {`(\$\{)(.*?)(\})`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil}, + {`(?sx) + (.+?) # anything, followed by: + (?: + (?<=\n)(?=%|\#\#) | # an eval or comment line + (?=\#\*) | # multiline comment + (?=</?%) | # a python block + # call start or end + (?=\$\{) | # a substitution + (?<=\n)(?=\s*%) | + # - don't consume + (\\\n) | # an escaped newline + \Z # end of string + ) + `, ByGroups(Other, Operator), nil}, + {`\s+`, Text, nil}, + }, + "ondeftags": { + {`<%`, CommentPreproc, nil}, + {`(?<=<%)(include|inherit|namespace|page)`, NameBuiltin, nil}, + Include("tag"), + }, + "tag": { + {`((?:\w+)\s*=)(\s*)(".*?")`, ByGroups(NameAttribute, Text, LiteralString), nil}, + {`/?\s*>`, CommentPreproc, Pop(1)}, + {`\s+`, Text, nil}, + }, + "attr": { + {`".*?"`, LiteralString, Pop(1)}, + {`'.*?'`, LiteralString, Pop(1)}, + {`[^\s>]+`, LiteralString, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/markdown.go b/vendor/github.com/alecthomas/chroma/lexers/m/markdown.go new file mode 100644 index 000000000..e50e4702e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/m/markdown.go @@ -0,0 +1,53 @@ +package m + +import ( + . 
"github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/h" + "github.com/alecthomas/chroma/lexers/internal" +) + +// Markdown lexer. +var Markdown = internal.Register(DelegatingLexer(h.HTML, MustNewLazyLexer( + &Config{ + Name: "markdown", + Aliases: []string{"md", "mkd"}, + Filenames: []string{"*.md", "*.mkd", "*.markdown"}, + MimeTypes: []string{"text/x-markdown"}, + }, + markdownRules, +))) + +func markdownRules() Rules { + return Rules{ + "root": { + {`^(#[^#].+\n)`, ByGroups(GenericHeading), nil}, + {`^(#{2,6}.+\n)`, ByGroups(GenericSubheading), nil}, + {`^(\s*)([*-] )(\[[ xX]\])( .+\n)`, ByGroups(Text, Keyword, Keyword, UsingSelf("inline")), nil}, + {`^(\s*)([*-])(\s)(.+\n)`, ByGroups(Text, Keyword, Text, UsingSelf("inline")), nil}, + {`^(\s*)([0-9]+\.)( .+\n)`, ByGroups(Text, Keyword, UsingSelf("inline")), nil}, + {`^(\s*>\s)(.+\n)`, ByGroups(Keyword, GenericEmph), nil}, + {"^(```\\n)([\\w\\W]*?)(^```$)", ByGroups(String, Text, String), nil}, + { + "^(```)(\\w+)(\\n)([\\w\\W]*?)(^```$)", + UsingByGroup( + internal.Get, + 2, 4, + String, String, String, Text, String, + ), + nil, + }, + Include("inline"), + }, + "inline": { + {`\\.`, Text, nil}, + {`(\s)(\*|_)((?:(?!\2).)*)(\2)((?=\W|\n))`, ByGroups(Text, GenericEmph, GenericEmph, GenericEmph, Text), nil}, + {`(\s)((\*\*|__).*?)\3((?=\W|\n))`, ByGroups(Text, GenericStrong, GenericStrong, Text), nil}, + {`(\s)(~~[^~]+~~)((?=\W|\n))`, ByGroups(Text, GenericDeleted, Text), nil}, + {"`[^`]+`", LiteralStringBacktick, nil}, + {`[@#][\w/:]+`, NameEntity, nil}, + {`(!?\[)([^]]+)(\])(\()([^)]+)(\))`, ByGroups(Text, NameTag, Text, Text, NameAttribute, Text), nil}, + {`[^\\\s]+`, Other, nil}, + {`.|\n`, Other, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/mason.go b/vendor/github.com/alecthomas/chroma/lexers/m/mason.go new file mode 100644 index 000000000..bc48f5caa --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/m/mason.go @@ -0,0 +1,47 @@ +package m + +import ( + . "github.com/alecthomas/chroma" // nolint + . "github.com/alecthomas/chroma/lexers/h" // nolint + "github.com/alecthomas/chroma/lexers/internal" + . "github.com/alecthomas/chroma/lexers/p" // nolint +) + +// Mason lexer. +var Mason = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Mason", + Aliases: []string{"mason"}, + Filenames: []string{"*.m", "*.mhtml", "*.mc", "*.mi", "autohandler", "dhandler"}, + MimeTypes: []string{"application/x-mason"}, + Priority: 0.1, + }, + masonRules, +)) + +func masonRules() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`(<%doc>)(.*?)(</%doc>)(?s)`, ByGroups(NameTag, CommentMultiline, NameTag), nil}, + {`(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)`, ByGroups(NameTag, Text, NameFunction, NameTag, UsingSelf("root"), NameTag), nil}, + {`(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)(?s)`, ByGroups(NameTag, NameFunction, NameTag, Using(Perl), NameTag), nil}, + {`(<&[^|])(.*?)(,.*?)?(&>)(?s)`, ByGroups(NameTag, NameFunction, Using(Perl), NameTag), nil}, + {`(<&\|)(.*?)(,.*?)?(&>)(?s)`, ByGroups(NameTag, NameFunction, Using(Perl), NameTag), nil}, + {`</&>`, NameTag, nil}, + {`(<%!?)(.*?)(%>)(?s)`, ByGroups(NameTag, Using(Perl), NameTag), nil}, + {`(?<=^)#[^\n]*(\n|\Z)`, Comment, nil}, + {`(?<=^)(%)([^\n]*)(\n|\Z)`, ByGroups(NameTag, Using(Perl), Other), nil}, + {`(?sx) + (.+?) 
# anything, followed by: + (?: + (?<=\n)(?=[%#]) | # an eval or comment line + (?=`, `:>`, `/.`, `+`, `-`, `*`, `/`, `^`, `&&`, `||`, `!`, `<>`, `|`, `/;`, `?`, `@`, `//`, `/@`, `@@`, `@@@`, `~~`, `===`, `&`, `<`, `>`, `<=`, `>=`), Operator, nil}, + {Words(``, ``, `,`, `;`, `(`, `)`, `[`, `]`, `{`, `}`), Punctuation, nil}, + {`".*?"`, LiteralString, nil}, + {`\s+`, TextWhitespace, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/matlab.go b/vendor/github.com/alecthomas/chroma/lexers/m/matlab.go new file mode 100644 index 000000000..4e98d6977 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/m/matlab.go @@ -0,0 +1,55 @@ +package m + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Matlab lexer. +var Matlab = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Matlab", + Aliases: []string{"matlab"}, + Filenames: []string{"*.m"}, + MimeTypes: []string{"text/matlab"}, + }, + matlabRules, +)) + +func matlabRules() Rules { + return Rules{ + "root": { + {`\n`, Text, nil}, + {`^!.*`, LiteralStringOther, nil}, + {`%\{\s*\n`, CommentMultiline, Push("blockcomment")}, + {`%.*$`, Comment, nil}, + {`^\s*function`, Keyword, Push("deffunc")}, + {Words(``, `\b`, `break`, `case`, `catch`, `classdef`, `continue`, `else`, `elseif`, `end`, `enumerated`, `events`, `for`, `function`, `global`, `if`, `methods`, `otherwise`, `parfor`, `persistent`, `properties`, `return`, `spmd`, `switch`, `try`, `while`), Keyword, nil}, + {`(sin|sind|sinh|asin|asind|asinh|cos|cosd|cosh|acos|acosd|acosh|tan|tand|tanh|atan|atand|atan2|atanh|sec|secd|sech|asec|asecd|asech|csc|cscd|csch|acsc|acscd|acsch|cot|cotd|coth|acot|acotd|acoth|hypot|exp|expm1|log|log1p|log10|log2|pow2|realpow|reallog|realsqrt|sqrt|nthroot|nextpow2|abs|angle|complex|conj|imag|real|unwrap|isreal|cplxpair|fix|floor|ceil|round|mod|rem|sign|airy|besselj|bessely|besselh|besseli|besselk|beta|betainc|betaln|ellipj|ellipke|erf|erfc|erfcx|erfinv|expint|gamma|gammainc|gammaln|psi|legendre|cross|dot|factor|isprime|primes|gcd|lcm|rat|rats|perms|nchoosek|factorial|cart2sph|cart2pol|pol2cart|sph2cart|hsv2rgb|rgb2hsv|zeros|ones|eye|repmat|rand|randn|linspace|logspace|freqspace|meshgrid|accumarray|size|length|ndims|numel|disp|isempty|isequal|isequalwithequalnans|cat|reshape|diag|blkdiag|tril|triu|fliplr|flipud|flipdim|rot90|find|end|sub2ind|ind2sub|bsxfun|ndgrid|permute|ipermute|shiftdim|circshift|squeeze|isscalar|isvector|ans|eps|realmax|realmin|pi|i|inf|nan|isnan|isinf|isfinite|j|why|compan|gallery|hadamard|hankel|hilb|invhilb|magic|pascal|rosser|toeplitz|vander|wilkinson)\b`, NameBuiltin, nil}, + {`\.\.\..*$`, Comment, nil}, + {`-|==|~=|<|>|<=|>=|&&|&|~|\|\|?`, Operator, nil}, + {`\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\`, Operator, nil}, + {`\[|\]|\(|\)|\{|\}|:|@|\.|,`, Punctuation, nil}, + {`=|:|;`, Punctuation, nil}, + {`(?<=[\w)\].])\'+`, Operator, nil}, + {`(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`\d+[eEf][+-]?[0-9]+`, LiteralNumberFloat, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`(?=]|<=|>=)`, Punctuation, nil}, + Include("simplevalue"), + {`\s+`, TextWhitespace, nil}, + }, + } + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/meson.go b/vendor/github.com/alecthomas/chroma/lexers/m/meson.go new file mode 100644 index 000000000..2a6a22f89 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/m/meson.go @@ -0,0 +1,51 @@ +package m + +import ( + . 
"github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Meson lexer. +var Meson = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Meson", + Aliases: []string{"meson", "meson.build"}, + Filenames: []string{"meson.build", "meson_options.txt"}, + MimeTypes: []string{"text/x-meson"}, + }, + func() Rules { + return Rules{ + "root": { + {`#.*?$`, Comment, nil}, + {`'''.*'''`, LiteralStringSingle, nil}, + {`[1-9][0-9]*`, LiteralNumberInteger, nil}, + {`0o[0-7]+`, LiteralNumberOct, nil}, + {`0x[a-fA-F0-9]+`, LiteralNumberHex, nil}, + Include("string"), + Include("keywords"), + Include("expr"), + {`[a-zA-Z_][a-zA-Z_0-9]*`, Name, nil}, + {`\s+`, TextWhitespace, nil}, + }, + "string": { + {`[']{3}([']{0,2}([^\\']|\\(.|\n)))*[']{3}`, LiteralString, nil}, + {`'.*?(?/-]`, Operator, nil}, + {`[()\[\],.]`, Punctuation, nil}, + {Words(``, `\b`, `break`, `case`, `const`, `continue`, `do`, `else`, `enum`, `extern`, `for`, `if`, `return`, `sizeof`, `static`, `struct`, `switch`, `typedef`, `union`, `while`), Keyword, nil}, + {`(bool|float|half|long|ptrdiff_t|size_t|unsigned|u?char|u?int((8|16|32|64)_t)?|u?short)\b`, KeywordType, nil}, + {`(bool|float|half|u?(char|int|long|short))(2|3|4)\b`, KeywordType, nil}, + {`packed_(float|half|long|u?(char|int|short))(2|3|4)\b`, KeywordType, nil}, + {`(float|half)(2|3|4)x(2|3|4)\b`, KeywordType, nil}, + {`atomic_u?int\b`, KeywordType, nil}, + {`(rg?(8|16)(u|s)norm|rgba(8|16)(u|s)norm|srgba8unorm|rgb10a2|rg11b10f|rgb9e5)\b`, KeywordType, nil}, + {`(array|depth(2d|cube)(_array)?|depth2d_ms(_array)?|sampler|texture_buffer|texture(1|2)d(_array)?|texture2d_ms(_array)?|texture3d|texturecube(_array)?|uniform|visible_function_table)\b`, KeywordType, nil}, + {`(true|false|NULL)\b`, NameBuiltin, nil}, + {Words(``, `\b`, `device`, `constant`, `ray_data`, `thread`, `threadgroup`, `threadgroup_imageblock`), Keyword, nil}, + {`([a-zA-Z_]\w*)(\s*)(:)(?!:)`, ByGroups(NameLabel, Text, Punctuation), nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + }, + "root": { + Include("whitespace"), + {`(fragment|kernel|vertex)?((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)`, ByGroups(Keyword, UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), Push("function")}, + {`(fragment|kernel|vertex)?((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)`, ByGroups(Keyword, UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), nil}, + Default(Push("statement")), + }, + "classname": { + {`(\[\[.+\]\])(\s*)`, ByGroups(NameAttribute, Text), nil}, + {`[a-zA-Z_]\w*`, NameClass, Pop(1)}, + {`\s*(?=[>{])`, Text, Pop(1)}, + }, + "whitespace": { + {`^#if\s+0`, CommentPreproc, Push("if0")}, + {`^#`, CommentPreproc, Push("macro")}, + {`^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("if0")}, + {`^(\s*(?:/[*].*?[*]/\s*)?)(#)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("macro")}, + {`\n`, Text, nil}, + {`\s+`, Text, nil}, + {`\\\n`, Text, nil}, + {`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil}, + {`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil}, + {`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil}, + }, + "statement": { + Include("whitespace"), + Include("statements"), + {`[{]`, Punctuation, Push("root")}, + {`[;}]`, Punctuation, Pop(1)}, + }, + "function": { + Include("whitespace"), + Include("statements"), + {`;`, Punctuation, nil}, + {`\{`, Punctuation, Push()}, + {`\}`, Punctuation, Pop(1)}, + }, + "macro": { + 
{`(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)`, ByGroups(CommentPreproc, Text, CommentPreprocFile), nil}, + {`[^/\n]+`, CommentPreproc, nil}, + {`/[*](.|\n)*?[*]/`, CommentMultiline, nil}, + {`//.*?\n`, CommentSingle, Pop(1)}, + {`/`, CommentPreproc, nil}, + {`(?<=\\)\n`, CommentPreproc, nil}, + {`\n`, CommentPreproc, Pop(1)}, + }, + "if0": { + {`^\s*#if.*?(?|->|<-|\\/|xor|/\\)`, Operator, nil}, + {`(<|>|<=|>=|==|=|!=)`, Operator, nil}, + {`(\+|-|\*|/|div|mod)`, Operator, nil}, + {Words(`\b`, `\b`, `in`, `subset`, `superset`, `union`, `diff`, `symdiff`, `intersect`), Operator, nil}, + {`(\\|\.\.|\+\+)`, Operator, nil}, + {`[|()\[\]{},:;]`, Punctuation, nil}, + {`(true|false)\b`, KeywordConstant, nil}, + {`([+-]?)\d+(\.(?!\.)\d*)?([eE][-+]?\d+)?`, LiteralNumber, nil}, + {`::\s*([^\W\d]\w*)(\s*\([^\)]*\))?`, NameDecorator, nil}, + {`\b([^\W\d]\w*)\b(\()`, ByGroups(NameFunction, Punctuation), nil}, + {`[^\W\d]\w*`, NameOther, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/mlir.go b/vendor/github.com/alecthomas/chroma/lexers/m/mlir.go new file mode 100644 index 000000000..112a1c3a6 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/m/mlir.go @@ -0,0 +1,47 @@ +package m + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// MLIR lexer. +var Mlir = internal.Register(MustNewLazyLexer( + &Config{ + Name: "MLIR", + Aliases: []string{"mlir"}, + Filenames: []string{"*.mlir"}, + MimeTypes: []string{"text/x-mlir"}, + }, + mlirRules, +)) + +func mlirRules() Rules { + return Rules{ + "root": { + Include("whitespace"), + {`c?"[^"]*?"`, LiteralString, nil}, + {`\^([-a-zA-Z$._][\w\-$.0-9]*)\s*`, NameLabel, nil}, + {`([\w\d_$.]+)\s*=`, NameLabel, nil}, + Include("keyword"), + {`->`, Punctuation, nil}, + {`@([\w_][\w\d_$.]*)`, NameFunction, nil}, + {`[%#][\w\d_$.]+`, NameVariable, nil}, + {`([1-9?][\d?]*\s*x)+`, LiteralNumber, nil}, + {`0[xX][a-fA-F0-9]+`, LiteralNumber, nil}, + {`-?\d+(?:[.]\d+)?(?:[eE][-+]?\d+(?:[.]\d+)?)?`, LiteralNumber, nil}, + {`[=<>{}\[\]()*.,!:]|x\b`, Punctuation, nil}, + {`[\w\d]+`, Text, nil}, + }, + "whitespace": { + {`(\n|\s)+`, Text, nil}, + {`//.*?\n`, Comment, nil}, + }, + "keyword": { + {Words(``, ``, `constant`, `return`), KeywordType, nil}, + {Words(``, ``, `func`, `loc`, `memref`, `tensor`, `vector`), KeywordType, nil}, + {`bf16|f16|f32|f64|index`, Keyword, nil}, + {`i[1-9]\d*`, Keyword, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/modula2.go b/vendor/github.com/alecthomas/chroma/lexers/m/modula2.go new file mode 100644 index 000000000..6ab05c9ed --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/m/modula2.go @@ -0,0 +1,119 @@ +package m + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Modula-2 lexer. 
+var Modula2 = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Modula-2", + Aliases: []string{"modula2", "m2"}, + Filenames: []string{"*.def", "*.mod"}, + MimeTypes: []string{"text/x-modula2"}, + DotAll: true, + }, + modula2Rules, +)) + +func modula2Rules() Rules { + return Rules{ + "whitespace": { + {`\n+`, Text, nil}, + {`\s+`, Text, nil}, + }, + "dialecttags": { + {`\(\*!m2pim\*\)`, CommentSpecial, nil}, + {`\(\*!m2iso\*\)`, CommentSpecial, nil}, + {`\(\*!m2r10\*\)`, CommentSpecial, nil}, + {`\(\*!objm2\*\)`, CommentSpecial, nil}, + {`\(\*!m2iso\+aglet\*\)`, CommentSpecial, nil}, + {`\(\*!m2pim\+gm2\*\)`, CommentSpecial, nil}, + {`\(\*!m2iso\+p1\*\)`, CommentSpecial, nil}, + {`\(\*!m2iso\+xds\*\)`, CommentSpecial, nil}, + }, + "identifiers": { + {`([a-zA-Z_$][\w$]*)`, Name, nil}, + }, + "prefixed_number_literals": { + {`0b[01]+(\'[01]+)*`, LiteralNumberBin, nil}, + {`0[ux][0-9A-F]+(\'[0-9A-F]+)*`, LiteralNumberHex, nil}, + }, + "plain_number_literals": { + {`[0-9]+(\'[0-9]+)*\.[0-9]+(\'[0-9]+)*[eE][+-]?[0-9]+(\'[0-9]+)*`, LiteralNumberFloat, nil}, + {`[0-9]+(\'[0-9]+)*\.[0-9]+(\'[0-9]+)*`, LiteralNumberFloat, nil}, + {`[0-9]+(\'[0-9]+)*`, LiteralNumberInteger, nil}, + }, + "suffixed_number_literals": { + {`[0-7]+B`, LiteralNumberOct, nil}, + {`[0-7]+C`, LiteralNumberOct, nil}, + {`[0-9A-F]+H`, LiteralNumberHex, nil}, + }, + "string_literals": { + {`'(\\\\|\\'|[^'])*'`, LiteralString, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + }, + "digraph_operators": { + {`\*\.`, Operator, nil}, + {`\+>`, Operator, nil}, + {`<>`, Operator, nil}, + {`<=`, Operator, nil}, + {`>=`, Operator, nil}, + {`==`, Operator, nil}, + {`::`, Operator, nil}, + {`:=`, Operator, nil}, + {`\+\+`, Operator, nil}, + {`--`, Operator, nil}, + }, + "unigraph_operators": { + {`[+-]`, Operator, nil}, + {`[*/]`, Operator, nil}, + {`\\`, Operator, nil}, + {`[=#<>]`, Operator, nil}, + {`\^`, Operator, nil}, + {`@`, Operator, nil}, + {`&`, Operator, nil}, + {`~`, Operator, nil}, + {"`", Operator, nil}, + }, + "digraph_punctuation": { + {`\.\.`, Punctuation, nil}, + {`<<`, Punctuation, nil}, + {`>>`, Punctuation, nil}, + {`->`, Punctuation, nil}, + {`\|#`, Punctuation, nil}, + {`##`, Punctuation, nil}, + {`\|\*`, Punctuation, nil}, + }, + "unigraph_punctuation": { + {`[()\[\]{},.:;|]`, Punctuation, nil}, + {`!`, Punctuation, nil}, + {`\?`, Punctuation, nil}, + }, + "comments": { + {`^//.*?\n`, CommentSingle, nil}, + {`\(\*([^$].*?)\*\)`, CommentMultiline, nil}, + {`/\*(.*?)\*/`, CommentMultiline, nil}, + }, + "pragmas": { + {`<\*.*?\*>`, CommentPreproc, nil}, + {`\(\*\$.*?\*\)`, CommentPreproc, nil}, + }, + "root": { + Include("whitespace"), + Include("dialecttags"), + Include("pragmas"), + Include("comments"), + Include("identifiers"), + Include("suffixed_number_literals"), + Include("prefixed_number_literals"), + Include("plain_number_literals"), + Include("string_literals"), + Include("digraph_punctuation"), + Include("digraph_operators"), + Include("unigraph_punctuation"), + Include("unigraph_operators"), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/monkeyc.go b/vendor/github.com/alecthomas/chroma/lexers/m/monkeyc.go new file mode 100644 index 000000000..8ad81cc08 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/m/monkeyc.go @@ -0,0 +1,66 @@ +package m + +import ( + . 
"github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +var MonkeyC = internal.Register(MustNewLazyLexer( + &Config{ + Name: "MonkeyC", + Aliases: []string{"monkeyc"}, + Filenames: []string{"*.mc"}, + MimeTypes: []string{"text/x-monkeyc"}, + }, + monkeyCRules, +)) + +func monkeyCRules() Rules { + return Rules{ + "root": { + {`[^\S\n]+`, Text, nil}, + {`\n`, Text, nil}, + {`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil}, + {`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil}, + {`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil}, + {`:[a-zA-Z_][\w_\.]*`, StringSymbol, nil}, + {`[{}\[\]\(\),;:\.]`, Punctuation, nil}, + {`[&~\|\^!+\-*\/%=?]`, Operator, nil}, + {`=>|[+-]=|&&|\|\||>>|<<|[<>]=?|[!=]=`, Operator, nil}, + {`\b(and|or|instanceof|has|extends|new)`, OperatorWord, nil}, + {Words(``, `\b`, `NaN`, `null`, `true`, `false`), KeywordConstant, nil}, + {`(using)((?:\s|\\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("import")}, + {`(class)((?:\s|\\\\s)+)`, ByGroups(KeywordDeclaration, Text), Push("class")}, + {`(function)((?:\s|\\\\s)+)`, ByGroups(KeywordDeclaration, Text), Push("function")}, + {`(module)((?:\s|\\\\s)+)`, ByGroups(KeywordDeclaration, Text), Push("module")}, + {`\b(if|else|for|switch|case|while|break|continue|default|do|try|catch|finally|return|throw|extends|function)\b`, Keyword, nil}, + {`\b(const|enum|hidden|public|protected|private|static)\b`, KeywordType, nil}, + {`\bvar\b`, KeywordDeclaration, nil}, + {`\b(Activity(Monitor|Recording)?|Ant(Plus)?|Application|Attention|Background|Communications|Cryptography|FitContributor|Graphics|Gregorian|Lang|Math|Media|Persisted(Content|Locations)|Position|Properties|Sensor(History|Logging)?|Storage|StringUtil|System|Test|Time(r)?|Toybox|UserProfile|WatchUi|Rez|Drawables|Strings|Fonts|method)\b`, NameBuiltin, nil}, + {`\b(me|self|\$)\b`, NameBuiltinPseudo, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`'(\\\\|\\'|[^''])*'`, LiteralStringSingle, nil}, + {`-?(0x[0-9a-fA-F]+l?)`, NumberHex, nil}, + {`-?([0-9]+(\.[0-9]+[df]?|[df]))\b`, NumberFloat, nil}, + {`-?([0-9]+l?)`, NumberInteger, nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + }, + "import": { + {`([a-zA-Z_][\w_\.]*)(?:(\s+)(as)(\s+)([a-zA-Z_][\w_]*))?`, ByGroups(NameNamespace, Text, KeywordNamespace, Text, NameNamespace), nil}, + Default(Pop(1)), + }, + "class": { + {`([a-zA-Z_][\w_\.]*)(?:(\s+)(extends)(\s+)([a-zA-Z_][\w_\.]*))?`, ByGroups(NameClass, Text, KeywordDeclaration, Text, NameClass), nil}, + Default(Pop(1)), + }, + "function": { + {`initialize`, NameFunctionMagic, nil}, + {`[a-zA-Z_][\w_\.]*`, NameFunction, nil}, + Default(Pop(1)), + }, + "module": { + {`[a-zA-Z_][\w_\.]*`, NameNamespace, nil}, + Default(Pop(1)), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/mwscript.go b/vendor/github.com/alecthomas/chroma/lexers/m/mwscript.go new file mode 100644 index 000000000..0ba0ba97c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/m/mwscript.go @@ -0,0 +1,57 @@ +package m + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// MorrowindScript lexer. 
+var MorrowindScript = internal.Register(MustNewLazyLexer( + &Config{ + Name: "MorrowindScript", + Aliases: []string{"morrowind", "mwscript"}, + Filenames: []string{}, + MimeTypes: []string{}, + }, + morrowindScriptRules, +)) + +func morrowindScriptRules() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`;.*$`, Comment, nil}, + {`(["'])(?:(?=(\\?))\2.)*?\1`, LiteralString, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`[0-9]+\.[0-9]*(?!\.)`, LiteralNumberFloat, nil}, + Include("keywords"), + Include("types"), + Include("builtins"), + Include("punct"), + Include("operators"), + {`\n`, Text, nil}, + {`\S+\s+`, Text, nil}, + {`[a-zA-Z0-9_]\w*`, Name, nil}, + }, + "keywords": { + {`(?i)(begin|if|else|elseif|endif|while|endwhile|return|to)\b`, Keyword, nil}, + {`(?i)(end)\b`, Keyword, nil}, + {`(?i)(end)\w+.*$`, Text, nil}, + {`[\w+]->[\w+]`, Operator, nil}, + }, + "builtins": { + {`(?i)(Activate|AddItem|AddSoulGem|AddSpell|AddToLevCreature|AddToLevItem|AddTopic|AIActivate|AIEscort|AIEscortCell|AIFollow|AiFollowCell|AITravel|AIWander|BecomeWerewolf|Cast|ChangeWeather|Choice|ClearForceJump|ClearForceMoveJump|ClearForceRun|ClearForceSneak|ClearInfoActor|Disable|DisableLevitation|DisablePlayerControls|DisablePlayerFighting|DisablePlayerJumping|DisablePlayerLooking|DisablePlayerMagic|DisablePlayerViewSwitch|DisableTeleporting|DisableVanityMode|DontSaveObject|Drop|Enable|EnableBirthMenu|EnableClassMenu|EnableInventoryMenu|EnableLevelUpMenu|EnableLevitation|EnableMagicMenu|EnableMapMenu|EnableNameMenu|EnablePlayerControls|EnablePlayerFighting|EnablePlayerJumping|EnablePlayerLooking|EnablePlayerMagic|EnablePlayerViewSwitch|EnableRaceMenu|EnableRest|EnableStatsMenu|EnableTeleporting|EnableVanityMode|Equip|ExplodeSpell|Face|FadeIn|FadeOut|FadeTo|Fall|ForceGreeting|ForceJump|ForceRun|ForceSneak|Flee|GotoJail|HurtCollidingActor|HurtStandingActor|Journal|Lock|LoopGroup|LowerRank|MenuTest|MessageBox|ModAcrobatics|ModAgility|ModAlarm|ModAlchemy|ModAlteration|ModArmorBonus|ModArmorer|ModAthletics|ModAttackBonus|ModAxe|ModBlock|ModBluntWeapon|ModCastPenalty|ModChameleon|ModConjuration|ModCurrentFatigue|ModCurrentHealth|ModCurrentMagicka|ModDefendBonus|ModDestruction|ModDisposition|ModEnchant|ModEndurance|ModFactionReaction|ModFatigue|ModFight|ModFlee|ModFlying|ModHandToHand|ModHealth|ModHeavyArmor|ModIllusion|ModIntelligence|ModInvisible|ModLightArmor|ModLongBlade|ModLuck|ModMagicka|ModMarksman|ModMediumArmor|ModMercantile|ModMysticism|ModParalysis|ModPCCrimeLevel|ModPCFacRep|ModPersonality|ModRegion|ModReputation|ModResistBlight|ModResistCorprus|ModResistDisease|ModResistFire|ModResistFrost|ModResistMagicka|ModResistNormalWeapons|ModResistParalysis|ModResistPoison|ModResistShock|ModRestoration|ModScale|ModSecurity|ModShortBlade|ModSilence|ModSneak|ModSpear|ModSpeechcraft|ModSpeed|ModStrength|ModSuperJump|ModSwimSpeed|ModUnarmored|ModWaterBreathing|ModWaterLevel|ModWaterWalking|ModWillpower|Move|MoveWorld|PayFine|PayFineThief|PCClearExpelled|PCExpell|PCForce1stPerson|PCForce3rdPerson|PCJoinFaction|PCLowerRank|PCRaiseRank|PlaceAtMe|PlaceAtPC|PlaceItem|PlaceItemCell|PlayBink|PlayGroup|PlayLoopSound3D|PlayLoopSound3DVP|PlaySound|PlaySound3D|PlaySound3DVP|PlaySoundVP|Position|PositionCell|RaiseRank|RemoveEffects|RemoveFromLevCreature|RemoveFromLevItem|RemoveItem|RemoveSoulgem|RemoveSpell|RemoveSpellEffects|ResetActors|Resurrect|Rotate|RotateWorld|Say|StartScript|[S|s]et|SetAcrobatics|SetAgility|SetAlarm|SetAlchemy|SetAlteration|SetAngle|SetArmorBonus|SetArmorer|SetAthletics|SetAtStart|SetAttackBonu
s|SetAxe|SetBlock|SetBluntWeapon|SetCastPenalty|SetChameleon|SetConjuration|SetDelete|SetDefendBonus|SetDestruction|SetDisposition|SetEnchant|SetEndurance|SetFactionReaction|SetFatigue|SetFight|SetFlee|SetFlying|SetHandToHand|SetHealth|SetHeavyArmor|SetIllusion|SetIntelligence|SetInvisible|SetJournalIndex|SetLightArmor|SetLevel|SetLongBlade|SetLuck|SetMagicka|SetMarksman|SetMediumArmor|SetMercantile|SetMysticism|SetParalysis|SetPCCCrimeLevel|SetPCFacRep|SetPersonality|SetPos|SetReputation|SetResistBlight|SetResistCorprus|SetResistDisease|SetResistFire|SetResistFrost|SetResistMagicka|SetResistNormalWeapons|SetResistParalysis|SetResistPoison|SetResistShock|SetRestoration|SetScale|SetSecurity|SetShortBlade|SetSilence|SetSneak|SetSpear|SetSpeechcraft|SetSpeed|SetStrength|SetSuperJump|SetSwimSpeed|SetUnarmored|SetWaterBreathing|SetWaterlevel|SetWaterWalking|SetWerewolfAcrobatics|SetWillpower|ShowMap|ShowRestMenu|SkipAnim|StartCombat|StopCombat|StopScript|StopSound|StreamMusic|TurnMoonRed|TurnMoonWhite|UndoWerewolf|Unlock|WakeUpPC|CenterOnCell|CenterOnExterior|FillMap|FixMe|ToggleAI|ToggleCollision|ToggleFogOfWar|ToggleGodMode|ToggleMenus|ToggleSky|ToggleWorld|ToggleVanityMode|CellChanged|GetAcrobatics|GetAgility|GetAIPackageDone|GetAlarm|GetAlchemy|GetAlteration|GetAngle|GetArmorBonus|GetArmorer|GetAthletics|GetAttackBonus|GetAttacked|GetArmorType,|GetAxe|GetBlightDisease|GetBlock|GetBluntWeapon|GetButtonPressed|GetCastPenalty|GetChameleon|GetCollidingActor|GetCollidingPC|GetCommonDisease|GetConjuration|GetCurrentAIPackage|GetCurrentTime|GetCurrentWeather|GetDeadCount|GetDefendBonus|GetDestruction|GetDetected|GetDisabled|GetDisposition|GetDistance|GetEffect|GetEnchant|GetEndurance|GetFatigue|GetFight|GetFlee|GetFlying|GetForceJump|GetForceRun|GetForceSneak|GetHandToHand|GetHealth|GetHealthGetRatio|GetHeavyArmor|GetIllusion|GetIntelligence|GetInterior|GetInvisible|GetItemCount|GetJournalIndex|GetLightArmor|GetLineOfSight|GetLOS|GetLevel|GetLocked|GetLongBlade|GetLuck|GetMagicka|GetMarksman|GetMasserPhase|GetSecundaPhase|GetMediumArmor|GetMercantile|GetMysticism|GetParalysis|GetPCCell|GetPCCrimeLevel|GetPCinJail|GetPCJumping|GetPCRank|GetPCRunning|GetPCSleep|GetPCSneaking|GetPCTraveling|GetPersonality|GetPlayerControlsDisabled|GetPlayerFightingDisabled|GetPlayerJumpingDisabled|GetPlayerLookingDisabled|GetPlayerMagicDisabled|GetPos|GetRace|GetReputation|GetResistBlight|GetResistCorprus|GetResistDisease|GetResistFire|GetResistFrost|GetResistMagicka|GetResistNormalWeapons|GetResistParalysis|GetResistPoison|GetResistShock|GetRestoration|GetScale|GetSecondsPassed|GetSecurity|GetShortBlade|GetSilence|GetSneak|GetSoundPlaying|GetSpear|GetSpeechcraft|GetSpeed|GetSpell|GetSpellEffects|GetSpellReadied|GetSquareRoot|GetStandingActor|GetStandingPC|GetStrength|GetSuperJump|GetSwimSpeed|GetTarget|GetUnarmored|GetVanityModeDisabled|GetWaterBreathing|GetWaterLevel|GetWaterWalking|GetWeaponDrawn|GetWeaponType|GetWerewolfKills|GetWillpower|GetWindSpeed|HasItemEquipped|HasSoulgem|HitAttemptOnMe|HitOnMe|IsWerewolf|MenuMode|OnActivate|OnDeath|OnKnockout|OnMurder|PCExpelled|PCGet3rdPerson|PCKnownWerewolf|Random|RepairedOnMe|SameFaction|SayDone|ScriptRunning|AllowWereWolfForceGreeting|Companion|MinimumProfit|NoFlee|NoHello|NoIdle|NoLore|OnPCAdd|OnPCDrop|OnPCEquip|OnPCHitMe|OnPCRepair|PCSkipEquip|OnPCSoulGemUse|StayOutside|CrimeGoldDiscount|CrimeGoldTurnIn|Day|DaysPassed|GameHour|Month|NPCVoiceDistance|PCRace|PCWerewolf|PCVampire|TimeScale|VampClan|Year)\b`, NameBuiltin, nil}, + 
{`(?i)(sEffectWaterBreathing|sEffectSwiftSwim|sEffectWaterWalking|sEffectShield|sEffectFireShield|sEffectLightningShield|sEffectFrostShield|sEffectBurden|sEffectFeather|sEffectJump|sEffectLevitate|sEffectSlowFall|sEffectLock|sEffectOpen|sEffectFireDamage|sEffectShockDamage|sEffectFrostDamage|sEffectDrainAttribute|sEffectDrainHealth|sEffectDrainSpellpoints|sEffectDrainFatigue|sEffectDrainSkill|sEffectDamageAttribute|sEffectDamageHealth|sEffectDamageMagicka|sEffectDamageFatigue|sEffectDamageSkill|sEffectPoison|sEffectWeaknessToFire|sEffectWeaknessToFrost|sEffectWeaknessToShock|sEffectWeaknessToMagicka|sEffectWeaknessToCommonDisease|sEffectWeaknessToBlightDisease|sEffectWeaknessToCorprusDisease|sEffectWeaknessToPoison|sEffectWeaknessToNormalWeapons|sEffectDisintegrateWeapon|sEffectDisintegrateArmor|sEffectInvisibility|sEffectChameleon|sEffectLight|sEffectSanctuary|sEffectNightEye|sEffectCharm|sEffectParalyze|sEffectSilence|sEffectBlind|sEffectSound|sEffectCalmHumanoid|sEffectCalmCreature|sEffectFrenzyHumanoid|sEffectFrenzyCreature|sEffectDemoralizeHumanoid|sEffectDemoralizeCreature|sEffectRallyHumanoid|sEffectRallyCreature|sEffectDispel|sEffectSoultrap|sEffectTelekinesis|sEffectMark|sEffectRecall|sEffectDivineIntervention|sEffectAlmsiviIntervention|sEffectDetectAnimal|sEffectDetectEnchantment|sEffectDetectKey|sEffectSpellAbsorption|sEffectReflect|sEffectCureCommonDisease|sEffectCureBlightDisease|sEffectCureCorprusDisease|sEffectCurePoison|sEffectCureParalyzation|sEffectRestoreAttribute|sEffectRestoreHealth|sEffectRestoreSpellPoints|sEffectRestoreFatigue|sEffectRestoreSkill|sEffectFortifyAttribute|sEffectFortifyHealth|sEffectFortifySpellpoints|sEffectFortifyFatigue|sEffectFortifySkill|sEffectFortifyMagickaMultiplier|sEffectAbsorbAttribute|sEffectAbsorbHealth|sEffectAbsorbSpellPoints|sEffectAbsorbFatigue|sEffectAbsorbSkill|sEffectResistFire|sEffectResistFrost|sEffectResistShock|sEffectResistMagicka|sEffectResistCommonDisease|sEffectResistBlightDisease|sEffectResistCorprusDisease|sEffectResistPoison|sEffectResistNormalWeapons|sEffectResistParalysis|sEffectRemoveCurse|sEffectTurnUndead|sEffectSummonScamp|sEffectSummonClannfear|sEffectSummonDaedroth|sEffectSummonDremora|sEffectSummonAncestralGhost|sEffectSummonSkeletalMinion|sEffectSummonLeastBonewalker|sEffectSummonGreaterBonewalker|sEffectSummonBonelord|sEffectSummonWingedTwilight|sEffectSummonHunger|sEffectSummonGoldensaint|sEffectSummonFlameAtronach|sEffectSummonFrostAtronach|sEffectSummonStormAtronach|sEffectFortifyAttackBonus|sEffectCommandCreatures|sEffectCommandHumanoids|sEffectBoundDagger|sEffectBoundLongsword|sEffectBoundMace|sEffectBoundBattleAxe|sEffectBoundSpear|sEffectBoundLongbow|sEffectExtraSpell|sEffectBoundCuirass|sEffectBoundHelm|sEffectBoundBoots|sEffectBoundShield|sEffectBoundGloves|sEffectCorpus|sEffectVampirism|sEffectSummonCenturionSphere|sEffectSunDamage|sEffectStuntedMagicka)`, NameBuiltin, nil}, + }, + "types": { + {`(?i)(short|long|float)\b`, KeywordType, nil}, + }, + "punct": { + {`[()]`, Punctuation, nil}, + }, + "operators": { + {`[#=,./%+\-?]`, Operator, nil}, + {`(==|<=|<|>=|>|!=)`, Operator, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/myghty.go b/vendor/github.com/alecthomas/chroma/lexers/m/myghty.go new file mode 100644 index 000000000..eac082cff --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/m/myghty.go @@ -0,0 +1,44 @@ +package m + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" + . 
"github.com/alecthomas/chroma/lexers/p" // nolint +) + +// Myghty lexer. +var Myghty = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Myghty", + Aliases: []string{"myghty"}, + Filenames: []string{"*.myt", "autodelegate"}, + MimeTypes: []string{"application/x-myghty"}, + }, + myghtyRules, +)) + +func myghtyRules() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`(<%(?:def|method))(\s*)(.*?)(>)(.*?)()(?s)`, ByGroups(NameTag, Text, NameFunction, NameTag, UsingSelf("root"), NameTag), nil}, + {`(<%\w+)(.*?)(>)(.*?)()(?s)`, ByGroups(NameTag, NameFunction, NameTag, Using(Python2), NameTag), nil}, + {`(<&[^|])(.*?)(,.*?)?(&>)`, ByGroups(NameTag, NameFunction, Using(Python2), NameTag), nil}, + {`(<&\|)(.*?)(,.*?)?(&>)(?s)`, ByGroups(NameTag, NameFunction, Using(Python2), NameTag), nil}, + {``, NameTag, nil}, + {`(<%!?)(.*?)(%>)(?s)`, ByGroups(NameTag, Using(Python2), NameTag), nil}, + {`(?<=^)#[^\n]*(\n|\Z)`, Comment, nil}, + {`(?<=^)(%)([^\n]*)(\n|\Z)`, ByGroups(NameTag, Using(Python2), Other), nil}, + {`(?sx) + (.+?) # anything, followed by: + (?: + (?<=\n)(?=[%#]) | # an eval or comment line + (?== 1 && nameBetweenBacktickCount >= (2*nameBetweenBracketCount) { + // Found at least twice as many `name` as [name]. + result += 0.5 + } else if nameBetweenBacktickCount > nameBetweenBracketCount { + result += 0.2 + } else if nameBetweenBacktickCount > 0 { + result += 0.1 + } + + return result +})) + +func mySQLRules() Rules { + return Rules{ + "root": { + {`\s+`, TextWhitespace, nil}, + {`(#|--\s+).*\n?`, CommentSingle, nil}, + {`/\*`, CommentMultiline, Push("multiline-comments")}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`[0-9]*\.[0-9]+(e[+-][0-9]+)`, LiteralNumberFloat, nil}, + {`((?:_[a-z0-9]+)?)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("string")}, + {`((?:_[a-z0-9]+)?)(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("double-string")}, + {"[+*/<>=~!@#%^&|`?-]", Operator, nil}, + {`\b(tinyint|smallint|mediumint|int|integer|bigint|date|datetime|time|bit|bool|tinytext|mediumtext|longtext|text|tinyblob|mediumblob|longblob|blob|float|double|double\s+precision|real|numeric|dec|decimal|timestamp|year|char|varchar|varbinary|varcharacter|enum|set)(\b\s*)(\()?`, ByGroups(KeywordType, TextWhitespace, Punctuation), nil}, + 
{`\b(add|all|alter|analyze|and|as|asc|asensitive|before|between|bigint|binary|blob|both|by|call|cascade|case|change|char|character|check|collate|column|condition|constraint|continue|convert|create|cross|current_date|current_time|current_timestamp|current_user|cursor|database|databases|day_hour|day_microsecond|day_minute|day_second|dec|decimal|declare|default|delayed|delete|desc|describe|deterministic|distinct|distinctrow|div|double|drop|dual|each|else|elseif|enclosed|escaped|exists|exit|explain|fetch|flush|float|float4|float8|for|force|foreign|from|fulltext|grant|group|having|high_priority|hour_microsecond|hour_minute|hour_second|identified|if|ignore|in|index|infile|inner|inout|insensitive|insert|int|int1|int2|int3|int4|int8|integer|interval|into|is|iterate|join|key|keys|kill|leading|leave|left|like|limit|lines|load|localtime|localtimestamp|lock|long|loop|low_priority|match|minute_microsecond|minute_second|mod|modifies|natural|no_write_to_binlog|not|numeric|on|optimize|option|optionally|or|order|out|outer|outfile|precision|primary|privileges|procedure|purge|raid0|read|reads|real|references|regexp|release|rename|repeat|replace|require|restrict|return|revoke|right|rlike|schema|schemas|second_microsecond|select|sensitive|separator|set|show|smallint|soname|spatial|specific|sql|sql_big_result|sql_calc_found_rows|sql_small_result|sqlexception|sqlstate|sqlwarning|ssl|starting|straight_join|table|terminated|then|to|trailing|trigger|undo|union|unique|unlock|unsigned|update|usage|use|user|using|utc_date|utc_time|utc_timestamp|values|varying|when|where|while|with|write|x509|xor|year_month|zerofill)\b`, Keyword, nil}, + {`\b(auto_increment|engine|charset|tables)\b`, KeywordPseudo, nil}, + {`(true|false|null)`, NameConstant, nil}, + {`([a-z_]\w*)(\s*)(\()`, ByGroups(NameFunction, TextWhitespace, Punctuation), nil}, + {`[a-z_]\w*`, Name, nil}, + {`@[a-z0-9]*[._]*[a-z0-9]*`, NameVariable, nil}, + {`[;:()\[\],.]`, Punctuation, nil}, + }, + "multiline-comments": { + {`/\*`, CommentMultiline, Push("multiline-comments")}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[^/*]+`, CommentMultiline, nil}, + {`[/*]`, CommentMultiline, nil}, + }, + "string": { + {`[^']+`, LiteralStringSingle, nil}, + {`''`, LiteralStringSingle, nil}, + {`'`, LiteralStringSingle, Pop(1)}, + }, + "double-string": { + {`[^"]+`, LiteralStringDouble, nil}, + {`""`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/n/nasm.go b/vendor/github.com/alecthomas/chroma/lexers/n/nasm.go new file mode 100644 index 000000000..de6734e86 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/n/nasm.go @@ -0,0 +1,63 @@ +package n + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Nasm lexer. 
+var Nasm = internal.Register(MustNewLazyLexer( + &Config{ + Name: "NASM", + Aliases: []string{"nasm"}, + Filenames: []string{"*.asm", "*.ASM"}, + MimeTypes: []string{"text/x-nasm"}, + CaseInsensitive: true, + }, + nasmRules, +)) + +func nasmRules() Rules { + return Rules{ + "root": { + {`^\s*%`, CommentPreproc, Push("preproc")}, + Include("whitespace"), + {`[a-z$._?][\w$.?#@~]*:`, NameLabel, nil}, + {`([a-z$._?][\w$.?#@~]*)(\s+)(equ)`, ByGroups(NameConstant, KeywordDeclaration, KeywordDeclaration), Push("instruction-args")}, + {`BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|ORG|ALIGN|STRUC|ENDSTRUC|COMMON|CPU|GROUP|UPPERCASE|IMPORT|EXPORT|LIBRARY|MODULE`, Keyword, Push("instruction-args")}, + {`(?:res|d)[bwdqt]|times`, KeywordDeclaration, Push("instruction-args")}, + {`[a-z$._?][\w$.?#@~]*`, NameFunction, Push("instruction-args")}, + {`[\r\n]+`, Text, nil}, + }, + "instruction-args": { + {"\"(\\\\\"|[^\"\\n])*\"|'(\\\\'|[^'\\n])*'|`(\\\\`|[^`\\n])*`", LiteralString, nil}, + {`(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)`, LiteralNumberHex, nil}, + {`[0-7]+q`, LiteralNumberOct, nil}, + {`[01]+b`, LiteralNumberBin, nil}, + {`[0-9]+\.e?[0-9]+`, LiteralNumberFloat, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + Include("punctuation"), + {`r[0-9][0-5]?[bwd]|[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]`, NameBuiltin, nil}, + {`[a-z$._?][\w$.?#@~]*`, NameVariable, nil}, + {`[\r\n]+`, Text, Pop(1)}, + Include("whitespace"), + }, + "preproc": { + {`[^;\n]+`, CommentPreproc, nil}, + {`;.*?\n`, CommentSingle, Pop(1)}, + {`\n`, CommentPreproc, Pop(1)}, + }, + "whitespace": { + {`\n`, Text, nil}, + {`[ \t]+`, Text, nil}, + {`;.*`, CommentSingle, nil}, + }, + "punctuation": { + {`[,():\[\]]+`, Punctuation, nil}, + {`[&|^<>+*/%~-]+`, Operator, nil}, + {`[$]+`, KeywordConstant, nil}, + {`seg|wrt|strict`, OperatorWord, nil}, + {`byte|[dq]?word`, KeywordType, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/n/newspeak.go b/vendor/github.com/alecthomas/chroma/lexers/n/newspeak.go new file mode 100644 index 000000000..c9d3ae032 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/n/newspeak.go @@ -0,0 +1,59 @@ +package n + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Newspeak lexer. 
+var Newspeak = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Newspeak", + Aliases: []string{"newspeak"}, + Filenames: []string{"*.ns2"}, + MimeTypes: []string{"text/x-newspeak"}, + }, + newspeakRules, +)) + +func newspeakRules() Rules { + return Rules{ + "root": { + {`\b(Newsqueak2)\b`, KeywordDeclaration, nil}, + {`'[^']*'`, LiteralString, nil}, + {`\b(class)(\s+)(\w+)(\s*)`, ByGroups(KeywordDeclaration, Text, NameClass, Text), nil}, + {`\b(mixin|self|super|private|public|protected|nil|true|false)\b`, Keyword, nil}, + {`(\w+\:)(\s*)([a-zA-Z_]\w+)`, ByGroups(NameFunction, Text, NameVariable), nil}, + {`(\w+)(\s*)(=)`, ByGroups(NameAttribute, Text, Operator), nil}, + {`<\w+>`, CommentSpecial, nil}, + Include("expressionstat"), + Include("whitespace"), + }, + "expressionstat": { + {`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`:\w+`, NameVariable, nil}, + {`(\w+)(::)`, ByGroups(NameVariable, Operator), nil}, + {`\w+:`, NameFunction, nil}, + {`\w+`, NameVariable, nil}, + {`\(|\)`, Punctuation, nil}, + {`\[|\]`, Punctuation, nil}, + {`\{|\}`, Punctuation, nil}, + {`(\^|\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-|:)`, Operator, nil}, + {`\.|;`, Punctuation, nil}, + Include("whitespace"), + Include("literals"), + }, + "literals": { + {`\$.`, LiteralString, nil}, + {`'[^']*'`, LiteralString, nil}, + {`#'[^']*'`, LiteralStringSymbol, nil}, + {`#\w+:?`, LiteralStringSymbol, nil}, + {`#(\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-)+`, LiteralStringSymbol, nil}, + }, + "whitespace": { + {`\s+`, Text, nil}, + {`"[^"]*"`, Comment, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/n/nginx.go b/vendor/github.com/alecthomas/chroma/lexers/n/nginx.go new file mode 100644 index 000000000..6d80523ec --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/n/nginx.go @@ -0,0 +1,51 @@ +package n + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Nginx Configuration File lexer. +var Nginx = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Nginx configuration file", + Aliases: []string{"nginx"}, + Filenames: []string{"nginx.conf"}, + MimeTypes: []string{"text/x-nginx-conf"}, + }, + nginxRules, +)) + +func nginxRules() Rules { + return Rules{ + "root": { + {`(include)(\s+)([^\s;]+)`, ByGroups(Keyword, Text, Name), nil}, + {`[^\s;#]+`, Keyword, Push("stmt")}, + Include("base"), + }, + "block": { + {`\}`, Punctuation, Pop(2)}, + {`[^\s;#]+`, KeywordNamespace, Push("stmt")}, + Include("base"), + }, + "stmt": { + {`\{`, Punctuation, Push("block")}, + {`;`, Punctuation, Pop(1)}, + Include("base"), + }, + "base": { + {`#.*\n`, CommentSingle, nil}, + {`on|off`, NameConstant, nil}, + {`\$[^\s;#()]+`, NameVariable, nil}, + {`([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Punctuation, LiteralNumberInteger), nil}, + {`[a-z-]+/[a-z-+]+`, LiteralString, nil}, + {`[0-9]+[km]?\b`, LiteralNumberInteger, nil}, + {`(~)(\s*)([^\s{]+)`, ByGroups(Punctuation, Text, LiteralStringRegex), nil}, + {`[:=~]`, Punctuation, nil}, + {`[^\s;#{}$]+`, LiteralString, nil}, + {`/[^\s;#]*`, Name, nil}, + {`\s+`, Text, nil}, + {`[$;]`, Text, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/n/nim.go b/vendor/github.com/alecthomas/chroma/lexers/n/nim.go new file mode 100644 index 000000000..3f98086b4 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/n/nim.go @@ -0,0 +1,97 @@ +package n + +import ( + . 
"github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Nim lexer. +var Nim = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Nim", + Aliases: []string{"nim", "nimrod"}, + Filenames: []string{"*.nim", "*.nimrod"}, + MimeTypes: []string{"text/x-nim"}, + CaseInsensitive: true, + }, + nimRules, +)) + +func nimRules() Rules { + return Rules{ + "root": { + {`#\[[\s\S]*?\]#`, CommentMultiline, nil}, + {`##.*$`, LiteralStringDoc, nil}, + {`#.*$`, Comment, nil}, + {`[*=><+\-/@$~&%!?|\\\[\]]`, Operator, nil}, + {"\\.\\.|\\.|,|\\[\\.|\\.\\]|\\{\\.|\\.\\}|\\(\\.|\\.\\)|\\{|\\}|\\(|\\)|:|\\^|`|;", Punctuation, nil}, + {`(?:[\w]+)"`, LiteralString, Push("rdqs")}, + {`"""`, LiteralString, Push("tdqs")}, + {`"`, LiteralString, Push("dqs")}, + {`'`, LiteralStringChar, Push("chars")}, + {`(a_?n_?d_?|o_?r_?|n_?o_?t_?|x_?o_?r_?|s_?h_?l_?|s_?h_?r_?|d_?i_?v_?|m_?o_?d_?|i_?n_?|n_?o_?t_?i_?n_?|i_?s_?|i_?s_?n_?o_?t_?)\b`, OperatorWord, nil}, + {`(p_?r_?o_?c_?\s)(?![(\[\]])`, Keyword, Push("funcname")}, + {`(a_?d_?d_?r_?|a_?n_?d_?|a_?s_?|a_?s_?m_?|a_?t_?o_?m_?i_?c_?|b_?i_?n_?d_?|b_?l_?o_?c_?k_?|b_?r_?e_?a_?k_?|c_?a_?s_?e_?|c_?a_?s_?t_?|c_?o_?n_?c_?e_?p_?t_?|c_?o_?n_?s_?t_?|c_?o_?n_?t_?i_?n_?u_?e_?|c_?o_?n_?v_?e_?r_?t_?e_?r_?|d_?e_?f_?e_?r_?|d_?i_?s_?c_?a_?r_?d_?|d_?i_?s_?t_?i_?n_?c_?t_?|d_?i_?v_?|d_?o_?|e_?l_?i_?f_?|e_?l_?s_?e_?|e_?n_?d_?|e_?n_?u_?m_?|e_?x_?c_?e_?p_?t_?|e_?x_?p_?o_?r_?t_?|f_?i_?n_?a_?l_?l_?y_?|f_?o_?r_?|f_?u_?n_?c_?|i_?f_?|i_?n_?|y_?i_?e_?l_?d_?|i_?n_?t_?e_?r_?f_?a_?c_?e_?|i_?s_?|i_?s_?n_?o_?t_?|i_?t_?e_?r_?a_?t_?o_?r_?|l_?e_?t_?|m_?a_?c_?r_?o_?|m_?e_?t_?h_?o_?d_?|m_?i_?x_?i_?n_?|m_?o_?d_?|n_?o_?t_?|n_?o_?t_?i_?n_?|o_?b_?j_?e_?c_?t_?|o_?f_?|o_?r_?|o_?u_?t_?|p_?r_?o_?c_?|p_?t_?r_?|r_?a_?i_?s_?e_?|r_?e_?f_?|r_?e_?t_?u_?r_?n_?|s_?h_?a_?r_?e_?d_?|s_?h_?l_?|s_?h_?r_?|s_?t_?a_?t_?i_?c_?|t_?e_?m_?p_?l_?a_?t_?e_?|t_?r_?y_?|t_?u_?p_?l_?e_?|t_?y_?p_?e_?|w_?h_?e_?n_?|w_?h_?i_?l_?e_?|w_?i_?t_?h_?|w_?i_?t_?h_?o_?u_?t_?|x_?o_?r_?)\b`, Keyword, nil}, + {`(f_?r_?o_?m_?|i_?m_?p_?o_?r_?t_?|i_?n_?c_?l_?u_?d_?e_?)\b`, KeywordNamespace, nil}, + {`(v_?a_?r)\b`, KeywordDeclaration, nil}, + {`(i_?n_?t_?|i_?n_?t_?8_?|i_?n_?t_?1_?6_?|i_?n_?t_?3_?2_?|i_?n_?t_?6_?4_?|f_?l_?o_?a_?t_?|f_?l_?o_?a_?t_?3_?2_?|f_?l_?o_?a_?t_?6_?4_?|b_?o_?o_?l_?|c_?h_?a_?r_?|r_?a_?n_?g_?e_?|a_?r_?r_?a_?y_?|s_?e_?q_?|s_?e_?t_?|s_?t_?r_?i_?n_?g_?)\b`, KeywordType, nil}, + {`(n_?i_?l_?|t_?r_?u_?e_?|f_?a_?l_?s_?e_?)\b`, KeywordPseudo, nil}, + {`\b_\b`, Name, nil}, // Standalone _ used as discardable variable identifier + {`\b((?![_\d])\w)(((?!_)\w)|(_(?!_)\w))*`, Name, nil}, + {`[0-9][0-9_]*(?=([e.]|\'f(32|64)))`, LiteralNumberFloat, Push("float-suffix", "float-number")}, + {`0x[a-f0-9][a-f0-9_]*`, LiteralNumberHex, Push("int-suffix")}, + {`0b[01][01_]*`, LiteralNumberBin, Push("int-suffix")}, + {`0o[0-7][0-7_]*`, LiteralNumberOct, Push("int-suffix")}, + {`[0-9][0-9_]*`, LiteralNumberInteger, Push("int-suffix")}, + {`\s+`, Text, nil}, + {`.+$`, Error, nil}, + }, + "chars": { + {`\\([\\abcefnrtvl"\']|x[a-f0-9]{2}|[0-9]{1,3})`, LiteralStringEscape, nil}, + {`'`, LiteralStringChar, Pop(1)}, + {`.`, LiteralStringChar, nil}, + }, + "strings": { + {`(? 
<= < >= > *")...), Operator, nil}, + {`[;:]`, Punctuation, nil}, + }, + "comment": { + {`\*/`, CommentMultiline, Pop(1)}, + {`.|\n`, CommentMultiline, nil}, + }, + "paren": { + {`\)`, Punctuation, Pop(1)}, + Include("root"), + }, + "list": { + {`\]`, Punctuation, Pop(1)}, + Include("root"), + }, + "qstring": { + {`"`, StringDouble, Pop(1)}, + {`\${`, StringInterpol, Push("interpol")}, + {`\\.`, StringEscape, nil}, + {`.|\n`, StringDouble, nil}, + }, + "istring": { + {`''\$`, StringEscape, nil}, // "$" + {`'''`, StringEscape, nil}, // "''" + {`''\\.`, StringEscape, nil}, // "\." + {`''`, StringSingle, Pop(1)}, + {`\${`, StringInterpol, Push("interpol")}, + // The next rule is important: "$" escapes any symbol except "{"! + {`\$.`, StringSingle, nil}, // "$." + {`.|\n`, StringSingle, nil}, + }, + "scope": { + {`}:`, Punctuation, Pop(1)}, + {`}`, Punctuation, Pop(1)}, + {`in` + nixb, Keyword, Pop(1)}, + {`\${`, StringInterpol, Push("interpol")}, + Include("root"), // "==" has to be above "=" + {Words(``, ``, strings.Fields("= ? ,")...), Operator, nil}, + }, + "interpol": { + {`}`, StringInterpol, Pop(1)}, + Include("root"), + }, + "id": { + {`[a-zA-Z_][a-zA-Z0-9_'-]*`, Name, nil}, + }, + "uri": { + {`[a-zA-Z][a-zA-Z0-9+.-]*:[a-zA-Z0-9%/?:@&=+$,_.!~*'-]+`, StringDoc, nil}, + }, + "path": { + {`[a-zA-Z0-9._+-]*(/[a-zA-Z0-9._+-]+)+`, StringRegex, nil}, + {`~(/[a-zA-Z0-9._+-]+)+/?`, StringRegex, nil}, + {`<[a-zA-Z0-9._+-]+(/[a-zA-Z0-9._+-]+)*>`, StringRegex, nil}, + }, + "int": { + {`-?[0-9]+` + nixb, NumberInteger, nil}, + }, + "float": { + {`-?(([1-9][0-9]*\.[0-9]*)|(0?\.[0-9]+))([Ee][+-]?[0-9]+)?` + nixb, NumberFloat, nil}, + }, + "space": { + {`[ \t\r\n]+`, Text, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/o/objectivec.go b/vendor/github.com/alecthomas/chroma/lexers/o/objectivec.go new file mode 100644 index 000000000..0ae302960 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/o/objectivec.go @@ -0,0 +1,169 @@ +package o + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Objective-C lexer. 
+var ObjectiveC = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Objective-C", + Aliases: []string{"objective-c", "objectivec", "obj-c", "objc"}, + Filenames: []string{"*.m", "*.h"}, + MimeTypes: []string{"text/x-objective-c"}, + }, + objectiveCRules, +)) + +func objectiveCRules() Rules { + return Rules{ + "statements": { + {`@"`, LiteralString, Push("string")}, + {`@(YES|NO)`, LiteralNumber, nil}, + {`@'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'`, LiteralStringChar, nil}, + {`@(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?`, LiteralNumberFloat, nil}, + {`@(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, + {`@0x[0-9a-fA-F]+[Ll]?`, LiteralNumberHex, nil}, + {`@0[0-7]+[Ll]?`, LiteralNumberOct, nil}, + {`@\d+[Ll]?`, LiteralNumberInteger, nil}, + {`@\(`, Literal, Push("literal_number")}, + {`@\[`, Literal, Push("literal_array")}, + {`@\{`, Literal, Push("literal_dictionary")}, + {Words(``, `\b`, `@selector`, `@private`, `@protected`, `@public`, `@encode`, `@synchronized`, `@try`, `@throw`, `@catch`, `@finally`, `@end`, `@property`, `@synthesize`, `__bridge`, `__bridge_transfer`, `__autoreleasing`, `__block`, `__weak`, `__strong`, `weak`, `strong`, `copy`, `retain`, `assign`, `unsafe_unretained`, `atomic`, `nonatomic`, `readonly`, `readwrite`, `setter`, `getter`, `typeof`, `in`, `out`, `inout`, `release`, `class`, `@dynamic`, `@optional`, `@required`, `@autoreleasepool`), Keyword, nil}, + {Words(``, `\b`, `id`, `instancetype`, `Class`, `IMP`, `SEL`, `BOOL`, `IBOutlet`, `IBAction`, `unichar`), KeywordType, nil}, + {`@(true|false|YES|NO)\n`, NameBuiltin, nil}, + {`(YES|NO|nil|self|super)\b`, NameBuiltin, nil}, + {`(Boolean|UInt8|SInt8|UInt16|SInt16|UInt32|SInt32)\b`, KeywordType, nil}, + {`(TRUE|FALSE)\b`, NameBuiltin, nil}, + {`(@interface|@implementation)(\s+)`, ByGroups(Keyword, Text), Push("#pop", "oc_classname")}, + {`(@class|@protocol)(\s+)`, ByGroups(Keyword, Text), Push("#pop", "oc_forward_classname")}, + {`@`, Punctuation, nil}, + {`(L?)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")}, + {`(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')`, ByGroups(LiteralStringAffix, LiteralStringChar, LiteralStringChar, LiteralStringChar), nil}, + {`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*`, LiteralNumberFloat, nil}, + {`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, + {`0x[0-9a-fA-F]+[LlUu]*`, LiteralNumberHex, nil}, + {`0[0-7]+[LlUu]*`, LiteralNumberOct, nil}, + {`\d+[LlUu]*`, LiteralNumberInteger, nil}, + {`\*/`, Error, nil}, + {`[~!%^&*+=|?:<>/-]`, Operator, nil}, + {`[()\[\],.]`, Punctuation, nil}, + {Words(``, `\b`, `asm`, `auto`, `break`, `case`, `const`, `continue`, `default`, `do`, `else`, `enum`, `extern`, `for`, `goto`, `if`, `register`, `restricted`, `return`, `sizeof`, `static`, `struct`, `switch`, `typedef`, `union`, `volatile`, `while`), Keyword, nil}, + {`(bool|int|long|float|short|double|char|unsigned|signed|void)\b`, KeywordType, nil}, + {Words(``, `\b`, `inline`, `_inline`, `__inline`, `naked`, `restrict`, `thread`, `typename`), KeywordReserved, nil}, + {`(__m(128i|128d|128|64))\b`, KeywordReserved, nil}, + {Words(`__`, `\b`, `asm`, `int8`, `based`, `except`, `int16`, `stdcall`, `cdecl`, `fastcall`, `int32`, `declspec`, `finally`, `int64`, `try`, `leave`, `wchar_t`, `w64`, `unaligned`, `raise`, `noop`, `identifier`, `forceinline`, `assume`), KeywordReserved, nil}, + {`(true|false|NULL)\b`, NameBuiltin, nil}, + {`([a-zA-Z_]\w*)(\s*)(:)(?!:)`, ByGroups(NameLabel, Text, Punctuation), nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + }, + 
"oc_classname": { + {`([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)(\{)`, ByGroups(NameClass, Text, NameClass, Text, Punctuation), Push("#pop", "oc_ivars")}, + {`([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?`, ByGroups(NameClass, Text, NameClass), Pop(1)}, + {`([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))(\s*)(\{)`, ByGroups(NameClass, Text, NameLabel, Text, Punctuation), Push("#pop", "oc_ivars")}, + {`([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))`, ByGroups(NameClass, Text, NameLabel), Pop(1)}, + {`([a-zA-Z$_][\w$]*)(\s*)(\{)`, ByGroups(NameClass, Text, Punctuation), Push("#pop", "oc_ivars")}, + {`([a-zA-Z$_][\w$]*)`, NameClass, Pop(1)}, + }, + "oc_forward_classname": { + {`([a-zA-Z$_][\w$]*)(\s*,\s*)`, ByGroups(NameClass, Text), Push("oc_forward_classname")}, + {`([a-zA-Z$_][\w$]*)(\s*;?)`, ByGroups(NameClass, Text), Pop(1)}, + }, + "oc_ivars": { + Include("whitespace"), + Include("statements"), + {`;`, Punctuation, nil}, + {`\{`, Punctuation, Push()}, + {`\}`, Punctuation, Pop(1)}, + }, + "root": { + {`^([-+])(\s*)(\(.*?\))?(\s*)([a-zA-Z$_][\w$]*:?)`, ByGroups(Punctuation, Text, UsingSelf("root"), Text, NameFunction), Push("method")}, + Include("whitespace"), + {`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), Push("function")}, + {`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), nil}, + Default(Push("statement")), + }, + "method": { + Include("whitespace"), + {`,`, Punctuation, nil}, + {`\.\.\.`, Punctuation, nil}, + {`(\(.*?\))(\s*)([a-zA-Z$_][\w$]*)`, ByGroups(UsingSelf("root"), Text, NameVariable), nil}, + {`[a-zA-Z$_][\w$]*:`, NameFunction, nil}, + {`;`, Punctuation, Pop(1)}, + {`\{`, Punctuation, Push("function")}, + Default(Pop(1)), + }, + "literal_number": { + {`\(`, Punctuation, Push("literal_number_inner")}, + {`\)`, Literal, Pop(1)}, + Include("statement"), + }, + "literal_number_inner": { + {`\(`, Punctuation, Push()}, + {`\)`, Punctuation, Pop(1)}, + Include("statement"), + }, + "literal_array": { + {`\[`, Punctuation, Push("literal_array_inner")}, + {`\]`, Literal, Pop(1)}, + Include("statement"), + }, + "literal_array_inner": { + {`\[`, Punctuation, Push()}, + {`\]`, Punctuation, Pop(1)}, + Include("statement"), + }, + "literal_dictionary": { + {`\}`, Literal, Pop(1)}, + Include("statement"), + }, + "whitespace": { + {`^#if\s+0`, CommentPreproc, Push("if0")}, + {`^#`, CommentPreproc, Push("macro")}, + {`^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("if0")}, + {`^(\s*(?:/[*].*?[*]/\s*)?)(#)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("macro")}, + {`\n`, Text, nil}, + {`\s+`, Text, nil}, + {`\\\n`, Text, nil}, + {`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil}, + {`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil}, + {`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil}, + }, + "statement": { + Include("whitespace"), + Include("statements"), + {`[{}]`, Punctuation, nil}, + {`;`, Punctuation, Pop(1)}, + }, + "function": { + Include("whitespace"), + Include("statements"), + {`;`, Punctuation, nil}, + {`\{`, Punctuation, Push()}, + {`\}`, Punctuation, Pop(1)}, + }, + "string": { + {`"`, LiteralString, Pop(1)}, + {`\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})`, LiteralStringEscape, nil}, + {`[^\\"\n]+`, LiteralString, nil}, + {`\\\n`, LiteralString, nil}, + {`\\`, 
LiteralString, nil}, + }, + "macro": { + {`(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)`, ByGroups(CommentPreproc, Text, CommentPreprocFile), nil}, + {`[^/\n]+`, CommentPreproc, nil}, + {`/[*](.|\n)*?[*]/`, CommentMultiline, nil}, + {`//.*?\n`, CommentSingle, Pop(1)}, + {`/`, CommentPreproc, nil}, + {`(?<=\\)\n`, CommentPreproc, nil}, + {`\n`, CommentPreproc, Pop(1)}, + }, + "if0": { + {`^\s*#if.*?(?|\\[<|\\[|\\?\\?|\\?|>\\}|>]|>|=|<-|<|;;|;|:>|:=|::|:|\\.\\.|\\.|->|-\\.|-|,|\\+|\\*|\\)|\\(|&&|&|#|!=)", Operator, nil}, + {`([=<>@^|&+\*/$%-]|[!?~])?[!$%&*+\./:<=>?@^|~-]`, Operator, nil}, + {`\b(and|asr|land|lor|lsl|lxor|mod|or)\b`, OperatorWord, nil}, + {`\b(unit|int|float|bool|string|char|list|array)\b`, KeywordType, nil}, + {`[^\W\d][\w']*`, Name, nil}, + {`-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)`, LiteralNumberFloat, nil}, + {`0[xX][\da-fA-F][\da-fA-F_]*`, LiteralNumberHex, nil}, + {`0[oO][0-7][0-7_]*`, LiteralNumberOct, nil}, + {`0[bB][01][01_]*`, LiteralNumberBin, nil}, + {`\d[\d_]*`, LiteralNumberInteger, nil}, + {`'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'`, LiteralStringChar, nil}, + {`'.'`, LiteralStringChar, nil}, + {`'`, Keyword, nil}, + {`"`, LiteralStringDouble, Push("string")}, + {`[~?][a-z][\w\']*:`, NameVariable, nil}, + }, + "comment": { + {`[^(*)]+`, Comment, nil}, + {`\(\*`, Comment, Push()}, + {`\*\)`, Comment, Pop(1)}, + {`[(*)]`, Comment, nil}, + }, + "string": { + {`[^\\"]+`, LiteralStringDouble, nil}, + Include("escape-sequence"), + {`\\\n`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + "dotted": { + {`\s+`, Text, nil}, + {`\.`, Punctuation, nil}, + {`[A-Z][\w\']*(?=\s*\.)`, NameNamespace, nil}, + {`[A-Z][\w\']*`, NameClass, Pop(1)}, + {`[a-z_][\w\']*`, Name, Pop(1)}, + Default(Pop(1)), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/o/octave.go b/vendor/github.com/alecthomas/chroma/lexers/o/octave.go new file mode 100644 index 000000000..99fe298c7 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/o/octave.go @@ -0,0 +1,50 @@ +package o + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Octave lexer. 
+var Octave = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Octave", + Aliases: []string{"octave"}, + Filenames: []string{"*.m"}, + MimeTypes: []string{"text/octave"}, + }, + octaveRules, +)) + +func octaveRules() Rules { + return Rules{ + "root": { + {`[%#].*$`, Comment, nil}, + {`^\s*function`, Keyword, Push("deffunc")}, + {Words(``, `\b`, `__FILE__`, `__LINE__`, `break`, `case`, `catch`, `classdef`, `continue`, `do`, `else`, `elseif`, `end`, `end_try_catch`, `end_unwind_protect`, `endclassdef`, `endevents`, `endfor`, `endfunction`, `endif`, `endmethods`, `endproperties`, `endswitch`, `endwhile`, `events`, `for`, `function`, `get`, `global`, `if`, `methods`, `otherwise`, `persistent`, `properties`, `return`, `set`, `static`, `switch`, `try`, `until`, `unwind_protect`, `unwind_protect_cleanup`, `while`), Keyword, nil}, + {Words(``, `\b`, `addlistener`, `addpath`, `addproperty`, `all`, `and`, `any`, `argnames`, `argv`, `assignin`, `atexit`, `autoload`, `available_graphics_toolkits`, `beep_on_error`, `bitand`, `bitmax`, `bitor`, `bitshift`, `bitxor`, `cat`, `cell`, `cellstr`, `char`, `class`, `clc`, `columns`, `command_line_path`, `completion_append_char`, `completion_matches`, `complex`, `confirm_recursive_rmdir`, `cputime`, `crash_dumps_octave_core`, `ctranspose`, `cumprod`, `cumsum`, `debug_on_error`, `debug_on_interrupt`, `debug_on_warning`, `default_save_options`, `dellistener`, `diag`, `diff`, `disp`, `doc_cache_file`, `do_string_escapes`, `double`, `drawnow`, `e`, `echo_executing_commands`, `eps`, `eq`, `errno`, `errno_list`, `error`, `eval`, `evalin`, `exec`, `exist`, `exit`, `eye`, `false`, `fclear`, `fclose`, `fcntl`, `fdisp`, `feof`, `ferror`, `feval`, `fflush`, `fgetl`, `fgets`, `fieldnames`, `file_in_loadpath`, `file_in_path`, `filemarker`, `filesep`, `find_dir_in_path`, `fixed_point_format`, `fnmatch`, `fopen`, `fork`, `formula`, `fprintf`, `fputs`, `fread`, `freport`, `frewind`, `fscanf`, `fseek`, `fskipl`, `ftell`, `functions`, `fwrite`, `ge`, `genpath`, `get`, `getegid`, `getenv`, `geteuid`, `getgid`, `getpgrp`, `getpid`, `getppid`, `getuid`, `glob`, `gt`, `gui_mode`, `history_control`, `history_file`, `history_size`, `history_timestamp_format_string`, `home`, `horzcat`, `hypot`, `ifelse`, `ignore_function_time_stamp`, `inferiorto`, `info_file`, `info_program`, `inline`, `input`, `intmax`, `intmin`, `ipermute`, `is_absolute_filename`, `isargout`, `isbool`, `iscell`, `iscellstr`, `ischar`, `iscomplex`, `isempty`, `isfield`, `isfloat`, `isglobal`, `ishandle`, `isieee`, `isindex`, `isinteger`, `islogical`, `ismatrix`, `ismethod`, `isnull`, `isnumeric`, `isobject`, `isreal`, `is_rooted_relative_filename`, `issorted`, `isstruct`, `isvarname`, `kbhit`, `keyboard`, `kill`, `lasterr`, `lasterror`, `lastwarn`, `ldivide`, `le`, `length`, `link`, `linspace`, `logical`, `lstat`, `lt`, `make_absolute_filename`, `makeinfo_program`, `max_recursion_depth`, `merge`, `methods`, `mfilename`, `minus`, `mislocked`, `mkdir`, `mkfifo`, `mkstemp`, `mldivide`, `mlock`, `mouse_wheel_zoom`, `mpower`, `mrdivide`, `mtimes`, `munlock`, `nargin`, `nargout`, `native_float_format`, `ndims`, `ne`, `nfields`, `nnz`, `norm`, `not`, `numel`, `nzmax`, `octave_config_info`, `octave_core_file_limit`, `octave_core_file_name`, `octave_core_file_options`, `ones`, `or`, `output_max_field_width`, `output_precision`, `page_output_immediately`, `page_screen_output`, `path`, `pathsep`, `pause`, `pclose`, `permute`, `pi`, `pipe`, `plus`, `popen`, `power`, `print_empty_dimensions`, `printf`, 
`print_struct_array_contents`, `prod`, `program_invocation_name`, `program_name`, `putenv`, `puts`, `pwd`, `quit`, `rats`, `rdivide`, `readdir`, `readlink`, `read_readline_init_file`, `realmax`, `realmin`, `rehash`, `rename`, `repelems`, `re_read_readline_init_file`, `reset`, `reshape`, `resize`, `restoredefaultpath`, `rethrow`, `rmdir`, `rmfield`, `rmpath`, `rows`, `save_header_format_string`, `save_precision`, `saving_history`, `scanf`, `set`, `setenv`, `shell_cmd`, `sighup_dumps_octave_core`, `sigterm_dumps_octave_core`, `silent_functions`, `single`, `size`, `size_equal`, `sizemax`, `sizeof`, `sleep`, `source`, `sparse_auto_mutate`, `split_long_rows`, `sprintf`, `squeeze`, `sscanf`, `stat`, `stderr`, `stdin`, `stdout`, `strcmp`, `strcmpi`, `string_fill_char`, `strncmp`, `strncmpi`, `struct`, `struct_levels_to_print`, `strvcat`, `subsasgn`, `subsref`, `sum`, `sumsq`, `superiorto`, `suppress_verbose_help_message`, `symlink`, `system`, `tic`, `tilde_expand`, `times`, `tmpfile`, `tmpnam`, `toc`, `toupper`, `transpose`, `true`, `typeinfo`, `umask`, `uminus`, `uname`, `undo_string_escapes`, `unlink`, `uplus`, `upper`, `usage`, `usleep`, `vec`, `vectorize`, `vertcat`, `waitpid`, `warning`, `warranty`, `whos_line_format`, `yes_or_no`, `zeros`, `inf`, `Inf`, `nan`, `NaN`, `close`, `load`, `who`, `whos`, `accumarray`, `accumdim`, `acosd`, `acotd`, `acscd`, `addtodate`, `allchild`, `ancestor`, `anova`, `arch_fit`, `arch_rnd`, `arch_test`, `area`, `arma_rnd`, `arrayfun`, `ascii`, `asctime`, `asecd`, `asind`, `assert`, `atand`, `autoreg_matrix`, `autumn`, `axes`, `axis`, `bar`, `barh`, `bartlett`, `bartlett_test`, `beep`, `betacdf`, `betainv`, `betapdf`, `betarnd`, `bicgstab`, `bicubic`, `binary`, `binocdf`, `binoinv`, `binopdf`, `binornd`, `bitcmp`, `bitget`, `bitset`, `blackman`, `blanks`, `blkdiag`, `bone`, `box`, `brighten`, `calendar`, `cast`, `cauchy_cdf`, `cauchy_inv`, `cauchy_pdf`, `cauchy_rnd`, `caxis`, `celldisp`, `center`, `cgs`, `chisquare_test_homogeneity`, `chisquare_test_independence`, `circshift`, `cla`, `clabel`, `clf`, `clock`, `cloglog`, `closereq`, `colon`, `colorbar`, `colormap`, `colperm`, `comet`, `common_size`, `commutation_matrix`, `compan`, `compare_versions`, `compass`, `computer`, `cond`, `condest`, `contour`, `contourc`, `contourf`, `contrast`, `conv`, `convhull`, `cool`, `copper`, `copyfile`, `cor`, `corrcoef`, `cor_test`, `cosd`, `cotd`, `cov`, `cplxpair`, `cross`, `cscd`, `cstrcat`, `csvread`, `csvwrite`, `ctime`, `cumtrapz`, `curl`, `cut`, `cylinder`, `date`, `datenum`, `datestr`, `datetick`, `datevec`, `dblquad`, `deal`, `deblank`, `deconv`, `delaunay`, `delaunayn`, `delete`, `demo`, `detrend`, `diffpara`, `diffuse`, `dir`, `discrete_cdf`, `discrete_inv`, `discrete_pdf`, `discrete_rnd`, `display`, `divergence`, `dlmwrite`, `dos`, `dsearch`, `dsearchn`, `duplication_matrix`, `durbinlevinson`, `ellipsoid`, `empirical_cdf`, `empirical_inv`, `empirical_pdf`, `empirical_rnd`, `eomday`, `errorbar`, `etime`, `etreeplot`, `example`, `expcdf`, `expinv`, `expm`, `exppdf`, `exprnd`, `ezcontour`, `ezcontourf`, `ezmesh`, `ezmeshc`, `ezplot`, `ezpolar`, `ezsurf`, `ezsurfc`, `factor`, `factorial`, `fail`, `fcdf`, `feather`, `fftconv`, `fftfilt`, `fftshift`, `figure`, `fileattrib`, `fileparts`, `fill`, `findall`, `findobj`, `findstr`, `finv`, `flag`, `flipdim`, `fliplr`, `flipud`, `fpdf`, `fplot`, `fractdiff`, `freqz`, `freqz_plot`, `frnd`, `fsolve`, `f_test_regression`, `ftp`, `fullfile`, `fzero`, `gamcdf`, `gaminv`, `gampdf`, `gamrnd`, `gca`, `gcbf`, `gcbo`, `gcf`, `genvarname`, 
`geocdf`, `geoinv`, `geopdf`, `geornd`, `getfield`, `ginput`, `glpk`, `gls`, `gplot`, `gradient`, `graphics_toolkit`, `gray`, `grid`, `griddata`, `griddatan`, `gtext`, `gunzip`, `gzip`, `hadamard`, `hamming`, `hankel`, `hanning`, `hggroup`, `hidden`, `hilb`, `hist`, `histc`, `hold`, `hot`, `hotelling_test`, `housh`, `hsv`, `hurst`, `hygecdf`, `hygeinv`, `hygepdf`, `hygernd`, `idivide`, `ifftshift`, `image`, `imagesc`, `imfinfo`, `imread`, `imshow`, `imwrite`, `index`, `info`, `inpolygon`, `inputname`, `interpft`, `interpn`, `intersect`, `invhilb`, `iqr`, `isa`, `isdefinite`, `isdir`, `is_duplicate_entry`, `isequal`, `isequalwithequalnans`, `isfigure`, `ishermitian`, `ishghandle`, `is_leap_year`, `isletter`, `ismac`, `ismember`, `ispc`, `isprime`, `isprop`, `isscalar`, `issquare`, `isstrprop`, `issymmetric`, `isunix`, `is_valid_file_id`, `isvector`, `jet`, `kendall`, `kolmogorov_smirnov_cdf`, `kolmogorov_smirnov_test`, `kruskal_wallis_test`, `krylov`, `kurtosis`, `laplace_cdf`, `laplace_inv`, `laplace_pdf`, `laplace_rnd`, `legend`, `legendre`, `license`, `line`, `linkprop`, `list_primes`, `loadaudio`, `loadobj`, `logistic_cdf`, `logistic_inv`, `logistic_pdf`, `logistic_rnd`, `logit`, `loglog`, `loglogerr`, `logm`, `logncdf`, `logninv`, `lognpdf`, `lognrnd`, `logspace`, `lookfor`, `ls_command`, `lsqnonneg`, `magic`, `mahalanobis`, `manova`, `matlabroot`, `mcnemar_test`, `mean`, `meansq`, `median`, `menu`, `mesh`, `meshc`, `meshgrid`, `meshz`, `mexext`, `mget`, `mkpp`, `mode`, `moment`, `movefile`, `mpoles`, `mput`, `namelengthmax`, `nargchk`, `nargoutchk`, `nbincdf`, `nbininv`, `nbinpdf`, `nbinrnd`, `nchoosek`, `ndgrid`, `newplot`, `news`, `nonzeros`, `normcdf`, `normest`, `norminv`, `normpdf`, `normrnd`, `now`, `nthroot`, `null`, `ocean`, `ols`, `onenormest`, `optimget`, `optimset`, `orderfields`, `orient`, `orth`, `pack`, `pareto`, `parseparams`, `pascal`, `patch`, `pathdef`, `pcg`, `pchip`, `pcolor`, `pcr`, `peaks`, `periodogram`, `perl`, `perms`, `pie`, `pink`, `planerot`, `playaudio`, `plot`, `plotmatrix`, `plotyy`, `poisscdf`, `poissinv`, `poisspdf`, `poissrnd`, `polar`, `poly`, `polyaffine`, `polyarea`, `polyderiv`, `polyfit`, `polygcd`, `polyint`, `polyout`, `polyreduce`, `polyval`, `polyvalm`, `postpad`, `powerset`, `ppder`, `ppint`, `ppjumps`, `ppplot`, `ppval`, `pqpnonneg`, `prepad`, `primes`, `print`, `print_usage`, `prism`, `probit`, `qp`, `qqplot`, `quadcc`, `quadgk`, `quadl`, `quadv`, `quiver`, `qzhess`, `rainbow`, `randi`, `range`, `rank`, `ranks`, `rat`, `reallog`, `realpow`, `realsqrt`, `record`, `rectangle_lw`, `rectangle_sw`, `rectint`, `refresh`, `refreshdata`, `regexptranslate`, `repmat`, `residue`, `ribbon`, `rindex`, `roots`, `rose`, `rosser`, `rotdim`, `rref`, `run`, `run_count`, `rundemos`, `run_test`, `runtests`, `saveas`, `saveaudio`, `saveobj`, `savepath`, `scatter`, `secd`, `semilogx`, `semilogxerr`, `semilogy`, `semilogyerr`, `setaudio`, `setdiff`, `setfield`, `setxor`, `shading`, `shift`, `shiftdim`, `sign_test`, `sinc`, `sind`, `sinetone`, `sinewave`, `skewness`, `slice`, `sombrero`, `sortrows`, `spaugment`, `spconvert`, `spdiags`, `spearman`, `spectral_adf`, `spectral_xdf`, `specular`, `speed`, `spencer`, `speye`, `spfun`, `sphere`, `spinmap`, `spline`, `spones`, `sprand`, `sprandn`, `sprandsym`, `spring`, `spstats`, `spy`, `sqp`, `stairs`, `statistics`, `std`, `stdnormal_cdf`, `stdnormal_inv`, `stdnormal_pdf`, `stdnormal_rnd`, `stem`, `stft`, `strcat`, `strchr`, `strjust`, `strmatch`, `strread`, `strsplit`, `strtok`, `strtrim`, `strtrunc`, `structfun`, 
`studentize`, `subplot`, `subsindex`, `subspace`, `substr`, `substruct`, `summer`, `surf`, `surface`, `surfc`, `surfl`, `surfnorm`, `svds`, `swapbytes`, `sylvester_matrix`, `symvar`, `synthesis`, `table`, `tand`, `tar`, `tcdf`, `tempdir`, `tempname`, `test`, `text`, `textread`, `textscan`, `tinv`, `title`, `toeplitz`, `tpdf`, `trace`, `trapz`, `treelayout`, `treeplot`, `triangle_lw`, `triangle_sw`, `tril`, `trimesh`, `triplequad`, `triplot`, `trisurf`, `triu`, `trnd`, `tsearchn`, `t_test`, `t_test_regression`, `type`, `unidcdf`, `unidinv`, `unidpdf`, `unidrnd`, `unifcdf`, `unifinv`, `unifpdf`, `unifrnd`, `union`, `unique`, `unix`, `unmkpp`, `unpack`, `untabify`, `untar`, `unwrap`, `unzip`, `u_test`, `validatestring`, `vander`, `var`, `var_test`, `vech`, `ver`, `version`, `view`, `voronoi`, `voronoin`, `waitforbuttonpress`, `wavread`, `wavwrite`, `wblcdf`, `wblinv`, `wblpdf`, `wblrnd`, `weekday`, `welch_test`, `what`, `white`, `whitebg`, `wienrnd`, `wilcoxon_test`, `wilkinson`, `winter`, `xlabel`, `xlim`, `ylabel`, `yulewalker`, `zip`, `zlabel`, `z_test`, `airy`, `amd`, `balance`, `besselh`, `besseli`, `besselj`, `besselk`, `bessely`, `bitpack`, `bsxfun`, `builtin`, `ccolamd`, `cellfun`, `cellslices`, `chol`, `choldelete`, `cholinsert`, `cholinv`, `cholshift`, `cholupdate`, `colamd`, `colloc`, `convhulln`, `convn`, `csymamd`, `cummax`, `cummin`, `daspk`, `daspk_options`, `dasrt`, `dasrt_options`, `dassl`, `dassl_options`, `dbclear`, `dbdown`, `dbstack`, `dbstatus`, `dbstop`, `dbtype`, `dbup`, `dbwhere`, `det`, `dlmread`, `dmperm`, `dot`, `eig`, `eigs`, `endgrent`, `endpwent`, `etree`, `fft`, `fftn`, `fftw`, `filter`, `find`, `full`, `gcd`, `getgrent`, `getgrgid`, `getgrnam`, `getpwent`, `getpwnam`, `getpwuid`, `getrusage`, `givens`, `gmtime`, `gnuplot_binary`, `hess`, `ifft`, `ifftn`, `inv`, `isdebugmode`, `issparse`, `kron`, `localtime`, `lookup`, `lsode`, `lsode_options`, `lu`, `luinc`, `luupdate`, `matrix_type`, `max`, `min`, `mktime`, `pinv`, `qr`, `qrdelete`, `qrinsert`, `qrshift`, `qrupdate`, `quad`, `quad_options`, `qz`, `rand`, `rande`, `randg`, `randn`, `randp`, `randperm`, `rcond`, `regexp`, `regexpi`, `regexprep`, `schur`, `setgrent`, `setpwent`, `sort`, `spalloc`, `sparse`, `spparms`, `sprank`, `sqrtm`, `strfind`, `strftime`, `strptime`, `strrep`, `svd`, `svd_driver`, `syl`, `symamd`, `symbfact`, `symrcm`, `time`, `tsearch`, `typecast`, `urlread`, `urlwrite`, `abs`, `acos`, `acosh`, `acot`, `acoth`, `acsc`, `acsch`, `angle`, `arg`, `asec`, `asech`, `asin`, `asinh`, `atan`, `atanh`, `beta`, `betainc`, `betaln`, `bincoeff`, `cbrt`, `ceil`, `conj`, `cos`, `cosh`, `cot`, `coth`, `csc`, `csch`, `erf`, `erfc`, `erfcx`, `erfinv`, `exp`, `finite`, `fix`, `floor`, `fmod`, `gamma`, `gammainc`, `gammaln`, `imag`, `isalnum`, `isalpha`, `isascii`, `iscntrl`, `isdigit`, `isfinite`, `isgraph`, `isinf`, `islower`, `isna`, `isnan`, `isprint`, `ispunct`, `isspace`, `isupper`, `isxdigit`, `lcm`, `lgamma`, `log`, `lower`, `mod`, `real`, `rem`, `round`, `roundb`, `sec`, `sech`, `sign`, `sin`, `sinh`, `sqrt`, `tan`, `tanh`, `toascii`, `tolower`, `xor`), NameBuiltin, nil}, + {Words(``, `\b`, `EDITOR`, `EXEC_PATH`, `I`, `IMAGE_PATH`, `NA`, `OCTAVE_HOME`, `OCTAVE_VERSION`, `PAGER`, `PAGER_FLAGS`, `SEEK_CUR`, `SEEK_END`, `SEEK_SET`, `SIG`, `S_ISBLK`, `S_ISCHR`, `S_ISDIR`, `S_ISFIFO`, `S_ISLNK`, `S_ISREG`, `S_ISSOCK`, `WCONTINUE`, `WCOREDUMP`, `WEXITSTATUS`, `WIFCONTINUED`, `WIFEXITED`, `WIFSIGNALED`, `WIFSTOPPED`, `WNOHANG`, `WSTOPSIG`, `WTERMSIG`, `WUNTRACED`), NameConstant, nil}, + {`-=|!=|!|/=|--`, 
Operator, nil}, + {`-|==|~=|<|>|<=|>=|&&|&|~|\|\|?`, Operator, nil}, + {`\*=|\+=|\^=|\/=|\\=|\*\*|\+\+|\.\*\*`, Operator, nil}, + {`\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\`, Operator, nil}, + {`[\[\](){}:@.,]`, Punctuation, nil}, + {`=|:|;`, Punctuation, nil}, + {`"[^"]*"`, LiteralString, nil}, + {`(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`\d+[eEf][+-]?[0-9]+`, LiteralNumberFloat, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`(?<=[\w)\].])\'+`, Operator, nil}, + {`(?=|<=|<>|\+|-|=|>|<|\*|/|%)`, Operator, nil}, + {`(;|,|\)|\(|\.)`, Punctuation, nil}, + {Words(``, `\b`, `истина`, `true`, `ложь`, `false`, `и`, `and`, `или`, `or`, `не`, `not`), Operator, nil}, + {Words(``, `\b`, `если`, `if`, `тогда`, `then`, `иначе`, `else`, `иначеесли`, `elsif`, `конецесли`, `endif`), Operator, nil}, + {Words(``, `\b`, `для`, `for`, `каждого`, `each`, `из`, `in`, `цикл`, `do`, `пока`, `while`, `конеццикла`, `enddo`, `по`, `to`), Operator, nil}, + {Words(``, `\b`, `прервать`, `break`, `продолжить`, `continue`, `возврат`, `return`, `перейти`, `goto`), Operator, nil}, + {Words(``, `\b`, `процедура`, `procedure`, `конецпроцедуры`, `endprocedure`, `функция`, `function`, `конецфункции`, `endfunction`), Keyword, nil}, + {Words(``, `\b`, `новый`, `new`, `знач`, `val`, `экспорт`, `export`, `перем`, `var`), Keyword, nil}, + {Words(``, `\b`, `попытка`, `try`, `исключение`, `except`, `вызватьисключение`, `raise`, `конецпопытки`, `endtry`), Keyword, nil}, + {Words(``, `\b`, `выполнить`, `execute`, `вычислить`, `eval`), Keyword, nil}, + {`"`, LiteralString, Push("string")}, + {`[_а-яА-Я0-9][а-яА-Я0-9]*`, Name, nil}, + {`[_\w][\w]*`, Name, nil}, + }, + "string": { + {`""`, LiteralString, nil}, + {`"C?`, LiteralString, Pop(1)}, + {`[^"]+`, LiteralString, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/o/openedgeabl.go b/vendor/github.com/alecthomas/chroma/lexers/o/openedgeabl.go new file mode 100644 index 000000000..55619711d --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/o/openedgeabl.go @@ -0,0 +1,50 @@ +package o + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// OpenedgeAbl lexer. 
+var OpenEdgeABL = internal.Register(MustNewLazyLexer( + &Config{ + Name: "OpenEdge ABL", + Aliases: []string{"openedge", "abl", "progress", "openedgeabl"}, + Filenames: []string{"*.p", "*.cls", "*.w", "*.i"}, + MimeTypes: []string{"text/x-openedge", "application/x-openedge"}, + }, + func() Rules { + return Rules{ + "root": { + {`/\*`, CommentMultiline, Push("comment")}, + {`//.*?$`, CommentSingle, nil}, + {`\{`, CommentPreproc, Push("preprocessor")}, + {`\s*&.*`, CommentPreproc, nil}, + {`0[xX][0-9a-fA-F]+[LlUu]*`, LiteralNumberHex, nil}, + {`(?i)(DEFINE|DEF|DEFI|DEFIN)\b`, KeywordDeclaration, nil}, + {`(?i)(^|(?<=[^\w\-]))(CHARACTER|CHAR|CHARA|CHARAC|CHARACT|CHARACTE|COM-HANDLE|DATE|DATETIME|DATETIME-TZ|DECIMAL|DEC|DECI|DECIM|DECIMA|HANDLE|INT64|INTEGER|INT|INTE|INTEG|INTEGE|LOGICAL|LONGCHAR|MEMPTR|RAW|RECID|ROWID)\s*($|(?=[^\w\-]))`, KeywordType, nil}, + {Words(`(?i)(^|(?<=[^\w\-]))`, `\s*($|(?=[^\w\-]))`, `ABS`, `ABSO`, `ABSOL`, `ABSOLU`, `ABSOLUT`, `ABSOLUTE`, `ABSTRACT`, `ACCELERATOR`, `ACCUM`, `ACCUMU`, `ACCUMUL`, `ACCUMULA`, `ACCUMULAT`, `ACCUMULATE`, `ACTIVE-FORM`, `ACTIVE-WINDOW`, `ADD`, `ADD-BUFFER`, `ADD-CALC-COLUMN`, `ADD-COLUMNS-FROM`, `ADD-EVENTS-PROCEDURE`, `ADD-FIELDS-FROM`, `ADD-FIRST`, `ADD-INDEX-FIELD`, `ADD-LAST`, `ADD-LIKE-COLUMN`, `ADD-LIKE-FIELD`, `ADD-LIKE-INDEX`, `ADD-NEW-FIELD`, `ADD-NEW-INDEX`, `ADD-SCHEMA-LOCATION`, `ADD-SUPER-PROCEDURE`, `ADM-DATA`, `ADVISE`, `ALERT-BOX`, `ALIAS`, `ALL`, `ALLOW-COLUMN-SEARCHING`, `ALLOW-REPLICATION`, `ALTER`, `ALWAYS-ON-TOP`, `AMBIG`, `AMBIGU`, `AMBIGUO`, `AMBIGUOU`, `AMBIGUOUS`, `ANALYZ`, `ANALYZE`, `AND`, `ANSI-ONLY`, `ANY`, `ANYWHERE`, `APPEND`, `APPL-ALERT`, `APPL-ALERT-`, `APPL-ALERT-B`, `APPL-ALERT-BO`, `APPL-ALERT-BOX`, `APPL-ALERT-BOXE`, `APPL-ALERT-BOXES`, `APPL-CONTEXT-ID`, `APPLICATION`, `APPLY`, `APPSERVER-INFO`, `APPSERVER-PASSWORD`, `APPSERVER-USERID`, `ARRAY-MESSAGE`, `AS`, `ASC`, `ASCE`, `ASCEN`, `ASCEND`, `ASCENDI`, `ASCENDIN`, `ASCENDING`, `ASK-OVERWRITE`, `ASSEMBLY`, `ASSIGN`, `ASYNC-REQUEST-COUNT`, `ASYNC-REQUEST-HANDLE`, `ASYNCHRONOUS`, `AT`, `ATTACHED-PAIRLIST`, `ATTR`, `ATTR-SPACE`, `ATTRI`, `ATTRIB`, `ATTRIBU`, `ATTRIBUT`, `AUDIT-CONTROL`, `AUDIT-ENABLED`, `AUDIT-EVENT-CONTEXT`, `AUDIT-POLICY`, `AUTHENTICATION-FAILED`, `AUTHORIZATION`, `AUTO-COMP`, `AUTO-COMPL`, `AUTO-COMPLE`, `AUTO-COMPLET`, `AUTO-COMPLETI`, `AUTO-COMPLETIO`, `AUTO-COMPLETION`, `AUTO-END-KEY`, `AUTO-ENDKEY`, `AUTO-GO`, `AUTO-IND`, `AUTO-INDE`, `AUTO-INDEN`, `AUTO-INDENT`, `AUTO-RESIZE`, `AUTO-RET`, `AUTO-RETU`, `AUTO-RETUR`, `AUTO-RETURN`, `AUTO-SYNCHRONIZE`, `AUTO-Z`, `AUTO-ZA`, `AUTO-ZAP`, `AUTOMATIC`, `AVAIL`, `AVAILA`, `AVAILAB`, `AVAILABL`, `AVAILABLE`, `AVAILABLE-FORMATS`, `AVE`, `AVER`, `AVERA`, `AVERAG`, `AVERAGE`, `AVG`, `BACK`, `BACKG`, `BACKGR`, `BACKGRO`, `BACKGROU`, `BACKGROUN`, `BACKGROUND`, `BACKWARD`, `BACKWARDS`, `BASE64-DECODE`, `BASE64-ENCODE`, `BASE-ADE`, `BASE-KEY`, `BATCH`, `BATCH-`, `BATCH-M`, `BATCH-MO`, `BATCH-MOD`, `BATCH-MODE`, `BATCH-SIZE`, `BEFORE-H`, `BEFORE-HI`, `BEFORE-HID`, `BEFORE-HIDE`, `BEGIN-EVENT-GROUP`, `BEGINS`, `BELL`, `BETWEEN`, `BGC`, `BGCO`, `BGCOL`, `BGCOLO`, `BGCOLOR`, `BIG-ENDIAN`, `BINARY`, `BIND`, `BIND-WHERE`, `BLANK`, `BLOCK-ITERATION-DISPLAY`, `BLOCK-LEVEL`, `BORDER-B`, `BORDER-BO`, `BORDER-BOT`, `BORDER-BOTT`, `BORDER-BOTTO`, `BORDER-BOTTOM-CHARS`, `BORDER-BOTTOM-P`, `BORDER-BOTTOM-PI`, `BORDER-BOTTOM-PIX`, `BORDER-BOTTOM-PIXE`, `BORDER-BOTTOM-PIXEL`, `BORDER-BOTTOM-PIXELS`, `BORDER-L`, `BORDER-LE`, `BORDER-LEF`, `BORDER-LEFT`, `BORDER-LEFT-`, `BORDER-LEFT-C`, `BORDER-LEFT-CH`, 
`BORDER-LEFT-CHA`, `BORDER-LEFT-CHAR`, `BORDER-LEFT-CHARS`, `BORDER-LEFT-P`, `BORDER-LEFT-PI`, `BORDER-LEFT-PIX`, `BORDER-LEFT-PIXE`, `BORDER-LEFT-PIXEL`, `BORDER-LEFT-PIXELS`, `BORDER-R`, `BORDER-RI`, `BORDER-RIG`, `BORDER-RIGH`, `BORDER-RIGHT`, `BORDER-RIGHT-`, `BORDER-RIGHT-C`, `BORDER-RIGHT-CH`, `BORDER-RIGHT-CHA`, `BORDER-RIGHT-CHAR`, `BORDER-RIGHT-CHARS`, `BORDER-RIGHT-P`, `BORDER-RIGHT-PI`, `BORDER-RIGHT-PIX`, `BORDER-RIGHT-PIXE`, `BORDER-RIGHT-PIXEL`, `BORDER-RIGHT-PIXELS`, `BORDER-T`, `BORDER-TO`, `BORDER-TOP`, `BORDER-TOP-`, `BORDER-TOP-C`, `BORDER-TOP-CH`, `BORDER-TOP-CHA`, `BORDER-TOP-CHAR`, `BORDER-TOP-CHARS`, `BORDER-TOP-P`, `BORDER-TOP-PI`, `BORDER-TOP-PIX`, `BORDER-TOP-PIXE`, `BORDER-TOP-PIXEL`, `BORDER-TOP-PIXELS`, `BOX`, `BOX-SELECT`, `BOX-SELECTA`, `BOX-SELECTAB`, `BOX-SELECTABL`, `BOX-SELECTABLE`, `BREAK`, `BROWSE`, `BUFFER`, `BUFFER-CHARS`, `BUFFER-COMPARE`, `BUFFER-COPY`, `BUFFER-CREATE`, `BUFFER-DELETE`, `BUFFER-FIELD`, `BUFFER-HANDLE`, `BUFFER-LINES`, `BUFFER-NAME`, `BUFFER-PARTITION-ID`, `BUFFER-RELEASE`, `BUFFER-VALUE`, `BUTTON`, `BUTTONS`, `BY`, `BY-POINTER`, `BY-VARIANT-POINTER`, `CACHE`, `CACHE-SIZE`, `CALL`, `CALL-NAME`, `CALL-TYPE`, `CAN-CREATE`, `CAN-DELETE`, `CAN-DO`, `CAN-DO-DOMAIN-SUPPORT`, `CAN-FIND`, `CAN-QUERY`, `CAN-READ`, `CAN-SET`, `CAN-WRITE`, `CANCEL-BREAK`, `CANCEL-BUTTON`, `CAPS`, `CAREFUL-PAINT`, `CASE`, `CASE-SEN`, `CASE-SENS`, `CASE-SENSI`, `CASE-SENSIT`, `CASE-SENSITI`, `CASE-SENSITIV`, `CASE-SENSITIVE`, `CAST`, `CATCH`, `CDECL`, `CENTER`, `CENTERE`, `CENTERED`, `CHAINED`, `CHARACTER`, `CHARACTER_LENGTH`, `CHARSET`, `CHECK`, `CHECKED`, `CHOOSE`, `CHR`, `CLASS`, `CLASS-TYPE`, `CLEAR`, `CLEAR-APPL-CONTEXT`, `CLEAR-LOG`, `CLEAR-SELECT`, `CLEAR-SELECTI`, `CLEAR-SELECTIO`, `CLEAR-SELECTION`, `CLEAR-SORT-ARROW`, `CLEAR-SORT-ARROWS`, `CLIENT-CONNECTION-ID`, `CLIENT-PRINCIPAL`, `CLIENT-TTY`, `CLIENT-TYPE`, `CLIENT-WORKSTATION`, `CLIPBOARD`, `CLOSE`, `CLOSE-LOG`, `CODE`, `CODEBASE-LOCATOR`, `CODEPAGE`, `CODEPAGE-CONVERT`, `COL`, `COL-OF`, `COLLATE`, `COLON`, `COLON-ALIGN`, `COLON-ALIGNE`, `COLON-ALIGNED`, `COLOR`, `COLOR-TABLE`, `COLU`, `COLUM`, `COLUMN`, `COLUMN-BGCOLOR`, `COLUMN-DCOLOR`, `COLUMN-FGCOLOR`, `COLUMN-FONT`, `COLUMN-LAB`, `COLUMN-LABE`, `COLUMN-LABEL`, `COLUMN-MOVABLE`, `COLUMN-OF`, `COLUMN-PFCOLOR`, `COLUMN-READ-ONLY`, `COLUMN-RESIZABLE`, `COLUMN-SCROLLING`, `COLUMNS`, `COM-HANDLE`, `COM-SELF`, `COMBO-BOX`, `COMMAND`, `COMPARES`, `COMPILE`, `COMPILER`, `COMPLETE`, `CONFIG-NAME`, `CONNECT`, `CONNECTED`, `CONSTRUCTOR`, `CONTAINS`, `CONTENTS`, `CONTEXT`, `CONTEXT-HELP`, `CONTEXT-HELP-FILE`, `CONTEXT-HELP-ID`, `CONTEXT-POPUP`, `CONTROL`, `CONTROL-BOX`, `CONTROL-FRAME`, `CONVERT`, `CONVERT-3D-COLORS`, `CONVERT-TO-OFFS`, `CONVERT-TO-OFFSE`, `CONVERT-TO-OFFSET`, `COPY-DATASET`, `COPY-LOB`, `COPY-SAX-ATTRIBUTES`, `COPY-TEMP-TABLE`, `COUNT`, `COUNT-OF`, `CPCASE`, `CPCOLL`, `CPINTERNAL`, `CPLOG`, `CPPRINT`, `CPRCODEIN`, `CPRCODEOUT`, `CPSTREAM`, `CPTERM`, `CRC-VALUE`, `CREATE`, `CREATE-LIKE`, `CREATE-LIKE-SEQUENTIAL`, `CREATE-NODE-NAMESPACE`, `CREATE-RESULT-LIST-ENTRY`, `CREATE-TEST-FILE`, `CURRENT`, `CURRENT-CHANGED`, `CURRENT-COLUMN`, `CURRENT-ENV`, `CURRENT-ENVI`, `CURRENT-ENVIR`, `CURRENT-ENVIRO`, `CURRENT-ENVIRON`, `CURRENT-ENVIRONM`, `CURRENT-ENVIRONME`, `CURRENT-ENVIRONMEN`, `CURRENT-ENVIRONMENT`, `CURRENT-ITERATION`, `CURRENT-LANG`, `CURRENT-LANGU`, `CURRENT-LANGUA`, `CURRENT-LANGUAG`, `CURRENT-LANGUAGE`, `CURRENT-QUERY`, `CURRENT-REQUEST-INFO`, `CURRENT-RESPONSE-INFO`, `CURRENT-RESULT-ROW`, `CURRENT-ROW-MODIFIED`, `CURRENT-VALUE`, 
`CURRENT-WINDOW`, `CURRENT_DATE`, `CURS`, `CURSO`, `CURSOR`, `CURSOR-CHAR`, `CURSOR-LINE`, `CURSOR-OFFSET`, `DATA-BIND`, `DATA-ENTRY-RET`, `DATA-ENTRY-RETU`, `DATA-ENTRY-RETUR`, `DATA-ENTRY-RETURN`, `DATA-REL`, `DATA-RELA`, `DATA-RELAT`, `DATA-RELATI`, `DATA-RELATIO`, `DATA-RELATION`, `DATA-SOURCE`, `DATA-SOURCE-COMPLETE-MAP`, `DATA-SOURCE-MODIFIED`, `DATA-SOURCE-ROWID`, `DATA-T`, `DATA-TY`, `DATA-TYP`, `DATA-TYPE`, `DATABASE`, `DATASERVERS`, `DATASET`, `DATASET-HANDLE`, `DATE`, `DATE-F`, `DATE-FO`, `DATE-FOR`, `DATE-FORM`, `DATE-FORMA`, `DATE-FORMAT`, `DAY`, `DB-CONTEXT`, `DB-REFERENCES`, `DBCODEPAGE`, `DBCOLLATION`, `DBNAME`, `DBPARAM`, `DBREST`, `DBRESTR`, `DBRESTRI`, `DBRESTRIC`, `DBRESTRICT`, `DBRESTRICTI`, `DBRESTRICTIO`, `DBRESTRICTION`, `DBRESTRICTIONS`, `DBTASKID`, `DBTYPE`, `DBVERS`, `DBVERSI`, `DBVERSIO`, `DBVERSION`, `DCOLOR`, `DDE`, `DDE-ERROR`, `DDE-I`, `DDE-ID`, `DDE-ITEM`, `DDE-NAME`, `DDE-TOPIC`, `DEBLANK`, `DEBU`, `DEBUG`, `DEBUG-ALERT`, `DEBUG-LIST`, `DEBUGGER`, `DECIMAL`, `DECIMALS`, `DECLARE`, `DECLARE-NAMESPACE`, `DECRYPT`, `DEFAULT`, `DEFAULT-B`, `DEFAULT-BU`, `DEFAULT-BUFFER-HANDLE`, `DEFAULT-BUT`, `DEFAULT-BUTT`, `DEFAULT-BUTTO`, `DEFAULT-BUTTON`, `DEFAULT-COMMIT`, `DEFAULT-EX`, `DEFAULT-EXT`, `DEFAULT-EXTE`, `DEFAULT-EXTEN`, `DEFAULT-EXTENS`, `DEFAULT-EXTENSI`, `DEFAULT-EXTENSIO`, `DEFAULT-EXTENSION`, `DEFAULT-NOXL`, `DEFAULT-NOXLA`, `DEFAULT-NOXLAT`, `DEFAULT-NOXLATE`, `DEFAULT-VALUE`, `DEFAULT-WINDOW`, `DEFINE`, `DEFINE-USER-EVENT-MANAGER`, `DEFINED`, `DEL`, `DELE`, `DELEGATE`, `DELET`, `DELETE PROCEDURE`, `DELETE`, `DELETE-CHAR`, `DELETE-CHARA`, `DELETE-CHARAC`, `DELETE-CHARACT`, `DELETE-CHARACTE`, `DELETE-CHARACTER`, `DELETE-CURRENT-ROW`, `DELETE-LINE`, `DELETE-RESULT-LIST-ENTRY`, `DELETE-SELECTED-ROW`, `DELETE-SELECTED-ROWS`, `DELIMITER`, `DESC`, `DESCE`, `DESCEN`, `DESCEND`, `DESCENDI`, `DESCENDIN`, `DESCENDING`, `DESELECT-FOCUSED-ROW`, `DESELECT-ROWS`, `DESELECT-SELECTED-ROW`, `DESELECTION`, `DESTRUCTOR`, `DIALOG-BOX`, `DICT`, `DICTI`, `DICTIO`, `DICTION`, `DICTIONA`, `DICTIONAR`, `DICTIONARY`, `DIR`, `DISABLE`, `DISABLE-AUTO-ZAP`, `DISABLE-DUMP-TRIGGERS`, `DISABLE-LOAD-TRIGGERS`, `DISABLED`, `DISCON`, `DISCONN`, `DISCONNE`, `DISCONNEC`, `DISCONNECT`, `DISP`, `DISPL`, `DISPLA`, `DISPLAY`, `DISPLAY-MESSAGE`, `DISPLAY-T`, `DISPLAY-TY`, `DISPLAY-TYP`, `DISPLAY-TYPE`, `DISTINCT`, `DO`, `DOMAIN-DESCRIPTION`, `DOMAIN-NAME`, `DOMAIN-TYPE`, `DOS`, `DOUBLE`, `DOWN`, `DRAG-ENABLED`, `DROP`, `DROP-DOWN`, `DROP-DOWN-LIST`, `DROP-FILE-NOTIFY`, `DROP-TARGET`, `DS-CLOSE-CURSOR`, `DSLOG-MANAGER`, `DUMP`, `DYNAMIC`, `DYNAMIC-ENUM`, `DYNAMIC-FUNCTION`, `DYNAMIC-INVOKE`, `EACH`, `ECHO`, `EDGE`, `EDGE-`, `EDGE-C`, `EDGE-CH`, `EDGE-CHA`, `EDGE-CHAR`, `EDGE-CHARS`, `EDGE-P`, `EDGE-PI`, `EDGE-PIX`, `EDGE-PIXE`, `EDGE-PIXEL`, `EDGE-PIXELS`, `EDIT-CAN-PASTE`, `EDIT-CAN-UNDO`, `EDIT-CLEAR`, `EDIT-COPY`, `EDIT-CUT`, `EDIT-PASTE`, `EDIT-UNDO`, `EDITING`, `EDITOR`, `ELSE`, `EMPTY`, `EMPTY-TEMP-TABLE`, `ENABLE`, `ENABLED-FIELDS`, `ENCODE`, `ENCRYPT`, `ENCRYPT-AUDIT-MAC-KEY`, `ENCRYPTION-SALT`, `END`, `END-DOCUMENT`, `END-ELEMENT`, `END-EVENT-GROUP`, `END-FILE-DROP`, `END-KEY`, `END-MOVE`, `END-RESIZE`, `END-ROW-RESIZE`, `END-USER-PROMPT`, `ENDKEY`, `ENTERED`, `ENTITY-EXPANSION-LIMIT`, `ENTRY`, `ENUM`, `EQ`, `ERROR`, `ERROR-COL`, `ERROR-COLU`, `ERROR-COLUM`, `ERROR-COLUMN`, `ERROR-ROW`, `ERROR-STACK-TRACE`, `ERROR-STAT`, `ERROR-STATU`, `ERROR-STATUS`, `ESCAPE`, `ETIME`, `EVENT`, `EVENT-GROUP-ID`, `EVENT-PROCEDURE`, `EVENT-PROCEDURE-CONTEXT`, `EVENT-T`, `EVENT-TY`, `EVENT-TYP`, 
`EVENT-TYPE`, `EVENTS`, `EXCEPT`, `EXCLUSIVE`, `EXCLUSIVE-`, `EXCLUSIVE-ID`, `EXCLUSIVE-L`, `EXCLUSIVE-LO`, `EXCLUSIVE-LOC`, `EXCLUSIVE-LOCK`, `EXCLUSIVE-WEB-USER`, `EXECUTE`, `EXISTS`, `EXP`, `EXPAND`, `EXPANDABLE`, `EXPLICIT`, `EXPORT`, `EXPORT-PRINCIPAL`, `EXTENDED`, `EXTENT`, `EXTERNAL`, `FALSE`, `FETCH`, `FETCH-SELECTED-ROW`, `FGC`, `FGCO`, `FGCOL`, `FGCOLO`, `FGCOLOR`, `FIELD`, `FIELDS`, `FILE`, `FILE-CREATE-DATE`, `FILE-CREATE-TIME`, `FILE-INFO`, `FILE-INFOR`, `FILE-INFORM`, `FILE-INFORMA`, `FILE-INFORMAT`, `FILE-INFORMATI`, `FILE-INFORMATIO`, `FILE-INFORMATION`, `FILE-MOD-DATE`, `FILE-MOD-TIME`, `FILE-NAME`, `FILE-OFF`, `FILE-OFFS`, `FILE-OFFSE`, `FILE-OFFSET`, `FILE-SIZE`, `FILE-TYPE`, `FILENAME`, `FILL`, `FILL-IN`, `FILLED`, `FILTERS`, `FINAL`, `FINALLY`, `FIND`, `FIND-BY-ROWID`, `FIND-CASE-SENSITIVE`, `FIND-CURRENT`, `FIND-FIRST`, `FIND-GLOBAL`, `FIND-LAST`, `FIND-NEXT-OCCURRENCE`, `FIND-PREV-OCCURRENCE`, `FIND-SELECT`, `FIND-UNIQUE`, `FIND-WRAP-AROUND`, `FINDER`, `FIRST`, `FIRST-ASYNCH-REQUEST`, `FIRST-CHILD`, `FIRST-COLUMN`, `FIRST-FORM`, `FIRST-OBJECT`, `FIRST-OF`, `FIRST-PROC`, `FIRST-PROCE`, `FIRST-PROCED`, `FIRST-PROCEDU`, `FIRST-PROCEDUR`, `FIRST-PROCEDURE`, `FIRST-SERVER`, `FIRST-TAB-I`, `FIRST-TAB-IT`, `FIRST-TAB-ITE`, `FIRST-TAB-ITEM`, `FIT-LAST-COLUMN`, `FIXED-ONLY`, `FLAT-BUTTON`, `FLOAT`, `FOCUS`, `FOCUSED-ROW`, `FOCUSED-ROW-SELECTED`, `FONT`, `FONT-TABLE`, `FOR`, `FORCE-FILE`, `FORE`, `FOREG`, `FOREGR`, `FOREGRO`, `FOREGROU`, `FOREGROUN`, `FOREGROUND`, `FORM INPUT`, `FORM`, `FORM-LONG-INPUT`, `FORMA`, `FORMAT`, `FORMATTE`, `FORMATTED`, `FORWARD`, `FORWARDS`, `FRAGMEN`, `FRAGMENT`, `FRAM`, `FRAME`, `FRAME-COL`, `FRAME-DB`, `FRAME-DOWN`, `FRAME-FIELD`, `FRAME-FILE`, `FRAME-INDE`, `FRAME-INDEX`, `FRAME-LINE`, `FRAME-NAME`, `FRAME-ROW`, `FRAME-SPA`, `FRAME-SPAC`, `FRAME-SPACI`, `FRAME-SPACIN`, `FRAME-SPACING`, `FRAME-VAL`, `FRAME-VALU`, `FRAME-VALUE`, `FRAME-X`, `FRAME-Y`, `FREQUENCY`, `FROM`, `FROM-C`, `FROM-CH`, `FROM-CHA`, `FROM-CHAR`, `FROM-CHARS`, `FROM-CUR`, `FROM-CURR`, `FROM-CURRE`, `FROM-CURREN`, `FROM-CURRENT`, `FROM-P`, `FROM-PI`, `FROM-PIX`, `FROM-PIXE`, `FROM-PIXEL`, `FROM-PIXELS`, `FULL-HEIGHT`, `FULL-HEIGHT-`, `FULL-HEIGHT-C`, `FULL-HEIGHT-CH`, `FULL-HEIGHT-CHA`, `FULL-HEIGHT-CHAR`, `FULL-HEIGHT-CHARS`, `FULL-HEIGHT-P`, `FULL-HEIGHT-PI`, `FULL-HEIGHT-PIX`, `FULL-HEIGHT-PIXE`, `FULL-HEIGHT-PIXEL`, `FULL-HEIGHT-PIXELS`, `FULL-PATHN`, `FULL-PATHNA`, `FULL-PATHNAM`, `FULL-PATHNAME`, `FULL-WIDTH`, `FULL-WIDTH-`, `FULL-WIDTH-C`, `FULL-WIDTH-CH`, `FULL-WIDTH-CHA`, `FULL-WIDTH-CHAR`, `FULL-WIDTH-CHARS`, `FULL-WIDTH-P`, `FULL-WIDTH-PI`, `FULL-WIDTH-PIX`, `FULL-WIDTH-PIXE`, `FULL-WIDTH-PIXEL`, `FULL-WIDTH-PIXELS`, `FUNCTION`, `FUNCTION-CALL-TYPE`, `GATEWAY`, `GATEWAYS`, `GE`, `GENERATE-MD5`, `GENERATE-PBE-KEY`, `GENERATE-PBE-SALT`, `GENERATE-RANDOM-KEY`, `GENERATE-UUID`, `GET`, `GET-ATTR-CALL-TYPE`, `GET-ATTRIBUTE-NODE`, `GET-BINARY-DATA`, `GET-BLUE`, `GET-BLUE-`, `GET-BLUE-V`, `GET-BLUE-VA`, `GET-BLUE-VAL`, `GET-BLUE-VALU`, `GET-BLUE-VALUE`, `GET-BROWSE-COLUMN`, `GET-BUFFER-HANDLE`, `GET-BYTE`, `GET-CALLBACK-PROC-CONTEXT`, `GET-CALLBACK-PROC-NAME`, `GET-CGI-LIST`, `GET-CGI-LONG-VALUE`, `GET-CGI-VALUE`, `GET-CLASS`, `GET-CODEPAGES`, `GET-COLLATIONS`, `GET-CONFIG-VALUE`, `GET-CURRENT`, `GET-DOUBLE`, `GET-DROPPED-FILE`, `GET-DYNAMIC`, `GET-ERROR-COLUMN`, `GET-ERROR-ROW`, `GET-FILE`, `GET-FILE-NAME`, `GET-FILE-OFFSE`, `GET-FILE-OFFSET`, `GET-FIRST`, `GET-FLOAT`, `GET-GREEN`, `GET-GREEN-`, `GET-GREEN-V`, `GET-GREEN-VA`, `GET-GREEN-VAL`, `GET-GREEN-VALU`, 
`GET-GREEN-VALUE`, `GET-INDEX-BY-NAMESPACE-NAME`, `GET-INDEX-BY-QNAME`, `GET-INT64`, `GET-ITERATION`, `GET-KEY-VAL`, `GET-KEY-VALU`, `GET-KEY-VALUE`, `GET-LAST`, `GET-LOCALNAME-BY-INDEX`, `GET-LONG`, `GET-MESSAGE`, `GET-NEXT`, `GET-NUMBER`, `GET-POINTER-VALUE`, `GET-PREV`, `GET-PRINTERS`, `GET-PROPERTY`, `GET-QNAME-BY-INDEX`, `GET-RED`, `GET-RED-`, `GET-RED-V`, `GET-RED-VA`, `GET-RED-VAL`, `GET-RED-VALU`, `GET-RED-VALUE`, `GET-REPOSITIONED-ROW`, `GET-RGB-VALUE`, `GET-SELECTED`, `GET-SELECTED-`, `GET-SELECTED-W`, `GET-SELECTED-WI`, `GET-SELECTED-WID`, `GET-SELECTED-WIDG`, `GET-SELECTED-WIDGE`, `GET-SELECTED-WIDGET`, `GET-SHORT`, `GET-SIGNATURE`, `GET-SIZE`, `GET-STRING`, `GET-TAB-ITEM`, `GET-TEXT-HEIGHT`, `GET-TEXT-HEIGHT-`, `GET-TEXT-HEIGHT-C`, `GET-TEXT-HEIGHT-CH`, `GET-TEXT-HEIGHT-CHA`, `GET-TEXT-HEIGHT-CHAR`, `GET-TEXT-HEIGHT-CHARS`, `GET-TEXT-HEIGHT-P`, `GET-TEXT-HEIGHT-PI`, `GET-TEXT-HEIGHT-PIX`, `GET-TEXT-HEIGHT-PIXE`, `GET-TEXT-HEIGHT-PIXEL`, `GET-TEXT-HEIGHT-PIXELS`, `GET-TEXT-WIDTH`, `GET-TEXT-WIDTH-`, `GET-TEXT-WIDTH-C`, `GET-TEXT-WIDTH-CH`, `GET-TEXT-WIDTH-CHA`, `GET-TEXT-WIDTH-CHAR`, `GET-TEXT-WIDTH-CHARS`, `GET-TEXT-WIDTH-P`, `GET-TEXT-WIDTH-PI`, `GET-TEXT-WIDTH-PIX`, `GET-TEXT-WIDTH-PIXE`, `GET-TEXT-WIDTH-PIXEL`, `GET-TEXT-WIDTH-PIXELS`, `GET-TYPE-BY-INDEX`, `GET-TYPE-BY-NAMESPACE-NAME`, `GET-TYPE-BY-QNAME`, `GET-UNSIGNED-LONG`, `GET-UNSIGNED-SHORT`, `GET-URI-BY-INDEX`, `GET-VALUE-BY-INDEX`, `GET-VALUE-BY-NAMESPACE-NAME`, `GET-VALUE-BY-QNAME`, `GET-WAIT-STATE`, `GETBYTE`, `GLOBAL`, `GO-ON`, `GO-PEND`, `GO-PENDI`, `GO-PENDIN`, `GO-PENDING`, `GRANT`, `GRAPHIC-E`, `GRAPHIC-ED`, `GRAPHIC-EDG`, `GRAPHIC-EDGE`, `GRID-FACTOR-H`, `GRID-FACTOR-HO`, `GRID-FACTOR-HOR`, `GRID-FACTOR-HORI`, `GRID-FACTOR-HORIZ`, `GRID-FACTOR-HORIZO`, `GRID-FACTOR-HORIZON`, `GRID-FACTOR-HORIZONT`, `GRID-FACTOR-HORIZONTA`, `GRID-FACTOR-HORIZONTAL`, `GRID-FACTOR-V`, `GRID-FACTOR-VE`, `GRID-FACTOR-VER`, `GRID-FACTOR-VERT`, `GRID-FACTOR-VERTI`, `GRID-FACTOR-VERTIC`, `GRID-FACTOR-VERTICA`, `GRID-FACTOR-VERTICAL`, `GRID-SNAP`, `GRID-UNIT-HEIGHT`, `GRID-UNIT-HEIGHT-`, `GRID-UNIT-HEIGHT-C`, `GRID-UNIT-HEIGHT-CH`, `GRID-UNIT-HEIGHT-CHA`, `GRID-UNIT-HEIGHT-CHARS`, `GRID-UNIT-HEIGHT-P`, `GRID-UNIT-HEIGHT-PI`, `GRID-UNIT-HEIGHT-PIX`, `GRID-UNIT-HEIGHT-PIXE`, `GRID-UNIT-HEIGHT-PIXEL`, `GRID-UNIT-HEIGHT-PIXELS`, `GRID-UNIT-WIDTH`, `GRID-UNIT-WIDTH-`, `GRID-UNIT-WIDTH-C`, `GRID-UNIT-WIDTH-CH`, `GRID-UNIT-WIDTH-CHA`, `GRID-UNIT-WIDTH-CHAR`, `GRID-UNIT-WIDTH-CHARS`, `GRID-UNIT-WIDTH-P`, `GRID-UNIT-WIDTH-PI`, `GRID-UNIT-WIDTH-PIX`, `GRID-UNIT-WIDTH-PIXE`, `GRID-UNIT-WIDTH-PIXEL`, `GRID-UNIT-WIDTH-PIXELS`, `GRID-VISIBLE`, `GROUP`, `GT`, `GUID`, `HANDLE`, `HANDLER`, `HAS-RECORDS`, `HAVING`, `HEADER`, `HEIGHT`, `HEIGHT-`, `HEIGHT-C`, `HEIGHT-CH`, `HEIGHT-CHA`, `HEIGHT-CHAR`, `HEIGHT-CHARS`, `HEIGHT-P`, `HEIGHT-PI`, `HEIGHT-PIX`, `HEIGHT-PIXE`, `HEIGHT-PIXEL`, `HEIGHT-PIXELS`, `HELP`, `HEX-DECODE`, `HEX-ENCODE`, `HIDDEN`, `HIDE`, `HORI`, `HORIZ`, `HORIZO`, `HORIZON`, `HORIZONT`, `HORIZONTA`, `HORIZONTAL`, `HOST-BYTE-ORDER`, `HTML-CHARSET`, `HTML-END-OF-LINE`, `HTML-END-OF-PAGE`, `HTML-FRAME-BEGIN`, `HTML-FRAME-END`, `HTML-HEADER-BEGIN`, `HTML-HEADER-END`, `HTML-TITLE-BEGIN`, `HTML-TITLE-END`, `HWND`, `ICON`, `IF`, `IMAGE`, `IMAGE-DOWN`, `IMAGE-INSENSITIVE`, `IMAGE-SIZE`, `IMAGE-SIZE-C`, `IMAGE-SIZE-CH`, `IMAGE-SIZE-CHA`, `IMAGE-SIZE-CHAR`, `IMAGE-SIZE-CHARS`, `IMAGE-SIZE-P`, `IMAGE-SIZE-PI`, `IMAGE-SIZE-PIX`, `IMAGE-SIZE-PIXE`, `IMAGE-SIZE-PIXEL`, `IMAGE-SIZE-PIXELS`, `IMAGE-UP`, `IMMEDIATE-DISPLAY`, `IMPLEMENTS`, `IMPORT`, 
`IMPORT-PRINCIPAL`, `IN`, `IN-HANDLE`, `INCREMENT-EXCLUSIVE-ID`, `INDEX`, `INDEX-HINT`, `INDEX-INFORMATION`, `INDEXED-REPOSITION`, `INDICATOR`, `INFO`, `INFOR`, `INFORM`, `INFORMA`, `INFORMAT`, `INFORMATI`, `INFORMATIO`, `INFORMATION`, `INHERIT-BGC`, `INHERIT-BGCO`, `INHERIT-BGCOL`, `INHERIT-BGCOLO`, `INHERIT-BGCOLOR`, `INHERIT-FGC`, `INHERIT-FGCO`, `INHERIT-FGCOL`, `INHERIT-FGCOLO`, `INHERIT-FGCOLOR`, `INHERITS`, `INIT`, `INITI`, `INITIA`, `INITIAL`, `INITIAL-DIR`, `INITIAL-FILTER`, `INITIALIZE-DOCUMENT-TYPE`, `INITIATE`, `INNER-CHARS`, `INNER-LINES`, `INPUT`, `INPUT-O`, `INPUT-OU`, `INPUT-OUT`, `INPUT-OUTP`, `INPUT-OUTPU`, `INPUT-OUTPUT`, `INPUT-VALUE`, `INSERT`, `INSERT-ATTRIBUTE`, `INSERT-B`, `INSERT-BA`, `INSERT-BAC`, `INSERT-BACK`, `INSERT-BACKT`, `INSERT-BACKTA`, `INSERT-BACKTAB`, `INSERT-FILE`, `INSERT-ROW`, `INSERT-STRING`, `INSERT-T`, `INSERT-TA`, `INSERT-TAB`, `INT64`, `INT`, `INTEGER`, `INTERFACE`, `INTERNAL-ENTRIES`, `INTO`, `INVOKE`, `IS`, `IS-ATTR`, `IS-ATTR-`, `IS-ATTR-S`, `IS-ATTR-SP`, `IS-ATTR-SPA`, `IS-ATTR-SPAC`, `IS-ATTR-SPACE`, `IS-CLASS`, `IS-JSON`, `IS-LEAD-BYTE`, `IS-OPEN`, `IS-PARAMETER-SET`, `IS-PARTITIONED`, `IS-ROW-SELECTED`, `IS-SELECTED`, `IS-XML`, `ITEM`, `ITEMS-PER-ROW`, `JOIN`, `JOIN-BY-SQLDB`, `KBLABEL`, `KEEP-CONNECTION-OPEN`, `KEEP-FRAME-Z`, `KEEP-FRAME-Z-`, `KEEP-FRAME-Z-O`, `KEEP-FRAME-Z-OR`, `KEEP-FRAME-Z-ORD`, `KEEP-FRAME-Z-ORDE`, `KEEP-FRAME-Z-ORDER`, `KEEP-MESSAGES`, `KEEP-SECURITY-CACHE`, `KEEP-TAB-ORDER`, `KEY`, `KEY-CODE`, `KEY-FUNC`, `KEY-FUNCT`, `KEY-FUNCTI`, `KEY-FUNCTIO`, `KEY-FUNCTION`, `KEY-LABEL`, `KEYCODE`, `KEYFUNC`, `KEYFUNCT`, `KEYFUNCTI`, `KEYFUNCTIO`, `KEYFUNCTION`, `KEYLABEL`, `KEYS`, `KEYWORD`, `KEYWORD-ALL`, `LABEL`, `LABEL-BGC`, `LABEL-BGCO`, `LABEL-BGCOL`, `LABEL-BGCOLO`, `LABEL-BGCOLOR`, `LABEL-DC`, `LABEL-DCO`, `LABEL-DCOL`, `LABEL-DCOLO`, `LABEL-DCOLOR`, `LABEL-FGC`, `LABEL-FGCO`, `LABEL-FGCOL`, `LABEL-FGCOLO`, `LABEL-FGCOLOR`, `LABEL-FONT`, `LABEL-PFC`, `LABEL-PFCO`, `LABEL-PFCOL`, `LABEL-PFCOLO`, `LABEL-PFCOLOR`, `LABELS`, `LABELS-HAVE-COLONS`, `LANDSCAPE`, `LANGUAGE`, `LANGUAGES`, `LARGE`, `LARGE-TO-SMALL`, `LAST`, `LAST-ASYNCH-REQUEST`, `LAST-BATCH`, `LAST-CHILD`, `LAST-EVEN`, `LAST-EVENT`, `LAST-FORM`, `LAST-KEY`, `LAST-OBJECT`, `LAST-OF`, `LAST-PROCE`, `LAST-PROCED`, `LAST-PROCEDU`, `LAST-PROCEDUR`, `LAST-PROCEDURE`, `LAST-SERVER`, `LAST-TAB-I`, `LAST-TAB-IT`, `LAST-TAB-ITE`, `LAST-TAB-ITEM`, `LASTKEY`, `LC`, `LDBNAME`, `LE`, `LEAVE`, `LEFT-ALIGN`, `LEFT-ALIGNE`, `LEFT-ALIGNED`, `LEFT-TRIM`, `LENGTH`, `LIBRARY`, `LIKE`, `LIKE-SEQUENTIAL`, `LINE`, `LINE-COUNT`, `LINE-COUNTE`, `LINE-COUNTER`, `LIST-EVENTS`, `LIST-ITEM-PAIRS`, `LIST-ITEMS`, `LIST-PROPERTY-NAMES`, `LIST-QUERY-ATTRS`, `LIST-SET-ATTRS`, `LIST-WIDGETS`, `LISTI`, `LISTIN`, `LISTING`, `LITERAL-QUESTION`, `LITTLE-ENDIAN`, `LOAD`, `LOAD-DOMAINS`, `LOAD-ICON`, `LOAD-IMAGE`, `LOAD-IMAGE-DOWN`, `LOAD-IMAGE-INSENSITIVE`, `LOAD-IMAGE-UP`, `LOAD-MOUSE-P`, `LOAD-MOUSE-PO`, `LOAD-MOUSE-POI`, `LOAD-MOUSE-POIN`, `LOAD-MOUSE-POINT`, `LOAD-MOUSE-POINTE`, `LOAD-MOUSE-POINTER`, `LOAD-PICTURE`, `LOAD-SMALL-ICON`, `LOCAL-NAME`, `LOCAL-VERSION-INFO`, `LOCATOR-COLUMN-NUMBER`, `LOCATOR-LINE-NUMBER`, `LOCATOR-PUBLIC-ID`, `LOCATOR-SYSTEM-ID`, `LOCATOR-TYPE`, `LOCK-REGISTRATION`, `LOCKED`, `LOG`, `LOG-AUDIT-EVENT`, `LOG-MANAGER`, `LOGICAL`, `LOGIN-EXPIRATION-TIMESTAMP`, `LOGIN-HOST`, `LOGIN-STATE`, `LOGOUT`, `LONGCHAR`, `LOOKAHEAD`, `LOOKUP`, `LT`, `MACHINE-CLASS`, `MANDATORY`, `MANUAL-HIGHLIGHT`, `MAP`, `MARGIN-EXTRA`, `MARGIN-HEIGHT`, `MARGIN-HEIGHT-`, `MARGIN-HEIGHT-C`, 
`MARGIN-HEIGHT-CH`, `MARGIN-HEIGHT-CHA`, `MARGIN-HEIGHT-CHAR`, `MARGIN-HEIGHT-CHARS`, `MARGIN-HEIGHT-P`, `MARGIN-HEIGHT-PI`, `MARGIN-HEIGHT-PIX`, `MARGIN-HEIGHT-PIXE`, `MARGIN-HEIGHT-PIXEL`, `MARGIN-HEIGHT-PIXELS`, `MARGIN-WIDTH`, `MARGIN-WIDTH-`, `MARGIN-WIDTH-C`, `MARGIN-WIDTH-CH`, `MARGIN-WIDTH-CHA`, `MARGIN-WIDTH-CHAR`, `MARGIN-WIDTH-CHARS`, `MARGIN-WIDTH-P`, `MARGIN-WIDTH-PI`, `MARGIN-WIDTH-PIX`, `MARGIN-WIDTH-PIXE`, `MARGIN-WIDTH-PIXEL`, `MARGIN-WIDTH-PIXELS`, `MARK-NEW`, `MARK-ROW-STATE`, `MATCHES`, `MAX`, `MAX-BUTTON`, `MAX-CHARS`, `MAX-DATA-GUESS`, `MAX-HEIGHT`, `MAX-HEIGHT-C`, `MAX-HEIGHT-CH`, `MAX-HEIGHT-CHA`, `MAX-HEIGHT-CHAR`, `MAX-HEIGHT-CHARS`, `MAX-HEIGHT-P`, `MAX-HEIGHT-PI`, `MAX-HEIGHT-PIX`, `MAX-HEIGHT-PIXE`, `MAX-HEIGHT-PIXEL`, `MAX-HEIGHT-PIXELS`, `MAX-ROWS`, `MAX-SIZE`, `MAX-VAL`, `MAX-VALU`, `MAX-VALUE`, `MAX-WIDTH`, `MAX-WIDTH-`, `MAX-WIDTH-C`, `MAX-WIDTH-CH`, `MAX-WIDTH-CHA`, `MAX-WIDTH-CHAR`, `MAX-WIDTH-CHARS`, `MAX-WIDTH-P`, `MAX-WIDTH-PI`, `MAX-WIDTH-PIX`, `MAX-WIDTH-PIXE`, `MAX-WIDTH-PIXEL`, `MAX-WIDTH-PIXELS`, `MAXI`, `MAXIM`, `MAXIMIZE`, `MAXIMU`, `MAXIMUM`, `MAXIMUM-LEVEL`, `MD5-DIGEST`, `MEMBER`, `MEMPTR-TO-NODE-VALUE`, `MENU`, `MENU-BAR`, `MENU-ITEM`, `MENU-K`, `MENU-KE`, `MENU-KEY`, `MENU-M`, `MENU-MO`, `MENU-MOU`, `MENU-MOUS`, `MENU-MOUSE`, `MENUBAR`, `MERGE-BY-FIELD`, `MESSAGE`, `MESSAGE-AREA`, `MESSAGE-AREA-FONT`, `MESSAGE-LINES`, `METHOD`, `MIN`, `MIN-BUTTON`, `MIN-COLUMN-WIDTH-C`, `MIN-COLUMN-WIDTH-CH`, `MIN-COLUMN-WIDTH-CHA`, `MIN-COLUMN-WIDTH-CHAR`, `MIN-COLUMN-WIDTH-CHARS`, `MIN-COLUMN-WIDTH-P`, `MIN-COLUMN-WIDTH-PI`, `MIN-COLUMN-WIDTH-PIX`, `MIN-COLUMN-WIDTH-PIXE`, `MIN-COLUMN-WIDTH-PIXEL`, `MIN-COLUMN-WIDTH-PIXELS`, `MIN-HEIGHT`, `MIN-HEIGHT-`, `MIN-HEIGHT-C`, `MIN-HEIGHT-CH`, `MIN-HEIGHT-CHA`, `MIN-HEIGHT-CHAR`, `MIN-HEIGHT-CHARS`, `MIN-HEIGHT-P`, `MIN-HEIGHT-PI`, `MIN-HEIGHT-PIX`, `MIN-HEIGHT-PIXE`, `MIN-HEIGHT-PIXEL`, `MIN-HEIGHT-PIXELS`, `MIN-SIZE`, `MIN-VAL`, `MIN-VALU`, `MIN-VALUE`, `MIN-WIDTH`, `MIN-WIDTH-`, `MIN-WIDTH-C`, `MIN-WIDTH-CH`, `MIN-WIDTH-CHA`, `MIN-WIDTH-CHAR`, `MIN-WIDTH-CHARS`, `MIN-WIDTH-P`, `MIN-WIDTH-PI`, `MIN-WIDTH-PIX`, `MIN-WIDTH-PIXE`, `MIN-WIDTH-PIXEL`, `MIN-WIDTH-PIXELS`, `MINI`, `MINIM`, `MINIMU`, `MINIMUM`, `MOD`, `MODIFIED`, `MODU`, `MODUL`, `MODULO`, `MONTH`, `MOUSE`, `MOUSE-P`, `MOUSE-PO`, `MOUSE-POI`, `MOUSE-POIN`, `MOUSE-POINT`, `MOUSE-POINTE`, `MOUSE-POINTER`, `MOVABLE`, `MOVE-AFTER`, `MOVE-AFTER-`, `MOVE-AFTER-T`, `MOVE-AFTER-TA`, `MOVE-AFTER-TAB`, `MOVE-AFTER-TAB-`, `MOVE-AFTER-TAB-I`, `MOVE-AFTER-TAB-IT`, `MOVE-AFTER-TAB-ITE`, `MOVE-AFTER-TAB-ITEM`, `MOVE-BEFOR`, `MOVE-BEFORE`, `MOVE-BEFORE-`, `MOVE-BEFORE-T`, `MOVE-BEFORE-TA`, `MOVE-BEFORE-TAB`, `MOVE-BEFORE-TAB-`, `MOVE-BEFORE-TAB-I`, `MOVE-BEFORE-TAB-IT`, `MOVE-BEFORE-TAB-ITE`, `MOVE-BEFORE-TAB-ITEM`, `MOVE-COL`, `MOVE-COLU`, `MOVE-COLUM`, `MOVE-COLUMN`, `MOVE-TO-B`, `MOVE-TO-BO`, `MOVE-TO-BOT`, `MOVE-TO-BOTT`, `MOVE-TO-BOTTO`, `MOVE-TO-BOTTOM`, `MOVE-TO-EOF`, `MOVE-TO-T`, `MOVE-TO-TO`, `MOVE-TO-TOP`, `MPE`, `MTIME`, `MULTI-COMPILE`, `MULTIPLE`, `MULTIPLE-KEY`, `MULTITASKING-INTERVAL`, `MUST-EXIST`, `NAME`, `NAMESPACE-PREFIX`, `NAMESPACE-URI`, `NATIVE`, `NE`, `NEEDS-APPSERVER-PROMPT`, `NEEDS-PROMPT`, `NEW`, `NEW-INSTANCE`, `NEW-ROW`, `NEXT`, `NEXT-COLUMN`, `NEXT-PROMPT`, `NEXT-ROWID`, `NEXT-SIBLING`, `NEXT-TAB-I`, `NEXT-TAB-IT`, `NEXT-TAB-ITE`, `NEXT-TAB-ITEM`, `NEXT-VALUE`, `NO`, `NO-APPLY`, `NO-ARRAY-MESSAGE`, `NO-ASSIGN`, `NO-ATTR`, `NO-ATTR-`, `NO-ATTR-L`, `NO-ATTR-LI`, `NO-ATTR-LIS`, `NO-ATTR-LIST`, `NO-ATTR-S`, `NO-ATTR-SP`, `NO-ATTR-SPA`, 
`NO-ATTR-SPAC`, `NO-ATTR-SPACE`, `NO-AUTO-VALIDATE`, `NO-BIND-WHERE`, `NO-BOX`, `NO-CONSOLE`, `NO-CONVERT`, `NO-CONVERT-3D-COLORS`, `NO-CURRENT-VALUE`, `NO-DEBUG`, `NO-DRAG`, `NO-ECHO`, `NO-EMPTY-SPACE`, `NO-ERROR`, `NO-F`, `NO-FI`, `NO-FIL`, `NO-FILL`, `NO-FOCUS`, `NO-HELP`, `NO-HIDE`, `NO-INDEX-HINT`, `NO-INHERIT-BGC`, `NO-INHERIT-BGCO`, `NO-INHERIT-BGCOLOR`, `NO-INHERIT-FGC`, `NO-INHERIT-FGCO`, `NO-INHERIT-FGCOL`, `NO-INHERIT-FGCOLO`, `NO-INHERIT-FGCOLOR`, `NO-JOIN-BY-SQLDB`, `NO-LABE`, `NO-LABELS`, `NO-LOBS`, `NO-LOCK`, `NO-LOOKAHEAD`, `NO-MAP`, `NO-MES`, `NO-MESS`, `NO-MESSA`, `NO-MESSAG`, `NO-MESSAGE`, `NO-PAUSE`, `NO-PREFE`, `NO-PREFET`, `NO-PREFETC`, `NO-PREFETCH`, `NO-ROW-MARKERS`, `NO-SCROLLBAR-VERTICAL`, `NO-SEPARATE-CONNECTION`, `NO-SEPARATORS`, `NO-TAB-STOP`, `NO-UND`, `NO-UNDE`, `NO-UNDER`, `NO-UNDERL`, `NO-UNDERLI`, `NO-UNDERLIN`, `NO-UNDERLINE`, `NO-UNDO`, `NO-VAL`, `NO-VALI`, `NO-VALID`, `NO-VALIDA`, `NO-VALIDAT`, `NO-VALIDATE`, `NO-WAIT`, `NO-WORD-WRAP`, `NODE-VALUE-TO-MEMPTR`, `NONAMESPACE-SCHEMA-LOCATION`, `NONE`, `NORMALIZE`, `NOT`, `NOT-ACTIVE`, `NOW`, `NULL`, `NUM-ALI`, `NUM-ALIA`, `NUM-ALIAS`, `NUM-ALIASE`, `NUM-ALIASES`, `NUM-BUFFERS`, `NUM-BUT`, `NUM-BUTT`, `NUM-BUTTO`, `NUM-BUTTON`, `NUM-BUTTONS`, `NUM-COL`, `NUM-COLU`, `NUM-COLUM`, `NUM-COLUMN`, `NUM-COLUMNS`, `NUM-COPIES`, `NUM-DBS`, `NUM-DROPPED-FILES`, `NUM-ENTRIES`, `NUM-FIELDS`, `NUM-FORMATS`, `NUM-ITEMS`, `NUM-ITERATIONS`, `NUM-LINES`, `NUM-LOCKED-COL`, `NUM-LOCKED-COLU`, `NUM-LOCKED-COLUM`, `NUM-LOCKED-COLUMN`, `NUM-LOCKED-COLUMNS`, `NUM-MESSAGES`, `NUM-PARAMETERS`, `NUM-REFERENCES`, `NUM-REPLACED`, `NUM-RESULTS`, `NUM-SELECTED`, `NUM-SELECTED-`, `NUM-SELECTED-ROWS`, `NUM-SELECTED-W`, `NUM-SELECTED-WI`, `NUM-SELECTED-WID`, `NUM-SELECTED-WIDG`, `NUM-SELECTED-WIDGE`, `NUM-SELECTED-WIDGET`, `NUM-SELECTED-WIDGETS`, `NUM-TABS`, `NUM-TO-RETAIN`, `NUM-VISIBLE-COLUMNS`, `NUMERIC`, `NUMERIC-F`, `NUMERIC-FO`, `NUMERIC-FOR`, `NUMERIC-FORM`, `NUMERIC-FORMA`, `NUMERIC-FORMAT`, `OCTET-LENGTH`, `OF`, `OFF`, `OK`, `OK-CANCEL`, `OLD`, `ON`, `ON-FRAME`, `ON-FRAME-`, `ON-FRAME-B`, `ON-FRAME-BO`, `ON-FRAME-BOR`, `ON-FRAME-BORD`, `ON-FRAME-BORDE`, `ON-FRAME-BORDER`, `OPEN`, `OPSYS`, `OPTION`, `OR`, `ORDERED-JOIN`, `ORDINAL`, `OS-APPEND`, `OS-COMMAND`, `OS-COPY`, `OS-CREATE-DIR`, `OS-DELETE`, `OS-DIR`, `OS-DRIVE`, `OS-DRIVES`, `OS-ERROR`, `OS-GETENV`, `OS-RENAME`, `OTHERWISE`, `OUTPUT`, `OVERLAY`, `OVERRIDE`, `OWNER`, `PAGE`, `PAGE-BOT`, `PAGE-BOTT`, `PAGE-BOTTO`, `PAGE-BOTTOM`, `PAGE-NUM`, `PAGE-NUMB`, `PAGE-NUMBE`, `PAGE-NUMBER`, `PAGE-SIZE`, `PAGE-TOP`, `PAGE-WID`, `PAGE-WIDT`, `PAGE-WIDTH`, `PAGED`, `PARAM`, `PARAME`, `PARAMET`, `PARAMETE`, `PARAMETER`, `PARENT`, `PARSE-STATUS`, `PARTIAL-KEY`, `PASCAL`, `PASSWORD-FIELD`, `PATHNAME`, `PAUSE`, `PBE-HASH-ALG`, `PBE-HASH-ALGO`, `PBE-HASH-ALGOR`, `PBE-HASH-ALGORI`, `PBE-HASH-ALGORIT`, `PBE-HASH-ALGORITH`, `PBE-HASH-ALGORITHM`, `PBE-KEY-ROUNDS`, `PDBNAME`, `PERSIST`, `PERSISTE`, `PERSISTEN`, `PERSISTENT`, `PERSISTENT-CACHE-DISABLED`, `PFC`, `PFCO`, `PFCOL`, `PFCOLO`, `PFCOLOR`, `PIXELS`, `PIXELS-PER-COL`, `PIXELS-PER-COLU`, `PIXELS-PER-COLUM`, `PIXELS-PER-COLUMN`, `PIXELS-PER-ROW`, `POPUP-M`, `POPUP-ME`, `POPUP-MEN`, `POPUP-MENU`, `POPUP-O`, `POPUP-ON`, `POPUP-ONL`, `POPUP-ONLY`, `PORTRAIT`, `POSITION`, `PRECISION`, `PREFER-DATASET`, `PREPARE-STRING`, `PREPARED`, `PREPROC`, `PREPROCE`, `PREPROCES`, `PREPROCESS`, `PRESEL`, `PRESELE`, `PRESELEC`, `PRESELECT`, `PREV`, `PREV-COLUMN`, `PREV-SIBLING`, `PREV-TAB-I`, `PREV-TAB-IT`, `PREV-TAB-ITE`, `PREV-TAB-ITEM`, `PRIMARY`, `PRINTER`, 
`PRINTER-CONTROL-HANDLE`, `PRINTER-HDC`, `PRINTER-NAME`, `PRINTER-PORT`, `PRINTER-SETUP`, `PRIVATE`, `PRIVATE-D`, `PRIVATE-DA`, `PRIVATE-DAT`, `PRIVATE-DATA`, `PRIVILEGES`, `PROC-HA`, `PROC-HAN`, `PROC-HAND`, `PROC-HANDL`, `PROC-HANDLE`, `PROC-ST`, `PROC-STA`, `PROC-STAT`, `PROC-STATU`, `PROC-STATUS`, `PROC-TEXT`, `PROC-TEXT-BUFFER`, `PROCE`, `PROCED`, `PROCEDU`, `PROCEDUR`, `PROCEDURE`, `PROCEDURE-CALL-TYPE`, `PROCEDURE-TYPE`, `PROCESS`, `PROFILER`, `PROGRAM-NAME`, `PROGRESS`, `PROGRESS-S`, `PROGRESS-SO`, `PROGRESS-SOU`, `PROGRESS-SOUR`, `PROGRESS-SOURC`, `PROGRESS-SOURCE`, `PROMPT`, `PROMPT-F`, `PROMPT-FO`, `PROMPT-FOR`, `PROMSGS`, `PROPATH`, `PROPERTY`, `PROTECTED`, `PROVERS`, `PROVERSI`, `PROVERSIO`, `PROVERSION`, `PROXY`, `PROXY-PASSWORD`, `PROXY-USERID`, `PUBLIC`, `PUBLIC-ID`, `PUBLISH`, `PUBLISHED-EVENTS`, `PUT`, `PUT-BYTE`, `PUT-DOUBLE`, `PUT-FLOAT`, `PUT-INT64`, `PUT-KEY-VAL`, `PUT-KEY-VALU`, `PUT-KEY-VALUE`, `PUT-LONG`, `PUT-SHORT`, `PUT-STRING`, `PUT-UNSIGNED-LONG`, `PUTBYTE`, `QUERY`, `QUERY-CLOSE`, `QUERY-OFF-END`, `QUERY-OPEN`, `QUERY-PREPARE`, `QUERY-TUNING`, `QUESTION`, `QUIT`, `QUOTER`, `R-INDEX`, `RADIO-BUTTONS`, `RADIO-SET`, `RANDOM`, `RAW`, `RAW-TRANSFER`, `RCODE-INFO`, `RCODE-INFOR`, `RCODE-INFORM`, `RCODE-INFORMA`, `RCODE-INFORMAT`, `RCODE-INFORMATI`, `RCODE-INFORMATIO`, `RCODE-INFORMATION`, `READ-AVAILABLE`, `READ-EXACT-NUM`, `READ-FILE`, `READ-JSON`, `READ-ONLY`, `READ-XML`, `READ-XMLSCHEMA`, `READKEY`, `REAL`, `RECID`, `RECORD-LENGTH`, `RECT`, `RECTA`, `RECTAN`, `RECTANG`, `RECTANGL`, `RECTANGLE`, `RECURSIVE`, `REFERENCE-ONLY`, `REFRESH`, `REFRESH-AUDIT-POLICY`, `REFRESHABLE`, `REGISTER-DOMAIN`, `RELEASE`, `REMOTE`, `REMOVE-EVENTS-PROCEDURE`, `REMOVE-SUPER-PROCEDURE`, `REPEAT`, `REPLACE`, `REPLACE-SELECTION-TEXT`, `REPOSITION`, `REPOSITION-BACKWARD`, `REPOSITION-FORWARD`, `REPOSITION-MODE`, `REPOSITION-TO-ROW`, `REPOSITION-TO-ROWID`, `REQUEST`, `REQUEST-INFO`, `RESET`, `RESIZA`, `RESIZAB`, `RESIZABL`, `RESIZABLE`, `RESIZE`, `RESPONSE-INFO`, `RESTART-ROW`, `RESTART-ROWID`, `RETAIN`, `RETAIN-SHAPE`, `RETRY`, `RETRY-CANCEL`, `RETURN`, `RETURN-ALIGN`, `RETURN-ALIGNE`, `RETURN-INS`, `RETURN-INSE`, `RETURN-INSER`, `RETURN-INSERT`, `RETURN-INSERTE`, `RETURN-INSERTED`, `RETURN-TO-START-DI`, `RETURN-TO-START-DIR`, `RETURN-VAL`, `RETURN-VALU`, `RETURN-VALUE`, `RETURN-VALUE-DATA-TYPE`, `RETURNS`, `REVERSE-FROM`, `REVERT`, `REVOKE`, `RGB-VALUE`, `RIGHT-ALIGNED`, `RIGHT-TRIM`, `ROLES`, `ROUND`, `ROUTINE-LEVEL`, `ROW`, `ROW-HEIGHT-CHARS`, `ROW-HEIGHT-PIXELS`, `ROW-MARKERS`, `ROW-OF`, `ROW-RESIZABLE`, `ROWID`, `RULE`, `RUN`, `RUN-PROCEDURE`, `SAVE CACHE`, `SAVE`, `SAVE-AS`, `SAVE-FILE`, `SAX-COMPLE`, `SAX-COMPLET`, `SAX-COMPLETE`, `SAX-PARSE`, `SAX-PARSE-FIRST`, `SAX-PARSE-NEXT`, `SAX-PARSER-ERROR`, `SAX-RUNNING`, `SAX-UNINITIALIZED`, `SAX-WRITE-BEGIN`, `SAX-WRITE-COMPLETE`, `SAX-WRITE-CONTENT`, `SAX-WRITE-ELEMENT`, `SAX-WRITE-ERROR`, `SAX-WRITE-IDLE`, `SAX-WRITE-TAG`, `SAX-WRITER`, `SCHEMA`, `SCHEMA-LOCATION`, `SCHEMA-MARSHAL`, `SCHEMA-PATH`, `SCREEN`, `SCREEN-IO`, `SCREEN-LINES`, `SCREEN-VAL`, `SCREEN-VALU`, `SCREEN-VALUE`, `SCROLL`, `SCROLL-BARS`, `SCROLL-DELTA`, `SCROLL-OFFSET`, `SCROLL-TO-CURRENT-ROW`, `SCROLL-TO-I`, `SCROLL-TO-IT`, `SCROLL-TO-ITE`, `SCROLL-TO-ITEM`, `SCROLL-TO-SELECTED-ROW`, `SCROLLABLE`, `SCROLLBAR-H`, `SCROLLBAR-HO`, `SCROLLBAR-HOR`, `SCROLLBAR-HORI`, `SCROLLBAR-HORIZ`, `SCROLLBAR-HORIZO`, `SCROLLBAR-HORIZON`, `SCROLLBAR-HORIZONT`, `SCROLLBAR-HORIZONTA`, `SCROLLBAR-HORIZONTAL`, `SCROLLBAR-V`, `SCROLLBAR-VE`, `SCROLLBAR-VER`, `SCROLLBAR-VERT`, 
`SCROLLBAR-VERTI`, `SCROLLBAR-VERTIC`, `SCROLLBAR-VERTICA`, `SCROLLBAR-VERTICAL`, `SCROLLED-ROW-POS`, `SCROLLED-ROW-POSI`, `SCROLLED-ROW-POSIT`, `SCROLLED-ROW-POSITI`, `SCROLLED-ROW-POSITIO`, `SCROLLED-ROW-POSITION`, `SCROLLING`, `SDBNAME`, `SEAL`, `SEAL-TIMESTAMP`, `SEARCH`, `SEARCH-SELF`, `SEARCH-TARGET`, `SECTION`, `SECURITY-POLICY`, `SEEK`, `SELECT`, `SELECT-ALL`, `SELECT-FOCUSED-ROW`, `SELECT-NEXT-ROW`, `SELECT-PREV-ROW`, `SELECT-ROW`, `SELECTABLE`, `SELECTED`, `SELECTION`, `SELECTION-END`, `SELECTION-LIST`, `SELECTION-START`, `SELECTION-TEXT`, `SELF`, `SEND`, `SEND-SQL-STATEMENT`, `SENSITIVE`, `SEPARATE-CONNECTION`, `SEPARATOR-FGCOLOR`, `SEPARATORS`, `SERIALIZABLE`, `SERIALIZE-HIDDEN`, `SERIALIZE-NAME`, `SERVER`, `SERVER-CONNECTION-BOUND`, `SERVER-CONNECTION-BOUND-REQUEST`, `SERVER-CONNECTION-CONTEXT`, `SERVER-CONNECTION-ID`, `SERVER-OPERATING-MODE`, `SESSION`, `SESSION-ID`, `SET`, `SET-APPL-CONTEXT`, `SET-ATTR-CALL-TYPE`, `SET-ATTRIBUTE-NODE`, `SET-BLUE`, `SET-BLUE-`, `SET-BLUE-V`, `SET-BLUE-VA`, `SET-BLUE-VAL`, `SET-BLUE-VALU`, `SET-BLUE-VALUE`, `SET-BREAK`, `SET-BUFFERS`, `SET-CALLBACK`, `SET-CLIENT`, `SET-COMMIT`, `SET-CONTENTS`, `SET-CURRENT-VALUE`, `SET-DB-CLIENT`, `SET-DYNAMIC`, `SET-EVENT-MANAGER-OPTION`, `SET-GREEN`, `SET-GREEN-`, `SET-GREEN-V`, `SET-GREEN-VA`, `SET-GREEN-VAL`, `SET-GREEN-VALU`, `SET-GREEN-VALUE`, `SET-INPUT-SOURCE`, `SET-OPTION`, `SET-OUTPUT-DESTINATION`, `SET-PARAMETER`, `SET-POINTER-VALUE`, `SET-PROPERTY`, `SET-RED`, `SET-RED-`, `SET-RED-V`, `SET-RED-VA`, `SET-RED-VAL`, `SET-RED-VALU`, `SET-RED-VALUE`, `SET-REPOSITIONED-ROW`, `SET-RGB-VALUE`, `SET-ROLLBACK`, `SET-SELECTION`, `SET-SIZE`, `SET-SORT-ARROW`, `SET-WAIT-STATE`, `SETUSER`, `SETUSERI`, `SETUSERID`, `SHA1-DIGEST`, `SHARE`, `SHARE-`, `SHARE-L`, `SHARE-LO`, `SHARE-LOC`, `SHARE-LOCK`, `SHARED`, `SHOW-IN-TASKBAR`, `SHOW-STAT`, `SHOW-STATS`, `SIDE-LAB`, `SIDE-LABE`, `SIDE-LABEL`, `SIDE-LABEL-H`, `SIDE-LABEL-HA`, `SIDE-LABEL-HAN`, `SIDE-LABEL-HAND`, `SIDE-LABEL-HANDL`, `SIDE-LABEL-HANDLE`, `SIDE-LABELS`, `SIGNATURE`, `SILENT`, `SIMPLE`, `SINGLE`, `SINGLE-RUN`, `SINGLETON`, `SIZE`, `SIZE-C`, `SIZE-CH`, `SIZE-CHA`, `SIZE-CHAR`, `SIZE-CHARS`, `SIZE-P`, `SIZE-PI`, `SIZE-PIX`, `SIZE-PIXE`, `SIZE-PIXEL`, `SIZE-PIXELS`, `SKIP`, `SKIP-DELETED-RECORD`, `SLIDER`, `SMALL-ICON`, `SMALL-TITLE`, `SMALLINT`, `SOME`, `SORT`, `SORT-ASCENDING`, `SORT-NUMBER`, `SOURCE`, `SOURCE-PROCEDURE`, `SPACE`, `SQL`, `SQRT`, `SSL-SERVER-NAME`, `STANDALONE`, `START`, `START-DOCUMENT`, `START-ELEMENT`, `START-MOVE`, `START-RESIZE`, `START-ROW-RESIZE`, `STATE-DETAIL`, `STATIC`, `STATUS`, `STATUS-AREA`, `STATUS-AREA-FONT`, `STDCALL`, `STOP`, `STOP-AFTER`, `STOP-PARSING`, `STOPPE`, `STOPPED`, `STORED-PROC`, `STORED-PROCE`, `STORED-PROCED`, `STORED-PROCEDU`, `STORED-PROCEDUR`, `STORED-PROCEDURE`, `STREAM`, `STREAM-HANDLE`, `STREAM-IO`, `STRETCH-TO-FIT`, `STRICT`, `STRICT-ENTITY-RESOLUTION`, `STRING`, `STRING-VALUE`, `STRING-XREF`, `SUB-AVE`, `SUB-AVER`, `SUB-AVERA`, `SUB-AVERAG`, `SUB-AVERAGE`, `SUB-COUNT`, `SUB-MAXIMUM`, `SUB-MENU`, `SUB-MIN`, `SUB-MINIMUM`, `SUB-TOTAL`, `SUBSCRIBE`, `SUBST`, `SUBSTI`, `SUBSTIT`, `SUBSTITU`, `SUBSTITUT`, `SUBSTITUTE`, `SUBSTR`, `SUBSTRI`, `SUBSTRIN`, `SUBSTRING`, `SUBTYPE`, `SUM`, `SUM-MAX`, `SUM-MAXI`, `SUM-MAXIM`, `SUM-MAXIMU`, `SUPER`, `SUPER-PROCEDURES`, `SUPPRESS-NAMESPACE-PROCESSING`, `SUPPRESS-W`, `SUPPRESS-WA`, `SUPPRESS-WAR`, `SUPPRESS-WARN`, `SUPPRESS-WARNI`, `SUPPRESS-WARNIN`, `SUPPRESS-WARNING`, `SUPPRESS-WARNINGS`, `SYMMETRIC-ENCRYPTION-ALGORITHM`, `SYMMETRIC-ENCRYPTION-IV`, 
`SYMMETRIC-ENCRYPTION-KEY`, `SYMMETRIC-SUPPORT`, `SYSTEM-ALERT`, `SYSTEM-ALERT-`, `SYSTEM-ALERT-B`, `SYSTEM-ALERT-BO`, `SYSTEM-ALERT-BOX`, `SYSTEM-ALERT-BOXE`, `SYSTEM-ALERT-BOXES`, `SYSTEM-DIALOG`, `SYSTEM-HELP`, `SYSTEM-ID`, `TAB-POSITION`, `TAB-STOP`, `TABLE`, `TABLE-HANDLE`, `TABLE-NUMBER`, `TABLE-SCAN`, `TARGET`, `TARGET-PROCEDURE`, `TEMP-DIR`, `TEMP-DIRE`, `TEMP-DIREC`, `TEMP-DIRECT`, `TEMP-DIRECTO`, `TEMP-DIRECTOR`, `TEMP-DIRECTORY`, `TEMP-TABLE`, `TEMP-TABLE-PREPARE`, `TERM`, `TERMI`, `TERMIN`, `TERMINA`, `TERMINAL`, `TERMINATE`, `TEXT`, `TEXT-CURSOR`, `TEXT-SEG-GROW`, `TEXT-SELECTED`, `THEN`, `THIS-OBJECT`, `THIS-PROCEDURE`, `THREAD-SAFE`, `THREE-D`, `THROUGH`, `THROW`, `THRU`, `TIC-MARKS`, `TIME`, `TIME-SOURCE`, `TITLE`, `TITLE-BGC`, `TITLE-BGCO`, `TITLE-BGCOL`, `TITLE-BGCOLO`, `TITLE-BGCOLOR`, `TITLE-DC`, `TITLE-DCO`, `TITLE-DCOL`, `TITLE-DCOLO`, `TITLE-DCOLOR`, `TITLE-FGC`, `TITLE-FGCO`, `TITLE-FGCOL`, `TITLE-FGCOLO`, `TITLE-FGCOLOR`, `TITLE-FO`, `TITLE-FON`, `TITLE-FONT`, `TO`, `TO-ROWID`, `TODAY`, `TOGGLE-BOX`, `TOOLTIP`, `TOOLTIPS`, `TOP-NAV-QUERY`, `TOP-ONLY`, `TOPIC`, `TOTAL`, `TRAILING`, `TRANS`, `TRANS-INIT-PROCEDURE`, `TRANSACTION`, `TRANSACTION-MODE`, `TRANSPARENT`, `TRIGGER`, `TRIGGERS`, `TRIM`, `TRUE`, `TRUNC`, `TRUNCA`, `TRUNCAT`, `TRUNCATE`, `TYPE`, `TYPE-OF`, `UNBOX`, `UNBUFF`, `UNBUFFE`, `UNBUFFER`, `UNBUFFERE`, `UNBUFFERED`, `UNDERL`, `UNDERLI`, `UNDERLIN`, `UNDERLINE`, `UNDO`, `UNFORM`, `UNFORMA`, `UNFORMAT`, `UNFORMATT`, `UNFORMATTE`, `UNFORMATTED`, `UNION`, `UNIQUE`, `UNIQUE-ID`, `UNIQUE-MATCH`, `UNIX`, `UNLESS-HIDDEN`, `UNLOAD`, `UNSIGNED-LONG`, `UNSUBSCRIBE`, `UP`, `UPDATE`, `UPDATE-ATTRIBUTE`, `URL`, `URL-DECODE`, `URL-ENCODE`, `URL-PASSWORD`, `URL-USERID`, `USE`, `USE-DICT-EXPS`, `USE-FILENAME`, `USE-INDEX`, `USE-REVVIDEO`, `USE-TEXT`, `USE-UNDERLINE`, `USE-WIDGET-POOL`, `USER`, `USER-ID`, `USERID`, `USING`, `V6DISPLAY`, `V6FRAME`, `VALID-EVENT`, `VALID-HANDLE`, `VALID-OBJECT`, `VALIDATE`, `VALIDATE-EXPRESSION`, `VALIDATE-MESSAGE`, `VALIDATE-SEAL`, `VALIDATION-ENABLED`, `VALUE`, `VALUE-CHANGED`, `VALUES`, `VAR`, `VARI`, `VARIA`, `VARIAB`, `VARIABL`, `VARIABLE`, `VERBOSE`, `VERSION`, `VERT`, `VERTI`, `VERTIC`, `VERTICA`, `VERTICAL`, `VIEW`, `VIEW-AS`, `VIEW-FIRST-COLUMN-ON-REOPEN`, `VIRTUAL-HEIGHT`, `VIRTUAL-HEIGHT-`, `VIRTUAL-HEIGHT-C`, `VIRTUAL-HEIGHT-CH`, `VIRTUAL-HEIGHT-CHA`, `VIRTUAL-HEIGHT-CHAR`, `VIRTUAL-HEIGHT-CHARS`, `VIRTUAL-HEIGHT-P`, `VIRTUAL-HEIGHT-PI`, `VIRTUAL-HEIGHT-PIX`, `VIRTUAL-HEIGHT-PIXE`, `VIRTUAL-HEIGHT-PIXEL`, `VIRTUAL-HEIGHT-PIXELS`, `VIRTUAL-WIDTH`, `VIRTUAL-WIDTH-`, `VIRTUAL-WIDTH-C`, `VIRTUAL-WIDTH-CH`, `VIRTUAL-WIDTH-CHA`, `VIRTUAL-WIDTH-CHAR`, `VIRTUAL-WIDTH-CHARS`, `VIRTUAL-WIDTH-P`, `VIRTUAL-WIDTH-PI`, `VIRTUAL-WIDTH-PIX`, `VIRTUAL-WIDTH-PIXE`, `VIRTUAL-WIDTH-PIXEL`, `VIRTUAL-WIDTH-PIXELS`, `VISIBLE`, `VOID`, `WAIT`, `WAIT-FOR`, `WARNING`, `WEB-CONTEXT`, `WEEKDAY`, `WHEN`, `WHERE`, `WHILE`, `WIDGET`, `WIDGET-E`, `WIDGET-EN`, `WIDGET-ENT`, `WIDGET-ENTE`, `WIDGET-ENTER`, `WIDGET-ID`, `WIDGET-L`, `WIDGET-LE`, `WIDGET-LEA`, `WIDGET-LEAV`, `WIDGET-LEAVE`, `WIDGET-POOL`, `WIDTH`, `WIDTH-`, `WIDTH-C`, `WIDTH-CH`, `WIDTH-CHA`, `WIDTH-CHAR`, `WIDTH-CHARS`, `WIDTH-P`, `WIDTH-PI`, `WIDTH-PIX`, `WIDTH-PIXE`, `WIDTH-PIXEL`, `WIDTH-PIXELS`, `WINDOW`, `WINDOW-MAXIM`, `WINDOW-MAXIMI`, `WINDOW-MAXIMIZ`, `WINDOW-MAXIMIZE`, `WINDOW-MAXIMIZED`, `WINDOW-MINIM`, `WINDOW-MINIMI`, `WINDOW-MINIMIZ`, `WINDOW-MINIMIZE`, `WINDOW-MINIMIZED`, `WINDOW-NAME`, `WINDOW-NORMAL`, `WINDOW-STA`, `WINDOW-STAT`, `WINDOW-STATE`, `WINDOW-SYSTEM`, `WITH`, 
`WORD-INDEX`, `WORD-WRAP`, `WORK-AREA-HEIGHT-PIXELS`, `WORK-AREA-WIDTH-PIXELS`, `WORK-AREA-X`, `WORK-AREA-Y`, `WORK-TAB`, `WORK-TABL`, `WORK-TABLE`, `WORKFILE`, `WRITE`, `WRITE-CDATA`, `WRITE-CHARACTERS`, `WRITE-COMMENT`, `WRITE-DATA-ELEMENT`, `WRITE-EMPTY-ELEMENT`, `WRITE-ENTITY-REF`, `WRITE-EXTERNAL-DTD`, `WRITE-FRAGMENT`, `WRITE-JSON`, `WRITE-MESSAGE`, `WRITE-PROCESSING-INSTRUCTION`, `WRITE-STATUS`, `WRITE-XML`, `WRITE-XMLSCHEMA`, `X`, `X-OF`, `XCODE`, `XML-DATA-TYPE`, `XML-ENTITY-EXPANSION-LIMIT`, `XML-NODE-TYPE`, `XML-SCHEMA-PATH`, `XML-STRICT-ENTITY-RESOLUTION`, `XML-SUPPRESS-NAMESPACE-PROCESSING`, `XREF`, `XREF-XML`, `Y`, `Y-OF`, `YEAR`, `YEAR-OFFSET`, `YES`, `YES-NO`, `YES-NO-CANCEL`), KeywordReserved, nil}, //nolint + {`"(\\\\|\\[^\\]|[^"\\])*"`, LiteralStringDouble, nil}, + {`'(\\\\|\\[^\\]|[^'\\])*'`, LiteralStringSingle, nil}, + {`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`\s+`, Text, nil}, + {`[+*/=-]`, Operator, nil}, + {`[.:()]`, Punctuation, nil}, + {`.`, NameVariable, nil}, + }, + "comment": { + {`[^*/]`, CommentMultiline, nil}, + {`/\*`, CommentMultiline, Push()}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[*/]`, CommentMultiline, nil}, + {`\/\/`, CommentSingle, nil}, + }, + "preprocessor": { + {`[^{}]`, CommentPreproc, nil}, + {`\{`, CommentPreproc, Push()}, + {`\}`, CommentPreproc, Pop(1)}, + }, + } + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/o/openscad.go b/vendor/github.com/alecthomas/chroma/lexers/o/openscad.go new file mode 100644 index 000000000..d18b9caab --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/o/openscad.go @@ -0,0 +1,47 @@ +package o + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +var OpenSCAD = internal.Register(MustNewLazyLexer( + &Config{ + Name: "OpenSCAD", + Aliases: []string{"openscad"}, + Filenames: []string{"*.scad"}, + MimeTypes: []string{"text/x-scad"}, + }, + openSCADRules, +)) + +func openSCADRules() Rules { + return Rules{ + "root": { + {`[^\S\n]+`, Text, nil}, + {`\n`, Text, nil}, + {`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil}, + {`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil}, + {`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil}, + {`[{}\[\]\(\),;:]`, Punctuation, nil}, + {`[*!#%\-+=?/]`, Operator, nil}, + {`<|<=|==|!=|>=|>|&&|\|\|`, Operator, nil}, + {`\$(f[asn]|t|vp[rtd]|children)`, NameVariableMagic, nil}, + {Words(``, `\b`, `PI`, `undef`), KeywordConstant, nil}, + {`(use|include)((?:\s|\\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("includes")}, + {`(module)(\s*)([^\s\(]+)`, ByGroups(KeywordNamespace, Text, NameNamespace), nil}, + {`(function)(\s*)([^\s\(]+)`, ByGroups(KeywordDeclaration, Text, NameFunction), nil}, + {`\b(true|false)\b`, Literal, nil}, + {`\b(function|module|include|use|for|intersection_for|if|else|return)\b`, Keyword, nil}, + {`\b(circle|square|polygon|text|sphere|cube|cylinder|polyhedron|translate|rotate|scale|resize|mirror|multmatrix|color|offset|hull|minkowski|union|difference|intersection|abs|sign|sin|cos|tan|acos|asin|atan|atan2|floor|round|ceil|ln|log|pow|sqrt|exp|rands|min|max|concat|lookup|str|chr|search|version|version_num|norm|cross|parent_module|echo|import|import_dxf|dxf_linear_extrude|linear_extrude|rotate_extrude|surface|projection|render|dxf_cross|dxf_dim|let|assign|len)\b`, NameBuiltin, nil}, + {`\bchildren\b`, NameBuiltinPseudo, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`-?\d+(\.\d+)?(e[+-]?\d+)?`, Number, 
nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + }, + "includes": { + {"(<)([^>]*)(>)", ByGroups(Punctuation, CommentPreprocFile, Punctuation), nil}, + Default(Pop(1)), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/o/org.go b/vendor/github.com/alecthomas/chroma/lexers/o/org.go new file mode 100644 index 000000000..00f6df445 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/o/org.go @@ -0,0 +1,108 @@ +package o + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Org mode lexer. +var Org = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Org Mode", + Aliases: []string{"org", "orgmode"}, + Filenames: []string{"*.org"}, + MimeTypes: []string{"text/org"}, // https://lists.gnu.org/r/emacs-orgmode/2017-09/msg00087.html + }, + orgRules, +)) + +func orgRules() Rules { + return Rules{ + "root": { + {`^# .*$`, Comment, nil}, + // Headings + {`^(\*)( COMMENT)( .*)$`, ByGroups(GenericHeading, NameEntity, GenericStrong), nil}, + {`^(\*\*+)( COMMENT)( .*)$`, ByGroups(GenericSubheading, NameEntity, Text), nil}, + {`^(\*)( DONE)( .*)$`, ByGroups(GenericHeading, LiteralStringRegex, GenericStrong), nil}, + {`^(\*\*+)( DONE)( .*)$`, ByGroups(GenericSubheading, LiteralStringRegex, Text), nil}, + {`^(\*)( TODO)( .*)$`, ByGroups(GenericHeading, Error, GenericStrong), nil}, + {`^(\*\*+)( TODO)( .*)$`, ByGroups(GenericSubheading, Error, Text), nil}, + {`^(\*)( .+?)( :[a-zA-Z0-9_@:]+:)$`, ByGroups(GenericHeading, GenericStrong, GenericEmph), nil}, // Level 1 heading with tags + {`^(\*)( .+)$`, ByGroups(GenericHeading, GenericStrong), nil}, // // Level 1 heading with NO tags + {`^(\*\*+)( .+?)( :[a-zA-Z0-9_@:]+:)$`, ByGroups(GenericSubheading, Text, GenericEmph), nil}, // Level 2+ heading with tags + {`^(\*\*+)( .+)$`, ByGroups(GenericSubheading, Text), nil}, // Level 2+ heading with NO tags + // Checkbox lists + {`^( *)([+-] )(\[[ X]\])( .+)$`, ByGroups(Text, Keyword, Keyword, UsingSelf("inline")), nil}, + {`^( +)(\* )(\[[ X]\])( .+)$`, ByGroups(Text, Keyword, Keyword, UsingSelf("inline")), nil}, + // Definition lists + {`^( *)([+-] )([^ \n]+ ::)( .+)$`, ByGroups(Text, Keyword, Keyword, UsingSelf("inline")), nil}, + {`^( +)(\* )([^ \n]+ ::)( .+)$`, ByGroups(Text, Keyword, Keyword, UsingSelf("inline")), nil}, + // Unordered lists + {`^( *)([+-] )(.+)$`, ByGroups(Text, Keyword, UsingSelf("inline")), nil}, + {`^( +)(\* )(.+)$`, ByGroups(Text, Keyword, UsingSelf("inline")), nil}, + // Ordered lists + {`^( *)([0-9]+[.)])( \[@[0-9]+\])( .+)$`, ByGroups(Text, Keyword, GenericEmph, UsingSelf("inline")), nil}, + {`^( *)([0-9]+[.)])( .+)$`, ByGroups(Text, Keyword, UsingSelf("inline")), nil}, + // Dynamic Blocks + {`(?i)^( *#\+begin: )([^ ]+)([\w\W]*?\n)([\w\W]*?)(^ *#\+end: *$)`, ByGroups(Comment, CommentSpecial, Comment, UsingSelf("inline"), Comment), nil}, + // Blocks + // - Comment Blocks + {`(?i)^( *#\+begin_comment *\n)([\w\W]*?)(^ *#\+end_comment *$)`, ByGroups(Comment, Comment, Comment), nil}, + // - Src Blocks + { + `(?i)^( *#\+begin_src )([^ \n]+)(.*?\n)([\w\W]*?)(^ *#\+end_src *$)`, + UsingByGroup( + internal.Get, + 2, 4, + Comment, CommentSpecial, Comment, Text, Comment, + ), + nil, + }, + // - Export Blocks + { + `(?i)^( *#\+begin_export )(\w+)( *\n)([\w\W]*?)(^ *#\+end_export *$)`, + UsingByGroup( + internal.Get, + 2, 4, + Comment, CommentSpecial, Text, Text, Comment, + ), + nil, + }, + // - Org Special, Example, Verse, etc. 
Blocks + {`(?i)^( *#\+begin_)(\w+)( *\n)([\w\W]*?)(^ *#\+end_\2)( *$)`, ByGroups(Comment, Comment, Text, Text, Comment, Text), nil}, + // Keywords + {`^(#\+\w+)(:.*)$`, ByGroups(CommentSpecial, Comment), nil}, // Other Org keywords like #+title + // Properties and Drawers + {`(?i)^( *:\w+: *\n)([\w\W]*?)(^ *:end: *$)`, ByGroups(Comment, CommentSpecial, Comment), nil}, + // Line break operator + {`^(.*)(\\\\)$`, ByGroups(UsingSelf("inline"), Operator), nil}, + // Deadline/Scheduled + {`(?i)^( *(?:DEADLINE|SCHEDULED): )(<[^<>]+?> *)$`, ByGroups(Comment, CommentSpecial), nil}, // DEADLINE/SCHEDULED: + // DONE state CLOSED + {`(?i)^( *CLOSED: )(\[[^][]+?\] *)$`, ByGroups(Comment, CommentSpecial), nil}, // CLOSED: [datestamp] + // All other lines + Include("inline"), + }, + "inline": { + {`(\s)*(\*[^ \n*][^*]+?[^ \n*]\*)((?=\W|\n|$))`, ByGroups(Text, GenericStrong, Text), nil}, // Bold + {`(\s)*(/[^/]+?/)((?=\W|\n|$))`, ByGroups(Text, GenericEmph, Text), nil}, // Italic + {`(\s)*(=[^\n=]+?=)((?=\W|\n|$))`, ByGroups(Text, NameClass, Text), nil}, // Verbatim + {`(\s)*(~[^\n~]+?~)((?=\W|\n|$))`, ByGroups(Text, NameClass, Text), nil}, // Code + {`(\s)*(\+[^+]+?\+)((?=\W|\n|$))`, ByGroups(Text, GenericDeleted, Text), nil}, // Strikethrough + {`(\s)*(_[^_]+?_)((?=\W|\n|$))`, ByGroups(Text, GenericUnderline, Text), nil}, // Underline + {`(<)([^<>]+?)(>)`, ByGroups(Text, String, Text), nil}, // + {`[{]{3}[^}]+[}]{3}`, NameBuiltin, nil}, // {{{macro(foo,1)}}} + {`([^[])(\[fn:)([^]]+?)(\])([^]])`, ByGroups(Text, NameBuiltinPseudo, LiteralString, NameBuiltinPseudo, Text), nil}, // [fn:1] + // Links + {`(\[\[)([^][]+?)(\]\[)([^][]+)(\]\])`, ByGroups(Text, NameAttribute, Text, NameTag, Text), nil}, // [[link][descr]] + {`(\[\[)([^][]+?)(\]\])`, ByGroups(Text, NameAttribute, Text), nil}, // [[link]] + {`(<<)([^<>]+?)(>>)`, ByGroups(Text, NameAttribute, Text), nil}, // <> + // Tables + {`^( *)(\|[ -].*?[ -]\|)$`, ByGroups(Text, String), nil}, + // Blank lines, newlines + {`\n`, Text, nil}, + // Any other text + {`.`, Text, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/pacman.go b/vendor/github.com/alecthomas/chroma/lexers/p/pacman.go new file mode 100644 index 000000000..00c6255af --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/pacman.go @@ -0,0 +1,30 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Pacmanconf lexer. +var Pacmanconf = internal.Register(MustNewLazyLexer( + &Config{ + Name: "PacmanConf", + Aliases: []string{"pacmanconf"}, + Filenames: []string{"pacman.conf"}, + MimeTypes: []string{}, + }, + pacmanconfRules, +)) + +func pacmanconfRules() Rules { + return Rules{ + "root": { + {`#.*$`, CommentSingle, nil}, + {`^\s*\[.*?\]\s*$`, Keyword, nil}, + {`(\w+)(\s*)(=)`, ByGroups(NameAttribute, Text, Operator), nil}, + {`^(\s*)(\w+)(\s*)$`, ByGroups(Text, NameAttribute, Text), nil}, + {Words(``, `\b`, `$repo`, `$arch`, `%o`, `%u`), NameVariable, nil}, + {`.`, Text, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/perl.go b/vendor/github.com/alecthomas/chroma/lexers/p/perl.go new file mode 100644 index 000000000..6aa338ad1 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/perl.go @@ -0,0 +1,142 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Perl lexer. 
+var Perl = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Perl", + Aliases: []string{"perl", "pl"}, + Filenames: []string{"*.pl", "*.pm", "*.t"}, + MimeTypes: []string{"text/x-perl", "application/x-perl"}, + DotAll: true, + }, + perlRules, +)) + +func perlRules() Rules { + return Rules{ + "balanced-regex": { + {`/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*`, LiteralStringRegex, Pop(1)}, + {`!(\\\\|\\[^\\]|[^\\!])*![egimosx]*`, LiteralStringRegex, Pop(1)}, + {`\\(\\\\|[^\\])*\\[egimosx]*`, LiteralStringRegex, Pop(1)}, + {`\{(\\\\|\\[^\\]|[^\\}])*\}[egimosx]*`, LiteralStringRegex, Pop(1)}, + {`<(\\\\|\\[^\\]|[^\\>])*>[egimosx]*`, LiteralStringRegex, Pop(1)}, + {`\[(\\\\|\\[^\\]|[^\\\]])*\][egimosx]*`, LiteralStringRegex, Pop(1)}, + {`\((\\\\|\\[^\\]|[^\\)])*\)[egimosx]*`, LiteralStringRegex, Pop(1)}, + {`@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*`, LiteralStringRegex, Pop(1)}, + {`%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*`, LiteralStringRegex, Pop(1)}, + {`\$(\\\\|\\[^\\]|[^\\$])*\$[egimosx]*`, LiteralStringRegex, Pop(1)}, + }, + "root": { + {`\A\#!.+?$`, CommentHashbang, nil}, + {`\#.*?$`, CommentSingle, nil}, + {`^=[a-zA-Z0-9]+\s+.*?\n=cut`, CommentMultiline, nil}, + {Words(``, `\b`, `case`, `continue`, `do`, `else`, `elsif`, `for`, `foreach`, `if`, `last`, `my`, `next`, `our`, `redo`, `reset`, `then`, `unless`, `until`, `while`, `print`, `new`, `BEGIN`, `CHECK`, `INIT`, `END`, `return`), Keyword, nil}, + {`(format)(\s+)(\w+)(\s*)(=)(\s*\n)`, ByGroups(Keyword, Text, Name, Text, Punctuation, Text), Push("format")}, + {`(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b`, OperatorWord, nil}, + {`s/(\\\\|\\[^\\]|[^\\/])*/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*`, LiteralStringRegex, nil}, + {`s!(\\\\|\\!|[^!])*!(\\\\|\\!|[^!])*![egimosx]*`, LiteralStringRegex, nil}, + {`s\\(\\\\|[^\\])*\\(\\\\|[^\\])*\\[egimosx]*`, LiteralStringRegex, nil}, + {`s@(\\\\|\\[^\\]|[^\\@])*@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*`, LiteralStringRegex, nil}, + {`s%(\\\\|\\[^\\]|[^\\%])*%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*`, LiteralStringRegex, nil}, + {`s\{(\\\\|\\[^\\]|[^\\}])*\}\s*`, LiteralStringRegex, Push("balanced-regex")}, + {`s<(\\\\|\\[^\\]|[^\\>])*>\s*`, LiteralStringRegex, Push("balanced-regex")}, + {`s\[(\\\\|\\[^\\]|[^\\\]])*\]\s*`, LiteralStringRegex, Push("balanced-regex")}, + {`s\((\\\\|\\[^\\]|[^\\)])*\)\s*`, LiteralStringRegex, Push("balanced-regex")}, + {`m?/(\\\\|\\[^\\]|[^\\/\n])*/[gcimosx]*`, LiteralStringRegex, nil}, + {`m(?=[/!\\{<\[(@%$])`, LiteralStringRegex, Push("balanced-regex")}, + {`((?<==~)|(?<=\())\s*/(\\\\|\\[^\\]|[^\\/])*/[gcimosx]*`, LiteralStringRegex, nil}, + {`\s+`, Text, nil}, + {Words(``, `\b`, `abs`, `accept`, `alarm`, `atan2`, `bind`, `binmode`, `bless`, `caller`, `chdir`, `chmod`, `chomp`, `chop`, `chown`, `chr`, `chroot`, `close`, `closedir`, `connect`, `continue`, `cos`, `crypt`, `dbmclose`, `dbmopen`, `defined`, `delete`, `die`, `dump`, `each`, `endgrent`, `endhostent`, `endnetent`, `endprotoent`, `endpwent`, `endservent`, `eof`, `eval`, `exec`, `exists`, `exit`, `exp`, `fcntl`, `fileno`, `flock`, `fork`, `format`, `formline`, `getc`, `getgrent`, `getgrgid`, `getgrnam`, `gethostbyaddr`, `gethostbyname`, `gethostent`, `getlogin`, `getnetbyaddr`, `getnetbyname`, `getnetent`, `getpeername`, `getpgrp`, `getppid`, `getpriority`, `getprotobyname`, `getprotobynumber`, `getprotoent`, `getpwent`, `getpwnam`, `getpwuid`, `getservbyname`, `getservbyport`, `getservent`, `getsockname`, `getsockopt`, `glob`, `gmtime`, `goto`, `grep`, `hex`, `import`, `index`, `int`, `ioctl`, `join`, `keys`, `kill`, `last`, `lc`, `lcfirst`, 
`length`, `link`, `listen`, `local`, `localtime`, `log`, `lstat`, `map`, `mkdir`, `msgctl`, `msgget`, `msgrcv`, `msgsnd`, `my`, `next`, `oct`, `open`, `opendir`, `ord`, `our`, `pack`, `pipe`, `pop`, `pos`, `printf`, `prototype`, `push`, `quotemeta`, `rand`, `read`, `readdir`, `readline`, `readlink`, `readpipe`, `recv`, `redo`, `ref`, `rename`, `reverse`, `rewinddir`, `rindex`, `rmdir`, `scalar`, `seek`, `seekdir`, `select`, `semctl`, `semget`, `semop`, `send`, `setgrent`, `sethostent`, `setnetent`, `setpgrp`, `setpriority`, `setprotoent`, `setpwent`, `setservent`, `setsockopt`, `shift`, `shmctl`, `shmget`, `shmread`, `shmwrite`, `shutdown`, `sin`, `sleep`, `socket`, `socketpair`, `sort`, `splice`, `split`, `sprintf`, `sqrt`, `srand`, `stat`, `study`, `substr`, `symlink`, `syscall`, `sysopen`, `sysread`, `sysseek`, `system`, `syswrite`, `tell`, `telldir`, `tie`, `tied`, `time`, `times`, `tr`, `truncate`, `uc`, `ucfirst`, `umask`, `undef`, `unlink`, `unpack`, `unshift`, `untie`, `utime`, `values`, `vec`, `wait`, `waitpid`, `wantarray`, `warn`, `write`), NameBuiltin, nil}, + {`((__(DATA|DIE|WARN)__)|(STD(IN|OUT|ERR)))\b`, NameBuiltinPseudo, nil}, + {`(<<)([\'"]?)([a-zA-Z_]\w*)(\2;?\n.*?\n)(\3)(\n)`, ByGroups(LiteralString, LiteralString, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, Text), nil}, + {`__END__`, CommentPreproc, Push("end-part")}, + {`\$\^[ADEFHILMOPSTWX]`, NameVariableGlobal, nil}, + {"\\$[\\\\\\\"\\[\\]'&`+*.,;=%~?@$!<>(^|/-](?!\\w)", NameVariableGlobal, nil}, + {`[$@%#]+`, NameVariable, Push("varname")}, + {`0_?[0-7]+(_[0-7]+)*`, LiteralNumberOct, nil}, + {`0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*`, LiteralNumberHex, nil}, + {`0b[01]+(_[01]+)*`, LiteralNumberBin, nil}, + {`(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?`, LiteralNumberFloat, nil}, + {`(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*`, LiteralNumberFloat, nil}, + {`\d+(_\d+)*`, LiteralNumberInteger, nil}, + {`'(\\\\|\\[^\\]|[^'\\])*'`, LiteralString, nil}, + {`"(\\\\|\\[^\\]|[^"\\])*"`, LiteralString, nil}, + {"`(\\\\\\\\|\\\\[^\\\\]|[^`\\\\])*`", LiteralStringBacktick, nil}, + {`<([^\s>]+)>`, LiteralStringRegex, nil}, + {`(q|qq|qw|qr|qx)\{`, LiteralStringOther, Push("cb-string")}, + {`(q|qq|qw|qr|qx)\(`, LiteralStringOther, Push("rb-string")}, + {`(q|qq|qw|qr|qx)\[`, LiteralStringOther, Push("sb-string")}, + {`(q|qq|qw|qr|qx)\<`, LiteralStringOther, Push("lt-string")}, + {`(q|qq|qw|qr|qx)([\W_])(.|\n)*?\2`, LiteralStringOther, nil}, + {`(package)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)`, ByGroups(Keyword, Text, NameNamespace), nil}, + {`(use|require|no)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)`, ByGroups(Keyword, Text, NameNamespace), nil}, + {`(sub)(\s+)`, ByGroups(Keyword, Text), Push("funcname")}, + {Words(``, `\b`, `no`, `package`, `require`, `use`), Keyword, nil}, + {`(\[\]|\*\*|::|<<|>>|>=|<=>|<=|={3}|!=|=~|!~|&&?|\|\||\.{1,3})`, Operator, nil}, + {`[-+/*%=<>&^|!\\~]=?`, Operator, nil}, + {`[()\[\]:;,<>/?{}]`, Punctuation, nil}, + {`(?=\w)`, Name, Push("name")}, + }, + "format": { + {`\.\n`, LiteralStringInterpol, Pop(1)}, + {`[^\n]*\n`, LiteralStringInterpol, nil}, + }, + "varname": { + {`\s+`, Text, nil}, + {`\{`, Punctuation, Pop(1)}, + {`\)|,`, Punctuation, Pop(1)}, + {`\w+::`, NameNamespace, nil}, + {`[\w:]+`, NameVariable, Pop(1)}, + }, + "name": { + {`[a-zA-Z_]\w*(::[a-zA-Z_]\w*)*(::)?(?=\s*->)`, NameNamespace, Pop(1)}, + {`[a-zA-Z_]\w*(::[a-zA-Z_]\w*)*::`, NameNamespace, Pop(1)}, + {`[\w:]+`, Name, Pop(1)}, + {`[A-Z_]+(?=\W)`, NameConstant, Pop(1)}, + {`(?=\W)`, Text, Pop(1)}, + }, + "funcname": 
{ + {`[a-zA-Z_]\w*[!?]?`, NameFunction, nil}, + {`\s+`, Text, nil}, + {`(\([$@%]*\))(\s*)`, ByGroups(Punctuation, Text), nil}, + {`;`, Punctuation, Pop(1)}, + {`.*?\{`, Punctuation, Pop(1)}, + }, + "cb-string": { + {`\\[{}\\]`, LiteralStringOther, nil}, + {`\\`, LiteralStringOther, nil}, + {`\{`, LiteralStringOther, Push("cb-string")}, + {`\}`, LiteralStringOther, Pop(1)}, + {`[^{}\\]+`, LiteralStringOther, nil}, + }, + "rb-string": { + {`\\[()\\]`, LiteralStringOther, nil}, + {`\\`, LiteralStringOther, nil}, + {`\(`, LiteralStringOther, Push("rb-string")}, + {`\)`, LiteralStringOther, Pop(1)}, + {`[^()]+`, LiteralStringOther, nil}, + }, + "sb-string": { + {`\\[\[\]\\]`, LiteralStringOther, nil}, + {`\\`, LiteralStringOther, nil}, + {`\[`, LiteralStringOther, Push("sb-string")}, + {`\]`, LiteralStringOther, Pop(1)}, + {`[^\[\]]+`, LiteralStringOther, nil}, + }, + "lt-string": { + {`\\[<>\\]`, LiteralStringOther, nil}, + {`\\`, LiteralStringOther, nil}, + {`\<`, LiteralStringOther, Push("lt-string")}, + {`\>`, LiteralStringOther, Pop(1)}, + {`[^<>]+`, LiteralStringOther, nil}, + }, + "end-part": { + {`.+`, CommentPreproc, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/pig.go b/vendor/github.com/alecthomas/chroma/lexers/p/pig.go new file mode 100644 index 000000000..f2852c18a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/pig.go @@ -0,0 +1,61 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Pig lexer. +var Pig = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Pig", + Aliases: []string{"pig"}, + Filenames: []string{"*.pig"}, + MimeTypes: []string{"text/x-pig"}, + CaseInsensitive: true, + }, + pigRules, +)) + +func pigRules() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`--.*`, Comment, nil}, + {`/\*[\w\W]*?\*/`, CommentMultiline, nil}, + {`\\\n`, Text, nil}, + {`\\`, Text, nil}, + {`\'(?:\\[ntbrf\\\']|\\u[0-9a-f]{4}|[^\'\\\n\r])*\'`, LiteralString, nil}, + Include("keywords"), + Include("types"), + Include("builtins"), + Include("punct"), + Include("operators"), + {`[0-9]*\.[0-9]+(e[0-9]+)?[fd]?`, LiteralNumberFloat, nil}, + {`0x[0-9a-f]+`, LiteralNumberHex, nil}, + {`[0-9]+L?`, LiteralNumberInteger, nil}, + {`\n`, Text, nil}, + {`([a-z_]\w*)(\s*)(\()`, ByGroups(NameFunction, Text, Punctuation), nil}, + {`[()#:]`, Text, nil}, + {`[^(:#\'")\s]+`, Text, nil}, + {`\S+\s+`, Text, nil}, + }, + "keywords": { + {`(assert|and|any|all|arrange|as|asc|bag|by|cache|CASE|cat|cd|cp|%declare|%default|define|dense|desc|describe|distinct|du|dump|eval|exex|explain|filter|flatten|foreach|full|generate|group|help|if|illustrate|import|inner|input|into|is|join|kill|left|limit|load|ls|map|matches|mkdir|mv|not|null|onschema|or|order|outer|output|parallel|pig|pwd|quit|register|returns|right|rm|rmf|rollup|run|sample|set|ship|split|stderr|stdin|stdout|store|stream|through|union|using|void)\b`, Keyword, nil}, + }, + "builtins": { + {`(AVG|BinStorage|cogroup|CONCAT|copyFromLocal|copyToLocal|COUNT|cross|DIFF|MAX|MIN|PigDump|PigStorage|SIZE|SUM|TextLoader|TOKENIZE)\b`, NameBuiltin, nil}, + }, + "types": { + {`(bytearray|BIGINTEGER|BIGDECIMAL|chararray|datetime|double|float|int|long|tuple)\b`, KeywordType, nil}, + }, + "punct": { + {`[;(){}\[\]]`, Punctuation, nil}, + }, + "operators": { + {`[#=,./%+\-?]`, Operator, nil}, + {`(eq|gt|lt|gte|lte|neq|matches)\b`, Operator, nil}, + {`(==|<=|<|>=|>|!=)`, Operator, nil}, + }, + } +} diff --git 
a/vendor/github.com/alecthomas/chroma/lexers/p/pkgconfig.go b/vendor/github.com/alecthomas/chroma/lexers/p/pkgconfig.go new file mode 100644 index 000000000..9e024107f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/pkgconfig.go @@ -0,0 +1,45 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Pkgconfig lexer. +var Pkgconfig = internal.Register(MustNewLazyLexer( + &Config{ + Name: "PkgConfig", + Aliases: []string{"pkgconfig"}, + Filenames: []string{"*.pc"}, + MimeTypes: []string{}, + }, + pkgconfigRules, +)) + +func pkgconfigRules() Rules { + return Rules{ + "root": { + {`#.*$`, CommentSingle, nil}, + {`^(\w+)(=)`, ByGroups(NameAttribute, Operator), nil}, + {`^([\w.]+)(:)`, ByGroups(NameTag, Punctuation), Push("spvalue")}, + Include("interp"), + {`[^${}#=:\n.]+`, Text, nil}, + {`.`, Text, nil}, + }, + "interp": { + {`\$\$`, Text, nil}, + {`\$\{`, LiteralStringInterpol, Push("curly")}, + }, + "curly": { + {`\}`, LiteralStringInterpol, Pop(1)}, + {`\w+`, NameAttribute, nil}, + }, + "spvalue": { + Include("interp"), + {`#.*$`, CommentSingle, Pop(1)}, + {`\n`, Text, Pop(1)}, + {`[^${}#\n]+`, Text, nil}, + {`.`, Text, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/plaintext.go b/vendor/github.com/alecthomas/chroma/lexers/p/plaintext.go new file mode 100644 index 000000000..058c02a89 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/plaintext.go @@ -0,0 +1,17 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +var Plaintext = internal.Register(MustNewLazyLexer( + &Config{ + Name: "plaintext", + Aliases: []string{"text", "plain", "no-highlight"}, + Filenames: []string{"*.txt"}, + MimeTypes: []string{"text/plain"}, + Priority: 0.1, + }, + internal.PlaintextRules, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/plsql.go b/vendor/github.com/alecthomas/chroma/lexers/p/plsql.go new file mode 100644 index 000000000..324d3feb2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/plsql.go @@ -0,0 +1,62 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Pl/Pgsql lexer. 
+var PLpgSQL = internal.Register(MustNewLazyLexer( + &Config{ + Name: "PL/pgSQL", + Aliases: []string{"plpgsql"}, + Filenames: []string{}, + MimeTypes: []string{"text/x-plpgsql"}, + NotMultiline: true, + CaseInsensitive: true, + }, + plpgSQLRules, +)) + +func plpgSQLRules() Rules { + return Rules{ + "root": { + {`\%[a-z]\w*\b`, NameBuiltin, nil}, + {`:=`, Operator, nil}, + {`\<\<[a-z]\w*\>\>`, NameLabel, nil}, + {`\#[a-z]\w*\b`, KeywordPseudo, nil}, + {`\s+`, TextWhitespace, nil}, + {`--.*\n?`, CommentSingle, nil}, + {`/\*`, CommentMultiline, Push("multiline-comments")}, + {`(bigint|bigserial|bit|bit\s+varying|bool|boolean|box|bytea|char|character|character\s+varying|cidr|circle|date|decimal|double\s+precision|float4|float8|inet|int|int2|int4|int8|integer|interval|json|jsonb|line|lseg|macaddr|money|numeric|path|pg_lsn|point|polygon|real|serial|serial2|serial4|serial8|smallint|smallserial|text|time|timestamp|timestamptz|timetz|tsquery|tsvector|txid_snapshot|uuid|varbit|varchar|with\s+time\s+zone|without\s+time\s+zone|xml|anyarray|anyelement|anyenum|anynonarray|anyrange|cstring|fdw_handler|internal|language_handler|opaque|record|void)\b`, NameBuiltin, nil}, + {Words(``, `\b`, `ABORT`, `ABSOLUTE`, `ACCESS`, `ACTION`, `ADD`, `ADMIN`, `AFTER`, `AGGREGATE`, `ALL`, `ALSO`, `ALTER`, `ALWAYS`, `ANALYSE`, `ANALYZE`, `AND`, `ANY`, `ARRAY`, `AS`, `ASC`, `ASSERTION`, `ASSIGNMENT`, `ASYMMETRIC`, `AT`, `ATTRIBUTE`, `AUTHORIZATION`, `BACKWARD`, `BEFORE`, `BEGIN`, `BETWEEN`, `BIGINT`, `BINARY`, `BIT`, `BOOLEAN`, `BOTH`, `BY`, `CACHE`, `CALLED`, `CASCADE`, `CASCADED`, `CASE`, `CAST`, `CATALOG`, `CHAIN`, `CHAR`, `CHARACTER`, `CHARACTERISTICS`, `CHECK`, `CHECKPOINT`, `CLASS`, `CLOSE`, `CLUSTER`, `COALESCE`, `COLLATE`, `COLLATION`, `COLUMN`, `COMMENT`, `COMMENTS`, `COMMIT`, `COMMITTED`, `CONCURRENTLY`, `CONFIGURATION`, `CONNECTION`, `CONSTRAINT`, `CONSTRAINTS`, `CONTENT`, `CONTINUE`, `CONVERSION`, `COPY`, `COST`, `CREATE`, `CROSS`, `CSV`, `CURRENT`, `CURRENT_CATALOG`, `CURRENT_DATE`, `CURRENT_ROLE`, `CURRENT_SCHEMA`, `CURRENT_TIME`, `CURRENT_TIMESTAMP`, `CURRENT_USER`, `CURSOR`, `CYCLE`, `DATA`, `DATABASE`, `DAY`, `DEALLOCATE`, `DEC`, `DECIMAL`, `DECLARE`, `DEFAULT`, `DEFAULTS`, `DEFERRABLE`, `DEFERRED`, `DEFINER`, `DELETE`, `DELIMITER`, `DELIMITERS`, `DESC`, `DICTIONARY`, `DISABLE`, `DISCARD`, `DISTINCT`, `DO`, `DOCUMENT`, `DOMAIN`, `DOUBLE`, `DROP`, `EACH`, `ELSE`, `ENABLE`, `ENCODING`, `ENCRYPTED`, `END`, `ENUM`, `ESCAPE`, `EVENT`, `EXCEPT`, `EXCLUDE`, `EXCLUDING`, `EXCLUSIVE`, `EXECUTE`, `EXISTS`, `EXPLAIN`, `EXTENSION`, `EXTERNAL`, `EXTRACT`, `FALSE`, `FAMILY`, `FETCH`, `FILTER`, `FIRST`, `FLOAT`, `FOLLOWING`, `FOR`, `FORCE`, `FOREIGN`, `FORWARD`, `FREEZE`, `FROM`, `FULL`, `FUNCTION`, `FUNCTIONS`, `GLOBAL`, `GRANT`, `GRANTED`, `GREATEST`, `GROUP`, `HANDLER`, `HAVING`, `HEADER`, `HOLD`, `HOUR`, `IDENTITY`, `IF`, `ILIKE`, `IMMEDIATE`, `IMMUTABLE`, `IMPLICIT`, `IN`, `INCLUDING`, `INCREMENT`, `INDEX`, `INDEXES`, `INHERIT`, `INHERITS`, `INITIALLY`, `INLINE`, `INNER`, `INOUT`, `INPUT`, `INSENSITIVE`, `INSERT`, `INSTEAD`, `INT`, `INTEGER`, `INTERSECT`, `INTERVAL`, `INTO`, `INVOKER`, `IS`, `ISNULL`, `ISOLATION`, `JOIN`, `KEY`, `LABEL`, `LANGUAGE`, `LARGE`, `LAST`, `LATERAL`, `LC_COLLATE`, `LC_CTYPE`, `LEADING`, `LEAKPROOF`, `LEAST`, `LEFT`, `LEVEL`, `LIKE`, `LIMIT`, `LISTEN`, `LOAD`, `LOCAL`, `LOCALTIME`, `LOCALTIMESTAMP`, `LOCATION`, `LOCK`, `MAPPING`, `MATCH`, `MATERIALIZED`, `MAXVALUE`, `MINUTE`, `MINVALUE`, `MODE`, `MONTH`, `MOVE`, `NAME`, `NAMES`, `NATIONAL`, `NATURAL`, `NCHAR`, `NEXT`, `NO`, `NONE`, `NOT`, 
`NOTHING`, `NOTIFY`, `NOTNULL`, `NOWAIT`, `NULL`, `NULLIF`, `NULLS`, `NUMERIC`, `OBJECT`, `OF`, `OFF`, `OFFSET`, `OIDS`, `ON`, `ONLY`, `OPERATOR`, `OPTION`, `OPTIONS`, `OR`, `ORDER`, `ORDINALITY`, `OUT`, `OUTER`, `OVER`, `OVERLAPS`, `OVERLAY`, `OWNED`, `OWNER`, `PARSER`, `PARTIAL`, `PARTITION`, `PASSING`, `PASSWORD`, `PLACING`, `PLANS`, `POLICY`, `POSITION`, `PRECEDING`, `PRECISION`, `PREPARE`, `PREPARED`, `PRESERVE`, `PRIMARY`, `PRIOR`, `PRIVILEGES`, `PROCEDURAL`, `PROCEDURE`, `PROGRAM`, `QUOTE`, `RANGE`, `READ`, `REAL`, `REASSIGN`, `RECHECK`, `RECURSIVE`, `REF`, `REFERENCES`, `REFRESH`, `REINDEX`, `RELATIVE`, `RELEASE`, `RENAME`, `REPEATABLE`, `REPLACE`, `REPLICA`, `RESET`, `RESTART`, `RESTRICT`, `RETURNING`, `RETURNS`, `REVOKE`, `RIGHT`, `ROLE`, `ROLLBACK`, `ROW`, `ROWS`, `RULE`, `SAVEPOINT`, `SCHEMA`, `SCROLL`, `SEARCH`, `SECOND`, `SECURITY`, `SELECT`, `SEQUENCE`, `SEQUENCES`, `SERIALIZABLE`, `SERVER`, `SESSION`, `SESSION_USER`, `SET`, `SETOF`, `SHARE`, `SHOW`, `SIMILAR`, `SIMPLE`, `SMALLINT`, `SNAPSHOT`, `SOME`, `STABLE`, `STANDALONE`, `START`, `STATEMENT`, `STATISTICS`, `STDIN`, `STDOUT`, `STORAGE`, `STRICT`, `STRIP`, `SUBSTRING`, `SYMMETRIC`, `SYSID`, `SYSTEM`, `TABLE`, `TABLES`, `TABLESPACE`, `TEMP`, `TEMPLATE`, `TEMPORARY`, `TEXT`, `THEN`, `TIME`, `TIMESTAMP`, `TO`, `TRAILING`, `TRANSACTION`, `TREAT`, `TRIGGER`, `TRIM`, `TRUE`, `TRUNCATE`, `TRUSTED`, `TYPE`, `TYPES`, `UNBOUNDED`, `UNCOMMITTED`, `UNENCRYPTED`, `UNION`, `UNIQUE`, `UNKNOWN`, `UNLISTEN`, `UNLOGGED`, `UNTIL`, `UPDATE`, `USER`, `USING`, `VACUUM`, `VALID`, `VALIDATE`, `VALIDATOR`, `VALUE`, `VALUES`, `VARCHAR`, `VARIADIC`, `VARYING`, `VERBOSE`, `VERSION`, `VIEW`, `VIEWS`, `VOLATILE`, `WHEN`, `WHERE`, `WHITESPACE`, `WINDOW`, `WITH`, `WITHIN`, `WITHOUT`, `WORK`, `WRAPPER`, `WRITE`, `XML`, `XMLATTRIBUTES`, `XMLCONCAT`, `XMLELEMENT`, `XMLEXISTS`, `XMLFOREST`, `XMLPARSE`, `XMLPI`, `XMLROOT`, `XMLSERIALIZE`, `YEAR`, `YES`, `ZONE`, `ALIAS`, `CONSTANT`, `DIAGNOSTICS`, `ELSIF`, `EXCEPTION`, `EXIT`, `FOREACH`, `GET`, `LOOP`, `NOTICE`, `OPEN`, `PERFORM`, `QUERY`, `RAISE`, `RETURN`, `REVERSE`, `SQLSTATE`, `WHILE`), Keyword, nil}, + {"[+*/<>=~!@#%^&|`?-]+", Operator, nil}, + {`::`, Operator, nil}, + {`\$\d+`, NameVariable, nil}, + {`([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`((?:E|U&)?)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("string")}, + {`((?:U&)?)(")`, ByGroups(LiteralStringAffix, LiteralStringName), Push("quoted-ident")}, + // { `(?s)(\$)([^$]*)(\$)(.*?)(\$)(\2)(\$)`, ?? ??, nil }, + {`[a-z_]\w*`, Name, nil}, + {`:(['"]?)[a-z]\w*\b\1`, NameVariable, nil}, + {`[;:()\[\]{},.]`, Punctuation, nil}, + }, + "multiline-comments": { + {`/\*`, CommentMultiline, Push("multiline-comments")}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[^/*]+`, CommentMultiline, nil}, + {`[/*]`, CommentMultiline, nil}, + }, + "string": { + {`[^']+`, LiteralStringSingle, nil}, + {`''`, LiteralStringSingle, nil}, + {`'`, LiteralStringSingle, Pop(1)}, + }, + "quoted-ident": { + {`[^"]+`, LiteralStringName, nil}, + {`""`, LiteralStringName, nil}, + {`"`, LiteralStringName, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/plutus_core.go b/vendor/github.com/alecthomas/chroma/lexers/p/plutus_core.go new file mode 100644 index 000000000..b2ad4e108 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/plutus_core.go @@ -0,0 +1,76 @@ +package p + +import ( + . 
"github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// nolint + +// Lexer for the Plutus Core Languages (version 2.1) +// +// including both Typed- and Untyped- versions +// based on “Formal Specification of the Plutus Core Language (version 2.1)”, published 6th April 2021: +// https://hydra.iohk.io/build/8205579/download/1/plutus-core-specification.pdf + +var PlutusCoreLang = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Plutus Core", + Aliases: []string{"plutus-core", "plc"}, + Filenames: []string{"*.plc"}, + MimeTypes: []string{"text/x-plutus-core", "application/x-plutus-core"}, + }, + plutusCoreRules, +)) + +func plutusCoreRules() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`(\(|\))`, Punctuation, nil}, + {`(\[|\])`, Punctuation, nil}, + {`({|})`, Punctuation, nil}, + + // Constants. Figure 1. + // For version, see handling of (program ...) below. + {`([+-]?\d+)`, LiteralNumberInteger, nil}, + {`(#([a-fA-F0-9][a-fA-F0-9])+)`, LiteralString, nil}, + {`(\(\))`, NameConstant, nil}, + {`(True|False)`, NameConstant, nil}, + + // Keywords. Figures 2 and 15. + // Special handling for program because it is followed by a version. + {`(con |abs |iwrap |unwrap |lam |builtin |delay |force |error)`, Keyword, nil}, + {`(fun |all |ifix |lam |con )`, Keyword, nil}, + {`(type|fun )`, Keyword, nil}, + {`(program )(\S+)`, ByGroups(Keyword, LiteralString), nil}, + + // Built-in Types. Figure 12. + {`(unit|bool|integer|bytestring|string)`, KeywordType, nil}, + + // Built-ins Functions. Figure 14 but, more importantly, implementation: + // https://github.com/input-output-hk/plutus/blob/6d759c4/plutus-core/plutus-core/src/PlutusCore/Default/Builtins.hs#L42-L111 + {`(addInteger |subtractInteger |multiplyInteger |divideInteger |quotientInteger |remainderInteger |modInteger |equalsInteger |lessThanInteger |lessThanEqualsInteger )`, NameBuiltin, nil}, + {`(appendByteString |consByteString |sliceByteString |lengthOfByteString |indexByteString |equalsByteString |lessThanByteString |lessThanEqualsByteString )`, NameBuiltin, nil}, + {`(sha2_256 |sha3_256 |blake2b_256 |verifySignature )`, NameBuiltin, nil}, + {`(appendString |equalsString |encodeUtf8 |decodeUtf8 )`, NameBuiltin, nil}, + {`(ifThenElse )`, NameBuiltin, nil}, + {`(chooseUnit )`, NameBuiltin, nil}, + {`(trace )`, NameBuiltin, nil}, + {`(fstPair |sndPair )`, NameBuiltin, nil}, + {`(chooseList |mkCons |headList |tailList |nullList )`, NameBuiltin, nil}, + {`(chooseData |constrData |mapData |listData |iData |bData |unConstrData |unMapData |unListData |unIData |unBData |equalsData )`, NameBuiltin, nil}, + {`(mkPairData |mkNilData |mkNilPairData )`, NameBuiltin, nil}, + + // Name. Figure 1. + {`([a-zA-Z][a-zA-Z0-9_']*)`, Name, nil}, + + // Unicode String. Not in the specification. + {`"`, LiteralStringDouble, Push("string")}, + }, + "string": { + {`[^\\"]+`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/pony.go b/vendor/github.com/alecthomas/chroma/lexers/p/pony.go new file mode 100644 index 000000000..41d568289 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/pony.go @@ -0,0 +1,63 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Pony lexer. 
+var Pony = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Pony", + Aliases: []string{"pony"}, + Filenames: []string{"*.pony"}, + MimeTypes: []string{}, + }, + ponyRules, +)) + +func ponyRules() Rules { + return Rules{ + "root": { + {`\n`, Text, nil}, + {`[^\S\n]+`, Text, nil}, + {`//.*\n`, CommentSingle, nil}, + {`/\*`, CommentMultiline, Push("nested_comment")}, + {`"""(?:.|\n)*?"""`, LiteralStringDoc, nil}, + {`"`, LiteralString, Push("string")}, + {`\'.*\'`, LiteralStringChar, nil}, + {`=>|[]{}:().~;,|&!^?[]`, Punctuation, nil}, + {Words(``, `\b`, `addressof`, `and`, `as`, `consume`, `digestof`, `is`, `isnt`, `not`, `or`), OperatorWord, nil}, + {`!=|==|<<|>>|[-+/*%=<>]`, Operator, nil}, + {Words(``, `\b`, `box`, `break`, `compile_error`, `compile_intrinsic`, `continue`, `do`, `else`, `elseif`, `embed`, `end`, `error`, `for`, `if`, `ifdef`, `in`, `iso`, `lambda`, `let`, `match`, `object`, `recover`, `ref`, `repeat`, `return`, `tag`, `then`, `this`, `trn`, `try`, `until`, `use`, `var`, `val`, `where`, `while`, `with`, `#any`, `#read`, `#send`, `#share`), Keyword, nil}, + {`(actor|class|struct|primitive|interface|trait|type)((?:\s)+)`, ByGroups(Keyword, Text), Push("typename")}, + {`(new|fun|be)((?:\s)+)`, ByGroups(Keyword, Text), Push("methodname")}, + {Words(``, `\b`, `U8`, `U16`, `U32`, `U64`, `ULong`, `USize`, `U128`, `Unsigned`, `Stringable`, `String`, `StringBytes`, `StringRunes`, `InputNotify`, `InputStream`, `Stdin`, `ByteSeq`, `ByteSeqIter`, `OutStream`, `StdStream`, `SourceLoc`, `I8`, `I16`, `I32`, `I64`, `ILong`, `ISize`, `I128`, `Signed`, `Seq`, `RuntimeOptions`, `Real`, `Integer`, `SignedInteger`, `UnsignedInteger`, `FloatingPoint`, `Number`, `Int`, `ReadSeq`, `ReadElement`, `Pointer`, `Platform`, `NullablePointer`, `None`, `Iterator`, `F32`, `F64`, `Float`, `Env`, `DoNotOptimise`, `DisposableActor`, `Less`, `Equal`, `Greater`, `Compare`, `HasEq`, `Equatable`, `Comparable`, `Bool`, `AsioEventID`, `AsioEventNotify`, `AsioEvent`, `Array`, `ArrayKeys`, `ArrayValues`, `ArrayPairs`, `Any`, `AmbientAuth`), KeywordType, nil}, + {`_?[A-Z]\w*`, NameClass, nil}, + {`string\(\)`, NameOther, nil}, + {`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+`, LiteralNumberFloat, nil}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`(true|false)\b`, Keyword, nil}, + {`_\d*`, Name, nil}, + {`_?[a-z][\w\'_]*`, Name, nil}, + }, + "typename": { + {`(iso|trn|ref|val|box|tag)?((?:\s)*)(_?[A-Z]\w*)`, ByGroups(Keyword, Text, NameClass), Pop(1)}, + }, + "methodname": { + {`(iso|trn|ref|val|box|tag)?((?:\s)*)(_?[a-z]\w*)`, ByGroups(Keyword, Text, NameFunction), Pop(1)}, + }, + "nested_comment": { + {`[^*/]+`, CommentMultiline, nil}, + {`/\*`, CommentMultiline, Push()}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[*/]`, CommentMultiline, nil}, + }, + "string": { + {`"`, LiteralString, Pop(1)}, + {`\\"`, LiteralString, nil}, + {`[^\\"]+`, LiteralString, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/postgres.go b/vendor/github.com/alecthomas/chroma/lexers/p/postgres.go new file mode 100644 index 000000000..8977e2cfd --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/postgres.go @@ -0,0 +1,83 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Postgresql Sql Dialect lexer. 
+var PostgreSQL = internal.Register(MustNewLazyLexer( + &Config{ + Name: "PostgreSQL SQL dialect", + Aliases: []string{"postgresql", "postgres"}, + Filenames: []string{}, + MimeTypes: []string{"text/x-postgresql"}, + NotMultiline: true, + CaseInsensitive: true, + }, + postgreSQLRules, +)) + +func postgreSQLRules() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`--.*\n?`, CommentSingle, nil}, + {`/\*`, CommentMultiline, Push("multiline-comments")}, + {`(bigint|bigserial|bit|bit\s+varying|bool|boolean|box|bytea|char|character|character\s+varying|cidr|circle|date|decimal|double\s+precision|float4|float8|inet|int|int2|int4|int8|integer|interval|json|jsonb|line|lseg|macaddr|money|numeric|path|pg_lsn|point|polygon|real|serial|serial2|serial4|serial8|smallint|smallserial|text|time|timestamp|timestamptz|timetz|tsquery|tsvector|txid_snapshot|uuid|varbit|varchar|with\s+time\s+zone|without\s+time\s+zone|xml|anyarray|anyelement|anyenum|anynonarray|anyrange|cstring|fdw_handler|internal|language_handler|opaque|record|void)\b`, NameBuiltin, nil}, + { + `(?s)(DO)(\s+)(?:(LANGUAGE)?(\s+)('?)(\w+)?('?)(\s+))?(\$)([^$]*)(\$)(.*?)(\$)(\10)(\$)`, + UsingByGroup( + internal.Get, + 6, 12, + Keyword, Text, Keyword, Text, // DO LANGUAGE + StringSingle, StringSingle, StringSingle, Text, // 'plpgsql' + StringHeredoc, StringHeredoc, StringHeredoc, // $tag$ + StringHeredoc, // (code block) + StringHeredoc, StringHeredoc, StringHeredoc, // $tag$ + ), + nil, + }, + {Words(``, `\b`, `ABORT`, `ABSOLUTE`, `ACCESS`, `ACTION`, `ADD`, `ADMIN`, `AFTER`, `AGGREGATE`, `ALL`, `ALSO`, `ALTER`, `ALWAYS`, `ANALYSE`, `ANALYZE`, `AND`, `ANY`, `ARRAY`, `AS`, `ASC`, `ASSERTION`, `ASSIGNMENT`, `ASYMMETRIC`, `AT`, `ATTRIBUTE`, `AUTHORIZATION`, `BACKWARD`, `BEFORE`, `BEGIN`, `BETWEEN`, `BIGINT`, `BINARY`, `BIT`, `BOOLEAN`, `BOTH`, `BY`, `CACHE`, `CALLED`, `CASCADE`, `CASCADED`, `CASE`, `CAST`, `CATALOG`, `CHAIN`, `CHAR`, `CHARACTER`, `CHARACTERISTICS`, `CHECK`, `CHECKPOINT`, `CLASS`, `CLOSE`, `CLUSTER`, `COALESCE`, `COLLATE`, `COLLATION`, `COLUMN`, `COMMENT`, `COMMENTS`, `COMMIT`, `COMMITTED`, `CONCURRENTLY`, `CONFIGURATION`, `CONNECTION`, `CONSTRAINT`, `CONSTRAINTS`, `CONTENT`, `CONTINUE`, `CONVERSION`, `COPY`, `COST`, `CREATE`, `CROSS`, `CSV`, `CURRENT`, `CURRENT_CATALOG`, `CURRENT_DATE`, `CURRENT_ROLE`, `CURRENT_SCHEMA`, `CURRENT_TIME`, `CURRENT_TIMESTAMP`, `CURRENT_USER`, `CURSOR`, `CYCLE`, `DATA`, `DATABASE`, `DAY`, `DEALLOCATE`, `DEC`, `DECIMAL`, `DECLARE`, `DEFAULT`, `DEFAULTS`, `DEFERRABLE`, `DEFERRED`, `DEFINER`, `DELETE`, `DELIMITER`, `DELIMITERS`, `DESC`, `DICTIONARY`, `DISABLE`, `DISCARD`, `DISTINCT`, `DO`, `DOCUMENT`, `DOMAIN`, `DOUBLE`, `DROP`, `EACH`, `ELSE`, `ENABLE`, `ENCODING`, `ENCRYPTED`, `END`, `ENUM`, `ESCAPE`, `EVENT`, `EXCEPT`, `EXCLUDE`, `EXCLUDING`, `EXCLUSIVE`, `EXECUTE`, `EXISTS`, `EXPLAIN`, `EXTENSION`, `EXTERNAL`, `EXTRACT`, `FALSE`, `FAMILY`, `FETCH`, `FILTER`, `FIRST`, `FLOAT`, `FOLLOWING`, `FOR`, `FORCE`, `FOREIGN`, `FORWARD`, `FREEZE`, `FROM`, `FULL`, `FUNCTION`, `FUNCTIONS`, `GLOBAL`, `GRANT`, `GRANTED`, `GREATEST`, `GROUP`, `HANDLER`, `HAVING`, `HEADER`, `HOLD`, `HOUR`, `IDENTITY`, `IF`, `ILIKE`, `IMMEDIATE`, `IMMUTABLE`, `IMPLICIT`, `IN`, `INCLUDING`, `INCREMENT`, `INDEX`, `INDEXES`, `INHERIT`, `INHERITS`, `INITIALLY`, `INLINE`, `INNER`, `INOUT`, `INPUT`, `INSENSITIVE`, `INSERT`, `INSTEAD`, `INT`, `INTEGER`, `INTERSECT`, `INTERVAL`, `INTO`, `INVOKER`, `IS`, `ISNULL`, `ISOLATION`, `JOIN`, `KEY`, `LABEL`, `LANGUAGE`, `LARGE`, `LAST`, `LATERAL`, `LC_COLLATE`, `LC_CTYPE`, `LEADING`, 
`LEAKPROOF`, `LEAST`, `LEFT`, `LEVEL`, `LIKE`, `LIMIT`, `LISTEN`, `LOAD`, `LOCAL`, `LOCALTIME`, `LOCALTIMESTAMP`, `LOCATION`, `LOCK`, `MAPPING`, `MATCH`, `MATERIALIZED`, `MAXVALUE`, `MINUTE`, `MINVALUE`, `MODE`, `MONTH`, `MOVE`, `NAME`, `NAMES`, `NATIONAL`, `NATURAL`, `NCHAR`, `NEXT`, `NO`, `NONE`, `NOT`, `NOTHING`, `NOTIFY`, `NOTNULL`, `NOWAIT`, `NULL`, `NULLIF`, `NULLS`, `NUMERIC`, `OBJECT`, `OF`, `OFF`, `OFFSET`, `OIDS`, `ON`, `ONLY`, `OPERATOR`, `OPTION`, `OPTIONS`, `OR`, `ORDER`, `ORDINALITY`, `OUT`, `OUTER`, `OVER`, `OVERLAPS`, `OVERLAY`, `OWNED`, `OWNER`, `PARSER`, `PARTIAL`, `PARTITION`, `PASSING`, `PASSWORD`, `PLACING`, `PLANS`, `POLICY`, `POSITION`, `PRECEDING`, `PRECISION`, `PREPARE`, `PREPARED`, `PRESERVE`, `PRIMARY`, `PRIOR`, `PRIVILEGES`, `PROCEDURAL`, `PROCEDURE`, `PROGRAM`, `QUOTE`, `RANGE`, `READ`, `REAL`, `REASSIGN`, `RECHECK`, `RECURSIVE`, `REF`, `REFERENCES`, `REFRESH`, `REINDEX`, `RELATIVE`, `RELEASE`, `RENAME`, `REPEATABLE`, `REPLACE`, `REPLICA`, `RESET`, `RESTART`, `RESTRICT`, `RETURNING`, `RETURNS`, `REVOKE`, `RIGHT`, `ROLE`, `ROLLBACK`, `ROW`, `ROWS`, `RULE`, `SAVEPOINT`, `SCHEMA`, `SCROLL`, `SEARCH`, `SECOND`, `SECURITY`, `SELECT`, `SEQUENCE`, `SEQUENCES`, `SERIALIZABLE`, `SERVER`, `SESSION`, `SESSION_USER`, `SET`, `SETOF`, `SHARE`, `SHOW`, `SIMILAR`, `SIMPLE`, `SMALLINT`, `SNAPSHOT`, `SOME`, `STABLE`, `STANDALONE`, `START`, `STATEMENT`, `STATISTICS`, `STDIN`, `STDOUT`, `STORAGE`, `STRICT`, `STRIP`, `SUBSTRING`, `SYMMETRIC`, `SYSID`, `SYSTEM`, `TABLE`, `TABLES`, `TABLESPACE`, `TEMP`, `TEMPLATE`, `TEMPORARY`, `TEXT`, `THEN`, `TIME`, `TIMESTAMP`, `TO`, `TRAILING`, `TRANSACTION`, `TREAT`, `TRIGGER`, `TRIM`, `TRUE`, `TRUNCATE`, `TRUSTED`, `TYPE`, `TYPES`, `UNBOUNDED`, `UNCOMMITTED`, `UNENCRYPTED`, `UNION`, `UNIQUE`, `UNKNOWN`, `UNLISTEN`, `UNLOGGED`, `UNTIL`, `UPDATE`, `USER`, `USING`, `VACUUM`, `VALID`, `VALIDATE`, `VALIDATOR`, `VALUE`, `VALUES`, `VARCHAR`, `VARIADIC`, `VARYING`, `VERBOSE`, `VERSION`, `VIEW`, `VIEWS`, `VOLATILE`, `WHEN`, `WHERE`, `WHITESPACE`, `WINDOW`, `WITH`, `WITHIN`, `WITHOUT`, `WORK`, `WRAPPER`, `WRITE`, `XML`, `XMLATTRIBUTES`, `XMLCONCAT`, `XMLELEMENT`, `XMLEXISTS`, `XMLFOREST`, `XMLPARSE`, `XMLPI`, `XMLROOT`, `XMLSERIALIZE`, `YEAR`, `YES`, `ZONE`), Keyword, nil}, + {"[+*/<>=~!@#%^&|`?-]+", Operator, nil}, + {`::`, Operator, nil}, + {`\$\d+`, NameVariable, nil}, + {`([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`((?:E|U&)?)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("string")}, + {`((?:U&)?)(")`, ByGroups(LiteralStringAffix, LiteralStringName), Push("quoted-ident")}, + { + `(?s)(\$)([^$]*)(\$)(.*?)(\$)(\2)(\$)(\s+)(LANGUAGE)?(\s+)('?)(\w+)?('?)`, + UsingByGroup(internal.Get, + 12, 4, + StringHeredoc, StringHeredoc, StringHeredoc, // $tag$ + StringHeredoc, // (code block) + StringHeredoc, StringHeredoc, StringHeredoc, // $tag$ + Text, Keyword, Text, // LANGUAGE + StringSingle, StringSingle, StringSingle, // 'type' + ), + nil, + }, + {`(?s)(\$)([^$]*)(\$)(.*?)(\$)(\2)(\$)`, LiteralStringHeredoc, nil}, + {`[a-z_]\w*`, Name, nil}, + {`:(['"]?)[a-z]\w*\b\1`, NameVariable, nil}, + {`[;:()\[\]{},.]`, Punctuation, nil}, + }, + "multiline-comments": { + {`/\*`, CommentMultiline, Push("multiline-comments")}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[^/*]+`, CommentMultiline, nil}, + {`[/*]`, CommentMultiline, nil}, + }, + "string": { + {`[^']+`, LiteralStringSingle, nil}, + {`''`, LiteralStringSingle, nil}, + {`'`, LiteralStringSingle, Pop(1)}, + }, + "quoted-ident": { + 
{`[^"]+`, LiteralStringName, nil}, + {`""`, LiteralStringName, nil}, + {`"`, LiteralStringName, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/postscript.go b/vendor/github.com/alecthomas/chroma/lexers/p/postscript.go new file mode 100644 index 000000000..0b51ba5ef --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/postscript.go @@ -0,0 +1,50 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Postscript lexer. +var Postscript = internal.Register(MustNewLazyLexer( + &Config{ + Name: "PostScript", + Aliases: []string{"postscript", "postscr"}, + Filenames: []string{"*.ps", "*.eps"}, + MimeTypes: []string{"application/postscript"}, + }, + postscriptRules, +)) + +func postscriptRules() Rules { + return Rules{ + "root": { + {`^%!.+\n`, CommentPreproc, nil}, + {`%%.*\n`, CommentSpecial, nil}, + {`(^%.*\n){2,}`, CommentMultiline, nil}, + {`%.*\n`, CommentSingle, nil}, + {`\(`, LiteralString, Push("stringliteral")}, + {`[{}<>\[\]]`, Punctuation, nil}, + {`<[0-9A-Fa-f]+>(?=[()<>\[\]{}/%\s])`, LiteralNumberHex, nil}, + {`[0-9]+\#(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?(?=[()<>\[\]{}/%\s])`, LiteralNumberOct, nil}, + {`(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?(?=[()<>\[\]{}/%\s])`, LiteralNumberFloat, nil}, + {`(\-|\+)?[0-9]+(?=[()<>\[\]{}/%\s])`, LiteralNumberInteger, nil}, + {`\/[^()<>\[\]{}/%\s]+(?=[()<>\[\]{}/%\s])`, NameVariable, nil}, + {`[^()<>\[\]{}/%\s]+(?=[()<>\[\]{}/%\s])`, NameFunction, nil}, + {`(false|true)(?=[()<>\[\]{}/%\s])`, KeywordConstant, nil}, + {`(eq|ne|g[et]|l[et]|and|or|not|if(?:else)?|for(?:all)?)(?=[()<>\[\]{}/%\s])`, KeywordReserved, nil}, + {Words(``, `(?=[()<>\[\]{}/%\s])`, `abs`, `add`, `aload`, `arc`, `arcn`, `array`, `atan`, `begin`, `bind`, `ceiling`, `charpath`, `clip`, `closepath`, `concat`, `concatmatrix`, `copy`, `cos`, `currentlinewidth`, `currentmatrix`, `currentpoint`, `curveto`, `cvi`, `cvs`, `def`, `defaultmatrix`, `dict`, `dictstackoverflow`, `div`, `dtransform`, `dup`, `end`, `exch`, `exec`, `exit`, `exp`, `fill`, `findfont`, `floor`, `get`, `getinterval`, `grestore`, `gsave`, `gt`, `identmatrix`, `idiv`, `idtransform`, `index`, `invertmatrix`, `itransform`, `length`, `lineto`, `ln`, `load`, `log`, `loop`, `matrix`, `mod`, `moveto`, `mul`, `neg`, `newpath`, `pathforall`, `pathbbox`, `pop`, `print`, `pstack`, `put`, `quit`, `rand`, `rangecheck`, `rcurveto`, `repeat`, `restore`, `rlineto`, `rmoveto`, `roll`, `rotate`, `round`, `run`, `save`, `scale`, `scalefont`, `setdash`, `setfont`, `setgray`, `setlinecap`, `setlinejoin`, `setlinewidth`, `setmatrix`, `setrgbcolor`, `shfill`, `show`, `showpage`, `sin`, `sqrt`, `stack`, `stringwidth`, `stroke`, `strokepath`, `sub`, `syntaxerror`, `transform`, `translate`, `truncate`, `typecheck`, `undefined`, `undefinedfilename`, `undefinedresult`), NameBuiltin, nil}, + {`\s+`, Text, nil}, + }, + "stringliteral": { + {`[^()\\]+`, LiteralString, nil}, + {`\\`, LiteralStringEscape, Push("escape")}, + {`\(`, LiteralString, Push()}, + {`\)`, LiteralString, Pop(1)}, + }, + "escape": { + {`[0-8]{3}|n|r|t|b|f|\\|\(|\)`, LiteralStringEscape, Pop(1)}, + Default(Pop(1)), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/povray.go b/vendor/github.com/alecthomas/chroma/lexers/p/povray.go new file mode 100644 index 000000000..2d870f106 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/povray.go @@ -0,0 +1,39 @@ +package p + +import ( + . 
"github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Povray lexer. +var Povray = internal.Register(MustNewLazyLexer( + &Config{ + Name: "POVRay", + Aliases: []string{"pov"}, + Filenames: []string{"*.pov", "*.inc"}, + MimeTypes: []string{"text/x-povray"}, + }, + povrayRules, +)) + +func povrayRules() Rules { + return Rules{ + "root": { + {`/\*[\w\W]*?\*/`, CommentMultiline, nil}, + {`//.*\n`, CommentSingle, nil}, + {`(?s)"(?:\\.|[^"\\])+"`, LiteralStringDouble, nil}, + {Words(`#`, `\b`, `break`, `case`, `debug`, `declare`, `default`, `define`, `else`, `elseif`, `end`, `error`, `fclose`, `fopen`, `for`, `if`, `ifdef`, `ifndef`, `include`, `local`, `macro`, `range`, `read`, `render`, `statistics`, `switch`, `undef`, `version`, `warning`, `while`, `write`), CommentPreproc, nil}, + {Words(`\b`, `\b`, `aa_level`, `aa_threshold`, `abs`, `acos`, `acosh`, `adaptive`, `adc_bailout`, `agate`, `agate_turb`, `all`, `alpha`, `ambient`, `ambient_light`, `angle`, `aperture`, `arc_angle`, `area_light`, `asc`, `asin`, `asinh`, `assumed_gamma`, `atan`, `atan2`, `atanh`, `atmosphere`, `atmospheric_attenuation`, `attenuating`, `average`, `background`, `black_hole`, `blue`, `blur_samples`, `bounded_by`, `box_mapping`, `bozo`, `break`, `brick`, `brick_size`, `brightness`, `brilliance`, `bumps`, `bumpy1`, `bumpy2`, `bumpy3`, `bump_map`, `bump_size`, `case`, `caustics`, `ceil`, `checker`, `chr`, `clipped_by`, `clock`, `color`, `color_map`, `colour`, `colour_map`, `component`, `composite`, `concat`, `confidence`, `conic_sweep`, `constant`, `control0`, `control1`, `cos`, `cosh`, `count`, `crackle`, `crand`, `cube`, `cubic_spline`, `cylindrical_mapping`, `debug`, `declare`, `default`, `degrees`, `dents`, `diffuse`, `direction`, `distance`, `distance_maximum`, `div`, `dust`, `dust_type`, `eccentricity`, `else`, `emitting`, `end`, `error`, `error_bound`, `exp`, `exponent`, `fade_distance`, `fade_power`, `falloff`, `falloff_angle`, `false`, `file_exists`, `filter`, `finish`, `fisheye`, `flatness`, `flip`, `floor`, `focal_point`, `fog`, `fog_alt`, `fog_offset`, `fog_type`, `frequency`, `gif`, `global_settings`, `glowing`, `gradient`, `granite`, `gray_threshold`, `green`, `halo`, `hexagon`, `hf_gray_16`, `hierarchy`, `hollow`, `hypercomplex`, `if`, `ifdef`, `iff`, `image_map`, `incidence`, `include`, `int`, `interpolate`, `inverse`, `ior`, `irid`, `irid_wavelength`, `jitter`, `lambda`, `leopard`, `linear`, `linear_spline`, `linear_sweep`, `location`, `log`, `looks_like`, `look_at`, `low_error_factor`, `mandel`, `map_type`, `marble`, `material_map`, `matrix`, `max`, `max_intersections`, `max_iteration`, `max_trace_level`, `max_value`, `metallic`, `min`, `minimum_reuse`, `mod`, `mortar`, `nearest_count`, `no`, `normal`, `normal_map`, `no_shadow`, `number_of_waves`, `octaves`, `off`, `offset`, `omega`, `omnimax`, `on`, `once`, `onion`, `open`, `orthographic`, `panoramic`, `pattern1`, `pattern2`, `pattern3`, `perspective`, `pgm`, `phase`, `phong`, `phong_size`, `pi`, `pigment`, `pigment_map`, `planar_mapping`, `png`, `point_at`, `pot`, `pow`, `ppm`, `precision`, `pwr`, `quadratic_spline`, `quaternion`, `quick_color`, `quick_colour`, `quilted`, `radial`, `radians`, `radiosity`, `radius`, `rainbow`, `ramp_wave`, `rand`, `range`, `reciprocal`, `recursion_limit`, `red`, `reflection`, `refraction`, `render`, `repeat`, `rgb`, `rgbf`, `rgbft`, `rgbt`, `right`, `ripples`, `rotate`, `roughness`, `samples`, `scale`, `scallop_wave`, `scattering`, `seed`, `shadowless`, `sin`, `sine_wave`, 
`sinh`, `sky`, `sky_sphere`, `slice`, `slope_map`, `smooth`, `specular`, `spherical_mapping`, `spiral`, `spiral1`, `spiral2`, `spotlight`, `spotted`, `sqr`, `sqrt`, `statistics`, `str`, `strcmp`, `strength`, `strlen`, `strlwr`, `strupr`, `sturm`, `substr`, `switch`, `sys`, `t`, `tan`, `tanh`, `test_camera_1`, `test_camera_2`, `test_camera_3`, `test_camera_4`, `texture`, `texture_map`, `tga`, `thickness`, `threshold`, `tightness`, `tile2`, `tiles`, `track`, `transform`, `translate`, `transmit`, `triangle_wave`, `true`, `ttf`, `turbulence`, `turb_depth`, `type`, `ultra_wide_angle`, `up`, `use_color`, `use_colour`, `use_index`, `u_steps`, `val`, `variance`, `vaxis_rotate`, `vcross`, `vdot`, `version`, `vlength`, `vnormalize`, `volume_object`, `volume_rendered`, `vol_with_light`, `vrotate`, `v_steps`, `warning`, `warp`, `water_level`, `waves`, `while`, `width`, `wood`, `wrinkles`, `yes`), Keyword, nil}, + {Words(``, `\b`, `bicubic_patch`, `blob`, `box`, `camera`, `cone`, `cubic`, `cylinder`, `difference`, `disc`, `height_field`, `intersection`, `julia_fractal`, `lathe`, `light_source`, `merge`, `mesh`, `object`, `plane`, `poly`, `polygon`, `prism`, `quadric`, `quartic`, `smooth_triangle`, `sor`, `sphere`, `superellipsoid`, `text`, `torus`, `triangle`, `union`), NameBuiltin, nil}, + {`[\[\](){}<>;,]`, Punctuation, nil}, + {`[-+*/=]`, Operator, nil}, + {`\b(x|y|z|u|v)\b`, NameBuiltinPseudo, nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + {`[0-9]+\.[0-9]*`, LiteralNumberFloat, nil}, + {`\.[0-9]+`, LiteralNumberFloat, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`\s+`, Text, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/powerquery.go b/vendor/github.com/alecthomas/chroma/lexers/p/powerquery.go new file mode 100644 index 000000000..0302420ea --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/powerquery.go @@ -0,0 +1,38 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// PowerQuery lexer. +var PowerQuery = internal.Register(MustNewLazyLexer( + &Config{ + Name: "PowerQuery", + Aliases: []string{"powerquery", "pq"}, + Filenames: []string{"*.pq"}, + MimeTypes: []string{"text/x-powerquery"}, + DotAll: true, + CaseInsensitive: true, + }, + powerqueryRules, +)) + +func powerqueryRules() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`//.*?\n`, CommentSingle, nil}, + {`/\*.*?\*/`, CommentMultiline, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`(and|as|each|else|error|false|if|in|is|let|meta|not|null|or|otherwise|section|shared|then|true|try|type)\b`, Keyword, nil}, + {`(#binary|#date|#datetime|#datetimezone|#duration|#infinity|#nan|#sections|#shared|#table|#time)\b`, KeywordType, nil}, + {`(([a-zA-Z]|_)[\w|._]*|#"[^"]+")`, Name, nil}, + {`0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?`, LiteralNumberHex, nil}, + {`([0-9]+\.[0-9]+|\.[0-9]+)([eE][0-9]+)?`, LiteralNumberFloat, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`[\(\)\[\]\{\}]`, Punctuation, nil}, + {`\.\.|\.\.\.|=>|<=|>=|<>|[@!?,;=<>\+\-\*\/&]`, Operator, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/powershell.go b/vendor/github.com/alecthomas/chroma/lexers/p/powershell.go new file mode 100644 index 000000000..9b7b56ee0 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/powershell.go @@ -0,0 +1,70 @@ +package p + +import ( + . 
"github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Powershell lexer. +var Powershell = internal.Register(MustNewLazyLexer( + &Config{ + Name: "PowerShell", + Aliases: []string{"powershell", "posh", "ps1", "psm1", "psd1"}, + Filenames: []string{"*.ps1", "*.psm1", "*.psd1"}, + MimeTypes: []string{"text/x-powershell"}, + DotAll: true, + CaseInsensitive: true, + }, + powershellRules, +)) + +func powershellRules() Rules { + return Rules{ + "root": { + {`\(`, Punctuation, Push("child")}, + {`\s+`, Text, nil}, + {`^(\s*#[#\s]*)(\.(?:component|description|example|externalhelp|forwardhelpcategory|forwardhelptargetname|functionality|inputs|link|notes|outputs|parameter|remotehelprunspace|role|synopsis))([^\n]*$)`, ByGroups(Comment, LiteralStringDoc, Comment), nil}, + {`#[^\n]*?$`, Comment, nil}, + {`(<|<)#`, CommentMultiline, Push("multline")}, + {`(?i)([A-Z]:)`, Name, nil}, + {`@"\n`, LiteralStringHeredoc, Push("heredoc-double")}, + {`@'\n.*?\n'@`, LiteralStringHeredoc, nil}, + {"`[\\'\"$@-]", Punctuation, nil}, + {`"`, LiteralStringDouble, Push("string")}, + {`'([^']|'')*'`, LiteralStringSingle, nil}, + {`(\$|@@|@)((global|script|private|env):)?\w+`, NameVariable, nil}, + {`[a-z]\w*-[a-z]\w*\b`, NameBuiltin, nil}, + {`(while|validateset|validaterange|validatepattern|validatelength|validatecount|until|trap|switch|return|ref|process|param|parameter|in|if|global:|function|foreach|for|finally|filter|end|elseif|else|dynamicparam|do|default|continue|cmdletbinding|break|begin|alias|\?|%|#script|#private|#local|#global|mandatory|parametersetname|position|valuefrompipeline|valuefrompipelinebypropertyname|valuefromremainingarguments|helpmessage|try|catch|throw)\b`, Keyword, nil}, + {`-(and|as|band|bnot|bor|bxor|casesensitive|ccontains|ceq|cge|cgt|cle|clike|clt|cmatch|cne|cnotcontains|cnotlike|cnotmatch|contains|creplace|eq|exact|f|file|ge|gt|icontains|ieq|ige|igt|ile|ilike|ilt|imatch|ine|inotcontains|inotlike|inotmatch|ireplace|is|isnot|le|like|lt|match|ne|not|notcontains|notlike|notmatch|or|regex|replace|wildcard)\b`, Operator, nil}, + {`(ac|asnp|cat|cd|cfs|chdir|clc|clear|clhy|cli|clp|cls|clv|cnsn|compare|copy|cp|cpi|cpp|curl|cvpa|dbp|del|diff|dir|dnsn|ebp|echo|epal|epcsv|epsn|erase|etsn|exsn|fc|fhx|fl|foreach|ft|fw|gal|gbp|gc|gci|gcm|gcs|gdr|ghy|gi|gjb|gl|gm|gmo|gp|gps|gpv|group|gsn|gsnp|gsv|gu|gv|gwmi|h|history|icm|iex|ihy|ii|ipal|ipcsv|ipmo|ipsn|irm|ise|iwmi|iwr|kill|lp|ls|man|md|measure|mi|mount|move|mp|mv|nal|ndr|ni|nmo|npssc|nsn|nv|ogv|oh|popd|ps|pushd|pwd|r|rbp|rcjb|rcsn|rd|rdr|ren|ri|rjb|rm|rmdir|rmo|rni|rnp|rp|rsn|rsnp|rujb|rv|rvpa|rwmi|sajb|sal|saps|sasv|sbp|sc|select|set|shcm|si|sl|sleep|sls|sort|sp|spjb|spps|spsv|start|sujb|sv|swmi|tee|trcm|type|wget|where|wjb|write)\s`, NameBuiltin, nil}, + {"\\[[a-z_\\[][\\w. 
`,\\[\\]]*\\]", NameConstant, nil}, + {`-[a-z_]\w*`, Name, nil}, + {`\w+`, Name, nil}, + {"[.,;@{}\\[\\]$()=+*/\\\\&%!~?^`|<>-]|::", Punctuation, nil}, + }, + "child": { + {`\)`, Punctuation, Pop(1)}, + Include("root"), + }, + "multline": { + {`[^#&.]+`, CommentMultiline, nil}, + {`#(>|>)`, CommentMultiline, Pop(1)}, + {`\.(component|description|example|externalhelp|forwardhelpcategory|forwardhelptargetname|functionality|inputs|link|notes|outputs|parameter|remotehelprunspace|role|synopsis)`, LiteralStringDoc, nil}, + {`[#&.]`, CommentMultiline, nil}, + }, + "string": { + {"`[0abfnrtv'\\\"$`]", LiteralStringEscape, nil}, + {"[^$`\"]+", LiteralStringDouble, nil}, + {`\$\(`, Punctuation, Push("child")}, + {`""`, LiteralStringDouble, nil}, + {"[`$]", LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + "heredoc-double": { + {`\n"@`, LiteralStringHeredoc, Pop(1)}, + {`\$\(`, Punctuation, Push("child")}, + {`[^@\n]+"]`, LiteralStringHeredoc, nil}, + {`.`, LiteralStringHeredoc, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/prolog.go b/vendor/github.com/alecthomas/chroma/lexers/p/prolog.go new file mode 100644 index 000000000..a2f346b62 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/prolog.go @@ -0,0 +1,54 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Prolog lexer. +var Prolog = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Prolog", + Aliases: []string{"prolog"}, + Filenames: []string{"*.ecl", "*.prolog", "*.pro", "*.pl"}, + MimeTypes: []string{"text/x-prolog"}, + }, + prologRules, +)) + +func prologRules() Rules { + return Rules{ + "root": { + {`/\*`, CommentMultiline, Push("nested-comment")}, + {`%.*`, CommentSingle, nil}, + {`0\'.`, LiteralStringChar, nil}, + {`0b[01]+`, LiteralNumberBin, nil}, + {`0o[0-7]+`, LiteralNumberOct, nil}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`\d\d?\'[a-zA-Z0-9]+`, LiteralNumberInteger, nil}, + {`(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`[\[\](){}|.,;!]`, Punctuation, nil}, + {`:-|-->`, Punctuation, nil}, + {`"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\[0-7]+\\|\\["\nabcefnrstv]|[^\\"])*"`, LiteralStringDouble, nil}, + {`'(?:''|[^'])*'`, LiteralStringAtom, nil}, + {`is\b`, Operator, nil}, + {`(<|>|=<|>=|==|=:=|=|/|//|\*|\+|-)(?=\s|[a-zA-Z0-9\[])`, Operator, nil}, + {`(mod|div|not)\b`, Operator, nil}, + {`_`, Keyword, nil}, + {`([a-z]+)(:)`, ByGroups(NameNamespace, Punctuation), nil}, + {`([a-zÀ-῿぀-퟿-￯][\w$À-῿぀-퟿-￯]*)(\s*)(:-|-->)`, ByGroups(NameFunction, Text, Operator), nil}, + {`([a-zÀ-῿぀-퟿-￯][\w$À-῿぀-퟿-￯]*)(\s*)(\()`, ByGroups(NameFunction, Text, Punctuation), nil}, + {`[a-zÀ-῿぀-퟿-￯][\w$À-῿぀-퟿-￯]*`, LiteralStringAtom, nil}, + {`[#&*+\-./:<=>?@\\^~¡-¿‐-〿]+`, LiteralStringAtom, nil}, + {`[A-Z_]\w*`, NameVariable, nil}, + {`\s+|[ -‏￰-￾￯]`, Text, nil}, + }, + "nested-comment": { + {`\*/`, CommentMultiline, Pop(1)}, + {`/\*`, CommentMultiline, Push()}, + {`[^*/]+`, CommentMultiline, nil}, + {`[*/]`, CommentMultiline, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/promql.go b/vendor/github.com/alecthomas/chroma/lexers/p/promql.go new file mode 100644 index 000000000..e80b926c2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/promql.go @@ -0,0 +1,59 @@ +package p + +import ( + . 
"github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Promql lexer. +var Promql = internal.Register(MustNewLazyLexer( + &Config{ + Name: "PromQL", + Aliases: []string{"promql"}, + Filenames: []string{"*.promql"}, + MimeTypes: []string{}, + }, + promqlRules, +)) + +func promqlRules() Rules { + return Rules{ + "root": { + {`\n`, TextWhitespace, nil}, + {`\s+`, TextWhitespace, nil}, + {`,`, Punctuation, nil}, + {Words(``, `\b`, `bool`, `by`, `group_left`, `group_right`, `ignoring`, `offset`, `on`, `without`), Keyword, nil}, + {Words(``, `\b`, `sum`, `min`, `max`, `avg`, `group`, `stddev`, `stdvar`, `count`, `count_values`, `bottomk`, `topk`, `quantile`), Keyword, nil}, + {Words(``, `\b`, `abs`, `absent`, `absent_over_time`, `avg_over_time`, `ceil`, `changes`, `clamp_max`, `clamp_min`, `count_over_time`, `day_of_month`, `day_of_week`, `days_in_month`, `delta`, `deriv`, `exp`, `floor`, `histogram_quantile`, `holt_winters`, `hour`, `idelta`, `increase`, `irate`, `label_join`, `label_replace`, `ln`, `log10`, `log2`, `max_over_time`, `min_over_time`, `minute`, `month`, `predict_linear`, `quantile_over_time`, `rate`, `resets`, `round`, `scalar`, `sort`, `sort_desc`, `sqrt`, `stddev_over_time`, `stdvar_over_time`, `sum_over_time`, `time`, `timestamp`, `vector`, `year`), KeywordReserved, nil}, + {`[1-9][0-9]*[smhdwy]`, LiteralString, nil}, + {`-?[0-9]+\.[0-9]+`, LiteralNumberFloat, nil}, + {`-?[0-9]+`, LiteralNumberInteger, nil}, + {`#.*?$`, CommentSingle, nil}, + {`(\+|\-|\*|\/|\%|\^)`, Operator, nil}, + {`==|!=|>=|<=|<|>`, Operator, nil}, + {`and|or|unless`, OperatorWord, nil}, + {`[_a-zA-Z][a-zA-Z0-9_]+`, NameVariable, nil}, + {`(["\'])(.*?)(["\'])`, ByGroups(Punctuation, LiteralString, Punctuation), nil}, + {`\(`, Operator, Push("function")}, + {`\)`, Operator, nil}, + {`\{`, Punctuation, Push("labels")}, + {`\[`, Punctuation, Push("range")}, + }, + "labels": { + {`\}`, Punctuation, Pop(1)}, + {`\n`, TextWhitespace, nil}, + {`\s+`, TextWhitespace, nil}, + {`,`, Punctuation, nil}, + {`([_a-zA-Z][a-zA-Z0-9_]*?)(\s*?)(=~|!=|=|!~)(\s*?)("|')(.*?)("|')`, ByGroups(NameLabel, TextWhitespace, Operator, TextWhitespace, Punctuation, LiteralString, Punctuation), nil}, + }, + "range": { + {`\]`, Punctuation, Pop(1)}, + {`[1-9][0-9]*[smhdwy]`, LiteralString, nil}, + }, + "function": { + {`\)`, Operator, Pop(1)}, + {`\(`, Operator, Push()}, + Default(Pop(1)), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/protobuf.go b/vendor/github.com/alecthomas/chroma/lexers/p/protobuf.go new file mode 100644 index 000000000..76576cb35 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/protobuf.go @@ -0,0 +1,57 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// ProtocolBuffer lexer. 
+var ProtocolBuffer = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Protocol Buffer", + Aliases: []string{"protobuf", "proto"}, + Filenames: []string{"*.proto"}, + MimeTypes: []string{}, + }, + protocolBufferRules, +)) + +func protocolBufferRules() Rules { + return Rules{ + "root": { + {`[ \t]+`, Text, nil}, + {`[,;{}\[\]()<>]`, Punctuation, nil}, + {`/(\\\n)?/(\n|(.|\n)*?[^\\]\n)`, CommentSingle, nil}, + {`/(\\\n)?\*(.|\n)*?\*(\\\n)?/`, CommentMultiline, nil}, + {Words(`\b`, `\b`, `import`, `option`, `optional`, `required`, `repeated`, `default`, `packed`, `ctype`, `extensions`, `to`, `max`, `rpc`, `returns`, `oneof`), Keyword, nil}, + {Words(``, `\b`, `int32`, `int64`, `uint32`, `uint64`, `sint32`, `sint64`, `fixed32`, `fixed64`, `sfixed32`, `sfixed64`, `float`, `double`, `bool`, `string`, `bytes`), KeywordType, nil}, + {`(true|false)\b`, KeywordConstant, nil}, + {`(package)(\s+)`, ByGroups(KeywordNamespace, Text), Push("package")}, + {`(message|extend)(\s+)`, ByGroups(KeywordDeclaration, Text), Push("message")}, + {`(enum|group|service)(\s+)`, ByGroups(KeywordDeclaration, Text), Push("type")}, + {`\".*?\"`, LiteralString, nil}, + {`\'.*?\'`, LiteralString, nil}, + {`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*`, LiteralNumberFloat, nil}, + {`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, + {`(\-?(inf|nan))\b`, LiteralNumberFloat, nil}, + {`0x[0-9a-fA-F]+[LlUu]*`, LiteralNumberHex, nil}, + {`0[0-7]+[LlUu]*`, LiteralNumberOct, nil}, + {`\d+[LlUu]*`, LiteralNumberInteger, nil}, + {`[+-=]`, Operator, nil}, + {`([a-zA-Z_][\w.]*)([ \t]*)(=)`, ByGroups(Name, Text, Operator), nil}, + {`[a-zA-Z_][\w.]*`, Name, nil}, + }, + "package": { + {`[a-zA-Z_]\w*`, NameNamespace, Pop(1)}, + Default(Pop(1)), + }, + "message": { + {`[a-zA-Z_]\w*`, NameClass, Pop(1)}, + Default(Pop(1)), + }, + "type": { + {`[a-zA-Z_]\w*`, Name, Pop(1)}, + Default(Pop(1)), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/puppet.go b/vendor/github.com/alecthomas/chroma/lexers/p/puppet.go new file mode 100644 index 000000000..b32caed09 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/puppet.go @@ -0,0 +1,60 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Puppet lexer. 
+var Puppet = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Puppet", + Aliases: []string{"puppet"}, + Filenames: []string{"*.pp"}, + MimeTypes: []string{}, + }, + puppetRules, +)) + +func puppetRules() Rules { + return Rules{ + "root": { + Include("comments"), + Include("keywords"), + Include("names"), + Include("numbers"), + Include("operators"), + Include("strings"), + {`[]{}:(),;[]`, Punctuation, nil}, + {`[^\S\n]+`, Text, nil}, + }, + "comments": { + {`\s*#.*$`, Comment, nil}, + {`/(\\\n)?[*](.|\n)*?[*](\\\n)?/`, CommentMultiline, nil}, + }, + "operators": { + {`(=>|\?|<|>|=|\+|-|/|\*|~|!|\|)`, Operator, nil}, + {`(in|and|or|not)\b`, OperatorWord, nil}, + }, + "names": { + {`[a-zA-Z_]\w*`, NameAttribute, nil}, + {`(\$\S+)(\[)(\S+)(\])`, ByGroups(NameVariable, Punctuation, LiteralString, Punctuation), nil}, + {`\$\S+`, NameVariable, nil}, + }, + "numbers": { + {`(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?`, LiteralNumberFloat, nil}, + {`\d+[eE][+-]?[0-9]+j?`, LiteralNumberFloat, nil}, + {`0[0-7]+j?`, LiteralNumberOct, nil}, + {`0[xX][a-fA-F0-9]+`, LiteralNumberHex, nil}, + {`\d+L`, LiteralNumberIntegerLong, nil}, + {`\d+j?`, LiteralNumberInteger, nil}, + }, + "keywords": { + {Words(`(?i)`, `\b`, `absent`, `alert`, `alias`, `audit`, `augeas`, `before`, `case`, `check`, `class`, `computer`, `configured`, `contained`, `create_resources`, `crit`, `cron`, `debug`, `default`, `define`, `defined`, `directory`, `else`, `elsif`, `emerg`, `err`, `exec`, `extlookup`, `fail`, `false`, `file`, `filebucket`, `fqdn_rand`, `generate`, `host`, `if`, `import`, `include`, `info`, `inherits`, `inline_template`, `installed`, `interface`, `k5login`, `latest`, `link`, `loglevel`, `macauthorization`, `mailalias`, `maillist`, `mcx`, `md5`, `mount`, `mounted`, `nagios_command`, `nagios_contact`, `nagios_contactgroup`, `nagios_host`, `nagios_hostdependency`, `nagios_hostescalation`, `nagios_hostextinfo`, `nagios_hostgroup`, `nagios_service`, `nagios_servicedependency`, `nagios_serviceescalation`, `nagios_serviceextinfo`, `nagios_servicegroup`, `nagios_timeperiod`, `node`, `noop`, `notice`, `notify`, `package`, `present`, `purged`, `realize`, `regsubst`, `resources`, `role`, `router`, `running`, `schedule`, `scheduled_task`, `search`, `selboolean`, `selmodule`, `service`, `sha1`, `shellquote`, `split`, `sprintf`, `ssh_authorized_key`, `sshkey`, `stage`, `stopped`, `subscribe`, `tag`, `tagged`, `template`, `tidy`, `true`, `undef`, `unmounted`, `user`, `versioncmp`, `vlan`, `warning`, `yumrepo`, `zfs`, `zone`, `zpool`), Keyword, nil}, + }, + "strings": { + {`"([^"])*"`, LiteralString, nil}, + {`'(\\'|[^'])*'`, LiteralString, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/python.go b/vendor/github.com/alecthomas/chroma/lexers/p/python.go new file mode 100644 index 000000000..ad10a322e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/python.go @@ -0,0 +1,211 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Python lexer. 
+var Python = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Python", + Aliases: []string{"python", "py", "sage", "python3", "py3"}, + Filenames: []string{"*.py", "*.pyi", "*.pyw", "*.jy", "*.sage", "*.sc", "SConstruct", "SConscript", "*.bzl", "BUCK", "BUILD", "BUILD.bazel", "WORKSPACE", "*.tac"}, + MimeTypes: []string{"text/x-python", "application/x-python", "text/x-python3", "application/x-python3"}, + }, + pythonRules, +)) + +func pythonRules() Rules { + const pythonIdentifier = `[_\p{L}][_\p{L}\p{N}]*` + + return Rules{ + "root": { + {`\n`, Text, nil}, + {`^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringAffix, LiteralStringDoc), nil}, + {`^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')`, ByGroups(Text, LiteralStringAffix, LiteralStringDoc), nil}, + {`\A#!.+$`, CommentHashbang, nil}, + {`#.*$`, CommentSingle, nil}, + {`\\\n`, Text, nil}, + {`\\`, Text, nil}, + Include("keywords"), + {`(def)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("funcname")}, + {`(class)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("classname")}, + {`(from)((?:\s|\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("fromimport")}, + {`(import)((?:\s|\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("import")}, + Include("expr"), + }, + "expr": { + {`(?i)(rf|fr)(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("rfstringescape", "tdqf")}, + {`(?i)(rf|fr)(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("rfstringescape", "tsqf")}, + {`(?i)(rf|fr)(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("rfstringescape", "dqf")}, + {`(?i)(rf|fr)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("rfstringescape", "sqf")}, + {`([fF])(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("fstringescape", "tdqf")}, + {`([fF])(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("fstringescape", "tsqf")}, + {`([fF])(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("fstringescape", "dqf")}, + {`([fF])(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("fstringescape", "sqf")}, + {`(?i)(rb|br|r)(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("tdqs")}, + {`(?i)(rb|br|r)(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("tsqs")}, + {`(?i)(rb|br|r)(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("dqs")}, + {`(?i)(rb|br|r)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("sqs")}, + {`([uUbB]?)(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("stringescape", "tdqs")}, + {`([uUbB]?)(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "tsqs")}, + {`([uUbB]?)(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("stringescape", "dqs")}, + {`([uUbB]?)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "sqs")}, + {`[^\S\n]+`, Text, nil}, + Include("numbers"), + {`!=|==|<<|>>|:=|[-~+/*%=<>&^|.]`, Operator, nil}, + {`[]{}:(),;[]`, Punctuation, nil}, + {`(in|is|and|or|not)\b`, OperatorWord, nil}, + Include("expr-keywords"), + Include("builtins"), + Include("magicfuncs"), + Include("magicvars"), + Include("name"), + }, + "expr-inside-fstring": { + {`[{([]`, Punctuation, Push("expr-inside-fstring-inner")}, + {`(=\s*)?(\![sraf])?\}`, LiteralStringInterpol, Pop(1)}, + {`(=\s*)?(\![sraf])?:`, LiteralStringInterpol, Pop(1)}, + {`\s+`, Text, nil}, + Include("expr"), + }, + "expr-inside-fstring-inner": { + {`[{([]`, Punctuation, Push("expr-inside-fstring-inner")}, + 
{`[])}]`, Punctuation, Pop(1)}, + {`\s+`, Text, nil}, + Include("expr"), + }, + "expr-keywords": { + {Words(``, `\b`, `async for`, `await`, `else`, `for`, `if`, `lambda`, `yield`, `yield from`), Keyword, nil}, + {Words(``, `\b`, `True`, `False`, `None`), KeywordConstant, nil}, + }, + "keywords": { + {Words(``, `\b`, `assert`, `async`, `await`, `break`, `continue`, `del`, `elif`, `else`, `except`, `finally`, `for`, `global`, `if`, `lambda`, `pass`, `raise`, `nonlocal`, `return`, `try`, `while`, `yield`, `yield from`, `as`, `with`), Keyword, nil}, + {Words(``, `\b`, `True`, `False`, `None`), KeywordConstant, nil}, + }, + "builtins": { + {Words(`(?=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?\}`, LiteralStringInterpol, nil}, + {`[^\\\'"%{\n]+`, LiteralStringSingle, nil}, + {`[\'"\\]`, LiteralStringSingle, nil}, + {`%|(\{{1,2})`, LiteralStringSingle, nil}, + }, + "strings-double": { + {`%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsaux%]`, LiteralStringInterpol, nil}, + {`\{((\w+)((\.\w+)|(\[[^\]]+\]))*)?(\![sra])?(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?\}`, LiteralStringInterpol, nil}, + {`[^\\\'"%{\n]+`, LiteralStringDouble, nil}, + {`[\'"\\]`, LiteralStringDouble, nil}, + {`%|(\{{1,2})`, LiteralStringDouble, nil}, + }, + "dqf": { + {`"`, LiteralStringDouble, Pop(1)}, + {`\\\\|\\"|\\\n`, LiteralStringEscape, nil}, + Include("fstrings-double"), + }, + "sqf": { + {`'`, LiteralStringSingle, Pop(1)}, + {`\\\\|\\'|\\\n`, LiteralStringEscape, nil}, + Include("fstrings-single"), + }, + "dqs": { + {`"`, LiteralStringDouble, Pop(1)}, + {`\\\\|\\"|\\\n`, LiteralStringEscape, nil}, + Include("strings-double"), + }, + "sqs": { + {`'`, LiteralStringSingle, Pop(1)}, + {`\\\\|\\'|\\\n`, LiteralStringEscape, nil}, + Include("strings-single"), + }, + "tdqf": { + {`"""`, LiteralStringDouble, Pop(1)}, + Include("fstrings-double"), + {`\n`, LiteralStringDouble, nil}, + }, + "tsqf": { + {`'''`, LiteralStringSingle, Pop(1)}, + Include("fstrings-single"), + {`\n`, LiteralStringSingle, nil}, + }, + "tdqs": { + {`"""`, LiteralStringDouble, Pop(1)}, + Include("strings-double"), + {`\n`, LiteralStringDouble, nil}, + }, + "tsqs": { + {`'''`, LiteralStringSingle, Pop(1)}, + Include("strings-single"), + {`\n`, LiteralStringSingle, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/python2.go b/vendor/github.com/alecthomas/chroma/lexers/p/python2.go new file mode 100644 index 000000000..f1f60360b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/python2.go @@ -0,0 +1,141 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Python2 lexer. 
+var Python2 = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Python 2", + Aliases: []string{"python2", "py2"}, + Filenames: []string{}, + MimeTypes: []string{"text/x-python2", "application/x-python2"}, + }, + python2Rules, +)) + +func python2Rules() Rules { + return Rules{ + "root": { + {`\n`, Text, nil}, + {`^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringAffix, LiteralStringDoc), nil}, + {`^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')`, ByGroups(Text, LiteralStringAffix, LiteralStringDoc), nil}, + {`[^\S\n]+`, Text, nil}, + {`\A#!.+$`, CommentHashbang, nil}, + {`#.*$`, CommentSingle, nil}, + {`[]{}:(),;[]`, Punctuation, nil}, + {`\\\n`, Text, nil}, + {`\\`, Text, nil}, + {`(in|is|and|or|not)\b`, OperatorWord, nil}, + {`!=|==|<<|>>|[-~+/*%=<>&^|.]`, Operator, nil}, + Include("keywords"), + {`(def)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("funcname")}, + {`(class)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("classname")}, + {`(from)((?:\s|\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("fromimport")}, + {`(import)((?:\s|\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("import")}, + Include("builtins"), + Include("magicfuncs"), + Include("magicvars"), + Include("backtick"), + {`([rR]|[uUbB][rR]|[rR][uUbB])(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("tdqs")}, + {`([rR]|[uUbB][rR]|[rR][uUbB])(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("tsqs")}, + {`([rR]|[uUbB][rR]|[rR][uUbB])(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("dqs")}, + {`([rR]|[uUbB][rR]|[rR][uUbB])(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("sqs")}, + {`([uUbB]?)(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("stringescape", "tdqs")}, + {`([uUbB]?)(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "tsqs")}, + {`([uUbB]?)(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("stringescape", "dqs")}, + {`([uUbB]?)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "sqs")}, + Include("name"), + Include("numbers"), + }, + "keywords": { + {Words(``, `\b`, `assert`, `break`, `continue`, `del`, `elif`, `else`, `except`, `exec`, `finally`, `for`, `global`, `if`, `lambda`, `pass`, `print`, `raise`, `return`, `try`, `while`, `yield`, `yield from`, `as`, `with`), Keyword, nil}, + }, + "builtins": { + {Words(`(?>|[-~+/\\*%=<>&^|?:!.]`, Operator, nil}, + {`[\[\]{}(),;]`, Punctuation, nil}, + {`[\w]+`, NameVariableGlobal, nil}, + }, + "declarations": { + {`\b(DATA|LET)(?=\(|\b)`, KeywordDeclaration, nil}, + }, + "functions": { + {`\b(ABS|ASC|ATN|CDBL|CHR\$|CINT|CLNG|COMMAND\$|COS|CSNG|CSRLIN|CVD|CVDMBF|CVI|CVL|CVS|CVSMBF|DATE\$|ENVIRON\$|EOF|ERDEV|ERDEV\$|ERL|ERR|EXP|FILEATTR|FIX|FRE|FREEFILE|HEX\$|INKEY\$|INP|INPUT\$|INSTR|INT|IOCTL\$|LBOUND|LCASE\$|LEFT\$|LEN|LOC|LOF|LOG|LPOS|LTRIM\$|MID\$|MKD\$|MKDMBF\$|MKI\$|MKL\$|MKS\$|MKSMBF\$|OCT\$|PEEK|PEN|PLAY|PMAP|POINT|POS|RIGHT\$|RND|RTRIM\$|SADD|SCREEN|SEEK|SETMEM|SGN|SIN|SPACE\$|SPC|SQR|STICK|STR\$|STRIG|STRING\$|TAB|TAN|TIME\$|TIMER|UBOUND|UCASE\$|VAL|VARPTR|VARPTR\$|VARSEG)(?=\(|\b)`, KeywordReserved, nil}, + }, + "metacommands": { + {`\b(\$DYNAMIC|\$INCLUDE|\$STATIC)(?=\(|\b)`, KeywordConstant, nil}, + }, + "operators": { + {`\b(AND|EQV|IMP|NOT|OR|XOR)(?=\(|\b)`, OperatorWord, nil}, + }, + "statements": { + {`\b(BEEP|BLOAD|BSAVE|CALL|CALL\ ABSOLUTE|CALL\ INTERRUPT|CALLS|CHAIN|CHDIR|CIRCLE|CLEAR|CLOSE|CLS|COLOR|COM|COMMON|CONST|DATA|DATE\$|DECLARE|DEF\ FN|DEF\ 
SEG|DEFDBL|DEFINT|DEFLNG|DEFSNG|DEFSTR|DEF|DIM|DO|LOOP|DRAW|END|ENVIRON|ERASE|ERROR|EXIT|FIELD|FILES|FOR|NEXT|FUNCTION|GET|GOSUB|GOTO|IF|THEN|INPUT|INPUT\ \#|IOCTL|KEY|KEY|KILL|LET|LINE|LINE\ INPUT|LINE\ INPUT\ \#|LOCATE|LOCK|UNLOCK|LPRINT|LSET|MID\$|MKDIR|NAME|ON\ COM|ON\ ERROR|ON\ KEY|ON\ PEN|ON\ PLAY|ON\ STRIG|ON\ TIMER|ON\ UEVENT|ON|OPEN|OPEN\ COM|OPTION\ BASE|OUT|PAINT|PALETTE|PCOPY|PEN|PLAY|POKE|PRESET|PRINT|PRINT\ \#|PRINT\ USING|PSET|PUT|PUT|RANDOMIZE|READ|REDIM|REM|RESET|RESTORE|RESUME|RETURN|RMDIR|RSET|RUN|SCREEN|SEEK|SELECT\ CASE|SHARED|SHELL|SLEEP|SOUND|STATIC|STOP|STRIG|SUB|SWAP|SYSTEM|TIME\$|TIMER|TROFF|TRON|TYPE|UEVENT|UNLOCK|VIEW|WAIT|WHILE|WEND|WIDTH|WINDOW|WRITE)\b`, KeywordReserved, nil}, + }, + "keywords": { + {`\b(ACCESS|ALIAS|ANY|APPEND|AS|BASE|BINARY|BYVAL|CASE|CDECL|DOUBLE|ELSE|ELSEIF|ENDIF|INTEGER|IS|LIST|LOCAL|LONG|LOOP|MOD|NEXT|OFF|ON|OUTPUT|RANDOM|SIGNAL|SINGLE|STEP|STRING|THEN|TO|UNTIL|USING|WEND)\b`, Keyword, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/q/qml.go b/vendor/github.com/alecthomas/chroma/lexers/q/qml.go new file mode 100644 index 000000000..dfd3c7537 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/q/qml.go @@ -0,0 +1,58 @@ +package q + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Qml lexer. +var Qml = internal.Register(MustNewLazyLexer( + &Config{ + Name: "QML", + Aliases: []string{"qml", "qbs"}, + Filenames: []string{"*.qml", "*.qbs"}, + MimeTypes: []string{"application/x-qml", "application/x-qt.qbs+qml"}, + DotAll: true, + }, + qmlRules, +)) + +func qmlRules() Rules { + return Rules{ + "commentsandwhitespace": { + {`\s+`, Text, nil}, + {``, `role`, `state`, `sub`, `no`, + `submethod`, `subset`, `succeed`, `supersede`, `try`, `unit`, `unless`, `until`, + `use`, `when`, `while`, `with`, `without`, `export`, `native`, `repr`, `required`, `rw`, + `symbol`, `default`, `cached`, `DEPRECATED`, `dynamic`, `hidden-from-backtrace`, `nodal`, + `pure`, `raw`, `start`, `react`, `supply`, `whenever`, `also`, `rule`, `token`, `regex`, + `dynamic-scope`, `built`, `temp`, + } + + keywordsPattern := Words(`(?)`, `(>=)`, `minmax`, `notandthen`, `S`, + } + + wordOperatorsPattern := Words(`(?<=^|\b|\s)`, `(?=$|\b|\s)`, wordOperators...) + + operators := []string{ + `++`, `--`, `-`, `**`, `!`, `+`, `~`, `?`, `+^`, `~^`, `?^`, `^`, `*`, `/`, `%`, `%%`, `+&`, + `+<`, `+>`, `~&`, `~<`, `~>`, `?&`, `+|`, `+^`, `~|`, `~^`, `?`, `?|`, `?^`, `&`, `^`, + `<=>`, `^…^`, `^…`, `…^`, `…`, `...`, `...^`, `^...`, `^...^`, `..`, `..^`, `^..`, `^..^`, + `::=`, `:=`, `!=`, `==`, `<=`, `<`, `>=`, `>`, `~~`, `===`, `&&`, `||`, `|`, `^^`, `//`, + `??`, `!!`, `^fff^`, `^ff^`, `<==`, `==>`, `<<==`, `==>>`, `=>`, `=`, `<<`, `«`, `>>`, `»`, + `,`, `>>.`, `».`, `.&`, `.=`, `.^`, `.?`, `.+`, `.*`, `.`, `∘`, `∩`, `⊍`, `∪`, `⊎`, `∖`, + `⊖`, `≠`, `≤`, `≥`, `=:=`, `=~=`, `≅`, `∈`, `∉`, `≡`, `≢`, `∋`, `∌`, `⊂`, `⊄`, `⊆`, `⊈`, + `⊃`, `⊅`, `⊇`, `⊉`, `:`, `!!!`, `???`, `¯`, `×`, `÷`, `−`, `⁺`, `⁻`, + } + + operatorsPattern := Words(``, ``, operators...) 
+ + builtinTypes := []string{ + `False`, `True`, `Order`, `More`, `Less`, `Same`, `Any`, `Array`, `Associative`, `AST`, + `atomicint`, `Attribute`, `Backtrace`, `Backtrace::Frame`, `Bag`, `Baggy`, `BagHash`, + `Blob`, `Block`, `Bool`, `Buf`, `Callable`, `CallFrame`, `Cancellation`, `Capture`, + `CArray`, `Channel`, `Code`, `compiler`, `Complex`, `ComplexStr`, `CompUnit`, + `CompUnit::PrecompilationRepository`, `CompUnit::Repository`, `Empty`, + `CompUnit::Repository::FileSystem`, `CompUnit::Repository::Installation`, `Cool`, + `CurrentThreadScheduler`, `CX::Warn`, `CX::Take`, `CX::Succeed`, `CX::Return`, `CX::Redo`, + `CX::Proceed`, `CX::Next`, `CX::Last`, `CX::Emit`, `CX::Done`, `Cursor`, `Date`, `Dateish`, + `DateTime`, `Distribution`, `Distribution::Hash`, `Distribution::Locally`, + `Distribution::Path`, `Distribution::Resource`, `Distro`, `Duration`, `Encoding`, + `Encoding::Registry`, `Endian`, `Enumeration`, `Exception`, `Failure`, `FatRat`, `Grammar`, + `Hash`, `HyperWhatever`, `Instant`, `Int`, `int`, `int16`, `int32`, `int64`, `int8`, `str`, + `IntStr`, `IO`, `IO::ArgFiles`, `IO::CatHandle`, `IO::Handle`, `IO::Notification`, + `IO::Notification::Change`, `IO::Path`, `IO::Path::Cygwin`, `IO::Path::Parts`, + `IO::Path::QNX`, `IO::Path::Unix`, `IO::Path::Win32`, `IO::Pipe`, `IO::Socket`, + `IO::Socket::Async`, `IO::Socket::Async::ListenSocket`, `IO::Socket::INET`, `IO::Spec`, + `IO::Spec::Cygwin`, `IO::Spec::QNX`, `IO::Spec::Unix`, `IO::Spec::Win32`, `IO::Special`, + `Iterable`, `Iterator`, `Junction`, `Kernel`, `Label`, `List`, `Lock`, `Lock::Async`, + `Lock::ConditionVariable`, `long`, `longlong`, `Macro`, `Map`, `Match`, + `Metamodel::AttributeContainer`, `Metamodel::C3MRO`, `Metamodel::ClassHOW`, + `Metamodel::ConcreteRoleHOW`, `Metamodel::CurriedRoleHOW`, `Metamodel::DefiniteHOW`, + `Metamodel::Documenting`, `Metamodel::EnumHOW`, `Metamodel::Finalization`, + `Metamodel::MethodContainer`, `Metamodel::Mixins`, `Metamodel::MROBasedMethodDispatch`, + `Metamodel::MultipleInheritance`, `Metamodel::Naming`, `Metamodel::Primitives`, + `Metamodel::PrivateMethodContainer`, `Metamodel::RoleContainer`, `Metamodel::RolePunning`, + `Metamodel::Stashing`, `Metamodel::Trusting`, `Metamodel::Versioning`, `Method`, `Mix`, + `MixHash`, `Mixy`, `Mu`, `NFC`, `NFD`, `NFKC`, `NFKD`, `Nil`, `Num`, `num32`, `num64`, + `Numeric`, `NumStr`, `ObjAt`, `Order`, `Pair`, `Parameter`, `Perl`, `Pod::Block`, + `Pod::Block::Code`, `Pod::Block::Comment`, `Pod::Block::Declarator`, `Pod::Block::Named`, + `Pod::Block::Para`, `Pod::Block::Table`, `Pod::Heading`, `Pod::Item`, `Pointer`, + `Positional`, `PositionalBindFailover`, `Proc`, `Proc::Async`, `Promise`, `Proxy`, + `PseudoStash`, `QuantHash`, `RaceSeq`, `Raku`, `Range`, `Rat`, `Rational`, `RatStr`, + `Real`, `Regex`, `Routine`, `Routine::WrapHandle`, `Scalar`, `Scheduler`, `Semaphore`, + `Seq`, `Sequence`, `Set`, `SetHash`, `Setty`, `Signature`, `size_t`, `Slip`, `Stash`, + `Str`, `StrDistance`, `Stringy`, `Sub`, `Submethod`, `Supplier`, `Supplier::Preserving`, + `Supply`, `Systemic`, `Tap`, `Telemetry`, `Telemetry::Instrument::Thread`, + `Telemetry::Instrument::ThreadPool`, `Telemetry::Instrument::Usage`, `Telemetry::Period`, + `Telemetry::Sampler`, `Thread`, `Test`, `ThreadPoolScheduler`, `UInt`, `uint16`, `uint32`, + `uint64`, `uint8`, `Uni`, `utf8`, `ValueObjAt`, `Variable`, `Version`, `VM`, `Whatever`, + `WhateverCode`, `WrapHandle`, `NativeCall`, + // Pragmas + `precompilation`, `experimental`, `worries`, `MONKEY-TYPING`, `MONKEY-SEE-NO-EVAL`, + 
`MONKEY-GUTS`, `fatal`, `lib`, `isms`, `newline`, `nqp`, `soft`, + `strict`, `trace`, `variables`, + } + + builtinTypesPattern := Words(`(? 0 { + if tokenClass == rakuPod { + match, err := podRegex.FindRunesMatchStartingAt(text, searchPos+nChars) + if err == nil { + closingChars = match.Runes() + nextClosePos = match.Index + } else { + nextClosePos = -1 + } + } else { + nextClosePos = indexAt(text, closingChars, searchPos+nChars) + } + + nextOpenPos := indexAt(text, openingChars, searchPos+nChars) + + switch { + case nextClosePos == -1: + nextClosePos = len(text) + nestingLevel = 0 + case nextOpenPos != -1 && nextOpenPos < nextClosePos: + nestingLevel++ + nChars = len(openingChars) + searchPos = nextOpenPos + default: // next_close_pos < next_open_pos + nestingLevel-- + nChars = len(closingChars) + searchPos = nextClosePos + } + } + + endPos = nextClosePos + } + + if endPos < 0 { + // if we didn't find a closer, just highlight the + // rest of the text in this class + endPos = len(text) + } + + adverbre := regexp.MustCompile(`:to\b|:heredoc\b`) + var heredocTerminator []rune + var endHeredocPos int + if adverbre.MatchString(string(adverbs)) { + if endPos != len(text) { + heredocTerminator = text[state.Pos:endPos] + nChars = len(heredocTerminator) + } else { + endPos = state.Pos + 1 + heredocTerminator = []rune{} + nChars = 0 + } + + if nChars > 0 { + endHeredocPos = indexAt(text[endPos:], heredocTerminator, 0) + if endHeredocPos > -1 { + endPos += endHeredocPos + } else { + endPos = len(text) + } + } + } + + textBetweenBrackets := string(text[state.Pos:endPos]) + switch tokenClass { + case rakuPod, rakuPodDeclaration, rakuNameAttribute: + state.NamedGroups[`value`] = textBetweenBrackets + state.NamedGroups[`closing_delimiters`] = string(closingChars) + case rakuQuote: + if len(heredocTerminator) > 0 { + // Length of heredoc terminator + closing chars + `;` + heredocFristPunctuationLen := nChars + len(openingChars) + 1 + + state.NamedGroups[`opening_delimiters`] = string(openingChars) + + string(text[state.Pos:state.Pos+heredocFristPunctuationLen]) + + state.NamedGroups[`value`] = + string(text[state.Pos+heredocFristPunctuationLen : endPos]) + + if endHeredocPos > -1 { + state.NamedGroups[`closing_delimiters`] = string(heredocTerminator) + } + } else { + state.NamedGroups[`value`] = textBetweenBrackets + if nChars > 0 { + state.NamedGroups[`closing_delimiters`] = string(closingChars) + } + } + default: + state.Groups = []string{state.Groups[0] + string(text[state.Pos:endPos+nChars])} + } + + state.Pos = endPos + nChars + + return nil + } + } + + // Raku rules + // Empty capture groups are placeholders and will be replaced by mutators + // DO NOT REMOVE THEM! + return Rules{ + "root": { + // Placeholder, will be overwritten by mutators, DO NOT REMOVE! + {`\A\z`, nil, nil}, + Include("common"), + {`{`, Punctuation, Push(`root`)}, + {`\(`, Punctuation, Push(`root`)}, + {`[)}]`, Punctuation, Pop(1)}, + {`;`, Punctuation, nil}, + {`\[|\]`, Operator, nil}, + {`.+?`, Text, nil}, + }, + "common": { + {`^#![^\n]*$`, CommentHashbang, nil}, + Include("pod"), + // Multi-line, Embedded comment + { + "#`(?(?" 
+ bracketsPattern + `)\k*)`, + CommentMultiline, + findBrackets(rakuMultilineComment), + }, + {`#[^\n]*$`, CommentSingle, nil}, + // /regex/ + { + `(?<=(?:^|\(|=|:|~~|\[|{|,|=>)\s*)(/)(?!\]|\))((?:\\\\|\\/|.)*?)((?>)(\S+?)(<<)`, ByGroups(Operator, UsingSelf("root"), Operator), nil}, + {`(»)(\S+?)(«)`, ByGroups(Operator, UsingSelf("root"), Operator), nil}, + // Hyperoperator | «*« + {`(<<)(\S+?)(<<)`, ByGroups(Operator, UsingSelf("root"), Operator), nil}, + {`(«)(\S+?)(«)`, ByGroups(Operator, UsingSelf("root"), Operator), nil}, + // Hyperoperator | »*» + {`(>>)(\S+?)(>>)`, ByGroups(Operator, UsingSelf("root"), Operator), nil}, + {`(»)(\S+?)(»)`, ByGroups(Operator, UsingSelf("root"), Operator), nil}, + // <> + {`(?>)[^\n])+?[},;] *\n)(?!(?:(?!>>).)+?>>\S+?>>)`, Punctuation, Push("<<")}, + // «quoted words» + {`(? operators | something < onething > something + { + `(?<=[$@%&]?\w[\w':-]* +)(<=?)( *[^ ]+? *)(>=?)(?= *[$@%&]?\w[\w':-]*)`, + ByGroups(Operator, UsingSelf("root"), Operator), + nil, + }, + // + { + `(?])+?)(>)(?!\s*(?:\d+|\.(?:Int|Numeric)|[$@%]\*?\w[\w':-]*[^(]|\s+\[))`, + ByGroups(Punctuation, String, Punctuation), + nil, + }, + {`C?X::['\w:-]+`, NameException, nil}, + Include("metaoperator"), + // Pair | key => value + { + `(\w[\w'-]*)(\s*)(=>)`, + ByGroups(String, Text, Operator), + nil, + }, + Include("colon-pair"), + // Token + { + `(?<=(?:^|\s)(?:regex|token|rule)(\s+))` + namePattern + colonPairLookahead + `\s*[({])`, + NameFunction, + Push("token", "name-adverb"), + }, + // Substitution + {`(?<=^|\b|\s)(?(?:qq|q|Q))(?(?::?(?:heredoc|to|qq|ww|q|w|s|a|h|f|c|b|to|v|x))*)(?\s*)(?(?[^0-9a-zA-Z:\s])\k*)`, + EmitterFunc(quote), + findBrackets(rakuQuote), + }, + // Function + { + `\b` + namePattern + colonPairLookahead + `\()`, + NameFunction, + Push("name-adverb"), + }, + // Method + { + `(?(?[^\w:\s])\k*)`, + ByGroupNames( + map[string]Emitter{ + `opening_delimiters`: Punctuation, + `delimiter`: nil, + }, + ), + findBrackets(rakuMatchRegex), + }, + }, + "substitution": { + Include("colon-pair-attribute"), + // Substitution | s{regex} = value + { + `(?(?` + bracketsPattern + `)\k*)`, + ByGroupNames(map[string]Emitter{ + `opening_delimiters`: Punctuation, + `delimiter`: nil, + }), + findBrackets(rakuMatchRegex), + }, + // Substitution | s/regex/string/ + { + `(?[^\w:\s])`, + Punctuation, + findBrackets(rakuSubstitutionRegex), + }, + }, + "number": { + {`0_?[0-7]+(_[0-7]+)*`, LiteralNumberOct, nil}, + {`0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*`, LiteralNumberHex, nil}, + {`0b[01]+(_[01]+)*`, LiteralNumberBin, nil}, + { + `(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?`, + LiteralNumberFloat, + nil, + }, + {`(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*`, LiteralNumberFloat, nil}, + {`(?<=\d+)i`, NameConstant, nil}, + {`\d+(_\d+)*`, LiteralNumberInteger, nil}, + }, + "name-adverb": { + Include("colon-pair-attribute-keyvalue"), + Default(Pop(1)), + }, + "colon-pair": { + // :key(value) + {colonPairPattern, colonPair(String), findBrackets(rakuNameAttribute)}, + // :123abc + { + `(:)(\d+)(\w[\w'-]*)`, + ByGroups(Punctuation, UsingSelf("number"), String), + nil, + }, + // :key + {`(:)(!?)(\w[\w'-]*)`, ByGroups(Punctuation, Operator, String), nil}, + {`\s+`, Text, nil}, + }, + "colon-pair-attribute": { + // :key(value) + {colonPairPattern, colonPair(NameAttribute), findBrackets(rakuNameAttribute)}, + // :123abc + { + `(:)(\d+)(\w[\w'-]*)`, + ByGroups(Punctuation, UsingSelf("number"), NameAttribute), + nil, + }, + // :key + {`(:)(!?)(\w[\w'-]*)`, ByGroups(Punctuation, Operator, NameAttribute), 
nil}, + {`\s+`, Text, nil}, + }, + "colon-pair-attribute-keyvalue": { + // :key(value) + {colonPairPattern, colonPair(NameAttribute), findBrackets(rakuNameAttribute)}, + }, + "escape-qq": { + { + `(? + { + `(?`), + tokenType: Punctuation, + stateName: `root`, + pushState: true, + }), + }, + // {code} + Include(`closure`), + // Properties + {`(:)(\w+)`, ByGroups(Punctuation, NameAttribute), nil}, + // Operator + {`\|\||\||&&|&|\.\.|\*\*|%%|%|:|!|<<|«|>>|»|\+|\*\*|\*|\?|=|~|<~~>`, Operator, nil}, + // Anchors + {`\^\^|\^|\$\$|\$`, NameEntity, nil}, + {`\.`, NameEntity, nil}, + {`#[^\n]*\n`, CommentSingle, nil}, + // Lookaround + { + `(?`), + tokenType: Punctuation, + stateName: `regex`, + pushState: true, + }), + }, + { + `(?)`, + ByGroups(Punctuation, Operator, OperatorWord, Punctuation), + nil, + }, + // <$variable> + { + `(?)`, + ByGroups(Punctuation, Operator, NameVariable, Punctuation), + nil, + }, + // Capture markers + {`(?`, Operator, nil}, + { + `(? + {`(?`, Punctuation, Pop(1)}, + // + { + `\(`, + Punctuation, + replaceRule(ruleReplacingConfig{ + delimiter: []rune(`)>`), + tokenType: Punctuation, + stateName: `root`, + popState: true, + pushState: true, + }), + }, + // + { + `\s+`, + StringRegex, + replaceRule(ruleReplacingConfig{ + delimiter: []rune(`>`), + tokenType: Punctuation, + stateName: `regex`, + popState: true, + pushState: true, + }), + }, + // + { + `:`, + Punctuation, + replaceRule(ruleReplacingConfig{ + delimiter: []rune(`>`), + tokenType: Punctuation, + stateName: `root`, + popState: true, + pushState: true, + }), + }, + }, + "regex-variable": { + Include(`regex-starting-operators`), + // + {`(&)?(\w[\w':-]*)(>)`, ByGroups(Operator, NameFunction, Punctuation), Pop(1)}, + // `, Punctuation, Pop(1)}, + Include("regex-class-builtin"), + Include("variable"), + Include(`regex-starting-operators`), + Include("colon-pair-attribute"), + {`(?] + { + `\b([RZX]+)\b(\[)([^\s\]]+?)(\])`, + ByGroups(OperatorWord, Punctuation, UsingSelf("root"), Punctuation), + nil, + }, + // Z=> + {`\b([RZX]+)\b([^\s\]]+)`, ByGroups(OperatorWord, UsingSelf("operator")), nil}, + }, + "operator": { + // Word Operator + {wordOperatorsPattern, OperatorWord, nil}, + // Operator + {operatorsPattern, Operator, nil}, + }, + "pod": { + // Single-line pod declaration + {`(#[|=])\s`, Keyword, Push("pod-single")}, + // Multi-line pod declaration + { + "(?#[|=])(?(?" + bracketsPattern + `)\k*)(?)(?)`, + ByGroupNames( + map[string]Emitter{ + `keyword`: Keyword, + `opening_delimiters`: Punctuation, + `delimiter`: nil, + `value`: UsingSelf("pod-declaration"), + `closing_delimiters`: Punctuation, + }), + findBrackets(rakuPodDeclaration), + }, + Include("pod-blocks"), + }, + "pod-blocks": { + // =begin code + { + `(?<=^ *)(? *)(?=begin)(? +)(?code)(?[^\n]*)(?.*?)(?^\k)(?=end)(? +)\k`, + EmitterFunc(podCode), + nil, + }, + // =begin + { + `(?<=^ *)(? *)(?=begin)(? +)(?!code)(?\w[\w'-]*)(?[^\n]*)(?)(?)`, + ByGroupNames( + map[string]Emitter{ + `ws`: Comment, + `keyword`: Keyword, + `ws2`: StringDoc, + `name`: Keyword, + `config`: EmitterFunc(podConfig), + `value`: UsingSelf("pod-begin"), + `closing_delimiters`: Keyword, + }), + findBrackets(rakuPod), + }, + // =for ... + { + `(?<=^ *)(? *)(?=(?:for|defn))(? +)(?\w[\w'-]*)(?[^\n]*\n)`, + ByGroups(Comment, Keyword, StringDoc, Keyword, EmitterFunc(podConfig)), + Push("pod-paragraph"), + }, + // =config + { + `(?<=^ *)(? *)(?=config)(? 
+)(?\w[\w'-]*)(?[^\n]*\n)`, + ByGroups(Comment, Keyword, StringDoc, Keyword, EmitterFunc(podConfig)), + nil, + }, + // =alias + { + `(?<=^ *)(? *)(?=alias)(? +)(?\w[\w'-]*)(?[^\n]*\n)`, + ByGroups(Comment, Keyword, StringDoc, Keyword, StringDoc), + nil, + }, + // =encoding + { + `(?<=^ *)(? *)(?=encoding)(? +)(?[^\n]+)`, + ByGroups(Comment, Keyword, StringDoc, Name), + nil, + }, + // =para ... + { + `(?<=^ *)(? *)(?=(?:para|table|pod))(?(? *)(?=head\d+)(? *)(?#?)`, + ByGroups(Comment, Keyword, GenericHeading, Keyword), + Push("pod-heading"), + }, + // =item ... + { + `(?<=^ *)(? *)(?=(?:item\d*|comment|data|[A-Z]+))(? *)(?#?)`, + ByGroups(Comment, Keyword, StringDoc, Keyword), + Push("pod-paragraph"), + }, + { + `(?<=^ *)(? *)(?=finish)(?[^\n]*)`, + ByGroups(Comment, Keyword, EmitterFunc(podConfig)), + Push("pod-finish"), + }, + // ={custom} ... + { + `(?<=^ *)(? *)(?=\w[\w'-]*)(? *)(?#?)`, + ByGroups(Comment, Name, StringDoc, Keyword), + Push("pod-paragraph"), + }, + // = podconfig + { + `(?<=^ *)(? *=)(? *)(?(?::\w[\w'-]*(?:` + colonPairOpeningBrackets + `.+?` + + colonPairClosingBrackets + `) *)*\n)`, + ByGroups(Keyword, StringDoc, EmitterFunc(podConfig)), + nil, + }, + }, + "pod-begin": { + Include("pod-blocks"), + Include("pre-pod-formatter"), + {`.+?`, StringDoc, nil}, + }, + "pod-declaration": { + Include("pre-pod-formatter"), + {`.+?`, StringDoc, nil}, + }, + "pod-paragraph": { + {`\n *\n|\n(?=^ *=)`, StringDoc, Pop(1)}, + Include("pre-pod-formatter"), + {`.+?`, StringDoc, nil}, + }, + "pod-single": { + {`\n`, StringDoc, Pop(1)}, + Include("pre-pod-formatter"), + {`.+?`, StringDoc, nil}, + }, + "pod-heading": { + {`\n *\n|\n(?=^ *=)`, GenericHeading, Pop(1)}, + Include("pre-pod-formatter"), + {`.+?`, GenericHeading, nil}, + }, + "pod-finish": { + {`\z`, nil, Pop(1)}, + Include("pre-pod-formatter"), + {`.+?`, StringDoc, nil}, + }, + "pre-pod-formatter": { + // C, B, ... + { + `(?[CBIUDTKRPAELZVMSXN])(?<+|«)`, + ByGroups(Keyword, Punctuation), + findBrackets(rakuPodFormatter), + }, + }, + "pod-formatter": { + // Placeholder rule, will be replaced by mutators. DO NOT REMOVE! + {`>`, Punctuation, Pop(1)}, + Include("pre-pod-formatter"), + // Placeholder rule, will be replaced by mutators. DO NOT REMOVE! 
+ {`.+?`, StringOther, nil}, + }, + "variable": { + {variablePattern, NameVariable, Push("name-adverb")}, + {globalVariablePattern, NameVariableGlobal, Push("name-adverb")}, + {`[$@]<[^>]+>`, NameVariable, nil}, + {`\$[/!¢]`, NameVariable, nil}, + {`[$@%]`, NameVariable, nil}, + }, + "single-quote": { + {`(?>(?!\s*(?:\d+|\.(?:Int|Numeric)|[$@%]\*?[\w':-]+|\s+\[))`, Punctuation, Pop(1)}, + Include("ww"), + }, + "«": { + {`»(?!\s*(?:\d+|\.(?:Int|Numeric)|[$@%]\*?[\w':-]+|\s+\[))`, Punctuation, Pop(1)}, + Include("ww"), + }, + "ww": { + Include("single-quote"), + Include("qq"), + }, + "qq": { + Include("qq-variable"), + Include("closure"), + Include(`escape-char`), + Include("escape-hexadecimal"), + Include("escape-c-name"), + Include("escape-qq"), + {`.+?`, StringDouble, nil}, + }, + "qq-variable": { + { + `(?\.)(?` + namePattern + `)` + colonPairLookahead + `\()`, + ByGroupNames(map[string]Emitter{ + `operator`: Operator, + `method_name`: NameFunction, + }), + Push(`name-adverb`), + }, + // Function/Signature + { + `\(`, Punctuation, replaceRule( + ruleReplacingConfig{ + delimiter: []rune(`)`), + tokenType: Punctuation, + stateName: `root`, + pushState: true, + }), + }, + Default(Pop(1)), + }, + "Q": { + Include("escape-qq"), + {`.+?`, String, nil}, + }, + "Q-closure": { + Include("escape-qq"), + Include("closure"), + {`.+?`, String, nil}, + }, + "Q-variable": { + Include("escape-qq"), + Include("qq-variable"), + {`.+?`, String, nil}, + }, + "closure": { + {`(? -1 { + idx = utf8.RuneCountInString(text[:idx]) + + // Search again if the substr is escaped with backslash + if (idx > 1 && strFromPos[idx-1] == '\\' && strFromPos[idx-2] != '\\') || + (idx == 1 && strFromPos[idx-1] == '\\') { + idx = indexAt(str[pos:], substr, idx+1) + + idx = utf8.RuneCountInString(text[:idx]) + + if idx < 0 { + return idx + } + } + idx += pos + } + + return idx +} + +// Tells if an array of string contains a string +func contains(s []string, e string) bool { + for _, value := range s { + if value == e { + return true + } + } + return false +} + +type rulePosition int + +const ( + topRule rulePosition = 0 + bottomRule = -1 +) + +type ruleMakingConfig struct { + delimiter []rune + pattern string + tokenType Emitter + mutator Mutator + numberOfDelimiterChars int +} + +type ruleReplacingConfig struct { + delimiter []rune + pattern string + tokenType Emitter + numberOfDelimiterChars int + mutator Mutator + appendMutator Mutator + rulePosition rulePosition + stateName string + pop bool + popState bool + pushState bool +} + +// Pops rule from state-stack and replaces the rule with the previous rule +func popRule(rule ruleReplacingConfig) MutatorFunc { + return func(state *LexerState) error { + stackName := genStackName(rule.stateName, rule.rulePosition) + + stack, ok := state.Get(stackName).([]ruleReplacingConfig) + + if ok && len(stack) > 0 { + // Pop from stack + stack = stack[:len(stack)-1] + lastRule := stack[len(stack)-1] + lastRule.pushState = false + lastRule.popState = false + lastRule.pop = true + state.Set(stackName, stack) + + // Call replaceRule to use the last rule + err := replaceRule(lastRule)(state) + if err != nil { + panic(err) + } + } + + return nil + } +} + +// Replaces a state's rule based on the rule config and position +func replaceRule(rule ruleReplacingConfig) MutatorFunc { + return func(state *LexerState) error { + stateName := rule.stateName + stackName := genStackName(rule.stateName, rule.rulePosition) + + stack, ok := state.Get(stackName).([]ruleReplacingConfig) + if !ok { + stack = 
[]ruleReplacingConfig{} + } + + // If state-stack is empty fill it with the placeholder rule + if len(stack) == 0 { + stack = []ruleReplacingConfig{ + { + // Placeholder, will be overwritten by mutators, DO NOT REMOVE! + pattern: `\A\z`, + tokenType: nil, + mutator: nil, + stateName: stateName, + rulePosition: rule.rulePosition, + }, + } + state.Set(stackName, stack) + } + + var mutator Mutator + mutators := []Mutator{} + + switch { + case rule.rulePosition == topRule && rule.mutator == nil: + // Default mutator for top rule + mutators = []Mutator{Pop(1), popRule(rule)} + case rule.rulePosition == topRule && rule.mutator != nil: + // Default mutator for top rule, when rule.mutator is set + mutators = []Mutator{rule.mutator, popRule(rule)} + case rule.mutator != nil: + mutators = []Mutator{rule.mutator} + } + + if rule.appendMutator != nil { + mutators = append(mutators, rule.appendMutator) + } + + if len(mutators) > 0 { + mutator = Mutators(mutators...) + } else { + mutator = nil + } + + ruleConfig := ruleMakingConfig{ + pattern: rule.pattern, + delimiter: rule.delimiter, + numberOfDelimiterChars: rule.numberOfDelimiterChars, + tokenType: rule.tokenType, + mutator: mutator, + } + + cRule := makeRule(ruleConfig) + + switch rule.rulePosition { + case topRule: + state.Rules[stateName][0] = cRule + case bottomRule: + state.Rules[stateName][len(state.Rules[stateName])-1] = cRule + } + + // Pop state name from stack if asked. State should be popped first before Pushing + if rule.popState { + err := Pop(1)(state) + if err != nil { + panic(err) + } + } + + // Push state name to stack if asked + if rule.pushState { + err := Push(stateName)(state) + if err != nil { + panic(err) + } + } + + if !rule.pop { + state.Set(stackName, append(stack, rule)) + } + + return nil + } +} + +// Generates rule replacing stack using state name and rule position +func genStackName(stateName string, rulePosition rulePosition) (stackName string) { + switch rulePosition { + case topRule: + stackName = stateName + `-top-stack` + case bottomRule: + stackName = stateName + `-bottom-stack` + } + return +} + +// Makes a compiled rule and returns it +func makeRule(config ruleMakingConfig) *CompiledRule { + var rePattern string + + if len(config.delimiter) > 0 { + delimiter := string(config.delimiter) + + if config.numberOfDelimiterChars > 1 { + delimiter = strings.Repeat(delimiter, config.numberOfDelimiterChars) + } + + rePattern = `(? 1 { + lang = langMatch[1] + } + + // Tokenise code based on lang property + sublexer := internal.Get(lang) + if sublexer != nil { + iterator, err := sublexer.Tokenise(nil, state.NamedGroups[`value`]) + + if err != nil { + panic(err) + } else { + iterators = append(iterators, iterator) + } + } else { + iterators = append(iterators, Literator(tokens[4])) + } + + // Append the rest of the tokens + iterators = append(iterators, Literator(tokens[5:]...)) + + return Concaterator(iterators...) +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/r/reasonml.go b/vendor/github.com/alecthomas/chroma/lexers/r/reasonml.go new file mode 100644 index 000000000..0ade5691d --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/r/reasonml.go @@ -0,0 +1,71 @@ +package r + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Reasonml lexer. 
+var Reasonml = internal.Register(MustNewLazyLexer( + &Config{ + Name: "ReasonML", + Aliases: []string{"reason", "reasonml"}, + Filenames: []string{"*.re", "*.rei"}, + MimeTypes: []string{"text/x-reasonml"}, + }, + reasonmlRules, +)) + +func reasonmlRules() Rules { + return Rules{ + "escape-sequence": { + {`\\[\\"\'ntbr]`, LiteralStringEscape, nil}, + {`\\[0-9]{3}`, LiteralStringEscape, nil}, + {`\\x[0-9a-fA-F]{2}`, LiteralStringEscape, nil}, + }, + "root": { + {`\s+`, Text, nil}, + {`false|true|\(\)|\[\]`, NameBuiltinPseudo, nil}, + {`\b([A-Z][\w\']*)(?=\s*\.)`, NameNamespace, Push("dotted")}, + {`\b([A-Z][\w\']*)`, NameClass, nil}, + {`//.*?\n`, CommentSingle, nil}, + {`\/\*(?![\/])`, CommentMultiline, Push("comment")}, + {`\b(as|assert|begin|class|constraint|do|done|downto|else|end|exception|external|false|for|fun|esfun|function|functor|if|in|include|inherit|initializer|lazy|let|switch|module|pub|mutable|new|nonrec|object|of|open|pri|rec|sig|struct|then|to|true|try|type|val|virtual|when|while|with)\b`, Keyword, nil}, + {"(~|\\}|\\|]|\\||\\|\\||\\{<|\\{|`|_|]|\\[\\||\\[>|\\[<|\\[|\\?\\?|\\?|>\\}|>]|>|=|<-|<|;;|;|:>|:=|::|:|\\.\\.\\.|\\.\\.|\\.|=>|-\\.|-|,|\\+|\\*|\\)|\\(|&&|&|#|!=)", OperatorWord, nil}, + {`([=<>@^|&+\*/$%-]|[!?~])?[!$%&*+\./:<=>?@^|~-]`, Operator, nil}, + {`\b(and|asr|land|lor|lsl|lsr|lxor|mod|or)\b`, OperatorWord, nil}, + {`\b(unit|int|float|bool|string|char|list|array)\b`, KeywordType, nil}, + {`[^\W\d][\w']*`, Name, nil}, + {`-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)`, LiteralNumberFloat, nil}, + {`0[xX][\da-fA-F][\da-fA-F_]*`, LiteralNumberHex, nil}, + {`0[oO][0-7][0-7_]*`, LiteralNumberOct, nil}, + {`0[bB][01][01_]*`, LiteralNumberBin, nil}, + {`\d[\d_]*`, LiteralNumberInteger, nil}, + {`'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'`, LiteralStringChar, nil}, + {`'.'`, LiteralStringChar, nil}, + {`'`, Keyword, nil}, + {`"`, LiteralStringDouble, Push("string")}, + {`[~?][a-z][\w\']*:`, NameVariable, nil}, + }, + "comment": { + {`[^\/*]+`, CommentMultiline, nil}, + {`\/\*`, CommentMultiline, Push()}, + {`\*\/`, CommentMultiline, Pop(1)}, + {`[\*]`, CommentMultiline, nil}, + }, + "string": { + {`[^\\"]+`, LiteralStringDouble, nil}, + Include("escape-sequence"), + {`\\\n`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + "dotted": { + {`\s+`, Text, nil}, + {`\.`, Punctuation, nil}, + {`[A-Z][\w\']*(?=\s*\.)`, NameNamespace, nil}, + {`[A-Z][\w\']*`, NameClass, Pop(1)}, + {`[a-z_][\w\']*`, Name, Pop(1)}, + Default(Pop(1)), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/r/regedit.go b/vendor/github.com/alecthomas/chroma/lexers/r/regedit.go new file mode 100644 index 000000000..617723457 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/r/regedit.go @@ -0,0 +1,36 @@ +package r + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Reg lexer. 
+var Reg = internal.Register(MustNewLazyLexer( + &Config{ + Name: "reg", + Aliases: []string{"registry"}, + Filenames: []string{"*.reg"}, + MimeTypes: []string{"text/x-windows-registry"}, + }, + regRules, +)) + +func regRules() Rules { + return Rules{ + "root": { + {`Windows Registry Editor.*`, Text, nil}, + {`\s+`, Text, nil}, + {`[;#].*`, CommentSingle, nil}, + {`(\[)(-?)(HKEY_[A-Z_]+)(.*?\])$`, ByGroups(Keyword, Operator, NameBuiltin, Keyword), nil}, + {`("(?:\\"|\\\\|[^"])+")([ \t]*)(=)([ \t]*)`, ByGroups(NameAttribute, Text, Operator, Text), Push("value")}, + {`(.*?)([ \t]*)(=)([ \t]*)`, ByGroups(NameAttribute, Text, Operator, Text), Push("value")}, + }, + "value": { + {`-`, Operator, Pop(1)}, + {`(dword|hex(?:\([0-9a-fA-F]\))?)(:)([0-9a-fA-F,]+)`, ByGroups(NameVariable, Punctuation, LiteralNumber), Pop(1)}, + {`.+`, LiteralString, Pop(1)}, + Default(Pop(1)), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/r/rexx.go b/vendor/github.com/alecthomas/chroma/lexers/r/rexx.go new file mode 100644 index 000000000..38ca768d0 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/r/rexx.go @@ -0,0 +1,63 @@ +package r + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Rexx lexer. +var Rexx = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Rexx", + Aliases: []string{"rexx", "arexx"}, + Filenames: []string{"*.rexx", "*.rex", "*.rx", "*.arexx"}, + MimeTypes: []string{"text/x-rexx"}, + NotMultiline: true, + CaseInsensitive: true, + }, + rexxRules, +)) + +func rexxRules() Rules { + return Rules{ + "root": { + {`\s`, TextWhitespace, nil}, + {`/\*`, CommentMultiline, Push("comment")}, + {`"`, LiteralString, Push("string_double")}, + {`'`, LiteralString, Push("string_single")}, + {`[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?`, LiteralNumber, nil}, + {`([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b`, ByGroups(NameFunction, TextWhitespace, Operator, TextWhitespace, KeywordDeclaration), nil}, + {`([a-z_]\w*)(\s*)(:)`, ByGroups(NameLabel, TextWhitespace, Operator), nil}, + Include("function"), + Include("keyword"), + Include("operator"), + {`[a-z_]\w*`, Text, nil}, + }, + "function": { + {Words(``, `(\s*)(\()`, `abbrev`, `abs`, `address`, `arg`, `b2x`, `bitand`, `bitor`, `bitxor`, `c2d`, `c2x`, `center`, `charin`, `charout`, `chars`, `compare`, `condition`, `copies`, `d2c`, `d2x`, `datatype`, `date`, `delstr`, `delword`, `digits`, `errortext`, `form`, `format`, `fuzz`, `insert`, `lastpos`, `left`, `length`, `linein`, `lineout`, `lines`, `max`, `min`, `overlay`, `pos`, `queued`, `random`, `reverse`, `right`, `sign`, `sourceline`, `space`, `stream`, `strip`, `substr`, `subword`, `symbol`, `time`, `trace`, `translate`, `trunc`, `value`, `verify`, `word`, `wordindex`, `wordlength`, `wordpos`, `words`, `x2b`, `x2c`, `x2d`, `xrange`), ByGroups(NameBuiltin, TextWhitespace, Operator), nil}, + }, + "keyword": { + {`(address|arg|by|call|do|drop|else|end|exit|for|forever|if|interpret|iterate|leave|nop|numeric|off|on|options|parse|pull|push|queue|return|say|select|signal|to|then|trace|until|while)\b`, KeywordReserved, nil}, + }, + "operator": { + {`(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|¬>>|¬>|¬|\.|,)`, Operator, nil}, + }, + "string_double": { + {`[^"\n]+`, LiteralString, nil}, + {`""`, LiteralString, nil}, + {`"`, LiteralString, Pop(1)}, + {`\n`, Text, Pop(1)}, + }, + "string_single": { + {`[^\'\n]`, LiteralString, nil}, + {`\'\'`, LiteralString, nil}, + 
{`\'`, LiteralString, Pop(1)}, + {`\n`, Text, Pop(1)}, + }, + "comment": { + {`[^*]+`, CommentMultiline, nil}, + {`\*/`, CommentMultiline, Pop(1)}, + {`\*`, CommentMultiline, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/r/rst.go b/vendor/github.com/alecthomas/chroma/lexers/r/rst.go new file mode 100644 index 000000000..2a874790e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/r/rst.go @@ -0,0 +1,90 @@ +package r + +import ( + "strings" + + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Restructuredtext lexer. +var Restructuredtext = internal.Register(MustNewLazyLexer( + &Config{ + Name: "reStructuredText", + Aliases: []string{"rst", "rest", "restructuredtext"}, + Filenames: []string{"*.rst", "*.rest"}, + MimeTypes: []string{"text/x-rst", "text/prs.fallenstein.rst"}, + }, + restructuredtextRules, +)) + +func restructuredtextRules() Rules { + return Rules{ + "root": { + {"^(=+|-+|`+|:+|\\.+|\\'+|\"+|~+|\\^+|_+|\\*+|\\++|#+)([ \\t]*\\n)(.+)(\\n)(\\1)(\\n)", ByGroups(GenericHeading, Text, GenericHeading, Text, GenericHeading, Text), nil}, + {"^(\\S.*)(\\n)(={3,}|-{3,}|`{3,}|:{3,}|\\.{3,}|\\'{3,}|\"{3,}|~{3,}|\\^{3,}|_{3,}|\\*{3,}|\\+{3,}|#{3,})(\\n)", ByGroups(GenericHeading, Text, GenericHeading, Text), nil}, + {`^(\s*)([-*+])( .+\n(?:\1 .+\n)*)`, ByGroups(Text, LiteralNumber, UsingSelf("inline")), nil}, + {`^(\s*)([0-9#ivxlcmIVXLCM]+\.)( .+\n(?:\1 .+\n)*)`, ByGroups(Text, LiteralNumber, UsingSelf("inline")), nil}, + {`^(\s*)(\(?[0-9#ivxlcmIVXLCM]+\))( .+\n(?:\1 .+\n)*)`, ByGroups(Text, LiteralNumber, UsingSelf("inline")), nil}, + {`^(\s*)([A-Z]+\.)( .+\n(?:\1 .+\n)+)`, ByGroups(Text, LiteralNumber, UsingSelf("inline")), nil}, + {`^(\s*)(\(?[A-Za-z]+\))( .+\n(?:\1 .+\n)+)`, ByGroups(Text, LiteralNumber, UsingSelf("inline")), nil}, + {`^(\s*)(\|)( .+\n(?:\| .+\n)*)`, ByGroups(Text, Operator, UsingSelf("inline")), nil}, + {`^( *\.\.)(\s*)((?:source)?code(?:-block)?)(::)([ \t]*)([^\n]+)(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\8.*|)\n)+)`, EmitterFunc(rstCodeBlock), nil}, + {`^( *\.\.)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))`, ByGroups(Punctuation, Text, OperatorWord, Punctuation, Text, UsingSelf("inline")), nil}, + {`^( *\.\.)(\s*)(_(?:[^:\\]|\\.)+:)(.*?)$`, ByGroups(Punctuation, Text, NameTag, UsingSelf("inline")), nil}, + {`^( *\.\.)(\s*)(\[.+\])(.*?)$`, ByGroups(Punctuation, Text, NameTag, UsingSelf("inline")), nil}, + {`^( *\.\.)(\s*)(\|.+\|)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))`, ByGroups(Punctuation, Text, NameTag, Text, OperatorWord, Punctuation, Text, UsingSelf("inline")), nil}, + {`^ *\.\..*(\n( +.*\n|\n)+)?`, CommentPreproc, nil}, + {`^( *)(:[a-zA-Z-]+:)(\s*)$`, ByGroups(Text, NameClass, Text), nil}, + {`^( *)(:.*?:)([ \t]+)(.*?)$`, ByGroups(Text, NameClass, Text, NameFunction), nil}, + {`^(\S.*(?)(`__?)", ByGroups(LiteralString, LiteralStringInterpol, LiteralString), nil}, + {"`.+?`__?", LiteralString, nil}, + {"(`.+?`)(:[a-zA-Z0-9:-]+?:)?", ByGroups(NameVariable, NameAttribute), nil}, + {"(:[a-zA-Z0-9:-]+?:)(`.+?`)", ByGroups(NameAttribute, NameVariable), nil}, + {`\*\*.+?\*\*`, GenericStrong, nil}, + {`\*.+?\*`, GenericEmph, nil}, + {`\[.*?\]_`, LiteralString, nil}, + {`<.+?>`, NameTag, nil}, + {"[^\\\\\\n\\[*`:]+", Text, nil}, + {`.`, Text, nil}, + }, + "literal": { + {"[^`]+", LiteralString, nil}, + {"``((?=$)|(?=[-/:.,; \\n\\x00\\\u2010\\\u2011\\\u2012\\\u2013\\\u2014\\\u00a0\\'\\\"\\)\\]\\}\\>\\\u2019\\\u201d\\\u00bb\\!\\?]))", LiteralString, Pop(1)}, + {"`", LiteralString, nil}, + }, + } +} + 
+func rstCodeBlock(groups []string, state *LexerState) Iterator { + iterators := []Iterator{} + tokens := []Token{ + {Punctuation, groups[1]}, + {Text, groups[2]}, + {OperatorWord, groups[3]}, + {Punctuation, groups[4]}, + {Text, groups[5]}, + {Keyword, groups[6]}, + {Text, groups[7]}, + } + code := strings.Join(groups[8:], "") + lexer := internal.Get(groups[6]) + if lexer == nil { + tokens = append(tokens, Token{String, code}) + iterators = append(iterators, Literator(tokens...)) + } else { + sub, err := lexer.Tokenise(nil, code) + if err != nil { + panic(err) + } + iterators = append(iterators, Literator(tokens...), sub) + } + return Concaterator(iterators...) +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/r/ruby.go b/vendor/github.com/alecthomas/chroma/lexers/r/ruby.go new file mode 100644 index 000000000..296cae9c0 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/r/ruby.go @@ -0,0 +1,254 @@ +package r + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Ruby lexer. +var Ruby = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Ruby", + Aliases: []string{"rb", "ruby", "duby"}, + Filenames: []string{"*.rb", "*.rbw", "Rakefile", "*.rake", "*.gemspec", "*.rbx", "*.duby", "Gemfile"}, + MimeTypes: []string{"text/x-ruby", "application/x-ruby"}, + DotAll: true, + }, + rubyRules, +)) + +func rubyRules() Rules { + return Rules{ + "root": { + {`\A#!.+?$`, CommentHashbang, nil}, + {`#.*?$`, CommentSingle, nil}, + {`=begin\s.*?\n=end.*?$`, CommentMultiline, nil}, + {Words(``, `\b`, `BEGIN`, `END`, `alias`, `begin`, `break`, `case`, `defined?`, `do`, `else`, `elsif`, `end`, `ensure`, `for`, `if`, `in`, `next`, `redo`, `rescue`, `raise`, `retry`, `return`, `super`, `then`, `undef`, `unless`, `until`, `when`, `while`, `yield`), Keyword, nil}, + {`(module)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)`, ByGroups(Keyword, Text, NameNamespace), nil}, + {`(def)(\s+)`, ByGroups(Keyword, Text), Push("funcname")}, + {"def(?=[*%&^`~+-/\\[<>=])", Keyword, Push("funcname")}, + {`(class)(\s+)`, ByGroups(Keyword, Text), Push("classname")}, + {Words(``, `\b`, `initialize`, `new`, `loop`, `include`, `extend`, `raise`, `attr_reader`, `attr_writer`, `attr_accessor`, `attr`, `catch`, `throw`, `private`, `module_function`, `public`, `protected`, `true`, `false`, `nil`), KeywordPseudo, nil}, + {`(not|and|or)\b`, OperatorWord, nil}, + {Words(``, `\?`, `autoload`, `block_given`, `const_defined`, `eql`, `equal`, `frozen`, `include`, `instance_of`, `is_a`, `iterator`, `kind_of`, `method_defined`, `nil`, `private_method_defined`, `protected_method_defined`, `public_method_defined`, `respond_to`, `tainted`), NameBuiltin, nil}, + {`(chomp|chop|exit|gsub|sub)!`, NameBuiltin, nil}, + {Words(`(?~!:])|(?<=(?:\s|;)when\s)|(?<=(?:\s|;)or\s)|(?<=(?:\s|;)and\s)|(?<=\.index\s)|(?<=\.scan\s)|(?<=\.sub\s)|(?<=\.sub!\s)|(?<=\.gsub\s)|(?<=\.gsub!\s)|(?<=\.match\s)|(?<=(?:\s|;)if\s)|(?<=(?:\s|;)elsif\s)|(?<=^when\s)|(?<=^index\s)|(?<=^scan\s)|(?<=^sub\s)|(?<=^gsub\s)|(?<=^sub!\s)|(?<=^gsub!\s)|(?<=^match\s)|(?<=^if\s)|(?<=^elsif\s))(\s*)(/)`, ByGroups(Text, LiteralStringRegex), Push("multiline-regex")}, + {`(?<=\(|,|\[)/`, LiteralStringRegex, Push("multiline-regex")}, + {`(\s+)(/)(?![\s=])`, ByGroups(Text, LiteralStringRegex), Push("multiline-regex")}, + {`(0_?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?`, ByGroups(LiteralNumberOct, Text, Operator), nil}, + {`(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?`, ByGroups(LiteralNumberHex, Text, Operator), nil}, + 
{`(0b[01]+(?:_[01]+)*)(\s*)([/?])?`, ByGroups(LiteralNumberBin, Text, Operator), nil}, + {`([\d]+(?:[_e]\d+)*)(\s*)([/?])?`, ByGroups(LiteralNumberInteger, Text, Operator), nil}, + {`@@[a-zA-Z_]\w*`, NameVariableClass, nil}, + {`@[a-zA-Z_]\w*`, NameVariableInstance, nil}, + {`\$\w+`, NameVariableGlobal, nil}, + {"\\$[!@&`\\'+~=/\\\\,;.<>_*$?:\"^-]", NameVariableGlobal, nil}, + {`\$-[0adFiIlpvw]`, NameVariableGlobal, nil}, + {`::`, Operator, nil}, + Include("strings"), + {`\?(\\[MC]-)*(\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)(?!\w)`, LiteralStringChar, nil}, + {`[A-Z]\w+`, NameConstant, nil}, + {Words(`(\.|::)`, ``, `*`, `**`, `-`, `+`, `-@`, `+@`, `/`, `%`, `&`, `|`, `^`, "`", `~`, `[]`, `[]=`, `<<`, `>>`, `<`, `<>`, `<=>`, `>`, `>=`, `==`, `===`), ByGroups(Operator, NameOperator), nil}, + {"(\\.|::)([a-zA-Z_]\\w*[!?]?|[*%&^`~+\\-/\\[<>=])", ByGroups(Operator, Name), nil}, + {`[a-zA-Z_]\w*[!?]?`, Name, nil}, + {`(\[|\]|\*\*|<>?|>=|<=|<=>|=~|={3}|!~|&&?|\|\||\.{1,3})`, Operator, nil}, + {`[-+/*%=<>&!^|~]=?`, Operator, nil}, + {`[(){};,/?:\\]`, Punctuation, nil}, + {`\s+`, Text, nil}, + }, + "funcname": { + {`\(`, Punctuation, Push("defexpr")}, + {"(?:([a-zA-Z_]\\w*)(\\.))?([a-zA-Z_]\\w*[!?]?|\\*\\*?|[-+]@?|[/%&|^`~]|\\[\\]=?|<<|>>|<=?>|>=?|===?)", ByGroups(NameClass, Operator, NameFunction), Pop(1)}, + Default(Pop(1)), + }, + "classname": { + {`\(`, Punctuation, Push("defexpr")}, + {`<<`, Operator, Pop(1)}, + {`[A-Z_]\w*`, NameClass, Pop(1)}, + Default(Pop(1)), + }, + "defexpr": { + {`(\))(\.|::)?`, ByGroups(Punctuation, Operator), Pop(1)}, + {`\(`, Operator, Push()}, + Include("root"), + }, + "in-intp": { + {`\{`, LiteralStringInterpol, Push()}, + {`\}`, LiteralStringInterpol, Pop(1)}, + Include("root"), + }, + "string-intp": { + {`#\{`, LiteralStringInterpol, Push("in-intp")}, + {`#@@?[a-zA-Z_]\w*`, LiteralStringInterpol, nil}, + {`#\$[a-zA-Z_]\w*`, LiteralStringInterpol, nil}, + }, + "string-intp-escaped": { + Include("string-intp"), + {`\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})`, LiteralStringEscape, nil}, + }, + "interpolated-regex": { + Include("string-intp"), + {`[\\#]`, LiteralStringRegex, nil}, + {`[^\\#]+`, LiteralStringRegex, nil}, + }, + "interpolated-string": { + Include("string-intp"), + {`[\\#]`, LiteralStringOther, nil}, + {`[^\\#]+`, LiteralStringOther, nil}, + }, + "multiline-regex": { + Include("string-intp"), + {`\\\\`, LiteralStringRegex, nil}, + {`\\/`, LiteralStringRegex, nil}, + {`[\\#]`, LiteralStringRegex, nil}, + {`[^\\/#]+`, LiteralStringRegex, nil}, + {`/[mixounse]*`, LiteralStringRegex, Pop(1)}, + }, + "end-part": { + {`.+`, CommentPreproc, Pop(1)}, + }, + "strings": { + {`\:@{0,2}[a-zA-Z_]\w*[!?]?`, LiteralStringSymbol, nil}, + {Words(`\:@{0,2}`, ``, `*`, `**`, `-`, `+`, `-@`, `+@`, `/`, `%`, `&`, `|`, `^`, "`", `~`, `[]`, `[]=`, `<<`, `>>`, `<`, `<>`, `<=>`, `>`, `>=`, `==`, `===`), LiteralStringSymbol, nil}, + {`:'(\\\\|\\'|[^'])*'`, LiteralStringSymbol, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + {`:"`, LiteralStringSymbol, Push("simple-sym")}, + {`([a-zA-Z_]\w*)(:)(?!:)`, ByGroups(LiteralStringSymbol, Punctuation), nil}, + {`"`, LiteralStringDouble, Push("simple-string")}, + {"(?&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)`, ByGroups(Text, LiteralStringOther, None), nil}, + {`^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)`, ByGroups(Text, LiteralStringOther, None), nil}, + {`(%([^a-zA-Z0-9\s]))((?:\\\2|(?!\2).)*)(\2)`, String, nil}, + }, + "simple-string": { + Include("string-intp-escaped"), + {`[^\\"#]+`, 
LiteralStringDouble, nil}, + {`[\\#]`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + "simple-sym": { + Include("string-intp-escaped"), + {`[^\\"#]+`, LiteralStringSymbol, nil}, + {`[\\#]`, LiteralStringSymbol, nil}, + {`"`, LiteralStringSymbol, Pop(1)}, + }, + "simple-backtick": { + Include("string-intp-escaped"), + {"[^\\\\`#]+", LiteralStringBacktick, nil}, + {`[\\#]`, LiteralStringBacktick, nil}, + {"`", LiteralStringBacktick, Pop(1)}, + }, + "cb-intp-string": { + {`\\[\\{}]`, LiteralStringOther, nil}, + {`\{`, LiteralStringOther, Push()}, + {`\}`, LiteralStringOther, Pop(1)}, + Include("string-intp-escaped"), + {`[\\#{}]`, LiteralStringOther, nil}, + {`[^\\#{}]+`, LiteralStringOther, nil}, + }, + "cb-string": { + {`\\[\\{}]`, LiteralStringOther, nil}, + {`\{`, LiteralStringOther, Push()}, + {`\}`, LiteralStringOther, Pop(1)}, + {`[\\#{}]`, LiteralStringOther, nil}, + {`[^\\#{}]+`, LiteralStringOther, nil}, + }, + "cb-regex": { + {`\\[\\{}]`, LiteralStringRegex, nil}, + {`\{`, LiteralStringRegex, Push()}, + {`\}[mixounse]*`, LiteralStringRegex, Pop(1)}, + Include("string-intp"), + {`[\\#{}]`, LiteralStringRegex, nil}, + {`[^\\#{}]+`, LiteralStringRegex, nil}, + }, + "sb-intp-string": { + {`\\[\\\[\]]`, LiteralStringOther, nil}, + {`\[`, LiteralStringOther, Push()}, + {`\]`, LiteralStringOther, Pop(1)}, + Include("string-intp-escaped"), + {`[\\#\[\]]`, LiteralStringOther, nil}, + {`[^\\#\[\]]+`, LiteralStringOther, nil}, + }, + "sb-string": { + {`\\[\\\[\]]`, LiteralStringOther, nil}, + {`\[`, LiteralStringOther, Push()}, + {`\]`, LiteralStringOther, Pop(1)}, + {`[\\#\[\]]`, LiteralStringOther, nil}, + {`[^\\#\[\]]+`, LiteralStringOther, nil}, + }, + "sb-regex": { + {`\\[\\\[\]]`, LiteralStringRegex, nil}, + {`\[`, LiteralStringRegex, Push()}, + {`\][mixounse]*`, LiteralStringRegex, Pop(1)}, + Include("string-intp"), + {`[\\#\[\]]`, LiteralStringRegex, nil}, + {`[^\\#\[\]]+`, LiteralStringRegex, nil}, + }, + "pa-intp-string": { + {`\\[\\()]`, LiteralStringOther, nil}, + {`\(`, LiteralStringOther, Push()}, + {`\)`, LiteralStringOther, Pop(1)}, + Include("string-intp-escaped"), + {`[\\#()]`, LiteralStringOther, nil}, + {`[^\\#()]+`, LiteralStringOther, nil}, + }, + "pa-string": { + {`\\[\\()]`, LiteralStringOther, nil}, + {`\(`, LiteralStringOther, Push()}, + {`\)`, LiteralStringOther, Pop(1)}, + {`[\\#()]`, LiteralStringOther, nil}, + {`[^\\#()]+`, LiteralStringOther, nil}, + }, + "pa-regex": { + {`\\[\\()]`, LiteralStringRegex, nil}, + {`\(`, LiteralStringRegex, Push()}, + {`\)[mixounse]*`, LiteralStringRegex, Pop(1)}, + Include("string-intp"), + {`[\\#()]`, LiteralStringRegex, nil}, + {`[^\\#()]+`, LiteralStringRegex, nil}, + }, + "ab-intp-string": { + {`\\[\\<>]`, LiteralStringOther, nil}, + {`<`, LiteralStringOther, Push()}, + {`>`, LiteralStringOther, Pop(1)}, + Include("string-intp-escaped"), + {`[\\#<>]`, LiteralStringOther, nil}, + {`[^\\#<>]+`, LiteralStringOther, nil}, + }, + "ab-string": { + {`\\[\\<>]`, LiteralStringOther, nil}, + {`<`, LiteralStringOther, Push()}, + {`>`, LiteralStringOther, Pop(1)}, + {`[\\#<>]`, LiteralStringOther, nil}, + {`[^\\#<>]+`, LiteralStringOther, nil}, + }, + "ab-regex": { + {`\\[\\<>]`, LiteralStringRegex, nil}, + {`<`, LiteralStringRegex, Push()}, + {`>[mixounse]*`, LiteralStringRegex, Pop(1)}, + Include("string-intp"), + {`[\\#<>]`, LiteralStringRegex, nil}, + {`[^\\#<>]+`, LiteralStringRegex, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/r/rust.go 
b/vendor/github.com/alecthomas/chroma/lexers/r/rust.go new file mode 100644 index 000000000..5399c9d7b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/r/rust.go @@ -0,0 +1,133 @@ +package r + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Rust lexer. +var Rust = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Rust", + Aliases: []string{"rust", "rs"}, + Filenames: []string{"*.rs", "*.rs.in"}, + MimeTypes: []string{"text/rust", "text/x-rust"}, + EnsureNL: true, + }, + rustRules, +)) + +func rustRules() Rules { + return Rules{ + "root": { + {`#![^[\r\n].*$`, CommentPreproc, nil}, + Default(Push("base")), + }, + "base": { + {`\n`, TextWhitespace, nil}, + {`\s+`, TextWhitespace, nil}, + {`//!.*?\n`, LiteralStringDoc, nil}, + {`///(\n|[^/].*?\n)`, LiteralStringDoc, nil}, + {`//(.*?)\n`, CommentSingle, nil}, + {`/\*\*(\n|[^/*])`, LiteralStringDoc, Push("doccomment")}, + {`/\*!`, LiteralStringDoc, Push("doccomment")}, + {`/\*`, CommentMultiline, Push("comment")}, + {`r#*"(?:\\.|[^\\;])*"#*`, LiteralString, nil}, + {`"(?:\\.|[^\\"])*"`, LiteralString, nil}, + {`\$([a-zA-Z_]\w*|\(,?|\),?|,?)`, CommentPreproc, nil}, + {Words(``, `\b`, `as`, `async`, `await`, `box`, `const`, `crate`, `dyn`, `else`, `extern`, `for`, `if`, `impl`, `in`, `loop`, `match`, `move`, `mut`, `pub`, `ref`, `return`, `static`, `super`, `trait`, `unsafe`, `use`, `where`, `while`), Keyword, nil}, + {Words(``, `\b`, `abstract`, `become`, `do`, `final`, `macro`, `override`, `priv`, `typeof`, `try`, `unsized`, `virtual`, `yield`), KeywordReserved, nil}, + {`(true|false)\b`, KeywordConstant, nil}, + {`self\b`, NameBuiltinPseudo, nil}, + {`mod\b`, Keyword, Push("modname")}, + {`let\b`, KeywordDeclaration, nil}, + {`fn\b`, Keyword, Push("funcname")}, + {`(struct|enum|type|union)\b`, Keyword, Push("typename")}, + {`(default)(\s+)(type|fn)\b`, ByGroups(Keyword, Text, Keyword), nil}, + {Words(``, `\b`, `u8`, `u16`, `u32`, `u64`, `u128`, `i8`, `i16`, `i32`, `i64`, `i128`, `usize`, `isize`, `f32`, `f64`, `char`, `str`, `bool`), KeywordType, nil}, + {`[sS]elf\b`, NameBuiltinPseudo, nil}, + {Words(``, `\b`, `Copy`, `Send`, `Sized`, `Sync`, `Unpin`, `Drop`, `Fn`, `FnMut`, `FnOnce`, `drop`, `Box`, `ToOwned`, `Clone`, `PartialEq`, `PartialOrd`, `Eq`, `Ord`, `AsRef`, `AsMut`, `Into`, `From`, `Default`, `Iterator`, `Extend`, `IntoIterator`, `DoubleEndedIterator`, `ExactSizeIterator`, `Option`, `Some`, `None`, `Result`, `Ok`, `Err`, `String`, `ToString`, `Vec`), NameBuiltin, nil}, + {Words(``, `!`, `asm`, `assert`, `assert_eq`, `assert_ne`, `cfg`, `column`, `compile_error`, `concat`, `concat_idents`, `dbg`, `debug_assert`, `debug_assert_eq`, `debug_assert_ne`, `env`, `eprint`, `eprintln`, `file`, `format`, `format_args`, `format_args_nl`, `global_asm`, `include`, `include_bytes`, `include_str`, `is_aarch64_feature_detected`, `is_arm_feature_detected`, `is_mips64_feature_detected`, `is_mips_feature_detected`, `is_powerpc64_feature_detected`, `is_powerpc_feature_detected`, `is_x86_feature_detected`, `line`, `llvm_asm`, `log_syntax`, `macro_rules`, `matches`, `module_path`, `option_env`, `panic`, `print`, `println`, `stringify`, `thread_local`, `todo`, `trace_macros`, `unimplemented`, `unreachable`, `vec`, `write`, `writeln`), NameFunctionMagic, nil}, + {`::\b`, Text, nil}, + {`(?::|->)`, Text, Push("typename")}, + {`(break|continue)(\b\s*)(\'[A-Za-z_]\w*)?`, ByGroups(Keyword, TextWhitespace, NameLabel), nil}, + 
{`'(\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0|\\u\{[0-9a-fA-F]{1,6}\}|.)'`, LiteralStringChar, nil}, + {`b'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\0|\\u\{[0-9a-fA-F]{1,6}\}|.)'`, LiteralStringChar, nil}, + {`0b[01_]+`, LiteralNumberBin, Push("number_lit")}, + {`0o[0-7_]+`, LiteralNumberOct, Push("number_lit")}, + {`0[xX][0-9a-fA-F_]+`, LiteralNumberHex, Push("number_lit")}, + {`[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|\.[0-9_]*(?!\.)|[eE][+\-]?[0-9_]+)`, LiteralNumberFloat, Push("number_lit")}, + {`[0-9][0-9_]*`, LiteralNumberInteger, Push("number_lit")}, + {`b"`, LiteralString, Push("bytestring")}, + {`(?s)b?r(#*)".*?"\1`, LiteralString, nil}, + {`'`, Operator, Push("lifetime")}, + {`\.\.=?`, Operator, nil}, + {`[{}()\[\],.;]`, Punctuation, nil}, + {`[+\-*/%&|<>^!~@=:?]`, Operator, nil}, + {`(r#)?[a-zA-Z_]\w*`, Name, nil}, + {`r#[a-zA-Z_]\w*`, Name, nil}, + {`#!?\[`, CommentPreproc, Push("attribute[")}, + {`#`, Text, nil}, + }, + "comment": { + {`[^*/]+`, CommentMultiline, nil}, + {`/\*`, CommentMultiline, Push()}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[*/]`, CommentMultiline, nil}, + }, + "doccomment": { + {`[^*/]+`, LiteralStringDoc, nil}, + {`/\*`, LiteralStringDoc, Push()}, + {`\*/`, LiteralStringDoc, Pop(1)}, + {`[*/]`, LiteralStringDoc, nil}, + }, + "modname": { + {`\s+`, Text, nil}, + {`[a-zA-Z_]\w*`, NameNamespace, Pop(1)}, + Default(Pop(1)), + }, + "funcname": { + {`\s+`, Text, nil}, + {`[a-zA-Z_]\w*`, NameFunction, Pop(1)}, + Default(Pop(1)), + }, + "typename": { + {`\s+`, Text, nil}, + {`&`, KeywordPseudo, nil}, + {`'`, Operator, Push("lifetime")}, + {Words(``, `\b`, `Copy`, `Send`, `Sized`, `Sync`, `Unpin`, `Drop`, `Fn`, `FnMut`, `FnOnce`, `drop`, `Box`, `ToOwned`, `Clone`, `PartialEq`, `PartialOrd`, `Eq`, `Ord`, `AsRef`, `AsMut`, `Into`, `From`, `Default`, `Iterator`, `Extend`, `IntoIterator`, `DoubleEndedIterator`, `ExactSizeIterator`, `Option`, `Some`, `None`, `Result`, `Ok`, `Err`, `String`, `ToString`, `Vec`), NameBuiltin, nil}, + {Words(``, `\b`, `u8`, `u16`, `u32`, `u64`, `u128`, `i8`, `i16`, `i32`, `i64`, `i128`, `usize`, `isize`, `f32`, `f64`, `char`, `str`, `bool`), KeywordType, nil}, + {`[a-zA-Z_]\w*`, NameClass, Pop(1)}, + Default(Pop(1)), + }, + "lifetime": { + {`(static|_)`, NameBuiltin, nil}, + {`[a-zA-Z_]+\w*`, NameAttribute, nil}, + Default(Pop(1)), + }, + "number_lit": { + {`[ui](8|16|32|64|size)`, Keyword, Pop(1)}, + {`f(32|64)`, Keyword, Pop(1)}, + Default(Pop(1)), + }, + "string": { + {`"`, LiteralString, Pop(1)}, + {`\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0|\\u\{[0-9a-fA-F]{1,6}\}`, LiteralStringEscape, nil}, + {`[^\\"]+`, LiteralString, nil}, + {`\\`, LiteralString, nil}, + }, + "bytestring": { + {`\\x[89a-fA-F][0-9a-fA-F]`, LiteralStringEscape, nil}, + Include("string"), + }, + "attribute_common": { + {`"`, LiteralString, Push("string")}, + {`\[`, CommentPreproc, Push("attribute[")}, + }, + "attribute[": { + Include("attribute_common"), + {`\]`, CommentPreproc, Pop(1)}, + {`[^"\]\[]+`, CommentPreproc, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/sas.go b/vendor/github.com/alecthomas/chroma/lexers/s/sas.go new file mode 100644 index 000000000..016a91bbd --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/sas.go @@ -0,0 +1,98 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Sas lexer. 
+var Sas = internal.Register(MustNewLazyLexer( + &Config{ + Name: "SAS", + Aliases: []string{"sas"}, + Filenames: []string{"*.SAS", "*.sas"}, + MimeTypes: []string{"text/x-sas", "text/sas", "application/x-sas"}, + CaseInsensitive: true, + }, + sasRules, +)) + +func sasRules() Rules { + return Rules{ + "root": { + Include("comments"), + Include("proc-data"), + Include("cards-datalines"), + Include("logs"), + Include("general"), + {`.`, Text, nil}, + {`\\\n`, Text, nil}, + {`\n`, Text, nil}, + }, + "comments": { + {`^\s*\*.*?;`, Comment, nil}, + {`/\*.*?\*/`, Comment, nil}, + {`^\s*\*(.|\n)*?;`, CommentMultiline, nil}, + {`/[*](.|\n)*?[*]/`, CommentMultiline, nil}, + }, + "proc-data": { + {`(^|;)\s*(proc \w+|data|run|quit)[\s;]`, KeywordReserved, nil}, + }, + "cards-datalines": { + {`^\s*(datalines|cards)\s*;\s*$`, Keyword, Push("data")}, + }, + "data": { + {`(.|\n)*^\s*;\s*$`, Other, Pop(1)}, + }, + "logs": { + {`\n?^\s*%?put `, Keyword, Push("log-messages")}, + }, + "log-messages": { + {`NOTE(:|-).*`, Generic, Pop(1)}, + {`WARNING(:|-).*`, GenericEmph, Pop(1)}, + {`ERROR(:|-).*`, GenericError, Pop(1)}, + Include("general"), + }, + "general": { + Include("keywords"), + Include("vars-strings"), + Include("special"), + Include("numbers"), + }, + "keywords": { + {Words(`\b`, `\b`, `abort`, `array`, `attrib`, `by`, `call`, `cards`, `cards4`, `catname`, `continue`, `datalines`, `datalines4`, `delete`, `delim`, `delimiter`, `display`, `dm`, `drop`, `endsas`, `error`, `file`, `filename`, `footnote`, `format`, `goto`, `in`, `infile`, `informat`, `input`, `keep`, `label`, `leave`, `length`, `libname`, `link`, `list`, `lostcard`, `merge`, `missing`, `modify`, `options`, `output`, `out`, `page`, `put`, `redirect`, `remove`, `rename`, `replace`, `retain`, `return`, `select`, `set`, `skip`, `startsas`, `stop`, `title`, `update`, `waitsas`, `where`, `window`, `x`, `systask`), Keyword, nil}, + {Words(`\b`, `\b`, `add`, `and`, `alter`, `as`, `cascade`, `check`, `create`, `delete`, `describe`, `distinct`, `drop`, `foreign`, `from`, `group`, `having`, `index`, `insert`, `into`, `in`, `key`, `like`, `message`, `modify`, `msgtype`, `not`, `null`, `on`, `or`, `order`, `primary`, `references`, `reset`, `restrict`, `select`, `set`, `table`, `unique`, `update`, `validate`, `view`, `where`), Keyword, nil}, + {Words(`\b`, `\b`, `do`, `if`, `then`, `else`, `end`, `until`, `while`), Keyword, nil}, + {Words(`%`, `\b`, `bquote`, `nrbquote`, `cmpres`, `qcmpres`, `compstor`, `datatyp`, `display`, `do`, `else`, `end`, `eval`, `global`, `goto`, `if`, `index`, `input`, `keydef`, `label`, `left`, `length`, `let`, `local`, `lowcase`, `macro`, `mend`, `nrquote`, `nrstr`, `put`, `qleft`, `qlowcase`, `qscan`, `qsubstr`, `qsysfunc`, `qtrim`, `quote`, `qupcase`, `scan`, `str`, `substr`, `superq`, `syscall`, `sysevalf`, `sysexec`, `sysfunc`, `sysget`, `syslput`, `sysprod`, `sysrc`, `sysrput`, `then`, `to`, `trim`, `unquote`, `until`, `upcase`, `verify`, `while`, `window`), NameBuiltin, nil}, + {Words(`\b`, `\(`, `abs`, `addr`, `airy`, `arcos`, `arsin`, `atan`, `attrc`, `attrn`, `band`, `betainv`, `blshift`, `bnot`, `bor`, `brshift`, `bxor`, `byte`, `cdf`, `ceil`, `cexist`, `cinv`, `close`, `cnonct`, `collate`, `compbl`, `compound`, `compress`, `cos`, `cosh`, `css`, `curobs`, `cv`, `daccdb`, `daccdbsl`, `daccsl`, `daccsyd`, `dacctab`, `dairy`, `date`, `datejul`, `datepart`, `datetime`, `day`, `dclose`, `depdb`, `depdbsl`, `depsl`, `depsyd`, `deptab`, `dequote`, `dhms`, `dif`, `digamma`, `dim`, `dinfo`, `dnum`, `dopen`, `doptname`, 
`doptnum`, `dread`, `dropnote`, `dsname`, `erf`, `erfc`, `exist`, `exp`, `fappend`, `fclose`, `fcol`, `fdelete`, `fetch`, `fetchobs`, `fexist`, `fget`, `fileexist`, `filename`, `fileref`, `finfo`, `finv`, `fipname`, `fipnamel`, `fipstate`, `floor`, `fnonct`, `fnote`, `fopen`, `foptname`, `foptnum`, `fpoint`, `fpos`, `fput`, `fread`, `frewind`, `frlen`, `fsep`, `fuzz`, `fwrite`, `gaminv`, `gamma`, `getoption`, `getvarc`, `getvarn`, `hbound`, `hms`, `hosthelp`, `hour`, `ibessel`, `index`, `indexc`, `indexw`, `input`, `inputc`, `inputn`, `int`, `intck`, `intnx`, `intrr`, `irr`, `jbessel`, `juldate`, `kurtosis`, `lag`, `lbound`, `left`, `length`, `lgamma`, `libname`, `libref`, `log`, `log10`, `log2`, `logpdf`, `logpmf`, `logsdf`, `lowcase`, `max`, `mdy`, `mean`, `min`, `minute`, `mod`, `month`, `mopen`, `mort`, `n`, `netpv`, `nmiss`, `normal`, `note`, `npv`, `open`, `ordinal`, `pathname`, `pdf`, `peek`, `peekc`, `pmf`, `point`, `poisson`, `poke`, `probbeta`, `probbnml`, `probchi`, `probf`, `probgam`, `probhypr`, `probit`, `probnegb`, `probnorm`, `probt`, `put`, `putc`, `putn`, `qtr`, `quote`, `ranbin`, `rancau`, `ranexp`, `rangam`, `range`, `rank`, `rannor`, `ranpoi`, `rantbl`, `rantri`, `ranuni`, `repeat`, `resolve`, `reverse`, `rewind`, `right`, `round`, `saving`, `scan`, `sdf`, `second`, `sign`, `sin`, `sinh`, `skewness`, `soundex`, `spedis`, `sqrt`, `std`, `stderr`, `stfips`, `stname`, `stnamel`, `substr`, `sum`, `symget`, `sysget`, `sysmsg`, `sysprod`, `sysrc`, `system`, `tan`, `tanh`, `time`, `timepart`, `tinv`, `tnonct`, `today`, `translate`, `tranwrd`, `trigamma`, `trim`, `trimn`, `trunc`, `uniform`, `upcase`, `uss`, `var`, `varfmt`, `varinfmt`, `varlabel`, `varlen`, `varname`, `varnum`, `varray`, `varrayx`, `vartype`, `verify`, `vformat`, `vformatd`, `vformatdx`, `vformatn`, `vformatnx`, `vformatw`, `vformatwx`, `vformatx`, `vinarray`, `vinarrayx`, `vinformat`, `vinformatd`, `vinformatdx`, `vinformatn`, `vinformatnx`, `vinformatw`, `vinformatwx`, `vinformatx`, `vlabel`, `vlabelx`, `vlength`, `vlengthx`, `vname`, `vnamex`, `vtype`, `vtypex`, `weekday`, `year`, `yyq`, `zipfips`, `zipname`, `zipnamel`, `zipstate`), NameBuiltin, nil}, + }, + "vars-strings": { + {`&[a-z_]\w{0,31}\.?`, NameVariable, nil}, + {`%[a-z_]\w{0,31}`, NameFunction, nil}, + {`\'`, LiteralString, Push("string_squote")}, + {`"`, LiteralString, Push("string_dquote")}, + }, + "string_squote": { + {`'`, LiteralString, Pop(1)}, + {`\\\\|\\"|\\\n`, LiteralStringEscape, nil}, + {`[^$\'\\]+`, LiteralString, nil}, + {`[$\'\\]`, LiteralString, nil}, + }, + "string_dquote": { + {`"`, LiteralString, Pop(1)}, + {`\\\\|\\"|\\\n`, LiteralStringEscape, nil}, + {`&`, NameVariable, Push("validvar")}, + {`[^$&"\\]+`, LiteralString, nil}, + {`[$"\\]`, LiteralString, nil}, + }, + "validvar": { + {`[a-z_]\w{0,31}\.?`, NameVariable, Pop(1)}, + }, + "numbers": { + {`\b[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)(E[+-]?[0-9]+)?i?\b`, LiteralNumber, nil}, + }, + "special": { + {`(null|missing|_all_|_automatic_|_character_|_n_|_infile_|_name_|_null_|_numeric_|_user_|_webout_)`, KeywordConstant, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/sass.go b/vendor/github.com/alecthomas/chroma/lexers/s/sass.go new file mode 100644 index 000000000..1b7337798 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/sass.go @@ -0,0 +1,148 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Sass lexer. 
+var Sass = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Sass", + Aliases: []string{"sass"}, + Filenames: []string{"*.sass"}, + MimeTypes: []string{"text/x-sass"}, + CaseInsensitive: true, + }, + sassRules, +)) + +func sassRules() Rules { + return Rules{ + // "root": { + // }, + "root": { + {`[ \t]*\n`, Text, nil}, + // { `[ \t]*`, ?? ??, nil }, + // { `//[^\n]*`, ?? .callback at 0x106936048> ??, Push("root") }, + // { `/\*[^\n]*`, ?? .callback at 0x1069360d0> ??, Push("root") }, + {`@import`, Keyword, Push("import")}, + {`@for`, Keyword, Push("for")}, + {`@(debug|warn|if|while)`, Keyword, Push("value")}, + {`(@mixin)( [\w-]+)`, ByGroups(Keyword, NameFunction), Push("value")}, + {`(@include)( [\w-]+)`, ByGroups(Keyword, NameDecorator), Push("value")}, + {`@extend`, Keyword, Push("selector")}, + {`@[\w-]+`, Keyword, Push("selector")}, + {`=[\w-]+`, NameFunction, Push("value")}, + {`\+[\w-]+`, NameDecorator, Push("value")}, + {`([!$][\w-]\w*)([ \t]*(?:(?:\|\|)?=|:))`, ByGroups(NameVariable, Operator), Push("value")}, + {`:`, NameAttribute, Push("old-style-attr")}, + {`(?=.+?[=:]([^a-z]|$))`, NameAttribute, Push("new-style-attr")}, + Default(Push("selector")), + }, + "single-comment": { + {`.+`, CommentSingle, nil}, + {`\n`, Text, Push("root")}, + }, + "multi-comment": { + {`.+`, CommentMultiline, nil}, + {`\n`, Text, Push("root")}, + }, + "import": { + {`[ \t]+`, Text, nil}, + {`\S+`, LiteralString, nil}, + {`\n`, Text, Push("root")}, + }, + "old-style-attr": { + {`[^\s:="\[]+`, NameAttribute, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`[ \t]*=`, Operator, Push("value")}, + Default(Push("value")), + }, + "new-style-attr": { + {`[^\s:="\[]+`, NameAttribute, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`[ \t]*[=:]`, Operator, Push("value")}, + }, + "inline-comment": { + {`(\\#|#(?=[^\n{])|\*(?=[^\n/])|[^\n#*])+`, CommentMultiline, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`\*/`, Comment, Pop(1)}, + }, + "value": { + {`[ \t]+`, Text, nil}, + {`[!$][\w-]+`, NameVariable, nil}, + {`url\(`, LiteralStringOther, Push("string-url")}, + {`[a-z_-][\w-]*(?=\()`, NameFunction, nil}, + {Words(``, `\b`, `align-content`, `align-items`, `align-self`, `alignment-baseline`, `all`, `animation`, `animation-delay`, `animation-direction`, `animation-duration`, `animation-fill-mode`, `animation-iteration-count`, `animation-name`, `animation-play-state`, `animation-timing-function`, `appearance`, `azimuth`, `backface-visibility`, `background`, `background-attachment`, `background-blend-mode`, `background-clip`, `background-color`, `background-image`, `background-origin`, `background-position`, `background-repeat`, `background-size`, `baseline-shift`, `bookmark-label`, `bookmark-level`, `bookmark-state`, `border`, `border-bottom`, `border-bottom-color`, `border-bottom-left-radius`, `border-bottom-right-radius`, `border-bottom-style`, `border-bottom-width`, `border-boundary`, `border-collapse`, `border-color`, `border-image`, `border-image-outset`, `border-image-repeat`, `border-image-slice`, `border-image-source`, `border-image-width`, `border-left`, `border-left-color`, `border-left-style`, `border-left-width`, `border-radius`, `border-right`, `border-right-color`, `border-right-style`, `border-right-width`, `border-spacing`, `border-style`, `border-top`, `border-top-color`, `border-top-left-radius`, `border-top-right-radius`, `border-top-style`, `border-top-width`, `border-width`, `bottom`, `box-decoration-break`, `box-shadow`, 
`box-sizing`, `box-snap`, `box-suppress`, `break-after`, `break-before`, `break-inside`, `caption-side`, `caret`, `caret-animation`, `caret-color`, `caret-shape`, `chains`, `clear`, `clip`, `clip-path`, `clip-rule`, `color`, `color-interpolation-filters`, `column-count`, `column-fill`, `column-gap`, `column-rule`, `column-rule-color`, `column-rule-style`, `column-rule-width`, `column-span`, `column-width`, `columns`, `content`, `counter-increment`, `counter-reset`, `counter-set`, `crop`, `cue`, `cue-after`, `cue-before`, `cursor`, `direction`, `display`, `dominant-baseline`, `elevation`, `empty-cells`, `filter`, `flex`, `flex-basis`, `flex-direction`, `flex-flow`, `flex-grow`, `flex-shrink`, `flex-wrap`, `float`, `float-defer`, `float-offset`, `float-reference`, `flood-color`, `flood-opacity`, `flow`, `flow-from`, `flow-into`, `font`, `font-family`, `font-feature-settings`, `font-kerning`, `font-language-override`, `font-size`, `font-size-adjust`, `font-stretch`, `font-style`, `font-synthesis`, `font-variant`, `font-variant-alternates`, `font-variant-caps`, `font-variant-east-asian`, `font-variant-ligatures`, `font-variant-numeric`, `font-variant-position`, `font-weight`, `footnote-display`, `footnote-policy`, `glyph-orientation-vertical`, `grid`, `grid-area`, `grid-auto-columns`, `grid-auto-flow`, `grid-auto-rows`, `grid-column`, `grid-column-end`, `grid-column-gap`, `grid-column-start`, `grid-gap`, `grid-row`, `grid-row-end`, `grid-row-gap`, `grid-row-start`, `grid-template`, `grid-template-areas`, `grid-template-columns`, `grid-template-rows`, `hanging-punctuation`, `height`, `hyphenate-character`, `hyphenate-limit-chars`, `hyphenate-limit-last`, `hyphenate-limit-lines`, `hyphenate-limit-zone`, `hyphens`, `image-orientation`, `image-resolution`, `initial-letter`, `initial-letter-align`, `initial-letter-wrap`, `isolation`, `justify-content`, `justify-items`, `justify-self`, `left`, `letter-spacing`, `lighting-color`, `line-break`, `line-grid`, `line-height`, `line-snap`, `list-style`, `list-style-image`, `list-style-position`, `list-style-type`, `margin`, `margin-bottom`, `margin-left`, `margin-right`, `margin-top`, `marker-side`, `marquee-direction`, `marquee-loop`, `marquee-speed`, `marquee-style`, `mask`, `mask-border`, `mask-border-mode`, `mask-border-outset`, `mask-border-repeat`, `mask-border-slice`, `mask-border-source`, `mask-border-width`, `mask-clip`, `mask-composite`, `mask-image`, `mask-mode`, `mask-origin`, `mask-position`, `mask-repeat`, `mask-size`, `mask-type`, `max-height`, `max-lines`, `max-width`, `min-height`, `min-width`, `mix-blend-mode`, `motion`, `motion-offset`, `motion-path`, `motion-rotation`, `move-to`, `nav-down`, `nav-left`, `nav-right`, `nav-up`, `object-fit`, `object-position`, `offset-after`, `offset-before`, `offset-end`, `offset-start`, `opacity`, `order`, `orphans`, `outline`, `outline-color`, `outline-offset`, `outline-style`, `outline-width`, `overflow`, `overflow-style`, `overflow-wrap`, `overflow-x`, `overflow-y`, `padding`, `padding-bottom`, `padding-left`, `padding-right`, `padding-top`, `page`, `page-break-after`, `page-break-before`, `page-break-inside`, `page-policy`, `pause`, `pause-after`, `pause-before`, `perspective`, `perspective-origin`, `pitch`, `pitch-range`, `play-during`, `polar-angle`, `polar-distance`, `position`, `presentation-level`, `quotes`, `region-fragment`, `resize`, `rest`, `rest-after`, `rest-before`, `richness`, `right`, `rotation`, `rotation-point`, `ruby-align`, `ruby-merge`, `ruby-position`, `running`, 
`scroll-snap-coordinate`, `scroll-snap-destination`, `scroll-snap-points-x`, `scroll-snap-points-y`, `scroll-snap-type`, `shape-image-threshold`, `shape-inside`, `shape-margin`, `shape-outside`, `size`, `speak`, `speak-as`, `speak-header`, `speak-numeral`, `speak-punctuation`, `speech-rate`, `stress`, `string-set`, `tab-size`, `table-layout`, `text-align`, `text-align-last`, `text-combine-upright`, `text-decoration`, `text-decoration-color`, `text-decoration-line`, `text-decoration-skip`, `text-decoration-style`, `text-emphasis`, `text-emphasis-color`, `text-emphasis-position`, `text-emphasis-style`, `text-indent`, `text-justify`, `text-orientation`, `text-overflow`, `text-shadow`, `text-space-collapse`, `text-space-trim`, `text-spacing`, `text-transform`, `text-underline-position`, `text-wrap`, `top`, `transform`, `transform-origin`, `transform-style`, `transition`, `transition-delay`, `transition-duration`, `transition-property`, `transition-timing-function`, `unicode-bidi`, `user-select`, `vertical-align`, `visibility`, `voice-balance`, `voice-duration`, `voice-family`, `voice-pitch`, `voice-range`, `voice-rate`, `voice-stress`, `voice-volume`, `volume`, `white-space`, `widows`, `width`, `will-change`, `word-break`, `word-spacing`, `word-wrap`, `wrap-after`, `wrap-before`, `wrap-flow`, `wrap-inside`, `wrap-through`, `writing-mode`, `z-index`, `above`, `absolute`, `always`, `armenian`, `aural`, `auto`, `avoid`, `baseline`, `behind`, `below`, `bidi-override`, `blink`, `block`, `bold`, `bolder`, `both`, `capitalize`, `center-left`, `center-right`, `center`, `circle`, `cjk-ideographic`, `close-quote`, `collapse`, `condensed`, `continuous`, `crop`, `crosshair`, `cross`, `cursive`, `dashed`, `decimal-leading-zero`, `decimal`, `default`, `digits`, `disc`, `dotted`, `double`, `e-resize`, `embed`, `extra-condensed`, `extra-expanded`, `expanded`, `fantasy`, `far-left`, `far-right`, `faster`, `fast`, `fixed`, `georgian`, `groove`, `hebrew`, `help`, `hidden`, `hide`, `higher`, `high`, `hiragana-iroha`, `hiragana`, `icon`, `inherit`, `inline-table`, `inline`, `inset`, `inside`, `invert`, `italic`, `justify`, `katakana-iroha`, `katakana`, `landscape`, `larger`, `large`, `left-side`, `leftwards`, `level`, `lighter`, `line-through`, `list-item`, `loud`, `lower-alpha`, `lower-greek`, `lower-roman`, `lowercase`, `ltr`, `lower`, `low`, `medium`, `message-box`, `middle`, `mix`, `monospace`, `n-resize`, `narrower`, `ne-resize`, `no-close-quote`, `no-open-quote`, `no-repeat`, `none`, `normal`, `nowrap`, `nw-resize`, `oblique`, `once`, `open-quote`, `outset`, `outside`, `overline`, `pointer`, `portrait`, `px`, `relative`, `repeat-x`, `repeat-y`, `repeat`, `rgb`, `ridge`, `right-side`, `rightwards`, `s-resize`, `sans-serif`, `scroll`, `se-resize`, `semi-condensed`, `semi-expanded`, `separate`, `serif`, `show`, `silent`, `slow`, `slower`, `small-caps`, `small-caption`, `smaller`, `soft`, `solid`, `spell-out`, `square`, `static`, `status-bar`, `super`, `sw-resize`, `table-caption`, `table-cell`, `table-column`, `table-column-group`, `table-footer-group`, `table-header-group`, `table-row`, `table-row-group`, `text`, `text-bottom`, `text-top`, `thick`, `thin`, `transparent`, `ultra-condensed`, `ultra-expanded`, `underline`, `upper-alpha`, `upper-latin`, `upper-roman`, `uppercase`, `url`, `visible`, `w-resize`, `wait`, `wider`, `x-fast`, `x-high`, `x-large`, `x-loud`, `x-low`, `x-small`, `x-soft`, `xx-large`, `xx-small`, `yes`), NameConstant, nil}, + {Words(``, `\b`, `aliceblue`, `antiquewhite`, `aqua`, 
`aquamarine`, `azure`, `beige`, `bisque`, `black`, `blanchedalmond`, `blue`, `blueviolet`, `brown`, `burlywood`, `cadetblue`, `chartreuse`, `chocolate`, `coral`, `cornflowerblue`, `cornsilk`, `crimson`, `cyan`, `darkblue`, `darkcyan`, `darkgoldenrod`, `darkgray`, `darkgreen`, `darkgrey`, `darkkhaki`, `darkmagenta`, `darkolivegreen`, `darkorange`, `darkorchid`, `darkred`, `darksalmon`, `darkseagreen`, `darkslateblue`, `darkslategray`, `darkslategrey`, `darkturquoise`, `darkviolet`, `deeppink`, `deepskyblue`, `dimgray`, `dimgrey`, `dodgerblue`, `firebrick`, `floralwhite`, `forestgreen`, `fuchsia`, `gainsboro`, `ghostwhite`, `gold`, `goldenrod`, `gray`, `green`, `greenyellow`, `grey`, `honeydew`, `hotpink`, `indianred`, `indigo`, `ivory`, `khaki`, `lavender`, `lavenderblush`, `lawngreen`, `lemonchiffon`, `lightblue`, `lightcoral`, `lightcyan`, `lightgoldenrodyellow`, `lightgray`, `lightgreen`, `lightgrey`, `lightpink`, `lightsalmon`, `lightseagreen`, `lightskyblue`, `lightslategray`, `lightslategrey`, `lightsteelblue`, `lightyellow`, `lime`, `limegreen`, `linen`, `magenta`, `maroon`, `mediumaquamarine`, `mediumblue`, `mediumorchid`, `mediumpurple`, `mediumseagreen`, `mediumslateblue`, `mediumspringgreen`, `mediumturquoise`, `mediumvioletred`, `midnightblue`, `mintcream`, `mistyrose`, `moccasin`, `navajowhite`, `navy`, `oldlace`, `olive`, `olivedrab`, `orange`, `orangered`, `orchid`, `palegoldenrod`, `palegreen`, `paleturquoise`, `palevioletred`, `papayawhip`, `peachpuff`, `peru`, `pink`, `plum`, `powderblue`, `purple`, `rebeccapurple`, `red`, `rosybrown`, `royalblue`, `saddlebrown`, `salmon`, `sandybrown`, `seagreen`, `seashell`, `sienna`, `silver`, `skyblue`, `slateblue`, `slategray`, `slategrey`, `snow`, `springgreen`, `steelblue`, `tan`, `teal`, `thistle`, `tomato`, `turquoise`, `violet`, `wheat`, `white`, `whitesmoke`, `yellow`, `yellowgreen`, `transparent`), NameEntity, nil}, + {Words(``, `\b`, `black`, `silver`, `gray`, `white`, `maroon`, `red`, `purple`, `fuchsia`, `green`, `lime`, `olive`, `yellow`, `navy`, `blue`, `teal`, `aqua`), NameBuiltin, nil}, + {`\!(important|default)`, NameException, nil}, + {`(true|false)`, NamePseudo, nil}, + {`(and|or|not)`, OperatorWord, nil}, + {`/\*`, CommentMultiline, Push("inline-comment")}, + {`//[^\n]*`, CommentSingle, nil}, + {`\#[a-z0-9]{1,6}`, LiteralNumberHex, nil}, + {`(-?\d+)(\%|[a-z]+)?`, ByGroups(LiteralNumberInteger, KeywordType), nil}, + {`(-?\d*\.\d+)(\%|[a-z]+)?`, ByGroups(LiteralNumberFloat, KeywordType), nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`[~^*!&%<>|+=@:,./?-]+`, Operator, nil}, + {`[\[\]()]+`, Punctuation, nil}, + {`"`, LiteralStringDouble, Push("string-double")}, + {`'`, LiteralStringSingle, Push("string-single")}, + {`[a-z_-][\w-]*`, Name, nil}, + {`\n`, Text, Push("root")}, + }, + "interpolation": { + {`\}`, LiteralStringInterpol, Pop(1)}, + Include("value"), + }, + "selector": { + {`[ \t]+`, Text, nil}, + {`\:`, NameDecorator, Push("pseudo-class")}, + {`\.`, NameClass, Push("class")}, + {`\#`, NameNamespace, Push("id")}, + {`[\w-]+`, NameTag, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`&`, Keyword, nil}, + {`[~^*!&\[\]()<>|+=@:;,./?-]`, Operator, nil}, + {`"`, LiteralStringDouble, Push("string-double")}, + {`'`, LiteralStringSingle, Push("string-single")}, + {`\n`, Text, Push("root")}, + }, + "string-double": { + {`(\\.|#(?=[^\n{])|[^\n"#])+`, LiteralStringDouble, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + 
"string-single": { + {`(\\.|#(?=[^\n{])|[^\n'#])+`, LiteralStringSingle, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`'`, LiteralStringSingle, Pop(1)}, + }, + "string-url": { + {`(\\#|#(?=[^\n{])|[^\n#)])+`, LiteralStringOther, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`\)`, LiteralStringOther, Pop(1)}, + }, + "pseudo-class": { + {`[\w-]+`, NameDecorator, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + Default(Pop(1)), + }, + "class": { + {`[\w-]+`, NameClass, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + Default(Pop(1)), + }, + "id": { + {`[\w-]+`, NameNamespace, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + Default(Pop(1)), + }, + "for": { + {`(from|to|through)`, OperatorWord, nil}, + Include("value"), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/scala.go b/vendor/github.com/alecthomas/chroma/lexers/s/scala.go new file mode 100644 index 000000000..6798c0c6b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/scala.go @@ -0,0 +1,116 @@ +package s + +import ( + "fmt" + + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Scala lexer. +var Scala = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Scala", + Aliases: []string{"scala"}, + Filenames: []string{"*.scala"}, + MimeTypes: []string{"text/x-scala"}, + DotAll: true, + }, + scalaRules, +)) + +func scalaRules() Rules { + var ( + scalaOp = "[-~\\^\\*!%&\\\\<>\\|+=:/?@\xa6-\xa7\xa9\xac\xae\xb0-\xb1\xb6\xd7\xf7\u03f6\u0482\u0606-\u0608\u060e-\u060f\u06e9\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0cf1-\u0cf2\u0d79\u0f01-\u0f03\u0f13-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38\u0fbe-\u0fc5\u0fc7-\u0fcf\u109e-\u109f\u1360\u1390-\u1399\u1940\u19e0-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2044\u2052\u207a-\u207c\u208a-\u208c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2118\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u2140-\u2144\u214a-\u214d\u214f\u2190-\u2328\u232b-\u244a\u249c-\u24e9\u2500-\u2767\u2794-\u27c4\u27c7-\u27e5\u27f0-\u2982\u2999-\u29d7\u29dc-\u29fb\u29fe-\u2b54\u2ce5-\u2cea\u2e80-\u2ffb\u3004\u3012-\u3013\u3020\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3\u3200-\u321e\u322a-\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u33ff\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ufb29\ufdfd\ufe62\ufe64-\ufe66\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe4\uffe8-\uffee\ufffc-\ufffd]+" + scalaUpper = `[\\$_\p{Lu}]` + scalaLetter = `[\\$_\p{L}]` + scalaIDRest = fmt.Sprintf(`%s(?:%s|[0-9])*(?:(?<=_)%s)?`, scalaLetter, scalaLetter, scalaOp) + ) + + return Rules{ + "root": { + {`(class|trait|object)(\s+)`, ByGroups(Keyword, Text), Push("class")}, + {`[^\S\n]+`, Text, nil}, + {`//.*?\n`, CommentSingle, nil}, + {`/\*`, CommentMultiline, Push("comment")}, + {`@` + scalaIDRest, NameDecorator, nil}, + {`(abstract|ca(?:se|tch)|d(?:ef|o)|e(?:lse|xtends)|f(?:inal(?:ly)?|or(?:Some)?)|i(?:f|mplicit)|lazy|match|new|override|pr(?:ivate|otected)|re(?:quires|turn)|s(?:ealed|uper)|t(?:h(?:is|row)|ry)|va[lr]|w(?:hile|ith)|yield)\b|(<[%:-]|=>|>:|[#=@_⇒←])(\b|(?=\s)|$)`, Keyword, nil}, + {`:(?!` + scalaOp + `%s)`, Keyword, Push("type")}, + {fmt.Sprintf("%s%s\\b", scalaUpper, scalaIDRest), NameClass, nil}, + {`(true|false|null)\b`, KeywordConstant, nil}, + {`(import|package)(\s+)`, ByGroups(Keyword, Text), Push("import")}, + {`(type)(\s+)`, ByGroups(Keyword, Text), Push("type")}, + {`""".*?"""(?!")`, LiteralString, nil}, + {`"(\\\\|\\"|[^"])*"`, 
LiteralString, nil}, + {`'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'`, LiteralStringChar, nil}, + {"'" + scalaIDRest, TextSymbol, nil}, + {`[fs]"""`, LiteralString, Push("interptriplestring")}, + {`[fs]"`, LiteralString, Push("interpstring")}, + {`raw"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {scalaIDRest, Name, nil}, + {"`[^`]+`", Name, nil}, + {`\[`, Operator, Push("typeparam")}, + {`[(){};,.#]`, Operator, nil}, + {scalaOp, Operator, nil}, + {`([0-9][0-9]*\.[0-9]*|\.[0-9]+)([eE][+-]?[0-9]+)?[fFdD]?`, LiteralNumberFloat, nil}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`[0-9]+L?`, LiteralNumberInteger, nil}, + {`\n`, Text, nil}, + }, + "class": { + {fmt.Sprintf("(%s|%s|`[^`]+`)(\\s*)(\\[)", scalaIDRest, scalaOp), ByGroups(NameClass, Text, Operator), Push("typeparam")}, + {`\s+`, Text, nil}, + {`\{`, Operator, Pop(1)}, + {`\(`, Operator, Pop(1)}, + {`//.*?\n`, CommentSingle, Pop(1)}, + {fmt.Sprintf("%s|%s|`[^`]+`", scalaIDRest, scalaOp), NameClass, Pop(1)}, + }, + "type": { + {`\s+`, Text, nil}, + {`<[%:]|>:|[#_]|forSome|type`, Keyword, nil}, + {`([,);}]|=>|=|⇒)(\s*)`, ByGroups(Operator, Text), Pop(1)}, + {`[({]`, Operator, Push()}, + {fmt.Sprintf("((?:%s|%s|`[^`]+`)(?:\\.(?:%s|%s|`[^`]+`))*)(\\s*)(\\[)", scalaIDRest, scalaOp, scalaIDRest, scalaOp), ByGroups(KeywordType, Text, Operator), Push("#pop", "typeparam")}, + {fmt.Sprintf("((?:%s|%s|`[^`]+`)(?:\\.(?:%s|%s|`[^`]+`))*)(\\s*)$", scalaIDRest, scalaOp, scalaIDRest, scalaOp), ByGroups(KeywordType, Text), Pop(1)}, + {`//.*?\n`, CommentSingle, Pop(1)}, + {fmt.Sprintf("\\.|%s|%s|`[^`]+`", scalaIDRest, scalaOp), KeywordType, nil}, + }, + "typeparam": { + {`[\s,]+`, Text, nil}, + {`<[%:]|=>|>:|[#_⇒]|forSome|type`, Keyword, nil}, + {`([\])}])`, Operator, Pop(1)}, + {`[(\[{]`, Operator, Push()}, + {fmt.Sprintf("\\.|%s|%s|`[^`]+`", scalaIDRest, scalaOp), KeywordType, nil}, + }, + "comment": { + {`[^/*]+`, CommentMultiline, nil}, + {`/\*`, CommentMultiline, Push()}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[*/]`, CommentMultiline, nil}, + }, + "import": { + {fmt.Sprintf("(%s|\\.)+", scalaIDRest), NameNamespace, Pop(1)}, + }, + "interpstringcommon": { + {`[^"$\\]+`, LiteralString, nil}, + {`\$\$`, LiteralString, nil}, + {`\$` + scalaLetter + `(?:` + scalaLetter + `|\d)*`, LiteralStringInterpol, nil}, + {`\$\{`, LiteralStringInterpol, Push("interpbrace")}, + {`\\.`, LiteralString, nil}, + }, + "interptriplestring": { + {`"""(?!")`, LiteralString, Pop(1)}, + {`"`, LiteralString, nil}, + Include("interpstringcommon"), + }, + "interpstring": { + {`"`, LiteralString, Pop(1)}, + Include("interpstringcommon"), + }, + "interpbrace": { + {`\}`, LiteralStringInterpol, Pop(1)}, + {`\{`, LiteralStringInterpol, Push()}, + Include("root"), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/scheme.go b/vendor/github.com/alecthomas/chroma/lexers/s/scheme.go new file mode 100644 index 000000000..72a99f60b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/scheme.go @@ -0,0 +1,57 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// nolint + +// Scheme lexer. 
+var SchemeLang = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Scheme", + Aliases: []string{"scheme", "scm"}, + Filenames: []string{"*.scm", "*.ss"}, + MimeTypes: []string{"text/x-scheme", "application/x-scheme"}, + }, + schemeLangRules, +)) + +func schemeLangRules() Rules { + return Rules{ + "root": { + {`;.*$`, CommentSingle, nil}, + {`#\|`, CommentMultiline, Push("multiline-comment")}, + {`#;\s*\(`, Comment, Push("commented-form")}, + {`#!r6rs`, Comment, nil}, + {`\s+`, Text, nil}, + {`-?\d+\.\d+`, LiteralNumberFloat, nil}, + {`-?\d+`, LiteralNumberInteger, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`'[\w!$%&*+,/:<=>?@^~|-]+`, LiteralStringSymbol, nil}, + {`#\\(alarm|backspace|delete|esc|linefeed|newline|page|return|space|tab|vtab|x[0-9a-zA-Z]{1,5}|.)`, LiteralStringChar, nil}, + {`(#t|#f)`, NameConstant, nil}, + {"('|#|`|,@|,|\\.)", Operator, nil}, + {`(lambda |define |if |else |cond |and |or |case |let |let\* |letrec |begin |do |delay |set\! |\=\> |quote |quasiquote |unquote |unquote\-splicing |define\-syntax |let\-syntax |letrec\-syntax |syntax\-rules )`, Keyword, nil}, + {`(?<='\()[\w!$%&*+,/:<=>?@^~|-]+`, NameVariable, nil}, + {`(?<=#\()[\w!$%&*+,/:<=>?@^~|-]+`, NameVariable, nil}, + {`(?<=\()(\* |\+ |\- |\/ |\< |\<\= |\= |\> |\>\= |abs |acos |angle |append |apply |asin |assoc |assq |assv |atan |boolean\? |caaaar |caaadr |caaar |caadar |caaddr |caadr |caar |cadaar |cadadr |cadar |caddar |cadddr |caddr |cadr |call\-with\-current\-continuation |call\-with\-input\-file |call\-with\-output\-file |call\-with\-values |call\/cc |car |cdaaar |cdaadr |cdaar |cdadar |cdaddr |cdadr |cdar |cddaar |cddadr |cddar |cdddar |cddddr |cdddr |cddr |cdr |ceiling |char\-\>integer |char\-alphabetic\? |char\-ci\<\=\? |char\-ci\<\? |char\-ci\=\? |char\-ci\>\=\? |char\-ci\>\? |char\-downcase |char\-lower\-case\? |char\-numeric\? |char\-ready\? |char\-upcase |char\-upper\-case\? |char\-whitespace\? |char\<\=\? |char\<\? |char\=\? |char\>\=\? |char\>\? |char\? |close\-input\-port |close\-output\-port |complex\? |cons |cos |current\-input\-port |current\-output\-port |denominator |display |dynamic\-wind |eof\-object\? |eq\? |equal\? |eqv\? |eval |even\? |exact\-\>inexact |exact\? |exp |expt |floor |for\-each |force |gcd |imag\-part |inexact\-\>exact |inexact\? |input\-port\? |integer\-\>char |integer\? |interaction\-environment |lcm |length |list |list\-\>string |list\-\>vector |list\-ref |list\-tail |list\? |load |log |magnitude |make\-polar |make\-rectangular |make\-string |make\-vector |map |max |member |memq |memv |min |modulo |negative\? |newline |not |null\-environment |null\? |number\-\>string |number\? |numerator |odd\? |open\-input\-file |open\-output\-file |output\-port\? |pair\? |peek\-char |port\? |positive\? |procedure\? |quotient |rational\? |rationalize |read |read\-char |real\-part |real\? |remainder |reverse |round |scheme\-report\-environment |set\-car\! |set\-cdr\! |sin |sqrt |string |string\-\>list |string\-\>number |string\-\>symbol |string\-append |string\-ci\<\=\? |string\-ci\<\? |string\-ci\=\? |string\-ci\>\=\? |string\-ci\>\? |string\-copy |string\-fill\! |string\-length |string\-ref |string\-set\! |string\<\=\? |string\<\? |string\=\? |string\>\=\? |string\>\? |string\? |substring |symbol\-\>string |symbol\? |tan |transcript\-off |transcript\-on |truncate |values |vector |vector\-\>list |vector\-fill\! |vector\-length |vector\-ref |vector\-set\! |vector\? |with\-input\-from\-file |with\-output\-to\-file |write |write\-char |zero\? 
)`, NameBuiltin, nil}, + {`(?<=\()[\w!$%&*+,/:<=>?@^~|-]+`, NameFunction, nil}, + {`[\w!$%&*+,/:<=>?@^~|-]+`, NameVariable, nil}, + {`(\(|\))`, Punctuation, nil}, + {`(\[|\])`, Punctuation, nil}, + }, + "multiline-comment": { + {`#\|`, CommentMultiline, Push()}, + {`\|#`, CommentMultiline, Pop(1)}, + {`[^|#]+`, CommentMultiline, nil}, + {`[|#]`, CommentMultiline, nil}, + }, + "commented-form": { + {`\(`, Comment, Push()}, + {`\)`, Comment, Pop(1)}, + {`[^()]+`, Comment, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/scilab.go b/vendor/github.com/alecthomas/chroma/lexers/s/scilab.go new file mode 100644 index 000000000..f8164927f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/scilab.go @@ -0,0 +1,48 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Scilab lexer. +var Scilab = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Scilab", + Aliases: []string{"scilab"}, + Filenames: []string{"*.sci", "*.sce", "*.tst"}, + MimeTypes: []string{"text/scilab"}, + }, + scilabRules, +)) + +func scilabRules() Rules { + return Rules{ + "root": { + {`//.*?$`, CommentSingle, nil}, + {`^\s*function`, Keyword, Push("deffunc")}, + {Words(``, `\b`, `__FILE__`, `__LINE__`, `break`, `case`, `catch`, `classdef`, `continue`, `do`, `else`, `elseif`, `end`, `end_try_catch`, `end_unwind_protect`, `endclassdef`, `endevents`, `endfor`, `endfunction`, `endif`, `endmethods`, `endproperties`, `endswitch`, `endwhile`, `events`, `for`, `function`, `get`, `global`, `if`, `methods`, `otherwise`, `persistent`, `properties`, `return`, `set`, `static`, `switch`, `try`, `until`, `unwind_protect`, `unwind_protect_cleanup`, `while`), Keyword, nil}, + {Words(``, `\b`, `!!_invoke_`, `%H5Object_e`, `%H5Object_fieldnames`, `%H5Object_p`, `%XMLAttr_6`, `%XMLAttr_e`, `%XMLAttr_i_XMLElem`, `%XMLAttr_length`, `%XMLAttr_p`, `%XMLAttr_size`, `%XMLDoc_6`, `%XMLDoc_e`, `%XMLDoc_i_XMLList`, `%XMLDoc_p`, `%XMLElem_6`, `%XMLElem_e`, `%XMLElem_i_XMLDoc`, `%XMLElem_i_XMLElem`, `%XMLElem_i_XMLList`, `%XMLElem_p`, `%XMLList_6`, `%XMLList_e`, `%XMLList_i_XMLElem`, `%XMLList_i_XMLList`, `%XMLList_length`, `%XMLList_p`, `%XMLList_size`, `%XMLNs_6`, `%XMLNs_e`, `%XMLNs_i_XMLElem`, `%XMLNs_p`, `%XMLSet_6`, `%XMLSet_e`, `%XMLSet_length`, `%XMLSet_p`, `%XMLSet_size`, `%XMLValid_p`, `%_EClass_6`, `%_EClass_e`, `%_EClass_p`, `%_EObj_0`, `%_EObj_1__EObj`, `%_EObj_1_b`, `%_EObj_1_c`, `%_EObj_1_i`, `%_EObj_1_s`, `%_EObj_2__EObj`, `%_EObj_2_b`, `%_EObj_2_c`, `%_EObj_2_i`, `%_EObj_2_s`, `%_EObj_3__EObj`, `%_EObj_3_b`, `%_EObj_3_c`, `%_EObj_3_i`, `%_EObj_3_s`, `%_EObj_4__EObj`, `%_EObj_4_b`, `%_EObj_4_c`, `%_EObj_4_i`, `%_EObj_4_s`, `%_EObj_5`, `%_EObj_6`, `%_EObj_a__EObj`, `%_EObj_a_b`, `%_EObj_a_c`, `%_EObj_a_i`, `%_EObj_a_s`, `%_EObj_d__EObj`, `%_EObj_d_b`, `%_EObj_d_c`, `%_EObj_d_i`, `%_EObj_d_s`, `%_EObj_disp`, `%_EObj_e`, `%_EObj_g__EObj`, `%_EObj_g_b`, `%_EObj_g_c`, `%_EObj_g_i`, `%_EObj_g_s`, `%_EObj_h__EObj`, `%_EObj_h_b`, `%_EObj_h_c`, `%_EObj_h_i`, `%_EObj_h_s`, `%_EObj_i__EObj`, `%_EObj_j__EObj`, `%_EObj_j_b`, `%_EObj_j_c`, `%_EObj_j_i`, `%_EObj_j_s`, `%_EObj_k__EObj`, `%_EObj_k_b`, `%_EObj_k_c`, `%_EObj_k_i`, `%_EObj_k_s`, `%_EObj_l__EObj`, `%_EObj_l_b`, `%_EObj_l_c`, `%_EObj_l_i`, `%_EObj_l_s`, `%_EObj_m__EObj`, `%_EObj_m_b`, `%_EObj_m_c`, `%_EObj_m_i`, `%_EObj_m_s`, `%_EObj_n__EObj`, `%_EObj_n_b`, `%_EObj_n_c`, `%_EObj_n_i`, `%_EObj_n_s`, `%_EObj_o__EObj`, `%_EObj_o_b`, `%_EObj_o_c`, `%_EObj_o_i`, `%_EObj_o_s`, 
`%_EObj_p`, `%_EObj_p__EObj`, `%_EObj_p_b`, `%_EObj_p_c`, `%_EObj_p_i`, `%_EObj_p_s`, `%_EObj_q__EObj`, `%_EObj_q_b`, `%_EObj_q_c`, `%_EObj_q_i`, `%_EObj_q_s`, `%_EObj_r__EObj`, `%_EObj_r_b`, `%_EObj_r_c`, `%_EObj_r_i`, `%_EObj_r_s`, `%_EObj_s__EObj`, `%_EObj_s_b`, `%_EObj_s_c`, `%_EObj_s_i`, `%_EObj_s_s`, `%_EObj_t`, `%_EObj_x__EObj`, `%_EObj_x_b`, `%_EObj_x_c`, `%_EObj_x_i`, `%_EObj_x_s`, `%_EObj_y__EObj`, `%_EObj_y_b`, `%_EObj_y_c`, `%_EObj_y_i`, `%_EObj_y_s`, `%_EObj_z__EObj`, `%_EObj_z_b`, `%_EObj_z_c`, `%_EObj_z_i`, `%_EObj_z_s`, `%_eigs`, `%_load`, `%b_1__EObj`, `%b_2__EObj`, `%b_3__EObj`, `%b_4__EObj`, `%b_a__EObj`, `%b_d__EObj`, `%b_g__EObj`, `%b_h__EObj`, `%b_i_XMLList`, `%b_i__EObj`, `%b_j__EObj`, `%b_k__EObj`, `%b_l__EObj`, `%b_m__EObj`, `%b_n__EObj`, `%b_o__EObj`, `%b_p__EObj`, `%b_q__EObj`, `%b_r__EObj`, `%b_s__EObj`, `%b_x__EObj`, `%b_y__EObj`, `%b_z__EObj`, `%c_1__EObj`, `%c_2__EObj`, `%c_3__EObj`, `%c_4__EObj`, `%c_a__EObj`, `%c_d__EObj`, `%c_g__EObj`, `%c_h__EObj`, `%c_i_XMLAttr`, `%c_i_XMLDoc`, `%c_i_XMLElem`, `%c_i_XMLList`, `%c_i__EObj`, `%c_j__EObj`, `%c_k__EObj`, `%c_l__EObj`, `%c_m__EObj`, `%c_n__EObj`, `%c_o__EObj`, `%c_p__EObj`, `%c_q__EObj`, `%c_r__EObj`, `%c_s__EObj`, `%c_x__EObj`, `%c_y__EObj`, `%c_z__EObj`, `%ce_i_XMLList`, `%fptr_i_XMLList`, `%h_i_XMLList`, `%hm_i_XMLList`, `%i_1__EObj`, `%i_2__EObj`, `%i_3__EObj`, `%i_4__EObj`, `%i_a__EObj`, `%i_abs`, `%i_cumprod`, `%i_cumsum`, `%i_d__EObj`, `%i_diag`, `%i_g__EObj`, `%i_h__EObj`, `%i_i_XMLList`, `%i_i__EObj`, `%i_j__EObj`, `%i_k__EObj`, `%i_l__EObj`, `%i_m__EObj`, `%i_matrix`, `%i_max`, `%i_maxi`, `%i_min`, `%i_mini`, `%i_mput`, `%i_n__EObj`, `%i_o__EObj`, `%i_p`, `%i_p__EObj`, `%i_prod`, `%i_q__EObj`, `%i_r__EObj`, `%i_s__EObj`, `%i_sum`, `%i_tril`, `%i_triu`, `%i_x__EObj`, `%i_y__EObj`, `%i_z__EObj`, `%ip_i_XMLList`, `%l_i_XMLList`, `%l_i__EObj`, `%lss_i_XMLList`, `%mc_i_XMLList`, `%msp_full`, `%msp_i_XMLList`, `%msp_spget`, `%p_i_XMLList`, `%ptr_i_XMLList`, `%r_i_XMLList`, `%s_1__EObj`, `%s_2__EObj`, `%s_3__EObj`, `%s_4__EObj`, `%s_a__EObj`, `%s_d__EObj`, `%s_g__EObj`, `%s_h__EObj`, `%s_i_XMLList`, `%s_i__EObj`, `%s_j__EObj`, `%s_k__EObj`, `%s_l__EObj`, `%s_m__EObj`, `%s_n__EObj`, `%s_o__EObj`, `%s_p__EObj`, `%s_q__EObj`, `%s_r__EObj`, `%s_s__EObj`, `%s_x__EObj`, `%s_y__EObj`, `%s_z__EObj`, `%sp_i_XMLList`, `%spb_i_XMLList`, `%st_i_XMLList`, `Calendar`, `ClipBoard`, `Matplot`, `Matplot1`, `PlaySound`, `TCL_DeleteInterp`, `TCL_DoOneEvent`, `TCL_EvalFile`, `TCL_EvalStr`, `TCL_ExistArray`, `TCL_ExistInterp`, `TCL_ExistVar`, `TCL_GetVar`, `TCL_GetVersion`, `TCL_SetVar`, `TCL_UnsetVar`, `TCL_UpVar`, `_`, `_code2str`, `_d`, `_str2code`, `about`, `abs`, `acos`, `addModulePreferences`, `addcolor`, `addf`, `addhistory`, `addinter`, `addlocalizationdomain`, `amell`, `and`, `argn`, `arl2_ius`, `ascii`, `asin`, `atan`, `backslash`, `balanc`, `banner`, `base2dec`, `basename`, `bdiag`, `beep`, `besselh`, `besseli`, `besselj`, `besselk`, `bessely`, `beta`, `bezout`, `bfinit`, `blkfc1i`, `blkslvi`, `bool2s`, `browsehistory`, `browsevar`, `bsplin3val`, `buildDoc`, `buildouttb`, `bvode`, `c_link`, `call`, `callblk`, `captions`, `cd`, `cdfbet`, `cdfbin`, `cdfchi`, `cdfchn`, `cdff`, `cdffnc`, `cdfgam`, `cdfnbn`, `cdfnor`, `cdfpoi`, `cdft`, `ceil`, `champ`, `champ1`, `chdir`, `chol`, `clc`, `clean`, `clear`, `clearfun`, `clearglobal`, `closeEditor`, `closeEditvar`, `closeXcos`, `code2str`, `coeff`, `color`, `comp`, `completion`, `conj`, `contour2di`, `contr`, `conv2`, `convstr`, `copy`, `copyfile`, `corr`, `cos`, `coserror`, 
`createdir`, `cshep2d`, `csvDefault`, `csvIsnum`, `csvRead`, `csvStringToDouble`, `csvTextScan`, `csvWrite`, `ctree2`, `ctree3`, `ctree4`, `cumprod`, `cumsum`, `curblock`, `curblockc`, `daskr`, `dasrt`, `dassl`, `data2sig`, `datatipCreate`, `datatipManagerMode`, `datatipMove`, `datatipRemove`, `datatipSetDisplay`, `datatipSetInterp`, `datatipSetOrientation`, `datatipSetStyle`, `datatipToggle`, `dawson`, `dct`, `debug`, `dec2base`, `deff`, `definedfields`, `degree`, `delbpt`, `delete`, `deletefile`, `delip`, `delmenu`, `det`, `dgettext`, `dhinf`, `diag`, `diary`, `diffobjs`, `disp`, `dispbpt`, `displayhistory`, `disposefftwlibrary`, `dlgamma`, `dnaupd`, `dneupd`, `double`, `drawaxis`, `drawlater`, `drawnow`, `driver`, `dsaupd`, `dsearch`, `dseupd`, `dst`, `duplicate`, `editvar`, `emptystr`, `end_scicosim`, `ereduc`, `erf`, `erfc`, `erfcx`, `erfi`, `errcatch`, `errclear`, `error`, `eval_cshep2d`, `exec`, `execstr`, `exists`, `exit`, `exp`, `expm`, `exportUI`, `export_to_hdf5`, `eye`, `fadj2sp`, `fec`, `feval`, `fft`, `fftw`, `fftw_flags`, `fftw_forget_wisdom`, `fftwlibraryisloaded`, `figure`, `file`, `filebrowser`, `fileext`, `fileinfo`, `fileparts`, `filesep`, `find`, `findBD`, `findfiles`, `fire_closing_finished`, `floor`, `format`, `fort`, `fprintfMat`, `freq`, `frexp`, `fromc`, `fromjava`, `fscanfMat`, `fsolve`, `fstair`, `full`, `fullpath`, `funcprot`, `funptr`, `gamma`, `gammaln`, `geom3d`, `get`, `getURL`, `get_absolute_file_path`, `get_fftw_wisdom`, `getblocklabel`, `getcallbackobject`, `getdate`, `getdebuginfo`, `getdefaultlanguage`, `getdrives`, `getdynlibext`, `getenv`, `getfield`, `gethistory`, `gethistoryfile`, `getinstalledlookandfeels`, `getio`, `getlanguage`, `getlongpathname`, `getlookandfeel`, `getmd5`, `getmemory`, `getmodules`, `getos`, `getpid`, `getrelativefilename`, `getscicosvars`, `getscilabmode`, `getshortpathname`, `gettext`, `getvariablesonstack`, `getversion`, `glist`, `global`, `glue`, `grand`, `graphicfunction`, `grayplot`, `grep`, `gsort`, `gstacksize`, `h5attr`, `h5close`, `h5cp`, `h5dataset`, `h5dump`, `h5exists`, `h5flush`, `h5get`, `h5group`, `h5isArray`, `h5isAttr`, `h5isCompound`, `h5isFile`, `h5isGroup`, `h5isList`, `h5isRef`, `h5isSet`, `h5isSpace`, `h5isType`, `h5isVlen`, `h5label`, `h5ln`, `h5ls`, `h5mount`, `h5mv`, `h5open`, `h5read`, `h5readattr`, `h5rm`, `h5umount`, `h5write`, `h5writeattr`, `havewindow`, `helpbrowser`, `hess`, `hinf`, `historymanager`, `historysize`, `host`, `htmlDump`, `htmlRead`, `htmlReadStr`, `htmlWrite`, `iconvert`, `ieee`, `ilib_verbose`, `imag`, `impl`, `import_from_hdf5`, `imult`, `inpnvi`, `int`, `int16`, `int2d`, `int32`, `int3d`, `int8`, `interp`, `interp2d`, `interp3d`, `intg`, `intppty`, `inttype`, `inv`, `invoke_lu`, `is_handle_valid`, `is_hdf5_file`, `isalphanum`, `isascii`, `isdef`, `isdigit`, `isdir`, `isequal`, `isequalbitwise`, `iserror`, `isfile`, `isglobal`, `isletter`, `isnum`, `isreal`, `iswaitingforinput`, `jallowClassReloading`, `jarray`, `jautoTranspose`, `jautoUnwrap`, `javaclasspath`, `javalibrarypath`, `jcast`, `jcompile`, `jconvMatrixMethod`, `jcreatejar`, `jdeff`, `jdisableTrace`, `jenableTrace`, `jexists`, `jgetclassname`, `jgetfield`, `jgetfields`, `jgetinfo`, `jgetmethods`, `jimport`, `jinvoke`, `jinvoke_db`, `jnewInstance`, `jremove`, `jsetfield`, `junwrap`, `junwraprem`, `jwrap`, `jwrapinfloat`, `kron`, `lasterror`, `ldiv`, `ldivf`, `legendre`, `length`, `lib`, `librarieslist`, `libraryinfo`, `light`, `linear_interpn`, `lines`, `link`, `linmeq`, `list`, `listvar_in_hdf5`, `load`, `loadGui`, 
`loadScicos`, `loadXcos`, `loadfftwlibrary`, `loadhistory`, `log`, `log1p`, `lsq`, `lsq_splin`, `lsqrsolve`, `lsslist`, `lstcat`, `lstsize`, `ltitr`, `lu`, `ludel`, `lufact`, `luget`, `lusolve`, `macr2lst`, `macr2tree`, `matfile_close`, `matfile_listvar`, `matfile_open`, `matfile_varreadnext`, `matfile_varwrite`, `matrix`, `max`, `maxfiles`, `mclearerr`, `mclose`, `meof`, `merror`, `messagebox`, `mfprintf`, `mfscanf`, `mget`, `mgeti`, `mgetl`, `mgetstr`, `min`, `mlist`, `mode`, `model2blk`, `mopen`, `move`, `movefile`, `mprintf`, `mput`, `mputl`, `mputstr`, `mscanf`, `mseek`, `msprintf`, `msscanf`, `mtell`, `mtlb_mode`, `mtlb_sparse`, `mucomp`, `mulf`, `name2rgb`, `nearfloat`, `newaxes`, `newest`, `newfun`, `nnz`, `norm`, `notify`, `number_properties`, `ode`, `odedc`, `ones`, `openged`, `opentk`, `optim`, `or`, `ordmmd`, `parallel_concurrency`, `parallel_run`, `param3d`, `param3d1`, `part`, `pathconvert`, `pathsep`, `phase_simulation`, `plot2d`, `plot2d1`, `plot2d2`, `plot2d3`, `plot2d4`, `plot3d`, `plot3d1`, `plotbrowser`, `pointer_xproperty`, `poly`, `ppol`, `pppdiv`, `predef`, `preferences`, `print`, `printf`, `printfigure`, `printsetupbox`, `prod`, `progressionbar`, `prompt`, `pwd`, `qld`, `qp_solve`, `qr`, `raise_window`, `rand`, `rankqr`, `rat`, `rcond`, `rdivf`, `read`, `read4b`, `read_csv`, `readb`, `readgateway`, `readmps`, `real`, `realtime`, `realtimeinit`, `regexp`, `relocate_handle`, `remez`, `removeModulePreferences`, `removedir`, `removelinehistory`, `res_with_prec`, `resethistory`, `residu`, `resume`, `return`, `ricc`, `rlist`, `roots`, `rotate_axes`, `round`, `rpem`, `rtitr`, `rubberbox`, `save`, `saveGui`, `saveafterncommands`, `saveconsecutivecommands`, `savehistory`, `schur`, `sci_haltscicos`, `sci_tree2`, `sci_tree3`, `sci_tree4`, `sciargs`, `scicos_debug`, `scicos_debug_count`, `scicos_time`, `scicosim`, `scinotes`, `sctree`, `semidef`, `set`, `set_blockerror`, `set_fftw_wisdom`, `set_xproperty`, `setbpt`, `setdefaultlanguage`, `setenv`, `setfield`, `sethistoryfile`, `setlanguage`, `setlookandfeel`, `setmenu`, `sfact`, `sfinit`, `show_window`, `sident`, `sig2data`, `sign`, `simp`, `simp_mode`, `sin`, `size`, `slash`, `sleep`, `sorder`, `sparse`, `spchol`, `spcompack`, `spec`, `spget`, `splin`, `splin2d`, `splin3d`, `splitURL`, `spones`, `sprintf`, `sqrt`, `stacksize`, `str2code`, `strcat`, `strchr`, `strcmp`, `strcspn`, `strindex`, `string`, `stringbox`, `stripblanks`, `strncpy`, `strrchr`, `strrev`, `strsplit`, `strspn`, `strstr`, `strsubst`, `strtod`, `strtok`, `subf`, `sum`, `svd`, `swap_handles`, `symfcti`, `syredi`, `system_getproperty`, `system_setproperty`, `ta2lpd`, `tan`, `taucs_chdel`, `taucs_chfact`, `taucs_chget`, `taucs_chinfo`, `taucs_chsolve`, `tempname`, `testmatrix`, `timer`, `tlist`, `tohome`, `tokens`, `toolbar`, `toprint`, `tr_zer`, `tril`, `triu`, `type`, `typename`, `uiDisplayTree`, `uicontextmenu`, `uicontrol`, `uigetcolor`, `uigetdir`, `uigetfile`, `uigetfont`, `uimenu`, `uint16`, `uint32`, `uint8`, `uipopup`, `uiputfile`, `uiwait`, `ulink`, `umf_ludel`, `umf_lufact`, `umf_luget`, `umf_luinfo`, `umf_lusolve`, `umfpack`, `unglue`, `unix`, `unsetmenu`, `unzoom`, `updatebrowsevar`, `usecanvas`, `useeditor`, `user`, `var2vec`, `varn`, `vec2var`, `waitbar`, `warnBlockByUID`, `warning`, `what`, `where`, `whereis`, `who`, `winsid`, `with_module`, `writb`, `write`, `write4b`, `write_csv`, `x_choose`, `x_choose_modeless`, `x_dialog`, `x_mdialog`, `xarc`, `xarcs`, `xarrows`, `xchange`, `xchoicesi`, `xclick`, `xcos`, `xcosAddToolsMenu`, 
`xcosConfigureXmlFile`, `xcosDiagramToScilab`, `xcosPalCategoryAdd`, `xcosPalDelete`, `xcosPalDisable`, `xcosPalEnable`, `xcosPalGenerateIcon`, `xcosPalGet`, `xcosPalLoad`, `xcosPalMove`, `xcosSimulationStarted`, `xcosUpdateBlock`, `xdel`, `xend`, `xfarc`, `xfarcs`, `xfpoly`, `xfpolys`, `xfrect`, `xget`, `xgetmouse`, `xgraduate`, `xgrid`, `xinit`, `xlfont`, `xls_open`, `xls_read`, `xmlAddNs`, `xmlAppend`, `xmlAsNumber`, `xmlAsText`, `xmlDTD`, `xmlDelete`, `xmlDocument`, `xmlDump`, `xmlElement`, `xmlFormat`, `xmlGetNsByHref`, `xmlGetNsByPrefix`, `xmlGetOpenDocs`, `xmlIsValidObject`, `xmlName`, `xmlNs`, `xmlRead`, `xmlReadStr`, `xmlRelaxNG`, `xmlRemove`, `xmlSchema`, `xmlSetAttributes`, `xmlValidate`, `xmlWrite`, `xmlXPath`, `xname`, `xpause`, `xpoly`, `xpolys`, `xrect`, `xrects`, `xs2bmp`, `xs2emf`, `xs2eps`, `xs2gif`, `xs2jpg`, `xs2pdf`, `xs2png`, `xs2ppm`, `xs2ps`, `xs2svg`, `xsegs`, `xset`, `xstring`, `xstringb`, `xtitle`, `zeros`, `znaupd`, `zneupd`, `zoom_rect`, `abort`, `apropos`, `break`, `case`, `catch`, `continue`, `do`, `else`, `elseif`, `end`, `endfunction`, `for`, `function`, `help`, `if`, `pause`, `quit`, `select`, `then`, `try`, `while`, `!_deff_wrapper`, `%0_i_st`, `%3d_i_h`, `%Block_xcosUpdateBlock`, `%TNELDER_p`, `%TNELDER_string`, `%TNMPLOT_p`, `%TNMPLOT_string`, `%TOPTIM_p`, `%TOPTIM_string`, `%TSIMPLEX_p`, `%TSIMPLEX_string`, `%_EVoid_p`, `%_gsort`, `%_listvarinfile`, `%_rlist`, `%_save`, `%_sodload`, `%_strsplit`, `%_unwrap`, `%ar_p`, `%asn`, `%b_a_b`, `%b_a_s`, `%b_c_s`, `%b_c_spb`, `%b_cumprod`, `%b_cumsum`, `%b_d_s`, `%b_diag`, `%b_e`, `%b_f_s`, `%b_f_spb`, `%b_g_s`, `%b_g_spb`, `%b_grand`, `%b_h_s`, `%b_h_spb`, `%b_i_b`, `%b_i_ce`, `%b_i_h`, `%b_i_hm`, `%b_i_s`, `%b_i_sp`, `%b_i_spb`, `%b_i_st`, `%b_iconvert`, `%b_l_b`, `%b_l_s`, `%b_m_b`, `%b_m_s`, `%b_matrix`, `%b_n_hm`, `%b_o_hm`, `%b_p_s`, `%b_prod`, `%b_r_b`, `%b_r_s`, `%b_s_b`, `%b_s_s`, `%b_string`, `%b_sum`, `%b_tril`, `%b_triu`, `%b_x_b`, `%b_x_s`, `%bicg`, `%bicgstab`, `%c_a_c`, `%c_b_c`, `%c_b_s`, `%c_diag`, `%c_dsearch`, `%c_e`, `%c_eye`, `%c_f_s`, `%c_grand`, `%c_i_c`, `%c_i_ce`, `%c_i_h`, `%c_i_hm`, `%c_i_lss`, `%c_i_r`, `%c_i_s`, `%c_i_st`, `%c_matrix`, `%c_n_l`, `%c_n_st`, `%c_o_l`, `%c_o_st`, `%c_ones`, `%c_rand`, `%c_tril`, `%c_triu`, `%cblock_c_cblock`, `%cblock_c_s`, `%cblock_e`, `%cblock_f_cblock`, `%cblock_p`, `%cblock_size`, `%ce_6`, `%ce_c_ce`, `%ce_e`, `%ce_f_ce`, `%ce_i_ce`, `%ce_i_s`, `%ce_i_st`, `%ce_matrix`, `%ce_p`, `%ce_size`, `%ce_string`, `%ce_t`, `%cgs`, `%champdat_i_h`, `%choose`, `%diagram_xcos`, `%dir_p`, `%fptr_i_st`, `%grand_perm`, `%grayplot_i_h`, `%h_i_st`, `%hmS_k_hmS_generic`, `%hm_1_hm`, `%hm_1_s`, `%hm_2_hm`, `%hm_2_s`, `%hm_3_hm`, `%hm_3_s`, `%hm_4_hm`, `%hm_4_s`, `%hm_5`, `%hm_a_hm`, `%hm_a_r`, `%hm_a_s`, `%hm_abs`, `%hm_and`, `%hm_bool2s`, `%hm_c_hm`, `%hm_ceil`, `%hm_conj`, `%hm_cos`, `%hm_cumprod`, `%hm_cumsum`, `%hm_d_hm`, `%hm_d_s`, `%hm_degree`, `%hm_dsearch`, `%hm_e`, `%hm_exp`, `%hm_eye`, `%hm_f_hm`, `%hm_find`, `%hm_floor`, `%hm_g_hm`, `%hm_grand`, `%hm_gsort`, `%hm_h_hm`, `%hm_i_b`, `%hm_i_ce`, `%hm_i_h`, `%hm_i_hm`, `%hm_i_i`, `%hm_i_p`, `%hm_i_r`, `%hm_i_s`, `%hm_i_st`, `%hm_iconvert`, `%hm_imag`, `%hm_int`, `%hm_isnan`, `%hm_isreal`, `%hm_j_hm`, `%hm_j_s`, `%hm_k_hm`, `%hm_k_s`, `%hm_log`, `%hm_m_p`, `%hm_m_r`, `%hm_m_s`, `%hm_matrix`, `%hm_max`, `%hm_mean`, `%hm_median`, `%hm_min`, `%hm_n_b`, `%hm_n_c`, `%hm_n_hm`, `%hm_n_i`, `%hm_n_p`, `%hm_n_s`, `%hm_o_b`, `%hm_o_c`, `%hm_o_hm`, `%hm_o_i`, `%hm_o_p`, `%hm_o_s`, `%hm_ones`, `%hm_or`, `%hm_p`, `%hm_prod`, 
`%hm_q_hm`, `%hm_r_s`, `%hm_rand`, `%hm_real`, `%hm_round`, `%hm_s`, `%hm_s_hm`, `%hm_s_r`, `%hm_s_s`, `%hm_sign`, `%hm_sin`, `%hm_size`, `%hm_sqrt`, `%hm_stdev`, `%hm_string`, `%hm_sum`, `%hm_x_hm`, `%hm_x_p`, `%hm_x_s`, `%hm_zeros`, `%i_1_s`, `%i_2_s`, `%i_3_s`, `%i_4_s`, `%i_Matplot`, `%i_a_i`, `%i_a_s`, `%i_and`, `%i_ascii`, `%i_b_s`, `%i_bezout`, `%i_champ`, `%i_champ1`, `%i_contour`, `%i_contour2d`, `%i_d_i`, `%i_d_s`, `%i_dsearch`, `%i_e`, `%i_fft`, `%i_g_i`, `%i_gcd`, `%i_grand`, `%i_h_i`, `%i_i_ce`, `%i_i_h`, `%i_i_hm`, `%i_i_i`, `%i_i_s`, `%i_i_st`, `%i_j_i`, `%i_j_s`, `%i_l_s`, `%i_lcm`, `%i_length`, `%i_m_i`, `%i_m_s`, `%i_mfprintf`, `%i_mprintf`, `%i_msprintf`, `%i_n_s`, `%i_o_s`, `%i_or`, `%i_p_i`, `%i_p_s`, `%i_plot2d`, `%i_plot2d1`, `%i_plot2d2`, `%i_q_s`, `%i_r_i`, `%i_r_s`, `%i_round`, `%i_s_i`, `%i_s_s`, `%i_sign`, `%i_string`, `%i_x_i`, `%i_x_s`, `%ip_a_s`, `%ip_i_st`, `%ip_m_s`, `%ip_n_ip`, `%ip_o_ip`, `%ip_p`, `%ip_part`, `%ip_s_s`, `%ip_string`, `%k`, `%l_i_h`, `%l_i_s`, `%l_i_st`, `%l_isequal`, `%l_n_c`, `%l_n_l`, `%l_n_m`, `%l_n_p`, `%l_n_s`, `%l_n_st`, `%l_o_c`, `%l_o_l`, `%l_o_m`, `%l_o_p`, `%l_o_s`, `%l_o_st`, `%lss_a_lss`, `%lss_a_p`, `%lss_a_r`, `%lss_a_s`, `%lss_c_lss`, `%lss_c_p`, `%lss_c_r`, `%lss_c_s`, `%lss_e`, `%lss_eye`, `%lss_f_lss`, `%lss_f_p`, `%lss_f_r`, `%lss_f_s`, `%lss_i_ce`, `%lss_i_lss`, `%lss_i_p`, `%lss_i_r`, `%lss_i_s`, `%lss_i_st`, `%lss_inv`, `%lss_l_lss`, `%lss_l_p`, `%lss_l_r`, `%lss_l_s`, `%lss_m_lss`, `%lss_m_p`, `%lss_m_r`, `%lss_m_s`, `%lss_n_lss`, `%lss_n_p`, `%lss_n_r`, `%lss_n_s`, `%lss_norm`, `%lss_o_lss`, `%lss_o_p`, `%lss_o_r`, `%lss_o_s`, `%lss_ones`, `%lss_r_lss`, `%lss_r_p`, `%lss_r_r`, `%lss_r_s`, `%lss_rand`, `%lss_s`, `%lss_s_lss`, `%lss_s_p`, `%lss_s_r`, `%lss_s_s`, `%lss_size`, `%lss_t`, `%lss_v_lss`, `%lss_v_p`, `%lss_v_r`, `%lss_v_s`, `%lt_i_s`, `%m_n_l`, `%m_o_l`, `%mc_i_h`, `%mc_i_s`, `%mc_i_st`, `%mc_n_st`, `%mc_o_st`, `%mc_string`, `%mps_p`, `%mps_string`, `%msp_a_s`, `%msp_abs`, `%msp_e`, `%msp_find`, `%msp_i_s`, `%msp_i_st`, `%msp_length`, `%msp_m_s`, `%msp_maxi`, `%msp_n_msp`, `%msp_nnz`, `%msp_o_msp`, `%msp_p`, `%msp_sparse`, `%msp_spones`, `%msp_t`, `%p_a_lss`, `%p_a_r`, `%p_c_lss`, `%p_c_r`, `%p_cumprod`, `%p_cumsum`, `%p_d_p`, `%p_d_r`, `%p_d_s`, `%p_det`, `%p_e`, `%p_f_lss`, `%p_f_r`, `%p_grand`, `%p_i_ce`, `%p_i_h`, `%p_i_hm`, `%p_i_lss`, `%p_i_p`, `%p_i_r`, `%p_i_s`, `%p_i_st`, `%p_inv`, `%p_j_s`, `%p_k_p`, `%p_k_r`, `%p_k_s`, `%p_l_lss`, `%p_l_p`, `%p_l_r`, `%p_l_s`, `%p_m_hm`, `%p_m_lss`, `%p_m_r`, `%p_matrix`, `%p_n_l`, `%p_n_lss`, `%p_n_r`, `%p_o_l`, `%p_o_lss`, `%p_o_r`, `%p_o_sp`, `%p_p_s`, `%p_part`, `%p_prod`, `%p_q_p`, `%p_q_r`, `%p_q_s`, `%p_r_lss`, `%p_r_p`, `%p_r_r`, `%p_r_s`, `%p_s_lss`, `%p_s_r`, `%p_simp`, `%p_string`, `%p_sum`, `%p_v_lss`, `%p_v_p`, `%p_v_r`, `%p_v_s`, `%p_x_hm`, `%p_x_r`, `%p_y_p`, `%p_y_r`, `%p_y_s`, `%p_z_p`, `%p_z_r`, `%p_z_s`, `%pcg`, `%plist_p`, `%plist_string`, `%r_0`, `%r_a_hm`, `%r_a_lss`, `%r_a_p`, `%r_a_r`, `%r_a_s`, `%r_c_lss`, `%r_c_p`, `%r_c_r`, `%r_c_s`, `%r_clean`, `%r_cumprod`, `%r_cumsum`, `%r_d_p`, `%r_d_r`, `%r_d_s`, `%r_det`, `%r_diag`, `%r_e`, `%r_eye`, `%r_f_lss`, `%r_f_p`, `%r_f_r`, `%r_f_s`, `%r_i_ce`, `%r_i_hm`, `%r_i_lss`, `%r_i_p`, `%r_i_r`, `%r_i_s`, `%r_i_st`, `%r_inv`, `%r_j_s`, `%r_k_p`, `%r_k_r`, `%r_k_s`, `%r_l_lss`, `%r_l_p`, `%r_l_r`, `%r_l_s`, `%r_m_hm`, `%r_m_lss`, `%r_m_p`, `%r_m_r`, `%r_m_s`, `%r_matrix`, `%r_n_lss`, `%r_n_p`, `%r_n_r`, `%r_n_s`, `%r_norm`, `%r_o_lss`, `%r_o_p`, `%r_o_r`, `%r_o_s`, `%r_ones`, `%r_p`, `%r_p_s`, 
`%r_prod`, `%r_q_p`, `%r_q_r`, `%r_q_s`, `%r_r_lss`, `%r_r_p`, `%r_r_r`, `%r_r_s`, `%r_rand`, `%r_s`, `%r_s_hm`, `%r_s_lss`, `%r_s_p`, `%r_s_r`, `%r_s_s`, `%r_simp`, `%r_size`, `%r_string`, `%r_sum`, `%r_t`, `%r_tril`, `%r_triu`, `%r_v_lss`, `%r_v_p`, `%r_v_r`, `%r_v_s`, `%r_varn`, `%r_x_p`, `%r_x_r`, `%r_x_s`, `%r_y_p`, `%r_y_r`, `%r_y_s`, `%r_z_p`, `%r_z_r`, `%r_z_s`, `%s_1_hm`, `%s_1_i`, `%s_2_hm`, `%s_2_i`, `%s_3_hm`, `%s_3_i`, `%s_4_hm`, `%s_4_i`, `%s_5`, `%s_a_b`, `%s_a_hm`, `%s_a_i`, `%s_a_ip`, `%s_a_lss`, `%s_a_msp`, `%s_a_r`, `%s_a_sp`, `%s_and`, `%s_b_i`, `%s_b_s`, `%s_bezout`, `%s_c_b`, `%s_c_cblock`, `%s_c_lss`, `%s_c_r`, `%s_c_sp`, `%s_d_b`, `%s_d_i`, `%s_d_p`, `%s_d_r`, `%s_d_sp`, `%s_e`, `%s_f_b`, `%s_f_cblock`, `%s_f_lss`, `%s_f_r`, `%s_f_sp`, `%s_g_b`, `%s_g_s`, `%s_gcd`, `%s_grand`, `%s_h_b`, `%s_h_s`, `%s_i_b`, `%s_i_c`, `%s_i_ce`, `%s_i_h`, `%s_i_hm`, `%s_i_i`, `%s_i_lss`, `%s_i_p`, `%s_i_r`, `%s_i_s`, `%s_i_sp`, `%s_i_spb`, `%s_i_st`, `%s_j_i`, `%s_k_hm`, `%s_k_p`, `%s_k_r`, `%s_k_sp`, `%s_l_b`, `%s_l_hm`, `%s_l_i`, `%s_l_lss`, `%s_l_p`, `%s_l_r`, `%s_l_s`, `%s_l_sp`, `%s_lcm`, `%s_m_b`, `%s_m_hm`, `%s_m_i`, `%s_m_ip`, `%s_m_lss`, `%s_m_msp`, `%s_m_r`, `%s_matrix`, `%s_n_hm`, `%s_n_i`, `%s_n_l`, `%s_n_lss`, `%s_n_r`, `%s_n_st`, `%s_o_hm`, `%s_o_i`, `%s_o_l`, `%s_o_lss`, `%s_o_r`, `%s_o_st`, `%s_or`, `%s_p_b`, `%s_p_i`, `%s_pow`, `%s_q_hm`, `%s_q_i`, `%s_q_p`, `%s_q_r`, `%s_q_sp`, `%s_r_b`, `%s_r_i`, `%s_r_lss`, `%s_r_p`, `%s_r_r`, `%s_r_s`, `%s_r_sp`, `%s_s_b`, `%s_s_hm`, `%s_s_i`, `%s_s_ip`, `%s_s_lss`, `%s_s_r`, `%s_s_sp`, `%s_simp`, `%s_v_lss`, `%s_v_p`, `%s_v_r`, `%s_v_s`, `%s_x_b`, `%s_x_hm`, `%s_x_i`, `%s_x_r`, `%s_y_p`, `%s_y_r`, `%s_y_sp`, `%s_z_p`, `%s_z_r`, `%s_z_sp`, `%sn`, `%sp_a_s`, `%sp_a_sp`, `%sp_and`, `%sp_c_s`, `%sp_ceil`, `%sp_conj`, `%sp_cos`, `%sp_cumprod`, `%sp_cumsum`, `%sp_d_s`, `%sp_d_sp`, `%sp_det`, `%sp_diag`, `%sp_e`, `%sp_exp`, `%sp_f_s`, `%sp_floor`, `%sp_grand`, `%sp_gsort`, `%sp_i_ce`, `%sp_i_h`, `%sp_i_s`, `%sp_i_sp`, `%sp_i_st`, `%sp_int`, `%sp_inv`, `%sp_k_s`, `%sp_k_sp`, `%sp_l_s`, `%sp_l_sp`, `%sp_length`, `%sp_max`, `%sp_min`, `%sp_norm`, `%sp_or`, `%sp_p_s`, `%sp_prod`, `%sp_q_s`, `%sp_q_sp`, `%sp_r_s`, `%sp_r_sp`, `%sp_round`, `%sp_s_s`, `%sp_s_sp`, `%sp_sin`, `%sp_sqrt`, `%sp_string`, `%sp_sum`, `%sp_tril`, `%sp_triu`, `%sp_y_s`, `%sp_y_sp`, `%sp_z_s`, `%sp_z_sp`, `%spb_and`, `%spb_c_b`, `%spb_cumprod`, `%spb_cumsum`, `%spb_diag`, `%spb_e`, `%spb_f_b`, `%spb_g_b`, `%spb_g_spb`, `%spb_h_b`, `%spb_h_spb`, `%spb_i_b`, `%spb_i_ce`, `%spb_i_h`, `%spb_i_st`, `%spb_or`, `%spb_prod`, `%spb_sum`, `%spb_tril`, `%spb_triu`, `%st_6`, `%st_c_st`, `%st_e`, `%st_f_st`, `%st_i_b`, `%st_i_c`, `%st_i_fptr`, `%st_i_h`, `%st_i_i`, `%st_i_ip`, `%st_i_lss`, `%st_i_msp`, `%st_i_p`, `%st_i_r`, `%st_i_s`, `%st_i_sp`, `%st_i_spb`, `%st_i_st`, `%st_matrix`, `%st_n_c`, `%st_n_l`, `%st_n_mc`, `%st_n_p`, `%st_n_s`, `%st_o_c`, `%st_o_l`, `%st_o_mc`, `%st_o_p`, `%st_o_s`, `%st_o_tl`, `%st_p`, `%st_size`, `%st_string`, `%st_t`, `%ticks_i_h`, `%xls_e`, `%xls_p`, `%xlssheet_e`, `%xlssheet_p`, `%xlssheet_size`, `%xlssheet_string`, `DominationRank`, `G_make`, `IsAScalar`, `NDcost`, `OS_Version`, `PlotSparse`, `ReadHBSparse`, `TCL_CreateSlave`, `abcd`, `abinv`, `accept_func_default`, `accept_func_vfsa`, `acf`, `acosd`, `acosh`, `acoshm`, `acosm`, `acot`, `acotd`, `acoth`, `acsc`, `acscd`, `acsch`, `add_demo`, `add_help_chapter`, `add_module_help_chapter`, `add_param`, `add_profiling`, `adj2sp`, `aff2ab`, `ana_style`, `analpf`, `analyze`, `aplat`, `arhnk`, `arl2`, 
`arma2p`, `arma2ss`, `armac`, `armax`, `armax1`, `arobasestring2strings`, `arsimul`, `ascii2string`, `asciimat`, `asec`, `asecd`, `asech`, `asind`, `asinh`, `asinhm`, `asinm`, `assert_checkalmostequal`, `assert_checkequal`, `assert_checkerror`, `assert_checkfalse`, `assert_checkfilesequal`, `assert_checktrue`, `assert_comparecomplex`, `assert_computedigits`, `assert_cond2reltol`, `assert_cond2reqdigits`, `assert_generror`, `atand`, `atanh`, `atanhm`, `atanm`, `atomsAutoload`, `atomsAutoloadAdd`, `atomsAutoloadDel`, `atomsAutoloadList`, `atomsCategoryList`, `atomsCheckModule`, `atomsDepTreeShow`, `atomsGetConfig`, `atomsGetInstalled`, `atomsGetInstalledPath`, `atomsGetLoaded`, `atomsGetLoadedPath`, `atomsInstall`, `atomsIsInstalled`, `atomsIsLoaded`, `atomsList`, `atomsLoad`, `atomsQuit`, `atomsRemove`, `atomsRepositoryAdd`, `atomsRepositoryDel`, `atomsRepositoryList`, `atomsRestoreConfig`, `atomsSaveConfig`, `atomsSearch`, `atomsSetConfig`, `atomsShow`, `atomsSystemInit`, `atomsSystemUpdate`, `atomsTest`, `atomsUpdate`, `atomsVersion`, `augment`, `auread`, `auwrite`, `balreal`, `bench_run`, `bilin`, `bilt`, `bin2dec`, `binomial`, `bitand`, `bitcmp`, `bitget`, `bitor`, `bitset`, `bitxor`, `black`, `blanks`, `bloc2exp`, `bloc2ss`, `block_parameter_error`, `bode`, `bode_asymp`, `bstap`, `buttmag`, `bvodeS`, `bytecode`, `bytecodewalk`, `cainv`, `calendar`, `calerf`, `calfrq`, `canon`, `casc`, `cat`, `cat_code`, `cb_m2sci_gui`, `ccontrg`, `cell`, `cell2mat`, `cellstr`, `center`, `cepstrum`, `cfspec`, `char`, `chart`, `cheb1mag`, `cheb2mag`, `check_gateways`, `check_modules_xml`, `check_versions`, `chepol`, `chfact`, `chsolve`, `classmarkov`, `clean_help`, `clock`, `cls2dls`, `cmb_lin`, `cmndred`, `cmoment`, `coding_ga_binary`, `coding_ga_identity`, `coff`, `coffg`, `colcomp`, `colcompr`, `colinout`, `colregul`, `companion`, `complex`, `compute_initial_temp`, `cond`, `cond2sp`, `condestsp`, `configure_msifort`, `configure_msvc`, `conjgrad`, `cont_frm`, `cont_mat`, `contrss`, `conv`, `convert_to_float`, `convertindex`, `convol`, `convol2d`, `copfac`, `correl`, `cosd`, `cosh`, `coshm`, `cosm`, `cotd`, `cotg`, `coth`, `cothm`, `cov`, `covar`, `createXConfiguration`, `createfun`, `createstruct`, `cross`, `crossover_ga_binary`, `crossover_ga_default`, `csc`, `cscd`, `csch`, `csgn`, `csim`, `cspect`, `ctr_gram`, `czt`, `dae`, `daeoptions`, `damp`, `datafit`, `date`, `datenum`, `datevec`, `dbphi`, `dcf`, `ddp`, `dec2bin`, `dec2hex`, `dec2oct`, `del_help_chapter`, `del_module_help_chapter`, `demo_begin`, `demo_choose`, `demo_compiler`, `demo_end`, `demo_file_choice`, `demo_folder_choice`, `demo_function_choice`, `demo_gui`, `demo_run`, `demo_viewCode`, `denom`, `derivat`, `derivative`, `des2ss`, `des2tf`, `detectmsifort64tools`, `detectmsvc64tools`, `determ`, `detr`, `detrend`, `devtools_run_builder`, `dhnorm`, `diff`, `diophant`, `dir`, `dirname`, `dispfiles`, `dllinfo`, `dscr`, `dsimul`, `dt_ility`, `dtsi`, `edit`, `edit_error`, `editor`, `eigenmarkov`, `eigs`, `ell1mag`, `enlarge_shape`, `entropy`, `eomday`, `epred`, `eqfir`, `eqiir`, `equil`, `equil1`, `erfinv`, `etime`, `eval`, `evans`, `evstr`, `example_run`, `expression2code`, `extract_help_examples`, `factor`, `factorial`, `factors`, `faurre`, `ffilt`, `fft2`, `fftshift`, `fieldnames`, `filt_sinc`, `filter`, `findABCD`, `findAC`, `findBDK`, `findR`, `find_freq`, `find_links`, `find_scicos_version`, `findm`, `findmsifortcompiler`, `findmsvccompiler`, `findx0BD`, `firstnonsingleton`, `fix`, `fixedpointgcd`, `flipdim`, `flts`, `fminsearch`, 
`formatBlackTip`, `formatBodeMagTip`, `formatBodePhaseTip`, `formatGainplotTip`, `formatHallModuleTip`, `formatHallPhaseTip`, `formatNicholsGainTip`, `formatNicholsPhaseTip`, `formatNyquistTip`, `formatPhaseplotTip`, `formatSgridDampingTip`, `formatSgridFreqTip`, `formatZgridDampingTip`, `formatZgridFreqTip`, `format_txt`, `fourplan`, `frep2tf`, `freson`, `frfit`, `frmag`, `fseek_origin`, `fsfirlin`, `fspec`, `fspecg`, `fstabst`, `ftest`, `ftuneq`, `fullfile`, `fullrf`, `fullrfk`, `fun2string`, `g_margin`, `gainplot`, `gamitg`, `gcare`, `gcd`, `gencompilationflags_unix`, `generateBlockImage`, `generateBlockImages`, `generic_i_ce`, `generic_i_h`, `generic_i_hm`, `generic_i_s`, `generic_i_st`, `genlib`, `genmarkov`, `geomean`, `getDiagramVersion`, `getModelicaPath`, `getPreferencesValue`, `get_file_path`, `get_function_path`, `get_param`, `get_profile`, `get_scicos_version`, `getd`, `getscilabkeywords`, `getshell`, `gettklib`, `gfare`, `gfrancis`, `givens`, `glever`, `gmres`, `group`, `gschur`, `gspec`, `gtild`, `h2norm`, `h_cl`, `h_inf`, `h_inf_st`, `h_norm`, `hallchart`, `halt`, `hank`, `hankelsv`, `harmean`, `haveacompiler`, `head_comments`, `help_from_sci`, `help_skeleton`, `hermit`, `hex2dec`, `hilb`, `hilbert`, `histc`, `horner`, `householder`, `hrmt`, `htrianr`, `hypermat`, `idct`, `idst`, `ifft`, `ifftshift`, `iir`, `iirgroup`, `iirlp`, `iirmod`, `ilib_build`, `ilib_build_jar`, `ilib_compile`, `ilib_for_link`, `ilib_gen_Make`, `ilib_gen_Make_unix`, `ilib_gen_cleaner`, `ilib_gen_gateway`, `ilib_gen_loader`, `ilib_include_flag`, `ilib_mex_build`, `im_inv`, `importScicosDiagram`, `importScicosPal`, `importXcosDiagram`, `imrep2ss`, `ind2sub`, `inistate`, `init_ga_default`, `init_param`, `initial_scicos_tables`, `input`, `instruction2code`, `intc`, `intdec`, `integrate`, `interp1`, `interpln`, `intersect`, `intl`, `intsplin`, `inttrap`, `inv_coeff`, `invr`, `invrs`, `invsyslin`, `iqr`, `isLeapYear`, `is_absolute_path`, `is_param`, `iscell`, `iscellstr`, `iscolumn`, `isempty`, `isfield`, `isinf`, `ismatrix`, `isnan`, `isrow`, `isscalar`, `issparse`, `issquare`, `isstruct`, `isvector`, `jmat`, `justify`, `kalm`, `karmarkar`, `kernel`, `kpure`, `krac2`, `kroneck`, `lattn`, `lattp`, `launchtest`, `lcf`, `lcm`, `lcmdiag`, `leastsq`, `leqe`, `leqr`, `lev`, `levin`, `lex_sort`, `lft`, `lin`, `lin2mu`, `lincos`, `lindquist`, `linf`, `linfn`, `linsolve`, `linspace`, `list2vec`, `list_param`, `listfiles`, `listfunctions`, `listvarinfile`, `lmisolver`, `lmitool`, `loadXcosLibs`, `loadmatfile`, `loadwave`, `log10`, `log2`, `logm`, `logspace`, `lqe`, `lqg`, `lqg2stan`, `lqg_ltr`, `lqr`, `ls`, `lyap`, `m2sci_gui`, `m_circle`, `macglov`, `macrovar`, `mad`, `makecell`, `manedit`, `mapsound`, `markp2ss`, `matfile2sci`, `mdelete`, `mean`, `meanf`, `median`, `members`, `mese`, `meshgrid`, `mfft`, `mfile2sci`, `minreal`, `minss`, `mkdir`, `modulo`, `moment`, `mrfit`, `msd`, `mstr2sci`, `mtlb`, `mtlb_0`, `mtlb_a`, `mtlb_all`, `mtlb_any`, `mtlb_axes`, `mtlb_axis`, `mtlb_beta`, `mtlb_box`, `mtlb_choices`, `mtlb_close`, `mtlb_colordef`, `mtlb_cond`, `mtlb_cov`, `mtlb_cumprod`, `mtlb_cumsum`, `mtlb_dec2hex`, `mtlb_delete`, `mtlb_diag`, `mtlb_diff`, `mtlb_dir`, `mtlb_double`, `mtlb_e`, `mtlb_echo`, `mtlb_error`, `mtlb_eval`, `mtlb_exist`, `mtlb_eye`, `mtlb_false`, `mtlb_fft`, `mtlb_fftshift`, `mtlb_filter`, `mtlb_find`, `mtlb_findstr`, `mtlb_fliplr`, `mtlb_fopen`, `mtlb_format`, `mtlb_fprintf`, `mtlb_fread`, `mtlb_fscanf`, `mtlb_full`, `mtlb_fwrite`, `mtlb_get`, `mtlb_grid`, `mtlb_hold`, `mtlb_i`, `mtlb_ifft`, 
`mtlb_image`, `mtlb_imp`, `mtlb_int16`, `mtlb_int32`, `mtlb_int8`, `mtlb_is`, `mtlb_isa`, `mtlb_isfield`, `mtlb_isletter`, `mtlb_isspace`, `mtlb_l`, `mtlb_legendre`, `mtlb_linspace`, `mtlb_logic`, `mtlb_logical`, `mtlb_loglog`, `mtlb_lower`, `mtlb_max`, `mtlb_mean`, `mtlb_median`, `mtlb_mesh`, `mtlb_meshdom`, `mtlb_min`, `mtlb_more`, `mtlb_num2str`, `mtlb_ones`, `mtlb_pcolor`, `mtlb_plot`, `mtlb_prod`, `mtlb_qr`, `mtlb_qz`, `mtlb_rand`, `mtlb_randn`, `mtlb_rcond`, `mtlb_realmax`, `mtlb_realmin`, `mtlb_s`, `mtlb_semilogx`, `mtlb_semilogy`, `mtlb_setstr`, `mtlb_size`, `mtlb_sort`, `mtlb_sortrows`, `mtlb_sprintf`, `mtlb_sscanf`, `mtlb_std`, `mtlb_strcmp`, `mtlb_strcmpi`, `mtlb_strfind`, `mtlb_strrep`, `mtlb_subplot`, `mtlb_sum`, `mtlb_t`, `mtlb_toeplitz`, `mtlb_tril`, `mtlb_triu`, `mtlb_true`, `mtlb_type`, `mtlb_uint16`, `mtlb_uint32`, `mtlb_uint8`, `mtlb_upper`, `mtlb_var`, `mtlb_zeros`, `mu2lin`, `mutation_ga_binary`, `mutation_ga_default`, `mvcorrel`, `mvvacov`, `nancumsum`, `nand2mean`, `nanmax`, `nanmean`, `nanmeanf`, `nanmedian`, `nanmin`, `nanreglin`, `nanstdev`, `nansum`, `narsimul`, `ndgrid`, `ndims`, `nehari`, `neigh_func_csa`, `neigh_func_default`, `neigh_func_fsa`, `neigh_func_vfsa`, `neldermead_cget`, `neldermead_configure`, `neldermead_costf`, `neldermead_defaultoutput`, `neldermead_destroy`, `neldermead_function`, `neldermead_get`, `neldermead_log`, `neldermead_new`, `neldermead_restart`, `neldermead_search`, `neldermead_updatesimp`, `nextpow2`, `nfreq`, `nicholschart`, `nlev`, `nmplot_cget`, `nmplot_configure`, `nmplot_contour`, `nmplot_destroy`, `nmplot_function`, `nmplot_get`, `nmplot_historyplot`, `nmplot_log`, `nmplot_new`, `nmplot_outputcmd`, `nmplot_restart`, `nmplot_search`, `nmplot_simplexhistory`, `noisegen`, `nonreg_test_run`, `now`, `nthroot`, `null`, `num2cell`, `numderivative`, `numdiff`, `numer`, `nyquist`, `nyquistfrequencybounds`, `obs_gram`, `obscont`, `observer`, `obsv_mat`, `obsvss`, `oct2dec`, `odeoptions`, `optim_ga`, `optim_moga`, `optim_nsga`, `optim_nsga2`, `optim_sa`, `optimbase_cget`, `optimbase_checkbounds`, `optimbase_checkcostfun`, `optimbase_checkx0`, `optimbase_configure`, `optimbase_destroy`, `optimbase_function`, `optimbase_get`, `optimbase_hasbounds`, `optimbase_hasconstraints`, `optimbase_hasnlcons`, `optimbase_histget`, `optimbase_histset`, `optimbase_incriter`, `optimbase_isfeasible`, `optimbase_isinbounds`, `optimbase_isinnonlincons`, `optimbase_log`, `optimbase_logshutdown`, `optimbase_logstartup`, `optimbase_new`, `optimbase_outputcmd`, `optimbase_outstruct`, `optimbase_proj2bnds`, `optimbase_set`, `optimbase_stoplog`, `optimbase_terminate`, `optimget`, `optimplotfunccount`, `optimplotfval`, `optimplotx`, `optimset`, `optimsimplex_center`, `optimsimplex_check`, `optimsimplex_compsomefv`, `optimsimplex_computefv`, `optimsimplex_deltafv`, `optimsimplex_deltafvmax`, `optimsimplex_destroy`, `optimsimplex_dirmat`, `optimsimplex_fvmean`, `optimsimplex_fvstdev`, `optimsimplex_fvvariance`, `optimsimplex_getall`, `optimsimplex_getallfv`, `optimsimplex_getallx`, `optimsimplex_getfv`, `optimsimplex_getn`, `optimsimplex_getnbve`, `optimsimplex_getve`, `optimsimplex_getx`, `optimsimplex_gradientfv`, `optimsimplex_log`, `optimsimplex_new`, `optimsimplex_reflect`, `optimsimplex_setall`, `optimsimplex_setallfv`, `optimsimplex_setallx`, `optimsimplex_setfv`, `optimsimplex_setn`, `optimsimplex_setnbve`, `optimsimplex_setve`, `optimsimplex_setx`, `optimsimplex_shrink`, `optimsimplex_size`, `optimsimplex_sort`, `optimsimplex_xbar`, `orth`, 
`output_ga_default`, `output_moga_default`, `output_nsga2_default`, `output_nsga_default`, `p_margin`, `pack`, `pareto_filter`, `parrot`, `pbig`, `pca`, `pcg`, `pdiv`, `pen2ea`, `pencan`, `pencost`, `penlaur`, `perctl`, `perl`, `perms`, `permute`, `pertrans`, `pfactors`, `pfss`, `phasemag`, `phaseplot`, `phc`, `pinv`, `playsnd`, `plotprofile`, `plzr`, `pmodulo`, `pol2des`, `pol2str`, `polar`, `polfact`, `prbs_a`, `prettyprint`, `primes`, `princomp`, `profile`, `proj`, `projsl`, `projspec`, `psmall`, `pspect`, `qmr`, `qpsolve`, `quart`, `quaskro`, `rafiter`, `randpencil`, `range`, `rank`, `readxls`, `recompilefunction`, `recons`, `reglin`, `regress`, `remezb`, `remove_param`, `remove_profiling`, `repfreq`, `replace_Ix_by_Fx`, `repmat`, `reset_profiling`, `resize_matrix`, `returntoscilab`, `rhs2code`, `ric_desc`, `riccati`, `rmdir`, `routh_t`, `rowcomp`, `rowcompr`, `rowinout`, `rowregul`, `rowshuff`, `rref`, `sample`, `samplef`, `samwr`, `savematfile`, `savewave`, `scanf`, `sci2exp`, `sciGUI_init`, `sci_sparse`, `scicos_getvalue`, `scicos_simulate`, `scicos_workspace_init`, `scisptdemo`, `scitest`, `sdiff`, `sec`, `secd`, `sech`, `selection_ga_elitist`, `selection_ga_random`, `sensi`, `setPreferencesValue`, `set_param`, `setdiff`, `sgrid`, `show_margins`, `show_pca`, `showprofile`, `signm`, `sinc`, `sincd`, `sind`, `sinh`, `sinhm`, `sinm`, `sm2des`, `sm2ss`, `smga`, `smooth`, `solve`, `sound`, `soundsec`, `sp2adj`, `spaninter`, `spanplus`, `spantwo`, `specfact`, `speye`, `sprand`, `spzeros`, `sqroot`, `sqrtm`, `squarewave`, `squeeze`, `srfaur`, `srkf`, `ss2des`, `ss2ss`, `ss2tf`, `sskf`, `ssprint`, `ssrand`, `st_deviation`, `st_i_generic`, `st_ility`, `stabil`, `statgain`, `stdev`, `stdevf`, `steadycos`, `strange`, `strcmpi`, `struct`, `sub2ind`, `sva`, `svplot`, `sylm`, `sylv`, `sysconv`, `sysdiag`, `sysfact`, `syslin`, `syssize`, `system`, `systmat`, `tabul`, `tand`, `tanh`, `tanhm`, `tanm`, `tbx_build_blocks`, `tbx_build_cleaner`, `tbx_build_gateway`, `tbx_build_gateway_clean`, `tbx_build_gateway_loader`, `tbx_build_help`, `tbx_build_help_loader`, `tbx_build_loader`, `tbx_build_localization`, `tbx_build_macros`, `tbx_build_pal_loader`, `tbx_build_src`, `tbx_builder`, `tbx_builder_gateway`, `tbx_builder_gateway_lang`, `tbx_builder_help`, `tbx_builder_help_lang`, `tbx_builder_macros`, `tbx_builder_src`, `tbx_builder_src_lang`, `tbx_generate_pofile`, `temp_law_csa`, `temp_law_default`, `temp_law_fsa`, `temp_law_huang`, `temp_law_vfsa`, `test_clean`, `test_on_columns`, `test_run`, `test_run_level`, `testexamples`, `tf2des`, `tf2ss`, `thrownan`, `tic`, `time_id`, `toc`, `toeplitz`, `tokenpos`, `toolboxes`, `trace`, `trans`, `translatepaths`, `tree2code`, `trfmod`, `trianfml`, `trimmean`, `trisolve`, `trzeros`, `typeof`, `ui_observer`, `union`, `unique`, `unit_test_run`, `unix_g`, `unix_s`, `unix_w`, `unix_x`, `unobs`, `unpack`, `unwrap`, `variance`, `variancef`, `vec2list`, `vectorfind`, `ver`, `warnobsolete`, `wavread`, `wavwrite`, `wcenter`, `weekday`, `wfir`, `wfir_gui`, `whereami`, `who_user`, `whos`, `wiener`, `wigner`, `window`, `winlist`, `with_javasci`, `with_macros_source`, `with_modelica_compiler`, `with_tk`, `xcorr`, `xcosBlockEval`, `xcosBlockInterface`, `xcosCodeGeneration`, `xcosConfigureModelica`, `xcosPal`, `xcosPalAdd`, `xcosPalAddBlock`, `xcosPalExport`, `xcosPalGenerateAllIcons`, `xcosShowBlockWarning`, `xcosValidateBlockSet`, `xcosValidateCompareBlock`, `xcos_compile`, `xcos_debug_gui`, `xcos_run`, `xcos_simulate`, `xcov`, `xmltochm`, `xmltoformat`, `xmltohtml`, 
`xmltojar`, `xmltopdf`, `xmltops`, `xmltoweb`, `yulewalk`, `zeropen`, `zgrid`, `zpbutt`, `zpch1`, `zpch2`, `zpell`), NameBuiltin, nil}, + {Words(``, `\b`, `$`, `%F`, `%T`, `%e`, `%eps`, `%f`, `%fftw`, `%gui`, `%i`, `%inf`, `%io`, `%modalWarning`, `%nan`, `%pi`, `%s`, `%t`, `%tk`, `%toolboxes`, `%toolboxes_dir`, `%z`, `PWD`, `SCI`, `SCIHOME`, `TMPDIR`, `arnoldilib`, `assertlib`, `atomslib`, `cacsdlib`, `compatibility_functilib`, `corelib`, `data_structureslib`, `demo_toolslib`, `development_toolslib`, `differential_equationlib`, `dynamic_linklib`, `elementary_functionslib`, `enull`, `evoid`, `external_objectslib`, `fd`, `fileiolib`, `functionslib`, `genetic_algorithmslib`, `helptoolslib`, `home`, `integerlib`, `interpolationlib`, `iolib`, `jnull`, `jvoid`, `linear_algebralib`, `m2scilib`, `matiolib`, `modules_managerlib`, `neldermeadlib`, `optimbaselib`, `optimizationlib`, `optimsimplexlib`, `output_streamlib`, `overloadinglib`, `parameterslib`, `polynomialslib`, `preferenceslib`, `randliblib`, `scicos_autolib`, `scicos_utilslib`, `scinoteslib`, `signal_processinglib`, `simulated_annealinglib`, `soundlib`, `sparselib`, `special_functionslib`, `spreadsheetlib`, `statisticslib`, `stringlib`, `tclscilib`, `timelib`, `umfpacklib`, `xcoslib`), NameConstant, nil}, + {`-|==|~=|<|>|<=|>=|&&|&|~|\|\|?`, Operator, nil}, + {`\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\`, Operator, nil}, + {`[\[\](){}@.,=:;]`, Punctuation, nil}, + {`"[^"]*"`, LiteralString, nil}, + {`(?<=[\w)\].])\'+`, Operator, nil}, + {`(?|+=@:,./?-]+`, Operator, nil}, + {`[\[\]()]+`, Punctuation, nil}, + {`"`, LiteralStringDouble, Push("string-double")}, + {`'`, LiteralStringSingle, Push("string-single")}, + {`[a-z_-][\w-]*`, Name, nil}, + {`\n`, Text, nil}, + {`[;{}]`, Punctuation, Pop(1)}, + }, + "interpolation": { + {`\}`, LiteralStringInterpol, Pop(1)}, + Include("value"), + }, + "selector": { + {`[ \t]+`, Text, nil}, + {`\:`, NameDecorator, Push("pseudo-class")}, + {`\.`, NameClass, Push("class")}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`\#`, NameNamespace, Push("id")}, + {`&`, Keyword, nil}, + {`[~^*!&\[\]()<>|+=@:,./?-]`, Operator, nil}, + {`(%)([\w-]+)`, ByGroups(Operator, NameClass), nil}, + {`"`, LiteralStringDouble, Push("string-double")}, + {`'`, LiteralStringSingle, Push("string-single")}, + {`\n`, Text, nil}, + {`[;{}]`, Punctuation, Pop(1)}, + {`[\w-]+`, NameTag, nil}, + }, + "string-double": { + {`(\\.|#(?=[^\n{])|[^\n"#])+`, LiteralStringDouble, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + "string-single": { + {`(\\.|#(?=[^\n{])|[^\n'#])+`, LiteralStringSingle, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`'`, LiteralStringSingle, Pop(1)}, + }, + "string-url": { + {`(\\#|#(?=[^\n{])|[^\n#)])+`, LiteralStringOther, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`\)`, LiteralStringOther, Pop(1)}, + }, + "pseudo-class": { + {`[\w-]+`, NameDecorator, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + Default(Pop(1)), + }, + "class": { + {`[\w-]+`, NameClass, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + Default(Pop(1)), + }, + "id": { + {`[\w-]+`, NameNamespace, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + Default(Pop(1)), + }, + "for": { + {`(from|to|through)`, OperatorWord, nil}, + Include("value"), + }, + "each": { + {`in`, OperatorWord, nil}, + Include("value"), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/sieve.go 
b/vendor/github.com/alecthomas/chroma/lexers/s/sieve.go new file mode 100644 index 000000000..ff5edf93b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/sieve.go @@ -0,0 +1,37 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Sieve lexer. +var Sieve = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Sieve", + Aliases: []string{"sieve"}, + Filenames: []string{"*.siv", "*.sieve"}, + MimeTypes: []string{}, + }, + func() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`[();,{}\[\]]`, Punctuation, nil}, + {`(?i)require`, KeywordNamespace, nil}, + {`(?i)(:)(addresses|all|contains|content|create|copy|comparator|count|days|detail|domain|fcc|flags|from|handle|importance|is|localpart|length|lowerfirst|lower|matches|message|mime|options|over|percent|quotewildcard|raw|regex|specialuse|subject|text|under|upperfirst|upper|value)`, ByGroups(NameTag, NameTag), nil}, + {`(?i)(address|addflag|allof|anyof|body|discard|elsif|else|envelope|ereject|exists|false|fileinto|if|hasflag|header|keep|notify_method_capability|notify|not|redirect|reject|removeflag|setflag|size|spamtest|stop|string|true|vacation|virustest)`, NameBuiltin, nil}, + {`(?i)set`, KeywordDeclaration, nil}, + {`([0-9.]+)([kmgKMG])?`, ByGroups(LiteralNumber, LiteralNumber), nil}, + {`#.*$`, CommentSingle, nil}, + {`/\*.*\*/`, CommentMultiline, nil}, + {`"[^"]*?"`, LiteralString, nil}, + {`text:`, NameTag, Push("text")}, + }, + "text": { + {`[^.].*?\n`, LiteralString, nil}, + {`^\.`, Punctuation, Pop(1)}, + }, + } + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/smalltalk.go b/vendor/github.com/alecthomas/chroma/lexers/s/smalltalk.go new file mode 100644 index 000000000..b4143d02f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/smalltalk.go @@ -0,0 +1,103 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Smalltalk lexer. 
+var Smalltalk = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Smalltalk", + Aliases: []string{"smalltalk", "squeak", "st"}, + Filenames: []string{"*.st"}, + MimeTypes: []string{"text/x-smalltalk"}, + }, + smalltalkRules, +)) + +func smalltalkRules() Rules { + return Rules{ + "root": { + {`(<)(\w+:)(.*?)(>)`, ByGroups(Text, Keyword, Text, Text), nil}, + Include("squeak fileout"), + Include("whitespaces"), + Include("method definition"), + {`(\|)([\w\s]*)(\|)`, ByGroups(Operator, NameVariable, Operator), nil}, + Include("objects"), + {`\^|:=|_`, Operator, nil}, + {`[\]({}.;!]`, Text, nil}, + }, + "method definition": { + {`([a-zA-Z]+\w*:)(\s*)(\w+)`, ByGroups(NameFunction, Text, NameVariable), nil}, + {`^(\b[a-zA-Z]+\w*\b)(\s*)$`, ByGroups(NameFunction, Text), nil}, + {`^([-+*/\\~<>=|&!?,@%]+)(\s*)(\w+)(\s*)$`, ByGroups(NameFunction, Text, NameVariable, Text), nil}, + }, + "blockvariables": { + Include("whitespaces"), + {`(:)(\s*)(\w+)`, ByGroups(Operator, Text, NameVariable), nil}, + {`\|`, Operator, Pop(1)}, + Default(Pop(1)), + }, + "literals": { + {`'(''|[^'])*'`, LiteralString, Push("afterobject")}, + {`\$.`, LiteralStringChar, Push("afterobject")}, + {`#\(`, LiteralStringSymbol, Push("parenth")}, + {`\)`, Text, Push("afterobject")}, + {`(\d+r)?-?\d+(\.\d+)?(e-?\d+)?`, LiteralNumber, Push("afterobject")}, + }, + "_parenth_helper": { + Include("whitespaces"), + {`(\d+r)?-?\d+(\.\d+)?(e-?\d+)?`, LiteralNumber, nil}, + {`[-+*/\\~<>=|&#!?,@%\w:]+`, LiteralStringSymbol, nil}, + {`'(''|[^'])*'`, LiteralString, nil}, + {`\$.`, LiteralStringChar, nil}, + {`#*\(`, LiteralStringSymbol, Push("inner_parenth")}, + }, + "parenth": { + {`\)`, LiteralStringSymbol, Push("root", "afterobject")}, + Include("_parenth_helper"), + }, + "inner_parenth": { + {`\)`, LiteralStringSymbol, Pop(1)}, + Include("_parenth_helper"), + }, + "whitespaces": { + {`\s+`, Text, nil}, + {`"(""|[^"])*"`, Comment, nil}, + }, + "objects": { + {`\[`, Text, Push("blockvariables")}, + {`\]`, Text, Push("afterobject")}, + {`\b(self|super|true|false|nil|thisContext)\b`, NameBuiltinPseudo, Push("afterobject")}, + {`\b[A-Z]\w*(?!:)\b`, NameClass, Push("afterobject")}, + {`\b[a-z]\w*(?!:)\b`, NameVariable, Push("afterobject")}, + {`#("(""|[^"])*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)`, LiteralStringSymbol, Push("afterobject")}, + Include("literals"), + }, + "afterobject": { + {`! 
!$`, Keyword, Pop(1)}, + Include("whitespaces"), + {`\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)`, NameBuiltin, Pop(1)}, + {`\b(new\b(?!:))`, NameBuiltin, nil}, + {`:=|_`, Operator, Pop(1)}, + {`\b[a-zA-Z]+\w*:`, NameFunction, Pop(1)}, + {`\b[a-zA-Z]+\w*`, NameFunction, nil}, + {`\w+:?|[-+*/\\~<>=|&!?,@%]+`, NameFunction, Pop(1)}, + {`\.`, Punctuation, Pop(1)}, + {`;`, Punctuation, nil}, + {`[\])}]`, Text, nil}, + {`[\[({]`, Text, Pop(1)}, + }, + "squeak fileout": { + {`^"(""|[^"])*"!`, Keyword, nil}, + {`^'(''|[^'])*'!`, Keyword, nil}, + {`^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)`, ByGroups(Keyword, NameClass, Keyword, LiteralString, Keyword, Text, Keyword), nil}, + {`^(!)(\w+(?: class)?)( methodsFor: )('(?:''|[^'])*')(.*?!)`, ByGroups(Keyword, NameClass, Keyword, LiteralString, Keyword), nil}, + {`^(\w+)( subclass: )(#\w+)(\s+instanceVariableNames: )(.*?)(\s+classVariableNames: )(.*?)(\s+poolDictionaries: )(.*?)(\s+category: )(.*?)(!)`, ByGroups(NameClass, Keyword, LiteralStringSymbol, Keyword, LiteralString, Keyword, LiteralString, Keyword, LiteralString, Keyword, LiteralString, Keyword), nil}, + {`^(\w+(?: class)?)(\s+instanceVariableNames: )(.*?)(!)`, ByGroups(NameClass, Keyword, LiteralString, Keyword), nil}, + {`(!\n)(\].*)(! !)$`, ByGroups(Keyword, Text, Keyword), nil}, + {`! !$`, Keyword, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/smarty.go b/vendor/github.com/alecthomas/chroma/lexers/s/smarty.go new file mode 100644 index 000000000..566efbbef --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/smarty.go @@ -0,0 +1,44 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + . "github.com/alecthomas/chroma/lexers/circular" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Smarty lexer. +var Smarty = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Smarty", + Aliases: []string{"smarty"}, + Filenames: []string{"*.tpl"}, + MimeTypes: []string{"application/x-smarty"}, + DotAll: true, + }, + smartyRules, +)) + +func smartyRules() Rules { + return Rules{ + "root": { + {`[^{]+`, Other, nil}, + {`(\{)(\*.*?\*)(\})`, ByGroups(CommentPreproc, Comment, CommentPreproc), nil}, + {`(\{php\})(.*?)(\{/php\})`, ByGroups(CommentPreproc, Using(PHP), CommentPreproc), nil}, + {`(\{)(/?[a-zA-Z_]\w*)(\s*)`, ByGroups(CommentPreproc, NameFunction, Text), Push("smarty")}, + {`\{`, CommentPreproc, Push("smarty")}, + }, + "smarty": { + {`\s+`, Text, nil}, + {`\{`, CommentPreproc, Push()}, + {`\}`, CommentPreproc, Pop(1)}, + {`#[a-zA-Z_]\w*#`, NameVariable, nil}, + {`\$[a-zA-Z_]\w*(\.\w+)*`, NameVariable, nil}, + {`[~!%^&*()+=|\[\]:;,.<>/?@-]`, Operator, nil}, + {`(true|false|null)\b`, KeywordConstant, nil}, + {`[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + {`[a-zA-Z_]\w*`, NameAttribute, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/sml.go b/vendor/github.com/alecthomas/chroma/lexers/s/sml.go new file mode 100644 index 000000000..c795ea867 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/sml.go @@ -0,0 +1,204 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Standard ML lexer. 
+var StandardML = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Standard ML", + Aliases: []string{"sml"}, + Filenames: []string{"*.sml", "*.sig", "*.fun"}, + MimeTypes: []string{"text/x-standardml", "application/x-standardml"}, + }, + standardMLRules, +)) + +func standardMLRules() Rules { + return Rules{ + "whitespace": { + {`\s+`, Text, nil}, + {`\(\*`, CommentMultiline, Push("comment")}, + }, + "delimiters": { + {`\(|\[|\{`, Punctuation, Push("main")}, + {`\)|\]|\}`, Punctuation, Pop(1)}, + {`\b(let|if|local)\b(?!\')`, KeywordReserved, Push("main", "main")}, + {`\b(struct|sig|while)\b(?!\')`, KeywordReserved, Push("main")}, + {`\b(do|else|end|in|then)\b(?!\')`, KeywordReserved, Pop(1)}, + }, + "core": { + {`(_|\}|\{|\)|;|,|\[|\(|\]|\.\.\.)`, Punctuation, nil}, + {`#"`, LiteralStringChar, Push("char")}, + {`"`, LiteralStringDouble, Push("string")}, + {`~?0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`0wx[0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`0w\d+`, LiteralNumberInteger, nil}, + {`~?\d+\.\d+[eE]~?\d+`, LiteralNumberFloat, nil}, + {`~?\d+\.\d+`, LiteralNumberFloat, nil}, + {`~?\d+[eE]~?\d+`, LiteralNumberFloat, nil}, + {`~?\d+`, LiteralNumberInteger, nil}, + {`#\s*[1-9][0-9]*`, NameLabel, nil}, + {`#\s*([a-zA-Z][\w']*)`, NameLabel, nil}, + {"#\\s+([!%&$#+\\-/:<=>?@\\\\~`^|*]+)", NameLabel, nil}, + {`\b(datatype|abstype)\b(?!\')`, KeywordReserved, Push("dname")}, + {`(?=\b(exception)\b(?!\'))`, Text, Push("ename")}, + {`\b(functor|include|open|signature|structure)\b(?!\')`, KeywordReserved, Push("sname")}, + {`\b(type|eqtype)\b(?!\')`, KeywordReserved, Push("tname")}, + {`\'[\w\']*`, NameDecorator, nil}, + {`([a-zA-Z][\w']*)(\.)`, NameNamespace, Push("dotted")}, + {`\b(abstype|and|andalso|as|case|datatype|do|else|end|exception|fn|fun|handle|if|in|infix|infixr|let|local|nonfix|of|op|open|orelse|raise|rec|then|type|val|with|withtype|while|eqtype|functor|include|sharing|sig|signature|struct|structure|where)\b`, KeywordReserved, nil}, + {`([a-zA-Z][\w']*)`, Name, nil}, + {`\b(:|\|,=|=>|->|#|:>)\b`, KeywordReserved, nil}, + {"([!%&$#+\\-/:<=>?@\\\\~`^|*]+)", Name, nil}, + }, + "dotted": { + {`([a-zA-Z][\w']*)(\.)`, NameNamespace, nil}, + // ignoring reserved words + {`([a-zA-Z][\w']*)`, Name, Pop(1)}, + // ignoring reserved words + {"([!%&$#+\\-/:<=>?@\\\\~`^|*]+)", Name, Pop(1)}, + {`\s+`, Error, nil}, + {`\S+`, Error, nil}, + }, + "root": { + Default(Push("main")), + }, + "main": { + Include("whitespace"), + {`\b(val|and)\b(?!\')`, KeywordReserved, Push("vname")}, + {`\b(fun)\b(?!\')`, KeywordReserved, Push("#pop", "main-fun", "fname")}, + Include("delimiters"), + Include("core"), + {`\S+`, Error, nil}, + }, + "main-fun": { + Include("whitespace"), + {`\s`, Text, nil}, + {`\(\*`, CommentMultiline, Push("comment")}, + {`\b(fun|and)\b(?!\')`, KeywordReserved, Push("fname")}, + {`\b(val)\b(?!\')`, KeywordReserved, Push("#pop", "main", "vname")}, + {`\|`, Punctuation, Push("fname")}, + {`\b(case|handle)\b(?!\')`, KeywordReserved, Push("#pop", "main")}, + Include("delimiters"), + Include("core"), + {`\S+`, Error, nil}, + }, + "char": { + {`[^"\\]`, LiteralStringChar, nil}, + {`\\[\\"abtnvfr]`, LiteralStringEscape, nil}, + {`\\\^[\x40-\x5e]`, LiteralStringEscape, nil}, + {`\\[0-9]{3}`, LiteralStringEscape, nil}, + {`\\u[0-9a-fA-F]{4}`, LiteralStringEscape, nil}, + {`\\\s+\\`, LiteralStringInterpol, nil}, + {`"`, LiteralStringChar, Pop(1)}, + }, + "string": { + {`[^"\\]`, LiteralStringDouble, nil}, + {`\\[\\"abtnvfr]`, LiteralStringEscape, nil}, + {`\\\^[\x40-\x5e]`, LiteralStringEscape, 
nil}, + {`\\[0-9]{3}`, LiteralStringEscape, nil}, + {`\\u[0-9a-fA-F]{4}`, LiteralStringEscape, nil}, + {`\\\s+\\`, LiteralStringInterpol, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + "breakout": { + {`(?=\b(where|do|handle|if|sig|op|while|case|as|else|signature|andalso|struct|infixr|functor|in|structure|then|local|rec|end|fun|of|orelse|val|include|fn|with|exception|let|and|infix|sharing|datatype|type|abstype|withtype|eqtype|nonfix|raise|open)\b(?!\'))`, Text, Pop(1)}, + }, + "sname": { + Include("whitespace"), + Include("breakout"), + {`([a-zA-Z][\w']*)`, NameNamespace, nil}, + Default(Pop(1)), + }, + "fname": { + Include("whitespace"), + {`\'[\w\']*`, NameDecorator, nil}, + {`\(`, Punctuation, Push("tyvarseq")}, + {`([a-zA-Z][\w']*)`, NameFunction, Pop(1)}, + {"([!%&$#+\\-/:<=>?@\\\\~`^|*]+)", NameFunction, Pop(1)}, + Default(Pop(1)), + }, + "vname": { + Include("whitespace"), + {`\'[\w\']*`, NameDecorator, nil}, + {`\(`, Punctuation, Push("tyvarseq")}, + {"([a-zA-Z][\\w']*)(\\s*)(=(?![!%&$#+\\-/:<=>?@\\\\~`^|*]+))", ByGroups(NameVariable, Text, Punctuation), Pop(1)}, + {"([!%&$#+\\-/:<=>?@\\\\~`^|*]+)(\\s*)(=(?![!%&$#+\\-/:<=>?@\\\\~`^|*]+))", ByGroups(NameVariable, Text, Punctuation), Pop(1)}, + {`([a-zA-Z][\w']*)`, NameVariable, Pop(1)}, + {"([!%&$#+\\-/:<=>?@\\\\~`^|*]+)", NameVariable, Pop(1)}, + Default(Pop(1)), + }, + "tname": { + Include("whitespace"), + Include("breakout"), + {`\'[\w\']*`, NameDecorator, nil}, + {`\(`, Punctuation, Push("tyvarseq")}, + {"=(?![!%&$#+\\-/:<=>?@\\\\~`^|*]+)", Punctuation, Push("#pop", "typbind")}, + {`([a-zA-Z][\w']*)`, KeywordType, nil}, + {"([!%&$#+\\-/:<=>?@\\\\~`^|*]+)", KeywordType, nil}, + {`\S+`, Error, Pop(1)}, + }, + "typbind": { + Include("whitespace"), + {`\b(and)\b(?!\')`, KeywordReserved, Push("#pop", "tname")}, + Include("breakout"), + Include("core"), + {`\S+`, Error, Pop(1)}, + }, + "dname": { + Include("whitespace"), + Include("breakout"), + {`\'[\w\']*`, NameDecorator, nil}, + {`\(`, Punctuation, Push("tyvarseq")}, + {`(=)(\s*)(datatype)`, ByGroups(Punctuation, Text, KeywordReserved), Pop(1)}, + {"=(?![!%&$#+\\-/:<=>?@\\\\~`^|*]+)", Punctuation, Push("#pop", "datbind", "datcon")}, + {`([a-zA-Z][\w']*)`, KeywordType, nil}, + {"([!%&$#+\\-/:<=>?@\\\\~`^|*]+)", KeywordType, nil}, + {`\S+`, Error, Pop(1)}, + }, + "datbind": { + Include("whitespace"), + {`\b(and)\b(?!\')`, KeywordReserved, Push("#pop", "dname")}, + {`\b(withtype)\b(?!\')`, KeywordReserved, Push("#pop", "tname")}, + {`\b(of)\b(?!\')`, KeywordReserved, nil}, + {`(\|)(\s*)([a-zA-Z][\w']*)`, ByGroups(Punctuation, Text, NameClass), nil}, + {"(\\|)(\\s+)([!%&$#+\\-/:<=>?@\\\\~`^|*]+)", ByGroups(Punctuation, Text, NameClass), nil}, + Include("breakout"), + Include("core"), + {`\S+`, Error, nil}, + }, + "ename": { + Include("whitespace"), + {`(exception|and)\b(\s+)([a-zA-Z][\w']*)`, ByGroups(KeywordReserved, Text, NameClass), nil}, + {"(exception|and)\\b(\\s*)([!%&$#+\\-/:<=>?@\\\\~`^|*]+)", ByGroups(KeywordReserved, Text, NameClass), nil}, + {`\b(of)\b(?!\')`, KeywordReserved, nil}, + Include("breakout"), + Include("core"), + {`\S+`, Error, nil}, + }, + "datcon": { + Include("whitespace"), + {`([a-zA-Z][\w']*)`, NameClass, Pop(1)}, + {"([!%&$#+\\-/:<=>?@\\\\~`^|*]+)", NameClass, Pop(1)}, + {`\S+`, Error, Pop(1)}, + }, + "tyvarseq": { + {`\s`, Text, nil}, + {`\(\*`, CommentMultiline, Push("comment")}, + {`\'[\w\']*`, NameDecorator, nil}, + {`[a-zA-Z][\w']*`, Name, nil}, + {`,`, Punctuation, nil}, + {`\)`, Punctuation, Pop(1)}, + {"[!%&$#+\\-/:<=>?@\\\\~`^|*]+", Name, 
nil}, + }, + "comment": { + {`[^(*)]`, CommentMultiline, nil}, + {`\(\*`, CommentMultiline, Push()}, + {`\*\)`, CommentMultiline, Pop(1)}, + {`[(*)]`, CommentMultiline, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/snobol.go b/vendor/github.com/alecthomas/chroma/lexers/s/snobol.go new file mode 100644 index 000000000..452903467 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/snobol.go @@ -0,0 +1,52 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Snobol lexer. +var Snobol = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Snobol", + Aliases: []string{"snobol"}, + Filenames: []string{"*.snobol"}, + MimeTypes: []string{"text/x-snobol"}, + }, + snobolRules, +)) + +func snobolRules() Rules { + return Rules{ + "root": { + {`\*.*\n`, Comment, nil}, + {`[+.] `, Punctuation, Push("statement")}, + {`-.*\n`, Comment, nil}, + {`END\s*\n`, NameLabel, Push("heredoc")}, + {`[A-Za-z$][\w$]*`, NameLabel, Push("statement")}, + {`\s+`, Text, Push("statement")}, + }, + "statement": { + {`\s*\n`, Text, Pop(1)}, + {`\s+`, Text, nil}, + {`(?<=[^\w.])(LT|LE|EQ|NE|GE|GT|INTEGER|IDENT|DIFFER|LGT|SIZE|REPLACE|TRIM|DUPL|REMDR|DATE|TIME|EVAL|APPLY|OPSYN|LOAD|UNLOAD|LEN|SPAN|BREAK|ANY|NOTANY|TAB|RTAB|REM|POS|RPOS|FAIL|FENCE|ABORT|ARB|ARBNO|BAL|SUCCEED|INPUT|OUTPUT|TERMINAL)(?=[^\w.])`, NameBuiltin, nil}, + {`[A-Za-z][\w.]*`, Name, nil}, + {`\*\*|[?$.!%*/#+\-@|&\\=]`, Operator, nil}, + {`"[^"]*"`, LiteralString, nil}, + {`'[^']*'`, LiteralString, nil}, + {`[0-9]+(?=[^.EeDd])`, LiteralNumberInteger, nil}, + {`[0-9]+(\.[0-9]*)?([EDed][-+]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`:`, Punctuation, Push("goto")}, + {`[()<>,;]`, Punctuation, nil}, + }, + "goto": { + {`\s*\n`, Text, Pop(2)}, + {`\s+`, Text, nil}, + {`F|S`, Keyword, nil}, + {`(\()([A-Za-z][\w.]*)(\))`, ByGroups(Punctuation, NameLabel, Punctuation), nil}, + }, + "heredoc": { + {`.*\n`, LiteralStringHeredoc, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/solidity.go b/vendor/github.com/alecthomas/chroma/lexers/s/solidity.go new file mode 100644 index 000000000..62a3a40a1 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/solidity.go @@ -0,0 +1,118 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Solidity lexer. 
+var Solidity = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Solidity", + Aliases: []string{"sol", "solidity"}, + Filenames: []string{"*.sol"}, + MimeTypes: []string{}, + DotAll: true, + }, + solidityRules, +)) + +func solidityRules() Rules { + return Rules{ + "assembly": { + Include("comments"), + Include("numbers"), + Include("strings"), + Include("whitespace"), + {`\{`, Punctuation, Push()}, + {`\}`, Punctuation, Pop(1)}, + {`[(),]`, Punctuation, nil}, + {`:=|=:`, Operator, nil}, + {`(let)(\s*)(\w*\b)`, ByGroups(OperatorWord, Text, NameVariable), nil}, + {`(\w*\b)(\:[^=])`, ByGroups(NameLabel, Punctuation), nil}, + {`(stop|add|mul|sub|div|sdiv|mod|smod|addmod|mulmod|exp|signextend|lt|gt|slt|sgt|eq|iszero|and|or|xor|not|byte|keccak256|sha3|address|balance|origin|caller|callvalue|calldataload|calldatasize|calldatacopy|codesize|codecopy|gasprice|extcodesize|extcodecopy|blockhash|coinbase|timestamp|number|difficulty|gaslimit|pop|mload|mstore|mstore8|sload|sstore|for|switch|jump|jumpi|pc|msize|gas|jumpdest|push1|push2|push32|dup1|dup2|dup16|swap1|swap2|swap16|log0|log1|log4|create|call|callcode|return|delegatecall|suicide|returndatasize|returndatacopy|staticcall|revert|invalid)\b`, NameFunction, nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + }, + "comments": { + {`//([\w\W]*?\n)`, CommentSingle, nil}, + {`/[*][\w\W]*?[*]/`, CommentMultiline, nil}, + {`/[*][\w\W]*`, CommentMultiline, nil}, + }, + "keywords-other": { + {Words(``, `\b`, `for`, `in`, `while`, `do`, `break`, `return`, `returns`, `continue`, `if`, `else`, `try`, `catch`, `throw`, `_`, `new`, `delete`, `is`, `as`, `from`, `memory`, `storage`), Keyword, nil}, + {`assembly\b`, Keyword, Push("assembly")}, + {`(contract|interface|enum|event|struct)(\s+)([a-zA-Z_]\w*)`, ByGroups(KeywordDeclaration, Text, NameClass), nil}, + {`(function|modifier)(\s+)([a-zA-Z_]\w*)`, ByGroups(KeywordDeclaration, Text, NameFunction), nil}, + {Words(``, `\b`, `contract`, `interface`, `enum`, `event`, `constructor`, `function`, `library`, `mapping`, `modifier`, `struct`, `var`), KeywordDeclaration, nil}, + {Words(``, `\b`, `abstract`, `external`, `internal`, `private`, `public`), Keyword, nil}, + {Words(``, `\b`, `anonymous`, `constant`, `immutable`, `indexed`, `override`, `payable`, `pure`, `view`, `virtual`), Keyword, nil}, + {`(import|using)\b`, KeywordNamespace, nil}, + {`pragma (solidity|experimental)\b`, Keyword, nil}, + {Words(``, `\b`, `after`, `alias`, `apply`, `auto`, `case`, `copyof`, `default`, `define`, `final`, `implements`, `inline`, `let`, `macro`, `match`, `mutable`, `null`, `of`, `partial`, `promise`, `reference`, `relocatable`, `sealed`, `sizeof`, `static`, `supports`, `switch`, `typedef`, `typeof`, `unchecked`), KeywordReserved, nil}, + {`(true|false)\b`, KeywordConstant, nil}, + {`(wei|finney|szabo|ether)\b`, KeywordConstant, nil}, + {`(seconds|minutes|hours|days|weeks|years)\b`, KeywordConstant, nil}, + }, + "keywords-types": { + {Words(``, `\b`, `address`, `bool`, `byte`, `bytes`, `int`, `fixed`, `string`, `ufixed`, `uint`), KeywordType, nil}, + {Words(``, `\b`, `int8`, `int16`, `int24`, `int32`, `int40`, `int48`, `int56`, `int64`, `int72`, `int80`, `int88`, `int96`, `int104`, `int112`, `int120`, `int128`, `int136`, `int144`, `int152`, `int160`, `int168`, `int176`, `int184`, `int192`, `int200`, `int208`, `int216`, `int224`, `int232`, `int240`, `int248`, `int256`), KeywordType, nil}, + {Words(``, `\b`, `uint8`, `uint16`, `uint24`, `uint32`, `uint40`, `uint48`, `uint56`, `uint64`, `uint72`, `uint80`, `uint88`, `uint96`, `uint104`, 
`uint112`, `uint120`, `uint128`, `uint136`, `uint144`, `uint152`, `uint160`, `uint168`, `uint176`, `uint184`, `uint192`, `uint200`, `uint208`, `uint216`, `uint224`, `uint232`, `uint240`, `uint248`, `uint256`), KeywordType, nil}, + {Words(``, `\b`, `bytes1`, `bytes2`, `bytes3`, `bytes4`, `bytes5`, `bytes6`, `bytes7`, `bytes8`, `bytes9`, `bytes10`, `bytes11`, `bytes12`, `bytes13`, `bytes14`, `bytes15`, `bytes16`, `bytes17`, `bytes18`, `bytes19`, `bytes20`, `bytes21`, `bytes22`, `bytes23`, `bytes24`, `bytes25`, `bytes26`, `bytes27`, `bytes28`, `bytes29`, `bytes30`, `bytes31`, `bytes32`), KeywordType, nil}, + {Words(``, `\b`, `fixed8x0`, `fixed8x1`, `fixed8x2`, `fixed8x3`, `fixed8x4`, `fixed8x5`, `fixed8x6`, `fixed8x7`, `fixed8x8`, `fixed8x9`, `fixed8x10`, `fixed8x11`, `fixed8x12`, `fixed8x13`, `fixed8x14`, `fixed8x15`, `fixed8x16`, `fixed8x17`, `fixed8x18`, `fixed8x19`, `fixed8x20`, `fixed8x21`, `fixed8x22`, `fixed8x23`, `fixed8x24`, `fixed8x25`, `fixed8x26`, `fixed8x27`, `fixed8x28`, `fixed8x29`, `fixed8x30`, `fixed8x31`, `fixed8x32`, `fixed8x33`, `fixed8x34`, `fixed8x35`, `fixed8x36`, `fixed8x37`, `fixed8x38`, `fixed8x39`, `fixed8x40`, `fixed8x41`, `fixed8x42`, `fixed8x43`, `fixed8x44`, `fixed8x45`, `fixed8x46`, `fixed8x47`, `fixed8x48`, `fixed8x49`, `fixed8x50`, `fixed8x51`, `fixed8x52`, `fixed8x53`, `fixed8x54`, `fixed8x55`, `fixed8x56`, `fixed8x57`, `fixed8x58`, `fixed8x59`, `fixed8x60`, `fixed8x61`, `fixed8x62`, `fixed8x63`, `fixed8x64`, `fixed8x65`, `fixed8x66`, `fixed8x67`, `fixed8x68`, `fixed8x69`, `fixed8x70`, `fixed8x71`, `fixed8x72`, `fixed8x73`, `fixed8x74`, `fixed8x75`, `fixed8x76`, `fixed8x77`, `fixed8x78`, `fixed8x79`, `fixed8x80`, `fixed16x0`, `fixed16x1`, `fixed16x2`, `fixed16x3`, `fixed16x4`, `fixed16x5`, `fixed16x6`, `fixed16x7`, `fixed16x8`, `fixed16x9`, `fixed16x10`, `fixed16x11`, `fixed16x12`, `fixed16x13`, `fixed16x14`, `fixed16x15`, `fixed16x16`, `fixed16x17`, `fixed16x18`, `fixed16x19`, `fixed16x20`, `fixed16x21`, `fixed16x22`, `fixed16x23`, `fixed16x24`, `fixed16x25`, `fixed16x26`, `fixed16x27`, `fixed16x28`, `fixed16x29`, `fixed16x30`, `fixed16x31`, `fixed16x32`, `fixed16x33`, `fixed16x34`, `fixed16x35`, `fixed16x36`, `fixed16x37`, `fixed16x38`, `fixed16x39`, `fixed16x40`, `fixed16x41`, `fixed16x42`, `fixed16x43`, `fixed16x44`, `fixed16x45`, `fixed16x46`, `fixed16x47`, `fixed16x48`, `fixed16x49`, `fixed16x50`, `fixed16x51`, `fixed16x52`, `fixed16x53`, `fixed16x54`, `fixed16x55`, `fixed16x56`, `fixed16x57`, `fixed16x58`, `fixed16x59`, `fixed16x60`, `fixed16x61`, `fixed16x62`, `fixed16x63`, `fixed16x64`, `fixed16x65`, `fixed16x66`, `fixed16x67`, `fixed16x68`, `fixed16x69`, `fixed16x70`, `fixed16x71`, `fixed16x72`, `fixed16x73`, `fixed16x74`, `fixed16x75`, `fixed16x76`, `fixed16x77`, `fixed16x78`, `fixed16x79`, `fixed16x80`, `fixed24x0`, `fixed24x1`, `fixed24x2`, `fixed24x3`, `fixed24x4`, `fixed24x5`, `fixed24x6`, `fixed24x7`, `fixed24x8`, `fixed24x9`, `fixed24x10`, `fixed24x11`, `fixed24x12`, `fixed24x13`, `fixed24x14`, `fixed24x15`, `fixed24x16`, `fixed24x17`, `fixed24x18`, `fixed24x19`, `fixed24x20`, `fixed24x21`, `fixed24x22`, `fixed24x23`, `fixed24x24`, `fixed24x25`, `fixed24x26`, `fixed24x27`, `fixed24x28`, `fixed24x29`, `fixed24x30`, `fixed24x31`, `fixed24x32`, `fixed24x33`, `fixed24x34`, `fixed24x35`, `fixed24x36`, `fixed24x37`, `fixed24x38`, `fixed24x39`, `fixed24x40`, `fixed24x41`, `fixed24x42`, `fixed24x43`, `fixed24x44`, `fixed24x45`, `fixed24x46`, `fixed24x47`, `fixed24x48`, `fixed24x49`, `fixed24x50`, `fixed24x51`, `fixed24x52`, `fixed24x53`, 
`fixed24x54`, `fixed24x55`, `fixed24x56`, `fixed24x57`, `fixed24x58`, `fixed24x59`, `fixed24x60`, `fixed24x61`, `fixed24x62`, `fixed24x63`, `fixed24x64`, `fixed24x65`, `fixed24x66`, `fixed24x67`, `fixed24x68`, `fixed24x69`, `fixed24x70`, `fixed24x71`, `fixed24x72`, `fixed24x73`, `fixed24x74`, `fixed24x75`, `fixed24x76`, `fixed24x77`, `fixed24x78`, `fixed24x79`, `fixed24x80`, `fixed32x0`, `fixed32x1`, `fixed32x2`, `fixed32x3`, `fixed32x4`, `fixed32x5`, `fixed32x6`, `fixed32x7`, `fixed32x8`, `fixed32x9`, `fixed32x10`, `fixed32x11`, `fixed32x12`, `fixed32x13`, `fixed32x14`, `fixed32x15`, `fixed32x16`, `fixed32x17`, `fixed32x18`, `fixed32x19`, `fixed32x20`, `fixed32x21`, `fixed32x22`, `fixed32x23`, `fixed32x24`, `fixed32x25`, `fixed32x26`, `fixed32x27`, `fixed32x28`, `fixed32x29`, `fixed32x30`, `fixed32x31`, `fixed32x32`, `fixed32x33`, `fixed32x34`, `fixed32x35`, `fixed32x36`, `fixed32x37`, `fixed32x38`, `fixed32x39`, `fixed32x40`, `fixed32x41`, `fixed32x42`, `fixed32x43`, `fixed32x44`, `fixed32x45`, `fixed32x46`, `fixed32x47`, `fixed32x48`, `fixed32x49`, `fixed32x50`, `fixed32x51`, `fixed32x52`, `fixed32x53`, `fixed32x54`, `fixed32x55`, `fixed32x56`, `fixed32x57`, `fixed32x58`, `fixed32x59`, `fixed32x60`, `fixed32x61`, `fixed32x62`, `fixed32x63`, `fixed32x64`, `fixed32x65`, `fixed32x66`, `fixed32x67`, `fixed32x68`, `fixed32x69`, `fixed32x70`, `fixed32x71`, `fixed32x72`, `fixed32x73`, `fixed32x74`, `fixed32x75`, `fixed32x76`, `fixed32x77`, `fixed32x78`, `fixed32x79`, `fixed32x80`, `fixed40x0`, `fixed40x1`, `fixed40x2`, `fixed40x3`, `fixed40x4`, `fixed40x5`, `fixed40x6`, `fixed40x7`, `fixed40x8`, `fixed40x9`, `fixed40x10`, `fixed40x11`, `fixed40x12`, `fixed40x13`, `fixed40x14`, `fixed40x15`, `fixed40x16`, `fixed40x17`, `fixed40x18`, `fixed40x19`, `fixed40x20`, `fixed40x21`, `fixed40x22`, `fixed40x23`, `fixed40x24`, `fixed40x25`, `fixed40x26`, `fixed40x27`, `fixed40x28`, `fixed40x29`, `fixed40x30`, `fixed40x31`, `fixed40x32`, `fixed40x33`, `fixed40x34`, `fixed40x35`, `fixed40x36`, `fixed40x37`, `fixed40x38`, `fixed40x39`, `fixed40x40`, `fixed40x41`, `fixed40x42`, `fixed40x43`, `fixed40x44`, `fixed40x45`, `fixed40x46`, `fixed40x47`, `fixed40x48`, `fixed40x49`, `fixed40x50`, `fixed40x51`, `fixed40x52`, `fixed40x53`, `fixed40x54`, `fixed40x55`, `fixed40x56`, `fixed40x57`, `fixed40x58`, `fixed40x59`, `fixed40x60`, `fixed40x61`, `fixed40x62`, `fixed40x63`, `fixed40x64`, `fixed40x65`, `fixed40x66`, `fixed40x67`, `fixed40x68`, `fixed40x69`, `fixed40x70`, `fixed40x71`, `fixed40x72`, `fixed40x73`, `fixed40x74`, `fixed40x75`, `fixed40x76`, `fixed40x77`, `fixed40x78`, `fixed40x79`, `fixed40x80`, `fixed48x0`, `fixed48x1`, `fixed48x2`, `fixed48x3`, `fixed48x4`, `fixed48x5`, `fixed48x6`, `fixed48x7`, `fixed48x8`, `fixed48x9`, `fixed48x10`, `fixed48x11`, `fixed48x12`, `fixed48x13`, `fixed48x14`, `fixed48x15`, `fixed48x16`, `fixed48x17`, `fixed48x18`, `fixed48x19`, `fixed48x20`, `fixed48x21`, `fixed48x22`, `fixed48x23`, `fixed48x24`, `fixed48x25`, `fixed48x26`, `fixed48x27`, `fixed48x28`, `fixed48x29`, `fixed48x30`, `fixed48x31`, `fixed48x32`, `fixed48x33`, `fixed48x34`, `fixed48x35`, `fixed48x36`, `fixed48x37`, `fixed48x38`, `fixed48x39`, `fixed48x40`, `fixed48x41`, `fixed48x42`, `fixed48x43`, `fixed48x44`, `fixed48x45`, `fixed48x46`, `fixed48x47`, `fixed48x48`, `fixed48x49`, `fixed48x50`, `fixed48x51`, `fixed48x52`, `fixed48x53`, `fixed48x54`, `fixed48x55`, `fixed48x56`, `fixed48x57`, `fixed48x58`, `fixed48x59`, `fixed48x60`, `fixed48x61`, `fixed48x62`, `fixed48x63`, `fixed48x64`, `fixed48x65`, `fixed48x66`, 
`fixed48x67`, `fixed48x68`, `fixed48x69`, `fixed48x70`, `fixed48x71`, `fixed48x72`, `fixed48x73`, `fixed48x74`, `fixed48x75`, `fixed48x76`, `fixed48x77`, `fixed48x78`, `fixed48x79`, `fixed48x80`, `fixed56x0`, `fixed56x1`, `fixed56x2`, `fixed56x3`, `fixed56x4`, `fixed56x5`, `fixed56x6`, `fixed56x7`, `fixed56x8`, `fixed56x9`, `fixed56x10`, `fixed56x11`, `fixed56x12`, `fixed56x13`, `fixed56x14`, `fixed56x15`, `fixed56x16`, `fixed56x17`, `fixed56x18`, `fixed56x19`, `fixed56x20`, `fixed56x21`, `fixed56x22`, `fixed56x23`, `fixed56x24`, `fixed56x25`, `fixed56x26`, `fixed56x27`, `fixed56x28`, `fixed56x29`, `fixed56x30`, `fixed56x31`, `fixed56x32`, `fixed56x33`, `fixed56x34`, `fixed56x35`, `fixed56x36`, `fixed56x37`, `fixed56x38`, `fixed56x39`, `fixed56x40`, `fixed56x41`, `fixed56x42`, `fixed56x43`, `fixed56x44`, `fixed56x45`, `fixed56x46`, `fixed56x47`, `fixed56x48`, `fixed56x49`, `fixed56x50`, `fixed56x51`, `fixed56x52`, `fixed56x53`, `fixed56x54`, `fixed56x55`, `fixed56x56`, `fixed56x57`, `fixed56x58`, `fixed56x59`, `fixed56x60`, `fixed56x61`, `fixed56x62`, `fixed56x63`, `fixed56x64`, `fixed56x65`, `fixed56x66`, `fixed56x67`, `fixed56x68`, `fixed56x69`, `fixed56x70`, `fixed56x71`, `fixed56x72`, `fixed56x73`, `fixed56x74`, `fixed56x75`, `fixed56x76`, `fixed56x77`, `fixed56x78`, `fixed56x79`, `fixed56x80`, `fixed64x0`, `fixed64x1`, `fixed64x2`, `fixed64x3`, `fixed64x4`, `fixed64x5`, `fixed64x6`, `fixed64x7`, `fixed64x8`, `fixed64x9`, `fixed64x10`, `fixed64x11`, `fixed64x12`, `fixed64x13`, `fixed64x14`, `fixed64x15`, `fixed64x16`, `fixed64x17`, `fixed64x18`, `fixed64x19`, `fixed64x20`, `fixed64x21`, `fixed64x22`, `fixed64x23`, `fixed64x24`, `fixed64x25`, `fixed64x26`, `fixed64x27`, `fixed64x28`, `fixed64x29`, `fixed64x30`, `fixed64x31`, `fixed64x32`, `fixed64x33`, `fixed64x34`, `fixed64x35`, `fixed64x36`, `fixed64x37`, `fixed64x38`, `fixed64x39`, `fixed64x40`, `fixed64x41`, `fixed64x42`, `fixed64x43`, `fixed64x44`, `fixed64x45`, `fixed64x46`, `fixed64x47`, `fixed64x48`, `fixed64x49`, `fixed64x50`, `fixed64x51`, `fixed64x52`, `fixed64x53`, `fixed64x54`, `fixed64x55`, `fixed64x56`, `fixed64x57`, `fixed64x58`, `fixed64x59`, `fixed64x60`, `fixed64x61`, `fixed64x62`, `fixed64x63`, `fixed64x64`, `fixed64x65`, `fixed64x66`, `fixed64x67`, `fixed64x68`, `fixed64x69`, `fixed64x70`, `fixed64x71`, `fixed64x72`, `fixed64x73`, `fixed64x74`, `fixed64x75`, `fixed64x76`, `fixed64x77`, `fixed64x78`, `fixed64x79`, `fixed64x80`, `fixed72x0`, `fixed72x1`, `fixed72x2`, `fixed72x3`, `fixed72x4`, `fixed72x5`, `fixed72x6`, `fixed72x7`, `fixed72x8`, `fixed72x9`, `fixed72x10`, `fixed72x11`, `fixed72x12`, `fixed72x13`, `fixed72x14`, `fixed72x15`, `fixed72x16`, `fixed72x17`, `fixed72x18`, `fixed72x19`, `fixed72x20`, `fixed72x21`, `fixed72x22`, `fixed72x23`, `fixed72x24`, `fixed72x25`, `fixed72x26`, `fixed72x27`, `fixed72x28`, `fixed72x29`, `fixed72x30`, `fixed72x31`, `fixed72x32`, `fixed72x33`, `fixed72x34`, `fixed72x35`, `fixed72x36`, `fixed72x37`, `fixed72x38`, `fixed72x39`, `fixed72x40`, `fixed72x41`, `fixed72x42`, `fixed72x43`, `fixed72x44`, `fixed72x45`, `fixed72x46`, `fixed72x47`, `fixed72x48`, `fixed72x49`, `fixed72x50`, `fixed72x51`, `fixed72x52`, `fixed72x53`, `fixed72x54`, `fixed72x55`, `fixed72x56`, `fixed72x57`, `fixed72x58`, `fixed72x59`, `fixed72x60`, `fixed72x61`, `fixed72x62`, `fixed72x63`, `fixed72x64`, `fixed72x65`, `fixed72x66`, `fixed72x67`, `fixed72x68`, `fixed72x69`, `fixed72x70`, `fixed72x71`, `fixed72x72`, `fixed72x73`, `fixed72x74`, `fixed72x75`, `fixed72x76`, `fixed72x77`, `fixed72x78`, `fixed72x79`, 
`fixed72x80`, `fixed80x0`, `fixed80x1`, `fixed80x2`, `fixed80x3`, `fixed80x4`, `fixed80x5`, `fixed80x6`, `fixed80x7`, `fixed80x8`, `fixed80x9`, `fixed80x10`, `fixed80x11`, `fixed80x12`, `fixed80x13`, `fixed80x14`, `fixed80x15`, `fixed80x16`, `fixed80x17`, `fixed80x18`, `fixed80x19`, `fixed80x20`, `fixed80x21`, `fixed80x22`, `fixed80x23`, `fixed80x24`, `fixed80x25`, `fixed80x26`, `fixed80x27`, `fixed80x28`, `fixed80x29`, `fixed80x30`, `fixed80x31`, `fixed80x32`, `fixed80x33`, `fixed80x34`, `fixed80x35`, `fixed80x36`, `fixed80x37`, `fixed80x38`, `fixed80x39`, `fixed80x40`, `fixed80x41`, `fixed80x42`, `fixed80x43`, `fixed80x44`, `fixed80x45`, `fixed80x46`, `fixed80x47`, `fixed80x48`, `fixed80x49`, `fixed80x50`, `fixed80x51`, `fixed80x52`, `fixed80x53`, `fixed80x54`, `fixed80x55`, `fixed80x56`, `fixed80x57`, `fixed80x58`, `fixed80x59`, `fixed80x60`, `fixed80x61`, `fixed80x62`, `fixed80x63`, `fixed80x64`, `fixed80x65`, `fixed80x66`, `fixed80x67`, `fixed80x68`, `fixed80x69`, `fixed80x70`, `fixed80x71`, `fixed80x72`, `fixed80x73`, `fixed80x74`, `fixed80x75`, `fixed80x76`, `fixed80x77`, `fixed80x78`, `fixed80x79`, `fixed80x80`, `fixed88x0`, `fixed88x1`, `fixed88x2`, `fixed88x3`, `fixed88x4`, `fixed88x5`, `fixed88x6`, `fixed88x7`, `fixed88x8`, `fixed88x9`, `fixed88x10`, `fixed88x11`, `fixed88x12`, `fixed88x13`, `fixed88x14`, `fixed88x15`, `fixed88x16`, `fixed88x17`, `fixed88x18`, `fixed88x19`, `fixed88x20`, `fixed88x21`, `fixed88x22`, `fixed88x23`, `fixed88x24`, `fixed88x25`, `fixed88x26`, `fixed88x27`, `fixed88x28`, `fixed88x29`, `fixed88x30`, `fixed88x31`, `fixed88x32`, `fixed88x33`, `fixed88x34`, `fixed88x35`, `fixed88x36`, `fixed88x37`, `fixed88x38`, `fixed88x39`, `fixed88x40`, `fixed88x41`, `fixed88x42`, `fixed88x43`, `fixed88x44`, `fixed88x45`, `fixed88x46`, `fixed88x47`, `fixed88x48`, `fixed88x49`, `fixed88x50`, `fixed88x51`, `fixed88x52`, `fixed88x53`, `fixed88x54`, `fixed88x55`, `fixed88x56`, `fixed88x57`, `fixed88x58`, `fixed88x59`, `fixed88x60`, `fixed88x61`, `fixed88x62`, `fixed88x63`, `fixed88x64`, `fixed88x65`, `fixed88x66`, `fixed88x67`, `fixed88x68`, `fixed88x69`, `fixed88x70`, `fixed88x71`, `fixed88x72`, `fixed88x73`, `fixed88x74`, `fixed88x75`, `fixed88x76`, `fixed88x77`, `fixed88x78`, `fixed88x79`, `fixed88x80`, `fixed96x0`, `fixed96x1`, `fixed96x2`, `fixed96x3`, `fixed96x4`, `fixed96x5`, `fixed96x6`, `fixed96x7`, `fixed96x8`, `fixed96x9`, `fixed96x10`, `fixed96x11`, `fixed96x12`, `fixed96x13`, `fixed96x14`, `fixed96x15`, `fixed96x16`, `fixed96x17`, `fixed96x18`, `fixed96x19`, `fixed96x20`, `fixed96x21`, `fixed96x22`, `fixed96x23`, `fixed96x24`, `fixed96x25`, `fixed96x26`, `fixed96x27`, `fixed96x28`, `fixed96x29`, `fixed96x30`, `fixed96x31`, `fixed96x32`, `fixed96x33`, `fixed96x34`, `fixed96x35`, `fixed96x36`, `fixed96x37`, `fixed96x38`, `fixed96x39`, `fixed96x40`, `fixed96x41`, `fixed96x42`, `fixed96x43`, `fixed96x44`, `fixed96x45`, `fixed96x46`, `fixed96x47`, `fixed96x48`, `fixed96x49`, `fixed96x50`, `fixed96x51`, `fixed96x52`, `fixed96x53`, `fixed96x54`, `fixed96x55`, `fixed96x56`, `fixed96x57`, `fixed96x58`, `fixed96x59`, `fixed96x60`, `fixed96x61`, `fixed96x62`, `fixed96x63`, `fixed96x64`, `fixed96x65`, `fixed96x66`, `fixed96x67`, `fixed96x68`, `fixed96x69`, `fixed96x70`, `fixed96x71`, `fixed96x72`, `fixed96x73`, `fixed96x74`, `fixed96x75`, `fixed96x76`, `fixed96x77`, `fixed96x78`, `fixed96x79`, `fixed96x80`, `fixed104x0`, `fixed104x1`, `fixed104x2`, `fixed104x3`, `fixed104x4`, `fixed104x5`, `fixed104x6`, `fixed104x7`, `fixed104x8`, `fixed104x9`, `fixed104x10`, 
`fixed104x11`, `fixed104x12`, `fixed104x13`, `fixed104x14`, `fixed104x15`, `fixed104x16`, `fixed104x17`, `fixed104x18`, `fixed104x19`, `fixed104x20`, `fixed104x21`, `fixed104x22`, `fixed104x23`, `fixed104x24`, `fixed104x25`, `fixed104x26`, `fixed104x27`, `fixed104x28`, `fixed104x29`, `fixed104x30`, `fixed104x31`, `fixed104x32`, `fixed104x33`, `fixed104x34`, `fixed104x35`, `fixed104x36`, `fixed104x37`, `fixed104x38`, `fixed104x39`, `fixed104x40`, `fixed104x41`, `fixed104x42`, `fixed104x43`, `fixed104x44`, `fixed104x45`, `fixed104x46`, `fixed104x47`, `fixed104x48`, `fixed104x49`, `fixed104x50`, `fixed104x51`, `fixed104x52`, `fixed104x53`, `fixed104x54`, `fixed104x55`, `fixed104x56`, `fixed104x57`, `fixed104x58`, `fixed104x59`, `fixed104x60`, `fixed104x61`, `fixed104x62`, `fixed104x63`, `fixed104x64`, `fixed104x65`, `fixed104x66`, `fixed104x67`, `fixed104x68`, `fixed104x69`, `fixed104x70`, `fixed104x71`, `fixed104x72`, `fixed104x73`, `fixed104x74`, `fixed104x75`, `fixed104x76`, `fixed104x77`, `fixed104x78`, `fixed104x79`, `fixed104x80`, `fixed112x0`, `fixed112x1`, `fixed112x2`, `fixed112x3`, `fixed112x4`, `fixed112x5`, `fixed112x6`, `fixed112x7`, `fixed112x8`, `fixed112x9`, `fixed112x10`, `fixed112x11`, `fixed112x12`, `fixed112x13`, `fixed112x14`, `fixed112x15`, `fixed112x16`, `fixed112x17`, `fixed112x18`, `fixed112x19`, `fixed112x20`, `fixed112x21`, `fixed112x22`, `fixed112x23`, `fixed112x24`, `fixed112x25`, `fixed112x26`, `fixed112x27`, `fixed112x28`, `fixed112x29`, `fixed112x30`, `fixed112x31`, `fixed112x32`, `fixed112x33`, `fixed112x34`, `fixed112x35`, `fixed112x36`, `fixed112x37`, `fixed112x38`, `fixed112x39`, `fixed112x40`, `fixed112x41`, `fixed112x42`, `fixed112x43`, `fixed112x44`, `fixed112x45`, `fixed112x46`, `fixed112x47`, `fixed112x48`, `fixed112x49`, `fixed112x50`, `fixed112x51`, `fixed112x52`, `fixed112x53`, `fixed112x54`, `fixed112x55`, `fixed112x56`, `fixed112x57`, `fixed112x58`, `fixed112x59`, `fixed112x60`, `fixed112x61`, `fixed112x62`, `fixed112x63`, `fixed112x64`, `fixed112x65`, `fixed112x66`, `fixed112x67`, `fixed112x68`, `fixed112x69`, `fixed112x70`, `fixed112x71`, `fixed112x72`, `fixed112x73`, `fixed112x74`, `fixed112x75`, `fixed112x76`, `fixed112x77`, `fixed112x78`, `fixed112x79`, `fixed112x80`, `fixed120x0`, `fixed120x1`, `fixed120x2`, `fixed120x3`, `fixed120x4`, `fixed120x5`, `fixed120x6`, `fixed120x7`, `fixed120x8`, `fixed120x9`, `fixed120x10`, `fixed120x11`, `fixed120x12`, `fixed120x13`, `fixed120x14`, `fixed120x15`, `fixed120x16`, `fixed120x17`, `fixed120x18`, `fixed120x19`, `fixed120x20`, `fixed120x21`, `fixed120x22`, `fixed120x23`, `fixed120x24`, `fixed120x25`, `fixed120x26`, `fixed120x27`, `fixed120x28`, `fixed120x29`, `fixed120x30`, `fixed120x31`, `fixed120x32`, `fixed120x33`, `fixed120x34`, `fixed120x35`, `fixed120x36`, `fixed120x37`, `fixed120x38`, `fixed120x39`, `fixed120x40`, `fixed120x41`, `fixed120x42`, `fixed120x43`, `fixed120x44`, `fixed120x45`, `fixed120x46`, `fixed120x47`, `fixed120x48`, `fixed120x49`, `fixed120x50`, `fixed120x51`, `fixed120x52`, `fixed120x53`, `fixed120x54`, `fixed120x55`, `fixed120x56`, `fixed120x57`, `fixed120x58`, `fixed120x59`, `fixed120x60`, `fixed120x61`, `fixed120x62`, `fixed120x63`, `fixed120x64`, `fixed120x65`, `fixed120x66`, `fixed120x67`, `fixed120x68`, `fixed120x69`, `fixed120x70`, `fixed120x71`, `fixed120x72`, `fixed120x73`, `fixed120x74`, `fixed120x75`, `fixed120x76`, `fixed120x77`, `fixed120x78`, `fixed120x79`, `fixed120x80`, `fixed128x0`, `fixed128x1`, `fixed128x2`, `fixed128x3`, `fixed128x4`, `fixed128x5`, 
`fixed128x6`, `fixed128x7`, `fixed128x8`, `fixed128x9`, `fixed128x10`, `fixed128x11`, `fixed128x12`, `fixed128x13`, `fixed128x14`, `fixed128x15`, `fixed128x16`, `fixed128x17`, `fixed128x18`, `fixed128x19`, `fixed128x20`, `fixed128x21`, `fixed128x22`, `fixed128x23`, `fixed128x24`, `fixed128x25`, `fixed128x26`, `fixed128x27`, `fixed128x28`, `fixed128x29`, `fixed128x30`, `fixed128x31`, `fixed128x32`, `fixed128x33`, `fixed128x34`, `fixed128x35`, `fixed128x36`, `fixed128x37`, `fixed128x38`, `fixed128x39`, `fixed128x40`, `fixed128x41`, `fixed128x42`, `fixed128x43`, `fixed128x44`, `fixed128x45`, `fixed128x46`, `fixed128x47`, `fixed128x48`, `fixed128x49`, `fixed128x50`, `fixed128x51`, `fixed128x52`, `fixed128x53`, `fixed128x54`, `fixed128x55`, `fixed128x56`, `fixed128x57`, `fixed128x58`, `fixed128x59`, `fixed128x60`, `fixed128x61`, `fixed128x62`, `fixed128x63`, `fixed128x64`, `fixed128x65`, `fixed128x66`, `fixed128x67`, `fixed128x68`, `fixed128x69`, `fixed128x70`, `fixed128x71`, `fixed128x72`, `fixed128x73`, `fixed128x74`, `fixed128x75`, `fixed128x76`, `fixed128x77`, `fixed128x78`, `fixed128x79`, `fixed128x80`, `fixed136x0`, `fixed136x1`, `fixed136x2`, `fixed136x3`, `fixed136x4`, `fixed136x5`, `fixed136x6`, `fixed136x7`, `fixed136x8`, `fixed136x9`, `fixed136x10`, `fixed136x11`, `fixed136x12`, `fixed136x13`, `fixed136x14`, `fixed136x15`, `fixed136x16`, `fixed136x17`, `fixed136x18`, `fixed136x19`, `fixed136x20`, `fixed136x21`, `fixed136x22`, `fixed136x23`, `fixed136x24`, `fixed136x25`, `fixed136x26`, `fixed136x27`, `fixed136x28`, `fixed136x29`, `fixed136x30`, `fixed136x31`, `fixed136x32`, `fixed136x33`, `fixed136x34`, `fixed136x35`, `fixed136x36`, `fixed136x37`, `fixed136x38`, `fixed136x39`, `fixed136x40`, `fixed136x41`, `fixed136x42`, `fixed136x43`, `fixed136x44`, `fixed136x45`, `fixed136x46`, `fixed136x47`, `fixed136x48`, `fixed136x49`, `fixed136x50`, `fixed136x51`, `fixed136x52`, `fixed136x53`, `fixed136x54`, `fixed136x55`, `fixed136x56`, `fixed136x57`, `fixed136x58`, `fixed136x59`, `fixed136x60`, `fixed136x61`, `fixed136x62`, `fixed136x63`, `fixed136x64`, `fixed136x65`, `fixed136x66`, `fixed136x67`, `fixed136x68`, `fixed136x69`, `fixed136x70`, `fixed136x71`, `fixed136x72`, `fixed136x73`, `fixed136x74`, `fixed136x75`, `fixed136x76`, `fixed136x77`, `fixed136x78`, `fixed136x79`, `fixed136x80`, `fixed144x0`, `fixed144x1`, `fixed144x2`, `fixed144x3`, `fixed144x4`, `fixed144x5`, `fixed144x6`, `fixed144x7`, `fixed144x8`, `fixed144x9`, `fixed144x10`, `fixed144x11`, `fixed144x12`, `fixed144x13`, `fixed144x14`, `fixed144x15`, `fixed144x16`, `fixed144x17`, `fixed144x18`, `fixed144x19`, `fixed144x20`, `fixed144x21`, `fixed144x22`, `fixed144x23`, `fixed144x24`, `fixed144x25`, `fixed144x26`, `fixed144x27`, `fixed144x28`, `fixed144x29`, `fixed144x30`, `fixed144x31`, `fixed144x32`, `fixed144x33`, `fixed144x34`, `fixed144x35`, `fixed144x36`, `fixed144x37`, `fixed144x38`, `fixed144x39`, `fixed144x40`, `fixed144x41`, `fixed144x42`, `fixed144x43`, `fixed144x44`, `fixed144x45`, `fixed144x46`, `fixed144x47`, `fixed144x48`, `fixed144x49`, `fixed144x50`, `fixed144x51`, `fixed144x52`, `fixed144x53`, `fixed144x54`, `fixed144x55`, `fixed144x56`, `fixed144x57`, `fixed144x58`, `fixed144x59`, `fixed144x60`, `fixed144x61`, `fixed144x62`, `fixed144x63`, `fixed144x64`, `fixed144x65`, `fixed144x66`, `fixed144x67`, `fixed144x68`, `fixed144x69`, `fixed144x70`, `fixed144x71`, `fixed144x72`, `fixed144x73`, `fixed144x74`, `fixed144x75`, `fixed144x76`, `fixed144x77`, `fixed144x78`, `fixed144x79`, `fixed144x80`, `fixed152x0`, 
`fixed152x1`, `fixed152x2`, `fixed152x3`, `fixed152x4`, `fixed152x5`, `fixed152x6`, `fixed152x7`, `fixed152x8`, `fixed152x9`, `fixed152x10`, `fixed152x11`, `fixed152x12`, `fixed152x13`, `fixed152x14`, `fixed152x15`, `fixed152x16`, `fixed152x17`, `fixed152x18`, `fixed152x19`, `fixed152x20`, `fixed152x21`, `fixed152x22`, `fixed152x23`, `fixed152x24`, `fixed152x25`, `fixed152x26`, `fixed152x27`, `fixed152x28`, `fixed152x29`, `fixed152x30`, `fixed152x31`, `fixed152x32`, `fixed152x33`, `fixed152x34`, `fixed152x35`, `fixed152x36`, `fixed152x37`, `fixed152x38`, `fixed152x39`, `fixed152x40`, `fixed152x41`, `fixed152x42`, `fixed152x43`, `fixed152x44`, `fixed152x45`, `fixed152x46`, `fixed152x47`, `fixed152x48`, `fixed152x49`, `fixed152x50`, `fixed152x51`, `fixed152x52`, `fixed152x53`, `fixed152x54`, `fixed152x55`, `fixed152x56`, `fixed152x57`, `fixed152x58`, `fixed152x59`, `fixed152x60`, `fixed152x61`, `fixed152x62`, `fixed152x63`, `fixed152x64`, `fixed152x65`, `fixed152x66`, `fixed152x67`, `fixed152x68`, `fixed152x69`, `fixed152x70`, `fixed152x71`, `fixed152x72`, `fixed152x73`, `fixed152x74`, `fixed152x75`, `fixed152x76`, `fixed152x77`, `fixed152x78`, `fixed152x79`, `fixed152x80`, `fixed160x0`, `fixed160x1`, `fixed160x2`, `fixed160x3`, `fixed160x4`, `fixed160x5`, `fixed160x6`, `fixed160x7`, `fixed160x8`, `fixed160x9`, `fixed160x10`, `fixed160x11`, `fixed160x12`, `fixed160x13`, `fixed160x14`, `fixed160x15`, `fixed160x16`, `fixed160x17`, `fixed160x18`, `fixed160x19`, `fixed160x20`, `fixed160x21`, `fixed160x22`, `fixed160x23`, `fixed160x24`, `fixed160x25`, `fixed160x26`, `fixed160x27`, `fixed160x28`, `fixed160x29`, `fixed160x30`, `fixed160x31`, `fixed160x32`, `fixed160x33`, `fixed160x34`, `fixed160x35`, `fixed160x36`, `fixed160x37`, `fixed160x38`, `fixed160x39`, `fixed160x40`, `fixed160x41`, `fixed160x42`, `fixed160x43`, `fixed160x44`, `fixed160x45`, `fixed160x46`, `fixed160x47`, `fixed160x48`, `fixed160x49`, `fixed160x50`, `fixed160x51`, `fixed160x52`, `fixed160x53`, `fixed160x54`, `fixed160x55`, `fixed160x56`, `fixed160x57`, `fixed160x58`, `fixed160x59`, `fixed160x60`, `fixed160x61`, `fixed160x62`, `fixed160x63`, `fixed160x64`, `fixed160x65`, `fixed160x66`, `fixed160x67`, `fixed160x68`, `fixed160x69`, `fixed160x70`, `fixed160x71`, `fixed160x72`, `fixed160x73`, `fixed160x74`, `fixed160x75`, `fixed160x76`, `fixed160x77`, `fixed160x78`, `fixed160x79`, `fixed160x80`, `fixed168x0`, `fixed168x1`, `fixed168x2`, `fixed168x3`, `fixed168x4`, `fixed168x5`, `fixed168x6`, `fixed168x7`, `fixed168x8`, `fixed168x9`, `fixed168x10`, `fixed168x11`, `fixed168x12`, `fixed168x13`, `fixed168x14`, `fixed168x15`, `fixed168x16`, `fixed168x17`, `fixed168x18`, `fixed168x19`, `fixed168x20`, `fixed168x21`, `fixed168x22`, `fixed168x23`, `fixed168x24`, `fixed168x25`, `fixed168x26`, `fixed168x27`, `fixed168x28`, `fixed168x29`, `fixed168x30`, `fixed168x31`, `fixed168x32`, `fixed168x33`, `fixed168x34`, `fixed168x35`, `fixed168x36`, `fixed168x37`, `fixed168x38`, `fixed168x39`, `fixed168x40`, `fixed168x41`, `fixed168x42`, `fixed168x43`, `fixed168x44`, `fixed168x45`, `fixed168x46`, `fixed168x47`, `fixed168x48`, `fixed168x49`, `fixed168x50`, `fixed168x51`, `fixed168x52`, `fixed168x53`, `fixed168x54`, `fixed168x55`, `fixed168x56`, `fixed168x57`, `fixed168x58`, `fixed168x59`, `fixed168x60`, `fixed168x61`, `fixed168x62`, `fixed168x63`, `fixed168x64`, `fixed168x65`, `fixed168x66`, `fixed168x67`, `fixed168x68`, `fixed168x69`, `fixed168x70`, `fixed168x71`, `fixed168x72`, `fixed168x73`, `fixed168x74`, `fixed168x75`, `fixed168x76`, 
`fixed168x77`, `fixed168x78`, `fixed168x79`, `fixed168x80`, `fixed176x0`, `fixed176x1`, `fixed176x2`, `fixed176x3`, `fixed176x4`, `fixed176x5`, `fixed176x6`, `fixed176x7`, `fixed176x8`, `fixed176x9`, `fixed176x10`, `fixed176x11`, `fixed176x12`, `fixed176x13`, `fixed176x14`, `fixed176x15`, `fixed176x16`, `fixed176x17`, `fixed176x18`, `fixed176x19`, `fixed176x20`, `fixed176x21`, `fixed176x22`, `fixed176x23`, `fixed176x24`, `fixed176x25`, `fixed176x26`, `fixed176x27`, `fixed176x28`, `fixed176x29`, `fixed176x30`, `fixed176x31`, `fixed176x32`, `fixed176x33`, `fixed176x34`, `fixed176x35`, `fixed176x36`, `fixed176x37`, `fixed176x38`, `fixed176x39`, `fixed176x40`, `fixed176x41`, `fixed176x42`, `fixed176x43`, `fixed176x44`, `fixed176x45`, `fixed176x46`, `fixed176x47`, `fixed176x48`, `fixed176x49`, `fixed176x50`, `fixed176x51`, `fixed176x52`, `fixed176x53`, `fixed176x54`, `fixed176x55`, `fixed176x56`, `fixed176x57`, `fixed176x58`, `fixed176x59`, `fixed176x60`, `fixed176x61`, `fixed176x62`, `fixed176x63`, `fixed176x64`, `fixed176x65`, `fixed176x66`, `fixed176x67`, `fixed176x68`, `fixed176x69`, `fixed176x70`, `fixed176x71`, `fixed176x72`, `fixed176x73`, `fixed176x74`, `fixed176x75`, `fixed176x76`, `fixed176x77`, `fixed176x78`, `fixed176x79`, `fixed176x80`, `fixed184x0`, `fixed184x1`, `fixed184x2`, `fixed184x3`, `fixed184x4`, `fixed184x5`, `fixed184x6`, `fixed184x7`, `fixed184x8`, `fixed184x9`, `fixed184x10`, `fixed184x11`, `fixed184x12`, `fixed184x13`, `fixed184x14`, `fixed184x15`, `fixed184x16`, `fixed184x17`, `fixed184x18`, `fixed184x19`, `fixed184x20`, `fixed184x21`, `fixed184x22`, `fixed184x23`, `fixed184x24`, `fixed184x25`, `fixed184x26`, `fixed184x27`, `fixed184x28`, `fixed184x29`, `fixed184x30`, `fixed184x31`, `fixed184x32`, `fixed184x33`, `fixed184x34`, `fixed184x35`, `fixed184x36`, `fixed184x37`, `fixed184x38`, `fixed184x39`, `fixed184x40`, `fixed184x41`, `fixed184x42`, `fixed184x43`, `fixed184x44`, `fixed184x45`, `fixed184x46`, `fixed184x47`, `fixed184x48`, `fixed184x49`, `fixed184x50`, `fixed184x51`, `fixed184x52`, `fixed184x53`, `fixed184x54`, `fixed184x55`, `fixed184x56`, `fixed184x57`, `fixed184x58`, `fixed184x59`, `fixed184x60`, `fixed184x61`, `fixed184x62`, `fixed184x63`, `fixed184x64`, `fixed184x65`, `fixed184x66`, `fixed184x67`, `fixed184x68`, `fixed184x69`, `fixed184x70`, `fixed184x71`, `fixed184x72`, `fixed192x0`, `fixed192x1`, `fixed192x2`, `fixed192x3`, `fixed192x4`, `fixed192x5`, `fixed192x6`, `fixed192x7`, `fixed192x8`, `fixed192x9`, `fixed192x10`, `fixed192x11`, `fixed192x12`, `fixed192x13`, `fixed192x14`, `fixed192x15`, `fixed192x16`, `fixed192x17`, `fixed192x18`, `fixed192x19`, `fixed192x20`, `fixed192x21`, `fixed192x22`, `fixed192x23`, `fixed192x24`, `fixed192x25`, `fixed192x26`, `fixed192x27`, `fixed192x28`, `fixed192x29`, `fixed192x30`, `fixed192x31`, `fixed192x32`, `fixed192x33`, `fixed192x34`, `fixed192x35`, `fixed192x36`, `fixed192x37`, `fixed192x38`, `fixed192x39`, `fixed192x40`, `fixed192x41`, `fixed192x42`, `fixed192x43`, `fixed192x44`, `fixed192x45`, `fixed192x46`, `fixed192x47`, `fixed192x48`, `fixed192x49`, `fixed192x50`, `fixed192x51`, `fixed192x52`, `fixed192x53`, `fixed192x54`, `fixed192x55`, `fixed192x56`, `fixed192x57`, `fixed192x58`, `fixed192x59`, `fixed192x60`, `fixed192x61`, `fixed192x62`, `fixed192x63`, `fixed192x64`, `fixed200x0`, `fixed200x1`, `fixed200x2`, `fixed200x3`, `fixed200x4`, `fixed200x5`, `fixed200x6`, `fixed200x7`, `fixed200x8`, `fixed200x9`, `fixed200x10`, `fixed200x11`, `fixed200x12`, `fixed200x13`, `fixed200x14`, `fixed200x15`, 
`fixed200x16`, `fixed200x17`, `fixed200x18`, `fixed200x19`, `fixed200x20`, `fixed200x21`, `fixed200x22`, `fixed200x23`, `fixed200x24`, `fixed200x25`, `fixed200x26`, `fixed200x27`, `fixed200x28`, `fixed200x29`, `fixed200x30`, `fixed200x31`, `fixed200x32`, `fixed200x33`, `fixed200x34`, `fixed200x35`, `fixed200x36`, `fixed200x37`, `fixed200x38`, `fixed200x39`, `fixed200x40`, `fixed200x41`, `fixed200x42`, `fixed200x43`, `fixed200x44`, `fixed200x45`, `fixed200x46`, `fixed200x47`, `fixed200x48`, `fixed200x49`, `fixed200x50`, `fixed200x51`, `fixed200x52`, `fixed200x53`, `fixed200x54`, `fixed200x55`, `fixed200x56`, `fixed208x0`, `fixed208x1`, `fixed208x2`, `fixed208x3`, `fixed208x4`, `fixed208x5`, `fixed208x6`, `fixed208x7`, `fixed208x8`, `fixed208x9`, `fixed208x10`, `fixed208x11`, `fixed208x12`, `fixed208x13`, `fixed208x14`, `fixed208x15`, `fixed208x16`, `fixed208x17`, `fixed208x18`, `fixed208x19`, `fixed208x20`, `fixed208x21`, `fixed208x22`, `fixed208x23`, `fixed208x24`, `fixed208x25`, `fixed208x26`, `fixed208x27`, `fixed208x28`, `fixed208x29`, `fixed208x30`, `fixed208x31`, `fixed208x32`, `fixed208x33`, `fixed208x34`, `fixed208x35`, `fixed208x36`, `fixed208x37`, `fixed208x38`, `fixed208x39`, `fixed208x40`, `fixed208x41`, `fixed208x42`, `fixed208x43`, `fixed208x44`, `fixed208x45`, `fixed208x46`, `fixed208x47`, `fixed208x48`, `fixed216x0`, `fixed216x1`, `fixed216x2`, `fixed216x3`, `fixed216x4`, `fixed216x5`, `fixed216x6`, `fixed216x7`, `fixed216x8`, `fixed216x9`, `fixed216x10`, `fixed216x11`, `fixed216x12`, `fixed216x13`, `fixed216x14`, `fixed216x15`, `fixed216x16`, `fixed216x17`, `fixed216x18`, `fixed216x19`, `fixed216x20`, `fixed216x21`, `fixed216x22`, `fixed216x23`, `fixed216x24`, `fixed216x25`, `fixed216x26`, `fixed216x27`, `fixed216x28`, `fixed216x29`, `fixed216x30`, `fixed216x31`, `fixed216x32`, `fixed216x33`, `fixed216x34`, `fixed216x35`, `fixed216x36`, `fixed216x37`, `fixed216x38`, `fixed216x39`, `fixed216x40`, `fixed224x0`, `fixed224x1`, `fixed224x2`, `fixed224x3`, `fixed224x4`, `fixed224x5`, `fixed224x6`, `fixed224x7`, `fixed224x8`, `fixed224x9`, `fixed224x10`, `fixed224x11`, `fixed224x12`, `fixed224x13`, `fixed224x14`, `fixed224x15`, `fixed224x16`, `fixed224x17`, `fixed224x18`, `fixed224x19`, `fixed224x20`, `fixed224x21`, `fixed224x22`, `fixed224x23`, `fixed224x24`, `fixed224x25`, `fixed224x26`, `fixed224x27`, `fixed224x28`, `fixed224x29`, `fixed224x30`, `fixed224x31`, `fixed224x32`, `fixed232x0`, `fixed232x1`, `fixed232x2`, `fixed232x3`, `fixed232x4`, `fixed232x5`, `fixed232x6`, `fixed232x7`, `fixed232x8`, `fixed232x9`, `fixed232x10`, `fixed232x11`, `fixed232x12`, `fixed232x13`, `fixed232x14`, `fixed232x15`, `fixed232x16`, `fixed232x17`, `fixed232x18`, `fixed232x19`, `fixed232x20`, `fixed232x21`, `fixed232x22`, `fixed232x23`, `fixed232x24`, `fixed240x0`, `fixed240x1`, `fixed240x2`, `fixed240x3`, `fixed240x4`, `fixed240x5`, `fixed240x6`, `fixed240x7`, `fixed240x8`, `fixed240x9`, `fixed240x10`, `fixed240x11`, `fixed240x12`, `fixed240x13`, `fixed240x14`, `fixed240x15`, `fixed240x16`, `fixed248x0`, `fixed248x1`, `fixed248x2`, `fixed248x3`, `fixed248x4`, `fixed248x5`, `fixed248x6`, `fixed248x7`, `fixed248x8`, `fixed256x0`), KeywordType, nil}, + {Words(``, `\b`, `ufixed8x0`, `ufixed8x1`, `ufixed8x2`, `ufixed8x3`, `ufixed8x4`, `ufixed8x5`, `ufixed8x6`, `ufixed8x7`, `ufixed8x8`, `ufixed8x9`, `ufixed8x10`, `ufixed8x11`, `ufixed8x12`, `ufixed8x13`, `ufixed8x14`, `ufixed8x15`, `ufixed8x16`, `ufixed8x17`, `ufixed8x18`, `ufixed8x19`, `ufixed8x20`, `ufixed8x21`, `ufixed8x22`, `ufixed8x23`, 
`ufixed8x24`, `ufixed8x25`, `ufixed8x26`, `ufixed8x27`, `ufixed8x28`, `ufixed8x29`, `ufixed8x30`, `ufixed8x31`, `ufixed8x32`, `ufixed8x33`, `ufixed8x34`, `ufixed8x35`, `ufixed8x36`, `ufixed8x37`, `ufixed8x38`, `ufixed8x39`, `ufixed8x40`, `ufixed8x41`, `ufixed8x42`, `ufixed8x43`, `ufixed8x44`, `ufixed8x45`, `ufixed8x46`, `ufixed8x47`, `ufixed8x48`, `ufixed8x49`, `ufixed8x50`, `ufixed8x51`, `ufixed8x52`, `ufixed8x53`, `ufixed8x54`, `ufixed8x55`, `ufixed8x56`, `ufixed8x57`, `ufixed8x58`, `ufixed8x59`, `ufixed8x60`, `ufixed8x61`, `ufixed8x62`, `ufixed8x63`, `ufixed8x64`, `ufixed8x65`, `ufixed8x66`, `ufixed8x67`, `ufixed8x68`, `ufixed8x69`, `ufixed8x70`, `ufixed8x71`, `ufixed8x72`, `ufixed8x73`, `ufixed8x74`, `ufixed8x75`, `ufixed8x76`, `ufixed8x77`, `ufixed8x78`, `ufixed8x79`, `ufixed8x80`, `ufixed16x0`, `ufixed16x1`, `ufixed16x2`, `ufixed16x3`, `ufixed16x4`, `ufixed16x5`, `ufixed16x6`, `ufixed16x7`, `ufixed16x8`, `ufixed16x9`, `ufixed16x10`, `ufixed16x11`, `ufixed16x12`, `ufixed16x13`, `ufixed16x14`, `ufixed16x15`, `ufixed16x16`, `ufixed16x17`, `ufixed16x18`, `ufixed16x19`, `ufixed16x20`, `ufixed16x21`, `ufixed16x22`, `ufixed16x23`, `ufixed16x24`, `ufixed16x25`, `ufixed16x26`, `ufixed16x27`, `ufixed16x28`, `ufixed16x29`, `ufixed16x30`, `ufixed16x31`, `ufixed16x32`, `ufixed16x33`, `ufixed16x34`, `ufixed16x35`, `ufixed16x36`, `ufixed16x37`, `ufixed16x38`, `ufixed16x39`, `ufixed16x40`, `ufixed16x41`, `ufixed16x42`, `ufixed16x43`, `ufixed16x44`, `ufixed16x45`, `ufixed16x46`, `ufixed16x47`, `ufixed16x48`, `ufixed16x49`, `ufixed16x50`, `ufixed16x51`, `ufixed16x52`, `ufixed16x53`, `ufixed16x54`, `ufixed16x55`, `ufixed16x56`, `ufixed16x57`, `ufixed16x58`, `ufixed16x59`, `ufixed16x60`, `ufixed16x61`, `ufixed16x62`, `ufixed16x63`, `ufixed16x64`, `ufixed16x65`, `ufixed16x66`, `ufixed16x67`, `ufixed16x68`, `ufixed16x69`, `ufixed16x70`, `ufixed16x71`, `ufixed16x72`, `ufixed16x73`, `ufixed16x74`, `ufixed16x75`, `ufixed16x76`, `ufixed16x77`, `ufixed16x78`, `ufixed16x79`, `ufixed16x80`, `ufixed24x0`, `ufixed24x1`, `ufixed24x2`, `ufixed24x3`, `ufixed24x4`, `ufixed24x5`, `ufixed24x6`, `ufixed24x7`, `ufixed24x8`, `ufixed24x9`, `ufixed24x10`, `ufixed24x11`, `ufixed24x12`, `ufixed24x13`, `ufixed24x14`, `ufixed24x15`, `ufixed24x16`, `ufixed24x17`, `ufixed24x18`, `ufixed24x19`, `ufixed24x20`, `ufixed24x21`, `ufixed24x22`, `ufixed24x23`, `ufixed24x24`, `ufixed24x25`, `ufixed24x26`, `ufixed24x27`, `ufixed24x28`, `ufixed24x29`, `ufixed24x30`, `ufixed24x31`, `ufixed24x32`, `ufixed24x33`, `ufixed24x34`, `ufixed24x35`, `ufixed24x36`, `ufixed24x37`, `ufixed24x38`, `ufixed24x39`, `ufixed24x40`, `ufixed24x41`, `ufixed24x42`, `ufixed24x43`, `ufixed24x44`, `ufixed24x45`, `ufixed24x46`, `ufixed24x47`, `ufixed24x48`, `ufixed24x49`, `ufixed24x50`, `ufixed24x51`, `ufixed24x52`, `ufixed24x53`, `ufixed24x54`, `ufixed24x55`, `ufixed24x56`, `ufixed24x57`, `ufixed24x58`, `ufixed24x59`, `ufixed24x60`, `ufixed24x61`, `ufixed24x62`, `ufixed24x63`, `ufixed24x64`, `ufixed24x65`, `ufixed24x66`, `ufixed24x67`, `ufixed24x68`, `ufixed24x69`, `ufixed24x70`, `ufixed24x71`, `ufixed24x72`, `ufixed24x73`, `ufixed24x74`, `ufixed24x75`, `ufixed24x76`, `ufixed24x77`, `ufixed24x78`, `ufixed24x79`, `ufixed24x80`, `ufixed32x0`, `ufixed32x1`, `ufixed32x2`, `ufixed32x3`, `ufixed32x4`, `ufixed32x5`, `ufixed32x6`, `ufixed32x7`, `ufixed32x8`, `ufixed32x9`, `ufixed32x10`, `ufixed32x11`, `ufixed32x12`, `ufixed32x13`, `ufixed32x14`, `ufixed32x15`, `ufixed32x16`, `ufixed32x17`, `ufixed32x18`, `ufixed32x19`, `ufixed32x20`, `ufixed32x21`, `ufixed32x22`, 
`ufixed32x23`, `ufixed32x24`, `ufixed32x25`, `ufixed32x26`, `ufixed32x27`, `ufixed32x28`, `ufixed32x29`, `ufixed32x30`, `ufixed32x31`, `ufixed32x32`, `ufixed32x33`, `ufixed32x34`, `ufixed32x35`, `ufixed32x36`, `ufixed32x37`, `ufixed32x38`, `ufixed32x39`, `ufixed32x40`, `ufixed32x41`, `ufixed32x42`, `ufixed32x43`, `ufixed32x44`, `ufixed32x45`, `ufixed32x46`, `ufixed32x47`, `ufixed32x48`, `ufixed32x49`, `ufixed32x50`, `ufixed32x51`, `ufixed32x52`, `ufixed32x53`, `ufixed32x54`, `ufixed32x55`, `ufixed32x56`, `ufixed32x57`, `ufixed32x58`, `ufixed32x59`, `ufixed32x60`, `ufixed32x61`, `ufixed32x62`, `ufixed32x63`, `ufixed32x64`, `ufixed32x65`, `ufixed32x66`, `ufixed32x67`, `ufixed32x68`, `ufixed32x69`, `ufixed32x70`, `ufixed32x71`, `ufixed32x72`, `ufixed32x73`, `ufixed32x74`, `ufixed32x75`, `ufixed32x76`, `ufixed32x77`, `ufixed32x78`, `ufixed32x79`, `ufixed32x80`, `ufixed40x0`, `ufixed40x1`, `ufixed40x2`, `ufixed40x3`, `ufixed40x4`, `ufixed40x5`, `ufixed40x6`, `ufixed40x7`, `ufixed40x8`, `ufixed40x9`, `ufixed40x10`, `ufixed40x11`, `ufixed40x12`, `ufixed40x13`, `ufixed40x14`, `ufixed40x15`, `ufixed40x16`, `ufixed40x17`, `ufixed40x18`, `ufixed40x19`, `ufixed40x20`, `ufixed40x21`, `ufixed40x22`, `ufixed40x23`, `ufixed40x24`, `ufixed40x25`, `ufixed40x26`, `ufixed40x27`, `ufixed40x28`, `ufixed40x29`, `ufixed40x30`, `ufixed40x31`, `ufixed40x32`, `ufixed40x33`, `ufixed40x34`, `ufixed40x35`, `ufixed40x36`, `ufixed40x37`, `ufixed40x38`, `ufixed40x39`, `ufixed40x40`, `ufixed40x41`, `ufixed40x42`, `ufixed40x43`, `ufixed40x44`, `ufixed40x45`, `ufixed40x46`, `ufixed40x47`, `ufixed40x48`, `ufixed40x49`, `ufixed40x50`, `ufixed40x51`, `ufixed40x52`, `ufixed40x53`, `ufixed40x54`, `ufixed40x55`, `ufixed40x56`, `ufixed40x57`, `ufixed40x58`, `ufixed40x59`, `ufixed40x60`, `ufixed40x61`, `ufixed40x62`, `ufixed40x63`, `ufixed40x64`, `ufixed40x65`, `ufixed40x66`, `ufixed40x67`, `ufixed40x68`, `ufixed40x69`, `ufixed40x70`, `ufixed40x71`, `ufixed40x72`, `ufixed40x73`, `ufixed40x74`, `ufixed40x75`, `ufixed40x76`, `ufixed40x77`, `ufixed40x78`, `ufixed40x79`, `ufixed40x80`, `ufixed48x0`, `ufixed48x1`, `ufixed48x2`, `ufixed48x3`, `ufixed48x4`, `ufixed48x5`, `ufixed48x6`, `ufixed48x7`, `ufixed48x8`, `ufixed48x9`, `ufixed48x10`, `ufixed48x11`, `ufixed48x12`, `ufixed48x13`, `ufixed48x14`, `ufixed48x15`, `ufixed48x16`, `ufixed48x17`, `ufixed48x18`, `ufixed48x19`, `ufixed48x20`, `ufixed48x21`, `ufixed48x22`, `ufixed48x23`, `ufixed48x24`, `ufixed48x25`, `ufixed48x26`, `ufixed48x27`, `ufixed48x28`, `ufixed48x29`, `ufixed48x30`, `ufixed48x31`, `ufixed48x32`, `ufixed48x33`, `ufixed48x34`, `ufixed48x35`, `ufixed48x36`, `ufixed48x37`, `ufixed48x38`, `ufixed48x39`, `ufixed48x40`, `ufixed48x41`, `ufixed48x42`, `ufixed48x43`, `ufixed48x44`, `ufixed48x45`, `ufixed48x46`, `ufixed48x47`, `ufixed48x48`, `ufixed48x49`, `ufixed48x50`, `ufixed48x51`, `ufixed48x52`, `ufixed48x53`, `ufixed48x54`, `ufixed48x55`, `ufixed48x56`, `ufixed48x57`, `ufixed48x58`, `ufixed48x59`, `ufixed48x60`, `ufixed48x61`, `ufixed48x62`, `ufixed48x63`, `ufixed48x64`, `ufixed48x65`, `ufixed48x66`, `ufixed48x67`, `ufixed48x68`, `ufixed48x69`, `ufixed48x70`, `ufixed48x71`, `ufixed48x72`, `ufixed48x73`, `ufixed48x74`, `ufixed48x75`, `ufixed48x76`, `ufixed48x77`, `ufixed48x78`, `ufixed48x79`, `ufixed48x80`, `ufixed56x0`, `ufixed56x1`, `ufixed56x2`, `ufixed56x3`, `ufixed56x4`, `ufixed56x5`, `ufixed56x6`, `ufixed56x7`, `ufixed56x8`, `ufixed56x9`, `ufixed56x10`, `ufixed56x11`, `ufixed56x12`, `ufixed56x13`, `ufixed56x14`, `ufixed56x15`, `ufixed56x16`, `ufixed56x17`, `ufixed56x18`, 
`ufixed56x19`, `ufixed56x20`, `ufixed56x21`, `ufixed56x22`, `ufixed56x23`, `ufixed56x24`, `ufixed56x25`, `ufixed56x26`, `ufixed56x27`, `ufixed56x28`, `ufixed56x29`, `ufixed56x30`, `ufixed56x31`, `ufixed56x32`, `ufixed56x33`, `ufixed56x34`, `ufixed56x35`, `ufixed56x36`, `ufixed56x37`, `ufixed56x38`, `ufixed56x39`, `ufixed56x40`, `ufixed56x41`, `ufixed56x42`, `ufixed56x43`, `ufixed56x44`, `ufixed56x45`, `ufixed56x46`, `ufixed56x47`, `ufixed56x48`, `ufixed56x49`, `ufixed56x50`, `ufixed56x51`, `ufixed56x52`, `ufixed56x53`, `ufixed56x54`, `ufixed56x55`, `ufixed56x56`, `ufixed56x57`, `ufixed56x58`, `ufixed56x59`, `ufixed56x60`, `ufixed56x61`, `ufixed56x62`, `ufixed56x63`, `ufixed56x64`, `ufixed56x65`, `ufixed56x66`, `ufixed56x67`, `ufixed56x68`, `ufixed56x69`, `ufixed56x70`, `ufixed56x71`, `ufixed56x72`, `ufixed56x73`, `ufixed56x74`, `ufixed56x75`, `ufixed56x76`, `ufixed56x77`, `ufixed56x78`, `ufixed56x79`, `ufixed56x80`, `ufixed64x0`, `ufixed64x1`, `ufixed64x2`, `ufixed64x3`, `ufixed64x4`, `ufixed64x5`, `ufixed64x6`, `ufixed64x7`, `ufixed64x8`, `ufixed64x9`, `ufixed64x10`, `ufixed64x11`, `ufixed64x12`, `ufixed64x13`, `ufixed64x14`, `ufixed64x15`, `ufixed64x16`, `ufixed64x17`, `ufixed64x18`, `ufixed64x19`, `ufixed64x20`, `ufixed64x21`, `ufixed64x22`, `ufixed64x23`, `ufixed64x24`, `ufixed64x25`, `ufixed64x26`, `ufixed64x27`, `ufixed64x28`, `ufixed64x29`, `ufixed64x30`, `ufixed64x31`, `ufixed64x32`, `ufixed64x33`, `ufixed64x34`, `ufixed64x35`, `ufixed64x36`, `ufixed64x37`, `ufixed64x38`, `ufixed64x39`, `ufixed64x40`, `ufixed64x41`, `ufixed64x42`, `ufixed64x43`, `ufixed64x44`, `ufixed64x45`, `ufixed64x46`, `ufixed64x47`, `ufixed64x48`, `ufixed64x49`, `ufixed64x50`, `ufixed64x51`, `ufixed64x52`, `ufixed64x53`, `ufixed64x54`, `ufixed64x55`, `ufixed64x56`, `ufixed64x57`, `ufixed64x58`, `ufixed64x59`, `ufixed64x60`, `ufixed64x61`, `ufixed64x62`, `ufixed64x63`, `ufixed64x64`, `ufixed64x65`, `ufixed64x66`, `ufixed64x67`, `ufixed64x68`, `ufixed64x69`, `ufixed64x70`, `ufixed64x71`, `ufixed64x72`, `ufixed64x73`, `ufixed64x74`, `ufixed64x75`, `ufixed64x76`, `ufixed64x77`, `ufixed64x78`, `ufixed64x79`, `ufixed64x80`, `ufixed72x0`, `ufixed72x1`, `ufixed72x2`, `ufixed72x3`, `ufixed72x4`, `ufixed72x5`, `ufixed72x6`, `ufixed72x7`, `ufixed72x8`, `ufixed72x9`, `ufixed72x10`, `ufixed72x11`, `ufixed72x12`, `ufixed72x13`, `ufixed72x14`, `ufixed72x15`, `ufixed72x16`, `ufixed72x17`, `ufixed72x18`, `ufixed72x19`, `ufixed72x20`, `ufixed72x21`, `ufixed72x22`, `ufixed72x23`, `ufixed72x24`, `ufixed72x25`, `ufixed72x26`, `ufixed72x27`, `ufixed72x28`, `ufixed72x29`, `ufixed72x30`, `ufixed72x31`, `ufixed72x32`, `ufixed72x33`, `ufixed72x34`, `ufixed72x35`, `ufixed72x36`, `ufixed72x37`, `ufixed72x38`, `ufixed72x39`, `ufixed72x40`, `ufixed72x41`, `ufixed72x42`, `ufixed72x43`, `ufixed72x44`, `ufixed72x45`, `ufixed72x46`, `ufixed72x47`, `ufixed72x48`, `ufixed72x49`, `ufixed72x50`, `ufixed72x51`, `ufixed72x52`, `ufixed72x53`, `ufixed72x54`, `ufixed72x55`, `ufixed72x56`, `ufixed72x57`, `ufixed72x58`, `ufixed72x59`, `ufixed72x60`, `ufixed72x61`, `ufixed72x62`, `ufixed72x63`, `ufixed72x64`, `ufixed72x65`, `ufixed72x66`, `ufixed72x67`, `ufixed72x68`, `ufixed72x69`, `ufixed72x70`, `ufixed72x71`, `ufixed72x72`, `ufixed72x73`, `ufixed72x74`, `ufixed72x75`, `ufixed72x76`, `ufixed72x77`, `ufixed72x78`, `ufixed72x79`, `ufixed72x80`, `ufixed80x0`, `ufixed80x1`, `ufixed80x2`, `ufixed80x3`, `ufixed80x4`, `ufixed80x5`, `ufixed80x6`, `ufixed80x7`, `ufixed80x8`, `ufixed80x9`, `ufixed80x10`, `ufixed80x11`, `ufixed80x12`, `ufixed80x13`, `ufixed80x14`, 
`ufixed80x15`, `ufixed80x16`, `ufixed80x17`, `ufixed80x18`, `ufixed80x19`, `ufixed80x20`, `ufixed80x21`, `ufixed80x22`, `ufixed80x23`, `ufixed80x24`, `ufixed80x25`, `ufixed80x26`, `ufixed80x27`, `ufixed80x28`, `ufixed80x29`, `ufixed80x30`, `ufixed80x31`, `ufixed80x32`, `ufixed80x33`, `ufixed80x34`, `ufixed80x35`, `ufixed80x36`, `ufixed80x37`, `ufixed80x38`, `ufixed80x39`, `ufixed80x40`, `ufixed80x41`, `ufixed80x42`, `ufixed80x43`, `ufixed80x44`, `ufixed80x45`, `ufixed80x46`, `ufixed80x47`, `ufixed80x48`, `ufixed80x49`, `ufixed80x50`, `ufixed80x51`, `ufixed80x52`, `ufixed80x53`, `ufixed80x54`, `ufixed80x55`, `ufixed80x56`, `ufixed80x57`, `ufixed80x58`, `ufixed80x59`, `ufixed80x60`, `ufixed80x61`, `ufixed80x62`, `ufixed80x63`, `ufixed80x64`, `ufixed80x65`, `ufixed80x66`, `ufixed80x67`, `ufixed80x68`, `ufixed80x69`, `ufixed80x70`, `ufixed80x71`, `ufixed80x72`, `ufixed80x73`, `ufixed80x74`, `ufixed80x75`, `ufixed80x76`, `ufixed80x77`, `ufixed80x78`, `ufixed80x79`, `ufixed80x80`, `ufixed88x0`, `ufixed88x1`, `ufixed88x2`, `ufixed88x3`, `ufixed88x4`, `ufixed88x5`, `ufixed88x6`, `ufixed88x7`, `ufixed88x8`, `ufixed88x9`, `ufixed88x10`, `ufixed88x11`, `ufixed88x12`, `ufixed88x13`, `ufixed88x14`, `ufixed88x15`, `ufixed88x16`, `ufixed88x17`, `ufixed88x18`, `ufixed88x19`, `ufixed88x20`, `ufixed88x21`, `ufixed88x22`, `ufixed88x23`, `ufixed88x24`, `ufixed88x25`, `ufixed88x26`, `ufixed88x27`, `ufixed88x28`, `ufixed88x29`, `ufixed88x30`, `ufixed88x31`, `ufixed88x32`, `ufixed88x33`, `ufixed88x34`, `ufixed88x35`, `ufixed88x36`, `ufixed88x37`, `ufixed88x38`, `ufixed88x39`, `ufixed88x40`, `ufixed88x41`, `ufixed88x42`, `ufixed88x43`, `ufixed88x44`, `ufixed88x45`, `ufixed88x46`, `ufixed88x47`, `ufixed88x48`, `ufixed88x49`, `ufixed88x50`, `ufixed88x51`, `ufixed88x52`, `ufixed88x53`, `ufixed88x54`, `ufixed88x55`, `ufixed88x56`, `ufixed88x57`, `ufixed88x58`, `ufixed88x59`, `ufixed88x60`, `ufixed88x61`, `ufixed88x62`, `ufixed88x63`, `ufixed88x64`, `ufixed88x65`, `ufixed88x66`, `ufixed88x67`, `ufixed88x68`, `ufixed88x69`, `ufixed88x70`, `ufixed88x71`, `ufixed88x72`, `ufixed88x73`, `ufixed88x74`, `ufixed88x75`, `ufixed88x76`, `ufixed88x77`, `ufixed88x78`, `ufixed88x79`, `ufixed88x80`, `ufixed96x0`, `ufixed96x1`, `ufixed96x2`, `ufixed96x3`, `ufixed96x4`, `ufixed96x5`, `ufixed96x6`, `ufixed96x7`, `ufixed96x8`, `ufixed96x9`, `ufixed96x10`, `ufixed96x11`, `ufixed96x12`, `ufixed96x13`, `ufixed96x14`, `ufixed96x15`, `ufixed96x16`, `ufixed96x17`, `ufixed96x18`, `ufixed96x19`, `ufixed96x20`, `ufixed96x21`, `ufixed96x22`, `ufixed96x23`, `ufixed96x24`, `ufixed96x25`, `ufixed96x26`, `ufixed96x27`, `ufixed96x28`, `ufixed96x29`, `ufixed96x30`, `ufixed96x31`, `ufixed96x32`, `ufixed96x33`, `ufixed96x34`, `ufixed96x35`, `ufixed96x36`, `ufixed96x37`, `ufixed96x38`, `ufixed96x39`, `ufixed96x40`, `ufixed96x41`, `ufixed96x42`, `ufixed96x43`, `ufixed96x44`, `ufixed96x45`, `ufixed96x46`, `ufixed96x47`, `ufixed96x48`, `ufixed96x49`, `ufixed96x50`, `ufixed96x51`, `ufixed96x52`, `ufixed96x53`, `ufixed96x54`, `ufixed96x55`, `ufixed96x56`, `ufixed96x57`, `ufixed96x58`, `ufixed96x59`, `ufixed96x60`, `ufixed96x61`, `ufixed96x62`, `ufixed96x63`, `ufixed96x64`, `ufixed96x65`, `ufixed96x66`, `ufixed96x67`, `ufixed96x68`, `ufixed96x69`, `ufixed96x70`, `ufixed96x71`, `ufixed96x72`, `ufixed96x73`, `ufixed96x74`, `ufixed96x75`, `ufixed96x76`, `ufixed96x77`, `ufixed96x78`, `ufixed96x79`, `ufixed96x80`, `ufixed104x0`, `ufixed104x1`, `ufixed104x2`, `ufixed104x3`, `ufixed104x4`, `ufixed104x5`, `ufixed104x6`, `ufixed104x7`, `ufixed104x8`, `ufixed104x9`, 
`ufixed104x10`, `ufixed104x11`, `ufixed104x12`, `ufixed104x13`, `ufixed104x14`, `ufixed104x15`, `ufixed104x16`, `ufixed104x17`, `ufixed104x18`, `ufixed104x19`, `ufixed104x20`, `ufixed104x21`, `ufixed104x22`, `ufixed104x23`, `ufixed104x24`, `ufixed104x25`, `ufixed104x26`, `ufixed104x27`, `ufixed104x28`, `ufixed104x29`, `ufixed104x30`, `ufixed104x31`, `ufixed104x32`, `ufixed104x33`, `ufixed104x34`, `ufixed104x35`, `ufixed104x36`, `ufixed104x37`, `ufixed104x38`, `ufixed104x39`, `ufixed104x40`, `ufixed104x41`, `ufixed104x42`, `ufixed104x43`, `ufixed104x44`, `ufixed104x45`, `ufixed104x46`, `ufixed104x47`, `ufixed104x48`, `ufixed104x49`, `ufixed104x50`, `ufixed104x51`, `ufixed104x52`, `ufixed104x53`, `ufixed104x54`, `ufixed104x55`, `ufixed104x56`, `ufixed104x57`, `ufixed104x58`, `ufixed104x59`, `ufixed104x60`, `ufixed104x61`, `ufixed104x62`, `ufixed104x63`, `ufixed104x64`, `ufixed104x65`, `ufixed104x66`, `ufixed104x67`, `ufixed104x68`, `ufixed104x69`, `ufixed104x70`, `ufixed104x71`, `ufixed104x72`, `ufixed104x73`, `ufixed104x74`, `ufixed104x75`, `ufixed104x76`, `ufixed104x77`, `ufixed104x78`, `ufixed104x79`, `ufixed104x80`, `ufixed112x0`, `ufixed112x1`, `ufixed112x2`, `ufixed112x3`, `ufixed112x4`, `ufixed112x5`, `ufixed112x6`, `ufixed112x7`, `ufixed112x8`, `ufixed112x9`, `ufixed112x10`, `ufixed112x11`, `ufixed112x12`, `ufixed112x13`, `ufixed112x14`, `ufixed112x15`, `ufixed112x16`, `ufixed112x17`, `ufixed112x18`, `ufixed112x19`, `ufixed112x20`, `ufixed112x21`, `ufixed112x22`, `ufixed112x23`, `ufixed112x24`, `ufixed112x25`, `ufixed112x26`, `ufixed112x27`, `ufixed112x28`, `ufixed112x29`, `ufixed112x30`, `ufixed112x31`, `ufixed112x32`, `ufixed112x33`, `ufixed112x34`, `ufixed112x35`, `ufixed112x36`, `ufixed112x37`, `ufixed112x38`, `ufixed112x39`, `ufixed112x40`, `ufixed112x41`, `ufixed112x42`, `ufixed112x43`, `ufixed112x44`, `ufixed112x45`, `ufixed112x46`, `ufixed112x47`, `ufixed112x48`, `ufixed112x49`, `ufixed112x50`, `ufixed112x51`, `ufixed112x52`, `ufixed112x53`, `ufixed112x54`, `ufixed112x55`, `ufixed112x56`, `ufixed112x57`, `ufixed112x58`, `ufixed112x59`, `ufixed112x60`, `ufixed112x61`, `ufixed112x62`, `ufixed112x63`, `ufixed112x64`, `ufixed112x65`, `ufixed112x66`, `ufixed112x67`, `ufixed112x68`, `ufixed112x69`, `ufixed112x70`, `ufixed112x71`, `ufixed112x72`, `ufixed112x73`, `ufixed112x74`, `ufixed112x75`, `ufixed112x76`, `ufixed112x77`, `ufixed112x78`, `ufixed112x79`, `ufixed112x80`, `ufixed120x0`, `ufixed120x1`, `ufixed120x2`, `ufixed120x3`, `ufixed120x4`, `ufixed120x5`, `ufixed120x6`, `ufixed120x7`, `ufixed120x8`, `ufixed120x9`, `ufixed120x10`, `ufixed120x11`, `ufixed120x12`, `ufixed120x13`, `ufixed120x14`, `ufixed120x15`, `ufixed120x16`, `ufixed120x17`, `ufixed120x18`, `ufixed120x19`, `ufixed120x20`, `ufixed120x21`, `ufixed120x22`, `ufixed120x23`, `ufixed120x24`, `ufixed120x25`, `ufixed120x26`, `ufixed120x27`, `ufixed120x28`, `ufixed120x29`, `ufixed120x30`, `ufixed120x31`, `ufixed120x32`, `ufixed120x33`, `ufixed120x34`, `ufixed120x35`, `ufixed120x36`, `ufixed120x37`, `ufixed120x38`, `ufixed120x39`, `ufixed120x40`, `ufixed120x41`, `ufixed120x42`, `ufixed120x43`, `ufixed120x44`, `ufixed120x45`, `ufixed120x46`, `ufixed120x47`, `ufixed120x48`, `ufixed120x49`, `ufixed120x50`, `ufixed120x51`, `ufixed120x52`, `ufixed120x53`, `ufixed120x54`, `ufixed120x55`, `ufixed120x56`, `ufixed120x57`, `ufixed120x58`, `ufixed120x59`, `ufixed120x60`, `ufixed120x61`, `ufixed120x62`, `ufixed120x63`, `ufixed120x64`, `ufixed120x65`, `ufixed120x66`, `ufixed120x67`, `ufixed120x68`, `ufixed120x69`, `ufixed120x70`, 
`ufixed120x71`, `ufixed120x72`, `ufixed120x73`, `ufixed120x74`, `ufixed120x75`, `ufixed120x76`, `ufixed120x77`, `ufixed120x78`, `ufixed120x79`, `ufixed120x80`, `ufixed128x0`, `ufixed128x1`, `ufixed128x2`, `ufixed128x3`, `ufixed128x4`, `ufixed128x5`, `ufixed128x6`, `ufixed128x7`, `ufixed128x8`, `ufixed128x9`, `ufixed128x10`, `ufixed128x11`, `ufixed128x12`, `ufixed128x13`, `ufixed128x14`, `ufixed128x15`, `ufixed128x16`, `ufixed128x17`, `ufixed128x18`, `ufixed128x19`, `ufixed128x20`, `ufixed128x21`, `ufixed128x22`, `ufixed128x23`, `ufixed128x24`, `ufixed128x25`, `ufixed128x26`, `ufixed128x27`, `ufixed128x28`, `ufixed128x29`, `ufixed128x30`, `ufixed128x31`, `ufixed128x32`, `ufixed128x33`, `ufixed128x34`, `ufixed128x35`, `ufixed128x36`, `ufixed128x37`, `ufixed128x38`, `ufixed128x39`, `ufixed128x40`, `ufixed128x41`, `ufixed128x42`, `ufixed128x43`, `ufixed128x44`, `ufixed128x45`, `ufixed128x46`, `ufixed128x47`, `ufixed128x48`, `ufixed128x49`, `ufixed128x50`, `ufixed128x51`, `ufixed128x52`, `ufixed128x53`, `ufixed128x54`, `ufixed128x55`, `ufixed128x56`, `ufixed128x57`, `ufixed128x58`, `ufixed128x59`, `ufixed128x60`, `ufixed128x61`, `ufixed128x62`, `ufixed128x63`, `ufixed128x64`, `ufixed128x65`, `ufixed128x66`, `ufixed128x67`, `ufixed128x68`, `ufixed128x69`, `ufixed128x70`, `ufixed128x71`, `ufixed128x72`, `ufixed128x73`, `ufixed128x74`, `ufixed128x75`, `ufixed128x76`, `ufixed128x77`, `ufixed128x78`, `ufixed128x79`, `ufixed128x80`, `ufixed136x0`, `ufixed136x1`, `ufixed136x2`, `ufixed136x3`, `ufixed136x4`, `ufixed136x5`, `ufixed136x6`, `ufixed136x7`, `ufixed136x8`, `ufixed136x9`, `ufixed136x10`, `ufixed136x11`, `ufixed136x12`, `ufixed136x13`, `ufixed136x14`, `ufixed136x15`, `ufixed136x16`, `ufixed136x17`, `ufixed136x18`, `ufixed136x19`, `ufixed136x20`, `ufixed136x21`, `ufixed136x22`, `ufixed136x23`, `ufixed136x24`, `ufixed136x25`, `ufixed136x26`, `ufixed136x27`, `ufixed136x28`, `ufixed136x29`, `ufixed136x30`, `ufixed136x31`, `ufixed136x32`, `ufixed136x33`, `ufixed136x34`, `ufixed136x35`, `ufixed136x36`, `ufixed136x37`, `ufixed136x38`, `ufixed136x39`, `ufixed136x40`, `ufixed136x41`, `ufixed136x42`, `ufixed136x43`, `ufixed136x44`, `ufixed136x45`, `ufixed136x46`, `ufixed136x47`, `ufixed136x48`, `ufixed136x49`, `ufixed136x50`, `ufixed136x51`, `ufixed136x52`, `ufixed136x53`, `ufixed136x54`, `ufixed136x55`, `ufixed136x56`, `ufixed136x57`, `ufixed136x58`, `ufixed136x59`, `ufixed136x60`, `ufixed136x61`, `ufixed136x62`, `ufixed136x63`, `ufixed136x64`, `ufixed136x65`, `ufixed136x66`, `ufixed136x67`, `ufixed136x68`, `ufixed136x69`, `ufixed136x70`, `ufixed136x71`, `ufixed136x72`, `ufixed136x73`, `ufixed136x74`, `ufixed136x75`, `ufixed136x76`, `ufixed136x77`, `ufixed136x78`, `ufixed136x79`, `ufixed136x80`, `ufixed144x0`, `ufixed144x1`, `ufixed144x2`, `ufixed144x3`, `ufixed144x4`, `ufixed144x5`, `ufixed144x6`, `ufixed144x7`, `ufixed144x8`, `ufixed144x9`, `ufixed144x10`, `ufixed144x11`, `ufixed144x12`, `ufixed144x13`, `ufixed144x14`, `ufixed144x15`, `ufixed144x16`, `ufixed144x17`, `ufixed144x18`, `ufixed144x19`, `ufixed144x20`, `ufixed144x21`, `ufixed144x22`, `ufixed144x23`, `ufixed144x24`, `ufixed144x25`, `ufixed144x26`, `ufixed144x27`, `ufixed144x28`, `ufixed144x29`, `ufixed144x30`, `ufixed144x31`, `ufixed144x32`, `ufixed144x33`, `ufixed144x34`, `ufixed144x35`, `ufixed144x36`, `ufixed144x37`, `ufixed144x38`, `ufixed144x39`, `ufixed144x40`, `ufixed144x41`, `ufixed144x42`, `ufixed144x43`, `ufixed144x44`, `ufixed144x45`, `ufixed144x46`, `ufixed144x47`, `ufixed144x48`, `ufixed144x49`, `ufixed144x50`, `ufixed144x51`, 
`ufixed144x52`, `ufixed144x53`, `ufixed144x54`, `ufixed144x55`, `ufixed144x56`, `ufixed144x57`, `ufixed144x58`, `ufixed144x59`, `ufixed144x60`, `ufixed144x61`, `ufixed144x62`, `ufixed144x63`, `ufixed144x64`, `ufixed144x65`, `ufixed144x66`, `ufixed144x67`, `ufixed144x68`, `ufixed144x69`, `ufixed144x70`, `ufixed144x71`, `ufixed144x72`, `ufixed144x73`, `ufixed144x74`, `ufixed144x75`, `ufixed144x76`, `ufixed144x77`, `ufixed144x78`, `ufixed144x79`, `ufixed144x80`, `ufixed152x0`, `ufixed152x1`, `ufixed152x2`, `ufixed152x3`, `ufixed152x4`, `ufixed152x5`, `ufixed152x6`, `ufixed152x7`, `ufixed152x8`, `ufixed152x9`, `ufixed152x10`, `ufixed152x11`, `ufixed152x12`, `ufixed152x13`, `ufixed152x14`, `ufixed152x15`, `ufixed152x16`, `ufixed152x17`, `ufixed152x18`, `ufixed152x19`, `ufixed152x20`, `ufixed152x21`, `ufixed152x22`, `ufixed152x23`, `ufixed152x24`, `ufixed152x25`, `ufixed152x26`, `ufixed152x27`, `ufixed152x28`, `ufixed152x29`, `ufixed152x30`, `ufixed152x31`, `ufixed152x32`, `ufixed152x33`, `ufixed152x34`, `ufixed152x35`, `ufixed152x36`, `ufixed152x37`, `ufixed152x38`, `ufixed152x39`, `ufixed152x40`, `ufixed152x41`, `ufixed152x42`, `ufixed152x43`, `ufixed152x44`, `ufixed152x45`, `ufixed152x46`, `ufixed152x47`, `ufixed152x48`, `ufixed152x49`, `ufixed152x50`, `ufixed152x51`, `ufixed152x52`, `ufixed152x53`, `ufixed152x54`, `ufixed152x55`, `ufixed152x56`, `ufixed152x57`, `ufixed152x58`, `ufixed152x59`, `ufixed152x60`, `ufixed152x61`, `ufixed152x62`, `ufixed152x63`, `ufixed152x64`, `ufixed152x65`, `ufixed152x66`, `ufixed152x67`, `ufixed152x68`, `ufixed152x69`, `ufixed152x70`, `ufixed152x71`, `ufixed152x72`, `ufixed152x73`, `ufixed152x74`, `ufixed152x75`, `ufixed152x76`, `ufixed152x77`, `ufixed152x78`, `ufixed152x79`, `ufixed152x80`, `ufixed160x0`, `ufixed160x1`, `ufixed160x2`, `ufixed160x3`, `ufixed160x4`, `ufixed160x5`, `ufixed160x6`, `ufixed160x7`, `ufixed160x8`, `ufixed160x9`, `ufixed160x10`, `ufixed160x11`, `ufixed160x12`, `ufixed160x13`, `ufixed160x14`, `ufixed160x15`, `ufixed160x16`, `ufixed160x17`, `ufixed160x18`, `ufixed160x19`, `ufixed160x20`, `ufixed160x21`, `ufixed160x22`, `ufixed160x23`, `ufixed160x24`, `ufixed160x25`, `ufixed160x26`, `ufixed160x27`, `ufixed160x28`, `ufixed160x29`, `ufixed160x30`, `ufixed160x31`, `ufixed160x32`, `ufixed160x33`, `ufixed160x34`, `ufixed160x35`, `ufixed160x36`, `ufixed160x37`, `ufixed160x38`, `ufixed160x39`, `ufixed160x40`, `ufixed160x41`, `ufixed160x42`, `ufixed160x43`, `ufixed160x44`, `ufixed160x45`, `ufixed160x46`, `ufixed160x47`, `ufixed160x48`, `ufixed160x49`, `ufixed160x50`, `ufixed160x51`, `ufixed160x52`, `ufixed160x53`, `ufixed160x54`, `ufixed160x55`, `ufixed160x56`, `ufixed160x57`, `ufixed160x58`, `ufixed160x59`, `ufixed160x60`, `ufixed160x61`, `ufixed160x62`, `ufixed160x63`, `ufixed160x64`, `ufixed160x65`, `ufixed160x66`, `ufixed160x67`, `ufixed160x68`, `ufixed160x69`, `ufixed160x70`, `ufixed160x71`, `ufixed160x72`, `ufixed160x73`, `ufixed160x74`, `ufixed160x75`, `ufixed160x76`, `ufixed160x77`, `ufixed160x78`, `ufixed160x79`, `ufixed160x80`, `ufixed168x0`, `ufixed168x1`, `ufixed168x2`, `ufixed168x3`, `ufixed168x4`, `ufixed168x5`, `ufixed168x6`, `ufixed168x7`, `ufixed168x8`, `ufixed168x9`, `ufixed168x10`, `ufixed168x11`, `ufixed168x12`, `ufixed168x13`, `ufixed168x14`, `ufixed168x15`, `ufixed168x16`, `ufixed168x17`, `ufixed168x18`, `ufixed168x19`, `ufixed168x20`, `ufixed168x21`, `ufixed168x22`, `ufixed168x23`, `ufixed168x24`, `ufixed168x25`, `ufixed168x26`, `ufixed168x27`, `ufixed168x28`, `ufixed168x29`, `ufixed168x30`, `ufixed168x31`, `ufixed168x32`, 
`ufixed168x33`, `ufixed168x34`, `ufixed168x35`, `ufixed168x36`, `ufixed168x37`, `ufixed168x38`, `ufixed168x39`, `ufixed168x40`, `ufixed168x41`, `ufixed168x42`, `ufixed168x43`, `ufixed168x44`, `ufixed168x45`, `ufixed168x46`, `ufixed168x47`, `ufixed168x48`, `ufixed168x49`, `ufixed168x50`, `ufixed168x51`, `ufixed168x52`, `ufixed168x53`, `ufixed168x54`, `ufixed168x55`, `ufixed168x56`, `ufixed168x57`, `ufixed168x58`, `ufixed168x59`, `ufixed168x60`, `ufixed168x61`, `ufixed168x62`, `ufixed168x63`, `ufixed168x64`, `ufixed168x65`, `ufixed168x66`, `ufixed168x67`, `ufixed168x68`, `ufixed168x69`, `ufixed168x70`, `ufixed168x71`, `ufixed168x72`, `ufixed168x73`, `ufixed168x74`, `ufixed168x75`, `ufixed168x76`, `ufixed168x77`, `ufixed168x78`, `ufixed168x79`, `ufixed168x80`, `ufixed176x0`, `ufixed176x1`, `ufixed176x2`, `ufixed176x3`, `ufixed176x4`, `ufixed176x5`, `ufixed176x6`, `ufixed176x7`, `ufixed176x8`, `ufixed176x9`, `ufixed176x10`, `ufixed176x11`, `ufixed176x12`, `ufixed176x13`, `ufixed176x14`, `ufixed176x15`, `ufixed176x16`, `ufixed176x17`, `ufixed176x18`, `ufixed176x19`, `ufixed176x20`, `ufixed176x21`, `ufixed176x22`, `ufixed176x23`, `ufixed176x24`, `ufixed176x25`, `ufixed176x26`, `ufixed176x27`, `ufixed176x28`, `ufixed176x29`, `ufixed176x30`, `ufixed176x31`, `ufixed176x32`, `ufixed176x33`, `ufixed176x34`, `ufixed176x35`, `ufixed176x36`, `ufixed176x37`, `ufixed176x38`, `ufixed176x39`, `ufixed176x40`, `ufixed176x41`, `ufixed176x42`, `ufixed176x43`, `ufixed176x44`, `ufixed176x45`, `ufixed176x46`, `ufixed176x47`, `ufixed176x48`, `ufixed176x49`, `ufixed176x50`, `ufixed176x51`, `ufixed176x52`, `ufixed176x53`, `ufixed176x54`, `ufixed176x55`, `ufixed176x56`, `ufixed176x57`, `ufixed176x58`, `ufixed176x59`, `ufixed176x60`, `ufixed176x61`, `ufixed176x62`, `ufixed176x63`, `ufixed176x64`, `ufixed176x65`, `ufixed176x66`, `ufixed176x67`, `ufixed176x68`, `ufixed176x69`, `ufixed176x70`, `ufixed176x71`, `ufixed176x72`, `ufixed176x73`, `ufixed176x74`, `ufixed176x75`, `ufixed176x76`, `ufixed176x77`, `ufixed176x78`, `ufixed176x79`, `ufixed176x80`, `ufixed184x0`, `ufixed184x1`, `ufixed184x2`, `ufixed184x3`, `ufixed184x4`, `ufixed184x5`, `ufixed184x6`, `ufixed184x7`, `ufixed184x8`, `ufixed184x9`, `ufixed184x10`, `ufixed184x11`, `ufixed184x12`, `ufixed184x13`, `ufixed184x14`, `ufixed184x15`, `ufixed184x16`, `ufixed184x17`, `ufixed184x18`, `ufixed184x19`, `ufixed184x20`, `ufixed184x21`, `ufixed184x22`, `ufixed184x23`, `ufixed184x24`, `ufixed184x25`, `ufixed184x26`, `ufixed184x27`, `ufixed184x28`, `ufixed184x29`, `ufixed184x30`, `ufixed184x31`, `ufixed184x32`, `ufixed184x33`, `ufixed184x34`, `ufixed184x35`, `ufixed184x36`, `ufixed184x37`, `ufixed184x38`, `ufixed184x39`, `ufixed184x40`, `ufixed184x41`, `ufixed184x42`, `ufixed184x43`, `ufixed184x44`, `ufixed184x45`, `ufixed184x46`, `ufixed184x47`, `ufixed184x48`, `ufixed184x49`, `ufixed184x50`, `ufixed184x51`, `ufixed184x52`, `ufixed184x53`, `ufixed184x54`, `ufixed184x55`, `ufixed184x56`, `ufixed184x57`, `ufixed184x58`, `ufixed184x59`, `ufixed184x60`, `ufixed184x61`, `ufixed184x62`, `ufixed184x63`, `ufixed184x64`, `ufixed184x65`, `ufixed184x66`, `ufixed184x67`, `ufixed184x68`, `ufixed184x69`, `ufixed184x70`, `ufixed184x71`, `ufixed184x72`, `ufixed192x0`, `ufixed192x1`, `ufixed192x2`, `ufixed192x3`, `ufixed192x4`, `ufixed192x5`, `ufixed192x6`, `ufixed192x7`, `ufixed192x8`, `ufixed192x9`, `ufixed192x10`, `ufixed192x11`, `ufixed192x12`, `ufixed192x13`, `ufixed192x14`, `ufixed192x15`, `ufixed192x16`, `ufixed192x17`, `ufixed192x18`, `ufixed192x19`, `ufixed192x20`, `ufixed192x21`, 
`ufixed192x22`, `ufixed192x23`, `ufixed192x24`, `ufixed192x25`, `ufixed192x26`, `ufixed192x27`, `ufixed192x28`, `ufixed192x29`, `ufixed192x30`, `ufixed192x31`, `ufixed192x32`, `ufixed192x33`, `ufixed192x34`, `ufixed192x35`, `ufixed192x36`, `ufixed192x37`, `ufixed192x38`, `ufixed192x39`, `ufixed192x40`, `ufixed192x41`, `ufixed192x42`, `ufixed192x43`, `ufixed192x44`, `ufixed192x45`, `ufixed192x46`, `ufixed192x47`, `ufixed192x48`, `ufixed192x49`, `ufixed192x50`, `ufixed192x51`, `ufixed192x52`, `ufixed192x53`, `ufixed192x54`, `ufixed192x55`, `ufixed192x56`, `ufixed192x57`, `ufixed192x58`, `ufixed192x59`, `ufixed192x60`, `ufixed192x61`, `ufixed192x62`, `ufixed192x63`, `ufixed192x64`, `ufixed200x0`, `ufixed200x1`, `ufixed200x2`, `ufixed200x3`, `ufixed200x4`, `ufixed200x5`, `ufixed200x6`, `ufixed200x7`, `ufixed200x8`, `ufixed200x9`, `ufixed200x10`, `ufixed200x11`, `ufixed200x12`, `ufixed200x13`, `ufixed200x14`, `ufixed200x15`, `ufixed200x16`, `ufixed200x17`, `ufixed200x18`, `ufixed200x19`, `ufixed200x20`, `ufixed200x21`, `ufixed200x22`, `ufixed200x23`, `ufixed200x24`, `ufixed200x25`, `ufixed200x26`, `ufixed200x27`, `ufixed200x28`, `ufixed200x29`, `ufixed200x30`, `ufixed200x31`, `ufixed200x32`, `ufixed200x33`, `ufixed200x34`, `ufixed200x35`, `ufixed200x36`, `ufixed200x37`, `ufixed200x38`, `ufixed200x39`, `ufixed200x40`, `ufixed200x41`, `ufixed200x42`, `ufixed200x43`, `ufixed200x44`, `ufixed200x45`, `ufixed200x46`, `ufixed200x47`, `ufixed200x48`, `ufixed200x49`, `ufixed200x50`, `ufixed200x51`, `ufixed200x52`, `ufixed200x53`, `ufixed200x54`, `ufixed200x55`, `ufixed200x56`, `ufixed208x0`, `ufixed208x1`, `ufixed208x2`, `ufixed208x3`, `ufixed208x4`, `ufixed208x5`, `ufixed208x6`, `ufixed208x7`, `ufixed208x8`, `ufixed208x9`, `ufixed208x10`, `ufixed208x11`, `ufixed208x12`, `ufixed208x13`, `ufixed208x14`, `ufixed208x15`, `ufixed208x16`, `ufixed208x17`, `ufixed208x18`, `ufixed208x19`, `ufixed208x20`, `ufixed208x21`, `ufixed208x22`, `ufixed208x23`, `ufixed208x24`, `ufixed208x25`, `ufixed208x26`, `ufixed208x27`, `ufixed208x28`, `ufixed208x29`, `ufixed208x30`, `ufixed208x31`, `ufixed208x32`, `ufixed208x33`, `ufixed208x34`, `ufixed208x35`, `ufixed208x36`, `ufixed208x37`, `ufixed208x38`, `ufixed208x39`, `ufixed208x40`, `ufixed208x41`, `ufixed208x42`, `ufixed208x43`, `ufixed208x44`, `ufixed208x45`, `ufixed208x46`, `ufixed208x47`, `ufixed208x48`, `ufixed216x0`, `ufixed216x1`, `ufixed216x2`, `ufixed216x3`, `ufixed216x4`, `ufixed216x5`, `ufixed216x6`, `ufixed216x7`, `ufixed216x8`, `ufixed216x9`, `ufixed216x10`, `ufixed216x11`, `ufixed216x12`, `ufixed216x13`, `ufixed216x14`, `ufixed216x15`, `ufixed216x16`, `ufixed216x17`, `ufixed216x18`, `ufixed216x19`, `ufixed216x20`, `ufixed216x21`, `ufixed216x22`, `ufixed216x23`, `ufixed216x24`, `ufixed216x25`, `ufixed216x26`, `ufixed216x27`, `ufixed216x28`, `ufixed216x29`, `ufixed216x30`, `ufixed216x31`, `ufixed216x32`, `ufixed216x33`, `ufixed216x34`, `ufixed216x35`, `ufixed216x36`, `ufixed216x37`, `ufixed216x38`, `ufixed216x39`, `ufixed216x40`, `ufixed224x0`, `ufixed224x1`, `ufixed224x2`, `ufixed224x3`, `ufixed224x4`, `ufixed224x5`, `ufixed224x6`, `ufixed224x7`, `ufixed224x8`, `ufixed224x9`, `ufixed224x10`, `ufixed224x11`, `ufixed224x12`, `ufixed224x13`, `ufixed224x14`, `ufixed224x15`, `ufixed224x16`, `ufixed224x17`, `ufixed224x18`, `ufixed224x19`, `ufixed224x20`, `ufixed224x21`, `ufixed224x22`, `ufixed224x23`, `ufixed224x24`, `ufixed224x25`, `ufixed224x26`, `ufixed224x27`, `ufixed224x28`, `ufixed224x29`, `ufixed224x30`, `ufixed224x31`, `ufixed224x32`, `ufixed232x0`, 
`ufixed232x1`, `ufixed232x2`, `ufixed232x3`, `ufixed232x4`, `ufixed232x5`, `ufixed232x6`, `ufixed232x7`, `ufixed232x8`, `ufixed232x9`, `ufixed232x10`, `ufixed232x11`, `ufixed232x12`, `ufixed232x13`, `ufixed232x14`, `ufixed232x15`, `ufixed232x16`, `ufixed232x17`, `ufixed232x18`, `ufixed232x19`, `ufixed232x20`, `ufixed232x21`, `ufixed232x22`, `ufixed232x23`, `ufixed232x24`, `ufixed240x0`, `ufixed240x1`, `ufixed240x2`, `ufixed240x3`, `ufixed240x4`, `ufixed240x5`, `ufixed240x6`, `ufixed240x7`, `ufixed240x8`, `ufixed240x9`, `ufixed240x10`, `ufixed240x11`, `ufixed240x12`, `ufixed240x13`, `ufixed240x14`, `ufixed240x15`, `ufixed240x16`, `ufixed248x0`, `ufixed248x1`, `ufixed248x2`, `ufixed248x3`, `ufixed248x4`, `ufixed248x5`, `ufixed248x6`, `ufixed248x7`, `ufixed248x8`, `ufixed256x0`), KeywordType, nil},
+        },
+        "numbers": {
+            {`0[xX][0-9a-fA-F]+`, LiteralNumberHex, nil},
+            {`[0-9]+`, LiteralNumberInteger, nil},
+        },
+        "string-parse-common": {
+            {`\\(u[0-9a-fA-F]{4}|x..|[^x])`, LiteralStringEscape, nil},
+            {`[^\\"\'\n]+`, LiteralString, nil},
+            {`\\\n`, LiteralString, nil},
+            {`\\`, LiteralString, nil},
+        },
+        "string-parse-double": {
+            {`"`, LiteralString, Pop(1)},
+            {`'`, LiteralString, nil},
+        },
+        "string-parse-single": {
+            {`'`, LiteralString, Pop(1)},
+            {`"`, LiteralString, nil},
+        },
+        "strings": {
+            {`hex'[0-9a-fA-F]+'`, LiteralString, nil},
+            {`hex"[0-9a-fA-F]+"`, LiteralString, nil},
+            {`"`, LiteralString, Combined("string-parse-common", "string-parse-double")},
+            {`'`, LiteralString, Combined("string-parse-common", "string-parse-single")},
+        },
+        "whitespace": {
+            {`\s+`, Text, nil},
+        },
+        "root": {
+            Include("comments"),
+            Include("keywords-types"),
+            Include("keywords-other"),
+            Include("numbers"),
+            Include("strings"),
+            Include("whitespace"),
+            {`\+\+|--|\*\*|\?|:|~|&&|\|\||=>|==?|!=?|(<<|>>>?|[-<>+*%&|^/])=?`, Operator, nil},
+            {`[{(\[;,]`, Punctuation, nil},
+            {`[})\].]`, Punctuation, nil},
+            {`(abi|block|msg|tx)\b`, NameBuiltin, nil},
+            {`(?!abi\.)(decode|encode|encodePacked|encodeWithSelector|encodeWithSignature|encodeWithSelector)\b`, NameBuiltin, nil},
+            {`(?!block\.)(chainid|coinbase|difficulty|gaslimit|number|timestamp)\b`, NameBuiltin, nil},
+            {`(?!msg\.)(data|gas|sender|value)\b`, NameBuiltin, nil},
+            {`(?!tx\.)(gasprice|origin)\b`, NameBuiltin, nil},
+            {`(type)(\()([a-zA-Z_]\w*)(\))`, ByGroups(NameBuiltin, Punctuation, NameClass, Punctuation), nil},
+            {`(?!type\([a-zA-Z_]\w*\)\.)(creationCode|interfaceId|max|min|name|runtimeCode)\b`, NameBuiltin, nil},
+            {`(now|this|super|gasleft)\b`, NameBuiltin, nil},
+            {`(selfdestruct|suicide)\b`, NameBuiltin, nil},
+            {`(?!0x[0-9a-fA-F]+\.)(balance|code|codehash|send|transfer)\b`, NameBuiltin, nil},
+            {`(assert|revert|require)\b`, NameBuiltin, nil},
+            {`(call|callcode|delegatecall)\b`, NameBuiltin, nil},
+            {`selector\b`, NameBuiltin, nil},
+            {`(addmod|blockhash|ecrecover|keccak256|mulmod|ripemd160|sha256|sha3)\b`, NameBuiltin, nil},
+            {`[a-zA-Z_]\w*`, Name, nil},
+        },
+    }
+}
diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/sparql.go b/vendor/github.com/alecthomas/chroma/lexers/s/sparql.go
new file mode 100644
index 000000000..3f56f9c03
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/lexers/s/sparql.go
@@ -0,0 +1,73 @@
+package s
+
+import (
+    . "github.com/alecthomas/chroma" // nolint
+    "github.com/alecthomas/chroma/lexers/internal"
+)
+
+// Sparql lexer.
+var Sparql = internal.Register(MustNewLazyLexer( + &Config{ + Name: "SPARQL", + Aliases: []string{"sparql"}, + Filenames: []string{"*.rq", "*.sparql"}, + MimeTypes: []string{"application/sparql-query"}, + }, + sparqlRules, +)) + +func sparqlRules() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`((?i)select|construct|describe|ask|where|filter|group\s+by|minus|distinct|reduced|from\s+named|from|order\s+by|desc|asc|limit|offset|bindings|load|clear|drop|create|add|move|copy|insert\s+data|delete\s+data|delete\s+where|delete|insert|using\s+named|using|graph|default|named|all|optional|service|silent|bind|union|not\s+in|in|as|having|to|prefix|base)\b`, Keyword, nil}, + {`(a)\b`, Keyword, nil}, + {"(<(?:[^<>\"{}|^`\\\\\\x00-\\x20])*>)", NameLabel, nil}, + {`(_:[_\p{L}\p{N}](?:[-_.\p{L}\p{N}]*[-_\p{L}\p{N}])?)`, NameLabel, nil}, + {`[?$][_\p{L}\p{N}]+`, NameVariable, nil}, + {`([\p{L}][-_.\p{L}\p{N}]*)?(\:)((?:[_:\p{L}\p{N}]|(?:%[0-9A-Fa-f][0-9A-Fa-f])|(?:\\[ _~.\-!$&"()*+,;=/?#@%]))(?:(?:[-_:.\p{L}\p{N}]|(?:%[0-9A-Fa-f][0-9A-Fa-f])|(?:\\[ _~.\-!$&"()*+,;=/?#@%]))*(?:[-_:\p{L}\p{N}]|(?:%[0-9A-Fa-f][0-9A-Fa-f])|(?:\\[ _~.\-!$&"()*+,;=/?#@%])))?)?`, ByGroups(NameNamespace, Punctuation, NameTag), nil}, + {`((?i)str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|contains|strstarts|strends|strbefore|strafter|year|month|day|hours|minutes|seconds|timezone|tz|now|md5|sha1|sha256|sha384|sha512|coalesce|if|strlang|strdt|sameterm|isiri|isuri|isblank|isliteral|isnumeric|regex|substr|replace|exists|not\s+exists|count|sum|min|max|avg|sample|group_concat|separator)\b`, NameFunction, nil}, + {`(true|false)`, KeywordConstant, nil}, + {`[+\-]?(\d+\.\d*[eE][+-]?\d+|\.?\d+[eE][+-]?\d+)`, LiteralNumberFloat, nil}, + {`[+\-]?(\d+\.\d*|\.\d+)`, LiteralNumberFloat, nil}, + {`[+\-]?\d+`, LiteralNumberInteger, nil}, + {`(\|\||&&|=|\*|\-|\+|/|!=|<=|>=|!|<|>)`, Operator, nil}, + {`[(){}.;,:^\[\]]`, Punctuation, nil}, + {`#[^\n]*`, Comment, nil}, + {`"""`, LiteralString, Push("triple-double-quoted-string")}, + {`"`, LiteralString, Push("single-double-quoted-string")}, + {`'''`, LiteralString, Push("triple-single-quoted-string")}, + {`'`, LiteralString, Push("single-single-quoted-string")}, + }, + "triple-double-quoted-string": { + {`"""`, LiteralString, Push("end-of-string")}, + {`[^\\]+`, LiteralString, nil}, + {`\\`, LiteralString, Push("string-escape")}, + }, + "single-double-quoted-string": { + {`"`, LiteralString, Push("end-of-string")}, + {`[^"\\\n]+`, LiteralString, nil}, + {`\\`, LiteralString, Push("string-escape")}, + }, + "triple-single-quoted-string": { + {`'''`, LiteralString, Push("end-of-string")}, + {`[^\\]+`, LiteralString, nil}, + {`\\`, LiteralStringEscape, Push("string-escape")}, + }, + "single-single-quoted-string": { + {`'`, LiteralString, Push("end-of-string")}, + {`[^'\\\n]+`, LiteralString, nil}, + {`\\`, LiteralString, Push("string-escape")}, + }, + "string-escape": { + {`u[0-9A-Fa-f]{4}`, LiteralStringEscape, Pop(1)}, + {`U[0-9A-Fa-f]{8}`, LiteralStringEscape, Pop(1)}, + {`.`, LiteralStringEscape, Pop(1)}, + }, + "end-of-string": { + {`(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)`, ByGroups(Operator, NameFunction), Pop(2)}, + {`\^\^`, Operator, Pop(2)}, + Default(Pop(2)), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/sql.go b/vendor/github.com/alecthomas/chroma/lexers/s/sql.go new file mode 100644 index 000000000..fb8c8edf1 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/sql.go @@ -0,0 +1,53 
@@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// SQL lexer. +var SQL = internal.Register(MustNewLazyLexer( + &Config{ + Name: "SQL", + Aliases: []string{"sql"}, + Filenames: []string{"*.sql"}, + MimeTypes: []string{"text/x-sql"}, + NotMultiline: true, + CaseInsensitive: true, + }, + sqlRules, +)) + +func sqlRules() Rules { + return Rules{ + "root": { + {`\s+`, TextWhitespace, nil}, + {`--.*\n?`, CommentSingle, nil}, + {`/\*`, CommentMultiline, Push("multiline-comments")}, + {`'`, LiteralStringSingle, Push("string")}, + {`"`, LiteralStringDouble, Push("double-string")}, + {Words(``, `\b`, `ABORT`, `ABS`, `ABSOLUTE`, `ACCESS`, `ADA`, `ADD`, `ADMIN`, `AFTER`, `AGGREGATE`, `ALIAS`, `ALL`, `ALLOCATE`, `ALTER`, `ANALYSE`, `ANALYZE`, `AND`, `ANY`, `ARE`, `AS`, `ASC`, `ASENSITIVE`, `ASSERTION`, `ASSIGNMENT`, `ASYMMETRIC`, `AT`, `ATOMIC`, `AUTHORIZATION`, `AVG`, `BACKWARD`, `BEFORE`, `BEGIN`, `BETWEEN`, `BITVAR`, `BIT_LENGTH`, `BOTH`, `BREADTH`, `BY`, `C`, `CACHE`, `CALL`, `CALLED`, `CARDINALITY`, `CASCADE`, `CASCADED`, `CASE`, `CAST`, `CATALOG`, `CATALOG_NAME`, `CHAIN`, `CHARACTERISTICS`, `CHARACTER_LENGTH`, `CHARACTER_SET_CATALOG`, `CHARACTER_SET_NAME`, `CHARACTER_SET_SCHEMA`, `CHAR_LENGTH`, `CHECK`, `CHECKED`, `CHECKPOINT`, `CLASS`, `CLASS_ORIGIN`, `CLOB`, `CLOSE`, `CLUSTER`, `COALSECE`, `COBOL`, `COLLATE`, `COLLATION`, `COLLATION_CATALOG`, `COLLATION_NAME`, `COLLATION_SCHEMA`, `COLUMN`, `COLUMN_NAME`, `COMMAND_FUNCTION`, `COMMAND_FUNCTION_CODE`, `COMMENT`, `COMMIT`, `COMMITTED`, `COMPLETION`, `CONDITION_NUMBER`, `CONNECT`, `CONNECTION`, `CONNECTION_NAME`, `CONSTRAINT`, `CONSTRAINTS`, `CONSTRAINT_CATALOG`, `CONSTRAINT_NAME`, `CONSTRAINT_SCHEMA`, `CONSTRUCTOR`, `CONTAINS`, `CONTINUE`, `CONVERSION`, `CONVERT`, `COPY`, `CORRESPONTING`, `COUNT`, `CREATE`, `CREATEDB`, `CREATEUSER`, `CROSS`, `CUBE`, `CURRENT`, `CURRENT_DATE`, `CURRENT_PATH`, `CURRENT_ROLE`, `CURRENT_TIME`, `CURRENT_TIMESTAMP`, `CURRENT_USER`, `CURSOR`, `CURSOR_NAME`, `CYCLE`, `DATA`, `DATABASE`, `DATETIME_INTERVAL_CODE`, `DATETIME_INTERVAL_PRECISION`, `DAY`, `DEALLOCATE`, `DECLARE`, `DEFAULT`, `DEFAULTS`, `DEFERRABLE`, `DEFERRED`, `DEFINED`, `DEFINER`, `DELETE`, `DELIMITER`, `DELIMITERS`, `DEREF`, `DESC`, `DESCRIBE`, `DESCRIPTOR`, `DESTROY`, `DESTRUCTOR`, `DETERMINISTIC`, `DIAGNOSTICS`, `DICTIONARY`, `DISCONNECT`, `DISPATCH`, `DISTINCT`, `DO`, `DOMAIN`, `DROP`, `DYNAMIC`, `DYNAMIC_FUNCTION`, `DYNAMIC_FUNCTION_CODE`, `EACH`, `ELSE`, `ELSIF`, `ENCODING`, `ENCRYPTED`, `END`, `END-EXEC`, `EQUALS`, `ESCAPE`, `EVERY`, `EXCEPTION`, `EXCEPT`, `EXCLUDING`, `EXCLUSIVE`, `EXEC`, `EXECUTE`, `EXISTING`, `EXISTS`, `EXPLAIN`, `EXTERNAL`, `EXTRACT`, `FALSE`, `FETCH`, `FINAL`, `FIRST`, `FOR`, `FORCE`, `FOREIGN`, `FORTRAN`, `FORWARD`, `FOUND`, `FREE`, `FREEZE`, `FROM`, `FULL`, `FUNCTION`, `G`, `GENERAL`, `GENERATED`, `GET`, `GLOBAL`, `GO`, `GOTO`, `GRANT`, `GRANTED`, `GROUP`, `GROUPING`, `HANDLER`, `HAVING`, `HIERARCHY`, `HOLD`, `HOST`, `IDENTITY`, `IF`, `IGNORE`, `ILIKE`, `IMMEDIATE`, `IMMUTABLE`, `IMPLEMENTATION`, `IMPLICIT`, `IN`, `INCLUDING`, `INCREMENT`, `INDEX`, `INDITCATOR`, `INFIX`, `INHERITS`, `INITIALIZE`, `INITIALLY`, `INNER`, `INOUT`, `INPUT`, `INSENSITIVE`, `INSERT`, `INSTANTIABLE`, `INSTEAD`, `INTERSECT`, `INTO`, `INVOKER`, `IS`, `ISNULL`, `ISOLATION`, `ITERATE`, `JOIN`, `KEY`, `KEY_MEMBER`, `KEY_TYPE`, `LANCOMPILER`, `LANGUAGE`, `LARGE`, `LAST`, `LATERAL`, `LEADING`, `LEFT`, `LENGTH`, `LESS`, `LEVEL`, `LIKE`, `LIMIT`, `LISTEN`, `LOAD`, `LOCAL`, `LOCALTIME`, 
`LOCALTIMESTAMP`, `LOCATION`, `LOCATOR`, `LOCK`, `LOWER`, `MAP`, `MATCH`, `MAX`, `MAXVALUE`, `MESSAGE_LENGTH`, `MESSAGE_OCTET_LENGTH`, `MESSAGE_TEXT`, `METHOD`, `MIN`, `MINUTE`, `MINVALUE`, `MOD`, `MODE`, `MODIFIES`, `MODIFY`, `MONTH`, `MORE`, `MOVE`, `MUMPS`, `NAMES`, `NATIONAL`, `NATURAL`, `NCHAR`, `NCLOB`, `NEW`, `NEXT`, `NO`, `NOCREATEDB`, `NOCREATEUSER`, `NONE`, `NOT`, `NOTHING`, `NOTIFY`, `NOTNULL`, `NULL`, `NULLABLE`, `NULLIF`, `OBJECT`, `OCTET_LENGTH`, `OF`, `OFF`, `OFFSET`, `OIDS`, `OLD`, `ON`, `ONLY`, `OPEN`, `OPERATION`, `OPERATOR`, `OPTION`, `OPTIONS`, `OR`, `ORDER`, `ORDINALITY`, `OUT`, `OUTER`, `OUTPUT`, `OVERLAPS`, `OVERLAY`, `OVERRIDING`, `OWNER`, `PAD`, `PARAMETER`, `PARAMETERS`, `PARAMETER_MODE`, `PARAMATER_NAME`, `PARAMATER_ORDINAL_POSITION`, `PARAMETER_SPECIFIC_CATALOG`, `PARAMETER_SPECIFIC_NAME`, `PARAMATER_SPECIFIC_SCHEMA`, `PARTIAL`, `PASCAL`, `PENDANT`, `PLACING`, `PLI`, `POSITION`, `POSTFIX`, `PRECISION`, `PREFIX`, `PREORDER`, `PREPARE`, `PRESERVE`, `PRIMARY`, `PRIOR`, `PRIVILEGES`, `PROCEDURAL`, `PROCEDURE`, `PUBLIC`, `READ`, `READS`, `RECHECK`, `RECURSIVE`, `REF`, `REFERENCES`, `REFERENCING`, `REINDEX`, `RELATIVE`, `RENAME`, `REPEATABLE`, `REPLACE`, `RESET`, `RESTART`, `RESTRICT`, `RESULT`, `RETURN`, `RETURNED_LENGTH`, `RETURNED_OCTET_LENGTH`, `RETURNED_SQLSTATE`, `RETURNS`, `REVOKE`, `RIGHT`, `ROLE`, `ROLLBACK`, `ROLLUP`, `ROUTINE`, `ROUTINE_CATALOG`, `ROUTINE_NAME`, `ROUTINE_SCHEMA`, `ROW`, `ROWS`, `ROW_COUNT`, `RULE`, `SAVE_POINT`, `SCALE`, `SCHEMA`, `SCHEMA_NAME`, `SCOPE`, `SCROLL`, `SEARCH`, `SECOND`, `SECURITY`, `SELECT`, `SELF`, `SENSITIVE`, `SERIALIZABLE`, `SERVER_NAME`, `SESSION`, `SESSION_USER`, `SET`, `SETOF`, `SETS`, `SHARE`, `SHOW`, `SIMILAR`, `SIMPLE`, `SIZE`, `SOME`, `SOURCE`, `SPACE`, `SPECIFIC`, `SPECIFICTYPE`, `SPECIFIC_NAME`, `SQL`, `SQLCODE`, `SQLERROR`, `SQLEXCEPTION`, `SQLSTATE`, `SQLWARNINIG`, `STABLE`, `START`, `STATE`, `STATEMENT`, `STATIC`, `STATISTICS`, `STDIN`, `STDOUT`, `STORAGE`, `STRICT`, `STRUCTURE`, `STYPE`, `SUBCLASS_ORIGIN`, `SUBLIST`, `SUBSTRING`, `SUM`, `SYMMETRIC`, `SYSID`, `SYSTEM`, `SYSTEM_USER`, `TABLE`, `TABLE_NAME`, ` TEMP`, `TEMPLATE`, `TEMPORARY`, `TERMINATE`, `THAN`, `THEN`, `TIMESTAMP`, `TIMEZONE_HOUR`, `TIMEZONE_MINUTE`, `TO`, `TOAST`, `TRAILING`, `TRANSACTION`, `TRANSACTIONS_COMMITTED`, `TRANSACTIONS_ROLLED_BACK`, `TRANSACTION_ACTIVE`, `TRANSFORM`, `TRANSFORMS`, `TRANSLATE`, `TRANSLATION`, `TREAT`, `TRIGGER`, `TRIGGER_CATALOG`, `TRIGGER_NAME`, `TRIGGER_SCHEMA`, `TRIM`, `TRUE`, `TRUNCATE`, `TRUSTED`, `TYPE`, `UNCOMMITTED`, `UNDER`, `UNENCRYPTED`, `UNION`, `UNIQUE`, `UNKNOWN`, `UNLISTEN`, `UNNAMED`, `UNNEST`, `UNTIL`, `UPDATE`, `UPPER`, `USAGE`, `USER`, `USER_DEFINED_TYPE_CATALOG`, `USER_DEFINED_TYPE_NAME`, `USER_DEFINED_TYPE_SCHEMA`, `USING`, `VACUUM`, `VALID`, `VALIDATOR`, `VALUES`, `VARIABLE`, `VERBOSE`, `VERSION`, `VIEW`, `VOLATILE`, `WHEN`, `WHENEVER`, `WHERE`, `WITH`, `WITHOUT`, `WORK`, `WRITE`, `YEAR`, `ZONE`), Keyword, nil}, + {Words(``, `\b`, `ARRAY`, `BIGINT`, `BINARY`, `BIT`, `BLOB`, `BOOLEAN`, `CHAR`, `CHARACTER`, `DATE`, `DEC`, `DECIMAL`, `FLOAT`, `INT`, `INTEGER`, `INTERVAL`, `NUMBER`, `NUMERIC`, `REAL`, `SERIAL`, `SMALLINT`, `VARCHAR`, `VARYING`, `INT8`, `SERIAL8`, `TEXT`), NameBuiltin, nil}, + {"[+*/<>=~!@#%^&|`?-]", Operator, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`[a-z_][\w$]*`, Name, nil}, + {`[;:()\[\],.]`, Punctuation, nil}, + }, + "multiline-comments": { + {`/\*`, CommentMultiline, Push("multiline-comments")}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[^/*]+`, CommentMultiline, nil}, + 
{`[/*]`, CommentMultiline, nil}, + }, + "string": { + {`[^']+`, LiteralStringSingle, nil}, + {`''`, LiteralStringSingle, nil}, + {`'`, LiteralStringSingle, Pop(1)}, + }, + "double-string": { + {`[^"]+`, LiteralStringDouble, nil}, + {`""`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/squid.go b/vendor/github.com/alecthomas/chroma/lexers/s/squid.go new file mode 100644 index 000000000..b4da3da10 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/squid.go @@ -0,0 +1,42 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Squidconf lexer. +var Squidconf = internal.Register(MustNewLazyLexer( + &Config{ + Name: "SquidConf", + Aliases: []string{"squidconf", "squid.conf", "squid"}, + Filenames: []string{"squid.conf"}, + MimeTypes: []string{"text/x-squidconf"}, + NotMultiline: true, + CaseInsensitive: true, + }, + squidconfRules, +)) + +func squidconfRules() Rules { + return Rules{ + "root": { + {`\s+`, TextWhitespace, nil}, + {`#`, Comment, Push("comment")}, + {Words(`\b`, `\b`, `access_log`, `acl`, `always_direct`, `announce_host`, `announce_period`, `announce_port`, `announce_to`, `anonymize_headers`, `append_domain`, `as_whois_server`, `auth_param_basic`, `authenticate_children`, `authenticate_program`, `authenticate_ttl`, `broken_posts`, `buffered_logs`, `cache_access_log`, `cache_announce`, `cache_dir`, `cache_dns_program`, `cache_effective_group`, `cache_effective_user`, `cache_host`, `cache_host_acl`, `cache_host_domain`, `cache_log`, `cache_mem`, `cache_mem_high`, `cache_mem_low`, `cache_mgr`, `cachemgr_passwd`, `cache_peer`, `cache_peer_access`, `cahce_replacement_policy`, `cache_stoplist`, `cache_stoplist_pattern`, `cache_store_log`, `cache_swap`, `cache_swap_high`, `cache_swap_log`, `cache_swap_low`, `client_db`, `client_lifetime`, `client_netmask`, `connect_timeout`, `coredump_dir`, `dead_peer_timeout`, `debug_options`, `delay_access`, `delay_class`, `delay_initial_bucket_level`, `delay_parameters`, `delay_pools`, `deny_info`, `dns_children`, `dns_defnames`, `dns_nameservers`, `dns_testnames`, `emulate_httpd_log`, `err_html_text`, `fake_user_agent`, `firewall_ip`, `forwarded_for`, `forward_snmpd_port`, `fqdncache_size`, `ftpget_options`, `ftpget_program`, `ftp_list_width`, `ftp_passive`, `ftp_user`, `half_closed_clients`, `header_access`, `header_replace`, `hierarchy_stoplist`, `high_response_time_warning`, `high_page_fault_warning`, `hosts_file`, `htcp_port`, `http_access`, `http_anonymizer`, `httpd_accel`, `httpd_accel_host`, `httpd_accel_port`, `httpd_accel_uses_host_header`, `httpd_accel_with_proxy`, `http_port`, `http_reply_access`, `icp_access`, `icp_hit_stale`, `icp_port`, `icp_query_timeout`, `ident_lookup`, `ident_lookup_access`, `ident_timeout`, `incoming_http_average`, `incoming_icp_average`, `inside_firewall`, `ipcache_high`, `ipcache_low`, `ipcache_size`, `local_domain`, `local_ip`, `logfile_rotate`, `log_fqdn`, `log_icp_queries`, `log_mime_hdrs`, `maximum_object_size`, `maximum_single_addr_tries`, `mcast_groups`, `mcast_icp_query_timeout`, `mcast_miss_addr`, `mcast_miss_encode_key`, `mcast_miss_port`, `memory_pools`, `memory_pools_limit`, `memory_replacement_policy`, `mime_table`, `min_http_poll_cnt`, `min_icp_poll_cnt`, `minimum_direct_hops`, `minimum_object_size`, `minimum_retry_timeout`, `miss_access`, `negative_dns_ttl`, `negative_ttl`, `neighbor_timeout`, `neighbor_type_domain`, 
`netdb_high`, `netdb_low`, `netdb_ping_period`, `netdb_ping_rate`, `never_direct`, `no_cache`, `passthrough_proxy`, `pconn_timeout`, `pid_filename`, `pinger_program`, `positive_dns_ttl`, `prefer_direct`, `proxy_auth`, `proxy_auth_realm`, `query_icmp`, `quick_abort`, `quick_abort_max`, `quick_abort_min`, `quick_abort_pct`, `range_offset_limit`, `read_timeout`, `redirect_children`, `redirect_program`, `redirect_rewrites_host_header`, `reference_age`, `refresh_pattern`, `reload_into_ims`, `request_body_max_size`, `request_size`, `request_timeout`, `shutdown_lifetime`, `single_parent_bypass`, `siteselect_timeout`, `snmp_access`, `snmp_incoming_address`, `snmp_port`, `source_ping`, `ssl_proxy`, `store_avg_object_size`, `store_objects_per_bucket`, `strip_query_terms`, `swap_level1_dirs`, `swap_level2_dirs`, `tcp_incoming_address`, `tcp_outgoing_address`, `tcp_recv_bufsize`, `test_reachability`, `udp_hit_obj`, `udp_hit_obj_size`, `udp_incoming_address`, `udp_outgoing_address`, `unique_hostname`, `unlinkd_program`, `uri_whitespace`, `useragent_log`, `visible_hostname`, `wais_relay`, `wais_relay_host`, `wais_relay_port`), Keyword, nil}, + {Words(`\b`, `\b`, `proxy-only`, `weight`, `ttl`, `no-query`, `default`, `round-robin`, `multicast-responder`, `on`, `off`, `all`, `deny`, `allow`, `via`, `parent`, `no-digest`, `heap`, `lru`, `realm`, `children`, `q1`, `q2`, `credentialsttl`, `none`, `disable`, `offline_toggle`, `diskd`), NameConstant, nil}, + {Words(`\b`, `\b`, `shutdown`, `info`, `parameter`, `server_list`, `client_list`, `squid.conf`), LiteralString, nil}, + {Words(`stats/`, `\b`, `objects`, `vm_objects`, `utilization`, `ipcache`, `fqdncache`, `dns`, `redirector`, `io`, `reply_headers`, `filedescriptors`, `netdb`), LiteralString, nil}, + {Words(`log/`, `=`, `status`, `enable`, `disable`, `clear`), LiteralString, nil}, + {Words(`\b`, `\b`, `url_regex`, `urlpath_regex`, `referer_regex`, `port`, `proto`, `req_mime_type`, `rep_mime_type`, `method`, `browser`, `user`, `src`, `dst`, `time`, `dstdomain`, `ident`, `snmp_community`), Keyword, nil}, + {`(?:(?:(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})(?:\.(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})){3})|(?!.*::.*::)(?:(?!:)|:(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?\=|\<|\>|\=\=|\!\=|\&\&|\|\||\=|\:\=|\?\=|\+\=|\-\=|\*\=|\/\=|\%\=|\?|\:)`, Operator, nil}, + {`\b(and|if unless|in|is|is a|is defined|is not|isnt|or|not)\b`, OperatorWord, nil}, + {`(\#[a-f0-9]{3,6})`, LiteralNumberHex, nil}, + {`\n`, Text, nil}, + }, + "inline-comment": { + {`\*/`, Comment, Pop(1)}, + }, + "atrule": { + {`\{`, Punctuation, Push("atcontent")}, + {`$`, Punctuation, Pop(1)}, + Include("root"), + }, + "atcontent": { + Include("root"), + {`\}`, Punctuation, Pop(2)}, + }, + "function-start": { + {`\)`, Punctuation, Pop(1)}, + Include("root"), + }, + "values": { + {`\s+`, Text, nil}, + {`(\#[a-f0-9]{3,6})`, LiteralNumberHex, nil}, + {Words(`\b`, `\b`, `absolute`, `alias`, `all`, `all-petite-caps`, `all-scroll`, `all-small-caps`, `allow-end`, `alpha`, `alternate`, `alternate-reverse`, `always`, `armenian`, `auto`, `avoid`, `avoid-column`, `avoid-page`, `backwards`, `balance`, `baseline`, `below`, `blink`, `block`, `bold`, `bolder`, `border-box`, `both`, `bottom`, `box-decoration`, `break-word`, `capitalize`, `cell`, `center`, `circle`, `clip`, `clone`, `close-quote`, `col-resize`, `collapse`, `color`, `color-burn`, `color-dodge`, `column`, `column-reverse`, `compact`, `condensed`, `contain`, `container`, 
`content-box`, `context-menu`, `copy`, `cover`, `crisp-edges`, `crosshair`, `currentColor`, `cursive`, `darken`, `dashed`, `decimal`, `decimal-leading-zero`, `default`, `descendants`, `difference`, `digits`, `disc`, `distribute`, `dot`, `dotted`, `double`, `double-circle`, `e-resize`, `each-line`, `ease`, `ease-in`, `ease-in-out`, `ease-out`, `edges`, `ellipsis`, `end`, `ew-resize`, `exclusion`, `expanded`, `extra-condensed`, `extra-expanded`, `fantasy`, `fill`, `fill-box`, `filled`, `first`, `fixed`, `flat`, `flex`, `flex-end`, `flex-start`, `flip`, `force-end`, `forwards`, `from-image`, `full-width`, `geometricPrecision`, `georgian`, `groove`, `hanging`, `hard-light`, `help`, `hidden`, `hide`, `horizontal`, `hue`, `icon`, `infinite`, `inherit`, `initial`, `ink`, `inline`, `inline-block`, `inline-flex`, `inline-table`, `inset`, `inside`, `inter-word`, `invert`, `isolate`, `italic`, `justify`, `large`, `larger`, `last`, `left`, `lighten`, `lighter`, `line-through`, `linear`, `list-item`, `local`, `loose`, `lower-alpha`, `lower-greek`, `lower-latin`, `lower-roman`, `lowercase`, `ltr`, `luminance`, `luminosity`, `mandatory`, `manipulation`, `manual`, `margin-box`, `match-parent`, `medium`, `mixed`, `monospace`, `move`, `multiply`, `n-resize`, `ne-resize`, `nesw-resize`, `no-close-quote`, `no-drop`, `no-open-quote`, `no-repeat`, `none`, `normal`, `not-allowed`, `nowrap`, `ns-resize`, `nw-resize`, `nwse-resize`, `objects`, `oblique`, `off`, `on`, `open`, `open-quote`, `optimizeLegibility`, `optimizeSpeed`, `outset`, `outside`, `over`, `overlay`, `overline`, `padding-box`, `page`, `pan-down`, `pan-left`, `pan-right`, `pan-up`, `pan-x`, `pan-y`, `paused`, `petite-caps`, `pixelated`, `pointer`, `preserve-3d`, `progress`, `proximity`, `relative`, `repeat`, `repeat no-repeat`, `repeat-x`, `repeat-y`, `reverse`, `ridge`, `right`, `round`, `row`, `row-resize`, `row-reverse`, `rtl`, `ruby`, `ruby-base`, `ruby-base-container`, `ruby-text`, `ruby-text-container`, `run-in`, `running`, `s-resize`, `sans-serif`, `saturation`, `scale-down`, `screen`, `scroll`, `se-resize`, `semi-condensed`, `semi-expanded`, `separate`, `serif`, `sesame`, `show`, `sideways`, `sideways-left`, `sideways-right`, `slice`, `small`, `small-caps`, `smaller`, `smooth`, `snap`, `soft-light`, `solid`, `space`, `space-around`, `space-between`, `spaces`, `square`, `start`, `static`, `step-end`, `step-start`, `sticky`, `stretch`, `strict`, `stroke-box`, `style`, `sw-resize`, `table`, `table-caption`, `table-cell`, `table-column`, `table-column-group`, `table-footer-group`, `table-header-group`, `table-row`, `table-row-group`, `text`, `thick`, `thin`, `titling-caps`, `to`, `top`, `triangle`, `ultra-condensed`, `ultra-expanded`, `under`, `underline`, `unicase`, `unset`, `upper-alpha`, `upper-latin`, `upper-roman`, `uppercase`, `upright`, `use-glyph-orientation`, `vertical`, `vertical-text`, `view-box`, `visible`, `w-resize`, `wait`, `wavy`, `weight`, `weight style`, `wrap`, `wrap-reverse`, `x-large`, `x-small`, `xx-large`, `xx-small`, `zoom-in`, `zoom-out`), KeywordConstant, nil}, + {`\;?`, Punctuation, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/svelte.go b/vendor/github.com/alecthomas/chroma/lexers/s/svelte.go new file mode 100644 index 000000000..1f91a2130 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/svelte.go @@ -0,0 +1,73 @@ +package s + +import ( + . 
"github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/h" + "github.com/alecthomas/chroma/lexers/internal" + "github.com/alecthomas/chroma/lexers/t" +) + +// Svelte lexer. +var Svelte = internal.Register(DelegatingLexer(h.HTML, MustNewLazyLexer( + &Config{ + Name: "Svelte", + Aliases: []string{"svelte"}, + Filenames: []string{"*.svelte"}, + MimeTypes: []string{"application/x-svelte"}, + DotAll: true, + }, + svelteRules, +))) + +func svelteRules() Rules { + return Rules{ + "root": { + // Let HTML handle the comments, including comments containing script and style tags + {``, Other, Pop(1)}, + {`.+?`, Other, nil}, + }, + "templates": { + {`}`, Punctuation, Pop(1)}, + // Let TypeScript handle strings and the curly braces inside them + {`(?|[<&?](?=\\w)|(?<=\\w)[>!?]", Punctuation, nil}, + {`[/=\-+!*%<>&|^?~]+`, Operator, nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + }, + "keywords": { + {Words(``, `\b`, `as`, `break`, `case`, `catch`, `continue`, `default`, `defer`, `do`, `else`, `fallthrough`, `for`, `guard`, `if`, `in`, `is`, `repeat`, `return`, `#selector`, `switch`, `throw`, `try`, `where`, `while`), Keyword, nil}, + {`@availability\([^)]+\)`, KeywordReserved, nil}, + {Words(``, `\b`, `associativity`, `convenience`, `dynamic`, `didSet`, `final`, `get`, `indirect`, `infix`, `inout`, `lazy`, `left`, `mutating`, `none`, `nonmutating`, `optional`, `override`, `postfix`, `precedence`, `prefix`, `Protocol`, `required`, `rethrows`, `right`, `set`, `throws`, `Type`, `unowned`, `weak`, `willSet`, `@availability`, `@autoclosure`, `@noreturn`, `@NSApplicationMain`, `@NSCopying`, `@NSManaged`, `@objc`, `@UIApplicationMain`, `@IBAction`, `@IBDesignable`, `@IBInspectable`, `@IBOutlet`), KeywordReserved, nil}, + {`(as|dynamicType|false|is|nil|self|Self|super|true|__COLUMN__|__FILE__|__FUNCTION__|__LINE__|_|#(?:file|line|column|function))\b`, KeywordConstant, nil}, + {`import\b`, KeywordDeclaration, Push("module")}, + {`(class|enum|extension|struct|protocol)(\s+)([a-zA-Z_]\w*)`, ByGroups(KeywordDeclaration, Text, NameClass), nil}, + {`(func)(\s+)([a-zA-Z_]\w*)`, ByGroups(KeywordDeclaration, Text, NameFunction), nil}, + {`(var|let)(\s+)([a-zA-Z_]\w*)`, ByGroups(KeywordDeclaration, Text, NameVariable), nil}, + {Words(``, `\b`, `class`, `deinit`, `enum`, `extension`, `func`, `import`, `init`, `internal`, `let`, `operator`, `private`, `protocol`, `public`, `static`, `struct`, `subscript`, `typealias`, `var`), KeywordDeclaration, nil}, + }, + "comment": { + {`:param: [a-zA-Z_]\w*|:returns?:|(FIXME|MARK|TODO):`, CommentSpecial, nil}, + }, + "comment-single": { + {`\n`, Text, Pop(1)}, + Include("comment"), + {`[^\n]`, CommentSingle, nil}, + }, + "comment-multi": { + Include("comment"), + {`[^*/]`, CommentMultiline, nil}, + {`/\*`, CommentMultiline, Push()}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[*/]`, CommentMultiline, nil}, + }, + "module": { + {`\n`, Text, Pop(1)}, + {`[a-zA-Z_]\w*`, NameClass, nil}, + Include("root"), + }, + "preproc": { + {`\n`, Text, Pop(1)}, + Include("keywords"), + {`[A-Za-z]\w*`, CommentPreproc, nil}, + Include("root"), + }, + "string": { + {`\\\(`, LiteralStringInterpol, Push("string-intp")}, + {`"`, LiteralString, Pop(1)}, + {`\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}`, LiteralStringEscape, nil}, + {`[^\\"]+`, LiteralString, nil}, + {`\\`, LiteralString, nil}, + }, + "string-intp": { + {`\(`, LiteralStringInterpol, Push()}, + {`\)`, LiteralStringInterpol, Pop(1)}, + Include("root"), + }, + } +} diff --git 
a/vendor/github.com/alecthomas/chroma/lexers/s/systemd.go b/vendor/github.com/alecthomas/chroma/lexers/s/systemd.go new file mode 100644 index 000000000..0da97821e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/systemd.go @@ -0,0 +1,33 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +var SYSTEMD = internal.Register(MustNewLazyLexer( + &Config{ + Name: "SYSTEMD", + Aliases: []string{"systemd"}, + // Suspects: man systemd.index | grep -E 'systemd\..*configuration' + Filenames: []string{"*.automount", "*.device", "*.dnssd", "*.link", "*.mount", "*.netdev", "*.network", "*.path", "*.scope", "*.service", "*.slice", "*.socket", "*.swap", "*.target", "*.timer"}, + MimeTypes: []string{"text/plain"}, + }, + systemdRules, +)) + +func systemdRules() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`[;#].*`, Comment, nil}, + {`\[.*?\]$`, Keyword, nil}, + {`(.*?)(=)(.*)(\\\n)`, ByGroups(NameAttribute, Operator, LiteralString, Text), Push("continuation")}, + {`(.*?)(=)(.*)`, ByGroups(NameAttribute, Operator, LiteralString), nil}, + }, + "continuation": { + {`(.*?)(\\\n)`, ByGroups(LiteralString, Text), nil}, + {`(.*)`, LiteralString, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/systemverilog.go b/vendor/github.com/alecthomas/chroma/lexers/s/systemverilog.go new file mode 100644 index 000000000..a9a77f542 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/systemverilog.go @@ -0,0 +1,77 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Systemverilog lexer. +var Systemverilog = internal.Register(MustNewLazyLexer( + &Config{ + Name: "systemverilog", + Aliases: []string{"systemverilog", "sv"}, + Filenames: []string{"*.sv", "*.svh"}, + MimeTypes: []string{"text/x-systemverilog"}, + EnsureNL: true, + }, + systemvarilogRules, +)) + +func systemvarilogRules() Rules { + return Rules{ + "root": { + {"^\\s*`define", CommentPreproc, Push("macro")}, + {`^(\s*)(package)(\s+)`, ByGroups(Text, KeywordNamespace, Text), nil}, + {`^(\s*)(import)(\s+)("DPI(?:-C)?")(\s+)`, ByGroups(Text, KeywordNamespace, Text, LiteralString, Text), nil}, + {`^(\s*)(import)(\s+)`, ByGroups(Text, KeywordNamespace, Text), Push("import")}, + {`\n`, Text, nil}, + {`\s+`, Text, nil}, + {`\\\n`, Text, nil}, + {`/(\\\n)?/(\n|(.|\n)*?[^\\]\n)`, CommentSingle, nil}, + {`/(\\\n)?[*](.|\n)*?[*](\\\n)?/`, CommentMultiline, nil}, + {`[{}#@]`, Punctuation, nil}, + {`L?"`, LiteralString, Push("string")}, + {`L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'`, LiteralStringChar, nil}, + {`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?`, LiteralNumberFloat, nil}, + {`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, + {`([0-9]+)|(\'h)[0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`([0-9]+)|(\'b)[01]+`, LiteralNumberBin, nil}, + {`([0-9]+)|(\'d)[0-9]+`, LiteralNumberInteger, nil}, + {`([0-9]+)|(\'o)[0-7]+`, LiteralNumberOct, nil}, + {`\'[01xz]`, LiteralNumber, nil}, + {`\d+[Ll]?`, LiteralNumberInteger, nil}, + {`\*/`, Error, nil}, + {`[~!%^&*+=|?:<>/-]`, Operator, nil}, + {`[()\[\],.;\']`, Punctuation, nil}, + {"`[a-zA-Z_]\\w*", NameConstant, nil}, + {Words(``, `\b`, `accept_on`, `alias`, `always`, `always_comb`, `always_ff`, `always_latch`, `and`, `assert`, `assign`, `assume`, `automatic`, `before`, `begin`, `bind`, `bins`, `binsof`, `bit`, `break`, `buf`, `bufif0`, `bufif1`, `byte`, `case`, `casex`, `casez`, `cell`, `chandle`, 
`checker`, `class`, `clocking`, `cmos`, `config`, `const`, `constraint`, `context`, `continue`, `cover`, `covergroup`, `coverpoint`, `cross`, `deassign`, `default`, `defparam`, `design`, `disable`, `dist`, `do`, `edge`, `else`, `end`, `endcase`, `endchecker`, `endclass`, `endclocking`, `endconfig`, `endfunction`, `endgenerate`, `endgroup`, `endinterface`, `endmodule`, `endpackage`, `endprimitive`, `endprogram`, `endproperty`, `endsequence`, `endspecify`, `endtable`, `endtask`, `enum`, `event`, `eventually`, `expect`, `export`, `extends`, `extern`, `final`, `first_match`, `for`, `force`, `foreach`, `forever`, `fork`, `forkjoin`, `function`, `generate`, `genvar`, `global`, `highz0`, `highz1`, `if`, `iff`, `ifnone`, `ignore_bins`, `illegal_bins`, `implies`, `import`, `incdir`, `include`, `initial`, `inout`, `input`, `inside`, `instance`, `int`, `integer`, `interface`, `intersect`, `join`, `join_any`, `join_none`, `large`, `let`, `liblist`, `library`, `local`, `localparam`, `logic`, `longint`, `macromodule`, `matches`, `medium`, `modport`, `module`, `nand`, `negedge`, `new`, `nexttime`, `nmos`, `nor`, `noshowcancelled`, `not`, `notif0`, `notif1`, `null`, `or`, `output`, `package`, `packed`, `parameter`, `pmos`, `posedge`, `primitive`, `priority`, `program`, `property`, `protected`, `pull0`, `pull1`, `pulldown`, `pullup`, `pulsestyle_ondetect`, `pulsestyle_onevent`, `pure`, `rand`, `randc`, `randcase`, `randsequence`, `rcmos`, `real`, `realtime`, `ref`, `reg`, `reject_on`, `release`, `repeat`, `restrict`, `return`, `rnmos`, `rpmos`, `rtran`, `rtranif0`, `rtranif1`, `s_always`, `s_eventually`, `s_nexttime`, `s_until`, `s_until_with`, `scalared`, `sequence`, `shortint`, `shortreal`, `showcancelled`, `signed`, `small`, `solve`, `specify`, `specparam`, `static`, `string`, `strong`, `strong0`, `strong1`, `struct`, `super`, `supply0`, `supply1`, `sync_accept_on`, `sync_reject_on`, `table`, `tagged`, `task`, `this`, `throughout`, `time`, `timeprecision`, `timeunit`, `tran`, `tranif0`, `tranif1`, `tri`, `tri0`, `tri1`, `triand`, `trior`, `trireg`, `type`, `typedef`, `union`, `unique`, `unique0`, `unsigned`, `until`, `until_with`, `untyped`, `use`, `uwire`, `var`, `vectored`, `virtual`, `void`, `wait`, `wait_order`, `wand`, `weak`, `weak0`, `weak1`, `while`, `wildcard`, `wire`, `with`, `within`, `wor`, `xnor`, `xor`), Keyword, nil}, + {Words(``, `\b`, "`__FILE__", "`__LINE__", "`begin_keywords", "`celldefine", "`default_nettype", "`define", "`else", "`elsif", "`end_keywords", "`endcelldefine", "`endif", "`ifdef", "`ifndef", "`include", "`line", "`nounconnected_drive", "`pragma", "`resetall", "`timescale", "`unconnected_drive", "`undef", "`undefineall"), CommentPreproc, nil}, + {Words(``, `\b`, `$display`, `$displayb`, `$displayh`, `$displayo`, `$dumpall`, `$dumpfile`, `$dumpflush`, `$dumplimit`, `$dumpoff`, `$dumpon`, `$dumpports`, `$dumpportsall`, `$dumpportsflush`, `$dumpportslimit`, `$dumpportsoff`, `$dumpportson`, `$dumpvars`, `$fclose`, `$fdisplay`, `$fdisplayb`, `$fdisplayh`, `$fdisplayo`, `$feof`, `$ferror`, `$fflush`, `$fgetc`, `$fgets`, `$finish`, `$fmonitor`, `$fmonitorb`, `$fmonitorh`, `$fmonitoro`, `$fopen`, `$fread`, `$fscanf`, `$fseek`, `$fstrobe`, `$fstrobeb`, `$fstrobeh`, `$fstrobeo`, `$ftell`, `$fwrite`, `$fwriteb`, `$fwriteh`, `$fwriteo`, `$monitor`, `$monitorb`, `$monitorh`, `$monitoro`, `$monitoroff`, `$monitoron`, `$plusargs`, `$random`, `$readmemb`, `$readmemh`, `$rewind`, `$sformat`, `$sformatf`, `$sscanf`, `$strobe`, `$strobeb`, `$strobeh`, `$strobeo`, `$swrite`, `$swriteb`, 
`$swriteh`, `$swriteo`, `$test`, `$ungetc`, `$value$plusargs`, `$write`, `$writeb`, `$writeh`, `$writememb`, `$writememh`, `$writeo`), NameBuiltin, nil}, + {`(class)(\s+)`, ByGroups(Keyword, Text), Push("classname")}, + {Words(``, `\b`, `byte`, `shortint`, `int`, `longint`, `integer`, `time`, `bit`, `logic`, `reg`, `supply0`, `supply1`, `tri`, `triand`, `trior`, `tri0`, `tri1`, `trireg`, `uwire`, `wire`, `wand`, `woshortreal`, `real`, `realtime`), KeywordType, nil}, + {`[a-zA-Z_]\w*:(?!:)`, NameLabel, nil}, + {`\$?[a-zA-Z_]\w*`, Name, nil}, + }, + "classname": { + {`[a-zA-Z_]\w*`, NameClass, Pop(1)}, + }, + "string": { + {`"`, LiteralString, Pop(1)}, + {`\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})`, LiteralStringEscape, nil}, + {`[^\\"\n]+`, LiteralString, nil}, + {`\\\n`, LiteralString, nil}, + {`\\`, LiteralString, nil}, + }, + "macro": { + {`[^/\n]+`, CommentPreproc, nil}, + {`/[*](.|\n)*?[*]/`, CommentMultiline, nil}, + {`//.*?\n`, CommentSingle, Pop(1)}, + {`/`, CommentPreproc, nil}, + {`(?<=\\)\n`, CommentPreproc, nil}, + {`\n`, CommentPreproc, Pop(1)}, + }, + "import": { + {`[\w:]+\*?`, NameNamespace, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/tablegen.go b/vendor/github.com/alecthomas/chroma/lexers/t/tablegen.go new file mode 100644 index 000000000..ca05ed32d --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/tablegen.go @@ -0,0 +1,46 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// TableGen lexer. +var Tablegen = internal.Register(MustNewLazyLexer( + &Config{ + Name: "TableGen", + Aliases: []string{"tablegen"}, + Filenames: []string{"*.td"}, + MimeTypes: []string{"text/x-tablegen"}, + }, + tablegenRules, +)) + +func tablegenRules() Rules { + return Rules{ + "root": { + Include("macro"), + Include("whitespace"), + {`c?"[^"]*?"`, LiteralString, nil}, + Include("keyword"), + {`\$[_a-zA-Z][_\w]*`, NameVariable, nil}, + {`\d*[_a-zA-Z][_\w]*`, NameVariable, nil}, + {`\[\{[\w\W]*?\}\]`, LiteralString, nil}, + {`[+-]?\d+|0x[\da-fA-F]+|0b[01]+`, LiteralNumber, nil}, + {`[=<>{}\[\]()*.,!:;]`, Punctuation, nil}, + }, + "macro": { + {`(#include\s+)("[^"]*")`, ByGroups(CommentPreproc, LiteralString), nil}, + {`^\s*#(ifdef|ifndef)\s+[_\w][_\w\d]*`, CommentPreproc, nil}, + {`^\s*#define\s+[_\w][_\w\d]*`, CommentPreproc, nil}, + {`^\s*#endif`, CommentPreproc, nil}, + }, + "whitespace": { + {`(\n|\s)+`, Text, nil}, + {`//.*?\n`, Comment, nil}, + }, + "keyword": { + {Words(``, `\b`, `bit`, `bits`, `class`, `code`, `dag`, `def`, `defm`, `field`, `foreach`, `in`, `int`, `let`, `list`, `multiclass`, `string`), Keyword, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/tasm.go b/vendor/github.com/alecthomas/chroma/lexers/t/tasm.go new file mode 100644 index 000000000..0164a7368 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/tasm.go @@ -0,0 +1,65 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Tasm lexer. 
+var Tasm = internal.Register(MustNewLazyLexer( + &Config{ + Name: "TASM", + Aliases: []string{"tasm"}, + Filenames: []string{"*.asm", "*.ASM", "*.tasm"}, + MimeTypes: []string{"text/x-tasm"}, + CaseInsensitive: true, + }, + tasmRules, +)) + +func tasmRules() Rules { + return Rules{ + "root": { + {`^\s*%`, CommentPreproc, Push("preproc")}, + Include("whitespace"), + {`[@a-z$._?][\w$.?#@~]*:`, NameLabel, nil}, + {`BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|ORG|ALIGN|STRUC|ENDSTRUC|ENDS|COMMON|CPU|GROUP|UPPERCASE|INCLUDE|EXPORT|LIBRARY|MODULE|PROC|ENDP|USES|ARG|DATASEG|UDATASEG|END|IDEAL|P386|MODEL|ASSUME|CODESEG|SIZE`, Keyword, Push("instruction-args")}, + {`([@a-z$._?][\w$.?#@~]*)(\s+)(db|dd|dw|T[A-Z][a-z]+)`, ByGroups(NameConstant, KeywordDeclaration, KeywordDeclaration), Push("instruction-args")}, + {`(?:res|d)[bwdqt]|times`, KeywordDeclaration, Push("instruction-args")}, + {`[@a-z$._?][\w$.?#@~]*`, NameFunction, Push("instruction-args")}, + {`[\r\n]+`, Text, nil}, + }, + "instruction-args": { + {"\"(\\\\\"|[^\"\\n])*\"|'(\\\\'|[^'\\n])*'|`(\\\\`|[^`\\n])*`", LiteralString, nil}, + {`(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)`, LiteralNumberHex, nil}, + {`[0-7]+q`, LiteralNumberOct, nil}, + {`[01]+b`, LiteralNumberBin, nil}, + {`[0-9]+\.e?[0-9]+`, LiteralNumberFloat, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + Include("punctuation"), + {`r[0-9][0-5]?[bwd]|[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]`, NameBuiltin, nil}, + {`[@a-z$._?][\w$.?#@~]*`, NameVariable, nil}, + {`(\\\s*)(;.*)([\r\n])`, ByGroups(Text, CommentSingle, Text), nil}, + {`[\r\n]+`, Text, Pop(1)}, + Include("whitespace"), + }, + "preproc": { + {`[^;\n]+`, CommentPreproc, nil}, + {`;.*?\n`, CommentSingle, Pop(1)}, + {`\n`, CommentPreproc, Pop(1)}, + }, + "whitespace": { + {`[\n\r]`, Text, nil}, + {`\\[\n\r]`, Text, nil}, + {`[ \t]+`, Text, nil}, + {`;.*`, CommentSingle, nil}, + }, + "punctuation": { + {`[,():\[\]]+`, Punctuation, nil}, + {`[&|^<>+*=/%~-]+`, Operator, nil}, + {`[$]+`, KeywordConstant, nil}, + {`seg|wrt|strict`, OperatorWord, nil}, + {`byte|[dq]?word`, KeywordType, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/tcl.go b/vendor/github.com/alecthomas/chroma/lexers/t/tcl.go new file mode 100644 index 000000000..5f7030ab3 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/tcl.go @@ -0,0 +1,120 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Tcl lexer. 
+var Tcl = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Tcl", + Aliases: []string{"tcl"}, + Filenames: []string{"*.tcl", "*.rvt"}, + MimeTypes: []string{"text/x-tcl", "text/x-script.tcl", "application/x-tcl"}, + }, + tclRules, +)) + +func tclRules() Rules { + return Rules{ + "root": { + Include("command"), + Include("basic"), + Include("data"), + {`\}`, Keyword, nil}, + }, + "command": { + {Words(`\b`, `\b`, `after`, `apply`, `array`, `break`, `catch`, `continue`, `elseif`, `else`, `error`, `eval`, `expr`, `for`, `foreach`, `global`, `if`, `namespace`, `proc`, `rename`, `return`, `set`, `switch`, `then`, `trace`, `unset`, `update`, `uplevel`, `upvar`, `variable`, `vwait`, `while`), Keyword, Push("params")}, + {Words(`\b`, `\b`, `append`, `bgerror`, `binary`, `cd`, `chan`, `clock`, `close`, `concat`, `dde`, `dict`, `encoding`, `eof`, `exec`, `exit`, `fblocked`, `fconfigure`, `fcopy`, `file`, `fileevent`, `flush`, `format`, `gets`, `glob`, `history`, `http`, `incr`, `info`, `interp`, `join`, `lappend`, `lassign`, `lindex`, `linsert`, `list`, `llength`, `load`, `loadTk`, `lrange`, `lrepeat`, `lreplace`, `lreverse`, `lsearch`, `lset`, `lsort`, `mathfunc`, `mathop`, `memory`, `msgcat`, `open`, `package`, `pid`, `pkg::create`, `pkg_mkIndex`, `platform`, `platform::shell`, `puts`, `pwd`, `re_syntax`, `read`, `refchan`, `regexp`, `registry`, `regsub`, `scan`, `seek`, `socket`, `source`, `split`, `string`, `subst`, `tell`, `time`, `tm`, `unknown`, `unload`), NameBuiltin, Push("params")}, + {`([\w.-]+)`, NameVariable, Push("params")}, + {`#`, Comment, Push("comment")}, + }, + "command-in-brace": { + {Words(`\b`, `\b`, `after`, `apply`, `array`, `break`, `catch`, `continue`, `elseif`, `else`, `error`, `eval`, `expr`, `for`, `foreach`, `global`, `if`, `namespace`, `proc`, `rename`, `return`, `set`, `switch`, `then`, `trace`, `unset`, `update`, `uplevel`, `upvar`, `variable`, `vwait`, `while`), Keyword, Push("params-in-brace")}, + {Words(`\b`, `\b`, `append`, `bgerror`, `binary`, `cd`, `chan`, `clock`, `close`, `concat`, `dde`, `dict`, `encoding`, `eof`, `exec`, `exit`, `fblocked`, `fconfigure`, `fcopy`, `file`, `fileevent`, `flush`, `format`, `gets`, `glob`, `history`, `http`, `incr`, `info`, `interp`, `join`, `lappend`, `lassign`, `lindex`, `linsert`, `list`, `llength`, `load`, `loadTk`, `lrange`, `lrepeat`, `lreplace`, `lreverse`, `lsearch`, `lset`, `lsort`, `mathfunc`, `mathop`, `memory`, `msgcat`, `open`, `package`, `pid`, `pkg::create`, `pkg_mkIndex`, `platform`, `platform::shell`, `puts`, `pwd`, `re_syntax`, `read`, `refchan`, `regexp`, `registry`, `regsub`, `scan`, `seek`, `socket`, `source`, `split`, `string`, `subst`, `tell`, `time`, `tm`, `unknown`, `unload`), NameBuiltin, Push("params-in-brace")}, + {`([\w.-]+)`, NameVariable, Push("params-in-brace")}, + {`#`, Comment, Push("comment")}, + }, + "command-in-bracket": { + {Words(`\b`, `\b`, `after`, `apply`, `array`, `break`, `catch`, `continue`, `elseif`, `else`, `error`, `eval`, `expr`, `for`, `foreach`, `global`, `if`, `namespace`, `proc`, `rename`, `return`, `set`, `switch`, `then`, `trace`, `unset`, `update`, `uplevel`, `upvar`, `variable`, `vwait`, `while`), Keyword, Push("params-in-bracket")}, + {Words(`\b`, `\b`, `append`, `bgerror`, `binary`, `cd`, `chan`, `clock`, `close`, `concat`, `dde`, `dict`, `encoding`, `eof`, `exec`, `exit`, `fblocked`, `fconfigure`, `fcopy`, `file`, `fileevent`, `flush`, `format`, `gets`, `glob`, `history`, `http`, `incr`, `info`, `interp`, `join`, `lappend`, `lassign`, `lindex`, `linsert`, `list`, 
`llength`, `load`, `loadTk`, `lrange`, `lrepeat`, `lreplace`, `lreverse`, `lsearch`, `lset`, `lsort`, `mathfunc`, `mathop`, `memory`, `msgcat`, `open`, `package`, `pid`, `pkg::create`, `pkg_mkIndex`, `platform`, `platform::shell`, `puts`, `pwd`, `re_syntax`, `read`, `refchan`, `regexp`, `registry`, `regsub`, `scan`, `seek`, `socket`, `source`, `split`, `string`, `subst`, `tell`, `time`, `tm`, `unknown`, `unload`), NameBuiltin, Push("params-in-bracket")}, + {`([\w.-]+)`, NameVariable, Push("params-in-bracket")}, + {`#`, Comment, Push("comment")}, + }, + "command-in-paren": { + {Words(`\b`, `\b`, `after`, `apply`, `array`, `break`, `catch`, `continue`, `elseif`, `else`, `error`, `eval`, `expr`, `for`, `foreach`, `global`, `if`, `namespace`, `proc`, `rename`, `return`, `set`, `switch`, `then`, `trace`, `unset`, `update`, `uplevel`, `upvar`, `variable`, `vwait`, `while`), Keyword, Push("params-in-paren")}, + {Words(`\b`, `\b`, `append`, `bgerror`, `binary`, `cd`, `chan`, `clock`, `close`, `concat`, `dde`, `dict`, `encoding`, `eof`, `exec`, `exit`, `fblocked`, `fconfigure`, `fcopy`, `file`, `fileevent`, `flush`, `format`, `gets`, `glob`, `history`, `http`, `incr`, `info`, `interp`, `join`, `lappend`, `lassign`, `lindex`, `linsert`, `list`, `llength`, `load`, `loadTk`, `lrange`, `lrepeat`, `lreplace`, `lreverse`, `lsearch`, `lset`, `lsort`, `mathfunc`, `mathop`, `memory`, `msgcat`, `open`, `package`, `pid`, `pkg::create`, `pkg_mkIndex`, `platform`, `platform::shell`, `puts`, `pwd`, `re_syntax`, `read`, `refchan`, `regexp`, `registry`, `regsub`, `scan`, `seek`, `socket`, `source`, `split`, `string`, `subst`, `tell`, `time`, `tm`, `unknown`, `unload`), NameBuiltin, Push("params-in-paren")}, + {`([\w.-]+)`, NameVariable, Push("params-in-paren")}, + {`#`, Comment, Push("comment")}, + }, + "basic": { + {`\(`, Keyword, Push("paren")}, + {`\[`, Keyword, Push("bracket")}, + {`\{`, Keyword, Push("brace")}, + {`"`, LiteralStringDouble, Push("string")}, + {`(eq|ne|in|ni)\b`, OperatorWord, nil}, + {`!=|==|<<|>>|<=|>=|&&|\|\||\*\*|[-+~!*/%<>&^|?:]`, Operator, nil}, + }, + "data": { + {`\s+`, Text, nil}, + {`0x[a-fA-F0-9]+`, LiteralNumberHex, nil}, + {`0[0-7]+`, LiteralNumberOct, nil}, + {`\d+\.\d+`, LiteralNumberFloat, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`\$([\w.:-]+)`, NameVariable, nil}, + {`([\w.:-]+)`, Text, nil}, + }, + "params": { + {`;`, Keyword, Pop(1)}, + {`\n`, Text, Pop(1)}, + {`(else|elseif|then)\b`, Keyword, nil}, + Include("basic"), + Include("data"), + }, + "params-in-brace": { + {`\}`, Keyword, Push("#pop", "#pop")}, + Include("params"), + }, + "params-in-paren": { + {`\)`, Keyword, Push("#pop", "#pop")}, + Include("params"), + }, + "params-in-bracket": { + {`\]`, Keyword, Push("#pop", "#pop")}, + Include("params"), + }, + "string": { + {`\[`, LiteralStringDouble, Push("string-square")}, + {`(?s)(\\\\|\\[0-7]+|\\.|[^"\\])`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + "string-square": { + {`\[`, LiteralStringDouble, Push("string-square")}, + {`(?s)(\\\\|\\[0-7]+|\\.|\\\n|[^\]\\])`, LiteralStringDouble, nil}, + {`\]`, LiteralStringDouble, Pop(1)}, + }, + "brace": { + {`\}`, Keyword, Pop(1)}, + Include("command-in-brace"), + Include("basic"), + Include("data"), + }, + "paren": { + {`\)`, Keyword, Pop(1)}, + Include("command-in-paren"), + Include("basic"), + Include("data"), + }, + "bracket": { + {`\]`, Keyword, Pop(1)}, + Include("command-in-bracket"), + Include("basic"), + Include("data"), + }, + "comment": { + {`.*[^\\]\n`, Comment, Pop(1)}, + {`.*\\\n`, 
Comment, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/tcsh.go b/vendor/github.com/alecthomas/chroma/lexers/t/tcsh.go new file mode 100644 index 000000000..fec0cb2ea --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/tcsh.go @@ -0,0 +1,63 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Tcsh lexer. +var Tcsh = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Tcsh", + Aliases: []string{"tcsh", "csh"}, + Filenames: []string{"*.tcsh", "*.csh"}, + MimeTypes: []string{"application/x-csh"}, + }, + tcshRules, +)) + +func tcshRules() Rules { + return Rules{ + "root": { + Include("basic"), + {`\$\(`, Keyword, Push("paren")}, + {`\$\{#?`, Keyword, Push("curly")}, + {"`", LiteralStringBacktick, Push("backticks")}, + Include("data"), + }, + "basic": { + {`\b(if|endif|else|while|then|foreach|case|default|continue|goto|breaksw|end|switch|endsw)\s*\b`, Keyword, nil}, + {`\b(alias|alloc|bg|bindkey|break|builtins|bye|caller|cd|chdir|complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|glob|getspath|hashstat|history|hup|inlib|jobs|kill|limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|source|stop|suspend|source|suspend|telltc|time|umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|ver|wait|warp|watchlog|where|which)\s*\b`, NameBuiltin, nil}, + {`#.*`, Comment, nil}, + {`\\[\w\W]`, LiteralStringEscape, nil}, + {`(\b\w+)(\s*)(=)`, ByGroups(NameVariable, Text, Operator), nil}, + {`[\[\]{}()=]+`, Operator, nil}, + {`<<\s*(\'?)\\?(\w+)[\w\W]+?\2`, LiteralString, nil}, + {`;`, Punctuation, nil}, + }, + "data": { + {`(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"`, LiteralStringDouble, nil}, + {`(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'`, LiteralStringSingle, nil}, + {`\s+`, Text, nil}, + {"[^=\\s\\[\\]{}()$\"\\'`\\\\;#]+", Text, nil}, + {`\d+(?= |\Z)`, LiteralNumber, nil}, + {`\$#?(\w+|.)`, NameVariable, nil}, + }, + "curly": { + {`\}`, Keyword, Pop(1)}, + {`:-`, Keyword, nil}, + {`\w+`, NameVariable, nil}, + {"[^}:\"\\'`$]+", Punctuation, nil}, + {`:`, Punctuation, nil}, + Include("root"), + }, + "paren": { + {`\)`, Keyword, Pop(1)}, + Include("root"), + }, + "backticks": { + {"`", LiteralStringBacktick, Pop(1)}, + Include("root"), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/termcap.go b/vendor/github.com/alecthomas/chroma/lexers/t/termcap.go new file mode 100644 index 000000000..8db20b563 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/termcap.go @@ -0,0 +1,46 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Termcap lexer. 
+var Termcap = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Termcap", + Aliases: []string{"termcap"}, + Filenames: []string{"termcap", "termcap.src"}, + MimeTypes: []string{}, + }, + termcapRules, +)) + +func termcapRules() Rules { + return Rules{ + "root": { + {`^#.*$`, Comment, nil}, + {`^[^\s#:|]+`, NameTag, Push("names")}, + }, + "names": { + {`\n`, Text, Pop(1)}, + {`:`, Punctuation, Push("defs")}, + {`\|`, Punctuation, nil}, + {`[^:|]+`, NameAttribute, nil}, + }, + "defs": { + {`\\\n[ \t]*`, Text, nil}, + {`\n[ \t]*`, Text, Pop(2)}, + {`(#)([0-9]+)`, ByGroups(Operator, LiteralNumber), nil}, + {`=`, Operator, Push("data")}, + {`:`, Punctuation, nil}, + {`[^\s:=#]+`, NameClass, nil}, + }, + "data": { + {`\\072`, Literal, nil}, + {`:`, Punctuation, Pop(1)}, + {`[^:\\]+`, Literal, nil}, + {`.`, Literal, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/terminfo.go b/vendor/github.com/alecthomas/chroma/lexers/t/terminfo.go new file mode 100644 index 000000000..b3f273549 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/terminfo.go @@ -0,0 +1,46 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Terminfo lexer. +var Terminfo = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Terminfo", + Aliases: []string{"terminfo"}, + Filenames: []string{"terminfo", "terminfo.src"}, + MimeTypes: []string{}, + }, + terminfoRules, +)) + +func terminfoRules() Rules { + return Rules{ + "root": { + {`^#.*$`, Comment, nil}, + {`^[^\s#,|]+`, NameTag, Push("names")}, + }, + "names": { + {`\n`, Text, Pop(1)}, + {`(,)([ \t]*)`, ByGroups(Punctuation, Text), Push("defs")}, + {`\|`, Punctuation, nil}, + {`[^,|]+`, NameAttribute, nil}, + }, + "defs": { + {`\n[ \t]+`, Text, nil}, + {`\n`, Text, Pop(2)}, + {`(#)([0-9]+)`, ByGroups(Operator, LiteralNumber), nil}, + {`=`, Operator, Push("data")}, + {`(,)([ \t]*)`, ByGroups(Punctuation, Text), nil}, + {`[^\s,=#]+`, NameClass, nil}, + }, + "data": { + {`\\[,\\]`, Literal, nil}, + {`(,)([ \t]*)`, ByGroups(Punctuation, Text), Pop(1)}, + {`[^\\,]+`, Literal, nil}, + {`.`, Literal, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/terraform.go b/vendor/github.com/alecthomas/chroma/lexers/t/terraform.go new file mode 100644 index 000000000..c9c0905de --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/terraform.go @@ -0,0 +1,64 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Terraform lexer. 
+var Terraform = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Terraform", + Aliases: []string{"terraform", "tf"}, + Filenames: []string{"*.tf"}, + MimeTypes: []string{"application/x-tf", "application/x-terraform"}, + }, + terraformRules, +)) + +func terraformRules() Rules { + return Rules{ + "root": { + {`[\[\](),.{}]`, Punctuation, nil}, + {`-?[0-9]+`, LiteralNumber, nil}, + {`=>`, Punctuation, nil}, + {Words(``, `\b`, `true`, `false`), KeywordConstant, nil}, + {`/(?s)\*(((?!\*/).)*)\*/`, CommentMultiline, nil}, + {`\s*(#|//).*\n`, CommentSingle, nil}, + {`([a-zA-Z]\w*)(\s*)(=(?!>))`, ByGroups(NameAttribute, Text, Text), nil}, + {Words(`^\s*`, `\b`, `variable`, `data`, `resource`, `provider`, `provisioner`, `module`, `output`), KeywordReserved, nil}, + {Words(``, `\b`, `for`, `in`), Keyword, nil}, + {Words(``, ``, `count`, `data`, `var`, `module`, `each`), NameBuiltin, nil}, + {Words(``, `\b`, `abs`, `ceil`, `floor`, `log`, `max`, `min`, `parseint`, `pow`, `signum`), NameBuiltin, nil}, + {Words(``, `\b`, `chomp`, `format`, `formatlist`, `indent`, `join`, `lower`, `regex`, `regexall`, `replace`, `split`, `strrev`, `substr`, `title`, `trim`, `trimprefix`, `trimsuffix`, `trimspace`, `upper`), NameBuiltin, nil}, + {Words(`[^.]`, `\b`, `chunklist`, `coalesce`, `coalescelist`, `compact`, `concat`, `contains`, `distinct`, `element`, `flatten`, `index`, `keys`, `length`, `list`, `lookup`, `map`, `matchkeys`, `merge`, `range`, `reverse`, `setintersection`, `setproduct`, `setsubtract`, `setunion`, `slice`, `sort`, `transpose`, `values`, `zipmap`), NameBuiltin, nil}, + {Words(`[^.]`, `\b`, `base64decode`, `base64encode`, `base64gzip`, `csvdecode`, `jsondecode`, `jsonencode`, `urlencode`, `yamldecode`, `yamlencode`), NameBuiltin, nil}, + {Words(``, `\b`, `abspath`, `dirname`, `pathexpand`, `basename`, `file`, `fileexists`, `fileset`, `filebase64`, `templatefile`), NameBuiltin, nil}, + {Words(``, `\b`, `formatdate`, `timeadd`, `timestamp`), NameBuiltin, nil}, + {Words(``, `\b`, `base64sha256`, `base64sha512`, `bcrypt`, `filebase64sha256`, `filebase64sha512`, `filemd5`, `filesha1`, `filesha256`, `filesha512`, `md5`, `rsadecrypt`, `sha1`, `sha256`, `sha512`, `uuid`, `uuidv5`), NameBuiltin, nil}, + {Words(``, `\b`, `cidrhost`, `cidrnetmask`, `cidrsubnet`), NameBuiltin, nil}, + {Words(``, `\b`, `can`, `tobool`, `tolist`, `tomap`, `tonumber`, `toset`, `tostring`, `try`), NameBuiltin, nil}, + {`=(?!>)|\+|-|\*|\/|:|!|%|>|<(?!<)|>=|<=|==|!=|&&|\||\?`, Operator, nil}, + {`\n|\s+|\\\n`, Text, nil}, + {`[a-zA-Z]\w*`, NameOther, nil}, + {`"`, LiteralStringDouble, Push("string")}, + {`(?s)(<<-?)(\w+)(\n\s*(?:(?!\2).)*\s*\n\s*)(\2)`, ByGroups(Operator, Operator, String, Operator), nil}, + }, + "declaration": { + {`(\s*)("(?:\\\\|\\"|[^"])*")(\s*)`, ByGroups(Text, NameVariable, Text), nil}, + {`\{`, Punctuation, Pop(1)}, + }, + "string": { + {`"`, LiteralStringDouble, Pop(1)}, + {`\\\\`, LiteralStringDouble, nil}, + {`\\\\"`, LiteralStringDouble, nil}, + {`\$\{`, LiteralStringInterpol, Push("interp-inside")}, + {`\$`, LiteralStringDouble, nil}, + {`[^"\\\\$]+`, LiteralStringDouble, nil}, + }, + "interp-inside": { + {`\}`, LiteralStringInterpol, Pop(1)}, + Include("root"), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/tex.go b/vendor/github.com/alecthomas/chroma/lexers/t/tex.go new file mode 100644 index 000000000..f9413b370 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/tex.go @@ -0,0 +1,60 @@ +package t + +import ( + . 
"github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Tex lexer. +var TeX = internal.Register(MustNewLazyLexer( + &Config{ + Name: "TeX", + Aliases: []string{"tex", "latex"}, + Filenames: []string{"*.tex", "*.aux", "*.toc"}, + MimeTypes: []string{"text/x-tex", "text/x-latex"}, + }, + texRules, +)) + +func texRules() Rules { + return Rules{ + "general": { + {`%.*?\n`, Comment, nil}, + {`[{}]`, NameBuiltin, nil}, + {`[&_^]`, NameBuiltin, nil}, + }, + "root": { + {`\\\[`, LiteralStringBacktick, Push("displaymath")}, + {`\\\(`, LiteralString, Push("inlinemath")}, + {`\$\$`, LiteralStringBacktick, Push("displaymath")}, + {`\$`, LiteralString, Push("inlinemath")}, + {`\\([a-zA-Z]+|.)`, Keyword, Push("command")}, + {`\\$`, Keyword, nil}, + Include("general"), + {`[^\\$%&_^{}]+`, Text, nil}, + }, + "math": { + {`\\([a-zA-Z]+|.)`, NameVariable, nil}, + Include("general"), + {`[0-9]+`, LiteralNumber, nil}, + {`[-=!+*/()\[\]]`, Operator, nil}, + {`[^=!+*/()\[\]\\$%&_^{}0-9-]+`, NameBuiltin, nil}, + }, + "inlinemath": { + {`\\\)`, LiteralString, Pop(1)}, + {`\$`, LiteralString, Pop(1)}, + Include("math"), + }, + "displaymath": { + {`\\\]`, LiteralString, Pop(1)}, + {`\$\$`, LiteralString, Pop(1)}, + {`\$`, NameBuiltin, nil}, + Include("math"), + }, + "command": { + {`\[.*?\]`, NameAttribute, nil}, + {`\*`, Keyword, nil}, + Default(Pop(1)), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/thrift.go b/vendor/github.com/alecthomas/chroma/lexers/t/thrift.go new file mode 100644 index 000000000..61669d712 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/thrift.go @@ -0,0 +1,77 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Thrift lexer. 
+var Thrift = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Thrift", + Aliases: []string{"thrift"}, + Filenames: []string{"*.thrift"}, + MimeTypes: []string{"application/x-thrift"}, + }, + thriftRules, +)) + +func thriftRules() Rules { + return Rules{ + "root": { + Include("whitespace"), + Include("comments"), + {`"`, LiteralStringDouble, Combined("stringescape", "dqs")}, + {`\'`, LiteralStringSingle, Combined("stringescape", "sqs")}, + {`(namespace)(\s+)`, ByGroups(KeywordNamespace, TextWhitespace), Push("namespace")}, + {`(enum|union|struct|service|exception)(\s+)`, ByGroups(KeywordDeclaration, TextWhitespace), Push("class")}, + {`((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)((?:[^\W\d]|\$)[\w$]*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil}, + Include("keywords"), + Include("numbers"), + {`[&=]`, Operator, nil}, + {`[:;,{}()<>\[\]]`, Punctuation, nil}, + {`[a-zA-Z_](\.\w|\w)*`, Name, nil}, + }, + "whitespace": { + {`\n`, TextWhitespace, nil}, + {`\s+`, TextWhitespace, nil}, + }, + "comments": { + {`#.*$`, Comment, nil}, + {`//.*?\n`, Comment, nil}, + {`/\*[\w\W]*?\*/`, CommentMultiline, nil}, + }, + "stringescape": { + {`\\([\\nrt"\'])`, LiteralStringEscape, nil}, + }, + "dqs": { + {`"`, LiteralStringDouble, Pop(1)}, + {`[^\\"\n]+`, LiteralStringDouble, nil}, + }, + "sqs": { + {`'`, LiteralStringSingle, Pop(1)}, + {`[^\\\'\n]+`, LiteralStringSingle, nil}, + }, + "namespace": { + {`[a-z*](\.\w|\w)*`, NameNamespace, Pop(1)}, + Default(Pop(1)), + }, + "class": { + {`[a-zA-Z_]\w*`, NameClass, Pop(1)}, + Default(Pop(1)), + }, + "keywords": { + {`(async|oneway|extends|throws|required|optional)\b`, Keyword, nil}, + {`(true|false)\b`, KeywordConstant, nil}, + {`(const|typedef)\b`, KeywordDeclaration, nil}, + {Words(``, `\b`, `cpp_namespace`, `cpp_include`, `cpp_type`, `java_package`, `cocoa_prefix`, `csharp_namespace`, `delphi_namespace`, `php_namespace`, `py_module`, `perl_package`, `ruby_namespace`, `smalltalk_category`, `smalltalk_prefix`, `xsd_all`, `xsd_optional`, `xsd_nillable`, `xsd_namespace`, `xsd_attrs`, `include`), KeywordNamespace, nil}, + {Words(``, `\b`, `void`, `bool`, `byte`, `i16`, `i32`, `i64`, `double`, `string`, `binary`, `map`, `list`, `set`, `slist`, `senum`), KeywordType, nil}, + {Words(`\b`, `\b`, `BEGIN`, `END`, `__CLASS__`, `__DIR__`, `__FILE__`, `__FUNCTION__`, `__LINE__`, `__METHOD__`, `__NAMESPACE__`, `abstract`, `alias`, `and`, `args`, `as`, `assert`, `begin`, `break`, `case`, `catch`, `class`, `clone`, `continue`, `declare`, `def`, `default`, `del`, `delete`, `do`, `dynamic`, `elif`, `else`, `elseif`, `elsif`, `end`, `enddeclare`, `endfor`, `endforeach`, `endif`, `endswitch`, `endwhile`, `ensure`, `except`, `exec`, `finally`, `float`, `for`, `foreach`, `function`, `global`, `goto`, `if`, `implements`, `import`, `in`, `inline`, `instanceof`, `interface`, `is`, `lambda`, `module`, `native`, `new`, `next`, `nil`, `not`, `or`, `pass`, `public`, `print`, `private`, `protected`, `raise`, `redo`, `rescue`, `retry`, `register`, `return`, `self`, `sizeof`, `static`, `super`, `switch`, `synchronized`, `then`, `this`, `throw`, `transient`, `try`, `undef`, `unless`, `unsigned`, `until`, `use`, `var`, `virtual`, `volatile`, `when`, `while`, `with`, `xor`, `yield`), KeywordReserved, nil}, + }, + "numbers": { + {`[+-]?(\d+\.\d+([eE][+-]?\d+)?|\.?\d+[eE][+-]?\d+)`, LiteralNumberFloat, nil}, + {`[+-]?0x[0-9A-Fa-f]+`, LiteralNumberHex, nil}, + {`[+-]?[0-9]+`, LiteralNumberInteger, nil}, + }, + } +} diff --git 
a/vendor/github.com/alecthomas/chroma/lexers/t/toml.go b/vendor/github.com/alecthomas/chroma/lexers/t/toml.go new file mode 100644 index 000000000..18a44c5b2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/toml.go @@ -0,0 +1,33 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +var TOML = internal.Register(MustNewLazyLexer( + &Config{ + Name: "TOML", + Aliases: []string{"toml"}, + Filenames: []string{"*.toml"}, + MimeTypes: []string{"text/x-toml"}, + }, + tomlRules, +)) + +func tomlRules() Rules { + return Rules{ + "root": { + {`\s+`, Text, nil}, + {`#.*`, Comment, nil}, + {Words(``, `\b`, `true`, `false`), KeywordConstant, nil}, + {`\d\d\d\d-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d\+)?(Z|[+-]\d{2}:\d{2})`, LiteralDate, nil}, + {`[+-]?[0-9](_?\d)*\.\d+`, LiteralNumberFloat, nil}, + {`[+-]?[0-9](_?\d)*`, LiteralNumberInteger, nil}, + {`"(\\\\|\\"|[^"])*"`, StringDouble, nil}, + {`'(\\\\|\\'|[^'])*'`, StringSingle, nil}, + {`[.,=\[\]{}]`, Punctuation, nil}, + {`[A-Za-z0-9_-]+`, NameOther, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/tradingview.go b/vendor/github.com/alecthomas/chroma/lexers/t/tradingview.go new file mode 100644 index 000000000..bf303c648 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/tradingview.go @@ -0,0 +1,44 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// TradingView lexer +var TradingView = internal.Register(MustNewLazyLexer( + &Config{ + Name: "TradingView", + Aliases: []string{"tradingview", "tv"}, + Filenames: []string{"*.tv"}, + MimeTypes: []string{"text/x-tradingview"}, + DotAll: true, + EnsureNL: true, + }, + tradingViewRules, +)) + +func tradingViewRules() Rules { + return Rules{ + "root": { + {`[^\S\n]+|\n|[()]`, Text, nil}, + {`(//.*?)(\n)`, ByGroups(CommentSingle, Text), nil}, + {`>=|<=|==|!=|>|<|\?|-|\+|\*|\/|%|\[|\]`, Operator, nil}, + {`[:,.]`, Punctuation, nil}, + {`=`, KeywordPseudo, nil}, + {`"(\\\\|\\"|[^"\n])*["\n]`, LiteralString, nil}, + {`'\\.'|'[^\\]'`, LiteralString, nil}, + {`[0-9](\.[0-9]*)?([eE][+-][0-9]+)?`, LiteralNumber, nil}, + {`#[a-fA-F0-9]{8}|#[a-fA-F0-9]{6}|#[a-fA-F0-9]{3}`, LiteralStringOther, nil}, + {`(abs|acos|alertcondition|alma|asin|atan|atr|avg|barcolor|barssince|bgcolor|cci|ceil|change|cog|color\.new|correlation|cos|crossover|crossunder|cum|dev|ema|exp|falling|fill|fixnan|floor|heikinashi|highest|highestbars|hline|iff|kagi|label\.(delete|get_text|get_x|get_y|new|set_color|set_size|set_style|set_text|set_textcolor|set_x|set_xloc|set_xy|set_y|set_yloc)|line\.(new|delete|get_x1|get_x2|get_y1|get_y2|set_color|set_width|set_style|set_extend|set_xy1|set_xy2|set_x1|set_x2|set_y1|set_y2|set_xloc)|linebreak|linreg|log|log10|lowest|lowestbars|macd|max|max_bars_back|min|mom|nz|percentile_(linear_interpolation|nearest_rank)|percentrank|pivothigh|pivotlow|plot|plotarrow|plotbar|plotcandle|plotchar|plotshape|pointfigure|pow|renko|rising|rma|roc|round|rsi|sar|security|sign|sin|sma|sqrt|stdev|stoch|study|sum|swma|tan|timestamp|tostring|tsi|valuewhen|variance|vwma|wma|strategy\.(cancel|cancel_all|close|close_all|entry|exit|order|risk\.(allow_entry_in|max_cons_loss_days|max_drawdown|max_intraday_filled_orders|max_intraday_loss|max_position_size)))\b`, NameFunction, nil}, + 
{`\b(bool|color|cross|dayofmonth|dayofweek|float|hour|input|int|label|line|minute|month|na|offset|second|strategy|string|tickerid|time|tr|vwap|weekofyear|year)(\()`, ByGroups(NameFunction, Text), nil}, // functions that can also be variable + {`(accdist|adjustment\.(dividends|none|splits)|aqua|area|areabr|bar_index|black|blue|bool|circles|close|columns|currency\.(AUD|CAD|CHF|EUR|GBP|HKD|JPY|NOK|NONE|NZD|RUB|SEK|SGD|TRY|USD|ZAR)|color\.(aqua|black|blue|fuchsia|gray|green|lime|maroon|navy|olive|orange|purple|red|silver|teal|white|yellow)|dashed|dotted|dayofweek\.(monday|tuesday|wednesday|thursday|friday|saturday|sunday)|extend\.(both|left|right|none)|float|format\.(inherit|price|volume)|friday|fuchsia|gray|green|high|histogram|hl2|hlc3|hline\.style_(dotted|solid|dashed)|input\.(bool|float|integer|resolution|session|source|string|symbol)|integer|interval|isdaily|isdwm|isintraday|ismonthly|isweekly|label\.style_(arrowdown|arrowup|circle|cross|diamond|flag|labeldown|labelup|none|square|triangledown|triangleup|xcross)|lime|line\.style_(dashed|dotted|solid|arrow_both|arrow_left|arrow_right)|linebr|location\.(abovebar|absolute|belowbar|bottom|top)|low|maroon|monday|n|navy|ohlc4|olive|open|orange|period|plot\.style_(area|areabr|circles|columns|cross|histogram|line|linebr|stepline)|purple|red|resolution|saturday|scale\.(left|none|right)|session|session\.(extended|regular)|silver|size\.(auto|huge|large|normal|small|tiny)|solid|source|stepline|string|sunday|symbol|syminfo\.(mintick|pointvalue|prefix|root|session|ticker|tickerid|timezone)|teal|thursday|ticker|timeframe\.(isdaily|isdwm|isintraday|ismonthly|isweekly|multiplier|period)|timenow|tuesday|volume|wednesday|white|yellow|strategy\.(cash|closedtrades|commission\.(cash_per_contract|cash_per_order|percent)|direction\.(all|long|short)|equity|eventrades|fixed|grossloss|grossprofit|initial_capital|long|losstrades|max_contracts_held_(all|long|short)|max_drawdown|netprofit|oca\.(cancel|none|reduce)|openprofit|opentrades|percent_of_equity|position_avg_price|position_entry_name|position_size|short|wintrades)|shape\.(arrowdown|arrowup|circle|cross|diamond|flag|labeldown|labelup|square|triangledown|triangleup|xcross)|barstate\.is(first|history|last|new|realtime)|barmerge\.(gaps_on|gaps_off|lookahead_on|lookahead_off)|xloc\.bar_(index|time)|yloc\.(abovebar|belowbar|price))\b`, NameVariable, nil}, + {`(cross|dayofmonth|dayofweek|hour|minute|month|na|second|tickerid|time|tr|vwap|weekofyear|year)(\b[^\(])`, ByGroups(NameVariable, Text), nil}, // variables that can also be function + {`(int|float|bool|color|string|label|line)(\b[^\(=.])`, ByGroups(KeywordType, Text), nil}, // types that can also be a function + {`(var)\b`, KeywordType, nil}, + {`(true|false)\b`, KeywordConstant, nil}, + {`(and|or|not|if|else|for|to)\b`, OperatorWord, nil}, + {`@?[_a-zA-Z]\w*`, Text, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/transactsql.go b/vendor/github.com/alecthomas/chroma/lexers/t/transactsql.go new file mode 100644 index 000000000..e5beb0669 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/transactsql.go @@ -0,0 +1,64 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// TransactSQL lexer. 
+var TransactSQL = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Transact-SQL", + Aliases: []string{"tsql", "t-sql"}, + MimeTypes: []string{"text/x-tsql"}, + NotMultiline: true, + CaseInsensitive: true, + }, + transactSQLRules, +)) + +func transactSQLRules() Rules { + return Rules{ + "root": { + {`\s+`, TextWhitespace, nil}, + {`--(?m).*?$\n?`, CommentSingle, nil}, + {`/\*`, CommentMultiline, Push("multiline-comments")}, + {`'`, LiteralStringSingle, Push("string")}, + {`"`, LiteralStringName, Push("quoted-ident")}, + {Words(``, ``, `!<`, `!=`, `!>`, `<`, `<=`, `<>`, `=`, `>`, `>=`, `+`, `+=`, `-`, `-=`, `*`, `*=`, `/`, `/=`, `%`, `%=`, `&`, `&=`, `|`, `|=`, `^`, `^=`, `~`, `::`), Operator, nil}, + {Words(``, `\b`, `all`, `and`, `any`, `between`, `except`, `exists`, `in`, `intersect`, `like`, `not`, `or`, `some`, `union`), OperatorWord, nil}, + {Words(``, `\b`, `bigint`, `binary`, `bit`, `char`, `cursor`, `date`, `datetime`, `datetime2`, `datetimeoffset`, `decimal`, `float`, `hierarchyid`, `image`, `int`, `money`, `nchar`, `ntext`, `numeric`, `nvarchar`, `real`, `smalldatetime`, `smallint`, `smallmoney`, `sql_variant`, `table`, `text`, `time`, `timestamp`, `tinyint`, `uniqueidentifier`, `varbinary`, `varchar`, `xml`), NameClass, nil}, + {Words(``, `\b`, `$partition`, `abs`, `acos`, `app_name`, `applock_mode`, `applock_test`, `ascii`, `asin`, `assemblyproperty`, `atan`, `atn2`, `avg`, `binary_checksum`, `cast`, `ceiling`, `certencoded`, `certprivatekey`, `char`, `charindex`, `checksum`, `checksum_agg`, `choose`, `col_length`, `col_name`, `columnproperty`, `compress`, `concat`, `connectionproperty`, `context_info`, `convert`, `cos`, `cot`, `count`, `count_big`, `current_request_id`, `current_timestamp`, `current_transaction_id`, `current_user`, `cursor_status`, `database_principal_id`, `databasepropertyex`, `dateadd`, `datediff`, `datediff_big`, `datefromparts`, `datename`, `datepart`, `datetime2fromparts`, `datetimefromparts`, `datetimeoffsetfromparts`, `day`, `db_id`, `db_name`, `decompress`, `degrees`, `dense_rank`, `difference`, `eomonth`, `error_line`, `error_message`, `error_number`, `error_procedure`, `error_severity`, `error_state`, `exp`, `file_id`, `file_idex`, `file_name`, `filegroup_id`, `filegroup_name`, `filegroupproperty`, `fileproperty`, `floor`, `format`, `formatmessage`, `fulltextcatalogproperty`, `fulltextserviceproperty`, `get_filestream_transaction_context`, `getansinull`, `getdate`, `getutcdate`, `grouping`, `grouping_id`, `has_perms_by_name`, `host_id`, `host_name`, `iif`, `index_col`, `indexkey_property`, `indexproperty`, `is_member`, `is_rolemember`, `is_srvrolemember`, `isdate`, `isjson`, `isnull`, `isnumeric`, `json_modify`, `json_query`, `json_value`, `left`, `len`, `log`, `log10`, `lower`, `ltrim`, `max`, `min`, `min_active_rowversion`, `month`, `nchar`, `newid`, `newsequentialid`, `ntile`, `object_definition`, `object_id`, `object_name`, `object_schema_name`, `objectproperty`, `objectpropertyex`, `opendatasource`, `openjson`, `openquery`, `openrowset`, `openxml`, `original_db_name`, `original_login`, `parse`, `parsename`, `patindex`, `permissions`, `pi`, `power`, `pwdcompare`, `pwdencrypt`, `quotename`, `radians`, `rand`, `rank`, `replace`, `replicate`, `reverse`, `right`, `round`, `row_number`, `rowcount_big`, `rtrim`, `schema_id`, `schema_name`, `scope_identity`, `serverproperty`, `session_context`, `session_user`, `sign`, `sin`, `smalldatetimefromparts`, `soundex`, `sp_helplanguage`, `space`, `sqrt`, `square`, `stats_date`, `stdev`, `stdevp`, `str`, 
`string_escape`, `string_split`, `stuff`, `substring`, `sum`, `suser_id`, `suser_name`, `suser_sid`, `suser_sname`, `switchoffset`, `sysdatetime`, `sysdatetimeoffset`, `system_user`, `sysutcdatetime`, `tan`, `textptr`, `textvalid`, `timefromparts`, `todatetimeoffset`, `try_cast`, `try_convert`, `try_parse`, `type_id`, `type_name`, `typeproperty`, `unicode`, `upper`, `user_id`, `user_name`, `var`, `varp`, `xact_state`, `year`), NameFunction, nil}, + {`(goto)(\s+)(\w+\b)`, ByGroups(Keyword, TextWhitespace, NameLabel), nil}, + {Words(``, `\b`, `absolute`, `action`, `ada`, `add`, `admin`, `after`, `aggregate`, `alias`, `all`, `allocate`, `alter`, `and`, `any`, `are`, `array`, `as`, `asc`, `asensitive`, `assertion`, `asymmetric`, `at`, `atomic`, `authorization`, `avg`, `backup`, `before`, `begin`, `between`, `binary`, `bit`, `bit_length`, `blob`, `boolean`, `both`, `breadth`, `break`, `browse`, `bulk`, `by`, `call`, `called`, `cardinality`, `cascade`, `cascaded`, `case`, `cast`, `catalog`, `catch`, `char`, `char_length`, `character`, `character_length`, `check`, `checkpoint`, `class`, `clob`, `close`, `clustered`, `coalesce`, `collate`, `collation`, `collect`, `column`, `commit`, `completion`, `compute`, `condition`, `connect`, `connection`, `constraint`, `constraints`, `constructor`, `contains`, `containstable`, `continue`, `convert`, `corr`, `corresponding`, `count`, `covar_pop`, `covar_samp`, `create`, `cross`, `cube`, `cume_dist`, `current`, `current_catalog`, `current_date`, `current_default_transform_group`, `current_path`, `current_role`, `current_schema`, `current_time`, `current_timestamp`, `current_transform_group_for_type`, `current_user`, `cursor`, `cycle`, `data`, `database`, `date`, `day`, `dbcc`, `deallocate`, `dec`, `decimal`, `declare`, `default`, `deferrable`, `deferred`, `delete`, `deny`, `depth`, `deref`, `desc`, `describe`, `descriptor`, `destroy`, `destructor`, `deterministic`, `diagnostics`, `dictionary`, `disconnect`, `disk`, `distinct`, `distributed`, `domain`, `double`, `drop`, `dump`, `dynamic`, `each`, `element`, `else`, `end`, `end-exec`, `equals`, `errlvl`, `escape`, `every`, `except`, `exception`, `exec`, `execute`, `exists`, `exit`, `external`, `extract`, `false`, `fetch`, `file`, `fillfactor`, `filter`, `first`, `float`, `for`, `foreign`, `fortran`, `found`, `free`, `freetext`, `freetexttable`, `from`, `full`, `fulltexttable`, `function`, `fusion`, `general`, `get`, `global`, `go`, `goto`, `grant`, `group`, `grouping`, `having`, `hold`, `holdlock`, `host`, `hour`, `identity`, `identity_insert`, `identitycol`, `if`, `ignore`, `immediate`, `in`, `include`, `index`, `indicator`, `initialize`, `initially`, `inner`, `inout`, `input`, `insensitive`, `insert`, `int`, `integer`, `intersect`, `intersection`, `interval`, `into`, `is`, `isolation`, `iterate`, `join`, `key`, `kill`, `language`, `large`, `last`, `lateral`, `leading`, `left`, `less`, `level`, `like`, `like_regex`, `limit`, `lineno`, `ln`, `load`, `local`, `localtime`, `localtimestamp`, `locator`, `lower`, `map`, `match`, `max`, `member`, `merge`, `method`, `min`, `minute`, `mod`, `modifies`, `modify`, `module`, `month`, `multiset`, `names`, `national`, `natural`, `nchar`, `nclob`, `new`, `next`, `no`, `nocheck`, `nonclustered`, `none`, `normalize`, `not`, `null`, `nullif`, `numeric`, `object`, `occurrences_regex`, `octet_length`, `of`, `off`, `offsets`, `old`, `on`, `only`, `open`, `opendatasource`, `openquery`, `openrowset`, `openxml`, `operation`, `option`, `or`, `order`, `ordinality`, `out`, `outer`, 
`output`, `over`, `overlaps`, `overlay`, `pad`, `parameter`, `parameters`, `partial`, `partition`, `pascal`, `path`, `percent`, `percent_rank`, `percentile_cont`, `percentile_disc`, `pivot`, `plan`, `position`, `position_regex`, `postfix`, `precision`, `prefix`, `preorder`, `prepare`, `preserve`, `primary`, `print`, `prior`, `privileges`, `proc`, `procedure`, `public`, `raiserror`, `range`, `read`, `reads`, `readtext`, `real`, `reconfigure`, `recursive`, `ref`, `references`, `referencing`, `regr_avgx`, `regr_avgy`, `regr_count`, `regr_intercept`, `regr_r2`, `regr_slope`, `regr_sxx`, `regr_sxy`, `regr_syy`, `relative`, `release`, `replication`, `restore`, `restrict`, `result`, `return`, `returns`, `revert`, `revoke`, `right`, `role`, `rollback`, `rollup`, `routine`, `row`, `rowcount`, `rowguidcol`, `rows`, `rule`, `save`, `savepoint`, `schema`, `scope`, `scroll`, `search`, `second`, `section`, `securityaudit`, `select`, `semantickeyphrasetable`, `semanticsimilaritydetailstable`, `semanticsimilaritytable`, `sensitive`, `sequence`, `session`, `session_user`, `set`, `sets`, `setuser`, `shutdown`, `similar`, `size`, `smallint`, `some`, `space`, `specific`, `specifictype`, `sql`, `sqlca`, `sqlcode`, `sqlerror`, `sqlexception`, `sqlstate`, `sqlwarning`, `start`, `state`, `statement`, `static`, `statistics`, `stddev_pop`, `stddev_samp`, `structure`, `submultiset`, `substring`, `substring_regex`, `sum`, `symmetric`, `system`, `system_user`, `table`, `tablesample`, `temporary`, `terminate`, `textsize`, `than`, `then`, `throw`, `time`, `timestamp`, `timezone_hour`, `timezone_minute`, `to`, `top`, `trailing`, `tran`, `transaction`, `translate`, `translate_regex`, `translation`, `treat`, `trigger`, `trim`, `true`, `truncate`, `try`, `try_convert`, `tsequal`, `uescape`, `under`, `union`, `unique`, `unknown`, `unnest`, `unpivot`, `update`, `updatetext`, `upper`, `usage`, `use`, `user`, `using`, `value`, `values`, `var_pop`, `var_samp`, `varchar`, `variable`, `varying`, `view`, `waitfor`, `when`, `whenever`, `where`, `while`, `width_bucket`, `window`, `with`, `within`, `without`, `work`, `write`, `writetext`, `xmlagg`, `xmlattributes`, `xmlbinary`, `xmlcast`, `xmlcomment`, `xmlconcat`, `xmldocument`, `xmlelement`, `xmlexists`, `xmlforest`, `xmliterate`, `xmlnamespaces`, `xmlparse`, `xmlpi`, `xmlquery`, `xmlserialize`, `xmltable`, `xmltext`, `xmlvalidate`, `year`, `zone`), Keyword, nil}, + {`(\[)([^]]+)(\])`, ByGroups(Operator, Name, Operator), nil}, + {`0x[0-9a-f]+`, LiteralNumberHex, nil}, + {`[0-9]+\.[0-9]*(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`\.[0-9]+(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`[0-9]+e[+-]?[0-9]+`, LiteralNumberFloat, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`[;(),.]`, Punctuation, nil}, + {`@@\w+`, NameBuiltin, nil}, + {`@\w+`, NameVariable, nil}, + {`(\w+)(:)`, ByGroups(NameLabel, Punctuation), nil}, + {`#?#?\w+`, Name, nil}, + {`\?`, NameVariableMagic, nil}, + }, + "multiline-comments": { + {`/\*`, CommentMultiline, Push("multiline-comments")}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[^/*]+`, CommentMultiline, nil}, + {`[/*]`, CommentMultiline, nil}, + }, + "string": { + {`[^']+`, LiteralStringSingle, nil}, + {`''`, LiteralStringSingle, nil}, + {`'`, LiteralStringSingle, Pop(1)}, + }, + "quoted-ident": { + {`[^"]+`, LiteralStringName, nil}, + {`""`, LiteralStringName, nil}, + {`"`, LiteralStringName, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/turing.go b/vendor/github.com/alecthomas/chroma/lexers/t/turing.go new file mode 
100644 index 000000000..e34a6c1ce --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/turing.go @@ -0,0 +1,47 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Turing lexer. +var Turing = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Turing", + Aliases: []string{"turing"}, + Filenames: []string{"*.turing", "*.tu"}, + MimeTypes: []string{"text/x-turing"}, + }, + turingRules, +)) + +func turingRules() Rules { + return Rules{ + "root": { + {`\n`, Text, nil}, + {`\s+`, Text, nil}, + {`\\\n`, Text, nil}, + {`%(.*?)\n`, CommentSingle, nil}, + {`/(\\\n)?[*](.|\n)*?[*](\\\n)?/`, CommentMultiline, nil}, + {`(var|fcn|function|proc|procedure|process|class|end|record|type|begin|case|loop|for|const|union|monitor|module|handler)\b`, KeywordDeclaration, nil}, + {`(all|asm|assert|bind|bits|body|break|by|cheat|checked|close|condition|decreasing|def|deferred|else|elsif|exit|export|external|flexible|fork|forward|free|get|if|implement|import|include|inherit|init|invariant|label|new|objectclass|of|opaque|open|packed|pause|pervasive|post|pre|priority|put|quit|read|register|result|seek|self|set|signal|skip|tag|tell|then|timeout|to|unchecked|unqualified|wait|when|write)\b`, Keyword, nil}, + {`(true|false)\b`, KeywordConstant, nil}, + {Words(``, `\b`, `addressint`, `array`, `boolean`, `char`, `int`, `int1`, `int2`, `int4`, `int8`, `nat`, `nat1`, `nat2`, `nat4`, `nat8`, `pointer`, `real`, `real4`, `real8`, `string`, `enum`), KeywordType, nil}, + {`\d+i`, LiteralNumber, nil}, + {`\d+\.\d*([Ee][-+]\d+)?i`, LiteralNumber, nil}, + {`\.\d+([Ee][-+]\d+)?i`, LiteralNumber, nil}, + {`\d+[Ee][-+]\d+i`, LiteralNumber, nil}, + {`\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)`, LiteralNumberFloat, nil}, + {`\.\d+([eE][+\-]?\d+)?`, LiteralNumberFloat, nil}, + {`0[0-7]+`, LiteralNumberOct, nil}, + {`0[xX][0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`(0|[1-9][0-9]*)`, LiteralNumberInteger, nil}, + {`(div|mod|rem|\*\*|=|<|>|>=|<=|not=|not|and|or|xor|=>|in|shl|shr|->|~|~=|~in|&|:=|\.\.|[\^+\-*/&#])`, Operator, nil}, + {`'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'`, LiteralStringChar, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`[()\[\]{}.,:]`, Punctuation, nil}, + {`[^\W\d]\w*`, NameOther, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/turtle.go b/vendor/github.com/alecthomas/chroma/lexers/t/turtle.go new file mode 100644 index 000000000..437f375f5 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/turtle.go @@ -0,0 +1,71 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Turtle lexer. 
+var Turtle = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Turtle", + Aliases: []string{"turtle"}, + Filenames: []string{"*.ttl"}, + MimeTypes: []string{"text/turtle", "application/x-turtle"}, + NotMultiline: true, + CaseInsensitive: true, + }, + turtleRules, +)) + +func turtleRules() Rules { + return Rules{ + "root": { + {`\s+`, TextWhitespace, nil}, + {"(@base|BASE)(\\s+)(<[^<>\"{}|^`\\\\\\x00-\\x20]*>)(\\s*)(\\.?)", ByGroups(Keyword, TextWhitespace, NameVariable, TextWhitespace, Punctuation), nil}, + {"(@prefix|PREFIX)(\\s+)((?:[a-z][\\w-]*)?\\:)(\\s+)(<[^<>\"{}|^`\\\\\\x00-\\x20]*>)(\\s*)(\\.?)", ByGroups(Keyword, TextWhitespace, NameNamespace, TextWhitespace, NameVariable, TextWhitespace, Punctuation), nil}, + {`(?<=\s)a(?=\s)`, KeywordType, nil}, + {"(<[^<>\"{}|^`\\\\\\x00-\\x20]*>)", NameVariable, nil}, + {`((?:[a-z][\w-]*)?\:)([a-z][\w-]*)`, ByGroups(NameNamespace, NameTag), nil}, + {`#[^\n]+`, Comment, nil}, + {`\b(true|false)\b`, Literal, nil}, + {`[+\-]?\d*\.\d+`, LiteralNumberFloat, nil}, + {`[+\-]?\d*(:?\.\d+)?E[+\-]?\d+`, LiteralNumberFloat, nil}, + {`[+\-]?\d+`, LiteralNumberInteger, nil}, + {`[\[\](){}.;,:^]`, Punctuation, nil}, + {`"""`, LiteralString, Push("triple-double-quoted-string")}, + {`"`, LiteralString, Push("single-double-quoted-string")}, + {`'''`, LiteralString, Push("triple-single-quoted-string")}, + {`'`, LiteralString, Push("single-single-quoted-string")}, + }, + "triple-double-quoted-string": { + {`"""`, LiteralString, Push("end-of-string")}, + {`[^\\]+`, LiteralString, nil}, + {`\\`, LiteralString, Push("string-escape")}, + }, + "single-double-quoted-string": { + {`"`, LiteralString, Push("end-of-string")}, + {`[^"\\\n]+`, LiteralString, nil}, + {`\\`, LiteralString, Push("string-escape")}, + }, + "triple-single-quoted-string": { + {`'''`, LiteralString, Push("end-of-string")}, + {`[^\\]+`, LiteralString, nil}, + {`\\`, LiteralString, Push("string-escape")}, + }, + "single-single-quoted-string": { + {`'`, LiteralString, Push("end-of-string")}, + {`[^'\\\n]+`, LiteralString, nil}, + {`\\`, LiteralString, Push("string-escape")}, + }, + "string-escape": { + {`.`, LiteralString, Pop(1)}, + }, + "end-of-string": { + {`(@)([a-z]+(:?-[a-z0-9]+)*)`, ByGroups(Operator, GenericEmph, GenericEmph), Pop(2)}, + {"(\\^\\^)(<[^<>\"{}|^`\\\\\\x00-\\x20]*>)", ByGroups(Operator, GenericEmph), Pop(2)}, + {`(\^\^)((?:[a-z][\w-]*)?\:)([a-z][\w-]*)`, ByGroups(Operator, GenericEmph, GenericEmph), Pop(2)}, + Default(Pop(2)), + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/twig.go b/vendor/github.com/alecthomas/chroma/lexers/t/twig.go new file mode 100644 index 000000000..0e0c1f18a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/twig.go @@ -0,0 +1,58 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Twig lexer. 
+var Twig = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Twig", + Aliases: []string{"twig"}, + Filenames: []string{}, + MimeTypes: []string{"application/x-twig"}, + DotAll: true, + }, + twigRules, +)) + +func twigRules() Rules { + return Rules{ + "root": { + {`[^{]+`, Other, nil}, + {`\{\{`, CommentPreproc, Push("var")}, + {`\{\#.*?\#\}`, Comment, nil}, + {`(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)(\{%)(-?\s*)(endraw)(\s*-?)(%\})`, ByGroups(CommentPreproc, Text, Keyword, Text, CommentPreproc, Other, CommentPreproc, Text, Keyword, Text, CommentPreproc), nil}, + {`(\{%)(-?\s*)(verbatim)(\s*-?)(%\})(.*?)(\{%)(-?\s*)(endverbatim)(\s*-?)(%\})`, ByGroups(CommentPreproc, Text, Keyword, Text, CommentPreproc, Other, CommentPreproc, Text, Keyword, Text, CommentPreproc), nil}, + {`(\{%)(-?\s*)(filter)(\s+)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w-]|[^\x00-\x7f])*)`, ByGroups(CommentPreproc, Text, Keyword, Text, NameFunction), Push("tag")}, + {`(\{%)(-?\s*)([a-zA-Z_]\w*)`, ByGroups(CommentPreproc, Text, Keyword), Push("tag")}, + {`\{`, Other, nil}, + }, + "varnames": { + {`(\|)(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w-]|[^\x00-\x7f])*)`, ByGroups(Operator, Text, NameFunction), nil}, + {`(is)(\s+)(not)?(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w-]|[^\x00-\x7f])*)`, ByGroups(Keyword, Text, Keyword, Text, NameFunction), nil}, + {`(?i)(true|false|none|null)\b`, KeywordPseudo, nil}, + {`(in|not|and|b-and|or|b-or|b-xor|isif|elseif|else|importconstant|defined|divisibleby|empty|even|iterable|odd|sameasmatches|starts\s+with|ends\s+with)\b`, Keyword, nil}, + {`(loop|block|parent)\b`, NameBuiltin, nil}, + {`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w-]|[^\x00-\x7f])*`, NameVariable, nil}, + {`\.(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w-]|[^\x00-\x7f])*`, NameVariable, nil}, + {`\.[0-9]+`, LiteralNumber, nil}, + {`:?"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`:?'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + {`([{}()\[\]+\-*/,:~%]|\.\.|\?|:|\*\*|\/\/|!=|[><=]=?)`, Operator, nil}, + {`[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil}, + }, + "var": { + {`\s+`, Text, nil}, + {`(-?)(\}\})`, ByGroups(Text, CommentPreproc), Pop(1)}, + Include("varnames"), + }, + "tag": { + {`\s+`, Text, nil}, + {`(-?)(%\})`, ByGroups(Text, CommentPreproc), Pop(1)}, + Include("varnames"), + {`.`, Punctuation, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/typescript.go b/vendor/github.com/alecthomas/chroma/lexers/t/typescript.go new file mode 100644 index 000000000..0625bb092 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/typescript.go @@ -0,0 +1,101 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// TypeScript lexer. 
+var TypeScript = internal.Register(MustNewLazyLexer( + &Config{ + Name: "TypeScript", + Aliases: []string{"ts", "tsx", "typescript"}, + Filenames: []string{"*.ts", "*.tsx"}, + MimeTypes: []string{"text/x-typescript"}, + DotAll: true, + EnsureNL: true, + }, + typeScriptRules, +)) + +func typeScriptRules() Rules { + return Rules{ + "commentsandwhitespace": { + {`\s+`, Text, nil}, + {``, Comment, Pop(1)}, + {`-`, Comment, nil}, + }, + "tag": { + {`\s+`, Text, nil}, + {`[\w.:-]+\s*=`, NameAttribute, Push("attr")}, + {`/?\s*>`, NameTag, Pop(1)}, + }, + "attr": { + {`\s+`, Text, nil}, + {`".*?"`, LiteralString, Pop(1)}, + {`'.*?'`, LiteralString, Pop(1)}, + {`[^\s>]+`, LiteralString, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/x/xorg.go b/vendor/github.com/alecthomas/chroma/lexers/x/xorg.go new file mode 100644 index 000000000..7936a51ab --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/x/xorg.go @@ -0,0 +1,29 @@ +package x + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Xorg lexer. +var Xorg = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Xorg", + Aliases: []string{"xorg.conf"}, + Filenames: []string{"xorg.conf"}, + MimeTypes: []string{}, + }, + xorgRules, +)) + +func xorgRules() Rules { + return Rules{ + "root": { + {`\s+`, TextWhitespace, nil}, + {`#.*$`, Comment, nil}, + {`((|Sub)Section)(\s+)("\w+")`, ByGroups(KeywordNamespace, LiteralStringEscape, TextWhitespace, LiteralStringEscape), nil}, + {`(End(|Sub)Section)`, KeywordNamespace, nil}, + {`(\w+)(\s+)([^\n#]+)`, ByGroups(NameKeyword, TextWhitespace, LiteralString), nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/y/yaml.go b/vendor/github.com/alecthomas/chroma/lexers/y/yaml.go new file mode 100644 index 000000000..488f76004 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/y/yaml.go @@ -0,0 +1,58 @@ +package y + +import ( + . 
"github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +var YAML = internal.Register(MustNewLazyLexer( + &Config{ + Name: "YAML", + Aliases: []string{"yaml"}, + Filenames: []string{"*.yaml", "*.yml"}, + MimeTypes: []string{"text/x-yaml"}, + }, + yamlRules, +)) + +func yamlRules() Rules { + return Rules{ + "root": { + Include("whitespace"), + {`^---`, NameNamespace, nil}, + {`^\.\.\.`, NameNamespace, nil}, + {`[\n?]?\s*- `, Text, nil}, + {`#.*$`, Comment, nil}, + {`!![^\s]+`, CommentPreproc, nil}, + {`&[^\s]+`, CommentPreproc, nil}, + {`\*[^\s]+`, CommentPreproc, nil}, + {`^%include\s+[^\n\r]+`, CommentPreproc, nil}, + Include("key"), + Include("value"), + {`[?:,\[\]]`, Punctuation, nil}, + {`.`, Text, nil}, + }, + "value": { + {`([>|](?:[+-])?)(\n(^ {1,})(?:.*\n*(?:^\3 *).*)*)`, ByGroups(Punctuation, StringDoc, Whitespace), nil}, + {Words(``, `\b`, "true", "True", "TRUE", "false", "False", "FALSE", "null", + "y", "Y", "yes", "Yes", "YES", "n", "N", "no", "No", "NO", + "on", "On", "ON", "off", "Off", "OFF"), KeywordConstant, nil}, + {`"(?:\\.|[^"])*"`, StringDouble, nil}, + {`'(?:\\.|[^'])*'`, StringSingle, nil}, + {`\d\d\d\d-\d\d-\d\d([T ]\d\d:\d\d:\d\d(\.\d+)?(Z|\s+[-+]\d+)?)?`, LiteralDate, nil}, + {`\b[+\-]?(0x[\da-f]+|0o[0-7]+|(\d+\.?\d*|\.?\d+)(e[\+\-]?\d+)?|\.inf|\.nan)\b`, Number, nil}, + {`([^\{\}\[\]\?,\:\!\-\*&\@].*)( )+(#.*)`, ByGroups(Literal, Whitespace, Comment), nil}, + {`[^\{\}\[\]\?,\:\!\-\*&\@].*`, Literal, nil}, + }, + "key": { + {`"[^"\n].*": `, NameTag, nil}, + {`(-)( )([^"\n{]*)(:)( )`, ByGroups(Punctuation, Whitespace, NameTag, Punctuation, Whitespace), nil}, + {`([^"\n{]*)(:)( )`, ByGroups(NameTag, Punctuation, Whitespace), nil}, + {`([^"\n{]*)(:)(\n)`, ByGroups(NameTag, Punctuation, Whitespace), nil}, + }, + "whitespace": { + {`\s+`, Whitespace, nil}, + {`\n+`, Whitespace, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/y/yang.go b/vendor/github.com/alecthomas/chroma/lexers/y/yang.go new file mode 100644 index 000000000..36349eb9f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/y/yang.go @@ -0,0 +1,71 @@ +package y + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +var YANG = internal.Register(MustNewLazyLexer( + &Config{ + Name: "YANG", + Aliases: []string{"yang"}, + Filenames: []string{"*.yang"}, + MimeTypes: []string{"application/yang"}, + }, + yangRules, +)) + +func yangRules() Rules { + return Rules{ + "root": { + {`\s+`, Whitespace, nil}, + {`[\{\}\;]+`, Punctuation, nil}, + {`(?<|^!?/\-*&~:]`, Operator, nil}, + {`[{}()\[\],.;]`, Punctuation, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/z/zig.go b/vendor/github.com/alecthomas/chroma/lexers/z/zig.go new file mode 100644 index 000000000..56f54fd98 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/z/zig.go @@ -0,0 +1,58 @@ +package z + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Zig lexer. 
+var Zig = internal.Register(MustNewLazyLexer( + &Config{ + Name: "Zig", + Aliases: []string{"zig"}, + Filenames: []string{"*.zig"}, + MimeTypes: []string{"text/zig"}, + }, + zigRules, +)) + +func zigRules() Rules { + return Rules{ + "root": { + {`\n`, TextWhitespace, nil}, + {`\s+`, TextWhitespace, nil}, + {`//.*?\n`, CommentSingle, nil}, + {Words(``, `\b`, `break`, `return`, `continue`, `asm`, `defer`, `errdefer`, `unreachable`, `try`, `catch`, `async`, `await`, `suspend`, `resume`, `cancel`), Keyword, nil}, + {Words(``, `\b`, `const`, `var`, `extern`, `packed`, `export`, `pub`, `noalias`, `inline`, `comptime`, `nakedcc`, `stdcallcc`, `volatile`, `allowzero`, `align`, `linksection`, `threadlocal`), KeywordReserved, nil}, + {Words(``, `\b`, `struct`, `enum`, `union`, `error`), Keyword, nil}, + {Words(``, `\b`, `while`, `for`), Keyword, nil}, + {Words(``, `\b`, `bool`, `f16`, `f32`, `f64`, `f128`, `void`, `noreturn`, `type`, `anyerror`, `promise`, `i0`, `u0`, `isize`, `usize`, `comptime_int`, `comptime_float`, `c_short`, `c_ushort`, `c_int`, `c_uint`, `c_long`, `c_ulong`, `c_longlong`, `c_ulonglong`, `c_longdouble`, `c_voidi8`, `u8`, `i16`, `u16`, `i32`, `u32`, `i64`, `u64`, `i128`, `u128`), KeywordType, nil}, + {Words(``, `\b`, `true`, `false`, `null`, `undefined`), KeywordConstant, nil}, + {Words(``, `\b`, `if`, `else`, `switch`, `and`, `or`, `orelse`), Keyword, nil}, + {Words(``, `\b`, `fn`, `usingnamespace`, `test`), Keyword, nil}, + {`0x[0-9a-fA-F]+\.[0-9a-fA-F]+([pP][\-+]?[0-9a-fA-F]+)?`, LiteralNumberFloat, nil}, + {`0x[0-9a-fA-F]+\.?[pP][\-+]?[0-9a-fA-F]+`, LiteralNumberFloat, nil}, + {`[0-9]+\.[0-9]+([eE][-+]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`[0-9]+\.?[eE][-+]?[0-9]+`, LiteralNumberFloat, nil}, + {`0b(?:_?[01])+`, LiteralNumberBin, nil}, + {`0o(?:_?[0-7])+`, LiteralNumberOct, nil}, + {`0x(?:_?[0-9a-fA-F])+`, LiteralNumberHex, nil}, + {`(?:_?[0-9])+`, LiteralNumberInteger, nil}, + {`@[a-zA-Z_]\w*`, NameBuiltin, nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + {`\'\\\'\'`, LiteralStringEscape, nil}, + {`\'\\(|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])\'`, LiteralStringEscape, nil}, + {`\'[^\\\']\'`, LiteralString, nil}, + {`\\\\[^\n]*`, LiteralStringHeredoc, nil}, + {`c\\\\[^\n]*`, LiteralStringHeredoc, nil}, + {`c?"`, LiteralString, Push("string")}, + {`[+%=><|^!?/\-*&~:]`, Operator, nil}, + {`[{}()\[\],.;]`, Punctuation, nil}, + }, + "string": { + {`\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])`, LiteralStringEscape, nil}, + {`[^\\"\n]+`, LiteralString, nil}, + {`"`, LiteralString, Pop(1)}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/mutators.go b/vendor/github.com/alecthomas/chroma/mutators.go new file mode 100644 index 000000000..bd6d720e0 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/mutators.go @@ -0,0 +1,131 @@ +package chroma + +import ( + "fmt" + "strings" +) + +// A Mutator modifies the behaviour of the lexer. +type Mutator interface { + // Mutate the lexer state machine as it is processing. + Mutate(state *LexerState) error +} + +// A LexerMutator is an additional interface that a Mutator can implement +// to modify the lexer when it is compiled. +type LexerMutator interface { + // Rules are the lexer rules, state is the state key for the rule the mutator is associated with. + MutateLexer(rules CompiledRules, state string, rule int) error +} + +// A MutatorFunc is a Mutator that mutates the lexer state machine as it is processing. 
+type MutatorFunc func(state *LexerState) error + +func (m MutatorFunc) Mutate(state *LexerState) error { return m(state) } // nolint + +// Mutators applies a set of Mutators in order. +func Mutators(modifiers ...Mutator) MutatorFunc { + return func(state *LexerState) error { + for _, modifier := range modifiers { + if err := modifier.Mutate(state); err != nil { + return err + } + } + return nil + } +} + +type includeMutator struct { + state string +} + +// Include the given state. +func Include(state string) Rule { + return Rule{Mutator: &includeMutator{state}} +} + +func (i *includeMutator) Mutate(s *LexerState) error { + return fmt.Errorf("should never reach here Include(%q)", i.state) +} + +func (i *includeMutator) MutateLexer(rules CompiledRules, state string, rule int) error { + includedRules, ok := rules[i.state] + if !ok { + return fmt.Errorf("invalid include state %q", i.state) + } + rules[state] = append(rules[state][:rule], append(includedRules, rules[state][rule+1:]...)...) + return nil +} + +type combinedMutator struct { + states []string +} + +// Combined creates a new anonymous state from the given states, and pushes that state. +func Combined(states ...string) Mutator { + return &combinedMutator{states} +} + +func (c *combinedMutator) Mutate(s *LexerState) error { + return fmt.Errorf("should never reach here Combined(%v)", c.states) +} + +func (c *combinedMutator) MutateLexer(rules CompiledRules, state string, rule int) error { + name := "__combined_" + strings.Join(c.states, "__") + if _, ok := rules[name]; !ok { + combined := []*CompiledRule{} + for _, state := range c.states { + rules, ok := rules[state] + if !ok { + return fmt.Errorf("invalid combine state %q", state) + } + combined = append(combined, rules...) + } + rules[name] = combined + } + rules[state][rule].Mutator = Push(name) + return nil +} + +// Push states onto the stack. +func Push(states ...string) MutatorFunc { + return func(s *LexerState) error { + if len(states) == 0 { + s.Stack = append(s.Stack, s.State) + } else { + for _, state := range states { + if state == "#pop" { + s.Stack = s.Stack[:len(s.Stack)-1] + } else { + s.Stack = append(s.Stack, state) + } + } + } + return nil + } +} + +// Pop state from the stack when rule matches. +func Pop(n int) MutatorFunc { + return func(state *LexerState) error { + if len(state.Stack) == 0 { + return fmt.Errorf("nothing to pop") + } + state.Stack = state.Stack[:len(state.Stack)-n] + return nil + } +} + +// Default returns a Rule that applies a set of Mutators. +func Default(mutators ...Mutator) Rule { + return Rule{Mutator: Mutators(mutators...)} +} + +// Stringify returns the raw string for a set of tokens. 
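Push, Pop, Include and Default above are the state mutators the vendored lexers rely on; Combined, used for example by the Thrift lexer's string states, synthesises an anonymous state from several existing ones and pushes it. A minimal sketch of how they fit together in a Rules table, using the same dot-import convention as the lexer files (the lexer and its states are hypothetical, not taken from any file in this change):

package example

import (
	. "github.com/alecthomas/chroma" // nolint
)

// Example is a hypothetical lexer; it exists only to illustrate the mutators.
var Example = MustNewLazyLexer(
	&Config{Name: "Example", Aliases: []string{"example"}},
	exampleRules,
)

func exampleRules() Rules {
	return Rules{
		"root": {
			Include("whitespace"),                // spliced into "root" when the lexer is compiled
			{`"`, LiteralString, Push("string")}, // enter the nested "string" state
			{`[a-zA-Z_]\w*`, Name, nil},          // nil mutator: stay in the current state
			{`.`, Text, nil},
		},
		"whitespace": {
			{`\s+`, TextWhitespace, nil},
		},
		"string": {
			{`"`, LiteralString, Pop(1)}, // closing quote pops the state pushed above
			{`\\.`, LiteralStringEscape, nil},
			{`[^"\\]+`, LiteralString, nil},
			Default(Pop(1)), // e.g. a trailing lone backslash: pop without consuming input
		},
	}
}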
+func Stringify(tokens ...Token) string { + out := []string{} + for _, t := range tokens { + out = append(out, t.Value) + } + return strings.Join(out, "") +} diff --git a/vendor/github.com/alecthomas/chroma/pygments-lexers.txt b/vendor/github.com/alecthomas/chroma/pygments-lexers.txt new file mode 100644 index 000000000..bf4d9de17 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/pygments-lexers.txt @@ -0,0 +1,322 @@ +Generated with: + + g 'class.*RegexLexer' | pawk --strict -F: '"pygments.lexers.%s.%s" % (f[0].split(".")[0], f[2].split()[1].split("(")[0])' > lexers.txt + +kotlin: + invalid unicode escape sequences + FIXED: Have to disable wide Unicode characters in unistring.py + +pygments.lexers.ambient.AmbientTalkLexer +pygments.lexers.ampl.AmplLexer +pygments.lexers.actionscript.ActionScriptLexer +pygments.lexers.actionscript.ActionScript3Lexer +pygments.lexers.actionscript.MxmlLexer +pygments.lexers.algebra.GAPLexer +pygments.lexers.algebra.MathematicaLexer +pygments.lexers.algebra.MuPADLexer +pygments.lexers.algebra.BCLexer +pygments.lexers.apl.APLLexer +pygments.lexers.bibtex.BibTeXLexer +pygments.lexers.bibtex.BSTLexer +pygments.lexers.basic.BlitzMaxLexer +pygments.lexers.basic.BlitzBasicLexer +pygments.lexers.basic.MonkeyLexer +pygments.lexers.basic.CbmBasicV2Lexer +pygments.lexers.basic.QBasicLexer +pygments.lexers.automation.AutohotkeyLexer +pygments.lexers.automation.AutoItLexer +pygments.lexers.archetype.AtomsLexer +pygments.lexers.c_like.ClayLexer +pygments.lexers.c_like.ValaLexer +pygments.lexers.asm.GasLexer +pygments.lexers.asm.ObjdumpLexer +pygments.lexers.asm.HsailLexer +pygments.lexers.asm.LlvmLexer +pygments.lexers.asm.NasmLexer +pygments.lexers.asm.TasmLexer +pygments.lexers.asm.Ca65Lexer +pygments.lexers.business.CobolLexer +pygments.lexers.business.ABAPLexer +pygments.lexers.business.OpenEdgeLexer +pygments.lexers.business.GoodDataCLLexer +pygments.lexers.business.MaqlLexer +pygments.lexers.capnproto.CapnProtoLexer +pygments.lexers.chapel.ChapelLexer +pygments.lexers.clean.CleanLexer +pygments.lexers.c_cpp.CFamilyLexer +pygments.lexers.console.VCTreeStatusLexer +pygments.lexers.console.PyPyLogLexer +pygments.lexers.csound.CsoundLexer +pygments.lexers.csound.CsoundDocumentLexer +pygments.lexers.csound.CsoundDocumentLexer +pygments.lexers.crystal.CrystalLexer +pygments.lexers.dalvik.SmaliLexer +pygments.lexers.css.CssLexer +pygments.lexers.css.SassLexer +pygments.lexers.css.ScssLexer +pygments.lexers.configs.IniLexer +pygments.lexers.configs.RegeditLexer +pygments.lexers.configs.PropertiesLexer +pygments.lexers.configs.KconfigLexer +pygments.lexers.configs.Cfengine3Lexer +pygments.lexers.configs.ApacheConfLexer +pygments.lexers.configs.SquidConfLexer +pygments.lexers.configs.NginxConfLexer +pygments.lexers.configs.LighttpdConfLexer +pygments.lexers.configs.DockerLexer +pygments.lexers.configs.TerraformLexer +pygments.lexers.configs.TermcapLexer +pygments.lexers.configs.TerminfoLexer +pygments.lexers.configs.PkgConfigLexer +pygments.lexers.configs.PacmanConfLexer +pygments.lexers.data.YamlLexer +pygments.lexers.data.JsonLexer +pygments.lexers.diff.DiffLexer +pygments.lexers.diff.DarcsPatchLexer +pygments.lexers.diff.WDiffLexer +pygments.lexers.dotnet.CSharpLexer +pygments.lexers.dotnet.NemerleLexer +pygments.lexers.dotnet.BooLexer +pygments.lexers.dotnet.VbNetLexer +pygments.lexers.dotnet.GenericAspxLexer +pygments.lexers.dotnet.FSharpLexer +pygments.lexers.dylan.DylanLexer +pygments.lexers.dylan.DylanLidLexer +pygments.lexers.ecl.ECLLexer 
+pygments.lexers.eiffel.EiffelLexer +pygments.lexers.dsls.ProtoBufLexer +pygments.lexers.dsls.ThriftLexer +pygments.lexers.dsls.BroLexer +pygments.lexers.dsls.PuppetLexer +pygments.lexers.dsls.RslLexer +pygments.lexers.dsls.MscgenLexer +pygments.lexers.dsls.VGLLexer +pygments.lexers.dsls.AlloyLexer +pygments.lexers.dsls.PanLexer +pygments.lexers.dsls.CrmshLexer +pygments.lexers.dsls.FlatlineLexer +pygments.lexers.dsls.SnowballLexer +pygments.lexers.elm.ElmLexer +pygments.lexers.erlang.ErlangLexer +pygments.lexers.erlang.ElixirLexer +pygments.lexers.ezhil.EzhilLexer +pygments.lexers.esoteric.BrainfuckLexer +pygments.lexers.esoteric.BefungeLexer +pygments.lexers.esoteric.CAmkESLexer +pygments.lexers.esoteric.CapDLLexer +pygments.lexers.esoteric.RedcodeLexer +pygments.lexers.esoteric.AheuiLexer +pygments.lexers.factor.FactorLexer +pygments.lexers.fantom.FantomLexer +pygments.lexers.felix.FelixLexer +pygments.lexers.forth.ForthLexer +pygments.lexers.fortran.FortranLexer +pygments.lexers.fortran.FortranFixedLexer +pygments.lexers.go.GoLexer +pygments.lexers.foxpro.FoxProLexer +pygments.lexers.graph.CypherLexer +pygments.lexers.grammar_notation.BnfLexer +pygments.lexers.grammar_notation.AbnfLexer +pygments.lexers.grammar_notation.JsgfLexer +pygments.lexers.graphics.GLShaderLexer +pygments.lexers.graphics.PostScriptLexer +pygments.lexers.graphics.AsymptoteLexer +pygments.lexers.graphics.GnuplotLexer +pygments.lexers.graphics.PovrayLexer +pygments.lexers.hexdump.HexdumpLexer +pygments.lexers.haskell.HaskellLexer +pygments.lexers.haskell.IdrisLexer +pygments.lexers.haskell.AgdaLexer +pygments.lexers.haskell.CryptolLexer +pygments.lexers.haskell.KokaLexer +pygments.lexers.haxe.HaxeLexer +pygments.lexers.haxe.HxmlLexer +pygments.lexers.hdl.VerilogLexer +pygments.lexers.hdl.SystemVerilogLexer +pygments.lexers.hdl.VhdlLexer +pygments.lexers.idl.IDLLexer +pygments.lexers.inferno.LimboLexer +pygments.lexers.igor.IgorLexer +pygments.lexers.html.HtmlLexer +pygments.lexers.html.DtdLexer +pygments.lexers.html.XmlLexer +pygments.lexers.html.HamlLexer +pygments.lexers.html.ScamlLexer +pygments.lexers.html.PugLexer +pygments.lexers.installers.NSISLexer +pygments.lexers.installers.RPMSpecLexer +pygments.lexers.installers.SourcesListLexer +pygments.lexers.installers.DebianControlLexer +pygments.lexers.iolang.IoLexer +pygments.lexers.julia.JuliaLexer +pygments.lexers.int_fiction.Inform6Lexer +pygments.lexers.int_fiction.Inform7Lexer +pygments.lexers.int_fiction.Tads3Lexer +pygments.lexers.make.BaseMakefileLexer +pygments.lexers.make.CMakeLexer +pygments.lexers.javascript.JavascriptLexer +pygments.lexers.javascript.KalLexer +pygments.lexers.javascript.LiveScriptLexer +pygments.lexers.javascript.DartLexer +pygments.lexers.javascript.TypeScriptLexer +pygments.lexers.javascript.LassoLexer +pygments.lexers.javascript.ObjectiveJLexer +pygments.lexers.javascript.CoffeeScriptLexer +pygments.lexers.javascript.MaskLexer +pygments.lexers.javascript.EarlGreyLexer +pygments.lexers.javascript.JuttleLexer +pygments.lexers.jvm.JavaLexer +pygments.lexers.jvm.ScalaLexer +pygments.lexers.jvm.GosuLexer +pygments.lexers.jvm.GroovyLexer +pygments.lexers.jvm.IokeLexer +pygments.lexers.jvm.ClojureLexer +pygments.lexers.jvm.TeaLangLexer +pygments.lexers.jvm.CeylonLexer +pygments.lexers.jvm.KotlinLexer +pygments.lexers.jvm.XtendLexer +pygments.lexers.jvm.PigLexer +pygments.lexers.jvm.GoloLexer +pygments.lexers.jvm.JasminLexer +pygments.lexers.markup.BBCodeLexer +pygments.lexers.markup.MoinWikiLexer +pygments.lexers.markup.RstLexer 
+pygments.lexers.markup.TexLexer +pygments.lexers.markup.GroffLexer +pygments.lexers.markup.MozPreprocHashLexer +pygments.lexers.markup.MarkdownLexer +pygments.lexers.ml.SMLLexer +pygments.lexers.ml.OcamlLexer +pygments.lexers.ml.OpaLexer +pygments.lexers.modeling.ModelicaLexer +pygments.lexers.modeling.BugsLexer +pygments.lexers.modeling.JagsLexer +pygments.lexers.modeling.StanLexer +pygments.lexers.matlab.MatlabLexer +pygments.lexers.matlab.OctaveLexer +pygments.lexers.matlab.ScilabLexer +pygments.lexers.monte.MonteLexer +pygments.lexers.lisp.SchemeLexer +pygments.lexers.lisp.CommonLispLexer +pygments.lexers.lisp.HyLexer +pygments.lexers.lisp.RacketLexer +pygments.lexers.lisp.NewLispLexer +pygments.lexers.lisp.EmacsLispLexer +pygments.lexers.lisp.ShenLexer +pygments.lexers.lisp.XtlangLexer +pygments.lexers.modula2.Modula2Lexer +pygments.lexers.ncl.NCLLexer +pygments.lexers.nim.NimLexer +pygments.lexers.nit.NitLexer +pygments.lexers.nix.NixLexer +pygments.lexers.oberon.ComponentPascalLexer +pygments.lexers.ooc.OocLexer +pygments.lexers.objective.SwiftLexer +pygments.lexers.parasail.ParaSailLexer +pygments.lexers.pawn.SourcePawnLexer +pygments.lexers.pawn.PawnLexer +pygments.lexers.pascal.AdaLexer +pygments.lexers.parsers.RagelLexer +pygments.lexers.parsers.RagelEmbeddedLexer +pygments.lexers.parsers.AntlrLexer +pygments.lexers.parsers.TreetopBaseLexer +pygments.lexers.parsers.EbnfLexer +pygments.lexers.php.ZephirLexer +pygments.lexers.php.PhpLexer +pygments.lexers.perl.PerlLexer +pygments.lexers.perl.Perl6Lexer +pygments.lexers.praat.PraatLexer +pygments.lexers.prolog.PrologLexer +pygments.lexers.prolog.LogtalkLexer +pygments.lexers.qvt.QVToLexer +pygments.lexers.rdf.SparqlLexer +pygments.lexers.rdf.TurtleLexer +pygments.lexers.python.PythonLexer +pygments.lexers.python.Python3Lexer +pygments.lexers.python.PythonTracebackLexer +pygments.lexers.python.Python3TracebackLexer +pygments.lexers.python.CythonLexer +pygments.lexers.python.DgLexer +pygments.lexers.rebol.RebolLexer +pygments.lexers.rebol.RedLexer +pygments.lexers.resource.ResourceLexer +pygments.lexers.rnc.RNCCompactLexer +pygments.lexers.roboconf.RoboconfGraphLexer +pygments.lexers.roboconf.RoboconfInstancesLexer +pygments.lexers.rust.RustLexer +pygments.lexers.ruby.RubyLexer +pygments.lexers.ruby.FancyLexer +pygments.lexers.sas.SASLexer +pygments.lexers.smalltalk.SmalltalkLexer +pygments.lexers.smalltalk.NewspeakLexer +pygments.lexers.smv.NuSMVLexer +pygments.lexers.shell.BashLexer +pygments.lexers.shell.BatchLexer +pygments.lexers.shell.TcshLexer +pygments.lexers.shell.PowerShellLexer +pygments.lexers.shell.FishShellLexer +pygments.lexers.snobol.SnobolLexer +pygments.lexers.scripting.LuaLexer +pygments.lexers.scripting.ChaiscriptLexer +pygments.lexers.scripting.LSLLexer +pygments.lexers.scripting.AppleScriptLexer +pygments.lexers.scripting.RexxLexer +pygments.lexers.scripting.MOOCodeLexer +pygments.lexers.scripting.HybrisLexer +pygments.lexers.scripting.EasytrieveLexer +pygments.lexers.scripting.JclLexer +pygments.lexers.supercollider.SuperColliderLexer +pygments.lexers.stata.StataLexer +pygments.lexers.tcl.TclLexer +pygments.lexers.sql.PostgresLexer +pygments.lexers.sql.PlPgsqlLexer +pygments.lexers.sql.PsqlRegexLexer +pygments.lexers.sql.SqlLexer +pygments.lexers.sql.TransactSqlLexer +pygments.lexers.sql.MySqlLexer +pygments.lexers.sql.RqlLexer +pygments.lexers.testing.GherkinLexer +pygments.lexers.testing.TAPLexer +pygments.lexers.textedit.AwkLexer +pygments.lexers.textedit.VimLexer +pygments.lexers.textfmts.IrcLogsLexer 
+pygments.lexers.textfmts.GettextLexer +pygments.lexers.textfmts.HttpLexer +pygments.lexers.textfmts.TodotxtLexer +pygments.lexers.trafficscript.RtsLexer +pygments.lexers.theorem.CoqLexer +pygments.lexers.theorem.IsabelleLexer +pygments.lexers.theorem.LeanLexer +pygments.lexers.templates.SmartyLexer +pygments.lexers.templates.VelocityLexer +pygments.lexers.templates.DjangoLexer +pygments.lexers.templates.MyghtyLexer +pygments.lexers.templates.MasonLexer +pygments.lexers.templates.MakoLexer +pygments.lexers.templates.CheetahLexer +pygments.lexers.templates.GenshiTextLexer +pygments.lexers.templates.GenshiMarkupLexer +pygments.lexers.templates.JspRootLexer +pygments.lexers.templates.EvoqueLexer +pygments.lexers.templates.ColdfusionLexer +pygments.lexers.templates.ColdfusionMarkupLexer +pygments.lexers.templates.TeaTemplateRootLexer +pygments.lexers.templates.HandlebarsLexer +pygments.lexers.templates.LiquidLexer +pygments.lexers.templates.TwigLexer +pygments.lexers.templates.Angular2Lexer +pygments.lexers.urbi.UrbiscriptLexer +pygments.lexers.typoscript.TypoScriptCssDataLexer +pygments.lexers.typoscript.TypoScriptHtmlDataLexer +pygments.lexers.typoscript.TypoScriptLexer +pygments.lexers.varnish.VCLLexer +pygments.lexers.verification.BoogieLexer +pygments.lexers.verification.SilverLexer +pygments.lexers.x10.X10Lexer +pygments.lexers.whiley.WhileyLexer +pygments.lexers.xorg.XorgLexer +pygments.lexers.webmisc.DuelLexer +pygments.lexers.webmisc.XQueryLexer +pygments.lexers.webmisc.QmlLexer +pygments.lexers.webmisc.CirruLexer +pygments.lexers.webmisc.SlimLexer diff --git a/vendor/github.com/alecthomas/chroma/regexp.go b/vendor/github.com/alecthomas/chroma/regexp.go new file mode 100644 index 000000000..4096dfc46 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/regexp.go @@ -0,0 +1,572 @@ +package chroma + +import ( + "fmt" + "os" + "path/filepath" + "regexp" + "sort" + "strings" + "sync" + "time" + "unicode/utf8" + + "github.com/dlclark/regexp2" +) + +// A Rule is the fundamental matching unit of the Regex lexer state machine. +type Rule struct { + Pattern string + Type Emitter + Mutator Mutator +} + +// An Emitter takes group matches and returns tokens. +type Emitter interface { + // Emit tokens for the given regex groups. + Emit(groups []string, state *LexerState) Iterator +} + +// EmitterFunc is a function that is an Emitter. +type EmitterFunc func(groups []string, state *LexerState) Iterator + +// Emit tokens for groups. +func (e EmitterFunc) Emit(groups []string, state *LexerState) Iterator { + return e(groups, state) +} + +// ByGroups emits a token for each matching group in the rule's regex. +func ByGroups(emitters ...Emitter) Emitter { + return EmitterFunc(func(groups []string, state *LexerState) Iterator { + iterators := make([]Iterator, 0, len(groups)-1) + if len(emitters) != len(groups)-1 { + iterators = append(iterators, Error.Emit(groups, state)) + // panic(errors.Errorf("number of groups %q does not match number of emitters %v", groups, emitters)) + } else { + for i, group := range groups[1:] { + if emitters[i] != nil { + iterators = append(iterators, emitters[i].Emit([]string{group}, state)) + } + } + } + return Concaterator(iterators...) + }) +} + +// ByGroupNames emits a token for each named matching group in the rule's regex. 
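ByGroups is what lets a single regex in the lexers above produce several differently typed tokens: each capture group is paired positionally with one emitter, and a mismatch between group count and emitter count degrades to an Error token rather than panicking. A hedged sketch of a rule using it, with the same dot-import as the vendored lexers (the rule itself is hypothetical):

package example

import . "github.com/alecthomas/chroma" // nolint

// funcHeader is a hypothetical rule: one match, three capture groups,
// three emitters, three tokens.
var funcHeader = Rule{
	Pattern: `(func)(\s+)([a-zA-Z_]\w*)`,
	Type:    ByGroups(KeywordDeclaration, TextWhitespace, NameFunction),
	Mutator: nil, // no state change
}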
+func ByGroupNames(emitters map[string]Emitter) Emitter { + return EmitterFunc(func(groups []string, state *LexerState) Iterator { + iterators := make([]Iterator, 0, len(state.NamedGroups)-1) + if len(state.NamedGroups)-1 == 0 { + if emitter, ok := emitters[`0`]; ok { + iterators = append(iterators, emitter.Emit(groups, state)) + } else { + iterators = append(iterators, Error.Emit(groups, state)) + } + } else { + ruleRegex := state.Rules[state.State][state.Rule].Regexp + for i := 1; i < len(state.NamedGroups); i++ { + groupName := ruleRegex.GroupNameFromNumber(i) + group := state.NamedGroups[groupName] + if emitter, ok := emitters[groupName]; ok { + if emitter != nil { + iterators = append(iterators, emitter.Emit([]string{group}, state)) + } + } else { + iterators = append(iterators, Error.Emit([]string{group}, state)) + } + } + } + return Concaterator(iterators...) + }) +} + +// UsingByGroup emits tokens for the matched groups in the regex using a +// "sublexer". Used when lexing code blocks where the name of a sublexer is +// contained within the block, for example on a Markdown text block or SQL +// language block. +// +// The sublexer will be retrieved using sublexerGetFunc (typically +// internal.Get), using the captured value from the matched sublexerNameGroup. +// +// If sublexerGetFunc returns a non-nil lexer for the captured sublexerNameGroup, +// then tokens for the matched codeGroup will be emitted using the retrieved +// lexer. Otherwise, if the sublexer is nil, then tokens will be emitted from +// the passed emitter. +// +// Example: +// +// var Markdown = internal.Register(MustNewLexer( +// &Config{ +// Name: "markdown", +// Aliases: []string{"md", "mkd"}, +// Filenames: []string{"*.md", "*.mkd", "*.markdown"}, +// MimeTypes: []string{"text/x-markdown"}, +// }, +// Rules{ +// "root": { +// {"^(```)(\\w+)(\\n)([\\w\\W]*?)(^```$)", +// UsingByGroup( +// internal.Get, +// 2, 4, +// String, String, String, Text, String, +// ), +// nil, +// }, +// }, +// }, +// )) +// +// See the lexers/m/markdown.go for the complete example. +// +// Note: panic's if the number emitters does not equal the number of matched +// groups in the regex. +func UsingByGroup(sublexerGetFunc func(string) Lexer, sublexerNameGroup, codeGroup int, emitters ...Emitter) Emitter { + return EmitterFunc(func(groups []string, state *LexerState) Iterator { + // bounds check + if len(emitters) != len(groups)-1 { + panic("UsingByGroup expects number of emitters to be the same as len(groups)-1") + } + + // grab sublexer + sublexer := sublexerGetFunc(groups[sublexerNameGroup]) + + // build iterators + iterators := make([]Iterator, len(groups)-1) + for i, group := range groups[1:] { + if i == codeGroup-1 && sublexer != nil { + var err error + iterators[i], err = sublexer.Tokenise(nil, groups[codeGroup]) + if err != nil { + panic(err) + } + } else if emitters[i] != nil { + iterators[i] = emitters[i].Emit([]string{group}, state) + } + } + + return Concaterator(iterators...) + }) +} + +// Using returns an Emitter that uses a given Lexer for parsing and emitting. +func Using(lexer Lexer) Emitter { + return EmitterFunc(func(groups []string, _ *LexerState) Iterator { + it, err := lexer.Tokenise(&TokeniseOptions{State: "root", Nested: true}, groups[0]) + if err != nil { + panic(err) + } + return it + }) +} + +// UsingSelf is like Using, but uses the current Lexer. 
+func UsingSelf(stateName string) Emitter { + return EmitterFunc(func(groups []string, state *LexerState) Iterator { + it, err := state.Lexer.Tokenise(&TokeniseOptions{State: stateName, Nested: true}, groups[0]) + if err != nil { + panic(err) + } + return it + }) +} + +// Words creates a regex that matches any of the given literal words. +func Words(prefix, suffix string, words ...string) string { + sort.Slice(words, func(i, j int) bool { + return len(words[j]) < len(words[i]) + }) + for i, word := range words { + words[i] = regexp.QuoteMeta(word) + } + return prefix + `(` + strings.Join(words, `|`) + `)` + suffix +} + +// Tokenise text using lexer, returning tokens as a slice. +func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]Token, error) { + var out []Token + it, err := lexer.Tokenise(options, text) + if err != nil { + return nil, err + } + for t := it(); t != EOF; t = it() { + out = append(out, t) + } + return out, nil +} + +// Rules maps from state to a sequence of Rules. +type Rules map[string][]Rule + +// Rename clones rules then a rule. +func (r Rules) Rename(oldRule, newRule string) Rules { + r = r.Clone() + r[newRule] = r[oldRule] + delete(r, oldRule) + return r +} + +// Clone returns a clone of the Rules. +func (r Rules) Clone() Rules { + out := map[string][]Rule{} + for key, rules := range r { + out[key] = make([]Rule, len(rules)) + copy(out[key], rules) + } + return out +} + +// Merge creates a clone of "r" then merges "rules" into the clone. +func (r Rules) Merge(rules Rules) Rules { + out := r.Clone() + for k, v := range rules.Clone() { + out[k] = v + } + return out +} + +// MustNewLazyLexer creates a new Lexer with deferred rules generation or panics. +func MustNewLazyLexer(config *Config, rulesFunc func() Rules) *RegexLexer { + lexer, err := NewLazyLexer(config, rulesFunc) + if err != nil { + panic(err) + } + return lexer +} + +// NewLazyLexer creates a new regex-based Lexer with deferred rules generation. +func NewLazyLexer(config *Config, rulesFunc func() Rules) (*RegexLexer, error) { + if config == nil { + config = &Config{} + } + for _, glob := range append(config.Filenames, config.AliasFilenames...) { + _, err := filepath.Match(glob, "") + if err != nil { + return nil, fmt.Errorf("%s: %q is not a valid glob: %w", config.Name, glob, err) + } + } + return &RegexLexer{ + config: config, + compilerFunc: rulesFunc, + }, nil +} + +// MustNewLexer creates a new Lexer or panics. +// +// Deprecated: Use MustNewLazyLexer instead. +func MustNewLexer(config *Config, rules Rules) *RegexLexer { // nolint: forbidigo + lexer, err := NewLexer(config, rules) // nolint: forbidigo + if err != nil { + panic(err) + } + return lexer +} + +// NewLexer creates a new regex-based Lexer. +// +// "rules" is a state machine transitition map. Each key is a state. Values are sets of rules +// that match input, optionally modify lexer state, and output tokens. +// +// Deprecated: Use NewLazyLexer instead. +func NewLexer(config *Config, rules Rules) (*RegexLexer, error) { // nolint: forbidigo + return NewLazyLexer(config, func() Rules { return rules }) +} + +// Trace enables debug tracing. +func (r *RegexLexer) Trace(trace bool) *RegexLexer { + r.trace = trace + return r +} + +// A CompiledRule is a Rule with a pre-compiled regex. +// +// Note that regular expressions are lazily compiled on first use of the lexer. +type CompiledRule struct { + Rule + Regexp *regexp2.Regexp + flags string +} + +// CompiledRules is a map of rule name to sequence of compiled rules in that rule. 
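Words above sorts its literals longest-first and regexp.QuoteMeta-escapes them before joining, which is why the large keyword alternations in the lexers earlier in this diff cannot be shadowed by a shorter prefix. A small sketch with illustrative keywords only:

package example

import . "github.com/alecthomas/chroma" // nolint

// keywordRule's pattern expands to roughly (interface|int|in)\b: Words sorts
// longer literals first, so "interface" is never cut short and tokenised as "in".
func keywordRule() Rule {
	return Rule{
		Pattern: Words(``, `\b`, `in`, `int`, `interface`),
		Type:    KeywordType,
	}
}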
+type CompiledRules map[string][]*CompiledRule
+
+// LexerState contains the state for a single lex.
+type LexerState struct {
+	Lexer *RegexLexer
+	Text  []rune
+	Pos   int
+	Rules CompiledRules
+	Stack []string
+	State string
+	Rule  int
+	// Group matches.
+	Groups []string
+	// Named Group matches.
+	NamedGroups map[string]string
+	// Custom context for mutators.
+	MutatorContext map[interface{}]interface{}
+	iteratorStack  []Iterator
+	options        *TokeniseOptions
+	newlineAdded   bool
+}
+
+// Set mutator context.
+func (l *LexerState) Set(key interface{}, value interface{}) {
+	l.MutatorContext[key] = value
+}
+
+// Get mutator context.
+func (l *LexerState) Get(key interface{}) interface{} {
+	return l.MutatorContext[key]
+}
+
+// Iterator returns the next Token from the lexer.
+func (l *LexerState) Iterator() Token { // nolint: gocognit
+	end := len(l.Text)
+	if l.newlineAdded {
+		end--
+	}
+	for l.Pos < end && len(l.Stack) > 0 {
+		// Exhaust the iterator stack, if any.
+		for len(l.iteratorStack) > 0 {
+			n := len(l.iteratorStack) - 1
+			t := l.iteratorStack[n]()
+			if t == EOF {
+				l.iteratorStack = l.iteratorStack[:n]
+				continue
+			}
+			return t
+		}
+
+		l.State = l.Stack[len(l.Stack)-1]
+		if l.Lexer.trace {
+			fmt.Fprintf(os.Stderr, "%s: pos=%d, text=%q\n", l.State, l.Pos, string(l.Text[l.Pos:]))
+		}
+		selectedRule, ok := l.Rules[l.State]
+		if !ok {
+			panic("unknown state " + l.State)
+		}
+		ruleIndex, rule, groups, namedGroups := matchRules(l.Text, l.Pos, selectedRule)
+		// No match.
+		if groups == nil {
+			// From Pygments :\
+			//
+			// If the RegexLexer encounters a newline that is flagged as an error token, the stack is
+			// emptied and the lexer continues scanning in the 'root' state. This can help produce
+			// error-tolerant highlighting for erroneous input, e.g. when a single-line string is not
+			// closed.
+			if l.Text[l.Pos] == '\n' && l.State != l.options.State {
+				l.Stack = []string{l.options.State}
+				continue
+			}
+			l.Pos++
+			return Token{Error, string(l.Text[l.Pos-1 : l.Pos])}
+		}
+		l.Rule = ruleIndex
+		l.Groups = groups
+		l.NamedGroups = namedGroups
+		l.Pos += utf8.RuneCountInString(groups[0])
+		if rule.Mutator != nil {
+			if err := rule.Mutator.Mutate(l); err != nil {
+				panic(err)
+			}
+		}
+		if rule.Type != nil {
+			l.iteratorStack = append(l.iteratorStack, rule.Type.Emit(l.Groups, l))
+		}
+	}
+	// Exhaust the IteratorStack, if any.
+	// Duplicate code, but eh.
+	for len(l.iteratorStack) > 0 {
+		n := len(l.iteratorStack) - 1
+		t := l.iteratorStack[n]()
+		if t == EOF {
+			l.iteratorStack = l.iteratorStack[:n]
+			continue
+		}
+		return t
+	}
+
+	// If we get to here and we still have text, return it as an error.
+	if l.Pos != len(l.Text) && len(l.Stack) == 0 {
+		value := string(l.Text[l.Pos:])
+		l.Pos = len(l.Text)
+		return Token{Type: Error, Value: value}
+	}
+	return EOF
+}
+
+// RegexLexer is the default lexer implementation used in Chroma.
+type RegexLexer struct {
+	config   *Config
+	analyser func(text string) float32
+	trace    bool
+
+	mu           sync.Mutex
+	compiled     bool
+	rules        map[string][]*CompiledRule
+	compilerFunc func() Rules
+	compileOnce  sync.Once
+}
+
+// SetAnalyser sets the analyser function used to perform content inspection.
+func (r *RegexLexer) SetAnalyser(analyser func(text string) float32) *RegexLexer { + r.analyser = analyser + return r +} + +func (r *RegexLexer) AnalyseText(text string) float32 { // nolint + if r.analyser != nil { + return r.analyser(text) + } + return 0.0 +} + +func (r *RegexLexer) Config() *Config { // nolint + return r.config +} + +// Regex compilation is deferred until the lexer is used. This is to avoid significant init() time costs. +func (r *RegexLexer) maybeCompile() (err error) { + r.mu.Lock() + defer r.mu.Unlock() + if r.compiled { + return nil + } + for state, rules := range r.rules { + for i, rule := range rules { + if rule.Regexp == nil { + pattern := "(?:" + rule.Pattern + ")" + if rule.flags != "" { + pattern = "(?" + rule.flags + ")" + pattern + } + pattern = `\G` + pattern + rule.Regexp, err = regexp2.Compile(pattern, regexp2.RE2) + if err != nil { + return fmt.Errorf("failed to compile rule %s.%d: %s", state, i, err) + } + rule.Regexp.MatchTimeout = time.Millisecond * 250 + } + } + } +restart: + seen := map[LexerMutator]bool{} + for state := range r.rules { + for i := 0; i < len(r.rules[state]); i++ { + rule := r.rules[state][i] + if compile, ok := rule.Mutator.(LexerMutator); ok { + if seen[compile] { + return fmt.Errorf("saw mutator %T twice; this should not happen", compile) + } + seen[compile] = true + if err := compile.MutateLexer(r.rules, state, i); err != nil { + return err + } + // Process the rules again in case the mutator added/removed rules. + // + // This sounds bad, but shouldn't be significant in practice. + goto restart + } + } + } + r.compiled = true + return nil +} + +func (r *RegexLexer) compileRules() error { + rules := r.compilerFunc() + if _, ok := rules["root"]; !ok { + return fmt.Errorf("no \"root\" state") + } + compiledRules := map[string][]*CompiledRule{} + for state, rules := range rules { + compiledRules[state] = nil + for _, rule := range rules { + flags := "" + if !r.config.NotMultiline { + flags += "m" + } + if r.config.CaseInsensitive { + flags += "i" + } + if r.config.DotAll { + flags += "s" + } + compiledRules[state] = append(compiledRules[state], &CompiledRule{Rule: rule, flags: flags}) + } + } + + r.rules = compiledRules + return nil +} + +func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { // nolint + var err error + if r.compilerFunc != nil { + r.compileOnce.Do(func() { + err = r.compileRules() + }) + } + if err != nil { + return nil, err + } + if err := r.maybeCompile(); err != nil { + return nil, err + } + if options == nil { + options = defaultOptions + } + if options.EnsureLF { + text = ensureLF(text) + } + newlineAdded := false + if !options.Nested && r.config.EnsureNL && !strings.HasSuffix(text, "\n") { + text += "\n" + newlineAdded = true + } + state := &LexerState{ + newlineAdded: newlineAdded, + options: options, + Lexer: r, + Text: []rune(text), + Stack: []string{options.State}, + Rules: r.rules, + MutatorContext: map[interface{}]interface{}{}, + } + return state.Iterator, nil +} + +func matchRules(text []rune, pos int, rules []*CompiledRule) (int, *CompiledRule, []string, map[string]string) { + for i, rule := range rules { + match, err := rule.Regexp.FindRunesMatchStartingAt(text, pos) + if match != nil && err == nil && match.Index == pos { + groups := []string{} + namedGroups := make(map[string]string) + for _, g := range match.Groups() { + namedGroups[g.Name] = g.String() + groups = append(groups, g.String()) + } + return i, rule, groups, namedGroups + } + } + return 0, 
&CompiledRule{}, nil, nil +} + +// replace \r and \r\n with \n +// same as strings.ReplaceAll but more efficient +func ensureLF(text string) string { + buf := make([]byte, len(text)) + var j int + for i := 0; i < len(text); i++ { + c := text[i] + if c == '\r' { + if i < len(text)-1 && text[i+1] == '\n' { + continue + } + c = '\n' + } + buf[j] = c + j++ + } + return string(buf[:j]) +} diff --git a/vendor/github.com/alecthomas/chroma/remap.go b/vendor/github.com/alecthomas/chroma/remap.go new file mode 100644 index 000000000..cfb5c3814 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/remap.go @@ -0,0 +1,80 @@ +package chroma + +type remappingLexer struct { + lexer Lexer + mapper func(Token) []Token +} + +// RemappingLexer remaps a token to a set of, potentially empty, tokens. +func RemappingLexer(lexer Lexer, mapper func(Token) []Token) Lexer { + return &remappingLexer{lexer, mapper} +} + +func (r *remappingLexer) Config() *Config { + return r.lexer.Config() +} + +func (r *remappingLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { + it, err := r.lexer.Tokenise(options, text) + if err != nil { + return nil, err + } + var buffer []Token + return func() Token { + for { + if len(buffer) > 0 { + t := buffer[0] + buffer = buffer[1:] + return t + } + t := it() + if t == EOF { + return t + } + buffer = r.mapper(t) + } + }, nil +} + +// TypeMapping defines type maps for the TypeRemappingLexer. +type TypeMapping []struct { + From, To TokenType + Words []string +} + +// TypeRemappingLexer remaps types of tokens coming from a parent Lexer. +// +// eg. Map "defvaralias" tokens of type NameVariable to NameFunction: +// +// mapping := TypeMapping{ +// {NameVariable, NameFunction, []string{"defvaralias"}, +// } +// lexer = TypeRemappingLexer(lexer, mapping) +func TypeRemappingLexer(lexer Lexer, mapping TypeMapping) Lexer { + // Lookup table for fast remapping. + lut := map[TokenType]map[string]TokenType{} + for _, rt := range mapping { + km, ok := lut[rt.From] + if !ok { + km = map[string]TokenType{} + lut[rt.From] = km + } + if len(rt.Words) == 0 { + km[""] = rt.To + } else { + for _, k := range rt.Words { + km[k] = rt.To + } + } + } + return RemappingLexer(lexer, func(t Token) []Token { + if k, ok := lut[t.Type]; ok { + if tt, ok := k[t.Value]; ok { + t.Type = tt + } else if tt, ok := k[""]; ok { + t.Type = tt + } + } + return []Token{t} + }) +} diff --git a/vendor/github.com/alecthomas/chroma/style.go b/vendor/github.com/alecthomas/chroma/style.go new file mode 100644 index 000000000..1319fc424 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/style.go @@ -0,0 +1,344 @@ +package chroma + +import ( + "fmt" + "strings" +) + +// Trilean value for StyleEntry value inheritance. +type Trilean uint8 + +// Trilean states. +const ( + Pass Trilean = iota + Yes + No +) + +func (t Trilean) String() string { + switch t { + case Yes: + return "Yes" + case No: + return "No" + default: + return "Pass" + } +} + +// Prefix returns s with "no" as a prefix if Trilean is no. +func (t Trilean) Prefix(s string) string { + if t == Yes { + return s + } else if t == No { + return "no" + s + } + return "" +} + +// A StyleEntry in the Style map. +type StyleEntry struct { + // Hex colours. 
+ Colour Colour + Background Colour + Border Colour + + Bold Trilean + Italic Trilean + Underline Trilean + NoInherit bool +} + +func (s StyleEntry) String() string { + out := []string{} + if s.Bold != Pass { + out = append(out, s.Bold.Prefix("bold")) + } + if s.Italic != Pass { + out = append(out, s.Italic.Prefix("italic")) + } + if s.Underline != Pass { + out = append(out, s.Underline.Prefix("underline")) + } + if s.NoInherit { + out = append(out, "noinherit") + } + if s.Colour.IsSet() { + out = append(out, s.Colour.String()) + } + if s.Background.IsSet() { + out = append(out, "bg:"+s.Background.String()) + } + if s.Border.IsSet() { + out = append(out, "border:"+s.Border.String()) + } + return strings.Join(out, " ") +} + +// Sub subtracts e from s where elements match. +func (s StyleEntry) Sub(e StyleEntry) StyleEntry { + out := StyleEntry{} + if e.Colour != s.Colour { + out.Colour = s.Colour + } + if e.Background != s.Background { + out.Background = s.Background + } + if e.Bold != s.Bold { + out.Bold = s.Bold + } + if e.Italic != s.Italic { + out.Italic = s.Italic + } + if e.Underline != s.Underline { + out.Underline = s.Underline + } + if e.Border != s.Border { + out.Border = s.Border + } + return out +} + +// Inherit styles from ancestors. +// +// Ancestors should be provided from oldest to newest. +func (s StyleEntry) Inherit(ancestors ...StyleEntry) StyleEntry { + out := s + for i := len(ancestors) - 1; i >= 0; i-- { + if out.NoInherit { + return out + } + ancestor := ancestors[i] + if !out.Colour.IsSet() { + out.Colour = ancestor.Colour + } + if !out.Background.IsSet() { + out.Background = ancestor.Background + } + if !out.Border.IsSet() { + out.Border = ancestor.Border + } + if out.Bold == Pass { + out.Bold = ancestor.Bold + } + if out.Italic == Pass { + out.Italic = ancestor.Italic + } + if out.Underline == Pass { + out.Underline = ancestor.Underline + } + } + return out +} + +func (s StyleEntry) IsZero() bool { + return s.Colour == 0 && s.Background == 0 && s.Border == 0 && s.Bold == Pass && s.Italic == Pass && + s.Underline == Pass && !s.NoInherit +} + +// A StyleBuilder is a mutable structure for building styles. +// +// Once built, a Style is immutable. +type StyleBuilder struct { + entries map[TokenType]string + name string + parent *Style +} + +func NewStyleBuilder(name string) *StyleBuilder { + return &StyleBuilder{name: name, entries: map[TokenType]string{}} +} + +func (s *StyleBuilder) AddAll(entries StyleEntries) *StyleBuilder { + for ttype, entry := range entries { + s.entries[ttype] = entry + } + return s +} + +func (s *StyleBuilder) Get(ttype TokenType) StyleEntry { + // This is less than ideal, but it's the price for having to check errors on each Add(). + entry, _ := ParseStyleEntry(s.entries[ttype]) + return entry.Inherit(s.parent.Get(ttype)) +} + +// Add an entry to the Style map. +// +// See http://pygments.org/docs/styles/#style-rules for details. 
+func (s *StyleBuilder) Add(ttype TokenType, entry string) *StyleBuilder { // nolint: gocyclo + s.entries[ttype] = entry + return s +} + +func (s *StyleBuilder) AddEntry(ttype TokenType, entry StyleEntry) *StyleBuilder { + s.entries[ttype] = entry.String() + return s +} + +func (s *StyleBuilder) Build() (*Style, error) { + style := &Style{ + Name: s.name, + entries: map[TokenType]StyleEntry{}, + parent: s.parent, + } + for ttype, descriptor := range s.entries { + entry, err := ParseStyleEntry(descriptor) + if err != nil { + return nil, fmt.Errorf("invalid entry for %s: %s", ttype, err) + } + style.entries[ttype] = entry + } + return style, nil +} + +// StyleEntries mapping TokenType to colour definition. +type StyleEntries map[TokenType]string + +// NewStyle creates a new style definition. +func NewStyle(name string, entries StyleEntries) (*Style, error) { + return NewStyleBuilder(name).AddAll(entries).Build() +} + +// MustNewStyle creates a new style or panics. +func MustNewStyle(name string, entries StyleEntries) *Style { + style, err := NewStyle(name, entries) + if err != nil { + panic(err) + } + return style +} + +// A Style definition. +// +// See http://pygments.org/docs/styles/ for details. Semantics are intended to be identical. +type Style struct { + Name string + entries map[TokenType]StyleEntry + parent *Style +} + +// Types that are styled. +func (s *Style) Types() []TokenType { + dedupe := map[TokenType]bool{} + for tt := range s.entries { + dedupe[tt] = true + } + if s.parent != nil { + for _, tt := range s.parent.Types() { + dedupe[tt] = true + } + } + out := make([]TokenType, 0, len(dedupe)) + for tt := range dedupe { + out = append(out, tt) + } + return out +} + +// Builder creates a mutable builder from this Style. +// +// The builder can then be safely modified. This is a cheap operation. +func (s *Style) Builder() *StyleBuilder { + return &StyleBuilder{ + name: s.Name, + entries: map[TokenType]string{}, + parent: s, + } +} + +// Has checks if an exact style entry match exists for a token type. +// +// This is distinct from Get() which will merge parent tokens. +func (s *Style) Has(ttype TokenType) bool { + return !s.get(ttype).IsZero() || s.synthesisable(ttype) +} + +// Get a style entry. Will try sub-category or category if an exact match is not found, and +// finally return the Background. +func (s *Style) Get(ttype TokenType) StyleEntry { + return s.get(ttype).Inherit( + s.get(Background), + s.get(Text), + s.get(ttype.Category()), + s.get(ttype.SubCategory())) +} + +func (s *Style) get(ttype TokenType) StyleEntry { + out := s.entries[ttype] + if out.IsZero() && s.parent != nil { + return s.parent.get(ttype) + } + if out.IsZero() && s.synthesisable(ttype) { + out = s.synthesise(ttype) + } + return out +} + +func (s *Style) synthesise(ttype TokenType) StyleEntry { + bg := s.get(Background) + text := StyleEntry{Colour: bg.Colour} + text.Colour = text.Colour.BrightenOrDarken(0.5) + + switch ttype { + // If we don't have a line highlight colour, make one that is 10% brighter/darker than the background. + case LineHighlight: + return StyleEntry{Background: bg.Background.BrightenOrDarken(0.1)} + + // If we don't have line numbers, use the text colour but 20% brighter/darker + case LineNumbers, LineNumbersTable: + return text + + default: + return StyleEntry{} + } +} + +func (s *Style) synthesisable(ttype TokenType) bool { + return ttype == LineHighlight || ttype == LineNumbers || ttype == LineNumbersTable +} + +// ParseStyleEntry parses a Pygments style entry. 
+func ParseStyleEntry(entry string) (StyleEntry, error) { // nolint: gocyclo + out := StyleEntry{} + parts := strings.Fields(entry) + for _, part := range parts { + switch { + case part == "italic": + out.Italic = Yes + case part == "noitalic": + out.Italic = No + case part == "bold": + out.Bold = Yes + case part == "nobold": + out.Bold = No + case part == "underline": + out.Underline = Yes + case part == "nounderline": + out.Underline = No + case part == "inherit": + out.NoInherit = false + case part == "noinherit": + out.NoInherit = true + case part == "bg:": + out.Background = 0 + case strings.HasPrefix(part, "bg:#"): + out.Background = ParseColour(part[3:]) + if !out.Background.IsSet() { + return StyleEntry{}, fmt.Errorf("invalid background colour %q", part) + } + case strings.HasPrefix(part, "border:#"): + out.Border = ParseColour(part[7:]) + if !out.Border.IsSet() { + return StyleEntry{}, fmt.Errorf("invalid border colour %q", part) + } + case strings.HasPrefix(part, "#"): + out.Colour = ParseColour(part) + if !out.Colour.IsSet() { + return StyleEntry{}, fmt.Errorf("invalid colour %q", part) + } + default: + return StyleEntry{}, fmt.Errorf("unknown style element %q", part) + } + } + return out, nil +} diff --git a/vendor/github.com/alecthomas/chroma/styles/abap.go b/vendor/github.com/alecthomas/chroma/styles/abap.go new file mode 100644 index 000000000..b6d07fb27 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/abap.go @@ -0,0 +1,18 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Abap style. +var Abap = Register(chroma.MustNewStyle("abap", chroma.StyleEntries{ + chroma.Comment: "italic #888", + chroma.CommentSpecial: "#888", + chroma.Keyword: "#00f", + chroma.OperatorWord: "#00f", + chroma.Name: "#000", + chroma.LiteralNumber: "#3af", + chroma.LiteralString: "#5a2", + chroma.Error: "#F00", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/algol.go b/vendor/github.com/alecthomas/chroma/styles/algol.go new file mode 100644 index 000000000..1e8a7b442 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/algol.go @@ -0,0 +1,25 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Algol style. +var Algol = Register(chroma.MustNewStyle("algol", chroma.StyleEntries{ + chroma.Comment: "italic #888", + chroma.CommentPreproc: "bold noitalic #888", + chroma.CommentSpecial: "bold noitalic #888", + chroma.Keyword: "underline bold", + chroma.KeywordDeclaration: "italic", + chroma.NameBuiltin: "bold italic", + chroma.NameBuiltinPseudo: "bold italic", + chroma.NameNamespace: "bold italic #666", + chroma.NameClass: "bold italic #666", + chroma.NameFunction: "bold italic #666", + chroma.NameVariable: "bold italic #666", + chroma.NameConstant: "bold italic #666", + chroma.OperatorWord: "bold", + chroma.LiteralString: "italic #666", + chroma.Error: "border:#FF0000", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/algol_nu.go b/vendor/github.com/alecthomas/chroma/styles/algol_nu.go new file mode 100644 index 000000000..f8c6f177e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/algol_nu.go @@ -0,0 +1,25 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// AlgolNu style. 
+var AlgolNu = Register(chroma.MustNewStyle("algol_nu", chroma.StyleEntries{ + chroma.Comment: "italic #888", + chroma.CommentPreproc: "bold noitalic #888", + chroma.CommentSpecial: "bold noitalic #888", + chroma.Keyword: "bold", + chroma.KeywordDeclaration: "italic", + chroma.NameBuiltin: "bold italic", + chroma.NameBuiltinPseudo: "bold italic", + chroma.NameNamespace: "bold italic #666", + chroma.NameClass: "bold italic #666", + chroma.NameFunction: "bold italic #666", + chroma.NameVariable: "bold italic #666", + chroma.NameConstant: "bold italic #666", + chroma.OperatorWord: "bold", + chroma.LiteralString: "italic #666", + chroma.Error: "border:#FF0000", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/api.go b/vendor/github.com/alecthomas/chroma/styles/api.go new file mode 100644 index 000000000..f3ce67398 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/api.go @@ -0,0 +1,37 @@ +package styles + +import ( + "sort" + + "github.com/alecthomas/chroma" +) + +// Registry of Styles. +var Registry = map[string]*chroma.Style{} + +// Fallback style. Reassign to change the default fallback style. +var Fallback = SwapOff + +// Register a chroma.Style. +func Register(style *chroma.Style) *chroma.Style { + Registry[style.Name] = style + return style +} + +// Names of all available styles. +func Names() []string { + out := []string{} + for name := range Registry { + out = append(out, name) + } + sort.Strings(out) + return out +} + +// Get named style, or Fallback. +func Get(name string) *chroma.Style { + if style, ok := Registry[name]; ok { + return style + } + return Fallback +} diff --git a/vendor/github.com/alecthomas/chroma/styles/arduino.go b/vendor/github.com/alecthomas/chroma/styles/arduino.go new file mode 100644 index 000000000..9e48fb4ae --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/arduino.go @@ -0,0 +1,25 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Arduino style. +var Arduino = Register(chroma.MustNewStyle("arduino", chroma.StyleEntries{ + chroma.Error: "#a61717", + chroma.Comment: "#95a5a6", + chroma.CommentPreproc: "#728E00", + chroma.Keyword: "#728E00", + chroma.KeywordConstant: "#00979D", + chroma.KeywordPseudo: "#00979D", + chroma.KeywordReserved: "#00979D", + chroma.KeywordType: "#00979D", + chroma.Operator: "#728E00", + chroma.Name: "#434f54", + chroma.NameBuiltin: "#728E00", + chroma.NameFunction: "#D35400", + chroma.NameOther: "#728E00", + chroma.LiteralNumber: "#8A7B52", + chroma.LiteralString: "#7F8C8D", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/autumn.go b/vendor/github.com/alecthomas/chroma/styles/autumn.go new file mode 100644 index 000000000..3966372b9 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/autumn.go @@ -0,0 +1,43 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Autumn style. 
+var Autumn = Register(chroma.MustNewStyle("autumn", chroma.StyleEntries{ + chroma.TextWhitespace: "#bbbbbb", + chroma.Comment: "italic #aaaaaa", + chroma.CommentPreproc: "noitalic #4c8317", + chroma.CommentSpecial: "italic #0000aa", + chroma.Keyword: "#0000aa", + chroma.KeywordType: "#00aaaa", + chroma.OperatorWord: "#0000aa", + chroma.NameBuiltin: "#00aaaa", + chroma.NameFunction: "#00aa00", + chroma.NameClass: "underline #00aa00", + chroma.NameNamespace: "underline #00aaaa", + chroma.NameVariable: "#aa0000", + chroma.NameConstant: "#aa0000", + chroma.NameEntity: "bold #800", + chroma.NameAttribute: "#1e90ff", + chroma.NameTag: "bold #1e90ff", + chroma.NameDecorator: "#888888", + chroma.LiteralString: "#aa5500", + chroma.LiteralStringSymbol: "#0000aa", + chroma.LiteralStringRegex: "#009999", + chroma.LiteralNumber: "#009999", + chroma.GenericHeading: "bold #000080", + chroma.GenericSubheading: "bold #800080", + chroma.GenericDeleted: "#aa0000", + chroma.GenericInserted: "#00aa00", + chroma.GenericError: "#aa0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "#555555", + chroma.GenericOutput: "#888888", + chroma.GenericTraceback: "#aa0000", + chroma.GenericUnderline: "underline", + chroma.Error: "#F00 bg:#FAA", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/base16-snazzy.go b/vendor/github.com/alecthomas/chroma/styles/base16-snazzy.go new file mode 100644 index 000000000..160c75be8 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/base16-snazzy.go @@ -0,0 +1,81 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Base16Snazzy style +var Base16Snazzy = Register(chroma.MustNewStyle("base16-snazzy", chroma.StyleEntries{ + chroma.Comment: "#78787e", + chroma.CommentHashbang: "#78787e", + chroma.CommentMultiline: "#78787e", + chroma.CommentPreproc: "#78787e", + chroma.CommentSingle: "#78787e", + chroma.CommentSpecial: "#78787e", + chroma.Generic: "#e2e4e5", + chroma.GenericDeleted: "#ff5c57", + chroma.GenericEmph: "#e2e4e5 underline", + chroma.GenericError: "#ff5c57", + chroma.GenericHeading: "#e2e4e5 bold", + chroma.GenericInserted: "#e2e4e5 bold", + chroma.GenericOutput: "#43454f", + chroma.GenericPrompt: "#e2e4e5", + chroma.GenericStrong: "#e2e4e5 italic", + chroma.GenericSubheading: "#e2e4e5 bold", + chroma.GenericTraceback: "#e2e4e5", + chroma.GenericUnderline: "underline", + chroma.Error: "#ff5c57", + chroma.Keyword: "#ff6ac1", + chroma.KeywordConstant: "#ff6ac1", + chroma.KeywordDeclaration: "#ff5c57", + chroma.KeywordNamespace: "#ff6ac1", + chroma.KeywordPseudo: "#ff6ac1", + chroma.KeywordReserved: "#ff6ac1", + chroma.KeywordType: "#9aedfe", + chroma.Literal: "#e2e4e5", + chroma.LiteralDate: "#e2e4e5", + chroma.Name: "#e2e4e5", + chroma.NameAttribute: "#57c7ff", + chroma.NameBuiltin: "#ff5c57", + chroma.NameBuiltinPseudo: "#e2e4e5", + chroma.NameClass: "#f3f99d", + chroma.NameConstant: "#ff9f43", + chroma.NameDecorator: "#ff9f43", + chroma.NameEntity: "#e2e4e5", + chroma.NameException: "#e2e4e5", + chroma.NameFunction: "#57c7ff", + chroma.NameLabel: "#ff5c57", + chroma.NameNamespace: "#e2e4e5", + chroma.NameOther: "#e2e4e5", + chroma.NameTag: "#ff6ac1", + chroma.NameVariable: "#ff5c57", + chroma.NameVariableClass: "#ff5c57", + chroma.NameVariableGlobal: "#ff5c57", + chroma.NameVariableInstance: "#ff5c57", + chroma.LiteralNumber: "#ff9f43", + chroma.LiteralNumberBin: "#ff9f43", + chroma.LiteralNumberFloat: "#ff9f43", + chroma.LiteralNumberHex: "#ff9f43", + 
chroma.LiteralNumberInteger: "#ff9f43", + chroma.LiteralNumberIntegerLong: "#ff9f43", + chroma.LiteralNumberOct: "#ff9f43", + chroma.Operator: "#ff6ac1", + chroma.OperatorWord: "#ff6ac1", + chroma.Other: "#e2e4e5", + chroma.Punctuation: "#e2e4e5", + chroma.LiteralString: "#5af78e", + chroma.LiteralStringBacktick: "#5af78e", + chroma.LiteralStringChar: "#5af78e", + chroma.LiteralStringDoc: "#5af78e", + chroma.LiteralStringDouble: "#5af78e", + chroma.LiteralStringEscape: "#5af78e", + chroma.LiteralStringHeredoc: "#5af78e", + chroma.LiteralStringInterpol: "#5af78e", + chroma.LiteralStringOther: "#5af78e", + chroma.LiteralStringRegex: "#5af78e", + chroma.LiteralStringSingle: "#5af78e", + chroma.LiteralStringSymbol: "#5af78e", + chroma.Text: "#e2e4e5", + chroma.TextWhitespace: "#e2e4e5", + chroma.Background: " bg:#282a36", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/borland.go b/vendor/github.com/alecthomas/chroma/styles/borland.go new file mode 100644 index 000000000..9c0fff6fc --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/borland.go @@ -0,0 +1,33 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Borland style. +var Borland = Register(chroma.MustNewStyle("borland", chroma.StyleEntries{ + chroma.TextWhitespace: "#bbbbbb", + chroma.Comment: "italic #008800", + chroma.CommentPreproc: "noitalic #008080", + chroma.CommentSpecial: "noitalic bold", + chroma.LiteralString: "#0000FF", + chroma.LiteralStringChar: "#800080", + chroma.LiteralNumber: "#0000FF", + chroma.Keyword: "bold #000080", + chroma.OperatorWord: "bold", + chroma.NameTag: "bold #000080", + chroma.NameAttribute: "#FF0000", + chroma.GenericHeading: "#999999", + chroma.GenericSubheading: "#aaaaaa", + chroma.GenericDeleted: "bg:#ffdddd #000000", + chroma.GenericInserted: "bg:#ddffdd #000000", + chroma.GenericError: "#aa0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "#555555", + chroma.GenericOutput: "#888888", + chroma.GenericTraceback: "#aa0000", + chroma.GenericUnderline: "underline", + chroma.Error: "bg:#e3d2d2 #a61717", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/bw.go b/vendor/github.com/alecthomas/chroma/styles/bw.go new file mode 100644 index 000000000..3e800d5a7 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/bw.go @@ -0,0 +1,30 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// BlackWhite style. 
+var BlackWhite = Register(chroma.MustNewStyle("bw", chroma.StyleEntries{ + chroma.Comment: "italic", + chroma.CommentPreproc: "noitalic", + chroma.Keyword: "bold", + chroma.KeywordPseudo: "nobold", + chroma.KeywordType: "nobold", + chroma.OperatorWord: "bold", + chroma.NameClass: "bold", + chroma.NameNamespace: "bold", + chroma.NameException: "bold", + chroma.NameEntity: "bold", + chroma.NameTag: "bold", + chroma.LiteralString: "italic", + chroma.LiteralStringInterpol: "bold", + chroma.LiteralStringEscape: "bold", + chroma.GenericHeading: "bold", + chroma.GenericSubheading: "bold", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "bold", + chroma.Error: "border:#FF0000", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/colorful.go b/vendor/github.com/alecthomas/chroma/styles/colorful.go new file mode 100644 index 000000000..dc77c5bfe --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/colorful.go @@ -0,0 +1,59 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Colorful style. +var Colorful = Register(chroma.MustNewStyle("colorful", chroma.StyleEntries{ + chroma.TextWhitespace: "#bbbbbb", + chroma.Comment: "#888", + chroma.CommentPreproc: "#579", + chroma.CommentSpecial: "bold #cc0000", + chroma.Keyword: "bold #080", + chroma.KeywordPseudo: "#038", + chroma.KeywordType: "#339", + chroma.Operator: "#333", + chroma.OperatorWord: "bold #000", + chroma.NameBuiltin: "#007020", + chroma.NameFunction: "bold #06B", + chroma.NameClass: "bold #B06", + chroma.NameNamespace: "bold #0e84b5", + chroma.NameException: "bold #F00", + chroma.NameVariable: "#963", + chroma.NameVariableInstance: "#33B", + chroma.NameVariableClass: "#369", + chroma.NameVariableGlobal: "bold #d70", + chroma.NameConstant: "bold #036", + chroma.NameLabel: "bold #970", + chroma.NameEntity: "bold #800", + chroma.NameAttribute: "#00C", + chroma.NameTag: "#070", + chroma.NameDecorator: "bold #555", + chroma.LiteralString: "bg:#fff0f0", + chroma.LiteralStringChar: "#04D bg:", + chroma.LiteralStringDoc: "#D42 bg:", + chroma.LiteralStringInterpol: "bg:#eee", + chroma.LiteralStringEscape: "bold #666", + chroma.LiteralStringRegex: "bg:#fff0ff #000", + chroma.LiteralStringSymbol: "#A60 bg:", + chroma.LiteralStringOther: "#D20", + chroma.LiteralNumber: "bold #60E", + chroma.LiteralNumberInteger: "bold #00D", + chroma.LiteralNumberFloat: "bold #60E", + chroma.LiteralNumberHex: "bold #058", + chroma.LiteralNumberOct: "bold #40E", + chroma.GenericHeading: "bold #000080", + chroma.GenericSubheading: "bold #800080", + chroma.GenericDeleted: "#A00000", + chroma.GenericInserted: "#00A000", + chroma.GenericError: "#FF0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "bold #c65d09", + chroma.GenericOutput: "#888", + chroma.GenericTraceback: "#04D", + chroma.GenericUnderline: "underline", + chroma.Error: "#F00 bg:#FAA", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/doom-one.go b/vendor/github.com/alecthomas/chroma/styles/doom-one.go new file mode 100644 index 000000000..645045550 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/doom-one.go @@ -0,0 +1,58 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Doom One style. 
Inspired by Atom One and Doom Emacs's Atom One theme +var DoomOne = Register(chroma.MustNewStyle("doom-one", chroma.StyleEntries{ + chroma.Text: "#b0c4de", + chroma.Error: "#b0c4de", + chroma.Comment: "italic #8a93a5", + chroma.CommentHashbang: "bold", + chroma.Keyword: "#c678dd", + chroma.KeywordType: "#ef8383", + chroma.KeywordConstant: "bold #b756ff", + chroma.Operator: "#c7bf54", + chroma.OperatorWord: "bold #b756ff", + chroma.Punctuation: "#b0c4de", + chroma.Name: "#c1abea", + chroma.NameAttribute: "#b3d23c", + chroma.NameBuiltin: "#ef8383", + chroma.NameClass: "#76a9f9", + chroma.NameConstant: "bold #b756ff", + chroma.NameDecorator: "#e5c07b", + chroma.NameEntity: "#bda26f", + chroma.NameException: "bold #fd7474", + chroma.NameFunction: "#00b1f7", + chroma.NameProperty: "#cebc3a", + chroma.NameLabel: "#f5a40d", + chroma.NameNamespace: "#76a9f9", + chroma.NameTag: "#e06c75", + chroma.NameVariable: "#DCAEEA", + chroma.NameVariableGlobal: "bold #DCAEEA", + chroma.NameVariableInstance: "#e06c75", + chroma.Literal: "#98c379", + chroma.Number: "#d19a66", + chroma.String: "#98c379", + chroma.StringDoc: "#7e97c3", + chroma.StringDouble: "#63c381", + chroma.StringEscape: "bold #d26464", + chroma.StringHeredoc: "#98c379", + chroma.StringInterpol: "#98c379", + chroma.StringOther: "#70b33f", + chroma.StringRegex: "#56b6c2", + chroma.StringSingle: "#98c379", + chroma.StringSymbol: "#56b6c2", + chroma.Generic: "#b0c4de", + chroma.GenericEmph: "italic", + chroma.GenericHeading: "bold #a2cbff", + chroma.GenericInserted: "#a6e22e", + chroma.GenericOutput: "#a6e22e", + chroma.GenericUnderline: "underline", + chroma.GenericPrompt: "#a6e22e", + chroma.GenericStrong: "bold", + chroma.GenericSubheading: "#a2cbff", + chroma.GenericTraceback: "#a2cbff", + chroma.Background: "#b0c4de bg:#282c34", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/doom-one2.go b/vendor/github.com/alecthomas/chroma/styles/doom-one2.go new file mode 100644 index 000000000..465417391 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/doom-one2.go @@ -0,0 +1,71 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Doom One 2 style. 
Inspired by Atom One and Doom Emacs's Atom One theme +var DoomOne2 = Register(chroma.MustNewStyle("doom-one2", chroma.StyleEntries{ + chroma.Text: "#b0c4de", + chroma.Error: "#b0c4de", + chroma.Comment: "italic #8a93a5", + chroma.CommentHashbang: "bold", + chroma.Keyword: "#76a9f9", + chroma.KeywordConstant: "#e5c07b", + chroma.KeywordType: "#e5c07b", + chroma.Operator: "#54b1c7", + chroma.OperatorWord: "bold #b756ff", + chroma.Punctuation: "#abb2bf", + chroma.Name: "#aa89ea", + chroma.NameAttribute: "#cebc3a", + chroma.NameBuiltin: "#e5c07b", + chroma.NameClass: "#ca72ff", + chroma.NameConstant: "bold", + chroma.NameDecorator: "#e5c07b", + chroma.NameEntity: "#bda26f", + chroma.NameException: "bold #fd7474", + chroma.NameFunction: "#00b1f7", + chroma.NameProperty: "#cebc3a", + chroma.NameLabel: "#f5a40d", + chroma.NameNamespace: "#ca72ff", + chroma.NameTag: "#76a9f9", + chroma.NameVariable: "#DCAEEA", + chroma.NameVariableClass: "#DCAEEA", + chroma.NameVariableGlobal: "bold #DCAEEA", + chroma.NameVariableInstance: "#e06c75", + chroma.NameVariableMagic: "#DCAEEA", + chroma.Literal: "#98c379", + chroma.LiteralDate: "#98c379", + chroma.Number: "#d19a66", + chroma.NumberBin: "#d19a66", + chroma.NumberFloat: "#d19a66", + chroma.NumberHex: "#d19a66", + chroma.NumberInteger: "#d19a66", + chroma.NumberIntegerLong: "#d19a66", + chroma.NumberOct: "#d19a66", + chroma.String: "#98c379", + chroma.StringAffix: "#98c379", + chroma.StringBacktick: "#98c379", + chroma.StringDelimiter: "#98c379", + chroma.StringDoc: "#7e97c3", + chroma.StringDouble: "#63c381", + chroma.StringEscape: "bold #d26464", + chroma.StringHeredoc: "#98c379", + chroma.StringInterpol: "#98c379", + chroma.StringOther: "#70b33f", + chroma.StringRegex: "#56b6c2", + chroma.StringSingle: "#98c379", + chroma.StringSymbol: "#56b6c2", + chroma.Generic: "#b0c4de", + chroma.GenericDeleted: "#b0c4de", + chroma.GenericEmph: "italic", + chroma.GenericHeading: "bold #a2cbff", + chroma.GenericInserted: "#a6e22e", + chroma.GenericOutput: "#a6e22e", + chroma.GenericUnderline: "underline", + chroma.GenericPrompt: "#a6e22e", + chroma.GenericStrong: "bold", + chroma.GenericSubheading: "#a2cbff", + chroma.GenericTraceback: "#a2cbff", + chroma.Background: "#b0c4de bg:#282c34", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/dracula.go b/vendor/github.com/alecthomas/chroma/styles/dracula.go new file mode 100644 index 000000000..d1542f271 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/dracula.go @@ -0,0 +1,81 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Dracula Style +var Dracula = Register(chroma.MustNewStyle("dracula", chroma.StyleEntries{ + chroma.Comment: "#6272a4", + chroma.CommentHashbang: "#6272a4", + chroma.CommentMultiline: "#6272a4", + chroma.CommentPreproc: "#ff79c6", + chroma.CommentSingle: "#6272a4", + chroma.CommentSpecial: "#6272a4", + chroma.Generic: "#f8f8f2", + chroma.GenericDeleted: "#ff5555", + chroma.GenericEmph: "#f8f8f2 underline", + chroma.GenericError: "#f8f8f2", + chroma.GenericHeading: "#f8f8f2 bold", + chroma.GenericInserted: "#50fa7b bold", + chroma.GenericOutput: "#44475a", + chroma.GenericPrompt: "#f8f8f2", + chroma.GenericStrong: "#f8f8f2", + chroma.GenericSubheading: "#f8f8f2 bold", + chroma.GenericTraceback: "#f8f8f2", + chroma.GenericUnderline: "underline", + chroma.Error: "#f8f8f2", + chroma.Keyword: "#ff79c6", + chroma.KeywordConstant: "#ff79c6", + chroma.KeywordDeclaration: "#8be9fd italic", + chroma.KeywordNamespace: "#ff79c6", + chroma.KeywordPseudo: "#ff79c6", 
+ chroma.KeywordReserved: "#ff79c6", + chroma.KeywordType: "#8be9fd", + chroma.Literal: "#f8f8f2", + chroma.LiteralDate: "#f8f8f2", + chroma.Name: "#f8f8f2", + chroma.NameAttribute: "#50fa7b", + chroma.NameBuiltin: "#8be9fd italic", + chroma.NameBuiltinPseudo: "#f8f8f2", + chroma.NameClass: "#50fa7b", + chroma.NameConstant: "#f8f8f2", + chroma.NameDecorator: "#f8f8f2", + chroma.NameEntity: "#f8f8f2", + chroma.NameException: "#f8f8f2", + chroma.NameFunction: "#50fa7b", + chroma.NameLabel: "#8be9fd italic", + chroma.NameNamespace: "#f8f8f2", + chroma.NameOther: "#f8f8f2", + chroma.NameTag: "#ff79c6", + chroma.NameVariable: "#8be9fd italic", + chroma.NameVariableClass: "#8be9fd italic", + chroma.NameVariableGlobal: "#8be9fd italic", + chroma.NameVariableInstance: "#8be9fd italic", + chroma.LiteralNumber: "#bd93f9", + chroma.LiteralNumberBin: "#bd93f9", + chroma.LiteralNumberFloat: "#bd93f9", + chroma.LiteralNumberHex: "#bd93f9", + chroma.LiteralNumberInteger: "#bd93f9", + chroma.LiteralNumberIntegerLong: "#bd93f9", + chroma.LiteralNumberOct: "#bd93f9", + chroma.Operator: "#ff79c6", + chroma.OperatorWord: "#ff79c6", + chroma.Other: "#f8f8f2", + chroma.Punctuation: "#f8f8f2", + chroma.LiteralString: "#f1fa8c", + chroma.LiteralStringBacktick: "#f1fa8c", + chroma.LiteralStringChar: "#f1fa8c", + chroma.LiteralStringDoc: "#f1fa8c", + chroma.LiteralStringDouble: "#f1fa8c", + chroma.LiteralStringEscape: "#f1fa8c", + chroma.LiteralStringHeredoc: "#f1fa8c", + chroma.LiteralStringInterpol: "#f1fa8c", + chroma.LiteralStringOther: "#f1fa8c", + chroma.LiteralStringRegex: "#f1fa8c", + chroma.LiteralStringSingle: "#f1fa8c", + chroma.LiteralStringSymbol: "#f1fa8c", + chroma.Text: "#f8f8f2", + chroma.TextWhitespace: "#f8f8f2", + chroma.Background: " bg:#282a36", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/emacs.go b/vendor/github.com/alecthomas/chroma/styles/emacs.go new file mode 100644 index 000000000..4835abd71 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/emacs.go @@ -0,0 +1,51 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Emacs style. 
+var Emacs = Register(chroma.MustNewStyle("emacs", chroma.StyleEntries{ + chroma.TextWhitespace: "#bbbbbb", + chroma.Comment: "italic #008800", + chroma.CommentPreproc: "noitalic", + chroma.CommentSpecial: "noitalic bold", + chroma.Keyword: "bold #AA22FF", + chroma.KeywordPseudo: "nobold", + chroma.KeywordType: "bold #00BB00", + chroma.Operator: "#666666", + chroma.OperatorWord: "bold #AA22FF", + chroma.NameBuiltin: "#AA22FF", + chroma.NameFunction: "#00A000", + chroma.NameClass: "#0000FF", + chroma.NameNamespace: "bold #0000FF", + chroma.NameException: "bold #D2413A", + chroma.NameVariable: "#B8860B", + chroma.NameConstant: "#880000", + chroma.NameLabel: "#A0A000", + chroma.NameEntity: "bold #999999", + chroma.NameAttribute: "#BB4444", + chroma.NameTag: "bold #008000", + chroma.NameDecorator: "#AA22FF", + chroma.LiteralString: "#BB4444", + chroma.LiteralStringDoc: "italic", + chroma.LiteralStringInterpol: "bold #BB6688", + chroma.LiteralStringEscape: "bold #BB6622", + chroma.LiteralStringRegex: "#BB6688", + chroma.LiteralStringSymbol: "#B8860B", + chroma.LiteralStringOther: "#008000", + chroma.LiteralNumber: "#666666", + chroma.GenericHeading: "bold #000080", + chroma.GenericSubheading: "bold #800080", + chroma.GenericDeleted: "#A00000", + chroma.GenericInserted: "#00A000", + chroma.GenericError: "#FF0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "bold #000080", + chroma.GenericOutput: "#888", + chroma.GenericTraceback: "#04D", + chroma.GenericUnderline: "underline", + chroma.Error: "border:#FF0000", + chroma.Background: " bg:#f8f8f8", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/friendly.go b/vendor/github.com/alecthomas/chroma/styles/friendly.go new file mode 100644 index 000000000..ad02341d3 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/friendly.go @@ -0,0 +1,51 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Friendly style. 
+var Friendly = Register(chroma.MustNewStyle("friendly", chroma.StyleEntries{ + chroma.TextWhitespace: "#bbbbbb", + chroma.Comment: "italic #60a0b0", + chroma.CommentPreproc: "noitalic #007020", + chroma.CommentSpecial: "noitalic bg:#fff0f0", + chroma.Keyword: "bold #007020", + chroma.KeywordPseudo: "nobold", + chroma.KeywordType: "nobold #902000", + chroma.Operator: "#666666", + chroma.OperatorWord: "bold #007020", + chroma.NameBuiltin: "#007020", + chroma.NameFunction: "#06287e", + chroma.NameClass: "bold #0e84b5", + chroma.NameNamespace: "bold #0e84b5", + chroma.NameException: "#007020", + chroma.NameVariable: "#bb60d5", + chroma.NameConstant: "#60add5", + chroma.NameLabel: "bold #002070", + chroma.NameEntity: "bold #d55537", + chroma.NameAttribute: "#4070a0", + chroma.NameTag: "bold #062873", + chroma.NameDecorator: "bold #555555", + chroma.LiteralString: "#4070a0", + chroma.LiteralStringDoc: "italic", + chroma.LiteralStringInterpol: "italic #70a0d0", + chroma.LiteralStringEscape: "bold #4070a0", + chroma.LiteralStringRegex: "#235388", + chroma.LiteralStringSymbol: "#517918", + chroma.LiteralStringOther: "#c65d09", + chroma.LiteralNumber: "#40a070", + chroma.GenericHeading: "bold #000080", + chroma.GenericSubheading: "bold #800080", + chroma.GenericDeleted: "#A00000", + chroma.GenericInserted: "#00A000", + chroma.GenericError: "#FF0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "bold #c65d09", + chroma.GenericOutput: "#888", + chroma.GenericTraceback: "#04D", + chroma.GenericUnderline: "underline", + chroma.Error: "border:#FF0000", + chroma.Background: " bg:#f0f0f0", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/fruity.go b/vendor/github.com/alecthomas/chroma/styles/fruity.go new file mode 100644 index 000000000..c2577fa27 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/fruity.go @@ -0,0 +1,26 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Fruity style. +var Fruity = Register(chroma.MustNewStyle("fruity", chroma.StyleEntries{ + chroma.TextWhitespace: "#888888", + chroma.Background: "#ffffff bg:#111111", + chroma.GenericOutput: "#444444 bg:#222222", + chroma.Keyword: "#fb660a bold", + chroma.KeywordPseudo: "nobold", + chroma.LiteralNumber: "#0086f7 bold", + chroma.NameTag: "#fb660a bold", + chroma.NameVariable: "#fb660a", + chroma.Comment: "#008800 bg:#0f140f italic", + chroma.NameAttribute: "#ff0086 bold", + chroma.LiteralString: "#0086d2", + chroma.NameFunction: "#ff0086 bold", + chroma.GenericHeading: "#ffffff bold", + chroma.KeywordType: "#cdcaa9 bold", + chroma.GenericSubheading: "#ffffff bold", + chroma.NameConstant: "#0086d2", + chroma.CommentPreproc: "#ff0007 bold", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/github.go b/vendor/github.com/alecthomas/chroma/styles/github.go new file mode 100644 index 000000000..7ef2481ca --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/github.go @@ -0,0 +1,51 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// GitHub style. 
+var GitHub = Register(chroma.MustNewStyle("github", chroma.StyleEntries{ + chroma.CommentMultiline: "italic #999988", + chroma.CommentPreproc: "bold #999999", + chroma.CommentSingle: "italic #999988", + chroma.CommentSpecial: "bold italic #999999", + chroma.Comment: "italic #999988", + chroma.Error: "bg:#e3d2d2 #a61717", + chroma.GenericDeleted: "bg:#ffdddd #000000", + chroma.GenericEmph: "italic #000000", + chroma.GenericError: "#aa0000", + chroma.GenericHeading: "#999999", + chroma.GenericInserted: "bg:#ddffdd #000000", + chroma.GenericOutput: "#888888", + chroma.GenericPrompt: "#555555", + chroma.GenericStrong: "bold", + chroma.GenericSubheading: "#aaaaaa", + chroma.GenericTraceback: "#aa0000", + chroma.GenericUnderline: "underline", + chroma.KeywordType: "bold #445588", + chroma.Keyword: "bold #000000", + chroma.LiteralNumber: "#009999", + chroma.LiteralStringRegex: "#009926", + chroma.LiteralStringSymbol: "#990073", + chroma.LiteralString: "#d14", + chroma.NameAttribute: "#008080", + chroma.NameBuiltinPseudo: "#999999", + chroma.NameBuiltin: "#0086B3", + chroma.NameClass: "bold #445588", + chroma.NameConstant: "#008080", + chroma.NameDecorator: "bold #3c5d5d", + chroma.NameEntity: "#800080", + chroma.NameException: "bold #990000", + chroma.NameFunction: "bold #990000", + chroma.NameLabel: "bold #990000", + chroma.NameNamespace: "#555555", + chroma.NameTag: "#000080", + chroma.NameVariableClass: "#008080", + chroma.NameVariableGlobal: "#008080", + chroma.NameVariableInstance: "#008080", + chroma.NameVariable: "#008080", + chroma.Operator: "bold #000000", + chroma.TextWhitespace: "#bbbbbb", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/hr_dark.go b/vendor/github.com/alecthomas/chroma/styles/hr_dark.go new file mode 100644 index 000000000..10bb64fbd --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/hr_dark.go @@ -0,0 +1,17 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Theme based on HackerRank Dark Editor theme +var HrDark = Register(chroma.MustNewStyle("hrdark", chroma.StyleEntries{ + chroma.Comment: "italic #828b96", + chroma.Keyword: "#ff636f", + chroma.OperatorWord: "#ff636f", + chroma.Name: "#58a1dd", + chroma.Literal: "#a6be9d", + chroma.Operator: "#ff636f", + chroma.Background: "#1d2432", + chroma.Other: "#fff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/hr_high_contrast.go b/vendor/github.com/alecthomas/chroma/styles/hr_high_contrast.go new file mode 100644 index 000000000..d1988589c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/hr_high_contrast.go @@ -0,0 +1,19 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Theme based on HackerRank High Contrast Editor Theme +var HrHighContrast = Register(chroma.MustNewStyle("hr_high_contrast", chroma.StyleEntries{ + chroma.Comment: "#5a8349", + chroma.Keyword: "#467faf", + chroma.OperatorWord: "#467faf", + chroma.Name: "#ffffff", + chroma.LiteralString: "#a87662", + chroma.LiteralNumber: "#fff", + chroma.LiteralStringBoolean: "#467faf", + chroma.Operator: "#e4e400", + chroma.Background: "#000", + chroma.Other: "#d5d500", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/igor.go b/vendor/github.com/alecthomas/chroma/styles/igor.go new file mode 100644 index 000000000..6a6d4cd08 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/igor.go @@ -0,0 +1,16 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Igor style. 
+var Igor = Register(chroma.MustNewStyle("igor", chroma.StyleEntries{ + chroma.Comment: "italic #FF0000", + chroma.Keyword: "#0000FF", + chroma.NameFunction: "#C34E00", + chroma.NameDecorator: "#CC00A3", + chroma.NameClass: "#007575", + chroma.LiteralString: "#009C00", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/lovelace.go b/vendor/github.com/alecthomas/chroma/styles/lovelace.go new file mode 100644 index 000000000..074cc0896 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/lovelace.go @@ -0,0 +1,60 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Lovelace style. +var Lovelace = Register(chroma.MustNewStyle("lovelace", chroma.StyleEntries{ + chroma.TextWhitespace: "#a89028", + chroma.Comment: "italic #888888", + chroma.CommentHashbang: "#287088", + chroma.CommentMultiline: "#888888", + chroma.CommentPreproc: "noitalic #289870", + chroma.Keyword: "#2838b0", + chroma.KeywordConstant: "italic #444444", + chroma.KeywordDeclaration: "italic", + chroma.KeywordType: "italic", + chroma.Operator: "#666666", + chroma.OperatorWord: "#a848a8", + chroma.Punctuation: "#888888", + chroma.NameAttribute: "#388038", + chroma.NameBuiltin: "#388038", + chroma.NameBuiltinPseudo: "italic", + chroma.NameClass: "#287088", + chroma.NameConstant: "#b85820", + chroma.NameDecorator: "#287088", + chroma.NameEntity: "#709030", + chroma.NameException: "#908828", + chroma.NameFunction: "#785840", + chroma.NameFunctionMagic: "#b85820", + chroma.NameLabel: "#289870", + chroma.NameNamespace: "#289870", + chroma.NameTag: "#2838b0", + chroma.NameVariable: "#b04040", + chroma.NameVariableGlobal: "#908828", + chroma.NameVariableMagic: "#b85820", + chroma.LiteralString: "#b83838", + chroma.LiteralStringAffix: "#444444", + chroma.LiteralStringChar: "#a848a8", + chroma.LiteralStringDelimiter: "#b85820", + chroma.LiteralStringDoc: "italic #b85820", + chroma.LiteralStringEscape: "#709030", + chroma.LiteralStringInterpol: "underline", + chroma.LiteralStringOther: "#a848a8", + chroma.LiteralStringRegex: "#a848a8", + chroma.LiteralNumber: "#444444", + chroma.GenericDeleted: "#c02828", + chroma.GenericEmph: "italic", + chroma.GenericError: "#c02828", + chroma.GenericHeading: "#666666", + chroma.GenericSubheading: "#444444", + chroma.GenericInserted: "#388038", + chroma.GenericOutput: "#666666", + chroma.GenericPrompt: "#444444", + chroma.GenericStrong: "bold", + chroma.GenericTraceback: "#2838b0", + chroma.GenericUnderline: "underline", + chroma.Error: "bg:#a848a8", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/manni.go b/vendor/github.com/alecthomas/chroma/styles/manni.go new file mode 100644 index 000000000..9942e7d09 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/manni.go @@ -0,0 +1,51 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Manni style. 
+var Manni = Register(chroma.MustNewStyle("manni", chroma.StyleEntries{ + chroma.TextWhitespace: "#bbbbbb", + chroma.Comment: "italic #0099FF", + chroma.CommentPreproc: "noitalic #009999", + chroma.CommentSpecial: "bold", + chroma.Keyword: "bold #006699", + chroma.KeywordPseudo: "nobold", + chroma.KeywordType: "#007788", + chroma.Operator: "#555555", + chroma.OperatorWord: "bold #000000", + chroma.NameBuiltin: "#336666", + chroma.NameFunction: "#CC00FF", + chroma.NameClass: "bold #00AA88", + chroma.NameNamespace: "bold #00CCFF", + chroma.NameException: "bold #CC0000", + chroma.NameVariable: "#003333", + chroma.NameConstant: "#336600", + chroma.NameLabel: "#9999FF", + chroma.NameEntity: "bold #999999", + chroma.NameAttribute: "#330099", + chroma.NameTag: "bold #330099", + chroma.NameDecorator: "#9999FF", + chroma.LiteralString: "#CC3300", + chroma.LiteralStringDoc: "italic", + chroma.LiteralStringInterpol: "#AA0000", + chroma.LiteralStringEscape: "bold #CC3300", + chroma.LiteralStringRegex: "#33AAAA", + chroma.LiteralStringSymbol: "#FFCC33", + chroma.LiteralStringOther: "#CC3300", + chroma.LiteralNumber: "#FF6600", + chroma.GenericHeading: "bold #003300", + chroma.GenericSubheading: "bold #003300", + chroma.GenericDeleted: "border:#CC0000 bg:#FFCCCC", + chroma.GenericInserted: "border:#00CC00 bg:#CCFFCC", + chroma.GenericError: "#FF0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "bold #000099", + chroma.GenericOutput: "#AAAAAA", + chroma.GenericTraceback: "#99CC66", + chroma.GenericUnderline: "underline", + chroma.Error: "bg:#FFAAAA #AA0000", + chroma.Background: " bg:#f0f3f3", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/monokai.go b/vendor/github.com/alecthomas/chroma/styles/monokai.go new file mode 100644 index 000000000..2586795ac --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/monokai.go @@ -0,0 +1,36 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Monokai style. +var Monokai = Register(chroma.MustNewStyle("monokai", chroma.StyleEntries{ + chroma.Text: "#f8f8f2", + chroma.Error: "#960050 bg:#1e0010", + chroma.Comment: "#75715e", + chroma.Keyword: "#66d9ef", + chroma.KeywordNamespace: "#f92672", + chroma.Operator: "#f92672", + chroma.Punctuation: "#f8f8f2", + chroma.Name: "#f8f8f2", + chroma.NameAttribute: "#a6e22e", + chroma.NameClass: "#a6e22e", + chroma.NameConstant: "#66d9ef", + chroma.NameDecorator: "#a6e22e", + chroma.NameException: "#a6e22e", + chroma.NameFunction: "#a6e22e", + chroma.NameOther: "#a6e22e", + chroma.NameTag: "#f92672", + chroma.LiteralNumber: "#ae81ff", + chroma.Literal: "#ae81ff", + chroma.LiteralDate: "#e6db74", + chroma.LiteralString: "#e6db74", + chroma.LiteralStringEscape: "#ae81ff", + chroma.GenericDeleted: "#f92672", + chroma.GenericEmph: "italic", + chroma.GenericInserted: "#a6e22e", + chroma.GenericStrong: "bold", + chroma.GenericSubheading: "#75715e", + chroma.Background: "bg:#272822", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/monokailight.go b/vendor/github.com/alecthomas/chroma/styles/monokailight.go new file mode 100644 index 000000000..61818a680 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/monokailight.go @@ -0,0 +1,33 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// MonokaiLight style. 
+var MonokaiLight = Register(chroma.MustNewStyle("monokailight", chroma.StyleEntries{ + chroma.Text: "#272822", + chroma.Error: "#960050 bg:#1e0010", + chroma.Comment: "#75715e", + chroma.Keyword: "#00a8c8", + chroma.KeywordNamespace: "#f92672", + chroma.Operator: "#f92672", + chroma.Punctuation: "#111111", + chroma.Name: "#111111", + chroma.NameAttribute: "#75af00", + chroma.NameClass: "#75af00", + chroma.NameConstant: "#00a8c8", + chroma.NameDecorator: "#75af00", + chroma.NameException: "#75af00", + chroma.NameFunction: "#75af00", + chroma.NameOther: "#75af00", + chroma.NameTag: "#f92672", + chroma.LiteralNumber: "#ae81ff", + chroma.Literal: "#ae81ff", + chroma.LiteralDate: "#d88200", + chroma.LiteralString: "#d88200", + chroma.LiteralStringEscape: "#8045FF", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.Background: " bg:#fafafa", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/murphy.go b/vendor/github.com/alecthomas/chroma/styles/murphy.go new file mode 100644 index 000000000..90e83c76a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/murphy.go @@ -0,0 +1,59 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Murphy style. +var Murphy = Register(chroma.MustNewStyle("murphy", chroma.StyleEntries{ + chroma.TextWhitespace: "#bbbbbb", + chroma.Comment: "#666 italic", + chroma.CommentPreproc: "#579 noitalic", + chroma.CommentSpecial: "#c00 bold", + chroma.Keyword: "bold #289", + chroma.KeywordPseudo: "#08f", + chroma.KeywordType: "#66f", + chroma.Operator: "#333", + chroma.OperatorWord: "bold #000", + chroma.NameBuiltin: "#072", + chroma.NameFunction: "bold #5ed", + chroma.NameClass: "bold #e9e", + chroma.NameNamespace: "bold #0e84b5", + chroma.NameException: "bold #F00", + chroma.NameVariable: "#036", + chroma.NameVariableInstance: "#aaf", + chroma.NameVariableClass: "#ccf", + chroma.NameVariableGlobal: "#f84", + chroma.NameConstant: "bold #5ed", + chroma.NameLabel: "bold #970", + chroma.NameEntity: "#800", + chroma.NameAttribute: "#007", + chroma.NameTag: "#070", + chroma.NameDecorator: "bold #555", + chroma.LiteralString: "bg:#e0e0ff", + chroma.LiteralStringChar: "#88F bg:", + chroma.LiteralStringDoc: "#D42 bg:", + chroma.LiteralStringInterpol: "bg:#eee", + chroma.LiteralStringEscape: "bold #666", + chroma.LiteralStringRegex: "bg:#e0e0ff #000", + chroma.LiteralStringSymbol: "#fc8 bg:", + chroma.LiteralStringOther: "#f88", + chroma.LiteralNumber: "bold #60E", + chroma.LiteralNumberInteger: "bold #66f", + chroma.LiteralNumberFloat: "bold #60E", + chroma.LiteralNumberHex: "bold #058", + chroma.LiteralNumberOct: "bold #40E", + chroma.GenericHeading: "bold #000080", + chroma.GenericSubheading: "bold #800080", + chroma.GenericDeleted: "#A00000", + chroma.GenericInserted: "#00A000", + chroma.GenericError: "#FF0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "bold #c65d09", + chroma.GenericOutput: "#888", + chroma.GenericTraceback: "#04D", + chroma.GenericUnderline: "underline", + chroma.Error: "#F00 bg:#FAA", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/native.go b/vendor/github.com/alecthomas/chroma/styles/native.go new file mode 100644 index 000000000..9fae09aca --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/native.go @@ -0,0 +1,42 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Native style. 
+var Native = Register(chroma.MustNewStyle("native", chroma.StyleEntries{ + chroma.Background: "#d0d0d0 bg:#202020", + chroma.TextWhitespace: "#666666", + chroma.Comment: "italic #999999", + chroma.CommentPreproc: "noitalic bold #cd2828", + chroma.CommentSpecial: "noitalic bold #e50808 bg:#520000", + chroma.Keyword: "bold #6ab825", + chroma.KeywordPseudo: "nobold", + chroma.OperatorWord: "bold #6ab825", + chroma.LiteralString: "#ed9d13", + chroma.LiteralStringOther: "#ffa500", + chroma.LiteralNumber: "#3677a9", + chroma.NameBuiltin: "#24909d", + chroma.NameVariable: "#40ffff", + chroma.NameConstant: "#40ffff", + chroma.NameClass: "underline #447fcf", + chroma.NameFunction: "#447fcf", + chroma.NameNamespace: "underline #447fcf", + chroma.NameException: "#bbbbbb", + chroma.NameTag: "bold #6ab825", + chroma.NameAttribute: "#bbbbbb", + chroma.NameDecorator: "#ffa500", + chroma.GenericHeading: "bold #ffffff", + chroma.GenericSubheading: "underline #ffffff", + chroma.GenericDeleted: "#d22323", + chroma.GenericInserted: "#589819", + chroma.GenericError: "#d22323", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "#aaaaaa", + chroma.GenericOutput: "#cccccc", + chroma.GenericTraceback: "#d22323", + chroma.GenericUnderline: "underline", + chroma.Error: "bg:#e3d2d2 #a61717", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/nord.go b/vendor/github.com/alecthomas/chroma/styles/nord.go new file mode 100644 index 000000000..0fcbc5d40 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/nord.go @@ -0,0 +1,75 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +var ( + // colors and palettes based on https://www.nordtheme.com/docs/colors-and-palettes + nord0 = "#2e3440" + nord1 = "#3b4252" // nolint + nord2 = "#434c5e" // nolint + nord3 = "#4c566a" + nord3b = "#616e87" + + nord4 = "#d8dee9" + nord5 = "#e5e9f0" // nolint + nord6 = "#eceff4" + + nord7 = "#8fbcbb" + nord8 = "#88c0d0" + nord9 = "#81a1c1" + nord10 = "#5e81ac" + + nord11 = "#bf616a" + nord12 = "#d08770" + nord13 = "#ebcb8b" + nord14 = "#a3be8c" + nord15 = "#b48ead" +) + +// Nord, an arctic, north-bluish color palette +var Nord = Register(chroma.MustNewStyle("nord", chroma.StyleEntries{ + chroma.TextWhitespace: nord4, + chroma.Comment: "italic " + nord3b, + chroma.CommentPreproc: nord10, + chroma.Keyword: "bold " + nord9, + chroma.KeywordPseudo: "nobold " + nord9, + chroma.KeywordType: "nobold " + nord9, + chroma.Operator: nord9, + chroma.OperatorWord: "bold " + nord9, + chroma.Name: nord4, + chroma.NameBuiltin: nord9, + chroma.NameFunction: nord8, + chroma.NameClass: nord7, + chroma.NameNamespace: nord7, + chroma.NameException: nord11, + chroma.NameVariable: nord4, + chroma.NameConstant: nord7, + chroma.NameLabel: nord7, + chroma.NameEntity: nord12, + chroma.NameAttribute: nord7, + chroma.NameTag: nord9, + chroma.NameDecorator: nord12, + chroma.Punctuation: nord6, + chroma.LiteralString: nord14, + chroma.LiteralStringDoc: nord3b, + chroma.LiteralStringInterpol: nord14, + chroma.LiteralStringEscape: nord13, + chroma.LiteralStringRegex: nord13, + chroma.LiteralStringSymbol: nord14, + chroma.LiteralStringOther: nord14, + chroma.LiteralNumber: nord15, + chroma.GenericHeading: "bold " + nord8, + chroma.GenericSubheading: "bold " + nord8, + chroma.GenericDeleted: nord11, + chroma.GenericInserted: nord14, + chroma.GenericError: nord11, + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "bold " + nord3, + chroma.GenericOutput: nord4, + 
chroma.GenericTraceback: nord11, + chroma.Error: nord11, + chroma.Background: nord4 + " bg:" + nord0, +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/onesenterprise.go b/vendor/github.com/alecthomas/chroma/styles/onesenterprise.go new file mode 100644 index 000000000..9048e38ad --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/onesenterprise.go @@ -0,0 +1,17 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// 1S:Designer color palette +var OnesEnterprise = Register(chroma.MustNewStyle("onesenterprise", chroma.StyleEntries{ + chroma.Text: "#000000", + chroma.Comment: "#008000", + chroma.CommentPreproc: "#963200", + chroma.Operator: "#FF0000", + chroma.Keyword: "#FF0000", + chroma.Punctuation: "#FF0000", + chroma.LiteralString: "#000000", + chroma.Name: "#0000FF", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/paraiso-dark.go b/vendor/github.com/alecthomas/chroma/styles/paraiso-dark.go new file mode 100644 index 000000000..c8cf47310 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/paraiso-dark.go @@ -0,0 +1,44 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// ParaisoDark style. +var ParaisoDark = Register(chroma.MustNewStyle("paraiso-dark", chroma.StyleEntries{ + chroma.Text: "#e7e9db", + chroma.Error: "#ef6155", + chroma.Comment: "#776e71", + chroma.Keyword: "#815ba4", + chroma.KeywordNamespace: "#5bc4bf", + chroma.KeywordType: "#fec418", + chroma.Operator: "#5bc4bf", + chroma.Punctuation: "#e7e9db", + chroma.Name: "#e7e9db", + chroma.NameAttribute: "#06b6ef", + chroma.NameClass: "#fec418", + chroma.NameConstant: "#ef6155", + chroma.NameDecorator: "#5bc4bf", + chroma.NameException: "#ef6155", + chroma.NameFunction: "#06b6ef", + chroma.NameNamespace: "#fec418", + chroma.NameOther: "#06b6ef", + chroma.NameTag: "#5bc4bf", + chroma.NameVariable: "#ef6155", + chroma.LiteralNumber: "#f99b15", + chroma.Literal: "#f99b15", + chroma.LiteralDate: "#48b685", + chroma.LiteralString: "#48b685", + chroma.LiteralStringChar: "#e7e9db", + chroma.LiteralStringDoc: "#776e71", + chroma.LiteralStringEscape: "#f99b15", + chroma.LiteralStringInterpol: "#f99b15", + chroma.GenericDeleted: "#ef6155", + chroma.GenericEmph: "italic", + chroma.GenericHeading: "bold #e7e9db", + chroma.GenericInserted: "#48b685", + chroma.GenericPrompt: "bold #776e71", + chroma.GenericStrong: "bold", + chroma.GenericSubheading: "bold #5bc4bf", + chroma.Background: "bg:#2f1e2e", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/paraiso-light.go b/vendor/github.com/alecthomas/chroma/styles/paraiso-light.go new file mode 100644 index 000000000..b514dfa16 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/paraiso-light.go @@ -0,0 +1,44 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// ParaisoLight style. 
+var ParaisoLight = Register(chroma.MustNewStyle("paraiso-light", chroma.StyleEntries{ + chroma.Text: "#2f1e2e", + chroma.Error: "#ef6155", + chroma.Comment: "#8d8687", + chroma.Keyword: "#815ba4", + chroma.KeywordNamespace: "#5bc4bf", + chroma.KeywordType: "#fec418", + chroma.Operator: "#5bc4bf", + chroma.Punctuation: "#2f1e2e", + chroma.Name: "#2f1e2e", + chroma.NameAttribute: "#06b6ef", + chroma.NameClass: "#fec418", + chroma.NameConstant: "#ef6155", + chroma.NameDecorator: "#5bc4bf", + chroma.NameException: "#ef6155", + chroma.NameFunction: "#06b6ef", + chroma.NameNamespace: "#fec418", + chroma.NameOther: "#06b6ef", + chroma.NameTag: "#5bc4bf", + chroma.NameVariable: "#ef6155", + chroma.LiteralNumber: "#f99b15", + chroma.Literal: "#f99b15", + chroma.LiteralDate: "#48b685", + chroma.LiteralString: "#48b685", + chroma.LiteralStringChar: "#2f1e2e", + chroma.LiteralStringDoc: "#8d8687", + chroma.LiteralStringEscape: "#f99b15", + chroma.LiteralStringInterpol: "#f99b15", + chroma.GenericDeleted: "#ef6155", + chroma.GenericEmph: "italic", + chroma.GenericHeading: "bold #2f1e2e", + chroma.GenericInserted: "#48b685", + chroma.GenericPrompt: "bold #8d8687", + chroma.GenericStrong: "bold", + chroma.GenericSubheading: "bold #5bc4bf", + chroma.Background: "bg:#e7e9db", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/pastie.go b/vendor/github.com/alecthomas/chroma/styles/pastie.go new file mode 100644 index 000000000..9a6854439 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/pastie.go @@ -0,0 +1,52 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Pastie style. +var Pastie = Register(chroma.MustNewStyle("pastie", chroma.StyleEntries{ + chroma.TextWhitespace: "#bbbbbb", + chroma.Comment: "#888888", + chroma.CommentPreproc: "bold #cc0000", + chroma.CommentSpecial: "bg:#fff0f0 bold #cc0000", + chroma.LiteralString: "bg:#fff0f0 #dd2200", + chroma.LiteralStringRegex: "bg:#fff0ff #008800", + chroma.LiteralStringOther: "bg:#f0fff0 #22bb22", + chroma.LiteralStringSymbol: "#aa6600", + chroma.LiteralStringInterpol: "#3333bb", + chroma.LiteralStringEscape: "#0044dd", + chroma.OperatorWord: "#008800", + chroma.Keyword: "bold #008800", + chroma.KeywordPseudo: "nobold", + chroma.KeywordType: "#888888", + chroma.NameClass: "bold #bb0066", + chroma.NameException: "bold #bb0066", + chroma.NameFunction: "bold #0066bb", + chroma.NameProperty: "bold #336699", + chroma.NameNamespace: "bold #bb0066", + chroma.NameBuiltin: "#003388", + chroma.NameVariable: "#336699", + chroma.NameVariableClass: "#336699", + chroma.NameVariableInstance: "#3333bb", + chroma.NameVariableGlobal: "#dd7700", + chroma.NameConstant: "bold #003366", + chroma.NameTag: "bold #bb0066", + chroma.NameAttribute: "#336699", + chroma.NameDecorator: "#555555", + chroma.NameLabel: "italic #336699", + chroma.LiteralNumber: "bold #0000DD", + chroma.GenericHeading: "#333", + chroma.GenericSubheading: "#666", + chroma.GenericDeleted: "bg:#ffdddd #000000", + chroma.GenericInserted: "bg:#ddffdd #000000", + chroma.GenericError: "#aa0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "#555555", + chroma.GenericOutput: "#888888", + chroma.GenericTraceback: "#aa0000", + chroma.GenericUnderline: "underline", + chroma.Error: "bg:#e3d2d2 #a61717", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/perldoc.go b/vendor/github.com/alecthomas/chroma/styles/perldoc.go new file mode 100644 index 000000000..e1372fdfb --- /dev/null +++ 
b/vendor/github.com/alecthomas/chroma/styles/perldoc.go @@ -0,0 +1,44 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Perldoc style. +var Perldoc = Register(chroma.MustNewStyle("perldoc", chroma.StyleEntries{ + chroma.TextWhitespace: "#bbbbbb", + chroma.Comment: "#228B22", + chroma.CommentPreproc: "#1e889b", + chroma.CommentSpecial: "#8B008B bold", + chroma.LiteralString: "#CD5555", + chroma.LiteralStringHeredoc: "#1c7e71 italic", + chroma.LiteralStringRegex: "#1c7e71", + chroma.LiteralStringOther: "#cb6c20", + chroma.LiteralNumber: "#B452CD", + chroma.OperatorWord: "#8B008B", + chroma.Keyword: "#8B008B bold", + chroma.KeywordType: "#00688B", + chroma.NameClass: "#008b45 bold", + chroma.NameException: "#008b45 bold", + chroma.NameFunction: "#008b45", + chroma.NameNamespace: "#008b45 underline", + chroma.NameVariable: "#00688B", + chroma.NameConstant: "#00688B", + chroma.NameDecorator: "#707a7c", + chroma.NameTag: "#8B008B bold", + chroma.NameAttribute: "#658b00", + chroma.NameBuiltin: "#658b00", + chroma.GenericHeading: "bold #000080", + chroma.GenericSubheading: "bold #800080", + chroma.GenericDeleted: "#aa0000", + chroma.GenericInserted: "#00aa00", + chroma.GenericError: "#aa0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "#555555", + chroma.GenericOutput: "#888888", + chroma.GenericTraceback: "#aa0000", + chroma.GenericUnderline: "underline", + chroma.Error: "bg:#e3d2d2 #a61717", + chroma.Background: " bg:#eeeedd", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/pygments.go b/vendor/github.com/alecthomas/chroma/styles/pygments.go new file mode 100644 index 000000000..327033b71 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/pygments.go @@ -0,0 +1,55 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Pygments default theme. 
+var Pygments = Register(chroma.MustNewStyle("pygments", chroma.StyleEntries{ + chroma.Whitespace: "#bbbbbb", + chroma.Comment: "italic #408080", + chroma.CommentPreproc: "noitalic #BC7A00", + + chroma.Keyword: "bold #008000", + chroma.KeywordPseudo: "nobold", + chroma.KeywordType: "nobold #B00040", + + chroma.Operator: "#666666", + chroma.OperatorWord: "bold #AA22FF", + + chroma.NameBuiltin: "#008000", + chroma.NameFunction: "#0000FF", + chroma.NameClass: "bold #0000FF", + chroma.NameNamespace: "bold #0000FF", + chroma.NameException: "bold #D2413A", + chroma.NameVariable: "#19177C", + chroma.NameConstant: "#880000", + chroma.NameLabel: "#A0A000", + chroma.NameEntity: "bold #999999", + chroma.NameAttribute: "#7D9029", + chroma.NameTag: "bold #008000", + chroma.NameDecorator: "#AA22FF", + + chroma.String: "#BA2121", + chroma.StringDoc: "italic", + chroma.StringInterpol: "bold #BB6688", + chroma.StringEscape: "bold #BB6622", + chroma.StringRegex: "#BB6688", + chroma.StringSymbol: "#19177C", + chroma.StringOther: "#008000", + chroma.Number: "#666666", + + chroma.GenericHeading: "bold #000080", + chroma.GenericSubheading: "bold #800080", + chroma.GenericDeleted: "#A00000", + chroma.GenericInserted: "#00A000", + chroma.GenericError: "#FF0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "bold #000080", + chroma.GenericOutput: "#888", + chroma.GenericTraceback: "#04D", + chroma.GenericUnderline: "underline", + + chroma.Error: "border:#FF0000", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/rainbow_dash.go b/vendor/github.com/alecthomas/chroma/styles/rainbow_dash.go new file mode 100644 index 000000000..37d66ca25 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/rainbow_dash.go @@ -0,0 +1,47 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// RainbowDash style. 
+var RainbowDash = Register(chroma.MustNewStyle("rainbow_dash", chroma.StyleEntries{ + chroma.Comment: "italic #0080ff", + chroma.CommentPreproc: "noitalic", + chroma.CommentSpecial: "bold", + chroma.Error: "bg:#cc0000 #ffffff", + chroma.GenericDeleted: "border:#c5060b bg:#ffcccc", + chroma.GenericEmph: "italic", + chroma.GenericError: "#ff0000", + chroma.GenericHeading: "bold #2c5dcd", + chroma.GenericInserted: "border:#00cc00 bg:#ccffcc", + chroma.GenericOutput: "#aaaaaa", + chroma.GenericPrompt: "bold #2c5dcd", + chroma.GenericStrong: "bold", + chroma.GenericSubheading: "bold #2c5dcd", + chroma.GenericTraceback: "#c5060b", + chroma.GenericUnderline: "underline", + chroma.Keyword: "bold #2c5dcd", + chroma.KeywordPseudo: "nobold", + chroma.KeywordType: "#5918bb", + chroma.NameAttribute: "italic #2c5dcd", + chroma.NameBuiltin: "bold #5918bb", + chroma.NameClass: "underline", + chroma.NameConstant: "#318495", + chroma.NameDecorator: "bold #ff8000", + chroma.NameEntity: "bold #5918bb", + chroma.NameException: "bold #5918bb", + chroma.NameFunction: "bold #ff8000", + chroma.NameTag: "bold #2c5dcd", + chroma.LiteralNumber: "bold #5918bb", + chroma.Operator: "#2c5dcd", + chroma.OperatorWord: "bold", + chroma.LiteralString: "#00cc66", + chroma.LiteralStringDoc: "italic", + chroma.LiteralStringEscape: "bold #c5060b", + chroma.LiteralStringOther: "#318495", + chroma.LiteralStringSymbol: "bold #c5060b", + chroma.Text: "#4d4d4d", + chroma.TextWhitespace: "#cbcbcb", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/rrt.go b/vendor/github.com/alecthomas/chroma/styles/rrt.go new file mode 100644 index 000000000..2ccf2cadf --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/rrt.go @@ -0,0 +1,20 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Rrt style. +var Rrt = Register(chroma.MustNewStyle("rrt", chroma.StyleEntries{ + chroma.CommentPreproc: "#e5e5e5", + chroma.Comment: "#00ff00", + chroma.KeywordType: "#ee82ee", + chroma.Keyword: "#ff0000", + chroma.LiteralNumber: "#ff6600", + chroma.LiteralStringSymbol: "#ff6600", + chroma.LiteralString: "#87ceeb", + chroma.NameFunction: "#ffff00", + chroma.NameConstant: "#7fffd4", + chroma.NameVariable: "#eedd82", + chroma.Background: "#f8f8f2 bg:#000000", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/solarized-dark.go b/vendor/github.com/alecthomas/chroma/styles/solarized-dark.go new file mode 100644 index 000000000..2724df24e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/solarized-dark.go @@ -0,0 +1,46 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// SolarizedDark style. 
+var SolarizedDark = Register(chroma.MustNewStyle("solarized-dark", chroma.StyleEntries{ + chroma.Keyword: "#719e07", + chroma.KeywordConstant: "#CB4B16", + chroma.KeywordDeclaration: "#268BD2", + chroma.KeywordReserved: "#268BD2", + chroma.KeywordType: "#DC322F", + chroma.NameAttribute: "#93A1A1", + chroma.NameBuiltin: "#B58900", + chroma.NameBuiltinPseudo: "#268BD2", + chroma.NameClass: "#268BD2", + chroma.NameConstant: "#CB4B16", + chroma.NameDecorator: "#268BD2", + chroma.NameEntity: "#CB4B16", + chroma.NameException: "#CB4B16", + chroma.NameFunction: "#268BD2", + chroma.NameTag: "#268BD2", + chroma.NameVariable: "#268BD2", + chroma.LiteralString: "#2AA198", + chroma.LiteralStringBacktick: "#586E75", + chroma.LiteralStringChar: "#2AA198", + chroma.LiteralStringDoc: "#93A1A1", + chroma.LiteralStringEscape: "#CB4B16", + chroma.LiteralStringHeredoc: "#93A1A1", + chroma.LiteralStringRegex: "#DC322F", + chroma.LiteralNumber: "#2AA198", + chroma.Operator: "#719e07", + chroma.Comment: "#586E75", + chroma.CommentPreproc: "#719e07", + chroma.CommentSpecial: "#719e07", + chroma.GenericDeleted: "#DC322F", + chroma.GenericEmph: "italic", + chroma.GenericError: "#DC322F bold", + chroma.GenericHeading: "#CB4B16", + chroma.GenericInserted: "#719e07", + chroma.GenericStrong: "bold", + chroma.GenericSubheading: "#268BD2", + chroma.Background: "#93A1A1 bg:#002B36", + chroma.Other: "#CB4B16", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/solarized-dark256.go b/vendor/github.com/alecthomas/chroma/styles/solarized-dark256.go new file mode 100644 index 000000000..a24ddc153 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/solarized-dark256.go @@ -0,0 +1,48 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// SolarizedDark256 style. 
+var SolarizedDark256 = Register(chroma.MustNewStyle("solarized-dark256", chroma.StyleEntries{ + chroma.Keyword: "#5f8700", + chroma.KeywordConstant: "#d75f00", + chroma.KeywordDeclaration: "#0087ff", + chroma.KeywordNamespace: "#d75f00", + chroma.KeywordReserved: "#0087ff", + chroma.KeywordType: "#af0000", + chroma.NameAttribute: "#8a8a8a", + chroma.NameBuiltin: "#0087ff", + chroma.NameBuiltinPseudo: "#0087ff", + chroma.NameClass: "#0087ff", + chroma.NameConstant: "#d75f00", + chroma.NameDecorator: "#0087ff", + chroma.NameEntity: "#d75f00", + chroma.NameException: "#af8700", + chroma.NameFunction: "#0087ff", + chroma.NameTag: "#0087ff", + chroma.NameVariable: "#0087ff", + chroma.LiteralString: "#00afaf", + chroma.LiteralStringBacktick: "#4e4e4e", + chroma.LiteralStringChar: "#00afaf", + chroma.LiteralStringDoc: "#00afaf", + chroma.LiteralStringEscape: "#af0000", + chroma.LiteralStringHeredoc: "#00afaf", + chroma.LiteralStringRegex: "#af0000", + chroma.LiteralNumber: "#00afaf", + chroma.Operator: "#8a8a8a", + chroma.OperatorWord: "#5f8700", + chroma.Comment: "#4e4e4e", + chroma.CommentPreproc: "#5f8700", + chroma.CommentSpecial: "#5f8700", + chroma.GenericDeleted: "#af0000", + chroma.GenericEmph: "italic", + chroma.GenericError: "#af0000 bold", + chroma.GenericHeading: "#d75f00", + chroma.GenericInserted: "#5f8700", + chroma.GenericStrong: "bold", + chroma.GenericSubheading: "#0087ff", + chroma.Background: "#8a8a8a bg:#1c1c1c", + chroma.Other: "#d75f00", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/solarized-light.go b/vendor/github.com/alecthomas/chroma/styles/solarized-light.go new file mode 100644 index 000000000..b6d5234a7 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/solarized-light.go @@ -0,0 +1,24 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// SolarizedLight style. +var SolarizedLight = Register(chroma.MustNewStyle("solarized-light", chroma.StyleEntries{ + chroma.Text: "bg: #eee8d5 #586e75", + chroma.Keyword: "#859900", + chroma.KeywordConstant: "bold", + chroma.KeywordNamespace: "#dc322f bold", + chroma.KeywordType: "bold", + chroma.Name: "#268bd2", + chroma.NameBuiltin: "#cb4b16", + chroma.NameClass: "#cb4b16", + chroma.NameTag: "bold", + chroma.Literal: "#2aa198", + chroma.LiteralNumber: "bold", + chroma.OperatorWord: "#859900", + chroma.Comment: "#93a1a1 italic", + chroma.Generic: "#d33682", + chroma.Background: " bg:#eee8d5", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/swapoff.go b/vendor/github.com/alecthomas/chroma/styles/swapoff.go new file mode 100644 index 000000000..e4daae61c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/swapoff.go @@ -0,0 +1,25 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// SwapOff theme. 
+var SwapOff = Register(chroma.MustNewStyle("swapoff", chroma.StyleEntries{ + chroma.Background: "#lightgray bg:#black", + chroma.Number: "bold #ansiyellow", + chroma.Comment: "#ansiteal", + chroma.CommentPreproc: "bold #ansigreen", + chroma.String: "bold #ansiturquoise", + chroma.Keyword: "bold #ansiwhite", + chroma.NameKeyword: "bold #ansiwhite", + chroma.NameBuiltin: "bold #ansiwhite", + chroma.GenericHeading: "bold", + chroma.GenericSubheading: "bold", + chroma.GenericStrong: "bold", + chroma.GenericUnderline: "underline", + chroma.NameTag: "bold", + chroma.NameAttribute: "#ansiteal", + chroma.Error: "#ansired", + chroma.LiteralDate: "bold #ansiyellow", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/tango.go b/vendor/github.com/alecthomas/chroma/styles/tango.go new file mode 100644 index 000000000..ae1afe064 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/tango.go @@ -0,0 +1,79 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Tango style. +var Tango = Register(chroma.MustNewStyle("tango", chroma.StyleEntries{ + chroma.TextWhitespace: "underline #f8f8f8", + chroma.Error: "#a40000 border:#ef2929", + chroma.Other: "#000000", + chroma.Comment: "italic #8f5902", + chroma.CommentMultiline: "italic #8f5902", + chroma.CommentPreproc: "italic #8f5902", + chroma.CommentSingle: "italic #8f5902", + chroma.CommentSpecial: "italic #8f5902", + chroma.Keyword: "bold #204a87", + chroma.KeywordConstant: "bold #204a87", + chroma.KeywordDeclaration: "bold #204a87", + chroma.KeywordNamespace: "bold #204a87", + chroma.KeywordPseudo: "bold #204a87", + chroma.KeywordReserved: "bold #204a87", + chroma.KeywordType: "bold #204a87", + chroma.Operator: "bold #ce5c00", + chroma.OperatorWord: "bold #204a87", + chroma.Punctuation: "bold #000000", + chroma.Name: "#000000", + chroma.NameAttribute: "#c4a000", + chroma.NameBuiltin: "#204a87", + chroma.NameBuiltinPseudo: "#3465a4", + chroma.NameClass: "#000000", + chroma.NameConstant: "#000000", + chroma.NameDecorator: "bold #5c35cc", + chroma.NameEntity: "#ce5c00", + chroma.NameException: "bold #cc0000", + chroma.NameFunction: "#000000", + chroma.NameProperty: "#000000", + chroma.NameLabel: "#f57900", + chroma.NameNamespace: "#000000", + chroma.NameOther: "#000000", + chroma.NameTag: "bold #204a87", + chroma.NameVariable: "#000000", + chroma.NameVariableClass: "#000000", + chroma.NameVariableGlobal: "#000000", + chroma.NameVariableInstance: "#000000", + chroma.LiteralNumber: "bold #0000cf", + chroma.LiteralNumberFloat: "bold #0000cf", + chroma.LiteralNumberHex: "bold #0000cf", + chroma.LiteralNumberInteger: "bold #0000cf", + chroma.LiteralNumberIntegerLong: "bold #0000cf", + chroma.LiteralNumberOct: "bold #0000cf", + chroma.Literal: "#000000", + chroma.LiteralDate: "#000000", + chroma.LiteralString: "#4e9a06", + chroma.LiteralStringBacktick: "#4e9a06", + chroma.LiteralStringChar: "#4e9a06", + chroma.LiteralStringDoc: "italic #8f5902", + chroma.LiteralStringDouble: "#4e9a06", + chroma.LiteralStringEscape: "#4e9a06", + chroma.LiteralStringHeredoc: "#4e9a06", + chroma.LiteralStringInterpol: "#4e9a06", + chroma.LiteralStringOther: "#4e9a06", + chroma.LiteralStringRegex: "#4e9a06", + chroma.LiteralStringSingle: "#4e9a06", + chroma.LiteralStringSymbol: "#4e9a06", + chroma.Generic: "#000000", + chroma.GenericDeleted: "#a40000", + chroma.GenericEmph: "italic #000000", + chroma.GenericError: "#ef2929", + chroma.GenericHeading: "bold #000080", + chroma.GenericInserted: "#00A000", + chroma.GenericOutput: "italic #000000", + 
chroma.GenericPrompt: "#8f5902", + chroma.GenericStrong: "bold #000000", + chroma.GenericSubheading: "bold #800080", + chroma.GenericTraceback: "bold #a40000", + chroma.GenericUnderline: "underline", + chroma.Background: " bg:#f8f8f8", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/trac.go b/vendor/github.com/alecthomas/chroma/styles/trac.go new file mode 100644 index 000000000..3b09c44e8 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/trac.go @@ -0,0 +1,42 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Trac style. +var Trac = Register(chroma.MustNewStyle("trac", chroma.StyleEntries{ + chroma.TextWhitespace: "#bbbbbb", + chroma.Comment: "italic #999988", + chroma.CommentPreproc: "bold noitalic #999999", + chroma.CommentSpecial: "bold #999999", + chroma.Operator: "bold", + chroma.LiteralString: "#bb8844", + chroma.LiteralStringRegex: "#808000", + chroma.LiteralNumber: "#009999", + chroma.Keyword: "bold", + chroma.KeywordType: "#445588", + chroma.NameBuiltin: "#999999", + chroma.NameFunction: "bold #990000", + chroma.NameClass: "bold #445588", + chroma.NameException: "bold #990000", + chroma.NameNamespace: "#555555", + chroma.NameVariable: "#008080", + chroma.NameConstant: "#008080", + chroma.NameTag: "#000080", + chroma.NameAttribute: "#008080", + chroma.NameEntity: "#800080", + chroma.GenericHeading: "#999999", + chroma.GenericSubheading: "#aaaaaa", + chroma.GenericDeleted: "bg:#ffdddd #000000", + chroma.GenericInserted: "bg:#ddffdd #000000", + chroma.GenericError: "#aa0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "#555555", + chroma.GenericOutput: "#888888", + chroma.GenericTraceback: "#aa0000", + chroma.GenericUnderline: "underline", + chroma.Error: "bg:#e3d2d2 #a61717", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/vim.go b/vendor/github.com/alecthomas/chroma/styles/vim.go new file mode 100644 index 000000000..6296042c2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/vim.go @@ -0,0 +1,36 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Vim style. +var Vim = Register(chroma.MustNewStyle("vim", chroma.StyleEntries{ + chroma.Background: "#cccccc bg:#000000", + chroma.Comment: "#000080", + chroma.CommentSpecial: "bold #cd0000", + chroma.Keyword: "#cdcd00", + chroma.KeywordDeclaration: "#00cd00", + chroma.KeywordNamespace: "#cd00cd", + chroma.KeywordType: "#00cd00", + chroma.Operator: "#3399cc", + chroma.OperatorWord: "#cdcd00", + chroma.NameClass: "#00cdcd", + chroma.NameBuiltin: "#cd00cd", + chroma.NameException: "bold #666699", + chroma.NameVariable: "#00cdcd", + chroma.LiteralString: "#cd0000", + chroma.LiteralNumber: "#cd00cd", + chroma.GenericHeading: "bold #000080", + chroma.GenericSubheading: "bold #800080", + chroma.GenericDeleted: "#cd0000", + chroma.GenericInserted: "#00cd00", + chroma.GenericError: "#FF0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "bold #000080", + chroma.GenericOutput: "#888", + chroma.GenericTraceback: "#04D", + chroma.GenericUnderline: "underline", + chroma.Error: "border:#FF0000", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/vs.go b/vendor/github.com/alecthomas/chroma/styles/vs.go new file mode 100644 index 000000000..acbcb9153 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/vs.go @@ -0,0 +1,23 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// VisualStudio style. 
+var VisualStudio = Register(chroma.MustNewStyle("vs", chroma.StyleEntries{ + chroma.Comment: "#008000", + chroma.CommentPreproc: "#0000ff", + chroma.Keyword: "#0000ff", + chroma.OperatorWord: "#0000ff", + chroma.KeywordType: "#2b91af", + chroma.NameClass: "#2b91af", + chroma.LiteralString: "#a31515", + chroma.GenericHeading: "bold", + chroma.GenericSubheading: "bold", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "bold", + chroma.Error: "border:#FF0000", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/vulcan.go b/vendor/github.com/alecthomas/chroma/styles/vulcan.go new file mode 100644 index 000000000..82ad1a14b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/vulcan.go @@ -0,0 +1,95 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +var ( + // inspired by Doom Emacs's One Doom Theme + black = "#282C34" + grey = "#3E4460" + grey2 = "#43454f" + white = "#C9C9C9" + red = "#CF5967" + yellow = "#ECBE7B" + green = "#82CC6A" + cyan = "#56B6C2" + blue = "#7FBAF5" + blue2 = "#57C7FF" + purple = "#BC74C4" +) + +var Vulcan = Register(chroma.MustNewStyle("vulcan", chroma.StyleEntries{ + chroma.Comment: grey, + chroma.CommentHashbang: grey + " italic", + chroma.CommentMultiline: grey, + chroma.CommentPreproc: blue, + chroma.CommentSingle: grey, + chroma.CommentSpecial: purple + " italic", + chroma.Generic: white, + chroma.GenericDeleted: red, + chroma.GenericEmph: white + " underline", + chroma.GenericError: red + " bold", + chroma.GenericHeading: yellow + " bold", + chroma.GenericInserted: yellow, + chroma.GenericOutput: grey2, + chroma.GenericPrompt: white, + chroma.GenericStrong: red + " bold", + chroma.GenericSubheading: red + " italic", + chroma.GenericTraceback: white, + chroma.GenericUnderline: "underline", + chroma.Error: red, + chroma.Keyword: blue, + chroma.KeywordConstant: red + " bg:" + grey2, + chroma.KeywordDeclaration: blue, + chroma.KeywordNamespace: purple, + chroma.KeywordPseudo: purple, + chroma.KeywordReserved: blue, + chroma.KeywordType: blue2 + " bold", + chroma.Literal: white, + chroma.LiteralDate: blue2, + chroma.Name: white, + chroma.NameAttribute: purple, + chroma.NameBuiltin: blue, + chroma.NameBuiltinPseudo: blue, + chroma.NameClass: yellow, + chroma.NameConstant: yellow, + chroma.NameDecorator: yellow, + chroma.NameEntity: white, + chroma.NameException: red, + chroma.NameFunction: blue2, + chroma.NameLabel: red, + chroma.NameNamespace: white, + chroma.NameOther: white, + chroma.NameTag: purple, + chroma.NameVariable: purple + " italic", + chroma.NameVariableClass: blue2 + " bold", + chroma.NameVariableGlobal: yellow, + chroma.NameVariableInstance: blue2, + chroma.LiteralNumber: cyan, + chroma.LiteralNumberBin: blue2, + chroma.LiteralNumberFloat: cyan, + chroma.LiteralNumberHex: blue2, + chroma.LiteralNumberInteger: cyan, + chroma.LiteralNumberIntegerLong: cyan, + chroma.LiteralNumberOct: blue2, + chroma.Operator: purple, + chroma.OperatorWord: purple, + chroma.Other: white, + chroma.Punctuation: cyan, + chroma.LiteralString: green, + chroma.LiteralStringBacktick: blue2, + chroma.LiteralStringChar: blue2, + chroma.LiteralStringDoc: green, + chroma.LiteralStringDouble: green, + chroma.LiteralStringEscape: cyan, + chroma.LiteralStringHeredoc: cyan, + chroma.LiteralStringInterpol: green, + chroma.LiteralStringOther: green, + chroma.LiteralStringRegex: blue2, + chroma.LiteralStringSingle: green, + chroma.LiteralStringSymbol: green, + chroma.Text: white, + 
chroma.TextWhitespace: white, + chroma.Background: " bg: " + black, +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/witchhazel.go b/vendor/github.com/alecthomas/chroma/styles/witchhazel.go new file mode 100644 index 000000000..4aea2788f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/witchhazel.go @@ -0,0 +1,52 @@ +// Copyright 2018 Alethea Katherine Flowers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// WitchHazel Style +var WitchHazel = Register(chroma.MustNewStyle("witchhazel", chroma.StyleEntries{ + chroma.Text: "#F8F8F2", + chroma.Whitespace: "#A8757B", + chroma.Error: "#960050 bg:#1e0010", + chroma.Comment: "#b0bec5", + chroma.Keyword: "#C2FFDF", + chroma.KeywordNamespace: "#FFB8D1", + chroma.Operator: "#FFB8D1", + chroma.Punctuation: "#F8F8F2", + chroma.Name: "#F8F8F2", + chroma.NameAttribute: "#ceb1ff", + chroma.NameBuiltinPseudo: "#80cbc4", + chroma.NameClass: "#ceb1ff", + chroma.NameConstant: "#C5A3FF", + chroma.NameDecorator: "#ceb1ff", + chroma.NameException: "#ceb1ff", + chroma.NameFunction: "#ceb1ff", + chroma.NameProperty: "#F8F8F2", + chroma.NameTag: "#FFB8D1", + chroma.NameVariable: "#F8F8F2", + chroma.Number: "#C5A3FF", + chroma.Literal: "#ae81ff", + chroma.LiteralDate: "#e6db74", + chroma.String: "#1bc5e0", + chroma.GenericDeleted: "#f92672", + chroma.GenericEmph: "italic", + chroma.GenericInserted: "#a6e22e", + chroma.GenericStrong: "bold", + chroma.GenericSubheading: "#75715e", + chroma.Background: " bg:#433e56", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/xcode-dark.go b/vendor/github.com/alecthomas/chroma/styles/xcode-dark.go new file mode 100644 index 000000000..9a9d7579a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/xcode-dark.go @@ -0,0 +1,62 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +var ( + // Inspired by Apple's Xcode "Default (Dark)" Theme + background = "#1F1F24" + plainText = "#FFFFFF" + comments = "#6C7986" + strings = "#FC6A5D" + numbers = "#D0BF69" + keywords = "#FC5FA3" + preprocessorStatements = "#FD8F3F" + typeDeclarations = "#5DD8FF" + otherDeclarations = "#41A1C0" + otherFunctionAndMethodNames = "#A167E6" + otherTypeNames = "#D0A8FF" +) + +// Xcode dark style +var XcodeDark = Register(chroma.MustNewStyle("xcode-dark", chroma.StyleEntries{ + chroma.Background: plainText + " bg:" + background, + + chroma.Comment: comments, + chroma.CommentMultiline: comments, + chroma.CommentPreproc: preprocessorStatements, + chroma.CommentSingle: comments, + chroma.CommentSpecial: comments + " italic", + + chroma.Error: "#960050", + + chroma.Keyword: keywords, + chroma.KeywordConstant: keywords, + chroma.KeywordDeclaration: keywords, + chroma.KeywordReserved: keywords, + + chroma.LiteralNumber: numbers, + chroma.LiteralNumberBin: numbers, + chroma.LiteralNumberFloat: numbers, + chroma.LiteralNumberHex: numbers, + chroma.LiteralNumberInteger: numbers, + chroma.LiteralNumberOct: numbers, + + 
chroma.LiteralString: strings, + chroma.LiteralStringEscape: strings, + chroma.LiteralStringInterpol: plainText, + + chroma.Name: plainText, + chroma.NameBuiltin: otherTypeNames, + chroma.NameBuiltinPseudo: otherFunctionAndMethodNames, + chroma.NameClass: typeDeclarations, + chroma.NameFunction: otherDeclarations, + chroma.NameVariable: otherDeclarations, + + chroma.Operator: plainText, + + chroma.Punctuation: plainText, + + chroma.Text: plainText, +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/xcode.go b/vendor/github.com/alecthomas/chroma/styles/xcode.go new file mode 100644 index 000000000..115cf71cd --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/xcode.go @@ -0,0 +1,29 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Xcode style. +var Xcode = Register(chroma.MustNewStyle("xcode", chroma.StyleEntries{ + chroma.Comment: "#177500", + chroma.CommentPreproc: "#633820", + chroma.LiteralString: "#C41A16", + chroma.LiteralStringChar: "#2300CE", + chroma.Operator: "#000000", + chroma.Keyword: "#A90D91", + chroma.Name: "#000000", + chroma.NameAttribute: "#836C28", + chroma.NameClass: "#3F6E75", + chroma.NameFunction: "#000000", + chroma.NameBuiltin: "#A90D91", + chroma.NameBuiltinPseudo: "#5B269A", + chroma.NameVariable: "#000000", + chroma.NameTag: "#000000", + chroma.NameDecorator: "#000000", + chroma.NameLabel: "#000000", + chroma.Literal: "#1C01CE", + chroma.LiteralNumber: "#1C01CE", + chroma.Error: "#000000", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/table.py b/vendor/github.com/alecthomas/chroma/table.py new file mode 100644 index 000000000..779453982 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/table.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python3 +import re +from collections import defaultdict +from subprocess import check_output + +README_FILE = "README.md" + + +lines = check_output(["go", "run", "./cmd/chroma/main.go", "--list"]).decode("utf-8").splitlines() +lines = [line.strip() for line in lines if line.startswith("  ") and not line.startswith("    ")] +lines = sorted(lines, key=lambda l: l.lower()) + +table = defaultdict(list) + +for line in lines: + table[line[0].upper()].append(line) + +rows = [] +for key, value in table.items(): + rows.append("{} | {}".format(key, ", ".join(value))) +tbody = "\n".join(rows) + +with open(README_FILE, "r") as f: + content = f.read() + +with open(README_FILE, "w") as f: + marker = re.compile(r"(?P<start>:----: \| --------\n).*?(?P<end>\n\n)", re.DOTALL) + replacement = r"\g<start>%s\g<end>" % tbody + updated_content = marker.sub(replacement, content) + f.write(updated_content) + +print(tbody) diff --git a/vendor/github.com/alecthomas/chroma/tokentype_string.go b/vendor/github.com/alecthomas/chroma/tokentype_string.go new file mode 100644 index 000000000..9c302f9c9 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/tokentype_string.go @@ -0,0 +1,219 @@ +// Code generated by "stringer -type TokenType"; DO NOT EDIT. + +package chroma + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again.
+ var x [1]struct{} + _ = x[Background - -1] + _ = x[PreWrapper - -2] + _ = x[Line - -3] + _ = x[LineNumbers - -4] + _ = x[LineNumbersTable - -5] + _ = x[LineHighlight - -6] + _ = x[LineTable - -7] + _ = x[LineTableTD - -8] + _ = x[CodeLine - -9] + _ = x[Error - -10] + _ = x[Other - -11] + _ = x[None - -12] + _ = x[EOFType-0] + _ = x[Keyword-1000] + _ = x[KeywordConstant-1001] + _ = x[KeywordDeclaration-1002] + _ = x[KeywordNamespace-1003] + _ = x[KeywordPseudo-1004] + _ = x[KeywordReserved-1005] + _ = x[KeywordType-1006] + _ = x[Name-2000] + _ = x[NameAttribute-2001] + _ = x[NameBuiltin-2002] + _ = x[NameBuiltinPseudo-2003] + _ = x[NameClass-2004] + _ = x[NameConstant-2005] + _ = x[NameDecorator-2006] + _ = x[NameEntity-2007] + _ = x[NameException-2008] + _ = x[NameFunction-2009] + _ = x[NameFunctionMagic-2010] + _ = x[NameKeyword-2011] + _ = x[NameLabel-2012] + _ = x[NameNamespace-2013] + _ = x[NameOperator-2014] + _ = x[NameOther-2015] + _ = x[NamePseudo-2016] + _ = x[NameProperty-2017] + _ = x[NameTag-2018] + _ = x[NameVariable-2019] + _ = x[NameVariableAnonymous-2020] + _ = x[NameVariableClass-2021] + _ = x[NameVariableGlobal-2022] + _ = x[NameVariableInstance-2023] + _ = x[NameVariableMagic-2024] + _ = x[Literal-3000] + _ = x[LiteralDate-3001] + _ = x[LiteralOther-3002] + _ = x[LiteralString-3100] + _ = x[LiteralStringAffix-3101] + _ = x[LiteralStringAtom-3102] + _ = x[LiteralStringBacktick-3103] + _ = x[LiteralStringBoolean-3104] + _ = x[LiteralStringChar-3105] + _ = x[LiteralStringDelimiter-3106] + _ = x[LiteralStringDoc-3107] + _ = x[LiteralStringDouble-3108] + _ = x[LiteralStringEscape-3109] + _ = x[LiteralStringHeredoc-3110] + _ = x[LiteralStringInterpol-3111] + _ = x[LiteralStringName-3112] + _ = x[LiteralStringOther-3113] + _ = x[LiteralStringRegex-3114] + _ = x[LiteralStringSingle-3115] + _ = x[LiteralStringSymbol-3116] + _ = x[LiteralNumber-3200] + _ = x[LiteralNumberBin-3201] + _ = x[LiteralNumberFloat-3202] + _ = x[LiteralNumberHex-3203] + _ = x[LiteralNumberInteger-3204] + _ = x[LiteralNumberIntegerLong-3205] + _ = x[LiteralNumberOct-3206] + _ = x[Operator-4000] + _ = x[OperatorWord-4001] + _ = x[Punctuation-5000] + _ = x[Comment-6000] + _ = x[CommentHashbang-6001] + _ = x[CommentMultiline-6002] + _ = x[CommentSingle-6003] + _ = x[CommentSpecial-6004] + _ = x[CommentPreproc-6100] + _ = x[CommentPreprocFile-6101] + _ = x[Generic-7000] + _ = x[GenericDeleted-7001] + _ = x[GenericEmph-7002] + _ = x[GenericError-7003] + _ = x[GenericHeading-7004] + _ = x[GenericInserted-7005] + _ = x[GenericOutput-7006] + _ = x[GenericPrompt-7007] + _ = x[GenericStrong-7008] + _ = x[GenericSubheading-7009] + _ = x[GenericTraceback-7010] + _ = x[GenericUnderline-7011] + _ = x[Text-8000] + _ = x[TextWhitespace-8001] + _ = x[TextSymbol-8002] + _ = x[TextPunctuation-8003] +} + +const _TokenType_name = 
"NoneOtherErrorCodeLineLineTableTDLineTableLineHighlightLineNumbersTableLineNumbersLinePreWrapperBackgroundEOFTypeKeywordKeywordConstantKeywordDeclarationKeywordNamespaceKeywordPseudoKeywordReservedKeywordTypeNameNameAttributeNameBuiltinNameBuiltinPseudoNameClassNameConstantNameDecoratorNameEntityNameExceptionNameFunctionNameFunctionMagicNameKeywordNameLabelNameNamespaceNameOperatorNameOtherNamePseudoNamePropertyNameTagNameVariableNameVariableAnonymousNameVariableClassNameVariableGlobalNameVariableInstanceNameVariableMagicLiteralLiteralDateLiteralOtherLiteralStringLiteralStringAffixLiteralStringAtomLiteralStringBacktickLiteralStringBooleanLiteralStringCharLiteralStringDelimiterLiteralStringDocLiteralStringDoubleLiteralStringEscapeLiteralStringHeredocLiteralStringInterpolLiteralStringNameLiteralStringOtherLiteralStringRegexLiteralStringSingleLiteralStringSymbolLiteralNumberLiteralNumberBinLiteralNumberFloatLiteralNumberHexLiteralNumberIntegerLiteralNumberIntegerLongLiteralNumberOctOperatorOperatorWordPunctuationCommentCommentHashbangCommentMultilineCommentSingleCommentSpecialCommentPreprocCommentPreprocFileGenericGenericDeletedGenericEmphGenericErrorGenericHeadingGenericInsertedGenericOutputGenericPromptGenericStrongGenericSubheadingGenericTracebackGenericUnderlineTextTextWhitespaceTextSymbolTextPunctuation" + +var _TokenType_map = map[TokenType]string{ + -12: _TokenType_name[0:4], + -11: _TokenType_name[4:9], + -10: _TokenType_name[9:14], + -9: _TokenType_name[14:22], + -8: _TokenType_name[22:33], + -7: _TokenType_name[33:42], + -6: _TokenType_name[42:55], + -5: _TokenType_name[55:71], + -4: _TokenType_name[71:82], + -3: _TokenType_name[82:86], + -2: _TokenType_name[86:96], + -1: _TokenType_name[96:106], + 0: _TokenType_name[106:113], + 1000: _TokenType_name[113:120], + 1001: _TokenType_name[120:135], + 1002: _TokenType_name[135:153], + 1003: _TokenType_name[153:169], + 1004: _TokenType_name[169:182], + 1005: _TokenType_name[182:197], + 1006: _TokenType_name[197:208], + 2000: _TokenType_name[208:212], + 2001: _TokenType_name[212:225], + 2002: _TokenType_name[225:236], + 2003: _TokenType_name[236:253], + 2004: _TokenType_name[253:262], + 2005: _TokenType_name[262:274], + 2006: _TokenType_name[274:287], + 2007: _TokenType_name[287:297], + 2008: _TokenType_name[297:310], + 2009: _TokenType_name[310:322], + 2010: _TokenType_name[322:339], + 2011: _TokenType_name[339:350], + 2012: _TokenType_name[350:359], + 2013: _TokenType_name[359:372], + 2014: _TokenType_name[372:384], + 2015: _TokenType_name[384:393], + 2016: _TokenType_name[393:403], + 2017: _TokenType_name[403:415], + 2018: _TokenType_name[415:422], + 2019: _TokenType_name[422:434], + 2020: _TokenType_name[434:455], + 2021: _TokenType_name[455:472], + 2022: _TokenType_name[472:490], + 2023: _TokenType_name[490:510], + 2024: _TokenType_name[510:527], + 3000: _TokenType_name[527:534], + 3001: _TokenType_name[534:545], + 3002: _TokenType_name[545:557], + 3100: _TokenType_name[557:570], + 3101: _TokenType_name[570:588], + 3102: _TokenType_name[588:605], + 3103: _TokenType_name[605:626], + 3104: _TokenType_name[626:646], + 3105: _TokenType_name[646:663], + 3106: _TokenType_name[663:685], + 3107: _TokenType_name[685:701], + 3108: _TokenType_name[701:720], + 3109: _TokenType_name[720:739], + 3110: _TokenType_name[739:759], + 3111: _TokenType_name[759:780], + 3112: _TokenType_name[780:797], + 3113: _TokenType_name[797:815], + 3114: _TokenType_name[815:833], + 3115: _TokenType_name[833:852], + 3116: _TokenType_name[852:871], + 3200: 
_TokenType_name[871:884], + 3201: _TokenType_name[884:900], + 3202: _TokenType_name[900:918], + 3203: _TokenType_name[918:934], + 3204: _TokenType_name[934:954], + 3205: _TokenType_name[954:978], + 3206: _TokenType_name[978:994], + 4000: _TokenType_name[994:1002], + 4001: _TokenType_name[1002:1014], + 5000: _TokenType_name[1014:1025], + 6000: _TokenType_name[1025:1032], + 6001: _TokenType_name[1032:1047], + 6002: _TokenType_name[1047:1063], + 6003: _TokenType_name[1063:1076], + 6004: _TokenType_name[1076:1090], + 6100: _TokenType_name[1090:1104], + 6101: _TokenType_name[1104:1122], + 7000: _TokenType_name[1122:1129], + 7001: _TokenType_name[1129:1143], + 7002: _TokenType_name[1143:1154], + 7003: _TokenType_name[1154:1166], + 7004: _TokenType_name[1166:1180], + 7005: _TokenType_name[1180:1195], + 7006: _TokenType_name[1195:1208], + 7007: _TokenType_name[1208:1221], + 7008: _TokenType_name[1221:1234], + 7009: _TokenType_name[1234:1251], + 7010: _TokenType_name[1251:1267], + 7011: _TokenType_name[1267:1283], + 8000: _TokenType_name[1283:1287], + 8001: _TokenType_name[1287:1301], + 8002: _TokenType_name[1301:1311], + 8003: _TokenType_name[1311:1326], +} + +func (i TokenType) String() string { + if str, ok := _TokenType_map[i]; ok { + return str + } + return "TokenType(" + strconv.FormatInt(int64(i), 10) + ")" +} diff --git a/vendor/github.com/alecthomas/chroma/types.go b/vendor/github.com/alecthomas/chroma/types.go new file mode 100644 index 000000000..3f15be84b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/types.go @@ -0,0 +1,356 @@ +package chroma + +import ( + "encoding/json" + "fmt" +) + +//go:generate stringer -type TokenType + +// TokenType is the type of token to highlight. +// +// It is also an Emitter, emitting a single token of itself +type TokenType int + +func (t TokenType) MarshalJSON() ([]byte, error) { return json.Marshal(t.String()) } +func (t *TokenType) UnmarshalJSON(data []byte) error { + key := "" + err := json.Unmarshal(data, &key) + if err != nil { + return err + } + for tt, text := range _TokenType_map { + if text == key { + *t = tt + return nil + } + } + return fmt.Errorf("unknown TokenType %q", data) +} + +// Set of TokenTypes. +// +// Categories of types are grouped in ranges of 1000, while sub-categories are in ranges of 100. For +// example, the literal category is in the range 3000-3999. The sub-category for literal strings is +// in the range 3100-3199. + +// Meta token types. +const ( + // Default background style. + Background TokenType = -1 - iota + // PreWrapper style. + PreWrapper + // Line style. + Line + // Line numbers in output. + LineNumbers + // Line numbers in output when in table. + LineNumbersTable + // Line higlight style. + LineHighlight + // Line numbers table wrapper style. + LineTable + // Line numbers table TD wrapper style. + LineTableTD + // Code line wrapper style. + CodeLine + // Input that could not be tokenised. + Error + // Other is used by the Delegate lexer to indicate which tokens should be handled by the delegate. + Other + // No highlighting. + None + // Used as an EOF marker / nil token + EOFType TokenType = 0 +) + +// Keywords. +const ( + Keyword TokenType = 1000 + iota + KeywordConstant + KeywordDeclaration + KeywordNamespace + KeywordPseudo + KeywordReserved + KeywordType +) + +// Names. 
+const ( + Name TokenType = 2000 + iota + NameAttribute + NameBuiltin + NameBuiltinPseudo + NameClass + NameConstant + NameDecorator + NameEntity + NameException + NameFunction + NameFunctionMagic + NameKeyword + NameLabel + NameNamespace + NameOperator + NameOther + NamePseudo + NameProperty + NameTag + NameVariable + NameVariableAnonymous + NameVariableClass + NameVariableGlobal + NameVariableInstance + NameVariableMagic +) + +// Literals. +const ( + Literal TokenType = 3000 + iota + LiteralDate + LiteralOther +) + +// Strings. +const ( + LiteralString TokenType = 3100 + iota + LiteralStringAffix + LiteralStringAtom + LiteralStringBacktick + LiteralStringBoolean + LiteralStringChar + LiteralStringDelimiter + LiteralStringDoc + LiteralStringDouble + LiteralStringEscape + LiteralStringHeredoc + LiteralStringInterpol + LiteralStringName + LiteralStringOther + LiteralStringRegex + LiteralStringSingle + LiteralStringSymbol +) + +// Literals. +const ( + LiteralNumber TokenType = 3200 + iota + LiteralNumberBin + LiteralNumberFloat + LiteralNumberHex + LiteralNumberInteger + LiteralNumberIntegerLong + LiteralNumberOct +) + +// Operators. +const ( + Operator TokenType = 4000 + iota + OperatorWord +) + +// Punctuation. +const ( + Punctuation TokenType = 5000 + iota +) + +// Comments. +const ( + Comment TokenType = 6000 + iota + CommentHashbang + CommentMultiline + CommentSingle + CommentSpecial +) + +// Preprocessor "comments". +const ( + CommentPreproc TokenType = 6100 + iota + CommentPreprocFile +) + +// Generic tokens. +const ( + Generic TokenType = 7000 + iota + GenericDeleted + GenericEmph + GenericError + GenericHeading + GenericInserted + GenericOutput + GenericPrompt + GenericStrong + GenericSubheading + GenericTraceback + GenericUnderline +) + +// Text. +const ( + Text TokenType = 8000 + iota + TextWhitespace + TextSymbol + TextPunctuation +) + +// Aliases. +const ( + Whitespace = TextWhitespace + + Date = LiteralDate + + String = LiteralString + StringAffix = LiteralStringAffix + StringBacktick = LiteralStringBacktick + StringChar = LiteralStringChar + StringDelimiter = LiteralStringDelimiter + StringDoc = LiteralStringDoc + StringDouble = LiteralStringDouble + StringEscape = LiteralStringEscape + StringHeredoc = LiteralStringHeredoc + StringInterpol = LiteralStringInterpol + StringOther = LiteralStringOther + StringRegex = LiteralStringRegex + StringSingle = LiteralStringSingle + StringSymbol = LiteralStringSymbol + + Number = LiteralNumber + NumberBin = LiteralNumberBin + NumberFloat = LiteralNumberFloat + NumberHex = LiteralNumberHex + NumberInteger = LiteralNumberInteger + NumberIntegerLong = LiteralNumberIntegerLong + NumberOct = LiteralNumberOct +) + +var ( + StandardTypes = map[TokenType]string{ + Background: "bg", + PreWrapper: "chroma", + Line: "line", + LineNumbers: "ln", + LineNumbersTable: "lnt", + LineHighlight: "hl", + LineTable: "lntable", + LineTableTD: "lntd", + CodeLine: "cl", + Text: "", + Whitespace: "w", + Error: "err", + Other: "x", + // I have no idea what this is used for... 
+ // Escape: "esc", + + Keyword: "k", + KeywordConstant: "kc", + KeywordDeclaration: "kd", + KeywordNamespace: "kn", + KeywordPseudo: "kp", + KeywordReserved: "kr", + KeywordType: "kt", + + Name: "n", + NameAttribute: "na", + NameBuiltin: "nb", + NameBuiltinPseudo: "bp", + NameClass: "nc", + NameConstant: "no", + NameDecorator: "nd", + NameEntity: "ni", + NameException: "ne", + NameFunction: "nf", + NameFunctionMagic: "fm", + NameProperty: "py", + NameLabel: "nl", + NameNamespace: "nn", + NameOther: "nx", + NameTag: "nt", + NameVariable: "nv", + NameVariableClass: "vc", + NameVariableGlobal: "vg", + NameVariableInstance: "vi", + NameVariableMagic: "vm", + + Literal: "l", + LiteralDate: "ld", + + String: "s", + StringAffix: "sa", + StringBacktick: "sb", + StringChar: "sc", + StringDelimiter: "dl", + StringDoc: "sd", + StringDouble: "s2", + StringEscape: "se", + StringHeredoc: "sh", + StringInterpol: "si", + StringOther: "sx", + StringRegex: "sr", + StringSingle: "s1", + StringSymbol: "ss", + + Number: "m", + NumberBin: "mb", + NumberFloat: "mf", + NumberHex: "mh", + NumberInteger: "mi", + NumberIntegerLong: "il", + NumberOct: "mo", + + Operator: "o", + OperatorWord: "ow", + + Punctuation: "p", + + Comment: "c", + CommentHashbang: "ch", + CommentMultiline: "cm", + CommentPreproc: "cp", + CommentPreprocFile: "cpf", + CommentSingle: "c1", + CommentSpecial: "cs", + + Generic: "g", + GenericDeleted: "gd", + GenericEmph: "ge", + GenericError: "gr", + GenericHeading: "gh", + GenericInserted: "gi", + GenericOutput: "go", + GenericPrompt: "gp", + GenericStrong: "gs", + GenericSubheading: "gu", + GenericTraceback: "gt", + GenericUnderline: "gl", + } +) + +func (t TokenType) Parent() TokenType { + if t%100 != 0 { + return t / 100 * 100 + } + if t%1000 != 0 { + return t / 1000 * 1000 + } + return 0 +} + +func (t TokenType) Category() TokenType { + return t / 1000 * 1000 +} + +func (t TokenType) SubCategory() TokenType { + return t / 100 * 100 +} + +func (t TokenType) InCategory(other TokenType) bool { + return t/1000 == other/1000 +} + +func (t TokenType) InSubCategory(other TokenType) bool { + return t/100 == other/100 +} + +func (t TokenType) Emit(groups []string, _ *LexerState) Iterator { + return Literator(Token{Type: t, Value: groups[0]}) +} diff --git a/vendor/github.com/dlclark/regexp2/.gitignore b/vendor/github.com/dlclark/regexp2/.gitignore new file mode 100644 index 000000000..fb844c330 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/.gitignore @@ -0,0 +1,27 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof +*.out + +.DS_Store diff --git a/vendor/github.com/dlclark/regexp2/.travis.yml b/vendor/github.com/dlclark/regexp2/.travis.yml new file mode 100644 index 000000000..2aa5ea1ee --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/.travis.yml @@ -0,0 +1,5 @@ +language: go + +go: + - 1.9 + - tip \ No newline at end of file diff --git a/vendor/github.com/dlclark/regexp2/ATTRIB b/vendor/github.com/dlclark/regexp2/ATTRIB new file mode 100644 index 000000000..cdf4560b9 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/ATTRIB @@ -0,0 +1,133 @@ +============ +These pieces of code were ported from dotnet/corefx: + +syntax/charclass.go (from RegexCharClass.cs): ported to use the built-in Go unicode classes. 
Canonicalize is + a direct port, but most of the other code required large changes because the C# implementation + used a string to represent the CharSet data structure and I cleaned that up in my implementation. + +syntax/code.go (from RegexCode.cs): ported literally with various cleanups and layout to make it more Go-ish. + +syntax/escape.go (from RegexParser.cs): ported Escape method and added some optimizations. Unescape is inspired by + the C# implementation but couldn't be directly ported because of the lack of do-while syntax in Go. + +syntax/parser.go (from RegexpParser.cs and RegexOptions.cs): ported parser struct and associated methods as + literally as possible. Several language differences required changes. E.g. lack pre/post-fix increments as + expressions, lack of do-while loops, lack of overloads, etc. + +syntax/prefix.go (from RegexFCD.cs and RegexBoyerMoore.cs): ported as literally as possible and added support + for unicode chars that are longer than the 16-bit char in C# for the 32-bit rune in Go. + +syntax/replacerdata.go (from RegexReplacement.cs): conceptually ported and re-organized to handle differences + in charclass implementation, and fix odd code layout between RegexParser.cs, Regex.cs, and RegexReplacement.cs. + +syntax/tree.go (from RegexTree.cs and RegexNode.cs): ported literally as possible. + +syntax/writer.go (from RegexWriter.cs): ported literally with minor changes to make it more Go-ish. + +match.go (from RegexMatch.cs): ported, simplified, and changed to handle Go's lack of inheritence. + +regexp.go (from Regex.cs and RegexOptions.cs): conceptually serves the same "starting point", but is simplified + and changed to handle differences in C# strings and Go strings/runes. + +replace.go (from RegexReplacement.cs): ported closely and then cleaned up to combine the MatchEvaluator and + simple string replace implementations. + +runner.go (from RegexRunner.cs): ported literally as possible. + +regexp_test.go (from CaptureTests.cs and GroupNamesAndNumbers.cs): conceptually ported, but the code was + manually structured like Go tests. + +replace_test.go (from RegexReplaceStringTest0.cs): conceptually ported + +rtl_test.go (from RightToLeft.cs): conceptually ported +--- +dotnet/corefx was released under this license: + +The MIT License (MIT) + +Copyright (c) Microsoft Corporation + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ +============ +These pieces of code are copied from the Go framework: + +- The overall directory structure of regexp2 was inspired by the Go runtime regexp package. +- The optimization in the escape method of syntax/escape.go is from the Go runtime QuoteMeta() func in regexp/regexp.go +- The method signatures in regexp.go are designed to match the Go framework regexp methods closely +- func regexp2.MustCompile and func quote are almost identifical to the regexp package versions +- BenchmarkMatch* and TestProgramTooLong* funcs in regexp_performance_test.go were copied from the framework + regexp/exec_test.go +--- +The Go framework was released under this license: + +Copyright (c) 2012 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +============ +Some test data were gathered from the Mono project. + +regexp_mono_test.go: ported from https://github.com/mono/mono/blob/master/mcs/class/System/Test/System.Text.RegularExpressions/PerlTrials.cs +--- +Mono tests released under this license: + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
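Stepping back to the chroma token table vendored earlier in this change: the Parent, Category and SubCategory helpers recover a token's place in the hierarchy purely from integer division, since categories sit on multiples of 1000 and sub-categories on multiples of 100. A minimal sketch of what that looks like from calling code (the import path and constants are chroma's own; treat the printed values as illustrative):

```go
package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
)

func main() {
	t := chroma.NameFunction

	// Integer division walks up the hierarchy: t/100*100 is the sub-category,
	// t/1000*1000 the top-level category.
	fmt.Println(t, "->", t.SubCategory(), "->", t.Category())

	// Membership tests compare the same buckets.
	fmt.Println("same top-level category as Name:", t.InCategory(chroma.Name))
}
```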
+ diff --git a/vendor/github.com/dlclark/regexp2/LICENSE b/vendor/github.com/dlclark/regexp2/LICENSE new file mode 100644 index 000000000..fe83dfdc9 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Doug Clark + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/dlclark/regexp2/README.md b/vendor/github.com/dlclark/regexp2/README.md new file mode 100644 index 000000000..4e4abb4c6 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/README.md @@ -0,0 +1,83 @@ +# regexp2 - full featured regular expressions for Go +Regexp2 is a feature-rich RegExp engine for Go. It doesn't have constant time guarantees like the built-in `regexp` package, but it allows backtracking and is compatible with Perl5 and .NET. You'll likely be better off with the RE2 engine from the `regexp` package and should only use this if you need to write very complex patterns or require compatibility with .NET. + +## Basis of the engine +The engine is ported from the .NET framework's System.Text.RegularExpressions.Regex engine. That engine was open sourced in 2015 under the MIT license. There are some fundamental differences between .NET strings and Go strings that required a bit of borrowing from the Go framework regex engine as well. I cleaned up a couple of the dirtier bits during the port (regexcharclass.cs was terrible), but the parse tree, code emmitted, and therefore patterns matched should be identical. + +## Installing +This is a go-gettable library, so install is easy: + + go get github.com/dlclark/regexp2/... + +## Usage +Usage is similar to the Go `regexp` package. Just like in `regexp`, you start by converting a regex into a state machine via the `Compile` or `MustCompile` methods. They ultimately do the same thing, but `MustCompile` will panic if the regex is invalid. You can then use the provided `Regexp` struct to find matches repeatedly. A `Regexp` struct is safe to use across goroutines. + +```go +re := regexp2.MustCompile(`Your pattern`, 0) +if isMatch, _ := re.MatchString(`Something to match`); isMatch { + //do something +} +``` + +The only error that the `*Match*` methods *should* return is a Timeout if you set the `re.MatchTimeout` field. Any other error is a bug in the `regexp2` package. 
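The paragraph above is the important operational detail of regexp2: without RE2-style guarantees, a hostile pattern plus input can backtrack for a very long time unless `MatchTimeout` is set. A hedged sketch (the pattern and duration are illustrative only):

```go
package main

import (
	"fmt"
	"strings"
	"time"

	"github.com/dlclark/regexp2"
)

func main() {
	// A classic catastrophic-backtracking pattern; the input is built so it
	// cannot match and forces the engine to explore a huge number of splits.
	re := regexp2.MustCompile(`(a+)+$`, 0)
	re.MatchTimeout = 50 * time.Millisecond

	if _, err := re.MatchString(strings.Repeat("a", 50) + "b"); err != nil {
		// Per the README, a timeout is the only error these methods are
		// expected to return.
		fmt.Println("match aborted:", err)
	}
}
```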
If you need more details about capture groups in a match then use the `FindStringMatch` method, like so: + +```go +if m, _ := re.FindStringMatch(`Something to match`); m != nil { + // the whole match is always group 0 + fmt.Printf("Group 0: %v\n", m.String()) + + // you can get all the groups too + gps := m.Groups() + + // a group can be captured multiple times, so each cap is separately addressable + fmt.Printf("Group 1, first capture", gps[1].Captures[0].String()) + fmt.Printf("Group 1, second capture", gps[1].Captures[1].String()) +} +``` + +Group 0 is embedded in the Match. Group 0 is an automatically-assigned group that encompasses the whole pattern. This means that `m.String()` is the same as `m.Group.String()` and `m.Groups()[0].String()` + +The __last__ capture is embedded in each group, so `g.String()` will return the same thing as `g.Capture.String()` and `g.Captures[len(g.Captures)-1].String()`. + +## Compare `regexp` and `regexp2` +| Category | regexp | regexp2 | +| --- | --- | --- | +| Catastrophic backtracking possible | no, constant execution time guarantees | yes, if your pattern is at risk you can use the `re.MatchTimeout` field | +| Python-style capture groups `(?Pre)` | yes | no (yes in RE2 compat mode) | +| .NET-style capture groups `(?re)` or `(?'name're)` | no | yes | +| comments `(?#comment)` | no | yes | +| branch numbering reset `(?\|a\|b)` | no | no | +| possessive match `(?>re)` | no | yes | +| positive lookahead `(?=re)` | no | yes | +| negative lookahead `(?!re)` | no | yes | +| positive lookbehind `(?<=re)` | no | yes | +| negative lookbehind `(?re)`) +* change singleline behavior for `$` to only match end of string (like RE2) (see [#24](https://github.com/dlclark/regexp2/issues/24)) + +```go +re := regexp2.MustCompile(`Your RE2-compatible pattern`, regexp2.RE2) +if isMatch, _ := re.MatchString(`Something to match`); isMatch { + //do something +} +``` + +This feature is a work in progress and I'm open to ideas for more things to put here (maybe more relaxed character escaping rules?). + + +## Library features that I'm still working on +- Regex split + +## Potential bugs +I've run a battery of tests against regexp2 from various sources and found the debug output matches the .NET engine, but .NET and Go handle strings very differently. I've attempted to handle these differences, but most of my testing deals with basic ASCII with a little bit of multi-byte Unicode. There's a chance that there are bugs in the string handling related to character sets with supplementary Unicode chars. Right-to-Left support is coded, but not well tested either. + +## Find a bug? +I'm open to new issues and pull requests with tests if you find something odd! diff --git a/vendor/github.com/dlclark/regexp2/match.go b/vendor/github.com/dlclark/regexp2/match.go new file mode 100644 index 000000000..1871cffe3 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/match.go @@ -0,0 +1,347 @@ +package regexp2 + +import ( + "bytes" + "fmt" +) + +// Match is a single regex result match that contains groups and repeated captures +// -Groups +// -Capture +type Match struct { + Group //embeded group 0 + + regex *Regexp + otherGroups []Group + + // input to the match + textpos int + textstart int + + capcount int + caps []int + sparseCaps map[int]int + + // output from the match + matches [][]int + matchcount []int + + // whether we've done any balancing with this match. If we + // have done balancing, we'll need to do extra work in Tidy(). 
+ balancing bool +} + +// Group is an explicit or implit (group 0) matched group within the pattern +type Group struct { + Capture // the last capture of this group is embeded for ease of use + + Name string // group name + Captures []Capture // captures of this group +} + +// Capture is a single capture of text within the larger original string +type Capture struct { + // the original string + text []rune + // the position in the original string where the first character of + // captured substring was found. + Index int + // the length of the captured substring. + Length int +} + +// String returns the captured text as a String +func (c *Capture) String() string { + return string(c.text[c.Index : c.Index+c.Length]) +} + +// Runes returns the captured text as a rune slice +func (c *Capture) Runes() []rune { + return c.text[c.Index : c.Index+c.Length] +} + +func newMatch(regex *Regexp, capcount int, text []rune, startpos int) *Match { + m := Match{ + regex: regex, + matchcount: make([]int, capcount), + matches: make([][]int, capcount), + textstart: startpos, + balancing: false, + } + m.Name = "0" + m.text = text + m.matches[0] = make([]int, 2) + return &m +} + +func newMatchSparse(regex *Regexp, caps map[int]int, capcount int, text []rune, startpos int) *Match { + m := newMatch(regex, capcount, text, startpos) + m.sparseCaps = caps + return m +} + +func (m *Match) reset(text []rune, textstart int) { + m.text = text + m.textstart = textstart + for i := 0; i < len(m.matchcount); i++ { + m.matchcount[i] = 0 + } + m.balancing = false +} + +func (m *Match) tidy(textpos int) { + + interval := m.matches[0] + m.Index = interval[0] + m.Length = interval[1] + m.textpos = textpos + m.capcount = m.matchcount[0] + //copy our root capture to the list + m.Group.Captures = []Capture{m.Group.Capture} + + if m.balancing { + // The idea here is that we want to compact all of our unbalanced captures. To do that we + // use j basically as a count of how many unbalanced captures we have at any given time + // (really j is an index, but j/2 is the count). First we skip past all of the real captures + // until we find a balance captures. Then we check each subsequent entry. If it's a balance + // capture (it's negative), we decrement j. If it's a real capture, we increment j and copy + // it down to the last free position. + for cap := 0; cap < len(m.matchcount); cap++ { + limit := m.matchcount[cap] * 2 + matcharray := m.matches[cap] + + var i, j int + + for i = 0; i < limit; i++ { + if matcharray[i] < 0 { + break + } + } + + for j = i; i < limit; i++ { + if matcharray[i] < 0 { + // skip negative values + j-- + } else { + // but if we find something positive (an actual capture), copy it back to the last + // unbalanced position. 
+ if i != j { + matcharray[j] = matcharray[i] + } + j++ + } + } + + m.matchcount[cap] = j / 2 + } + + m.balancing = false + } +} + +// isMatched tells if a group was matched by capnum +func (m *Match) isMatched(cap int) bool { + return cap < len(m.matchcount) && m.matchcount[cap] > 0 && m.matches[cap][m.matchcount[cap]*2-1] != (-3+1) +} + +// matchIndex returns the index of the last specified matched group by capnum +func (m *Match) matchIndex(cap int) int { + i := m.matches[cap][m.matchcount[cap]*2-2] + if i >= 0 { + return i + } + + return m.matches[cap][-3-i] +} + +// matchLength returns the length of the last specified matched group by capnum +func (m *Match) matchLength(cap int) int { + i := m.matches[cap][m.matchcount[cap]*2-1] + if i >= 0 { + return i + } + + return m.matches[cap][-3-i] +} + +// Nonpublic builder: add a capture to the group specified by "c" +func (m *Match) addMatch(c, start, l int) { + + if m.matches[c] == nil { + m.matches[c] = make([]int, 2) + } + + capcount := m.matchcount[c] + + if capcount*2+2 > len(m.matches[c]) { + oldmatches := m.matches[c] + newmatches := make([]int, capcount*8) + copy(newmatches, oldmatches[:capcount*2]) + m.matches[c] = newmatches + } + + m.matches[c][capcount*2] = start + m.matches[c][capcount*2+1] = l + m.matchcount[c] = capcount + 1 + //log.Printf("addMatch: c=%v, i=%v, l=%v ... matches: %v", c, start, l, m.matches) +} + +// Nonpublic builder: Add a capture to balance the specified group. This is used by the +// balanced match construct. (?...) +// +// If there were no such thing as backtracking, this would be as simple as calling RemoveMatch(c). +// However, since we have backtracking, we need to keep track of everything. +func (m *Match) balanceMatch(c int) { + m.balancing = true + + // we'll look at the last capture first + capcount := m.matchcount[c] + target := capcount*2 - 2 + + // first see if it is negative, and therefore is a reference to the next available + // capture group for balancing. If it is, we'll reset target to point to that capture. + if m.matches[c][target] < 0 { + target = -3 - m.matches[c][target] + } + + // move back to the previous capture + target -= 2 + + // if the previous capture is a reference, just copy that reference to the end. Otherwise, point to it. 
+ if target >= 0 && m.matches[c][target] < 0 { + m.addMatch(c, m.matches[c][target], m.matches[c][target+1]) + } else { + m.addMatch(c, -3-target, -4-target /* == -3 - (target + 1) */) + } +} + +// Nonpublic builder: removes a group match by capnum +func (m *Match) removeMatch(c int) { + m.matchcount[c]-- +} + +// GroupCount returns the number of groups this match has matched +func (m *Match) GroupCount() int { + return len(m.matchcount) +} + +// GroupByName returns a group based on the name of the group, or nil if the group name does not exist +func (m *Match) GroupByName(name string) *Group { + num := m.regex.GroupNumberFromName(name) + if num < 0 { + return nil + } + return m.GroupByNumber(num) +} + +// GroupByNumber returns a group based on the number of the group, or nil if the group number does not exist +func (m *Match) GroupByNumber(num int) *Group { + // check our sparse map + if m.sparseCaps != nil { + if newNum, ok := m.sparseCaps[num]; ok { + num = newNum + } + } + if num >= len(m.matchcount) || num < 0 { + return nil + } + + if num == 0 { + return &m.Group + } + + m.populateOtherGroups() + + return &m.otherGroups[num-1] +} + +// Groups returns all the capture groups, starting with group 0 (the full match) +func (m *Match) Groups() []Group { + m.populateOtherGroups() + g := make([]Group, len(m.otherGroups)+1) + g[0] = m.Group + copy(g[1:], m.otherGroups) + return g +} + +func (m *Match) populateOtherGroups() { + // Construct all the Group objects first time called + if m.otherGroups == nil { + m.otherGroups = make([]Group, len(m.matchcount)-1) + for i := 0; i < len(m.otherGroups); i++ { + m.otherGroups[i] = newGroup(m.regex.GroupNameFromNumber(i+1), m.text, m.matches[i+1], m.matchcount[i+1]) + } + } +} + +func (m *Match) groupValueAppendToBuf(groupnum int, buf *bytes.Buffer) { + c := m.matchcount[groupnum] + if c == 0 { + return + } + + matches := m.matches[groupnum] + + index := matches[(c-1)*2] + last := index + matches[(c*2)-1] + + for ; index < last; index++ { + buf.WriteRune(m.text[index]) + } +} + +func newGroup(name string, text []rune, caps []int, capcount int) Group { + g := Group{} + g.text = text + if capcount > 0 { + g.Index = caps[(capcount-1)*2] + g.Length = caps[(capcount*2)-1] + } + g.Name = name + g.Captures = make([]Capture, capcount) + for i := 0; i < capcount; i++ { + g.Captures[i] = Capture{ + text: text, + Index: caps[i*2], + Length: caps[i*2+1], + } + } + //log.Printf("newGroup! 
capcount %v, %+v", capcount, g) + + return g +} + +func (m *Match) dump() string { + buf := &bytes.Buffer{} + buf.WriteRune('\n') + if len(m.sparseCaps) > 0 { + for k, v := range m.sparseCaps { + fmt.Fprintf(buf, "Slot %v -> %v\n", k, v) + } + } + + for i, g := range m.Groups() { + fmt.Fprintf(buf, "Group %v (%v), %v caps:\n", i, g.Name, len(g.Captures)) + + for _, c := range g.Captures { + fmt.Fprintf(buf, " (%v, %v) %v\n", c.Index, c.Length, c.String()) + } + } + /* + for i := 0; i < len(m.matchcount); i++ { + fmt.Fprintf(buf, "\nGroup %v (%v):\n", i, m.regex.GroupNameFromNumber(i)) + + for j := 0; j < m.matchcount[i]; j++ { + text := "" + + if m.matches[i][j*2] >= 0 { + start := m.matches[i][j*2] + text = m.text[start : start+m.matches[i][j*2+1]] + } + + fmt.Fprintf(buf, " (%v, %v) %v\n", m.matches[i][j*2], m.matches[i][j*2+1], text) + } + } + */ + return buf.String() +} diff --git a/vendor/github.com/dlclark/regexp2/regexp.go b/vendor/github.com/dlclark/regexp2/regexp.go new file mode 100644 index 000000000..7c7b01d87 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/regexp.go @@ -0,0 +1,355 @@ +/* +Package regexp2 is a regexp package that has an interface similar to Go's framework regexp engine but uses a +more feature full regex engine behind the scenes. + +It doesn't have constant time guarantees, but it allows backtracking and is compatible with Perl5 and .NET. +You'll likely be better off with the RE2 engine from the regexp package and should only use this if you +need to write very complex patterns or require compatibility with .NET. +*/ +package regexp2 + +import ( + "errors" + "math" + "strconv" + "sync" + "time" + + "github.com/dlclark/regexp2/syntax" +) + +// Default timeout used when running regexp matches -- "forever" +var DefaultMatchTimeout = time.Duration(math.MaxInt64) + +// Regexp is the representation of a compiled regular expression. +// A Regexp is safe for concurrent use by multiple goroutines. +type Regexp struct { + //timeout when trying to find matches + MatchTimeout time.Duration + + // read-only after Compile + pattern string // as passed to Compile + options RegexOptions // options + + caps map[int]int // capnum->index + capnames map[string]int //capture group name -> index + capslist []string //sorted list of capture group names + capsize int // size of the capture array + + code *syntax.Code // compiled program + + // cache of machines for running regexp + muRun sync.Mutex + runner []*runner +} + +// Compile parses a regular expression and returns, if successful, +// a Regexp object that can be used to match against text. +func Compile(expr string, opt RegexOptions) (*Regexp, error) { + // parse it + tree, err := syntax.Parse(expr, syntax.RegexOptions(opt)) + if err != nil { + return nil, err + } + + // translate it to code + code, err := syntax.Write(tree) + if err != nil { + return nil, err + } + + // return it + return &Regexp{ + pattern: expr, + options: opt, + caps: code.Caps, + capnames: tree.Capnames, + capslist: tree.Caplist, + capsize: code.Capsize, + code: code, + MatchTimeout: DefaultMatchTimeout, + }, nil +} + +// MustCompile is like Compile but panics if the expression cannot be parsed. +// It simplifies safe initialization of global variables holding compiled regular +// expressions. 
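MustCompile, defined next, panics on an invalid pattern, which suits patterns fixed at build time; user-supplied patterns should go through Compile with the error handled. A small usage sketch (names are illustrative):

```go
package main

import (
	"fmt"

	"github.com/dlclark/regexp2"
)

// Panicking at init time is acceptable for a pattern that is part of the program.
var wordRE = regexp2.MustCompile(`\w+`, regexp2.None)

func main() {
	// For patterns that come from users or configuration, prefer Compile.
	if _, err := regexp2.Compile(`(unbalanced`, regexp2.None); err != nil {
		fmt.Println("bad pattern:", err)
	}

	ok, _ := wordRE.MatchString("hello")
	fmt.Println(ok) // true
}
```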
+func MustCompile(str string, opt RegexOptions) *Regexp { + regexp, error := Compile(str, opt) + if error != nil { + panic(`regexp2: Compile(` + quote(str) + `): ` + error.Error()) + } + return regexp +} + +// Escape adds backslashes to any special characters in the input string +func Escape(input string) string { + return syntax.Escape(input) +} + +// Unescape removes any backslashes from previously-escaped special characters in the input string +func Unescape(input string) (string, error) { + return syntax.Unescape(input) +} + +// String returns the source text used to compile the regular expression. +func (re *Regexp) String() string { + return re.pattern +} + +func quote(s string) string { + if strconv.CanBackquote(s) { + return "`" + s + "`" + } + return strconv.Quote(s) +} + +// RegexOptions impact the runtime and parsing behavior +// for each specific regex. They are setable in code as well +// as in the regex pattern itself. +type RegexOptions int32 + +const ( + None RegexOptions = 0x0 + IgnoreCase = 0x0001 // "i" + Multiline = 0x0002 // "m" + ExplicitCapture = 0x0004 // "n" + Compiled = 0x0008 // "c" + Singleline = 0x0010 // "s" + IgnorePatternWhitespace = 0x0020 // "x" + RightToLeft = 0x0040 // "r" + Debug = 0x0080 // "d" + ECMAScript = 0x0100 // "e" + RE2 = 0x0200 // RE2 (regexp package) compatibility mode +) + +func (re *Regexp) RightToLeft() bool { + return re.options&RightToLeft != 0 +} + +func (re *Regexp) Debug() bool { + return re.options&Debug != 0 +} + +// Replace searches the input string and replaces each match found with the replacement text. +// Count will limit the number of matches attempted and startAt will allow +// us to skip past possible matches at the start of the input (left or right depending on RightToLeft option). +// Set startAt and count to -1 to go through the whole string +func (re *Regexp) Replace(input, replacement string, startAt, count int) (string, error) { + data, err := syntax.NewReplacerData(replacement, re.caps, re.capsize, re.capnames, syntax.RegexOptions(re.options)) + if err != nil { + return "", err + } + //TODO: cache ReplacerData + + return replace(re, data, nil, input, startAt, count) +} + +// ReplaceFunc searches the input string and replaces each match found using the string from the evaluator +// Count will limit the number of matches attempted and startAt will allow +// us to skip past possible matches at the start of the input (left or right depending on RightToLeft option). +// Set startAt and count to -1 to go through the whole string. 
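Replace (above) and ReplaceFunc (next) share the startAt/count convention described in their comments; passing -1 for both walks the whole input. An illustrative sketch:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/dlclark/regexp2"
)

func main() {
	re := regexp2.MustCompile(`\d+`, regexp2.None)

	// startAt = -1, count = -1: scan the whole string and replace every match.
	out, _ := re.Replace("a1 b22 c333", "#", -1, -1)
	fmt.Println(out) // a# b# c#

	// ReplaceFunc hands each Match to the evaluator and splices in its result.
	out, _ = re.ReplaceFunc("a1 b22 c333", func(m regexp2.Match) string {
		return strings.Repeat("*", len(m.String()))
	}, -1, -1)
	fmt.Println(out) // a* b** c***
}
```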
+func (re *Regexp) ReplaceFunc(input string, evaluator MatchEvaluator, startAt, count int) (string, error) { + return replace(re, nil, evaluator, input, startAt, count) +} + +// FindStringMatch searches the input string for a Regexp match +func (re *Regexp) FindStringMatch(s string) (*Match, error) { + // convert string to runes + return re.run(false, -1, getRunes(s)) +} + +// FindRunesMatch searches the input rune slice for a Regexp match +func (re *Regexp) FindRunesMatch(r []rune) (*Match, error) { + return re.run(false, -1, r) +} + +// FindStringMatchStartingAt searches the input string for a Regexp match starting at the startAt index +func (re *Regexp) FindStringMatchStartingAt(s string, startAt int) (*Match, error) { + if startAt > len(s) { + return nil, errors.New("startAt must be less than the length of the input string") + } + r, startAt := re.getRunesAndStart(s, startAt) + if startAt == -1 { + // we didn't find our start index in the string -- that's a problem + return nil, errors.New("startAt must align to the start of a valid rune in the input string") + } + + return re.run(false, startAt, r) +} + +// FindRunesMatchStartingAt searches the input rune slice for a Regexp match starting at the startAt index +func (re *Regexp) FindRunesMatchStartingAt(r []rune, startAt int) (*Match, error) { + return re.run(false, startAt, r) +} + +// FindNextMatch returns the next match in the same input string as the match parameter. +// Will return nil if there is no next match or if given a nil match. +func (re *Regexp) FindNextMatch(m *Match) (*Match, error) { + if m == nil { + return nil, nil + } + + // If previous match was empty, advance by one before matching to prevent + // infinite loop + startAt := m.textpos + if m.Length == 0 { + if m.textpos == len(m.text) { + return nil, nil + } + + if re.RightToLeft() { + startAt-- + } else { + startAt++ + } + } + return re.run(false, startAt, m.text) +} + +// MatchString return true if the string matches the regex +// error will be set if a timeout occurs +func (re *Regexp) MatchString(s string) (bool, error) { + m, err := re.run(true, -1, getRunes(s)) + if err != nil { + return false, err + } + return m != nil, nil +} + +func (re *Regexp) getRunesAndStart(s string, startAt int) ([]rune, int) { + if startAt < 0 { + if re.RightToLeft() { + r := getRunes(s) + return r, len(r) + } + return getRunes(s), 0 + } + ret := make([]rune, len(s)) + i := 0 + runeIdx := -1 + for strIdx, r := range s { + if strIdx == startAt { + runeIdx = i + } + ret[i] = r + i++ + } + if startAt == len(s) { + runeIdx = i + } + return ret[:i], runeIdx +} + +func getRunes(s string) []rune { + return []rune(s) +} + +// MatchRunes return true if the runes matches the regex +// error will be set if a timeout occurs +func (re *Regexp) MatchRunes(r []rune) (bool, error) { + m, err := re.run(true, -1, r) + if err != nil { + return false, err + } + return m != nil, nil +} + +// GetGroupNames Returns the set of strings used to name capturing groups in the expression. +func (re *Regexp) GetGroupNames() []string { + var result []string + + if re.capslist == nil { + result = make([]string, re.capsize) + + for i := 0; i < len(result); i++ { + result[i] = strconv.Itoa(i) + } + } else { + result = make([]string, len(re.capslist)) + copy(result, re.capslist) + } + + return result +} + +// GetGroupNumbers returns the integer group numbers corresponding to a group name. 
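Before the group-number helpers continue below: the find methods above have no FindAll-style wrapper in this file, so the usual idiom for walking every match is FindStringMatch followed by FindNextMatch until it returns nil. A sketch:

```go
package main

import (
	"fmt"

	"github.com/dlclark/regexp2"
)

func main() {
	re := regexp2.MustCompile(`\w+`, regexp2.None)

	// FindNextMatch resumes after the previous match (and advances by one on
	// an empty match, so this loop cannot spin in place).
	m, _ := re.FindStringMatch("one two three")
	for m != nil {
		fmt.Println(m.String(), "at index", m.Index)
		m, _ = re.FindNextMatch(m)
	}
}
```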
+func (re *Regexp) GetGroupNumbers() []int { + var result []int + + if re.caps == nil { + result = make([]int, re.capsize) + + for i := 0; i < len(result); i++ { + result[i] = i + } + } else { + result = make([]int, len(re.caps)) + + for k, v := range re.caps { + result[v] = k + } + } + + return result +} + +// GroupNameFromNumber retrieves a group name that corresponds to a group number. +// It will return "" for and unknown group number. Unnamed groups automatically +// receive a name that is the decimal string equivalent of its number. +func (re *Regexp) GroupNameFromNumber(i int) string { + if re.capslist == nil { + if i >= 0 && i < re.capsize { + return strconv.Itoa(i) + } + + return "" + } + + if re.caps != nil { + var ok bool + if i, ok = re.caps[i]; !ok { + return "" + } + } + + if i >= 0 && i < len(re.capslist) { + return re.capslist[i] + } + + return "" +} + +// GroupNumberFromName returns a group number that corresponds to a group name. +// Returns -1 if the name is not a recognized group name. Numbered groups +// automatically get a group name that is the decimal string equivalent of its number. +func (re *Regexp) GroupNumberFromName(name string) int { + // look up name if we have a hashtable of names + if re.capnames != nil { + if k, ok := re.capnames[name]; ok { + return k + } + + return -1 + } + + // convert to an int if it looks like a number + result := 0 + for i := 0; i < len(name); i++ { + ch := name[i] + + if ch > '9' || ch < '0' { + return -1 + } + + result *= 10 + result += int(ch - '0') + } + + // return int if it's in range + if result >= 0 && result < re.capsize { + return result + } + + return -1 +} diff --git a/vendor/github.com/dlclark/regexp2/replace.go b/vendor/github.com/dlclark/regexp2/replace.go new file mode 100644 index 000000000..0376bd9d3 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/replace.go @@ -0,0 +1,177 @@ +package regexp2 + +import ( + "bytes" + "errors" + + "github.com/dlclark/regexp2/syntax" +) + +const ( + replaceSpecials = 4 + replaceLeftPortion = -1 + replaceRightPortion = -2 + replaceLastGroup = -3 + replaceWholeString = -4 +) + +// MatchEvaluator is a function that takes a match and returns a replacement string to be used +type MatchEvaluator func(Match) string + +// Three very similar algorithms appear below: replace (pattern), +// replace (evaluator), and split. + +// Replace Replaces all occurrences of the regex in the string with the +// replacement pattern. +// +// Note that the special case of no matches is handled on its own: +// with no matches, the input string is returned unchanged. +// The right-to-left case is split out because StringBuilder +// doesn't handle right-to-left string building directly very well. 
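GroupNameFromNumber and GroupNumberFromName above are the bridge between .NET-style named groups and the numeric capture slots used by the Match accessors. A sketch of how they combine (the pattern and group names are illustrative):

```go
package main

import (
	"fmt"

	"github.com/dlclark/regexp2"
)

func main() {
	re := regexp2.MustCompile(`(?<year>\d{4})-(?<month>\d{2})`, regexp2.None)

	fmt.Println(re.GetGroupNames())              // names known to the pattern
	fmt.Println(re.GroupNumberFromName("month")) // numeric slot behind a name

	if m, _ := re.FindStringMatch("2022-03-01"); m != nil {
		fmt.Println(m.GroupByName("year").String()) // "2022"
	}
}
```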
+func replace(regex *Regexp, data *syntax.ReplacerData, evaluator MatchEvaluator, input string, startAt, count int) (string, error) { + if count < -1 { + return "", errors.New("Count too small") + } + if count == 0 { + return "", nil + } + + m, err := regex.FindStringMatchStartingAt(input, startAt) + + if err != nil { + return "", err + } + if m == nil { + return input, nil + } + + buf := &bytes.Buffer{} + text := m.text + + if !regex.RightToLeft() { + prevat := 0 + for m != nil { + if m.Index != prevat { + buf.WriteString(string(text[prevat:m.Index])) + } + prevat = m.Index + m.Length + if evaluator == nil { + replacementImpl(data, buf, m) + } else { + buf.WriteString(evaluator(*m)) + } + + count-- + if count == 0 { + break + } + m, err = regex.FindNextMatch(m) + if err != nil { + return "", nil + } + } + + if prevat < len(text) { + buf.WriteString(string(text[prevat:])) + } + } else { + prevat := len(text) + var al []string + + for m != nil { + if m.Index+m.Length != prevat { + al = append(al, string(text[m.Index+m.Length:prevat])) + } + prevat = m.Index + if evaluator == nil { + replacementImplRTL(data, &al, m) + } else { + al = append(al, evaluator(*m)) + } + + count-- + if count == 0 { + break + } + m, err = regex.FindNextMatch(m) + if err != nil { + return "", nil + } + } + + if prevat > 0 { + buf.WriteString(string(text[:prevat])) + } + + for i := len(al) - 1; i >= 0; i-- { + buf.WriteString(al[i]) + } + } + + return buf.String(), nil +} + +// Given a Match, emits into the StringBuilder the evaluated +// substitution pattern. +func replacementImpl(data *syntax.ReplacerData, buf *bytes.Buffer, m *Match) { + for _, r := range data.Rules { + + if r >= 0 { // string lookup + buf.WriteString(data.Strings[r]) + } else if r < -replaceSpecials { // group lookup + m.groupValueAppendToBuf(-replaceSpecials-1-r, buf) + } else { + switch -replaceSpecials - 1 - r { // special insertion patterns + case replaceLeftPortion: + for i := 0; i < m.Index; i++ { + buf.WriteRune(m.text[i]) + } + case replaceRightPortion: + for i := m.Index + m.Length; i < len(m.text); i++ { + buf.WriteRune(m.text[i]) + } + case replaceLastGroup: + m.groupValueAppendToBuf(m.GroupCount()-1, buf) + case replaceWholeString: + for i := 0; i < len(m.text); i++ { + buf.WriteRune(m.text[i]) + } + } + } + } +} + +func replacementImplRTL(data *syntax.ReplacerData, al *[]string, m *Match) { + l := *al + buf := &bytes.Buffer{} + + for _, r := range data.Rules { + buf.Reset() + if r >= 0 { // string lookup + l = append(l, data.Strings[r]) + } else if r < -replaceSpecials { // group lookup + m.groupValueAppendToBuf(-replaceSpecials-1-r, buf) + l = append(l, buf.String()) + } else { + switch -replaceSpecials - 1 - r { // special insertion patterns + case replaceLeftPortion: + for i := 0; i < m.Index; i++ { + buf.WriteRune(m.text[i]) + } + case replaceRightPortion: + for i := m.Index + m.Length; i < len(m.text); i++ { + buf.WriteRune(m.text[i]) + } + case replaceLastGroup: + m.groupValueAppendToBuf(m.GroupCount()-1, buf) + case replaceWholeString: + for i := 0; i < len(m.text); i++ { + buf.WriteRune(m.text[i]) + } + } + l = append(l, buf.String()) + } + } + + *al = l +} diff --git a/vendor/github.com/dlclark/regexp2/runner.go b/vendor/github.com/dlclark/regexp2/runner.go new file mode 100644 index 000000000..4d7f9b061 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/runner.go @@ -0,0 +1,1634 @@ +package regexp2 + +import ( + "bytes" + "errors" + "fmt" + "math" + "strconv" + "strings" + "time" + "unicode" + + 
"github.com/dlclark/regexp2/syntax" +) + +type runner struct { + re *Regexp + code *syntax.Code + + runtextstart int // starting point for search + + runtext []rune // text to search + runtextpos int // current position in text + runtextend int + + // The backtracking stack. Opcodes use this to store data regarding + // what they have matched and where to backtrack to. Each "frame" on + // the stack takes the form of [CodePosition Data1 Data2...], where + // CodePosition is the position of the current opcode and + // the data values are all optional. The CodePosition can be negative, and + // these values (also called "back2") are used by the BranchMark family of opcodes + // to indicate whether they are backtracking after a successful or failed + // match. + // When we backtrack, we pop the CodePosition off the stack, set the current + // instruction pointer to that code position, and mark the opcode + // with a backtracking flag ("Back"). Each opcode then knows how to + // handle its own data. + runtrack []int + runtrackpos int + + // This stack is used to track text positions across different opcodes. + // For example, in /(a*b)+/, the parentheses result in a SetMark/CaptureMark + // pair. SetMark records the text position before we match a*b. Then + // CaptureMark uses that position to figure out where the capture starts. + // Opcodes which push onto this stack are always paired with other opcodes + // which will pop the value from it later. A successful match should mean + // that this stack is empty. + runstack []int + runstackpos int + + // The crawl stack is used to keep track of captures. Every time a group + // has a capture, we push its group number onto the runcrawl stack. In + // the case of a balanced match, we push BOTH groups onto the stack. + runcrawl []int + runcrawlpos int + + runtrackcount int // count of states that may do backtracking + + runmatch *Match // result object + + ignoreTimeout bool + timeout time.Duration // timeout in milliseconds (needed for actual) + timeoutChecksToSkip int + timeoutAt time.Time + + operator syntax.InstOp + codepos int + rightToLeft bool + caseInsensitive bool +} + +// run searches for matches and can continue from the previous match +// +// quick is usually false, but can be true to not return matches, just put it in caches +// textstart is -1 to start at the "beginning" (depending on Right-To-Left), otherwise an index in input +// input is the string to search for our regex pattern +func (re *Regexp) run(quick bool, textstart int, input []rune) (*Match, error) { + + // get a cached runner + runner := re.getRunner() + defer re.putRunner(runner) + + if textstart < 0 { + if re.RightToLeft() { + textstart = len(input) + } else { + textstart = 0 + } + } + + return runner.scan(input, textstart, quick, re.MatchTimeout) +} + +// Scans the string to find the first match. Uses the Match object +// both to feed text in and as a place to store matches that come out. +// +// All the action is in the Go() method. Our +// responsibility is to load up the class members before +// calling Go. +// +// The optimizer can compute a set of candidate starting characters, +// and we could use a separate method Skip() that will quickly scan past +// any characters that we know can't match. 
+func (r *runner) scan(rt []rune, textstart int, quick bool, timeout time.Duration) (*Match, error) { + r.timeout = timeout + r.ignoreTimeout = (time.Duration(math.MaxInt64) == timeout) + r.runtextstart = textstart + r.runtext = rt + r.runtextend = len(rt) + + stoppos := r.runtextend + bump := 1 + + if r.re.RightToLeft() { + bump = -1 + stoppos = 0 + } + + r.runtextpos = textstart + initted := false + + r.startTimeoutWatch() + for { + if r.re.Debug() { + //fmt.Printf("\nSearch content: %v\n", string(r.runtext)) + fmt.Printf("\nSearch range: from 0 to %v\n", r.runtextend) + fmt.Printf("Firstchar search starting at %v stopping at %v\n", r.runtextpos, stoppos) + } + + if r.findFirstChar() { + if err := r.checkTimeout(); err != nil { + return nil, err + } + + if !initted { + r.initMatch() + initted = true + } + + if r.re.Debug() { + fmt.Printf("Executing engine starting at %v\n\n", r.runtextpos) + } + + if err := r.execute(); err != nil { + return nil, err + } + + if r.runmatch.matchcount[0] > 0 { + // We'll return a match even if it touches a previous empty match + return r.tidyMatch(quick), nil + } + + // reset state for another go + r.runtrackpos = len(r.runtrack) + r.runstackpos = len(r.runstack) + r.runcrawlpos = len(r.runcrawl) + } + + // failure! + + if r.runtextpos == stoppos { + r.tidyMatch(true) + return nil, nil + } + + // Recognize leading []* and various anchors, and bump on failure accordingly + + // r.bump by one and start again + + r.runtextpos += bump + } + // We never get here +} + +func (r *runner) execute() error { + + r.goTo(0) + + for { + + if r.re.Debug() { + r.dumpState() + } + + if err := r.checkTimeout(); err != nil { + return err + } + + switch r.operator { + case syntax.Stop: + return nil + + case syntax.Nothing: + break + + case syntax.Goto: + r.goTo(r.operand(0)) + continue + + case syntax.Testref: + if !r.runmatch.isMatched(r.operand(0)) { + break + } + r.advance(1) + continue + + case syntax.Lazybranch: + r.trackPush1(r.textPos()) + r.advance(1) + continue + + case syntax.Lazybranch | syntax.Back: + r.trackPop() + r.textto(r.trackPeek()) + r.goTo(r.operand(0)) + continue + + case syntax.Setmark: + r.stackPush(r.textPos()) + r.trackPush() + r.advance(0) + continue + + case syntax.Nullmark: + r.stackPush(-1) + r.trackPush() + r.advance(0) + continue + + case syntax.Setmark | syntax.Back, syntax.Nullmark | syntax.Back: + r.stackPop() + break + + case syntax.Getmark: + r.stackPop() + r.trackPush1(r.stackPeek()) + r.textto(r.stackPeek()) + r.advance(0) + continue + + case syntax.Getmark | syntax.Back: + r.trackPop() + r.stackPush(r.trackPeek()) + break + + case syntax.Capturemark: + if r.operand(1) != -1 && !r.runmatch.isMatched(r.operand(1)) { + break + } + r.stackPop() + if r.operand(1) != -1 { + r.transferCapture(r.operand(0), r.operand(1), r.stackPeek(), r.textPos()) + } else { + r.capture(r.operand(0), r.stackPeek(), r.textPos()) + } + r.trackPush1(r.stackPeek()) + + r.advance(2) + + continue + + case syntax.Capturemark | syntax.Back: + r.trackPop() + r.stackPush(r.trackPeek()) + r.uncapture() + if r.operand(0) != -1 && r.operand(1) != -1 { + r.uncapture() + } + + break + + case syntax.Branchmark: + r.stackPop() + + matched := r.textPos() - r.stackPeek() + + if matched != 0 { // Nonempty match -> loop now + r.trackPush2(r.stackPeek(), r.textPos()) // Save old mark, textpos + r.stackPush(r.textPos()) // Make new mark + r.goTo(r.operand(0)) // Loop + } else { // Empty match -> straight now + r.trackPushNeg1(r.stackPeek()) // Save old mark + r.advance(1) // 
Straight + } + continue + + case syntax.Branchmark | syntax.Back: + r.trackPopN(2) + r.stackPop() + r.textto(r.trackPeekN(1)) // Recall position + r.trackPushNeg1(r.trackPeek()) // Save old mark + r.advance(1) // Straight + continue + + case syntax.Branchmark | syntax.Back2: + r.trackPop() + r.stackPush(r.trackPeek()) // Recall old mark + break // Backtrack + + case syntax.Lazybranchmark: + { + // We hit this the first time through a lazy loop and after each + // successful match of the inner expression. It simply continues + // on and doesn't loop. + r.stackPop() + + oldMarkPos := r.stackPeek() + + if r.textPos() != oldMarkPos { // Nonempty match -> try to loop again by going to 'back' state + if oldMarkPos != -1 { + r.trackPush2(oldMarkPos, r.textPos()) // Save old mark, textpos + } else { + r.trackPush2(r.textPos(), r.textPos()) + } + } else { + // The inner expression found an empty match, so we'll go directly to 'back2' if we + // backtrack. In this case, we need to push something on the stack, since back2 pops. + // However, in the case of ()+? or similar, this empty match may be legitimate, so push the text + // position associated with that empty match. + r.stackPush(oldMarkPos) + + r.trackPushNeg1(r.stackPeek()) // Save old mark + } + r.advance(1) + continue + } + + case syntax.Lazybranchmark | syntax.Back: + + // After the first time, Lazybranchmark | syntax.Back occurs + // with each iteration of the loop, and therefore with every attempted + // match of the inner expression. We'll try to match the inner expression, + // then go back to Lazybranchmark if successful. If the inner expression + // fails, we go to Lazybranchmark | syntax.Back2 + + r.trackPopN(2) + pos := r.trackPeekN(1) + r.trackPushNeg1(r.trackPeek()) // Save old mark + r.stackPush(pos) // Make new mark + r.textto(pos) // Recall position + r.goTo(r.operand(0)) // Loop + continue + + case syntax.Lazybranchmark | syntax.Back2: + // The lazy loop has failed. We'll do a true backtrack and + // start over before the lazy loop. 
+ r.stackPop() + r.trackPop() + r.stackPush(r.trackPeek()) // Recall old mark + break + + case syntax.Setcount: + r.stackPush2(r.textPos(), r.operand(0)) + r.trackPush() + r.advance(1) + continue + + case syntax.Nullcount: + r.stackPush2(-1, r.operand(0)) + r.trackPush() + r.advance(1) + continue + + case syntax.Setcount | syntax.Back: + r.stackPopN(2) + break + + case syntax.Nullcount | syntax.Back: + r.stackPopN(2) + break + + case syntax.Branchcount: + // r.stackPush: + // 0: Mark + // 1: Count + + r.stackPopN(2) + mark := r.stackPeek() + count := r.stackPeekN(1) + matched := r.textPos() - mark + + if count >= r.operand(1) || (matched == 0 && count >= 0) { // Max loops or empty match -> straight now + r.trackPushNeg2(mark, count) // Save old mark, count + r.advance(2) // Straight + } else { // Nonempty match -> count+loop now + r.trackPush1(mark) // remember mark + r.stackPush2(r.textPos(), count+1) // Make new mark, incr count + r.goTo(r.operand(0)) // Loop + } + continue + + case syntax.Branchcount | syntax.Back: + // r.trackPush: + // 0: Previous mark + // r.stackPush: + // 0: Mark (= current pos, discarded) + // 1: Count + r.trackPop() + r.stackPopN(2) + if r.stackPeekN(1) > 0 { // Positive -> can go straight + r.textto(r.stackPeek()) // Zap to mark + r.trackPushNeg2(r.trackPeek(), r.stackPeekN(1)-1) // Save old mark, old count + r.advance(2) // Straight + continue + } + r.stackPush2(r.trackPeek(), r.stackPeekN(1)-1) // recall old mark, old count + break + + case syntax.Branchcount | syntax.Back2: + // r.trackPush: + // 0: Previous mark + // 1: Previous count + r.trackPopN(2) + r.stackPush2(r.trackPeek(), r.trackPeekN(1)) // Recall old mark, old count + break // Backtrack + + case syntax.Lazybranchcount: + // r.stackPush: + // 0: Mark + // 1: Count + + r.stackPopN(2) + mark := r.stackPeek() + count := r.stackPeekN(1) + + if count < 0 { // Negative count -> loop now + r.trackPushNeg1(mark) // Save old mark + r.stackPush2(r.textPos(), count+1) // Make new mark, incr count + r.goTo(r.operand(0)) // Loop + } else { // Nonneg count -> straight now + r.trackPush3(mark, count, r.textPos()) // Save mark, count, position + r.advance(2) // Straight + } + continue + + case syntax.Lazybranchcount | syntax.Back: + // r.trackPush: + // 0: Mark + // 1: Count + // 2: r.textPos + + r.trackPopN(3) + mark := r.trackPeek() + textpos := r.trackPeekN(2) + + if r.trackPeekN(1) < r.operand(1) && textpos != mark { // Under limit and not empty match -> loop + r.textto(textpos) // Recall position + r.stackPush2(textpos, r.trackPeekN(1)+1) // Make new mark, incr count + r.trackPushNeg1(mark) // Save old mark + r.goTo(r.operand(0)) // Loop + continue + } else { // Max loops or empty match -> backtrack + r.stackPush2(r.trackPeek(), r.trackPeekN(1)) // Recall old mark, count + break // backtrack + } + + case syntax.Lazybranchcount | syntax.Back2: + // r.trackPush: + // 0: Previous mark + // r.stackPush: + // 0: Mark (== current pos, discarded) + // 1: Count + r.trackPop() + r.stackPopN(2) + r.stackPush2(r.trackPeek(), r.stackPeekN(1)-1) // Recall old mark, count + break // Backtrack + + case syntax.Setjump: + r.stackPush2(r.trackpos(), r.crawlpos()) + r.trackPush() + r.advance(0) + continue + + case syntax.Setjump | syntax.Back: + r.stackPopN(2) + break + + case syntax.Backjump: + // r.stackPush: + // 0: Saved trackpos + // 1: r.crawlpos + r.stackPopN(2) + r.trackto(r.stackPeek()) + + for r.crawlpos() != r.stackPeekN(1) { + r.uncapture() + } + + break + + case syntax.Forejump: + // r.stackPush: + // 0: Saved 
trackpos + // 1: r.crawlpos + r.stackPopN(2) + r.trackto(r.stackPeek()) + r.trackPush1(r.stackPeekN(1)) + r.advance(0) + continue + + case syntax.Forejump | syntax.Back: + // r.trackPush: + // 0: r.crawlpos + r.trackPop() + + for r.crawlpos() != r.trackPeek() { + r.uncapture() + } + + break + + case syntax.Bol: + if r.leftchars() > 0 && r.charAt(r.textPos()-1) != '\n' { + break + } + r.advance(0) + continue + + case syntax.Eol: + if r.rightchars() > 0 && r.charAt(r.textPos()) != '\n' { + break + } + r.advance(0) + continue + + case syntax.Boundary: + if !r.isBoundary(r.textPos(), 0, r.runtextend) { + break + } + r.advance(0) + continue + + case syntax.Nonboundary: + if r.isBoundary(r.textPos(), 0, r.runtextend) { + break + } + r.advance(0) + continue + + case syntax.ECMABoundary: + if !r.isECMABoundary(r.textPos(), 0, r.runtextend) { + break + } + r.advance(0) + continue + + case syntax.NonECMABoundary: + if r.isECMABoundary(r.textPos(), 0, r.runtextend) { + break + } + r.advance(0) + continue + + case syntax.Beginning: + if r.leftchars() > 0 { + break + } + r.advance(0) + continue + + case syntax.Start: + if r.textPos() != r.textstart() { + break + } + r.advance(0) + continue + + case syntax.EndZ: + rchars := r.rightchars() + if rchars > 1 { + break + } + // RE2 and EcmaScript define $ as "asserts position at the end of the string" + // PCRE/.NET adds "or before the line terminator right at the end of the string (if any)" + if (r.re.options & (RE2 | ECMAScript)) != 0 { + // RE2/Ecmascript mode + if rchars > 0 { + break + } + } else if rchars == 1 && r.charAt(r.textPos()) != '\n' { + // "regular" mode + break + } + + r.advance(0) + continue + + case syntax.End: + if r.rightchars() > 0 { + break + } + r.advance(0) + continue + + case syntax.One: + if r.forwardchars() < 1 || r.forwardcharnext() != rune(r.operand(0)) { + break + } + + r.advance(1) + continue + + case syntax.Notone: + if r.forwardchars() < 1 || r.forwardcharnext() == rune(r.operand(0)) { + break + } + + r.advance(1) + continue + + case syntax.Set: + + if r.forwardchars() < 1 || !r.code.Sets[r.operand(0)].CharIn(r.forwardcharnext()) { + break + } + + r.advance(1) + continue + + case syntax.Multi: + if !r.runematch(r.code.Strings[r.operand(0)]) { + break + } + + r.advance(1) + continue + + case syntax.Ref: + + capnum := r.operand(0) + + if r.runmatch.isMatched(capnum) { + if !r.refmatch(r.runmatch.matchIndex(capnum), r.runmatch.matchLength(capnum)) { + break + } + } else { + if (r.re.options & ECMAScript) == 0 { + break + } + } + + r.advance(1) + continue + + case syntax.Onerep: + + c := r.operand(1) + + if r.forwardchars() < c { + break + } + + ch := rune(r.operand(0)) + + for c > 0 { + if r.forwardcharnext() != ch { + goto BreakBackward + } + c-- + } + + r.advance(2) + continue + + case syntax.Notonerep: + + c := r.operand(1) + + if r.forwardchars() < c { + break + } + ch := rune(r.operand(0)) + + for c > 0 { + if r.forwardcharnext() == ch { + goto BreakBackward + } + c-- + } + + r.advance(2) + continue + + case syntax.Setrep: + + c := r.operand(1) + + if r.forwardchars() < c { + break + } + + set := r.code.Sets[r.operand(0)] + + for c > 0 { + if !set.CharIn(r.forwardcharnext()) { + goto BreakBackward + } + c-- + } + + r.advance(2) + continue + + case syntax.Oneloop: + + c := r.operand(1) + + if c > r.forwardchars() { + c = r.forwardchars() + } + + ch := rune(r.operand(0)) + i := c + + for ; i > 0; i-- { + if r.forwardcharnext() != ch { + r.backwardnext() + break + } + } + + if c > i { + r.trackPush2(c-i-1, 
r.textPos()-r.bump()) + } + + r.advance(2) + continue + + case syntax.Notoneloop: + + c := r.operand(1) + + if c > r.forwardchars() { + c = r.forwardchars() + } + + ch := rune(r.operand(0)) + i := c + + for ; i > 0; i-- { + if r.forwardcharnext() == ch { + r.backwardnext() + break + } + } + + if c > i { + r.trackPush2(c-i-1, r.textPos()-r.bump()) + } + + r.advance(2) + continue + + case syntax.Setloop: + + c := r.operand(1) + + if c > r.forwardchars() { + c = r.forwardchars() + } + + set := r.code.Sets[r.operand(0)] + i := c + + for ; i > 0; i-- { + if !set.CharIn(r.forwardcharnext()) { + r.backwardnext() + break + } + } + + if c > i { + r.trackPush2(c-i-1, r.textPos()-r.bump()) + } + + r.advance(2) + continue + + case syntax.Oneloop | syntax.Back, syntax.Notoneloop | syntax.Back: + + r.trackPopN(2) + i := r.trackPeek() + pos := r.trackPeekN(1) + + r.textto(pos) + + if i > 0 { + r.trackPush2(i-1, pos-r.bump()) + } + + r.advance(2) + continue + + case syntax.Setloop | syntax.Back: + + r.trackPopN(2) + i := r.trackPeek() + pos := r.trackPeekN(1) + + r.textto(pos) + + if i > 0 { + r.trackPush2(i-1, pos-r.bump()) + } + + r.advance(2) + continue + + case syntax.Onelazy, syntax.Notonelazy: + + c := r.operand(1) + + if c > r.forwardchars() { + c = r.forwardchars() + } + + if c > 0 { + r.trackPush2(c-1, r.textPos()) + } + + r.advance(2) + continue + + case syntax.Setlazy: + + c := r.operand(1) + + if c > r.forwardchars() { + c = r.forwardchars() + } + + if c > 0 { + r.trackPush2(c-1, r.textPos()) + } + + r.advance(2) + continue + + case syntax.Onelazy | syntax.Back: + + r.trackPopN(2) + pos := r.trackPeekN(1) + r.textto(pos) + + if r.forwardcharnext() != rune(r.operand(0)) { + break + } + + i := r.trackPeek() + + if i > 0 { + r.trackPush2(i-1, pos+r.bump()) + } + + r.advance(2) + continue + + case syntax.Notonelazy | syntax.Back: + + r.trackPopN(2) + pos := r.trackPeekN(1) + r.textto(pos) + + if r.forwardcharnext() == rune(r.operand(0)) { + break + } + + i := r.trackPeek() + + if i > 0 { + r.trackPush2(i-1, pos+r.bump()) + } + + r.advance(2) + continue + + case syntax.Setlazy | syntax.Back: + + r.trackPopN(2) + pos := r.trackPeekN(1) + r.textto(pos) + + if !r.code.Sets[r.operand(0)].CharIn(r.forwardcharnext()) { + break + } + + i := r.trackPeek() + + if i > 0 { + r.trackPush2(i-1, pos+r.bump()) + } + + r.advance(2) + continue + + default: + return errors.New("unknown state in regex runner") + } + + BreakBackward: + ; + + // "break Backward" comes here: + r.backtrack() + } +} + +// increase the size of stack and track storage +func (r *runner) ensureStorage() { + if r.runstackpos < r.runtrackcount*4 { + doubleIntSlice(&r.runstack, &r.runstackpos) + } + if r.runtrackpos < r.runtrackcount*4 { + doubleIntSlice(&r.runtrack, &r.runtrackpos) + } +} + +func doubleIntSlice(s *[]int, pos *int) { + oldLen := len(*s) + newS := make([]int, oldLen*2) + + copy(newS[oldLen:], *s) + *pos += oldLen + *s = newS +} + +// Save a number on the longjump unrolling stack +func (r *runner) crawl(i int) { + if r.runcrawlpos == 0 { + doubleIntSlice(&r.runcrawl, &r.runcrawlpos) + } + r.runcrawlpos-- + r.runcrawl[r.runcrawlpos] = i +} + +// Remove a number from the longjump unrolling stack +func (r *runner) popcrawl() int { + val := r.runcrawl[r.runcrawlpos] + r.runcrawlpos++ + return val +} + +// Get the height of the stack +func (r *runner) crawlpos() int { + return len(r.runcrawl) - r.runcrawlpos +} + +func (r *runner) advance(i int) { + r.codepos += (i + 1) + r.setOperator(r.code.Codes[r.codepos]) +} + +func (r *runner) 
goTo(newpos int) { + // when branching backward or in place, ensure storage + if newpos <= r.codepos { + r.ensureStorage() + } + + r.setOperator(r.code.Codes[newpos]) + r.codepos = newpos +} + +func (r *runner) textto(newpos int) { + r.runtextpos = newpos +} + +func (r *runner) trackto(newpos int) { + r.runtrackpos = len(r.runtrack) - newpos +} + +func (r *runner) textstart() int { + return r.runtextstart +} + +func (r *runner) textPos() int { + return r.runtextpos +} + +// push onto the backtracking stack +func (r *runner) trackpos() int { + return len(r.runtrack) - r.runtrackpos +} + +func (r *runner) trackPush() { + r.runtrackpos-- + r.runtrack[r.runtrackpos] = r.codepos +} + +func (r *runner) trackPush1(I1 int) { + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I1 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = r.codepos +} + +func (r *runner) trackPush2(I1, I2 int) { + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I1 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I2 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = r.codepos +} + +func (r *runner) trackPush3(I1, I2, I3 int) { + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I1 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I2 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I3 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = r.codepos +} + +func (r *runner) trackPushNeg1(I1 int) { + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I1 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = -r.codepos +} + +func (r *runner) trackPushNeg2(I1, I2 int) { + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I1 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I2 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = -r.codepos +} + +func (r *runner) backtrack() { + newpos := r.runtrack[r.runtrackpos] + r.runtrackpos++ + + if r.re.Debug() { + if newpos < 0 { + fmt.Printf(" Backtracking (back2) to code position %v\n", -newpos) + } else { + fmt.Printf(" Backtracking to code position %v\n", newpos) + } + } + + if newpos < 0 { + newpos = -newpos + r.setOperator(r.code.Codes[newpos] | syntax.Back2) + } else { + r.setOperator(r.code.Codes[newpos] | syntax.Back) + } + + // When branching backward, ensure storage + if newpos < r.codepos { + r.ensureStorage() + } + + r.codepos = newpos +} + +func (r *runner) setOperator(op int) { + r.caseInsensitive = (0 != (op & syntax.Ci)) + r.rightToLeft = (0 != (op & syntax.Rtl)) + r.operator = syntax.InstOp(op & ^(syntax.Rtl | syntax.Ci)) +} + +func (r *runner) trackPop() { + r.runtrackpos++ +} + +// pop framesize items from the backtracking stack +func (r *runner) trackPopN(framesize int) { + r.runtrackpos += framesize +} + +// Technically we are actually peeking at items already popped. So if you want to +// get and pop the top item from the stack, you do +// r.trackPop(); +// r.trackPeek(); +func (r *runner) trackPeek() int { + return r.runtrack[r.runtrackpos-1] +} + +// get the ith element down on the backtracking stack +func (r *runner) trackPeekN(i int) int { + return r.runtrack[r.runtrackpos-i-1] +} + +// Push onto the grouping stack +func (r *runner) stackPush(I1 int) { + r.runstackpos-- + r.runstack[r.runstackpos] = I1 +} + +func (r *runner) stackPush2(I1, I2 int) { + r.runstackpos-- + r.runstack[r.runstackpos] = I1 + r.runstackpos-- + r.runstack[r.runstackpos] = I2 +} + +func (r *runner) stackPop() { + r.runstackpos++ +} + +// pop framesize items from the grouping stack +func (r *runner) stackPopN(framesize int) { + r.runstackpos += framesize +} + +// Technically we are actually peeking at items already popped. 
So if you want to +// get and pop the top item from the stack, you do +// r.stackPop(); +// r.stackPeek(); +func (r *runner) stackPeek() int { + return r.runstack[r.runstackpos-1] +} + +// get the ith element down on the grouping stack +func (r *runner) stackPeekN(i int) int { + return r.runstack[r.runstackpos-i-1] +} + +func (r *runner) operand(i int) int { + return r.code.Codes[r.codepos+i+1] +} + +func (r *runner) leftchars() int { + return r.runtextpos +} + +func (r *runner) rightchars() int { + return r.runtextend - r.runtextpos +} + +func (r *runner) bump() int { + if r.rightToLeft { + return -1 + } + return 1 +} + +func (r *runner) forwardchars() int { + if r.rightToLeft { + return r.runtextpos + } + return r.runtextend - r.runtextpos +} + +func (r *runner) forwardcharnext() rune { + var ch rune + if r.rightToLeft { + r.runtextpos-- + ch = r.runtext[r.runtextpos] + } else { + ch = r.runtext[r.runtextpos] + r.runtextpos++ + } + + if r.caseInsensitive { + return unicode.ToLower(ch) + } + return ch +} + +func (r *runner) runematch(str []rune) bool { + var pos int + + c := len(str) + if !r.rightToLeft { + if r.runtextend-r.runtextpos < c { + return false + } + + pos = r.runtextpos + c + } else { + if r.runtextpos-0 < c { + return false + } + + pos = r.runtextpos + } + + if !r.caseInsensitive { + for c != 0 { + c-- + pos-- + if str[c] != r.runtext[pos] { + return false + } + } + } else { + for c != 0 { + c-- + pos-- + if str[c] != unicode.ToLower(r.runtext[pos]) { + return false + } + } + } + + if !r.rightToLeft { + pos += len(str) + } + + r.runtextpos = pos + + return true +} + +func (r *runner) refmatch(index, len int) bool { + var c, pos, cmpos int + + if !r.rightToLeft { + if r.runtextend-r.runtextpos < len { + return false + } + + pos = r.runtextpos + len + } else { + if r.runtextpos-0 < len { + return false + } + + pos = r.runtextpos + } + cmpos = index + len + + c = len + + if !r.caseInsensitive { + for c != 0 { + c-- + cmpos-- + pos-- + if r.runtext[cmpos] != r.runtext[pos] { + return false + } + + } + } else { + for c != 0 { + c-- + cmpos-- + pos-- + + if unicode.ToLower(r.runtext[cmpos]) != unicode.ToLower(r.runtext[pos]) { + return false + } + } + } + + if !r.rightToLeft { + pos += len + } + + r.runtextpos = pos + + return true +} + +func (r *runner) backwardnext() { + if r.rightToLeft { + r.runtextpos++ + } else { + r.runtextpos-- + } +} + +func (r *runner) charAt(j int) rune { + return r.runtext[j] +} + +func (r *runner) findFirstChar() bool { + + if 0 != (r.code.Anchors & (syntax.AnchorBeginning | syntax.AnchorStart | syntax.AnchorEndZ | syntax.AnchorEnd)) { + if !r.code.RightToLeft { + if (0 != (r.code.Anchors&syntax.AnchorBeginning) && r.runtextpos > 0) || + (0 != (r.code.Anchors&syntax.AnchorStart) && r.runtextpos > r.runtextstart) { + r.runtextpos = r.runtextend + return false + } + if 0 != (r.code.Anchors&syntax.AnchorEndZ) && r.runtextpos < r.runtextend-1 { + r.runtextpos = r.runtextend - 1 + } else if 0 != (r.code.Anchors&syntax.AnchorEnd) && r.runtextpos < r.runtextend { + r.runtextpos = r.runtextend + } + } else { + if (0 != (r.code.Anchors&syntax.AnchorEnd) && r.runtextpos < r.runtextend) || + (0 != (r.code.Anchors&syntax.AnchorEndZ) && (r.runtextpos < r.runtextend-1 || + (r.runtextpos == r.runtextend-1 && r.charAt(r.runtextpos) != '\n'))) || + (0 != (r.code.Anchors&syntax.AnchorStart) && r.runtextpos < r.runtextstart) { + r.runtextpos = 0 + return false + } + if 0 != (r.code.Anchors&syntax.AnchorBeginning) && r.runtextpos > 0 { + r.runtextpos = 0 + } + } + + if 
r.code.BmPrefix != nil { + return r.code.BmPrefix.IsMatch(r.runtext, r.runtextpos, 0, r.runtextend) + } + + return true // found a valid start or end anchor + } else if r.code.BmPrefix != nil { + r.runtextpos = r.code.BmPrefix.Scan(r.runtext, r.runtextpos, 0, r.runtextend) + + if r.runtextpos == -1 { + if r.code.RightToLeft { + r.runtextpos = 0 + } else { + r.runtextpos = r.runtextend + } + return false + } + + return true + } else if r.code.FcPrefix == nil { + return true + } + + r.rightToLeft = r.code.RightToLeft + r.caseInsensitive = r.code.FcPrefix.CaseInsensitive + + set := r.code.FcPrefix.PrefixSet + if set.IsSingleton() { + ch := set.SingletonChar() + for i := r.forwardchars(); i > 0; i-- { + if ch == r.forwardcharnext() { + r.backwardnext() + return true + } + } + } else { + for i := r.forwardchars(); i > 0; i-- { + n := r.forwardcharnext() + //fmt.Printf("%v in %v: %v\n", string(n), set.String(), set.CharIn(n)) + if set.CharIn(n) { + r.backwardnext() + return true + } + } + } + + return false +} + +func (r *runner) initMatch() { + // Use a hashtable'ed Match object if the capture numbers are sparse + + if r.runmatch == nil { + if r.re.caps != nil { + r.runmatch = newMatchSparse(r.re, r.re.caps, r.re.capsize, r.runtext, r.runtextstart) + } else { + r.runmatch = newMatch(r.re, r.re.capsize, r.runtext, r.runtextstart) + } + } else { + r.runmatch.reset(r.runtext, r.runtextstart) + } + + // note we test runcrawl, because it is the last one to be allocated + // If there is an alloc failure in the middle of the three allocations, + // we may still return to reuse this instance, and we want to behave + // as if the allocations didn't occur. (we used to test _trackcount != 0) + + if r.runcrawl != nil { + r.runtrackpos = len(r.runtrack) + r.runstackpos = len(r.runstack) + r.runcrawlpos = len(r.runcrawl) + return + } + + r.initTrackCount() + + tracksize := r.runtrackcount * 8 + stacksize := r.runtrackcount * 8 + + if tracksize < 32 { + tracksize = 32 + } + if stacksize < 16 { + stacksize = 16 + } + + r.runtrack = make([]int, tracksize) + r.runtrackpos = tracksize + + r.runstack = make([]int, stacksize) + r.runstackpos = stacksize + + r.runcrawl = make([]int, 32) + r.runcrawlpos = 32 +} + +func (r *runner) tidyMatch(quick bool) *Match { + if !quick { + match := r.runmatch + + r.runmatch = nil + + match.tidy(r.runtextpos) + return match + } else { + // send back our match -- it's not leaving the package, so it's safe to not clean it up + // this reduces allocs for frequent calls to the "IsMatch" bool-only functions + return r.runmatch + } +} + +// capture captures a subexpression. Note that the +// capnum used here has already been mapped to a non-sparse +// index (by the code generator RegexWriter). +func (r *runner) capture(capnum, start, end int) { + if end < start { + T := end + end = start + start = T + } + + r.crawl(capnum) + r.runmatch.addMatch(capnum, start, end-start) +} + +// transferCapture captures a subexpression. Note that the +// capnum used here has already been mapped to a non-sparse +// index (by the code generator RegexWriter). 
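// ---- Editor's note (illustrative sketch, not part of the vendored file) ----
// The runner above stores its backtracking track and grouping stack in plain
// []int slices that grow downward from the end: a push decrements the index,
// a pop only moves the index back up, and peek reads the slot just above the
// cursor -- which is why the comments above describe "peeking at items
// already popped". A minimal standalone sketch of that layout (downStack and
// newDownStack are invented names for this example):
package main

import "fmt"

type downStack struct {
	data []int
	pos  int // starts at len(data) and moves toward 0 as items are pushed
}

func newDownStack(n int) *downStack { return &downStack{data: make([]int, n), pos: n} }

func (s *downStack) push(v int) { s.pos--; s.data[s.pos] = v }
func (s *downStack) pop()       { s.pos++ }                // discard the top item
func (s *downStack) peek() int  { return s.data[s.pos-1] } // read the item a pop just moved past

func main() {
	s := newDownStack(8)
	s.push(10)
	s.push(20)
	s.pop()               // "get and pop" is pop() followed by peek(), as described above
	fmt.Println(s.peek()) // 20
}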
+func (r *runner) transferCapture(capnum, uncapnum, start, end int) { + var start2, end2 int + + // these are the two intervals that are cancelling each other + + if end < start { + T := end + end = start + start = T + } + + start2 = r.runmatch.matchIndex(uncapnum) + end2 = start2 + r.runmatch.matchLength(uncapnum) + + // The new capture gets the innermost defined interval + + if start >= end2 { + end = start + start = end2 + } else if end <= start2 { + start = start2 + } else { + if end > end2 { + end = end2 + } + if start2 > start { + start = start2 + } + } + + r.crawl(uncapnum) + r.runmatch.balanceMatch(uncapnum) + + if capnum != -1 { + r.crawl(capnum) + r.runmatch.addMatch(capnum, start, end-start) + } +} + +// revert the last capture +func (r *runner) uncapture() { + capnum := r.popcrawl() + r.runmatch.removeMatch(capnum) +} + +//debug + +func (r *runner) dumpState() { + back := "" + if r.operator&syntax.Back != 0 { + back = " Back" + } + if r.operator&syntax.Back2 != 0 { + back += " Back2" + } + fmt.Printf("Text: %v\nTrack: %v\nStack: %v\n %s%s\n\n", + r.textposDescription(), + r.stackDescription(r.runtrack, r.runtrackpos), + r.stackDescription(r.runstack, r.runstackpos), + r.code.OpcodeDescription(r.codepos), + back) +} + +func (r *runner) stackDescription(a []int, index int) string { + buf := &bytes.Buffer{} + + fmt.Fprintf(buf, "%v/%v", len(a)-index, len(a)) + if buf.Len() < 8 { + buf.WriteString(strings.Repeat(" ", 8-buf.Len())) + } + + buf.WriteRune('(') + for i := index; i < len(a); i++ { + if i > index { + buf.WriteRune(' ') + } + + buf.WriteString(strconv.Itoa(a[i])) + } + + buf.WriteRune(')') + + return buf.String() +} + +func (r *runner) textposDescription() string { + buf := &bytes.Buffer{} + + buf.WriteString(strconv.Itoa(r.runtextpos)) + + if buf.Len() < 8 { + buf.WriteString(strings.Repeat(" ", 8-buf.Len())) + } + + if r.runtextpos > 0 { + buf.WriteString(syntax.CharDescription(r.runtext[r.runtextpos-1])) + } else { + buf.WriteRune('^') + } + + buf.WriteRune('>') + + for i := r.runtextpos; i < r.runtextend; i++ { + buf.WriteString(syntax.CharDescription(r.runtext[i])) + } + if buf.Len() >= 64 { + buf.Truncate(61) + buf.WriteString("...") + } else { + buf.WriteRune('$') + } + + return buf.String() +} + +// decide whether the pos +// at the specified index is a boundary or not. It's just not worth +// emitting inline code for this logic. +func (r *runner) isBoundary(index, startpos, endpos int) bool { + return (index > startpos && syntax.IsWordChar(r.runtext[index-1])) != + (index < endpos && syntax.IsWordChar(r.runtext[index])) +} + +func (r *runner) isECMABoundary(index, startpos, endpos int) bool { + return (index > startpos && syntax.IsECMAWordChar(r.runtext[index-1])) != + (index < endpos && syntax.IsECMAWordChar(r.runtext[index])) +} + +// this seems like a comment to justify randomly picking 1000 :-P +// We have determined this value in a series of experiments where x86 retail +// builds (ono-lab-optimized) were run on different pattern/input pairs. Larger values +// of TimeoutCheckFrequency did not tend to increase performance; smaller values +// of TimeoutCheckFrequency tended to slow down the execution. 
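// ---- Editor's note (illustrative sketch, not part of the vendored file) ----
// isBoundary above reports a word boundary exactly where "is a word char"
// differs between the rune to the left of the position and the rune at the
// position (an XOR of the two sides). A standalone sketch of that shape; the
// isWord helper here is a simplified stand-in for syntax.IsWordChar, which
// additionally covers Mn, Pc, ZWJ and ZWNJ:
package main

import (
	"fmt"
	"unicode"
)

func isWord(r rune) bool { return unicode.IsLetter(r) || unicode.IsDigit(r) || r == '_' }

func isBoundary(text []rune, index int) bool {
	return (index > 0 && isWord(text[index-1])) != (index < len(text) && isWord(text[index]))
}

func main() {
	text := []rune("go, run")
	for i := 0; i <= len(text); i++ {
		fmt.Printf("pos %d boundary=%v\n", i, isBoundary(text, i))
	}
	// Boundaries fall at positions 0, 2, 4 and 7 (start/end of "go" and "run").
}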
+const timeoutCheckFrequency int = 1000 + +func (r *runner) startTimeoutWatch() { + if r.ignoreTimeout { + return + } + + r.timeoutChecksToSkip = timeoutCheckFrequency + r.timeoutAt = time.Now().Add(r.timeout) +} + +func (r *runner) checkTimeout() error { + if r.ignoreTimeout { + return nil + } + r.timeoutChecksToSkip-- + if r.timeoutChecksToSkip != 0 { + return nil + } + + r.timeoutChecksToSkip = timeoutCheckFrequency + return r.doCheckTimeout() +} + +func (r *runner) doCheckTimeout() error { + current := time.Now() + + if current.Before(r.timeoutAt) { + return nil + } + + if r.re.Debug() { + //Debug.WriteLine("") + //Debug.WriteLine("RegEx match timeout occurred!") + //Debug.WriteLine("Specified timeout: " + TimeSpan.FromMilliseconds(_timeout).ToString()) + //Debug.WriteLine("Timeout check frequency: " + TimeoutCheckFrequency) + //Debug.WriteLine("Search pattern: " + _runregex._pattern) + //Debug.WriteLine("Input: " + r.runtext) + //Debug.WriteLine("About to throw RegexMatchTimeoutException.") + } + + return fmt.Errorf("match timeout after %v on input `%v`", r.timeout, string(r.runtext)) +} + +func (r *runner) initTrackCount() { + r.runtrackcount = r.code.TrackCount +} + +// getRunner returns a run to use for matching re. +// It uses the re's runner cache if possible, to avoid +// unnecessary allocation. +func (re *Regexp) getRunner() *runner { + re.muRun.Lock() + if n := len(re.runner); n > 0 { + z := re.runner[n-1] + re.runner = re.runner[:n-1] + re.muRun.Unlock() + return z + } + re.muRun.Unlock() + z := &runner{ + re: re, + code: re.code, + } + return z +} + +// putRunner returns a runner to the re's cache. +// There is no attempt to limit the size of the cache, so it will +// grow to the maximum number of simultaneous matches +// run using re. (The cache empties when re gets garbage collected.) 
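// ---- Editor's note (illustrative sketch, not part of the vendored file) ----
// getRunner above and putRunner below form a mutex-guarded, unbounded free
// list so concurrent matches on one Regexp can reuse runner allocations. The
// same pattern in miniature (pool and item are invented names for this sketch):
package main

import (
	"fmt"
	"sync"
)

type item struct{ scratch []int }

type pool struct {
	mu   sync.Mutex
	free []*item
}

func (p *pool) get() *item {
	p.mu.Lock()
	if n := len(p.free); n > 0 {
		it := p.free[n-1]
		p.free = p.free[:n-1]
		p.mu.Unlock()
		return it
	}
	p.mu.Unlock()
	return &item{scratch: make([]int, 32)} // allocate only on a cache miss
}

func (p *pool) put(it *item) {
	p.mu.Lock()
	p.free = append(p.free, it)
	p.mu.Unlock()
}

func main() {
	var p pool
	a := p.get()
	p.put(a)
	fmt.Println(p.get() == a) // true: the second get reuses the cached item
}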
+func (re *Regexp) putRunner(r *runner) { + re.muRun.Lock() + re.runner = append(re.runner, r) + re.muRun.Unlock() +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/charclass.go b/vendor/github.com/dlclark/regexp2/syntax/charclass.go new file mode 100644 index 000000000..53974d101 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/charclass.go @@ -0,0 +1,854 @@ +package syntax + +import ( + "bytes" + "encoding/binary" + "fmt" + "sort" + "unicode" + "unicode/utf8" +) + +// CharSet combines start-end rune ranges and unicode categories representing a set of characters +type CharSet struct { + ranges []singleRange + categories []category + sub *CharSet //optional subtractor + negate bool + anything bool +} + +type category struct { + negate bool + cat string +} + +type singleRange struct { + first rune + last rune +} + +const ( + spaceCategoryText = " " + wordCategoryText = "W" +) + +var ( + ecmaSpace = []rune{0x0009, 0x000e, 0x0020, 0x0021, 0x00a0, 0x00a1, 0x1680, 0x1681, 0x2000, 0x200b, 0x2028, 0x202a, 0x202f, 0x2030, 0x205f, 0x2060, 0x3000, 0x3001, 0xfeff, 0xff00} + ecmaWord = []rune{0x0030, 0x003a, 0x0041, 0x005b, 0x005f, 0x0060, 0x0061, 0x007b} + ecmaDigit = []rune{0x0030, 0x003a} +) + +var ( + AnyClass = getCharSetFromOldString([]rune{0}, false) + ECMAAnyClass = getCharSetFromOldString([]rune{0, 0x000a, 0x000b, 0x000d, 0x000e}, false) + NoneClass = getCharSetFromOldString(nil, false) + ECMAWordClass = getCharSetFromOldString(ecmaWord, false) + NotECMAWordClass = getCharSetFromOldString(ecmaWord, true) + ECMASpaceClass = getCharSetFromOldString(ecmaSpace, false) + NotECMASpaceClass = getCharSetFromOldString(ecmaSpace, true) + ECMADigitClass = getCharSetFromOldString(ecmaDigit, false) + NotECMADigitClass = getCharSetFromOldString(ecmaDigit, true) + + WordClass = getCharSetFromCategoryString(false, false, wordCategoryText) + NotWordClass = getCharSetFromCategoryString(true, false, wordCategoryText) + SpaceClass = getCharSetFromCategoryString(false, false, spaceCategoryText) + NotSpaceClass = getCharSetFromCategoryString(true, false, spaceCategoryText) + DigitClass = getCharSetFromCategoryString(false, false, "Nd") + NotDigitClass = getCharSetFromCategoryString(false, true, "Nd") +) + +var unicodeCategories = func() map[string]*unicode.RangeTable { + retVal := make(map[string]*unicode.RangeTable) + for k, v := range unicode.Scripts { + retVal[k] = v + } + for k, v := range unicode.Categories { + retVal[k] = v + } + for k, v := range unicode.Properties { + retVal[k] = v + } + return retVal +}() + +func getCharSetFromCategoryString(negateSet bool, negateCat bool, cats ...string) func() *CharSet { + if negateCat && negateSet { + panic("BUG! 
You should only negate the set OR the category in a constant setup, but not both") + } + + c := CharSet{negate: negateSet} + + c.categories = make([]category, len(cats)) + for i, cat := range cats { + c.categories[i] = category{cat: cat, negate: negateCat} + } + return func() *CharSet { + //make a copy each time + local := c + //return that address + return &local + } +} + +func getCharSetFromOldString(setText []rune, negate bool) func() *CharSet { + c := CharSet{} + if len(setText) > 0 { + fillFirst := false + l := len(setText) + if negate { + if setText[0] == 0 { + setText = setText[1:] + } else { + l++ + fillFirst = true + } + } + + if l%2 == 0 { + c.ranges = make([]singleRange, l/2) + } else { + c.ranges = make([]singleRange, l/2+1) + } + + first := true + if fillFirst { + c.ranges[0] = singleRange{first: 0} + first = false + } + + i := 0 + for _, r := range setText { + if first { + // lower bound in a new range + c.ranges[i] = singleRange{first: r} + first = false + } else { + c.ranges[i].last = r - 1 + i++ + first = true + } + } + if !first { + c.ranges[i].last = utf8.MaxRune + } + } + + return func() *CharSet { + local := c + return &local + } +} + +// Copy makes a deep copy to prevent accidental mutation of a set +func (c CharSet) Copy() CharSet { + ret := CharSet{ + anything: c.anything, + negate: c.negate, + } + + ret.ranges = append(ret.ranges, c.ranges...) + ret.categories = append(ret.categories, c.categories...) + + if c.sub != nil { + sub := c.sub.Copy() + ret.sub = &sub + } + + return ret +} + +// gets a human-readable description for a set string +func (c CharSet) String() string { + buf := &bytes.Buffer{} + buf.WriteRune('[') + + if c.IsNegated() { + buf.WriteRune('^') + } + + for _, r := range c.ranges { + + buf.WriteString(CharDescription(r.first)) + if r.first != r.last { + if r.last-r.first != 1 { + //groups that are 1 char apart skip the dash + buf.WriteRune('-') + } + buf.WriteString(CharDescription(r.last)) + } + } + + for _, c := range c.categories { + buf.WriteString(c.String()) + } + + if c.sub != nil { + buf.WriteRune('-') + buf.WriteString(c.sub.String()) + } + + buf.WriteRune(']') + + return buf.String() +} + +// mapHashFill converts a charset into a buffer for use in maps +func (c CharSet) mapHashFill(buf *bytes.Buffer) { + if c.negate { + buf.WriteByte(0) + } else { + buf.WriteByte(1) + } + + binary.Write(buf, binary.LittleEndian, len(c.ranges)) + binary.Write(buf, binary.LittleEndian, len(c.categories)) + for _, r := range c.ranges { + buf.WriteRune(r.first) + buf.WriteRune(r.last) + } + for _, ct := range c.categories { + buf.WriteString(ct.cat) + if ct.negate { + buf.WriteByte(1) + } else { + buf.WriteByte(0) + } + } + + if c.sub != nil { + c.sub.mapHashFill(buf) + } +} + +// CharIn returns true if the rune is in our character set (either ranges or categories). +// It handles negations and subtracted sub-charsets. 
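// ---- Editor's note (illustrative sketch, not part of the vendored file) ----
// CharIn below walks the sorted [first,last] rune ranges, then the Unicode
// categories, applies the set-level negation, and finally consults the
// optional subtracted set. A standalone sketch of just the range walk plus
// negation (runeRange and inRanges are invented names for this example):
package main

import "fmt"

type runeRange struct{ first, last rune }

func inRanges(ranges []runeRange, ch rune, negate bool) bool {
	in := false
	for _, r := range ranges {
		if ch < r.first {
			continue
		}
		if ch <= r.last {
			in = true
			break
		}
	}
	if negate {
		return !in
	}
	return in
}

func main() {
	digits := []runeRange{{'0', '9'}}
	fmt.Println(inRanges(digits, '7', false)) // true
	fmt.Println(inRanges(digits, 'x', true))  // true: a negated set matches non-digits
}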
+func (c CharSet) CharIn(ch rune) bool { + val := false + // in s && !s.subtracted + + //check ranges + for _, r := range c.ranges { + if ch < r.first { + continue + } + if ch <= r.last { + val = true + break + } + } + + //check categories if we haven't already found a range + if !val && len(c.categories) > 0 { + for _, ct := range c.categories { + // special categories...then unicode + if ct.cat == spaceCategoryText { + if unicode.IsSpace(ch) { + // we found a space so we're done + // negate means this is a "bad" thing + val = !ct.negate + break + } else if ct.negate { + val = true + break + } + } else if ct.cat == wordCategoryText { + if IsWordChar(ch) { + val = !ct.negate + break + } else if ct.negate { + val = true + break + } + } else if unicode.Is(unicodeCategories[ct.cat], ch) { + // if we're in this unicode category then we're done + // if negate=true on this category then we "failed" our test + // otherwise we're good that we found it + val = !ct.negate + break + } else if ct.negate { + val = true + break + } + } + } + + // negate the whole char set + if c.negate { + val = !val + } + + // get subtracted recurse + if val && c.sub != nil { + val = !c.sub.CharIn(ch) + } + + //log.Printf("Char '%v' in %v == %v", string(ch), c.String(), val) + return val +} + +func (c category) String() string { + switch c.cat { + case spaceCategoryText: + if c.negate { + return "\\S" + } + return "\\s" + case wordCategoryText: + if c.negate { + return "\\W" + } + return "\\w" + } + if _, ok := unicodeCategories[c.cat]; ok { + + if c.negate { + return "\\P{" + c.cat + "}" + } + return "\\p{" + c.cat + "}" + } + return "Unknown category: " + c.cat +} + +// CharDescription Produces a human-readable description for a single character. +func CharDescription(ch rune) string { + /*if ch == '\\' { + return "\\\\" + } + + if ch > ' ' && ch <= '~' { + return string(ch) + } else if ch == '\n' { + return "\\n" + } else if ch == ' ' { + return "\\ " + }*/ + + b := &bytes.Buffer{} + escape(b, ch, false) //fmt.Sprintf("%U", ch) + return b.String() +} + +// According to UTS#18 Unicode Regular Expressions (http://www.unicode.org/reports/tr18/) +// RL 1.4 Simple Word Boundaries The class of includes all Alphabetic +// values from the Unicode character database, from UnicodeData.txt [UData], plus the U+200C +// ZERO WIDTH NON-JOINER and U+200D ZERO WIDTH JOINER. +func IsWordChar(r rune) bool { + //"L", "Mn", "Nd", "Pc" + return unicode.In(r, + unicode.Categories["L"], unicode.Categories["Mn"], + unicode.Categories["Nd"], unicode.Categories["Pc"]) || r == '\u200D' || r == '\u200C' + //return 'A' <= r && r <= 'Z' || 'a' <= r && r <= 'z' || '0' <= r && r <= '9' || r == '_' +} + +func IsECMAWordChar(r rune) bool { + return unicode.In(r, + unicode.Categories["L"], unicode.Categories["Mn"], + unicode.Categories["Nd"], unicode.Categories["Pc"]) + + //return 'A' <= r && r <= 'Z' || 'a' <= r && r <= 'z' || '0' <= r && r <= '9' || r == '_' +} + +// SingletonChar will return the char from the first range without validation. 
+// It assumes you have checked for IsSingleton or IsSingletonInverse and will panic given bad input +func (c CharSet) SingletonChar() rune { + return c.ranges[0].first +} + +func (c CharSet) IsSingleton() bool { + return !c.negate && //negated is multiple chars + len(c.categories) == 0 && len(c.ranges) == 1 && // multiple ranges and unicode classes represent multiple chars + c.sub == nil && // subtraction means we've got multiple chars + c.ranges[0].first == c.ranges[0].last // first and last equal means we're just 1 char +} + +func (c CharSet) IsSingletonInverse() bool { + return c.negate && //same as above, but requires negated + len(c.categories) == 0 && len(c.ranges) == 1 && // multiple ranges and unicode classes represent multiple chars + c.sub == nil && // subtraction means we've got multiple chars + c.ranges[0].first == c.ranges[0].last // first and last equal means we're just 1 char +} + +func (c CharSet) IsMergeable() bool { + return !c.IsNegated() && !c.HasSubtraction() +} + +func (c CharSet) IsNegated() bool { + return c.negate +} + +func (c CharSet) HasSubtraction() bool { + return c.sub != nil +} + +func (c CharSet) IsEmpty() bool { + return len(c.ranges) == 0 && len(c.categories) == 0 && c.sub == nil +} + +func (c *CharSet) addDigit(ecma, negate bool, pattern string) { + if ecma { + if negate { + c.addRanges(NotECMADigitClass().ranges) + } else { + c.addRanges(ECMADigitClass().ranges) + } + } else { + c.addCategories(category{cat: "Nd", negate: negate}) + } +} + +func (c *CharSet) addChar(ch rune) { + c.addRange(ch, ch) +} + +func (c *CharSet) addSpace(ecma, negate bool) { + if ecma { + if negate { + c.addRanges(NotECMASpaceClass().ranges) + } else { + c.addRanges(ECMASpaceClass().ranges) + } + } else { + c.addCategories(category{cat: spaceCategoryText, negate: negate}) + } +} + +func (c *CharSet) addWord(ecma, negate bool) { + if ecma { + if negate { + c.addRanges(NotECMAWordClass().ranges) + } else { + c.addRanges(ECMAWordClass().ranges) + } + } else { + c.addCategories(category{cat: wordCategoryText, negate: negate}) + } +} + +// Add set ranges and categories into ours -- no deduping or anything +func (c *CharSet) addSet(set CharSet) { + if c.anything { + return + } + if set.anything { + c.makeAnything() + return + } + // just append here to prevent double-canon + c.ranges = append(c.ranges, set.ranges...) + c.addCategories(set.categories...) + c.canonicalize() +} + +func (c *CharSet) makeAnything() { + c.anything = true + c.categories = []category{} + c.ranges = AnyClass().ranges +} + +func (c *CharSet) addCategories(cats ...category) { + // don't add dupes and remove positive+negative + if c.anything { + // if we've had a previous positive+negative group then + // just return, we're as broad as we can get + return + } + + for _, ct := range cats { + found := false + for _, ct2 := range c.categories { + if ct.cat == ct2.cat { + if ct.negate != ct2.negate { + // oposite negations...this mean we just + // take us as anything and move on + c.makeAnything() + return + } + found = true + break + } + } + + if !found { + c.categories = append(c.categories, ct) + } + } +} + +// Merges new ranges to our own +func (c *CharSet) addRanges(ranges []singleRange) { + if c.anything { + return + } + c.ranges = append(c.ranges, ranges...) 
+ c.canonicalize() +} + +// Merges everything but the new ranges into our own +func (c *CharSet) addNegativeRanges(ranges []singleRange) { + if c.anything { + return + } + + var hi rune + + // convert incoming ranges into opposites, assume they are in order + for _, r := range ranges { + if hi < r.first { + c.ranges = append(c.ranges, singleRange{hi, r.first - 1}) + } + hi = r.last + 1 + } + + if hi < utf8.MaxRune { + c.ranges = append(c.ranges, singleRange{hi, utf8.MaxRune}) + } + + c.canonicalize() +} + +func isValidUnicodeCat(catName string) bool { + _, ok := unicodeCategories[catName] + return ok +} + +func (c *CharSet) addCategory(categoryName string, negate, caseInsensitive bool, pattern string) { + if !isValidUnicodeCat(categoryName) { + // unknown unicode category, script, or property "blah" + panic(fmt.Errorf("Unknown unicode category, script, or property '%v'", categoryName)) + + } + + if caseInsensitive && (categoryName == "Ll" || categoryName == "Lu" || categoryName == "Lt") { + // when RegexOptions.IgnoreCase is specified then {Ll} {Lu} and {Lt} cases should all match + c.addCategories( + category{cat: "Ll", negate: negate}, + category{cat: "Lu", negate: negate}, + category{cat: "Lt", negate: negate}) + } + c.addCategories(category{cat: categoryName, negate: negate}) +} + +func (c *CharSet) addSubtraction(sub *CharSet) { + c.sub = sub +} + +func (c *CharSet) addRange(chMin, chMax rune) { + c.ranges = append(c.ranges, singleRange{first: chMin, last: chMax}) + c.canonicalize() +} + +func (c *CharSet) addNamedASCII(name string, negate bool) bool { + var rs []singleRange + + switch name { + case "alnum": + rs = []singleRange{singleRange{'0', '9'}, singleRange{'A', 'Z'}, singleRange{'a', 'z'}} + case "alpha": + rs = []singleRange{singleRange{'A', 'Z'}, singleRange{'a', 'z'}} + case "ascii": + rs = []singleRange{singleRange{0, 0x7f}} + case "blank": + rs = []singleRange{singleRange{'\t', '\t'}, singleRange{' ', ' '}} + case "cntrl": + rs = []singleRange{singleRange{0, 0x1f}, singleRange{0x7f, 0x7f}} + case "digit": + c.addDigit(false, negate, "") + case "graph": + rs = []singleRange{singleRange{'!', '~'}} + case "lower": + rs = []singleRange{singleRange{'a', 'z'}} + case "print": + rs = []singleRange{singleRange{' ', '~'}} + case "punct": //[!-/:-@[-`{-~] + rs = []singleRange{singleRange{'!', '/'}, singleRange{':', '@'}, singleRange{'[', '`'}, singleRange{'{', '~'}} + case "space": + c.addSpace(true, negate) + case "upper": + rs = []singleRange{singleRange{'A', 'Z'}} + case "word": + c.addWord(true, negate) + case "xdigit": + rs = []singleRange{singleRange{'0', '9'}, singleRange{'A', 'F'}, singleRange{'a', 'f'}} + default: + return false + } + + if len(rs) > 0 { + if negate { + c.addNegativeRanges(rs) + } else { + c.addRanges(rs) + } + } + + return true +} + +type singleRangeSorter []singleRange + +func (p singleRangeSorter) Len() int { return len(p) } +func (p singleRangeSorter) Less(i, j int) bool { return p[i].first < p[j].first } +func (p singleRangeSorter) Swap(i, j int) { p[i], p[j] = p[j], p[i] } + +// Logic to reduce a character class to a unique, sorted form. 
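// ---- Editor's note (illustrative sketch, not part of the vendored file) ----
// canonicalize below sorts the ranges by their first rune and folds together
// any ranges that overlap or abut (next.first <= current.last+1). A compact
// standalone version of that merge step (mergeRanges is an invented name and
// the loop is structured differently from the vendored code):
package main

import (
	"fmt"
	"sort"
)

type runeRange struct{ first, last rune }

func mergeRanges(in []runeRange) []runeRange {
	if len(in) < 2 {
		return in
	}
	sort.Slice(in, func(i, j int) bool { return in[i].first < in[j].first })
	out := in[:1]
	for _, r := range in[1:] {
		cur := &out[len(out)-1]
		if r.first <= cur.last+1 { // overlapping or abutting: extend the current range
			if r.last > cur.last {
				cur.last = r.last
			}
			continue
		}
		out = append(out, r)
	}
	return out
}

func main() {
	got := mergeRanges([]runeRange{{'a', 'f'}, {'e', 'k'}, {'m', 'z'}})
	fmt.Println(len(got), string(got[0].first), string(got[0].last)) // 2 a k
}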
+func (c *CharSet) canonicalize() { + var i, j int + var last rune + + // + // Find and eliminate overlapping or abutting ranges + // + + if len(c.ranges) > 1 { + sort.Sort(singleRangeSorter(c.ranges)) + + done := false + + for i, j = 1, 0; ; i++ { + for last = c.ranges[j].last; ; i++ { + if i == len(c.ranges) || last == utf8.MaxRune { + done = true + break + } + + CurrentRange := c.ranges[i] + if CurrentRange.first > last+1 { + break + } + + if last < CurrentRange.last { + last = CurrentRange.last + } + } + + c.ranges[j] = singleRange{first: c.ranges[j].first, last: last} + + j++ + + if done { + break + } + + if j < i { + c.ranges[j] = c.ranges[i] + } + } + + c.ranges = append(c.ranges[:j], c.ranges[len(c.ranges):]...) + } +} + +// Adds to the class any lowercase versions of characters already +// in the class. Used for case-insensitivity. +func (c *CharSet) addLowercase() { + if c.anything { + return + } + toAdd := []singleRange{} + for i := 0; i < len(c.ranges); i++ { + r := c.ranges[i] + if r.first == r.last { + lower := unicode.ToLower(r.first) + c.ranges[i] = singleRange{first: lower, last: lower} + } else { + toAdd = append(toAdd, r) + } + } + + for _, r := range toAdd { + c.addLowercaseRange(r.first, r.last) + } + c.canonicalize() +} + +/************************************************************************** + Let U be the set of Unicode character values and let L be the lowercase + function, mapping from U to U. To perform case insensitive matching of + character sets, we need to be able to map an interval I in U, say + + I = [chMin, chMax] = { ch : chMin <= ch <= chMax } + + to a set A such that A contains L(I) and A is contained in the union of + I and L(I). + + The table below partitions U into intervals on which L is non-decreasing. + Thus, for any interval J = [a, b] contained in one of these intervals, + L(J) is contained in [L(a), L(b)]. + + It is also true that for any such J, [L(a), L(b)] is contained in the + union of J and L(J). This does not follow from L being non-decreasing on + these intervals. It follows from the nature of the L on each interval. + On each interval, L has one of the following forms: + + (1) L(ch) = constant (LowercaseSet) + (2) L(ch) = ch + offset (LowercaseAdd) + (3) L(ch) = ch | 1 (LowercaseBor) + (4) L(ch) = ch + (ch & 1) (LowercaseBad) + + It is easy to verify that for any of these forms [L(a), L(b)] is + contained in the union of [a, b] and L([a, b]). +***************************************************************************/ + +const ( + LowercaseSet = 0 // Set to arg. + LowercaseAdd = 1 // Add arg. + LowercaseBor = 2 // Bitwise or with 1. + LowercaseBad = 3 // Bitwise and with 1 and add original. 
+) + +type lcMap struct { + chMin, chMax rune + op, data int32 +} + +var lcTable = []lcMap{ + lcMap{'\u0041', '\u005A', LowercaseAdd, 32}, + lcMap{'\u00C0', '\u00DE', LowercaseAdd, 32}, + lcMap{'\u0100', '\u012E', LowercaseBor, 0}, + lcMap{'\u0130', '\u0130', LowercaseSet, 0x0069}, + lcMap{'\u0132', '\u0136', LowercaseBor, 0}, + lcMap{'\u0139', '\u0147', LowercaseBad, 0}, + lcMap{'\u014A', '\u0176', LowercaseBor, 0}, + lcMap{'\u0178', '\u0178', LowercaseSet, 0x00FF}, + lcMap{'\u0179', '\u017D', LowercaseBad, 0}, + lcMap{'\u0181', '\u0181', LowercaseSet, 0x0253}, + lcMap{'\u0182', '\u0184', LowercaseBor, 0}, + lcMap{'\u0186', '\u0186', LowercaseSet, 0x0254}, + lcMap{'\u0187', '\u0187', LowercaseSet, 0x0188}, + lcMap{'\u0189', '\u018A', LowercaseAdd, 205}, + lcMap{'\u018B', '\u018B', LowercaseSet, 0x018C}, + lcMap{'\u018E', '\u018E', LowercaseSet, 0x01DD}, + lcMap{'\u018F', '\u018F', LowercaseSet, 0x0259}, + lcMap{'\u0190', '\u0190', LowercaseSet, 0x025B}, + lcMap{'\u0191', '\u0191', LowercaseSet, 0x0192}, + lcMap{'\u0193', '\u0193', LowercaseSet, 0x0260}, + lcMap{'\u0194', '\u0194', LowercaseSet, 0x0263}, + lcMap{'\u0196', '\u0196', LowercaseSet, 0x0269}, + lcMap{'\u0197', '\u0197', LowercaseSet, 0x0268}, + lcMap{'\u0198', '\u0198', LowercaseSet, 0x0199}, + lcMap{'\u019C', '\u019C', LowercaseSet, 0x026F}, + lcMap{'\u019D', '\u019D', LowercaseSet, 0x0272}, + lcMap{'\u019F', '\u019F', LowercaseSet, 0x0275}, + lcMap{'\u01A0', '\u01A4', LowercaseBor, 0}, + lcMap{'\u01A7', '\u01A7', LowercaseSet, 0x01A8}, + lcMap{'\u01A9', '\u01A9', LowercaseSet, 0x0283}, + lcMap{'\u01AC', '\u01AC', LowercaseSet, 0x01AD}, + lcMap{'\u01AE', '\u01AE', LowercaseSet, 0x0288}, + lcMap{'\u01AF', '\u01AF', LowercaseSet, 0x01B0}, + lcMap{'\u01B1', '\u01B2', LowercaseAdd, 217}, + lcMap{'\u01B3', '\u01B5', LowercaseBad, 0}, + lcMap{'\u01B7', '\u01B7', LowercaseSet, 0x0292}, + lcMap{'\u01B8', '\u01B8', LowercaseSet, 0x01B9}, + lcMap{'\u01BC', '\u01BC', LowercaseSet, 0x01BD}, + lcMap{'\u01C4', '\u01C5', LowercaseSet, 0x01C6}, + lcMap{'\u01C7', '\u01C8', LowercaseSet, 0x01C9}, + lcMap{'\u01CA', '\u01CB', LowercaseSet, 0x01CC}, + lcMap{'\u01CD', '\u01DB', LowercaseBad, 0}, + lcMap{'\u01DE', '\u01EE', LowercaseBor, 0}, + lcMap{'\u01F1', '\u01F2', LowercaseSet, 0x01F3}, + lcMap{'\u01F4', '\u01F4', LowercaseSet, 0x01F5}, + lcMap{'\u01FA', '\u0216', LowercaseBor, 0}, + lcMap{'\u0386', '\u0386', LowercaseSet, 0x03AC}, + lcMap{'\u0388', '\u038A', LowercaseAdd, 37}, + lcMap{'\u038C', '\u038C', LowercaseSet, 0x03CC}, + lcMap{'\u038E', '\u038F', LowercaseAdd, 63}, + lcMap{'\u0391', '\u03AB', LowercaseAdd, 32}, + lcMap{'\u03E2', '\u03EE', LowercaseBor, 0}, + lcMap{'\u0401', '\u040F', LowercaseAdd, 80}, + lcMap{'\u0410', '\u042F', LowercaseAdd, 32}, + lcMap{'\u0460', '\u0480', LowercaseBor, 0}, + lcMap{'\u0490', '\u04BE', LowercaseBor, 0}, + lcMap{'\u04C1', '\u04C3', LowercaseBad, 0}, + lcMap{'\u04C7', '\u04C7', LowercaseSet, 0x04C8}, + lcMap{'\u04CB', '\u04CB', LowercaseSet, 0x04CC}, + lcMap{'\u04D0', '\u04EA', LowercaseBor, 0}, + lcMap{'\u04EE', '\u04F4', LowercaseBor, 0}, + lcMap{'\u04F8', '\u04F8', LowercaseSet, 0x04F9}, + lcMap{'\u0531', '\u0556', LowercaseAdd, 48}, + lcMap{'\u10A0', '\u10C5', LowercaseAdd, 48}, + lcMap{'\u1E00', '\u1EF8', LowercaseBor, 0}, + lcMap{'\u1F08', '\u1F0F', LowercaseAdd, -8}, + lcMap{'\u1F18', '\u1F1F', LowercaseAdd, -8}, + lcMap{'\u1F28', '\u1F2F', LowercaseAdd, -8}, + lcMap{'\u1F38', '\u1F3F', LowercaseAdd, -8}, + lcMap{'\u1F48', '\u1F4D', LowercaseAdd, -8}, + lcMap{'\u1F59', '\u1F59', LowercaseSet, 
0x1F51}, + lcMap{'\u1F5B', '\u1F5B', LowercaseSet, 0x1F53}, + lcMap{'\u1F5D', '\u1F5D', LowercaseSet, 0x1F55}, + lcMap{'\u1F5F', '\u1F5F', LowercaseSet, 0x1F57}, + lcMap{'\u1F68', '\u1F6F', LowercaseAdd, -8}, + lcMap{'\u1F88', '\u1F8F', LowercaseAdd, -8}, + lcMap{'\u1F98', '\u1F9F', LowercaseAdd, -8}, + lcMap{'\u1FA8', '\u1FAF', LowercaseAdd, -8}, + lcMap{'\u1FB8', '\u1FB9', LowercaseAdd, -8}, + lcMap{'\u1FBA', '\u1FBB', LowercaseAdd, -74}, + lcMap{'\u1FBC', '\u1FBC', LowercaseSet, 0x1FB3}, + lcMap{'\u1FC8', '\u1FCB', LowercaseAdd, -86}, + lcMap{'\u1FCC', '\u1FCC', LowercaseSet, 0x1FC3}, + lcMap{'\u1FD8', '\u1FD9', LowercaseAdd, -8}, + lcMap{'\u1FDA', '\u1FDB', LowercaseAdd, -100}, + lcMap{'\u1FE8', '\u1FE9', LowercaseAdd, -8}, + lcMap{'\u1FEA', '\u1FEB', LowercaseAdd, -112}, + lcMap{'\u1FEC', '\u1FEC', LowercaseSet, 0x1FE5}, + lcMap{'\u1FF8', '\u1FF9', LowercaseAdd, -128}, + lcMap{'\u1FFA', '\u1FFB', LowercaseAdd, -126}, + lcMap{'\u1FFC', '\u1FFC', LowercaseSet, 0x1FF3}, + lcMap{'\u2160', '\u216F', LowercaseAdd, 16}, + lcMap{'\u24B6', '\u24D0', LowercaseAdd, 26}, + lcMap{'\uFF21', '\uFF3A', LowercaseAdd, 32}, +} + +func (c *CharSet) addLowercaseRange(chMin, chMax rune) { + var i, iMax, iMid int + var chMinT, chMaxT rune + var lc lcMap + + for i, iMax = 0, len(lcTable); i < iMax; { + iMid = (i + iMax) / 2 + if lcTable[iMid].chMax < chMin { + i = iMid + 1 + } else { + iMax = iMid + } + } + + for ; i < len(lcTable); i++ { + lc = lcTable[i] + if lc.chMin > chMax { + return + } + chMinT = lc.chMin + if chMinT < chMin { + chMinT = chMin + } + + chMaxT = lc.chMax + if chMaxT > chMax { + chMaxT = chMax + } + + switch lc.op { + case LowercaseSet: + chMinT = rune(lc.data) + chMaxT = rune(lc.data) + break + case LowercaseAdd: + chMinT += lc.data + chMaxT += lc.data + break + case LowercaseBor: + chMinT |= 1 + chMaxT |= 1 + break + case LowercaseBad: + chMinT += (chMinT & 1) + chMaxT += (chMaxT & 1) + break + } + + if chMinT < chMin || chMaxT > chMax { + c.addRange(chMinT, chMaxT) + } + } +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/code.go b/vendor/github.com/dlclark/regexp2/syntax/code.go new file mode 100644 index 000000000..686e822af --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/code.go @@ -0,0 +1,274 @@ +package syntax + +import ( + "bytes" + "fmt" + "math" +) + +// similar to prog.go in the go regex package...also with comment 'may not belong in this package' + +// File provides operator constants for use by the Builder and the Machine. + +// Implementation notes: +// +// Regexps are built into RegexCodes, which contain an operation array, +// a string table, and some constants. +// +// Each operation is one of the codes below, followed by the integer +// operands specified for each op. +// +// Strings and sets are indices into a string table. + +type InstOp int + +const ( + // lef/back operands description + + Onerep InstOp = 0 // lef,back char,min,max a {n} + Notonerep = 1 // lef,back char,min,max .{n} + Setrep = 2 // lef,back set,min,max [\d]{n} + + Oneloop = 3 // lef,back char,min,max a {,n} + Notoneloop = 4 // lef,back char,min,max .{,n} + Setloop = 5 // lef,back set,min,max [\d]{,n} + + Onelazy = 6 // lef,back char,min,max a {,n}? + Notonelazy = 7 // lef,back char,min,max .{,n}? + Setlazy = 8 // lef,back set,min,max [\d]{,n}? 
+ + One = 9 // lef char a + Notone = 10 // lef char [^a] + Set = 11 // lef set [a-z\s] \w \s \d + + Multi = 12 // lef string abcd + Ref = 13 // lef group \# + + Bol = 14 // ^ + Eol = 15 // $ + Boundary = 16 // \b + Nonboundary = 17 // \B + Beginning = 18 // \A + Start = 19 // \G + EndZ = 20 // \Z + End = 21 // \Z + + Nothing = 22 // Reject! + + // Primitive control structures + + Lazybranch = 23 // back jump straight first + Branchmark = 24 // back jump branch first for loop + Lazybranchmark = 25 // back jump straight first for loop + Nullcount = 26 // back val set counter, null mark + Setcount = 27 // back val set counter, make mark + Branchcount = 28 // back jump,limit branch++ if zero<=c impl group slots + Capsize int // number of impl group slots + FcPrefix *Prefix // the set of candidate first characters (may be null) + BmPrefix *BmPrefix // the fixed prefix string as a Boyer-Moore machine (may be null) + Anchors AnchorLoc // the set of zero-length start anchors (RegexFCD.Bol, etc) + RightToLeft bool // true if right to left +} + +func opcodeBacktracks(op InstOp) bool { + op &= Mask + + switch op { + case Oneloop, Notoneloop, Setloop, Onelazy, Notonelazy, Setlazy, Lazybranch, Branchmark, Lazybranchmark, + Nullcount, Setcount, Branchcount, Lazybranchcount, Setmark, Capturemark, Getmark, Setjump, Backjump, + Forejump, Goto: + return true + + default: + return false + } +} + +func opcodeSize(op InstOp) int { + op &= Mask + + switch op { + case Nothing, Bol, Eol, Boundary, Nonboundary, ECMABoundary, NonECMABoundary, Beginning, Start, EndZ, + End, Nullmark, Setmark, Getmark, Setjump, Backjump, Forejump, Stop: + return 1 + + case One, Notone, Multi, Ref, Testref, Goto, Nullcount, Setcount, Lazybranch, Branchmark, Lazybranchmark, + Prune, Set: + return 2 + + case Capturemark, Branchcount, Lazybranchcount, Onerep, Notonerep, Oneloop, Notoneloop, Onelazy, Notonelazy, + Setlazy, Setrep, Setloop: + return 3 + + default: + panic(fmt.Errorf("Unexpected op code: %v", op)) + } +} + +var codeStr = []string{ + "Onerep", "Notonerep", "Setrep", + "Oneloop", "Notoneloop", "Setloop", + "Onelazy", "Notonelazy", "Setlazy", + "One", "Notone", "Set", + "Multi", "Ref", + "Bol", "Eol", "Boundary", "Nonboundary", "Beginning", "Start", "EndZ", "End", + "Nothing", + "Lazybranch", "Branchmark", "Lazybranchmark", + "Nullcount", "Setcount", "Branchcount", "Lazybranchcount", + "Nullmark", "Setmark", "Capturemark", "Getmark", + "Setjump", "Backjump", "Forejump", "Testref", "Goto", + "Prune", "Stop", + "ECMABoundary", "NonECMABoundary", +} + +func operatorDescription(op InstOp) string { + desc := codeStr[op&Mask] + if (op & Ci) != 0 { + desc += "-Ci" + } + if (op & Rtl) != 0 { + desc += "-Rtl" + } + if (op & Back) != 0 { + desc += "-Back" + } + if (op & Back2) != 0 { + desc += "-Back2" + } + + return desc +} + +// OpcodeDescription is a humman readable string of the specific offset +func (c *Code) OpcodeDescription(offset int) string { + buf := &bytes.Buffer{} + + op := InstOp(c.Codes[offset]) + fmt.Fprintf(buf, "%06d ", offset) + + if opcodeBacktracks(op & Mask) { + buf.WriteString("*") + } else { + buf.WriteString(" ") + } + buf.WriteString(operatorDescription(op)) + buf.WriteString("(") + op &= Mask + + switch op { + case One, Notone, Onerep, Notonerep, Oneloop, Notoneloop, Onelazy, Notonelazy: + buf.WriteString("Ch = ") + buf.WriteString(CharDescription(rune(c.Codes[offset+1]))) + + case Set, Setrep, Setloop, Setlazy: + buf.WriteString("Set = ") + buf.WriteString(c.Sets[c.Codes[offset+1]].String()) + + case Multi: + 
fmt.Fprintf(buf, "String = %s", string(c.Strings[c.Codes[offset+1]])) + + case Ref, Testref: + fmt.Fprintf(buf, "Index = %d", c.Codes[offset+1]) + + case Capturemark: + fmt.Fprintf(buf, "Index = %d", c.Codes[offset+1]) + if c.Codes[offset+2] != -1 { + fmt.Fprintf(buf, ", Unindex = %d", c.Codes[offset+2]) + } + + case Nullcount, Setcount: + fmt.Fprintf(buf, "Value = %d", c.Codes[offset+1]) + + case Goto, Lazybranch, Branchmark, Lazybranchmark, Branchcount, Lazybranchcount: + fmt.Fprintf(buf, "Addr = %d", c.Codes[offset+1]) + } + + switch op { + case Onerep, Notonerep, Oneloop, Notoneloop, Onelazy, Notonelazy, Setrep, Setloop, Setlazy: + buf.WriteString(", Rep = ") + if c.Codes[offset+2] == math.MaxInt32 { + buf.WriteString("inf") + } else { + fmt.Fprintf(buf, "%d", c.Codes[offset+2]) + } + + case Branchcount, Lazybranchcount: + buf.WriteString(", Limit = ") + if c.Codes[offset+2] == math.MaxInt32 { + buf.WriteString("inf") + } else { + fmt.Fprintf(buf, "%d", c.Codes[offset+2]) + } + + } + + buf.WriteString(")") + + return buf.String() +} + +func (c *Code) Dump() string { + buf := &bytes.Buffer{} + + if c.RightToLeft { + fmt.Fprintln(buf, "Direction: right-to-left") + } else { + fmt.Fprintln(buf, "Direction: left-to-right") + } + if c.FcPrefix == nil { + fmt.Fprintln(buf, "Firstchars: n/a") + } else { + fmt.Fprintf(buf, "Firstchars: %v\n", c.FcPrefix.PrefixSet.String()) + } + + if c.BmPrefix == nil { + fmt.Fprintln(buf, "Prefix: n/a") + } else { + fmt.Fprintf(buf, "Prefix: %v\n", Escape(c.BmPrefix.String())) + } + + fmt.Fprintf(buf, "Anchors: %v\n", c.Anchors) + fmt.Fprintln(buf) + + if c.BmPrefix != nil { + fmt.Fprintln(buf, "BoyerMoore:") + fmt.Fprintln(buf, c.BmPrefix.Dump(" ")) + } + for i := 0; i < len(c.Codes); i += opcodeSize(InstOp(c.Codes[i])) { + fmt.Fprintln(buf, c.OpcodeDescription(i)) + } + + return buf.String() +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/escape.go b/vendor/github.com/dlclark/regexp2/syntax/escape.go new file mode 100644 index 000000000..609df1073 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/escape.go @@ -0,0 +1,94 @@ +package syntax + +import ( + "bytes" + "strconv" + "strings" + "unicode" +) + +func Escape(input string) string { + b := &bytes.Buffer{} + for _, r := range input { + escape(b, r, false) + } + return b.String() +} + +const meta = `\.+*?()|[]{}^$# ` + +func escape(b *bytes.Buffer, r rune, force bool) { + if unicode.IsPrint(r) { + if strings.IndexRune(meta, r) >= 0 || force { + b.WriteRune('\\') + } + b.WriteRune(r) + return + } + + switch r { + case '\a': + b.WriteString(`\a`) + case '\f': + b.WriteString(`\f`) + case '\n': + b.WriteString(`\n`) + case '\r': + b.WriteString(`\r`) + case '\t': + b.WriteString(`\t`) + case '\v': + b.WriteString(`\v`) + default: + if r < 0x100 { + b.WriteString(`\x`) + s := strconv.FormatInt(int64(r), 16) + if len(s) == 1 { + b.WriteRune('0') + } + b.WriteString(s) + break + } + b.WriteString(`\u`) + b.WriteString(strconv.FormatInt(int64(r), 16)) + } +} + +func Unescape(input string) (string, error) { + idx := strings.IndexRune(input, '\\') + // no slashes means no unescape needed + if idx == -1 { + return input, nil + } + + buf := bytes.NewBufferString(input[:idx]) + // get the runes for the rest of the string -- we're going full parser scan on this + + p := parser{} + p.setPattern(input[idx+1:]) + for { + if p.rightMost() { + return "", p.getErr(ErrIllegalEndEscape) + } + r, err := p.scanCharEscape() + if err != nil { + return "", err + } + buf.WriteRune(r) + // are we done? 
+ if p.rightMost() { + return buf.String(), nil + } + + r = p.moveRightGetChar() + for r != '\\' { + buf.WriteRune(r) + if p.rightMost() { + // we're done, no more slashes + return buf.String(), nil + } + // keep scanning until we get another slash + r = p.moveRightGetChar() + } + } +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/fuzz.go b/vendor/github.com/dlclark/regexp2/syntax/fuzz.go new file mode 100644 index 000000000..ee863866d --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/fuzz.go @@ -0,0 +1,20 @@ +// +build gofuzz + +package syntax + +// Fuzz is the input point for go-fuzz +func Fuzz(data []byte) int { + sdata := string(data) + tree, err := Parse(sdata, RegexOptions(0)) + if err != nil { + return 0 + } + + // translate it to code + _, err = Write(tree) + if err != nil { + panic(err) + } + + return 1 +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/parser.go b/vendor/github.com/dlclark/regexp2/syntax/parser.go new file mode 100644 index 000000000..da14f98e3 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/parser.go @@ -0,0 +1,2213 @@ +package syntax + +import ( + "fmt" + "math" + "os" + "sort" + "strconv" + "unicode" +) + +type RegexOptions int32 + +const ( + IgnoreCase RegexOptions = 0x0001 // "i" + Multiline = 0x0002 // "m" + ExplicitCapture = 0x0004 // "n" + Compiled = 0x0008 // "c" + Singleline = 0x0010 // "s" + IgnorePatternWhitespace = 0x0020 // "x" + RightToLeft = 0x0040 // "r" + Debug = 0x0080 // "d" + ECMAScript = 0x0100 // "e" + RE2 = 0x0200 // RE2 compat mode +) + +func optionFromCode(ch rune) RegexOptions { + // case-insensitive + switch ch { + case 'i', 'I': + return IgnoreCase + case 'r', 'R': + return RightToLeft + case 'm', 'M': + return Multiline + case 'n', 'N': + return ExplicitCapture + case 's', 'S': + return Singleline + case 'x', 'X': + return IgnorePatternWhitespace + case 'd', 'D': + return Debug + case 'e', 'E': + return ECMAScript + default: + return 0 + } +} + +// An Error describes a failure to parse a regular expression +// and gives the offending expression. +type Error struct { + Code ErrorCode + Expr string + Args []interface{} +} + +func (e *Error) Error() string { + if len(e.Args) == 0 { + return "error parsing regexp: " + e.Code.String() + " in `" + e.Expr + "`" + } + return "error parsing regexp: " + fmt.Sprintf(e.Code.String(), e.Args...) + " in `" + e.Expr + "`" +} + +// An ErrorCode describes a failure to parse a regular expression. 
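// ---- Editor's note (illustrative sketch, not part of the vendored file) ----
// RegexOptions above is a plain bit mask: options combine with | and are
// tested with &, and optionFromCode maps the letters of an inline
// "(?imsx...)" group onto these bits. The sketch below assumes the public
// regexp2 API (MustCompile, MatchString) and its exported IgnoreCase and
// Multiline option constants; treat those names as assumptions based on the
// library's documentation rather than something this patch defines:
package main

import (
	"fmt"

	"github.com/dlclark/regexp2"
)

func main() {
	opts := regexp2.IgnoreCase | regexp2.Multiline // bit flags combine with |

	re := regexp2.MustCompile(`^hello`, opts)
	ok, err := re.MatchString("HELLO world")
	if err != nil {
		panic(err)
	}
	fmt.Println(ok) // true: IgnoreCase makes the match case-insensitive
}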
+type ErrorCode string + +const ( + // internal issue + ErrInternalError ErrorCode = "regexp/syntax: internal error" + // Parser errors + ErrUnterminatedComment = "unterminated comment" + ErrInvalidCharRange = "invalid character class range" + ErrInvalidRepeatSize = "invalid repeat count" + ErrInvalidUTF8 = "invalid UTF-8" + ErrCaptureGroupOutOfRange = "capture group number out of range" + ErrUnexpectedParen = "unexpected )" + ErrMissingParen = "missing closing )" + ErrMissingBrace = "missing closing }" + ErrInvalidRepeatOp = "invalid nested repetition operator" + ErrMissingRepeatArgument = "missing argument to repetition operator" + ErrConditionalExpression = "illegal conditional (?(...)) expression" + ErrTooManyAlternates = "too many | in (?()|)" + ErrUnrecognizedGrouping = "unrecognized grouping construct: (%v" + ErrInvalidGroupName = "invalid group name: group names must begin with a word character and have a matching terminator" + ErrCapNumNotZero = "capture number cannot be zero" + ErrUndefinedBackRef = "reference to undefined group number %v" + ErrUndefinedNameRef = "reference to undefined group name %v" + ErrAlternationCantCapture = "alternation conditions do not capture and cannot be named" + ErrAlternationCantHaveComment = "alternation conditions cannot be comments" + ErrMalformedReference = "(?(%v) ) malformed" + ErrUndefinedReference = "(?(%v) ) reference to undefined group" + ErrIllegalEndEscape = "illegal \\ at end of pattern" + ErrMalformedSlashP = "malformed \\p{X} character escape" + ErrIncompleteSlashP = "incomplete \\p{X} character escape" + ErrUnknownSlashP = "unknown unicode category, script, or property '%v'" + ErrUnrecognizedEscape = "unrecognized escape sequence \\%v" + ErrMissingControl = "missing control character" + ErrUnrecognizedControl = "unrecognized control character" + ErrTooFewHex = "insufficient hexadecimal digits" + ErrInvalidHex = "hex values may not be larger than 0x10FFFF" + ErrMalformedNameRef = "malformed \\k<...> named back reference" + ErrBadClassInCharRange = "cannot include class \\%v in character range" + ErrUnterminatedBracket = "unterminated [] set" + ErrSubtractionMustBeLast = "a subtraction must be the last element in a character class" + ErrReversedCharRange = "[x-y] range in reverse order" +) + +func (e ErrorCode) String() string { + return string(e) +} + +type parser struct { + stack *regexNode + group *regexNode + alternation *regexNode + concatenation *regexNode + unit *regexNode + + patternRaw string + pattern []rune + + currentPos int + specialCase *unicode.SpecialCase + + autocap int + capcount int + captop int + capsize int + + caps map[int]int + capnames map[string]int + + capnumlist []int + capnamelist []string + + options RegexOptions + optionsStack []RegexOptions + ignoreNextParen bool +} + +const ( + maxValueDiv10 int = math.MaxInt32 / 10 + maxValueMod10 = math.MaxInt32 % 10 +) + +// Parse converts a regex string into a parse tree +func Parse(re string, op RegexOptions) (*RegexTree, error) { + p := parser{ + options: op, + caps: make(map[int]int), + } + p.setPattern(re) + + if err := p.countCaptures(); err != nil { + return nil, err + } + + p.reset(op) + root, err := p.scanRegex() + + if err != nil { + return nil, err + } + tree := &RegexTree{ + root: root, + caps: p.caps, + capnumlist: p.capnumlist, + captop: p.captop, + Capnames: p.capnames, + Caplist: p.capnamelist, + options: op, + } + + if tree.options&Debug > 0 { + os.Stdout.WriteString(tree.Dump()) + } + + return tree, nil +} + +func (p *parser) setPattern(pattern 
string) { + p.patternRaw = pattern + p.pattern = make([]rune, 0, len(pattern)) + + //populate our rune array to handle utf8 encoding + for _, r := range pattern { + p.pattern = append(p.pattern, r) + } +} +func (p *parser) getErr(code ErrorCode, args ...interface{}) error { + return &Error{Code: code, Expr: p.patternRaw, Args: args} +} + +func (p *parser) noteCaptureSlot(i, pos int) { + if _, ok := p.caps[i]; !ok { + // the rhs of the hashtable isn't used in the parser + p.caps[i] = pos + p.capcount++ + + if p.captop <= i { + if i == math.MaxInt32 { + p.captop = i + } else { + p.captop = i + 1 + } + } + } +} + +func (p *parser) noteCaptureName(name string, pos int) { + if p.capnames == nil { + p.capnames = make(map[string]int) + } + + if _, ok := p.capnames[name]; !ok { + p.capnames[name] = pos + p.capnamelist = append(p.capnamelist, name) + } +} + +func (p *parser) assignNameSlots() { + if p.capnames != nil { + for _, name := range p.capnamelist { + for p.isCaptureSlot(p.autocap) { + p.autocap++ + } + pos := p.capnames[name] + p.capnames[name] = p.autocap + p.noteCaptureSlot(p.autocap, pos) + + p.autocap++ + } + } + + // if the caps array has at least one gap, construct the list of used slots + if p.capcount < p.captop { + p.capnumlist = make([]int, p.capcount) + i := 0 + + for k := range p.caps { + p.capnumlist[i] = k + i++ + } + + sort.Ints(p.capnumlist) + } + + // merge capsnumlist into capnamelist + if p.capnames != nil || p.capnumlist != nil { + var oldcapnamelist []string + var next int + var k int + + if p.capnames == nil { + oldcapnamelist = nil + p.capnames = make(map[string]int) + p.capnamelist = []string{} + next = -1 + } else { + oldcapnamelist = p.capnamelist + p.capnamelist = []string{} + next = p.capnames[oldcapnamelist[0]] + } + + for i := 0; i < p.capcount; i++ { + j := i + if p.capnumlist != nil { + j = p.capnumlist[i] + } + + if next == j { + p.capnamelist = append(p.capnamelist, oldcapnamelist[k]) + k++ + + if k == len(oldcapnamelist) { + next = -1 + } else { + next = p.capnames[oldcapnamelist[k]] + } + + } else { + //feature: culture? + str := strconv.Itoa(j) + p.capnamelist = append(p.capnamelist, str) + p.capnames[str] = j + } + } + } +} + +func (p *parser) consumeAutocap() int { + r := p.autocap + p.autocap++ + return r +} + +// CountCaptures is a prescanner for deducing the slots used for +// captures by doing a partial tokenization of the pattern. +func (p *parser) countCaptures() error { + var ch rune + + p.noteCaptureSlot(0, 0) + + p.autocap = 1 + + for p.charsRight() > 0 { + pos := p.textpos() + ch = p.moveRightGetChar() + switch ch { + case '\\': + if p.charsRight() > 0 { + p.scanBackslash(true) + } + + case '#': + if p.useOptionX() { + p.moveLeft() + p.scanBlank() + } + + case '[': + p.scanCharSet(false, true) + + case ')': + if !p.emptyOptionsStack() { + p.popOptions() + } + + case '(': + if p.charsRight() >= 2 && p.rightChar(1) == '#' && p.rightChar(0) == '?' { + p.moveLeft() + p.scanBlank() + } else { + p.pushOptions() + if p.charsRight() > 0 && p.rightChar(0) == '?' { + // we have (?... + p.moveRight(1) + + if p.charsRight() > 1 && (p.rightChar(0) == '<' || p.rightChar(0) == '\'') { + // named group: (?<... or (?'... 
+ + p.moveRight(1) + ch = p.rightChar(0) + + if ch != '0' && IsWordChar(ch) { + if ch >= '1' && ch <= '9' { + dec, err := p.scanDecimal() + if err != nil { + return err + } + p.noteCaptureSlot(dec, pos) + } else { + p.noteCaptureName(p.scanCapname(), pos) + } + } + } else if p.useRE2() && p.charsRight() > 2 && (p.rightChar(0) == 'P' && p.rightChar(1) == '<') { + // RE2-compat (?P<) + p.moveRight(2) + ch = p.rightChar(0) + if IsWordChar(ch) { + p.noteCaptureName(p.scanCapname(), pos) + } + + } else { + // (?... + + // get the options if it's an option construct (?cimsx-cimsx...) + p.scanOptions() + + if p.charsRight() > 0 { + if p.rightChar(0) == ')' { + // (?cimsx-cimsx) + p.moveRight(1) + p.popKeepOptions() + } else if p.rightChar(0) == '(' { + // alternation construct: (?(foo)yes|no) + // ignore the next paren so we don't capture the condition + p.ignoreNextParen = true + + // break from here so we don't reset ignoreNextParen + continue + } + } + } + } else { + if !p.useOptionN() && !p.ignoreNextParen { + p.noteCaptureSlot(p.consumeAutocap(), pos) + } + } + } + + p.ignoreNextParen = false + + } + } + + p.assignNameSlots() + return nil +} + +func (p *parser) reset(topopts RegexOptions) { + p.currentPos = 0 + p.autocap = 1 + p.ignoreNextParen = false + + if len(p.optionsStack) > 0 { + p.optionsStack = p.optionsStack[:0] + } + + p.options = topopts + p.stack = nil +} + +func (p *parser) scanRegex() (*regexNode, error) { + ch := '@' // nonspecial ch, means at beginning + isQuant := false + + p.startGroup(newRegexNodeMN(ntCapture, p.options, 0, -1)) + + for p.charsRight() > 0 { + wasPrevQuantifier := isQuant + isQuant = false + + if err := p.scanBlank(); err != nil { + return nil, err + } + + startpos := p.textpos() + + // move past all of the normal characters. We'll stop when we hit some kind of control character, + // or if IgnorePatternWhiteSpace is on, we'll stop when we see some whitespace. + if p.useOptionX() { + for p.charsRight() > 0 { + ch = p.rightChar(0) + //UGLY: clean up, this is ugly + if !(!isStopperX(ch) || (ch == '{' && !p.isTrueQuantifier())) { + break + } + p.moveRight(1) + } + } else { + for p.charsRight() > 0 { + ch = p.rightChar(0) + if !(!isSpecial(ch) || ch == '{' && !p.isTrueQuantifier()) { + break + } + p.moveRight(1) + } + } + + endpos := p.textpos() + + p.scanBlank() + + if p.charsRight() == 0 { + ch = '!' 
// nonspecial, means at end + } else if ch = p.rightChar(0); isSpecial(ch) { + isQuant = isQuantifier(ch) + p.moveRight(1) + } else { + ch = ' ' // nonspecial, means at ordinary char + } + + if startpos < endpos { + cchUnquantified := endpos - startpos + if isQuant { + cchUnquantified-- + } + wasPrevQuantifier = false + + if cchUnquantified > 0 { + p.addToConcatenate(startpos, cchUnquantified, false) + } + + if isQuant { + p.addUnitOne(p.charAt(endpos - 1)) + } + } + + switch ch { + case '!': + goto BreakOuterScan + + case ' ': + goto ContinueOuterScan + + case '[': + cc, err := p.scanCharSet(p.useOptionI(), false) + if err != nil { + return nil, err + } + p.addUnitSet(cc) + + case '(': + p.pushOptions() + + if grouper, err := p.scanGroupOpen(); err != nil { + return nil, err + } else if grouper == nil { + p.popKeepOptions() + } else { + p.pushGroup() + p.startGroup(grouper) + } + + continue + + case '|': + p.addAlternate() + goto ContinueOuterScan + + case ')': + if p.emptyStack() { + return nil, p.getErr(ErrUnexpectedParen) + } + + if err := p.addGroup(); err != nil { + return nil, err + } + if err := p.popGroup(); err != nil { + return nil, err + } + p.popOptions() + + if p.unit == nil { + goto ContinueOuterScan + } + + case '\\': + n, err := p.scanBackslash(false) + if err != nil { + return nil, err + } + p.addUnitNode(n) + + case '^': + if p.useOptionM() { + p.addUnitType(ntBol) + } else { + p.addUnitType(ntBeginning) + } + + case '$': + if p.useOptionM() { + p.addUnitType(ntEol) + } else { + p.addUnitType(ntEndZ) + } + + case '.': + if p.useOptionE() { + p.addUnitSet(ECMAAnyClass()) + } else if p.useOptionS() { + p.addUnitSet(AnyClass()) + } else { + p.addUnitNotone('\n') + } + + case '{', '*', '+', '?': + if p.unit == nil { + if wasPrevQuantifier { + return nil, p.getErr(ErrInvalidRepeatOp) + } else { + return nil, p.getErr(ErrMissingRepeatArgument) + } + } + p.moveLeft() + + default: + return nil, p.getErr(ErrInternalError) + } + + if err := p.scanBlank(); err != nil { + return nil, err + } + + if p.charsRight() > 0 { + isQuant = p.isTrueQuantifier() + } + if p.charsRight() == 0 || !isQuant { + //maintain odd C# assignment order -- not sure if required, could clean up? + p.addConcatenate() + goto ContinueOuterScan + } + + ch = p.moveRightGetChar() + + // Handle quantifiers + for p.unit != nil { + var min, max int + var lazy bool + + switch ch { + case '*': + min = 0 + max = math.MaxInt32 + + case '?': + min = 0 + max = 1 + + case '+': + min = 1 + max = math.MaxInt32 + + case '{': + { + var err error + startpos = p.textpos() + if min, err = p.scanDecimal(); err != nil { + return nil, err + } + max = min + if startpos < p.textpos() { + if p.charsRight() > 0 && p.rightChar(0) == ',' { + p.moveRight(1) + if p.charsRight() == 0 || p.rightChar(0) == '}' { + max = math.MaxInt32 + } else { + if max, err = p.scanDecimal(); err != nil { + return nil, err + } + } + } + } + + if startpos == p.textpos() || p.charsRight() == 0 || p.moveRightGetChar() != '}' { + p.addConcatenate() + p.textto(startpos - 1) + goto ContinueOuterScan + } + } + + default: + return nil, p.getErr(ErrInternalError) + } + + if err := p.scanBlank(); err != nil { + return nil, err + } + + if p.charsRight() == 0 || p.rightChar(0) != '?' 
{ + lazy = false + } else { + p.moveRight(1) + lazy = true + } + + if min > max { + return nil, p.getErr(ErrInvalidRepeatSize) + } + + p.addConcatenate3(lazy, min, max) + } + + ContinueOuterScan: + } + +BreakOuterScan: + ; + + if !p.emptyStack() { + return nil, p.getErr(ErrMissingParen) + } + + if err := p.addGroup(); err != nil { + return nil, err + } + + return p.unit, nil + +} + +/* + * Simple parsing for replacement patterns + */ +func (p *parser) scanReplacement() (*regexNode, error) { + var c, startpos int + + p.concatenation = newRegexNode(ntConcatenate, p.options) + + for { + c = p.charsRight() + if c == 0 { + break + } + + startpos = p.textpos() + + for c > 0 && p.rightChar(0) != '$' { + p.moveRight(1) + c-- + } + + p.addToConcatenate(startpos, p.textpos()-startpos, true) + + if c > 0 { + if p.moveRightGetChar() == '$' { + n, err := p.scanDollar() + if err != nil { + return nil, err + } + p.addUnitNode(n) + } + p.addConcatenate() + } + } + + return p.concatenation, nil +} + +/* + * Scans $ patterns recognized within replacement patterns + */ +func (p *parser) scanDollar() (*regexNode, error) { + if p.charsRight() == 0 { + return newRegexNodeCh(ntOne, p.options, '$'), nil + } + + ch := p.rightChar(0) + angled := false + backpos := p.textpos() + lastEndPos := backpos + + // Note angle + + if ch == '{' && p.charsRight() > 1 { + angled = true + p.moveRight(1) + ch = p.rightChar(0) + } + + // Try to parse backreference: \1 or \{1} or \{cap} + + if ch >= '0' && ch <= '9' { + if !angled && p.useOptionE() { + capnum := -1 + newcapnum := int(ch - '0') + p.moveRight(1) + if p.isCaptureSlot(newcapnum) { + capnum = newcapnum + lastEndPos = p.textpos() + } + + for p.charsRight() > 0 { + ch = p.rightChar(0) + if ch < '0' || ch > '9' { + break + } + digit := int(ch - '0') + if newcapnum > maxValueDiv10 || (newcapnum == maxValueDiv10 && digit > maxValueMod10) { + return nil, p.getErr(ErrCaptureGroupOutOfRange) + } + + newcapnum = newcapnum*10 + digit + + p.moveRight(1) + if p.isCaptureSlot(newcapnum) { + capnum = newcapnum + lastEndPos = p.textpos() + } + } + p.textto(lastEndPos) + if capnum >= 0 { + return newRegexNodeM(ntRef, p.options, capnum), nil + } + } else { + capnum, err := p.scanDecimal() + if err != nil { + return nil, err + } + if !angled || p.charsRight() > 0 && p.moveRightGetChar() == '}' { + if p.isCaptureSlot(capnum) { + return newRegexNodeM(ntRef, p.options, capnum), nil + } + } + } + } else if angled && IsWordChar(ch) { + capname := p.scanCapname() + + if p.charsRight() > 0 && p.moveRightGetChar() == '}' { + if p.isCaptureName(capname) { + return newRegexNodeM(ntRef, p.options, p.captureSlotFromName(capname)), nil + } + } + } else if !angled { + capnum := 1 + + switch ch { + case '$': + p.moveRight(1) + return newRegexNodeCh(ntOne, p.options, '$'), nil + case '&': + capnum = 0 + case '`': + capnum = replaceLeftPortion + case '\'': + capnum = replaceRightPortion + case '+': + capnum = replaceLastGroup + case '_': + capnum = replaceWholeString + } + + if capnum != 1 { + p.moveRight(1) + return newRegexNodeM(ntRef, p.options, capnum), nil + } + } + + // unrecognized $: literalize + + p.textto(backpos) + return newRegexNodeCh(ntOne, p.options, '$'), nil +} + +// scanGroupOpen scans chars following a '(' (not counting the '('), and returns +// a RegexNode for the type of group scanned, or nil if the group +// simply changed options (?cimsx-cimsx) or was a comment (#...). 
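// ---- Editor's note (illustrative sketch, not part of the vendored file) ----
// scanGroupOpen below recognizes the constructs that may follow "(": the
// non-capturing (?:...), named groups (?<name>...) / (?'name'...), the
// lookarounds (?=, (?!, (?<=, (?<!, inline option groups and the conditional
// (?(...)yes|no) form. A user-level sketch of a named group follows; it
// assumes the public regexp2 API (FindStringMatch, GroupByName) as described
// in the library's documentation, so treat those calls as assumptions:
package main

import (
	"fmt"

	"github.com/dlclark/regexp2"
)

func main() {
	// (?:v)? is a non-capturing group and (?<year>...) a named capture --
	// both forms are parsed by scanGroupOpen below.
	re := regexp2.MustCompile(`(?:v)?(?<year>\d{4})`, 0)

	m, err := re.FindStringMatch("released v2024 in spring")
	if err != nil || m == nil {
		panic("no match")
	}
	fmt.Println(m.GroupByName("year").String()) // 2024
}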
+func (p *parser) scanGroupOpen() (*regexNode, error) { + var ch rune + var nt nodeType + var err error + close := '>' + start := p.textpos() + + // just return a RegexNode if we have: + // 1. "(" followed by nothing + // 2. "(x" where x != ? + // 3. "(?)" + if p.charsRight() == 0 || p.rightChar(0) != '?' || (p.rightChar(0) == '?' && (p.charsRight() > 1 && p.rightChar(1) == ')')) { + if p.useOptionN() || p.ignoreNextParen { + p.ignoreNextParen = false + return newRegexNode(ntGroup, p.options), nil + } + return newRegexNodeMN(ntCapture, p.options, p.consumeAutocap(), -1), nil + } + + p.moveRight(1) + + for { + if p.charsRight() == 0 { + break + } + + switch ch = p.moveRightGetChar(); ch { + case ':': + nt = ntGroup + + case '=': + p.options &= ^RightToLeft + nt = ntRequire + + case '!': + p.options &= ^RightToLeft + nt = ntPrevent + + case '>': + nt = ntGreedy + + case '\'': + close = '\'' + fallthrough + + case '<': + if p.charsRight() == 0 { + goto BreakRecognize + } + + switch ch = p.moveRightGetChar(); ch { + case '=': + if close == '\'' { + goto BreakRecognize + } + + p.options |= RightToLeft + nt = ntRequire + + case '!': + if close == '\'' { + goto BreakRecognize + } + + p.options |= RightToLeft + nt = ntPrevent + + default: + p.moveLeft() + capnum := -1 + uncapnum := -1 + proceed := false + + // grab part before - + + if ch >= '0' && ch <= '9' { + if capnum, err = p.scanDecimal(); err != nil { + return nil, err + } + + if !p.isCaptureSlot(capnum) { + capnum = -1 + } + + // check if we have bogus characters after the number + if p.charsRight() > 0 && !(p.rightChar(0) == close || p.rightChar(0) == '-') { + return nil, p.getErr(ErrInvalidGroupName) + } + if capnum == 0 { + return nil, p.getErr(ErrCapNumNotZero) + } + } else if IsWordChar(ch) { + capname := p.scanCapname() + + if p.isCaptureName(capname) { + capnum = p.captureSlotFromName(capname) + } + + // check if we have bogus character after the name + if p.charsRight() > 0 && !(p.rightChar(0) == close || p.rightChar(0) == '-') { + return nil, p.getErr(ErrInvalidGroupName) + } + } else if ch == '-' { + proceed = true + } else { + // bad group name - starts with something other than a word character and isn't a number + return nil, p.getErr(ErrInvalidGroupName) + } + + // grab part after - if any + + if (capnum != -1 || proceed == true) && p.charsRight() > 0 && p.rightChar(0) == '-' { + p.moveRight(1) + + //no more chars left, no closing char, etc + if p.charsRight() == 0 { + return nil, p.getErr(ErrInvalidGroupName) + } + + ch = p.rightChar(0) + if ch >= '0' && ch <= '9' { + if uncapnum, err = p.scanDecimal(); err != nil { + return nil, err + } + + if !p.isCaptureSlot(uncapnum) { + return nil, p.getErr(ErrUndefinedBackRef, uncapnum) + } + + // check if we have bogus characters after the number + if p.charsRight() > 0 && p.rightChar(0) != close { + return nil, p.getErr(ErrInvalidGroupName) + } + } else if IsWordChar(ch) { + uncapname := p.scanCapname() + + if !p.isCaptureName(uncapname) { + return nil, p.getErr(ErrUndefinedNameRef, uncapname) + } + uncapnum = p.captureSlotFromName(uncapname) + + // check if we have bogus character after the name + if p.charsRight() > 0 && p.rightChar(0) != close { + return nil, p.getErr(ErrInvalidGroupName) + } + } else { + // bad group name - starts with something other than a word character and isn't a number + return nil, p.getErr(ErrInvalidGroupName) + } + } + + // actually make the node + + if (capnum != -1 || uncapnum != -1) && p.charsRight() > 0 && p.moveRightGetChar() == close { + return 
newRegexNodeMN(ntCapture, p.options, capnum, uncapnum), nil + } + goto BreakRecognize + } + + case '(': + // alternation construct (?(...) | ) + + parenPos := p.textpos() + if p.charsRight() > 0 { + ch = p.rightChar(0) + + // check if the alternation condition is a backref + if ch >= '0' && ch <= '9' { + var capnum int + if capnum, err = p.scanDecimal(); err != nil { + return nil, err + } + if p.charsRight() > 0 && p.moveRightGetChar() == ')' { + if p.isCaptureSlot(capnum) { + return newRegexNodeM(ntTestref, p.options, capnum), nil + } + return nil, p.getErr(ErrUndefinedReference, capnum) + } + + return nil, p.getErr(ErrMalformedReference, capnum) + + } else if IsWordChar(ch) { + capname := p.scanCapname() + + if p.isCaptureName(capname) && p.charsRight() > 0 && p.moveRightGetChar() == ')' { + return newRegexNodeM(ntTestref, p.options, p.captureSlotFromName(capname)), nil + } + } + } + // not a backref + nt = ntTestgroup + p.textto(parenPos - 1) // jump to the start of the parentheses + p.ignoreNextParen = true // but make sure we don't try to capture the insides + + charsRight := p.charsRight() + if charsRight >= 3 && p.rightChar(1) == '?' { + rightchar2 := p.rightChar(2) + // disallow comments in the condition + if rightchar2 == '#' { + return nil, p.getErr(ErrAlternationCantHaveComment) + } + + // disallow named capture group (?<..>..) in the condition + if rightchar2 == '\'' { + return nil, p.getErr(ErrAlternationCantCapture) + } + + if charsRight >= 4 && (rightchar2 == '<' && p.rightChar(3) != '!' && p.rightChar(3) != '=') { + return nil, p.getErr(ErrAlternationCantCapture) + } + } + + case 'P': + if p.useRE2() { + // support for P syntax + if p.charsRight() < 3 { + goto BreakRecognize + } + + ch = p.moveRightGetChar() + if ch != '<' { + goto BreakRecognize + } + + ch = p.moveRightGetChar() + p.moveLeft() + + if IsWordChar(ch) { + capnum := -1 + capname := p.scanCapname() + + if p.isCaptureName(capname) { + capnum = p.captureSlotFromName(capname) + } + + // check if we have bogus character after the name + if p.charsRight() > 0 && p.rightChar(0) != '>' { + return nil, p.getErr(ErrInvalidGroupName) + } + + // actually make the node + + if capnum != -1 && p.charsRight() > 0 && p.moveRightGetChar() == '>' { + return newRegexNodeMN(ntCapture, p.options, capnum, -1), nil + } + goto BreakRecognize + + } else { + // bad group name - starts with something other than a word character and isn't a number + return nil, p.getErr(ErrInvalidGroupName) + } + } + // if we're not using RE2 compat mode then + // we just behave like normal + fallthrough + + default: + p.moveLeft() + + nt = ntGroup + // disallow options in the children of a testgroup node + if p.group.t != ntTestgroup { + p.scanOptions() + } + if p.charsRight() == 0 { + goto BreakRecognize + } + + if ch = p.moveRightGetChar(); ch == ')' { + return nil, nil + } + + if ch != ':' { + goto BreakRecognize + } + + } + + return newRegexNode(nt, p.options), nil + } + +BreakRecognize: + + // break Recognize comes here + + return nil, p.getErr(ErrUnrecognizedGrouping, string(p.pattern[start:p.textpos()])) +} + +// scans backslash specials and basics +func (p *parser) scanBackslash(scanOnly bool) (*regexNode, error) { + + if p.charsRight() == 0 { + return nil, p.getErr(ErrIllegalEndEscape) + } + + switch ch := p.rightChar(0); ch { + case 'b', 'B', 'A', 'G', 'Z', 'z': + p.moveRight(1) + return newRegexNode(p.typeFromCode(ch), p.options), nil + + case 'w': + p.moveRight(1) + if p.useOptionE() { + return newRegexNodeSet(ntSet, p.options, 
ECMAWordClass()), nil + } + return newRegexNodeSet(ntSet, p.options, WordClass()), nil + + case 'W': + p.moveRight(1) + if p.useOptionE() { + return newRegexNodeSet(ntSet, p.options, NotECMAWordClass()), nil + } + return newRegexNodeSet(ntSet, p.options, NotWordClass()), nil + + case 's': + p.moveRight(1) + if p.useOptionE() { + return newRegexNodeSet(ntSet, p.options, ECMASpaceClass()), nil + } + return newRegexNodeSet(ntSet, p.options, SpaceClass()), nil + + case 'S': + p.moveRight(1) + if p.useOptionE() { + return newRegexNodeSet(ntSet, p.options, NotECMASpaceClass()), nil + } + return newRegexNodeSet(ntSet, p.options, NotSpaceClass()), nil + + case 'd': + p.moveRight(1) + if p.useOptionE() { + return newRegexNodeSet(ntSet, p.options, ECMADigitClass()), nil + } + return newRegexNodeSet(ntSet, p.options, DigitClass()), nil + + case 'D': + p.moveRight(1) + if p.useOptionE() { + return newRegexNodeSet(ntSet, p.options, NotECMADigitClass()), nil + } + return newRegexNodeSet(ntSet, p.options, NotDigitClass()), nil + + case 'p', 'P': + p.moveRight(1) + prop, err := p.parseProperty() + if err != nil { + return nil, err + } + cc := &CharSet{} + cc.addCategory(prop, (ch != 'p'), p.useOptionI(), p.patternRaw) + if p.useOptionI() { + cc.addLowercase() + } + + return newRegexNodeSet(ntSet, p.options, cc), nil + + default: + return p.scanBasicBackslash(scanOnly) + } +} + +// Scans \-style backreferences and character escapes +func (p *parser) scanBasicBackslash(scanOnly bool) (*regexNode, error) { + if p.charsRight() == 0 { + return nil, p.getErr(ErrIllegalEndEscape) + } + angled := false + close := '\x00' + + backpos := p.textpos() + ch := p.rightChar(0) + + // allow \k instead of \, which is now deprecated + + if ch == 'k' { + if p.charsRight() >= 2 { + p.moveRight(1) + ch = p.moveRightGetChar() + + if ch == '<' || ch == '\'' { + angled = true + if ch == '\'' { + close = '\'' + } else { + close = '>' + } + } + } + + if !angled || p.charsRight() <= 0 { + return nil, p.getErr(ErrMalformedNameRef) + } + + ch = p.rightChar(0) + + } else if (ch == '<' || ch == '\'') && p.charsRight() > 1 { // Note angle without \g + angled = true + if ch == '\'' { + close = '\'' + } else { + close = '>' + } + + p.moveRight(1) + ch = p.rightChar(0) + } + + // Try to parse backreference: \<1> or \ + + if angled && ch >= '0' && ch <= '9' { + capnum, err := p.scanDecimal() + if err != nil { + return nil, err + } + + if p.charsRight() > 0 && p.moveRightGetChar() == close { + if p.isCaptureSlot(capnum) { + return newRegexNodeM(ntRef, p.options, capnum), nil + } + return nil, p.getErr(ErrUndefinedBackRef, capnum) + } + } else if !angled && ch >= '1' && ch <= '9' { // Try to parse backreference or octal: \1 + capnum, err := p.scanDecimal() + if err != nil { + return nil, err + } + + if scanOnly { + return nil, nil + } + + if p.isCaptureSlot(capnum) { + return newRegexNodeM(ntRef, p.options, capnum), nil + } + if capnum <= 9 && !p.useOptionE() { + return nil, p.getErr(ErrUndefinedBackRef, capnum) + } + + } else if angled && IsWordChar(ch) { + capname := p.scanCapname() + + if p.charsRight() > 0 && p.moveRightGetChar() == close { + if p.isCaptureName(capname) { + return newRegexNodeM(ntRef, p.options, p.captureSlotFromName(capname)), nil + } + return nil, p.getErr(ErrUndefinedNameRef, capname) + } + } + + // Not backreference: must be char code + + p.textto(backpos) + ch, err := p.scanCharEscape() + if err != nil { + return nil, err + } + + if p.useOptionI() { + ch = unicode.ToLower(ch) + } + + return newRegexNodeCh(ntOne, 
p.options, ch), nil +} + +// Scans X for \p{X} or \P{X} +func (p *parser) parseProperty() (string, error) { + if p.charsRight() < 3 { + return "", p.getErr(ErrIncompleteSlashP) + } + ch := p.moveRightGetChar() + if ch != '{' { + return "", p.getErr(ErrMalformedSlashP) + } + + startpos := p.textpos() + for p.charsRight() > 0 { + ch = p.moveRightGetChar() + if !(IsWordChar(ch) || ch == '-') { + p.moveLeft() + break + } + } + capname := string(p.pattern[startpos:p.textpos()]) + + if p.charsRight() == 0 || p.moveRightGetChar() != '}' { + return "", p.getErr(ErrIncompleteSlashP) + } + + if !isValidUnicodeCat(capname) { + return "", p.getErr(ErrUnknownSlashP, capname) + } + + return capname, nil +} + +// Returns ReNode type for zero-length assertions with a \ code. +func (p *parser) typeFromCode(ch rune) nodeType { + switch ch { + case 'b': + if p.useOptionE() { + return ntECMABoundary + } + return ntBoundary + case 'B': + if p.useOptionE() { + return ntNonECMABoundary + } + return ntNonboundary + case 'A': + return ntBeginning + case 'G': + return ntStart + case 'Z': + return ntEndZ + case 'z': + return ntEnd + default: + return ntNothing + } +} + +// Scans whitespace or x-mode comments. +func (p *parser) scanBlank() error { + if p.useOptionX() { + for { + for p.charsRight() > 0 && isSpace(p.rightChar(0)) { + p.moveRight(1) + } + + if p.charsRight() == 0 { + break + } + + if p.rightChar(0) == '#' { + for p.charsRight() > 0 && p.rightChar(0) != '\n' { + p.moveRight(1) + } + } else if p.charsRight() >= 3 && p.rightChar(2) == '#' && + p.rightChar(1) == '?' && p.rightChar(0) == '(' { + for p.charsRight() > 0 && p.rightChar(0) != ')' { + p.moveRight(1) + } + if p.charsRight() == 0 { + return p.getErr(ErrUnterminatedComment) + } + p.moveRight(1) + } else { + break + } + } + } else { + for { + if p.charsRight() < 3 || p.rightChar(2) != '#' || + p.rightChar(1) != '?' || p.rightChar(0) != '(' { + return nil + } + + for p.charsRight() > 0 && p.rightChar(0) != ')' { + p.moveRight(1) + } + if p.charsRight() == 0 { + return p.getErr(ErrUnterminatedComment) + } + p.moveRight(1) + } + } + return nil +} + +func (p *parser) scanCapname() string { + startpos := p.textpos() + + for p.charsRight() > 0 { + if !IsWordChar(p.moveRightGetChar()) { + p.moveLeft() + break + } + } + + return string(p.pattern[startpos:p.textpos()]) +} + +//Scans contents of [] (not including []'s), and converts to a set. 
+func (p *parser) scanCharSet(caseInsensitive, scanOnly bool) (*CharSet, error) { + ch := '\x00' + chPrev := '\x00' + inRange := false + firstChar := true + closed := false + + var cc *CharSet + if !scanOnly { + cc = &CharSet{} + } + + if p.charsRight() > 0 && p.rightChar(0) == '^' { + p.moveRight(1) + if !scanOnly { + cc.negate = true + } + } + + for ; p.charsRight() > 0; firstChar = false { + fTranslatedChar := false + ch = p.moveRightGetChar() + if ch == ']' { + if !firstChar { + closed = true + break + } else if p.useOptionE() { + if !scanOnly { + cc.addRanges(NoneClass().ranges) + } + closed = true + break + } + + } else if ch == '\\' && p.charsRight() > 0 { + switch ch = p.moveRightGetChar(); ch { + case 'D', 'd': + if !scanOnly { + if inRange { + return nil, p.getErr(ErrBadClassInCharRange, ch) + } + cc.addDigit(p.useOptionE(), ch == 'D', p.patternRaw) + } + continue + + case 'S', 's': + if !scanOnly { + if inRange { + return nil, p.getErr(ErrBadClassInCharRange, ch) + } + cc.addSpace(p.useOptionE(), ch == 'S') + } + continue + + case 'W', 'w': + if !scanOnly { + if inRange { + return nil, p.getErr(ErrBadClassInCharRange, ch) + } + + cc.addWord(p.useOptionE(), ch == 'W') + } + continue + + case 'p', 'P': + if !scanOnly { + if inRange { + return nil, p.getErr(ErrBadClassInCharRange, ch) + } + prop, err := p.parseProperty() + if err != nil { + return nil, err + } + cc.addCategory(prop, (ch != 'p'), caseInsensitive, p.patternRaw) + } else { + p.parseProperty() + } + + continue + + case '-': + if !scanOnly { + cc.addRange(ch, ch) + } + continue + + default: + p.moveLeft() + var err error + ch, err = p.scanCharEscape() // non-literal character + if err != nil { + return nil, err + } + fTranslatedChar = true + break // this break will only break out of the switch + } + } else if ch == '[' { + // This is code for Posix style properties - [:Ll:] or [:IsTibetan:]. + // It currently doesn't do anything other than skip the whole thing! + if p.charsRight() > 0 && p.rightChar(0) == ':' && !inRange { + savePos := p.textpos() + + p.moveRight(1) + negate := false + if p.charsRight() > 1 && p.rightChar(0) == '^' { + negate = true + p.moveRight(1) + } + + nm := p.scanCapname() // snag the name + if !scanOnly && p.useRE2() { + // look up the name since these are valid for RE2 + // add the group based on the name + if ok := cc.addNamedASCII(nm, negate); !ok { + return nil, p.getErr(ErrInvalidCharRange) + } + } + if p.charsRight() < 2 || p.moveRightGetChar() != ':' || p.moveRightGetChar() != ']' { + p.textto(savePos) + } else if p.useRE2() { + // move on + continue + } + } + } + + if inRange { + inRange = false + if !scanOnly { + if ch == '[' && !fTranslatedChar && !firstChar { + // We thought we were in a range, but we're actually starting a subtraction. + // In that case, we'll add chPrev to our char class, skip the opening [, and + // scan the new character class recursively. 
+ cc.addChar(chPrev) + sub, err := p.scanCharSet(caseInsensitive, false) + if err != nil { + return nil, err + } + cc.addSubtraction(sub) + + if p.charsRight() > 0 && p.rightChar(0) != ']' { + return nil, p.getErr(ErrSubtractionMustBeLast) + } + } else { + // a regular range, like a-z + if chPrev > ch { + return nil, p.getErr(ErrReversedCharRange) + } + cc.addRange(chPrev, ch) + } + } + } else if p.charsRight() >= 2 && p.rightChar(0) == '-' && p.rightChar(1) != ']' { + // this could be the start of a range + chPrev = ch + inRange = true + p.moveRight(1) + } else if p.charsRight() >= 1 && ch == '-' && !fTranslatedChar && p.rightChar(0) == '[' && !firstChar { + // we aren't in a range, and now there is a subtraction. Usually this happens + // only when a subtraction follows a range, like [a-z-[b]] + if !scanOnly { + p.moveRight(1) + sub, err := p.scanCharSet(caseInsensitive, false) + if err != nil { + return nil, err + } + cc.addSubtraction(sub) + + if p.charsRight() > 0 && p.rightChar(0) != ']' { + return nil, p.getErr(ErrSubtractionMustBeLast) + } + } else { + p.moveRight(1) + p.scanCharSet(caseInsensitive, true) + } + } else { + if !scanOnly { + cc.addRange(ch, ch) + } + } + } + + if !closed { + return nil, p.getErr(ErrUnterminatedBracket) + } + + if !scanOnly && caseInsensitive { + cc.addLowercase() + } + + return cc, nil +} + +// Scans any number of decimal digits (pegs value at 2^31-1 if too large) +func (p *parser) scanDecimal() (int, error) { + i := 0 + var d int + + for p.charsRight() > 0 { + d = int(p.rightChar(0) - '0') + if d < 0 || d > 9 { + break + } + p.moveRight(1) + + if i > maxValueDiv10 || (i == maxValueDiv10 && d > maxValueMod10) { + return 0, p.getErr(ErrCaptureGroupOutOfRange) + } + + i *= 10 + i += d + } + + return int(i), nil +} + +// Returns true for options allowed only at the top level +func isOnlyTopOption(option RegexOptions) bool { + return option == RightToLeft || option == ECMAScript || option == RE2 +} + +// Scans cimsx-cimsx option string, stops at the first unrecognized char. +func (p *parser) scanOptions() { + + for off := false; p.charsRight() > 0; p.moveRight(1) { + ch := p.rightChar(0) + + if ch == '-' { + off = true + } else if ch == '+' { + off = false + } else { + option := optionFromCode(ch) + if option == 0 || isOnlyTopOption(option) { + return + } + + if off { + p.options &= ^option + } else { + p.options |= option + } + } + } +} + +// Scans \ code for escape codes that map to single unicode chars. 
+func (p *parser) scanCharEscape() (r rune, err error) { + + ch := p.moveRightGetChar() + + if ch >= '0' && ch <= '7' { + p.moveLeft() + return p.scanOctal(), nil + } + + pos := p.textpos() + + switch ch { + case 'x': + // support for \x{HEX} syntax from Perl and PCRE + if p.charsRight() > 0 && p.rightChar(0) == '{' { + if p.useOptionE() { + return ch, nil + } + p.moveRight(1) + return p.scanHexUntilBrace() + } else { + r, err = p.scanHex(2) + } + case 'u': + r, err = p.scanHex(4) + case 'a': + return '\u0007', nil + case 'b': + return '\b', nil + case 'e': + return '\u001B', nil + case 'f': + return '\f', nil + case 'n': + return '\n', nil + case 'r': + return '\r', nil + case 't': + return '\t', nil + case 'v': + return '\u000B', nil + case 'c': + r, err = p.scanControl() + default: + if !p.useOptionE() && IsWordChar(ch) { + return 0, p.getErr(ErrUnrecognizedEscape, string(ch)) + } + return ch, nil + } + if err != nil && p.useOptionE() { + p.textto(pos) + return ch, nil + } + return +} + +// Grabs and converts an ascii control character +func (p *parser) scanControl() (rune, error) { + if p.charsRight() <= 0 { + return 0, p.getErr(ErrMissingControl) + } + + ch := p.moveRightGetChar() + + // \ca interpreted as \cA + + if ch >= 'a' && ch <= 'z' { + ch = (ch - ('a' - 'A')) + } + ch = (ch - '@') + if ch >= 0 && ch < ' ' { + return ch, nil + } + + return 0, p.getErr(ErrUnrecognizedControl) + +} + +// Scan hex digits until we hit a closing brace. +// Non-hex digits, hex value too large for UTF-8, or running out of chars are errors +func (p *parser) scanHexUntilBrace() (rune, error) { + // PCRE spec reads like unlimited hex digits are allowed, but unicode has a limit + // so we can enforce that + i := 0 + hasContent := false + + for p.charsRight() > 0 { + ch := p.moveRightGetChar() + if ch == '}' { + // hit our close brace, we're done here + // prevent \x{} + if !hasContent { + return 0, p.getErr(ErrTooFewHex) + } + return rune(i), nil + } + hasContent = true + // no brace needs to be hex digit + d := hexDigit(ch) + if d < 0 { + return 0, p.getErr(ErrMissingBrace) + } + + i *= 0x10 + i += d + + if i > unicode.MaxRune { + return 0, p.getErr(ErrInvalidHex) + } + } + + // we only make it here if we run out of digits without finding the brace + return 0, p.getErr(ErrMissingBrace) +} + +// Scans exactly c hex digits (c=2 for \xFF, c=4 for \uFFFF) +func (p *parser) scanHex(c int) (rune, error) { + + i := 0 + + if p.charsRight() >= c { + for c > 0 { + d := hexDigit(p.moveRightGetChar()) + if d < 0 { + break + } + i *= 0x10 + i += d + c-- + } + } + + if c > 0 { + return 0, p.getErr(ErrTooFewHex) + } + + return rune(i), nil +} + +// Returns n <= 0xF for a hex digit. +func hexDigit(ch rune) int { + + if d := uint(ch - '0'); d <= 9 { + return int(d) + } + + if d := uint(ch - 'a'); d <= 5 { + return int(d + 0xa) + } + + if d := uint(ch - 'A'); d <= 5 { + return int(d + 0xa) + } + + return -1 +} + +// Scans up to three octal digits (stops before exceeding 0377). +func (p *parser) scanOctal() rune { + // Consume octal chars only up to 3 digits and value 0377 + + c := 3 + + if c > p.charsRight() { + c = p.charsRight() + } + + //we know the first char is good because the caller had to check + i := 0 + d := int(p.rightChar(0) - '0') + for c > 0 && d <= 7 && d >= 0 { + if i >= 0x20 && p.useOptionE() { + break + } + i *= 8 + i += d + c-- + + p.moveRight(1) + if !p.rightMost() { + d = int(p.rightChar(0) - '0') + } + } + + // Octal codes only go up to 255. 
Any larger and the behavior that Perl follows + // is simply to truncate the high bits. + i &= 0xFF + + return rune(i) +} + +// Returns the current parsing position. +func (p *parser) textpos() int { + return p.currentPos +} + +// Zaps to a specific parsing position. +func (p *parser) textto(pos int) { + p.currentPos = pos +} + +// Returns the char at the right of the current parsing position and advances to the right. +func (p *parser) moveRightGetChar() rune { + ch := p.pattern[p.currentPos] + p.currentPos++ + return ch +} + +// Moves the current position to the right. +func (p *parser) moveRight(i int) { + // default would be 1 + p.currentPos += i +} + +// Moves the current parsing position one to the left. +func (p *parser) moveLeft() { + p.currentPos-- +} + +// Returns the char left of the current parsing position. +func (p *parser) charAt(i int) rune { + return p.pattern[i] +} + +// Returns the char i chars right of the current parsing position. +func (p *parser) rightChar(i int) rune { + // default would be 0 + return p.pattern[p.currentPos+i] +} + +// Number of characters to the right of the current parsing position. +func (p *parser) charsRight() int { + return len(p.pattern) - p.currentPos +} + +func (p *parser) rightMost() bool { + return p.currentPos == len(p.pattern) +} + +// Looks up the slot number for a given name +func (p *parser) captureSlotFromName(capname string) int { + return p.capnames[capname] +} + +// True if the capture slot was noted +func (p *parser) isCaptureSlot(i int) bool { + if p.caps != nil { + _, ok := p.caps[i] + return ok + } + + return (i >= 0 && i < p.capsize) +} + +// Looks up the slot number for a given name +func (p *parser) isCaptureName(capname string) bool { + if p.capnames == nil { + return false + } + + _, ok := p.capnames[capname] + return ok +} + +// option shortcuts + +// True if N option disabling '(' autocapture is on. +func (p *parser) useOptionN() bool { + return (p.options & ExplicitCapture) != 0 +} + +// True if I option enabling case-insensitivity is on. +func (p *parser) useOptionI() bool { + return (p.options & IgnoreCase) != 0 +} + +// True if M option altering meaning of $ and ^ is on. +func (p *parser) useOptionM() bool { + return (p.options & Multiline) != 0 +} + +// True if S option altering meaning of . is on. +func (p *parser) useOptionS() bool { + return (p.options & Singleline) != 0 +} + +// True if X option enabling whitespace/comment mode is on. +func (p *parser) useOptionX() bool { + return (p.options & IgnorePatternWhitespace) != 0 +} + +// True if E option enabling ECMAScript behavior on. +func (p *parser) useOptionE() bool { + return (p.options & ECMAScript) != 0 +} + +// true to use RE2 compatibility parsing behavior. +func (p *parser) useRE2() bool { + return (p.options & RE2) != 0 +} + +// True if options stack is empty. 
+func (p *parser) emptyOptionsStack() bool { + return len(p.optionsStack) == 0 +} + +// Finish the current quantifiable (when a quantifier is not found or is not possible) +func (p *parser) addConcatenate() { + // The first (| inside a Testgroup group goes directly to the group + p.concatenation.addChild(p.unit) + p.unit = nil +} + +// Finish the current quantifiable (when a quantifier is found) +func (p *parser) addConcatenate3(lazy bool, min, max int) { + p.concatenation.addChild(p.unit.makeQuantifier(lazy, min, max)) + p.unit = nil +} + +// Sets the current unit to a single char node +func (p *parser) addUnitOne(ch rune) { + if p.useOptionI() { + ch = unicode.ToLower(ch) + } + + p.unit = newRegexNodeCh(ntOne, p.options, ch) +} + +// Sets the current unit to a single inverse-char node +func (p *parser) addUnitNotone(ch rune) { + if p.useOptionI() { + ch = unicode.ToLower(ch) + } + + p.unit = newRegexNodeCh(ntNotone, p.options, ch) +} + +// Sets the current unit to a single set node +func (p *parser) addUnitSet(set *CharSet) { + p.unit = newRegexNodeSet(ntSet, p.options, set) +} + +// Sets the current unit to a subtree +func (p *parser) addUnitNode(node *regexNode) { + p.unit = node +} + +// Sets the current unit to an assertion of the specified type +func (p *parser) addUnitType(t nodeType) { + p.unit = newRegexNode(t, p.options) +} + +// Finish the current group (in response to a ')' or end) +func (p *parser) addGroup() error { + if p.group.t == ntTestgroup || p.group.t == ntTestref { + p.group.addChild(p.concatenation.reverseLeft()) + if (p.group.t == ntTestref && len(p.group.children) > 2) || len(p.group.children) > 3 { + return p.getErr(ErrTooManyAlternates) + } + } else { + p.alternation.addChild(p.concatenation.reverseLeft()) + p.group.addChild(p.alternation) + } + + p.unit = p.group + return nil +} + +// Pops the option stack, but keeps the current options unchanged. +func (p *parser) popKeepOptions() { + lastIdx := len(p.optionsStack) - 1 + p.optionsStack = p.optionsStack[:lastIdx] +} + +// Recalls options from the stack. +func (p *parser) popOptions() { + lastIdx := len(p.optionsStack) - 1 + // get the last item on the stack and then remove it by reslicing + p.options = p.optionsStack[lastIdx] + p.optionsStack = p.optionsStack[:lastIdx] +} + +// Saves options on a stack. +func (p *parser) pushOptions() { + p.optionsStack = append(p.optionsStack, p.options) +} + +// Add a string to the last concatenate. +func (p *parser) addToConcatenate(pos, cch int, isReplacement bool) { + var node *regexNode + + if cch == 0 { + return + } + + if cch > 1 { + str := p.pattern[pos : pos+cch] + + if p.useOptionI() && !isReplacement { + // We do the ToLower character by character for consistency. With surrogate chars, doing + // a ToLower on the entire string could actually change the surrogate pair. This is more correct + // linguistically, but since Regex doesn't support surrogates, it's more important to be + // consistent. 
+ for i := 0; i < len(str); i++ { + str[i] = unicode.ToLower(str[i]) + } + } + + node = newRegexNodeStr(ntMulti, p.options, str) + } else { + ch := p.charAt(pos) + + if p.useOptionI() && !isReplacement { + ch = unicode.ToLower(ch) + } + + node = newRegexNodeCh(ntOne, p.options, ch) + } + + p.concatenation.addChild(node) +} + +// Push the parser state (in response to an open paren) +func (p *parser) pushGroup() { + p.group.next = p.stack + p.alternation.next = p.group + p.concatenation.next = p.alternation + p.stack = p.concatenation +} + +// Remember the pushed state (in response to a ')') +func (p *parser) popGroup() error { + p.concatenation = p.stack + p.alternation = p.concatenation.next + p.group = p.alternation.next + p.stack = p.group.next + + // The first () inside a Testgroup group goes directly to the group + if p.group.t == ntTestgroup && len(p.group.children) == 0 { + if p.unit == nil { + return p.getErr(ErrConditionalExpression) + } + + p.group.addChild(p.unit) + p.unit = nil + } + return nil +} + +// True if the group stack is empty. +func (p *parser) emptyStack() bool { + return p.stack == nil +} + +// Start a new round for the parser state (in response to an open paren or string start) +func (p *parser) startGroup(openGroup *regexNode) { + p.group = openGroup + p.alternation = newRegexNode(ntAlternate, p.options) + p.concatenation = newRegexNode(ntConcatenate, p.options) +} + +// Finish the current concatenation (in response to a |) +func (p *parser) addAlternate() { + // The | parts inside a Testgroup group go directly to the group + + if p.group.t == ntTestgroup || p.group.t == ntTestref { + p.group.addChild(p.concatenation.reverseLeft()) + } else { + p.alternation.addChild(p.concatenation.reverseLeft()) + } + + p.concatenation = newRegexNode(ntConcatenate, p.options) +} + +// For categorizing ascii characters. + +const ( + Q byte = 5 // quantifier + S = 4 // ordinary stopper + Z = 3 // ScanBlank stopper + X = 2 // whitespace + E = 1 // should be escaped +) + +var _category = []byte{ + //01 2 3 4 5 6 7 8 9 A B C D E F 0 1 2 3 4 5 6 7 8 9 A B C D E F + 0, 0, 0, 0, 0, 0, 0, 0, 0, X, X, X, X, X, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // ! " # $ % & ' ( ) * + , - . / 0 1 2 3 4 5 6 7 8 9 : ; < = > ? + X, 0, 0, Z, S, 0, 0, 0, S, S, Q, Q, 0, 0, S, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, Q, + //@A B C D E F G H I J K L M N O P Q R S T U V W X Y Z [ \ ] ^ _ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, S, S, 0, S, 0, + //'a b c d e f g h i j k l m n o p q r s t u v w x y z { | } ~ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, Q, S, 0, 0, 0, +} + +func isSpace(ch rune) bool { + return (ch <= ' ' && _category[ch] == X) +} + +// Returns true for those characters that terminate a string of ordinary chars. +func isSpecial(ch rune) bool { + return (ch <= '|' && _category[ch] >= S) +} + +// Returns true for those characters that terminate a string of ordinary chars. +func isStopperX(ch rune) bool { + return (ch <= '|' && _category[ch] >= X) +} + +// Returns true for those characters that begin a quantifier. 
+func isQuantifier(ch rune) bool { + return (ch <= '{' && _category[ch] >= Q) +} + +func (p *parser) isTrueQuantifier() bool { + nChars := p.charsRight() + if nChars == 0 { + return false + } + + startpos := p.textpos() + ch := p.charAt(startpos) + if ch != '{' { + return ch <= '{' && _category[ch] >= Q + } + + //UGLY: this is ugly -- the original code was ugly too + pos := startpos + for { + nChars-- + if nChars <= 0 { + break + } + pos++ + ch = p.charAt(pos) + if ch < '0' || ch > '9' { + break + } + } + + if nChars == 0 || pos-startpos == 1 { + return false + } + if ch == '}' { + return true + } + if ch != ',' { + return false + } + for { + nChars-- + if nChars <= 0 { + break + } + pos++ + ch = p.charAt(pos) + if ch < '0' || ch > '9' { + break + } + } + + return nChars > 0 && ch == '}' +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/prefix.go b/vendor/github.com/dlclark/regexp2/syntax/prefix.go new file mode 100644 index 000000000..011ef0b41 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/prefix.go @@ -0,0 +1,896 @@ +package syntax + +import ( + "bytes" + "fmt" + "strconv" + "unicode" + "unicode/utf8" +) + +type Prefix struct { + PrefixStr []rune + PrefixSet CharSet + CaseInsensitive bool +} + +// It takes a RegexTree and computes the set of chars that can start it. +func getFirstCharsPrefix(tree *RegexTree) *Prefix { + s := regexFcd{ + fcStack: make([]regexFc, 32), + intStack: make([]int, 32), + } + fc := s.regexFCFromRegexTree(tree) + + if fc == nil || fc.nullable || fc.cc.IsEmpty() { + return nil + } + fcSet := fc.getFirstChars() + return &Prefix{PrefixSet: fcSet, CaseInsensitive: fc.caseInsensitive} +} + +type regexFcd struct { + intStack []int + intDepth int + fcStack []regexFc + fcDepth int + skipAllChildren bool // don't process any more children at the current level + skipchild bool // don't process the current child. + failed bool +} + +/* + * The main FC computation. It does a shortcutted depth-first walk + * through the tree and calls CalculateFC to emits code before + * and after each child of an interior node, and at each leaf. + */ +func (s *regexFcd) regexFCFromRegexTree(tree *RegexTree) *regexFc { + curNode := tree.root + curChild := 0 + + for { + if len(curNode.children) == 0 { + // This is a leaf node + s.calculateFC(curNode.t, curNode, 0) + } else if curChild < len(curNode.children) && !s.skipAllChildren { + // This is an interior node, and we have more children to analyze + s.calculateFC(curNode.t|beforeChild, curNode, curChild) + + if !s.skipchild { + curNode = curNode.children[curChild] + // this stack is how we get a depth first walk of the tree. + s.pushInt(curChild) + curChild = 0 + } else { + curChild++ + s.skipchild = false + } + continue + } + + // This is an interior node where we've finished analyzing all the children, or + // the end of a leaf node. + s.skipAllChildren = false + + if s.intIsEmpty() { + break + } + + curChild = s.popInt() + curNode = curNode.next + + s.calculateFC(curNode.t|afterChild, curNode, curChild) + if s.failed { + return nil + } + + curChild++ + } + + if s.fcIsEmpty() { + return nil + } + + return s.popFC() +} + +// To avoid recursion, we use a simple integer stack. +// This is the push. +func (s *regexFcd) pushInt(I int) { + if s.intDepth >= len(s.intStack) { + expanded := make([]int, s.intDepth*2) + copy(expanded, s.intStack) + s.intStack = expanded + } + + s.intStack[s.intDepth] = I + s.intDepth++ +} + +// True if the stack is empty. 
+func (s *regexFcd) intIsEmpty() bool { + return s.intDepth == 0 +} + +// This is the pop. +func (s *regexFcd) popInt() int { + s.intDepth-- + return s.intStack[s.intDepth] +} + +// We also use a stack of RegexFC objects. +// This is the push. +func (s *regexFcd) pushFC(fc regexFc) { + if s.fcDepth >= len(s.fcStack) { + expanded := make([]regexFc, s.fcDepth*2) + copy(expanded, s.fcStack) + s.fcStack = expanded + } + + s.fcStack[s.fcDepth] = fc + s.fcDepth++ +} + +// True if the stack is empty. +func (s *regexFcd) fcIsEmpty() bool { + return s.fcDepth == 0 +} + +// This is the pop. +func (s *regexFcd) popFC() *regexFc { + s.fcDepth-- + return &s.fcStack[s.fcDepth] +} + +// This is the top. +func (s *regexFcd) topFC() *regexFc { + return &s.fcStack[s.fcDepth-1] +} + +// Called in Beforechild to prevent further processing of the current child +func (s *regexFcd) skipChild() { + s.skipchild = true +} + +// FC computation and shortcut cases for each node type +func (s *regexFcd) calculateFC(nt nodeType, node *regexNode, CurIndex int) { + //fmt.Printf("NodeType: %v, CurIndex: %v, Desc: %v\n", nt, CurIndex, node.description()) + ci := false + rtl := false + + if nt <= ntRef { + if (node.options & IgnoreCase) != 0 { + ci = true + } + if (node.options & RightToLeft) != 0 { + rtl = true + } + } + + switch nt { + case ntConcatenate | beforeChild, ntAlternate | beforeChild, ntTestref | beforeChild, ntLoop | beforeChild, ntLazyloop | beforeChild: + break + + case ntTestgroup | beforeChild: + if CurIndex == 0 { + s.skipChild() + } + break + + case ntEmpty: + s.pushFC(regexFc{nullable: true}) + break + + case ntConcatenate | afterChild: + if CurIndex != 0 { + child := s.popFC() + cumul := s.topFC() + + s.failed = !cumul.addFC(*child, true) + } + + fc := s.topFC() + if !fc.nullable { + s.skipAllChildren = true + } + break + + case ntTestgroup | afterChild: + if CurIndex > 1 { + child := s.popFC() + cumul := s.topFC() + + s.failed = !cumul.addFC(*child, false) + } + break + + case ntAlternate | afterChild, ntTestref | afterChild: + if CurIndex != 0 { + child := s.popFC() + cumul := s.topFC() + + s.failed = !cumul.addFC(*child, false) + } + break + + case ntLoop | afterChild, ntLazyloop | afterChild: + if node.m == 0 { + fc := s.topFC() + fc.nullable = true + } + break + + case ntGroup | beforeChild, ntGroup | afterChild, ntCapture | beforeChild, ntCapture | afterChild, ntGreedy | beforeChild, ntGreedy | afterChild: + break + + case ntRequire | beforeChild, ntPrevent | beforeChild: + s.skipChild() + s.pushFC(regexFc{nullable: true}) + break + + case ntRequire | afterChild, ntPrevent | afterChild: + break + + case ntOne, ntNotone: + s.pushFC(newRegexFc(node.ch, nt == ntNotone, false, ci)) + break + + case ntOneloop, ntOnelazy: + s.pushFC(newRegexFc(node.ch, false, node.m == 0, ci)) + break + + case ntNotoneloop, ntNotonelazy: + s.pushFC(newRegexFc(node.ch, true, node.m == 0, ci)) + break + + case ntMulti: + if len(node.str) == 0 { + s.pushFC(regexFc{nullable: true}) + } else if !rtl { + s.pushFC(newRegexFc(node.str[0], false, false, ci)) + } else { + s.pushFC(newRegexFc(node.str[len(node.str)-1], false, false, ci)) + } + break + + case ntSet: + s.pushFC(regexFc{cc: node.set.Copy(), nullable: false, caseInsensitive: ci}) + break + + case ntSetloop, ntSetlazy: + s.pushFC(regexFc{cc: node.set.Copy(), nullable: node.m == 0, caseInsensitive: ci}) + break + + case ntRef: + s.pushFC(regexFc{cc: *AnyClass(), nullable: true, caseInsensitive: false}) + break + + case ntNothing, ntBol, ntEol, ntBoundary, ntNonboundary, 
ntECMABoundary, ntNonECMABoundary, ntBeginning, ntStart, ntEndZ, ntEnd: + s.pushFC(regexFc{nullable: true}) + break + + default: + panic(fmt.Sprintf("unexpected op code: %v", nt)) + } +} + +type regexFc struct { + cc CharSet + nullable bool + caseInsensitive bool +} + +func newRegexFc(ch rune, not, nullable, caseInsensitive bool) regexFc { + r := regexFc{ + caseInsensitive: caseInsensitive, + nullable: nullable, + } + if not { + if ch > 0 { + r.cc.addRange('\x00', ch-1) + } + if ch < 0xFFFF { + r.cc.addRange(ch+1, utf8.MaxRune) + } + } else { + r.cc.addRange(ch, ch) + } + return r +} + +func (r *regexFc) getFirstChars() CharSet { + if r.caseInsensitive { + r.cc.addLowercase() + } + + return r.cc +} + +func (r *regexFc) addFC(fc regexFc, concatenate bool) bool { + if !r.cc.IsMergeable() || !fc.cc.IsMergeable() { + return false + } + + if concatenate { + if !r.nullable { + return true + } + + if !fc.nullable { + r.nullable = false + } + } else { + if fc.nullable { + r.nullable = true + } + } + + r.caseInsensitive = r.caseInsensitive || fc.caseInsensitive + r.cc.addSet(fc.cc) + + return true +} + +// This is a related computation: it takes a RegexTree and computes the +// leading substring if it sees one. It's quite trivial and gives up easily. +func getPrefix(tree *RegexTree) *Prefix { + var concatNode *regexNode + nextChild := 0 + + curNode := tree.root + + for { + switch curNode.t { + case ntConcatenate: + if len(curNode.children) > 0 { + concatNode = curNode + nextChild = 0 + } + + case ntGreedy, ntCapture: + curNode = curNode.children[0] + concatNode = nil + continue + + case ntOneloop, ntOnelazy: + if curNode.m > 0 { + return &Prefix{ + PrefixStr: repeat(curNode.ch, curNode.m), + CaseInsensitive: (curNode.options & IgnoreCase) != 0, + } + } + return nil + + case ntOne: + return &Prefix{ + PrefixStr: []rune{curNode.ch}, + CaseInsensitive: (curNode.options & IgnoreCase) != 0, + } + + case ntMulti: + return &Prefix{ + PrefixStr: curNode.str, + CaseInsensitive: (curNode.options & IgnoreCase) != 0, + } + + case ntBol, ntEol, ntBoundary, ntECMABoundary, ntBeginning, ntStart, + ntEndZ, ntEnd, ntEmpty, ntRequire, ntPrevent: + + default: + return nil + } + + if concatNode == nil || nextChild >= len(concatNode.children) { + return nil + } + + curNode = concatNode.children[nextChild] + nextChild++ + } +} + +// repeat the rune r, c times... up to the max of MaxPrefixSize +func repeat(r rune, c int) []rune { + if c > MaxPrefixSize { + c = MaxPrefixSize + } + + ret := make([]rune, c) + + // binary growth using copy for speed + ret[0] = r + bp := 1 + for bp < len(ret) { + copy(ret[bp:], ret[:bp]) + bp *= 2 + } + + return ret +} + +// BmPrefix precomputes the Boyer-Moore +// tables for fast string scanning. These tables allow +// you to scan for the first occurrence of a string within +// a large body of text without examining every character. +// The performance of the heuristic depends on the actual +// string and the text being searched, but usually, the longer +// the string that is being searched for, the fewer characters +// need to be examined. 
+type BmPrefix struct { + positive []int + negativeASCII []int + negativeUnicode [][]int + pattern []rune + lowASCII rune + highASCII rune + rightToLeft bool + caseInsensitive bool +} + +func newBmPrefix(pattern []rune, caseInsensitive, rightToLeft bool) *BmPrefix { + + b := &BmPrefix{ + rightToLeft: rightToLeft, + caseInsensitive: caseInsensitive, + pattern: pattern, + } + + if caseInsensitive { + for i := 0; i < len(b.pattern); i++ { + // We do the ToLower character by character for consistency. With surrogate chars, doing + // a ToLower on the entire string could actually change the surrogate pair. This is more correct + // linguistically, but since Regex doesn't support surrogates, it's more important to be + // consistent. + + b.pattern[i] = unicode.ToLower(b.pattern[i]) + } + } + + var beforefirst, last, bump int + var scan, match int + + if !rightToLeft { + beforefirst = -1 + last = len(b.pattern) - 1 + bump = 1 + } else { + beforefirst = len(b.pattern) + last = 0 + bump = -1 + } + + // PART I - the good-suffix shift table + // + // compute the positive requirement: + // if char "i" is the first one from the right that doesn't match, + // then we know the matcher can advance by _positive[i]. + // + // This algorithm is a simplified variant of the standard + // Boyer-Moore good suffix calculation. + + b.positive = make([]int, len(b.pattern)) + + examine := last + ch := b.pattern[examine] + b.positive[examine] = bump + examine -= bump + +Outerloop: + for { + // find an internal char (examine) that matches the tail + + for { + if examine == beforefirst { + break Outerloop + } + if b.pattern[examine] == ch { + break + } + examine -= bump + } + + match = last + scan = examine + + // find the length of the match + for { + if scan == beforefirst || b.pattern[match] != b.pattern[scan] { + // at the end of the match, note the difference in _positive + // this is not the length of the match, but the distance from the internal match + // to the tail suffix. + if b.positive[match] == 0 { + b.positive[match] = match - scan + } + + // System.Diagnostics.Debug.WriteLine("Set positive[" + match + "] to " + (match - scan)); + + break + } + + scan -= bump + match -= bump + } + + examine -= bump + } + + match = last - bump + + // scan for the chars for which there are no shifts that yield a different candidate + + // The inside of the if statement used to say + // "_positive[match] = last - beforefirst;" + // This is slightly less aggressive in how much we skip, but at worst it + // should mean a little more work rather than skipping a potential match. + for match != beforefirst { + if b.positive[match] == 0 { + b.positive[match] = bump + } + + match -= bump + } + + // PART II - the bad-character shift table + // + // compute the negative requirement: + // if char "ch" is the reject character when testing position "i", + // we can slide up by _negative[ch]; + // (_negative[ch] = str.Length - 1 - str.LastIndexOf(ch)) + // + // the lookup table is divided into ASCII and Unicode portions; + // only those parts of the Unicode 16-bit code set that actually + // appear in the string are in the table. (Maximum size with + // Unicode is 65K; ASCII only case is 512 bytes.) 
+ + b.negativeASCII = make([]int, 128) + + for i := 0; i < len(b.negativeASCII); i++ { + b.negativeASCII[i] = last - beforefirst + } + + b.lowASCII = 127 + b.highASCII = 0 + + for examine = last; examine != beforefirst; examine -= bump { + ch = b.pattern[examine] + + switch { + case ch < 128: + if b.lowASCII > ch { + b.lowASCII = ch + } + + if b.highASCII < ch { + b.highASCII = ch + } + + if b.negativeASCII[ch] == last-beforefirst { + b.negativeASCII[ch] = last - examine + } + case ch <= 0xffff: + i, j := ch>>8, ch&0xFF + + if b.negativeUnicode == nil { + b.negativeUnicode = make([][]int, 256) + } + + if b.negativeUnicode[i] == nil { + newarray := make([]int, 256) + + for k := 0; k < len(newarray); k++ { + newarray[k] = last - beforefirst + } + + if i == 0 { + copy(newarray, b.negativeASCII) + //TODO: this line needed? + b.negativeASCII = newarray + } + + b.negativeUnicode[i] = newarray + } + + if b.negativeUnicode[i][j] == last-beforefirst { + b.negativeUnicode[i][j] = last - examine + } + default: + // we can't do the filter because this algo doesn't support + // unicode chars >0xffff + return nil + } + } + + return b +} + +func (b *BmPrefix) String() string { + return string(b.pattern) +} + +// Dump returns the contents of the filter as a human readable string +func (b *BmPrefix) Dump(indent string) string { + buf := &bytes.Buffer{} + + fmt.Fprintf(buf, "%sBM Pattern: %s\n%sPositive: ", indent, string(b.pattern), indent) + for i := 0; i < len(b.positive); i++ { + buf.WriteString(strconv.Itoa(b.positive[i])) + buf.WriteRune(' ') + } + buf.WriteRune('\n') + + if b.negativeASCII != nil { + buf.WriteString(indent) + buf.WriteString("Negative table\n") + for i := 0; i < len(b.negativeASCII); i++ { + if b.negativeASCII[i] != len(b.pattern) { + fmt.Fprintf(buf, "%s %s %s\n", indent, Escape(string(rune(i))), strconv.Itoa(b.negativeASCII[i])) + } + } + } + + return buf.String() +} + +// Scan uses the Boyer-Moore algorithm to find the first occurrence +// of the specified string within text, beginning at index, and +// constrained within beglimit and endlimit. +// +// The direction and case-sensitivity of the match is determined +// by the arguments to the RegexBoyerMoore constructor. 
+func (b *BmPrefix) Scan(text []rune, index, beglimit, endlimit int) int { + var ( + defadv, test, test2 int + match, startmatch, endmatch int + bump, advance int + chTest rune + unicodeLookup []int + ) + + if !b.rightToLeft { + defadv = len(b.pattern) + startmatch = len(b.pattern) - 1 + endmatch = 0 + test = index + defadv - 1 + bump = 1 + } else { + defadv = -len(b.pattern) + startmatch = 0 + endmatch = -defadv - 1 + test = index + defadv + bump = -1 + } + + chMatch := b.pattern[startmatch] + + for { + if test >= endlimit || test < beglimit { + return -1 + } + + chTest = text[test] + + if b.caseInsensitive { + chTest = unicode.ToLower(chTest) + } + + if chTest != chMatch { + if chTest < 128 { + advance = b.negativeASCII[chTest] + } else if chTest < 0xffff && len(b.negativeUnicode) > 0 { + unicodeLookup = b.negativeUnicode[chTest>>8] + if len(unicodeLookup) > 0 { + advance = unicodeLookup[chTest&0xFF] + } else { + advance = defadv + } + } else { + advance = defadv + } + + test += advance + } else { // if (chTest == chMatch) + test2 = test + match = startmatch + + for { + if match == endmatch { + if b.rightToLeft { + return test2 + 1 + } else { + return test2 + } + } + + match -= bump + test2 -= bump + + chTest = text[test2] + + if b.caseInsensitive { + chTest = unicode.ToLower(chTest) + } + + if chTest != b.pattern[match] { + advance = b.positive[match] + if (chTest & 0xFF80) == 0 { + test2 = (match - startmatch) + b.negativeASCII[chTest] + } else if chTest < 0xffff && len(b.negativeUnicode) > 0 { + unicodeLookup = b.negativeUnicode[chTest>>8] + if len(unicodeLookup) > 0 { + test2 = (match - startmatch) + unicodeLookup[chTest&0xFF] + } else { + test += advance + break + } + } else { + test += advance + break + } + + if b.rightToLeft { + if test2 < advance { + advance = test2 + } + } else if test2 > advance { + advance = test2 + } + + test += advance + break + } + } + } + } +} + +// When a regex is anchored, we can do a quick IsMatch test instead of a Scan +func (b *BmPrefix) IsMatch(text []rune, index, beglimit, endlimit int) bool { + if !b.rightToLeft { + if index < beglimit || endlimit-index < len(b.pattern) { + return false + } + + return b.matchPattern(text, index) + } else { + if index > endlimit || index-beglimit < len(b.pattern) { + return false + } + + return b.matchPattern(text, index-len(b.pattern)) + } +} + +func (b *BmPrefix) matchPattern(text []rune, index int) bool { + if len(text)-index < len(b.pattern) { + return false + } + + if b.caseInsensitive { + for i := 0; i < len(b.pattern); i++ { + //Debug.Assert(textinfo.ToLower(_pattern[i]) == _pattern[i], "pattern should be converted to lower case in constructor!"); + if unicode.ToLower(text[index+i]) != b.pattern[i] { + return false + } + } + return true + } else { + for i := 0; i < len(b.pattern); i++ { + if text[index+i] != b.pattern[i] { + return false + } + } + return true + } +} + +type AnchorLoc int16 + +// where the regex can be pegged +const ( + AnchorBeginning AnchorLoc = 0x0001 + AnchorBol = 0x0002 + AnchorStart = 0x0004 + AnchorEol = 0x0008 + AnchorEndZ = 0x0010 + AnchorEnd = 0x0020 + AnchorBoundary = 0x0040 + AnchorECMABoundary = 0x0080 +) + +func getAnchors(tree *RegexTree) AnchorLoc { + + var concatNode *regexNode + nextChild, result := 0, AnchorLoc(0) + + curNode := tree.root + + for { + switch curNode.t { + case ntConcatenate: + if len(curNode.children) > 0 { + concatNode = curNode + nextChild = 0 + } + + case ntGreedy, ntCapture: + curNode = curNode.children[0] + concatNode = nil + continue + + case ntBol, 
ntEol, ntBoundary, ntECMABoundary, ntBeginning, + ntStart, ntEndZ, ntEnd: + return result | anchorFromType(curNode.t) + + case ntEmpty, ntRequire, ntPrevent: + + default: + return result + } + + if concatNode == nil || nextChild >= len(concatNode.children) { + return result + } + + curNode = concatNode.children[nextChild] + nextChild++ + } +} + +func anchorFromType(t nodeType) AnchorLoc { + switch t { + case ntBol: + return AnchorBol + case ntEol: + return AnchorEol + case ntBoundary: + return AnchorBoundary + case ntECMABoundary: + return AnchorECMABoundary + case ntBeginning: + return AnchorBeginning + case ntStart: + return AnchorStart + case ntEndZ: + return AnchorEndZ + case ntEnd: + return AnchorEnd + default: + return 0 + } +} + +// anchorDescription returns a human-readable description of the anchors +func (anchors AnchorLoc) String() string { + buf := &bytes.Buffer{} + + if 0 != (anchors & AnchorBeginning) { + buf.WriteString(", Beginning") + } + if 0 != (anchors & AnchorStart) { + buf.WriteString(", Start") + } + if 0 != (anchors & AnchorBol) { + buf.WriteString(", Bol") + } + if 0 != (anchors & AnchorBoundary) { + buf.WriteString(", Boundary") + } + if 0 != (anchors & AnchorECMABoundary) { + buf.WriteString(", ECMABoundary") + } + if 0 != (anchors & AnchorEol) { + buf.WriteString(", Eol") + } + if 0 != (anchors & AnchorEnd) { + buf.WriteString(", End") + } + if 0 != (anchors & AnchorEndZ) { + buf.WriteString(", EndZ") + } + + // trim off comma + if buf.Len() >= 2 { + return buf.String()[2:] + } + return "None" +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/replacerdata.go b/vendor/github.com/dlclark/regexp2/syntax/replacerdata.go new file mode 100644 index 000000000..bcf4d3f25 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/replacerdata.go @@ -0,0 +1,87 @@ +package syntax + +import ( + "bytes" + "errors" +) + +type ReplacerData struct { + Rep string + Strings []string + Rules []int +} + +const ( + replaceSpecials = 4 + replaceLeftPortion = -1 + replaceRightPortion = -2 + replaceLastGroup = -3 + replaceWholeString = -4 +) + +//ErrReplacementError is a general error during parsing the replacement text +var ErrReplacementError = errors.New("Replacement pattern error.") + +// NewReplacerData will populate a reusable replacer data struct based on the given replacement string +// and the capture group data from a regexp +func NewReplacerData(rep string, caps map[int]int, capsize int, capnames map[string]int, op RegexOptions) (*ReplacerData, error) { + p := parser{ + options: op, + caps: caps, + capsize: capsize, + capnames: capnames, + } + p.setPattern(rep) + concat, err := p.scanReplacement() + if err != nil { + return nil, err + } + + if concat.t != ntConcatenate { + panic(ErrReplacementError) + } + + sb := &bytes.Buffer{} + var ( + strings []string + rules []int + ) + + for _, child := range concat.children { + switch child.t { + case ntMulti: + child.writeStrToBuf(sb) + + case ntOne: + sb.WriteRune(child.ch) + + case ntRef: + if sb.Len() > 0 { + rules = append(rules, len(strings)) + strings = append(strings, sb.String()) + sb.Reset() + } + slot := child.m + + if len(caps) > 0 && slot >= 0 { + slot = caps[slot] + } + + rules = append(rules, -replaceSpecials-1-slot) + + default: + panic(ErrReplacementError) + } + } + + if sb.Len() > 0 { + rules = append(rules, len(strings)) + strings = append(strings, sb.String()) + } + + return &ReplacerData{ + Rep: rep, + Strings: strings, + Rules: rules, + }, nil +} diff --git 
a/vendor/github.com/dlclark/regexp2/syntax/tree.go b/vendor/github.com/dlclark/regexp2/syntax/tree.go new file mode 100644 index 000000000..ea2882931 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/tree.go @@ -0,0 +1,654 @@ +package syntax + +import ( + "bytes" + "fmt" + "math" + "strconv" +) + +type RegexTree struct { + root *regexNode + caps map[int]int + capnumlist []int + captop int + Capnames map[string]int + Caplist []string + options RegexOptions +} + +// It is built into a parsed tree for a regular expression. + +// Implementation notes: +// +// Since the node tree is a temporary data structure only used +// during compilation of the regexp to integer codes, it's +// designed for clarity and convenience rather than +// space efficiency. +// +// RegexNodes are built into a tree, linked by the n.children list. +// Each node also has a n.parent and n.ichild member indicating +// its parent and which child # it is in its parent's list. +// +// RegexNodes come in as many types as there are constructs in +// a regular expression, for example, "concatenate", "alternate", +// "one", "rept", "group". There are also node types for basic +// peephole optimizations, e.g., "onerep", "notsetrep", etc. +// +// Because perl 5 allows "lookback" groups that scan backwards, +// each node also gets a "direction". Normally the value of +// boolean n.backward = false. +// +// During parsing, top-level nodes are also stacked onto a parse +// stack (a stack of trees). For this purpose we have a n.next +// pointer. [Note that to save a few bytes, we could overload the +// n.parent pointer instead.] +// +// On the parse stack, each tree has a "role" - basically, the +// nonterminal in the grammar that the parser has currently +// assigned to the tree. That code is stored in n.role. +// +// Finally, some of the different kinds of nodes have data. +// Two integers (for the looping constructs) are stored in +// n.operands, an an object (either a string or a set) +// is stored in n.data +type regexNode struct { + t nodeType + children []*regexNode + str []rune + set *CharSet + ch rune + m int + n int + options RegexOptions + next *regexNode +} + +type nodeType int32 + +const ( + // The following are leaves, and correspond to primitive operations + + ntOnerep nodeType = 0 // lef,back char,min,max a {n} + ntNotonerep = 1 // lef,back char,min,max .{n} + ntSetrep = 2 // lef,back set,min,max [\d]{n} + ntOneloop = 3 // lef,back char,min,max a {,n} + ntNotoneloop = 4 // lef,back char,min,max .{,n} + ntSetloop = 5 // lef,back set,min,max [\d]{,n} + ntOnelazy = 6 // lef,back char,min,max a {,n}? + ntNotonelazy = 7 // lef,back char,min,max .{,n}? + ntSetlazy = 8 // lef,back set,min,max [\d]{,n}? + ntOne = 9 // lef char a + ntNotone = 10 // lef char [^a] + ntSet = 11 // lef set [a-z\s] \w \s \d + ntMulti = 12 // lef string abcd + ntRef = 13 // lef group \# + ntBol = 14 // ^ + ntEol = 15 // $ + ntBoundary = 16 // \b + ntNonboundary = 17 // \B + ntBeginning = 18 // \A + ntStart = 19 // \G + ntEndZ = 20 // \Z + ntEnd = 21 // \Z + + // Interior nodes do not correspond to primitive operations, but + // control structures compositing other operations + + // Concat and alternate take n children, and can run forward or backwards + + ntNothing = 22 // [] + ntEmpty = 23 // () + ntAlternate = 24 // a|b + ntConcatenate = 25 // ab + ntLoop = 26 // m,x * + ? {,} + ntLazyloop = 27 // m,x *? +? ?? {,}? + ntCapture = 28 // n () + ntGroup = 29 // (?:) + ntRequire = 30 // (?=) (?<=) + ntPrevent = 31 // (?!) (?) 
(?<) + ntTestref = 33 // (?(n) | ) + ntTestgroup = 34 // (?(...) | ) + + ntECMABoundary = 41 // \b + ntNonECMABoundary = 42 // \B +) + +func newRegexNode(t nodeType, opt RegexOptions) *regexNode { + return ®exNode{ + t: t, + options: opt, + } +} + +func newRegexNodeCh(t nodeType, opt RegexOptions, ch rune) *regexNode { + return ®exNode{ + t: t, + options: opt, + ch: ch, + } +} + +func newRegexNodeStr(t nodeType, opt RegexOptions, str []rune) *regexNode { + return ®exNode{ + t: t, + options: opt, + str: str, + } +} + +func newRegexNodeSet(t nodeType, opt RegexOptions, set *CharSet) *regexNode { + return ®exNode{ + t: t, + options: opt, + set: set, + } +} + +func newRegexNodeM(t nodeType, opt RegexOptions, m int) *regexNode { + return ®exNode{ + t: t, + options: opt, + m: m, + } +} +func newRegexNodeMN(t nodeType, opt RegexOptions, m, n int) *regexNode { + return ®exNode{ + t: t, + options: opt, + m: m, + n: n, + } +} + +func (n *regexNode) writeStrToBuf(buf *bytes.Buffer) { + for i := 0; i < len(n.str); i++ { + buf.WriteRune(n.str[i]) + } +} + +func (n *regexNode) addChild(child *regexNode) { + reduced := child.reduce() + n.children = append(n.children, reduced) + reduced.next = n +} + +func (n *regexNode) insertChildren(afterIndex int, nodes []*regexNode) { + newChildren := make([]*regexNode, 0, len(n.children)+len(nodes)) + n.children = append(append(append(newChildren, n.children[:afterIndex]...), nodes...), n.children[afterIndex:]...) +} + +// removes children including the start but not the end index +func (n *regexNode) removeChildren(startIndex, endIndex int) { + n.children = append(n.children[:startIndex], n.children[endIndex:]...) +} + +// Pass type as OneLazy or OneLoop +func (n *regexNode) makeRep(t nodeType, min, max int) { + n.t += (t - ntOne) + n.m = min + n.n = max +} + +func (n *regexNode) reduce() *regexNode { + switch n.t { + case ntAlternate: + return n.reduceAlternation() + + case ntConcatenate: + return n.reduceConcatenation() + + case ntLoop, ntLazyloop: + return n.reduceRep() + + case ntGroup: + return n.reduceGroup() + + case ntSet, ntSetloop: + return n.reduceSet() + + default: + return n + } +} + +// Basic optimization. Single-letter alternations can be replaced +// by faster set specifications, and nested alternations with no +// intervening operators can be flattened: +// +// a|b|c|def|g|h -> [a-c]|def|[gh] +// apple|(?:orange|pear)|grape -> apple|orange|pear|grape +func (n *regexNode) reduceAlternation() *regexNode { + if len(n.children) == 0 { + return newRegexNode(ntNothing, n.options) + } + + wasLastSet := false + lastNodeCannotMerge := false + var optionsLast RegexOptions + var i, j int + + for i, j = 0, 0; i < len(n.children); i, j = i+1, j+1 { + at := n.children[i] + + if j < i { + n.children[j] = at + } + + for { + if at.t == ntAlternate { + for k := 0; k < len(at.children); k++ { + at.children[k].next = n + } + n.insertChildren(i+1, at.children) + + j-- + } else if at.t == ntSet || at.t == ntOne { + // Cannot merge sets if L or I options differ, or if either are negated. 
+ optionsAt := at.options & (RightToLeft | IgnoreCase) + + if at.t == ntSet { + if !wasLastSet || optionsLast != optionsAt || lastNodeCannotMerge || !at.set.IsMergeable() { + wasLastSet = true + lastNodeCannotMerge = !at.set.IsMergeable() + optionsLast = optionsAt + break + } + } else if !wasLastSet || optionsLast != optionsAt || lastNodeCannotMerge { + wasLastSet = true + lastNodeCannotMerge = false + optionsLast = optionsAt + break + } + + // The last node was a Set or a One, we're a Set or One and our options are the same. + // Merge the two nodes. + j-- + prev := n.children[j] + + var prevCharClass *CharSet + if prev.t == ntOne { + prevCharClass = &CharSet{} + prevCharClass.addChar(prev.ch) + } else { + prevCharClass = prev.set + } + + if at.t == ntOne { + prevCharClass.addChar(at.ch) + } else { + prevCharClass.addSet(*at.set) + } + + prev.t = ntSet + prev.set = prevCharClass + } else if at.t == ntNothing { + j-- + } else { + wasLastSet = false + lastNodeCannotMerge = false + } + break + } + } + + if j < i { + n.removeChildren(j, i) + } + + return n.stripEnation(ntNothing) +} + +// Basic optimization. Adjacent strings can be concatenated. +// +// (?:abc)(?:def) -> abcdef +func (n *regexNode) reduceConcatenation() *regexNode { + // Eliminate empties and concat adjacent strings/chars + + var optionsLast RegexOptions + var optionsAt RegexOptions + var i, j int + + if len(n.children) == 0 { + return newRegexNode(ntEmpty, n.options) + } + + wasLastString := false + + for i, j = 0, 0; i < len(n.children); i, j = i+1, j+1 { + var at, prev *regexNode + + at = n.children[i] + + if j < i { + n.children[j] = at + } + + if at.t == ntConcatenate && + ((at.options & RightToLeft) == (n.options & RightToLeft)) { + for k := 0; k < len(at.children); k++ { + at.children[k].next = n + } + + //insert at.children at i+1 index in n.children + n.insertChildren(i+1, at.children) + + j-- + } else if at.t == ntMulti || at.t == ntOne { + // Cannot merge strings if L or I options differ + optionsAt = at.options & (RightToLeft | IgnoreCase) + + if !wasLastString || optionsLast != optionsAt { + wasLastString = true + optionsLast = optionsAt + continue + } + + j-- + prev = n.children[j] + + if prev.t == ntOne { + prev.t = ntMulti + prev.str = []rune{prev.ch} + } + + if (optionsAt & RightToLeft) == 0 { + if at.t == ntOne { + prev.str = append(prev.str, at.ch) + } else { + prev.str = append(prev.str, at.str...) 
+ } + } else { + if at.t == ntOne { + // insert at the front by expanding our slice, copying the data over, and then setting the value + prev.str = append(prev.str, 0) + copy(prev.str[1:], prev.str) + prev.str[0] = at.ch + } else { + //insert at the front...this one we'll make a new slice and copy both into it + merge := make([]rune, len(prev.str)+len(at.str)) + copy(merge, at.str) + copy(merge[len(at.str):], prev.str) + prev.str = merge + } + } + } else if at.t == ntEmpty { + j-- + } else { + wasLastString = false + } + } + + if j < i { + // remove indices j through i from the children + n.removeChildren(j, i) + } + + return n.stripEnation(ntEmpty) +} + +// Nested repeaters just get multiplied with each other if they're not +// too lumpy +func (n *regexNode) reduceRep() *regexNode { + + u := n + t := n.t + min := n.m + max := n.n + + for { + if len(u.children) == 0 { + break + } + + child := u.children[0] + + // multiply reps of the same type only + if child.t != t { + childType := child.t + + if !(childType >= ntOneloop && childType <= ntSetloop && t == ntLoop || + childType >= ntOnelazy && childType <= ntSetlazy && t == ntLazyloop) { + break + } + } + + // child can be too lumpy to blur, e.g., (a {100,105}) {3} or (a {2,})? + // [but things like (a {2,})+ are not too lumpy...] + if u.m == 0 && child.m > 1 || child.n < child.m*2 { + break + } + + u = child + if u.m > 0 { + if (math.MaxInt32-1)/u.m < min { + u.m = math.MaxInt32 + } else { + u.m = u.m * min + } + } + if u.n > 0 { + if (math.MaxInt32-1)/u.n < max { + u.n = math.MaxInt32 + } else { + u.n = u.n * max + } + } + } + + if math.MaxInt32 == min { + return newRegexNode(ntNothing, n.options) + } + return u + +} + +// Simple optimization. If a concatenation or alternation has only +// one child strip out the intermediate node. If it has zero children, +// turn it into an empty. +func (n *regexNode) stripEnation(emptyType nodeType) *regexNode { + switch len(n.children) { + case 0: + return newRegexNode(emptyType, n.options) + case 1: + return n.children[0] + default: + return n + } +} + +func (n *regexNode) reduceGroup() *regexNode { + u := n + + for u.t == ntGroup { + u = u.children[0] + } + + return u +} + +// Simple optimization. If a set is a singleton, an inverse singleton, +// or empty, it's transformed accordingly. 
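
For reference, a minimal standalone sketch (not part of the vendored regexp2 sources) of the two tree rewrites described above: adjacent literals are folded together as in reduceConcatenation, and directly nested repeats are multiplied as in reduceRep. The simplified node type and the omission of the overflow and "lumpiness" guards are assumptions of this sketch, not behaviour of the vendored code.

package main

import "fmt"

// node is a deliberately simplified stand-in for regexNode: a literal when
// child is nil, otherwise a bounded repeat of its child.
type node struct {
	lit      string
	child    *node
	min, max int
}

// mergeLiterals folds adjacent literal nodes together, the same idea as
// reduceConcatenation turning One/Multi neighbours into a single Multi.
func mergeLiterals(children []*node) []*node {
	var out []*node
	for _, c := range children {
		if n := len(out); c.child == nil && n > 0 && out[n-1].child == nil {
			out[n-1].lit += c.lit // "ab" + "cd" -> "abcd"
			continue
		}
		out = append(out, c)
	}
	return out
}

// flattenRepeat multiplies directly nested repeats, the same idea as
// reduceRep turning (x{2,3}){4} into x{8,12}.
func flattenRepeat(n *node) *node {
	if n.child != nil && n.child.child != nil {
		inner := n.child
		return &node{child: inner.child, min: n.min * inner.min, max: n.max * inner.max}
	}
	return n
}

func main() {
	merged := mergeLiterals([]*node{{lit: "ab"}, {lit: "cd"}, {child: &node{lit: "x"}, min: 1, max: 2}})
	fmt.Println(len(merged), merged[0].lit) // 2 abcd

	r := flattenRepeat(&node{min: 4, max: 4, child: &node{min: 2, max: 3, child: &node{lit: "x"}}})
	fmt.Println(r.min, r.max) // 8 12
}
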
+func (n *regexNode) reduceSet() *regexNode { + // Extract empty-set, one and not-one case as special + + if n.set == nil { + n.t = ntNothing + } else if n.set.IsSingleton() { + n.ch = n.set.SingletonChar() + n.set = nil + n.t += (ntOne - ntSet) + } else if n.set.IsSingletonInverse() { + n.ch = n.set.SingletonChar() + n.set = nil + n.t += (ntNotone - ntSet) + } + + return n +} + +func (n *regexNode) reverseLeft() *regexNode { + if n.options&RightToLeft != 0 && n.t == ntConcatenate && len(n.children) > 0 { + //reverse children order + for left, right := 0, len(n.children)-1; left < right; left, right = left+1, right-1 { + n.children[left], n.children[right] = n.children[right], n.children[left] + } + } + + return n +} + +func (n *regexNode) makeQuantifier(lazy bool, min, max int) *regexNode { + if min == 0 && max == 0 { + return newRegexNode(ntEmpty, n.options) + } + + if min == 1 && max == 1 { + return n + } + + switch n.t { + case ntOne, ntNotone, ntSet: + if lazy { + n.makeRep(Onelazy, min, max) + } else { + n.makeRep(Oneloop, min, max) + } + return n + + default: + var t nodeType + if lazy { + t = ntLazyloop + } else { + t = ntLoop + } + result := newRegexNodeMN(t, n.options, min, max) + result.addChild(n) + return result + } +} + +// debug functions + +var typeStr = []string{ + "Onerep", "Notonerep", "Setrep", + "Oneloop", "Notoneloop", "Setloop", + "Onelazy", "Notonelazy", "Setlazy", + "One", "Notone", "Set", + "Multi", "Ref", + "Bol", "Eol", "Boundary", "Nonboundary", + "Beginning", "Start", "EndZ", "End", + "Nothing", "Empty", + "Alternate", "Concatenate", + "Loop", "Lazyloop", + "Capture", "Group", "Require", "Prevent", "Greedy", + "Testref", "Testgroup", + "Unknown", "Unknown", "Unknown", + "Unknown", "Unknown", "Unknown", + "ECMABoundary", "NonECMABoundary", +} + +func (n *regexNode) description() string { + buf := &bytes.Buffer{} + + buf.WriteString(typeStr[n.t]) + + if (n.options & ExplicitCapture) != 0 { + buf.WriteString("-C") + } + if (n.options & IgnoreCase) != 0 { + buf.WriteString("-I") + } + if (n.options & RightToLeft) != 0 { + buf.WriteString("-L") + } + if (n.options & Multiline) != 0 { + buf.WriteString("-M") + } + if (n.options & Singleline) != 0 { + buf.WriteString("-S") + } + if (n.options & IgnorePatternWhitespace) != 0 { + buf.WriteString("-X") + } + if (n.options & ECMAScript) != 0 { + buf.WriteString("-E") + } + + switch n.t { + case ntOneloop, ntNotoneloop, ntOnelazy, ntNotonelazy, ntOne, ntNotone: + buf.WriteString("(Ch = " + CharDescription(n.ch) + ")") + break + case ntCapture: + buf.WriteString("(index = " + strconv.Itoa(n.m) + ", unindex = " + strconv.Itoa(n.n) + ")") + break + case ntRef, ntTestref: + buf.WriteString("(index = " + strconv.Itoa(n.m) + ")") + break + case ntMulti: + fmt.Fprintf(buf, "(String = %s)", string(n.str)) + break + case ntSet, ntSetloop, ntSetlazy: + buf.WriteString("(Set = " + n.set.String() + ")") + break + } + + switch n.t { + case ntOneloop, ntNotoneloop, ntOnelazy, ntNotonelazy, ntSetloop, ntSetlazy, ntLoop, ntLazyloop: + buf.WriteString("(Min = ") + buf.WriteString(strconv.Itoa(n.m)) + buf.WriteString(", Max = ") + if n.n == math.MaxInt32 { + buf.WriteString("inf") + } else { + buf.WriteString(strconv.Itoa(n.n)) + } + buf.WriteString(")") + + break + } + + return buf.String() +} + +var padSpace = []byte(" ") + +func (t *RegexTree) Dump() string { + return t.root.dump() +} + +func (n *regexNode) dump() string { + var stack []int + CurNode := n + CurChild := 0 + + buf := bytes.NewBufferString(CurNode.description()) + 
buf.WriteRune('\n') + + for { + if CurNode.children != nil && CurChild < len(CurNode.children) { + stack = append(stack, CurChild+1) + CurNode = CurNode.children[CurChild] + CurChild = 0 + + Depth := len(stack) + if Depth > 32 { + Depth = 32 + } + buf.Write(padSpace[:Depth]) + buf.WriteString(CurNode.description()) + buf.WriteRune('\n') + } else { + if len(stack) == 0 { + break + } + + CurChild = stack[len(stack)-1] + stack = stack[:len(stack)-1] + CurNode = CurNode.next + } + } + return buf.String() +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/writer.go b/vendor/github.com/dlclark/regexp2/syntax/writer.go new file mode 100644 index 000000000..a5aa11ca0 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/writer.go @@ -0,0 +1,500 @@ +package syntax + +import ( + "bytes" + "fmt" + "math" + "os" +) + +func Write(tree *RegexTree) (*Code, error) { + w := writer{ + intStack: make([]int, 0, 32), + emitted: make([]int, 2), + stringhash: make(map[string]int), + sethash: make(map[string]int), + } + + code, err := w.codeFromTree(tree) + + if tree.options&Debug > 0 && code != nil { + os.Stdout.WriteString(code.Dump()) + os.Stdout.WriteString("\n") + } + + return code, err +} + +type writer struct { + emitted []int + + intStack []int + curpos int + stringhash map[string]int + stringtable [][]rune + sethash map[string]int + settable []*CharSet + counting bool + count int + trackcount int + caps map[int]int +} + +const ( + beforeChild nodeType = 64 + afterChild = 128 + //MaxPrefixSize is the largest number of runes we'll use for a BoyerMoyer prefix + MaxPrefixSize = 50 +) + +// The top level RegexCode generator. It does a depth-first walk +// through the tree and calls EmitFragment to emits code before +// and after each child of an interior node, and at each leaf. +// +// It runs two passes, first to count the size of the generated +// code, and second to generate the code. +// +// We should time it against the alternative, which is +// to just generate the code and grow the array as we go. 
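
A small self-contained sketch of the two-pass "count, then emit" pattern the comment above describes, using a toy integer opcode stream rather than the real regexp2 instruction set (the opcode values here are made up for illustration):

package main

import "fmt"

type emitter struct {
	counting bool
	count    int
	out      []int
	pos      int
}

func (e *emitter) emit(ops ...int) {
	if e.counting {
		e.count += len(ops) // pass 1: only measure the size
		return
	}
	copy(e.out[e.pos:], ops) // pass 2: write into the pre-sized slice
	e.pos += len(ops)
}

func main() {
	program := func(e *emitter) {
		e.emit(1)       // a zero-operand instruction
		e.emit(2, 42)   // a one-operand instruction
		e.emit(3, 7, 9) // a two-operand instruction
	}

	e := &emitter{counting: true}
	program(e) // first pass: count the emitted ints
	e.counting = false
	e.out = make([]int, e.count)
	program(e) // second pass: emit for real
	fmt.Println(e.out) // [1 2 42 3 7 9]
}
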
+func (w *writer) codeFromTree(tree *RegexTree) (*Code, error) { + var ( + curNode *regexNode + curChild int + capsize int + ) + // construct sparse capnum mapping if some numbers are unused + + if tree.capnumlist == nil || tree.captop == len(tree.capnumlist) { + capsize = tree.captop + w.caps = nil + } else { + capsize = len(tree.capnumlist) + w.caps = tree.caps + for i := 0; i < len(tree.capnumlist); i++ { + w.caps[tree.capnumlist[i]] = i + } + } + + w.counting = true + + for { + if !w.counting { + w.emitted = make([]int, w.count) + } + + curNode = tree.root + curChild = 0 + + w.emit1(Lazybranch, 0) + + for { + if len(curNode.children) == 0 { + w.emitFragment(curNode.t, curNode, 0) + } else if curChild < len(curNode.children) { + w.emitFragment(curNode.t|beforeChild, curNode, curChild) + + curNode = curNode.children[curChild] + + w.pushInt(curChild) + curChild = 0 + continue + } + + if w.emptyStack() { + break + } + + curChild = w.popInt() + curNode = curNode.next + + w.emitFragment(curNode.t|afterChild, curNode, curChild) + curChild++ + } + + w.patchJump(0, w.curPos()) + w.emit(Stop) + + if !w.counting { + break + } + + w.counting = false + } + + fcPrefix := getFirstCharsPrefix(tree) + prefix := getPrefix(tree) + rtl := (tree.options & RightToLeft) != 0 + + var bmPrefix *BmPrefix + //TODO: benchmark string prefixes + if prefix != nil && len(prefix.PrefixStr) > 0 && MaxPrefixSize > 0 { + if len(prefix.PrefixStr) > MaxPrefixSize { + // limit prefix changes to 10k + prefix.PrefixStr = prefix.PrefixStr[:MaxPrefixSize] + } + bmPrefix = newBmPrefix(prefix.PrefixStr, prefix.CaseInsensitive, rtl) + } else { + bmPrefix = nil + } + + return &Code{ + Codes: w.emitted, + Strings: w.stringtable, + Sets: w.settable, + TrackCount: w.trackcount, + Caps: w.caps, + Capsize: capsize, + FcPrefix: fcPrefix, + BmPrefix: bmPrefix, + Anchors: getAnchors(tree), + RightToLeft: rtl, + }, nil +} + +// The main RegexCode generator. It does a depth-first walk +// through the tree and calls EmitFragment to emits code before +// and after each child of an interior node, and at each leaf. 
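
To make the walk shape concrete, here is an illustrative, stand-alone depth-first traversal driven by an explicit integer stack with before/after-child hooks, the same structure as the writer's main loop; the parent pointer, the hook signatures, and the omission of leaf emission are simplifications of this sketch, not the vendored implementation.

package main

import "fmt"

type tnode struct {
	name     string
	parent   *tnode
	children []*tnode
}

func walk(root *tnode, before, after func(n *tnode, childIdx int)) {
	var stack []int // saved "next child" indices, one per level, instead of recursion
	cur, child := root, 0
	for {
		if child < len(cur.children) {
			before(cur, child)
			stack = append(stack, child+1) // resume point inside cur
			cur = cur.children[child]
			child = 0
			continue
		}
		// leaf handling (the writer emits a fragment here) is omitted for brevity
		if len(stack) == 0 {
			return
		}
		child = stack[len(stack)-1]
		stack = stack[:len(stack)-1]
		cur = cur.parent
		after(cur, child-1)
	}
}

func main() {
	a, b := &tnode{name: "a"}, &tnode{name: "b"}
	root := &tnode{name: "concat", children: []*tnode{a, b}}
	a.parent, b.parent = root, root

	walk(root,
		func(n *tnode, i int) { fmt.Println("before", n.name, i) },
		func(n *tnode, i int) { fmt.Println("after", n.name, i) })
	// prints: before concat 0, after concat 0, before concat 1, after concat 1
}
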
+func (w *writer) emitFragment(nodetype nodeType, node *regexNode, curIndex int) error { + bits := InstOp(0) + + if nodetype <= ntRef { + if (node.options & RightToLeft) != 0 { + bits |= Rtl + } + if (node.options & IgnoreCase) != 0 { + bits |= Ci + } + } + ntBits := nodeType(bits) + + switch nodetype { + case ntConcatenate | beforeChild, ntConcatenate | afterChild, ntEmpty: + break + + case ntAlternate | beforeChild: + if curIndex < len(node.children)-1 { + w.pushInt(w.curPos()) + w.emit1(Lazybranch, 0) + } + + case ntAlternate | afterChild: + if curIndex < len(node.children)-1 { + lbPos := w.popInt() + w.pushInt(w.curPos()) + w.emit1(Goto, 0) + w.patchJump(lbPos, w.curPos()) + } else { + for i := 0; i < curIndex; i++ { + w.patchJump(w.popInt(), w.curPos()) + } + } + break + + case ntTestref | beforeChild: + if curIndex == 0 { + w.emit(Setjump) + w.pushInt(w.curPos()) + w.emit1(Lazybranch, 0) + w.emit1(Testref, w.mapCapnum(node.m)) + w.emit(Forejump) + } + + case ntTestref | afterChild: + if curIndex == 0 { + branchpos := w.popInt() + w.pushInt(w.curPos()) + w.emit1(Goto, 0) + w.patchJump(branchpos, w.curPos()) + w.emit(Forejump) + if len(node.children) <= 1 { + w.patchJump(w.popInt(), w.curPos()) + } + } else if curIndex == 1 { + w.patchJump(w.popInt(), w.curPos()) + } + + case ntTestgroup | beforeChild: + if curIndex == 0 { + w.emit(Setjump) + w.emit(Setmark) + w.pushInt(w.curPos()) + w.emit1(Lazybranch, 0) + } + + case ntTestgroup | afterChild: + if curIndex == 0 { + w.emit(Getmark) + w.emit(Forejump) + } else if curIndex == 1 { + Branchpos := w.popInt() + w.pushInt(w.curPos()) + w.emit1(Goto, 0) + w.patchJump(Branchpos, w.curPos()) + w.emit(Getmark) + w.emit(Forejump) + if len(node.children) <= 2 { + w.patchJump(w.popInt(), w.curPos()) + } + } else if curIndex == 2 { + w.patchJump(w.popInt(), w.curPos()) + } + + case ntLoop | beforeChild, ntLazyloop | beforeChild: + + if node.n < math.MaxInt32 || node.m > 1 { + if node.m == 0 { + w.emit1(Nullcount, 0) + } else { + w.emit1(Setcount, 1-node.m) + } + } else if node.m == 0 { + w.emit(Nullmark) + } else { + w.emit(Setmark) + } + + if node.m == 0 { + w.pushInt(w.curPos()) + w.emit1(Goto, 0) + } + w.pushInt(w.curPos()) + + case ntLoop | afterChild, ntLazyloop | afterChild: + + startJumpPos := w.curPos() + lazy := (nodetype - (ntLoop | afterChild)) + + if node.n < math.MaxInt32 || node.m > 1 { + if node.n == math.MaxInt32 { + w.emit2(InstOp(Branchcount+lazy), w.popInt(), math.MaxInt32) + } else { + w.emit2(InstOp(Branchcount+lazy), w.popInt(), node.n-node.m) + } + } else { + w.emit1(InstOp(Branchmark+lazy), w.popInt()) + } + + if node.m == 0 { + w.patchJump(w.popInt(), startJumpPos) + } + + case ntGroup | beforeChild, ntGroup | afterChild: + + case ntCapture | beforeChild: + w.emit(Setmark) + + case ntCapture | afterChild: + w.emit2(Capturemark, w.mapCapnum(node.m), w.mapCapnum(node.n)) + + case ntRequire | beforeChild: + // NOTE: the following line causes lookahead/lookbehind to be + // NON-BACKTRACKING. It can be commented out with (*) + w.emit(Setjump) + + w.emit(Setmark) + + case ntRequire | afterChild: + w.emit(Getmark) + + // NOTE: the following line causes lookahead/lookbehind to be + // NON-BACKTRACKING. 
It can be commented out with (*) + w.emit(Forejump) + + case ntPrevent | beforeChild: + w.emit(Setjump) + w.pushInt(w.curPos()) + w.emit1(Lazybranch, 0) + + case ntPrevent | afterChild: + w.emit(Backjump) + w.patchJump(w.popInt(), w.curPos()) + w.emit(Forejump) + + case ntGreedy | beforeChild: + w.emit(Setjump) + + case ntGreedy | afterChild: + w.emit(Forejump) + + case ntOne, ntNotone: + w.emit1(InstOp(node.t|ntBits), int(node.ch)) + + case ntNotoneloop, ntNotonelazy, ntOneloop, ntOnelazy: + if node.m > 0 { + if node.t == ntOneloop || node.t == ntOnelazy { + w.emit2(Onerep|bits, int(node.ch), node.m) + } else { + w.emit2(Notonerep|bits, int(node.ch), node.m) + } + } + if node.n > node.m { + if node.n == math.MaxInt32 { + w.emit2(InstOp(node.t|ntBits), int(node.ch), math.MaxInt32) + } else { + w.emit2(InstOp(node.t|ntBits), int(node.ch), node.n-node.m) + } + } + + case ntSetloop, ntSetlazy: + if node.m > 0 { + w.emit2(Setrep|bits, w.setCode(node.set), node.m) + } + if node.n > node.m { + if node.n == math.MaxInt32 { + w.emit2(InstOp(node.t|ntBits), w.setCode(node.set), math.MaxInt32) + } else { + w.emit2(InstOp(node.t|ntBits), w.setCode(node.set), node.n-node.m) + } + } + + case ntMulti: + w.emit1(InstOp(node.t|ntBits), w.stringCode(node.str)) + + case ntSet: + w.emit1(InstOp(node.t|ntBits), w.setCode(node.set)) + + case ntRef: + w.emit1(InstOp(node.t|ntBits), w.mapCapnum(node.m)) + + case ntNothing, ntBol, ntEol, ntBoundary, ntNonboundary, ntECMABoundary, ntNonECMABoundary, ntBeginning, ntStart, ntEndZ, ntEnd: + w.emit(InstOp(node.t)) + + default: + return fmt.Errorf("unexpected opcode in regular expression generation: %v", nodetype) + } + + return nil +} + +// To avoid recursion, we use a simple integer stack. +// This is the push. +func (w *writer) pushInt(i int) { + w.intStack = append(w.intStack, i) +} + +// Returns true if the stack is empty. +func (w *writer) emptyStack() bool { + return len(w.intStack) == 0 +} + +// This is the pop. +func (w *writer) popInt() int { + //get our item + idx := len(w.intStack) - 1 + i := w.intStack[idx] + //trim our slice + w.intStack = w.intStack[:idx] + return i +} + +// Returns the current position in the emitted code. +func (w *writer) curPos() int { + return w.curpos +} + +// Fixes up a jump instruction at the specified offset +// so that it jumps to the specified jumpDest. +func (w *writer) patchJump(offset, jumpDest int) { + w.emitted[offset+1] = jumpDest +} + +// Returns an index in the set table for a charset +// uses a map to eliminate duplicates. +func (w *writer) setCode(set *CharSet) int { + if w.counting { + return 0 + } + + buf := &bytes.Buffer{} + + set.mapHashFill(buf) + hash := buf.String() + i, ok := w.sethash[hash] + if !ok { + i = len(w.sethash) + w.sethash[hash] = i + w.settable = append(w.settable, set) + } + return i +} + +// Returns an index in the string table for a string. +// uses a map to eliminate duplicates. +func (w *writer) stringCode(str []rune) int { + if w.counting { + return 0 + } + + hash := string(str) + i, ok := w.stringhash[hash] + if !ok { + i = len(w.stringhash) + w.stringhash[hash] = i + w.stringtable = append(w.stringtable, str) + } + + return i +} + +// When generating code on a regex that uses a sparse set +// of capture slots, we hash them to a dense set of indices +// for an array of capture slots. Instead of doing the hash +// at match time, it's done at compile time, here. 
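
A tiny sketch of the compile-time remapping the comment above describes: sparse capture group numbers are hashed to dense slot indices once, so the matcher can index a plain slice of slots. The helper name and the sorting step are illustrative assumptions, not the parser's actual bookkeeping.

package main

import (
	"fmt"
	"sort"
)

// buildCapMap turns a sparse set of capture group numbers (e.g. 1, 5, 12)
// into a dense slot index table (0, 1, 2).
func buildCapMap(capnums []int) map[int]int {
	sort.Ints(capnums)
	m := make(map[int]int, len(capnums))
	for i, c := range capnums {
		m[c] = i
	}
	return m
}

func main() {
	caps := buildCapMap([]int{12, 1, 5})
	fmt.Println(caps[1], caps[5], caps[12]) // 0 1 2
}
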
+func (w *writer) mapCapnum(capnum int) int { + if capnum == -1 { + return -1 + } + + if w.caps != nil { + return w.caps[capnum] + } + + return capnum +} + +// Emits a zero-argument operation. Note that the emit +// functions all run in two modes: they can emit code, or +// they can just count the size of the code. +func (w *writer) emit(op InstOp) { + if w.counting { + w.count++ + if opcodeBacktracks(op) { + w.trackcount++ + } + return + } + w.emitted[w.curpos] = int(op) + w.curpos++ +} + +// Emits a one-argument operation. +func (w *writer) emit1(op InstOp, opd1 int) { + if w.counting { + w.count += 2 + if opcodeBacktracks(op) { + w.trackcount++ + } + return + } + w.emitted[w.curpos] = int(op) + w.curpos++ + w.emitted[w.curpos] = opd1 + w.curpos++ +} + +// Emits a two-argument operation. +func (w *writer) emit2(op InstOp, opd1, opd2 int) { + if w.counting { + w.count += 3 + if opcodeBacktracks(op) { + w.trackcount++ + } + return + } + w.emitted[w.curpos] = int(op) + w.curpos++ + w.emitted[w.curpos] = opd1 + w.curpos++ + w.emitted[w.curpos] = opd2 + w.curpos++ +} diff --git a/vendor/github.com/dlclark/regexp2/testoutput1 b/vendor/github.com/dlclark/regexp2/testoutput1 new file mode 100644 index 000000000..fbf63fdf2 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/testoutput1 @@ -0,0 +1,7061 @@ +# This set of tests is for features that are compatible with all versions of +# Perl >= 5.10, in non-UTF mode. It should run clean for the 8-bit, 16-bit, and +# 32-bit PCRE libraries, and also using the perltest.pl script. + +#forbid_utf +#newline_default lf any anycrlf +#perltest + +/the quick brown fox/ + the quick brown fox + 0: the quick brown fox + What do you know about the quick brown fox? + 0: the quick brown fox +\= Expect no match + The quick brown FOX +No match + What do you know about THE QUICK BROWN FOX? +No match + +/The quick brown fox/i + the quick brown fox + 0: the quick brown fox + The quick brown FOX + 0: The quick brown FOX + What do you know about the quick brown fox? + 0: the quick brown fox + What do you know about THE QUICK BROWN FOX? 
+ 0: THE QUICK BROWN FOX + +/abcd\t\n\r\f\a\e\071\x3b\$\\\?caxyz/ + abcd\t\n\r\f\a\e9;\$\\?caxyz + 0: abcd\x09\x0a\x0d\x0c\x07\x1b9;$\?caxyz + +/a*abc?xyz+pqr{3}ab{2,}xy{4,5}pq{0,6}AB{0,}zz/ + abxyzpqrrrabbxyyyypqAzz + 0: abxyzpqrrrabbxyyyypqAzz + abxyzpqrrrabbxyyyypqAzz + 0: abxyzpqrrrabbxyyyypqAzz + aabxyzpqrrrabbxyyyypqAzz + 0: aabxyzpqrrrabbxyyyypqAzz + aaabxyzpqrrrabbxyyyypqAzz + 0: aaabxyzpqrrrabbxyyyypqAzz + aaaabxyzpqrrrabbxyyyypqAzz + 0: aaaabxyzpqrrrabbxyyyypqAzz + abcxyzpqrrrabbxyyyypqAzz + 0: abcxyzpqrrrabbxyyyypqAzz + aabcxyzpqrrrabbxyyyypqAzz + 0: aabcxyzpqrrrabbxyyyypqAzz + aaabcxyzpqrrrabbxyyyypAzz + 0: aaabcxyzpqrrrabbxyyyypAzz + aaabcxyzpqrrrabbxyyyypqAzz + 0: aaabcxyzpqrrrabbxyyyypqAzz + aaabcxyzpqrrrabbxyyyypqqAzz + 0: aaabcxyzpqrrrabbxyyyypqqAzz + aaabcxyzpqrrrabbxyyyypqqqAzz + 0: aaabcxyzpqrrrabbxyyyypqqqAzz + aaabcxyzpqrrrabbxyyyypqqqqAzz + 0: aaabcxyzpqrrrabbxyyyypqqqqAzz + aaabcxyzpqrrrabbxyyyypqqqqqAzz + 0: aaabcxyzpqrrrabbxyyyypqqqqqAzz + aaabcxyzpqrrrabbxyyyypqqqqqqAzz + 0: aaabcxyzpqrrrabbxyyyypqqqqqqAzz + aaaabcxyzpqrrrabbxyyyypqAzz + 0: aaaabcxyzpqrrrabbxyyyypqAzz + abxyzzpqrrrabbxyyyypqAzz + 0: abxyzzpqrrrabbxyyyypqAzz + aabxyzzzpqrrrabbxyyyypqAzz + 0: aabxyzzzpqrrrabbxyyyypqAzz + aaabxyzzzzpqrrrabbxyyyypqAzz + 0: aaabxyzzzzpqrrrabbxyyyypqAzz + aaaabxyzzzzpqrrrabbxyyyypqAzz + 0: aaaabxyzzzzpqrrrabbxyyyypqAzz + abcxyzzpqrrrabbxyyyypqAzz + 0: abcxyzzpqrrrabbxyyyypqAzz + aabcxyzzzpqrrrabbxyyyypqAzz + 0: aabcxyzzzpqrrrabbxyyyypqAzz + aaabcxyzzzzpqrrrabbxyyyypqAzz + 0: aaabcxyzzzzpqrrrabbxyyyypqAzz + aaaabcxyzzzzpqrrrabbxyyyypqAzz + 0: aaaabcxyzzzzpqrrrabbxyyyypqAzz + aaaabcxyzzzzpqrrrabbbxyyyypqAzz + 0: aaaabcxyzzzzpqrrrabbbxyyyypqAzz + aaaabcxyzzzzpqrrrabbbxyyyyypqAzz + 0: aaaabcxyzzzzpqrrrabbbxyyyyypqAzz + aaabcxyzpqrrrabbxyyyypABzz + 0: aaabcxyzpqrrrabbxyyyypABzz + aaabcxyzpqrrrabbxyyyypABBzz + 0: aaabcxyzpqrrrabbxyyyypABBzz + >>>aaabxyzpqrrrabbxyyyypqAzz + 0: aaabxyzpqrrrabbxyyyypqAzz + >aaaabxyzpqrrrabbxyyyypqAzz + 0: aaaabxyzpqrrrabbxyyyypqAzz + >>>>abcxyzpqrrrabbxyyyypqAzz + 0: abcxyzpqrrrabbxyyyypqAzz +\= Expect no match + abxyzpqrrabbxyyyypqAzz +No match + abxyzpqrrrrabbxyyyypqAzz +No match + abxyzpqrrrabxyyyypqAzz +No match + aaaabcxyzzzzpqrrrabbbxyyyyyypqAzz +No match + aaaabcxyzzzzpqrrrabbbxyyypqAzz +No match + aaabcxyzpqrrrabbxyyyypqqqqqqqAzz +No match + +/^(abc){1,2}zz/ + abczz + 0: abczz + 1: abc + abcabczz + 0: abcabczz + 1: abc +\= Expect no match + zz +No match + abcabcabczz +No match + >>abczz +No match + +/^(b+?|a){1,2}?c/ + bc + 0: bc + 1: b + bbc + 0: bbc + 1: b + bbbc + 0: bbbc + 1: bb + bac + 0: bac + 1: a + bbac + 0: bbac + 1: a + aac + 0: aac + 1: a + abbbbbbbbbbbc + 0: abbbbbbbbbbbc + 1: bbbbbbbbbbb + bbbbbbbbbbbac + 0: bbbbbbbbbbbac + 1: a +\= Expect no match + aaac +No match + abbbbbbbbbbbac +No match + +/^(b+|a){1,2}c/ + bc + 0: bc + 1: b + bbc + 0: bbc + 1: bb + bbbc + 0: bbbc + 1: bbb + bac + 0: bac + 1: a + bbac + 0: bbac + 1: a + aac + 0: aac + 1: a + abbbbbbbbbbbc + 0: abbbbbbbbbbbc + 1: bbbbbbbbbbb + bbbbbbbbbbbac + 0: bbbbbbbbbbbac + 1: a +\= Expect no match + aaac +No match + abbbbbbbbbbbac +No match + +/^(b+|a){1,2}?bc/ + bbc + 0: bbc + 1: b + +/^(b*|ba){1,2}?bc/ + babc + 0: babc + 1: ba + bbabc + 0: bbabc + 1: ba + bababc + 0: bababc + 1: ba +\= Expect no match + bababbc +No match + babababc +No match + +/^(ba|b*){1,2}?bc/ + babc + 0: babc + 1: ba + bbabc + 0: bbabc + 1: ba + bababc + 0: bababc + 1: ba +\= Expect no match + bababbc +No match + babababc +No match + +#/^\ca\cA\c[;\c:/ +# \x01\x01\e;z +# 0: \x01\x01\x1b;z + 
+/^[ab\]cde]/ + athing + 0: a + bthing + 0: b + ]thing + 0: ] + cthing + 0: c + dthing + 0: d + ething + 0: e +\= Expect no match + fthing +No match + [thing +No match + \\thing +No match + +/^[]cde]/ + ]thing + 0: ] + cthing + 0: c + dthing + 0: d + ething + 0: e +\= Expect no match + athing +No match + fthing +No match + +/^[^ab\]cde]/ + fthing + 0: f + [thing + 0: [ + \\thing + 0: \ +\= Expect no match + athing +No match + bthing +No match + ]thing +No match + cthing +No match + dthing +No match + ething +No match + +/^[^]cde]/ + athing + 0: a + fthing + 0: f +\= Expect no match + ]thing +No match + cthing +No match + dthing +No match + ething +No match + +# DLC - I don't get this one +#/^\/ +#  +# 0: \x81 + +#updated to handle 16-bits utf8 +/^ÿ/ + ÿ + 0: \xc3\xbf + +/^[0-9]+$/ + 0 + 0: 0 + 1 + 0: 1 + 2 + 0: 2 + 3 + 0: 3 + 4 + 0: 4 + 5 + 0: 5 + 6 + 0: 6 + 7 + 0: 7 + 8 + 0: 8 + 9 + 0: 9 + 10 + 0: 10 + 100 + 0: 100 +\= Expect no match + abc +No match + +/^.*nter/ + enter + 0: enter + inter + 0: inter + uponter + 0: uponter + +/^xxx[0-9]+$/ + xxx0 + 0: xxx0 + xxx1234 + 0: xxx1234 +\= Expect no match + xxx +No match + +/^.+[0-9][0-9][0-9]$/ + x123 + 0: x123 + x1234 + 0: x1234 + xx123 + 0: xx123 + 123456 + 0: 123456 +\= Expect no match + 123 +No match + +/^.+?[0-9][0-9][0-9]$/ + x123 + 0: x123 + x1234 + 0: x1234 + xx123 + 0: xx123 + 123456 + 0: 123456 +\= Expect no match + 123 +No match + +/^([^!]+)!(.+)=apquxz\.ixr\.zzz\.ac\.uk$/ + abc!pqr=apquxz.ixr.zzz.ac.uk + 0: abc!pqr=apquxz.ixr.zzz.ac.uk + 1: abc + 2: pqr +\= Expect no match + !pqr=apquxz.ixr.zzz.ac.uk +No match + abc!=apquxz.ixr.zzz.ac.uk +No match + abc!pqr=apquxz:ixr.zzz.ac.uk +No match + abc!pqr=apquxz.ixr.zzz.ac.ukk +No match + +/:/ + Well, we need a colon: somewhere + 0: : +\= Expect no match + Fail without a colon +No match + +/([\da-f:]+)$/i + 0abc + 0: 0abc + 1: 0abc + abc + 0: abc + 1: abc + fed + 0: fed + 1: fed + E + 0: E + 1: E + :: + 0: :: + 1: :: + 5f03:12C0::932e + 0: 5f03:12C0::932e + 1: 5f03:12C0::932e + fed def + 0: def + 1: def + Any old stuff + 0: ff + 1: ff +\= Expect no match + 0zzz +No match + gzzz +No match + fed\x20 +No match + Any old rubbish +No match + +/^.*\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/ + .1.2.3 + 0: .1.2.3 + 1: 1 + 2: 2 + 3: 3 + A.12.123.0 + 0: A.12.123.0 + 1: 12 + 2: 123 + 3: 0 +\= Expect no match + .1.2.3333 +No match + 1.2.3 +No match + 1234.2.3 +No match + +/^(\d+)\s+IN\s+SOA\s+(\S+)\s+(\S+)\s*\(\s*$/ + 1 IN SOA non-sp1 non-sp2( + 0: 1 IN SOA non-sp1 non-sp2( + 1: 1 + 2: non-sp1 + 3: non-sp2 + 1 IN SOA non-sp1 non-sp2 ( + 0: 1 IN SOA non-sp1 non-sp2 ( + 1: 1 + 2: non-sp1 + 3: non-sp2 +\= Expect no match + 1IN SOA non-sp1 non-sp2( +No match + +/^[a-zA-Z\d][a-zA-Z\d\-]*(\.[a-zA-Z\d][a-zA-z\d\-]*)*\.$/ + a. + 0: a. + Z. + 0: Z. + 2. + 0: 2. + ab-c.pq-r. + 0: ab-c.pq-r. + 1: .pq-r + sxk.zzz.ac.uk. + 0: sxk.zzz.ac.uk. + 1: .uk + x-.y-. + 0: x-.y-. + 1: .y- +\= Expect no match + -abc.peq. 
+No match + +/^\*\.[a-z]([a-z\-\d]*[a-z\d]+)?(\.[a-z]([a-z\-\d]*[a-z\d]+)?)*$/ + *.a + 0: *.a + *.b0-a + 0: *.b0-a + 1: 0-a + *.c3-b.c + 0: *.c3-b.c + 1: 3-b + 2: .c + *.c-a.b-c + 0: *.c-a.b-c + 1: -a + 2: .b-c + 3: -c +\= Expect no match + *.0 +No match + *.a- +No match + *.a-b.c- +No match + *.c-a.0-c +No match + +/^(?=ab(de))(abd)(e)/ + abde + 0: abde + 1: de + 2: abd + 3: e + +/^(?!(ab)de|x)(abd)(f)/ + abdf + 0: abdf + 1: + 2: abd + 3: f + +/^(?=(ab(cd)))(ab)/ + abcd + 0: ab + 1: abcd + 2: cd + 3: ab + +/^[\da-f](\.[\da-f])*$/i + a.b.c.d + 0: a.b.c.d + 1: .d + A.B.C.D + 0: A.B.C.D + 1: .D + a.b.c.1.2.3.C + 0: a.b.c.1.2.3.C + 1: .C + +/^\".*\"\s*(;.*)?$/ + \"1234\" + 0: "1234" + \"abcd\" ; + 0: "abcd" ; + 1: ; + \"\" ; rhubarb + 0: "" ; rhubarb + 1: ; rhubarb +\= Expect no match + \"1234\" : things +No match + +/^$/ + \ + 0: +\= Expect no match + A non-empty line +No match + +/ ^ a (?# begins with a) b\sc (?# then b c) $ (?# then end)/x + ab c + 0: ab c +\= Expect no match + abc +No match + ab cde +No match + +/(?x) ^ a (?# begins with a) b\sc (?# then b c) $ (?# then end)/ + ab c + 0: ab c +\= Expect no match + abc +No match + ab cde +No match + +/^ a\ b[c ]d $/x + a bcd + 0: a bcd + a b d + 0: a b d +\= Expect no match + abcd +No match + ab d +No match + +/^(a(b(c)))(d(e(f)))(h(i(j)))(k(l(m)))$/ + abcdefhijklm + 0: abcdefhijklm + 1: abc + 2: bc + 3: c + 4: def + 5: ef + 6: f + 7: hij + 8: ij + 9: j +10: klm +11: lm +12: m + +/^(?:a(b(c)))(?:d(e(f)))(?:h(i(j)))(?:k(l(m)))$/ + abcdefhijklm + 0: abcdefhijklm + 1: bc + 2: c + 3: ef + 4: f + 5: ij + 6: j + 7: lm + 8: m + +#/^[\w][\W][\s][\S][\d][\D][\b][\n][\c]][\022]/ +# a+ Z0+\x08\n\x1d\x12 +# 0: a+ Z0+\x08\x0a\x1d\x12 + +/^[.^$|()*+?{,}]+/ + .^\$(*+)|{?,?} + 0: .^$(*+)|{?,?} + +/^a*\w/ + z + 0: z + az + 0: az + aaaz + 0: aaaz + a + 0: a + aa + 0: aa + aaaa + 0: aaaa + a+ + 0: a + aa+ + 0: aa + +/^a*?\w/ + z + 0: z + az + 0: a + aaaz + 0: a + a + 0: a + aa + 0: a + aaaa + 0: a + a+ + 0: a + aa+ + 0: a + +/^a+\w/ + az + 0: az + aaaz + 0: aaaz + aa + 0: aa + aaaa + 0: aaaa + aa+ + 0: aa + +/^a+?\w/ + az + 0: az + aaaz + 0: aa + aa + 0: aa + aaaa + 0: aa + aa+ + 0: aa + +/^\d{8}\w{2,}/ + 1234567890 + 0: 1234567890 + 12345678ab + 0: 12345678ab + 12345678__ + 0: 12345678__ +\= Expect no match + 1234567 +No match + +/^[aeiou\d]{4,5}$/ + uoie + 0: uoie + 1234 + 0: 1234 + 12345 + 0: 12345 + aaaaa + 0: aaaaa +\= Expect no match + 123456 +No match + +/^[aeiou\d]{4,5}?/ + uoie + 0: uoie + 1234 + 0: 1234 + 12345 + 0: 1234 + aaaaa + 0: aaaa + 123456 + 0: 1234 + +/\A(abc|def)=(\1){2,3}\Z/ + abc=abcabc + 0: abc=abcabc + 1: abc + 2: abc + def=defdefdef + 0: def=defdefdef + 1: def + 2: def +\= Expect no match + abc=defdef +No match + +/^(a)(b)(c)(d)(e)(f)(g)(h)(i)(j)(k)\11*(\3\4)\1(?#)2$/ + abcdefghijkcda2 + 0: abcdefghijkcda2 + 1: a + 2: b + 3: c + 4: d + 5: e + 6: f + 7: g + 8: h + 9: i +10: j +11: k +12: cd + abcdefghijkkkkcda2 + 0: abcdefghijkkkkcda2 + 1: a + 2: b + 3: c + 4: d + 5: e + 6: f + 7: g + 8: h + 9: i +10: j +11: k +12: cd + +/(cat(a(ract|tonic)|erpillar)) \1()2(3)/ + cataract cataract23 + 0: cataract cataract23 + 1: cataract + 2: aract + 3: ract + 4: + 5: 3 + catatonic catatonic23 + 0: catatonic catatonic23 + 1: catatonic + 2: atonic + 3: tonic + 4: + 5: 3 + caterpillar caterpillar23 + 0: caterpillar caterpillar23 + 1: caterpillar + 2: erpillar + 3: + 4: + 5: 3 + + +/^From +([^ ]+) +[a-zA-Z][a-zA-Z][a-zA-Z] +[a-zA-Z][a-zA-Z][a-zA-Z] +[0-9]?[0-9] +[0-9][0-9]:[0-9][0-9]/ + From abcd Mon Sep 01 12:33:02 1997 + 0: From abcd Mon Sep 01 12:33 + 
1: abcd + +/^From\s+\S+\s+([a-zA-Z]{3}\s+){2}\d{1,2}\s+\d\d:\d\d/ + From abcd Mon Sep 01 12:33:02 1997 + 0: From abcd Mon Sep 01 12:33 + 1: Sep + From abcd Mon Sep 1 12:33:02 1997 + 0: From abcd Mon Sep 1 12:33 + 1: Sep +\= Expect no match + From abcd Sep 01 12:33:02 1997 +No match + +/^12.34/s + 12\n34 + 0: 12\x0a34 + 12\r34 + 0: 12\x0d34 + +/\w+(?=\t)/ + the quick brown\t fox + 0: brown + +/foo(?!bar)(.*)/ + foobar is foolish see? + 0: foolish see? + 1: lish see? + +/(?:(?!foo)...|^.{0,2})bar(.*)/ + foobar crowbar etc + 0: rowbar etc + 1: etc + barrel + 0: barrel + 1: rel + 2barrel + 0: 2barrel + 1: rel + A barrel + 0: A barrel + 1: rel + +/^(\D*)(?=\d)(?!123)/ + abc456 + 0: abc + 1: abc +\= Expect no match + abc123 +No match + +/^1234(?# test newlines + inside)/ + 1234 + 0: 1234 + +/^1234 #comment in extended re + /x + 1234 + 0: 1234 + +/#rhubarb + abcd/x + abcd + 0: abcd + +/^abcd#rhubarb/x + abcd + 0: abcd + +/^(a)\1{2,3}(.)/ + aaab + 0: aaab + 1: a + 2: b + aaaab + 0: aaaab + 1: a + 2: b + aaaaab + 0: aaaaa + 1: a + 2: a + aaaaaab + 0: aaaaa + 1: a + 2: a + +/(?!^)abc/ + the abc + 0: abc +\= Expect no match + abc +No match + +/(?=^)abc/ + abc + 0: abc +\= Expect no match + the abc +No match + +/^[ab]{1,3}(ab*|b)/ + aabbbbb + 0: aabb + 1: b + +/^[ab]{1,3}?(ab*|b)/ + aabbbbb + 0: aabbbbb + 1: abbbbb + +/^[ab]{1,3}?(ab*?|b)/ + aabbbbb + 0: aa + 1: a + +/^[ab]{1,3}(ab*?|b)/ + aabbbbb + 0: aabb + 1: b + +/ (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* # optional leading comment +(?: (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +" (?: # opening quote... +[^\\\x80-\xff\n\015"] # Anything except backslash and quote +| # or +\\ [^\x80-\xff] # Escaped something (something != CR) +)* " # closing quote +) # initial word +(?: (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* \. (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +" (?: # opening quote... +[^\\\x80-\xff\n\015"] # Anything except backslash and quote +| # or +\\ [^\x80-\xff] # Escaped something (something != CR) +)* " # closing quote +) )* # further okay, if led by a period +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* @ (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # initial subdomain +(?: # +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* \. # if led by a period... 
+(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # ...further okay +)* +# address +| # or +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +" (?: # opening quote... +[^\\\x80-\xff\n\015"] # Anything except backslash and quote +| # or +\\ [^\x80-\xff] # Escaped something (something != CR) +)* " # closing quote +) # one word, optionally followed by.... +(?: +[^()<>@,;:".\\\[\]\x80-\xff\000-\010\012-\037] | # atom and space parts, or... +\( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) | # comments, or... + +" (?: # opening quote... +[^\\\x80-\xff\n\015"] # Anything except backslash and quote +| # or +\\ [^\x80-\xff] # Escaped something (something != CR) +)* " # closing quote +# quoted strings +)* +< (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* # leading < +(?: @ (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # initial subdomain +(?: # +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* \. # if led by a period... +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # ...further okay +)* + +(?: (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* , (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* @ (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # initial subdomain +(?: # +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* \. # if led by a period... +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... 
+(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # ...further okay +)* +)* # further okay, if led by comma +: # closing colon +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* )? # optional route +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +" (?: # opening quote... +[^\\\x80-\xff\n\015"] # Anything except backslash and quote +| # or +\\ [^\x80-\xff] # Escaped something (something != CR) +)* " # closing quote +) # initial word +(?: (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* \. (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +" (?: # opening quote... +[^\\\x80-\xff\n\015"] # Anything except backslash and quote +| # or +\\ [^\x80-\xff] # Escaped something (something != CR) +)* " # closing quote +) )* # further okay, if led by a period +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* @ (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # initial subdomain +(?: # +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* \. # if led by a period... +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # ...further okay +)* +# address spec +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* > # trailing > +# name and address +) (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* # optional trailing comment +/x + Alan Other + 0: Alan Other + + 0: user@dom.ain + user\@dom.ain + 0: user@dom.ain + \"A. Other\" (a comment) + 0: "A. Other" (a comment) + A. Other (a comment) + 0: Other (a comment) + \"/s=user/ou=host/o=place/prmd=uu.yy/admd= /c=gb/\"\@x400-re.lay + 0: "/s=user/ou=host/o=place/prmd=uu.yy/admd= /c=gb/"@x400-re.lay + A missing angle @,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... 
+(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +# Atom +| # or +" # " +[^\\\x80-\xff\n\015"] * # normal +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015"] * )* # ( special normal* )* +" # " +# Quoted string +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +\. +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +# Atom +| # or +" # " +[^\\\x80-\xff\n\015"] * # normal +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015"] * )* # ( special normal* )* +" # " +# Quoted string +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# additional words +)* +@ +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +(?: +\. +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. 
+(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +)* +# address +| # or +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +# Atom +| # or +" # " +[^\\\x80-\xff\n\015"] * # normal +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015"] * )* # ( special normal* )* +" # " +# Quoted string +) +# leading word +[^()<>@,;:".\\\[\]\x80-\xff\000-\010\012-\037] * # "normal" atoms and or spaces +(?: +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +| +" # " +[^\\\x80-\xff\n\015"] * # normal +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015"] * )* # ( special normal* )* +" # " +) # "special" comment or quoted string +[^()<>@,;:".\\\[\]\x80-\xff\000-\010\012-\037] * # more "normal" +)* +< +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# < +(?: +@ +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +(?: +\. +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. 
+(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +)* +(?: , +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +@ +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +(?: +\. +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +)* +)* # additional domains +: +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +)? # optional route +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +# Atom +| # or +" # " +[^\\\x80-\xff\n\015"] * # normal +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015"] * )* # ( special normal* )* +" # " +# Quoted string +) +[\040\t]* # Nab whitespace. 
+(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +\. +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +# Atom +| # or +" # " +[^\\\x80-\xff\n\015"] * # normal +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015"] * )* # ( special normal* )* +" # " +# Quoted string +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# additional words +)* +@ +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +(?: +\. +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. 
+# optional trailing comments +)* +# address spec +> # > +# name and address +) +/x + Alan Other + 0: Alan Other + + 0: user@dom.ain + user\@dom.ain + 0: user@dom.ain + \"A. Other\" (a comment) + 0: "A. Other" + A. Other (a comment) + 0: Other + \"/s=user/ou=host/o=place/prmd=uu.yy/admd= /c=gb/\"\@x400-re.lay + 0: "/s=user/ou=host/o=place/prmd=uu.yy/admd= /c=gb/"@x400-re.lay + A missing angle ?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~\x7f + +/P[^*]TAIRE[^*]{1,6}?LL/ + xxxxxxxxxxxPSTAIREISLLxxxxxxxxx + 0: PSTAIREISLL + +/P[^*]TAIRE[^*]{1,}?LL/ + xxxxxxxxxxxPSTAIREISLLxxxxxxxxx + 0: PSTAIREISLL + +/(\.\d\d[1-9]?)\d+/ + 1.230003938 + 0: .230003938 + 1: .23 + 1.875000282 + 0: .875000282 + 1: .875 + 1.235 + 0: .235 + 1: .23 + +/(\.\d\d((?=0)|\d(?=\d)))/ + 1.230003938 + 0: .23 + 1: .23 + 2: + 1.875000282 + 0: .875 + 1: .875 + 2: 5 +\= Expect no match + 1.235 +No match + +/\b(foo)\s+(\w+)/i + Food is on the foo table + 0: foo table + 1: foo + 2: table + +/foo(.*)bar/ + The food is under the bar in the barn. + 0: food is under the bar in the bar + 1: d is under the bar in the + +/foo(.*?)bar/ + The food is under the bar in the barn. + 0: food is under the bar + 1: d is under the + +/(.*)(\d*)/ + I have 2 numbers: 53147 + 0: I have 2 numbers: 53147 + 1: I have 2 numbers: 53147 + 2: + +/(.*)(\d+)/ + I have 2 numbers: 53147 + 0: I have 2 numbers: 53147 + 1: I have 2 numbers: 5314 + 2: 7 + +/(.*?)(\d*)/ + I have 2 numbers: 53147 + 0: + 1: + 2: + +/(.*?)(\d+)/ + I have 2 numbers: 53147 + 0: I have 2 + 1: I have + 2: 2 + +/(.*)(\d+)$/ + I have 2 numbers: 53147 + 0: I have 2 numbers: 53147 + 1: I have 2 numbers: 5314 + 2: 7 + +/(.*?)(\d+)$/ + I have 2 numbers: 53147 + 0: I have 2 numbers: 53147 + 1: I have 2 numbers: + 2: 53147 + +/(.*)\b(\d+)$/ + I have 2 numbers: 53147 + 0: I have 2 numbers: 53147 + 1: I have 2 numbers: + 2: 53147 + +/(.*\D)(\d+)$/ + I have 2 numbers: 53147 + 0: I have 2 numbers: 53147 + 1: I have 2 numbers: + 2: 53147 + +/^\D*(?!123)/ + ABC123 + 0: AB + +/^(\D*)(?=\d)(?!123)/ + ABC445 + 0: ABC + 1: ABC +\= Expect no match + ABC123 +No match + +/^[W-]46]/ + W46]789 + 0: W46] + -46]789 + 0: -46] +\= Expect no match + Wall +No match + Zebra +No match + 42 +No match + [abcd] +No match + ]abcd[ +No match + +/^[W-\]46]/ + W46]789 + 0: W + Wall + 0: W + Zebra + 0: Z + Xylophone + 0: X + 42 + 0: 4 + [abcd] + 0: [ + ]abcd[ + 0: ] + \\backslash + 0: \ +\= Expect no match + -46]789 +No match + well +No match + +/\d\d\/\d\d\/\d\d\d\d/ + 01/01/2000 + 0: 01/01/2000 + +/word (?:[a-zA-Z0-9]+ ){0,10}otherword/ + word cat dog elephant mussel cow horse canary baboon snake shark otherword + 0: word cat dog elephant mussel cow horse canary baboon snake shark otherword +\= Expect no match + word cat dog elephant mussel cow horse canary baboon snake shark +No match + +/word (?:[a-zA-Z0-9]+ ){0,300}otherword/ +\= Expect no match + word cat dog elephant mussel cow horse canary baboon snake shark the quick brown fox and the lazy dog and several other words getting close to thirty by now I hope +No match + +/^(a){0,0}/ + bcd + 0: + abc + 0: + aab + 0: + +/^(a){0,1}/ + bcd + 0: + abc + 0: a + 1: a + aab + 0: a + 1: a + +/^(a){0,2}/ + bcd + 0: + abc + 0: a + 1: a + aab + 0: aa + 1: a + +/^(a){0,3}/ + bcd + 0: + abc + 0: a + 1: a + aab + 0: aa + 1: a + aaa + 0: aaa + 1: a + +/^(a){0,}/ + bcd + 0: + abc + 0: a + 1: a + aab + 0: aa + 1: a + aaa + 0: aaa + 1: a + aaaaaaaa + 0: aaaaaaaa + 1: a + +/^(a){1,1}/ + abc + 0: a + 1: a + aab + 0: a + 1: a +\= Expect no match + bcd +No match + 
+/^(a){1,2}/ + abc + 0: a + 1: a + aab + 0: aa + 1: a +\= Expect no match + bcd +No match + +/^(a){1,3}/ + abc + 0: a + 1: a + aab + 0: aa + 1: a + aaa + 0: aaa + 1: a +\= Expect no match + bcd +No match + +/^(a){1,}/ + abc + 0: a + 1: a + aab + 0: aa + 1: a + aaa + 0: aaa + 1: a + aaaaaaaa + 0: aaaaaaaa + 1: a +\= Expect no match + bcd +No match + +/.*\.gif/ + borfle\nbib.gif\nno + 0: bib.gif + +/.{0,}\.gif/ + borfle\nbib.gif\nno + 0: bib.gif + +/.*\.gif/m + borfle\nbib.gif\nno + 0: bib.gif + +/.*\.gif/s + borfle\nbib.gif\nno + 0: borfle\x0abib.gif + +/.*\.gif/ms + borfle\nbib.gif\nno + 0: borfle\x0abib.gif + +/.*$/ + borfle\nbib.gif\nno + 0: no + +/.*$/m + borfle\nbib.gif\nno + 0: borfle + +/.*$/s + borfle\nbib.gif\nno + 0: borfle\x0abib.gif\x0ano + +/.*$/ms + borfle\nbib.gif\nno + 0: borfle\x0abib.gif\x0ano + +/.*$/ + borfle\nbib.gif\nno\n + 0: no + +/.*$/m + borfle\nbib.gif\nno\n + 0: borfle + +/.*$/s + borfle\nbib.gif\nno\n + 0: borfle\x0abib.gif\x0ano\x0a + +/.*$/ms + borfle\nbib.gif\nno\n + 0: borfle\x0abib.gif\x0ano\x0a + +/(.*X|^B)/ + abcde\n1234Xyz + 0: 1234X + 1: 1234X + BarFoo + 0: B + 1: B +\= Expect no match + abcde\nBar +No match + +/(.*X|^B)/m + abcde\n1234Xyz + 0: 1234X + 1: 1234X + BarFoo + 0: B + 1: B + abcde\nBar + 0: B + 1: B + +/(.*X|^B)/s + abcde\n1234Xyz + 0: abcde\x0a1234X + 1: abcde\x0a1234X + BarFoo + 0: B + 1: B +\= Expect no match + abcde\nBar +No match + +/(.*X|^B)/ms + abcde\n1234Xyz + 0: abcde\x0a1234X + 1: abcde\x0a1234X + BarFoo + 0: B + 1: B + abcde\nBar + 0: B + 1: B + +/(?s)(.*X|^B)/ + abcde\n1234Xyz + 0: abcde\x0a1234X + 1: abcde\x0a1234X + BarFoo + 0: B + 1: B +\= Expect no match + abcde\nBar +No match + +/(?s:.*X|^B)/ + abcde\n1234Xyz + 0: abcde\x0a1234X + BarFoo + 0: B +\= Expect no match + abcde\nBar +No match + +/^.*B/ +\= Expect no match + abc\nB +No match + +/(?s)^.*B/ + abc\nB + 0: abc\x0aB + +/(?m)^.*B/ + abc\nB + 0: B + +/(?ms)^.*B/ + abc\nB + 0: abc\x0aB + +/(?ms)^B/ + abc\nB + 0: B + +/(?s)B$/ + B\n + 0: B + +/^[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]/ + 123456654321 + 0: 123456654321 + +/^\d\d\d\d\d\d\d\d\d\d\d\d/ + 123456654321 + 0: 123456654321 + +/^[\d][\d][\d][\d][\d][\d][\d][\d][\d][\d][\d][\d]/ + 123456654321 + 0: 123456654321 + +/^[abc]{12}/ + abcabcabcabc + 0: abcabcabcabc + +/^[a-c]{12}/ + abcabcabcabc + 0: abcabcabcabc + +/^(a|b|c){12}/ + abcabcabcabc + 0: abcabcabcabc + 1: c + +/^[abcdefghijklmnopqrstuvwxy0123456789]/ + n + 0: n +\= Expect no match + z +No match + +/abcde{0,0}/ + abcd + 0: abcd +\= Expect no match + abce +No match + +/ab[cd]{0,0}e/ + abe + 0: abe +\= Expect no match + abcde +No match + +/ab(c){0,0}d/ + abd + 0: abd +\= Expect no match + abcd +No match + +/a(b*)/ + a + 0: a + 1: + ab + 0: ab + 1: b + abbbb + 0: abbbb + 1: bbbb +\= Expect no match + bbbbb +No match + +/ab\d{0}e/ + abe + 0: abe +\= Expect no match + ab1e +No match + +/"([^\\"]+|\\.)*"/ + the \"quick\" brown fox + 0: "quick" + 1: quick + \"the \\\"quick\\\" brown fox\" + 0: "the \"quick\" brown fox" + 1: brown fox + +/]{0,})>]{0,})>([\d]{0,}\.)(.*)((
([\w\W\s\d][^<>]{0,})|[\s]{0,}))<\/a><\/TD>]{0,})>([\w\W\s\d][^<>]{0,})<\/TD>]{0,})>([\w\W\s\d][^<>]{0,})<\/TD><\/TR>/is + 43.Word Processor
(N-1286)
Lega lstaff.comCA - Statewide + 0: 43.Word Processor
(N-1286)
Lega lstaff.comCA - Statewide + 1: BGCOLOR='#DBE9E9' + 2: align=left valign=top + 3: 43. + 4: Word Processor
(N-1286) + 5: + 6: + 7: + 8: align=left valign=top + 9: Lega lstaff.com +10: align=left valign=top +11: CA - Statewide + +/a[^a]b/ + acb + 0: acb + a\nb + 0: a\x0ab + +/a.b/ + acb + 0: acb +\= Expect no match + a\nb +No match + +/a[^a]b/s + acb + 0: acb + a\nb + 0: a\x0ab + +/a.b/s + acb + 0: acb + a\nb + 0: a\x0ab + +/^(b+?|a){1,2}?c/ + bac + 0: bac + 1: a + bbac + 0: bbac + 1: a + bbbac + 0: bbbac + 1: a + bbbbac + 0: bbbbac + 1: a + bbbbbac + 0: bbbbbac + 1: a + +/^(b+|a){1,2}?c/ + bac + 0: bac + 1: a + bbac + 0: bbac + 1: a + bbbac + 0: bbbac + 1: a + bbbbac + 0: bbbbac + 1: a + bbbbbac + 0: bbbbbac + 1: a + +/(?!\A)x/m + a\bx\n + 0: x + a\nx\n + 0: x +\= Expect no match + x\nb\n +No match + +/(A|B)*?CD/ + CD + 0: CD + +/(A|B)*CD/ + CD + 0: CD + +/(AB)*?\1/ + ABABAB + 0: ABAB + 1: AB + +/(AB)*\1/ + ABABAB + 0: ABABAB + 1: AB + +/(?.*/)foo" + /this/is/a/very/long/line/in/deed/with/very/many/slashes/in/and/foo + 0: /this/is/a/very/long/line/in/deed/with/very/many/slashes/in/and/foo +\= Expect no match + /this/is/a/very/long/line/in/deed/with/very/many/slashes/in/it/you/see/ +No match + +/(?>(\.\d\d[1-9]?))\d+/ + 1.230003938 + 0: .230003938 + 1: .23 + 1.875000282 + 0: .875000282 + 1: .875 +\= Expect no match + 1.235 +No match + +/^((?>\w+)|(?>\s+))*$/ + now is the time for all good men to come to the aid of the party + 0: now is the time for all good men to come to the aid of the party + 1: party +\= Expect no match + this is not a line with only words and spaces! +No match + +/(\d+)(\w)/ + 12345a + 0: 12345a + 1: 12345 + 2: a + 12345+ + 0: 12345 + 1: 1234 + 2: 5 + +/((?>\d+))(\w)/ + 12345a + 0: 12345a + 1: 12345 + 2: a +\= Expect no match + 12345+ +No match + +/(?>a+)b/ + aaab + 0: aaab + +/((?>a+)b)/ + aaab + 0: aaab + 1: aaab + +/(?>(a+))b/ + aaab + 0: aaab + 1: aaa + +/(?>b)+/ + aaabbbccc + 0: bbb + +/(?>a+|b+|c+)*c/ + aaabbbbccccd + 0: aaabbbbc + +/((?>[^()]+)|\([^()]*\))+/ + ((abc(ade)ufh()()x + 0: abc(ade)ufh()()x + 1: x + +/\(((?>[^()]+)|\([^()]+\))+\)/ + (abc) + 0: (abc) + 1: abc + (abc(def)xyz) + 0: (abc(def)xyz) + 1: xyz +\= Expect no match + ((()aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +No match + +/a(?-i)b/i + ab + 0: ab + Ab + 0: Ab +\= Expect no match + aB +No match + AB +No match + +/(a (?x)b c)d e/ + a bcd e + 0: a bcd e + 1: a bc +\= Expect no match + a b cd e +No match + abcd e +No match + a bcde +No match + +/(a b(?x)c d (?-x)e f)/ + a bcde f + 0: a bcde f + 1: a bcde f +\= Expect no match + abcdef +No match + +/(a(?i)b)c/ + abc + 0: abc + 1: ab + aBc + 0: aBc + 1: aB +\= Expect no match + abC +No match + aBC +No match + Abc +No match + ABc +No match + ABC +No match + AbC +No match + +/a(?i:b)c/ + abc + 0: abc + aBc + 0: aBc +\= Expect no match + ABC +No match + abC +No match + aBC +No match + +/a(?i:b)*c/ + aBc + 0: aBc + aBBc + 0: aBBc +\= Expect no match + aBC +No match + aBBC +No match + +/a(?=b(?i)c)\w\wd/ + abcd + 0: abcd + abCd + 0: abCd +\= Expect no match + aBCd +No match + abcD +No match + +/(?s-i:more.*than).*million/i + more than million + 0: more than million + more than MILLION + 0: more than MILLION + more \n than Million + 0: more \x0a than Million +\= Expect no match + MORE THAN MILLION +No match + more \n than \n million +No match + +/(?:(?s-i)more.*than).*million/i + more than million + 0: more than million + more than MILLION + 0: more than MILLION + more \n than Million + 0: more \x0a than Million +\= Expect no match + MORE THAN MILLION +No match + more \n than \n million +No match + +/(?>a(?i)b+)+c/ + abc + 0: abc + aBbc + 0: aBbc + aBBc + 0: 
aBBc +\= Expect no match + Abc +No match + abAb +No match + abbC +No match + +/(?=a(?i)b)\w\wc/ + abc + 0: abc + aBc + 0: aBc +\= Expect no match + Ab +No match + abC +No match + aBC +No match + +/(?<=a(?i)b)(\w\w)c/ + abxxc + 0: xxc + 1: xx + aBxxc + 0: xxc + 1: xx +\= Expect no match + Abxxc +No match + ABxxc +No match + abxxC +No match + +/(?:(a)|b)(?(1)A|B)/ + aA + 0: aA + 1: a + bB + 0: bB +\= Expect no match + aB +No match + bA +No match + +/^(a)?(?(1)a|b)+$/ + aa + 0: aa + 1: a + b + 0: b + bb + 0: bb +\= Expect no match + ab +No match + +# Perl gets this next one wrong if the pattern ends with $; in that case it +# fails to match "12". + +/^(?(?=abc)\w{3}:|\d\d)/ + abc: + 0: abc: + 12 + 0: 12 + 123 + 0: 12 +\= Expect no match + xyz +No match + +/^(?(?!abc)\d\d|\w{3}:)$/ + abc: + 0: abc: + 12 + 0: 12 +\= Expect no match + 123 +No match + xyz +No match + +/(?(?<=foo)bar|cat)/ + foobar + 0: bar + cat + 0: cat + fcat + 0: cat + focat + 0: cat +\= Expect no match + foocat +No match + +/(?(?a*)*/ + a + 0: a + aa + 0: aa + aaaa + 0: aaaa + +/(abc|)+/ + abc + 0: abc + 1: + abcabc + 0: abcabc + 1: + abcabcabc + 0: abcabcabc + 1: + xyz + 0: + 1: + +/([a]*)*/ + a + 0: a + 1: + aaaaa + 0: aaaaa + 1: + +/([ab]*)*/ + a + 0: a + 1: + b + 0: b + 1: + ababab + 0: ababab + 1: + aaaabcde + 0: aaaab + 1: + bbbb + 0: bbbb + 1: + +/([^a]*)*/ + b + 0: b + 1: + bbbb + 0: bbbb + 1: + aaa + 0: + 1: + +/([^ab]*)*/ + cccc + 0: cccc + 1: + abab + 0: + 1: + +/([a]*?)*/ + a + 0: + 1: + aaaa + 0: + 1: + +/([ab]*?)*/ + a + 0: + 1: + b + 0: + 1: + abab + 0: + 1: + baba + 0: + 1: + +/([^a]*?)*/ + b + 0: + 1: + bbbb + 0: + 1: + aaa + 0: + 1: + +/([^ab]*?)*/ + c + 0: + 1: + cccc + 0: + 1: + baba + 0: + 1: + +/(?>a*)*/ + a + 0: a + aaabcde + 0: aaa + +/((?>a*))*/ + aaaaa + 0: aaaaa + 1: + aabbaa + 0: aa + 1: + +/((?>a*?))*/ + aaaaa + 0: + 1: + aabbaa + 0: + 1: + +/(?(?=[^a-z]+[a-z]) \d{2}-[a-z]{3}-\d{2} | \d{2}-\d{2}-\d{2} ) /x + 12-sep-98 + 0: 12-sep-98 + 12-09-98 + 0: 12-09-98 +\= Expect no match + sep-12-98 +No match + +/(?<=(foo))bar\1/ + foobarfoo + 0: barfoo + 1: foo + foobarfootling + 0: barfoo + 1: foo +\= Expect no match + foobar +No match + barfoo +No match + +/(?i:saturday|sunday)/ + saturday + 0: saturday + sunday + 0: sunday + Saturday + 0: Saturday + Sunday + 0: Sunday + SATURDAY + 0: SATURDAY + SUNDAY + 0: SUNDAY + SunDay + 0: SunDay + +/(a(?i)bc|BB)x/ + abcx + 0: abcx + 1: abc + aBCx + 0: aBCx + 1: aBC + bbx + 0: bbx + 1: bb + BBx + 0: BBx + 1: BB +\= Expect no match + abcX +No match + aBCX +No match + bbX +No match + BBX +No match + +/^([ab](?i)[cd]|[ef])/ + ac + 0: ac + 1: ac + aC + 0: aC + 1: aC + bD + 0: bD + 1: bD + elephant + 0: e + 1: e + Europe + 0: E + 1: E + frog + 0: f + 1: f + France + 0: F + 1: F +\= Expect no match + Africa +No match + +/^(ab|a(?i)[b-c](?m-i)d|x(?i)y|z)/ + ab + 0: ab + 1: ab + aBd + 0: aBd + 1: aBd + xy + 0: xy + 1: xy + xY + 0: xY + 1: xY + zebra + 0: z + 1: z + Zambesi + 0: Z + 1: Z +\= Expect no match + aCD +No match + XY +No match + +/(?<=foo\n)^bar/m + foo\nbar + 0: bar +\= Expect no match + bar +No match + baz\nbar +No match + +/(?<=(?]&/ + <&OUT + 0: <& + +/^(a\1?){4}$/ + aaaaaaaaaa + 0: aaaaaaaaaa + 1: aaaa +\= Expect no match + AB +No match + aaaaaaaaa +No match + aaaaaaaaaaa +No match + +/^(a(?(1)\1)){4}$/ + aaaaaaaaaa + 0: aaaaaaaaaa + 1: aaaa +\= Expect no match + aaaaaaaaa +No match + aaaaaaaaaaa +No match + +/(?:(f)(o)(o)|(b)(a)(r))*/ + foobar + 0: foobar + 1: f + 2: o + 3: o + 4: b + 5: a + 6: r + +/(?<=a)b/ + ab + 0: b +\= Expect no match + cb +No match + b 
+No match + +/(? + 2: abcd + xy:z:::abcd + 0: xy:z:::abcd + 1: xy:z::: + 2: abcd + +/^[^bcd]*(c+)/ + aexycd + 0: aexyc + 1: c + +/(a*)b+/ + caab + 0: aab + 1: aa + +/([\w:]+::)?(\w+)$/ + abcd + 0: abcd + 1: + 2: abcd + xy:z:::abcd + 0: xy:z:::abcd + 1: xy:z::: + 2: abcd +\= Expect no match + abcd: +No match + abcd: +No match + +/^[^bcd]*(c+)/ + aexycd + 0: aexyc + 1: c + +/(>a+)ab/ + +/(?>a+)b/ + aaab + 0: aaab + +/([[:]+)/ + a:[b]: + 0: :[ + 1: :[ + +/([[=]+)/ + a=[b]= + 0: =[ + 1: =[ + +/([[.]+)/ + a.[b]. + 0: .[ + 1: .[ + +/((?>a+)b)/ + aaab + 0: aaab + 1: aaab + +/(?>(a+))b/ + aaab + 0: aaab + 1: aaa + +/((?>[^()]+)|\([^()]*\))+/ + ((abc(ade)ufh()()x + 0: abc(ade)ufh()()x + 1: x + +/a\Z/ +\= Expect no match + aaab +No match + a\nb\n +No match + +/b\Z/ + a\nb\n + 0: b + +/b\z/ + +/b\Z/ + a\nb + 0: b + +/b\z/ + a\nb + 0: b + +/^(?>(?(1)\.|())[^\W_](?>[a-z0-9-]*[^\W_])?)+$/ + a + 0: a + 1: + abc + 0: abc + 1: + a-b + 0: a-b + 1: + 0-9 + 0: 0-9 + 1: + a.b + 0: a.b + 1: + 5.6.7 + 0: 5.6.7 + 1: + the.quick.brown.fox + 0: the.quick.brown.fox + 1: + a100.b200.300c + 0: a100.b200.300c + 1: + 12-ab.1245 + 0: 12-ab.1245 + 1: +\= Expect no match + \ +No match + .a +No match + -a +No match + a- +No match + a. +No match + a_b +No match + a.- +No match + a.. +No match + ab..bc +No match + the.quick.brown.fox- +No match + the.quick.brown.fox. +No match + the.quick.brown.fox_ +No match + the.quick.brown.fox+ +No match + +/(?>.*)(?<=(abcd|wxyz))/ + alphabetabcd + 0: alphabetabcd + 1: abcd + endingwxyz + 0: endingwxyz + 1: wxyz +\= Expect no match + a rather long string that doesn't end with one of them +No match + +/word (?>(?:(?!otherword)[a-zA-Z0-9]+ ){0,30})otherword/ + word cat dog elephant mussel cow horse canary baboon snake shark otherword + 0: word cat dog elephant mussel cow horse canary baboon snake shark otherword +\= Expect no match + word cat dog elephant mussel cow horse canary baboon snake shark +No match + +/word (?>[a-zA-Z0-9]+ ){0,30}otherword/ +\= Expect no match + word cat dog elephant mussel cow horse canary baboon snake shark the quick brown fox and the lazy dog and several other words getting close to thirty by now I hope +No match + +/(?<=\d{3}(?!999))foo/ + 999foo + 0: foo + 123999foo + 0: foo +\= Expect no match + 123abcfoo +No match + +/(?<=(?!...999)\d{3})foo/ + 999foo + 0: foo + 123999foo + 0: foo +\= Expect no match + 123abcfoo +No match + +/(?<=\d{3}(?!999)...)foo/ + 123abcfoo + 0: foo + 123456foo + 0: foo +\= Expect no match + 123999foo +No match + +/(?<=\d{3}...)(? + 2: + 3: abcd +
+ 2: + 3: abcd + \s*)=(?>\s*) # find + 2: + 3: abcd + Z)+|A)*/ + ZABCDEFG + 0: ZA + 1: A + +/((?>)+|A)*/ + ZABCDEFG + 0: + 1: + +/^[\d-a]/ + abcde + 0: a + -things + 0: - + 0digit + 0: 0 +\= Expect no match + bcdef +No match + +/[\s]+/ + > \x09\x0a\x0c\x0d\x0b< + 0: \x09\x0a\x0c\x0d\x0b + +/\s+/ + > \x09\x0a\x0c\x0d\x0b< + 0: \x09\x0a\x0c\x0d\x0b + +/a b/x + ab + 0: ab + +/(?!\A)x/m + a\nxb\n + 0: x + +/(?!^)x/m +\= Expect no match + a\nxb\n +No match + +#/abc\Qabc\Eabc/ +# abcabcabc +# 0: abcabcabc + +#/abc\Q(*+|\Eabc/ +# abc(*+|abc +# 0: abc(*+|abc + +#/ abc\Q abc\Eabc/x +# abc abcabc +# 0: abc abcabc +#\= Expect no match +# abcabcabc +#No match + +#/abc#comment +# \Q#not comment +# literal\E/x +# abc#not comment\n literal +# 0: abc#not comment\x0a literal + +#/abc#comment +# \Q#not comment +# literal/x +# abc#not comment\n literal +# 0: abc#not comment\x0a literal + +#/abc#comment +# \Q#not comment +# literal\E #more comment +# /x +# abc#not comment\n literal +# 0: abc#not comment\x0a literal + +#/abc#comment +# \Q#not comment +# literal\E #more comment/x +# abc#not comment\n literal +# 0: abc#not comment\x0a literal + +#/\Qabc\$xyz\E/ +# abc\\\$xyz +# 0: abc\$xyz + +#/\Qabc\E\$\Qxyz\E/ +# abc\$xyz +# 0: abc$xyz + +/\Gabc/ + abc + 0: abc +\= Expect no match + xyzabc +No match + +/a(?x: b c )d/ + XabcdY + 0: abcd +\= Expect no match + Xa b c d Y +No match + +/((?x)x y z | a b c)/ + XabcY + 0: abc + 1: abc + AxyzB + 0: xyz + 1: xyz + +/(?i)AB(?-i)C/ + XabCY + 0: abC +\= Expect no match + XabcY +No match + +/((?i)AB(?-i)C|D)E/ + abCE + 0: abCE + 1: abC + DE + 0: DE + 1: D +\= Expect no match + abcE +No match + abCe +No match + dE +No match + De +No match + +/(.*)\d+\1/ + abc123abc + 0: abc123abc + 1: abc + abc123bc + 0: bc123bc + 1: bc + +/(.*)\d+\1/s + abc123abc + 0: abc123abc + 1: abc + abc123bc + 0: bc123bc + 1: bc + +/((.*))\d+\1/ + abc123abc + 0: abc123abc + 1: abc + 2: abc + abc123bc + 0: bc123bc + 1: bc + 2: bc + +# This tests for an IPv6 address in the form where it can have up to +# eight components, one and only one of which is empty. This must be +# an internal component. + +/^(?!:) # colon disallowed at start + (?: # start of item + (?: [0-9a-f]{1,4} | # 1-4 hex digits or + (?(1)0 | () ) ) # if null previously matched, fail; else null + : # followed by colon + ){1,7} # end item; 1-7 of them required + [0-9a-f]{1,4} $ # final hex number at end of string + (?(1)|.) 
# check that there was an empty component + /ix + a123::a123 + 0: a123::a123 + 1: + a123:b342::abcd + 0: a123:b342::abcd + 1: + a123:b342::324e:abcd + 0: a123:b342::324e:abcd + 1: + a123:ddde:b342::324e:abcd + 0: a123:ddde:b342::324e:abcd + 1: + a123:ddde:b342::324e:dcba:abcd + 0: a123:ddde:b342::324e:dcba:abcd + 1: + a123:ddde:9999:b342::324e:dcba:abcd + 0: a123:ddde:9999:b342::324e:dcba:abcd + 1: +\= Expect no match + 1:2:3:4:5:6:7:8 +No match + a123:bce:ddde:9999:b342::324e:dcba:abcd +No match + a123::9999:b342::324e:dcba:abcd +No match + abcde:2:3:4:5:6:7:8 +No match + ::1 +No match + abcd:fee0:123:: +No match + :1 +No match + 1: +No match + +#/[z\Qa-d]\E]/ +# z +# 0: z +# a +# 0: a +# - +# 0: - +# d +# 0: d +# ] +# 0: ] +#\= Expect no match +# b +#No match + +#TODO: PCRE has an optimization to make this workable, .NET does not +#/(a+)*b/ +#\= Expect no match +# aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +#No match + +# All these had to be updated because we understand unicode +# and this looks like it's expecting single byte matches + +# .NET generates \xe4...not sure what's up, might just be different code pages +/(?i)reg(?:ul(?:[aä]|ae)r|ex)/ + REGular + 0: REGular + regulaer + 0: regulaer + Regex + 0: Regex + regulär + 0: regul\xc3\xa4r + +#/Åæåä[à-ÿÀ-ß]+/ +# Åæåäà +# 0: \xc5\xe6\xe5\xe4\xe0 +# Åæåäÿ +# 0: \xc5\xe6\xe5\xe4\xff +# ÅæåäÀ +# 0: \xc5\xe6\xe5\xe4\xc0 +# Åæåäß +# 0: \xc5\xe6\xe5\xe4\xdf + +/(?<=Z)X./ + \x84XAZXB + 0: XB + +/ab cd (?x) de fg/ + ab cd defg + 0: ab cd defg + +/ab cd(?x) de fg/ + ab cddefg + 0: ab cddefg +\= Expect no match + abcddefg +No match + +/(? + 2: + D + 0: D + 1: + 2: + +# this is really long with debug -- removing for now +#/(a|)*\d/ +# aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4 +# 0: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4 +# 1: +#\= Expect no match +# aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +#No match + +/(?>a|)*\d/ + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4 + 0: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4 +\= Expect no match + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +No match + +/(?:a|)*\d/ + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4 + 0: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4 +\= Expect no match + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +No match + +/^(?s)(?>.*)(? + 2: a + +/(?>(a))b|(a)c/ + ac + 0: ac + 1: + 2: a + +/(?=(a))ab|(a)c/ + ac + 0: ac + 1: + 2: a + +/((?>(a))b|(a)c)/ + ac + 0: ac + 1: ac + 2: + 3: a + +/(?=(?>(a))b|(a)c)(..)/ + ac + 0: ac + 1: + 2: a + 3: ac + +/(?>(?>(a))b|(a)c)/ + ac + 0: ac + 1: + 2: a + +/((?>(a+)b)+(aabab))/ + aaaabaaabaabab + 0: aaaabaaabaabab + 1: aaaabaaabaabab + 2: aaa + 3: aabab + +/(?>a+|ab)+?c/ +\= Expect no match + aabc +No match + +/(?>a+|ab)+c/ +\= Expect no match + aabc +No match + +/(?:a+|ab)+c/ + aabc + 0: aabc + +/^(?:a|ab)+c/ + aaaabc + 0: aaaabc + +/(?=abc){0}xyz/ + xyz + 0: xyz + +/(?=abc){1}xyz/ +\= Expect no match + xyz +No match + +/(?=(a))?./ + ab + 0: a + 1: a + bc + 0: b + +/(?=(a))??./ + ab + 0: a + bc + 0: b + +/^(?!a){0}\w+/ + aaaaa + 0: aaaaa + +/(?<=(abc))?xyz/ + abcxyz + 0: xyz + 1: abc + pqrxyz + 0: xyz + +/^[g]+/ + ggg<<>> + 0: ggg<<>> +\= Expect no match + \\ga +No match + +/^[ga]+/ + gggagagaxyz + 0: gggagaga + +/[:a]xxx[b:]/ + :xxx: + 0: :xxx: + +/(?<=a{2})b/i + xaabc + 0: b +\= Expect no match + xabc +No match + +/(? 
+# 4: +# 5: c +# 6: d +# 7: Y + +#/^X(?7)(a)(?|(b|(?|(r)|(t))(s))|(q))(c)(d)(Y)/ +# XYabcdY +# 0: XYabcdY +# 1: a +# 2: b +# 3: +# 4: +# 5: c +# 6: d +# 7: Y + +/(?'abc'\w+):\k{2}/ + a:aaxyz + 0: a:aa + 1: a + ab:ababxyz + 0: ab:abab + 1: ab +\= Expect no match + a:axyz +No match + ab:abxyz +No match + +/^(?a)? (?(ab)b|c) (?(ab)d|e)/x + abd + 0: abd + 1: a + ce + 0: ce + +# .NET has more consistent grouping numbers with these dupe groups for the two options +/(?:a(? (?')|(?")) |b(? (?')|(?")) ) (?(quote)[a-z]+|[0-9]+)/x,dupnames + a\"aaaaa + 0: a"aaaaa + 1: " + 2: + 3: " + b\"aaaaa + 0: b"aaaaa + 1: " + 2: + 3: " +\= Expect no match + b\"11111 +No match + +#/(?P(?P0)(?P>L1)|(?P>L2))/ +# 0 +# 0: 0 +# 1: 0 +# 00 +# 0: 00 +# 1: 00 +# 2: 0 +# 0000 +# 0: 0000 +# 1: 0000 +# 2: 0 + +#/(?P(?P0)|(?P>L2)(?P>L1))/ +# 0 +# 0: 0 +# 1: 0 +# 2: 0 +# 00 +# 0: 0 +# 1: 0 +# 2: 0 +# 0000 +# 0: 0 +# 1: 0 +# 2: 0 + +# Check the use of names for failure + +# Check opening parens in comment when seeking forward reference. + +#/(?P(?P=abn)xxx|)+/ +# xxx +# 0: +# 1: + +#Posses +/^(a)?(\w)/ + aaaaX + 0: aa + 1: a + 2: a + YZ + 0: Y + 1: + 2: Y + +#Posses +/^(?:a)?(\w)/ + aaaaX + 0: aa + 1: a + YZ + 0: Y + 1: Y + +/\A.*?(a|bc)/ + ba + 0: ba + 1: a + +/\A.*?(?:a|bc|d)/ + ba + 0: ba + +# -------------------------- + +/(another)?(\1?)test/ + hello world test + 0: test + 1: + 2: + +/(another)?(\1+)test/ +\= Expect no match + hello world test +No match + +/((?:a?)*)*c/ + aac + 0: aac + 1: + +/((?>a?)*)*c/ + aac + 0: aac + 1: + +/(?>.*?a)(?<=ba)/ + aba + 0: ba + +/(?:.*?a)(?<=ba)/ + aba + 0: aba + +/(?>.*?a)b/s + aab + 0: ab + +/(?>.*?a)b/ + aab + 0: ab + +/(?>^a)b/s +\= Expect no match + aab +No match + +/(?>.*?)(?<=(abcd)|(wxyz))/ + alphabetabcd + 0: + 1: abcd + endingwxyz + 0: + 1: + 2: wxyz + +/(?>.*)(?<=(abcd)|(wxyz))/ + alphabetabcd + 0: alphabetabcd + 1: abcd + endingwxyz + 0: endingwxyz + 1: + 2: wxyz + +"(?>.*)foo" +\= Expect no match + abcdfooxyz +No match + +"(?>.*?)foo" + abcdfooxyz + 0: foo + +# Tests that try to figure out how Perl works. My hypothesis is that the first +# verb that is backtracked onto is the one that acts. This seems to be the case +# almost all the time, but there is one exception that is perhaps a bug. + +/a(?=bc).|abd/ + abd + 0: abd + abc + 0: ab + +/a(?>bc)d|abd/ + abceabd + 0: abd + +# These tests were formerly in test 2, but changes in PCRE and Perl have +# made them compatible. + +/^(a)?(?(1)a|b)+$/ +\= Expect no match + a +No match + +# ---- + +/^\d*\w{4}/ + 1234 + 0: 1234 +\= Expect no match + 123 +No match + +/^[^b]*\w{4}/ + aaaa + 0: aaaa +\= Expect no match + aaa +No match + +/^[^b]*\w{4}/i + aaaa + 0: aaaa +\= Expect no match + aaa +No match + +/^a*\w{4}/ + aaaa + 0: aaaa +\= Expect no match + aaa +No match + +/^a*\w{4}/i + aaaa + 0: aaaa +\= Expect no match + aaa +No match + +/(?:(?foo)|(?bar))\k/dupnames + foofoo + 0: foofoo + 1: foo + barbar + 0: barbar + 1: bar + +# A notable difference between PCRE and .NET. According to +# the PCRE docs: +# If you make a subroutine call to a non-unique named +# subpattern, the one that corresponds to the first +# occurrence of the name is used. In the absence of +# duplicate numbers (see the previous section) this is +# the one with the lowest number. +# .NET takes the most recently captured number according to MSDN: +# A backreference refers to the most recent definition of +# a group (the definition most immediately to the left, +# when matching left to right). 
When a group makes multiple +# captures, a backreference refers to the most recent capture. + +#/(?A)(?:(?foo)|(?bar))\k/dupnames +# AfooA +# 0: AfooA +# 1: A +# 2: foo +# AbarA +# 0: AbarA +# 1: A +# 2: +# 3: bar +#\= Expect no match +# Afoofoo +#No match +# Abarbar +#No match + +/^(\d+)\s+IN\s+SOA\s+(\S+)\s+(\S+)\s*\(\s*$/ + 1 IN SOA non-sp1 non-sp2( + 0: 1 IN SOA non-sp1 non-sp2( + 1: 1 + 2: non-sp1 + 3: non-sp2 + +# TODO: .NET's group number ordering here in the second example is a bit odd +/^ (?:(?A)|(?'B'B)(?A)) (?(A)x) (?(B)y)$/x,dupnames + Ax + 0: Ax + 1: A + BAxy + 0: BAxy + 1: A + 2: B + +/ ^ a + b $ /x + aaaab + 0: aaaab + +/ ^ a + #comment + b $ /x + aaaab + 0: aaaab + +/ ^ a + #comment + #comment + b $ /x + aaaab + 0: aaaab + +/ ^ (?> a + ) b $ /x + aaaab + 0: aaaab + +/ ^ ( a + ) + \w $ /x + aaaab + 0: aaaab + 1: aaaa + +/(?:x|(?:(xx|yy)+|x|x|x|x|x)|a|a|a)bc/ +\= Expect no match + acb +No match + +#Posses +#/\A(?:[^\"]+|\"(?:[^\"]*|\"\")*\")+/ +# NON QUOTED \"QUOT\"\"ED\" AFTER \"NOT MATCHED +# 0: NON QUOTED "QUOT""ED" AFTER + +#Posses +#/\A(?:[^\"]+|\"(?:[^\"]+|\"\")*\")+/ +# NON QUOTED \"QUOT\"\"ED\" AFTER \"NOT MATCHED +# 0: NON QUOTED "QUOT""ED" AFTER + +#Posses +#/\A(?:[^\"]+|\"(?:[^\"]+|\"\")+\")+/ +# NON QUOTED \"QUOT\"\"ED\" AFTER \"NOT MATCHED +# 0: NON QUOTED "QUOT""ED" AFTER + +#Posses +#/\A([^\"1]+|[\"2]([^\"3]*|[\"4][\"5])*[\"6])+/ +# NON QUOTED \"QUOT\"\"ED\" AFTER \"NOT MATCHED +# 0: NON QUOTED "QUOT""ED" AFTER +# 1: AFTER +# 2: + +/^\w+(?>\s*)(?<=\w)/ + test test + 0: tes + +#/(?Pa)?(?Pb)?(?()c|d)*l/ +# acl +# 0: acl +# 1: a +# bdl +# 0: bdl +# 1: +# 2: b +# adl +# 0: dl +# bcl +# 0: l + +/\sabc/ + \x0babc + 0: \x0babc + +#/[\Qa]\E]+/ +# aa]] +# 0: aa]] + +#/[\Q]a\E]+/ +# aa]] +# 0: aa]] + +/A((((((((a))))))))\8B/ + AaaB + 0: AaaB + 1: a + 2: a + 3: a + 4: a + 5: a + 6: a + 7: a + 8: a + +/A(((((((((a)))))))))\9B/ + AaaB + 0: AaaB + 1: a + 2: a + 3: a + 4: a + 5: a + 6: a + 7: a + 8: a + 9: a + +/(|ab)*?d/ + abd + 0: abd + 1: ab + xyd + 0: d + +/(\2|a)(\1)/ + aaa + 0: aa + 1: a + 2: a + +/(\2)(\1)/ + +"Z*(|d*){216}" + +/((((((((((((x))))))))))))\12/ + xx + 0: xx + 1: x + 2: x + 3: x + 4: x + 5: x + 6: x + 7: x + 8: x + 9: x +10: x +11: x +12: x + +#"(?|(\k'Pm')|(?'Pm'))" +# abcd +# 0: +# 1: + +#/(?|(aaa)|(b))\g{1}/ +# aaaaaa +# 0: aaaaaa +# 1: aaa +# bb +# 0: bb +# 1: b + +#/(?|(aaa)|(b))(?1)/ +# aaaaaa +# 0: aaaaaa +# 1: aaa +# baaa +# 0: baaa +# 1: b +#\= Expect no match +# bb +#No match + +#/(?|(aaa)|(b))/ +# xaaa +# 0: aaa +# 1: aaa +# xbc +# 0: b +# 1: b + +#/(?|(?'a'aaa)|(?'a'b))\k'a'/ +# aaaaaa +# 0: aaaaaa +# 1: aaa +# bb +# 0: bb +# 1: b + +#/(?|(?'a'aaa)|(?'a'b))(?'a'cccc)\k'a'/dupnames +# aaaccccaaa +# 0: aaaccccaaa +# 1: aaa +# 2: cccc +# bccccb +# 0: bccccb +# 1: b +# 2: cccc + +# End of testinput1 diff --git a/vendor/github.com/yuin/goldmark-highlighting/.gitignore b/vendor/github.com/yuin/goldmark-highlighting/.gitignore new file mode 100644 index 000000000..6e4db9226 --- /dev/null +++ b/vendor/github.com/yuin/goldmark-highlighting/.gitignore @@ -0,0 +1,13 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, build with `go test -c` +*.test +*.pprof + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out diff --git a/vendor/github.com/yuin/goldmark-highlighting/LICENSE b/vendor/github.com/yuin/goldmark-highlighting/LICENSE new file mode 100644 index 000000000..dc5b2a690 --- /dev/null +++ b/vendor/github.com/yuin/goldmark-highlighting/LICENSE @@ -0,0 +1,21 @@ +MIT License + 
+Copyright (c) 2019 Yusuke Inuzuka + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/yuin/goldmark-highlighting/README.md b/vendor/github.com/yuin/goldmark-highlighting/README.md new file mode 100644 index 000000000..857b61b03 --- /dev/null +++ b/vendor/github.com/yuin/goldmark-highlighting/README.md @@ -0,0 +1,66 @@ +goldmark-highlighting +========================= + +goldmark-highlighting is an extension for the [goldmark](http://github.com/yuin/goldmark) +that adds syntax-highlighting to the fenced code blocks. + +goldmark-highlighting uses [chroma](https://github.com/alecthomas/chroma) as a +syntax highlighter. + +Installation +-------------------- + +``` +go get github.com/yuin/goldmark-highlighting +``` + +Usage +-------------------- + +```go +import ( + "bytes" + "fmt" + "github.com/alecthomas/chroma/formatters/html" + "github.com/yuin/goldmark" + "github.com/yuin/goldmark/extension" + "github.com/yuin/goldmark/parser" + "github.com/yuin/goldmark-highlighting" + +) + +func main() { + markdown := goldmark.New( + goldmark.WithExtensions( + highlighting.Highlighting, + ), + ) + var buf bytes.Buffer + if err := markdown.Convert([]byte(source), &buf); err != nil { + panic(err) + } + fmt.Print(title) +} +``` + + +```go + markdown := goldmark.New( + goldmark.WithExtensions( + highlighting.NewHighlighting( + highlighting.WithStyle("monokai"), + highlighting.WithFormatOptions( + html.WithLineNumbers(), + ), + ), + ), + ) +``` + +License +-------------------- +MIT + +Author +-------------------- +Yusuke Inuzuka diff --git a/vendor/github.com/yuin/goldmark-highlighting/go.mod b/vendor/github.com/yuin/goldmark-highlighting/go.mod new file mode 100644 index 000000000..d5e4a6f30 --- /dev/null +++ b/vendor/github.com/yuin/goldmark-highlighting/go.mod @@ -0,0 +1,8 @@ +module github.com/yuin/goldmark-highlighting + +go 1.13 + +require ( + github.com/alecthomas/chroma v0.10.0 + github.com/yuin/goldmark v1.4.5 +) diff --git a/vendor/github.com/yuin/goldmark-highlighting/go.sum b/vendor/github.com/yuin/goldmark-highlighting/go.sum new file mode 100644 index 000000000..a930dc43d --- /dev/null +++ b/vendor/github.com/yuin/goldmark-highlighting/go.sum @@ -0,0 +1,20 @@ +github.com/alecthomas/chroma v0.10.0 h1:7XDcGkCQopCNKjZHfYrNLraA+M7e0fMiJ/Mfikbfjek= +github.com/alecthomas/chroma v0.10.0/go.mod h1:jtJATyUxlIORhUOFNA9NZDWGAQ8wpxQQqNSB4rjA/1s= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 
h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E= +github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/yuin/goldmark v1.4.4 h1:zNWRjYUW32G9KirMXYHQHVNFkXvMI7LpgNW2AgYAoIs= +github.com/yuin/goldmark v1.4.4/go.mod h1:rmuwmfZ0+bvzB24eSC//bk1R1Zp3hM0OXYv/G2LIilg= +github.com/yuin/goldmark v1.4.5 h1:4OEQwtW2uLXjEdgnGM3Vg652Pq37X7NOIRzFWb3BzIc= +github.com/yuin/goldmark v1.4.5/go.mod h1:rmuwmfZ0+bvzB24eSC//bk1R1Zp3hM0OXYv/G2LIilg= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/vendor/github.com/yuin/goldmark-highlighting/highlighting.go b/vendor/github.com/yuin/goldmark-highlighting/highlighting.go new file mode 100644 index 000000000..ffdaaed71 --- /dev/null +++ b/vendor/github.com/yuin/goldmark-highlighting/highlighting.go @@ -0,0 +1,578 @@ +// package highlighting is a extension for the goldmark(http://github.com/yuin/goldmark). +// +// This extension adds syntax-highlighting to the fenced code blocks using +// chroma(https://github.com/alecthomas/chroma). +package highlighting + +import ( + "bytes" + "io" + "strconv" + "strings" + + "github.com/yuin/goldmark" + "github.com/yuin/goldmark/ast" + "github.com/yuin/goldmark/parser" + "github.com/yuin/goldmark/renderer" + "github.com/yuin/goldmark/renderer/html" + "github.com/yuin/goldmark/text" + "github.com/yuin/goldmark/util" + + "github.com/alecthomas/chroma" + chromahtml "github.com/alecthomas/chroma/formatters/html" + "github.com/alecthomas/chroma/lexers" + "github.com/alecthomas/chroma/styles" +) + +// ImmutableAttributes is a read-only interface for ast.Attributes. +type ImmutableAttributes interface { + // Get returns (value, true) if an attribute associated with given + // name exists, otherwise (nil, false) + Get(name []byte) (interface{}, bool) + + // GetString returns (value, true) if an attribute associated with given + // name exists, otherwise (nil, false) + GetString(name string) (interface{}, bool) + + // All returns all attributes. + All() []ast.Attribute +} + +type immutableAttributes struct { + n ast.Node +} + +func (a *immutableAttributes) Get(name []byte) (interface{}, bool) { + return a.n.Attribute(name) +} + +func (a *immutableAttributes) GetString(name string) (interface{}, bool) { + return a.n.AttributeString(name) +} + +func (a *immutableAttributes) All() []ast.Attribute { + if a.n.Attributes() == nil { + return []ast.Attribute{} + } + return a.n.Attributes() +} + +// CodeBlockContext holds contextual information of code highlighting. 
+type CodeBlockContext interface { + // Language returns (language, true) if specified, otherwise (nil, false). + Language() ([]byte, bool) + + // Highlighted returns true if this code block can be highlighted, otherwise false. + Highlighted() bool + + // Attributes return attributes of the code block. + Attributes() ImmutableAttributes +} + +type codeBlockContext struct { + language []byte + highlighted bool + attributes ImmutableAttributes +} + +func newCodeBlockContext(language []byte, highlighted bool, attrs ImmutableAttributes) CodeBlockContext { + return &codeBlockContext{ + language: language, + highlighted: highlighted, + attributes: attrs, + } +} + +func (c *codeBlockContext) Language() ([]byte, bool) { + if c.language != nil { + return c.language, true + } + return nil, false +} + +func (c *codeBlockContext) Highlighted() bool { + return c.highlighted +} + +func (c *codeBlockContext) Attributes() ImmutableAttributes { + return c.attributes +} + +// WrapperRenderer renders wrapper elements like div, pre, etc. +type WrapperRenderer func(w util.BufWriter, context CodeBlockContext, entering bool) + +// CodeBlockOptions creates Chroma options per code block. +type CodeBlockOptions func(ctx CodeBlockContext) []chromahtml.Option + +// Config struct holds options for the extension. +type Config struct { + html.Config + + // Style is a highlighting style. + // Supported styles are defined under https://github.com/alecthomas/chroma/tree/master/formatters. + Style string + + // Pass in a custom Chroma style. If this is not nil, the Style string will be ignored + CustomStyle *chroma.Style + + // If set, will try to guess language if none provided. + // If the guessing fails, we will fall back to a text lexer. + // Note that while Chroma's API supports language guessing, the implementation + // is not there yet, so you will currently always get the basic text lexer. + GuessLanguage bool + + // FormatOptions is a option related to output formats. + // See https://github.com/alecthomas/chroma#the-html-formatter for details. + FormatOptions []chromahtml.Option + + // CSSWriter is an io.Writer that will be used as CSS data output buffer. + // If WithClasses() is enabled, you can get CSS data corresponds to the style. + CSSWriter io.Writer + + // CodeBlockOptions allows set Chroma options per code block. + CodeBlockOptions CodeBlockOptions + + // WrapperRenderer allows you to change wrapper elements. + WrapperRenderer WrapperRenderer +} + +// NewConfig returns a new Config with defaults. +func NewConfig() Config { + return Config{ + Config: html.NewConfig(), + Style: "github", + FormatOptions: []chromahtml.Option{}, + CSSWriter: nil, + WrapperRenderer: nil, + CodeBlockOptions: nil, + } +} + +// SetOption implements renderer.SetOptioner. +func (c *Config) SetOption(name renderer.OptionName, value interface{}) { + switch name { + case optStyle: + c.Style = value.(string) + case optCustomStyle: + c.CustomStyle = value.(*chroma.Style) + case optFormatOptions: + if value != nil { + c.FormatOptions = value.([]chromahtml.Option) + } + case optCSSWriter: + c.CSSWriter = value.(io.Writer) + case optWrapperRenderer: + c.WrapperRenderer = value.(WrapperRenderer) + case optCodeBlockOptions: + c.CodeBlockOptions = value.(CodeBlockOptions) + case optGuessLanguage: + c.GuessLanguage = value.(bool) + default: + c.Config.SetOption(name, value) + } +} + +// Option interface is a functional option interface for the extension. 
+type Option interface { + renderer.Option + // SetHighlightingOption sets given option to the extension. + SetHighlightingOption(*Config) +} + +type withHTMLOptions struct { + value []html.Option +} + +func (o *withHTMLOptions) SetConfig(c *renderer.Config) { + if o.value != nil { + for _, v := range o.value { + v.(renderer.Option).SetConfig(c) + } + } +} + +func (o *withHTMLOptions) SetHighlightingOption(c *Config) { + if o.value != nil { + for _, v := range o.value { + v.SetHTMLOption(&c.Config) + } + } +} + +// WithHTMLOptions is functional option that wraps goldmark HTMLRenderer options. +func WithHTMLOptions(opts ...html.Option) Option { + return &withHTMLOptions{opts} +} + +const optStyle renderer.OptionName = "HighlightingStyle" +const optCustomStyle renderer.OptionName = "HighlightingCustomStyle" + +var highlightLinesAttrName = []byte("hl_lines") + +var styleAttrName = []byte("hl_style") +var nohlAttrName = []byte("nohl") +var linenosAttrName = []byte("linenos") +var linenosTableAttrValue = []byte("table") +var linenosInlineAttrValue = []byte("inline") +var linenostartAttrName = []byte("linenostart") + +type withStyle struct { + value string +} + +func (o *withStyle) SetConfig(c *renderer.Config) { + c.Options[optStyle] = o.value +} + +func (o *withStyle) SetHighlightingOption(c *Config) { + c.Style = o.value +} + +// WithStyle is a functional option that changes highlighting style. +func WithStyle(style string) Option { + return &withStyle{style} +} + +type withCustomStyle struct { + value *chroma.Style +} + +func (o *withCustomStyle) SetConfig(c *renderer.Config) { + c.Options[optCustomStyle] = o.value +} + +func (o *withCustomStyle) SetHighlightingOption(c *Config) { + c.CustomStyle = o.value +} + +// WithStyle is a functional option that changes highlighting style. +func WithCustomStyle(style *chroma.Style) Option { + return &withCustomStyle{style} +} + +const optCSSWriter renderer.OptionName = "HighlightingCSSWriter" + +type withCSSWriter struct { + value io.Writer +} + +func (o *withCSSWriter) SetConfig(c *renderer.Config) { + c.Options[optCSSWriter] = o.value +} + +func (o *withCSSWriter) SetHighlightingOption(c *Config) { + c.CSSWriter = o.value +} + +// WithCSSWriter is a functional option that sets io.Writer for CSS data. +func WithCSSWriter(w io.Writer) Option { + return &withCSSWriter{w} +} + +const optGuessLanguage renderer.OptionName = "HighlightingGuessLanguage" + +type withGuessLanguage struct { + value bool +} + +func (o *withGuessLanguage) SetConfig(c *renderer.Config) { + c.Options[optGuessLanguage] = o.value +} + +func (o *withGuessLanguage) SetHighlightingOption(c *Config) { + c.GuessLanguage = o.value +} + +// WithGuessLanguage is a functional option that toggles language guessing +// if none provided. +func WithGuessLanguage(b bool) Option { + return &withGuessLanguage{value: b} +} + +const optWrapperRenderer renderer.OptionName = "HighlightingWrapperRenderer" + +type withWrapperRenderer struct { + value WrapperRenderer +} + +func (o *withWrapperRenderer) SetConfig(c *renderer.Config) { + c.Options[optWrapperRenderer] = o.value +} + +func (o *withWrapperRenderer) SetHighlightingOption(c *Config) { + c.WrapperRenderer = o.value +} + +// WithWrapperRenderer is a functional option that sets WrapperRenderer that +// renders wrapper elements like div, pre, etc. 
+func WithWrapperRenderer(w WrapperRenderer) Option { + return &withWrapperRenderer{w} +} + +const optCodeBlockOptions renderer.OptionName = "HighlightingCodeBlockOptions" + +type withCodeBlockOptions struct { + value CodeBlockOptions +} + +func (o *withCodeBlockOptions) SetConfig(c *renderer.Config) { + c.Options[optWrapperRenderer] = o.value +} + +func (o *withCodeBlockOptions) SetHighlightingOption(c *Config) { + c.CodeBlockOptions = o.value +} + +// WithCodeBlockOptions is a functional option that sets CodeBlockOptions that +// allows setting Chroma options per code block. +func WithCodeBlockOptions(c CodeBlockOptions) Option { + return &withCodeBlockOptions{value: c} +} + +const optFormatOptions renderer.OptionName = "HighlightingFormatOptions" + +type withFormatOptions struct { + value []chromahtml.Option +} + +func (o *withFormatOptions) SetConfig(c *renderer.Config) { + if _, ok := c.Options[optFormatOptions]; !ok { + c.Options[optFormatOptions] = []chromahtml.Option{} + } + c.Options[optFormatOptions] = append(c.Options[optFormatOptions].([]chromahtml.Option), o.value...) +} + +func (o *withFormatOptions) SetHighlightingOption(c *Config) { + c.FormatOptions = append(c.FormatOptions, o.value...) +} + +// WithFormatOptions is a functional option that wraps chroma HTML formatter options. +func WithFormatOptions(opts ...chromahtml.Option) Option { + return &withFormatOptions{opts} +} + +// HTMLRenderer struct is a renderer.NodeRenderer implementation for the extension. +type HTMLRenderer struct { + Config +} + +// NewHTMLRenderer builds a new HTMLRenderer with given options and returns it. +func NewHTMLRenderer(opts ...Option) renderer.NodeRenderer { + r := &HTMLRenderer{ + Config: NewConfig(), + } + for _, opt := range opts { + opt.SetHighlightingOption(&r.Config) + } + return r +} + +// RegisterFuncs implements NodeRenderer.RegisterFuncs. 
+func (r *HTMLRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) { + reg.Register(ast.KindFencedCodeBlock, r.renderFencedCodeBlock) +} + +func getAttributes(node *ast.FencedCodeBlock, infostr []byte) ImmutableAttributes { + if node.Attributes() != nil { + return &immutableAttributes{node} + } + if infostr != nil { + attrStartIdx := -1 + + for idx, char := range infostr { + if char == '{' { + attrStartIdx = idx + break + } + } + if attrStartIdx > 0 { + n := ast.NewTextBlock() // dummy node for storing attributes + attrStr := infostr[attrStartIdx:] + if attrs, hasAttr := parser.ParseAttributes(text.NewReader(attrStr)); hasAttr { + for _, attr := range attrs { + n.SetAttribute(attr.Name, attr.Value) + } + return &immutableAttributes{n} + } + } + } + return nil +} + +func (r *HTMLRenderer) renderFencedCodeBlock(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) { + n := node.(*ast.FencedCodeBlock) + if !entering { + return ast.WalkContinue, nil + } + language := n.Language(source) + + chromaFormatterOptions := make([]chromahtml.Option, len(r.FormatOptions)) + copy(chromaFormatterOptions, r.FormatOptions) + + style := r.CustomStyle + if style == nil { + style = styles.Get(r.Style) + } + nohl := false + + var info []byte + if n.Info != nil { + info = n.Info.Segment.Value(source) + } + attrs := getAttributes(n, info) + if attrs != nil { + baseLineNumber := 1 + if linenostartAttr, ok := attrs.Get(linenostartAttrName); ok { + if linenostart, ok := linenostartAttr.(float64); ok { + baseLineNumber = int(linenostart) + chromaFormatterOptions = append(chromaFormatterOptions, chromahtml.BaseLineNumber(baseLineNumber)) + } + } + if linesAttr, hasLinesAttr := attrs.Get(highlightLinesAttrName); hasLinesAttr { + if lines, ok := linesAttr.([]interface{}); ok { + var hlRanges [][2]int + for _, l := range lines { + if ln, ok := l.(float64); ok { + hlRanges = append(hlRanges, [2]int{int(ln) + baseLineNumber - 1, int(ln) + baseLineNumber - 1}) + } + if rng, ok := l.([]uint8); ok { + slices := strings.Split(string([]byte(rng)), "-") + lhs, err := strconv.Atoi(slices[0]) + if err != nil { + continue + } + rhs := lhs + if len(slices) > 1 { + rhs, err = strconv.Atoi(slices[1]) + if err != nil { + continue + } + } + hlRanges = append(hlRanges, [2]int{lhs + baseLineNumber - 1, rhs + baseLineNumber - 1}) + } + } + chromaFormatterOptions = append(chromaFormatterOptions, chromahtml.HighlightLines(hlRanges)) + } + } + if styleAttr, hasStyleAttr := attrs.Get(styleAttrName); hasStyleAttr { + if st, ok := styleAttr.([]uint8); ok { + styleStr := string([]byte(st)) + style = styles.Get(styleStr) + } + } + if _, hasNohlAttr := attrs.Get(nohlAttrName); hasNohlAttr { + nohl = true + } + + if linenosAttr, ok := attrs.Get(linenosAttrName); ok { + switch v := linenosAttr.(type) { + case bool: + chromaFormatterOptions = append(chromaFormatterOptions, chromahtml.WithLineNumbers(v)) + case []uint8: + if v != nil { + chromaFormatterOptions = append(chromaFormatterOptions, chromahtml.WithLineNumbers(true)) + } + if bytes.Equal(v, linenosTableAttrValue) { + chromaFormatterOptions = append(chromaFormatterOptions, chromahtml.LineNumbersInTable(true)) + } else if bytes.Equal(v, linenosInlineAttrValue) { + chromaFormatterOptions = append(chromaFormatterOptions, chromahtml.LineNumbersInTable(false)) + } + } + } + } + + var lexer chroma.Lexer + if language != nil { + lexer = lexers.Get(string(language)) + } + if !nohl && (lexer != nil || r.GuessLanguage) { + if style == nil { + style = 
styles.Fallback + } + var buffer bytes.Buffer + l := n.Lines().Len() + for i := 0; i < l; i++ { + line := n.Lines().At(i) + buffer.Write(line.Value(source)) + } + + if lexer == nil { + lexer = lexers.Analyse(buffer.String()) + if lexer == nil { + lexer = lexers.Fallback + } + language = []byte(strings.ToLower(lexer.Config().Name)) + } + lexer = chroma.Coalesce(lexer) + + iterator, err := lexer.Tokenise(nil, buffer.String()) + if err == nil { + c := newCodeBlockContext(language, true, attrs) + + if r.CodeBlockOptions != nil { + chromaFormatterOptions = append(chromaFormatterOptions, r.CodeBlockOptions(c)...) + } + formatter := chromahtml.New(chromaFormatterOptions...) + if r.WrapperRenderer != nil { + r.WrapperRenderer(w, c, true) + } + _ = formatter.Format(w, style, iterator) == nil + if r.WrapperRenderer != nil { + r.WrapperRenderer(w, c, false) + } + if r.CSSWriter != nil { + _ = formatter.WriteCSS(r.CSSWriter, style) + } + return ast.WalkContinue, nil + } + } + + var c CodeBlockContext + if r.WrapperRenderer != nil { + c = newCodeBlockContext(language, false, attrs) + r.WrapperRenderer(w, c, true) + } else { + _, _ = w.WriteString("\n") + } + return ast.WalkContinue, nil +} + +type highlighting struct { + options []Option +} + +// Highlighting is a goldmark.Extender implementation. +var Highlighting = &highlighting{ + options: []Option{}, +} + +// NewHighlighting returns a new extension with given options. +func NewHighlighting(opts ...Option) goldmark.Extender { + return &highlighting{ + options: opts, + } +} + +// Extend implements goldmark.Extender. +func (e *highlighting) Extend(m goldmark.Markdown) { + m.Renderer().AddOptions(renderer.WithNodeRenderers( + util.Prioritized(NewHTMLRenderer(e.options...), 200), + )) +} diff --git a/vendor/github.com/yuin/goldmark-meta/README.md b/vendor/github.com/yuin/goldmark-meta/README.md index 98f926b35..4eb7728da 100644 --- a/vendor/github.com/yuin/goldmark-meta/README.md +++ b/vendor/github.com/yuin/goldmark-meta/README.md @@ -1,5 +1,10 @@ goldmark-meta ========================= +[![GoDev][godev-image]][godev-url] + +[godev-image]: https://pkg.go.dev/badge/github.com/yuin/goldmark-meta +[godev-url]: https://pkg.go.dev/github.com/yuin/goldmark-meta + goldmark-meta is an extension for the [goldmark](http://github.com/yuin/goldmark) that allows you to define document metadata in YAML format. 
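The meta.go changes further down add error-aware accessors (`TryGet`, `TryGetItems`) alongside the existing `Get`/`GetItems`, and the README hunk that follows documents the new `WithStoresInDocument` option. As a minimal sketch of the context-based path using the newly added `TryGet` accessor — the front-matter sample and variable names here are illustrative, not taken from the upstream README:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/yuin/goldmark"
	"github.com/yuin/goldmark-meta"
	"github.com/yuin/goldmark/parser"
)

func main() {
	markdown := goldmark.New(
		goldmark.WithExtensions(meta.Meta),
	)
	source := `---
Title: goldmark-meta
---

# Heading
`
	var buf bytes.Buffer
	context := parser.NewContext()
	if err := markdown.Convert([]byte(source), &buf, parser.WithContext(context)); err != nil {
		panic(err)
	}
	// TryGet surfaces YAML parsing errors instead of returning a possibly empty map.
	metaData, err := meta.TryGet(context)
	if err != nil {
		panic(err)
	}
	fmt.Print(metaData["Title"])
}
```

Unlike `Get`, `TryGet` returns the YAML parsing error recorded in the extension's internal `data` value, so malformed front matter can be reported rather than silently ignored.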
@@ -83,6 +88,41 @@ Tags: } ``` +Or `WithStoresInDocument` option: + +```go +import ( + "bytes" + "fmt" + "github.com/yuin/goldmark" + "github.com/yuin/goldmark/extension" + "github.com/yuin/goldmark/parser" + "github.com/yuin/goldmark-meta" +) + +func main() { + markdown := goldmark.New( + goldmark.WithExtensions( + meta.New( + meta.WithStoresInDocument(), + ), + ), + ) + source := `--- +Title: goldmark-meta +Summary: Add YAML metadata to the document +Tags: + - markdown + - goldmark +--- +` + + document := markdown.Parser().Parse(text.NewReader([]byte(source))) + metaData := document.OwnerDocument().Meta() + title := metaData["Title"] + fmt.Print(title) +``` + ### Render the metadata as a table You need to add `extension.TableHTMLRenderer` or the `Table` extension to diff --git a/vendor/github.com/yuin/goldmark-meta/go.mod b/vendor/github.com/yuin/goldmark-meta/go.mod index b5496c175..fb6b72148 100644 --- a/vendor/github.com/yuin/goldmark-meta/go.mod +++ b/vendor/github.com/yuin/goldmark-meta/go.mod @@ -1,8 +1,8 @@ module github.com/yuin/goldmark-meta -go 1.13 +go 1.17 require ( - github.com/yuin/goldmark v1.1.7 - gopkg.in/yaml.v2 v2.2.2 + github.com/yuin/goldmark v1.4.6 + gopkg.in/yaml.v2 v2.3.0 ) diff --git a/vendor/github.com/yuin/goldmark-meta/go.sum b/vendor/github.com/yuin/goldmark-meta/go.sum index 7cd925cd5..27fe65bd6 100644 --- a/vendor/github.com/yuin/goldmark-meta/go.sum +++ b/vendor/github.com/yuin/goldmark-meta/go.sum @@ -1,6 +1,6 @@ -github.com/yuin/goldmark v1.1.7 h1:XiwWADvxJeIM1JbXqthrEhDc19hTMui+o+QaY1hGXlk= -github.com/yuin/goldmark v1.1.7/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.6 h1:EQ1OkiNq/eMbQxs/2O/A8VDIHERXGH14s19ednd4XIw= +github.com/yuin/goldmark v1.4.6/go.mod h1:rmuwmfZ0+bvzB24eSC//bk1R1Zp3hM0OXYv/G2LIilg= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/yuin/goldmark-meta/meta.go b/vendor/github.com/yuin/goldmark-meta/meta.go index 286043713..f6c93f194 100644 --- a/vendor/github.com/yuin/goldmark-meta/meta.go +++ b/vendor/github.com/yuin/goldmark-meta/meta.go @@ -7,6 +7,7 @@ package meta import ( "bytes" "fmt" + "github.com/yuin/goldmark" gast "github.com/yuin/goldmark/ast" east "github.com/yuin/goldmark/extension/ast" @@ -26,6 +27,11 @@ type data struct { var contextKey = parser.NewContextKey() +// Option interface sets options for this extension. +type Option interface { + metaOption() +} + // Get returns a YAML metadata. func Get(pc parser.Context) map[string]interface{} { v := pc.Get(contextKey) @@ -36,6 +42,20 @@ func Get(pc parser.Context) map[string]interface{} { return d.Map } +// TryGet tries to get a YAML metadata. +// If there are YAML parsing errors, then nil and error are returned +func TryGet(pc parser.Context) (map[string]interface{}, error) { + dtmp := pc.Get(contextKey) + if dtmp == nil { + return nil, nil + } + d := dtmp.(*data) + if d.Error != nil { + return nil, d.Error + } + return d.Map, nil +} + // GetItems returns a YAML metadata. // GetItems preserves defined key order. 
func GetItems(pc parser.Context) yaml.MapSlice { @@ -47,14 +67,29 @@ func GetItems(pc parser.Context) yaml.MapSlice { return d.Items } +// TryGetItems returns a YAML metadata. +// TryGetItems preserves defined key order. +// If there are YAML parsing errors, then nil and erro are returned. +func TryGetItems(pc parser.Context) (yaml.MapSlice, error) { + dtmp := pc.Get(contextKey) + if dtmp == nil { + return nil, nil + } + d := dtmp.(*data) + if d.Error != nil { + return nil, d.Error + } + return d.Items, nil +} + type metaParser struct { } -var defaultMetaParser = &metaParser{} +var defaultParser = &metaParser{} // NewParser returns a BlockParser that can parse YAML metadata blocks. func NewParser() parser.BlockParser { - return defaultMetaParser + return defaultParser } func isSeparator(line []byte) bool { @@ -85,7 +120,7 @@ func (b *metaParser) Open(parent gast.Node, reader text.Reader, pc parser.Contex func (b *metaParser) Continue(node gast.Node, reader text.Reader, pc parser.Context) parser.State { line, segment := reader.PeekLine() - if isSeparator(line) { + if isSeparator(line) && !util.IsBlank(line) { reader.Advance(segment.Len()) return parser.Close } @@ -132,9 +167,74 @@ func (b *metaParser) CanAcceptIndentedLine() bool { } type astTransformer struct { + transformerConfig +} + +type transformerConfig struct { + // Renders metadata as an html table. + Table bool + + // Stores metadata in ast.Document.Meta(). + StoresInDocument bool +} + +type transformerOption interface { + Option + + // SetMetaOption sets options for the metadata parser. + SetMetaOption(*transformerConfig) +} + +var _ transformerOption = &withTable{} + +type withTable struct { + value bool +} + +func (o *withTable) metaOption() {} + +func (o *withTable) SetMetaOption(m *transformerConfig) { + m.Table = o.value +} + +// WithTable is a functional option that renders a YAML metadata as a table. +func WithTable() Option { + return &withTable{ + value: true, + } +} + +var _ transformerOption = &withStoresInDocument{} + +type withStoresInDocument struct { + value bool +} + +func (o *withStoresInDocument) metaOption() {} + +func (o *withStoresInDocument) SetMetaOption(c *transformerConfig) { + c.StoresInDocument = o.value } -var defaultASTTransformer = &astTransformer{} +// WithStoresInDocument is a functional option that parser will store YAML meta in ast.Document.Meta(). 
+func WithStoresInDocument() Option {
+    return &withStoresInDocument{
+        value: true,
+    }
+}
+
+func newTransformer(opts ...transformerOption) parser.ASTTransformer {
+    p := &astTransformer{
+        transformerConfig: transformerConfig{
+            Table:            false,
+            StoresInDocument: false,
+        },
+    }
+    for _, o := range opts {
+        o.SetMetaOption(&p.transformerConfig)
+    }
+    return p
+}

 func (a *astTransformer) Transform(node *gast.Document, reader text.Reader, pc parser.Context) {
     dtmp := pc.Get(contextKey)
@@ -149,45 +249,43 @@ func (a *astTransformer) Transform(node *gast.Document, reader text.Reader, pc p
         return
     }

-    meta := GetItems(pc)
-    if meta == nil {
-        return
-    }
-    table := east.NewTable()
-    alignments := []east.Alignment{}
-    for range meta {
-        alignments = append(alignments, east.AlignNone)
-    }
-    row := east.NewTableRow(alignments)
-    for _, item := range meta {
-        cell := east.NewTableCell()
-        cell.AppendChild(cell, gast.NewString([]byte(fmt.Sprintf("%v", item.Key))))
-        row.AppendChild(row, cell)
-    }
-    table.AppendChild(table, east.NewTableHeader(row))
+    if a.Table {
+        meta := GetItems(pc)
+        if meta == nil {
+            return
+        }
+        table := east.NewTable()
+        alignments := []east.Alignment{}
+        for range meta {
+            alignments = append(alignments, east.AlignNone)
+        }
+        row := east.NewTableRow(alignments)
+        for _, item := range meta {
+            cell := east.NewTableCell()
+            cell.AppendChild(cell, gast.NewString([]byte(fmt.Sprintf("%v", item.Key))))
+            row.AppendChild(row, cell)
+        }
+        table.AppendChild(table, east.NewTableHeader(row))

-    row = east.NewTableRow(alignments)
-    for _, item := range meta {
-        cell := east.NewTableCell()
-        cell.AppendChild(cell, gast.NewString([]byte(fmt.Sprintf("%v", item.Value))))
-        row.AppendChild(row, cell)
+        row = east.NewTableRow(alignments)
+        for _, item := range meta {
+            cell := east.NewTableCell()
+            cell.AppendChild(cell, gast.NewString([]byte(fmt.Sprintf("%v", item.Value))))
+            row.AppendChild(row, cell)
+        }
+        table.AppendChild(table, row)
+        node.InsertBefore(node, node.FirstChild(), table)
     }
-    table.AppendChild(table, row)
-    node.InsertBefore(node, node.FirstChild(), table)
-}
-
-// Option is a functional option type for this extension.
-type Option func(*meta)

-// WithTable is a functional option that renders a YAML metadata as a table.
-func WithTable() Option {
-    return func(m *meta) {
-        m.Table = true
+    if a.StoresInDocument {
+        for k, v := range d.Map {
+            node.AddMeta(k, v)
+        }
     }
 }

 type meta struct {
-    Table bool
+    options []Option
 }

 // Meta is a extension for the goldmark.
@@ -195,24 +293,28 @@ var Meta = &meta{}

 // New returns a new Meta extension.
 func New(opts ...Option) goldmark.Extender {
-    e := &meta{}
-    for _, opt := range opts {
-        opt(e)
+    e := &meta{
+        options: opts,
     }
     return e
 }

+// Extend implements goldmark.Extender.
 func (e *meta) Extend(m goldmark.Markdown) {
+    topts := []transformerOption{}
+    for _, opt := range e.options {
+        if topt, ok := opt.(transformerOption); ok {
+            topts = append(topts, topt)
+        }
+    }
     m.Parser().AddOptions(
         parser.WithBlockParsers(
             util.Prioritized(NewParser(), 0),
         ),
     )
-    if e.Table {
-        m.Parser().AddOptions(
-            parser.WithASTTransformers(
-                util.Prioritized(defaultASTTransformer, 0),
-            ),
-        )
-    }
+    m.Parser().AddOptions(
+        parser.WithASTTransformers(
+            util.Prioritized(newTransformer(topts...), 0),
+        ),
+    )
 }
diff --git a/vendor/gopkg.in/yaml.v2/apic.go b/vendor/gopkg.in/yaml.v2/apic.go
index 1f7e87e67..d2c2308f1 100644
--- a/vendor/gopkg.in/yaml.v2/apic.go
+++ b/vendor/gopkg.in/yaml.v2/apic.go
@@ -86,6 +86,7 @@ func yaml_emitter_initialize(emitter *yaml_emitter_t) {
         raw_buffer: make([]byte, 0, output_raw_buffer_size),
         states:     make([]yaml_emitter_state_t, 0, initial_stack_size),
         events:     make([]yaml_event_t, 0, initial_queue_size),
+        best_width: -1,
     }
 }

diff --git a/vendor/modules.txt b/vendor/modules.txt
index 386744f99..0577cfe2c 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -100,6 +100,39 @@ github.com/RichardKnop/redsync
 # github.com/RoaringBitmap/roaring v0.4.23
 ## explicit
 github.com/RoaringBitmap/roaring
+# github.com/alecthomas/chroma v0.10.0
+## explicit
+github.com/alecthomas/chroma
+github.com/alecthomas/chroma/formatters/html
+github.com/alecthomas/chroma/lexers
+github.com/alecthomas/chroma/lexers/a
+github.com/alecthomas/chroma/lexers/b
+github.com/alecthomas/chroma/lexers/c
+github.com/alecthomas/chroma/lexers/circular
+github.com/alecthomas/chroma/lexers/d
+github.com/alecthomas/chroma/lexers/e
+github.com/alecthomas/chroma/lexers/f
+github.com/alecthomas/chroma/lexers/g
+github.com/alecthomas/chroma/lexers/h
+github.com/alecthomas/chroma/lexers/i
+github.com/alecthomas/chroma/lexers/internal
+github.com/alecthomas/chroma/lexers/j
+github.com/alecthomas/chroma/lexers/k
+github.com/alecthomas/chroma/lexers/l
+github.com/alecthomas/chroma/lexers/m
+github.com/alecthomas/chroma/lexers/n
+github.com/alecthomas/chroma/lexers/o
+github.com/alecthomas/chroma/lexers/p
+github.com/alecthomas/chroma/lexers/q
+github.com/alecthomas/chroma/lexers/r
+github.com/alecthomas/chroma/lexers/s
+github.com/alecthomas/chroma/lexers/t
+github.com/alecthomas/chroma/lexers/v
+github.com/alecthomas/chroma/lexers/w
+github.com/alecthomas/chroma/lexers/x
+github.com/alecthomas/chroma/lexers/y
+github.com/alecthomas/chroma/lexers/z
+github.com/alecthomas/chroma/styles
 # github.com/alibabacloud-go/alibabacloud-gateway-spi v0.0.4
 github.com/alibabacloud-go/alibabacloud-gateway-spi/client
 # github.com/alibabacloud-go/darabonba-openapi v0.1.18
@@ -282,6 +315,9 @@ github.com/dgrijalva/jwt-go
 # github.com/disintegration/imaging v1.6.2
 ## explicit
 github.com/disintegration/imaging
+# github.com/dlclark/regexp2 v1.4.0
+github.com/dlclark/regexp2
+github.com/dlclark/regexp2/syntax
 # github.com/dustin/go-humanize v1.0.0
 ## explicit
 github.com/dustin/go-humanize
@@ -864,7 +900,10 @@ github.com/yuin/goldmark/renderer
 github.com/yuin/goldmark/renderer/html
 github.com/yuin/goldmark/text
 github.com/yuin/goldmark/util
-# github.com/yuin/goldmark-meta v0.0.0-20191126180153-f0638e958b60
+# github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594
+## explicit
+github.com/yuin/goldmark-highlighting
+# github.com/yuin/goldmark-meta v1.1.0
 ## explicit
 github.com/yuin/goldmark-meta
 # go.etcd.io/bbolt v1.3.4
@@ -1177,7 +1216,7 @@ gopkg.in/testfixtures.v2
 gopkg.in/toqueteos/substring.v1
 # gopkg.in/warnings.v0 v0.1.2
 gopkg.in/warnings.v0
-# gopkg.in/yaml.v2 v2.2.8
+# gopkg.in/yaml.v2 v2.3.0
 ## explicit
 gopkg.in/yaml.v2
 # gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c
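
The vendored `meta.go` above replaces goldmark-meta's old functional option (`type Option func(*meta)`) with an `Option` interface, adds `WithTable`/`WithStoresInDocument`, and introduces `TryGet`/`TryGetItems` for error-aware access to the front matter. A minimal sketch of how a consumer of the upgraded extension could drive the new API; the module versions are assumed to match this diff, and the snippet is illustrative rather than part of the patch:

```go
package main

import (
	"bytes"
	"fmt"
	"log"

	"github.com/yuin/goldmark"
	meta "github.com/yuin/goldmark-meta"
	"github.com/yuin/goldmark/parser"
)

func main() {
	md := goldmark.New(
		goldmark.WithExtensions(
			// WithStoresInDocument copies the front matter into ast.Document.Meta();
			// WithTable would render it as an HTML table instead.
			meta.New(meta.WithStoresInDocument()),
		),
	)

	source := []byte("---\nTitle: goldmark-meta\nTags:\n  - markdown\n---\n\n# Hello\n")

	var buf bytes.Buffer
	ctx := parser.NewContext()
	if err := md.Convert(source, &buf, parser.WithContext(ctx)); err != nil {
		log.Fatal(err)
	}

	// TryGet reports YAML parse errors instead of silently returning a nil map like Get.
	metaData, err := meta.TryGet(ctx)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(metaData["Title"])
	fmt.Println(buf.String())
}
```

With `WithStoresInDocument()` the same key/value pairs are also reachable from `document.OwnerDocument().Meta()` after parsing, which is what the README example added above relies on.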
+		_, _ = w.WriteString("<pre><code>")
+	}
+	l := n.Lines().Len()
+	for i := 0; i < l; i++ {
+		line := n.Lines().At(i)
+		r.Writer.RawWrite(w, line.Value(source))
+	}
+	if r.WrapperRenderer != nil {
+		r.WrapperRenderer(w, c, false)
+	} else {
+		_, _ = w.WriteString("</code></pre>\n")
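
The fragment above is the highlighting extension's plain-text fallback: when no lexer is found it copies the fenced block's raw lines through `r.Writer.RawWrite` and closes the block either via the `WrapperRenderer` hook or with literal tags. A small sketch of registering the extension with such a wrapper from the consumer side, assuming the goldmark-highlighting and chroma versions vendored in this patch; the wrapper markup below is illustrative only, not the exact HTML Gitea emits:

```go
package main

import (
	"bytes"
	"log"

	chromahtml "github.com/alecthomas/chroma/formatters/html"
	"github.com/yuin/goldmark"
	highlighting "github.com/yuin/goldmark-highlighting"
	"github.com/yuin/goldmark/util"
)

func main() {
	md := goldmark.New(
		goldmark.WithExtensions(
			highlighting.NewHighlighting(
				highlighting.WithFormatOptions(
					chromahtml.WithClasses(true),           // emit CSS classes, not inline styles
					chromahtml.PreventSurroundingPre(true), // the wrapper below owns the <pre>
				),
				highlighting.WithWrapperRenderer(func(w util.BufWriter, c highlighting.CodeBlockContext, entering bool) {
					language, _ := c.Language()
					if language == nil {
						language = []byte("text")
					}
					if entering {
						_, _ = w.WriteString(`<pre class="code-block" lang="` + string(language) + `"><code>`)
						return
					}
					_, _ = w.WriteString("</code></pre>\n")
				}),
			),
		),
	)

	var buf bytes.Buffer
	if err := md.Convert([]byte("```go\nfmt.Println(\"hi\")\n```\n"), &buf); err != nil {
		log.Fatal(err)
	}
	log.Print(buf.String())
}
```

Emitting classes rather than inline styles keeps the generated markup easier to pass through an HTML sanitizer and lets a stylesheet supply the chroma theme.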