@@ -22,6 +22,7 @@ require ( | |||
github.com/PuerkitoBio/goquery v1.5.0 | |||
github.com/RichardKnop/machinery v1.6.9 | |||
github.com/RoaringBitmap/roaring v0.4.23 // indirect | |||
github.com/alecthomas/chroma v0.10.0 | |||
github.com/alibabacloud-go/darabonba-openapi v0.1.18 | |||
github.com/alibabacloud-go/dysmsapi-20170525/v2 v2.0.9 | |||
github.com/alibabacloud-go/tea v1.1.17 | |||
@@ -120,8 +121,9 @@ require ( | |||
github.com/urfave/cli v1.22.1 | |||
github.com/xanzy/go-gitlab v0.31.0 | |||
github.com/yohcop/openid-go v1.0.0 | |||
github.com/yuin/goldmark v1.1.30 | |||
github.com/yuin/goldmark-meta v0.0.0-20191126180153-f0638e958b60 | |||
github.com/yuin/goldmark v1.4.13 | |||
github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594 | |||
github.com/yuin/goldmark-meta v1.1.0 | |||
golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37 | |||
golang.org/x/mod v0.3.0 // indirect | |||
golang.org/x/net v0.0.0-20200513185701-a91f0712d120 | |||
@@ -138,7 +140,7 @@ require ( | |||
gopkg.in/ldap.v3 v3.0.2 | |||
gopkg.in/macaron.v1 v1.3.9 // indirect | |||
gopkg.in/testfixtures.v2 v2.5.0 | |||
gopkg.in/yaml.v2 v2.2.8 | |||
gopkg.in/yaml.v2 v2.3.0 | |||
mvdan.cc/xurls/v2 v2.1.0 | |||
strk.kbt.io/projects/go/libravatar v0.0.0-20191008002943-06d1c002b251 | |||
xorm.io/builder v0.3.7 | |||
@@ -76,6 +76,8 @@ github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMx | |||
github.com/Unknwon/com v0.0.0-20190321035513-0fed4efef755/go.mod h1:voKvFVpXBJxdIPeqjoJuLK+UVcRlo/JLjeToGxPYu68= | |||
github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7 h1:uSoVVbwJiQipAclBbw+8quDsfcvFjOpI5iCf4p/cqCs= | |||
github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs= | |||
github.com/alecthomas/chroma v0.10.0 h1:7XDcGkCQopCNKjZHfYrNLraA+M7e0fMiJ/Mfikbfjek= | |||
github.com/alecthomas/chroma v0.10.0/go.mod h1:jtJATyUxlIORhUOFNA9NZDWGAQ8wpxQQqNSB4rjA/1s= | |||
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= | |||
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= | |||
github.com/alibabacloud-go/alibabacloud-gateway-spi v0.0.2/go.mod h1:sCavSAvdzOjul4cEqeVtvlSaSScfNsTQ+46HwlTL1hc= | |||
@@ -203,6 +205,8 @@ github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZm | |||
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= | |||
github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c= | |||
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4= | |||
github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E= | |||
github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= | |||
github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= | |||
github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= | |||
github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo= | |||
@@ -804,8 +808,16 @@ github.com/yuin/goldmark v1.1.27 h1:nqDD4MMMQA0lmWq03Z2/myGPYLQoXtmi0rGVs95ntbo= | |||
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= | |||
github.com/yuin/goldmark v1.1.30 h1:j4d4Lw3zqZelDhBksEo3BnWg9xhXRQGJPPSL6OApZjI= | |||
github.com/yuin/goldmark v1.1.30/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= | |||
github.com/yuin/goldmark v1.4.5/go.mod h1:rmuwmfZ0+bvzB24eSC//bk1R1Zp3hM0OXYv/G2LIilg= | |||
github.com/yuin/goldmark v1.4.6/go.mod h1:rmuwmfZ0+bvzB24eSC//bk1R1Zp3hM0OXYv/G2LIilg= | |||
github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE= | |||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= | |||
github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594 h1:yHfZyN55+5dp1wG7wDKv8HQ044moxkyGq12KFFMFDxg= | |||
github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594/go.mod h1:U9ihbh+1ZN7fR5Se3daSPoz1CGF9IYtSvWwVQtnzGHU= | |||
github.com/yuin/goldmark-meta v0.0.0-20191126180153-f0638e958b60 h1:gZucqLjL1eDzVWrXj4uiWeMbAopJlBR2mKQAsTGdPwo= | |||
github.com/yuin/goldmark-meta v0.0.0-20191126180153-f0638e958b60/go.mod h1:i9VhcIHN2PxXMbQrKqXNueok6QNONoPjNMoj9MygVL0= | |||
github.com/yuin/goldmark-meta v1.1.0 h1:pWw+JLHGZe8Rk0EGsMVssiNb/AaPMHfSRszZeUeiOUc= | |||
github.com/yuin/goldmark-meta v1.1.0/go.mod h1:U4spWENafuA7Zyg+Lj5RqK/MF+ovMYtBvXi1lBb2VP0= | |||
github.com/ziutek/mymysql v1.5.4 h1:GB0qdRGsTwQSBVYuVShFBKaXSnSnYYC2d9knnE1LHFs= | |||
github.com/ziutek/mymysql v1.5.4/go.mod h1:LMSpPZ6DbqWFxNCHW77HeMg9I646SAhApZ/wKdgO/C0= | |||
go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= | |||
@@ -1086,6 +1098,8 @@ gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= | |||
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= | |||
gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= | |||
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= | |||
gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= | |||
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= | |||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= | |||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= | |||
grpc.go4.org v0.0.0-20170609214715-11d0a25b4919/go.mod h1:77eQGdRu53HpSqPFJFmuJdjuHRquDANNeA4x7B8WQ9o= | |||
@@ -1680,21 +1680,6 @@ func GetCloudbrainsNeededStopByUserID(userID int64) ([]*Cloudbrain, error) { | |||
return cloudBrains, err | |||
} | |||
func GetWaittingTop() ([]*CloudbrainInfo, error) { | |||
sess := x.NewSession() | |||
defer sess.Close() | |||
var cond = builder.NewCond() | |||
cond = cond.And( | |||
builder.Eq{"cloudbrain.status": string(JobWaiting)}, | |||
) | |||
sess.OrderBy("cloudbrain.created_unix ASC limit 1") | |||
cloudbrains := make([]*CloudbrainInfo, 0, 1) | |||
if err := sess.Table(&Cloudbrain{}).Where(cond). | |||
Find(&cloudbrains); err != nil { | |||
log.Info("find error.") | |||
} | |||
return cloudbrains, nil | |||
} | |||
func GetModelartsReDebugTaskByJobId(jobID string) ([]*Cloudbrain, error) { | |||
sess := x.NewSession() | |||
defer sess.Close() | |||
@@ -36,133 +36,6 @@ type TaskDetail struct { | |||
FlavorName string `json:"FlavorName"` | |||
} | |||
func GetDebugOnePeriodCount(beginTime time.Time, endTime time.Time) (int64, error) { | |||
countSql := "SELECT count(*) FROM " + | |||
"public.cloudbrain where created_unix >=" + strconv.FormatInt(beginTime.Unix(), 10) + | |||
" and created_unix<" + strconv.FormatInt(endTime.Unix(), 10) + | |||
" and job_type ='" + string(JobTypeDebug) + "'" + | |||
" and type='" + strconv.Itoa(TypeCloudBrainOne) + "'" | |||
return x.SQL(countSql).Count() | |||
} | |||
func GetDebugOnePeriodDuration(beginTime time.Time, endTime time.Time) (int64, error) { | |||
total, err := x.Where("created_unix >= ? And created_unix < ? And job_type = ? And type = ? ", strconv.FormatInt(beginTime.Unix(), 10), strconv.FormatInt(endTime.Unix(), 10), JobTypeDebug, TypeCloudBrainOne).SumInt(&Cloudbrain{}, "duration") | |||
if err != nil { | |||
return 0, err | |||
} | |||
return total, nil | |||
} | |||
func GetTrainOnePeriodCount(beginTime time.Time, endTime time.Time) (int64, error) { | |||
countSql := "SELECT count(*) FROM " + | |||
"public.cloudbrain where created_unix >=" + strconv.FormatInt(beginTime.Unix(), 10) + | |||
" and created_unix<" + strconv.FormatInt(endTime.Unix(), 10) + | |||
" and job_type ='" + string(JobTypeTrain) + "'" + | |||
" and type='" + strconv.Itoa(TypeCloudBrainOne) + "'" | |||
return x.SQL(countSql).Count() | |||
} | |||
func GetTrainOnePeriodDuration(beginTime time.Time, endTime time.Time) (int64, error) { | |||
total, err := x.Where("created_unix >= ? And created_unix < ? And job_type = ? And type = ? ", strconv.FormatInt(beginTime.Unix(), 10), strconv.FormatInt(endTime.Unix(), 10), JobTypeTrain, TypeCloudBrainOne).SumInt(&Cloudbrain{}, "duration") | |||
if err != nil { | |||
return 0, err | |||
} | |||
return total, nil | |||
} | |||
func GetBenchmarkOnePeriodCount(beginTime time.Time, endTime time.Time) (int64, error) { | |||
countSql := "SELECT count(*) FROM " + | |||
"public.cloudbrain where created_unix >=" + strconv.FormatInt(beginTime.Unix(), 10) + | |||
" and created_unix<" + strconv.FormatInt(endTime.Unix(), 10) + | |||
" and job_type ='" + string(JobTypeBenchmark) + "'" + | |||
" and type='" + strconv.Itoa(TypeCloudBrainOne) + "'" | |||
return x.SQL(countSql).Count() | |||
} | |||
func GetBenchmarkOnePeriodDuration(beginTime time.Time, endTime time.Time) (int64, error) { | |||
total, err := x.Where("created_unix >= ? And created_unix < ? And job_type = ? And type = ? ", strconv.FormatInt(beginTime.Unix(), 10), strconv.FormatInt(endTime.Unix(), 10), JobTypeBenchmark, TypeCloudBrainOne).SumInt(&Cloudbrain{}, "duration") | |||
if err != nil { | |||
return 0, err | |||
} | |||
return total, nil | |||
} | |||
func GetDebugTwoPeriodCount(beginTime time.Time, endTime time.Time) (int64, error) { | |||
countSql := "SELECT count(*) FROM " + | |||
"public.cloudbrain where created_unix >=" + strconv.FormatInt(beginTime.Unix(), 10) + | |||
" and created_unix<" + strconv.FormatInt(endTime.Unix(), 10) + | |||
" and job_type ='" + string(JobTypeDebug) + "'" + | |||
" and type='" + strconv.Itoa(TypeCloudBrainTwo) + "'" | |||
return x.SQL(countSql).Count() | |||
} | |||
func GetDebugTwoPeriodDuration(beginTime time.Time, endTime time.Time) (int64, error) { | |||
total, err := x.Where("created_unix >= ? And created_unix < ? And job_type = ? And type = ? ", strconv.FormatInt(beginTime.Unix(), 10), strconv.FormatInt(endTime.Unix(), 10), JobTypeDebug, TypeCloudBrainTwo).SumInt(&Cloudbrain{}, "duration") | |||
if err != nil { | |||
return 0, err | |||
} | |||
return total, nil | |||
} | |||
func GetTrainTwoPeriodCount(beginTime time.Time, endTime time.Time) (int64, error) { | |||
countSql := "SELECT count(*) FROM " + | |||
"public.cloudbrain where created_unix >=" + strconv.FormatInt(beginTime.Unix(), 10) + | |||
" and created_unix<" + strconv.FormatInt(endTime.Unix(), 10) + | |||
" and job_type ='" + string(JobTypeTrain) + "'" + | |||
" and type='" + strconv.Itoa(TypeCloudBrainTwo) + "'" | |||
return x.SQL(countSql).Count() | |||
} | |||
func GetTrainTwoPeriodDuration(beginTime time.Time, endTime time.Time) (int64, error) { | |||
total, err := x.Where("created_unix >= ? And created_unix < ? And job_type = ? And type = ? ", strconv.FormatInt(beginTime.Unix(), 10), strconv.FormatInt(endTime.Unix(), 10), JobTypeTrain, TypeCloudBrainTwo).SumInt(&Cloudbrain{}, "duration") | |||
if err != nil { | |||
return 0, err | |||
} | |||
return total, nil | |||
} | |||
func GetInferenceTwoPeriodCount(beginTime time.Time, endTime time.Time) (int64, error) { | |||
countSql := "SELECT count(*) FROM " + | |||
"public.cloudbrain where created_unix >=" + strconv.FormatInt(beginTime.Unix(), 10) + | |||
" and created_unix<" + strconv.FormatInt(endTime.Unix(), 10) + | |||
" and job_type ='" + string(JobTypeInference) + "'" + | |||
" and type='" + strconv.Itoa(TypeCloudBrainTwo) + "'" | |||
return x.SQL(countSql).Count() | |||
} | |||
func GetInferenceTwoPeriodDuration(beginTime time.Time, endTime time.Time) (int64, error) { | |||
total, err := x.Where("created_unix >= ? And created_unix < ? And job_type = ? And type = ? ", strconv.FormatInt(beginTime.Unix(), 10), strconv.FormatInt(endTime.Unix(), 10), JobTypeInference, TypeCloudBrainTwo).SumInt(&Cloudbrain{}, "duration") | |||
if err != nil { | |||
return 0, err | |||
} | |||
return total, nil | |||
} | |||
func GetCloudBrainOnePeriodCount(beginTime time.Time, endTime time.Time) (int64, error) { | |||
countSql := "SELECT count(*) FROM " + | |||
"public.cloudbrain where created_unix >=" + strconv.FormatInt(beginTime.Unix(), 10) + | |||
" and created_unix<" + strconv.FormatInt(endTime.Unix(), 10) + | |||
" and type='" + strconv.Itoa(TypeCloudBrainOne) + "'" | |||
return x.SQL(countSql).Count() | |||
} | |||
func GetCloudBrainOnePeriodDuration(beginTime time.Time, endTime time.Time) (int64, error) { | |||
total, err := x.Where("created_unix >= ? And created_unix < ? And type = ? ", strconv.FormatInt(beginTime.Unix(), 10), strconv.FormatInt(endTime.Unix(), 10), TypeCloudBrainOne).SumInt(&Cloudbrain{}, "duration") | |||
if err != nil { | |||
return 0, err | |||
} | |||
return total, nil | |||
} | |||
func GetCloudBrainTwoPeriodCount(beginTime time.Time, endTime time.Time) (int64, error) { | |||
countSql := "SELECT count(*) FROM " + | |||
"public.cloudbrain where created_unix >=" + strconv.FormatInt(beginTime.Unix(), 10) + | |||
" and created_unix<" + strconv.FormatInt(endTime.Unix(), 10) + | |||
" and type='" + strconv.Itoa(TypeCloudBrainTwo) + "'" | |||
return x.SQL(countSql).Count() | |||
} | |||
func GetCloudBrainTwoPeriodDuration(beginTime time.Time, endTime time.Time) (int64, error) { | |||
total, err := x.Where("created_unix >= ? And created_unix < ? And type = ? ", strconv.FormatInt(beginTime.Unix(), 10), strconv.FormatInt(endTime.Unix(), 10), TypeCloudBrainTwo).SumInt(&Cloudbrain{}, "duration") | |||
if err != nil { | |||
return 0, err | |||
} | |||
return total, nil | |||
} | |||
func GetTodayCreatorCount(beginTime time.Time, endTime time.Time) (int64, error) { | |||
countSql := "SELECT count(distinct user_id) FROM " + | |||
"public.cloudbrain where created_unix >=" + strconv.FormatInt(beginTime.Unix(), 10) + | |||
@@ -211,6 +84,22 @@ func GetAllStatusCloudBrain() map[string]int { | |||
return cloudBrainStatusResult | |||
} | |||
func GetWaittingTop() ([]*CloudbrainInfo, error) { | |||
sess := x.NewSession() | |||
defer sess.Close() | |||
var cond = builder.NewCond() | |||
cond = cond.And( | |||
builder.Eq{"cloudbrain.status": string(JobWaiting)}, | |||
) | |||
sess.OrderBy("cloudbrain.created_unix ASC limit 10") | |||
cloudbrains := make([]*CloudbrainInfo, 0, 10) | |||
if err := sess.Table(&Cloudbrain{}).Where(cond). | |||
Find(&cloudbrains); err != nil { | |||
log.Info("find error.") | |||
} | |||
return cloudbrains, nil | |||
} | |||
func GetRunningTop() ([]*CloudbrainInfo, error) { | |||
sess := x.NewSession() | |||
defer sess.Close() | |||
@@ -2250,6 +2250,18 @@ func CheckRepoStats(ctx context.Context) error { | |||
"UPDATE `repository` SET num_stars=(SELECT COUNT(*) FROM `star` WHERE repo_id=?) WHERE id=?", | |||
"repository count 'num_stars'", | |||
}, | |||
//Repository.NumIssues | |||
{ | |||
"SELECT repo.id FROM `repository` repo WHERE repo.num_issues!=(SELECT COUNT(*) FROM `issue` WHERE repo_id=repo.id AND is_pull=false)", | |||
"UPDATE `repository` SET num_issues=(SELECT COUNT(*) FROM `issue` WHERE repo_id=? AND is_pull=false) WHERE id=?", | |||
"repository count 'num_issues'", | |||
}, | |||
//Repository.NumPulls | |||
{ | |||
"SELECT repo.id FROM `repository` repo WHERE repo.num_pulls!=(SELECT COUNT(*) FROM `issue` WHERE repo_id=repo.id AND is_pull=true)", | |||
"UPDATE `repository` SET num_pulls=(SELECT COUNT(*) FROM `issue` WHERE repo_id=? AND is_pull=true) WHERE id=?", | |||
"repository count 'num_pulls'", | |||
}, | |||
// Label.NumIssues | |||
{ | |||
"SELECT label.id FROM `label` WHERE label.num_issues!=(SELECT COUNT(*) FROM `issue_label` WHERE label_id=label.id)", | |||
@@ -1768,7 +1768,6 @@ func (opts *SearchUserOptions) toConds() builder.Cond { | |||
if !opts.IsActive.IsNone() { | |||
cond = cond.And(builder.Eq{"is_active": opts.IsActive.IsTrue()}) | |||
} | |||
return cond | |||
} | |||
@@ -1780,12 +1779,15 @@ func SearchUsers(opts *SearchUserOptions) (users []*User, _ int64, _ error) { | |||
if err != nil { | |||
return nil, 0, fmt.Errorf("Count: %v", err) | |||
} | |||
orderby := opts.OrderBy.String() | |||
if len(opts.OrderBy) == 0 { | |||
opts.OrderBy = SearchOrderByAlphabetically | |||
orderby = SearchOrderByAlphabetically.String() | |||
lowerKeyword := strings.ToLower(opts.Keyword) | |||
if len(opts.Keyword) > 0 { | |||
orderby = " CASE when lower_name='" + lowerKeyword + "' then 0 when strpos(lower_name,'" + lowerKeyword + "')>0 then 1 else 2 END ASC" | |||
} | |||
} | |||
sess := x.Where(cond).OrderBy(opts.OrderBy.String()) | |||
sess := x.Where(cond).OrderBy(orderby) | |||
if opts.Page != 0 { | |||
sess = opts.setSessionPagination(sess) | |||
} | |||
@@ -412,7 +412,16 @@ func QueryUserStaticDataAll(opts *UserBusinessAnalysisQueryOptions) ([]*UserBusi | |||
func QueryDataForUserDefineFromDb(opts *UserBusinessAnalysisQueryOptions, key string) ([]*UserBusinessAnalysis, int64) { | |||
statictisSess := xStatistic.NewSession() | |||
defer statictisSess.Close() | |||
cond := "data_date='" + key + "'" | |||
var cond = builder.NewCond() | |||
cond = cond.And( | |||
builder.Eq{"data_date": key}, | |||
) | |||
if len(opts.UserName) > 0 { | |||
cond = cond.And( | |||
builder.Like{"name", opts.UserName}, | |||
) | |||
} | |||
allCount, err := statictisSess.Where(cond).Count(new(UserBusinessAnalysis)) | |||
if err == nil { | |||
if allCount > 0 { | |||
@@ -7,6 +7,7 @@ package markdown | |||
import ( | |||
"bytes" | |||
"strings" | |||
"sync" | |||
"code.gitea.io/gitea/modules/log" | |||
@@ -14,6 +15,8 @@ import ( | |||
"code.gitea.io/gitea/modules/markup/common" | |||
"code.gitea.io/gitea/modules/setting" | |||
giteautil "code.gitea.io/gitea/modules/util" | |||
chromahtml "github.com/alecthomas/chroma/formatters/html" | |||
highlighting "github.com/yuin/goldmark-highlighting" | |||
"github.com/yuin/goldmark" | |||
meta "github.com/yuin/goldmark-meta" | |||
@@ -42,16 +45,48 @@ func NewGiteaParseContext(urlPrefix string, isWiki bool) parser.Context { | |||
func RenderRaw(body []byte, urlPrefix string, wikiMarkdown bool) []byte { | |||
once.Do(func() { | |||
converter = goldmark.New( | |||
goldmark.WithExtensions(extension.Table, | |||
goldmark.WithExtensions( | |||
extension.NewTable( | |||
extension.WithTableCellAlignMethod(extension.TableCellAlignAttribute)), | |||
extension.Strikethrough, | |||
extension.TaskList, | |||
extension.DefinitionList, | |||
common.FootnoteExtension, | |||
extension.NewTypographer( | |||
extension.WithTypographicSubstitutions(extension.TypographicSubstitutions{ | |||
extension.EnDash: nil, | |||
extension.EmDash: nil, | |||
extension.Ellipsis: nil, | |||
highlighting.NewHighlighting( | |||
highlighting.WithFormatOptions( | |||
chromahtml.WithClasses(true), | |||
chromahtml.PreventSurroundingPre(true), | |||
), | |||
highlighting.WithWrapperRenderer(func(w util.BufWriter, c highlighting.CodeBlockContext, entering bool) { | |||
if entering { | |||
language, _ := c.Language() | |||
if language == nil { | |||
language = []byte("text") | |||
} | |||
languageStr := string(language) | |||
preClasses := []string{"code-block"} | |||
if languageStr == "mermaid" { | |||
preClasses = append(preClasses, "is-loading") | |||
} | |||
_, err := w.WriteString(`<pre class="` + strings.Join(preClasses, " ") + `">`) | |||
if err != nil { | |||
return | |||
} | |||
// include language-x class as part of commonmark spec | |||
_, err = w.WriteString(`<code class="chroma language-` + string(language) + `">`) | |||
if err != nil { | |||
return | |||
} | |||
} else { | |||
_, err := w.WriteString("</code></pre>") | |||
if err != nil { | |||
return | |||
} | |||
} | |||
}), | |||
), | |||
meta.Meta, | |||
@@ -179,30 +179,82 @@ func GetOneLevelAllObjectUnderDirMinio(bucket string, prefixRootPath string, rel | |||
output, err := core.ListObjects(bucket, Prefix, "", "", 1000) | |||
fileInfos := make([]FileInfo, 0) | |||
prefixLen := len(Prefix) | |||
fileMap := make(map[string]bool, 0) | |||
if err == nil { | |||
for _, val := range output.Contents { | |||
log.Info("val key=" + val.Key) | |||
var isDir bool | |||
var fileName string | |||
if val.Key == Prefix { | |||
continue | |||
} | |||
if strings.HasSuffix(val.Key, "/") { | |||
fileName = val.Key[prefixLen:] | |||
log.Info("fileName =" + fileName) | |||
files := strings.Split(fileName, "/") | |||
if fileMap[files[0]] { | |||
continue | |||
} else { | |||
fileMap[files[0]] = true | |||
} | |||
ParenDir := relativePath | |||
fileName = files[0] | |||
if len(files) > 1 { | |||
isDir = true | |||
fileName = val.Key[prefixLen : len(val.Key)-1] | |||
relativePath += val.Key[prefixLen:] | |||
ParenDir += fileName + "/" | |||
} else { | |||
isDir = false | |||
fileName = val.Key[prefixLen:] | |||
} | |||
fileInfo := FileInfo{ | |||
ModTime: val.LastModified.Local().Format("2006-01-02 15:04:05"), | |||
FileName: fileName, | |||
Size: val.Size, | |||
IsDir: isDir, | |||
ParenDir: relativePath, | |||
ParenDir: ParenDir, | |||
} | |||
fileInfos = append(fileInfos, fileInfo) | |||
// log.Info("val key=" + val.Key) | |||
// var isDir bool | |||
// var fileName string | |||
// if val.Key == Prefix { | |||
// continue | |||
// } | |||
// fileName = val.Key[prefixLen:] | |||
// log.Info("fileName =" + fileName) | |||
// files := strings.Split(fileName, "/") | |||
// if fileMap[files[0]] { | |||
// continue | |||
// } else { | |||
// fileMap[files[0]] = true | |||
// } | |||
// ParenDir := relativePath | |||
// fileName = files[0] | |||
// if len(files) > 1 { | |||
// isDir = true | |||
// ParenDir += fileName + "/" | |||
// } else { | |||
// isDir = false | |||
// } | |||
// // if strings.HasSuffix(val.Key, "/") { | |||
// // isDir = true | |||
// // fileName = val.Key[prefixLen : len(val.Key)-1] | |||
// // relativePath += val.Key[prefixLen:] | |||
// // } else { | |||
// // isDir = false | |||
// // fileName = val.Key[prefixLen:] | |||
// // } | |||
// fileInfo := FileInfo{ | |||
// ModTime: val.LastModified.Local().Format("2006-01-02 15:04:05"), | |||
// FileName: fileName, | |||
// Size: val.Size, | |||
// IsDir: isDir, | |||
// ParenDir: relativePath, | |||
// } | |||
// fileInfos = append(fileInfos, fileInfo) | |||
} | |||
return fileInfos, err | |||
} else { | |||
@@ -395,29 +395,6 @@ func GetOneLevelAllObjectUnderDir(bucket string, prefixRootPath string, relative | |||
} else { | |||
isDir = false | |||
} | |||
// if strings.Contains(val.Key[prefixLen:len(val.Key)-1], "/") { | |||
// files := strings.Split(fileName, "/") | |||
// fileName = files[0] | |||
// isDir = true | |||
// if fileMap[files[0]] { | |||
// continue | |||
// } else { | |||
// fileMap[files[0]] = true | |||
// } | |||
// } else { | |||
// if strings.HasSuffix(val.Key, "/") { | |||
// isDir = true | |||
// fileName = val.Key[prefixLen : len(val.Key)-1] | |||
// relativePath += val.Key[prefixLen:] | |||
// } else { | |||
// isDir = false | |||
// fileName = val.Key[prefixLen:] | |||
// } | |||
// fileMap[fileName] = true | |||
// } | |||
fileInfo := FileInfo{ | |||
ModTime: val.LastModified.Local().Format("2006-01-02 15:04:05"), | |||
FileName: fileName, | |||
@@ -252,10 +252,10 @@ page_dev_env_desc2_title=Model Management and Sharing | |||
page_dev_env_desc2_desc=Associate the model with the code version, you can adjust the model in different ways based on the historical version of the code and save the results. The trained model can be open and shared, so that more people can use the model to test and give feedback. | |||
page_dev_env_desc3_title=Once Configuration, Multiple Reuse | |||
page_dev_env_desc3_desc=Provide execution environment sharing, Once Configuration, Multiple Reuse. Lower the threshold of model development, and avoid spending repetitive time configuring complex environments. | |||
page_dev_yunlao=PengCheng Cloudbrain Open Source Collaboration | |||
page_dev_yunlao_desc1=The platform has been connected with Pengcheng Cloudbrain and can use the rich computing resources of Pengcheng Cloudbrain to complete AI development tasks. | |||
page_dev_yunlao_desc2=Pengcheng Cloudbrain's existing AI computing power is 100p FLOPS@FP16 (billions of half precision floating-point calculations per second), the main hardware infrastructure is composed of GPU server equipped with NVIDIA Tesla V100 and Atlas 900 AI cluster equipped with Kunpeng and Ascend processors. | |||
page_dev_yunlao_desc3=Developers can freely choose the corresponding computing resources according to their needs, and can test the adaptability, performance, stability of the model in different hardware environments. | |||
page_dev_yunlao=OpenI AI Collaboration Platform | |||
page_dev_yunlao_desc1=OpenI AI collaboration platform has been connected with Pengcheng CloudBrain and China computing network (c2net) in phase I, and can use the rich computing resources of Pengcheng CloudBrain and China computing network to complete AI development tasks. | |||
page_dev_yunlao_desc2=Pengcheng CloudBrain's existing AI computing power is 100p FLOPS@FP16 (billions of half precision floating-point calculations per second), the main hardware infrastructure is composed of GPU servers equipped with NVIDIA Tesla V100 and A100, and Atlas 900 AI clusters equipped with Kunpeng and Ascend processors; China computing network (c2net) phase I can realize the high-speed network interconnection between different AI computing centers, realize the reasonable scheduling of computing power and the flexible allocation of resources. At present, it has been connected to 11 intelligent computing centers, with a total scale of 1924p. | |||
page_dev_yunlao_desc3=OpenI AI collaboration platform has been connected to Pengcheng Cloud Computing Institute, Chengdu Intelligent Computing Center, Zhongyuan Intelligent Computing Center, Hefei Brain-inspired Intelligence Center and other nodes. Developers can freely choose the corresponding computing resources according to their use needs, and can test the adaptability, performance, stability, etc. of the model in different hardware environments. | |||
page_dev_yunlao_desc4=If your model requires more computing resources, you can also apply for it separately. | |||
page_dev_yunlao_apply=Apply Separately | |||
@@ -3142,5 +3142,6 @@ Not_Stopped=The job is not stopped, can not be deleted. | |||
Already_stopped=The job is already stopped. | |||
Stopped_failed=Fail to stop the job, please try again later. | |||
Stopped_success_update_status_fail=Succeeded in stopping the job, but failed to update the job status and duration time. | |||
load_code_failed=Fail to load code, please check if the right branch is selected. | |||
error.dataset_select = dataset select error: the count exceeds the limit or a dataset with the same name exists |
@@ -254,11 +254,11 @@ page_dev_env_desc2_title=模型管理与共享 | |||
page_dev_env_desc2_desc=将模型与代码版本建立关联,可以基于代码历史版本,使用不同的方式调整模型,并将结果保存下来;训练好的模型可以开放共享,让更多人的使用模型测试并提出反馈 | |||
page_dev_env_desc3_title=一次配置,多次使用 | |||
page_dev_env_desc3_desc=提供执行环境共享,一次配置,多次使用,降低模型开发门槛,避免花费重复的时间配置复杂的环境 | |||
page_dev_yunlao=鹏城云脑开源协同 | |||
page_dev_yunlao_desc1=平台已经与鹏城云脑打通,可以利用鹏城云脑的丰富算力资源,完成AI开发任务 | |||
page_dev_yunlao_desc2=鹏城云脑现有AI算力100P FLOPS@FP16(每秒十亿亿次半精度浮点计算),主要硬件基础设施由搭载英伟达Tesla V100 的GPU服务器和搭载鲲鹏、昇腾处理器的Atlas 900 AI集群构成 | |||
page_dev_yunlao_desc3=开发者可以根据使用需求,自由选择相应计算资源,可以测试模型在不同硬件环境下的适配能力、性能、稳定性等 | |||
page_dev_yunlao_desc4=如果您的模型需要更多的计算资源,也可以单独申请 | |||
page_dev_yunlao=启智AI协作平台 | |||
page_dev_yunlao_desc1=启智AI协作平台已经与鹏城云脑、中国算力网(C2Net)一期打通,可以利用鹏城云脑和中国算力网的丰富算力资源,完成AI开发任务。 | |||
page_dev_yunlao_desc2=鹏城云脑现有AI算力100P FLOPS@FP16(每秒十亿亿次半精度浮点计算),主要硬件基础设施由搭载英伟达Tesla V100 和A100 的GPU服务器,以及搭载鲲鹏、昇腾处理器的Atlas 900 AI集群构成;中国算力网(C2Net)一期可实现不同人工智能计算中心之间高速网络互联,实现算力合理调度和资源弹性分配,目前已接入11家智算中心,算力总规模1924P。 | |||
page_dev_yunlao_desc3=启智AI协作平台已接入其中的鹏城云计算所、成都智算中心、中原智算中心、合肥类脑等节点,开发者可以根据使用需求,自由选择相应计算资源,可以测试模型在不同硬件环境下的适配能力、性能、稳定性等。 | |||
page_dev_yunlao_desc4=如果您的模型需要更多的计算资源,也可以单独申请。 | |||
page_dev_yunlao_apply=单独申请 | |||
search=搜索 | |||
@@ -3157,6 +3157,7 @@ Not_Stopped=任务还未终止,不能删除。 | |||
Already_stopped=任务已停止。 | |||
Stopped_failed=任务停止失败,请稍后再试。 | |||
Stopped_success_update_status_fail=任务停止成功,状态及运行时间更新失败。 | |||
load_code_failed=代码加载失败,请确认选择了正确的分支。 | |||
error.dataset_select = 数据集选择错误:数量超过限制或者有同名数据集 |
@@ -123,13 +123,13 @@ function loadimg(uuid,filename){ | |||
function loadimg(){ | |||
var length = labeltastresult[fileindex].pic_image_field.length; | |||
if(labeltastresult[fileindex].pic_image_field.substring(length - 5) == ".json" | |||
|| labeltastresult[fileindex].pic_image_field.substring(length - 4) == ".xml" | |||
|| labeltastresult[fileindex].pic_image_field.substring(length - 4) == ".txt" | |||
|| labeltastresult[fileindex].pic_image_field.substring(length - 4) == ".csv" | |||
|| labeltastresult[fileindex].pic_image_field.substring(length - 3) == ".md" | |||
|| labeltastresult[fileindex].pic_image_field.substring(length - 3) == ".py" | |||
|| labeltastresult[fileindex].pic_image_field.substring(length - 3) == ".sh"){ | |||
if(labeltastresult[fileindex].pic_image_field.substring(length - 5).toLowerCase() == ".json" | |||
|| labeltastresult[fileindex].pic_image_field.substring(length - 4).toLowerCase() == ".xml" | |||
|| labeltastresult[fileindex].pic_image_field.substring(length - 4).toLowerCase() == ".txt" | |||
|| labeltastresult[fileindex].pic_image_field.substring(length - 4).toLowerCase() == ".csv" | |||
|| labeltastresult[fileindex].pic_image_field.substring(length - 3).toLowerCase() == ".md" | |||
|| labeltastresult[fileindex].pic_image_field.substring(length - 3).toLowerCase() == ".py" | |||
|| labeltastresult[fileindex].pic_image_field.substring(length - 3).toLowerCase() == ".sh"){ | |||
//文本 | |||
canvas.style.display="none"; | |||
@@ -138,11 +138,11 @@ function loadimg(){ | |||
$('#textcontent').height(canvas.height-40) | |||
$("#textcontent").text(textContent); | |||
}else{ | |||
if(labeltastresult[fileindex].pic_image_field.substring(length - 5) == ".jpeg" | |||
|| labeltastresult[fileindex].pic_image_field.substring(length - 4) == ".jpg" | |||
|| labeltastresult[fileindex].pic_image_field.substring(length - 4) == ".bmp" | |||
|| labeltastresult[fileindex].pic_image_field.substring(length - 4) == ".gif" | |||
|| labeltastresult[fileindex].pic_image_field.substring(length - 4) == ".png"){ | |||
if(labeltastresult[fileindex].pic_image_field.substring(length - 5).toLowerCase() == ".jpeg" | |||
|| labeltastresult[fileindex].pic_image_field.substring(length - 4).toLowerCase() == ".jpg" | |||
|| labeltastresult[fileindex].pic_image_field.substring(length - 4).toLowerCase() == ".bmp" | |||
|| labeltastresult[fileindex].pic_image_field.substring(length - 4).toLowerCase() == ".gif" | |||
|| labeltastresult[fileindex].pic_image_field.substring(length - 4).toLowerCase() == ".png"){ | |||
canvas.style.display="block"; | |||
document.getElementById("textcontent").style.display="none"; | |||
img.src = ip + "/getgiteaimage?uuid=" + dataset_id + "&filename=" + labeltastresult[fileindex].pic_image_field; | |||
@@ -328,12 +328,12 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) { | |||
if branchName == "" { | |||
branchName = cloudbrain.DefaultBranchName | |||
} | |||
downloadCode(repo, codePath, branchName) | |||
uploadCodeToMinio(codePath+"/", jobName, cloudbrain.CodeMountPath+"/") | |||
modelPath := setting.JobPath + jobName + cloudbrain.ModelMountPath + "/" | |||
mkModelPath(modelPath) | |||
uploadCodeToMinio(modelPath, jobName, cloudbrain.ModelMountPath+"/") | |||
errStr = loadCodeAndMakeModelPath(repo, codePath, branchName, jobName, cloudbrain.ModelMountPath) | |||
if errStr != "" { | |||
cloudBrainNewDataPrepare(ctx) | |||
ctx.RenderWithErr(ctx.Tr(errStr), tpl, &form) | |||
return | |||
} | |||
commitID, _ := ctx.Repo.GitRepo.GetBranchCommitID(branchName) | |||
@@ -378,6 +378,30 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) { | |||
} | |||
} | |||
func loadCodeAndMakeModelPath(repo *models.Repository, codePath string, branchName string, jobName string, resultPath string) string { | |||
err := downloadCode(repo, codePath, branchName) | |||
if err != nil { | |||
return "cloudbrain.load_code_failed" | |||
} | |||
err = uploadCodeToMinio(codePath+"/", jobName, cloudbrain.CodeMountPath+"/") | |||
if err != nil { | |||
return "cloudbrain.load_code_failed" | |||
} | |||
modelPath := setting.JobPath + jobName + resultPath + "/" | |||
err = mkModelPath(modelPath) | |||
if err != nil { | |||
return "cloudbrain.load_code_failed" | |||
} | |||
err = uploadCodeToMinio(modelPath, jobName, resultPath+"/") | |||
if err != nil { | |||
return "cloudbrain.load_code_failed" | |||
} | |||
return "" | |||
} | |||
func CloudBrainInferenceJobCreate(ctx *context.Context, form auth.CreateCloudBrainInferencForm) { | |||
ctx.Data["PageIsCloudBrain"] = true | |||
displayJobName := form.DisplayJobName | |||
@@ -444,11 +468,12 @@ func CloudBrainInferenceJobCreate(ctx *context.Context, form auth.CreateCloudBra | |||
if branchName == "" { | |||
branchName = cloudbrain.DefaultBranchName | |||
} | |||
downloadCode(repo, codePath, branchName) | |||
uploadCodeToMinio(codePath+"/", jobName, cloudbrain.CodeMountPath+"/") | |||
resultPath := setting.JobPath + jobName + cloudbrain.ResultPath + "/" | |||
mkResultPath(resultPath) | |||
uploadCodeToMinio(resultPath, jobName, cloudbrain.ResultPath+"/") | |||
errStr := loadCodeAndMakeModelPath(repo, codePath, branchName, jobName, cloudbrain.ResultPath) | |||
if errStr != "" { | |||
cloudBrainNewDataPrepare(ctx) | |||
ctx.RenderWithErr(ctx.Tr(errStr), tpl, &form) | |||
return | |||
} | |||
commitID, _ := ctx.Repo.GitRepo.GetBranchCommitID(branchName) | |||
@@ -886,7 +911,7 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo | |||
} | |||
} | |||
ctx.Data["datasetDownload"] = GetCloudBrainDataSetInfo(task.Uuid, false) | |||
ctx.Data["task"] = task | |||
labelName := strings.Fields(task.LabelName) | |||
ctx.Data["LabelName"] = labelName | |||
@@ -1664,11 +1689,7 @@ func uploadCodeToMinio(codePath, jobName, parentDir string) error { | |||
} | |||
func mkModelPath(modelPath string) error { | |||
return mkPathAndReadMeFile(modelPath, "You can put the model file into this directory and download it by the web page.") | |||
} | |||
func mkResultPath(resultPath string) error { | |||
return mkPathAndReadMeFile(resultPath, "You can put the result file into this directory and download it by the web page.") | |||
return mkPathAndReadMeFile(modelPath, "You can put the files into this directory and download the files by the web page.") | |||
} | |||
func mkPathAndReadMeFile(path string, text string) error { | |||
@@ -3,6 +3,7 @@ package repo | |||
import ( | |||
"encoding/json" | |||
"errors" | |||
"fmt" | |||
"io/ioutil" | |||
"net/http" | |||
"os" | |||
@@ -45,8 +46,7 @@ func GrampusTrainJobGPUNew(ctx *context.Context) { | |||
ctx.ServerError("get new train-job info failed", err) | |||
return | |||
} | |||
waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeC2Net, models.GPUResource, models.JobTypeTrain) | |||
ctx.Data["WaitCount"] = waitCount | |||
ctx.HTML(http.StatusOK, tplGrampusTrainJobGPUNew) | |||
} | |||
@@ -57,8 +57,6 @@ func GrampusTrainJobNPUNew(ctx *context.Context) { | |||
ctx.ServerError("get new train-job info failed", err) | |||
return | |||
} | |||
waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeC2Net, models.NPUResource, models.JobTypeTrain) | |||
ctx.Data["WaitCount"] = waitCount | |||
ctx.HTML(200, tplGrampusTrainJobNPUNew) | |||
} | |||
@@ -131,8 +129,12 @@ func grampusTrainJobNewDataPrepare(ctx *context.Context, processType string) err | |||
if processType == grampus.ProcessorTypeGPU { | |||
ctx.Data["datasetType"] = models.TypeCloudBrainOne | |||
waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeC2Net, models.GPUResource, models.JobTypeTrain) | |||
ctx.Data["WaitCount"] = waitCount | |||
} else if processType == grampus.ProcessorTypeNPU { | |||
ctx.Data["datasetType"] = models.TypeCloudBrainTwo | |||
waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeC2Net, models.NPUResource, models.JobTypeTrain) | |||
ctx.Data["WaitCount"] = waitCount | |||
} | |||
return nil | |||
@@ -280,7 +282,7 @@ func GrampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain | |||
if err := downloadZipCode(ctx, codeLocalPath, branchName); err != nil { | |||
log.Error("downloadZipCode failed, server timed out: %s (%v)", repo.FullName(), err, ctx.Data["MsgID"]) | |||
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU) | |||
ctx.RenderWithErr("Create task failed, internal error", tplGrampusTrainJobGPUNew, &form) | |||
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplGrampusTrainJobGPUNew, &form) | |||
return | |||
} | |||
@@ -289,7 +291,7 @@ func GrampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain | |||
if err := uploadCodeToMinio(codeLocalPath+"/", jobName, cloudbrain.CodeMountPath+"/"); err != nil { | |||
log.Error("Failed to uploadCodeToMinio: %s (%v)", repo.FullName(), err, ctx.Data["MsgID"]) | |||
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU) | |||
ctx.RenderWithErr("Create task failed, internal error", tplGrampusTrainJobGPUNew, &form) | |||
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplGrampusTrainJobGPUNew, &form) | |||
return | |||
} | |||
@@ -297,7 +299,7 @@ func GrampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain | |||
if err := mkModelPath(modelPath); err != nil { | |||
log.Error("Failed to mkModelPath: %s (%v)", repo.FullName(), err, ctx.Data["MsgID"]) | |||
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU) | |||
ctx.RenderWithErr("Create task failed, internal error", tplGrampusTrainJobGPUNew, &form) | |||
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplGrampusTrainJobGPUNew, &form) | |||
return | |||
} | |||
@@ -305,7 +307,7 @@ func GrampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain | |||
if err := uploadCodeToMinio(modelPath, jobName, cloudbrain.ModelMountPath+"/"); err != nil { | |||
log.Error("Failed to uploadCodeToMinio: %s (%v)", repo.FullName(), err, ctx.Data["MsgID"]) | |||
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU) | |||
ctx.RenderWithErr("Create task failed, internal error", tplGrampusTrainJobGPUNew, &form) | |||
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplGrampusTrainJobGPUNew, &form) | |||
return | |||
} | |||
@@ -464,22 +466,22 @@ func GrampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain | |||
if err := downloadZipCode(ctx, codeLocalPath, branchName); err != nil { | |||
log.Error("downloadZipCode failed, server timed out: %s (%v)", repo.FullName(), err) | |||
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU) | |||
ctx.RenderWithErr("Create task failed, server timed out", tplGrampusTrainJobNPUNew, &form) | |||
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplGrampusTrainJobNPUNew, &form) | |||
return | |||
} | |||
//todo: upload code (send to file_server todo this work?) | |||
if err := obsMkdir(setting.CodePathPrefix + jobName + modelarts.OutputPath); err != nil { | |||
log.Error("Failed to obsMkdir_output: %s (%v)", repo.FullName(), err) | |||
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU) | |||
ctx.RenderWithErr("Failed to obsMkdir_output", tplGrampusTrainJobNPUNew, &form) | |||
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU) | |||
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplGrampusTrainJobNPUNew, &form) | |||
return | |||
} | |||
if err := uploadCodeToObs(codeLocalPath, jobName, ""); err != nil { | |||
log.Error("Failed to uploadCodeToObs: %s (%v)", repo.FullName(), err) | |||
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU) | |||
ctx.RenderWithErr("Failed to uploadCodeToObs", tplGrampusTrainJobNPUNew, &form) | |||
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplGrampusTrainJobNPUNew, &form) | |||
return | |||
} | |||
@@ -695,7 +697,7 @@ func GrampusTrainJobShow(ctx *context.Context) { | |||
taskList := make([]*models.Cloudbrain, 0) | |||
taskList = append(taskList, task) | |||
ctx.Data["version_list_task"] = taskList | |||
ctx.Data["datasetDownload"] = GetCloudBrainDataSetInfo(task.Uuid, false) | |||
ctx.Data["canDownload"] = cloudbrain.CanModifyJob(ctx, task) | |||
ctx.Data["displayJobName"] = task.DisplayJobName | |||
@@ -839,6 +841,9 @@ func downloadZipCode(ctx *context.Context, codePath, branchName string) error { | |||
log.Error("GetBranchCommit failed:" + err.Error()) | |||
return err | |||
} | |||
} else { | |||
log.Error("the branch is not exist: " + branchName) | |||
return fmt.Errorf("The branch does not exist.") | |||
} | |||
archivePath = path.Join(archivePath, grampus.CodeArchiveName) | |||
@@ -119,8 +119,7 @@ func MustEnableModelArts(ctx *context.Context) { | |||
func NotebookNew(ctx *context.Context) { | |||
notebookNewDataPrepare(ctx) | |||
waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeCloudBrainTwo, "") | |||
ctx.Data["WaitCount"] = waitCount | |||
ctx.HTML(200, tplModelArtsNotebookNew) | |||
} | |||
@@ -150,6 +149,9 @@ func notebookNewDataPrepare(ctx *context.Context) error { | |||
ctx.Data["datasetType"] = models.TypeCloudBrainTwo | |||
waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeCloudBrainTwo, "") | |||
ctx.Data["WaitCount"] = waitCount | |||
return nil | |||
} | |||
@@ -302,34 +304,7 @@ func NotebookShow(ctx *context.Context) { | |||
datasetDownload := make([]models.DatasetDownload, 0) | |||
if ctx.IsSigned { | |||
if task.Uuid != "" && task.UserID == ctx.User.ID { | |||
uuidList := strings.Split(task.Uuid, ";") | |||
for _, uuidStr := range uuidList { | |||
attachment, err := models.GetAttachmentByUUID(uuidStr) | |||
if err != nil { | |||
log.Error("GetAttachmentByUUID failed:%v", err.Error()) | |||
return | |||
} | |||
dataset, err := models.GetDatasetByID(attachment.DatasetID) | |||
if err != nil { | |||
log.Error("GetDatasetByID failed:%v", err.Error()) | |||
return | |||
} | |||
repo, err := models.GetRepositoryByID(dataset.RepoID) | |||
if err != nil { | |||
log.Error("GetRepositoryByID failed:%v", err.Error()) | |||
return | |||
} | |||
datasetDownload = append(datasetDownload, models.DatasetDownload{ | |||
DatasetName: attachment.Name, | |||
DatasetDownloadLink: attachment.S3DownloadURL(), | |||
RepositoryLink: repo.Link() + "/datasets", | |||
}) | |||
} | |||
// datasetName, err := GetDatasetNameByUUID(task.Uuid) | |||
// if err == nil { | |||
// task.DatasetName = datasetName | |||
// } | |||
datasetDownload = GetCloudBrainDataSetInfo(task.Uuid, true) | |||
} | |||
} | |||
user, err := models.GetUserByID(task.UserID) | |||
@@ -375,6 +350,39 @@ func NotebookShow(ctx *context.Context) { | |||
ctx.HTML(200, tplModelArtsNotebookShow) | |||
} | |||
func GetCloudBrainDataSetInfo(uuid string, isNeedDown bool) []models.DatasetDownload { | |||
datasetDownload := make([]models.DatasetDownload, 0) | |||
uuidList := strings.Split(uuid, ";") | |||
for _, uuidStr := range uuidList { | |||
attachment, err := models.GetAttachmentByUUID(uuidStr) | |||
if err != nil { | |||
log.Error("GetAttachmentByUUID failed:%v", err.Error()) | |||
return datasetDownload | |||
} | |||
dataset, err := models.GetDatasetByID(attachment.DatasetID) | |||
if err != nil { | |||
log.Error("GetDatasetByID failed:%v", err.Error()) | |||
return datasetDownload | |||
} | |||
repo, err := models.GetRepositoryByID(dataset.RepoID) | |||
if err != nil { | |||
log.Error("GetRepositoryByID failed:%v", err.Error()) | |||
return datasetDownload | |||
} | |||
url := "" | |||
if isNeedDown { | |||
url = attachment.S3DownloadURL() | |||
} | |||
datasetDownload = append(datasetDownload, models.DatasetDownload{ | |||
DatasetName: attachment.Name, | |||
DatasetDownloadLink: url, | |||
RepositoryLink: repo.Link() + "/datasets", | |||
}) | |||
} | |||
return datasetDownload | |||
} | |||
func setShowSpecBySpecialPoolConfig(ctx *context.Context, findSpec bool, task *models.Cloudbrain) { | |||
modelarts.InitSpecialPool() | |||
if modelarts.SpecialPools != nil && !findSpec { | |||
@@ -670,8 +678,6 @@ func TrainJobNew(ctx *context.Context) { | |||
ctx.ServerError("get new train-job info failed", err) | |||
return | |||
} | |||
waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeCloudBrainTwo, "") | |||
ctx.Data["WaitCount"] = waitCount | |||
ctx.HTML(200, tplModelArtsTrainJobNew) | |||
} | |||
@@ -741,6 +747,8 @@ func trainJobNewDataPrepare(ctx *context.Context) error { | |||
} | |||
ctx.Data["config_list"] = configList.ParaConfigs | |||
ctx.Data["datasetType"] = models.TypeCloudBrainTwo | |||
waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeCloudBrainTwo, "") | |||
ctx.Data["WaitCount"] = waitCount | |||
return nil | |||
} | |||
@@ -857,6 +865,8 @@ func trainJobErrorNewDataPrepare(ctx *context.Context, form auth.CreateModelArts | |||
ctx.Data["dataset_name"] = datasetNames | |||
ctx.Data["branch_name"] = form.BranchName | |||
ctx.Data["datasetType"] = models.TypeCloudBrainTwo | |||
waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeCloudBrainTwo, "") | |||
ctx.Data["WaitCount"] = waitCount | |||
return nil | |||
} | |||
@@ -868,8 +878,6 @@ func TrainJobNewVersion(ctx *context.Context) { | |||
ctx.ServerError("get new train-job info failed", err) | |||
return | |||
} | |||
waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeCloudBrainTwo, "") | |||
ctx.Data["WaitCount"] = waitCount | |||
ctx.HTML(200, tplModelArtsTrainJobVersionNew) | |||
} | |||
@@ -962,6 +970,8 @@ func trainJobNewVersionDataPrepare(ctx *context.Context) error { | |||
return err | |||
} | |||
ctx.Data["config_list"] = configList.ParaConfigs | |||
waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeCloudBrainTwo, "") | |||
ctx.Data["WaitCount"] = waitCount | |||
return nil | |||
} | |||
@@ -1053,6 +1063,8 @@ func versionErrorDataPrepare(ctx *context.Context, form auth.CreateModelArtsTrai | |||
} | |||
ctx.Data["config_list"] = configList.ParaConfigs | |||
ctx.Data["datasetType"] = models.TypeCloudBrainTwo | |||
waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeCloudBrainTwo, "") | |||
ctx.Data["WaitCount"] = waitCount | |||
return nil | |||
} | |||
@@ -1141,7 +1153,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) | |||
if err := downloadCode(repo, codeLocalPath, branch_name); err != nil { | |||
log.Error("downloadCode failed, server timed out: %s (%v)", repo.FullName(), err) | |||
trainJobErrorNewDataPrepare(ctx, form) | |||
ctx.RenderWithErr("Create task failed, server timed out", tplModelArtsTrainJobNew, &form) | |||
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplModelArtsTrainJobNew, &form) | |||
return | |||
} | |||
@@ -1165,7 +1177,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) | |||
// if err := uploadCodeToObs(codeLocalPath, jobName, parentDir); err != nil { | |||
log.Error("Failed to uploadCodeToObs: %s (%v)", repo.FullName(), err) | |||
trainJobErrorNewDataPrepare(ctx, form) | |||
ctx.RenderWithErr("Failed to uploadCodeToObs", tplModelArtsTrainJobNew, &form) | |||
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplModelArtsTrainJobNew, &form) | |||
return | |||
} | |||
@@ -1420,7 +1432,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ | |||
if err := downloadCode(repo, codeLocalPath, branch_name); err != nil { | |||
log.Error("Failed git clone repo to local(!: %s (%v)", repo.FullName(), err) | |||
versionErrorDataPrepare(ctx, form) | |||
ctx.RenderWithErr("Failed git clone repo to local!", tplModelArtsTrainJobVersionNew, &form) | |||
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplModelArtsTrainJobVersionNew, &form) | |||
return | |||
} | |||
@@ -1445,7 +1457,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ | |||
if err := uploadCodeToObs(codeLocalPath, jobName, parentDir); err != nil { | |||
log.Error("Failed to uploadCodeToObs: %s (%v)", repo.FullName(), err) | |||
versionErrorDataPrepare(ctx, form) | |||
ctx.RenderWithErr("Failed to uploadCodeToObs", tplModelArtsTrainJobVersionNew, &form) | |||
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplModelArtsTrainJobVersionNew, &form) | |||
return | |||
} | |||
@@ -1758,7 +1770,7 @@ func TrainJobShow(ctx *context.Context) { | |||
return | |||
} | |||
ctx.Data["canNewJob"] = canNewJob | |||
datasetList := make([][]models.DatasetDownload, 0) | |||
//将运行参数转化为epoch_size = 3, device_target = Ascend的格式 | |||
for i, task := range VersionListTasks { | |||
@@ -1781,7 +1793,7 @@ func TrainJobShow(ctx *context.Context) { | |||
} else { | |||
VersionListTasks[i].Parameters = "" | |||
} | |||
datasetList = append(datasetList, GetCloudBrainDataSetInfo(task.Uuid, false)) | |||
VersionListTasks[i].CanDel = cloudbrain.CanDeleteJob(ctx, &task.Cloudbrain) | |||
VersionListTasks[i].CanModify = cloudbrain.CanModifyJob(ctx, &task.Cloudbrain) | |||
} | |||
@@ -1793,6 +1805,7 @@ func TrainJobShow(ctx *context.Context) { | |||
ctx.Data["displayJobName"] = VersionListTasks[0].DisplayJobName | |||
ctx.Data["version_list_task"] = VersionListTasks | |||
ctx.Data["version_list_count"] = VersionListCount | |||
ctx.Data["datasetList"] = datasetList | |||
ctx.Data["canDownload"] = cloudbrain.CanModifyJob(ctx, &VersionListTasks[0].Cloudbrain) | |||
ctx.HTML(http.StatusOK, tplModelArtsTrainJobShow) | |||
} | |||
@@ -2084,7 +2097,7 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference | |||
if err := downloadCode(repo, codeLocalPath, branch_name); err != nil { | |||
log.Error("Create task failed, server timed out: %s (%v)", repo.FullName(), err) | |||
inferenceJobErrorNewDataPrepare(ctx, form) | |||
ctx.RenderWithErr("Create task failed, server timed out", tplModelArtsInferenceJobNew, &form) | |||
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplModelArtsInferenceJobNew, &form) | |||
return | |||
} | |||
@@ -2106,7 +2119,7 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference | |||
if err := uploadCodeToObs(codeLocalPath, jobName, ""); err != nil { | |||
log.Error("Failed to uploadCodeToObs: %s (%v)", repo.FullName(), err) | |||
inferenceJobErrorNewDataPrepare(ctx, form) | |||
ctx.RenderWithErr("Failed to uploadCodeToObs", tplModelArtsInferenceJobNew, &form) | |||
ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tplModelArtsInferenceJobNew, &form) | |||
return | |||
} | |||
@@ -2317,8 +2330,7 @@ func InferenceJobNew(ctx *context.Context) { | |||
ctx.ServerError("get new inference-job info failed", err) | |||
return | |||
} | |||
waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeCloudBrainTwo, "") | |||
ctx.Data["WaitCount"] = waitCount | |||
ctx.HTML(200, tplModelArtsInferenceJobNew) | |||
} | |||
func inferenceJobNewDataPrepare(ctx *context.Context) error { | |||
@@ -2389,6 +2401,8 @@ func inferenceJobNewDataPrepare(ctx *context.Context) error { | |||
}) | |||
ctx.Data["MODEL_COUNT"] = model_count | |||
ctx.Data["datasetType"] = models.TypeCloudBrainTwo | |||
waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeCloudBrainTwo, "") | |||
ctx.Data["WaitCount"] = waitCount | |||
return nil | |||
} | |||
@@ -2462,6 +2476,8 @@ func inferenceJobErrorNewDataPrepare(ctx *context.Context, form auth.CreateModel | |||
ctx.Data["ckpt_name"] = form.CkptName | |||
ctx.Data["train_url"] = form.TrainUrl | |||
ctx.Data["datasetType"] = models.TypeCloudBrainTwo | |||
waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeCloudBrainTwo, "") | |||
ctx.Data["WaitCount"] = waitCount | |||
return nil | |||
} | |||
@@ -2515,7 +2531,7 @@ func InferenceJobShow(ctx *context.Context) { | |||
ctx.Data["displayJobName"] = task.DisplayJobName | |||
ctx.Data["task"] = task | |||
ctx.Data["canDownload"] = cloudbrain.CanModifyJob(ctx, task) | |||
ctx.Data["datasetDownload"] = GetCloudBrainDataSetInfo(task.Uuid, false) | |||
tempUids := []int64{} | |||
tempUids = append(tempUids, task.UserID) | |||
JobCreater, err := models.GetUserNamesByIDs(tempUids) | |||
@@ -314,7 +314,7 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa | |||
res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "num_stars", false)...).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context()) | |||
if err == nil { | |||
esresult := makeRepoResult(res, Key, OnlyReturnNum, language) | |||
setForkRepoOrder(esresult) | |||
setForkRepoOrder(esresult, SortBy) | |||
resultObj.Total = resultObj.PrivateTotal + esresult.Total | |||
isNeedSort := false | |||
if len(resultObj.Result) > 0 { | |||
@@ -347,7 +347,10 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa | |||
} | |||
} | |||
func setForkRepoOrder(esresult *SearchRes) { | |||
func setForkRepoOrder(esresult *SearchRes, SortBy string) { | |||
if SortBy == "default" || SortBy == "" { | |||
return | |||
} | |||
forkidMap := make(map[string]int, 0) | |||
for index, re := range esresult.Result { | |||
if re["fork_id"] != nil { | |||
@@ -90,7 +90,7 @@ | |||
<div class="ui container"> | |||
<div class="ui center am-pt-30 am-pb-30"> | |||
<h2>智算网络</h2> | |||
<p><span class="ui text grey">人工智能算力网络推进联盟已接入10家智算中心,算力总规模1542P</p> | |||
<p><span class="ui text grey">人工智能算力网络推进联盟已接入11家智算中心,算力总规模1924P</p> | |||
</div> | |||
<div id="app" v-cloak> | |||
@@ -30,7 +30,7 @@ | |||
<div class="column"> | |||
<div class="ui blue small menu compact selectcloudbrain"> | |||
<a class="item" href="{{.RepoLink}}/debugjob?debugListType=all">{{$.i18n.Tr "repo.modelarts.notebook"}}</a> | |||
<a class="item" href="{{.RepoLink}}/modelarts/train-job">{{$.i18n.Tr "repo.modelarts.train_job"}}</a> | |||
<a class="item" href="{{.RepoLink}}/modelarts/train-job?listType=all">{{$.i18n.Tr "repo.modelarts.train_job"}}</a> | |||
<a class="item" href="{{.RepoLink}}/modelarts/inference-job">{{$.i18n.Tr "repo.modelarts.infer_job"}}</a> | |||
<a class="active item" href="{{.RepoLink}}/cloudbrain/benchmark">{{$.i18n.Tr "repo.modelarts.evaluate_job"}}</a> | |||
</div> | |||
@@ -468,7 +468,9 @@ | |||
<td class="ti-text-form-content"> | |||
<div class="text-span text-span-w"> | |||
{{.DatasetName}} | |||
{{range $m ,$n := $.datasetDownload}} | |||
<a href="{{.RepositoryLink}}" target="_blank">{{.DatasetName}}</a> | |||
{{end}} | |||
</div> | |||
</td> | |||
</tr> | |||
@@ -412,7 +412,9 @@ | |||
<td class="ti-text-form-content"> | |||
<div class="text-span text-span-w" | |||
id="{{.VersionName}}-BenchmarkTypeName"> | |||
{{.DatasetName}} | |||
{{range $m ,$n := $.datasetDownload}} | |||
<a href="{{.RepositoryLink}}">{{.DatasetName}}</a> | |||
{{end}} | |||
</div> | |||
</td> | |||
</tr> | |||
@@ -430,7 +430,9 @@ | |||
<td class="ti-text-form-content"> | |||
<div class="text-span text-span-w"> | |||
{{.DatasetName}} | |||
{{range $m ,$n := $.datasetDownload}} | |||
<a href="{{.RepositoryLink}}" target="_blank">{{.DatasetName}}</a> | |||
{{end}} | |||
</div> | |||
</td> | |||
</tr> | |||
@@ -135,7 +135,7 @@ | |||
<a class="active item" | |||
href="{{.RepoLink}}/debugjob?debugListType=all">{{$.i18n.Tr "repo.modelarts.notebook"}}</a> | |||
<a class="item" | |||
href="{{.RepoLink}}/modelarts/train-job">{{$.i18n.Tr "repo.modelarts.train_job"}}</a> | |||
href="{{.RepoLink}}/modelarts/train-job?listType=all">{{$.i18n.Tr "repo.modelarts.train_job"}}</a> | |||
<a class="item" | |||
href="{{.RepoLink}}/modelarts/inference-job">{{$.i18n.Tr "repo.modelarts.infer_job"}}</a> | |||
<a class="item" | |||
@@ -418,7 +418,9 @@ | |||
<td class="ti-text-form-content"> | |||
<div class="text-span text-span-w"> | |||
{{.DatasetName}} | |||
{{range $m ,$n := $.datasetDownload}} | |||
<a href="{{.RepositoryLink}}" target="_blank">{{.DatasetName}}</a> | |||
{{end}} | |||
</div> | |||
</td> | |||
</tr> | |||
@@ -23,7 +23,7 @@ | |||
<div class="column"> | |||
<div class="ui blue small menu compact selectcloudbrain"> | |||
<a class="item" href="{{.RepoLink}}/debugjob?debugListType=all">{{$.i18n.Tr "repo.modelarts.notebook"}}</a> | |||
<a class="item" href="{{.RepoLink}}/modelarts/train-job">{{$.i18n.Tr "repo.modelarts.train_job"}}</a> | |||
<a class="item" href="{{.RepoLink}}/modelarts/train-job?listType=all">{{$.i18n.Tr "repo.modelarts.train_job"}}</a> | |||
<a class="active item" href="{{.RepoLink}}/modelarts/inference-job">{{$.i18n.Tr "repo.modelarts.infer_job"}}</a> | |||
<a class="item" href="{{.RepoLink}}/cloudbrain/benchmark">{{$.i18n.Tr "repo.modelarts.evaluate_job"}}</a> | |||
</div> | |||
@@ -409,7 +409,9 @@ td, th { | |||
<td class="ti-text-form-content"> | |||
<div class="text-span text-span-w"> | |||
{{.DatasetName}} | |||
{{range $m ,$n := $.datasetDownload}} | |||
<a href="{{.RepositoryLink}}" target="_blank">{{.DatasetName}}</a> | |||
{{end}} | |||
</div> | |||
</td> | |||
</tr> | |||
@@ -439,7 +439,7 @@ | |||
<tbody> | |||
{{range $.datasetDownload}} | |||
<tr> | |||
<td style="word-wrap: break-word;word-break: break-all;"><a href="{{.RepositoryLink}}">{{.DatasetName}}</a></td> | |||
<td style="word-wrap: break-word;word-break: break-all;"><a href="{{.RepositoryLink}}" target="_blank">{{.DatasetName}}</a></td> | |||
<td style="word-wrap: break-word;word-break: break-all;">{{.DatasetDownloadLink}}</td> | |||
<td class="center aligned"><a class="ui poping up clipboard" id="clipboard-btn" data-original="{{$.i18n.Tr "repo.copy_link"}}" data-success="{{$.i18n.Tr "repo.copy_link_success"}}" data-error="{{$.i18n.Tr "repo.copy_link_error"}}" data-content="{{$.i18n.Tr "repo.copy_link"}}" data-variation="inverted tiny" data-clipboard-text="{{.DatasetDownloadLink}}">复制链接</a></td> | |||
</tr> | |||
@@ -33,7 +33,7 @@ | |||
<div class="column"> | |||
<div class="ui blue small menu compact selectcloudbrain"> | |||
<a class="item" href="{{.RepoLink}}/debugjob?debugListType=all">{{$.i18n.Tr "repo.modelarts.notebook"}}</a> | |||
<a class="active item" href="{{.RepoLink}}/modelarts/train-job">{{$.i18n.Tr "repo.modelarts.train_job"}}</a> | |||
<a class="active item" href="{{.RepoLink}}/modelarts/train-job?listType=all">{{$.i18n.Tr "repo.modelarts.train_job"}}</a> | |||
<a class="item" href="{{.RepoLink}}/modelarts/inference-job">{{$.i18n.Tr "repo.modelarts.infer_job"}}</a> | |||
<a class="item" href="{{.RepoLink}}/cloudbrain/benchmark">{{$.i18n.Tr "repo.modelarts.evaluate_job"}}</a> | |||
</div> | |||
@@ -452,7 +452,13 @@ | |||
<td class="ti-text-form-content"> | |||
<div class="text-span text-span-w"> | |||
{{.DatasetName}} | |||
{{range $m ,$n := $.datasetList}} | |||
{{if eq $k $m}} | |||
{{range $f ,$g := $n}} | |||
<a href="{{.RepositoryLink}}" target="_blank">{{.DatasetName}}</a> | |||
{{end}} | |||
{{end}} | |||
{{end}} | |||
</div> | |||
</td> | |||
</tr> | |||
@@ -125,6 +125,14 @@ | |||
</td> | |||
</tr> | |||
<tr> | |||
<td class="ti-text-form-label text-width80">训练任务</td> | |||
<td class="ti-text-form-content word-elipsis"> | |||
<a id="DisplayJobNameHref" class="title" style="font-size: 14px;"> | |||
<span id="DisplayJobName" class="fitted" style="width: 90%;vertical-align: middle;"></span> | |||
</a> | |||
</td> | |||
</tr> | |||
<tr> | |||
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.code_version"}}</td> | |||
<td class="ti-text-form-content word-elipsis"><span id="CodeBranch" title=""></span></td> | |||
</tr> | |||
@@ -197,6 +205,7 @@ | |||
{{template "base/footer" .}} | |||
<script> | |||
let url = location.href.split('show_model')[0] | |||
let trainJobUrl =url.split('modelmanage')[0] | |||
let ID = location.search.split('?name=').pop() | |||
$(document).ready(function(){ | |||
$('.secondary.menu .item').tab(); | |||
@@ -279,6 +288,10 @@ function transObj(data){ | |||
WorkServerNumber:TrainTaskInfo.WorkServerNumber || '--', | |||
Parameters:Parameters, | |||
EngineName:EngineName, | |||
DisplayJobName:TrainTaskInfo.DisplayJobName || '--', | |||
TrainJobVersionName:TrainTaskInfo.VersionName || '', | |||
CloudBrainJobID:TrainTaskInfo.JobID|| '', | |||
CloudBrainType:TrainTaskInfo.Type, | |||
} | |||
let initModelAcc = { | |||
Accuracy: modelAcc.Accuracy || '--', | |||
@@ -365,6 +378,26 @@ function renderInfo(obj,accObj,id){ | |||
$('#CodeBranch').append(html) | |||
} | |||
else if(key==="DisplayJobName"){ | |||
let type=obj["CloudBrainType"] | |||
let href="" | |||
if(type==1){ | |||
href=trainJobUrl + "modelarts/train-job/" + obj["CloudBrainJobID"] | |||
}else if(type==0){ | |||
href=trainJobUrl + "cloudbrain/train-job/" + obj["CloudBrainJobID"] | |||
}else if(type==2){ | |||
href=trainJobUrl + "grampus/train-job/" + obj["CloudBrainJobID"] | |||
} | |||
$(`#DisplayJobNameHref`).attr("href",href) | |||
$(`#DisplayJobNameHref`).attr("title",obj[key]) | |||
$(`#${key}`).text(obj[key]) | |||
let versionName = obj["TrainJobVersionName"] | |||
if(versionName!=""){ | |||
let html = `<span style="margin-left:1rem" class="ui label">${versionName}</span>` | |||
$('#DisplayJobName').append(html) | |||
} | |||
} | |||
else if(key==="Parameters"){ | |||
if(obj[key]==='--'){ | |||
$(`#${key}`).text(obj[key]) | |||
@@ -133,16 +133,79 @@ function submitDeleteForm() { | |||
$("#delete-file-form").submit() | |||
} | |||
} | |||
// URL-resolution helpers (ported from marked.js) used to make relative
// notebook image paths absolute.
const baseUrls = {}; // cache of normalized base URLs, keyed by ' ' + base
const justDomain = /^[^:]+:\/*[^/]*$/; // a bare origin, e.g. "http://host" with no path
const protocol = /^([^:]+:)[\s\S]*$/; // captures the scheme, e.g. "http:"
const domain = /^([^:]+:\/*[^/]*)[\s\S]*$/; // captures scheme + authority
const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i; // empty, absolute, or query/fragment-only URLs that need no resolving
// Strip characters from the end of `str`.
// invert falsy:  remove trailing characters equal to `c`.
// invert truthy: remove trailing characters NOT equal to `c`
//                (i.e. trim back to the last occurrence of `c`).
function rtrim(str, c, invert) {
  let end = str.length;
  while (end > 0) {
    const ch = str.charAt(end - 1);
    const isMatch = ch === c;
    // Stop as soon as a character does not qualify for trimming.
    if (invert ? isMatch : !isMatch) {
      break;
    }
    end--;
  }
  return str.slice(0, end);
}
// Resolve `href` against `base` (marked.js-style URL resolution).
// Normalized bases are memoized in the module-level `baseUrls` cache,
// keyed by ' ' + base.
function resolveUrl(base, href) {
  const cacheKey = ' ' + base;
  if (!baseUrls[cacheKey]) {
    // A bare origin gets a trailing slash; otherwise drop the last path
    // segment so relative hrefs resolve against the containing directory.
    baseUrls[cacheKey] = justDomain.test(base)
      ? base + '/'
      : rtrim(base, '/', true);
  }
  const normBase = baseUrls[cacheKey];
  // A base without ':' has no scheme, so scheme/origin substitution
  // is impossible and the href is returned untouched.
  const relativeBase = normBase.indexOf(':') === -1;

  if (href.substring(0, 2) === '//') {
    // Protocol-relative href: reuse only the base's scheme.
    return relativeBase ? href : normBase.replace(protocol, '$1') + href;
  }
  if (href.charAt(0) === '/') {
    // Root-relative href: reuse only the base's origin.
    return relativeBase ? href : normBase.replace(domain, '$1') + href;
  }
  // Plain relative href: append to the normalized base.
  return normBase + href;
}
// Render a Jupyter notebook file view: parse the JSON content, render it
// with notebook.js (`nb`), rewrite relative image URLs against the file's
// parent URL, then syntax-highlight code cells with Prism.
// NOTE(review): this span carries overlapping removed/added diff lines
// (duplicate `var` declarations, and two `baseUrl:` entries inside the
// setOptions object literal with no comma between them) — confirm against
// the actual file before treating this block as runnable as-is.
function showNoteBook(){
var isNoteBook = {{.IsNoteBook}}
var isNoteBook = {{.IsNoteBook}};
if (isNoteBook) {
var jsonStr = "{{.FileContent}}"
var jsonStr = "{{.FileContent}}";
var baseUrl={{.FileParentURL}};
nb.markdown.setOptions({
baseUrl: {{.FileParentURL}}
baseUrl: baseUrl
});
var notebook = nb.parse(JSON.parse(jsonStr));
var rendered = notebook.render();
$("#notebook").append(rendered);
// Rewrite only relative image sources; origin-independent URLs
// (absolute, empty, or query/fragment-only) are left untouched.
$("#notebook img").each(function(){
var oldSrc = $(this).attr('src');
if (!originIndependentUrl.test(oldSrc)){
var newSrc=resolveUrl(baseUrl,oldSrc);
$(this).attr('src', newSrc);
}
});
Prism.highlightAll();
}
}
@@ -22,24 +22,26 @@ | |||
{{template "user/dashboard/heatmap" .}} | |||
{{end}} | |||
{{template "user/dashboard/feeds" .}} | |||
<diV id = "activity_cont" style="display: none;"> | |||
<div class="ui placeholder segment bgtask-none padding_none line" > | |||
<div class="act_title" style="padding-left: 0px ;"> | |||
{{.i18n.Tr "home.activity"}} : | |||
{{ if eq (len .Feeds) 0 }} | |||
<diV ss="{{(len .Feeds)}}" id = "activity_cont" style="display: block;"> | |||
<div class="ui placeholder segment bgtask-none padding_none line" > | |||
<div class="act_title" style="padding-left: 0px ;"> | |||
{{.i18n.Tr "home.activity"}} : | |||
</div> | |||
<div class="ui icon header bgtask-header-pic"></div> | |||
<p class="p_hint"> | |||
{{.i18n.Tr "home.no_events"}} | |||
</p> | |||
</div> | |||
<div class="ui icon header bgtask-header-pic"></div> | |||
<p class="p_hint"> | |||
{{.i18n.Tr "home.no_events"}} | |||
</p> | |||
</div> | |||
</diV> | |||
</diV> | |||
{{ end }} | |||
</div> | |||
{{template "user/dashboard/repolist" .}} | |||
</div> | |||
</div> | |||
</div> | |||
{{template "base/footer" .}} | |||
    <script>
const {AppSubUrl, StaticUrlPrefix, csrf} = window.config; | |||
uid_ = Number((document.querySelector('meta[name=_context_uid]') || {}).content) | |||
@@ -49,11 +51,10 @@ | |||
$.getJSON(URL, (result, _textStatus, request) => { | |||
const counts_pro = request.getResponseHeader('X-Total-Count'); | |||
console.log("count:",counts_pro) | |||
if (counts_pro == 0){ | |||
if (counts_pro == 0 && {{ (len .Feeds) }} == 0) { | |||
document.getElementById("default_page").style.display = "block"; | |||
document.getElementById("activity_cont").style.display = "block" | |||
} | |||
}) | |||
}) | |||
</script> | |||
<style> | |||
@@ -0,0 +1,19 @@ | |||
# Binaries for programs and plugins | |||
*.exe | |||
*.dll | |||
*.so | |||
*.dylib | |||
/cmd/chroma/chroma | |||
# Test binary, build with `go test -c` | |||
*.test | |||
# Output of the go coverage tool, specifically when used with LiteIDE | |||
*.out | |||
# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736 | |||
.glide/ | |||
_models/ | |||
_examples/ |
@@ -0,0 +1,76 @@ | |||
run: | |||
tests: true | |||
skip-dirs: | |||
- _examples | |||
output: | |||
print-issued-lines: false | |||
linters: | |||
enable-all: true | |||
disable: | |||
- maligned | |||
- megacheck | |||
- lll | |||
- gocyclo | |||
- dupl | |||
- gochecknoglobals | |||
- funlen | |||
- godox | |||
- wsl | |||
- gomnd | |||
- gocognit | |||
- goerr113 | |||
- nolintlint | |||
- testpackage | |||
- godot | |||
- nestif | |||
- paralleltest | |||
- nlreturn | |||
- cyclop | |||
- exhaustivestruct | |||
- gci | |||
- gofumpt | |||
- errorlint | |||
- exhaustive | |||
- ifshort | |||
- wrapcheck | |||
- stylecheck | |||
linters-settings: | |||
govet: | |||
check-shadowing: true | |||
gocyclo: | |||
min-complexity: 10 | |||
dupl: | |||
threshold: 100 | |||
goconst: | |||
min-len: 8 | |||
min-occurrences: 3 | |||
forbidigo: | |||
forbid: | |||
- (Must)?NewLexer | |||
exclude_godoc_examples: false | |||
issues: | |||
max-per-linter: 0 | |||
max-same: 0 | |||
exclude-use-default: false | |||
exclude: | |||
# Captured by errcheck. | |||
- '^(G104|G204):' | |||
# Very commonly not checked. | |||
- 'Error return value of .(.*\.Help|.*\.MarkFlagRequired|(os\.)?std(out|err)\..*|.*Close|.*Flush|os\.Remove(All)?|.*printf?|os\.(Un)?Setenv). is not checked' | |||
- 'exported method (.*\.MarshalJSON|.*\.UnmarshalJSON|.*\.EntityURN|.*\.GoString|.*\.Pos) should have comment or be unexported' | |||
- 'composite literal uses unkeyed fields' | |||
- 'declaration of "err" shadows declaration' | |||
- 'should not use dot imports' | |||
- 'Potential file inclusion via variable' | |||
- 'should have comment or be unexported' | |||
- 'comment on exported var .* should be of the form' | |||
- 'at least one file in a package should have a package comment' | |||
- 'string literal contains the Unicode' | |||
- 'methods on the same type should have the same receiver name' | |||
- '_TokenType_name should be _TokenTypeName' | |||
- '`_TokenType_map` should be `_TokenTypeMap`' |
@@ -0,0 +1,37 @@ | |||
project_name: chroma | |||
release: | |||
github: | |||
owner: alecthomas | |||
name: chroma | |||
brews: | |||
- | |||
install: bin.install "chroma" | |||
env: | |||
- CGO_ENABLED=0 | |||
builds: | |||
- goos: | |||
- linux | |||
- darwin | |||
- windows | |||
goarch: | |||
- arm64 | |||
- amd64 | |||
- "386" | |||
goarm: | |||
- "6" | |||
dir: ./cmd/chroma | |||
main: . | |||
ldflags: -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.Date}} | |||
binary: chroma | |||
archives: | |||
- | |||
format: tar.gz | |||
name_template: '{{ .Binary }}-{{ .Version }}-{{ .Os }}-{{ .Arch }}{{ if .Arm }}v{{ | |||
.Arm }}{{ end }}' | |||
files: | |||
- COPYING | |||
- README* | |||
snapshot: | |||
name_template: SNAPSHOT-{{ .Commit }} | |||
checksum: | |||
name_template: '{{ .ProjectName }}-{{ .Version }}-checksums.txt' |
@@ -0,0 +1,19 @@ | |||
Copyright (C) 2017 Alec Thomas | |||
Permission is hereby granted, free of charge, to any person obtaining a copy of | |||
this software and associated documentation files (the "Software"), to deal in | |||
the Software without restriction, including without limitation the rights to | |||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies | |||
of the Software, and to permit persons to whom the Software is furnished to do | |||
so, subject to the following conditions: | |||
The above copyright notice and this permission notice shall be included in all | |||
copies or substantial portions of the Software. | |||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | |||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | |||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | |||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
SOFTWARE. |
@@ -0,0 +1,19 @@ | |||
.PHONY: chromad upload all

VERSION ?= $(shell git describe --tags --dirty --always)

all: README.md tokentype_string.go

# Regenerate the lexer table in the README whenever any lexer changes.
README.md: lexers/*/*.go
	./table.py

tokentype_string.go: types.go
	go generate

# Build the playground server as a static linux/amd64 binary.
# Fix: the variable the Go toolchain reads is CGO_ENABLED (the original
# exported CGOENABLED, which go build ignores); matches .goreleaser.yml.
chromad:
	rm -f chromad
	(export CGO_ENABLED=0 GOOS=linux GOARCH=amd64; cd ./cmd/chromad && go build -ldflags="-X 'main.version=$(VERSION)'" -o ../../chromad .)

upload: chromad
	scp chromad root@swapoff.org: && \
		ssh root@swapoff.org 'install -m755 ./chromad /srv/http/swapoff.org/bin && service chromad restart'
@@ -0,0 +1,285 @@ | |||
# Chroma — A general purpose syntax highlighter in pure Go | |||
[](https://godoc.org/github.com/alecthomas/chroma) [](https://github.com/alecthomas/chroma/actions/workflows/ci.yml) [](https://invite.slack.golangbridge.org/) | |||
> **NOTE:** As Chroma has just been released, its API is still in flux. That said, the high-level interface should not change significantly. | |||
Chroma takes source code and other structured text and converts it into syntax | |||
highlighted HTML, ANSI-coloured text, etc. | |||
Chroma is based heavily on [Pygments](http://pygments.org/), and includes | |||
translators for Pygments lexers and styles. | |||
<a id="markdown-table-of-contents" name="table-of-contents"></a> | |||
## Table of Contents | |||
<!-- TOC --> | |||
1. [Table of Contents](#table-of-contents) | |||
2. [Supported languages](#supported-languages) | |||
3. [Try it](#try-it) | |||
4. [Using the library](#using-the-library) | |||
1. [Quick start](#quick-start) | |||
2. [Identifying the language](#identifying-the-language) | |||
3. [Formatting the output](#formatting-the-output) | |||
4. [The HTML formatter](#the-html-formatter) | |||
5. [More detail](#more-detail) | |||
1. [Lexers](#lexers) | |||
2. [Formatters](#formatters) | |||
3. [Styles](#styles) | |||
6. [Command-line interface](#command-line-interface) | |||
7. [What's missing compared to Pygments?](#whats-missing-compared-to-pygments) | |||
<!-- /TOC --> | |||
<a id="markdown-supported-languages" name="supported-languages"></a> | |||
## Supported languages | |||
Prefix | Language | |||
:----: | -------- | |||
A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Angular2, ANTLR, ApacheConf, APL, AppleScript, Arduino, Awk | |||
B | Ballerina, Base Makefile, Bash, Batchfile, BibTeX, Bicep, BlitzBasic, BNF, Brainfuck | |||
C | C, C#, C++, Caddyfile, Caddyfile Directives, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython | |||
D | D, Dart, Diff, Django/Jinja, Docker, DTD, Dylan | |||
E | EBNF, Elixir, Elm, EmacsLisp, Erlang | |||
F | Factor, Fish, Forth, Fortran, FSharp | |||
G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, Gherkin, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groff, Groovy | |||
H | Handlebars, Haskell, Haxe, HCL, Hexdump, HLB, HTML, HTTP, Hy | |||
I | Idris, Igor, INI, Io | |||
J | J, Java, JavaScript, JSON, Julia, Jungle | |||
K | Kotlin | |||
L | Lighttpd configuration file, LLVM, Lua | |||
M | Mako, markdown, Mason, Mathematica, Matlab, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL | |||
N | NASM, Newspeak, Nginx configuration file, Nim, Nix | |||
O | Objective-C, OCaml, Octave, OnesEnterprise, OpenEdge ABL, OpenSCAD, Org Mode | |||
P | PacmanConf, Perl, PHP, PHTML, Pig, PkgConfig, PL/pgSQL, plaintext, Pony, PostgreSQL SQL dialect, PostScript, POVRay, PowerShell, Prolog, PromQL, Protocol Buffer, Puppet, Python 2, Python | |||
Q | QBasic | |||
R | R, Racket, Ragel, Raku, react, ReasonML, reg, reStructuredText, Rexx, Ruby, Rust | |||
S | SAS, Sass, Scala, Scheme, Scilab, SCSS, Smalltalk, Smarty, Snobol, Solidity, SPARQL, SQL, SquidConf, Standard ML, Stylus, Svelte, Swift, SYSTEMD, systemverilog | |||
T | TableGen, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData | |||
V | VB.net, verilog, VHDL, VimL, vue | |||
W | WDTE | |||
X | XML, Xorg | |||
Y | YAML, YANG | |||
Z | Zig | |||
_I will attempt to keep this section up to date, but an authoritative list can be | |||
displayed with `chroma --list`._ | |||
<a id="markdown-try-it" name="try-it"></a> | |||
## Try it | |||
Try out various languages and styles on the [Chroma Playground](https://swapoff.org/chroma/playground/). | |||
<a id="markdown-using-the-library" name="using-the-library"></a> | |||
## Using the library | |||
Chroma, like Pygments, has the concepts of | |||
[lexers](https://github.com/alecthomas/chroma/tree/master/lexers), | |||
[formatters](https://github.com/alecthomas/chroma/tree/master/formatters) and | |||
[styles](https://github.com/alecthomas/chroma/tree/master/styles). | |||
Lexers convert source text into a stream of tokens, styles specify how token | |||
types are mapped to colours, and formatters convert tokens and styles into | |||
formatted output. | |||
A package exists for each of these, containing a global `Registry` variable | |||
with all of the registered implementations. There are also helper functions | |||
for using the registry in each package, such as looking up lexers by name or | |||
matching filenames, etc. | |||
In all cases, if a lexer, formatter or style can not be determined, `nil` will | |||
be returned. In this situation you may want to default to the `Fallback` | |||
value in each respective package, which provides sane defaults. | |||
<a id="markdown-quick-start" name="quick-start"></a> | |||
### Quick start | |||
A convenience function exists that can be used to simply format some source | |||
text, without any effort: | |||
```go | |||
err := quick.Highlight(os.Stdout, someSourceCode, "go", "html", "monokai") | |||
``` | |||
<a id="markdown-identifying-the-language" name="identifying-the-language"></a> | |||
### Identifying the language | |||
To highlight code, you'll first have to identify what language the code is | |||
written in. There are three primary ways to do that: | |||
1. Detect the language from its filename. | |||
```go | |||
lexer := lexers.Match("foo.go") | |||
``` | |||
2. Explicitly specify the language by its Chroma syntax ID (a full list is available from `lexers.Names()`). | |||
```go | |||
lexer := lexers.Get("go") | |||
``` | |||
3. Detect the language from its content. | |||
```go | |||
lexer := lexers.Analyse("package main\n\nfunc main()\n{\n}\n") | |||
``` | |||
In all cases, `nil` will be returned if the language can not be identified. | |||
```go | |||
if lexer == nil { | |||
lexer = lexers.Fallback | |||
} | |||
``` | |||
At this point, it should be noted that some lexers can be extremely chatty. To | |||
mitigate this, you can use the coalescing lexer to coalesce runs of identical | |||
token types into a single token: | |||
```go | |||
lexer = chroma.Coalesce(lexer) | |||
``` | |||
<a id="markdown-formatting-the-output" name="formatting-the-output"></a> | |||
### Formatting the output | |||
Once a language is identified you will need to pick a formatter and a style (theme). | |||
```go | |||
style := styles.Get("swapoff") | |||
if style == nil { | |||
style = styles.Fallback | |||
} | |||
formatter := formatters.Get("html") | |||
if formatter == nil { | |||
formatter = formatters.Fallback | |||
} | |||
``` | |||
Then obtain an iterator over the tokens: | |||
```go | |||
contents, err := ioutil.ReadAll(r) | |||
iterator, err := lexer.Tokenise(nil, string(contents)) | |||
``` | |||
And finally, format the tokens from the iterator: | |||
```go | |||
err := formatter.Format(w, style, iterator) | |||
``` | |||
<a id="markdown-the-html-formatter" name="the-html-formatter"></a> | |||
### The HTML formatter | |||
By default the `html` registered formatter generates standalone HTML with | |||
embedded CSS. More flexibility is available through the `formatters/html` package. | |||
Firstly, the output generated by the formatter can be customised with the | |||
following constructor options: | |||
- `Standalone()` - generate standalone HTML with embedded CSS. | |||
- `WithClasses()` - use classes rather than inlined style attributes. | |||
- `ClassPrefix(prefix)` - prefix each generated CSS class. | |||
- `TabWidth(width)` - Set the rendered tab width, in characters. | |||
- `WithLineNumbers()` - Render line numbers (style with `LineNumbers`). | |||
- `LinkableLineNumbers()` - Make the line numbers linkable and be a link to themselves. | |||
- `HighlightLines(ranges)` - Highlight lines in these ranges (style with `LineHighlight`). | |||
- `LineNumbersInTable()` - Use a table for formatting line numbers and code, rather than spans. | |||
If `WithClasses()` is used, the corresponding CSS can be obtained from the formatter with: | |||
```go | |||
formatter := html.New(html.WithClasses()) | |||
err := formatter.WriteCSS(w, style) | |||
``` | |||
<a id="markdown-more-detail" name="more-detail"></a> | |||
## More detail | |||
<a id="markdown-lexers" name="lexers"></a> | |||
### Lexers | |||
See the [Pygments documentation](http://pygments.org/docs/lexerdevelopment/) | |||
for details on implementing lexers. Most concepts apply directly to Chroma, | |||
but see existing lexer implementations for real examples. | |||
In many cases lexers can be automatically converted directly from Pygments by | |||
using the included Python 3 script `pygments2chroma.py`. I use something like | |||
the following: | |||
```sh | |||
python3 _tools/pygments2chroma.py \ | |||
pygments.lexers.jvm.KotlinLexer \ | |||
> lexers/k/kotlin.go \ | |||
&& gofmt -s -w lexers/k/kotlin.go | |||
``` | |||
See notes in [pygments-lexers.txt](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt) | |||
for a list of lexers, and notes on some of the issues importing them. | |||
<a id="markdown-formatters" name="formatters"></a> | |||
### Formatters | |||
Chroma supports HTML output, as well as terminal output in 8 colour, 256 colour, and true-colour. | |||
A `noop` formatter is included that outputs the token text only, and a `tokens` | |||
formatter outputs raw tokens. The latter is useful for debugging lexers. | |||
<a id="markdown-styles" name="styles"></a> | |||
### Styles | |||
Chroma styles use the [same syntax](http://pygments.org/docs/styles/) as Pygments. | |||
All Pygments styles have been converted to Chroma using the `_tools/style.py` script. | |||
When you work with one of [Chroma's styles](https://github.com/alecthomas/chroma/tree/master/styles), know that the `chroma.Background` token type provides the default style for tokens. It does so by defining a foreground color and background color. | |||
For example, this gives each token name not defined in the style a default color of `#f8f8f8` and uses `#000000` for the highlighted code block's background: | |||
~~~go | |||
chroma.Background: "#f8f8f2 bg:#000000", | |||
~~~ | |||
Also, token types in a style file are hierarchical. For instance, when `CommentSpecial` is not defined, Chroma uses the token style from `Comment`. So when several comment tokens use the same color, you'll only need to define `Comment` and override the one that has a different color. | |||
For a quick overview of the available styles and how they look, check out the [Chroma Style Gallery](https://xyproto.github.io/splash/docs/). | |||
<a id="markdown-command-line-interface" name="command-line-interface"></a> | |||
## Command-line interface | |||
A command-line interface to Chroma is included. | |||
Binaries are available to install from [the releases page](https://github.com/alecthomas/chroma/releases). | |||
The CLI can be used as a preprocessor to colorise output of `less(1)`, | |||
see documentation for the `LESSOPEN` environment variable. | |||
The `--fail` flag can be used to suppress output and return with exit status | |||
1 to facilitate falling back to some other preprocessor in case chroma | |||
does not resolve a specific lexer to use for the given file. For example: | |||
```shell | |||
export LESSOPEN='| p() { chroma --fail "$1" || cat "$1"; }; p "%s"' | |||
``` | |||
Replace `cat` with your favourite fallback preprocessor. | |||
When invoked as `.lessfilter`, the `--fail` flag is automatically turned | |||
on under the hood for easy integration with [lesspipe shipping with | |||
Debian and derivatives](https://manpages.debian.org/lesspipe#USER_DEFINED_FILTERS); | |||
for that setup the `chroma` executable can be just symlinked to `~/.lessfilter`. | |||
<a id="markdown-whats-missing-compared-to-pygments" name="whats-missing-compared-to-pygments"></a> | |||
## What's missing compared to Pygments? | |||
- Quite a few lexers, for various reasons (pull-requests welcome): | |||
- Pygments lexers for complex languages often include custom code to | |||
handle certain aspects, such as Raku's ability to nest code inside | |||
regular expressions. These require time and effort to convert. | |||
- I mostly only converted languages I had heard of, to reduce the porting cost. | |||
- Some more esoteric features of Pygments are omitted for simplicity. | |||
- Though the Chroma API supports content detection, very few languages support them. | |||
I have plans to implement a statistical analyser at some point, but not enough time. |
@@ -0,0 +1,35 @@ | |||
package chroma | |||
// Coalesce is a Lexer interceptor that collapses runs of common types into a single token.
func Coalesce(lexer Lexer) Lexer { return &coalescer{lexer} }

// coalescer wraps a Lexer, overriding only Tokenise; every other Lexer
// method is promoted unchanged from the embedded Lexer.
type coalescer struct{ Lexer }
// Tokenise wraps the underlying lexer's iterator, merging consecutive tokens
// of the same type into a single token.
//
// Empty-valued tokens are dropped. A run is flushed when the token type
// changes or when the accumulated value reaches 8192 bytes, which bounds the
// size of any single coalesced token.
func (d *coalescer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) {
	// prev holds the run currently being accumulated; EOF (the zero run)
	// means "no pending run".
	var prev Token
	it, err := d.Lexer.Tokenise(options, text)
	if err != nil {
		return nil, err
	}
	return func() Token {
		// Pull tokens until a run completes (and is returned) or the
		// underlying iterator is exhausted.
		for token := it(); token != (EOF); token = it() {
			if len(token.Value) == 0 {
				continue // skip empty tokens entirely
			}
			if prev == EOF {
				// Start a new run with this token.
				prev = token
			} else {
				if prev.Type == token.Type && len(prev.Value) < 8192 {
					// Same type and under the size cap: extend the run.
					prev.Value += token.Value
				} else {
					// Run ended: emit it and start a new one.
					out := prev
					prev = token
					return out
				}
			}
		}
		// Underlying iterator exhausted: emit the final pending run
		// (or EOF once nothing is pending).
		out := prev
		prev = EOF
		return out
	}, nil
}
@@ -0,0 +1,164 @@ | |||
package chroma | |||
import ( | |||
"fmt" | |||
"math" | |||
"strconv" | |||
"strings" | |||
) | |||
// ANSI2RGB maps ANSI colour names, as supported by Chroma, to hex RGB values.
var ANSI2RGB = map[string]string{
	"#ansiblack":     "000000",
	"#ansidarkred":   "7f0000",
	"#ansidarkgreen": "007f00",
	// NOTE(review): "7f7fe0" is an odd value for brown (7f7f00 would be
	// expected) but matches the upstream mapping — verify before changing.
	"#ansibrown":     "7f7fe0",
	"#ansidarkblue":  "00007f",
	"#ansipurple":    "7f007f",
	"#ansiteal":      "007f7f",
	"#ansilightgray": "e5e5e5",
	// Normal
	"#ansidarkgray":  "555555",
	"#ansired":       "ff0000",
	"#ansigreen":     "00ff00",
	"#ansiyellow":    "ffff00",
	"#ansiblue":      "0000ff",
	"#ansifuchsia":   "ff00ff",
	"#ansiturquoise": "00ffff",
	"#ansiwhite":     "ffffff",
	// Aliases without the "ansi" prefix, because...why?
	"#black":     "000000",
	"#darkred":   "7f0000",
	"#darkgreen": "007f00",
	"#brown":     "7f7fe0",
	"#darkblue":  "00007f",
	"#purple":    "7f007f",
	"#teal":      "007f7f",
	"#lightgray": "e5e5e5",
	// Normal
	"#darkgray":  "555555",
	"#red":       "ff0000",
	"#green":     "00ff00",
	"#yellow":    "ffff00",
	"#blue":      "0000ff",
	"#fuchsia":   "ff00ff",
	"#turquoise": "00ffff",
	"#white":     "ffffff",
}

// Colour represents an RGB colour.
//
// The zero value means "unset"; a set colour is stored as its 24-bit RGB
// value plus one (see IsSet, ParseColour and the component accessors).
type Colour int32

// NewColour creates a Colour directly from RGB values.
func NewColour(r, g, b uint8) Colour {
	// Pack the components into the 24-bit value; the +1 distinguishes a real
	// colour from the zero ("unset") Colour. Equivalent to — but far cheaper
	// than — formatting to a hex string and round-tripping through
	// ParseColour, as the original did.
	return Colour(int32(r)<<16|int32(g)<<8|int32(b)) + 1
}
// Distance between this colour and another.
//
// This uses the approach described here (https://www.compuphase.com/cmetric.htm).
// This is not as accurate as LAB, et. al. but is *vastly* simpler and sufficient for our needs.
func (c Colour) Distance(e2 Colour) float64 {
	ar, ag, ab := int64(c.Red()), int64(c.Green()), int64(c.Blue())
	br, bg, bb := int64(e2.Red()), int64(e2.Green()), int64(e2.Blue())
	// "redmean": weight the red and blue channel differences by the average
	// red level, roughly approximating human colour perception (see link).
	rmean := (ar + br) / 2
	r := ar - br
	g := ag - bg
	b := ab - bb
	// The >>8 divides the weighted terms by 256, matching the reference formula.
	return math.Sqrt(float64((((512 + rmean) * r * r) >> 8) + 4*g*g + (((767 - rmean) * b * b) >> 8)))
}
// Brighten returns a copy of this colour with its brightness adjusted.
//
// If factor is negative, the colour is darkened.
//
// Uses approach described here (http://www.pvladov.com/2012/09/make-color-lighter-or-darker.html).
func (c Colour) Brighten(factor float64) Colour {
	r := float64(c.Red())
	g := float64(c.Green())
	b := float64(c.Blue())
	if factor < 0 {
		// Darken: scale each channel towards 0 (factor -0.3 becomes a
		// multiplier of 0.7).
		factor++
		r *= factor
		g *= factor
		b *= factor
	} else {
		// Brighten: move each channel a fraction of its remaining headroom
		// towards 255.
		r = (255-r)*factor + r
		g = (255-g)*factor + g
		b = (255-b)*factor + b
	}
	return NewColour(uint8(r), uint8(g), uint8(b))
}
// BrightenOrDarken brightens a colour if it is < 0.5 brighteness or darkens if > 0.5 brightness. | |||
func (c Colour) BrightenOrDarken(factor float64) Colour { | |||
if c.Brightness() < 0.5 { | |||
return c.Brighten(factor) | |||
} | |||
return c.Brighten(-factor) | |||
} | |||
// Brightness of the colour (roughly) in the range 0.0 to 1.0 | |||
func (c Colour) Brightness() float64 { | |||
return (float64(c.Red()) + float64(c.Green()) + float64(c.Blue())) / 255.0 / 3.0 | |||
} | |||
// ParseColour in the forms #rgb, #rrggbb, #ansi<colour>, or #<colour>. | |||
// Will return an "unset" colour if invalid. | |||
func ParseColour(colour string) Colour { | |||
colour = normaliseColour(colour) | |||
n, err := strconv.ParseUint(colour, 16, 32) | |||
if err != nil { | |||
return 0 | |||
} | |||
return Colour(n + 1) | |||
} | |||
// MustParseColour is like ParseColour except it panics if the colour is invalid. | |||
// | |||
// Will panic if colour is in an invalid format. | |||
func MustParseColour(colour string) Colour { | |||
parsed := ParseColour(colour) | |||
if !parsed.IsSet() { | |||
panic(fmt.Errorf("invalid colour %q", colour)) | |||
} | |||
return parsed | |||
} | |||
// IsSet returns true if the colour is set.
func (c Colour) IsSet() bool { return c != 0 }

// String returns the colour as "#rrggbb". The -1 undoes the +1 offset that
// keeps the zero Colour meaning "unset".
func (c Colour) String() string   { return fmt.Sprintf("#%06x", int(c-1)) }

// GoString returns a Go-syntax representation of the colour.
func (c Colour) GoString() string { return fmt.Sprintf("Colour(0x%06x)", int(c-1)) }

// Red component of colour.
func (c Colour) Red() uint8 { return uint8(((c - 1) >> 16) & 0xff) }

// Green component of colour.
func (c Colour) Green() uint8 { return uint8(((c - 1) >> 8) & 0xff) }

// Blue component of colour.
func (c Colour) Blue() uint8 { return uint8((c - 1) & 0xff) }
// Colours is an orderable set of colours.
type Colours []Colour

// Len, Swap and Less implement sort.Interface, ordering by packed RGB value.
func (c Colours) Len() int           { return len(c) }
func (c Colours) Swap(i, j int)      { c[i], c[j] = c[j], c[i] }
func (c Colours) Less(i, j int) bool { return c[i] < c[j] }
// Convert colours to #rrggbb. | |||
func normaliseColour(colour string) string { | |||
if ansi, ok := ANSI2RGB[colour]; ok { | |||
return ansi | |||
} | |||
if strings.HasPrefix(colour, "#") { | |||
colour = colour[1:] | |||
if len(colour) == 3 { | |||
return colour[0:1] + colour[0:1] + colour[1:2] + colour[1:2] + colour[2:3] + colour[2:3] | |||
} | |||
} | |||
return colour | |||
} |
@@ -0,0 +1,137 @@ | |||
package chroma | |||
import ( | |||
"bytes" | |||
) | |||
// delegatingLexer implements the lexer returned by DelegatingLexer.
type delegatingLexer struct {
	root     Lexer // lexes the spans the language lexer leaves as "Other"
	language Lexer // lexes the embedded language; must emit Other for the rest
}

// DelegatingLexer combines two lexers to handle the common case of a language embedded inside another, such as PHP
// inside HTML or PHP inside plain text.
//
// It takes two lexer as arguments: a root lexer and a language lexer. First everything is scanned using the language
// lexer, which must return "Other" for unrecognised tokens. Then all "Other" tokens are lexed using the root lexer.
// Finally, these two sets of tokens are merged.
//
// The lexers from the template lexer package use this base lexer.
func DelegatingLexer(root Lexer, language Lexer) Lexer {
	return &delegatingLexer{
		root:     root,
		language: language,
	}
}

// Config reports the language lexer's configuration, since that lexer
// determines the combined lexer's identity.
func (d *delegatingLexer) Config() *Config {
	return d.language.Config()
}

// An insertion is the character range where language tokens should be inserted.
type insertion struct {
	start, end int     // byte offsets into the original text
	tokens     []Token // language tokens to splice in at that range
}
// Tokenise lexes text with the language lexer, re-lexes the unclaimed
// ("Other") spans with the root lexer, then splices the language tokens back
// into the root token stream at their original offsets.
func (d *delegatingLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { // nolint: gocognit
	// Pass 1: language lexer over the whole text.
	tokens, err := Tokenise(Coalesce(d.language), options, text)
	if err != nil {
		return nil, err
	}
	// Compute insertions and gather "Other" tokens.
	// Consecutive non-Other tokens form one insertion; its start/end are byte
	// offsets in the original text.
	others := &bytes.Buffer{}
	insertions := []*insertion{}
	var insert *insertion
	offset := 0
	var last Token
	for _, t := range tokens {
		if t.Type == Other {
			// Close the current insertion when transitioning out of a
			// language-token run.
			if last != EOF && insert != nil && last.Type != Other {
				insert.end = offset
			}
			others.WriteString(t.Value)
		} else {
			// Open a new insertion when transitioning into a language-token run.
			if last == EOF || last.Type == Other {
				insert = &insertion{start: offset}
				insertions = append(insertions, insert)
			}
			insert.tokens = append(insert.tokens, t)
		}
		last = t
		offset += len(t.Value)
	}
	// NOTE(review): if the text ends with language (non-Other) tokens, the
	// final insertion's `end` is never assigned — verify the interleaving
	// arithmetic below is intended to handle that case.
	if len(insertions) == 0 {
		// No embedded language found: the root lexer handles everything.
		return d.root.Tokenise(options, text)
	}
	// Pass 2: lex the concatenated "Other" spans with the root lexer.
	rootTokens, err := Tokenise(Coalesce(d.root), options, others.String())
	if err != nil {
		return nil, err
	}
	// Interleave the two sets of tokens.
	var out []Token
	offset = 0 // Offset into text.
	tokenIndex := 0
	nextToken := func() Token {
		if tokenIndex >= len(rootTokens) {
			return EOF
		}
		t := rootTokens[tokenIndex]
		tokenIndex++
		return t
	}
	insertionIndex := 0
	nextInsertion := func() *insertion {
		if insertionIndex >= len(insertions) {
			return nil
		}
		i := insertions[insertionIndex]
		insertionIndex++
		return i
	}
	t := nextToken()
	i := nextInsertion()
	for t != EOF || i != nil {
		// fmt.Printf("%d->%d:%q %d->%d:%q\n", offset, offset+len(t.Value), t.Value, i.start, i.end, Stringify(i.tokens...))
		if t == EOF || (i != nil && i.start < offset+len(t.Value)) {
			// The next insertion lands inside (or at the end of) the current
			// root token: split that token around the insertion point.
			var l Token
			l, t = splitToken(t, i.start-offset)
			if l != EOF {
				out = append(out, l)
				offset += len(l.Value)
			}
			out = append(out, i.tokens...)
			offset += i.end - i.start
			if t == EOF {
				t = nextToken()
			}
			i = nextInsertion()
		} else {
			// Current root token lies entirely before the next insertion.
			out = append(out, t)
			offset += len(t.Value)
			t = nextToken()
		}
	}
	return Literator(out...), nil
}
func splitToken(t Token, offset int) (l Token, r Token) { | |||
if t == EOF { | |||
return EOF, EOF | |||
} | |||
if offset == 0 { | |||
return EOF, t | |||
} | |||
if offset == len(t.Value) { | |||
return t, EOF | |||
} | |||
l = t.Clone() | |||
r = t.Clone() | |||
l.Value = l.Value[:offset] | |||
r.Value = r.Value[offset:] | |||
return | |||
} |
@@ -0,0 +1,7 @@ | |||
// Package chroma takes source code and other structured text and converts it into syntax highlighted HTML, ANSI- | |||
// coloured text, etc. | |||
// | |||
// Chroma is based heavily on Pygments, and includes translators for Pygments lexers and styles. | |||
// | |||
// For more information, go here: https://github.com/alecthomas/chroma | |||
package chroma |
@@ -0,0 +1,43 @@ | |||
package chroma | |||
import (
	"fmt"
	"io"
)
// A Formatter for Chroma lexers.
//
// See FormatterFunc and RecoveringFormatter in this file for implementations
// that convert iterator panics into returned errors.
type Formatter interface {
	// Format returns a formatting function for tokens.
	//
	// If the iterator panics, the Formatter should recover.
	Format(w io.Writer, style *Style, iterator Iterator) error
}
// A FormatterFunc is a Formatter implemented as a function. | |||
// | |||
// Guards against iterator panics. | |||
type FormatterFunc func(w io.Writer, style *Style, iterator Iterator) error | |||
func (f FormatterFunc) Format(w io.Writer, s *Style, it Iterator) (err error) { // nolint | |||
defer func() { | |||
if perr := recover(); perr != nil { | |||
err = perr.(error) | |||
} | |||
}() | |||
return f(w, s, it) | |||
} | |||
type recoveringFormatter struct { | |||
Formatter | |||
} | |||
func (r recoveringFormatter) Format(w io.Writer, s *Style, it Iterator) (err error) { | |||
defer func() { | |||
if perr := recover(); perr != nil { | |||
err = perr.(error) | |||
} | |||
}() | |||
return r.Formatter.Format(w, s, it) | |||
} | |||
// RecoveringFormatter wraps a formatter with panic recovery. | |||
func RecoveringFormatter(formatter Formatter) Formatter { return recoveringFormatter{formatter} } |
@@ -0,0 +1,500 @@ | |||
package html | |||
import ( | |||
"fmt" | |||
"html" | |||
"io" | |||
"sort" | |||
"strings" | |||
"github.com/alecthomas/chroma" | |||
) | |||
// Option sets an option of the HTML formatter.
type Option func(f *Formatter)

// Standalone configures the HTML formatter for generating a standalone HTML document
// (a complete page with embedded styling, rather than an HTML fragment).
func Standalone(b bool) Option { return func(f *Formatter) { f.standalone = b } }

// ClassPrefix sets the CSS class prefix.
func ClassPrefix(prefix string) Option { return func(f *Formatter) { f.prefix = prefix } }

// WithClasses emits HTML using CSS classes, rather than inline styles.
func WithClasses(b bool) Option { return func(f *Formatter) { f.Classes = b } }

// WithAllClasses disables an optimisation that omits redundant CSS classes.
func WithAllClasses(b bool) Option { return func(f *Formatter) { f.allClasses = b } }

// TabWidth sets the number of characters for a tab. Defaults to 8.
func TabWidth(width int) Option { return func(f *Formatter) { f.tabWidth = width } }
// PreventSurroundingPre prevents the surrounding pre tags around the generated code. | |||
func PreventSurroundingPre(b bool) Option { | |||
return func(f *Formatter) { | |||
if b { | |||
f.preWrapper = nopPreWrapper | |||
} else { | |||
f.preWrapper = defaultPreWrapper | |||
} | |||
} | |||
} | |||
// WithPreWrapper allows control of the surrounding pre tags. | |||
func WithPreWrapper(wrapper PreWrapper) Option { | |||
return func(f *Formatter) { | |||
f.preWrapper = wrapper | |||
} | |||
} | |||
// WrapLongLines wraps long lines. | |||
func WrapLongLines(b bool) Option { | |||
return func(f *Formatter) { | |||
f.wrapLongLines = b | |||
} | |||
} | |||
// WithLineNumbers formats output with line numbers. | |||
func WithLineNumbers(b bool) Option { | |||
return func(f *Formatter) { | |||
f.lineNumbers = b | |||
} | |||
} | |||
// LineNumbersInTable will, when combined with WithLineNumbers, separate the line numbers | |||
// and code in table td's, which make them copy-and-paste friendly. | |||
func LineNumbersInTable(b bool) Option { | |||
return func(f *Formatter) { | |||
f.lineNumbersInTable = b | |||
} | |||
} | |||
// LinkableLineNumbers decorates the line numbers HTML elements with an "id" | |||
// attribute so they can be linked. | |||
func LinkableLineNumbers(b bool, prefix string) Option { | |||
return func(f *Formatter) { | |||
f.linkableLineNumbers = b | |||
f.lineNumbersIDPrefix = prefix | |||
} | |||
} | |||
// HighlightLines higlights the given line ranges with the Highlight style. | |||
// | |||
// A range is the beginning and ending of a range as 1-based line numbers, inclusive. | |||
func HighlightLines(ranges [][2]int) Option { | |||
return func(f *Formatter) { | |||
f.highlightRanges = ranges | |||
sort.Sort(f.highlightRanges) | |||
} | |||
} | |||
// BaseLineNumber sets the initial number to start line numbering at. Defaults to 1. | |||
func BaseLineNumber(n int) Option { | |||
return func(f *Formatter) { | |||
f.baseLineNumber = n | |||
} | |||
} | |||
// New HTML formatter. | |||
func New(options ...Option) *Formatter { | |||
f := &Formatter{ | |||
baseLineNumber: 1, | |||
preWrapper: defaultPreWrapper, | |||
} | |||
for _, option := range options { | |||
option(f) | |||
} | |||
return f | |||
} | |||
// PreWrapper defines the operations supported in WithPreWrapper.
type PreWrapper interface {
	// Start is called to write a start <pre> element.
	// The code flag tells whether this block surrounds
	// highlighted code. This will be false when surrounding
	// line numbers.
	Start(code bool, styleAttr string) string

	// End is called to write the end </pre> element.
	End(code bool) string
}

// preWrapper implements PreWrapper via a pair of pluggable functions.
type preWrapper struct {
	start func(code bool, styleAttr string) string
	end   func(code bool) string
}

// Start delegates to the configured start function.
func (p preWrapper) Start(code bool, styleAttr string) string {
	return p.start(code, styleAttr)
}

// End delegates to the configured end function.
func (p preWrapper) End(code bool) string {
	return p.end(code)
}
var (
	// nopPreWrapper omits the surrounding <pre> tags entirely
	// (installed by PreventSurroundingPre).
	nopPreWrapper = preWrapper{
		start: func(code bool, styleAttr string) string { return "" },
		end:   func(code bool) string { return "" },
	}
	// defaultPreWrapper emits <pre tabindex="0"> and, when wrapping
	// highlighted code (as opposed to line numbers), a nested <code> element.
	defaultPreWrapper = preWrapper{
		start: func(code bool, styleAttr string) string {
			if code {
				return fmt.Sprintf(`<pre tabindex="0"%s><code>`, styleAttr)
			}
			return fmt.Sprintf(`<pre tabindex="0"%s>`, styleAttr)
		},
		end: func(code bool) string {
			if code {
				return `</code></pre>`
			}
			return `</pre>`
		},
	}
)
// Formatter that generates HTML.
type Formatter struct {
	standalone          bool   // emit a complete <html>/<body> document
	prefix              string // prefix prepended to every emitted CSS class
	Classes             bool   // Exported field to detect when classes are being used
	allClasses          bool   // emit classes even for entries identical to the background
	preWrapper          PreWrapper
	tabWidth            int
	wrapLongLines       bool
	lineNumbers         bool
	lineNumbersInTable  bool
	linkableLineNumbers bool
	lineNumbersIDPrefix string
	highlightRanges     highlightRanges
	baseLineNumber      int
}

// highlightRanges sorts inclusive 1-based [start, end] line ranges by start line.
type highlightRanges [][2]int

func (h highlightRanges) Len() int           { return len(h) }
func (h highlightRanges) Swap(i, j int)      { h[i], h[j] = h[j], h[i] }
func (h highlightRanges) Less(i, j int) bool { return h[i][0] < h[j][0] }
func (f *Formatter) Format(w io.Writer, style *chroma.Style, iterator chroma.Iterator) (err error) { | |||
return f.writeHTML(w, style, iterator.Tokens()) | |||
} | |||
// We deliberately don't use html/template here because it is two orders of magnitude slower (benchmarked). | |||
// | |||
// OTOH we need to be super careful about correct escaping... | |||
func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.Token) (err error) { // nolint: gocyclo | |||
css := f.styleToCSS(style) | |||
if !f.Classes { | |||
for t, style := range css { | |||
css[t] = compressStyle(style) | |||
} | |||
} | |||
if f.standalone { | |||
fmt.Fprint(w, "<html>\n") | |||
if f.Classes { | |||
fmt.Fprint(w, "<style type=\"text/css\">\n") | |||
err = f.WriteCSS(w, style) | |||
if err != nil { | |||
return err | |||
} | |||
fmt.Fprintf(w, "body { %s; }\n", css[chroma.Background]) | |||
fmt.Fprint(w, "</style>") | |||
} | |||
fmt.Fprintf(w, "<body%s>\n", f.styleAttr(css, chroma.Background)) | |||
} | |||
wrapInTable := f.lineNumbers && f.lineNumbersInTable | |||
lines := chroma.SplitTokensIntoLines(tokens) | |||
lineDigits := len(fmt.Sprintf("%d", f.baseLineNumber+len(lines)-1)) | |||
highlightIndex := 0 | |||
if wrapInTable { | |||
// List line numbers in its own <td> | |||
fmt.Fprintf(w, "<div%s>\n", f.styleAttr(css, chroma.PreWrapper)) | |||
fmt.Fprintf(w, "<table%s><tr>", f.styleAttr(css, chroma.LineTable)) | |||
fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD)) | |||
fmt.Fprintf(w, f.preWrapper.Start(false, f.styleAttr(css, chroma.PreWrapper))) | |||
for index := range lines { | |||
line := f.baseLineNumber + index | |||
highlight, next := f.shouldHighlight(highlightIndex, line) | |||
if next { | |||
highlightIndex++ | |||
} | |||
if highlight { | |||
fmt.Fprintf(w, "<span%s>", f.styleAttr(css, chroma.LineHighlight)) | |||
} | |||
fmt.Fprintf(w, "<span%s%s>%s\n</span>", f.styleAttr(css, chroma.LineNumbersTable), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(lineDigits, line)) | |||
if highlight { | |||
fmt.Fprintf(w, "</span>") | |||
} | |||
} | |||
fmt.Fprint(w, f.preWrapper.End(false)) | |||
fmt.Fprint(w, "</td>\n") | |||
fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD, "width:100%")) | |||
} | |||
fmt.Fprintf(w, f.preWrapper.Start(true, f.styleAttr(css, chroma.PreWrapper))) | |||
highlightIndex = 0 | |||
for index, tokens := range lines { | |||
// 1-based line number. | |||
line := f.baseLineNumber + index | |||
highlight, next := f.shouldHighlight(highlightIndex, line) | |||
if next { | |||
highlightIndex++ | |||
} | |||
// Start of Line | |||
fmt.Fprint(w, `<span`) | |||
if highlight { | |||
// Line + LineHighlight | |||
if f.Classes { | |||
fmt.Fprintf(w, ` class="%s %s"`, f.class(chroma.Line), f.class(chroma.LineHighlight)) | |||
} else { | |||
fmt.Fprintf(w, ` style="%s %s"`, css[chroma.Line], css[chroma.LineHighlight]) | |||
} | |||
fmt.Fprint(w, `>`) | |||
} else { | |||
fmt.Fprintf(w, "%s>", f.styleAttr(css, chroma.Line)) | |||
} | |||
// Line number | |||
if f.lineNumbers && !wrapInTable { | |||
fmt.Fprintf(w, "<span%s%s>%s</span>", f.styleAttr(css, chroma.LineNumbers), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(lineDigits, line)) | |||
} | |||
fmt.Fprintf(w, `<span%s>`, f.styleAttr(css, chroma.CodeLine)) | |||
for _, token := range tokens { | |||
html := html.EscapeString(token.String()) | |||
attr := f.styleAttr(css, token.Type) | |||
if attr != "" { | |||
html = fmt.Sprintf("<span%s>%s</span>", attr, html) | |||
} | |||
fmt.Fprint(w, html) | |||
} | |||
fmt.Fprint(w, `</span>`) // End of CodeLine | |||
fmt.Fprint(w, `</span>`) // End of Line | |||
} | |||
fmt.Fprintf(w, f.preWrapper.End(true)) | |||
if wrapInTable { | |||
fmt.Fprint(w, "</td></tr></table>\n") | |||
fmt.Fprint(w, "</div>\n") | |||
} | |||
if f.standalone { | |||
fmt.Fprint(w, "\n</body>\n") | |||
fmt.Fprint(w, "</html>\n") | |||
} | |||
return nil | |||
} | |||
func (f *Formatter) lineIDAttribute(line int) string { | |||
if !f.linkableLineNumbers { | |||
return "" | |||
} | |||
return fmt.Sprintf(" id=\"%s\"", f.lineID(line)) | |||
} | |||
func (f *Formatter) lineTitleWithLinkIfNeeded(lineDigits, line int) string { | |||
title := fmt.Sprintf("%*d", lineDigits, line) | |||
if !f.linkableLineNumbers { | |||
return title | |||
} | |||
return fmt.Sprintf("<a style=\"outline: none; text-decoration:none; color:inherit\" href=\"#%s\">%s</a>", f.lineID(line), title) | |||
} | |||
func (f *Formatter) lineID(line int) string { | |||
return fmt.Sprintf("%s%d", f.lineNumbersIDPrefix, line) | |||
} | |||
func (f *Formatter) shouldHighlight(highlightIndex, line int) (bool, bool) { | |||
next := false | |||
for highlightIndex < len(f.highlightRanges) && line > f.highlightRanges[highlightIndex][1] { | |||
highlightIndex++ | |||
next = true | |||
} | |||
if highlightIndex < len(f.highlightRanges) { | |||
hrange := f.highlightRanges[highlightIndex] | |||
if line >= hrange[0] && line <= hrange[1] { | |||
return true, next | |||
} | |||
} | |||
return false, next | |||
} | |||
// class returns the prefixed CSS class for a token type, walking up the token
// type hierarchy via Parent() until a type registered in StandardTypes is found.
func (f *Formatter) class(t chroma.TokenType) string {
	for t != 0 {
		if cls, ok := chroma.StandardTypes[t]; ok {
			if cls != "" {
				return f.prefix + cls
			}
			// A registered-but-empty class deliberately suppresses output.
			return ""
		}
		t = t.Parent()
	}
	// Fall back to whatever class is registered for the zero token type, if any.
	if cls := chroma.StandardTypes[t]; cls != "" {
		return f.prefix + cls
	}
	return ""
}
// styleAttr renders either a ` class="..."` attribute (when Classes is on) or
// an inline ` style="..."` attribute for the token type, falling back from the
// exact type to its sub-category and then its category. extraCSS declarations
// are appended to inline styles. Returns "" when nothing applies.
func (f *Formatter) styleAttr(styles map[chroma.TokenType]string, tt chroma.TokenType, extraCSS ...string) string {
	if f.Classes {
		cls := f.class(tt)
		if cls == "" {
			return ""
		}
		return fmt.Sprintf(` class="%s"`, cls)
	}
	// Note: the fallback mutates tt step by step; the final Category() is
	// taken of the sub-category, not the original type.
	if _, ok := styles[tt]; !ok {
		tt = tt.SubCategory()
		if _, ok := styles[tt]; !ok {
			tt = tt.Category()
			if _, ok := styles[tt]; !ok {
				return ""
			}
		}
	}
	css := []string{styles[tt]}
	css = append(css, extraCSS...)
	return fmt.Sprintf(` style="%s"`, strings.Join(css, ";"))
}
func (f *Formatter) tabWidthStyle() string { | |||
if f.tabWidth != 0 && f.tabWidth != 8 { | |||
return fmt.Sprintf("; -moz-tab-size: %[1]d; -o-tab-size: %[1]d; tab-size: %[1]d", f.tabWidth) | |||
} | |||
return "" | |||
} | |||
// WriteCSS writes CSS style definitions (without any surrounding HTML). | |||
func (f *Formatter) WriteCSS(w io.Writer, style *chroma.Style) error { | |||
css := f.styleToCSS(style) | |||
// Special-case background as it is mapped to the outer ".chroma" class. | |||
if _, err := fmt.Fprintf(w, "/* %s */ .%sbg { %s }\n", chroma.Background, f.prefix, css[chroma.Background]); err != nil { | |||
return err | |||
} | |||
// Special-case PreWrapper as it is the ".chroma" class. | |||
if _, err := fmt.Fprintf(w, "/* %s */ .%schroma { %s }\n", chroma.PreWrapper, f.prefix, css[chroma.PreWrapper]); err != nil { | |||
return err | |||
} | |||
// Special-case code column of table to expand width. | |||
if f.lineNumbers && f.lineNumbersInTable { | |||
if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s:last-child { width: 100%%; }", | |||
chroma.LineTableTD, f.prefix, f.class(chroma.LineTableTD)); err != nil { | |||
return err | |||
} | |||
} | |||
// Special-case line number highlighting when targeted. | |||
if f.lineNumbers || f.lineNumbersInTable { | |||
targetedLineCSS := StyleEntryToCSS(style.Get(chroma.LineHighlight)) | |||
for _, tt := range []chroma.TokenType{chroma.LineNumbers, chroma.LineNumbersTable} { | |||
fmt.Fprintf(w, "/* %s targeted by URL anchor */ .%schroma .%s:target { %s }\n", tt, f.prefix, f.class(tt), targetedLineCSS) | |||
} | |||
} | |||
tts := []int{} | |||
for tt := range css { | |||
tts = append(tts, int(tt)) | |||
} | |||
sort.Ints(tts) | |||
for _, ti := range tts { | |||
tt := chroma.TokenType(ti) | |||
switch tt { | |||
case chroma.Background, chroma.PreWrapper: | |||
continue | |||
} | |||
class := f.class(tt) | |||
if class == "" { | |||
continue | |||
} | |||
styles := css[tt] | |||
if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s { %s }\n", tt, f.prefix, class, styles); err != nil { | |||
return err | |||
} | |||
} | |||
return nil | |||
} | |||
// styleToCSS converts a chroma.Style into a map from token type to CSS
// declaration string. Each entry has the background style subtracted so only
// the differences from the background are emitted; layout rules for line
// numbers, tables and the pre wrapper are then prepended/appended.
func (f *Formatter) styleToCSS(style *chroma.Style) map[chroma.TokenType]string {
	classes := map[chroma.TokenType]string{}
	bg := style.Get(chroma.Background)
	// Convert the style.
	for t := range chroma.StandardTypes {
		entry := style.Get(t)
		if t != chroma.Background {
			entry = entry.Sub(bg)
		}
		// Unless allClasses is set, skip entries identical to the background.
		if !f.allClasses && entry.IsZero() {
			continue
		}
		classes[t] = StyleEntryToCSS(entry)
	}
	classes[chroma.Background] += f.tabWidthStyle()
	classes[chroma.PreWrapper] += classes[chroma.Background] + `;`
	// Make PreWrapper a grid to show highlight style with full width.
	if len(f.highlightRanges) > 0 {
		classes[chroma.PreWrapper] += `display: grid;`
	}
	// Make PreWrapper wrap long lines.
	if f.wrapLongLines {
		classes[chroma.PreWrapper] += `white-space: pre-wrap; word-break: break-word;`
	}
	lineNumbersStyle := `white-space: pre; user-select: none; margin-right: 0.4em; padding: 0 0.4em 0 0.4em;`
	// All rules begin with default rules followed by user provided rules
	classes[chroma.Line] = `display: flex;` + classes[chroma.Line]
	classes[chroma.LineNumbers] = lineNumbersStyle + classes[chroma.LineNumbers]
	classes[chroma.LineNumbersTable] = lineNumbersStyle + classes[chroma.LineNumbersTable]
	classes[chroma.LineTable] = "border-spacing: 0; padding: 0; margin: 0; border: 0;" + classes[chroma.LineTable]
	classes[chroma.LineTableTD] = "vertical-align: top; padding: 0; margin: 0; border: 0;" + classes[chroma.LineTableTD]
	return classes
}
// StyleEntryToCSS converts a chroma.StyleEntry to CSS attributes. | |||
func StyleEntryToCSS(e chroma.StyleEntry) string { | |||
styles := []string{} | |||
if e.Colour.IsSet() { | |||
styles = append(styles, "color: "+e.Colour.String()) | |||
} | |||
if e.Background.IsSet() { | |||
styles = append(styles, "background-color: "+e.Background.String()) | |||
} | |||
if e.Bold == chroma.Yes { | |||
styles = append(styles, "font-weight: bold") | |||
} | |||
if e.Italic == chroma.Yes { | |||
styles = append(styles, "font-style: italic") | |||
} | |||
if e.Underline == chroma.Yes { | |||
styles = append(styles, "text-decoration: underline") | |||
} | |||
return strings.Join(styles, "; ") | |||
} | |||
// compressStyle compresses CSS attributes - it removes redundant spaces and
// collapses 6-digit colours with pairwise-equal digits (e.g. #ffffff) to the
// 3-digit form (#fff).
func compressStyle(s string) string {
	parts := strings.Split(s, ";")
	out := []string{}
	for _, p := range parts {
		// Normalise internal whitespace and drop the space after ':'.
		p = strings.Join(strings.Fields(p), " ")
		p = strings.Replace(p, ": ", ":", 1)
		// BUGFIX: guard the slice so a part containing '#' but shorter than
		// six bytes (e.g. an already-short colour) cannot panic.
		if strings.Contains(p, "#") && len(p) >= 6 {
			c := p[len(p)-6:]
			if c[0] == c[1] && c[2] == c[3] && c[4] == c[5] {
				p = p[:len(p)-6] + c[0:1] + c[2:3] + c[4:5]
			}
		}
		out = append(out, p)
	}
	return strings.Join(out, ";")
}
@@ -0,0 +1,9 @@ | |||
module github.com/alecthomas/chroma | |||
go 1.13 | |||
require ( | |||
github.com/davecgh/go-spew v1.1.1 // indirect | |||
github.com/dlclark/regexp2 v1.4.0 | |||
github.com/stretchr/testify v1.7.0 | |||
) |
@@ -0,0 +1,14 @@ | |||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | |||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= | |||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | |||
github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E= | |||
github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= | |||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= | |||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= | |||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= | |||
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= | |||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= | |||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= | |||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= | |||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= | |||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= |
@@ -0,0 +1,76 @@ | |||
package chroma | |||
import "strings" | |||
// An Iterator across tokens. | |||
// | |||
// EOF will be returned at the end of the Token stream. | |||
// | |||
// If an error occurs within an Iterator, it may propagate this in a panic. Formatters should recover. | |||
type Iterator func() Token | |||
// Tokens consumes all tokens from the iterator and returns them as a slice. | |||
func (i Iterator) Tokens() []Token { | |||
var out []Token | |||
for t := i(); t != EOF; t = i() { | |||
out = append(out, t) | |||
} | |||
return out | |||
} | |||
// Concaterator concatenates tokens from a series of iterators. | |||
func Concaterator(iterators ...Iterator) Iterator { | |||
return func() Token { | |||
for len(iterators) > 0 { | |||
t := iterators[0]() | |||
if t != EOF { | |||
return t | |||
} | |||
iterators = iterators[1:] | |||
} | |||
return EOF | |||
} | |||
} | |||
// Literator converts a sequence of literal Tokens into an Iterator. | |||
func Literator(tokens ...Token) Iterator { | |||
return func() Token { | |||
if len(tokens) == 0 { | |||
return EOF | |||
} | |||
token := tokens[0] | |||
tokens = tokens[1:] | |||
return token | |||
} | |||
} | |||
// SplitTokensIntoLines splits tokens containing newlines in two. | |||
func SplitTokensIntoLines(tokens []Token) (out [][]Token) { | |||
var line []Token // nolint: prealloc | |||
for _, token := range tokens { | |||
for strings.Contains(token.Value, "\n") { | |||
parts := strings.SplitAfterN(token.Value, "\n", 2) | |||
// Token becomes the tail. | |||
token.Value = parts[1] | |||
// Append the head to the line and flush the line. | |||
clone := token.Clone() | |||
clone.Value = parts[0] | |||
line = append(line, clone) | |||
out = append(out, line) | |||
line = nil | |||
} | |||
line = append(line, token) | |||
} | |||
if len(line) > 0 { | |||
out = append(out, line) | |||
} | |||
// Strip empty trailing token line. | |||
if len(out) > 0 { | |||
last := out[len(out)-1] | |||
if len(last) == 1 && last[0].Value == "" { | |||
out = out[:len(out)-1] | |||
} | |||
} | |||
return | |||
} |
@@ -0,0 +1,128 @@ | |||
package chroma | |||
import ( | |||
"fmt" | |||
"strings" | |||
) | |||
var (
	// defaultOptions are used when Tokenise is given nil options: start in
	// the "root" state and normalise line endings to LF.
	defaultOptions = &TokeniseOptions{
		State:    "root",
		EnsureLF: true,
	}
)
// Config for a lexer.
type Config struct {
	// Name of the lexer.
	Name string

	// Shortcuts for the lexer (e.g. "go" for the Go lexer).
	Aliases []string

	// File name globs this lexer claims.
	Filenames []string

	// Secondary file name globs.
	AliasFilenames []string

	// MIME types this lexer claims.
	MimeTypes []string

	// Regex matching is case-insensitive.
	CaseInsensitive bool

	// Regex matches all characters (i.e. "." also matches newlines).
	DotAll bool

	// Regex does not match across lines ($ matches EOL).
	//
	// Defaults to multiline.
	NotMultiline bool

	// Don't strip leading and trailing newlines from the input.
	// DontStripNL bool

	// Strip all leading and trailing whitespace from the input
	// StripAll bool

	// Make sure that the input ends with a newline. This
	// is required for some lexers that consume input linewise.
	EnsureNL bool

	// If given and greater than 0, expand tabs in the input.
	// TabSize int

	// Priority of lexer.
	//
	// If this is 0 it will be treated as a default of 1.
	Priority float32
}
// Token output to formatter.
type Token struct {
	Type  TokenType `json:"type"`
	Value string    `json:"value"`
}

// String returns the raw token text.
func (t *Token) String() string { return t.Value }

// GoString renders the token as Go syntax, for debugging.
func (t *Token) GoString() string { return fmt.Sprintf("&Token{%s, %q}", t.Type, t.Value) }

// Clone returns a clone of the Token.
func (t *Token) Clone() Token {
	return *t
}

// EOF is returned by lexers at the end of input.
var EOF Token
// TokeniseOptions contains options for tokenisers.
type TokeniseOptions struct {
	// State to start tokenisation in. Defaults to "root".
	State string

	// Nested tokenisation.
	Nested bool

	// If true, all EOLs are converted into LF
	// by replacing CRLF and CR sequences.
	EnsureLF bool
}
// A Lexer for tokenising source code.
type Lexer interface {
	// Config describing the features of the Lexer.
	Config() *Config

	// Tokenise returns an Iterator over tokens in text.
	Tokenise(options *TokeniseOptions, text string) (Iterator, error)
}
// Lexers is a slice of lexers sortable by name. | |||
type Lexers []Lexer | |||
func (l Lexers) Len() int { return len(l) } | |||
func (l Lexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] } | |||
func (l Lexers) Less(i, j int) bool { | |||
return strings.ToLower(l[i].Config().Name) < strings.ToLower(l[j].Config().Name) | |||
} | |||
// PrioritisedLexers is a slice of lexers sortable by priority. | |||
type PrioritisedLexers []Lexer | |||
func (l PrioritisedLexers) Len() int { return len(l) } | |||
func (l PrioritisedLexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] } | |||
func (l PrioritisedLexers) Less(i, j int) bool { | |||
ip := l[i].Config().Priority | |||
if ip == 0 { | |||
ip = 1 | |||
} | |||
jp := l[j].Config().Priority | |||
if jp == 0 { | |||
jp = 1 | |||
} | |||
return ip > jp | |||
} | |||
// Analyser determines how appropriate this lexer is for the given text.
// Higher scores indicate a better match (conventionally in the range 0..1 —
// confirm against individual lexer implementations).
type Analyser interface {
	AnalyseText(text string) float32
}
@@ -0,0 +1,40 @@ | |||
# Lexer tests | |||
The tests in this directory feed a known input `testdata/<name>.actual` into the lexer for `<name>` and check
that its output matches `<name>.expected`.
It is also possible to perform several tests on the same lexer `<name>` by placing known inputs `*.actual` into a
directory `testdata/<name>/`.
## Running the tests | |||
Run the tests as normal: | |||
```shell
go test ./lexers
```
## Update existing tests | |||
When you add a new test data file (`*.actual`), you need to regenerate all tests. That's how Chroma creates the `*.expected` test file based on the corresponding lexer. | |||
To regenerate all tests, type in your terminal: | |||
```shell
RECORD=true go test ./lexers
```
This first sets the `RECORD` environment variable to `true`. Then it runs `go test` on the `./lexers` directory of the Chroma project. | |||
(That environment variable tells Chroma it needs to output test data. After running `go test ./lexers` you can remove or reset that variable.) | |||
### Windows users | |||
Windows users will find that the `RECORD=true go test ./lexers` command fails in both the standard command prompt terminal and in PowerShell. | |||
Instead we have to perform both steps separately: | |||
- Set the `RECORD` environment variable to `true`. | |||
+ In the regular command prompt window, the `set` command sets an environment variable for the current session: `set RECORD=true`. See [this page](https://superuser.com/questions/212150/how-to-set-env-variable-in-windows-cmd-line) for more. | |||
+ In PowerShell, you can use the `$env:RECORD = 'true'` command for that. See [this article](https://mcpmag.com/articles/2019/03/28/environment-variables-in-powershell.aspx) for more. | |||
+ You can also make a persistent environment variable by hand in the Windows computer settings. See [this article](https://www.computerhope.com/issues/ch000549.htm) for how. | |||
- When the environment variable is set, run `go test ./lexers`.
Chroma will now regenerate the test files and print its results to the console window. |
@@ -0,0 +1,60 @@ | |||
package a | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// ABAP lexer (SAP's Advanced Business Application Programming language),
// registered lazily so the rules are only built on first use.
var Abap = internal.Register(MustNewLazyLexer(
	&Config{
		Name:            "ABAP",
		Aliases:         []string{"abap"},
		Filenames:       []string{"*.abap", "*.ABAP"},
		MimeTypes:       []string{"text/x-abap"},
		CaseInsensitive: true,
	},
	abapRules,
))
// abapRules builds the tokenisation rules for the ABAP lexer.
// States: "common" (whitespace/comments, Include-d by root),
// "variable-names", and "root" (keywords, builtins, literals, operators).
func abapRules() Rules {
	return Rules{
		// Whitespace and the three ABAP comment forms (full-line *, inline ", pragma ##).
		"common": {
			{`\s+`, Text, nil},
			{`^\*.*$`, CommentSingle, nil},
			{`\".*?\n`, CommentSingle, nil},
			{`##\w+`, CommentSpecial, nil},
		},
		// Field symbols (<FS>) and ordinary variable names.
		"variable-names": {
			{`<\S+>`, NameVariable, nil},
			{`\w[\w~]*(?:(\[\])|->\*)?`, NameVariable, nil},
		},
		"root": {
			Include("common"),
			// Call-style statements and definitions.
			{`CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION)`, Keyword, nil},
			{`(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|TRANSACTION|TRANSFORMATION))\b`, Keyword, nil},
			{`(FORM|PERFORM)(\s+)(\w+)`, ByGroups(Keyword, Text, NameFunction), nil},
			{`(PERFORM)(\s+)(\()(\w+)(\))`, ByGroups(Keyword, Text, Punctuation, NameVariable, Punctuation), nil},
			{`(MODULE)(\s+)(\S+)(\s+)(INPUT|OUTPUT)`, ByGroups(Keyword, Text, NameFunction, Text, Keyword), nil},
			{`(METHOD)(\s+)([\w~]+)`, ByGroups(Keyword, Text, NameFunction), nil},
			{`(\s+)([\w\-]+)([=\-]>)([\w\-~]+)`, ByGroups(Text, NameVariable, Operator, NameFunction), nil},
			{`(?<=(=|-)>)([\w\-~]+)(?=\()`, NameFunction, nil},
			// Text symbols: TEXT-001 (numeric) vs TEXT-abc (named).
			{`(TEXT)(-)(\d{3})`, ByGroups(Keyword, Punctuation, LiteralNumberInteger), nil},
			{`(TEXT)(-)(\w{3})`, ByGroups(Keyword, Punctuation, NameVariable), nil},
			// Hyphenated compound keywords.
			{`(ADD-CORRESPONDING|AUTHORITY-CHECK|CLASS-DATA|CLASS-EVENTS|CLASS-METHODS|CLASS-POOL|DELETE-ADJACENT|DIVIDE-CORRESPONDING|EDITOR-CALL|ENHANCEMENT-POINT|ENHANCEMENT-SECTION|EXIT-COMMAND|FIELD-GROUPS|FIELD-SYMBOLS|FUNCTION-POOL|INTERFACE-POOL|INVERTED-DATE|LOAD-OF-PROGRAM|LOG-POINT|MESSAGE-ID|MOVE-CORRESPONDING|MULTIPLY-CORRESPONDING|NEW-LINE|NEW-PAGE|NEW-SECTION|NO-EXTENSION|OUTPUT-LENGTH|PRINT-CONTROL|SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|SYNTAX-CHECK|SYSTEM-EXCEPTIONS|TYPE-POOL|TYPE-POOLS|NO-DISPLAY)\b`, Keyword, nil},
			// Multi-word keyword phrases. NOTE: the raw string below
			// intentionally spans a source line; the embedded newline is
			// part of the regex alternation ("TO \nTRANSACTION").
			{`(?<![-\>])(CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|(PUBLIC|PRIVATE|PROTECTED)\s+SECTION|(TYPE|LIKE)\s+((LINE\s+OF|REF\s+TO|(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|(GROUP|ORDER) BY|HAVING|SEPARATED BY|GET\s+(BADI|BIT|CURSOR|DATASET|LOCALE|PARAMETER|PF-STATUS|(PROPERTY|REFERENCE)\s+OF|RUN\s+TIME|TIME\s+(STAMP)?)?|SET\s+(BIT|BLANK\s+LINES|COUNTRY|CURSOR|DATASET|EXTENDED\s+CHECK|HANDLER|HOLD\s+DATA|LANGUAGE|LEFT\s+SCROLL-BOUNDARY|LOCALE|MARGIN|PARAMETER|PF-STATUS|PROPERTY\s+OF|RUN\s+TIME\s+(ANALYZER|CLOCK\s+RESOLUTION)|SCREEN|TITLEBAR|UPADTE\s+TASK\s+LOCAL|USER-COMMAND)|CONVERT\s+((INVERTED-)?DATE|TIME|TIME\s+STAMP|TEXT)|(CLOSE|OPEN)\s+(DATASET|CURSOR)|(TO|FROM)\s+(DATA BUFFER|INTERNAL TABLE|MEMORY ID|DATABASE|SHARED\s+(MEMORY|BUFFER))|DESCRIBE\s+(DISTANCE\s+BETWEEN|FIELD|LIST|TABLE)|FREE\s(MEMORY|OBJECT)?|PROCESS\s+(BEFORE\s+OUTPUT|AFTER\s+INPUT|ON\s+(VALUE-REQUEST|HELP-REQUEST))|AT\s+(LINE-SELECTION|USER-COMMAND|END\s+OF|NEW)|AT\s+SELECTION-SCREEN(\s+(ON(\s+(BLOCK|(HELP|VALUE)-REQUEST\s+FOR|END\s+OF|RADIOBUTTON\s+GROUP))?|OUTPUT))?|SELECTION-SCREEN:?\s+((BEGIN|END)\s+OF\s+((TABBED\s+)?BLOCK|LINE|SCREEN)|COMMENT|FUNCTION\s+KEY|INCLUDE\s+BLOCKS|POSITION|PUSHBUTTON|SKIP|ULINE)|LEAVE\s+(LIST-PROCESSING|PROGRAM|SCREEN|TO LIST-PROCESSING|TO 
TRANSACTION)(ENDING|STARTING)\s+AT|FORMAT\s+(COLOR|INTENSIFIED|INVERSE|HOTSPOT|INPUT|FRAMES|RESET)|AS\s+(CHECKBOX|SUBSCREEN|WINDOW)|WITH\s+(((NON-)?UNIQUE)?\s+KEY|FRAME)|(BEGIN|END)\s+OF|DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|COMPARING(\s+ALL\s+FIELDS)?|(INSERT|APPEND)(\s+INITIAL\s+LINE\s+(IN)?TO|\s+LINES\s+OF)?|IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|END-OF-(DEFINITION|PAGE|SELECTION)|WITH\s+FRAME(\s+TITLE)|(REPLACE|FIND)\s+((FIRST|ALL)\s+OCCURRENCES?\s+OF\s+)?(SUBSTRING|REGEX)?|MATCH\s+(LENGTH|COUNT|LINE|OFFSET)|(RESPECTING|IGNORING)\s+CASE|IN\s+UPDATE\s+TASK|(SOURCE|RESULT)\s+(XML)?|REFERENCE\s+INTO|AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|IF\s+FOUND|FOR\s+EVENT|INHERITING\s+FROM|LEAVE\s+TO\s+SCREEN|LOOP\s+AT\s+(SCREEN)?|LOWER\s+CASE|MATCHCODE\s+OBJECT|MODIF\s+ID|MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE|COMMON\s+PART)\b`, Keyword, nil},
			// Single-word keywords.
			{`(^|(?<=(\s|\.)))(ABBREVIATED|ABSTRACT|ADD|ALIASES|ALIGN|ALPHA|ASSERT|AS|ASSIGN(ING)?|AT(\s+FIRST)?|BACK|BLOCK|BREAK-POINT|CASE|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|COUNTRY|CURRENCY|DATA|DATE|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|DETAIL|DIRECTORY|DIVIDE|DO|DUMMY|ELSE(IF)?|ENDAT|ENDCASE|ENDCATCH|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|ENDIF|ENDINTERFACE|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|ENDWHILE|ENHANCEMENT|EVENTS|EXACT|EXCEPTIONS?|EXIT|EXPONENT|EXPORT|EXPORTING|EXTRACT|FETCH|FIELDS?|FOR|FORM|FORMAT|FREE|FROM|FUNCTION|HIDE|ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|LANGUAGE|LEAVE|LENGTH|LINES|LOAD|LOCAL|JOIN|KEY|NEXT|MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFIER|MODIFY|MOVE|MULTIPLY|NODES|NUMBER|OBLIGATORY|OBJECT|OF|OFF|ON|OTHERS|OVERLAY|PACK|PAD|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|PF\d\d|RAISE|RAISING|RANGES?|READ|RECEIVE|REDEFINITION|REFRESH|REJECT|REPORT|RESERVE|RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|REPLACE|SCROLL|SEARCH|SELECT|SHIFT|SIGN|SINGLE|SIZE|SKIP|SORT|SPLIT|STATICS|STOP|STYLE|SUBMATCHES|SUBMIT|SUBTRACT|SUM(?!\()|SUMMARY|SUMMING|SUPPLY|TABLE|TABLES|TIMESTAMP|TIMES?|TIMEZONE|TITLE|\??TO|TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|ULINE|UNDER|UNPACK|UPDATE|USING|VALUE|VALUES|VIA|VARYING|VARY|WAIT|WHEN|WHERE|WIDTH|WHILE|WITH|WINDOW|WRITE|XSD|ZERO)\b`, Keyword, nil},
			// Built-in functions (must be followed by an opening parenthesis).
			{`(abs|acos|asin|atan|boolc|boolx|bit_set|char_off|charlen|ceil|cmax|cmin|condense|contains|contains_any_of|contains_any_not_of|concat_lines_of|cos|cosh|count|count_any_of|count_any_not_of|dbmaxlen|distance|escape|exp|find|find_end|find_any_of|find_any_not_of|floor|frac|from_mixed|insert|lines|log|log10|match|matches|nmax|nmin|numofchar|repeat|replace|rescale|reverse|round|segment|shift_left|shift_right|sign|sin|sinh|sqrt|strlen|substring|substring_after|substring_from|substring_before|substring_to|tan|tanh|to_upper|to_lower|to_mixed|translate|trunc|xstrlen)(\()\b`, ByGroups(NameBuiltin, Punctuation), nil},
			{`&[0-9]`, Name, nil},
			{`[0-9]+`, LiteralNumberInteger, nil},
			// Comparison/logical operator words.
			{`(?<=(\s|.))(AND|OR|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b`, OperatorWord, nil},
			Include("variable-names"),
			// Symbolic operators, string literals and string templates.
			{`[?*<>=\-+&]`, Operator, nil},
			{`'(''|[^'])*'`, LiteralStringSingle, nil},
			{"`([^`])*`", LiteralStringSingle, nil},
			{`([|}])([^{}|]*?)([|{])`, ByGroups(Punctuation, LiteralStringSingle, Punctuation), nil},
			{`[/;:()\[\],.]`, Punctuation, nil},
			{`(!)(\w+)`, ByGroups(Operator, Name), nil},
		},
	}
}
@@ -0,0 +1,42 @@ | |||
package a | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Abnf lexer.
//
// Registered lazily: abnfRules is only evaluated the first time the lexer
// is actually used, keeping package init cheap.
var Abnf = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "ABNF",
		Aliases: []string{"abnf"},
		Filenames: []string{"*.abnf"},
		MimeTypes: []string{"text/x-abnf"},
	},
	abnfRules,
))
// abnfRules returns the tokenisation rules for ABNF (RFC 5234) grammars.
// Rule order matters: earlier entries win, so range forms ("-") are tried
// before the concatenation forms (".").
func abnfRules() Rules {
	return Rules{
		"root": {
			// Comment: ";" to end of line.
			{`;.*$`, CommentSingle, nil},
			// Quoted string, optionally with a %s/%i case-sensitivity prefix.
			{`(%[si])?"[^"]*"`, Literal, nil},
			// Binary / decimal / hex terminal values: value range first,
			// then the dotted concatenation form.
			{`%b[01]+\-[01]+\b`, Literal, nil},
			{`%b[01]+(\.[01]+)*\b`, Literal, nil},
			{`%d[0-9]+\-[0-9]+\b`, Literal, nil},
			{`%d[0-9]+(\.[0-9]+)*\b`, Literal, nil},
			{`%x[0-9a-fA-F]+\-[0-9a-fA-F]+\b`, Literal, nil},
			{`%x[0-9a-fA-F]+(\.[0-9a-fA-F]+)*\b`, Literal, nil},
			// Repetition operators: n*m, n*, bare n, bare *.
			{`\b[0-9]+\*[0-9]+`, Operator, nil},
			{`\b[0-9]+\*`, Operator, nil},
			{`\b[0-9]+`, Operator, nil},
			{`\*`, Operator, nil},
			// Core rule names defined by RFC 5234 appendix B.
			{Words(``, `\b`, `ALPHA`, `BIT`, `CHAR`, `CR`, `CRLF`, `CTL`, `DIGIT`, `DQUOTE`, `HEXDIG`, `HTAB`, `LF`, `LWSP`, `OCTET`, `SP`, `VCHAR`, `WSP`), Keyword, nil},
			// User-defined rule names.
			{`[a-zA-Z][a-zA-Z0-9-]+\b`, NameClass, nil},
			// Definition operators: "=/" must precede "=".
			{`(=/|=|/)`, Operator, nil},
			{`[\[\]()]`, Punctuation, nil},
			{`\s+`, Text, nil},
			// Fallback: any other single character.
			{`.`, Text, nil},
		},
	}
}
@@ -0,0 +1,43 @@ | |||
package a | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Actionscript lexer.
//
// Covers ActionScript 1/2; ActionScript 3 has its own lexer. Registered
// lazily so the rule table is only built on first use.
var Actionscript = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "ActionScript",
		Aliases: []string{"as", "actionscript"},
		Filenames: []string{"*.as"},
		MimeTypes: []string{"application/x-actionscript", "text/x-actionscript", "text/actionscript"},
		NotMultiline: true,
		DotAll: true,
	},
	actionscriptRules,
))
// actionscriptRules returns the tokenisation rules for ActionScript 1/2.
func actionscriptRules() Rules {
	return Rules{
		"root": {
			{`\s+`, Text, nil},
			// Comments.
			{`//.*?\n`, CommentSingle, nil},
			{`/\*.*?\*/`, CommentMultiline, nil},
			// Regex literal; tried before the operator rule so "/" is not
			// consumed as an operator first.
			{`/(\\\\|\\/|[^/\n])*/[gim]*`, LiteralStringRegex, nil},
			{`[~^*!%&<>|+=:;,/?\\-]+`, Operator, nil},
			{`[{}\[\]();.]+`, Punctuation, nil},
			// Statement keywords.
			{Words(``, `\b`, `case`, `default`, `for`, `each`, `in`, `while`, `do`, `break`, `return`, `continue`, `if`, `else`, `throw`, `try`, `catch`, `var`, `with`, `new`, `typeof`, `arguments`, `instanceof`, `this`, `switch`), Keyword, nil},
			// Declaration / modifier keywords.
			{Words(``, `\b`, `class`, `public`, `final`, `internal`, `native`, `override`, `private`, `protected`, `static`, `import`, `extends`, `implements`, `interface`, `intrinsic`, `return`, `super`, `dynamic`, `function`, `const`, `get`, `namespace`, `package`, `set`), KeywordDeclaration, nil},
			{`(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b`, KeywordConstant, nil},
			// Built-in Flash API class names. NOTE(review): some entries are
			// misspelled (`ConvultionFilter`, `DisplacmentMapFilter`) — this
			// appears inherited from the upstream rule source; confirm before
			// "fixing", as changing the list alters what gets highlighted.
			{Words(``, `\b`, `Accessibility`, `AccessibilityProperties`, `ActionScriptVersion`, `ActivityEvent`, `AntiAliasType`, `ApplicationDomain`, `AsBroadcaster`, `Array`, `AsyncErrorEvent`, `AVM1Movie`, `BevelFilter`, `Bitmap`, `BitmapData`, `BitmapDataChannel`, `BitmapFilter`, `BitmapFilterQuality`, `BitmapFilterType`, `BlendMode`, `BlurFilter`, `Boolean`, `ByteArray`, `Camera`, `Capabilities`, `CapsStyle`, `Class`, `Color`, `ColorMatrixFilter`, `ColorTransform`, `ContextMenu`, `ContextMenuBuiltInItems`, `ContextMenuEvent`, `ContextMenuItem`, `ConvultionFilter`, `CSMSettings`, `DataEvent`, `Date`, `DefinitionError`, `DeleteObjectSample`, `Dictionary`, `DisplacmentMapFilter`, `DisplayObject`, `DisplacmentMapFilterMode`, `DisplayObjectContainer`, `DropShadowFilter`, `Endian`, `EOFError`, `Error`, `ErrorEvent`, `EvalError`, `Event`, `EventDispatcher`, `EventPhase`, `ExternalInterface`, `FileFilter`, `FileReference`, `FileReferenceList`, `FocusDirection`, `FocusEvent`, `Font`, `FontStyle`, `FontType`, `FrameLabel`, `FullScreenEvent`, `Function`, `GlowFilter`, `GradientBevelFilter`, `GradientGlowFilter`, `GradientType`, `Graphics`, `GridFitType`, `HTTPStatusEvent`, `IBitmapDrawable`, `ID3Info`, `IDataInput`, `IDataOutput`, `IDynamicPropertyOutputIDynamicPropertyWriter`, `IEventDispatcher`, `IExternalizable`, `IllegalOperationError`, `IME`, `IMEConversionMode`, `IMEEvent`, `int`, `InteractiveObject`, `InterpolationMethod`, `InvalidSWFError`, `InvokeEvent`, `IOError`, `IOErrorEvent`, `JointStyle`, `Key`, `Keyboard`, `KeyboardEvent`, `KeyLocation`, `LineScaleMode`, `Loader`, `LoaderContext`, `LoaderInfo`, `LoadVars`, `LocalConnection`, `Locale`, `Math`, `Matrix`, `MemoryError`, `Microphone`, `MorphShape`, `Mouse`, `MouseEvent`, `MovieClip`, `MovieClipLoader`, `Namespace`, `NetConnection`, `NetStatusEvent`, `NetStream`, `NewObjectSample`, `Number`, `Object`, `ObjectEncoding`, `PixelSnapping`, `Point`, `PrintJob`, `PrintJobOptions`, `PrintJobOrientation`, `ProgressEvent`, 
`Proxy`, `QName`, `RangeError`, `Rectangle`, `ReferenceError`, `RegExp`, `Responder`, `Sample`, `Scene`, `ScriptTimeoutError`, `Security`, `SecurityDomain`, `SecurityError`, `SecurityErrorEvent`, `SecurityPanel`, `Selection`, `Shape`, `SharedObject`, `SharedObjectFlushStatus`, `SimpleButton`, `Socket`, `Sound`, `SoundChannel`, `SoundLoaderContext`, `SoundMixer`, `SoundTransform`, `SpreadMethod`, `Sprite`, `StackFrame`, `StackOverflowError`, `Stage`, `StageAlign`, `StageDisplayState`, `StageQuality`, `StageScaleMode`, `StaticText`, `StatusEvent`, `String`, `StyleSheet`, `SWFVersion`, `SyncEvent`, `SyntaxError`, `System`, `TextColorType`, `TextField`, `TextFieldAutoSize`, `TextFieldType`, `TextFormat`, `TextFormatAlign`, `TextLineMetrics`, `TextRenderer`, `TextSnapshot`, `Timer`, `TimerEvent`, `Transform`, `TypeError`, `uint`, `URIError`, `URLLoader`, `URLLoaderDataFormat`, `URLRequest`, `URLRequestHeader`, `URLRequestMethod`, `URLStream`, `URLVariabeles`, `VerifyError`, `Video`, `XML`, `XMLDocument`, `XMLList`, `XMLNode`, `XMLNodeType`, `XMLSocket`, `XMLUI`), NameBuiltin, nil},
			// Global built-in functions.
			{Words(``, `\b`, `decodeURI`, `decodeURIComponent`, `encodeURI`, `escape`, `eval`, `isFinite`, `isNaN`, `isXMLName`, `clearInterval`, `fscommand`, `getTimer`, `getURL`, `getVersion`, `parseFloat`, `parseInt`, `setInterval`, `trace`, `updateAfterEvent`, `unescape`), NameFunction, nil},
			// Any other identifier.
			{`[$a-zA-Z_]\w*`, NameOther, nil},
			// Numbers: float before hex before plain integer.
			{`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil},
			{`0x[0-9a-f]+`, LiteralNumberHex, nil},
			{`[0-9]+`, LiteralNumberInteger, nil},
			// Strings with backslash escapes.
			{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
			{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
		},
	}
}
@@ -0,0 +1,60 @@ | |||
package a | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Actionscript 3 lexer.
//
// Shares the *.as extension with the ActionScript 1/2 lexer; content-based
// selection is left to the caller. Registered lazily.
var Actionscript3 = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "ActionScript 3",
		Aliases: []string{"as3", "actionscript3"},
		Filenames: []string{"*.as"},
		MimeTypes: []string{"application/x-actionscript3", "text/x-actionscript3", "text/actionscript3"},
		DotAll: true,
	},
	actionscript3Rules,
))
// actionscript3Rules returns the tokenisation rules for ActionScript 3.
//
// States: "root" handles everything except function parameter lists, which
// go through "funcparams" -> "defval" (default values) and "type" (return
// type annotation).
func actionscript3Rules() Rules {
	return Rules{
		"root": {
			{`\s+`, Text, nil},
			// Function declaration; parameters handled by the funcparams state.
			{`(function\s+)([$a-zA-Z_]\w*)(\s*)(\()`, ByGroups(KeywordDeclaration, NameFunction, Text, Operator), Push("funcparams")},
			// var/const declaration with a type annotation, including
			// parameterised Vector.<T> types.
			{`(var|const)(\s+)([$a-zA-Z_]\w*)(\s*)(:)(\s*)([$a-zA-Z_]\w*(?:\.<\w+>)?)`, ByGroups(KeywordDeclaration, Text, Name, Text, Punctuation, Text, KeywordType), nil},
			{`(import|package)(\s+)((?:[$a-zA-Z_]\w*|\.)+)(\s*)`, ByGroups(Keyword, Text, NameNamespace, Text), nil},
			{`(new)(\s+)([$a-zA-Z_]\w*(?:\.<\w+>)?)(\s*)(\()`, ByGroups(Keyword, Text, KeywordType, Text, Operator), nil},
			// Comments, then regex literals (before the operator fallback).
			{`//.*?\n`, CommentSingle, nil},
			{`/\*.*?\*/`, CommentMultiline, nil},
			{`/(\\\\|\\/|[^\n])*/[gisx]*`, LiteralStringRegex, nil},
			// Member access.
			{`(\.)([$a-zA-Z_]\w*)`, ByGroups(Operator, NameAttribute), nil},
			// Statement and declaration keywords.
			{`(case|default|for|each|in|while|do|break|return|continue|if|else|throw|try|catch|with|new|typeof|arguments|instanceof|this|switch|import|include|as|is)\b`, Keyword, nil},
			{`(class|public|final|internal|native|override|private|protected|static|import|extends|implements|interface|intrinsic|return|super|dynamic|function|const|get|namespace|package|set)\b`, KeywordDeclaration, nil},
			{`(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b`, KeywordConstant, nil},
			{`(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|unescape)\b`, NameFunction, nil},
			{`[$a-zA-Z_]\w*`, Name, nil},
			// Numbers: float before hex before integer.
			{`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil},
			{`0x[0-9a-f]+`, LiteralNumberHex, nil},
			{`[0-9]+`, LiteralNumberInteger, nil},
			{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
			{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
			{`[~^*!%&<>|+=:;,/?\\{}\[\]().-]+`, Operator, nil},
		},
		"funcparams": {
			{`\s+`, Text, nil},
			// One parameter: optional "...", name, ":", type (or "*").
			{`(\s*)(\.\.\.)?([$a-zA-Z_]\w*)(\s*)(:)(\s*)([$a-zA-Z_]\w*(?:\.<\w+>)?|\*)(\s*)`, ByGroups(Text, Punctuation, Name, Text, Operator, Text, KeywordType, Text), Push("defval")},
			// Closing paren hands over to the return-type state.
			{`\)`, Operator, Push("type")},
		},
		"type": {
			// Pop(2) leaves both "type" and the original "funcparams".
			{`(\s*)(:)(\s*)([$a-zA-Z_]\w*(?:\.<\w+>)?|\*)`, ByGroups(Text, Operator, Text, KeywordType), Pop(2)},
			{`\s+`, Text, Pop(2)},
			Default(Pop(2)),
		},
		"defval": {
			// Default value expression, re-lexed with the root rules.
			{`(=)(\s*)([^(),]+)(\s*)(,?)`, ByGroups(Operator, Text, UsingSelf("root"), Text, Operator), Pop(1)},
			{`,`, Operator, Pop(1)},
			Default(Pop(1)),
		},
	}
}
@@ -0,0 +1,118 @@ | |||
package a | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Ada lexer.
//
// Case-insensitive, per the Ada language definition. Registered lazily.
var Ada = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "Ada",
		Aliases: []string{"ada", "ada95", "ada2005"},
		Filenames: []string{"*.adb", "*.ads", "*.ada"},
		MimeTypes: []string{"text/x-ada"},
		CaseInsensitive: true,
	},
	adaRules,
))
func adaRules() Rules { | |||
return Rules{ | |||
"root": { | |||
{`[^\S\n]+`, Text, nil}, | |||
{`--.*?\n`, CommentSingle, nil}, | |||
{`[^\S\n]+`, Text, nil}, | |||
{`function|procedure|entry`, KeywordDeclaration, Push("subprogram")}, | |||
{`(subtype|type)(\s+)(\w+)`, ByGroups(KeywordDeclaration, Text, KeywordType), Push("type_def")}, | |||
{`task|protected`, KeywordDeclaration, nil}, | |||
{`(subtype)(\s+)`, ByGroups(KeywordDeclaration, Text), nil}, | |||
{`(end)(\s+)`, ByGroups(KeywordReserved, Text), Push("end")}, | |||
{`(pragma)(\s+)(\w+)`, ByGroups(KeywordReserved, Text, CommentPreproc), nil}, | |||
{`(true|false|null)\b`, KeywordConstant, nil}, | |||
{Words(``, `\b`, `Address`, `Byte`, `Boolean`, `Character`, `Controlled`, `Count`, `Cursor`, `Duration`, `File_Mode`, `File_Type`, `Float`, `Generator`, `Integer`, `Long_Float`, `Long_Integer`, `Long_Long_Float`, `Long_Long_Integer`, `Natural`, `Positive`, `Reference_Type`, `Short_Float`, `Short_Integer`, `Short_Short_Float`, `Short_Short_Integer`, `String`, `Wide_Character`, `Wide_String`), KeywordType, nil}, | |||
{`(and(\s+then)?|in|mod|not|or(\s+else)|rem)\b`, OperatorWord, nil}, | |||
{`generic|private`, KeywordDeclaration, nil}, | |||
{`package`, KeywordDeclaration, Push("package")}, | |||
{`array\b`, KeywordReserved, Push("array_def")}, | |||
{`(with|use)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")}, | |||
{`(\w+)(\s*)(:)(\s*)(constant)`, ByGroups(NameConstant, Text, Punctuation, Text, KeywordReserved), nil}, | |||
{`<<\w+>>`, NameLabel, nil}, | |||
{`(\w+)(\s*)(:)(\s*)(declare|begin|loop|for|while)`, ByGroups(NameLabel, Text, Punctuation, Text, KeywordReserved), nil}, | |||
{Words(`\b`, `\b`, `abort`, `abs`, `abstract`, `accept`, `access`, `aliased`, `all`, `array`, `at`, `begin`, `body`, `case`, `constant`, `declare`, `delay`, `delta`, `digits`, `do`, `else`, `elsif`, `end`, `entry`, `exception`, `exit`, `interface`, `for`, `goto`, `if`, `is`, `limited`, `loop`, `new`, `null`, `of`, `or`, `others`, `out`, `overriding`, `pragma`, `protected`, `raise`, `range`, `record`, `renames`, `requeue`, `return`, `reverse`, `select`, `separate`, `subtype`, `synchronized`, `task`, `tagged`, `terminate`, `then`, `type`, `until`, `when`, `while`, `xor`), KeywordReserved, nil}, | |||
{`"[^"]*"`, LiteralString, nil}, | |||
Include("attribute"), | |||
Include("numbers"), | |||
{`'[^']'`, LiteralStringChar, nil}, | |||
{`(\w+)(\s*|[(,])`, ByGroups(Name, UsingSelf("root")), nil}, | |||
{`(<>|=>|:=|[()|:;,.'])`, Punctuation, nil}, | |||
{`[*<>+=/&-]`, Operator, nil}, | |||
{`\n+`, Text, nil}, | |||
}, | |||
"numbers": { | |||
{`[0-9_]+#[0-9a-f]+#`, LiteralNumberHex, nil}, | |||
{`[0-9_]+\.[0-9_]*`, LiteralNumberFloat, nil}, | |||
{`[0-9_]+`, LiteralNumberInteger, nil}, | |||
}, | |||
"attribute": { | |||
{`(')(\w+)`, ByGroups(Punctuation, NameAttribute), nil}, | |||
}, | |||
"subprogram": { | |||
{`\(`, Punctuation, Push("#pop", "formal_part")}, | |||
{`;`, Punctuation, Pop(1)}, | |||
{`is\b`, KeywordReserved, Pop(1)}, | |||
{`"[^"]+"|\w+`, NameFunction, nil}, | |||
Include("root"), | |||
}, | |||
"end": { | |||
{`(if|case|record|loop|select)`, KeywordReserved, nil}, | |||
{`"[^"]+"|[\w.]+`, NameFunction, nil}, | |||
{`\s+`, Text, nil}, | |||
{`;`, Punctuation, Pop(1)}, | |||
}, | |||
"type_def": { | |||
{`;`, Punctuation, Pop(1)}, | |||
{`\(`, Punctuation, Push("formal_part")}, | |||
{`with|and|use`, KeywordReserved, nil}, | |||
{`array\b`, KeywordReserved, Push("#pop", "array_def")}, | |||
{`record\b`, KeywordReserved, Push("record_def")}, | |||
{`(null record)(;)`, ByGroups(KeywordReserved, Punctuation), Pop(1)}, | |||
Include("root"), | |||
}, | |||
"array_def": { | |||
{`;`, Punctuation, Pop(1)}, | |||
{`(\w+)(\s+)(range)`, ByGroups(KeywordType, Text, KeywordReserved), nil}, | |||
Include("root"), | |||
}, | |||
"record_def": { | |||
{`end record`, KeywordReserved, Pop(1)}, | |||
Include("root"), | |||
}, | |||
"import": { | |||
{`[\w.]+`, NameNamespace, Pop(1)}, | |||
Default(Pop(1)), | |||
}, | |||
"formal_part": { | |||
{`\)`, Punctuation, Pop(1)}, | |||
{`\w+`, NameVariable, nil}, | |||
{`,|:[^=]`, Punctuation, nil}, | |||
{`(in|not|null|out|access)\b`, KeywordReserved, nil}, | |||
Include("root"), | |||
}, | |||
"package": { | |||
{`body`, KeywordDeclaration, nil}, | |||
{`is\s+new|renames`, KeywordReserved, nil}, | |||
{`is`, KeywordReserved, Pop(1)}, | |||
{`;`, Punctuation, Pop(1)}, | |||
{`\(`, Punctuation, Push("package_instantiation")}, | |||
{`([\w.]+)`, NameClass, nil}, | |||
Include("root"), | |||
}, | |||
"package_instantiation": { | |||
{`("[^"]+"|\w+)(\s+)(=>)`, ByGroups(NameVariable, Text, Punctuation), nil}, | |||
{`[\w.\'"]`, Text, nil}, | |||
{`\)`, Punctuation, Pop(1)}, | |||
Include("root"), | |||
}, | |||
} | |||
} |
@@ -0,0 +1,47 @@ | |||
package a | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Al lexer.
//
// Microsoft Dynamics 365 Business Central AL language. Case-insensitive
// by definition. Registered lazily.
var Al = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "AL",
		Aliases: []string{"al"},
		Filenames: []string{"*.al", "*.dal"},
		MimeTypes: []string{"text/x-al"},
		DotAll: true,
		CaseInsensitive: true,
	},
	alRules,
))
// https://github.com/microsoft/AL/blob/master/grammar/alsyntax.tmlanguage

// alRules returns the tokenisation rules for AL, ported from the
// tmlanguage grammar linked above.
func alRules() Rules {
	return Rules{
		"root": {
			{`\s+`, TextWhitespace, nil},
			// Block comment. NOTE(review): `\\*` permits stray backslashes
			// before the closing "*/"; this looks like a port artifact of the
			// tmlanguage pattern but is harmless — confirm against upstream.
			{`(?s)\/\*.*?\\*\*\/`, CommentMultiline, nil},
			{`(?s)//.*?\n`, CommentSingle, nil},
			// Double-quoted identifiers (e.g. "No.") are emitted as plain Text.
			{`\"([^\"])*\"`, Text, nil},
			// Single-quoted string literals.
			{`'([^'])*'`, LiteralString, nil},
			// Statement keywords.
			{`\b(?i:(ARRAY|ASSERTERROR|BEGIN|BREAK|CASE|DO|DOWNTO|ELSE|END|EVENT|EXIT|FOR|FOREACH|FUNCTION|IF|IMPLEMENTS|IN|INDATASET|INTERFACE|INTERNAL|LOCAL|OF|PROCEDURE|PROGRAM|PROTECTED|REPEAT|RUNONCLIENT|SECURITYFILTERING|SUPPRESSDISPOSE|TEMPORARY|THEN|TO|TRIGGER|UNTIL|VAR|WHILE|WITH|WITHEVENTS))\b`, Keyword, nil},
			// Word-form operators.
			{`\b(?i:(AND|DIV|MOD|NOT|OR|XOR))\b`, OperatorWord, nil},
			// Filter/aggregate keywords used in table relations and CalcFormulas.
			{`\b(?i:(AVERAGE|CONST|COUNT|EXIST|FIELD|FILTER|LOOKUP|MAX|MIN|ORDER|SORTING|SUM|TABLEDATA|UPPERLIMIT|WHERE|ASCENDING|DESCENDING))\b`, Keyword, nil},
			// Object kinds.
			{`\b(?i:(CODEUNIT|PAGE|PAGEEXTENSION|PAGECUSTOMIZATION|DOTNET|ENUM|ENUMEXTENSION|VALUE|QUERY|REPORT|TABLE|TABLEEXTENSION|XMLPORT|PROFILE|CONTROLADDIN|REPORTEXTENSION|INTERFACE|PERMISSIONSET|PERMISSIONSETEXTENSION|ENTITLEMENT))\b`, Keyword, nil},
			// Built-in data types.
			{`\b(?i:(Action|Array|Automation|BigInteger|BigText|Blob|Boolean|Byte|Char|ClientType|Code|Codeunit|CompletionTriggerErrorLevel|ConnectionType|Database|DataClassification|DataScope|Date|DateFormula|DateTime|Decimal|DefaultLayout|Dialog|Dictionary|DotNet|DotNetAssembly|DotNetTypeDeclaration|Duration|Enum|ErrorInfo|ErrorType|ExecutionContext|ExecutionMode|FieldClass|FieldRef|FieldType|File|FilterPageBuilder|Guid|InStream|Integer|Joker|KeyRef|List|ModuleDependencyInfo|ModuleInfo|None|Notification|NotificationScope|ObjectType|Option|OutStream|Page|PageResult|Query|Record|RecordId|RecordRef|Report|ReportFormat|SecurityFilter|SecurityFiltering|Table|TableConnectionType|TableFilter|TestAction|TestField|TestFilterField|TestPage|TestPermissions|TestRequestPage|Text|TextBuilder|TextConst|TextEncoding|Time|TransactionModel|TransactionType|Variant|Verbosity|Version|XmlPort|HttpContent|HttpHeaders|HttpClient|HttpRequestMessage|HttpResponseMessage|JsonToken|JsonValue|JsonArray|JsonObject|View|Views|XmlAttribute|XmlAttributeCollection|XmlComment|XmlCData|XmlDeclaration|XmlDocument|XmlDocumentType|XmlElement|XmlNamespaceManager|XmlNameTable|XmlNode|XmlNodeList|XmlProcessingInstruction|XmlReadOptions|XmlText|XmlWriteOptions|WebServiceActionContext|WebServiceActionResultCode|SessionSettings))\b`, Keyword, nil},
			// Comparison operators. NOTE(review): the trailing `\b?` (optional
			// word boundary) is a no-op — presumably another port artifact.
			{`\b([<>]=|<>|<|>)\b`, Operator, nil},
			{`\b(\-|\+|\/|\*)\b`, Operator, nil},
			{`\s*(\:=|\+=|-=|\/=|\*=)\s*?`, Operator, nil},
			// Object-structure keywords (layout, actions, fields, ...).
			{`\b(?i:(ADD|ADDFIRST|ADDLAST|ADDAFTER|ADDBEFORE|ACTION|ACTIONS|AREA|ASSEMBLY|CHARTPART|CUEGROUP|CUSTOMIZES|COLUMN|DATAITEM|DATASET|ELEMENTS|EXTENDS|FIELD|FIELDGROUP|FIELDATTRIBUTE|FIELDELEMENT|FIELDGROUPS|FIELDS|FILTER|FIXED|GRID|GROUP|MOVEAFTER|MOVEBEFORE|KEY|KEYS|LABEL|LABELS|LAYOUT|MODIFY|MOVEFIRST|MOVELAST|MOVEBEFORE|MOVEAFTER|PART|REPEATER|USERCONTROL|REQUESTPAGE|SCHEMA|SEPARATOR|SYSTEMPART|TABLEELEMENT|TEXTATTRIBUTE|TEXTELEMENT|TYPE))\b`, Keyword, nil},
			// NOTE(review): `[(\.\.)&\|]` is a character class matching any one
			// of "(", ".", ")", "&", "|" — it does NOT match the ".." range
			// token as a unit; confirm intent against the upstream grammar.
			{`\s*[(\.\.)&\|]\s*`, Operator, nil},
			// Numeric literals (hex, decimal, float, with C-style suffixes).
			{`\b((0(x|X)[0-9a-fA-F]*)|(([0-9]+\.?[0-9]*)|(\.[0-9]+))((e|E)(\+|-)?[0-9]+)?)(L|l|UL|ul|u|U|F|f|ll|LL|ull|ULL)?\b`, LiteralNumber, nil},
			{`[;:,]`, Punctuation, nil},
			// Preprocessor directives.
			{`#[ \t]*(if|else|elif|endif|define|undef|region|endregion|pragma)\b.*?\n`, CommentPreproc, nil},
			{`\w+`, Text, nil},
			{`.`, Text, nil},
		},
	}
}
@@ -0,0 +1,46 @@ | |||
package a | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Angular2 lexer.
//
// Tokenises Angular template syntax (interpolations, property/event
// bindings, template attributes). No filenames/MIME types of its own: it
// is normally used as a delegate inside an HTML lexer.
var Angular2 = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "Angular2",
		Aliases: []string{"ng2"},
		Filenames: []string{},
		MimeTypes: []string{},
	},
	angular2Rules,
))
func angular2Rules() Rules { | |||
return Rules{ | |||
"root": { | |||
{`[^{([*#]+`, Other, nil}, | |||
{`(\{\{)(\s*)`, ByGroups(CommentPreproc, Text), Push("ngExpression")}, | |||
{`([([]+)([\w:.-]+)([\])]+)(\s*)(=)(\s*)`, ByGroups(Punctuation, NameAttribute, Punctuation, Text, Operator, Text), Push("attr")}, | |||
{`([([]+)([\w:.-]+)([\])]+)(\s*)`, ByGroups(Punctuation, NameAttribute, Punctuation, Text), nil}, | |||
{`([*#])([\w:.-]+)(\s*)(=)(\s*)`, ByGroups(Punctuation, NameAttribute, Punctuation, Operator), Push("attr")}, | |||
{`([*#])([\w:.-]+)(\s*)`, ByGroups(Punctuation, NameAttribute, Punctuation), nil}, | |||
}, | |||
"ngExpression": { | |||
{`\s+(\|\s+)?`, Text, nil}, | |||
{`\}\}`, CommentPreproc, Pop(1)}, | |||
{`:?(true|false)`, LiteralStringBoolean, nil}, | |||
{`:?"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, | |||
{`:?'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, | |||
{`[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil}, | |||
{`[a-zA-Z][\w-]*(\(.*\))?`, NameVariable, nil}, | |||
{`\.[\w-]+(\(.*\))?`, NameVariable, nil}, | |||
{`(\?)(\s*)([^}\s]+)(\s*)(:)(\s*)([^}\s]+)(\s*)`, ByGroups(Operator, Text, LiteralString, Text, Operator, Text, LiteralString, Text), nil}, | |||
}, | |||
"attr": { | |||
{`".*?"`, LiteralString, Pop(1)}, | |||
{`'.*?'`, LiteralString, Pop(1)}, | |||
{`[^\s>]+`, LiteralString, Pop(1)}, | |||
}, | |||
} | |||
} |
@@ -0,0 +1,105 @@ | |||
package a | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// ANTLR lexer.
//
// Tokenises ANTLR grammar files. No filenames/MIME types registered: *.g
// is ambiguous, so selection is left to the caller.
var ANTLR = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "ANTLR",
		Aliases: []string{"antlr"},
		Filenames: []string{},
		MimeTypes: []string{},
	},
	antlrRules,
))
func antlrRules() Rules { | |||
return Rules{ | |||
"whitespace": { | |||
{`\s+`, TextWhitespace, nil}, | |||
}, | |||
"comments": { | |||
{`//.*$`, Comment, nil}, | |||
{`/\*(.|\n)*?\*/`, Comment, nil}, | |||
}, | |||
"root": { | |||
Include("whitespace"), | |||
Include("comments"), | |||
{`(lexer|parser|tree)?(\s*)(grammar\b)(\s*)([A-Za-z]\w*)(;)`, ByGroups(Keyword, TextWhitespace, Keyword, TextWhitespace, NameClass, Punctuation), nil}, | |||
{`options\b`, Keyword, Push("options")}, | |||
{`tokens\b`, Keyword, Push("tokens")}, | |||
{`(scope)(\s*)([A-Za-z]\w*)(\s*)(\{)`, ByGroups(Keyword, TextWhitespace, NameVariable, TextWhitespace, Punctuation), Push("action")}, | |||
{`(catch|finally)\b`, Keyword, Push("exception")}, | |||
{`(@[A-Za-z]\w*)(\s*)(::)?(\s*)([A-Za-z]\w*)(\s*)(\{)`, ByGroups(NameLabel, TextWhitespace, Punctuation, TextWhitespace, NameLabel, TextWhitespace, Punctuation), Push("action")}, | |||
{`((?:protected|private|public|fragment)\b)?(\s*)([A-Za-z]\w*)(!)?`, ByGroups(Keyword, TextWhitespace, NameLabel, Punctuation), Push("rule-alts", "rule-prelims")}, | |||
}, | |||
"exception": { | |||
{`\n`, TextWhitespace, Pop(1)}, | |||
{`\s`, TextWhitespace, nil}, | |||
Include("comments"), | |||
{`\[`, Punctuation, Push("nested-arg-action")}, | |||
{`\{`, Punctuation, Push("action")}, | |||
}, | |||
"rule-prelims": { | |||
Include("whitespace"), | |||
Include("comments"), | |||
{`returns\b`, Keyword, nil}, | |||
{`\[`, Punctuation, Push("nested-arg-action")}, | |||
{`\{`, Punctuation, Push("action")}, | |||
{`(throws)(\s+)([A-Za-z]\w*)`, ByGroups(Keyword, TextWhitespace, NameLabel), nil}, | |||
{`(,)(\s*)([A-Za-z]\w*)`, ByGroups(Punctuation, TextWhitespace, NameLabel), nil}, | |||
{`options\b`, Keyword, Push("options")}, | |||
{`(scope)(\s+)(\{)`, ByGroups(Keyword, TextWhitespace, Punctuation), Push("action")}, | |||
{`(scope)(\s+)([A-Za-z]\w*)(\s*)(;)`, ByGroups(Keyword, TextWhitespace, NameLabel, TextWhitespace, Punctuation), nil}, | |||
{`(@[A-Za-z]\w*)(\s*)(\{)`, ByGroups(NameLabel, TextWhitespace, Punctuation), Push("action")}, | |||
{`:`, Punctuation, Pop(1)}, | |||
}, | |||
"rule-alts": { | |||
Include("whitespace"), | |||
Include("comments"), | |||
{`options\b`, Keyword, Push("options")}, | |||
{`:`, Punctuation, nil}, | |||
{`'(\\\\|\\'|[^'])*'`, LiteralString, nil}, | |||
{`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, | |||
{`<<([^>]|>[^>])>>`, LiteralString, nil}, | |||
{`\$?[A-Z_]\w*`, NameConstant, nil}, | |||
{`\$?[a-z_]\w*`, NameVariable, nil}, | |||
{`(\+|\||->|=>|=|\(|\)|\.\.|\.|\?|\*|\^|!|\#|~)`, Operator, nil}, | |||
{`,`, Punctuation, nil}, | |||
{`\[`, Punctuation, Push("nested-arg-action")}, | |||
{`\{`, Punctuation, Push("action")}, | |||
{`;`, Punctuation, Pop(1)}, | |||
}, | |||
"tokens": { | |||
Include("whitespace"), | |||
Include("comments"), | |||
{`\{`, Punctuation, nil}, | |||
{`([A-Z]\w*)(\s*)(=)?(\s*)(\'(?:\\\\|\\\'|[^\']*)\')?(\s*)(;)`, ByGroups(NameLabel, TextWhitespace, Punctuation, TextWhitespace, LiteralString, TextWhitespace, Punctuation), nil}, | |||
{`\}`, Punctuation, Pop(1)}, | |||
}, | |||
"options": { | |||
Include("whitespace"), | |||
Include("comments"), | |||
{`\{`, Punctuation, nil}, | |||
{`([A-Za-z]\w*)(\s*)(=)(\s*)([A-Za-z]\w*|\'(?:\\\\|\\\'|[^\']*)\'|[0-9]+|\*)(\s*)(;)`, ByGroups(NameVariable, TextWhitespace, Punctuation, TextWhitespace, Text, TextWhitespace, Punctuation), nil}, | |||
{`\}`, Punctuation, Pop(1)}, | |||
}, | |||
"action": { | |||
{`([^${}\'"/\\]+|"(\\\\|\\"|[^"])*"|'(\\\\|\\'|[^'])*'|//.*$\n?|/\*(.|\n)*?\*/|/(?!\*)(\\\\|\\/|[^/])*/|\\(?!%)|/)+`, Other, nil}, | |||
{`(\\)(%)`, ByGroups(Punctuation, Other), nil}, | |||
{`(\$[a-zA-Z]+)(\.?)(text|value)?`, ByGroups(NameVariable, Punctuation, NameProperty), nil}, | |||
{`\{`, Punctuation, Push()}, | |||
{`\}`, Punctuation, Pop(1)}, | |||
}, | |||
"nested-arg-action": { | |||
{`([^$\[\]\'"/]+|"(\\\\|\\"|[^"])*"|'(\\\\|\\'|[^'])*'|//.*$\n?|/\*(.|\n)*?\*/|/(?!\*)(\\\\|\\/|[^/])*/|/)+`, Other, nil}, | |||
{`\[`, Punctuation, Push()}, | |||
{`\]`, Punctuation, Pop(1)}, | |||
{`(\$[a-zA-Z]+)(\.?)(text|value)?`, ByGroups(NameVariable, Punctuation, NameProperty), nil}, | |||
{`(\\\\|\\\]|\\\[|[^\[\]])+`, Other, nil}, | |||
}, | |||
} | |||
} |
@@ -0,0 +1,42 @@ | |||
package a | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Apacheconf lexer.
//
// Apache HTTP Server configuration files; directives are case-insensitive.
// Registered lazily.
var Apacheconf = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "ApacheConf",
		Aliases: []string{"apacheconf", "aconf", "apache"},
		Filenames: []string{".htaccess", "apache.conf", "apache2.conf"},
		MimeTypes: []string{"text/x-apacheconf"},
		CaseInsensitive: true,
	},
	apacheconfRules,
))
// apacheconfRules returns the tokenisation rules for Apache configuration.
//
// A directive name switches to the "value" state, which consumes the rest
// of the (possibly backslash-continued) line.
func apacheconfRules() Rules {
	return Rules{
		"root": {
			{`\s+`, Text, nil},
			{`(#.*?)$`, Comment, nil},
			// Section tags like <Directory /var/www>.
			{`(<[^\s>]+)(?:(\s+)(.*?))?(>)`, ByGroups(NameTag, Text, LiteralString, NameTag), nil},
			// Directive name followed by its arguments.
			{`([a-z]\w*)(\s+)`, ByGroups(NameBuiltin, Text), Push("value")},
			{`\.+`, Text, nil},
		},
		"value": {
			// Backslash-newline continues the value onto the next line.
			{`\\\n`, Text, nil},
			// End of line ends the value.
			{`$`, Text, Pop(1)},
			{`\\`, Text, nil},
			{`[^\S\n]+`, Text, nil},
			// IPv4 address, optionally with a CIDR suffix.
			{`\d+\.\d+\.\d+\.\d+(?:/\d+)?`, LiteralNumber, nil},
			{`\d+`, LiteralNumber, nil},
			// Filesystem/URL paths.
			{`/([a-z0-9][\w./-]+)`, LiteralStringOther, nil},
			// Common directive argument keywords (On/Off, log levels, ...).
			{`(on|off|none|any|all|double|email|dns|min|minimal|os|productonly|full|emerg|alert|crit|error|warn|notice|info|debug|registry|script|inetd|standalone|user|group)\b`, Keyword, nil},
			{`"([^"\\]*(?:\\.[^"\\]*)*)"`, LiteralStringDouble, nil},
			{`[^\s"\\]+`, Text, nil},
		},
	}
}
@@ -0,0 +1,40 @@ | |||
package a | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Apl lexer.
//
// Registered lazily.
var Apl = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "APL",
		Aliases: []string{"apl"},
		Filenames: []string{"*.apl"},
		MimeTypes: []string{},
	},
	aplRules,
))
// aplRules returns the tokenisation rules for APL. The patterns lean
// heavily on APL's Unicode glyph set (⍝ comments, ⎕ system names,
// ← assignment, ⍺/⍵ dfn arguments, etc.).
func aplRules() Rules {
	return Rules{
		"root": {
			{`\s+`, Text, nil},
			// Comment: lamp glyph ⍝ (or "#") to end of line.
			{`[⍝#].*$`, CommentSingle, nil},
			// Strings; quotes are escaped by doubling.
			{`\'((\'\')|[^\'])*\'`, LiteralStringSingle, nil},
			{`"(("")|[^"])*"`, LiteralStringDouble, nil},
			// Statement separators and parentheses.
			{`[⋄◇()]`, Punctuation, nil},
			{`[\[\];]`, LiteralStringRegex, nil},
			// ⎕-prefixed system functions/variables.
			{`⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*`, NameFunction, nil},
			// Ordinary identifiers (Δ, ∆, ⍙ are legal name characters).
			{`[A-Za-zΔ∆⍙_][A-Za-zΔ∆⍙_¯0-9]*`, NameVariable, nil},
			// Numbers: high-minus ¯ negatives, hex, floats, exponents,
			// infinity, and complex numbers via the J suffix.
			{`¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?`, LiteralNumber, nil},
			// Operators (in the APL sense: modify functions).
			{`[\.\\/⌿⍀¨⍣⍨⍠⍤∘⍥@⌺⌶⍢]`, NameAttribute, nil},
			// Primitive functions.
			{`[+\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗⊆⍸]`, Operator, nil},
			// Zilde (empty numeric vector).
			{`⍬`, NameConstant, nil},
			// Quad / quote-quad I/O variables.
			{`[⎕⍞]`, NameVariableGlobal, nil},
			// Assignment and branch arrows.
			{`[←→]`, KeywordDeclaration, nil},
			// Dfn argument names and related pseudo-builtins.
			{`[⍺⍵⍶⍹∇:]`, NameBuiltinPseudo, nil},
			// Dfn braces.
			{`[{}]`, KeywordType, nil},
		},
	}
}
@@ -0,0 +1,59 @@ | |||
package a | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Applescript lexer.
//
// Registered lazily.
var Applescript = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "AppleScript",
		Aliases: []string{"applescript"},
		Filenames: []string{"*.applescript"},
		MimeTypes: []string{},
		DotAll: true,
	},
	applescriptRules,
))
// applescriptRules builds the tokenisation rules for the AppleScript lexer.
// The very large alternations below enumerate AppleScript's English-language
// operators, commands and AppleScript Studio handler/property names; rule
// order matters, so do not reorder.
func applescriptRules() Rules {
	return Rules{
		"root": {
			{`\s+`, Text, nil},
			// ¬ at end of line continues a statement.
			{`¬\n`, LiteralStringEscape, nil},
			// Possessive suffix: "x's".
			{`'s\s+`, Text, nil},
			// Line comments start with -- or #.
			{`(--|#).*?$`, Comment, nil},
			// (* ... *) block comments nest; handled in the "comment" state.
			{`\(\*`, CommentMultiline, Push("comment")},
			{`[(){}!,.:]`, Punctuation, nil},
			// Raw chevron («event ...») syntax.
			{`(«)([^»]+)(»)`, ByGroups(Text, NameBuiltin, Text), nil},
			// considering/ignoring attribute clauses.
			{`\b((?:considering|ignoring)\s*)(application responses|case|diacriticals|hyphens|numeric strings|punctuation|white space)`, ByGroups(Keyword, NameBuiltin), nil},
			// Symbolic operators.
			{`(-|\*|\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\^)`, Operator, nil},
			// English-language comparison/containment operators.
			{`\b(and|or|is equal|equals|(is )?equal to|is not|isn't|isn't equal( to)?|is not equal( to)?|doesn't equal|does not equal|(is )?greater than|comes after|is not less than or equal( to)?|isn't less than or equal( to)?|(is )?less than|comes before|is not greater than or equal( to)?|isn't greater than or equal( to)?|(is )?greater than or equal( to)?|is not less than|isn't less than|does not come before|doesn't come before|(is )?less than or equal( to)?|is not greater than|isn't greater than|does not come after|doesn't come after|starts? with|begins? with|ends? with|contains?|does not contain|doesn't contain|is in|is contained by|is not in|is not contained by|isn't contained by|div|mod|not|(a )?(ref( to)?|reference to)|is|does)\b`, OperatorWord, nil},
			// AppleScript Studio event-handler names following on/end.
			{`^(\s*(?:on|end)\s+)(zoomed|write to file|will zoom|will show|will select tab view item|will resize( sub views)?|will resign active|will quit|will pop up|will open|will move|will miniaturize|will hide|will finish launching|will display outline cell|will display item cell|will display cell|will display browser cell|will dismiss|will close|will become active|was miniaturized|was hidden|update toolbar item|update parameters|update menu item|shown|should zoom|should selection change|should select tab view item|should select row|should select item|should select column|should quit( after last window closed)?|should open( untitled)?|should expand item|should end editing|should collapse item|should close|should begin editing|selection changing|selection changed|selected tab view item|scroll wheel|rows changed|right mouse up|right mouse dragged|right mouse down|resized( sub views)?|resigned main|resigned key|resigned active|read from file|prepare table drop|prepare table drag|prepare outline drop|prepare outline drag|prepare drop|plugin loaded|parameters updated|panel ended|opened|open untitled|number of rows|number of items|number of browser rows|moved|mouse up|mouse moved|mouse exited|mouse entered|mouse dragged|mouse down|miniaturized|load data representation|launched|keyboard up|keyboard down|items changed|item value changed|item value|item expandable|idle|exposed|end editing|drop|drag( (entered|exited|updated))?|double clicked|document nib name|dialog ended|deminiaturized|data representation|conclude drop|column resized|column moved|column clicked|closed|clicked toolbar item|clicked|choose menu item|child of item|changed|change item value|change cell value|cell value changed|cell value|bounds changed|begin editing|became main|became key|awake from nib|alert ended|activated|action|accept table drop|accept outline drop)`, ByGroups(Keyword, NameFunction), nil},
			// Handler/script declarations at line start.
			{`^(\s*)(in|on|script|to)(\s+)`, ByGroups(Text, Keyword, Text), nil},
			// Coercions: "as <class>".
			{`\b(as )(alias |application |boolean |class |constant |date |file |integer |list |number |POSIX file |real |record |reference |RGB color |script |text |unit types|(?:Unicode )?text|string)\b`, ByGroups(Keyword, NameClass), nil},
			// Built-in constants.
			{`\b(AppleScript|current application|false|linefeed|missing value|pi|quote|result|return|space|tab|text item delimiters|true|version)\b`, NameConstant, nil},
			// Standard-additions commands.
			{`\b(ASCII (character|number)|activate|beep|choose URL|choose application|choose color|choose file( name)?|choose folder|choose from list|choose remote application|clipboard info|close( access)?|copy|count|current date|delay|delete|display (alert|dialog)|do shell script|duplicate|exists|get eof|get volume settings|info for|launch|list (disks|folder)|load script|log|make|mount volume|new|offset|open( (for access|location))?|path to|print|quit|random number|read|round|run( script)?|say|scripting components|set (eof|the clipboard to|volume)|store script|summarize|system attribute|system info|the clipboard|time to GMT|write|quoted form)\b`, NameBuiltin, nil},
			// Control-flow and statement keywords.
			{`\b(considering|else|error|exit|from|if|ignoring|in|repeat|tell|then|times|to|try|until|using terms from|while|with|with timeout( of)?|with transaction|by|continue|end|its?|me|my|return|of|as)\b`, Keyword, nil},
			// Declaration keywords.
			{`\b(global|local|prop(erty)?|set|get)\b`, Keyword, nil},
			{`\b(but|put|returning|the)\b`, NameBuiltin, nil},
			// Text-element class names (optionally plural).
			{`\b(attachment|attribute run|character|day|month|paragraph|word|year)s?\b`, NameBuiltin, nil},
			// Prepositions.
			{`\b(about|above|against|apart from|around|aside from|at|below|beneath|beside|between|for|given|instead of|on|onto|out of|over|since)\b`, NameBuiltin, nil},
			// AppleScript Studio properties (single huge alternation; it was
			// hard-wrapped by the diff view and is rejoined onto one line here).
			{`\b(accepts arrow key|action method|active|alignment|allowed identifiers|allows branch selection|allows column reordering|allows column resizing|allows column selection|allows customization|allows editing text attributes|allows empty selection|allows mixed state|allows multiple selection|allows reordering|allows undo|alpha( value)?|alternate image|alternate increment value|alternate title|animation delay|associated file name|associated object|auto completes|auto display|auto enables items|auto repeat|auto resizes( outline column)?|auto save expanded items|auto save name|auto save table columns|auto saves configuration|auto scroll|auto sizes all columns to fit|auto sizes cells|background color|bezel state|bezel style|bezeled|border rect|border type|bordered|bounds( rotation)?|box type|button returned|button type|can choose directories|can choose files|can draw|can hide|cell( (background color|size|type))?|characters|class|click count|clicked( data)? column|clicked data item|clicked( data)? row|closeable|collating|color( (mode|panel))|command key down|configuration|content(s| (size|view( margins)?))?|context|continuous|control key down|control size|control tint|control view|controller visible|coordinate system|copies( on scroll)?|corner view|current cell|current column|current( field)? editor|current( menu)? item|current row|current tab view item|data source|default identifiers|delta (x|y|z)|destination window|directory|display mode|displayed cell|document( (edited|rect|view))?|double value|dragged column|dragged distance|dragged items|draws( cell)? background|draws grid|dynamically scrolls|echos bullets|edge|editable|edited( data)? column|edited data item|edited( data)? row|enabled|enclosing scroll view|ending page|error handling|event number|event type|excluded from windows menu|executable path|expanded|fax number|field editor|file kind|file name|file type|first responder|first visible column|flipped|floating|font( panel)?|formatter|frameworks path|frontmost|gave up|grid color|has data items|has horizontal ruler|has horizontal scroller|has parent data item|has resize indicator|has shadow|has sub menu|has vertical ruler|has vertical scroller|header cell|header view|hidden|hides when deactivated|highlights by|horizontal line scroll|horizontal page scroll|horizontal ruler view|horizontally resizable|icon image|id|identifier|ignores multiple clicks|image( (alignment|dims when disabled|frame style|scaling))?|imports graphics|increment value|indentation per level|indeterminate|index|integer value|intercell spacing|item height|key( (code|equivalent( modifier)?|window))?|knob thickness|label|last( visible)? column|leading offset|leaf|level|line scroll|loaded|localized sort|location|loop mode|main( (bunde|menu|window))?|marker follows cell|matrix mode|maximum( content)? size|maximum visible columns|menu( form representation)?|miniaturizable|miniaturized|minimized image|minimized title|minimum column width|minimum( content)? size|modal|modified|mouse down state|movie( (controller|file|rect))?|muted|name|needs display|next state|next text|number of tick marks|only tick mark values|opaque|open panel|option key down|outline table column|page scroll|pages across|pages down|palette label|pane splitter|parent data item|parent window|pasteboard|path( (names|separator))?|playing|plays every frame|plays selection only|position|preferred edge|preferred type|pressure|previous text|prompt|properties|prototype cell|pulls down|rate|released when closed|repeated|requested print time|required file type|resizable|resized column|resource path|returns records|reuses columns|rich text|roll over|row height|rulers visible|save panel|scripts path|scrollable|selectable( identifiers)?|selected cell|selected( data)? columns?|selected data items?|selected( data)? rows?|selected item identifier|selection by rect|send action on arrow key|sends action when done editing|separates columns|separator item|sequence number|services menu|shared frameworks path|shared support path|sheet|shift key down|shows alpha|shows state by|size( mode)?|smart insert delete enabled|sort case sensitivity|sort column|sort order|sort type|sorted( data rows)?|sound|source( mask)?|spell checking enabled|starting page|state|string value|sub menu|super menu|super view|tab key traverses cells|tab state|tab type|tab view|table view|tag|target( printer)?|text color|text container insert|text container origin|text returned|tick mark position|time stamp|title(d| (cell|font|height|position|rect))?|tool tip|toolbar|trailing offset|transparent|treat packages as directories|truncated labels|types|unmodified characters|update views|use sort indicator|user defaults|uses data source|uses ruler|uses threaded animation|uses title from previous column|value wraps|version|vertical( (line scroll|page scroll|ruler view))?|vertically resizable|view|visible( document rect)?|volume|width|window|windows menu|wraps|zoomable|zoomed)\b`, NameAttribute, nil},
			// AppleScript Studio class names (optionally plural).
			{`\b(action cell|alert reply|application|box|browser( cell)?|bundle|button( cell)?|cell|clip view|color well|color-panel|combo box( item)?|control|data( (cell|column|item|row|source))?|default entry|dialog reply|document|drag info|drawer|event|font(-panel)?|formatter|image( (cell|view))?|matrix|menu( item)?|item|movie( view)?|open-panel|outline view|panel|pasteboard|plugin|popup button|progress indicator|responder|save-panel|scroll view|secure text field( cell)?|slider|sound|split view|stepper|tab view( item)?|table( (column|header cell|header view|view))|text( (field( cell)?|view))?|toolbar( item)?|user-defaults|view|window)s?\b`, NameBuiltin, nil},
			// AppleScript Studio commands.
			{`\b(animate|append|call method|center|close drawer|close panel|display|display alert|display dialog|display panel|go|hide|highlight|increment|item for|load image|load movie|load nib|load panel|load sound|localized string|lock focus|log|open drawer|path for|pause|perform action|play|register|resume|scroll|select( all)?|show|size to fit|start|step back|step forward|stop|synchronize|unlock focus|update)\b`, NameBuiltin, nil},
			// Ordinal and positional references.
			{`\b((in )?back of|(in )?front of|[0-9]+(st|nd|rd|th)|first|second|third|fourth|fifth|sixth|seventh|eighth|ninth|tenth|after|back|before|behind|every|front|index|last|middle|some|that|through|thru|where|whose)\b`, NameBuiltin, nil},
			{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
			{`\b([a-zA-Z]\w*)\b`, NameVariable, nil},
			{`[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?`, LiteralNumberFloat, nil},
			{`[-+]?\d+`, LiteralNumberInteger, nil},
		},
		// Nested (* ... *) comments: push on every open, pop on every close.
		"comment": {
			{`\(\*`, CommentMultiline, Push()},
			{`\*\)`, CommentMultiline, Pop(1)},
			{`[^*(]+`, CommentMultiline, nil},
			{`[*(]`, CommentMultiline, nil},
		},
	}
}
@@ -0,0 +1,114 @@ | |||
package a | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Arduino lexer. Registered lazily; rules come from arduinoRules.
var Arduino = internal.Register(MustNewLazyLexer(
	&Config{
		Name:      "Arduino",
		Aliases:   []string{"arduino"},
		Filenames: []string{"*.ino"},
		MimeTypes: []string{"text/x-arduino"},
		// EnsureNL: guarantee the input ends with a newline so the
		// line-anchored rules terminate cleanly.
		EnsureNL: true,
	},
	arduinoRules,
))
// arduinoRules builds the tokenisation rules for the Arduino lexer.
// The structure follows the C/C++ family lexers (root/statement/function/
// whitespace/macro/if0 states) with Arduino-specific keyword, constant,
// class and function lists layered into "statements". Rule order matters.
func arduinoRules() Rules {
	return Rules{
		"statements": {
			// C++ keywords.
			{Words(``, `\b`, `catch`, `const_cast`, `delete`, `dynamic_cast`, `explicit`, `export`, `friend`, `mutable`, `namespace`, `new`, `operator`, `private`, `protected`, `public`, `reinterpret_cast`, `restrict`, `static_cast`, `template`, `this`, `throw`, `throws`, `try`, `typeid`, `typename`, `using`, `virtual`, `constexpr`, `nullptr`, `decltype`, `thread_local`, `alignas`, `alignof`, `static_assert`, `noexcept`, `override`, `final`), Keyword, nil},
			{`char(16_t|32_t)\b`, KeywordType, nil},
			{`(class)\b`, ByGroups(Keyword, Text), Push("classname")},
			// C++11 raw strings: R"delim( ... )delim".
			{`(R)(")([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(")`, ByGroups(LiteralStringAffix, LiteralString, LiteralStringDelimiter, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, LiteralString), nil},
			// Prefixed and plain string literals.
			{`(u8|u|U)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")},
			{`(L?)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")},
			// Character literals, including escapes.
			{`(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')`, ByGroups(LiteralStringAffix, LiteralStringChar, LiteralStringChar, LiteralStringChar), nil},
			// Numeric literals: floats before ints so the longer form wins.
			{`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*`, LiteralNumberFloat, nil},
			{`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil},
			{`0x[0-9a-fA-F]+[LlUu]*`, LiteralNumberHex, nil},
			{`0[0-7]+[LlUu]*`, LiteralNumberOct, nil},
			{`\d+[LlUu]*`, LiteralNumberInteger, nil},
			// A stray comment close is an error.
			{`\*/`, Error, nil},
			{`[~!%^&*+=|?:<>/-]`, Operator, nil},
			{`[()\[\],.]`, Punctuation, nil},
			// C keywords.
			{Words(``, `\b`, `asm`, `auto`, `break`, `case`, `const`, `continue`, `default`, `do`, `else`, `enum`, `extern`, `for`, `goto`, `if`, `register`, `restricted`, `return`, `sizeof`, `static`, `struct`, `switch`, `typedef`, `union`, `volatile`, `while`), Keyword, nil},
			// Built-in and Arduino type names.
			{`(_Bool|_Complex|_Imaginary|array|atomic_bool|atomic_char|atomic_int|atomic_llong|atomic_long|atomic_schar|atomic_short|atomic_uchar|atomic_uint|atomic_ullong|atomic_ulong|atomic_ushort|auto|bool|boolean|BooleanVariables|Byte|byte|Char|char|char16_t|char32_t|class|complex|Const|const|const_cast|delete|double|dynamic_cast|enum|explicit|extern|Float|float|friend|inline|Int|int|int16_t|int32_t|int64_t|int8_t|Long|long|new|NULL|null|operator|private|PROGMEM|protected|public|register|reinterpret_cast|short|signed|sizeof|Static|static|static_cast|String|struct|typedef|uint16_t|uint32_t|uint64_t|uint8_t|union|unsigned|virtual|Void|void|Volatile|volatile|word)\b`, KeywordType, nil},
			// Start of: Arduino-specific syntax
			{`(and|final|If|Loop|loop|not|or|override|setup|Setup|throw|try|xor)\b`, Keyword, nil}, // Addition to keywords already defined by C++
			// Arduino built-in constants (pin modes, edge triggers, etc.).
			{`(ANALOG_MESSAGE|BIN|CHANGE|DEC|DEFAULT|DIGITAL_MESSAGE|EXTERNAL|FALLING|FIRMATA_STRING|HALF_PI|HEX|HIGH|INPUT|INPUT_PULLUP|INTERNAL|INTERNAL1V1|INTERNAL1V1|INTERNAL2V56|INTERNAL2V56|LED_BUILTIN|LED_BUILTIN_RX|LED_BUILTIN_TX|LOW|LSBFIRST|MSBFIRST|OCT|OUTPUT|PI|REPORT_ANALOG|REPORT_DIGITAL|RISING|SET_PIN_MODE|SYSEX_START|SYSTEM_RESET|TWO_PI)\b`, KeywordConstant, nil},
			{`(boolean|const|byte|word|string|String|array)\b`, NameVariable, nil},
			// Arduino core library class names.
			{`(Keyboard|KeyboardController|MouseController|SoftwareSerial|EthernetServer|EthernetClient|LiquidCrystal|RobotControl|GSMVoiceCall|EthernetUDP|EsploraTFT|HttpClient|RobotMotor|WiFiClient|GSMScanner|FileSystem|Scheduler|GSMServer|YunClient|YunServer|IPAddress|GSMClient|GSMModem|Keyboard|Ethernet|Console|GSMBand|Esplora|Stepper|Process|WiFiUDP|GSM_SMS|Mailbox|USBHost|Firmata|PImage|Client|Server|GSMPIN|FileIO|Bridge|Serial|EEPROM|Stream|Mouse|Audio|Servo|File|Task|GPRS|WiFi|Wire|TFT|GSM|SPI|SD)\b`, NameClass, nil},
			// Arduino core library function names (one huge alternation; it was
			// hard-wrapped by the diff view and is rejoined onto one line here).
			{`(abs|Abs|accept|ACos|acos|acosf|addParameter|analogRead|AnalogRead|analogReadResolution|AnalogReadResolution|analogReference|AnalogReference|analogWrite|AnalogWrite|analogWriteResolution|AnalogWriteResolution|answerCall|asin|ASin|asinf|atan|ATan|atan2|ATan2|atan2f|atanf|attach|attached|attachGPRS|attachInterrupt|AttachInterrupt|autoscroll|available|availableForWrite|background|beep|begin|beginPacket|beginSD|beginSMS|beginSpeaker|beginTFT|beginTransmission|beginWrite|bit|Bit|BitClear|bitClear|bitRead|BitRead|bitSet|BitSet|BitWrite|bitWrite|blink|blinkVersion|BSSID|buffer|byte|cbrt|cbrtf|Ceil|ceil|ceilf|changePIN|char|charAt|checkPIN|checkPUK|checkReg|circle|cityNameRead|cityNameWrite|clear|clearScreen|click|close|compareTo|compassRead|concat|config|connect|connected|constrain|Constrain|copysign|copysignf|cos|Cos|cosf|cosh|coshf|countryNameRead|countryNameWrite|createChar|cursor|debugPrint|degrees|Delay|delay|DelayMicroseconds|delayMicroseconds|detach|DetachInterrupt|detachInterrupt|DigitalPinToInterrupt|digitalPinToInterrupt|DigitalRead|digitalRead|DigitalWrite|digitalWrite|disconnect|display|displayLogos|drawBMP|drawCompass|encryptionType|end|endPacket|endSMS|endsWith|endTransmission|endWrite|equals|equalsIgnoreCase|exists|exitValue|Exp|exp|expf|fabs|fabsf|fdim|fdimf|fill|find|findUntil|float|floor|Floor|floorf|flush|fma|fmaf|fmax|fmaxf|fmin|fminf|fmod|fmodf|gatewayIP|get|getAsynchronously|getBand|getButton|getBytes|getCurrentCarrier|getIMEI|getKey|getModifiers|getOemKey|getPINUsed|getResult|getSignalStrength|getSocket|getVoiceCallStatus|getXChange|getYChange|hangCall|height|highByte|HighByte|home|hypot|hypotf|image|indexOf|int|interrupts|IPAddress|IRread|isActionDone|isAlpha|isAlphaNumeric|isAscii|isControl|isDigit|isDirectory|isfinite|isGraph|isHexadecimalDigit|isinf|isListening|isLowerCase|isnan|isPIN|isPressed|isPrintable|isPunct|isSpace|isUpperCase|isValid|isWhitespace|keyboardRead|keyPressed|keyReleased|knobRead|lastIndexOf|ldexp|ldexpf|leftToRight|length|line|lineFollowConfig|listen|listenOnLocalhost|loadImage|localIP|log|Log|log10|log10f|logf|long|lowByte|LowByte|lrint|lrintf|lround|lroundf|macAddress|maintain|map|Map|Max|max|messageAvailable|Micros|micros|millis|Millis|Min|min|mkdir|motorsStop|motorsWrite|mouseDragged|mouseMoved|mousePressed|mouseReleased|move|noAutoscroll|noBlink|noBuffer|noCursor|noDisplay|noFill|noInterrupts|NoInterrupts|noListenOnLocalhost|noStroke|noTone|NoTone|onReceive|onRequest|open|openNextFile|overflow|parseCommand|parseFloat|parseInt|parsePacket|pauseMode|peek|PinMode|pinMode|playFile|playMelody|point|pointTo|position|Pow|pow|powf|prepare|press|print|printFirmwareVersion|println|printVersion|process|processInput|PulseIn|pulseIn|pulseInLong|PulseInLong|put|radians|random|Random|randomSeed|RandomSeed|read|readAccelerometer|readBlue|readButton|readBytes|readBytesUntil|readGreen|readJoystickButton|readJoystickSwitch|readJoystickX|readJoystickY|readLightSensor|readMessage|readMicrophone|readNetworks|readRed|readSlider|readString|readStringUntil|readTemperature|ready|rect|release|releaseAll|remoteIP|remoteNumber|remotePort|remove|replace|requestFrom|retrieveCallingNumber|rewindDirectory|rightToLeft|rmdir|robotNameRead|robotNameWrite|round|roundf|RSSI|run|runAsynchronously|running|runShellCommand|runShellCommandAsynchronously|scanNetworks|scrollDisplayLeft|scrollDisplayRight|seek|sendAnalog|sendDigitalPortPair|sendDigitalPorts|sendString|sendSysex|Serial_Available|Serial_Begin|Serial_End|Serial_Flush|Serial_Peek|Serial_Print|Serial_Println|Serial_Read|serialEvent|setBand|setBitOrder|setCharAt|setClockDivider|setCursor|setDataMode|setDNS|setFirmwareVersion|setMode|setPINUsed|setSpeed|setTextSize|setTimeout|ShiftIn|shiftIn|ShiftOut|shiftOut|shutdown|signbit|sin|Sin|sinf|sinh|sinhf|size|sizeof|Sq|sq|Sqrt|sqrt|sqrtf|SSID|startLoop|startsWith|step|stop|stroke|subnetMask|substring|switchPIN|tan|Tan|tanf|tanh|tanhf|tempoWrite|text|toCharArray|toInt|toLowerCase|tone|Tone|toUpperCase|transfer|trim|trunc|truncf|tuneWrite|turn|updateIR|userNameRead|userNameWrite|voiceCall|waitContinue|width|WiFiServer|word|write|writeBlue|writeGreen|writeJSON|writeMessage|writeMicroseconds|writeRed|writeRGB|yield|Yield)\b`, NameFunction, nil},
			// End of: Arduino-specific syntax
			// MSVC-style reserved words.
			{Words(``, `\b`, `inline`, `_inline`, `__inline`, `naked`, `restrict`, `thread`, `typename`), KeywordReserved, nil},
			{`(__m(128i|128d|128|64))\b`, KeywordReserved, nil},
			{Words(`__`, `\b`, `asm`, `int8`, `based`, `except`, `int16`, `stdcall`, `cdecl`, `fastcall`, `int32`, `declspec`, `finally`, `int64`, `try`, `leave`, `wchar_t`, `w64`, `unaligned`, `raise`, `noop`, `identifier`, `forceinline`, `assume`), KeywordReserved, nil},
			{`(true|false|NULL)\b`, NameBuiltin, nil},
			// Labels ("name:"), but not "::" scope operators.
			{`([a-zA-Z_]\w*)(\s*)(:)(?!:)`, ByGroups(NameLabel, Text, Punctuation), nil},
			{`[a-zA-Z_]\w*`, Name, nil},
		},
		"root": {
			Include("whitespace"),
			// Function definition: return type, name, params, then '{'.
			{`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), Push("function")},
			// Function declaration (prototype ending in ';').
			{`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), nil},
			Default(Push("statement")),
			// NOTE(review): the two rules below sit after Default, which
			// appears to match unconditionally, making them unreachable —
			// confirm against chroma's Default semantics before relying on them.
			{Words(`__`, `\b`, `virtual_inheritance`, `uuidof`, `super`, `single_inheritance`, `multiple_inheritance`, `interface`, `event`), KeywordReserved, nil},
			{`__(offload|blockingoffload|outer)\b`, KeywordPseudo, nil},
		},
		// Class name immediately after the "class" keyword.
		"classname": {
			{`[a-zA-Z_]\w*`, NameClass, Pop(1)},
			{`\s*(?=>)`, Text, Pop(1)},
		},
		"whitespace": {
			// Preprocessor lines, optionally preceded by a block comment.
			{`^#if\s+0`, CommentPreproc, Push("if0")},
			{`^#`, CommentPreproc, Push("macro")},
			{`^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("if0")},
			{`^(\s*(?:/[*].*?[*]/\s*)?)(#)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("macro")},
			{`\n`, Text, nil},
			{`\s+`, Text, nil},
			{`\\\n`, Text, nil}, // line continuation
			{`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil},
			{`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil},
			// Unterminated block comment swallows the rest of the input.
			{`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil},
		},
		// A single statement; ';' returns to "root".
		"statement": {
			Include("whitespace"),
			Include("statements"),
			{`[{}]`, Punctuation, nil},
			{`;`, Punctuation, Pop(1)},
		},
		// Function body; braces are tracked so nested blocks balance.
		"function": {
			Include("whitespace"),
			Include("statements"),
			{`;`, Punctuation, nil},
			{`\{`, Punctuation, Push()},
			{`\}`, Punctuation, Pop(1)},
		},
		// Double-quoted string body with C escape sequences.
		"string": {
			{`"`, LiteralString, Pop(1)},
			{`\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})`, LiteralStringEscape, nil},
			{`[^\\"\n]+`, LiteralString, nil},
			{`\\\n`, LiteralString, nil},
			{`\\`, LiteralString, nil},
		},
		// Preprocessor directive body (after '#').
		"macro": {
			{`(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)`, ByGroups(CommentPreproc, Text, CommentPreprocFile), nil},
			{`[^/\n]+`, CommentPreproc, nil},
			{`/[*](.|\n)*?[*]/`, CommentMultiline, nil},
			{`//.*?\n`, CommentSingle, Pop(1)},
			{`/`, CommentPreproc, nil},
			{`(?<=\\)\n`, CommentPreproc, nil}, // continuation keeps the directive open
			{`\n`, CommentPreproc, Pop(1)},
		},
		// Body of an "#if 0" block: everything is a comment; nested #if
		// pushes so the matching #endif is found.
		"if0": {
			{`^\s*#if.*?(?<!\\)\n`, CommentPreproc, Push()},
			{`^\s*#el(?:se|if).*\n`, CommentPreproc, Pop(1)},
			{`^\s*#endif.*?(?<!\\)\n`, CommentPreproc, Pop(1)},
			{`.*?\n`, Comment, nil},
		},
	}
}
@@ -0,0 +1,72 @@ | |||
package a | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// ArmAsm lexer for ARM assembly. Registered lazily; rules come from
// armasmRules. Note *.s/*.S are shared with other assemblers.
var ArmAsm = internal.Register(MustNewLazyLexer(
	&Config{
		Name:    "ArmAsm",
		Aliases: []string{"armasm"},
		// Guarantee a trailing newline so line-terminated rules match.
		EnsureNL:  true,
		Filenames: []string{"*.s", "*.S"},
		MimeTypes: []string{"text/x-armasm", "text/x-asm"},
	},
	armasmRules,
))
func armasmRules() Rules { | |||
return Rules{ | |||
"commentsandwhitespace": { | |||
{`\s+`, Text, nil}, | |||
{`[@;].*?\n`, CommentSingle, nil}, | |||
{`/\*.*?\*/`, CommentMultiline, nil}, | |||
}, | |||
"literal": { | |||
// Binary | |||
{`0b[01]+`, NumberBin, Pop(1)}, | |||
// Hex | |||
{`0x\w{1,8}`, NumberHex, Pop(1)}, | |||
// Octal | |||
{`0\d+`, NumberOct, Pop(1)}, | |||
// Float | |||
{`\d+?\.\d+?`, NumberFloat, Pop(1)}, | |||
// Integer | |||
{`\d+`, NumberInteger, Pop(1)}, | |||
// String | |||
{`(")(.+)(")`, ByGroups(Punctuation, StringDouble, Punctuation), Pop(1)}, | |||
// Char | |||
{`(')(.{1}|\\.{1})(')`, ByGroups(Punctuation, StringChar, Punctuation), Pop(1)}, | |||
}, | |||
"opcode": { | |||
// Escape at line end | |||
{`\n`, Text, Pop(1)}, | |||
// Comment | |||
{`(@|;).*\n`, CommentSingle, Pop(1)}, | |||
// Whitespace | |||
{`(\s+|,)`, Text, nil}, | |||
// Register by number | |||
{`[rapcfxwbhsdqv]\d{1,2}`, NameClass, nil}, | |||
// Address by hex | |||
{`=0x\w+`, ByGroups(Text, NameLabel), nil}, | |||
// Pseudo address by label | |||
{`(=)(\w+)`, ByGroups(Text, NameLabel), nil}, | |||
// Immediate | |||
{`#`, Text, Push("literal")}, | |||
}, | |||
"root": { | |||
Include("commentsandwhitespace"), | |||
// Directive with optional param | |||
{`(\.\w+)([ \t]+\w+\s+?)?`, ByGroups(KeywordNamespace, NameLabel), nil}, | |||
// Label with data | |||
{`(\w+)(:)(\s+\.\w+\s+)`, ByGroups(NameLabel, Punctuation, KeywordNamespace), Push("literal")}, | |||
// Label | |||
{`(\w+)(:)`, ByGroups(NameLabel, Punctuation), nil}, | |||
// Syscall Op | |||
{`svc\s+\w+`, NameNamespace, nil}, | |||
// Opcode | |||
{`[a-zA-Z]+`, Text, Push("opcode")}, | |||
}, | |||
} | |||
} |
@@ -0,0 +1,52 @@ | |||
package a | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Awk lexer. Registered lazily; rules come from awkRules. The aliases
// cover the common awk implementations (gawk, mawk, nawk).
var Awk = internal.Register(MustNewLazyLexer(
	&Config{
		Name:      "Awk",
		Aliases:   []string{"awk", "gawk", "mawk", "nawk"},
		Filenames: []string{"*.awk"},
		MimeTypes: []string{"application/x-awk"},
	},
	awkRules,
))
func awkRules() Rules { | |||
return Rules{ | |||
"commentsandwhitespace": { | |||
{`\s+`, Text, nil}, | |||
{`#.*$`, CommentSingle, nil}, | |||
}, | |||
"slashstartsregex": { | |||
Include("commentsandwhitespace"), | |||
{`/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/\B`, LiteralStringRegex, Pop(1)}, | |||
{`(?=/)`, Text, Push("#pop", "badregex")}, | |||
Default(Pop(1)), | |||
}, | |||
"badregex": { | |||
{`\n`, Text, Pop(1)}, | |||
}, | |||
"root": { | |||
{`^(?=\s|/)`, Text, Push("slashstartsregex")}, | |||
Include("commentsandwhitespace"), | |||
{`\+\+|--|\|\||&&|in\b|\$|!?~|\|&|(\*\*|[-<>+*%\^/!=|])=?`, Operator, Push("slashstartsregex")}, | |||
{`[{(\[;,]`, Punctuation, Push("slashstartsregex")}, | |||
{`[})\].]`, Punctuation, nil}, | |||
{`(break|continue|do|while|exit|for|if|else|return|switch|case|default)\b`, Keyword, Push("slashstartsregex")}, | |||
{`function\b`, KeywordDeclaration, Push("slashstartsregex")}, | |||
{`(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|length|match|split|patsplit|sprintf|sub|substr|tolower|toupper|close|fflush|getline|next(file)|print|printf|strftime|systime|mktime|delete|system|strtonum|and|compl|lshift|or|rshift|asorti?|isarray|bindtextdomain|dcn?gettext|@(include|load|namespace))\b`, KeywordReserved, nil}, | |||
{`(ARGC|ARGIND|ARGV|BEGIN(FILE)?|BINMODE|CONVFMT|ENVIRON|END(FILE)?|ERRNO|FIELDWIDTHS|FILENAME|FNR|FPAT|FS|IGNORECASE|LINT|NF|NR|OFMT|OFS|ORS|PROCINFO|RLENGTH|RS|RSTART|RT|SUBSEP|TEXTDOMAIN)\b`, NameBuiltin, nil}, | |||
{`[@$a-zA-Z_]\w*`, NameOther, nil}, | |||
{`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil}, | |||
{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, | |||
{`[0-9]+`, LiteralNumberInteger, nil}, | |||
{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, | |||
{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, | |||
}, | |||
} | |||
} |
@@ -0,0 +1,50 @@ | |||
package b | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Ballerina lexer. Registered lazily; rules come from ballerinaRules.
var Ballerina = internal.Register(MustNewLazyLexer(
	&Config{
		Name:      "Ballerina",
		Aliases:   []string{"ballerina"},
		Filenames: []string{"*.bal"},
		MimeTypes: []string{"text/x-ballerina"},
		// DotAll: `.` in the rule regexes also matches newlines.
		DotAll: true,
	},
	ballerinaRules,
))
// ballerinaRules builds the tokenisation rules for the Ballerina lexer.
// Rules are tried in order; do not reorder.
func ballerinaRules() Rules {
	return Rules{
		"root": {
			// Horizontal whitespace (newlines are matched separately below).
			{`[^\S\n]+`, Text, nil},
			{`//.*?\n`, CommentSingle, nil},
			{`/\*.*?\*/`, CommentMultiline, nil},
			// Flow-control keywords.
			{`(break|catch|continue|done|else|finally|foreach|forever|fork|if|lock|match|return|throw|transaction|try|while)\b`, Keyword, nil},
			// Function call/definition: qualifiers+type, name, then '('.
			{`((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)((?:[^\W\d]|\$)[\w$]*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil},
			// Annotations such as @http:ServiceConfig.
			{`@[^\W\d][\w.]*`, NameDecorator, nil},
			// Declaration keywords.
			{`(annotation|bind|but|endpoint|error|function|object|private|public|returns|service|type|var|with|worker)\b`, KeywordDeclaration, nil},
			// Built-in type names.
			{`(boolean|byte|decimal|float|int|json|map|nil|record|string|table|xml)\b`, KeywordType, nil},
			{`(true|false|null)\b`, KeywordConstant, nil},
			// "import <pkg>": the package path is consumed in the "import" state.
			{`(import)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")},
			{`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
			{`'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'`, LiteralStringChar, nil},
			// Member access: ".name".
			{`(\.)((?:[^\W\d]|\$)[\w$]*)`, ByGroups(Operator, NameAttribute), nil},
			// Labels at the start of a line.
			{`^\s*([^\W\d]|\$)[\w$]*:`, NameLabel, nil},
			{`([^\W\d]|\$)[\w$]*`, Name, nil},
			// Numeric literals: floats first so they win over plain integers
			// (underscores allowed as digit separators).
			{`([0-9][0-9_]*\.([0-9][0-9_]*)?|\.[0-9][0-9_]*)([eE][+\-]?[0-9][0-9_]*)?[fFdD]?|[0-9][eE][+\-]?[0-9][0-9_]*[fFdD]?|[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFdD]|0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)[pP][+\-]?[0-9][0-9_]*[fFdD]?`, LiteralNumberFloat, nil},
			{`0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?`, LiteralNumberHex, nil},
			{`0[bB][01][01_]*[lL]?`, LiteralNumberBin, nil},
			{`0[0-7_]+[lL]?`, LiteralNumberOct, nil},
			{`0|[1-9][0-9_]*[lL]?`, LiteralNumberInteger, nil},
			{`[~^*!%&\[\](){}<>|+=:;,./?-]`, Operator, nil},
			{`\n`, Text, nil},
		},
		// Package path after "import"; pops once consumed.
		"import": {
			{`[\w.]+`, NameNamespace, Pop(1)},
		},
	}
}
@@ -0,0 +1,100 @@ | |||
package b | |||
import ( | |||
"regexp" | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// bashAnalyserRe matches a shebang line naming a Bourne-family shell
// (bash, zsh, sh or ksh), optionally invoked via env. It is used by the
// analyser attached to the Bash lexer below to score arbitrary text.
// TODO(moorereason): can this be factored away?
var bashAnalyserRe = regexp.MustCompile(`(?m)^#!.*/bin/(?:env |)(?:bash|zsh|sh|ksh)`)
// Bash lexer. Registered lazily; rules come from bashRules. The filename
// globs cover shell scripts plus common rc files, ebuilds and PKGBUILDs.
var Bash = internal.Register(MustNewLazyLexer(
	&Config{
		Name:      "Bash",
		Aliases:   []string{"bash", "sh", "ksh", "zsh", "shell"},
		Filenames: []string{"*.sh", "*.ksh", "*.bash", "*.ebuild", "*.eclass", ".env", "*.env", "*.exheres-0", "*.exlib", "*.zsh", "*.zshrc", ".bashrc", "bashrc", ".bash_*", "bash_*", "zshrc", ".zshrc", "PKGBUILD"},
		MimeTypes: []string{"application/x-sh", "application/x-shellscript"},
	},
	bashRules,
).SetAnalyser(func(text string) float32 {
	// A shell shebang anywhere in the text gives maximum confidence;
	// otherwise this lexer claims no affinity for the content.
	if bashAnalyserRe.FindString(text) != "" {
		return 1.0
	}
	return 0.0
}))
// bashRules returns the token rules for the Bash lexer. It is passed as a
// rule factory to MustNewLazyLexer so the rule set is built on first use.
func bashRules() Rules {
	return Rules{
		// Entry state: keywords/builtins first, then backtick substitution,
		// literal data, and finally variable/command interpolation.
		"root": {
			Include("basic"),
			{"`", LiteralStringBacktick, Push("backticks")},
			Include("data"),
			Include("interp"),
		},
		// Interpolation forms: $((...)) arithmetic, $(...) command
		// substitution, ${...} parameter expansion, plain $name, and the
		// special parameters ($0-$9, $#, $$, $?, $!, $_, $*, $@, $-).
		"interp": {
			{`\$\(\(`, Keyword, Push("math")},
			{`\$\(`, Keyword, Push("paren")},
			{`\$\{#?`, LiteralStringInterpol, Push("curly")},
			{`\$[a-zA-Z_]\w*`, NameVariable, nil},
			{`\$(?:\d+|[#$?!_*@-])`, NameVariable, nil},
			{`\$`, Text, nil},
		},
		// Keywords, builtins, the shebang, comments, escapes, assignments
		// and basic operators.
		"basic": {
			{`\b(if|fi|else|while|do|done|for|then|return|function|case|select|continue|until|esac|elif)(\s*)\b`, ByGroups(Keyword, Text), nil},
			{"\\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|complete|declare|dirs|disown|echo|enable|eval|exec|exit|export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|shopt|source|suspend|test|time|times|trap|true|type|typeset|ulimit|umask|unalias|unset|wait)(?=[\\s)`])", NameBuiltin, nil},
			// \A restricts the shebang to the very start of the input.
			{`\A#!.+\n`, CommentPreproc, nil},
			{`#.*(\S|$)`, CommentSingle, nil},
			{`\\[\w\W]`, LiteralStringEscape, nil},
			{`(\b\w+)(\s*)(\+?=)`, ByGroups(NameVariable, Text, Operator), nil},
			{`[\[\]{}()=]`, Operator, nil},
			{`<<<`, Operator, nil},
			// Heredoc: \2 back-references the (possibly quoted) delimiter word.
			{`<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2`, LiteralString, nil},
			{`&&|\|\|`, Operator, nil},
		},
		// Quoted strings, numbers, separators and bare words.
		"data": {
			{`(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"`, LiteralStringDouble, nil},
			{`"`, LiteralStringDouble, Push("string")},
			{`(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'`, LiteralStringSingle, nil},
			{`(?s)'.*?'`, LiteralStringSingle, nil},
			{`;`, Punctuation, nil},
			{`&`, Punctuation, nil},
			{`\|`, Punctuation, nil},
			{`\s+`, Text, nil},
			{`\d+(?= |$)`, LiteralNumber, nil},
			{"[^=\\s\\[\\]{}()$\"\\'`\\\\<&|;]+", Text, nil},
			{`<`, Text, nil},
		},
		// Inside a double-quoted string; interpolation remains active.
		"string": {
			{`"`, LiteralStringDouble, Pop(1)},
			{`(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+`, LiteralStringDouble, nil},
			Include("interp"),
		},
		// Inside ${...} parameter expansion.
		"curly": {
			{`\}`, LiteralStringInterpol, Pop(1)},
			{`:-`, Keyword, nil},
			{`\w+`, NameVariable, nil},
			{"[^}:\"\\'`$\\\\]+", Punctuation, nil},
			{`:`, Punctuation, nil},
			Include("root"),
		},
		// Inside $(...) command substitution.
		"paren": {
			{`\)`, Keyword, Pop(1)},
			Include("root"),
		},
		// Inside $((...)) arithmetic expansion.
		"math": {
			{`\)\)`, Keyword, Pop(1)},
			{`[-+*/%^|&]|\*\*|\|\|`, Operator, nil},
			{`\d+#\d+`, LiteralNumber, nil},
			{`\d+#(?! )`, LiteralNumber, nil},
			{`\d+`, LiteralNumber, nil},
			Include("root"),
		},
		// Inside `...` command substitution.
		"backticks": {
			{"`", LiteralStringBacktick, Pop(1)},
			Include("root"),
		},
	}
}
@@ -0,0 +1,27 @@ | |||
package b | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// BashSession lexer.
//
// Highlights interactive shell transcripts: prompt lines are delegated to
// the Bash lexer (see bashsessionRules), everything else is treated as
// program output. EnsureNL guarantees the input ends with a newline so the
// line-anchored rules match the final line.
var BashSession = internal.Register(MustNewLazyLexer(
	&Config{
		Name:      "BashSession",
		Aliases:   []string{"bash-session", "console", "shell-session"},
		Filenames: []string{".sh-session"},
		MimeTypes: []string{"text/x-sh"},
		EnsureNL:  true,
	},
	bashsessionRules,
))
// bashsessionRules returns the token rules for the BashSession lexer.
func bashsessionRules() Rules {
	return Rules{
		"root": {
			// A prompt ("$", "#", "%", ">", optionally preceded by a
			// "[user@host]" prefix) followed by a command; the command text
			// is re-lexed with the Bash lexer via Using(Bash).
			{`^((?:\[[^]]+@[^]]+\]\s?)?[#$%>])(\s*)(.*\n?)`, ByGroups(GenericPrompt, Text, Using(Bash)), nil},
			// Any other line is command output.
			{`^.+\n?`, GenericOutput, nil},
		},
	}
}
@@ -0,0 +1,198 @@ | |||
package b | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Batchfile lexer.
//
// Windows batch (.bat/.cmd) files are case-insensitive, which is reflected
// in the config rather than in each individual regex.
var Batchfile = internal.Register(MustNewLazyLexer(
	&Config{
		Name:            "Batchfile",
		Aliases:         []string{"bat", "batch", "dosbatch", "winbatch"},
		Filenames:       []string{"*.bat", "*.cmd"},
		MimeTypes:       []string{"application/x-dos-batch"},
		CaseInsensitive: true,
	},
	batchfileRules,
))
// batchfileRules returns the token rules for the Batchfile lexer.
//
// NOTE(review): these rules appear to mirror Pygments' BatchLexer; the
// regexes are intentionally exhaustive (they model cmd.exe's ^-escaping,
// %var%/!var! expansion, and \x1a EOF handling inline). The "/compound"
// states duplicate their base states with ')' treated as a terminator for
// use inside parenthesised blocks. Do not hand-edit the large patterns —
// regenerate or port them from upstream instead.
func batchfileRules() Rules {
	return Rules{
		// Start-of-command state: labels, compound blocks, and the special
		// forms (for/if/rem/goto/call/set) each get dedicated handling.
		"root": {
			{`\)((?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?:(?:[^\n\x1a^]|\^[\n\x1a]?[\w\W])*)`, CommentSingle, nil},
			{`(?=((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:))`, Text, Push("follow")},
			{`(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)`, UsingSelf("text"), nil},
			Include("redirect"),
			{`[\n\x1a]+`, Text, nil},
			{`\(`, Punctuation, Push("root/compound")},
			{`@+`, Punctuation, nil},
			{`((?:for|if|rem)(?:(?=(?:\^[\n\x1a]?)?/)|(?:(?!\^)|(?<=m))(?:(?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+)?(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?)`, ByGroups(Keyword, UsingSelf("text")), Push("follow")},
			{`(goto(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))((?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"%\n\x1a&<>|])*(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"%\n\x1a&<>|])*)`, ByGroups(Keyword, UsingSelf("text")), Push("follow")},
			{Words(``, `(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])`, `assoc`, `break`, `cd`, `chdir`, `cls`, `color`, `copy`, `date`, `del`, `dir`, `dpath`, `echo`, `endlocal`, `erase`, `exit`, `ftype`, `keys`, `md`, `mkdir`, `mklink`, `move`, `path`, `pause`, `popd`, `prompt`, `pushd`, `rd`, `ren`, `rename`, `rmdir`, `setlocal`, `shift`, `start`, `time`, `title`, `type`, `ver`, `verify`, `vol`), Keyword, Push("follow")},
			{`(call)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:)`, ByGroups(Keyword, UsingSelf("text"), Punctuation), Push("call")},
			{`call(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])`, Keyword, nil},
			{`(for(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/f(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("for/f", "for")},
			{`(for(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/l(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("for/l", "for")},
			{`for(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])(?!\^)`, Keyword, Push("for2", "for")},
			{`(goto(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:?)`, ByGroups(Keyword, UsingSelf("text"), Punctuation), Push("label")},
			{`(if(?:(?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:/i(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:not(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)`, ByGroups(Keyword, UsingSelf("text"), Keyword, UsingSelf("text"), Keyword, UsingSelf("text")), Push("(?", "if")},
			{`rem(((?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+)?.*|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])(?:(?:[^\n\x1a^]|\^[\n\x1a]?[\w\W])*))`, CommentSingle, Push("follow")},
			{`(set(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))((?:(?:\^[\n\x1a]?)?[^\S\n])*)(/a)`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("arithmetic")},
			{`(set(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:/p)?)((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|^=]|\^[\n\x1a]?[^"=])+)?)((?:(?:\^[\n\x1a]?)?=)?)`, ByGroups(Keyword, UsingSelf("text"), Keyword, UsingSelf("text"), UsingSelf("variable"), Punctuation), Push("follow")},
			Default(Push("follow")),
		},
		// Rest of a command line after the leading keyword was consumed.
		"follow": {
			{`((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:)([\t\v\f\r ,;=\xa0]*)((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^]|\^[\n\x1a]?[\w\W])*))(.*)`, ByGroups(Text, Punctuation, Text, NameLabel, CommentSingle), nil},
			Include("redirect"),
			{`(?=[\n\x1a])`, Text, Pop(1)},
			{`\|\|?|&&?`, Punctuation, Pop(1)},
			Include("text"),
		},
		// Operand of "set /a": numbers, operators, and variable expansion.
		"arithmetic": {
			{`0[0-7]+`, LiteralNumberOct, nil},
			{`0x[\da-f]+`, LiteralNumberHex, nil},
			{`\d+`, LiteralNumberInteger, nil},
			{`[(),]+`, Punctuation, nil},
			{`([=+\-*/!~]|%|\^\^)+`, Operator, nil},
			{`((?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(\^[\n\x1a]?)?[^()=+\-*/!~%^"\n\x1a&<>|\t\v\f\r ,;=\xa0]|\^[\n\x1a\t\v\f\r ,;=\xa0]?[\w\W])+`, UsingSelf("variable"), nil},
			{`(?=[\x00|&])`, Text, Pop(1)},
			Include("follow"),
		},
		// Label operand of "call :label".
		"call": {
			{`(:?)((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^]|\^[\n\x1a]?[\w\W])*))`, ByGroups(Punctuation, NameLabel), Pop(1)},
		},
		// Label operand of "goto"; trailing text becomes a comment.
		"label": {
			{`((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^]|\^[\n\x1a]?[\w\W])*)?)((?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|\^[\n\x1a]?[\w\W]|[^"%^\n\x1a&<>|])*)`, ByGroups(NameLabel, CommentSingle), Pop(1)},
		},
		// I/O redirection: handle-to-handle (2>&1) and file redirections.
		"redirect": {
			{`((?:(?<=[\n\x1a\t\v\f\r ,;=\xa0])\d)?)(>>?&|<&)([\n\x1a\t\v\f\r ,;=\xa0]*)(\d)`, ByGroups(LiteralNumberInteger, Punctuation, Text, LiteralNumberInteger), nil},
			{`((?:(?<=[\n\x1a\t\v\f\r ,;=\xa0])(?<!\^[\n\x1a])\d)?)(>>?|<)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))`, ByGroups(LiteralNumberInteger, Punctuation, UsingSelf("text")), nil},
		},
		// Variant of "root" used inside ( ... ) compound statements, where
		// an unescaped ')' closes the block.
		"root/compound": {
			{`\)`, Punctuation, Pop(1)},
			{`(?=((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:))`, Text, Push("follow/compound")},
			{`(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)`, UsingSelf("text"), nil},
			Include("redirect/compound"),
			{`[\n\x1a]+`, Text, nil},
			{`\(`, Punctuation, Push("root/compound")},
			{`@+`, Punctuation, nil},
			{`((?:for|if|rem)(?:(?=(?:\^[\n\x1a]?)?/)|(?:(?!\^)|(?<=m))(?:(?=\()|(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0)])+)?(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?)`, ByGroups(Keyword, UsingSelf("text")), Push("follow/compound")},
			{`(goto(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])))((?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"%\n\x1a&<>|)])*(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"%\n\x1a&<>|)])*)`, ByGroups(Keyword, UsingSelf("text")), Push("follow/compound")},
			{Words(``, `(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))`, `assoc`, `break`, `cd`, `chdir`, `cls`, `color`, `copy`, `date`, `del`, `dir`, `dpath`, `echo`, `endlocal`, `erase`, `exit`, `ftype`, `keys`, `md`, `mkdir`, `mklink`, `move`, `path`, `pause`, `popd`, `prompt`, `pushd`, `rd`, `ren`, `rename`, `rmdir`, `setlocal`, `shift`, `start`, `time`, `title`, `type`, `ver`, `verify`, `vol`), Keyword, Push("follow/compound")},
			{`(call)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:)`, ByGroups(Keyword, UsingSelf("text"), Punctuation), Push("call/compound")},
			{`call(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))`, Keyword, nil},
			{`(for(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/f(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("for/f", "for")},
			{`(for(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/l(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("for/l", "for")},
			{`for(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?!\^)`, Keyword, Push("for2", "for")},
			{`(goto(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:?)`, ByGroups(Keyword, UsingSelf("text"), Punctuation), Push("label/compound")},
			{`(if(?:(?=\()|(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:/i(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:not(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)`, ByGroups(Keyword, UsingSelf("text"), Keyword, UsingSelf("text"), Keyword, UsingSelf("text")), Push("(?", "if")},
			{`rem(((?=\()|(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+)?.*|(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))(?:(?:[^\n\x1a^)]|\^[\n\x1a]?[^)])*))`, CommentSingle, Push("follow/compound")},
			{`(set(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])))((?:(?:\^[\n\x1a]?)?[^\S\n])*)(/a)`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("arithmetic/compound")},
			{`(set(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])))((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:/p)?)((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|^=)]|\^[\n\x1a]?[^"=])+)?)((?:(?:\^[\n\x1a]?)?=)?)`, ByGroups(Keyword, UsingSelf("text"), Keyword, UsingSelf("text"), UsingSelf("variable"), Punctuation), Push("follow/compound")},
			Default(Push("follow/compound")),
		},
		// "follow" inside a compound block.
		"follow/compound": {
			{`(?=\))`, Text, Pop(1)},
			{`((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:)([\t\v\f\r ,;=\xa0]*)((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^)]|\^[\n\x1a]?[^)])*))(.*)`, ByGroups(Text, Punctuation, Text, NameLabel, CommentSingle), nil},
			Include("redirect/compound"),
			{`(?=[\n\x1a])`, Text, Pop(1)},
			{`\|\|?|&&?`, Punctuation, Pop(1)},
			Include("text"),
		},
		// "arithmetic" inside a compound block.
		"arithmetic/compound": {
			{`(?=\))`, Text, Pop(1)},
			{`0[0-7]+`, LiteralNumberOct, nil},
			{`0x[\da-f]+`, LiteralNumberHex, nil},
			{`\d+`, LiteralNumberInteger, nil},
			{`[(),]+`, Punctuation, nil},
			{`([=+\-*/!~]|%|\^\^)+`, Operator, nil},
			{`((?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(\^[\n\x1a]?)?[^()=+\-*/!~%^"\n\x1a&<>|\t\v\f\r ,;=\xa0]|\^[\n\x1a\t\v\f\r ,;=\xa0]?[^)])+`, UsingSelf("variable"), nil},
			{`(?=[\x00|&])`, Text, Pop(1)},
			Include("follow"),
		},
		// "call" inside a compound block.
		"call/compound": {
			{`(?=\))`, Text, Pop(1)},
			{`(:?)((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^)]|\^[\n\x1a]?[^)])*))`, ByGroups(Punctuation, NameLabel), Pop(1)},
		},
		// "label" inside a compound block.
		"label/compound": {
			{`(?=\))`, Text, Pop(1)},
			{`((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^)]|\^[\n\x1a]?[^)])*)?)((?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|\^[\n\x1a]?[^)]|[^"%^\n\x1a&<>|)])*)`, ByGroups(NameLabel, CommentSingle), Pop(1)},
		},
		// "redirect" inside a compound block.
		"redirect/compound": {
			{`((?:(?<=[\n\x1a\t\v\f\r ,;=\xa0])\d)?)(>>?&|<&)([\n\x1a\t\v\f\r ,;=\xa0]*)(\d)`, ByGroups(LiteralNumberInteger, Punctuation, Text, LiteralNumberInteger), nil},
			{`((?:(?<=[\n\x1a\t\v\f\r ,;=\xa0])(?<!\^[\n\x1a])\d)?)(>>?|<)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0)])+))+))`, ByGroups(LiteralNumberInteger, Punctuation, UsingSelf("text")), nil},
		},
		// %var%/!var! expansion and ^-escapes, shared by the string states.
		"variable-or-escape": {
			{`(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))`, NameVariable, nil},
			{`%%|\^[\n\x1a]?(\^!|[\w\W])`, LiteralStringEscape, nil},
		},
		// Inside a double-quoted string (variables still expand).
		"string": {
			{`"`, LiteralStringDouble, Pop(1)},
			{`(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))`, NameVariable, nil},
			{`\^!|%%`, LiteralStringEscape, nil},
			{`[^"%^\n\x1a]+|[%^]`, LiteralStringDouble, nil},
			Default(Pop(1)),
		},
		// Single-quoted string contents (used via UsingSelf in "for/f").
		"sqstring": {
			Include("variable-or-escape"),
			{`[^%]+|%`, LiteralStringSingle, nil},
		},
		// Backquoted string contents (used via UsingSelf in "for/f").
		"bqstring": {
			Include("variable-or-escape"),
			{`[^%]+|%`, LiteralStringBacktick, nil},
		},
		// Generic argument text.
		"text": {
			{`"`, LiteralStringDouble, Push("string")},
			Include("variable-or-escape"),
			{`[^"%^\n\x1a&<>|\t\v\f\r ,;=\xa0\d)]+|.`, Text, nil},
		},
		// Variable-name position (e.g. after "set").
		"variable": {
			{`"`, LiteralStringDouble, Push("string")},
			Include("variable-or-escape"),
			{`[^"%^\n\x1a]+|.`, NameVariable, nil},
		},
		// The "in ( ... )" part of a for statement.
		"for": {
			{`((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(in)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(\()`, ByGroups(UsingSelf("text"), Keyword, UsingSelf("text"), Punctuation), Pop(1)},
			Include("follow"),
		},
		// After the for-set: expects "do".
		"for2": {
			{`\)`, Punctuation, nil},
			{`((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(do(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))`, ByGroups(UsingSelf("text"), Keyword), Pop(1)},
			{`[\n\x1a]+`, Text, nil},
			Include("follow"),
		},
		// Quoted options string of "for /f" (double, single, or backquoted).
		"for/f": {
			{`(")((?:(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"])*?")([\n\x1a\t\v\f\r ,;=\xa0]*)(\))`, ByGroups(LiteralStringDouble, UsingSelf("string"), Text, Punctuation), nil},
			{`"`, LiteralStringDouble, Push("#pop", "for2", "string")},
			{`('(?:%%|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[\w\W])*?')([\n\x1a\t\v\f\r ,;=\xa0]*)(\))`, ByGroups(UsingSelf("sqstring"), Text, Punctuation), nil},
			{"(`(?:%%|(?:(?:%(?:\\*|(?:~[a-z]*(?:\\$[^:]+:)?)?\\d|[^%:\\n\\x1a]+(?::(?:~(?:-?\\d+)?(?:,(?:-?\\d+)?)?|(?:[^%\\n\\x1a^]|\\^[^%\\n\\x1a])[^=\\n\\x1a]*=(?:[^%\\n\\x1a^]|\\^[^%\\n\\x1a])*)?)?%))|(?:\\^?![^!:\\n\\x1a]+(?::(?:~(?:-?\\d+)?(?:,(?:-?\\d+)?)?|(?:[^!\\n\\x1a^]|\\^[^!\\n\\x1a])[^=\\n\\x1a]*=(?:[^!\\n\\x1a^]|\\^[^!\\n\\x1a])*)?)?\\^?!))|[\\w\\W])*?`)([\\n\\x1a\\t\\v\\f\\r ,;=\\xa0]*)(\\))", ByGroups(UsingSelf("bqstring"), Text, Punctuation), nil},
			Include("for2"),
		},
		// Numeric range of "for /l".
		"for/l": {
			{`-?\d+`, LiteralNumberInteger, nil},
			Include("for2"),
		},
		// Condition of an if statement (cmdextversion/errorlevel/defined/
		// exist/numeric comparison, else fall through to string comparison).
		"if": {
			{`((?:cmdextversion|errorlevel)(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(\d+)`, ByGroups(Keyword, UsingSelf("text"), LiteralNumberInteger), Pop(1)},
			{`(defined(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))((?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))`, ByGroups(Keyword, UsingSelf("text"), UsingSelf("variable")), Pop(1)},
			{`(exist(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))`, ByGroups(Keyword, UsingSelf("text")), Pop(1)},
			{`((?:-?(?:0[0-7]+|0x[\da-f]+|\d+)(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))((?:equ|geq|gtr|leq|lss|neq))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)(?:-?(?:0[0-7]+|0x[\da-f]+|\d+)(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))`, ByGroups(UsingSelf("arithmetic"), OperatorWord, UsingSelf("arithmetic")), Pop(1)},
			{`(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+)`, UsingSelf("text"), Push("#pop", "if2")},
		},
		// Right-hand side of a string/relational if comparison.
		"if2": {
			{`((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(==)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))`, ByGroups(UsingSelf("text"), Operator, UsingSelf("text")), Pop(1)},
			{`((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))((?:equ|geq|gtr|leq|lss|neq))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))`, ByGroups(UsingSelf("text"), OperatorWord, UsingSelf("text")), Pop(1)},
		},
		// After an if condition: optional ( ... ) block ("(?" is a literal
		// state name, not a regex).
		"(?": {
			{`(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)`, UsingSelf("text"), nil},
			{`\(`, Punctuation, Push("#pop", "else?", "root/compound")},
			Default(Pop(1)),
		},
		// After an if-block: optional "else" keyword.
		"else?": {
			{`(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)`, UsingSelf("text"), nil},
			{`else(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])`, Keyword, Pop(1)},
			Default(Pop(1)),
		},
	}
}
@@ -0,0 +1,80 @@ | |||
package b | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Bibtex lexer.
//
// BibTeX entry keys and field names are case-insensitive; NotMultiline
// keeps ^/$ from matching at inner line boundaries.
var Bibtex = internal.Register(MustNewLazyLexer(
	&Config{
		Name:            "BibTeX",
		Aliases:         []string{"bib", "bibtex"},
		Filenames:       []string{"*.bib"},
		MimeTypes:       []string{"text/x-bibtex"},
		NotMultiline:    true,
		CaseInsensitive: true,
	},
	bibtexRules,
))
// bibtexRules returns the token rules for the BibTeX lexer.
//
// Entries are parsed as "@type { key, field = value, ... }": the opening
// "@..." match pushes closing-brace/body/opening-brace states so the
// delimiters are consumed in order as each state pops.
func bibtexRules() Rules {
	return Rules{
		"root": {
			Include("whitespace"),
			{`@comment`, Comment, nil},
			{`@preamble`, NameClass, Push("closing-brace", "value", "opening-brace")},
			{`@string`, NameClass, Push("closing-brace", "field", "opening-brace")},
			// Any other @word starts a regular entry.
			{"@[a-z_@!$&*+\\-./:;<>?\\[\\\\\\]^`|~][\\w@!$&*+\\-./:;<>?\\[\\\\\\]^`|~]*", NameClass, Push("closing-brace", "command-body", "opening-brace")},
			// Text between entries is treated as a comment.
			{`.+`, Comment, nil},
		},
		// Entries may be delimited with {} or ().
		"opening-brace": {
			Include("whitespace"),
			{`[{(]`, Punctuation, Pop(1)},
		},
		"closing-brace": {
			Include("whitespace"),
			{`[})]`, Punctuation, Pop(1)},
		},
		// Citation key, then the field list.
		"command-body": {
			Include("whitespace"),
			{`[^\s\,\}]+`, NameLabel, Push("#pop", "fields")},
		},
		// Comma-separated "name = value" pairs.
		"fields": {
			Include("whitespace"),
			{`,`, Punctuation, Push("field")},
			Default(Pop(1)),
		},
		"field": {
			Include("whitespace"),
			{"[a-z_@!$&*+\\-./:;<>?\\[\\\\\\]^`|~][\\w@!$&*+\\-./:;<>?\\[\\\\\\]^`|~]*", NameAttribute, Push("value", "=")},
			Default(Pop(1)),
		},
		"=": {
			Include("whitespace"),
			{`=`, Punctuation, Pop(1)},
		},
		// A value: macro name, quoted or braced string, number; "#" is the
		// BibTeX concatenation operator.
		"value": {
			Include("whitespace"),
			{"[a-z_@!$&*+\\-./:;<>?\\[\\\\\\]^`|~][\\w@!$&*+\\-./:;<>?\\[\\\\\\]^`|~]*", NameVariable, nil},
			{`"`, LiteralString, Push("quoted-string")},
			{`\{`, LiteralString, Push("braced-string")},
			{`[\d]+`, LiteralNumber, nil},
			{`#`, Punctuation, nil},
			Default(Pop(1)),
		},
		"quoted-string": {
			{`\{`, LiteralString, Push("braced-string")},
			{`"`, LiteralString, Pop(1)},
			{`[^\{\"]+`, LiteralString, nil},
		},
		// Push() with no state name re-enters the current state, which
		// gives balanced counting of nested braces.
		"braced-string": {
			{`\{`, LiteralString, Push()},
			{`\}`, LiteralString, Pop(1)},
			{`[^\{\}]+`, LiteralString, nil},
		},
		"whitespace": {
			{`\s+`, Text, nil},
		},
	}
}
@@ -0,0 +1,112 @@ | |||
package b | |||
import ( | |||
"strings" | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Bicep lexer (Azure Resource Manager's Bicep DSL).
var Bicep = internal.Register(MustNewLazyLexer(
	&Config{
		Name:      "Bicep",
		Aliases:   []string{"bicep"},
		Filenames: []string{"*.bicep"},
	},
	bicepRules,
))
func bicepRules() Rules { | |||
bicepFunctions := []string{ | |||
"any", | |||
"array", | |||
"concat", | |||
"contains", | |||
"empty", | |||
"first", | |||
"intersection", | |||
"items", | |||
"last", | |||
"length", | |||
"min", | |||
"max", | |||
"range", | |||
"skip", | |||
"take", | |||
"union", | |||
"dateTimeAdd", | |||
"utcNow", | |||
"deployment", | |||
"environment", | |||
"loadFileAsBase64", | |||
"loadTextContent", | |||
"int", | |||
"json", | |||
"extensionResourceId", | |||
"getSecret", | |||
"list", | |||
"listKeys", | |||
"listKeyValue", | |||
"listAccountSas", | |||
"listSecrets", | |||
"pickZones", | |||
"reference", | |||
"resourceId", | |||
"subscriptionResourceId", | |||
"tenantResourceId", | |||
"managementGroup", | |||
"resourceGroup", | |||
"subscription", | |||
"tenant", | |||
"base64", | |||
"base64ToJson", | |||
"base64ToString", | |||
"dataUri", | |||
"dataUriToString", | |||
"endsWith", | |||
"format", | |||
"guid", | |||
"indexOf", | |||
"lastIndexOf", | |||
"length", | |||
"newGuid", | |||
"padLeft", | |||
"replace", | |||
"split", | |||
"startsWith", | |||
"string", | |||
"substring", | |||
"toLower", | |||
"toUpper", | |||
"trim", | |||
"uniqueString", | |||
"uri", | |||
"uriComponent", | |||
"uriComponentToString", | |||
} | |||
return Rules{ | |||
"root": { | |||
{`//[^\n\r]+`, CommentSingle, nil}, | |||
{`/\*.*?\*/`, CommentMultiline, nil}, | |||
{`([']?\w+[']?)(:)`, ByGroups(NameProperty, Punctuation), nil}, | |||
{`\b('(resourceGroup|subscription|managementGroup|tenant)')\b`, KeywordNamespace, nil}, | |||
{`'[\w\$\{\(\)\}\.]{1,}?'`, LiteralStringInterpol, nil}, | |||
{`('''|').*?('''|')`, LiteralString, nil}, | |||
{`\b(allowed|batchSize|description|maxLength|maxValue|metadata|minLength|minValue|secure)\b`, NameDecorator, nil}, | |||
{`\b(az|sys)\.`, NameNamespace, nil}, | |||
{`\b(` + strings.Join(bicepFunctions, "|") + `)\b`, NameFunction, nil}, | |||
// https://docs.microsoft.com/en-us/azure/azure-resource-manager/bicep/bicep-functions-logical | |||
{`\b(bool)(\()`, ByGroups(NameFunction, Punctuation), nil}, | |||
{`\b(for|if|in)\b`, Keyword, nil}, | |||
{`\b(module|output|param|resource|var)\b`, KeywordDeclaration, nil}, | |||
{`\b(array|bool|int|object|string)\b`, KeywordType, nil}, | |||
// https://docs.microsoft.com/en-us/azure/azure-resource-manager/bicep/operators | |||
{`(>=|>|<=|<|==|!=|=~|!~|::|&&|\?\?|!|-|%|\*|\/|\+)`, Operator, nil}, | |||
{`[\(\)\[\]\.:\?{}@=]`, Punctuation, nil}, | |||
{`[\w_-]+`, Text, nil}, | |||
{`\s+`, TextWhitespace, nil}, | |||
}, | |||
} | |||
} |
@@ -0,0 +1,52 @@ | |||
package b | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Blitzbasic lexer.
//
// Covers BlitzBasic/Blitz3D/BlitzPlus source and .decls declaration files;
// the language is case-insensitive, hence CaseInsensitive in the config.
var Blitzbasic = internal.Register(MustNewLazyLexer(
	&Config{
		Name:            "BlitzBasic",
		Aliases:         []string{"blitzbasic", "b3d", "bplus"},
		Filenames:       []string{"*.bb", "*.decls"},
		MimeTypes:       []string{"text/x-bb"},
		CaseInsensitive: true,
	},
	blitzbasicRules,
))
// blitzbasicRules returns the lexing rules for BlitzBasic source.
// Rule order matters: earlier entries take precedence.
func blitzbasicRules() Rules {
	return Rules{
		"root": {
			{`[ \t]+`, Text, nil},
			// ';' starts a comment running to end of line.
			{`;.*?\n`, CommentSingle, nil},
			// Opening quote switches to the dedicated "string" state.
			{`"`, LiteralStringDouble, Push("string")},
			// Float literals before integers; the (?!\.) lookahead keeps a
			// second dot out of the match.
			{`[0-9]+\.[0-9]*(?!\.)`, LiteralNumberFloat, nil},
			{`\.[0-9]+(?!\.)`, LiteralNumberFloat, nil},
			{`[0-9]+`, LiteralNumberInteger, nil},
			// '$' prefixes hex literals, '%' prefixes binary literals.
			{`\$[0-9a-f]+`, LiteralNumberHex, nil},
			{`\%[10]+`, LiteralNumberBin, nil},
			// Word-form operators.
			{Words(`\b`, `\b`, `Shl`, `Shr`, `Sar`, `Mod`, `Or`, `And`, `Not`, `Abs`, `Sgn`, `Handle`, `Int`, `Float`, `Str`, `First`, `Last`, `Before`, `After`), Operator, nil},
			{`([+\-*/~=<>^])`, Operator, nil},
			{`[(),:\[\]\\]`, Punctuation, nil},
			// ".name" labels.
			{`\.([ \t]*)([a-z]\w*)`, NameLabel, nil},
			{`\b(New)\b([ \t]+)([a-z]\w*)`, ByGroups(KeywordReserved, Text, NameClass), nil},
			{`\b(Gosub|Goto)\b([ \t]+)([a-z]\w*)`, ByGroups(KeywordReserved, Text, NameLabel), nil},
			{`\b(Object)\b([ \t]*)([.])([ \t]*)([a-z]\w*)\b`, ByGroups(Operator, Text, Punctuation, Text, NameClass), nil},
			// Call: name, optional type sigil (@/#/$/%) or ".Type" suffix, then '('.
			{`\b([a-z]\w*)(?:([ \t]*)(@{1,2}|[#$%])|([ \t]*)([.])([ \t]*)(?:([a-z]\w*)))?\b([ \t]*)(\()`, ByGroups(NameFunction, Text, KeywordType, Text, Punctuation, Text, NameClass, Text, Punctuation), nil},
			{`\b(Function)\b([ \t]+)([a-z]\w*)(?:([ \t]*)(@{1,2}|[#$%])|([ \t]*)([.])([ \t]*)(?:([a-z]\w*)))?`, ByGroups(KeywordReserved, Text, NameFunction, Text, KeywordType, Text, Punctuation, Text, NameClass), nil},
			{`\b(Type)([ \t]+)([a-z]\w*)`, ByGroups(KeywordReserved, Text, NameClass), nil},
			{`\b(Pi|True|False|Null)\b`, KeywordConstant, nil},
			{`\b(Local|Global|Const|Field|Dim)\b`, KeywordDeclaration, nil},
			{Words(`\b`, `\b`, `End`, `Return`, `Exit`, `Chr`, `Len`, `Asc`, `New`, `Delete`, `Insert`, `Include`, `Function`, `Type`, `If`, `Then`, `Else`, `ElseIf`, `EndIf`, `For`, `To`, `Next`, `Step`, `Each`, `While`, `Wend`, `Repeat`, `Until`, `Forever`, `Select`, `Case`, `Default`, `Goto`, `Gosub`, `Data`, `Read`, `Restore`), KeywordReserved, nil},
			// Bare variable, with the same optional type sigil / ".Type" suffix.
			{`([a-z]\w*)(?:([ \t]*)(@{1,2}|[#$%])|([ \t]*)([.])([ \t]*)(?:([a-z]\w*)))?`, ByGroups(NameVariable, Text, KeywordType, Text, Punctuation, Text, NameClass), nil},
		},
		// Inside a double-quoted string: "" is consumed as content (so it
		// does not terminate), and a closing quote (optionally followed by
		// 'C') pops back to root.
		"string": {
			{`""`, LiteralStringDouble, nil},
			{`"C?`, LiteralStringDouble, Pop(1)},
			{`[^"]+`, LiteralStringDouble, nil},
		},
	}
}
@@ -0,0 +1,28 @@ | |||
package b | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Bnf lexer. | |||
var Bnf = internal.Register(MustNewLazyLexer( | |||
&Config{ | |||
Name: "BNF", | |||
Aliases: []string{"bnf"}, | |||
Filenames: []string{"*.bnf"}, | |||
MimeTypes: []string{"text/x-bnf"}, | |||
}, | |||
bnfRules, | |||
)) | |||
// bnfRules returns the lexing rules for BNF grammars.
func bnfRules() Rules {
	return Rules{
		"root": {
			// <non-terminal>: angle brackets as punctuation, name as class.
			{`(<)([ -;=?-~]+)(>)`, ByGroups(Punctuation, NameClass, Punctuation), nil},
			// Production operator.
			{`::=`, Operator, nil},
			// Everything else is plain text; the final '.' rule catches the
			// single characters the previous rule excludes.
			{`[^<>:]+`, Text, nil},
			{`.`, Text, nil},
		},
	}
}
@@ -0,0 +1,38 @@ | |||
package b | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Brainfuck lexer. | |||
var Brainfuck = internal.Register(MustNewLazyLexer( | |||
&Config{ | |||
Name: "Brainfuck", | |||
Aliases: []string{"brainfuck", "bf"}, | |||
Filenames: []string{"*.bf", "*.b"}, | |||
MimeTypes: []string{"application/x-brainfuck"}, | |||
}, | |||
brainfuckRules, | |||
)) | |||
// brainfuckRules returns the lexing rules for Brainfuck programs.
func brainfuckRules() Rules {
	return Rules{
		// Rules shared by both "root" and "loop".
		"common": {
			// I/O commands.
			{`[.,]+`, NameTag, nil},
			// Cell increment/decrement.
			{`[+-]+`, NameBuiltin, nil},
			// Pointer movement.
			{`[<>]+`, NameVariable, nil},
			// Any non-command character is treated as a comment.
			{`[^.,+\-<>\[\]]+`, Comment, nil},
		},
		"root": {
			{`\[`, Keyword, Push("loop")},
			// ']' with no matching '[' is an error at top level.
			{`\]`, Error, nil},
			Include("common"),
		},
		"loop": {
			// Nested loop: push another "loop" state.
			{`\[`, Keyword, Push()},
			{`\]`, Keyword, Pop(1)},
			Include("common"),
		},
	}
}
@@ -0,0 +1,96 @@ | |||
package c | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// C lexer. | |||
var C = internal.Register(MustNewLazyLexer( | |||
&Config{ | |||
Name: "C", | |||
Aliases: []string{"c"}, | |||
Filenames: []string{"*.c", "*.h", "*.idc", "*.x[bp]m"}, | |||
MimeTypes: []string{"text/x-chdr", "text/x-csrc", "image/x-xbitmap", "image/x-xpixmap"}, | |||
EnsureNL: true, | |||
}, | |||
cRules, | |||
)) | |||
// cRules returns the lexing rules for C source.
func cRules() Rules {
	return Rules{
		// Whitespace, comments and preprocessor dispatch; included by the
		// states below so these tokens are recognised everywhere.
		"whitespace": {
			{`^#if\s+0`, CommentPreproc, Push("if0")},
			{`^#`, CommentPreproc, Push("macro")},
			// Same two dispatches when a /*...*/ comment precedes the '#'.
			{`^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("if0")},
			{`^(\s*(?:/[*].*?[*]/\s*)?)(#)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("macro")},
			{`\n`, Text, nil},
			{`\s+`, Text, nil},
			// Backslash line continuation.
			{`\\\n`, Text, nil},
			{`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil},
			{`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil},
			// Unterminated block comment: consume the rest of the input.
			{`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil},
		},
		// Expression/statement-level tokens, shared by "statement" and "function".
		"statements": {
			{`(L?)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")},
			{`(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')`, ByGroups(LiteralStringAffix, LiteralStringChar, LiteralStringChar, LiteralStringChar), nil},
			{`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*`, LiteralNumberFloat, nil},
			{`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil},
			{`0x[0-9a-fA-F]+[LlUu]*`, LiteralNumberHex, nil},
			{`0[0-7]+[LlUu]*`, LiteralNumberOct, nil},
			{`\d+[LlUu]*`, LiteralNumberInteger, nil},
			// A stray block-comment terminator is an error.
			{`\*/`, Error, nil},
			{`[~!%^&*+=|?:<>/-]`, Operator, nil},
			{`[()\[\],.]`, Punctuation, nil},
			{Words(``, `\b`, `asm`, `auto`, `break`, `case`, `const`, `continue`, `default`, `do`, `else`, `enum`, `extern`, `for`, `goto`, `if`, `register`, `restricted`, `return`, `sizeof`, `static`, `struct`, `switch`, `typedef`, `union`, `volatile`, `while`), Keyword, nil},
			{`(bool|int|long|float|short|double|char((8|16|32)_t)?|unsigned|signed|void|u?int(_fast|_least|)(8|16|32|64)_t)\b`, KeywordType, nil},
			{Words(``, `\b`, `inline`, `_inline`, `__inline`, `naked`, `restrict`, `thread`, `typename`), KeywordReserved, nil},
			{`(__m(128i|128d|128|64))\b`, KeywordReserved, nil},
			// Double-underscore-prefixed compiler extension keywords.
			{Words(`__`, `\b`, `asm`, `int8`, `based`, `except`, `int16`, `stdcall`, `cdecl`, `fastcall`, `int32`, `declspec`, `finally`, `int64`, `try`, `leave`, `wchar_t`, `w64`, `unaligned`, `raise`, `noop`, `identifier`, `forceinline`, `assume`), KeywordReserved, nil},
			{`(true|false|NULL)\b`, NameBuiltin, nil},
			// Label: identifier followed by ':' but not '::'.
			{`([a-zA-Z_]\w*)(\s*)(:)(?!:)`, ByGroups(NameLabel, Text, Punctuation), nil},
			{`[a-zA-Z_]\w*`, Name, nil},
		},
		"root": {
			Include("whitespace"),
			// Function definition: type part, name, argument list, then '{'.
			{`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), Push("function")},
			// Function declaration: same shape but terminated by ';'.
			{`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), nil},
			Default(Push("statement")),
		},
		"statement": {
			Include("whitespace"),
			Include("statements"),
			{`[{}]`, Punctuation, nil},
			{`;`, Punctuation, Pop(1)},
		},
		// Inside a function body; nested braces push/pop this state.
		"function": {
			Include("whitespace"),
			Include("statements"),
			{`;`, Punctuation, nil},
			{`\{`, Punctuation, Push()},
			{`\}`, Punctuation, Pop(1)},
		},
		// Double-quoted string body with C escape sequences.
		"string": {
			{`"`, LiteralString, Pop(1)},
			{`\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})`, LiteralStringEscape, nil},
			{`[^\\"\n]+`, LiteralString, nil},
			{`\\\n`, LiteralString, nil},
			{`\\`, LiteralString, nil},
		},
		// After '#': one preprocessor line, including '\'-continuations.
		"macro": {
			{`(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)`, ByGroups(CommentPreproc, Text, CommentPreprocFile), nil},
			{`[^/\n]+`, CommentPreproc, nil},
			{`/[*](.|\n)*?[*]/`, CommentMultiline, nil},
			{`//.*?\n`, CommentSingle, Pop(1)},
			{`/`, CommentPreproc, nil},
			// A newline preceded by '\' continues the directive (lookbehind).
			{`(?<=\\)\n`, CommentPreproc, nil},
			{`\n`, CommentPreproc, Pop(1)},
		},
		// Inside "#if 0": skip everything while tracking nested #if/#endif.
		"if0": {
			{`^\s*#if.*?(?<!\\)\n`, CommentPreproc, Push()},
			{`^\s*#el(?:se|if).*\n`, CommentPreproc, Pop(1)},
			{`^\s*#endif.*?(?<!\\)\n`, CommentPreproc, Pop(1)},
			{`.*?\n`, Comment, nil},
		},
	}
}
@@ -0,0 +1,216 @@ | |||
package c | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// caddyfileCommonRules returns the rules common to both of the lexer
// variants (Caddyfile and Caddyfile Directives). The entry point used by
// both is "site_block_common".
func caddyfileCommonRules() Rules {
	return Rules{
		"site_block_common": {
			// Import keyword
			{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
			// Matcher definition
			{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
			// Matcher token stub for docs
			{`\[\<matcher\>\]`, NameDecorator, Push("matcher")},
			// These cannot have matchers but may have things that look like
			// matchers in their arguments, so we just parse as a subdirective.
			{`try_files`, Keyword, Push("subdirective")},
			// These are special, they can nest more directives
			{`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")},
			// Any other directive
			{`[^\s#]+`, Keyword, Push("directive")},
			Include("base"),
		},
		"matcher": {
			{`\{`, Punctuation, Push("block")},
			// Not can be one-liner
			{`not`, Keyword, Push("deep_not_matcher")},
			// Any other same-line matcher
			{`[^\s#]+`, Keyword, Push("arguments")},
			// Terminators
			{`\n`, Text, Pop(1)},
			{`\}`, Punctuation, Pop(1)},
			Include("base"),
		},
		// Braced block body; Pop(2) leaves both the block and the state
		// that pushed it.
		"block": {
			{`\}`, Punctuation, Pop(2)},
			// Not can be one-liner
			{`not`, Keyword, Push("not_matcher")},
			// Any other subdirective
			{`[^\s#]+`, Keyword, Push("subdirective")},
			Include("base"),
		},
		"nested_block": {
			{`\}`, Punctuation, Pop(2)},
			// Matcher definition
			{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
			// Something that starts with literally < is probably a docs stub
			{`\<[^#]+\>`, Keyword, Push("nested_directive")},
			// Any other directive
			{`[^\s#]+`, Keyword, Push("nested_directive")},
			Include("base"),
		},
		"not_matcher": {
			{`\}`, Punctuation, Pop(2)},
			{`\{(?=\s)`, Punctuation, Push("block")},
			{`[^\s#]+`, Keyword, Push("arguments")},
			{`\s+`, Text, nil},
		},
		"deep_not_matcher": {
			{`\}`, Punctuation, Pop(2)},
			{`\{(?=\s)`, Punctuation, Push("block")},
			{`[^\s#]+`, Keyword, Push("deep_subdirective")},
			{`\s+`, Text, nil},
		},
		"directive": {
			{`\{(?=\s)`, Punctuation, Push("block")},
			Include("matcher_token"),
			Include("comments_pop_1"),
			{`\n`, Text, Pop(1)},
			Include("base"),
		},
		"nested_directive": {
			{`\{(?=\s)`, Punctuation, Push("nested_block")},
			Include("matcher_token"),
			Include("comments_pop_1"),
			{`\n`, Text, Pop(1)},
			Include("base"),
		},
		"subdirective": {
			{`\{(?=\s)`, Punctuation, Push("block")},
			Include("comments_pop_1"),
			{`\n`, Text, Pop(1)},
			Include("base"),
		},
		"arguments": {
			{`\{(?=\s)`, Punctuation, Push("block")},
			Include("comments_pop_2"),
			{`\\\n`, Text, nil}, // Skip escaped newlines
			{`\n`, Text, Pop(2)},
			Include("base"),
		},
		"deep_subdirective": {
			{`\{(?=\s)`, Punctuation, Push("block")},
			Include("comments_pop_3"),
			{`\n`, Text, Pop(3)},
			Include("base"),
		},
		"matcher_token": {
			{`@[^\s]+`, NameDecorator, Push("arguments")},          // Named matcher
			{`/[^\s]+`, NameDecorator, Push("arguments")},          // Path matcher
			{`\*`, NameDecorator, Push("arguments")},               // Wildcard path matcher
			{`\[\<matcher\>\]`, NameDecorator, Push("arguments")},  // Matcher token stub for docs
		},
		"comments": {
			{`^#.*\n`, CommentSingle, nil},   // Comment at start of line
			{`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace
		},
		// The comments_pop_N variants additionally pop N states, so a
		// trailing comment terminates the construct it follows.
		"comments_pop_1": {
			{`^#.*\n`, CommentSingle, Pop(1)},   // Comment at start of line
			{`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace
		},
		"comments_pop_2": {
			{`^#.*\n`, CommentSingle, Pop(2)},   // Comment at start of line
			{`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace
		},
		"comments_pop_3": {
			{`^#.*\n`, CommentSingle, Pop(3)},   // Comment at start of line
			{`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace
		},
		// Catch-all tokens shared by most states.
		"base": {
			Include("comments"),
			{`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil},
			{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil},
			{`[a-z-]+/[a-z-+]+`, LiteralString, nil},
			{`[0-9]+[km]?\b`, LiteralNumberInteger, nil},
			{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder
			{`\[(?=[^#{}$]+\])`, Punctuation, nil},
			{`\]|\|`, Punctuation, nil},
			{`[^\s#{}$\]]+`, LiteralString, nil},
			{`/[^\s#]*`, Name, nil},
			{`\s+`, Text, nil},
		},
	}
}
// Caddyfile lexer. | |||
var Caddyfile = internal.Register(MustNewLazyLexer( | |||
&Config{ | |||
Name: "Caddyfile", | |||
Aliases: []string{"caddyfile", "caddy"}, | |||
Filenames: []string{"Caddyfile*"}, | |||
MimeTypes: []string{}, | |||
}, | |||
caddyfileRules, | |||
)) | |||
// caddyfileRules returns the rules for the full Caddyfile lexer, merged
// with the shared rules from caddyfileCommonRules.
func caddyfileRules() Rules {
	return Rules{
		"root": {
			Include("comments"),
			// Global options block
			{`^\s*(\{)\s*$`, ByGroups(Punctuation), Push("globals")},
			// Snippets
			{`(\([^\s#]+\))(\s*)(\{)`, ByGroups(NameVariableAnonymous, Text, Punctuation), Push("snippet")},
			// Site label
			{`[^#{(\s,]+`, GenericHeading, Push("label")},
			// Site label with placeholder
			{`\{[\w+.\$-]+\}`, LiteralStringEscape, Push("label")},
			{`\s+`, Text, nil},
		},
		// Inside the top-of-file global options block.
		"globals": {
			{`\}`, Punctuation, Pop(1)},
			{`[^\s#]+`, Keyword, Push("directive")},
			Include("base"),
		},
		// Inside a named (reusable) snippet definition.
		"snippet": {
			{`\}`, Punctuation, Pop(1)},
			// Matcher definition
			{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
			// Any directive
			{`[^\s#]+`, Keyword, Push("directive")},
			Include("base"),
		},
		"label": {
			// Allow multiple labels, comma separated, newlines after
			// a comma means another label is coming
			{`,\s*\n?`, Text, nil},
			{` `, Text, nil},
			// Site label with placeholder
			{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil},
			// Site label
			{`[^#{(\s,]+`, GenericHeading, nil},
			// Comment after non-block label (hack because comments end in \n)
			{`#.*\n`, CommentSingle, Push("site_block")},
			// Note: if \n, we'll never pop out of the site_block, it's valid
			{`\{(?=\s)|\n`, Punctuation, Push("site_block")},
		},
		"site_block": {
			{`\}`, Punctuation, Pop(2)},
			Include("site_block_common"),
		},
	}.Merge(caddyfileCommonRules())
}
// Caddyfile directive-only lexer. | |||
var CaddyfileDirectives = internal.Register(MustNewLazyLexer( | |||
&Config{ | |||
Name: "Caddyfile Directives", | |||
Aliases: []string{"caddyfile-directives", "caddyfile-d", "caddy-d"}, | |||
Filenames: []string{}, | |||
MimeTypes: []string{}, | |||
}, | |||
caddyfileDirectivesRules, | |||
)) | |||
// caddyfileDirectivesRules returns the rules for the directive-only
// Caddyfile lexer: its root is the shared site-block interior.
func caddyfileDirectivesRules() Rules {
	return Rules{
		// Same as "site_block" in Caddyfile
		"root": {
			Include("site_block_common"),
		},
	}.Merge(caddyfileCommonRules())
}
@@ -0,0 +1,65 @@ | |||
package c | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Cap'N'Proto Proto lexer. | |||
var CapNProto = internal.Register(MustNewLazyLexer( | |||
&Config{ | |||
Name: "Cap'n Proto", | |||
Aliases: []string{"capnp"}, | |||
Filenames: []string{"*.capnp"}, | |||
MimeTypes: []string{}, | |||
}, | |||
capNProtoRules, | |||
)) | |||
// capNProtoRules returns the lexing rules for Cap'n Proto schemas.
// '=' enters "expression", ':' enters "type" and '$' enters "annotation";
// each has a matching paren-tracking sub-state for bracketed content.
func capNProtoRules() Rules {
	return Rules{
		"root": {
			// '#' comments run to end of line.
			{`#.*?$`, CommentSingle, nil},
			// '@'-prefixed tokens are highlighted as decorators.
			{`@[0-9a-zA-Z]*`, NameDecorator, nil},
			{`=`, Literal, Push("expression")},
			{`:`, NameClass, Push("type")},
			{`\$`, NameAttribute, Push("annotation")},
			{`(struct|enum|interface|union|import|using|const|annotation|extends|in|of|on|as|with|from|fixed)\b`, Keyword, nil},
			{`[\w.]+`, Name, nil},
			{`[^#@=:$\w]+`, Text, nil},
		},
		// After ':' — a type expression; pops on any delimiter.
		"type": {
			{`[^][=;,(){}$]+`, NameClass, nil},
			{`[[(]`, NameClass, Push("parentype")},
			Default(Pop(1)),
		},
		// Bracketed part of a type; nests via Push().
		"parentype": {
			{`[^][;()]+`, NameClass, nil},
			{`[[(]`, NameClass, Push()},
			{`[])]`, NameClass, Pop(1)},
			Default(Pop(1)),
		},
		// After '=' — a value expression.
		"expression": {
			{`[^][;,(){}$]+`, Literal, nil},
			{`[[(]`, Literal, Push("parenexp")},
			Default(Pop(1)),
		},
		"parenexp": {
			{`[^][;()]+`, Literal, nil},
			{`[[(]`, Literal, Push()},
			{`[])]`, Literal, Pop(1)},
			Default(Pop(1)),
		},
		// After '$' — an annotation application.
		"annotation": {
			{`[^][;,(){}=:]+`, NameAttribute, nil},
			{`[[(]`, NameAttribute, Push("annexp")},
			Default(Pop(1)),
		},
		"annexp": {
			{`[^][;()]+`, NameAttribute, nil},
			{`[[(]`, NameAttribute, Push()},
			{`[])]`, NameAttribute, Pop(1)},
			Default(Pop(1)),
		},
	}
}
@@ -0,0 +1,67 @@ | |||
package c | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Ceylon lexer. | |||
var Ceylon = internal.Register(MustNewLazyLexer( | |||
&Config{ | |||
Name: "Ceylon", | |||
Aliases: []string{"ceylon"}, | |||
Filenames: []string{"*.ceylon"}, | |||
MimeTypes: []string{"text/x-ceylon"}, | |||
DotAll: true, | |||
}, | |||
ceylonRules, | |||
)) | |||
// ceylonRules returns the lexing rules for Ceylon source.
func ceylonRules() Rules {
	return Rules{
		"root": {
			// Method/function header: qualifiers then name then '('.
			{`^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil},
			{`[^\S\n]+`, Text, nil},
			{`//.*?\n`, CommentSingle, nil},
			// Block comments can nest; handled in the "comment" state.
			{`/\*`, CommentMultiline, Push("comment")},
			{`(shared|abstract|formal|default|actual|variable|deprecated|small|late|literal|doc|by|see|throws|optional|license|tagged|final|native|annotation|sealed)\b`, NameDecorator, nil},
			{`(break|case|catch|continue|else|finally|for|in|if|return|switch|this|throw|try|while|is|exists|dynamic|nonempty|then|outer|assert|let)\b`, Keyword, nil},
			{`(abstracts|extends|satisfies|super|given|of|out|assign)\b`, KeywordDeclaration, nil},
			{`(function|value|void|new)\b`, KeywordType, nil},
			{`(assembly|module|package)(\s+)`, ByGroups(KeywordNamespace, Text), nil},
			{`(true|false|null)\b`, KeywordConstant, nil},
			{`(class|interface|object|alias)(\s+)`, ByGroups(KeywordDeclaration, Text), Push("class")},
			{`(import)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")},
			{`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
			{`'\\.'|'[^\\]'|'\\\{#[0-9a-fA-F]{4}\}'`, LiteralStringChar, nil},
			// String with `` ... `` interpolation segments.
			{"\".*``.*``.*\"", LiteralStringInterpol, nil},
			{`(\.)([a-z_]\w*)`, ByGroups(Operator, NameAttribute), nil},
			{`[a-zA-Z_]\w*:`, NameLabel, nil},
			{`[a-zA-Z_]\w*`, Name, nil},
			{`[~^*!%&\[\](){}<>|+=:;,./?-]`, Operator, nil},
			// Numeric literals, underscore-grouped variants first; the
			// trailing letter is a magnitude suffix.
			{`\d{1,3}(_\d{3})+\.\d{1,3}(_\d{3})+[kMGTPmunpf]?`, LiteralNumberFloat, nil},
			{`\d{1,3}(_\d{3})+\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?`, LiteralNumberFloat, nil},
			{`[0-9][0-9]*\.\d{1,3}(_\d{3})+[kMGTPmunpf]?`, LiteralNumberFloat, nil},
			{`[0-9][0-9]*\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?`, LiteralNumberFloat, nil},
			// '#' hex and '$' binary literals.
			{`#([0-9a-fA-F]{4})(_[0-9a-fA-F]{4})+`, LiteralNumberHex, nil},
			{`#[0-9a-fA-F]+`, LiteralNumberHex, nil},
			{`\$([01]{4})(_[01]{4})+`, LiteralNumberBin, nil},
			{`\$[01]+`, LiteralNumberBin, nil},
			{`\d{1,3}(_\d{3})+[kMGTP]?`, LiteralNumberInteger, nil},
			{`[0-9]+[kMGTP]?`, LiteralNumberInteger, nil},
			{`\n`, Text, nil},
		},
		// Name immediately after class/interface/object/alias.
		"class": {
			{`[A-Za-z_]\w*`, NameClass, Pop(1)},
		},
		// Dotted module path after import.
		"import": {
			{`[a-z][\w.]*`, NameNamespace, Pop(1)},
		},
		// Nested /* ... */ comment: Push() recurses for each inner '/*'.
		"comment": {
			{`[^*/]`, CommentMultiline, nil},
			{`/\*`, CommentMultiline, Push()},
			{`\*/`, CommentMultiline, Pop(1)},
			{`[*/]`, CommentMultiline, nil},
		},
	}
}
@@ -0,0 +1,60 @@ | |||
package c | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Cfengine3 lexer. | |||
var Cfengine3 = internal.Register(MustNewLazyLexer( | |||
&Config{ | |||
Name: "CFEngine3", | |||
Aliases: []string{"cfengine3", "cf3"}, | |||
Filenames: []string{"*.cf"}, | |||
MimeTypes: []string{}, | |||
}, | |||
cfengine3Rules, | |||
)) | |||
// cfengine3Rules returns the lexing rules for CFEngine 3 policies.
func cfengine3Rules() Rules {
	return Rules{
		"root": {
			// '#' comments run to end of line.
			{`#.*?\n`, Comment, nil},
			{`(body)(\s+)(\S+)(\s+)(control)`, ByGroups(Keyword, Text, Keyword, Text, Keyword), nil},
			// body/bundle with a parameter list enters "arglist".
			{`(body|bundle)(\s+)(\S+)(\s+)(\w+)(\()`, ByGroups(Keyword, Text, Keyword, Text, NameFunction, Punctuation), Push("arglist")},
			{`(body|bundle)(\s+)(\S+)(\s+)(\w+)`, ByGroups(Keyword, Text, Keyword, Text, NameFunction), nil},
			// Typed variable assignment: "name" string => ...
			{`(")([^"]+)(")(\s+)(string|slist|int|real)(\s*)(=>)(\s*)`, ByGroups(Punctuation, NameVariable, Punctuation, Text, KeywordType, Text, Operator, Text), nil},
			{`(\S+)(\s*)(=>)(\s*)`, ByGroups(KeywordReserved, Text, Operator, Text), nil},
			{`"`, LiteralString, Push("string")},
			{`(\w+)(\()`, ByGroups(NameFunction, Punctuation), nil},
			// Class expression terminated by '::'.
			{`([\w.!&|()]+)(::)`, ByGroups(NameClass, Punctuation), nil},
			// Promise type, e.g. "vars:".
			{`(\w+)(:)`, ByGroups(KeywordDeclaration, Punctuation), nil},
			// @{list} / @(list) expansions.
			{`@[{(][^)}]+[})]`, NameVariable, nil},
			{`[(){},;]`, Punctuation, nil},
			{`=>`, Operator, nil},
			{`->`, Operator, nil},
			{`\d+\.\d+`, LiteralNumberFloat, nil},
			{`\d+`, LiteralNumberInteger, nil},
			{`\w+`, NameFunction, nil},
			{`\s+`, Text, nil},
		},
		// Double-quoted string; ${...} / $(...) enter "interpol".
		"string": {
			{`\$[{(]`, LiteralStringInterpol, Push("interpol")},
			{`\\.`, LiteralStringEscape, nil},
			{`"`, LiteralString, Pop(1)},
			{`\n`, LiteralString, nil},
			{`.`, LiteralString, nil},
		},
		// Variable interpolation; nests via Push().
		"interpol": {
			{`\$[{(]`, LiteralStringInterpol, Push()},
			{`[})]`, LiteralStringInterpol, Pop(1)},
			{`[^${()}]+`, LiteralStringInterpol, nil},
		},
		// Parameter list of a body/bundle definition.
		"arglist": {
			{`\)`, Punctuation, Pop(1)},
			{`,`, Punctuation, nil},
			{`\w+`, NameVariable, nil},
			{`\s+`, Text, nil},
		},
	}
}
@@ -0,0 +1,67 @@ | |||
package c | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Chaiscript lexer. | |||
var Chaiscript = internal.Register(MustNewLazyLexer( | |||
&Config{ | |||
Name: "ChaiScript", | |||
Aliases: []string{"chai", "chaiscript"}, | |||
Filenames: []string{"*.chai"}, | |||
MimeTypes: []string{"text/x-chaiscript", "application/x-chaiscript"}, | |||
DotAll: true, | |||
}, | |||
chaiscriptRules, | |||
)) | |||
// chaiscriptRules returns the lexing rules for ChaiScript source.
func chaiscriptRules() Rules {
	return Rules{
		"commentsandwhitespace": {
			{`\s+`, Text, nil},
			{`//.*?\n`, CommentSingle, nil},
			{`/\*.*?\*/`, CommentMultiline, nil},
			{`^\#.*?\n`, CommentSingle, nil},
		},
		// Entered after tokens that may be followed by a regex literal,
		// so '/' is read as a regex rather than division.
		"slashstartsregex": {
			Include("commentsandwhitespace"),
			{`/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/([gim]+\b|\B)`, LiteralStringRegex, Pop(1)},
			{`(?=/)`, Text, Push("#pop", "badregex")},
			Default(Pop(1)),
		},
		// A '/' that failed to parse as a regex: skip to end of line.
		"badregex": {
			{`\n`, Text, Pop(1)},
		},
		"root": {
			Include("commentsandwhitespace"),
			{`\n`, Text, nil},
			{`[^\S\n]+`, Text, nil},
			// Operators and punctuation after which a regex may follow.
			{`\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|\.\.(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?`, Operator, Push("slashstartsregex")},
			{`[{(\[;,]`, Punctuation, Push("slashstartsregex")},
			{`[})\].]`, Punctuation, nil},
			{`[=+\-*/]`, Operator, nil},
			{`(for|in|while|do|break|return|continue|if|else|throw|try|catch)\b`, Keyword, Push("slashstartsregex")},
			{`(var)\b`, KeywordDeclaration, Push("slashstartsregex")},
			{`(attr|def|fun)\b`, KeywordReserved, nil},
			{`(true|false)\b`, KeywordConstant, nil},
			{`(eval|throw)\b`, NameBuiltin, nil},
			// Backtick-quoted names.
			{"`\\S+`", NameBuiltin, nil},
			{`[$a-zA-Z_]\w*`, NameOther, nil},
			{`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil},
			{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
			{`[0-9]+`, LiteralNumberInteger, nil},
			{`"`, LiteralStringDouble, Push("dqstring")},
			{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
		},
		// Double-quoted string with ${...} interpolation.
		"dqstring": {
			{`\$\{[^"}]+?\}`, LiteralStringInterpol, nil},
			{`\$`, LiteralStringDouble, nil},
			{`\\\\`, LiteralStringDouble, nil},
			{`\\"`, LiteralStringDouble, nil},
			{`[^\\"$]+`, LiteralStringDouble, nil},
			{`"`, LiteralStringDouble, Pop(1)},
		},
	}
}
@@ -0,0 +1,41 @@ | |||
package c | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
. "github.com/alecthomas/chroma/lexers/p" // nolint | |||
) | |||
// Cheetah lexer. | |||
var Cheetah = internal.Register(MustNewLazyLexer( | |||
&Config{ | |||
Name: "Cheetah", | |||
Aliases: []string{"cheetah", "spitfire"}, | |||
Filenames: []string{"*.tmpl", "*.spt"}, | |||
MimeTypes: []string{"application/x-cheetah", "application/x-spitfire"}, | |||
}, | |||
cheetahRules, | |||
)) | |||
// cheetahRules returns the lexing rules for Cheetah templates. Embedded
// Python in directives and $-substitutions is delegated to the Python
// lexer via Using(Python).
func cheetahRules() Rules {
	return Rules{
		"root": {
			// '##' line comment.
			{`(##[^\n]*)$`, ByGroups(Comment), nil},
			// '#* ... *#' block comment.
			{`#[*](.|\n)*?[*]#`, Comment, nil},
			{`#end[^#\n]*(?:#|$)`, CommentPreproc, nil},
			{`#slurp$`, CommentPreproc, nil},
			// '#directive args' — args highlighted as Python.
			{`(#[a-zA-Z]+)([^#\n]*)(#|$)`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil},
			// $name substitution.
			{`(\$)([a-zA-Z_][\w.]*\w)`, ByGroups(CommentPreproc, Using(Python)), nil},
			// ${expr} substitution.
			{`(\$\{!?)(.*?)(\})(?s)`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil},
			// Verbatim template text, up to the next directive or
			// substitution; (?sx) makes the pattern whitespace-insensitive
			// so it can be written with inline comments.
			{`(?sx)
                (.+?)               # anything, followed by:
                (?:
                 (?=\#[#a-zA-Z]*) | # an eval comment
                 (?=\$[a-zA-Z_{])  | # a substitution
                 \Z                 # end of string
                )
            `, Other, nil},
			{`\s+`, Text, nil},
		},
	}
}
@@ -0,0 +1,310 @@ | |||
package c | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
var (
	// Standard Common Lisp function names; the TypeMapping on the
	// CommonLisp lexer retags matching NameVariable tokens as NameFunction.
	clBuiltinFunctions = []string{
		"<", "<=", "=", ">", ">=", "-", "/", "/=", "*", "+", "1-", "1+",
		"abort", "abs", "acons", "acos", "acosh", "add-method", "adjoin",
		"adjustable-array-p", "adjust-array", "allocate-instance",
		"alpha-char-p", "alphanumericp", "append", "apply", "apropos",
		"apropos-list", "aref", "arithmetic-error-operands",
		"arithmetic-error-operation", "array-dimension", "array-dimensions",
		"array-displacement", "array-element-type", "array-has-fill-pointer-p",
		"array-in-bounds-p", "arrayp", "array-rank", "array-row-major-index",
		"array-total-size", "ash", "asin", "asinh", "assoc", "assoc-if",
		"assoc-if-not", "atan", "atanh", "atom", "bit", "bit-and", "bit-andc1",
		"bit-andc2", "bit-eqv", "bit-ior", "bit-nand", "bit-nor", "bit-not",
		"bit-orc1", "bit-orc2", "bit-vector-p", "bit-xor", "boole",
		"both-case-p", "boundp", "break", "broadcast-stream-streams",
		"butlast", "byte", "byte-position", "byte-size", "caaaar", "caaadr",
		"caaar", "caadar", "caaddr", "caadr", "caar", "cadaar", "cadadr",
		"cadar", "caddar", "cadddr", "caddr", "cadr", "call-next-method", "car",
		"cdaaar", "cdaadr", "cdaar", "cdadar", "cdaddr", "cdadr", "cdar",
		"cddaar", "cddadr", "cddar", "cdddar", "cddddr", "cdddr", "cddr", "cdr",
		"ceiling", "cell-error-name", "cerror", "change-class", "char", "char<",
		"char<=", "char=", "char>", "char>=", "char/=", "character",
		"characterp", "char-code", "char-downcase", "char-equal",
		"char-greaterp", "char-int", "char-lessp", "char-name",
		"char-not-equal", "char-not-greaterp", "char-not-lessp", "char-upcase",
		"cis", "class-name", "class-of", "clear-input", "clear-output",
		"close", "clrhash", "code-char", "coerce", "compile",
		"compiled-function-p", "compile-file", "compile-file-pathname",
		"compiler-macro-function", "complement", "complex", "complexp",
		"compute-applicable-methods", "compute-restarts", "concatenate",
		"concatenated-stream-streams", "conjugate", "cons", "consp",
		"constantly", "constantp", "continue", "copy-alist", "copy-list",
		"copy-pprint-dispatch", "copy-readtable", "copy-seq", "copy-structure",
		"copy-symbol", "copy-tree", "cos", "cosh", "count", "count-if",
		"count-if-not", "decode-float", "decode-universal-time", "delete",
		"delete-duplicates", "delete-file", "delete-if", "delete-if-not",
		"delete-package", "denominator", "deposit-field", "describe",
		"describe-object", "digit-char", "digit-char-p", "directory",
		"directory-namestring", "disassemble", "documentation", "dpb",
		"dribble", "echo-stream-input-stream", "echo-stream-output-stream",
		"ed", "eighth", "elt", "encode-universal-time", "endp",
		"enough-namestring", "ensure-directories-exist",
		"ensure-generic-function", "eq", "eql", "equal", "equalp", "error",
		"eval", "evenp", "every", "exp", "export", "expt", "fboundp",
		"fceiling", "fdefinition", "ffloor", "fifth", "file-author",
		"file-error-pathname", "file-length", "file-namestring",
		"file-position", "file-string-length", "file-write-date",
		"fill", "fill-pointer", "find", "find-all-symbols", "find-class",
		"find-if", "find-if-not", "find-method", "find-package", "find-restart",
		"find-symbol", "finish-output", "first", "float", "float-digits",
		"floatp", "float-precision", "float-radix", "float-sign", "floor",
		"fmakunbound", "force-output", "format", "fourth", "fresh-line",
		"fround", "ftruncate", "funcall", "function-keywords",
		"function-lambda-expression", "functionp", "gcd", "gensym", "gentemp",
		"get", "get-decoded-time", "get-dispatch-macro-character", "getf",
		"gethash", "get-internal-real-time", "get-internal-run-time",
		"get-macro-character", "get-output-stream-string", "get-properties",
		"get-setf-expansion", "get-universal-time", "graphic-char-p",
		"hash-table-count", "hash-table-p", "hash-table-rehash-size",
		"hash-table-rehash-threshold", "hash-table-size", "hash-table-test",
		"host-namestring", "identity", "imagpart", "import",
		"initialize-instance", "input-stream-p", "inspect",
		"integer-decode-float", "integer-length", "integerp",
		"interactive-stream-p", "intern", "intersection",
		"invalid-method-error", "invoke-debugger", "invoke-restart",
		"invoke-restart-interactively", "isqrt", "keywordp", "last", "lcm",
		"ldb", "ldb-test", "ldiff", "length", "lisp-implementation-type",
		"lisp-implementation-version", "list", "list*", "list-all-packages",
		"listen", "list-length", "listp", "load",
		"load-logical-pathname-translations", "log", "logand", "logandc1",
		"logandc2", "logbitp", "logcount", "logeqv", "logical-pathname",
		"logical-pathname-translations", "logior", "lognand", "lognor",
		"lognot", "logorc1", "logorc2", "logtest", "logxor", "long-site-name",
		"lower-case-p", "machine-instance", "machine-type", "machine-version",
		"macroexpand", "macroexpand-1", "macro-function", "make-array",
		"make-broadcast-stream", "make-concatenated-stream", "make-condition",
		"make-dispatch-macro-character", "make-echo-stream", "make-hash-table",
		"make-instance", "make-instances-obsolete", "make-list",
		"make-load-form", "make-load-form-saving-slots", "make-package",
		"make-pathname", "make-random-state", "make-sequence", "make-string",
		"make-string-input-stream", "make-string-output-stream", "make-symbol",
		"make-synonym-stream", "make-two-way-stream", "makunbound", "map",
		"mapc", "mapcan", "mapcar", "mapcon", "maphash", "map-into", "mapl",
		"maplist", "mask-field", "max", "member", "member-if", "member-if-not",
		"merge", "merge-pathnames", "method-combination-error",
		"method-qualifiers", "min", "minusp", "mismatch", "mod",
		"muffle-warning", "name-char", "namestring", "nbutlast", "nconc",
		"next-method-p", "nintersection", "ninth", "no-applicable-method",
		"no-next-method", "not", "notany", "notevery", "nreconc", "nreverse",
		"nset-difference", "nset-exclusive-or", "nstring-capitalize",
		"nstring-downcase", "nstring-upcase", "nsublis", "nsubst", "nsubst-if",
		"nsubst-if-not", "nsubstitute", "nsubstitute-if", "nsubstitute-if-not",
		"nth", "nthcdr", "null", "numberp", "numerator", "nunion", "oddp",
		"open", "open-stream-p", "output-stream-p", "package-error-package",
		"package-name", "package-nicknames", "packagep",
		"package-shadowing-symbols", "package-used-by-list", "package-use-list",
		"pairlis", "parse-integer", "parse-namestring", "pathname",
		"pathname-device", "pathname-directory", "pathname-host",
		"pathname-match-p", "pathname-name", "pathnamep", "pathname-type",
		"pathname-version", "peek-char", "phase", "plusp", "position",
		"position-if", "position-if-not", "pprint", "pprint-dispatch",
		"pprint-fill", "pprint-indent", "pprint-linear", "pprint-newline",
		"pprint-tab", "pprint-tabular", "prin1", "prin1-to-string", "princ",
		"princ-to-string", "print", "print-object", "probe-file", "proclaim",
		"provide", "random", "random-state-p", "rassoc", "rassoc-if",
		"rassoc-if-not", "rational", "rationalize", "rationalp", "read",
		"read-byte", "read-char", "read-char-no-hang", "read-delimited-list",
		"read-from-string", "read-line", "read-preserving-whitespace",
		"read-sequence", "readtable-case", "readtablep", "realp", "realpart",
		"reduce", "reinitialize-instance", "rem", "remhash", "remove",
		"remove-duplicates", "remove-if", "remove-if-not", "remove-method",
		"remprop", "rename-file", "rename-package", "replace", "require",
		"rest", "restart-name", "revappend", "reverse", "room", "round",
		"row-major-aref", "rplaca", "rplacd", "sbit", "scale-float", "schar",
		"search", "second", "set", "set-difference",
		"set-dispatch-macro-character", "set-exclusive-or",
		"set-macro-character", "set-pprint-dispatch", "set-syntax-from-char",
		"seventh", "shadow", "shadowing-import", "shared-initialize",
		"short-site-name", "signal", "signum", "simple-bit-vector-p",
		"simple-condition-format-arguments", "simple-condition-format-control",
		"simple-string-p", "simple-vector-p", "sin", "sinh", "sixth", "sleep",
		"slot-boundp", "slot-exists-p", "slot-makunbound", "slot-missing",
		"slot-unbound", "slot-value", "software-type", "software-version",
		"some", "sort", "special-operator-p", "sqrt", "stable-sort",
		"standard-char-p", "store-value", "stream-element-type",
		"stream-error-stream", "stream-external-format", "streamp", "string",
		"string<", "string<=", "string=", "string>", "string>=", "string/=",
		"string-capitalize", "string-downcase", "string-equal",
		"string-greaterp", "string-left-trim", "string-lessp",
		"string-not-equal", "string-not-greaterp", "string-not-lessp",
		"stringp", "string-right-trim", "string-trim", "string-upcase",
		"sublis", "subseq", "subsetp", "subst", "subst-if", "subst-if-not",
		"substitute", "substitute-if", "substitute-if-not", "subtypep", "svref",
		"sxhash", "symbol-function", "symbol-name", "symbolp", "symbol-package",
		"symbol-plist", "symbol-value", "synonym-stream-symbol", "syntax:",
		"tailp", "tan", "tanh", "tenth", "terpri", "third",
		"translate-logical-pathname", "translate-pathname", "tree-equal",
		"truename", "truncate", "two-way-stream-input-stream",
		"two-way-stream-output-stream", "type-error-datum",
		"type-error-expected-type", "type-of", "typep", "unbound-slot-instance",
		"unexport", "unintern", "union", "unread-char", "unuse-package",
		"update-instance-for-different-class",
		"update-instance-for-redefined-class", "upgraded-array-element-type",
		"upgraded-complex-part-type", "upper-case-p", "use-package",
		"user-homedir-pathname", "use-value", "values", "values-list", "vector",
		"vectorp", "vector-pop", "vector-push", "vector-push-extend", "warn",
		"wild-pathname-p", "write", "write-byte", "write-char", "write-line",
		"write-sequence", "write-string", "write-to-string", "yes-or-no-p",
		"y-or-n-p", "zerop",
	}
	// Special operators and special-form names; remapped to Keyword.
	clSpecialForms = []string{
		"block", "catch", "declare", "eval-when", "flet", "function", "go", "if",
		"labels", "lambda", "let", "let*", "load-time-value", "locally", "macrolet",
		"multiple-value-call", "multiple-value-prog1", "progn", "progv", "quote",
		"return-from", "setq", "symbol-macrolet", "tagbody", "the", "throw",
		"unwind-protect",
	}
	// Standard macros; remapped to NameBuiltin.
	// NOTE(review): "lambda" appears here and in clSpecialForms — which
	// mapping wins depends on TypeRemappingLexer; confirm before reordering.
	clMacros = []string{
		"and", "assert", "call-method", "case", "ccase", "check-type", "cond",
		"ctypecase", "decf", "declaim", "defclass", "defconstant", "defgeneric",
		"define-compiler-macro", "define-condition", "define-method-combination",
		"define-modify-macro", "define-setf-expander", "define-symbol-macro",
		"defmacro", "defmethod", "defpackage", "defparameter", "defsetf",
		"defstruct", "deftype", "defun", "defvar", "destructuring-bind", "do",
		"do*", "do-all-symbols", "do-external-symbols", "dolist", "do-symbols",
		"dotimes", "ecase", "etypecase", "formatter", "handler-bind",
		"handler-case", "ignore-errors", "incf", "in-package", "lambda", "loop",
		"loop-finish", "make-method", "multiple-value-bind", "multiple-value-list",
		"multiple-value-setq", "nth-value", "or", "pop",
		"pprint-exit-if-list-exhausted", "pprint-logical-block", "pprint-pop",
		"print-unreadable-object", "prog", "prog*", "prog1", "prog2", "psetf",
		"psetq", "push", "pushnew", "remf", "restart-bind", "restart-case",
		"return", "rotatef", "setf", "shiftf", "step", "time", "trace", "typecase",
		"unless", "untrace", "when", "with-accessors", "with-compilation-unit",
		"with-condition-restarts", "with-hash-table-iterator",
		"with-input-from-string", "with-open-file", "with-open-stream",
		"with-output-to-string", "with-package-iterator", "with-simple-restart",
		"with-slots", "with-standard-io-syntax",
	}
	// Lambda-list keywords (&optional etc.); remapped to Keyword.
	clLambdaListKeywords = []string{
		"&allow-other-keys", "&aux", "&body", "&environment", "&key", "&optional",
		"&rest", "&whole",
	}
	// Declaration identifiers used inside (declare ...); remapped to Keyword.
	clDeclarations = []string{
		"dynamic-extent", "ignore", "optimize", "ftype", "inline", "special",
		"ignorable", "notinline", "type",
	}
	// Built-in type specifiers; remapped to KeywordType.
	clBuiltinTypes = []string{
		"atom", "boolean", "base-char", "base-string", "bignum", "bit",
		"compiled-function", "extended-char", "fixnum", "keyword", "nil",
		"signed-byte", "short-float", "single-float", "double-float", "long-float",
		"simple-array", "simple-base-string", "simple-bit-vector", "simple-string",
		"simple-vector", "standard-char", "unsigned-byte",
		// Condition Types
		"arithmetic-error", "cell-error", "condition", "control-error",
		"division-by-zero", "end-of-file", "error", "file-error",
		"floating-point-inexact", "floating-point-overflow",
		"floating-point-underflow", "floating-point-invalid-operation",
		"parse-error", "package-error", "print-not-readable", "program-error",
		"reader-error", "serious-condition", "simple-condition", "simple-error",
		"simple-type-error", "simple-warning", "stream-error", "storage-condition",
		"style-warning", "type-error", "unbound-variable", "unbound-slot",
		"undefined-function", "warning",
	}
	// Built-in system classes; remapped to NameClass.
	clBuiltinClasses = []string{
		"array", "broadcast-stream", "bit-vector", "built-in-class", "character",
		"class", "complex", "concatenated-stream", "cons", "echo-stream",
		"file-stream", "float", "function", "generic-function", "hash-table",
		"integer", "list", "logical-pathname", "method-combination", "method",
		"null", "number", "package", "pathname", "ratio", "rational", "readtable",
		"real", "random-state", "restart", "sequence", "standard-class",
		"standard-generic-function", "standard-method", "standard-object",
		"string-stream", "stream", "string", "structure-class", "structure-object",
		"symbol", "synonym-stream", "t", "two-way-stream", "vector",
	}
)
// CommonLisp is the registered Common Lisp lexer.  commonLispRules emits
// every symbol as NameVariable; the TypeMapping below then retags symbols
// found in the cl* word lists as functions, keywords, builtins, types or
// classes.  NOTE(review): a few words occur in more than one list (e.g.
// "lambda" in clSpecialForms and clMacros); which entry wins depends on
// TypeRemappingLexer's lookup order — confirm before rearranging.
var CommonLisp = internal.Register(TypeRemappingLexer(MustNewLazyLexer(
	&Config{
		Name:            "Common Lisp",
		Aliases:         []string{"common-lisp", "cl", "lisp"},
		Filenames:       []string{"*.cl", "*.lisp"},
		MimeTypes:       []string{"text/x-common-lisp"},
		CaseInsensitive: true,
	},
	commonLispRules,
), TypeMapping{
	{NameVariable, NameFunction, clBuiltinFunctions},
	{NameVariable, Keyword, clSpecialForms},
	{NameVariable, NameBuiltin, clMacros},
	{NameVariable, Keyword, clLambdaListKeywords},
	{NameVariable, Keyword, clDeclarations},
	{NameVariable, KeywordType, clBuiltinTypes},
	{NameVariable, NameClass, clBuiltinClasses},
}))
// commonLispRules returns the tokenising rules for Common Lisp.
// Rule order within a state is significant.  NOTE(review): in the symbol
// character classes below, `+-/` is a *range* (it also spans "," "." and
// more) — this mirrors the upstream definition; confirm before "fixing".
func commonLispRules() Rules {
	return Rules{
		"root": {
			Default(Push("body")),
		},
		// "#| ... |#" block comments nest, hence Push() on the opener.
		"multiline-comment": {
			{`#\|`, CommentMultiline, Push()},
			{`\|#`, CommentMultiline, Pop(1)},
			{`[^|#]+`, CommentMultiline, nil},
			{`[|#]`, CommentMultiline, nil},
		},
		// A form skipped by a "#+nil" reader conditional; parens nest.
		"commented-form": {
			{`\(`, CommentPreproc, Push()},
			{`\)`, CommentPreproc, Pop(1)},
			{`[^()]+`, CommentPreproc, nil},
		},
		"body": {
			{`\s+`, Text, nil},
			// ";" line comment.
			{`;.*$`, CommentSingle, nil},
			{`#\|`, CommentMultiline, Push("multiline-comment")},
			// "#nY..." special comment form — presumably an encoding
			// declaration extension; TODO confirm.
			{`#\d*Y.*$`, CommentSpecial, nil},
			{`"(\\.|\\\n|[^"\\])*"`, LiteralString, nil},
			// Keyword symbols: ":sym", "::sym", ":#sym".
			{`:(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringSymbol, nil},
			{`::(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringSymbol, nil},
			{`:#(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringSymbol, nil},
			// Quoted symbol, then bare quote/backquote operators.
			{`'(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringSymbol, nil},
			{`'`, Operator, nil},
			{"`", Operator, nil},
			// Numbers: integer, ratio, float — each must be followed by a
			// terminating character per the lookahead.
			{"[-+]?\\d+\\.?(?=[ \"()\\'\\n,;`])", LiteralNumberInteger, nil},
			{"[-+]?\\d+/\\d+(?=[ \"()\\'\\n,;`])", LiteralNumber, nil},
			{"[-+]?(\\d*\\.\\d+([defls][-+]?\\d+)?|\\d+(\\.\\d*)?[defls][-+]?\\d+)(?=[ \"()\\'\\n,;`])", LiteralNumberFloat, nil},
			// Characters: "#\x" or "#\name".
			{"#\\\\.(?=[ \"()\\'\\n,;`])", LiteralStringChar, nil},
			{`#\\(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringChar, nil},
			// "#(" vector literal — contents lexed as a nested body.
			{`#\(`, Operator, Push("body")},
			// "#*1010" bit vector.
			{`#\d*\*[01]*`, LiteralOther, nil},
			// "#:sym" uninterned symbol.
			{`#:(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringSymbol, nil},
			// "#." read-time eval, "#," load-time eval; "#'" function quote.
			{`#[.,]`, Operator, nil},
			{`#\'`, NameFunction, nil},
			// Radix literals: binary, octal, hex, arbitrary base #nR.
			{`#b[+-]?[01]+(/[01]+)?`, LiteralNumberBin, nil},
			{`#o[+-]?[0-7]+(/[0-7]+)?`, LiteralNumberOct, nil},
			{`#x[+-]?[0-9a-f]+(/[0-9a-f]+)?`, LiteralNumberHex, nil},
			{`#\d+r[+-]?[0-9a-z]+(/[0-9a-z]+)?`, LiteralNumber, nil},
			// "#c(" complex, "#na(" array, "#s(" structure literals.
			{`(#c)(\()`, ByGroups(LiteralNumber, Punctuation), Push("body")},
			{`(#\d+a)(\()`, ByGroups(LiteralOther, Punctuation), Push("body")},
			{`(#s)(\()`, ByGroups(LiteralOther, Punctuation), Push("body")},
			// "#p"..."" pathname literal.
			{`#p?"(\\.|[^"])*"`, LiteralOther, nil},
			// "#n=" label definition and "#n#" label reference.
			{`#\d+=`, Operator, nil},
			{`#\d+#`, Operator, nil},
			// "#+nil (...)" — the following form is treated as commented out.
			{"#+nil(?=[ \"()\\'\\n,;`])\\s*\\(", CommentPreproc, Push("commented-form")},
			// Other "#+"/"#-" reader conditionals.
			{`#[+-]`, Operator, nil},
			{`(,@|,|\.)`, Operator, nil},
			{"(t|nil)(?=[ \"()\\'\\n,;`])", NameConstant, nil},
			// "*earmuffed*" globals, then any other symbol as NameVariable
			// (retagged later by the TypeMapping).
			{`\*(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)\*`, NameVariableGlobal, nil},
			{`(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, NameVariable, nil},
			{`\(`, Punctuation, Push("body")},
			{`\)`, Punctuation, Pop(1)},
		},
	}
}
@@ -0,0 +1,42 @@ | |||
package c | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Clojure lexer. | |||
var Clojure = internal.Register(MustNewLazyLexer( | |||
&Config{ | |||
Name: "Clojure", | |||
Aliases: []string{"clojure", "clj"}, | |||
Filenames: []string{"*.clj"}, | |||
MimeTypes: []string{"text/x-clojure", "application/x-clojure"}, | |||
}, | |||
clojureRules, | |||
)) | |||
func clojureRules() Rules { | |||
return Rules{ | |||
"root": { | |||
{`;.*$`, CommentSingle, nil}, | |||
{`[,\s]+`, Text, nil}, | |||
{`-?\d+\.\d+`, LiteralNumberFloat, nil}, | |||
{`-?\d+`, LiteralNumberInteger, nil}, | |||
{`0x-?[abcdef\d]+`, LiteralNumberHex, nil}, | |||
{`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, | |||
{`'(?!#)[\w!$%*+<=>?/.#-]+`, LiteralStringSymbol, nil}, | |||
{`\\(.|[a-z]+)`, LiteralStringChar, nil}, | |||
{`::?#?(?!#)[\w!$%*+<=>?/.#-]+`, LiteralStringSymbol, nil}, | |||
{"~@|[`\\'#^~&@]", Operator, nil}, | |||
{Words(``, ` `, `.`, `def`, `do`, `fn`, `if`, `let`, `new`, `quote`, `var`, `loop`), Keyword, nil}, | |||
{Words(``, ` `, `def-`, `defn`, `defn-`, `defmacro`, `defmulti`, `defmethod`, `defstruct`, `defonce`, `declare`, `definline`, `definterface`, `defprotocol`, `defrecord`, `deftype`, `defproject`, `ns`), KeywordDeclaration, nil}, | |||
{Words(``, ` `, `*`, `+`, `-`, `->`, `/`, `<`, `<=`, `=`, `==`, `>`, `>=`, `..`, `accessor`, `agent`, `agent-errors`, `aget`, `alength`, `all-ns`, `alter`, `and`, `append-child`, `apply`, `array-map`, `aset`, `aset-boolean`, `aset-byte`, `aset-char`, `aset-double`, `aset-float`, `aset-int`, `aset-long`, `aset-short`, `assert`, `assoc`, `await`, `await-for`, `bean`, `binding`, `bit-and`, `bit-not`, `bit-or`, `bit-shift-left`, `bit-shift-right`, `bit-xor`, `boolean`, `branch?`, `butlast`, `byte`, `cast`, `char`, `children`, `class`, `clear-agent-errors`, `comment`, `commute`, `comp`, `comparator`, `complement`, `concat`, `conj`, `cons`, `constantly`, `cond`, `if-not`, `construct-proxy`, `contains?`, `count`, `create-ns`, `create-struct`, `cycle`, `dec`, `deref`, `difference`, `disj`, `dissoc`, `distinct`, `doall`, `doc`, `dorun`, `doseq`, `dosync`, `dotimes`, `doto`, `double`, `down`, `drop`, `drop-while`, `edit`, `end?`, `ensure`, `eval`, `every?`, `false?`, `ffirst`, `file-seq`, `filter`, `find`, `find-doc`, `find-ns`, `find-var`, `first`, `float`, `flush`, `for`, `fnseq`, `frest`, `gensym`, `get-proxy-class`, `get`, `hash-map`, `hash-set`, `identical?`, `identity`, `if-let`, `import`, `in-ns`, `inc`, `index`, `insert-child`, `insert-left`, `insert-right`, `inspect-table`, `inspect-tree`, `instance?`, `int`, `interleave`, `intersection`, `into`, `into-array`, `iterate`, `join`, `key`, `keys`, `keyword`, `keyword?`, `last`, `lazy-cat`, `lazy-cons`, `left`, `lefts`, `line-seq`, `list*`, `list`, `load`, `load-file`, `locking`, `long`, `loop`, `macroexpand`, `macroexpand-1`, `make-array`, `make-node`, `map`, `map-invert`, `map?`, `mapcat`, `max`, `max-key`, `memfn`, `merge`, `merge-with`, `meta`, `min`, `min-key`, `name`, `namespace`, `neg?`, `new`, `newline`, `next`, `nil?`, `node`, `not`, `not-any?`, `not-every?`, `not=`, `ns-imports`, `ns-interns`, `ns-map`, `ns-name`, `ns-publics`, `ns-refers`, `ns-resolve`, `ns-unmap`, `nth`, `nthrest`, `or`, `parse`, `partial`, 
`path`, `peek`, `pop`, `pos?`, `pr`, `pr-str`, `print`, `print-str`, `println`, `println-str`, `prn`, `prn-str`, `project`, `proxy`, `proxy-mappings`, `quot`, `rand`, `rand-int`, `range`, `re-find`, `re-groups`, `re-matcher`, `re-matches`, `re-pattern`, `re-seq`, `read`, `read-line`, `reduce`, `ref`, `ref-set`, `refer`, `rem`, `remove`, `remove-method`, `remove-ns`, `rename`, `rename-keys`, `repeat`, `replace`, `replicate`, `resolve`, `rest`, `resultset-seq`, `reverse`, `rfirst`, `right`, `rights`, `root`, `rrest`, `rseq`, `second`, `select`, `select-keys`, `send`, `send-off`, `seq`, `seq-zip`, `seq?`, `set`, `short`, `slurp`, `some`, `sort`, `sort-by`, `sorted-map`, `sorted-map-by`, `sorted-set`, `special-symbol?`, `split-at`, `split-with`, `str`, `string?`, `struct`, `struct-map`, `subs`, `subvec`, `symbol`, `symbol?`, `sync`, `take`, `take-nth`, `take-while`, `test`, `time`, `to-array`, `to-array-2d`, `tree-seq`, `true?`, `union`, `up`, `update-proxy`, `val`, `vals`, `var-get`, `var-set`, `var?`, `vector`, `vector-zip`, `vector?`, `when`, `when-first`, `when-let`, `when-not`, `with-local-vars`, `with-meta`, `with-open`, `with-out-str`, `xml-seq`, `xml-zip`, `zero?`, `zipmap`, `zipper`), NameBuiltin, nil}, | |||
{`(?<=\()(?!#)[\w!$%*+<=>?/.#-]+`, NameFunction, nil}, | |||
{`(?!#)[\w!$%*+<=>?/.#-]+`, NameVariable, nil}, | |||
{`(\[|\])`, Punctuation, nil}, | |||
{`(\{|\})`, Punctuation, nil}, | |||
{`(\(|\))`, Punctuation, nil}, | |||
}, | |||
} | |||
} |
@@ -0,0 +1,48 @@ | |||
package c | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Cmake lexer. | |||
var Cmake = internal.Register(MustNewLazyLexer( | |||
&Config{ | |||
Name: "CMake", | |||
Aliases: []string{"cmake"}, | |||
Filenames: []string{"*.cmake", "CMakeLists.txt"}, | |||
MimeTypes: []string{"text/x-cmake"}, | |||
}, | |||
cmakeRules, | |||
)) | |||
func cmakeRules() Rules { | |||
return Rules{ | |||
"root": { | |||
{`\b(\w+)([ \t]*)(\()`, ByGroups(NameBuiltin, Text, Punctuation), Push("args")}, | |||
Include("keywords"), | |||
Include("ws"), | |||
}, | |||
"args": { | |||
{`\(`, Punctuation, Push()}, | |||
{`\)`, Punctuation, Pop(1)}, | |||
{`(\$\{)(.+?)(\})`, ByGroups(Operator, NameVariable, Operator), nil}, | |||
{`(\$ENV\{)(.+?)(\})`, ByGroups(Operator, NameVariable, Operator), nil}, | |||
{`(\$<)(.+?)(>)`, ByGroups(Operator, NameVariable, Operator), nil}, | |||
{`(?s)".*?"`, LiteralStringDouble, nil}, | |||
{`\\\S+`, LiteralString, nil}, | |||
{`[^)$"# \t\n]+`, LiteralString, nil}, | |||
{`\n`, Text, nil}, | |||
Include("keywords"), | |||
Include("ws"), | |||
}, | |||
"string": {}, | |||
"keywords": { | |||
{`\b(WIN32|UNIX|APPLE|CYGWIN|BORLAND|MINGW|MSVC|MSVC_IDE|MSVC60|MSVC70|MSVC71|MSVC80|MSVC90)\b`, Keyword, nil}, | |||
}, | |||
"ws": { | |||
{`[ \t]+`, Text, nil}, | |||
{`#.*\n`, Comment, nil}, | |||
}, | |||
} | |||
} |
@@ -0,0 +1,55 @@ | |||
package c | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Cobol lexer. | |||
var Cobol = internal.Register(MustNewLazyLexer( | |||
&Config{ | |||
Name: "COBOL", | |||
Aliases: []string{"cobol"}, | |||
Filenames: []string{"*.cob", "*.COB", "*.cpy", "*.CPY"}, | |||
MimeTypes: []string{"text/x-cobol"}, | |||
CaseInsensitive: true, | |||
}, | |||
cobolRules, | |||
)) | |||
// cobolRules returns the tokenising rules for COBOL.  Word boundaries use
// the lookaround `(^|(?<=[^\w\-]))` / `($|(?=[^\w\-]))` because COBOL
// words may contain hyphens, which `\b` would split.
func cobolRules() Rules {
	return Rules{
		"root": {
			Include("comment"),
			Include("strings"),
			Include("core"),
			Include("nums"),
			// Identifiers (may contain hyphens/underscores internally).
			{`[a-z0-9]([\w\-]*[a-z0-9]+)?`, NameVariable, nil},
			{`[ \t]+`, Text, nil},
		},
		"comment": {
			// Fixed-form: "*" or "/" in column 7 comments the whole line;
			// the six-column sequence area is always skipped; "*>" starts a
			// free-form inline comment.
			{`(^.{6}[*/].*\n|^.{6}|\*>.*\n)`, Comment, nil},
		},
		"core": {
			// Figurative constants, optionally preceded by "ALL".
			{`(^|(?<=[^\w\-]))(ALL\s+)?((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)\s*($|(?=[^\w\-]))`, NameConstant, nil},
			// Statement and division keywords.
			{Words(`(^|(?<=[^\w\-]))`, `\s*($|(?=[^\w\-]))`, `ACCEPT`, `ADD`, `ALLOCATE`, `CALL`, `CANCEL`, `CLOSE`, `COMPUTE`, `CONFIGURATION`, `CONTINUE`, `DATA`, `DELETE`, `DISPLAY`, `DIVIDE`, `DIVISION`, `ELSE`, `END`, `END-ACCEPT`, `END-ADD`, `END-CALL`, `END-COMPUTE`, `END-DELETE`, `END-DISPLAY`, `END-DIVIDE`, `END-EVALUATE`, `END-IF`, `END-MULTIPLY`, `END-OF-PAGE`, `END-PERFORM`, `END-READ`, `END-RETURN`, `END-REWRITE`, `END-SEARCH`, `END-START`, `END-STRING`, `END-SUBTRACT`, `END-UNSTRING`, `END-WRITE`, `ENVIRONMENT`, `EVALUATE`, `EXIT`, `FD`, `FILE`, `FILE-CONTROL`, `FOREVER`, `FREE`, `GENERATE`, `GO`, `GOBACK`, `IDENTIFICATION`, `IF`, `INITIALIZE`, `INITIATE`, `INPUT-OUTPUT`, `INSPECT`, `INVOKE`, `I-O-CONTROL`, `LINKAGE`, `LOCAL-STORAGE`, `MERGE`, `MOVE`, `MULTIPLY`, `OPEN`, `PERFORM`, `PROCEDURE`, `PROGRAM-ID`, `RAISE`, `READ`, `RELEASE`, `RESUME`, `RETURN`, `REWRITE`, `SCREEN`, `SD`, `SEARCH`, `SECTION`, `SET`, `SORT`, `START`, `STOP`, `STRING`, `SUBTRACT`, `SUPPRESS`, `TERMINATE`, `THEN`, `UNLOCK`, `UNSTRING`, `USE`, `VALIDATE`, `WORKING-STORAGE`, `WRITE`), KeywordReserved, nil},
			// Other reserved words (clauses, phrases, options).
			{Words(`(^|(?<=[^\w\-]))`, `\s*($|(?=[^\w\-]))`, `ACCESS`, `ADDRESS`, `ADVANCING`, `AFTER`, `ALL`, `ALPHABET`, `ALPHABETIC`, `ALPHABETIC-LOWER`, `ALPHABETIC-UPPER`, `ALPHANUMERIC`, `ALPHANUMERIC-EDITED`, `ALSO`, `ALTER`, `ALTERNATEANY`, `ARE`, `AREA`, `AREAS`, `ARGUMENT-NUMBER`, `ARGUMENT-VALUE`, `AS`, `ASCENDING`, `ASSIGN`, `AT`, `AUTO`, `AUTO-SKIP`, `AUTOMATIC`, `AUTOTERMINATE`, `BACKGROUND-COLOR`, `BASED`, `BEEP`, `BEFORE`, `BELL`, `BLANK`, `BLINK`, `BLOCK`, `BOTTOM`, `BY`, `BYTE-LENGTH`, `CHAINING`, `CHARACTER`, `CHARACTERS`, `CLASS`, `CODE`, `CODE-SET`, `COL`, `COLLATING`, `COLS`, `COLUMN`, `COLUMNS`, `COMMA`, `COMMAND-LINE`, `COMMIT`, `COMMON`, `CONSTANT`, `CONTAINS`, `CONTENT`, `CONTROL`, `CONTROLS`, `CONVERTING`, `COPY`, `CORR`, `CORRESPONDING`, `COUNT`, `CRT`, `CURRENCY`, `CURSOR`, `CYCLE`, `DATE`, `DAY`, `DAY-OF-WEEK`, `DE`, `DEBUGGING`, `DECIMAL-POINT`, `DECLARATIVES`, `DEFAULT`, `DELIMITED`, `DELIMITER`, `DEPENDING`, `DESCENDING`, `DETAIL`, `DISK`, `DOWN`, `DUPLICATES`, `DYNAMIC`, `EBCDIC`, `ENTRY`, `ENVIRONMENT-NAME`, `ENVIRONMENT-VALUE`, `EOL`, `EOP`, `EOS`, `ERASE`, `ERROR`, `ESCAPE`, `EXCEPTION`, `EXCLUSIVE`, `EXTEND`, `EXTERNAL`, `FILE-ID`, `FILLER`, `FINAL`, `FIRST`, `FIXED`, `FLOAT-LONG`, `FLOAT-SHORT`, `FOOTING`, `FOR`, `FOREGROUND-COLOR`, `FORMAT`, `FROM`, `FULL`, `FUNCTION`, `FUNCTION-ID`, `GIVING`, `GLOBAL`, `GROUP`, `HEADING`, `HIGHLIGHT`, `I-O`, `ID`, `IGNORE`, `IGNORING`, `IN`, `INDEX`, `INDEXED`, `INDICATE`, `INITIAL`, `INITIALIZED`, `INPUT`, `INTO`, `INTRINSIC`, `INVALID`, `IS`, `JUST`, `JUSTIFIED`, `KEY`, `LABEL`, `LAST`, `LEADING`, `LEFT`, `LENGTH`, `LIMIT`, `LIMITS`, `LINAGE`, `LINAGE-COUNTER`, `LINE`, `LINES`, `LOCALE`, `LOCK`, `LOWLIGHT`, `MANUAL`, `MEMORY`, `MINUS`, `MODE`, `MULTIPLE`, `NATIONAL`, `NATIONAL-EDITED`, `NATIVE`, `NEGATIVE`, `NEXT`, `NO`, `NULL`, `NULLS`, `NUMBER`, `NUMBERS`, `NUMERIC`, `NUMERIC-EDITED`, `OBJECT-COMPUTER`, `OCCURS`, `OF`, `OFF`, `OMITTED`, `ON`, `ONLY`, `OPTIONAL`, `ORDER`, `ORGANIZATION`, `OTHER`, 
			`OUTPUT`, `OVERFLOW`, `OVERLINE`, `PACKED-DECIMAL`, `PADDING`, `PAGE`, `PARAGRAPH`, `PLUS`, `POINTER`, `POSITION`, `POSITIVE`, `PRESENT`, `PREVIOUS`, `PRINTER`, `PRINTING`, `PROCEDURE-POINTER`, `PROCEDURES`, `PROCEED`, `PROGRAM`, `PROGRAM-POINTER`, `PROMPT`, `QUOTE`, `QUOTES`, `RANDOM`, `RD`, `RECORD`, `RECORDING`, `RECORDS`, `RECURSIVE`, `REDEFINES`, `REEL`, `REFERENCE`, `RELATIVE`, `REMAINDER`, `REMOVAL`, `RENAMES`, `REPLACING`, `REPORT`, `REPORTING`, `REPORTS`, `REPOSITORY`, `REQUIRED`, `RESERVE`, `RETURNING`, `REVERSE-VIDEO`, `REWIND`, `RIGHT`, `ROLLBACK`, `ROUNDED`, `RUN`, `SAME`, `SCROLL`, `SECURE`, `SEGMENT-LIMIT`, `SELECT`, `SENTENCE`, `SEPARATE`, `SEQUENCE`, `SEQUENTIAL`, `SHARING`, `SIGN`, `SIGNED`, `SIGNED-INT`, `SIGNED-LONG`, `SIGNED-SHORT`, `SIZE`, `SORT-MERGE`, `SOURCE`, `SOURCE-COMPUTER`, `SPECIAL-NAMES`, `STANDARD`, `STANDARD-1`, `STANDARD-2`, `STATUS`, `SUM`, `SYMBOLIC`, `SYNC`, `SYNCHRONIZED`, `TALLYING`, `TAPE`, `TEST`, `THROUGH`, `THRU`, `TIME`, `TIMES`, `TO`, `TOP`, `TRAILING`, `TRANSFORM`, `TYPE`, `UNDERLINE`, `UNIT`, `UNSIGNED`, `UNSIGNED-INT`, `UNSIGNED-LONG`, `UNSIGNED-SHORT`, `UNTIL`, `UP`, `UPDATE`, `UPON`, `USAGE`, `USING`, `VALUE`, `VALUES`, `VARYING`, `WAIT`, `WHEN`, `WITH`, `WORDS`, `YYYYDDD`, `YYYYMMDD`), KeywordPseudo, nil},
			// NOTE(review): this list is tokenised as Error — presumably
			// flagging COBOL 2002 / OO words the targeted dialect does not
			// support; confirm against upstream before changing the token.
			{Words(`(^|(?<=[^\w\-]))`, `\s*($|(?=[^\w\-]))`, `ACTIVE-CLASS`, `ALIGNED`, `ANYCASE`, `ARITHMETIC`, `ATTRIBUTE`, `B-AND`, `B-NOT`, `B-OR`, `B-XOR`, `BIT`, `BOOLEAN`, `CD`, `CENTER`, `CF`, `CH`, `CHAIN`, `CLASS-ID`, `CLASSIFICATION`, `COMMUNICATION`, `CONDITION`, `DATA-POINTER`, `DESTINATION`, `DISABLE`, `EC`, `EGI`, `EMI`, `ENABLE`, `END-RECEIVE`, `ENTRY-CONVENTION`, `EO`, `ESI`, `EXCEPTION-OBJECT`, `EXPANDS`, `FACTORY`, `FLOAT-BINARY-16`, `FLOAT-BINARY-34`, `FLOAT-BINARY-7`, `FLOAT-DECIMAL-16`, `FLOAT-DECIMAL-34`, `FLOAT-EXTENDED`, `FORMAT`, `FUNCTION-POINTER`, `GET`, `GROUP-USAGE`, `IMPLEMENTS`, `INFINITY`, `INHERITS`, `INTERFACE`, `INTERFACE-ID`, `INVOKE`, `LC_ALL`, `LC_COLLATE`, `LC_CTYPE`, `LC_MESSAGES`, `LC_MONETARY`, `LC_NUMERIC`, `LC_TIME`, `LINE-COUNTER`, `MESSAGE`, `METHOD`, `METHOD-ID`, `NESTED`, `NONE`, `NORMAL`, `OBJECT`, `OBJECT-REFERENCE`, `OPTIONS`, `OVERRIDE`, `PAGE-COUNTER`, `PF`, `PH`, `PROPERTY`, `PROTOTYPE`, `PURGE`, `QUEUE`, `RAISE`, `RAISING`, `RECEIVE`, `RELATION`, `REPLACE`, `REPRESENTS-NOT-A-NUMBER`, `RESET`, `RESUME`, `RETRY`, `RF`, `RH`, `SECONDS`, `SEGMENT`, `SELF`, `SEND`, `SOURCES`, `STATEMENT`, `STEP`, `STRONG`, `SUB-QUEUE-1`, `SUB-QUEUE-2`, `SUB-QUEUE-3`, `SUPER`, `SYMBOL`, `SYSTEM-DEFAULT`, `TABLE`, `TERMINAL`, `TEXT`, `TYPEDEF`, `UCS-4`, `UNIVERSAL`, `USER-DEFAULT`, `UTF-16`, `UTF-8`, `VAL-STATUS`, `VALID`, `VALIDATE`, `VALIDATE-STATUS`), Error, nil},
			// PIC/PICTURE clauses and USAGE/type words.
			{`(^|(?<=[^\w\-]))(PIC\s+.+?(?=(\s|\.\s))|PICTURE\s+.+?(?=(\s|\.\s))|(COMPUTATIONAL)(-[1-5X])?|(COMP)(-[1-5X])?|BINARY-C-LONG|BINARY-CHAR|BINARY-DOUBLE|BINARY-LONG|BINARY-SHORT|BINARY)\s*($|(?=[^\w\-]))`, KeywordType, nil},
			{`(\*\*|\*|\+|-|/|<=|>=|<|>|==|/=|=)`, Operator, nil},
			{`([(),;:&%.])`, Punctuation, nil},
			// Intrinsic functions.
			{`(^|(?<=[^\w\-]))(ABS|ACOS|ANNUITY|ASIN|ATAN|BYTE-LENGTH|CHAR|COMBINED-DATETIME|CONCATENATE|COS|CURRENT-DATE|DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|FACTORIAL|FRACTION-PART|INTEGER-OF-(?:DATE|DAY|PART)|INTEGER|LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG(?:10)?|LOWER-CASE|MAX|MEAN|MEDIAN|MIDRANGE|MIN|MOD|NUMVAL(?:-C)?|ORD(?:-MAX|-MIN)?|PI|PRESENT-VALUE|RANDOM|RANGE|REM|REVERSE|SECONDS-FROM-FORMATTED-TIME|SECONDS-PAST-MIDNIGHT|SIGN|SIN|SQRT|STANDARD-DEVIATION|STORED-CHAR-LENGTH|SUBSTITUTE(?:-CASE)?|SUM|TAN|TEST-DATE-YYYYMMDD|TEST-DAY-YYYYDDD|TRIM|UPPER-CASE|VARIANCE|WHEN-COMPILED|YEAR-TO-YYYY)\s*($|(?=[^\w\-]))`, NameFunction, nil},
			{`(^|(?<=[^\w\-]))(true|false)\s*($|(?=[^\w\-]))`, NameBuiltin, nil},
			// Word-form relational and logical operators.
			{`(^|(?<=[^\w\-]))(equal|equals|ne|lt|le|gt|ge|greater|less|than|not|and|or)\s*($|(?=[^\w\-]))`, OperatorWord, nil},
		},
		"strings": {
			// A literal is closed by its quote or by the end of the line.
			{`"[^"\n]*("|\n)`, LiteralStringDouble, nil},
			{`'[^'\n]*('|\n)`, LiteralStringSingle, nil},
		},
		"nums": {
			{`\d+(\s*|\.$|$)`, LiteralNumberInteger, nil},
			{`[+-]?\d*\.\d+(E[-+]?\d+)?`, LiteralNumberFloat, nil},
			{`[+-]?\d+\.\d*(E[-+]?\d+)?`, LiteralNumberFloat, nil},
		},
	}
}
@@ -0,0 +1,95 @@ | |||
package c | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Coffeescript lexer.
//
// Registered lazily: only this Config is built at init time; the rule map is
// produced by coffeescriptRules on first use.
var Coffeescript = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "CoffeeScript",
		Aliases: []string{"coffee-script", "coffeescript", "coffee"},
		Filenames: []string{"*.coffee"},
		MimeTypes: []string{"text/coffeescript"},
		// NotMultiline: `^`/`$` match only at text start/end, not per line.
		NotMultiline: true,
		// DotAll: `.` in the rule patterns also matches newlines.
		DotAll: true,
	},
	coffeescriptRules,
))
// coffeescriptRules returns the state machine for the CoffeeScript lexer.
//
// Most "root" rules that can precede a regex literal push "slashstartsregex",
// which decides whether a following `/` starts a regex or is the division
// operator; it pops back out via Default if neither applies. String states:
// dqs/sqs (single-line double/single quotes) and tdqs/tsqs (triple quotes),
// with #{...} interpolation routed through "interpoling_string".
func coffeescriptRules() Rules {
	return Rules{
		"commentsandwhitespace": {
			{`\s+`, Text, nil},
			// ### block comments ### and # line comments; the lookahead in
			// the single-line rule keeps it from eating a block opener.
			{`###[^#].*?###`, CommentMultiline, nil},
			{`#(?!##[^#]).*?\n`, CommentSingle, nil},
		},
		// Body of a /// ... /// multi-line ("heregex") literal.
		"multilineregex": {
			{`[^/#]+`, LiteralStringRegex, nil},
			{`///([gim]+\b|\B)`, LiteralStringRegex, Pop(1)},
			{`#\{`, LiteralStringInterpol, Push("interpoling_string")},
			{`[/#]`, LiteralStringRegex, nil},
		},
		// Entered wherever a `/` would start a regex rather than division.
		"slashstartsregex": {
			Include("commentsandwhitespace"),
			{`///`, LiteralStringRegex, Push("#pop", "multilineregex")},
			{`/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/([gim]+\b|\B)`, LiteralStringRegex, Pop(1)},
			{`/`, Operator, nil},
			// No regex here after all: fall back to the previous state.
			Default(Pop(1)),
		},
		"root": {
			Include("commentsandwhitespace"),
			{`^(?=\s|/)`, Text, Push("slashstartsregex")},
			{"\\+\\+|~|&&|\\band\\b|\\bor\\b|\\bis\\b|\\bisnt\\b|\\bnot\\b|\\?|:|\\|\\||\\\\(?=\\n)|(<<|>>>?|==?(?!>)|!=?|=(?!>)|-(?!>)|[<>+*`%&\\|\\^/])=?", Operator, Push("slashstartsregex")},
			// Arrow functions: optional parameter list, then -> or =>.
			{`(?:\([^()]*\))?\s*[=-]>`, NameFunction, Push("slashstartsregex")},
			{`[{(\[;,]`, Punctuation, Push("slashstartsregex")},
			{`[})\].]`, Punctuation, nil},
			// (?<![.$]) keeps property accesses like `a.for` from matching.
			{`(?<![.$])(for|own|in|of|while|until|loop|break|return|continue|switch|when|then|if|unless|else|throw|try|catch|finally|new|delete|typeof|instanceof|super|extends|this|class|by)\b`, Keyword, Push("slashstartsregex")},
			{`(?<![.$])(true|false|yes|no|on|off|null|NaN|Infinity|undefined)\b`, KeywordConstant, nil},
			{`(Array|Boolean|Date|Error|Function|Math|netscape|Number|Object|Packages|RegExp|String|sun|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b`, NameBuiltin, nil},
			// Assignments: plain variables, then @instance variables.
			{`[$a-zA-Z_][\w.:$]*\s*[:=]\s`, NameVariable, Push("slashstartsregex")},
			{`@[$a-zA-Z_][\w.:$]*\s*[:=]\s`, NameVariableInstance, Push("slashstartsregex")},
			{`@`, NameOther, Push("slashstartsregex")},
			{`@?[$a-zA-Z_][\w$]*`, NameOther, nil},
			{`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil},
			{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
			{`[0-9]+`, LiteralNumberInteger, nil},
			// Triple-quoted openers must be tried before single-char quotes.
			{`"""`, LiteralString, Push("tdqs")},
			{`'''`, LiteralString, Push("tsqs")},
			{`"`, LiteralString, Push("dqs")},
			{`'`, LiteralString, Push("sqs")},
		},
		// Shared plain-text run inside any string body.
		"strings": {
			{`[^#\\\'"]+`, LiteralString, nil},
		},
		// Inside #{...}: full expression syntax until the closing brace.
		"interpoling_string": {
			{`\}`, LiteralStringInterpol, Pop(1)},
			Include("root"),
		},
		"dqs": {
			{`"`, LiteralString, Pop(1)},
			{`\\.|\'`, LiteralString, nil},
			{`#\{`, LiteralStringInterpol, Push("interpoling_string")},
			{`#`, LiteralString, nil},
			Include("strings"),
		},
		// Single-quoted strings have no interpolation.
		"sqs": {
			{`'`, LiteralString, Pop(1)},
			{`#|\\.|"`, LiteralString, nil},
			Include("strings"),
		},
		"tdqs": {
			{`"""`, LiteralString, Pop(1)},
			{`\\.|\'|"`, LiteralString, nil},
			{`#\{`, LiteralStringInterpol, Push("interpoling_string")},
			{`#`, LiteralString, nil},
			Include("strings"),
		},
		"tsqs": {
			{`'''`, LiteralString, Pop(1)},
			{`#|\\.|\'|"`, LiteralString, nil},
			Include("strings"),
		},
	}
}
@@ -0,0 +1,52 @@ | |||
package c | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Cfstatement lexer.
//
// ColdFusion CFScript statement lexer. It has no filename or MIME-type
// associations of its own; presumably it is used as a sub-lexer by a
// ColdFusion markup lexer — TODO confirm against callers.
var Cfstatement = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "cfstatement",
		Aliases: []string{"cfs"},
		Filenames: []string{},
		MimeTypes: []string{},
		// NotMultiline: `^`/`$` match only at text start/end, not per line.
		NotMultiline: true,
		// CFML keywords and identifiers are matched case-insensitively.
		CaseInsensitive: true,
	},
	cfstatementRules,
))
func cfstatementRules() Rules { | |||
return Rules{ | |||
"root": { | |||
{`//.*?\n`, CommentSingle, nil}, | |||
{`/\*(?:.|\n)*?\*/`, CommentMultiline, nil}, | |||
{`\+\+|--`, Operator, nil}, | |||
{`[-+*/^&=!]`, Operator, nil}, | |||
{`<=|>=|<|>|==`, Operator, nil}, | |||
{`mod\b`, Operator, nil}, | |||
{`(eq|lt|gt|lte|gte|not|is|and|or)\b`, Operator, nil}, | |||
{`\|\||&&`, Operator, nil}, | |||
{`\?`, Operator, nil}, | |||
{`"`, LiteralStringDouble, Push("string")}, | |||
{`'.*?'`, LiteralStringSingle, nil}, | |||
{`\d+`, LiteralNumber, nil}, | |||
{`(if|else|len|var|xml|default|break|switch|component|property|function|do|try|catch|in|continue|for|return|while|required|any|array|binary|boolean|component|date|guid|numeric|query|string|struct|uuid|case)\b`, Keyword, nil}, | |||
{`(true|false|null)\b`, KeywordConstant, nil}, | |||
{`(application|session|client|cookie|super|this|variables|arguments)\b`, NameConstant, nil}, | |||
{`([a-z_$][\w.]*)(\s*)(\()`, ByGroups(NameFunction, Text, Punctuation), nil}, | |||
{`[a-z_$][\w.]*`, NameVariable, nil}, | |||
{`[()\[\]{};:,.\\]`, Punctuation, nil}, | |||
{`\s+`, Text, nil}, | |||
}, | |||
"string": { | |||
{`""`, LiteralStringDouble, nil}, | |||
{`#.+?#`, LiteralStringInterpol, nil}, | |||
{`[^"#]+`, LiteralStringDouble, nil}, | |||
{`#`, LiteralStringDouble, nil}, | |||
{`"`, LiteralStringDouble, Pop(1)}, | |||
}, | |||
} | |||
} |
@@ -0,0 +1,67 @@ | |||
package c | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Coq lexer.
//
// Registered lazily; rules are built by coqRules on first use. Note the
// "*.v" extension is shared with Verilog — disambiguation, if any, happens
// outside this file.
var Coq = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "Coq",
		Aliases: []string{"coq"},
		Filenames: []string{"*.v"},
		MimeTypes: []string{"text/x-coq"},
	},
	coqRules,
))
func coqRules() Rules { | |||
return Rules{ | |||
"root": { | |||
{`\s+`, Text, nil}, | |||
{`false|true|\(\)|\[\]`, NameBuiltinPseudo, nil}, | |||
{`\(\*`, Comment, Push("comment")}, | |||
{Words(`\b`, `\b`, `Section`, `Module`, `End`, `Require`, `Import`, `Export`, `Variable`, `Variables`, `Parameter`, `Parameters`, `Axiom`, `Hypothesis`, `Hypotheses`, `Notation`, `Local`, `Tactic`, `Reserved`, `Scope`, `Open`, `Close`, `Bind`, `Delimit`, `Definition`, `Let`, `Ltac`, `Fixpoint`, `CoFixpoint`, `Morphism`, `Relation`, `Implicit`, `Arguments`, `Set`, `Unset`, `Contextual`, `Strict`, `Prenex`, `Implicits`, `Inductive`, `CoInductive`, `Record`, `Structure`, `Canonical`, `Coercion`, `Theorem`, `Lemma`, `Corollary`, `Proposition`, `Fact`, `Remark`, `Example`, `Proof`, `Goal`, `Save`, `Qed`, `Defined`, `Hint`, `Resolve`, `Rewrite`, `View`, `Search`, `Show`, `Print`, `Printing`, `All`, `Graph`, `Projections`, `inside`, `outside`, `Check`, `Global`, `Instance`, `Class`, `Existing`, `Universe`, `Polymorphic`, `Monomorphic`, `Context`), KeywordNamespace, nil}, | |||
{Words(`\b`, `\b`, `forall`, `exists`, `exists2`, `fun`, `fix`, `cofix`, `struct`, `match`, `end`, `in`, `return`, `let`, `if`, `is`, `then`, `else`, `for`, `of`, `nosimpl`, `with`, `as`), Keyword, nil}, | |||
{Words(`\b`, `\b`, `Type`, `Prop`), KeywordType, nil}, | |||
{Words(`\b`, `\b`, `pose`, `set`, `move`, `case`, `elim`, `apply`, `clear`, `hnf`, `intro`, `intros`, `generalize`, `rename`, `pattern`, `after`, `destruct`, `induction`, `using`, `refine`, `inversion`, `injection`, `rewrite`, `congr`, `unlock`, `compute`, `ring`, `field`, `replace`, `fold`, `unfold`, `change`, `cutrewrite`, `simpl`, `have`, `suff`, `wlog`, `suffices`, `without`, `loss`, `nat_norm`, `assert`, `cut`, `trivial`, `revert`, `bool_congr`, `nat_congr`, `symmetry`, `transitivity`, `auto`, `split`, `left`, `right`, `autorewrite`, `tauto`, `setoid_rewrite`, `intuition`, `eauto`, `eapply`, `econstructor`, `etransitivity`, `constructor`, `erewrite`, `red`, `cbv`, `lazy`, `vm_compute`, `native_compute`, `subst`), Keyword, nil}, | |||
{Words(`\b`, `\b`, `by`, `done`, `exact`, `reflexivity`, `tauto`, `romega`, `omega`, `assumption`, `solve`, `contradiction`, `discriminate`, `congruence`), KeywordPseudo, nil}, | |||
{Words(`\b`, `\b`, `do`, `last`, `first`, `try`, `idtac`, `repeat`), KeywordReserved, nil}, | |||
{`\b([A-Z][\w\']*)`, Name, nil}, | |||
{"(\u03bb|\u03a0|\\|\\}|\\{\\||\\\\/|/\\\\|=>|~|\\}|\\|]|\\||\\{<|\\{|`|_|]|\\[\\||\\[>|\\[<|\\[|\\?\\?|\\?|>\\}|>]|>|=|<->|<-|<|;;|;|:>|:=|::|:|\\.\\.|\\.|->|-\\.|-|,|\\+|\\*|\\)|\\(|&&|&|#|!=)", Operator, nil}, | |||
{`([=<>@^|&+\*/$%-]|[!?~])?[!$%&*+\./:<=>?@^|~-]`, Operator, nil}, | |||
{`\b(unit|nat|bool|string|ascii|list)\b`, KeywordType, nil}, | |||
{`[^\W\d][\w']*`, Name, nil}, | |||
{`\d[\d_]*`, LiteralNumberInteger, nil}, | |||
{`0[xX][\da-fA-F][\da-fA-F_]*`, LiteralNumberHex, nil}, | |||
{`0[oO][0-7][0-7_]*`, LiteralNumberOct, nil}, | |||
{`0[bB][01][01_]*`, LiteralNumberBin, nil}, | |||
{`-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)`, LiteralNumberFloat, nil}, | |||
{`'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'`, LiteralStringChar, nil}, | |||
{`'.'`, LiteralStringChar, nil}, | |||
{`'`, Keyword, nil}, | |||
{`"`, LiteralStringDouble, Push("string")}, | |||
{`[~?][a-z][\w\']*:`, Name, nil}, | |||
}, | |||
"comment": { | |||
{`[^(*)]+`, Comment, nil}, | |||
{`\(\*`, Comment, Push()}, | |||
{`\*\)`, Comment, Pop(1)}, | |||
{`[(*)]`, Comment, nil}, | |||
}, | |||
"string": { | |||
{`[^"]+`, LiteralStringDouble, nil}, | |||
{`""`, LiteralStringDouble, nil}, | |||
{`"`, LiteralStringDouble, Pop(1)}, | |||
}, | |||
"dotted": { | |||
{`\s+`, Text, nil}, | |||
{`\.`, Punctuation, nil}, | |||
{`[A-Z][\w\']*(?=\s*\.)`, NameNamespace, nil}, | |||
{`[A-Z][\w\']*`, NameClass, Pop(1)}, | |||
{`[a-z][a-z0-9_\']*`, Name, Pop(1)}, | |||
Default(Pop(1)), | |||
}, | |||
} | |||
} |
@@ -0,0 +1,110 @@ | |||
package c | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// CPP lexer.
//
// Registered lazily; rules come from cppRules on first use.
var CPP = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "C++",
		Aliases: []string{"cpp", "c++"},
		Filenames: []string{"*.cpp", "*.hpp", "*.c++", "*.h++", "*.cc", "*.hh", "*.cxx", "*.hxx", "*.C", "*.H", "*.cp", "*.CPP"},
		MimeTypes: []string{"text/x-c++hdr", "text/x-c++src"},
		// EnsureNL: input is given a trailing newline before lexing, so
		// `\n`-terminated rules (line comments, macros) match at EOF.
		EnsureNL: true,
	},
	cppRules,
))
func cppRules() Rules { | |||
return Rules{ | |||
"statements": { | |||
{Words(``, `\b`, `catch`, `const_cast`, `delete`, `dynamic_cast`, `explicit`, `export`, `friend`, `mutable`, `namespace`, `new`, `operator`, `private`, `protected`, `public`, `reinterpret_cast`, `restrict`, `static_cast`, `template`, `this`, `throw`, `throws`, `try`, `typeid`, `typename`, `using`, `virtual`, `constexpr`, `nullptr`, `decltype`, `thread_local`, `alignas`, `alignof`, `static_assert`, `noexcept`, `override`, `final`, `concept`, `requires`, `consteval`, `co_await`, `co_return`, `co_yield`), Keyword, nil}, | |||
{`(enum)\b(\s+)(class)\b(\s*)`, ByGroups(Keyword, Text, Keyword, Text), Push("classname")}, | |||
{`(class|struct|enum|union)\b(\s*)`, ByGroups(Keyword, Text), Push("classname")}, | |||
{`\[\[.+\]\]`, NameAttribute, nil}, | |||
{`(R)(")([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(")`, ByGroups(LiteralStringAffix, LiteralString, LiteralStringDelimiter, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, LiteralString), nil}, | |||
{`(u8|u|U)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")}, | |||
{`(L?)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")}, | |||
{`(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')`, ByGroups(LiteralStringAffix, LiteralStringChar, LiteralStringChar, LiteralStringChar), nil}, | |||
{`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*`, LiteralNumberFloat, nil}, | |||
{`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, | |||
{`0[xX]([0-9A-Fa-f]('?[0-9A-Fa-f]+)*)[LlUu]*`, LiteralNumberHex, nil}, | |||
{`0('?[0-7]+)+[LlUu]*`, LiteralNumberOct, nil}, | |||
{`0[Bb][01]('?[01]+)*[LlUu]*`, LiteralNumberBin, nil}, | |||
{`[0-9]('?[0-9]+)*[LlUu]*`, LiteralNumberInteger, nil}, | |||
{`\*/`, Error, nil}, | |||
{`[~!%^&*+=|?:<>/-]`, Operator, nil}, | |||
{`[()\[\],.]`, Punctuation, nil}, | |||
{Words(``, `\b`, `asm`, `auto`, `break`, `case`, `const`, `continue`, `default`, `do`, `else`, `enum`, `extern`, `for`, `goto`, `if`, `register`, `restricted`, `return`, `sizeof`, `static`, `struct`, `switch`, `typedef`, `union`, `volatile`, `while`), Keyword, nil}, | |||
{`(bool|int|long|float|short|double|char((8|16|32)_t)?|wchar_t|unsigned|signed|void|u?int(_fast|_least|)(8|16|32|64)_t)\b`, KeywordType, nil}, | |||
{Words(``, `\b`, `inline`, `_inline`, `__inline`, `naked`, `restrict`, `thread`, `typename`), KeywordReserved, nil}, | |||
{`(__m(128i|128d|128|64))\b`, KeywordReserved, nil}, | |||
{Words(`__`, `\b`, `asm`, `int8`, `based`, `except`, `int16`, `stdcall`, `cdecl`, `fastcall`, `int32`, `declspec`, `finally`, `int64`, `try`, `leave`, `w64`, `unaligned`, `raise`, `noop`, `identifier`, `forceinline`, `assume`), KeywordReserved, nil}, | |||
{`(true|false|NULL)\b`, NameBuiltin, nil}, | |||
{`([a-zA-Z_]\w*)(\s*)(:)(?!:)`, ByGroups(NameLabel, Text, Punctuation), nil}, | |||
{`[a-zA-Z_]\w*`, Name, nil}, | |||
}, | |||
"root": { | |||
Include("whitespace"), | |||
{`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), Push("function")}, | |||
{`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), nil}, | |||
Default(Push("statement")), | |||
{Words(`__`, `\b`, `virtual_inheritance`, `uuidof`, `super`, `single_inheritance`, `multiple_inheritance`, `interface`, `event`), KeywordReserved, nil}, | |||
{`__(offload|blockingoffload|outer)\b`, KeywordPseudo, nil}, | |||
}, | |||
"classname": { | |||
{`(\[\[.+\]\])(\s*)`, ByGroups(NameAttribute, Text), nil}, | |||
{`[a-zA-Z_]\w*`, NameClass, Pop(1)}, | |||
{`\s*(?=[>{])`, Text, Pop(1)}, | |||
}, | |||
"whitespace": { | |||
{`^#if\s+0`, CommentPreproc, Push("if0")}, | |||
{`^#`, CommentPreproc, Push("macro")}, | |||
{`^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("if0")}, | |||
{`^(\s*(?:/[*].*?[*]/\s*)?)(#)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("macro")}, | |||
{`\n`, Text, nil}, | |||
{`\s+`, Text, nil}, | |||
{`\\\n`, Text, nil}, | |||
{`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil}, | |||
{`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil}, | |||
{`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil}, | |||
}, | |||
"statement": { | |||
Include("whitespace"), | |||
Include("statements"), | |||
{`[{]`, Punctuation, Push("root")}, | |||
{`[;}]`, Punctuation, Pop(1)}, | |||
}, | |||
"function": { | |||
Include("whitespace"), | |||
Include("statements"), | |||
{`;`, Punctuation, nil}, | |||
{`\{`, Punctuation, Push()}, | |||
{`\}`, Punctuation, Pop(1)}, | |||
}, | |||
"string": { | |||
{`"`, LiteralString, Pop(1)}, | |||
{`\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})`, LiteralStringEscape, nil}, | |||
{`[^\\"\n]+`, LiteralString, nil}, | |||
{`\\\n`, LiteralString, nil}, | |||
{`\\`, LiteralString, nil}, | |||
}, | |||
"macro": { | |||
{`(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)`, ByGroups(CommentPreproc, Text, CommentPreprocFile), nil}, | |||
{`[^/\n]+`, CommentPreproc, nil}, | |||
{`/[*](.|\n)*?[*]/`, CommentMultiline, nil}, | |||
{`//.*?\n`, CommentSingle, Pop(1)}, | |||
{`/`, CommentPreproc, nil}, | |||
{`(?<=\\)\n`, CommentPreproc, nil}, | |||
{`\n`, CommentPreproc, Pop(1)}, | |||
}, | |||
"if0": { | |||
{`^\s*#if.*?(?<!\\)\n`, CommentPreproc, Push()}, | |||
{`^\s*#el(?:se|if).*\n`, CommentPreproc, Pop(1)}, | |||
{`^\s*#endif.*?(?<!\\)\n`, CommentPreproc, Pop(1)}, | |||
{`.*?\n`, Comment, nil}, | |||
}, | |||
} | |||
} |
@@ -0,0 +1,74 @@ | |||
package c | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// CassandraCQL lexer.
//
// Registered lazily; rules come from cassandraCQLRules on first use.
var CassandraCQL = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "Cassandra CQL",
		Aliases: []string{"cassandra", "cql"},
		Filenames: []string{"*.cql"},
		MimeTypes: []string{"text/x-cql"},
		// NotMultiline: `^`/`$` match only at text start/end, not per line.
		NotMultiline: true,
		// CQL keywords are matched case-insensitively.
		CaseInsensitive: true,
	},
	cassandraCQLRules,
))
// cassandraCQLRules returns the state machine for the Cassandra CQL lexer.
// Rules are matched case-insensitively (see the Config). User-defined
// function bodies written in java/javascript are delegated to the matching
// sub-lexer via UsingByGroup.
func cassandraCQLRules() Rules {
	return Rules{
		"root": {
			{`\s+`, TextWhitespace, nil},
			// Both `--` and `//` start a line comment in CQL.
			{`(--|\/\/).*\n?`, CommentSingle, nil},
			{`/\*`, CommentMultiline, Push("multiline-comments")},
			{`(ascii|bigint|blob|boolean|counter|date|decimal|double|float|frozen|inet|int|list|map|set|smallint|text|time|timestamp|timeuuid|tinyint|tuple|uuid|varchar|varint)\b`, NameBuiltin, nil},
			{Words(``, `\b`, `ADD`, `AGGREGATE`, `ALL`, `ALLOW`, `ALTER`, `AND`, `ANY`, `APPLY`, `AS`, `ASC`, `AUTHORIZE`, `BATCH`, `BEGIN`, `BY`, `CLUSTERING`, `COLUMNFAMILY`, `COMPACT`, `CONSISTENCY`, `COUNT`, `CREATE`, `CUSTOM`, `DELETE`, `DESC`, `DISTINCT`, `DROP`, `EACH_QUORUM`, `ENTRIES`, `EXISTS`, `FILTERING`, `FROM`, `FULL`, `GRANT`, `IF`, `IN`, `INDEX`, `INFINITY`, `INSERT`, `INTO`, `KEY`, `KEYS`, `KEYSPACE`, `KEYSPACES`, `LEVEL`, `LIMIT`, `LOCAL_ONE`, `LOCAL_QUORUM`, `MATERIALIZED`, `MODIFY`, `NAN`, `NORECURSIVE`, `NOSUPERUSER`, `NOT`, `OF`, `ON`, `ONE`, `ORDER`, `PARTITION`, `PASSWORD`, `PER`, `PERMISSION`, `PERMISSIONS`, `PRIMARY`, `QUORUM`, `RENAME`, `REVOKE`, `SCHEMA`, `SELECT`, `STATIC`, `STORAGE`, `SUPERUSER`, `TABLE`, `THREE`, `TO`, `TOKEN`, `TRUNCATE`, `TTL`, `TWO`, `TYPE`, `UNLOGGED`, `UPDATE`, `USE`, `USER`, `USERS`, `USING`, `VALUES`, `VIEW`, `WHERE`, `WITH`, `WRITETIME`, `REPLICATION`, `OR`, `REPLACE`, `FUNCTION`, `CALLED`, `INPUT`, `RETURNS`, `LANGUAGE`, `ROLE`, `ROLES`, `TRIGGER`, `DURABLE_WRITES`, `LOGIN`, `OPTIONS`, `LOGGED`, `SFUNC`, `STYPE`, `FINALFUNC`, `INITCOND`, `IS`, `CONTAINS`, `JSON`, `PAGING`, `OFF`), Keyword, nil},
			{"[+*/<>=~!@#%^&|`?-]+", Operator, nil},
			// `LANGUAGE java/javascript AS '...'` (or $$...$$) UDF body:
			// group 1 names the sub-lexer, group 6 is the code it re-lexes.
			{
				`(?s)(java|javascript)(\s+)(AS)(\s+)('|\$\$)(.*?)(\5)`,
				UsingByGroup(
					internal.Get,
					1, 6,
					NameBuiltin, TextWhitespace, Keyword, TextWhitespace,
					LiteralStringHeredoc, LiteralStringHeredoc, LiteralStringHeredoc,
				),
				nil,
			},
			{`(true|false|null)\b`, KeywordConstant, nil},
			// UUID literals must be tried before plain numbers.
			{`0x[0-9a-f]+`, LiteralNumberHex, nil},
			{`[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}`, LiteralNumberHex, nil},
			// Leading-dot floats are deliberately flagged as an Error token.
			{`\.[0-9]+(e[+-]?[0-9]+)?`, Error, nil},
			{`-?[0-9]+(\.[0-9])?(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil},
			{`[0-9]+`, LiteralNumberInteger, nil},
			{`'`, LiteralStringSingle, Push("string")},
			// Double quotes delimit identifiers, not strings, in CQL.
			{`"`, LiteralStringName, Push("quoted-ident")},
			{`\$\$`, LiteralStringHeredoc, Push("dollar-string")},
			{`[a-z_]\w*`, Name, nil},
			{`:(['"]?)[a-z]\w*\b\1`, NameVariable, nil},
			{`[;:()\[\]\{\},.]`, Punctuation, nil},
		},
		// Nested /* ... */ comments: re-push per opener.
		"multiline-comments": {
			{`/\*`, CommentMultiline, Push("multiline-comments")},
			{`\*/`, CommentMultiline, Pop(1)},
			{`[^/*]+`, CommentMultiline, nil},
			{`[/*]`, CommentMultiline, nil},
		},
		"string": {
			{`[^']+`, LiteralStringSingle, nil},
			// Doubled quote is an escaped quote.
			{`''`, LiteralStringSingle, nil},
			{`'`, LiteralStringSingle, Pop(1)},
		},
		"quoted-ident": {
			{`[^"]+`, LiteralStringName, nil},
			{`""`, LiteralStringName, nil},
			{`"`, LiteralStringName, Pop(1)},
		},
		"dollar-string": {
			{`[^\$]+`, LiteralStringHeredoc, nil},
			{`\$\$`, LiteralStringHeredoc, Pop(1)},
		},
	}
}
@@ -0,0 +1,266 @@ | |||
package c | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Crystal lexer.
//
// Registered lazily; rules come from crystalRules on first use.
var Crystal = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "Crystal",
		Aliases: []string{"cr", "crystal"},
		Filenames: []string{"*.cr"},
		MimeTypes: []string{"text/x-crystal"},
		// DotAll: `.` in the rule patterns also matches newlines (the rules
		// rely on this for heredocs and multi-line regexes).
		DotAll: true,
	},
	crystalRules,
))
// crystalRules returns the state machine for the Crystal lexer.
//
// Layout: "root" handles keywords, numbers, variables, regex-vs-division
// disambiguation (via long lookbehinds), and macro syntax ({% %}, {{ }},
// @[...]). "strings" dispatches symbols, quotes and the %-literal family,
// each of which gets a dedicated state per delimiter pair: cb-* for {},
// sb-* for [], pa-* for (), ab-* for <> — with -intp-string, -string and
// -regex variants. Rule order within each state is significant.
func crystalRules() Rules {
	return Rules{
		"root": {
			{`#.*?$`, CommentSingle, nil},
			{Words(``, `\b`, `abstract`, `asm`, `as`, `begin`, `break`, `case`, `do`, `else`, `elsif`, `end`, `ensure`, `extend`, `ifdef`, `if`, `include`, `instance_sizeof`, `next`, `of`, `pointerof`, `private`, `protected`, `rescue`, `return`, `require`, `sizeof`, `super`, `then`, `typeof`, `unless`, `until`, `when`, `while`, `with`, `yield`), Keyword, nil},
			{Words(``, `\b`, `true`, `false`, `nil`), KeywordConstant, nil},
			{`(module|lib)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)`, ByGroups(Keyword, Text, NameNamespace), nil},
			// def/fun/macro with optional namespace prefix; also operator
			// definitions like `def +`.
			{`(def|fun|macro)(\s+)((?:[a-zA-Z_]\w*::)*)`, ByGroups(Keyword, Text, NameNamespace), Push("funcname")},
			{"def(?=[*%&^`~+-/\\[<>=])", Keyword, Push("funcname")},
			{`(class|struct|union|type|alias|enum)(\s+)((?:[a-zA-Z_]\w*::)*)`, ByGroups(Keyword, Text, NameNamespace), Push("classname")},
			{`(self|out|uninitialized)\b|(is_a|responds_to)\?`, KeywordPseudo, nil},
			{Words(``, `\b`, `debugger`, `record`, `pp`, `assert_responds_to`, `spawn`, `parallel`, `getter`, `setter`, `property`, `delegate`, `def_hash`, `def_equals`, `def_equals_and_hash`, `forward_missing_to`), NameBuiltinPseudo, nil},
			{`getter[!?]|property[!?]|__(DIR|FILE|LINE)__\b`, NameBuiltinPseudo, nil},
			// Builtins; the (?<!\.) lookbehind skips method calls like x.puts.
			{Words(`(?<!\.)`, `\b`, `Object`, `Value`, `Struct`, `Reference`, `Proc`, `Class`, `Nil`, `Symbol`, `Enum`, `Void`, `Bool`, `Number`, `Int`, `Int8`, `Int16`, `Int32`, `Int64`, `UInt8`, `UInt16`, `UInt32`, `UInt64`, `Float`, `Float32`, `Float64`, `Char`, `String`, `Pointer`, `Slice`, `Range`, `Exception`, `Regex`, `Mutex`, `StaticArray`, `Array`, `Hash`, `Set`, `Tuple`, `Deque`, `Box`, `Process`, `File`, `Dir`, `Time`, `Channel`, `Concurrent`, `Scheduler`, `abort`, `at_exit`, `caller`, `delay`, `exit`, `fork`, `future`, `get_stack_top`, `gets`, `lazy`, `loop`, `main`, `p`, `print`, `printf`, `puts`, `raise`, `rand`, `read_line`, `sleep`, `sprintf`, `system`, `with_color`), NameBuiltin, nil},
			// Heredocs: <<-NAME / <<"NAME" etc.; \2 back-references the quote.
			{"(?<!\\w)(<<-?)([\"`\\']?)([a-zA-Z_]\\w*)(\\2)(.*?\\n)", StringHeredoc, nil},
			{`(<<-?)("|\')()(\2)(.*?\n)`, StringHeredoc, nil},
			{`__END__`, CommentPreproc, Push("end-part")},
			// `/` starts a regex only in these lookbehind contexts
			// (after operators, keywords, or known regex-taking methods).
			{`(?:^|(?<=[=<>~!:])|(?<=(?:\s|;)when\s)|(?<=(?:\s|;)or\s)|(?<=(?:\s|;)and\s)|(?<=\.index\s)|(?<=\.scan\s)|(?<=\.sub\s)|(?<=\.sub!\s)|(?<=\.gsub\s)|(?<=\.gsub!\s)|(?<=\.match\s)|(?<=(?:\s|;)if\s)|(?<=(?:\s|;)elsif\s)|(?<=^when\s)|(?<=^index\s)|(?<=^scan\s)|(?<=^sub\s)|(?<=^gsub\s)|(?<=^sub!\s)|(?<=^gsub!\s)|(?<=^match\s)|(?<=^if\s)|(?<=^elsif\s))(\s*)(/)`, ByGroups(Text, LiteralStringRegex), Push("multiline-regex")},
			{`(?<=\(|,|\[)/`, LiteralStringRegex, Push("multiline-regex")},
			{`(\s+)(/)(?![\s=])`, ByGroups(Text, LiteralStringRegex), Push("multiline-regex")},
			// Numbers with _ separators and iN/uN/fN type suffixes; the
			// trailing ([/?])? group disambiguates a following / or ?.
			{`(0o[0-7]+(?:_[0-7]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?`, ByGroups(LiteralNumberOct, Text, Operator), nil},
			{`(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?`, ByGroups(LiteralNumberHex, Text, Operator), nil},
			{`(0b[01]+(?:_[01]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?`, ByGroups(LiteralNumberBin, Text, Operator), nil},
			// Floats: at least one of fraction / exponent / f-suffix present.
			{`((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)(?:e[+-]?[0-9]+)?(?:_?f[0-9]+)?)(\s*)([/?])?`, ByGroups(LiteralNumberFloat, Text, Operator), nil},
			{`((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)(?:_?f[0-9]+)?)(\s*)([/?])?`, ByGroups(LiteralNumberFloat, Text, Operator), nil},
			{`((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)?(?:_?f[0-9]+))(\s*)([/?])?`, ByGroups(LiteralNumberFloat, Text, Operator), nil},
			{`(0\b|[1-9][\d]*(?:_\d+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?`, ByGroups(LiteralNumberInteger, Text, Operator), nil},
			// Class (@@), instance (@) and global ($) variables.
			{`@@[a-zA-Z_]\w*`, NameVariableClass, nil},
			{`@[a-zA-Z_]\w*`, NameVariableInstance, nil},
			{`\$\w+`, NameVariableGlobal, nil},
			{"\\$[!@&`\\'+~=/\\\\,;.<>_*$?:\"^-]", NameVariableGlobal, nil},
			{`\$-[0adFiIlpvw]`, NameVariableGlobal, nil},
			{`::`, Operator, nil},
			Include("strings"),
			// Character literals: ?a, ?\n, ?\x41, ?\C-x etc.
			{`\?(\\[MC]-)*(\\([\\befnrtv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)(?!\w)`, LiteralStringChar, nil},
			{`[A-Z][A-Z_]+\b`, NameConstant, nil},
			// Macro syntax: {% control %}, {{ expression }}, @[Annotation].
			{`\{%`, LiteralStringInterpol, Push("in-macro-control")},
			{`\{\{`, LiteralStringInterpol, Push("in-macro-expr")},
			{`(@\[)(\s*)([A-Z]\w*)`, ByGroups(Operator, Text, NameDecorator), Push("in-attr")},
			// Operator method calls: .+, ::<< etc.
			{Words(`(\.|::)`, ``, `!=`, `!~`, `!`, `%`, `&&`, `&`, `**`, `*`, `+`, `-`, `/`, `<=>`, `<<`, `<=`, `<`, `===`, `==`, `=~`, `=`, `>=`, `>>`, `>`, `[]=`, `[]?`, `[]`, `^`, `||`, `|`, `~`), ByGroups(Operator, NameOperator), nil},
			{"(\\.|::)([a-zA-Z_]\\w*[!?]?|[*%&^`~+\\-/\\[<>=])", ByGroups(Operator, Name), nil},
			{`[a-zA-Z_]\w*(?:[!?](?!=))?`, Name, nil},
			{`(\[|\]\??|\*\*|<=>?|>=|<<?|>>?|=~|===|!~|&&?|\|\||\.{1,3})`, Operator, nil},
			{`[-+/*%=<>&!^|~]=?`, Operator, nil},
			{`[(){};,/?:\\]`, Punctuation, nil},
			{`\s+`, Text, nil},
		},
		// After def/fun/macro: optional receiver, then a method name or an
		// operator symbol.
		"funcname": {
			{"(?:([a-zA-Z_]\\w*)(\\.))?([a-zA-Z_]\\w*[!?]?|\\*\\*?|[-+]@?|[/%&|^`~]|\\[\\]=?|<<|>>|<=?>|>=?|===?)", ByGroups(NameClass, Operator, NameFunction), Pop(1)},
			Default(Pop(1)),
		},
		// After class/struct/...: class name, optionally parenthesized.
		"classname": {
			{`[A-Z_]\w*`, NameClass, nil},
			{`(\()(\s*)([A-Z_]\w*)(\s*)(\))`, ByGroups(Punctuation, Text, NameClass, Text, Punctuation), nil},
			Default(Pop(1)),
		},
		// Inside #{...} interpolation; nested braces re-push this state.
		"in-intp": {
			{`\{`, LiteralStringInterpol, Push()},
			{`\}`, LiteralStringInterpol, Pop(1)},
			Include("root"),
		},
		"string-intp": {
			{`#\{`, LiteralStringInterpol, Push("in-intp")},
		},
		"string-escaped": {
			{`\\([\\befnstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})`, LiteralStringEscape, nil},
		},
		"string-intp-escaped": {
			Include("string-intp"),
			Include("string-escaped"),
		},
		"interpolated-regex": {
			Include("string-intp"),
			{`[\\#]`, LiteralStringRegex, nil},
			{`[^\\#]+`, LiteralStringRegex, nil},
		},
		"interpolated-string": {
			Include("string-intp"),
			{`[\\#]`, LiteralStringOther, nil},
			{`[^\\#]+`, LiteralStringOther, nil},
		},
		// Body of a /.../ regex literal, ending at / plus optional flags.
		"multiline-regex": {
			Include("string-intp"),
			{`\\\\`, LiteralStringRegex, nil},
			{`\\/`, LiteralStringRegex, nil},
			{`[\\#]`, LiteralStringRegex, nil},
			{`[^\\/#]+`, LiteralStringRegex, nil},
			{`/[imsx]*`, LiteralStringRegex, Pop(1)},
		},
		// Everything after __END__ is data.
		"end-part": {
			{`.+`, CommentPreproc, Pop(1)},
		},
		// {% ... %} macro control; nested openers re-push.
		"in-macro-control": {
			{`\{%`, LiteralStringInterpol, Push()},
			{`%\}`, LiteralStringInterpol, Pop(1)},
			{`for\b|in\b`, Keyword, nil},
			Include("root"),
		},
		// {{ ... }} macro expression; nested openers re-push.
		"in-macro-expr": {
			{`\{\{`, LiteralStringInterpol, Push()},
			{`\}\}`, LiteralStringInterpol, Pop(1)},
			Include("root"),
		},
		// @[ ... ] annotation body; nested brackets re-push.
		"in-attr": {
			{`\[`, Operator, Push()},
			{`\]`, Operator, Pop(1)},
			Include("root"),
		},
		// Dispatch for symbols, quoted strings and the %-literal family.
		"strings": {
			{`\:@{0,2}[a-zA-Z_]\w*[!?]?`, LiteralStringSymbol, nil},
			{Words(`\:@{0,2}`, ``, `!=`, `!~`, `!`, `%`, `&&`, `&`, `**`, `*`, `+`, `-`, `/`, `<=>`, `<<`, `<=`, `<`, `===`, `==`, `=~`, `=`, `>=`, `>>`, `>`, `[]=`, `[]?`, `[]`, `^`, `||`, `|`, `~`), LiteralStringSymbol, nil},
			{`:'(\\\\|\\'|[^'])*'`, LiteralStringSymbol, nil},
			{`'(\\\\|\\'|[^']|\\[^'\\]+)'`, LiteralStringChar, nil},
			{`:"`, LiteralStringSymbol, Push("simple-sym")},
			// `name:` hash-key shorthand (but not `::`).
			{`([a-zA-Z_]\w*)(:)(?!:)`, ByGroups(LiteralStringSymbol, Punctuation), nil},
			{`"`, LiteralStringDouble, Push("simple-string")},
			{"(?<!\\.)`", LiteralStringBacktick, Push("simple-backtick")},
			// %-literals by delimiter pair: {} [] () <>, each with plain,
			// %w/%i word-list and %r regex variants.
			{`%\{`, LiteralStringOther, Push("cb-intp-string")},
			{`%[wi]\{`, LiteralStringOther, Push("cb-string")},
			{`%r\{`, LiteralStringRegex, Push("cb-regex")},
			{`%\[`, LiteralStringOther, Push("sb-intp-string")},
			{`%[wi]\[`, LiteralStringOther, Push("sb-string")},
			{`%r\[`, LiteralStringRegex, Push("sb-regex")},
			{`%\(`, LiteralStringOther, Push("pa-intp-string")},
			{`%[wi]\(`, LiteralStringOther, Push("pa-string")},
			{`%r\(`, LiteralStringRegex, Push("pa-regex")},
			{`%<`, LiteralStringOther, Push("ab-intp-string")},
			{`%[wi]<`, LiteralStringOther, Push("ab-string")},
			{`%r<`, LiteralStringRegex, Push("ab-regex")},
			// Arbitrary non-word delimiters, matched via back-reference.
			{`(%r([\W_]))((?:\\\2|(?!\2).)*)(\2[imsx]*)`, String, nil},
			{`(%[wi]([\W_]))((?:\\\2|(?!\2).)*)(\2)`, String, nil},
			{`(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)`, ByGroups(Text, LiteralStringOther, None), nil},
			{`^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)`, ByGroups(Text, LiteralStringOther, None), nil},
			{`(%([\[{(<]))((?:\\\2|(?!\2).)*)(\2)`, String, nil},
		},
		"simple-string": {
			Include("string-intp-escaped"),
			{`[^\\"#]+`, LiteralStringDouble, nil},
			{`[\\#]`, LiteralStringDouble, nil},
			{`"`, LiteralStringDouble, Pop(1)},
		},
		"simple-sym": {
			Include("string-escaped"),
			{`[^\\"#]+`, LiteralStringSymbol, nil},
			{`[\\#]`, LiteralStringSymbol, nil},
			{`"`, LiteralStringSymbol, Pop(1)},
		},
		"simple-backtick": {
			Include("string-intp-escaped"),
			{"[^\\\\`#]+", LiteralStringBacktick, nil},
			{`[\\#]`, LiteralStringBacktick, nil},
			{"`", LiteralStringBacktick, Pop(1)},
		},
		// In every delimiter-pair state below, an unescaped opener re-pushes
		// the state so nested balanced delimiters are handled.
		"cb-intp-string": {
			{`\\[\{]`, LiteralStringOther, nil},
			{`\{`, LiteralStringOther, Push()},
			{`\}`, LiteralStringOther, Pop(1)},
			Include("string-intp-escaped"),
			{`[\\#{}]`, LiteralStringOther, nil},
			{`[^\\#{}]+`, LiteralStringOther, nil},
		},
		"cb-string": {
			{`\\[\\{}]`, LiteralStringOther, nil},
			{`\{`, LiteralStringOther, Push()},
			{`\}`, LiteralStringOther, Pop(1)},
			{`[\\#{}]`, LiteralStringOther, nil},
			{`[^\\#{}]+`, LiteralStringOther, nil},
		},
		"cb-regex": {
			{`\\[\\{}]`, LiteralStringRegex, nil},
			{`\{`, LiteralStringRegex, Push()},
			{`\}[imsx]*`, LiteralStringRegex, Pop(1)},
			Include("string-intp"),
			{`[\\#{}]`, LiteralStringRegex, nil},
			{`[^\\#{}]+`, LiteralStringRegex, nil},
		},
		"sb-intp-string": {
			{`\\[\[]`, LiteralStringOther, nil},
			{`\[`, LiteralStringOther, Push()},
			{`\]`, LiteralStringOther, Pop(1)},
			Include("string-intp-escaped"),
			{`[\\#\[\]]`, LiteralStringOther, nil},
			{`[^\\#\[\]]+`, LiteralStringOther, nil},
		},
		"sb-string": {
			{`\\[\\\[\]]`, LiteralStringOther, nil},
			{`\[`, LiteralStringOther, Push()},
			{`\]`, LiteralStringOther, Pop(1)},
			{`[\\#\[\]]`, LiteralStringOther, nil},
			{`[^\\#\[\]]+`, LiteralStringOther, nil},
		},
		"sb-regex": {
			{`\\[\\\[\]]`, LiteralStringRegex, nil},
			{`\[`, LiteralStringRegex, Push()},
			{`\][imsx]*`, LiteralStringRegex, Pop(1)},
			Include("string-intp"),
			{`[\\#\[\]]`, LiteralStringRegex, nil},
			{`[^\\#\[\]]+`, LiteralStringRegex, nil},
		},
		"pa-intp-string": {
			{`\\[\(]`, LiteralStringOther, nil},
			{`\(`, LiteralStringOther, Push()},
			{`\)`, LiteralStringOther, Pop(1)},
			Include("string-intp-escaped"),
			{`[\\#()]`, LiteralStringOther, nil},
			{`[^\\#()]+`, LiteralStringOther, nil},
		},
		"pa-string": {
			{`\\[\\()]`, LiteralStringOther, nil},
			{`\(`, LiteralStringOther, Push()},
			{`\)`, LiteralStringOther, Pop(1)},
			{`[\\#()]`, LiteralStringOther, nil},
			{`[^\\#()]+`, LiteralStringOther, nil},
		},
		"pa-regex": {
			{`\\[\\()]`, LiteralStringRegex, nil},
			{`\(`, LiteralStringRegex, Push()},
			{`\)[imsx]*`, LiteralStringRegex, Pop(1)},
			Include("string-intp"),
			{`[\\#()]`, LiteralStringRegex, nil},
			{`[^\\#()]+`, LiteralStringRegex, nil},
		},
		"ab-intp-string": {
			{`\\[<]`, LiteralStringOther, nil},
			{`<`, LiteralStringOther, Push()},
			{`>`, LiteralStringOther, Pop(1)},
			Include("string-intp-escaped"),
			{`[\\#<>]`, LiteralStringOther, nil},
			{`[^\\#<>]+`, LiteralStringOther, nil},
		},
		"ab-string": {
			{`\\[\\<>]`, LiteralStringOther, nil},
			{`<`, LiteralStringOther, Push()},
			{`>`, LiteralStringOther, Pop(1)},
			{`[\\#<>]`, LiteralStringOther, nil},
			{`[^\\#<>]+`, LiteralStringOther, nil},
		},
		"ab-regex": {
			{`\\[\\<>]`, LiteralStringRegex, nil},
			{`<`, LiteralStringRegex, Push()},
			{`>[imsx]*`, LiteralStringRegex, Pop(1)},
			Include("string-intp"),
			{`[\\#<>]`, LiteralStringRegex, nil},
			{`[^\\#<>]+`, LiteralStringRegex, nil},
		},
	}
}
@@ -0,0 +1,56 @@ | |||
package c | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// CSharp lexer registration. Matches *.cs files and the "csharp"/"c#"
// aliases. DotAll lets `.` in the rule regexes span newlines (needed for
// multi-line /* */ comments); EnsureNL presumably appends a trailing
// newline before lexing — TODO confirm against chroma's Config docs.
var CSharp = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "C#",
		Aliases: []string{"csharp", "c#"},
		Filenames: []string{"*.cs"},
		MimeTypes: []string{"text/x-csharp"},
		DotAll: true,
		EnsureNL: true,
	},
	cSharpRules,
))
// cSharpRules returns the lexical grammar for C#. Rules within a state are
// tried in order, so more specific patterns must precede general ones
// (e.g. `///` doc comments before `//` line comments).
func cSharpRules() Rules {
	return Rules{
		"root": {
			// Attribute on its own line, e.g. [Serializable].
			{`^\s*\[.*?\]`, NameAttribute, nil},
			{`[^\S\n]+`, Text, nil},
			// Backslash line continuation.
			{`\\\n`, Text, nil},
			// XML doc comment, then ordinary line and block comments.
			{`///[^\n\r]+`, CommentSpecial, nil},
			{`//[^\n\r]+`, CommentSingle, nil},
			{`/[*].*?[*]/`, CommentMultiline, nil},
			{`\n`, Text, nil},
			{`[~!%^&*()+=|\[\]:;,.<>/?-]`, Punctuation, nil},
			{`[{}]`, Punctuation, nil},
			// Verbatim @"..." and interpolated $"..." / $@"..." strings;
			// "" is the escaped quote inside them.
			{`@"(""|[^"])*"`, LiteralString, nil},
			{`\$@?"(""|[^"])*"`, LiteralString, nil},
			// Regular string literal; a bare newline also terminates it.
			{`"(\\\\|\\"|[^"\n])*["\n]`, LiteralString, nil},
			{`'\\.'|'[^\\]'`, LiteralStringChar, nil},
			// Hex literal, or decimal/float with optional suffix.
			{`0[xX][0-9a-fA-F]+[Ll]?|[0-9_](\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?`, LiteralNumber, nil},
			// Preprocessor directives (#if, #region, #pragma, #nullable, ...).
			{`#[ \t]*(if|endif|else|elif|define|undef|line|error|warning|region|endregion|pragma|nullable)\b[^\n\r]+`, CommentPreproc, nil},
			{`\b(extern)(\s+)(alias)\b`, ByGroups(Keyword, Text, Keyword), nil},
			{`(abstract|as|async|await|base|break|by|case|catch|checked|const|continue|default|delegate|do|else|enum|event|explicit|extern|false|finally|fixed|for|foreach|goto|if|implicit|in|init|internal|is|let|lock|new|null|on|operator|out|override|params|private|protected|public|readonly|ref|return|sealed|sizeof|stackalloc|static|switch|this|throw|true|try|typeof|unchecked|unsafe|virtual|void|while|get|set|new|partial|yield|add|remove|value|alias|ascending|descending|from|group|into|orderby|select|thenby|where|join|equals)\b`, Keyword, nil},
			{`(global)(::)`, ByGroups(Keyword, Punctuation), nil},
			// Built-in types, optionally nullable (e.g. int?).
			{`(bool|byte|char|decimal|double|dynamic|float|int|long|object|sbyte|short|string|uint|ulong|ushort|var)\b\??`, KeywordType, nil},
			{`(class|struct|record|interface)(\s+)`, ByGroups(Keyword, Text), Push("class")},
			{`(namespace|using)(\s+)`, ByGroups(Keyword, Text), Push("namespace")},
			// Identifier, optionally @-prefixed to escape a keyword.
			{`@?[_a-zA-Z]\w*`, Name, nil},
		},
		// After class/struct/record/interface: the declared type name.
		"class": {
			{`@?[_a-zA-Z]\w*`, NameClass, Pop(1)},
			Default(Pop(1)),
		},
		// After namespace/using: a dotted path. The lookahead `(?=\()`
		// bails out for using-statements such as `using (var x = ...)`.
		"namespace": {
			{`(?=\()`, Text, Pop(1)},
			{`(@?[_a-zA-Z]\w*|\.)+`, NameNamespace, Pop(1)},
		},
	}
}
@@ -0,0 +1,139 @@ | |||
package c | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Cython lexer registration: Python-with-C-extensions sources. Matches
// *.pyx implementation files plus *.pxd declaration and *.pxi include
// files.
var Cython = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "Cython",
		Aliases: []string{"cython", "pyx", "pyrex"},
		Filenames: []string{"*.pyx", "*.pxd", "*.pxi"},
		MimeTypes: []string{"text/x-cython", "application/x-cython"},
	},
	cythonRules,
))
// cythonRules returns the lexical grammar for Cython. It follows the
// usual Python shape (string states, builtins, numbers) plus C-level
// constructs: cdef/cpdef declarations, <type> casts, and DEF/IF
// compile-time directives. Rule order within a state is significant.
func cythonRules() Rules {
	return Rules{
		"root": {
			{`\n`, Text, nil},
			// Docstrings at the start of a line.
			{`^(\s*)("""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringDoc), nil},
			{`^(\s*)('''(?:.|\n)*?''')`, ByGroups(Text, LiteralStringDoc), nil},
			{`[^\S\n]+`, Text, nil},
			{`#.*$`, Comment, nil},
			{`[]{}:(),;[]`, Punctuation, nil},
			{`\\\n`, Text, nil},
			{`\\`, Text, nil},
			{`(in|is|and|or|not)\b`, OperatorWord, nil},
			// C-style cast, e.g. <int>x.
			{`(<)([a-zA-Z0-9.?]+)(>)`, ByGroups(Punctuation, KeywordType, Punctuation), nil},
			{`!=|==|<<|>>|[-~+/*%=<>&^|.?]`, Operator, nil},
			{`(from)(\d+)(<=)(\s+)(<)(\d+)(:)`, ByGroups(Keyword, LiteralNumberInteger, Operator, Name, Operator, Name, Punctuation), nil},
			Include("keywords"),
			{`(def|property)(\s+)`, ByGroups(Keyword, Text), Push("funcname")},
			// cdef/cpdef open a C-level declaration.
			{`(cp?def)(\s+)`, ByGroups(Keyword, Text), Push("cdef")},
			{`(cdef)(:)`, ByGroups(Keyword, Punctuation), nil},
			{`(class|struct)(\s+)`, ByGroups(Keyword, Text), Push("classname")},
			{`(from)(\s+)`, ByGroups(Keyword, Text), Push("fromimport")},
			{`(c?import)(\s+)`, ByGroups(Keyword, Text), Push("import")},
			Include("builtins"),
			Include("backtick"),
			// Raw string prefixes first, then plain strings. "tdqs"/"tsqs"
			// are triple-double/triple-single quoted states; "dqs"/"sqs"
			// are their single-line counterparts.
			{`(?:[rR]|[uU][rR]|[rR][uU])"""`, LiteralString, Push("tdqs")},
			{`(?:[rR]|[uU][rR]|[rR][uU])'''`, LiteralString, Push("tsqs")},
			{`(?:[rR]|[uU][rR]|[rR][uU])"`, LiteralString, Push("dqs")},
			{`(?:[rR]|[uU][rR]|[rR][uU])'`, LiteralString, Push("sqs")},
			{`[uU]?"""`, LiteralString, Combined("stringescape", "tdqs")},
			{`[uU]?'''`, LiteralString, Combined("stringescape", "tsqs")},
			{`[uU]?"`, LiteralString, Combined("stringescape", "dqs")},
			{`[uU]?'`, LiteralString, Combined("stringescape", "sqs")},
			Include("name"),
			Include("numbers"),
		},
		"keywords": {
			{Words(``, `\b`, `assert`, `break`, `by`, `continue`, `ctypedef`, `del`, `elif`, `else`, `except`, `except?`, `exec`, `finally`, `for`, `fused`, `gil`, `global`, `if`, `include`, `lambda`, `nogil`, `pass`, `print`, `raise`, `return`, `try`, `while`, `yield`, `as`, `with`), Keyword, nil},
			// Cython compile-time directives.
			{`(DEF|IF|ELIF|ELSE)\b`, CommentPreproc, nil},
		},
		// Python builtins/exceptions; `(?<!\.)` avoids matching attribute
		// accesses like obj.len.
		"builtins": {
			{Words(`(?<!\.)`, `\b`, `__import__`, `abs`, `all`, `any`, `apply`, `basestring`, `bin`, `bool`, `buffer`, `bytearray`, `bytes`, `callable`, `chr`, `classmethod`, `cmp`, `coerce`, `compile`, `complex`, `delattr`, `dict`, `dir`, `divmod`, `enumerate`, `eval`, `execfile`, `exit`, `file`, `filter`, `float`, `frozenset`, `getattr`, `globals`, `hasattr`, `hash`, `hex`, `id`, `input`, `int`, `intern`, `isinstance`, `issubclass`, `iter`, `len`, `list`, `locals`, `long`, `map`, `max`, `min`, `next`, `object`, `oct`, `open`, `ord`, `pow`, `property`, `range`, `raw_input`, `reduce`, `reload`, `repr`, `reversed`, `round`, `set`, `setattr`, `slice`, `sorted`, `staticmethod`, `str`, `sum`, `super`, `tuple`, `type`, `unichr`, `unicode`, `unsigned`, `vars`, `xrange`, `zip`), NameBuiltin, nil},
			{`(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|NULL)\b`, NameBuiltinPseudo, nil},
			{Words(`(?<!\.)`, `\b`, `ArithmeticError`, `AssertionError`, `AttributeError`, `BaseException`, `DeprecationWarning`, `EOFError`, `EnvironmentError`, `Exception`, `FloatingPointError`, `FutureWarning`, `GeneratorExit`, `IOError`, `ImportError`, `ImportWarning`, `IndentationError`, `IndexError`, `KeyError`, `KeyboardInterrupt`, `LookupError`, `MemoryError`, `NameError`, `NotImplemented`, `NotImplementedError`, `OSError`, `OverflowError`, `OverflowWarning`, `PendingDeprecationWarning`, `ReferenceError`, `RuntimeError`, `RuntimeWarning`, `StandardError`, `StopIteration`, `SyntaxError`, `SyntaxWarning`, `SystemError`, `SystemExit`, `TabError`, `TypeError`, `UnboundLocalError`, `UnicodeDecodeError`, `UnicodeEncodeError`, `UnicodeError`, `UnicodeTranslateError`, `UnicodeWarning`, `UserWarning`, `ValueError`, `Warning`, `ZeroDivisionError`), NameException, nil},
		},
		// Float before octal/hex/long/int so the longest form wins.
		"numbers": {
			{`(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?`, LiteralNumberFloat, nil},
			{`0\d+`, LiteralNumberOct, nil},
			{`0[xX][a-fA-F0-9]+`, LiteralNumberHex, nil},
			{`\d+L`, LiteralNumberIntegerLong, nil},
			{`\d+`, LiteralNumberInteger, nil},
		},
		// Legacy `repr` backticks.
		"backtick": {
			{"`.*?`", LiteralStringBacktick, nil},
		},
		"name": {
			{`@\w+`, NameDecorator, nil},
			{`[a-zA-Z_]\w*`, Name, nil},
		},
		"funcname": {
			{`[a-zA-Z_]\w*`, NameFunction, Pop(1)},
		},
		// Body of a cdef/cpdef declaration up to the colon or a string.
		"cdef": {
			{`(public|readonly|extern|api|inline)\b`, KeywordReserved, nil},
			{`(struct|enum|union|class)\b`, Keyword, nil},
			// Identifier followed by '(', ':', '#', '=' or EOL is the
			// declared function/variable name.
			{`([a-zA-Z_]\w*)(\s*)(?=[(:#=]|$)`, ByGroups(NameFunction, Text), Pop(1)},
			{`([a-zA-Z_]\w*)(\s*)(,)`, ByGroups(NameFunction, Text, Punctuation), nil},
			{`from\b`, Keyword, Pop(1)},
			{`as\b`, Keyword, nil},
			{`:`, Punctuation, Pop(1)},
			{`(?=["\'])`, Text, Pop(1)},
			// Anything else identifier-like here is a C type name.
			{`[a-zA-Z_]\w*`, KeywordType, nil},
			{`.`, Text, nil},
		},
		"classname": {
			{`[a-zA-Z_]\w*`, NameClass, Pop(1)},
		},
		"import": {
			{`(\s+)(as)(\s+)`, ByGroups(Text, Keyword, Text), nil},
			{`[a-zA-Z_][\w.]*`, NameNamespace, nil},
			{`(\s*)(,)(\s*)`, ByGroups(Text, Operator, Text), nil},
			Default(Pop(1)),
		},
		"fromimport": {
			{`(\s+)(c?import)\b`, ByGroups(Text, Keyword), Pop(1)},
			{`[a-zA-Z_.][\w.]*`, NameNamespace, nil},
			Default(Pop(1)),
		},
		// Escape sequences, combined into the string states below.
		"stringescape": {
			{`\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})`, LiteralStringEscape, nil},
		},
		// Shared string-body rules; %-style format specs get Interpol.
		"strings": {
			{`%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]`, LiteralStringInterpol, nil},
			{`[^\\\'"%\n]+`, LiteralString, nil},
			{`[\'"\\]`, LiteralString, nil},
			{`%`, LiteralString, nil},
		},
		// Newlines inside triple-quoted strings only.
		"nl": {
			{`\n`, LiteralString, nil},
		},
		"dqs": {
			{`"`, LiteralString, Pop(1)},
			{`\\\\|\\"|\\\n`, LiteralStringEscape, nil},
			Include("strings"),
		},
		"sqs": {
			{`'`, LiteralString, Pop(1)},
			{`\\\\|\\'|\\\n`, LiteralStringEscape, nil},
			Include("strings"),
		},
		"tdqs": {
			{`"""`, LiteralString, Pop(1)},
			Include("strings"),
			Include("nl"),
		},
		"tsqs": {
			{`'''`, LiteralString, Pop(1)},
			Include("strings"),
			Include("nl"),
		},
	}
}
@@ -0,0 +1,2 @@ | |||
// Package circular exists to break circular dependencies between lexers. | |||
package circular |
@@ -0,0 +1,86 @@ | |||
package circular | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// PHP lexer for pure PHP code (not embedded in HTML); PHP-in-HTML is
// handled by the PHTML lexer in this package. CaseInsensitive matches
// PHP's case-insensitive keywords; DotAll lets `.` span newlines, which
// the heredoc and multi-line comment rules rely on.
var PHP = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "PHP",
		Aliases: []string{"php", "php3", "php4", "php5"},
		Filenames: []string{"*.php", "*.php[345]", "*.inc"},
		MimeTypes: []string{"text/x-php"},
		DotAll: true,
		CaseInsensitive: true,
		EnsureNL: true,
	},
	phpRules,
))
// phpRules builds the standalone PHP grammar by renaming the shared "php"
// state from phpCommonRules (entered by PHTML via Push("php")) to "root".
func phpRules() Rules {
	return phpCommonRules().Rename("php", "root")
}
// phpCommonRules returns the PHP grammar shared by the PHP and PHTML
// lexers. The main state is named "php" (not "root") so PHTML can push
// into it after an opening <?php tag; phpRules renames it for standalone
// use. Rule order within a state is significant.
func phpCommonRules() Rules {
	return Rules{
		"php": {
			// Closing tag returns to the surrounding (HTML) state.
			{`\?>`, CommentPreproc, Pop(1)},
			// Heredoc/nowdoc: <<<LABEL ... LABEL; \2 is the optional quote
			// and \3 the label, matched again at the terminator.
			{`(<<<)([\'"]?)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)(\2\n.*?\n\s*)(\3)(;?)(\n)`, ByGroups(LiteralString, LiteralString, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, Punctuation, Text), nil},
			{`\s+`, Text, nil},
			{`#.*?\n`, CommentSingle, nil},
			{`//.*?\n`, CommentSingle, nil},
			// Empty /**/ must be matched before the doc-comment rule.
			{`/\*\*/`, CommentMultiline, nil},
			{`/\*\*.*?\*/`, LiteralStringDoc, nil},
			{`/\*.*?\*/`, CommentMultiline, nil},
			// Property/constant access: ->name or ::name.
			{`(->|::)(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Operator, Text, NameAttribute), nil},
			{`[~!%^&*+=|:.<>/@-]+`, Operator, nil},
			{`\?`, Operator, nil},
			{`[\[\]{}();,]+`, Punctuation, nil},
			{`(class)(\s+)`, ByGroups(Keyword, Text), Push("classname")},
			// Anonymous function: `function (` keeps no name.
			{`(function)(\s*)(?=\()`, ByGroups(Keyword, Text), nil},
			// Named function, optionally returning by reference (&).
			{`(function)(\s+)(&?)(\s*)`, ByGroups(Keyword, Text, Operator, Text), Push("functionname")},
			{`(const)(\s+)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Keyword, Text, NameConstant), nil},
			{`(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|FALSE|print|for|require|continue|foreach|require_once|declare|return|default|static|do|switch|die|stdClass|echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|virtual|endfor|include_once|while|endforeach|global|endif|list|endswitch|new|endwhile|not|array|E_ALL|NULL|final|php_user_filter|interface|implements|public|private|protected|abstract|clone|try|catch|throw|this|use|namespace|trait|yield|finally)\b`, Keyword, nil},
			{`(true|false|null)\b`, KeywordConstant, nil},
			Include("magicconstants"),
			// Variable-variables ${$name} and plain $name variables.
			// Identifiers allow bytes >= 0x80 (PHP permits them) and
			// backslashes for namespaced names.
			{`\$\{\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*\}`, NameVariable, nil},
			{`\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameVariable, nil},
			{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameOther, nil},
			// Numeric literals: floats, then octal/hex/int/binary.
			{`(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil},
			{`\d+e[+-]?[0-9]+`, LiteralNumberFloat, nil},
			{`0[0-7]+`, LiteralNumberOct, nil},
			{`0x[a-f0-9_]+`, LiteralNumberHex, nil},
			{`\d[\d_]*`, LiteralNumberInteger, nil},
			{`0b[01]+`, LiteralNumberBin, nil},
			{`'([^'\\]*(?:\\.[^'\\]*)*)'`, LiteralStringSingle, nil},
			// Shell-exec backtick string.
			{"`([^`\\\\]*(?:\\\\.[^`\\\\]*)*)`", LiteralStringBacktick, nil},
			// Double-quoted strings get their own state for interpolation.
			{`"`, LiteralStringDouble, Push("string")},
		},
		"magicfuncs": {
			{Words(``, `\b`, `__construct`, `__destruct`, `__call`, `__callStatic`, `__get`, `__set`, `__isset`, `__unset`, `__sleep`, `__wakeup`, `__toString`, `__invoke`, `__set_state`, `__clone`, `__debugInfo`), NameFunctionMagic, nil},
		},
		"magicconstants": {
			{Words(``, `\b`, `__LINE__`, `__FILE__`, `__DIR__`, `__FUNCTION__`, `__CLASS__`, `__TRAIT__`, `__METHOD__`, `__NAMESPACE__`), NameConstant, nil},
		},
		"classname": {
			{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameClass, Pop(1)},
		},
		// Magic methods get their special token before the general name.
		"functionname": {
			Include("magicfuncs"),
			{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameFunction, Pop(1)},
			Default(Pop(1)),
		},
		// Inside a double-quoted string: escapes and $/{ } interpolation.
		"string": {
			{`"`, LiteralStringDouble, Pop(1)},
			{`[^{$"\\]+`, LiteralStringDouble, nil},
			{`\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})`, LiteralStringEscape, nil},
			// $var with optional [index] or ->prop suffix.
			{`\$(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*(\[\S+?\]|->(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)?`, LiteralStringInterpol, nil},
			// {${expr}} and {$expr} re-lex their contents as PHP.
			{`(\{\$\{)(.*?)(\}\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil},
			{`(\{)(\$.*?)(\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil},
			{`(\$\{)(\S+)(\})`, ByGroups(LiteralStringInterpol, NameVariable, LiteralStringInterpol), nil},
			// Lone $, { or \ that did not start an interpolation.
			{`[${\\]`, LiteralStringDouble, nil},
		},
	}
}
@@ -0,0 +1,39 @@ | |||
package circular | |||
import ( | |||
"strings" | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/h" | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// PHTML lexer is PHP embedded in HTML: a DelegatingLexer where tokens the
// PHP rules emit as Other are re-lexed by the HTML lexer. Priority 2 makes
// PHTML win over other lexers registered for the shared *.php patterns.
// The analyser boosts files that literally contain "<?php".
var PHTML = internal.Register(DelegatingLexer(h.HTML, MustNewLazyLexer(
	&Config{
		Name: "PHTML",
		Aliases: []string{"phtml"},
		Filenames: []string{"*.phtml", "*.php", "*.php[345]", "*.inc"},
		MimeTypes: []string{"application/x-php", "application/x-httpd-php", "application/x-httpd-php3", "application/x-httpd-php4", "application/x-httpd-php5", "text/x-php"},
		DotAll: true,
		CaseInsensitive: true,
		EnsureNL: true,
		Priority: 2,
	},
	phtmlRules,
).SetAnalyser(func(text string) float32 {
	// An explicit "<?php" open tag is strong evidence of embedded PHP.
	if strings.Contains(text, "<?php") {
		return 0.5
	}
	return 0.0
})))
// phtmlRules marks everything outside "<? ... ?>" as Other (handed to the
// HTML delegate by the DelegatingLexer) and enters the shared "php" state
// from phpCommonRules inside an opening tag.
func phtmlRules() Rules {
	return Rules{
		"root": {
			// <? or <?php switches into the merged PHP rules.
			{`<\?(php)?`, CommentPreproc, Push("php")},
			{`[^<]+`, Other, nil},
			{`<`, Other, nil},
		},
	}.Merge(phpCommonRules())
}
@@ -0,0 +1,73 @@ | |||
package d | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// D lexer registration for the D programming language. Matches *.d
// sources and *.di interface files. Grammar reference:
// https://dlang.org/spec/lex.html
var D = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "D",
		Aliases: []string{"d"},
		Filenames: []string{"*.d", "*.di"},
		MimeTypes: []string{"text/x-d"},
		EnsureNL: true,
	},
	dRules,
))
// dRules returns the lexical grammar for D. Links to the relevant
// sections of the D language spec are kept beside the rules they
// implement; rule order within a state is significant.
func dRules() Rules {
	return Rules{
		"root": {
			{`[^\S\n]+`, Text, nil},
			// https://dlang.org/spec/lex.html#comment
			{`//.*?\n`, CommentSingle, nil},
			{`/\*.*?\*/`, CommentMultiline, nil},
			// /+ ... +/ comment, matched non-greedily (nesting is not
			// tracked by this regex).
			{`/\+.*?\+/`, CommentMultiline, nil},
			// https://dlang.org/spec/lex.html#keywords
			{`(asm|assert|body|break|case|cast|catch|continue|default|debug|delete|deprecated|do|else|finally|for|foreach|foreach_reverse|goto|if|in|invariant|is|macro|mixin|new|out|pragma|return|super|switch|this|throw|try|version|while|with)\b`, Keyword, nil},
			// Special tokens such as __FILE__ / __LINE__.
			{`__(FILE|FILE_FULL_PATH|MODULE|LINE|FUNCTION|PRETTY_FUNCTION|DATE|EOF|TIME|TIMESTAMP|VENDOR|VERSION)__\b`, NameBuiltin, nil},
			{`__(traits|vector|parameters)\b`, NameBuiltin, nil},
			// Return type(s) followed by a function name and '('.
			{`((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)((?:[^\W\d]|\$)[\w$]*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil},
			// https://dlang.org/spec/attribute.html#uda
			{`@[\w.]*`, NameDecorator, nil},
			{`(abstract|auto|alias|align|const|delegate|enum|export|final|function|inout|lazy|nothrow|override|package|private|protected|public|pure|static|synchronized|template|volatile|__gshared)\b`, KeywordDeclaration, nil},
			// https://dlang.org/spec/type.html#basic-data-types
			{`(void|bool|byte|ubyte|short|ushort|int|uint|long|ulong|cent|ucent|float|double|real|ifloat|idouble|ireal|cfloat|cdouble|creal|char|wchar|dchar|string|wstring|dstring)\b`, KeywordType, nil},
			{`(module)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")},
			{`(true|false|null)\b`, KeywordConstant, nil},
			{`(class|interface|struct|template|union)(\s+)`, ByGroups(KeywordDeclaration, Text), Push("class")},
			{`(import)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")},
			// https://dlang.org/spec/lex.html#string_literals
			// TODO support delimited strings
			// q"..."/r"..." and wysiwyg `...` strings with c/w/d suffix.
			{`[qr]?"(\\\\|\\"|[^"])*"[cwd]?`, LiteralString, nil},
			{"(`)([^`]*)(`)[cwd]?", LiteralString, nil},
			{`'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'`, LiteralStringChar, nil},
			{`(\.)((?:[^\W\d]|\$)[\w$]*)`, ByGroups(Operator, NameAttribute), nil},
			// Statement label at the start of a line.
			{`^\s*([^\W\d]|\$)[\w$]*:`, NameLabel, nil},
			// https://dlang.org/spec/lex.html#floatliteral
			{`([0-9][0-9_]*\.([0-9][0-9_]*)?|\.[0-9][0-9_]*)([eE][+\-]?[0-9][0-9_]*)?[fFL]?i?|[0-9][eE][+\-]?[0-9][0-9_]*[fFL]?|[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFL]|0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)[pP][+\-]?[0-9][0-9_]*[fFL]?`, LiteralNumberFloat, nil},
			// https://dlang.org/spec/lex.html#integerliteral
			{`0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?`, LiteralNumberHex, nil},
			{`0[bB][01][01_]*[lL]?`, LiteralNumberBin, nil},
			{`0[0-7_]+[lL]?`, LiteralNumberOct, nil},
			{`0|[1-9][0-9_]*[lL]?`, LiteralNumberInteger, nil},
			// Operators, including the q{ token-string opener.
			{`([~^*!%&\[\](){}<>|+=:;,./?-]|q{)`, Operator, nil},
			{`([^\W\d]|\$)[\w$]*`, Name, nil},
			{`\n`, Text, nil},
		},
		// Name immediately after class/interface/struct/template/union.
		"class": {
			{`([^\W\d]|\$)[\w$]*`, NameClass, Pop(1)},
		},
		// Dotted module path after module/import; trailing * for
		// wildcard imports.
		"import": {
			{`[\w.]+\*?`, NameNamespace, Pop(1)},
		},
	}
}
@@ -0,0 +1,95 @@ | |||
package d | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Dart lexer registration. Matches *.dart files; DotAll lets `.` in the
// rule regexes span newlines (used by the /* */ comment rule).
var Dart = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "Dart",
		Aliases: []string{"dart"},
		Filenames: []string{"*.dart"},
		MimeTypes: []string{"text/x-dart"},
		DotAll: true,
	},
	dartRules,
))
// dartRules returns the lexical grammar for Dart. String handling is
// split into dedicated states per quoting style so that $interpolation
// and escapes are recognised inside each. Rule order is significant.
func dartRules() Rules {
	return Rules{
		"root": {
			Include("string_literal"),
			// Shebang line, e.g. #!/usr/bin/env dart.
			{`#!(.*?)$`, CommentPreproc, nil},
			{`\b(import|export)\b`, Keyword, Push("import_decl")},
			{`\b(library|source|part of|part)\b`, Keyword, nil},
			{`[^\S\n]+`, Text, nil},
			{`//.*?\n`, CommentSingle, nil},
			{`/\*.*?\*/`, CommentMultiline, nil},
			{`\b(class)\b(\s+)`, ByGroups(KeywordDeclaration, Text), Push("class")},
			{`\b(assert|break|case|catch|continue|default|do|else|finally|for|if|in|is|new|return|super|switch|this|throw|try|while)\b`, Keyword, nil},
			{`\b(abstract|async|await|const|extends|factory|final|get|implements|native|operator|set|static|sync|typedef|var|with|yield)\b`, KeywordDeclaration, nil},
			{`\b(bool|double|dynamic|int|num|Object|String|void)\b`, KeywordType, nil},
			{`\b(false|null|true)\b`, KeywordConstant, nil},
			{`[~!%^&*+=|?:<>/-]|as\b`, Operator, nil},
			// Label (identifier followed by a colon) before plain names.
			{`[a-zA-Z_$]\w*:`, NameLabel, nil},
			{`[a-zA-Z_$]\w*`, Name, nil},
			{`[(){}\[\],.;]`, Punctuation, nil},
			{`0[xX][0-9a-fA-F]+`, LiteralNumberHex, nil},
			{`\d+(\.\d*)?([eE][+-]?\d+)?`, LiteralNumber, nil},
			{`\.\d+([eE][+-]?\d+)?`, LiteralNumber, nil},
			{`\n`, Text, nil},
		},
		// Class name immediately after the class keyword.
		"class": {
			{`[a-zA-Z_$]\w*`, NameClass, Pop(1)},
		},
		// import/export clause up to the terminating semicolon.
		"import_decl": {
			Include("string_literal"),
			{`\s+`, Text, nil},
			{`\b(as|show|hide)\b`, Keyword, nil},
			{`[a-zA-Z_$]\w*`, Name, nil},
			{`\,`, Punctuation, nil},
			{`\;`, Punctuation, Pop(1)},
		},
		// Dispatch to the right string state: raw (r-prefixed) strings are
		// consumed whole since they have no escapes or interpolation.
		"string_literal": {
			{`r"""([\w\W]*?)"""`, LiteralStringDouble, nil},
			{`r'''([\w\W]*?)'''`, LiteralStringSingle, nil},
			{`r"(.*?)"`, LiteralStringDouble, nil},
			{`r'(.*?)'`, LiteralStringSingle, nil},
			{`"""`, LiteralStringDouble, Push("string_double_multiline")},
			{`'''`, LiteralStringSingle, Push("string_single_multiline")},
			{`"`, LiteralStringDouble, Push("string_double")},
			{`'`, LiteralStringSingle, Push("string_single")},
		},
		// Escapes plus $name and ${expr} interpolation, shared by all
		// non-raw string states; ${expr} re-lexes its contents as Dart.
		"string_common": {
			{`\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|u\{[0-9A-Fa-f]*\}|[a-z'\"$\\])`, LiteralStringEscape, nil},
			{`(\$)([a-zA-Z_]\w*)`, ByGroups(LiteralStringInterpol, Name), nil},
			{`(\$\{)(.*?)(\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil},
		},
		"string_double": {
			{`"`, LiteralStringDouble, Pop(1)},
			{`[^"$\\\n]+`, LiteralStringDouble, nil},
			Include("string_common"),
			// Lone $ that did not start an interpolation.
			{`\$+`, LiteralStringDouble, nil},
		},
		"string_double_multiline": {
			{`"""`, LiteralStringDouble, Pop(1)},
			{`[^"$\\]+`, LiteralStringDouble, nil},
			Include("string_common"),
			{`(\$|\")+`, LiteralStringDouble, nil},
		},
		"string_single": {
			{`'`, LiteralStringSingle, Pop(1)},
			{`[^'$\\\n]+`, LiteralStringSingle, nil},
			Include("string_common"),
			{`\$+`, LiteralStringSingle, nil},
		},
		"string_single_multiline": {
			{`'''`, LiteralStringSingle, Pop(1)},
			{`[^\'$\\]+`, LiteralStringSingle, nil},
			Include("string_common"),
			{`(\$|\')+`, LiteralStringSingle, nil},
		},
	}
}
@@ -0,0 +1,33 @@ | |||
package d | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// Diff lexer registration for unified diffs and patch files (*.diff,
// *.patch). Classification is line-based; see diffRules.
var Diff = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "Diff",
		Aliases: []string{"diff", "udiff"},
		EnsureNL: true,
		Filenames: []string{"*.diff", "*.patch"},
		MimeTypes: []string{"text/x-diff", "text/x-patch"},
	},
	diffRules,
))
// diffRules implements diff highlighting: each line is classified purely
// by its leading character(s), so rule order decides ties.
func diffRules() Rules {
	return Rules{
		"root": {
			// Context line (leading space).
			{` .*\n`, Text, nil},
			// Added and removed lines.
			{`\+.*\n`, GenericInserted, nil},
			{`-.*\n`, GenericDeleted, nil},
			// "!" changed-line marker (context-diff style).
			{`!.*\n`, GenericStrong, nil},
			// Hunk header, e.g. @@ -1,2 +3,4 @@.
			{`@.*\n`, GenericSubheading, nil},
			// File headers ("diff ...", "Index: ...") and "=" separators.
			{`([Ii]ndex|diff).*\n`, GenericHeading, nil},
			{`=.*\n`, GenericHeading, nil},
			// Anything else is plain text.
			{`.*\n`, Text, nil},
		},
	}
}
@@ -0,0 +1,57 @@ | |||
package d | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
) | |||
// DjangoJinja lexer registration for Django/Jinja template markup. No
// filename patterns are claimed: template files are usually selected via
// the aliases or MIME types since the host language varies.
var DjangoJinja = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "Django/Jinja",
		Aliases: []string{"django", "jinja"},
		Filenames: []string{},
		MimeTypes: []string{"application/x-django-templating", "application/x-jinja"},
		DotAll: true,
	},
	djangoJinjaRules,
))
// djangoJinjaRules returns the grammar for Django/Jinja templates. Text
// outside {{ ... }}, {% ... %} and {# ... #} constructs is emitted as
// Other, so the lexer can sit on top of any host markup.
func djangoJinjaRules() Rules {
	return Rules{
		"root": {
			{`[^{]+`, Other, nil},
			// {{ expression }} — handled in the "var" state.
			{`\{\{`, CommentPreproc, Push("var")},
			// {# comment #} and {* comment *}.
			{`\{[*#].*?[*#]\}`, Comment, nil},
			// {% comment %} ... {% endcomment %} block, matched whole.
			{`(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)(\{%)(-?\s*)(endcomment)(\s*-?)(%\})`, ByGroups(CommentPreproc, Text, Keyword, Text, CommentPreproc, Comment, CommentPreproc, Text, Keyword, Text, CommentPreproc), nil},
			// {% raw %} ... {% endraw %}: contents kept as plain text.
			{`(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)(\{%)(-?\s*)(endraw)(\s*-?)(%\})`, ByGroups(CommentPreproc, Text, Keyword, Text, CommentPreproc, Text, CommentPreproc, Text, Keyword, Text, CommentPreproc), nil},
			// {% filter name %} and generic {% tag ... %} blocks; the
			// optional "-" is the whitespace-trim marker.
			{`(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_]\w*)`, ByGroups(CommentPreproc, Text, Keyword, Text, NameFunction), Push("block")},
			{`(\{%)(-?\s*)([a-zA-Z_]\w*)`, ByGroups(CommentPreproc, Text, Keyword), Push("block")},
			{`\{`, Other, nil},
		},
		// Shared rules for the insides of {{ }} and {% %}.
		"varnames": {
			// |filter applications and "is (not) test" expressions.
			{`(\|)(\s*)([a-zA-Z_]\w*)`, ByGroups(Operator, Text, NameFunction), nil},
			{`(is)(\s+)(not)?(\s+)?([a-zA-Z_]\w*)`, ByGroups(Keyword, Text, Keyword, Text, NameFunction), nil},
			{`(_|true|false|none|True|False|None)\b`, KeywordPseudo, nil},
			{`(in|as|reversed|recursive|not|and|or|is|if|else|import|with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b`, Keyword, nil},
			{`(loop|block|super|forloop)\b`, NameBuiltin, nil},
			{`[a-zA-Z_][\w-]*`, NameVariable, nil},
			{`\.\w+`, NameVariable, nil},
			{`:?"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
			{`:?'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
			{`([{}()\[\]+\-*/,:~]|[><=]=?)`, Operator, nil},
			{`[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil},
		},
		// Inside {{ ... }}; "-}}" trims surrounding whitespace.
		"var": {
			{`\s+`, Text, nil},
			{`(-?)(\}\})`, ByGroups(Text, CommentPreproc), Pop(1)},
			Include("varnames"),
		},
		// Inside {% ... %}; "-%}" trims surrounding whitespace.
		"block": {
			{`\s+`, Text, nil},
			{`(-?)(%\})`, ByGroups(Text, CommentPreproc), Pop(1)},
			Include("varnames"),
			{`.`, Punctuation, nil},
		},
	}
}
@@ -0,0 +1,35 @@ | |||
package d | |||
import ( | |||
. "github.com/alecthomas/chroma" // nolint | |||
"github.com/alecthomas/chroma/lexers/b" | |||
"github.com/alecthomas/chroma/lexers/internal" | |||
"github.com/alecthomas/chroma/lexers/j" | |||
) | |||
// Docker lexer registration for Dockerfiles. CaseInsensitive because
// Dockerfile instructions may be written in any case; instruction
// arguments are delegated to the Bash and JSON lexers in dockerRules.
var Docker = internal.Register(MustNewLazyLexer(
	&Config{
		Name: "Docker",
		Aliases: []string{"docker", "dockerfile"},
		Filenames: []string{"Dockerfile", "*.docker"},
		MimeTypes: []string{"text/x-dockerfile-config"},
		CaseInsensitive: true,
	},
	dockerRules,
))
// dockerRules highlights Dockerfiles. Instruction keywords are matched
// directly; their arguments are re-lexed with the Bash lexer (and the
// JSON lexer for exec-form arrays) via Using(...).
func dockerRules() Rules {
	return Rules{
		"root": {
			{`#.*`, Comment, nil},
			{`(ONBUILD)((?:\s*\\?\s*))`, ByGroups(Keyword, Using(b.Bash)), nil},
			// HEALTHCHECK with optional --flag=value options.
			{`(HEALTHCHECK)(((?:\s*\\?\s*)--\w+=\w+(?:\s*\\?\s*))*)`, ByGroups(Keyword, Using(b.Bash)), nil},
			// Exec (JSON array) form, e.g. CMD ["sh", "-c", ...].
			{`(VOLUME|ENTRYPOINT|CMD|SHELL)((?:\s*\\?\s*))(\[.*?\])`, ByGroups(Keyword, Using(b.Bash), Using(j.JSON)), nil},
			// key=value pairs, possibly spread over \-continued lines.
			{`(LABEL|ENV|ARG)((?:(?:\s*\\?\s*)\w+=\w+(?:\s*\\?\s*))*)`, ByGroups(Keyword, Using(b.Bash)), nil},
			// Instructions whose rest-of-line argument is a plain string.
			{`((?:FROM|MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)|VOLUME)\b(.*)`, ByGroups(Keyword, LiteralString), nil},
			{`((?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY))`, Keyword, nil},
			// Remaining shell-form text, including \-continued lines.
			{`(.*\\\n)*.+`, Using(b.Bash), nil},
		},
	}
}