diff --git a/cmd/run.go b/cmd/run.go
index f878d3ec..2327a465 100644
--- a/cmd/run.go
+++ b/cmd/run.go
@@ -26,6 +26,7 @@ import (
"github.com/wakatime/wakatime-cli/pkg/exitcode"
"github.com/wakatime/wakatime-cli/pkg/heartbeat"
"github.com/wakatime/wakatime-cli/pkg/ini"
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
"github.com/wakatime/wakatime-cli/pkg/log"
"github.com/wakatime/wakatime-cli/pkg/offline"
"github.com/wakatime/wakatime-cli/pkg/vipertools"
@@ -68,6 +69,11 @@ func Run(cmd *cobra.Command, v *viper.Viper) {
log.Fatalf("failed to setup logging: %s", err)
}
+ // register all custom lexers
+ if err := lexer.RegisterAll(); err != nil {
+ log.Fatalf("failed to register custom lexers: %s", err)
+ }
+
if v.GetBool("user-agent") {
log.Debugln("command: user-agent")
diff --git a/go.mod b/go.mod
index e802ce85..94b86d60 100644
--- a/go.mod
+++ b/go.mod
@@ -4,7 +4,7 @@ go 1.21
require (
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358
- github.com/alecthomas/chroma v0.10.0
+ github.com/alecthomas/chroma/v2 v2.8.0
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964
github.com/dlclark/regexp2 v1.10.0
github.com/gandarez/go-olson-timezone v0.1.0
@@ -28,6 +28,14 @@ require (
)
require (
+ github.com/alecthomas/colour v0.1.0 // indirect
+ github.com/alecthomas/repr v0.2.0 // indirect
+ github.com/mattn/go-isatty v0.0.14 // indirect
+ github.com/sergi/go-diff v1.2.0 // indirect
+)
+
+require (
+ github.com/alecthomas/assert v1.0.0
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/fsnotify/fsnotify v1.6.0 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
@@ -50,6 +58,6 @@ require (
gopkg.in/yaml.v3 v3.0.1 // indirect
)
-replace github.com/alecthomas/chroma => github.com/wakatime/chroma v0.11.3-wakatime1
+replace github.com/alecthomas/chroma/v2 => github.com/gandarez/chroma/v2 v2.8.0-wakatime.1
replace github.com/matishsiao/goInfo => github.com/wakatime/goInfo v0.1.0-wakatime.8
diff --git a/go.sum b/go.sum
index ccf9922a..f438c3ac 100644
--- a/go.sum
+++ b/go.sum
@@ -40,13 +40,14 @@ github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 h1:mFRzDkZVAjdal+
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
-github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U=
-github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
+github.com/alecthomas/assert v1.0.0 h1:3XmGh/PSuLzDbK3W2gUbRXwgW5lqPkuqvRgeQ30FI5o=
+github.com/alecthomas/assert v1.0.0/go.mod h1:va/d2JC+M7F6s+80kl/R3G7FUiW6JzUO+hPhLyJ36ZY=
+github.com/alecthomas/assert/v2 v2.2.1 h1:XivOgYcduV98QCahG8T5XTezV5bylXe+lBxLG2K2ink=
+github.com/alecthomas/assert/v2 v2.2.1/go.mod h1:pXcQ2Asjp247dahGEmsZ6ru0UVwnkhktn7S0bBDLxvQ=
github.com/alecthomas/colour v0.1.0 h1:nOE9rJm6dsZ66RGWYSFrXw461ZIt9A6+nHgL7FRrDUk=
github.com/alecthomas/colour v0.1.0/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0=
-github.com/alecthomas/kong v0.2.11/go.mod h1:kQOmtJgV+Lb4aj+I2LEn40cbtawdWJ9Y8QLq+lElKxE=
-github.com/alecthomas/repr v0.0.0-20201120212035-bb82daffcca2 h1:G5TeG64Ox4OWq2YwlsxS7nOedU8vbGgNRTRDAjGvDCk=
-github.com/alecthomas/repr v0.0.0-20201120212035-bb82daffcca2/go.mod h1:2kn6fqh/zIyPLmm3ugklbEi5hg5wS435eygvNfaDQL8=
+github.com/alecthomas/repr v0.2.0 h1:HAzS41CIzNW5syS8Mf9UwXhNH1J9aix/BvDRf1Ml2Yk=
+github.com/alecthomas/repr v0.2.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
@@ -62,7 +63,6 @@ github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/dlclark/regexp2 v1.10.0 h1:+/GIL799phkJqYW+3YbOd8LCcbHzT0Pbo8zl70MHsq0=
github.com/dlclark/regexp2 v1.10.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
@@ -75,6 +75,8 @@ github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0X
github.com/frankban/quicktest v1.14.4/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY=
github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw=
+github.com/gandarez/chroma/v2 v2.8.0-wakatime.1 h1:CHVpXUWoYho4HuH0Iu9sL7b5jG93oUrmWPw5Qb5fb2s=
+github.com/gandarez/chroma/v2 v2.8.0-wakatime.1/go.mod h1:yrkMI9807G1ROx13fhe1v6PN2DDeaR73L3d+1nmYQtw=
github.com/gandarez/go-olson-timezone v0.1.0 h1:cDRlHKQE0uC3mJNZyKoQIpAuvQtV8KXwIVj8bDEEyuo=
github.com/gandarez/go-olson-timezone v0.1.0/go.mod h1:+yV/cYNjgs2JqdGShznAD4R13r8lKMGR2XlWAJqa5Yo=
github.com/gandarez/go-realpath v1.0.0 h1:fhQBRDshH/MZNmDLWM9vbBameK2fxyLr+ctqkRwbHEU=
@@ -143,6 +145,8 @@ github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
+github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
+github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
@@ -212,9 +216,7 @@ github.com/masterzen/simplexml v0.0.0-20160608183007-4572e39b1ab9/go.mod h1:kCEb
github.com/masterzen/winrm v0.0.0-20161014151040-7a535cd943fc/go.mod h1:CfZSN7zwz5gJiFhZJz49Uzk7mEBHIceWmbFmYx7Hf7E=
github.com/masterzen/xmlpath v0.0.0-20140218185901-13f4951698ad/go.mod h1:A0zPC53iKKKcXYxr4ROjpQRQ5FgJXtelNdSmHHuq/tY=
github.com/mattn/go-colorable v0.0.6/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
-github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-isatty v0.0.0-20160806122752-66b8e73f3f5c/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
-github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y=
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
@@ -224,7 +226,6 @@ github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RR
github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d/go.mod h1:YUTz3bUH2ZwIWBy3CJBeOBEugqcmXREj14T+iG/4k4U=
github.com/pelletier/go-toml/v2 v2.0.9 h1:uH2qQXheeefCCkuBBSLi7jCiSmj3VRh2+Goq2N7Xxu0=
github.com/pelletier/go-toml/v2 v2.0.9/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
-github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg=
github.com/pkg/sftp v1.13.6 h1:JFZT4XbOU7l77xGSpOdW+pwIMqP044IyjXX6FGyEKFo=
@@ -236,8 +237,8 @@ github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR
github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8=
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
-github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
-github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
+github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ=
+github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/slongfield/pyfmt v0.0.0-20220222012616-ea85ff4c361f h1:Z2cODYsUxQPofhpYRMQVwWz4yUVpHF+vPi+eUdruUYI=
@@ -260,7 +261,6 @@ github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpE
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
-github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
@@ -268,8 +268,6 @@ github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcU
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/subosito/gotenv v1.4.2 h1:X1TuBLAMDFbaTAChgCBLu3DU3UPyELpnF2jjJ2cz/S8=
github.com/subosito/gotenv v1.4.2/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0=
-github.com/wakatime/chroma v0.11.3-wakatime1 h1:t0lXBTg0RwaFgIsqpfKfUbXBZTYT3d+qXrLVtFDP564=
-github.com/wakatime/chroma v0.11.3-wakatime1/go.mod h1:OSQG4slLkpoTtTCq3MKBL8aAWm7MpXpc+EiNmYMZ2pE=
github.com/wakatime/goInfo v0.1.0-wakatime.8 h1:MgyeRnCkynEmUxLKXnYUAP5Dd+vhKxhqg6Nx1PdAZy4=
github.com/wakatime/goInfo v0.1.0-wakatime.8/go.mod h1:aEt7p9Rvh67BYApmZwNDPpgircTO2kgdmDUoF/1QmwA=
github.com/yookoala/realpath v1.0.0 h1:7OA9pj4FZd+oZDsyvXWQvjn5oBdcHRTV44PpdMSuImQ=
@@ -405,7 +403,6 @@ golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -428,6 +425,7 @@ golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
diff --git a/pkg/deps/c.go b/pkg/deps/c.go
index fd89558a..2a5f2179 100644
--- a/pkg/deps/c.go
+++ b/pkg/deps/c.go
@@ -7,10 +7,11 @@ import (
"regexp"
"strings"
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
"github.com/wakatime/wakatime-cli/pkg/log"
- "github.com/alecthomas/chroma"
- "github.com/alecthomas/chroma/lexers/c"
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
)
var cExcludeRegex = regexp.MustCompile(`(?i)^(stdio\.h|stdlib\.h|string\.h|time\.h)$`)
@@ -53,7 +54,12 @@ func (p *ParserC) Parse(filepath string) ([]string, error) {
return nil, fmt.Errorf("failed to read from reader: %s", err)
}
- iter, err := c.C.Tokenise(nil, string(data))
+ l := lexers.Get(heartbeat.LanguageC.String())
+ if l == nil {
+ return nil, fmt.Errorf("failed to get lexer for %s", heartbeat.LanguageC.String())
+ }
+
+ iter, err := l.Tokenise(nil, string(data))
if err != nil {
return nil, fmt.Errorf("failed to tokenize file content: %s", err)
}
diff --git a/pkg/deps/c_test.go b/pkg/deps/c_test.go
index 910a2b33..f7f1e2ff 100644
--- a/pkg/deps/c_test.go
+++ b/pkg/deps/c_test.go
@@ -4,22 +4,17 @@ import (
"testing"
"github.com/wakatime/wakatime-cli/pkg/deps"
- "github.com/wakatime/wakatime-cli/pkg/heartbeat"
- "github.com/alecthomas/chroma"
- "github.com/alecthomas/chroma/lexers"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestParserC_Parse(t *testing.T) {
tests := map[string]struct {
- Lexer chroma.Lexer
Filepath string
Expected []string
}{
"c": {
- Lexer: lexers.Get(heartbeat.LanguageC.StringChroma()),
Filepath: "testdata/c.c",
Expected: []string{
"math",
@@ -27,7 +22,6 @@ func TestParserC_Parse(t *testing.T) {
},
},
"cpp": {
- Lexer: lexers.Get(heartbeat.LanguageCPP.StringChroma()),
Filepath: "testdata/cpp.cpp",
Expected: []string{
"iostream",
diff --git a/pkg/deps/csharp.go b/pkg/deps/csharp.go
index 1454b645..2ef40897 100644
--- a/pkg/deps/csharp.go
+++ b/pkg/deps/csharp.go
@@ -7,10 +7,11 @@ import (
"regexp"
"strings"
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
"github.com/wakatime/wakatime-cli/pkg/log"
- "github.com/alecthomas/chroma"
- "github.com/alecthomas/chroma/lexers/c"
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
)
var csharpExcludeRegex = regexp.MustCompile(`(?i)^(system|microsoft)$`)
@@ -54,7 +55,12 @@ func (p *ParserCSharp) Parse(filepath string) ([]string, error) {
return nil, fmt.Errorf("failed to read from reader: %s", err)
}
- iter, err := c.CSharp.Tokenise(nil, string(data))
+ l := lexers.Get(heartbeat.LanguageCSharp.String())
+ if l == nil {
+ return nil, fmt.Errorf("failed to get lexer for %s", heartbeat.LanguageCSharp.String())
+ }
+
+ iter, err := l.Tokenise(nil, string(data))
if err != nil {
return nil, fmt.Errorf("failed to tokenize file content: %s", err)
}
diff --git a/pkg/deps/deps.go b/pkg/deps/deps.go
index 5266afa1..c3b8e4b6 100644
--- a/pkg/deps/deps.go
+++ b/pkg/deps/deps.go
@@ -100,7 +100,7 @@ func Detect(filepath string, language heartbeat.Language) ([]string, error) {
parser = &ParserHTML{}
case heartbeat.LanguageJava:
parser = &ParserJava{}
- case heartbeat.LanguageJavaScript, heartbeat.LanguageTypeScript:
+ case heartbeat.LanguageJavaScript, heartbeat.LanguageTypeScript, heartbeat.LanguageJSX, heartbeat.LanguageTSX:
parser = &ParserJavaScript{}
case heartbeat.LanguageJSON:
parser = &ParserJSON{}
diff --git a/pkg/deps/elm.go b/pkg/deps/elm.go
index 5d1796d7..11c088d8 100644
--- a/pkg/deps/elm.go
+++ b/pkg/deps/elm.go
@@ -6,10 +6,11 @@ import (
"os"
"strings"
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
"github.com/wakatime/wakatime-cli/pkg/log"
- "github.com/alecthomas/chroma"
- "github.com/alecthomas/chroma/lexers/e"
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
)
// StateElm is a token parsing state.
@@ -50,7 +51,12 @@ func (p *ParserElm) Parse(filepath string) ([]string, error) {
return nil, fmt.Errorf("failed to read from reader: %s", err)
}
- iter, err := e.Elm.Tokenise(nil, string(data))
+ l := lexers.Get(heartbeat.LanguageElm.String())
+ if l == nil {
+ return nil, fmt.Errorf("failed to get lexer for %s", heartbeat.LanguageElm.String())
+ }
+
+ iter, err := l.Tokenise(nil, string(data))
if err != nil {
return nil, fmt.Errorf("failed to tokenize file content: %s", err)
}
diff --git a/pkg/deps/elm_test.go b/pkg/deps/elm_test.go
index 6012fcfd..e55992be 100644
--- a/pkg/deps/elm_test.go
+++ b/pkg/deps/elm_test.go
@@ -3,9 +3,10 @@ package deps_test
import (
"testing"
+ "github.com/wakatime/wakatime-cli/pkg/deps"
+
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
- "github.com/wakatime/wakatime-cli/pkg/deps"
)
func TestParserElm_Parse(t *testing.T) {
diff --git a/pkg/deps/golang.go b/pkg/deps/golang.go
index 988a403e..b4c11fd1 100644
--- a/pkg/deps/golang.go
+++ b/pkg/deps/golang.go
@@ -9,8 +9,8 @@ import (
"github.com/wakatime/wakatime-cli/pkg/log"
- "github.com/alecthomas/chroma"
- "github.com/alecthomas/chroma/lexers/g"
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
)
var goExcludeRegex = regexp.MustCompile(`^"fmt"$`)
@@ -54,7 +54,7 @@ func (p *ParserGo) Parse(filepath string) ([]string, error) {
return nil, fmt.Errorf("failed to read from reader: %s", err)
}
- iter, err := g.Go.Tokenise(nil, string(data))
+ iter, err := lexers.Go.Tokenise(nil, string(data))
if err != nil {
return nil, fmt.Errorf("failed to tokenize file content: %s", err)
}
diff --git a/pkg/deps/haskell.go b/pkg/deps/haskell.go
index 779bbeac..d9128861 100644
--- a/pkg/deps/haskell.go
+++ b/pkg/deps/haskell.go
@@ -6,10 +6,11 @@ import (
"os"
"strings"
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
"github.com/wakatime/wakatime-cli/pkg/log"
- "github.com/alecthomas/chroma"
- "github.com/alecthomas/chroma/lexers/h"
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
)
// StateHaskell is a token parsing state.
@@ -50,7 +51,12 @@ func (p *ParserHaskell) Parse(filepath string) ([]string, error) {
return nil, fmt.Errorf("failed to read from reader: %s", err)
}
- iter, err := h.Haskell.Tokenise(nil, string(data))
+ l := lexers.Get(heartbeat.LanguageHaskell.String())
+ if l == nil {
+ return nil, fmt.Errorf("failed to get lexer for %s", heartbeat.LanguageHaskell.String())
+ }
+
+ iter, err := l.Tokenise(nil, string(data))
if err != nil {
return nil, fmt.Errorf("failed to tokenize file content: %s", err)
}
diff --git a/pkg/deps/haxe.go b/pkg/deps/haxe.go
index 1b62af3a..c260266a 100644
--- a/pkg/deps/haxe.go
+++ b/pkg/deps/haxe.go
@@ -9,8 +9,8 @@ import (
"github.com/wakatime/wakatime-cli/pkg/log"
- "github.com/alecthomas/chroma"
- "github.com/alecthomas/chroma/lexers/h"
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
)
var haxeExcludeRegex = regexp.MustCompile(`(?i)^haxe$`)
@@ -53,7 +53,7 @@ func (p *ParserHaxe) Parse(filepath string) ([]string, error) {
return nil, fmt.Errorf("failed to read from reader: %s", err)
}
- iter, err := h.Haxe.Tokenise(nil, string(data))
+ iter, err := lexers.Haxe.Tokenise(nil, string(data))
if err != nil {
return nil, fmt.Errorf("failed to tokenize file content: %s", err)
}
diff --git a/pkg/deps/html.go b/pkg/deps/html.go
index ea81f518..e024185e 100644
--- a/pkg/deps/html.go
+++ b/pkg/deps/html.go
@@ -9,8 +9,8 @@ import (
"github.com/wakatime/wakatime-cli/pkg/log"
- "github.com/alecthomas/chroma"
- "github.com/alecthomas/chroma/lexers/h"
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
)
var htmlDjangoPlaceholderRegex = regexp.MustCompile(`(?i)\{\{[^\}]+\}\}[/\\]?`)
@@ -55,7 +55,7 @@ func (p *ParserHTML) Parse(filepath string) ([]string, error) {
return nil, fmt.Errorf("failed to read from reader: %s", err)
}
- iter, err := h.HTML.Tokenise(nil, string(data))
+ iter, err := lexers.HTML.Tokenise(nil, string(data))
if err != nil {
return nil, fmt.Errorf("failed to tokenize file content: %s", err)
}
diff --git a/pkg/deps/html_test.go b/pkg/deps/html_test.go
index f91b2df9..9eddf8c5 100644
--- a/pkg/deps/html_test.go
+++ b/pkg/deps/html_test.go
@@ -10,37 +10,37 @@ import (
)
func TestParserHTML_Parse(t *testing.T) {
- parser := deps.ParserHTML{}
-
- dependencies, err := parser.Parse("testdata/html.html")
- require.NoError(t, err)
-
- assert.Equal(t, []string{
- `"wakatime.js"`,
- `"../scripts/wakatime.js"`,
- `"https://www.wakatime.com/scripts/my.js"`,
- "\"this is a\n multiline value\"",
- }, dependencies)
-}
-
-func TestParserHTML_Parse_Django(t *testing.T) {
- parser := deps.ParserHTML{}
-
- dependencies, err := parser.Parse("testdata/html_django.html")
- require.NoError(t, err)
-
- assert.Equal(t, []string{
- `"libs/json2.js"`,
- }, dependencies)
-}
-
-func TestParserHTML_Parse_WithPHP(t *testing.T) {
- parser := deps.ParserHTML{}
-
- dependencies, err := parser.Parse("testdata/html_with_php.html")
- require.NoError(t, err)
-
- assert.Equal(t, []string{
- `"https://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/js/bootstrap.min.js"`,
- }, dependencies)
+ tests := map[string]struct {
+ Filepath string
+ Expected []string
+ }{
+ "html": {
+ Filepath: "testdata/html.html",
+ Expected: []string{
+ `"wakatime.js"`,
+ `"../scripts/wakatime.js"`,
+ `"https://www.wakatime.com/scripts/my.js"`,
+ "\"this is a\n multiline value\"",
+ },
+ },
+ "html django": {
+ Filepath: "testdata/html_django.html",
+ Expected: []string{`"libs/json2.js"`},
+ },
+ "html with PHP": {
+ Filepath: "testdata/html_with_php.html",
+ Expected: []string{`"https://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/js/bootstrap.min.js"`},
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ parser := deps.ParserHTML{}
+
+ dependencies, err := parser.Parse(test.Filepath)
+ require.NoError(t, err)
+
+ assert.Equal(t, test.Expected, dependencies)
+ })
+ }
}
diff --git a/pkg/deps/java.go b/pkg/deps/java.go
index 089b6cd5..36bf7226 100644
--- a/pkg/deps/java.go
+++ b/pkg/deps/java.go
@@ -7,10 +7,11 @@ import (
"regexp"
"strings"
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
"github.com/wakatime/wakatime-cli/pkg/log"
- "github.com/alecthomas/chroma"
- "github.com/alecthomas/chroma/lexers/j"
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
)
var javaExcludeRegex = regexp.MustCompile(`(?i)^(java\..*|javax\..*)`)
@@ -56,7 +57,12 @@ func (p *ParserJava) Parse(filepath string) ([]string, error) {
return nil, fmt.Errorf("failed to read from reader: %s", err)
}
- iter, err := j.Java.Tokenise(nil, string(data))
+ l := lexers.Get(heartbeat.LanguageJava.String())
+ if l == nil {
+ return nil, fmt.Errorf("failed to get lexer for %s", heartbeat.LanguageJava.String())
+ }
+
+ iter, err := l.Tokenise(nil, string(data))
if err != nil {
return nil, fmt.Errorf("failed to tokenize file content: %s", err)
}
diff --git a/pkg/deps/javascript.go b/pkg/deps/javascript.go
index c877e220..3cd1c707 100644
--- a/pkg/deps/javascript.go
+++ b/pkg/deps/javascript.go
@@ -7,10 +7,11 @@ import (
"regexp"
"strings"
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
"github.com/wakatime/wakatime-cli/pkg/log"
- "github.com/alecthomas/chroma"
- "github.com/alecthomas/chroma/lexers/j"
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
)
var javaScriptExtensionRegex = regexp.MustCompile(`\.\w{1,4}$`)
@@ -53,7 +54,12 @@ func (p *ParserJavaScript) Parse(filepath string) ([]string, error) {
return nil, fmt.Errorf("failed to read from reader: %s", err)
}
- iter, err := j.Javascript.Tokenise(nil, string(data))
+ l := lexers.Get(heartbeat.LanguageJavaScript.String())
+ if l == nil {
+ return nil, fmt.Errorf("failed to get lexer for %s", heartbeat.LanguageJavaScript.String())
+ }
+
+ iter, err := l.Tokenise(nil, string(data))
if err != nil {
return nil, fmt.Errorf("failed to tokenize file content: %s", err)
}
@@ -92,7 +98,7 @@ func (p *ParserJavaScript) processToken(token chroma.Token) {
switch token.Type {
case chroma.KeywordReserved:
p.processKeywordReserved(token.Value)
- case chroma.LiteralStringSingle:
+ case chroma.LiteralStringSingle, chroma.LiteralStringDouble:
p.processLiteralStringSingle(token.Value)
case chroma.Punctuation:
p.processPunctuation(token.Value)
diff --git a/pkg/deps/javascript_test.go b/pkg/deps/javascript_test.go
index e45fd19d..2838ca59 100644
--- a/pkg/deps/javascript_test.go
+++ b/pkg/deps/javascript_test.go
@@ -4,22 +4,17 @@ import (
"testing"
"github.com/wakatime/wakatime-cli/pkg/deps"
- "github.com/wakatime/wakatime-cli/pkg/heartbeat"
- "github.com/alecthomas/chroma"
- "github.com/alecthomas/chroma/lexers"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestParserJavaScript_Parse(t *testing.T) {
tests := map[string]struct {
- Lexer chroma.Lexer
Filepath string
Expected []string
}{
"js": {
- Lexer: lexers.Get(heartbeat.LanguageJavaScript.StringChroma()),
Filepath: "testdata/es6.js",
Expected: []string{
"bravo",
@@ -36,7 +31,6 @@ func TestParserJavaScript_Parse(t *testing.T) {
},
},
"typescript": {
- Lexer: lexers.Get(heartbeat.LanguageTypeScript.StringChroma()),
Filepath: "testdata/typescript.ts",
Expected: []string{
"bravo",
@@ -52,6 +46,23 @@ func TestParserJavaScript_Parse(t *testing.T) {
"whiskey",
},
},
+ "react js": {
+ Filepath: "testdata/react.jsx",
+ Expected: []string{
+ "react",
+ "react-dom",
+ },
+ },
+ "react typescript": {
+ Filepath: "testdata/react.tsx",
+ Expected: []string{
+ "head",
+ "react",
+ "contants",
+ "Footer",
+ "Nav",
+ },
+ },
}
for name, test := range tests {
diff --git a/pkg/deps/json.go b/pkg/deps/json.go
index 9f344644..d5d6a9fe 100644
--- a/pkg/deps/json.go
+++ b/pkg/deps/json.go
@@ -7,10 +7,11 @@ import (
"path/filepath"
"strings"
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
"github.com/wakatime/wakatime-cli/pkg/log"
- "github.com/alecthomas/chroma"
- "github.com/alecthomas/chroma/lexers/j"
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
)
// nolint:gochecknoglobals
@@ -65,7 +66,12 @@ func (p *ParserJSON) Parse(filepath string) ([]string, error) {
return nil, fmt.Errorf("failed to read from reader: %s", err)
}
- iter, err := j.JSON.Tokenise(nil, string(data))
+ l := lexers.Get(heartbeat.LanguageJSON.String())
+ if l == nil {
+ return nil, fmt.Errorf("failed to get lexer for %s", heartbeat.LanguageJSON.String())
+ }
+
+ iter, err := l.Tokenise(nil, string(data))
if err != nil {
return nil, fmt.Errorf("failed to tokenize file content: %s", err)
}
diff --git a/pkg/deps/kotlin.go b/pkg/deps/kotlin.go
index ba1e2a38..401b0f55 100644
--- a/pkg/deps/kotlin.go
+++ b/pkg/deps/kotlin.go
@@ -7,10 +7,11 @@ import (
"regexp"
"strings"
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
"github.com/wakatime/wakatime-cli/pkg/log"
- "github.com/alecthomas/chroma"
- "github.com/alecthomas/chroma/lexers/k"
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
)
var kotlinExcludeRegex = regexp.MustCompile(`(?i)^java\.`)
@@ -53,7 +54,12 @@ func (p *ParserKotlin) Parse(filepath string) ([]string, error) {
return nil, fmt.Errorf("failed to read from reader: %s", err)
}
- iter, err := k.Kotlin.Tokenise(nil, string(data))
+ l := lexers.Get(heartbeat.LanguageKotlin.String())
+ if l == nil {
+ return nil, fmt.Errorf("failed to get lexer for %s", heartbeat.LanguageKotlin.String())
+ }
+
+ iter, err := l.Tokenise(nil, string(data))
if err != nil {
return nil, fmt.Errorf("failed to tokenize file content: %s", err)
}
diff --git a/pkg/deps/objectivec.go b/pkg/deps/objectivec.go
index 0fcd3187..b44ccc94 100644
--- a/pkg/deps/objectivec.go
+++ b/pkg/deps/objectivec.go
@@ -6,10 +6,11 @@ import (
"os"
"strings"
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
"github.com/wakatime/wakatime-cli/pkg/log"
- "github.com/alecthomas/chroma"
- "github.com/alecthomas/chroma/lexers/o"
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
)
// StateObjectiveC is a token parsing state.
@@ -50,7 +51,12 @@ func (p *ParserObjectiveC) Parse(filepath string) ([]string, error) {
return nil, fmt.Errorf("failed to read from reader: %s", err)
}
- iter, err := o.ObjectiveC.Tokenise(nil, string(data))
+ l := lexers.Get(heartbeat.LanguageObjectiveC.String())
+ if l == nil {
+ return nil, fmt.Errorf("failed to get lexer for %s", heartbeat.LanguageObjectiveC.String())
+ }
+
+ iter, err := l.Tokenise(nil, string(data))
if err != nil {
return nil, fmt.Errorf("failed to tokenize file content: %s", err)
}
diff --git a/pkg/deps/php.go b/pkg/deps/php.go
index a7d66d02..d991ba8d 100644
--- a/pkg/deps/php.go
+++ b/pkg/deps/php.go
@@ -7,10 +7,11 @@ import (
"regexp"
"strings"
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
"github.com/wakatime/wakatime-cli/pkg/log"
- "github.com/alecthomas/chroma"
- "github.com/alecthomas/chroma/lexers/circular"
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
)
var phpExcludeRegex = regexp.MustCompile(`(?i)(^app|app\.php)$`)
@@ -59,7 +60,12 @@ func (p *ParserPHP) Parse(filepath string) ([]string, error) {
return nil, fmt.Errorf("failed to read from reader: %s", err)
}
- iter, err := circular.PHP.Tokenise(nil, string(data))
+ l := lexers.Get(heartbeat.LanguagePHP.String())
+ if l == nil {
+ return nil, fmt.Errorf("failed to get lexer for %s", heartbeat.LanguagePHP.String())
+ }
+
+ iter, err := l.Tokenise(nil, string(data))
if err != nil {
return nil, fmt.Errorf("failed to tokenize file content: %s", err)
}
diff --git a/pkg/deps/python.go b/pkg/deps/python.go
index b649c84b..fdf0740c 100644
--- a/pkg/deps/python.go
+++ b/pkg/deps/python.go
@@ -7,10 +7,11 @@ import (
"regexp"
"strings"
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
"github.com/wakatime/wakatime-cli/pkg/log"
- "github.com/alecthomas/chroma"
- lp "github.com/alecthomas/chroma/lexers/p"
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
)
var pythonExcludeRegex = regexp.MustCompile(`(?i)^(os|sys|__[a-z]+__)$`)
@@ -56,7 +57,12 @@ func (p *ParserPython) Parse(filepath string) ([]string, error) {
return nil, fmt.Errorf("failed to read from reader: %s", err)
}
- iter, err := lp.Python.Tokenise(nil, string(data))
+ l := lexers.Get(heartbeat.LanguagePython.String())
+ if l == nil {
+ return nil, fmt.Errorf("failed to get lexer for %s", heartbeat.LanguagePython.String())
+ }
+
+ iter, err := l.Tokenise(nil, string(data))
if err != nil {
return nil, fmt.Errorf("failed to tokenize file content: %s", err)
}
diff --git a/pkg/deps/rust.go b/pkg/deps/rust.go
index 3d49978e..34f31768 100644
--- a/pkg/deps/rust.go
+++ b/pkg/deps/rust.go
@@ -6,10 +6,11 @@ import (
"os"
"strings"
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
"github.com/wakatime/wakatime-cli/pkg/log"
- "github.com/alecthomas/chroma"
- "github.com/alecthomas/chroma/lexers/r"
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
)
// StateRust is a token parsing state.
@@ -52,7 +53,12 @@ func (p *ParserRust) Parse(filepath string) ([]string, error) {
return nil, fmt.Errorf("failed to read from reader: %s", err)
}
- iter, err := r.Rust.Tokenise(nil, string(data))
+ l := lexers.Get(heartbeat.LanguageRust.String())
+ if l == nil {
+ return nil, fmt.Errorf("failed to get lexer for %s", heartbeat.LanguageRust.String())
+ }
+
+ iter, err := l.Tokenise(nil, string(data))
if err != nil {
return nil, fmt.Errorf("failed to tokenize file content: %s", err)
}
diff --git a/pkg/deps/scala.go b/pkg/deps/scala.go
index 1fb8b329..b6074529 100644
--- a/pkg/deps/scala.go
+++ b/pkg/deps/scala.go
@@ -6,10 +6,11 @@ import (
"os"
"strings"
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
"github.com/wakatime/wakatime-cli/pkg/log"
- "github.com/alecthomas/chroma"
- "github.com/alecthomas/chroma/lexers/s"
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
)
// StateScala is a token parsing state.
@@ -50,7 +51,12 @@ func (p *ParserScala) Parse(filepath string) ([]string, error) {
return nil, fmt.Errorf("failed to read from reader: %s", err)
}
- iter, err := s.Scala.Tokenise(nil, string(data))
+ l := lexers.Get(heartbeat.LanguageScala.String())
+ if l == nil {
+ return nil, fmt.Errorf("failed to get lexer for %s", heartbeat.LanguageScala.String())
+ }
+
+ iter, err := l.Tokenise(nil, string(data))
if err != nil {
return nil, fmt.Errorf("failed to tokenize file content: %s", err)
}
diff --git a/pkg/deps/swift.go b/pkg/deps/swift.go
index 2ff7c9dc..d36e12ce 100644
--- a/pkg/deps/swift.go
+++ b/pkg/deps/swift.go
@@ -7,10 +7,11 @@ import (
"regexp"
"strings"
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
"github.com/wakatime/wakatime-cli/pkg/log"
- "github.com/alecthomas/chroma"
- "github.com/alecthomas/chroma/lexers/s"
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
)
var swiftExcludeRegex = regexp.MustCompile(`(?i)^foundation$`)
@@ -53,7 +54,12 @@ func (p *ParserSwift) Parse(filepath string) ([]string, error) {
return nil, fmt.Errorf("failed to read from reader: %s", err)
}
- iter, err := s.Swift.Tokenise(nil, string(data))
+ l := lexers.Get(heartbeat.LanguageSwift.String())
+ if l == nil {
+ return nil, fmt.Errorf("failed to get lexer for %s", heartbeat.LanguageSwift.String())
+ }
+
+ iter, err := l.Tokenise(nil, string(data))
if err != nil {
return nil, fmt.Errorf("failed to tokenize file content: %s", err)
}
diff --git a/pkg/deps/testdata/react.jsx b/pkg/deps/testdata/react.jsx
new file mode 100644
index 00000000..3aa5f0e7
--- /dev/null
+++ b/pkg/deps/testdata/react.jsx
@@ -0,0 +1,12 @@
+import React from "react";
+import ReactDOM from 'react-dom';
+
+const App = () => {
+  return (
+    <div>
+      <p>This is first JSX Element!</p>
+      <p>This is another JSX Element</p>
+    </div>
+  );
+};
+
+const rootElement = document.getElementById("root");
+ReactDOM.render(<App />, rootElement);
diff --git a/pkg/deps/testdata/react.tsx b/pkg/deps/testdata/react.tsx
new file mode 100644
index 00000000..06d39a95
--- /dev/null
+++ b/pkg/deps/testdata/react.tsx
@@ -0,0 +1,22 @@
+import Head from 'next/head';
+import { type ReactNode } from 'react';
+import { BASE_URL } from '~/utils/contants';
+import Footer from './Footer';
+import Nav from './Nav';
+
+export default function Layout({ children }: { children: ReactNode }) {
+  return (
+    <>
+      <Head>
+        <title>wakatime.com</title>
+      </Head>
+      <Nav />
+      <main>{children}</main>
+      <Footer />
+    </>
+  );
+}
diff --git a/pkg/deps/unknown.go b/pkg/deps/unknown.go
index 1cdf8d5d..3aef7cb9 100644
--- a/pkg/deps/unknown.go
+++ b/pkg/deps/unknown.go
@@ -5,7 +5,7 @@ import (
"strings"
)
-// nolint: gochecknoglobals
+// nolint:gochecknoglobals
var filesUnknown = map[string]struct {
exact bool
dependency string
diff --git a/pkg/deps/vbnet.go b/pkg/deps/vbnet.go
index c7aae7a4..6c251f41 100644
--- a/pkg/deps/vbnet.go
+++ b/pkg/deps/vbnet.go
@@ -7,10 +7,11 @@ import (
"regexp"
"strings"
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
"github.com/wakatime/wakatime-cli/pkg/log"
- "github.com/alecthomas/chroma"
- "github.com/alecthomas/chroma/lexers/v"
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
)
var vbnetExcludeRegex = regexp.MustCompile(`(?i)^(system|microsoft)$`)
@@ -54,7 +55,12 @@ func (p *ParserVbNet) Parse(filepath string) ([]string, error) {
return nil, fmt.Errorf("failed to read from reader: %s", err)
}
- iter, err := v.VBNet.Tokenise(nil, string(data))
+ l := lexers.Get(heartbeat.LanguageVBNet.String())
+ if l == nil {
+ return nil, fmt.Errorf("failed to get lexer for %s", heartbeat.LanguageVBNet.String())
+ }
+
+ iter, err := l.Tokenise(nil, string(data))
if err != nil {
return nil, fmt.Errorf("failed to tokenize file content: %s", err)
}
diff --git a/pkg/doctype/doctype.go b/pkg/doctype/doctype.go
new file mode 100644
index 00000000..359376c3
--- /dev/null
+++ b/pkg/doctype/doctype.go
@@ -0,0 +1,32 @@
+package doctype
+
+import (
+ "fmt"
+ "regexp"
+ "strings"
+)
+
+// nolint:revive
+var doctypeLookupRe = regexp.MustCompile(`(?ms)(<\?.*?\?>)?\s*<!DOCTYPE\s+([^>]*)>`)
+
+// MatchString check if the doctype matches a regular expression (if present).
+func MatchString(text string, pattern string) (bool, error) {
+ // Note that this method only checks the first part of a DOCTYPE.
+ // eg: 'html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"'
+ m := doctypeLookupRe.FindStringSubmatch(text)
+
+ if len(m) == 0 {
+ return false, nil
+ }
+
+ if len(pattern) == 0 {
+ return false, nil
+ }
+
+ doctypeRe, err := regexp.Compile(fmt.Sprintf("(?i)%s", pattern))
+ if err != nil {
+ return false, fmt.Errorf("failed to compile doctype regex: %s", err)
+ }
+
+ return doctypeRe.MatchString(strings.TrimSpace(m[2])), nil
+}
diff --git a/pkg/doctype/doctype_test.go b/pkg/doctype/doctype_test.go
new file mode 100644
index 00000000..3a55d0e2
--- /dev/null
+++ b/pkg/doctype/doctype_test.go
@@ -0,0 +1,43 @@
+package doctype_test
+
+import (
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/doctype"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestDoctype_MatchString(t *testing.T) {
+ tests := map[string]struct {
+ Text string
+ Pattern string
+ Expected bool
+ }{
+ "simple html match": {
+			Text:     "<!DOCTYPE html>",
+ Pattern: `html.*`,
+ Expected: true,
+ },
+ "full html match": {
+			Text:     "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">",
+ Pattern: `html`,
+ Expected: true,
+ },
+ "missing exclamation mark": {
+			Text:     "<DOCTYPE html>",
+ Pattern: `html.*`,
+ Expected: false,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ res, err := doctype.MatchString(test.Text, test.Pattern)
+ require.NoError(t, err)
+
+ assert.Equal(t, test.Expected, res)
+ })
+ }
+}
diff --git a/pkg/heartbeat/language.go b/pkg/heartbeat/language.go
index 13416d5e..05c38844 100644
--- a/pkg/heartbeat/language.go
+++ b/pkg/heartbeat/language.go
@@ -71,6 +71,8 @@ const (
LanguageApolloGuidanceComputer
// LanguageAppleScript represents the AppleScript programming language.
LanguageAppleScript
+ // LanguageArangoDBQueryLanguage represents the ArangoDB Query Language programming language.
+ LanguageArangoDBQueryLanguage
// LanguageArc represents the Arc programming language.
LanguageArc
// LanguageArduino represents the Arduino programming language.
@@ -117,7 +119,7 @@ const (
LanguageBARE
// LanguageBash represents the Bash programming language.
LanguageBash
- // LanguageBashSession represents the BashSession programming language.
+ // LanguageBashSession represents the Bash Session programming language.
LanguageBashSession
// LanguageBasic represents the Basic programming language.
LanguageBasic
@@ -161,6 +163,8 @@ const (
LanguageBoo
// LanguageBoogie represents the Boogie programming language.
LanguageBoogie
+ // LanguageBQN represents the BQN programming language.
+ LanguageBQN
// LanguageBrainfuck represents the Brainfuck programming language.
LanguageBrainfuck
// LanguageBrightScript represents the BrightScript programming language.
@@ -687,6 +691,8 @@ const (
LanguageIsabelle
// LanguageIsabelleRoot represents the IsabelleRoot programming language.
LanguageIsabelleRoot
+ // LanguageISCdhcpd represents the ISC dhcpd programming language.
+ LanguageISCdhcpd
// LanguageJ represents the J programming language.
LanguageJ
// LanguageJAGS represents the JAGS programming language.
@@ -697,7 +703,7 @@ const (
LanguageJasmin
// LanguageJava represents the Java programming language.
LanguageJava
- // LanguageJavaProperties represents the JavaProperties programming language.
+ // LanguageJavaProperties represents the Java Properties programming language.
LanguageJavaProperties
// LanguageJavaScript represents the JavaScript programming language.
LanguageJavaScript
@@ -923,6 +929,8 @@ const (
LanguageNASM
// LanguageNASMObjdump represents the NASMObjdump programming language.
LanguageNASMObjdump
+ // LanguageNatural represents the Natural programming language.
+ LanguageNatural
// LanguageNCL represents the NCL programming language.
LanguageNCL
// LanguageNemerle represents the Nemerle programming language.
@@ -1039,10 +1047,10 @@ const (
LanguageProlog
// LanguagePromQL represents the PromQL programming language.
LanguagePromQL
- // LanguagePropertiesJava represents the JavaProperties programming language.
- LanguagePropertiesJava
// LanguageProtocolBuffer represents the ProtocolBuffer programming language.
LanguageProtocolBuffer
+ // LanguagePSL represents the Property Specification Language programming language.
+ LanguagePSL
// LanguagePsyShPHP represents the PHPPsySH programming language.
LanguagePsyShPHP
// LanguagePug represents the Pug programming language.
@@ -1283,6 +1291,8 @@ const (
LanguageStan
// LanguageStarlark represents the LanguageStarlark programming language.
LanguageStarlark
+ // LanguageStas represents the st(ack) as(sembler) programming language.
+ LanguageStas
// LanguageStata represents the Stata programming language.
LanguageStata
// LanguageSTON represents the LanguageSTON programming language.
@@ -1403,6 +1413,8 @@ const (
LanguageUrWeb
// LanguageUSD represents the USD programming language.
LanguageUSD
+ // LanguageUxntal represents the Uxntal programming language.
+ LanguageUxntal
// LanguageV represents the V programming language.
LanguageV
// LanguageVala represents the Vala programming language.
@@ -1429,6 +1441,8 @@ const (
LanguageVGL
// LanguageVHDL represents the VHDL programming language.
LanguageVHDL
+ // LanguageVHS represents the VHS programming language.
+ LanguageVHS
// LanguageVimHelpFile represents the VimHelpFile programming language.
LanguageVimHelpFile
// LanguageVimL represents the VimL programming language.
@@ -1439,6 +1453,8 @@ const (
LanguageVimSnippet
// LanguageVolt represents the Volt programming language.
LanguageVolt
+ // LanguageVShell represents the V shell programming language.
+ LanguageVShell
// LanguageVueJS represents the VueJS programming language.
LanguageVueJS
// LanguageWavefrontMaterial represents the WavefrontMaterial programming language.
@@ -1453,6 +1469,8 @@ const (
LanguageWDiff
// LanguageWebAssembly represents the WebAssembly programming language.
LanguageWebAssembly
+ // LanguageWebGPUShadingLanguage represents the WebGPU Shading Language programming language.
+ LanguageWebGPUShadingLanguage
// LanguageWebIDL represents the WebIDL programming language.
LanguageWebIDL
// LanguageWebOntologyLanguage represents the WebOntologyLanguage programming language.
@@ -1519,6 +1537,8 @@ const (
LanguageYARA
// LanguageYASnippet represents the YASnippet programming language.
LanguageYASnippet
+ // LanguageZ80Assembly represents the Z80 Assembly programming language.
+ LanguageZ80Assembly
// LanguageZAP represents the ZAP programming language.
LanguageZAP
// LanguageZed represents the Zed programming language.
@@ -1570,6 +1590,7 @@ const (
languageAPLStr = "APL"
languageApolloGuidanceComputerStr = "Apollo Guidance Computer"
languageAppleScriptStr = "AppleScript"
+ languageArangoDBQueryLanguageStr = "ArangoDB Query Language"
languageArcStr = "Arc"
languageArduinoStr = "Arduino"
languageArmAsmStr = "ArmAsm"
@@ -1617,6 +1638,7 @@ const (
languageBoaStr = "Boa"
languageBooStr = "Boo"
languageBoogieStr = "Boogie"
+ languageBQNStr = "BQN"
languageBrainfuckStr = "Brainfuck"
languageBrightScriptStr = "BrightScript"
languageBroStr = "Bro"
@@ -1880,6 +1902,7 @@ const (
languageIRCLogsStr = "IRC Logs"
languageIsabelleStr = "Isabelle"
languageIsabelleRootStr = "Isabelle ROOT"
+ languageISCdhcpdStr = "ISC dhcpd"
languageJStr = "J"
languageJAGSStr = "JAGS"
languageJadeStr = "Jade"
@@ -1998,6 +2021,7 @@ const (
languageMySQLStr = "MySQL"
languageNASMStr = "NASM"
languageNASMObjdumpStr = "objdump-nasm"
+ languageNaturalStr = "Natural"
languageNCLStr = "NCL"
languageNemerleStr = "Nemerle"
languageNeonStr = "Neon"
@@ -2020,7 +2044,7 @@ const (
languageObjectiveJStr = "Objective-J"
languageOCamlStr = "OCaml"
languageOctaveStr = "Octave"
- languageODINStr = "ODIN"
+ languageODINStr = "Odin"
languageOnesEnterpriseStr = "OnesEnterprise"
languageOocStr = "ooc"
languageOpaStr = "Opa"
@@ -2056,8 +2080,8 @@ const (
languageProcessingStr = "Processing"
languagePrologStr = "Prolog"
languagePromQLStr = "PromQL"
- languagePropertiesJavaStr = "Properties"
languageProtocolBufferStr = "Protocol Buffer"
+ languagePSLStr = "Property Specification Language"
languagePsyShPHPStr = "PsySH console session for PHP"
languagePugStr = "Pug"
languagePuppetStr = "Puppet"
@@ -2179,6 +2203,7 @@ const (
languageSSPStr = "Scalate Server Page"
languageStanStr = "Stan"
languageStarlarkStr = "Starlark"
+ languageStasStr = "st(ack) as(sembler)"
languageStataStr = "Stata"
languageSTONStr = "STON"
languageStylusStr = "Stylus"
@@ -2240,6 +2265,7 @@ const (
languageUrbiScriptStr = "UrbiScript"
languageUrWebStr = "UrWeb"
languageUSDStr = "USD"
+ languageUxntalStr = "Uxntal"
languageVStr = "V"
languageValaStr = "Vala"
languageVBStr = "VB"
@@ -2253,12 +2279,14 @@ const (
languageVerilogStr = "Verilog"
languageVGLStr = "VGL"
languageVHDLStr = "VHDL"
+ languageVHSStr = "VHS"
languageVimHelpFileStr = "Vim Help File"
languageVimLStr = "VimL"
languageVimScriptStr = "Vim Script"
languageVimSnippetStr = "Vim Snippet"
languageVisualBasicNet = "Visual Basic .NET"
languageVoltStr = "Volt"
+ languageVShellStr = "V shell"
languageVueJSStr = "Vue.js"
languageWavefrontMaterialStr = "Wavefront Material"
languageWavefrontObjectStr = "Wavefront Object"
@@ -2266,6 +2294,7 @@ const (
languageWDTEStr = "WDTE"
languageWDiffStr = "WDiff"
languageWebAssemblyStr = "WebAssembly"
+ languageWebGPUShadingLanguageStr = "WebGPU Shading Language"
languageWebIDLStr = "WebIDL"
languageWebOntologyLanguageStr = "Web Ontology Language"
languageWebVTTStr = "WebVTT"
@@ -2299,6 +2328,7 @@ const (
languageYANGStr = "YANG"
languageYARAStr = "YARA"
languageYASnippetStr = "YASnippet"
+ languageZ80AssemblyStr = "Z80 Assembly"
languageZAPStr = "ZAP"
languageZedStr = "Zed"
languageZeekStr = "Zeek"
@@ -2311,35 +2341,41 @@ const (
const (
languageAMPLChromaStr = "Ampl"
+ languageApacheConfChromaStr = "ApacheConf"
+ languageArangoDBAQLChromaStr = "ArangoDB AQL"
languageAssemblyChromaStr = "GAS"
- languageAutoHotkeyChromaStr = "autohotkey"
languageCObjdumpChromaStr = "c-objdump"
languageColdfusionCFCChromaStr = "Coldfusion CFC"
languageColdfusionHTMLChromaStr = "Coldfusion HTML"
languageCppObjdumpChromaStr = "cpp-objdump"
languageCUDAChromaStr = "CUDA"
+ languageDNSChromaStr = "dns"
languageFSharpChromaStr = "FSharp"
languageEmacsLispChromaStr = "EmacsLisp"
languageGoHTMLTemplateChromaStr = "Go HTML Template"
languageGoTextTemplateChromaStr = "Go Text Template"
languageHxmlChromaStr = "Hxml"
+ languageISCdhcpdChromaStr = "ISCdhcpd"
languageJSXChromaStr = "react"
languageJSONLDChromaStr = "JSON-LD"
languageLessChromaStr = "LessCss"
languageLiquidChromaStr = "liquid"
- languageMakefileChromaStr = "Base Makefile"
languageMarkdownChromaStr = "markdown"
languageNewLispChromaStr = "NewLisp"
languageNimrodChromaStr = "Nim"
- languagePython3ChromaStr = "Python 3"
+ languagePropertiesJavaChromaStr = "properties"
+ languagePSLChromaStr = "PSL"
languageOocChromaStr = "Ooc"
languageOrgChromaStr = "Org Mode"
languageRChromaStr = "R"
languageReasonMLChromaStr = "ReasonML"
languageREBOLChromaStr = "REBOL"
languageRexxChromaStr = "Rexx"
+ languageSedChromaStr = "Sed"
+ languageStasChromaStr = "stas"
languageSYSTEMDChromaStr = "SYSTEMD"
languageSystemVerilogChromaStr = "systemverilog"
+ languageTalChromaStr = "Tal"
languageTextChromaStr = "plaintext"
languageTransactSQLChromaStr = "Transact-SQL"
languageTypoScriptHTMLDataChromaStr = "TypoScriptHtmlData"
@@ -2417,6 +2453,8 @@ func ParseLanguage(s string) (Language, bool) {
return LanguageApolloGuidanceComputer, true
case normalizeString(languageAppleScriptStr):
return LanguageAppleScript, true
+ case normalizeString(languageArangoDBQueryLanguageStr):
+ return LanguageArangoDBQueryLanguage, true
case normalizeString(languageArcStr):
return LanguageArc, true
case normalizeString(languageArduinoStr):
@@ -2511,6 +2549,8 @@ func ParseLanguage(s string) (Language, bool) {
return LanguageBoo, true
case normalizeString(languageBoogieStr):
return LanguageBoogie, true
+ case normalizeString(languageBQNStr):
+ return LanguageBQN, true
case normalizeString(languageBrainfuckStr):
return LanguageBrainfuck, true
case normalizeString(languageBrightScriptStr):
@@ -3037,6 +3077,8 @@ func ParseLanguage(s string) (Language, bool) {
return LanguageIsabelle, true
case normalizeString(languageIsabelleRootStr):
return LanguageIsabelleRoot, true
+ case normalizeString(languageISCdhcpdStr):
+ return LanguageISCdhcpd, true
case normalizeString(languageJStr):
return LanguageJ, true
case normalizeString(languageJAGSStr):
@@ -3271,6 +3313,8 @@ func ParseLanguage(s string) (Language, bool) {
return LanguageMustache, true
case normalizeString(languageNASMStr):
return LanguageNASM, true
+ case normalizeString(languageNaturalStr):
+ return LanguageNatural, true
case normalizeString(languageNCLStr):
return LanguageNCL, true
case normalizeString(languageNemerleStr):
@@ -3389,10 +3433,10 @@ func ParseLanguage(s string) (Language, bool) {
return LanguageProlog, true
case normalizeString(languagePromQLStr):
return LanguagePromQL, true
- case normalizeString(languagePropertiesJavaStr):
- return LanguagePropertiesJava, true
case normalizeString(languageProtocolBufferStr):
return LanguageProtocolBuffer, true
+ case normalizeString(languagePSLStr):
+ return LanguagePSL, true
case normalizeString(languagePsyShPHPStr):
return LanguagePsyShPHP, true
case normalizeString(languagePugStr):
@@ -3635,6 +3679,8 @@ func ParseLanguage(s string) (Language, bool) {
return LanguageStan, true
case normalizeString(languageStarlarkStr):
return LanguageStarlark, true
+ case normalizeString(languageStasStr):
+ return LanguageStas, true
case normalizeString(languageStataStr):
return LanguageStata, true
case normalizeString(languageSTONStr):
@@ -3757,6 +3803,8 @@ func ParseLanguage(s string) (Language, bool) {
return LanguageUrWeb, true
case normalizeString(languageUSDStr):
return LanguageUSD, true
+ case normalizeString(languageUxntalStr):
+ return LanguageUxntal, true
case normalizeString(languageVStr):
return LanguageV, true
case normalizeString(languageValaStr):
@@ -3783,6 +3831,8 @@ func ParseLanguage(s string) (Language, bool) {
return LanguageVGL, true
case normalizeString(languageVHDLStr):
return LanguageVHDL, true
+ case normalizeString(languageVHSStr):
+ return LanguageVHS, true
case normalizeString(languageVimHelpFileStr):
return LanguageVimHelpFile, true
case normalizeString(languageVimLStr):
@@ -3795,6 +3845,8 @@ func ParseLanguage(s string) (Language, bool) {
return LanguageVBNet, true
case normalizeString(languageVoltStr):
return LanguageVolt, true
+ case normalizeString(languageVShellStr):
+ return LanguageVShell, true
case normalizeString(languageVueJSStr):
return LanguageVueJS, true
case normalizeString(languageWavefrontMaterialStr):
@@ -3809,6 +3861,8 @@ func ParseLanguage(s string) (Language, bool) {
return LanguageWDiff, true
case normalizeString(languageWebAssemblyStr):
return LanguageWebAssembly, true
+ case normalizeString(languageWebGPUShadingLanguageStr):
+ return LanguageWebGPUShadingLanguage, true
case normalizeString(languageWebIDLStr):
return LanguageWebIDL, true
case normalizeString(languageWebOntologyLanguageStr):
@@ -3877,6 +3931,8 @@ func ParseLanguage(s string) (Language, bool) {
return LanguageYASnippet, true
case normalizeString(languageZAPStr):
return LanguageZAP, true
+ case normalizeString(languageZ80AssemblyStr):
+ return LanguageZ80Assembly, true
case normalizeString(languageZedStr):
return LanguageZed, true
case normalizeString(languageZeekStr):
@@ -3903,10 +3959,12 @@ func ParseLanguageFromChroma(lexerName string) (Language, bool) {
switch normalizeString(lexerName) {
case normalizeString(languageAMPLChromaStr):
return LanguageAMPL, true
+ case normalizeString(languageApacheConfChromaStr):
+ return LanguageApacheConfig, true
+ case normalizeString(languageArangoDBAQLChromaStr):
+ return LanguageArangoDBQueryLanguage, true
case normalizeString(languageAssemblyChromaStr):
return LanguageAssembly, true
- case normalizeString(languageAutoHotkeyChromaStr):
- return LanguageAutoHotkey, true
case normalizeString(languageCObjdumpChromaStr):
return LanguageCObjdump, true
case normalizeString(languageColdfusionCFCChromaStr):
@@ -3917,6 +3975,8 @@ func ParseLanguageFromChroma(lexerName string) (Language, bool) {
return LanguageCppObjdump, true
case normalizeString(languageCUDAChromaStr):
return LanguageCUDA, true
+ case normalizeString(languageDNSChromaStr):
+ return LanguageDNSZone, true
case normalizeString(languageEmacsLispChromaStr):
return LanguageEmacsLisp, true
case normalizeString(languageGoHTMLTemplateChromaStr):
@@ -3927,14 +3987,14 @@ func ParseLanguageFromChroma(lexerName string) (Language, bool) {
return LanguageFSharp, true
case normalizeString(languageHxmlChromaStr):
return LanguageHxml, true
+ case normalizeString(languageISCdhcpdChromaStr):
+ return LanguageISCdhcpd, true
case normalizeString(languageJSXChromaStr):
return LanguageJSX, true
case normalizeString(languageLessChromaStr):
return LanguageLess, true
case normalizeString(languageLiquidChromaStr):
return LanguageLiquid, true
- case normalizeString(languageMakefileChromaStr):
- return LanguageMakefile, true
case normalizeString(languageMarkdownChromaStr):
return LanguageMarkdown, true
case normalizeString(languageNewLispChromaStr):
@@ -3945,18 +4005,26 @@ func ParseLanguageFromChroma(lexerName string) (Language, bool) {
return LanguageOoc, true
case normalizeString(languageOrgChromaStr):
return LanguageOrg, true
- case normalizeString(languagePython3ChromaStr):
- return LanguagePython, true
+ case normalizeString(languagePropertiesJavaChromaStr):
+ return LanguageJavaProperties, true
+ case normalizeString(languagePSLChromaStr):
+ return LanguagePSL, true
case normalizeString(languageRChromaStr):
return LanguageS, true
- case normalizeString(languageRexxChromaStr):
- return LanguageRexx, true
case normalizeString(languageReasonMLChromaStr):
return LanguageReasonML, true
case normalizeString(languageREBOLChromaStr):
return LanguageREBOL, true
+ case normalizeString(languageRexxChromaStr):
+ return LanguageRexx, true
+ case normalizeString(languageSedChromaStr):
+ return LanguageSed, true
+ case normalizeString(languageStasChromaStr):
+ return LanguageStas, true
case normalizeString(languageSystemVerilogChromaStr):
return LanguageSystemVerilog, true
+ case normalizeString(languageTalChromaStr):
+ return LanguageUxntal, true
case normalizeString(languageTextChromaStr):
return LanguageText, true
case normalizeString(languageTransactSQLChromaStr):
@@ -4067,6 +4135,8 @@ func (l Language) String() string {
return languageApolloGuidanceComputerStr
case LanguageAppleScript:
return languageAppleScriptStr
+ case LanguageArangoDBQueryLanguage:
+ return languageArangoDBQueryLanguageStr
case LanguageArc:
return languageArcStr
case LanguageArduino:
@@ -4157,6 +4227,8 @@ func (l Language) String() string {
return languageBooStr
case LanguageBoogie:
return languageBoogieStr
+ case LanguageBQN:
+ return languageBQNStr
case LanguageBrainfuck:
return languageBrainfuckStr
case LanguageBrightScript:
@@ -4679,6 +4751,8 @@ func (l Language) String() string {
return languageIsabelleStr
case LanguageIsabelleRoot:
return languageIsabelleRootStr
+ case LanguageISCdhcpd:
+ return languageISCdhcpdStr
case LanguageJ:
return languageJStr
case LanguageJAGS:
@@ -4913,6 +4987,8 @@ func (l Language) String() string {
return languageMustacheStr
case LanguageNASM:
return languageNASMStr
+ case LanguageNatural:
+ return languageNaturalStr
case LanguageNCL:
return languageNCLStr
case LanguageNemerle:
@@ -5019,6 +5095,8 @@ func (l Language) String() string {
return languagePOVRayStr
case LanguagePowerQuery:
return languagePowerQueryStr
+ case LanguagePSL:
+ return languagePSLStr
case LanguagePowerShell:
return languagePowerShellStr
case LanguagePowerShellSession:
@@ -5031,8 +5109,6 @@ func (l Language) String() string {
return languagePrologStr
case LanguagePromQL:
return languagePromQLStr
- case LanguagePropertiesJava:
- return languagePropertiesJavaStr
case LanguageProtocolBuffer:
return languageProtocolBufferStr
case LanguagePsyShPHP:
@@ -5275,6 +5351,8 @@ func (l Language) String() string {
return languageStanStr
case LanguageStarlark:
return languageStarlarkStr
+ case LanguageStas:
+ return languageStasStr
case LanguageStata:
return languageStataStr
case LanguageSTON:
@@ -5395,6 +5473,8 @@ func (l Language) String() string {
return languageUrWebStr
case LanguageUSD:
return languageUSDStr
+ case LanguageUxntal:
+ return languageUxntalStr
case LanguageV:
return languageVStr
case LanguageVala:
@@ -5421,6 +5501,8 @@ func (l Language) String() string {
return languageVerilogStr
case LanguageVHDL:
return languageVHDLStr
+ case LanguageVHS:
+ return languageVHSStr
case LanguageVimHelpFile:
return languageVimHelpFileStr
case LanguageVimL:
@@ -5431,6 +5513,8 @@ func (l Language) String() string {
return languageVimSnippetStr
case LanguageVolt:
return languageVoltStr
+ case LanguageVShell:
+ return languageVShellStr
case LanguageVueJS:
return languageVueJSStr
case LanguageWavefrontMaterial:
@@ -5445,6 +5529,8 @@ func (l Language) String() string {
return languageWDiffStr
case LanguageWebAssembly:
return languageWebAssemblyStr
+ case LanguageWebGPUShadingLanguage:
+ return languageWebGPUShadingLanguageStr
case LanguageWebIDL:
return languageWebIDLStr
case LanguageWebOntologyLanguage:
@@ -5513,6 +5599,8 @@ func (l Language) String() string {
return languageYASnippetStr
case LanguageZAP:
return languageZAPStr
+ case LanguageZ80Assembly:
+ return languageZ80AssemblyStr
case LanguageZed:
return languageZedStr
case LanguageZeek:
@@ -5540,11 +5628,11 @@ func (l Language) StringChroma() string {
case LanguageAMPL:
return languageAMPLChromaStr
case LanguageApacheConfig:
- return languageApacheConfStr
+ return languageApacheConfChromaStr
+ case LanguageArangoDBQueryLanguage:
+ return languageArangoDBAQLChromaStr
case LanguageAssembly:
return languageAssemblyChromaStr
- case LanguageAutoHotkey:
- return languageAutoHotkeyChromaStr
case LanguageCObjdump:
return languageCObjdumpChromaStr
case LanguageColdfusionCFC:
@@ -5555,6 +5643,8 @@ func (l Language) StringChroma() string {
return languageCppObjdumpChromaStr
case LanguageCUDA:
return languageCUDAChromaStr
+ case LanguageDNSZone:
+ return languageDNSChromaStr
case LanguageEmacsLisp:
return languageEmacsLispChromaStr
case LanguageFSharp:
@@ -5563,6 +5653,10 @@ func (l Language) StringChroma() string {
return languageFStarLiteralStr
case LanguageHxml:
return languageHxmlChromaStr
+ case LanguageISCdhcpd:
+ return languageISCdhcpdChromaStr
+ case LanguageJavaProperties:
+ return languagePropertiesJavaChromaStr
case LanguageJSONLD:
return languageJSONLDChromaStr
case LanguageJSX:
@@ -5571,8 +5665,6 @@ func (l Language) StringChroma() string {
return languageLessChromaStr
case LanguageLiquid:
return languageLiquidChromaStr
- case LanguageMakefile:
- return languageMakefileChromaStr
case LanguageMarkdown:
return languageMarkdownChromaStr
case LanguageNewLisp:
@@ -5583,6 +5675,8 @@ func (l Language) StringChroma() string {
return languageOocChromaStr
case LanguageOrg:
return languageOrgChromaStr
+ case LanguagePSL:
+ return languagePSLChromaStr
case LanguageReasonML:
return languageReasonMLChromaStr
case LanguageREBOL:
@@ -5591,10 +5685,16 @@ func (l Language) StringChroma() string {
return languageRexxChromaStr
case LanguageS:
return languageRChromaStr
+ case LanguageSed:
+ return languageSedChromaStr
+ case LanguageStas:
+ return languageStasChromaStr
case LanguageSYSTEMD:
return languageSYSTEMDChromaStr
case LanguageSystemVerilog:
return languageSystemVerilogChromaStr
+ case LanguageUxntal:
+ return languageTalChromaStr
case LanguageText:
return languageTextChromaStr
case LanguageTransactSQL:
diff --git a/pkg/heartbeat/language_test.go b/pkg/heartbeat/language_test.go
index 1aab236d..2e6b5ab8 100644
--- a/pkg/heartbeat/language_test.go
+++ b/pkg/heartbeat/language_test.go
@@ -7,7 +7,7 @@ import (
"github.com/wakatime/wakatime-cli/pkg/heartbeat"
- "github.com/alecthomas/chroma/lexers"
+ "github.com/alecthomas/chroma/v2/lexers"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -44,6 +44,7 @@ func languageTests() map[string]heartbeat.Language {
"Apex": heartbeat.LanguageApex,
"API Blueprint": heartbeat.LanguageAPIBlueprint,
"Apollo Guidance Computer": heartbeat.LanguageApolloGuidanceComputer,
+ "ArangoDB Query Language": heartbeat.LanguageArangoDBQueryLanguage,
"Arc": heartbeat.LanguageArc,
"Arduino": heartbeat.LanguageArduino,
"ArmAsm": heartbeat.LanguageArmAsm,
@@ -89,6 +90,7 @@ func languageTests() map[string]heartbeat.Language {
"Boa": heartbeat.LanguageBoa,
"Boo": heartbeat.LanguageBoo,
"Boogie": heartbeat.LanguageBoogie,
+ "BQN": heartbeat.LanguageBQN,
"Brainfuck": heartbeat.LanguageBrainfuck,
"BrightScript": heartbeat.LanguageBrightScript,
"Browserslist": heartbeat.LanguageBrowserslist,
@@ -351,6 +353,7 @@ func languageTests() map[string]heartbeat.Language {
"IRC Logs": heartbeat.LanguageIRCLogs,
"Isabelle": heartbeat.LanguageIsabelle,
"Isabelle ROOT": heartbeat.LanguageIsabelleRoot,
+ "ISC dhcpd": heartbeat.LanguageISCdhcpd,
"J": heartbeat.LanguageJ,
"Jade": heartbeat.LanguageJade,
"JAGS": heartbeat.LanguageJAGS,
@@ -468,6 +471,7 @@ func languageTests() map[string]heartbeat.Language {
"Myghty": heartbeat.LanguageMyghty,
"MySQL": heartbeat.LanguageMySQL,
"NASM": heartbeat.LanguageNASM,
+ "Natural": heartbeat.LanguageNatural,
"NCL": heartbeat.LanguageNCL,
"Nemerle": heartbeat.LanguageNemerle,
"Neon": heartbeat.LanguageNeon,
@@ -491,7 +495,7 @@ func languageTests() map[string]heartbeat.Language {
"Objective-J": heartbeat.LanguageObjectiveJ,
"OCaml": heartbeat.LanguageOCaml,
"Octave": heartbeat.LanguageOctave,
- "ODIN": heartbeat.LanguageODIN,
+ "Odin": heartbeat.LanguageODIN,
"OnesEnterprise": heartbeat.LanguageOnesEnterprise,
"ooc": heartbeat.LanguageOoc,
"Opa": heartbeat.LanguageOpa,
@@ -527,7 +531,7 @@ func languageTests() map[string]heartbeat.Language {
"Processing": heartbeat.LanguageProcessing,
"Prolog": heartbeat.LanguageProlog,
"PromQL": heartbeat.LanguagePromQL,
- "Properties": heartbeat.LanguagePropertiesJava,
+ "Property Specification Language": heartbeat.LanguagePSL,
"Protocol Buffer": heartbeat.LanguageProtocolBuffer,
"PsySH console session for PHP": heartbeat.LanguagePsyShPHP,
"Pug": heartbeat.LanguagePug,
@@ -640,6 +644,7 @@ func languageTests() map[string]heartbeat.Language {
"Squirrel": heartbeat.LanguageSquirrel,
"SRecode Template": heartbeat.LanguageSRecodeTemplate,
"SSH Config": heartbeat.LanguageSSHConfig,
+ "st(ack) as(sembler)": heartbeat.LanguageStas,
"Stan": heartbeat.LanguageStan,
"Starlark": heartbeat.LanguageStarlark,
"Stata": heartbeat.LanguageStata,
@@ -703,6 +708,7 @@ func languageTests() map[string]heartbeat.Language {
"UrbiScript": heartbeat.LanguageUrbiScript,
"UrWeb": heartbeat.LanguageUrWeb,
"USD": heartbeat.LanguageUSD,
+ "Uxntal": heartbeat.LanguageUxntal,
"V": heartbeat.LanguageV,
"Vala": heartbeat.LanguageVala,
"VB": heartbeat.LanguageVB,
@@ -721,6 +727,7 @@ func languageTests() map[string]heartbeat.Language {
"Vim Script": heartbeat.LanguageVimScript,
"Vim Snippet": heartbeat.LanguageVimSnippet,
"Volt": heartbeat.LanguageVolt,
+ "V shell": heartbeat.LanguageVShell,
"Vue.js": heartbeat.LanguageVueJS,
"Wavefront Material": heartbeat.LanguageWavefrontMaterial,
"Wavefront Object": heartbeat.LanguageWavefrontObject,
@@ -728,6 +735,7 @@ func languageTests() map[string]heartbeat.Language {
"WDTE": heartbeat.LanguageWDTE,
"WDiff": heartbeat.LanguageWDiff,
"WebAssembly": heartbeat.LanguageWebAssembly,
+ "WebGPU Shading Language": heartbeat.LanguageWebGPUShadingLanguage,
"WebIDL": heartbeat.LanguageWebIDL,
"Web Ontology Language": heartbeat.LanguageWebOntologyLanguage,
"WebVTT": heartbeat.LanguageWebVTT,
@@ -761,6 +769,7 @@ func languageTests() map[string]heartbeat.Language {
"YANG": heartbeat.LanguageYANG,
"YARA": heartbeat.LanguageYARA,
"YASnippet": heartbeat.LanguageYASnippet,
+ "Z80 Assembly": heartbeat.LanguageZ80Assembly,
"ZAP": heartbeat.LanguageZAP,
"Zed": heartbeat.LanguageZed,
"Zeek": heartbeat.LanguageZeek,
@@ -777,6 +786,7 @@ func languageTestsAliases() map[string]heartbeat.Language {
"ApacheConf": heartbeat.LanguageApacheConfig,
"ASP Classic": heartbeat.LanguageClassicASP,
"Batch Script": heartbeat.LanguageBatchfile,
+ "csharp": heartbeat.LanguageCSharp,
"FStar": heartbeat.LanguageFStar,
"Golang": heartbeat.LanguageGo,
"JSON-LD": heartbeat.LanguageJSONLD,
@@ -867,20 +877,22 @@ func TestParseLanguage_Unknown(t *testing.T) {
func TestParseLanguageFromChroma(t *testing.T) {
tests := map[string]heartbeat.Language{
"Ampl": heartbeat.LanguageAMPL,
- "autohotkey": heartbeat.LanguageAutoHotkey,
- "Base Makefile": heartbeat.LanguageMakefile,
+ "ApacheConf": heartbeat.LanguageApacheConfig,
+ "ArangoDB AQL": heartbeat.LanguageArangoDBQueryLanguage,
"c-objdump": heartbeat.LanguageCObjdump,
"Coldfusion CFC": heartbeat.LanguageColdfusionCFC,
"Coldfusion HTML": heartbeat.LanguageColdfusionHTML,
"cpp-objdump": heartbeat.LanguageCppObjdump,
"CUDA": heartbeat.LanguageCUDA,
+ "dns": heartbeat.LanguageDNSZone,
"EmacsLisp": heartbeat.LanguageEmacsLisp,
- "Go HTML Template": heartbeat.LanguageGo,
- "Go Text Template": heartbeat.LanguageGo,
"FSharp": heartbeat.LanguageFSharp,
"GAS": heartbeat.LanguageAssembly,
+ "Go HTML Template": heartbeat.LanguageGo,
+ "Go Text Template": heartbeat.LanguageGo,
"Hxml": heartbeat.LanguageHxml,
"JSON-LD": heartbeat.LanguageJSONLD,
+ "ISCdhcpd": heartbeat.LanguageISCdhcpd,
"LessCss": heartbeat.LanguageLess,
"liquid": heartbeat.LanguageLiquid,
"markdown": heartbeat.LanguageMarkdown,
@@ -889,14 +901,18 @@ func TestParseLanguageFromChroma(t *testing.T) {
"Ooc": heartbeat.LanguageOoc,
"Org Mode": heartbeat.LanguageOrg,
"plaintext": heartbeat.LanguageText,
- "Python 3": heartbeat.LanguagePython,
+ "properties": heartbeat.LanguageJavaProperties,
+ "PSL": heartbeat.LanguagePSL,
"R": heartbeat.LanguageS,
"react": heartbeat.LanguageJSX,
"ReasonML": heartbeat.LanguageReasonML,
"REBOL": heartbeat.LanguageREBOL,
"Rexx": heartbeat.LanguageRexx,
- "SWIG": heartbeat.LanguageSWIG,
+ "Sed": heartbeat.LanguageSed,
+ "stas": heartbeat.LanguageStas,
+ "SYSTEMD": heartbeat.LanguageSYSTEMD,
"systemverilog": heartbeat.LanguageSystemVerilog,
+ "Tal": heartbeat.LanguageUxntal,
"Transact-SQL": heartbeat.LanguageTransactSQL,
"TypoScriptCssData": heartbeat.LanguageTypoScript,
"TypoScriptHtmlData": heartbeat.LanguageTypoScript,
@@ -934,7 +950,7 @@ func TestParseLanguageFromChroma_Unknown(t *testing.T) {
}
func TestParseLanguageFromChroma_AllLexersSupported(t *testing.T) {
- for _, lexer := range lexers.Registry.Lexers {
+ for _, lexer := range lexers.GlobalLexerRegistry.Lexers {
config := lexer.Config()
parsed, ok := heartbeat.ParseLanguageFromChroma(config.Name)
@@ -990,36 +1006,44 @@ func TestLanguage_String_UnknownLanguage(t *testing.T) {
func TestLanguage_StringChroma(t *testing.T) {
tests := map[string]heartbeat.Language{
- "ApacheConf": heartbeat.LanguageApacheConfig,
"Ampl": heartbeat.LanguageAMPL,
- "autohotkey": heartbeat.LanguageAutoHotkey,
- "Base Makefile": heartbeat.LanguageMakefile,
+ "ApacheConf": heartbeat.LanguageApacheConfig,
+ "ArangoDB AQL": heartbeat.LanguageArangoDBQueryLanguage,
"c-objdump": heartbeat.LanguageCObjdump,
"Coldfusion CFC": heartbeat.LanguageColdfusionCFC,
"Coldfusion HTML": heartbeat.LanguageColdfusionHTML,
"cpp-objdump": heartbeat.LanguageCppObjdump,
"CUDA": heartbeat.LanguageCUDA,
+ "dns": heartbeat.LanguageDNSZone,
"EmacsLisp": heartbeat.LanguageEmacsLisp,
"GAS": heartbeat.LanguageAssembly,
"FSharp": heartbeat.LanguageFSharp,
"FStar": heartbeat.LanguageFStar,
"Go": heartbeat.LanguageGo,
"Hxml": heartbeat.LanguageHxml,
+ "ISCdhcpd": heartbeat.LanguageISCdhcpd,
"JSON-LD": heartbeat.LanguageJSONLD,
"LessCss": heartbeat.LanguageLess,
"liquid": heartbeat.LanguageLiquid,
"markdown": heartbeat.LanguageMarkdown,
+ "NewLisp": heartbeat.LanguageNewLisp,
"Nim": heartbeat.LanguageNimrod,
"Ooc": heartbeat.LanguageOoc,
"Org Mode": heartbeat.LanguageOrg,
"plaintext": heartbeat.LanguageText,
+ "properties": heartbeat.LanguageJavaProperties,
+ "PSL": heartbeat.LanguagePSL,
"R": heartbeat.LanguageS,
"react": heartbeat.LanguageJSX,
"ReasonML": heartbeat.LanguageReasonML,
"REBOL": heartbeat.LanguageREBOL,
"Rexx": heartbeat.LanguageRexx,
- "SWIG": heartbeat.LanguageSWIG,
+ "Sed": heartbeat.LanguageSed,
+ "stas": heartbeat.LanguageStas,
+ "SYSTEMD": heartbeat.LanguageSYSTEMD,
"systemverilog": heartbeat.LanguageSystemVerilog,
+ "Tal": heartbeat.LanguageUxntal,
+ "Transact-SQL": heartbeat.LanguageTransactSQL,
"VB.net": heartbeat.LanguageVBNet,
"verilog": heartbeat.LanguageVerilog,
"vue": heartbeat.LanguageVueJS,
@@ -1034,15 +1058,13 @@ func TestLanguage_StringChroma(t *testing.T) {
}
func TestLanguage_StringChroma_AllLexersSupported(t *testing.T) {
- for _, lexer := range lexers.Registry.Lexers {
+ for _, lexer := range lexers.GlobalLexerRegistry.Lexers {
config := lexer.Config()
// Aliases, which match in addition to standard spelling of languages are ignored here.
switch config.Name {
case "Go HTML Template", "Go Text Template":
continue
- case "Python 3":
- continue
case "TypoScriptCssData", "TypoScriptHtmlData":
continue
}
diff --git a/pkg/language/chroma.go b/pkg/language/chroma.go
index 225ebe4f..91659b14 100644
--- a/pkg/language/chroma.go
+++ b/pkg/language/chroma.go
@@ -5,41 +5,14 @@ import (
"io"
"os"
fp "path/filepath"
- "runtime/debug"
"sort"
"strings"
"github.com/wakatime/wakatime-cli/pkg/heartbeat"
"github.com/wakatime/wakatime-cli/pkg/log"
- "github.com/alecthomas/chroma"
- "github.com/alecthomas/chroma/lexers"
- _ "github.com/alecthomas/chroma/lexers/a" // not used directly
- _ "github.com/alecthomas/chroma/lexers/b" // not used directly
- _ "github.com/alecthomas/chroma/lexers/c" // not used directly
- _ "github.com/alecthomas/chroma/lexers/circular" // not used directly
- _ "github.com/alecthomas/chroma/lexers/d" // not used directly
- _ "github.com/alecthomas/chroma/lexers/e" // not used directly
- _ "github.com/alecthomas/chroma/lexers/f" // not used directly
- _ "github.com/alecthomas/chroma/lexers/g" // not used directly
- _ "github.com/alecthomas/chroma/lexers/h" // not used directly
- _ "github.com/alecthomas/chroma/lexers/i" // not used directly
- _ "github.com/alecthomas/chroma/lexers/j" // not used directly
- _ "github.com/alecthomas/chroma/lexers/k" // not used directly
- _ "github.com/alecthomas/chroma/lexers/l" // not used directly
- _ "github.com/alecthomas/chroma/lexers/m" // not used directly
- _ "github.com/alecthomas/chroma/lexers/n" // not used directly
- _ "github.com/alecthomas/chroma/lexers/o" // not used directly
- _ "github.com/alecthomas/chroma/lexers/p" // not used directly
- _ "github.com/alecthomas/chroma/lexers/q" // not used directly
- _ "github.com/alecthomas/chroma/lexers/r" // not used directly
- _ "github.com/alecthomas/chroma/lexers/s" // not used directly
- _ "github.com/alecthomas/chroma/lexers/t" // not used directly
- _ "github.com/alecthomas/chroma/lexers/v" // not used directly
- _ "github.com/alecthomas/chroma/lexers/w" // not used directly
- _ "github.com/alecthomas/chroma/lexers/x" // not used directly
- _ "github.com/alecthomas/chroma/lexers/y" // not used directly
- _ "github.com/alecthomas/chroma/lexers/z" // not used directly
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
"github.com/danwakefield/fnmatch"
)
@@ -55,7 +28,7 @@ func detectChromaCustomized(filepath string) (heartbeat.Language, float32, bool)
matched := chroma.PrioritisedLexers{}
// First, try primary filename matches.
- for _, lexer := range lexers.Registry.Lexers {
+ for _, lexer := range lexers.GlobalLexerRegistry.Lexers {
config := lexer.Config()
for _, glob := range config.Filenames {
if fnmatch.Match(glob, filename, 0) || fnmatch.Match(glob, strings.ToLower(filename), 0) {
@@ -77,7 +50,7 @@ func detectChromaCustomized(filepath string) (heartbeat.Language, float32, bool)
}
// Next, try filename aliases.
- for _, lexer := range lexers.Registry.Lexers {
+ for _, lexer := range lexers.GlobalLexerRegistry.Lexers {
config := lexer.Config()
for _, glob := range config.AliasFilenames {
if fnmatch.Match(glob, filename, 0) {
@@ -109,7 +82,7 @@ func detectChromaCustomized(filepath string) (heartbeat.Language, float32, bool)
return heartbeat.LanguageUnknown, 0, false
}
- if lexer := analyse(string(head)); lexer != nil {
+ if lexer := lexers.Analyse(string(head)); lexer != nil {
language, ok := heartbeat.ParseLanguageFromChroma(lexer.Config().Name)
if !ok {
log.Warnf("failed to parse language from chroma lexer name %q", lexer.Config().Name)
@@ -122,32 +95,6 @@ func detectChromaCustomized(filepath string) (heartbeat.Language, float32, bool)
return heartbeat.LanguageUnknown, 0, false
}
-// analyse text content and return the "best" lexer.
-// This is a copy of chroma.lexers.internal.api:Analyse().
-func analyse(text string) chroma.Lexer {
- defer func() {
- if err := recover(); err != nil {
- log.Errorf("panicked: %v. Stack: %s", err, string(debug.Stack()))
- }
- }()
-
- var picked chroma.Lexer
-
- highest := float32(0.0)
-
- for _, lexer := range lexers.Registry.Lexers {
- if analyser, ok := lexer.(chroma.Analyser); ok {
- weight := analyser.AnalyseText(text)
- if weight > highest {
- picked = lexer
- highest = weight
- }
- }
- }
-
- return picked
-}
-
// weightedLexer is a lexer with priority and weight.
type weightedLexer struct {
chroma.Lexer
@@ -174,13 +121,11 @@ func selectByCustomizedPriority(filepath string, lexers chroma.PrioritisedLexers
extensions, err := loadFolderExtensions(dir)
if err != nil {
log.Warnf("failed to load folder files extensions: %s", err)
- return lexers[0], 0
}
head, err := fileHead(filepath)
if err != nil {
log.Warnf("failed to load head from file %q: %s", filepath, err)
- return lexers[0], 0
}
var weighted []weightedLexer
diff --git a/pkg/language/language_test.go b/pkg/language/language_test.go
index f003a599..f964d734 100644
--- a/pkg/language/language_test.go
+++ b/pkg/language/language_test.go
@@ -6,6 +6,7 @@ import (
"github.com/wakatime/wakatime-cli/pkg/heartbeat"
"github.com/wakatime/wakatime-cli/pkg/language"
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -256,6 +257,9 @@ func TestDetect_FSharp_Over_Forth(t *testing.T) {
}
func TestDetect_ChromaTopLanguagesRetrofit(t *testing.T) {
+ err := lexer.RegisterAll()
+ require.NoError(t, err)
+
tests := map[string]struct {
Filepaths []string
Expected heartbeat.Language
@@ -314,14 +318,13 @@ func TestDetect_ChromaTopLanguagesRetrofit(t *testing.T) {
Expected: heartbeat.LanguageC,
},
"c++": {
- Filepaths: []string{"path/to/file.cpp"},
- Expected: heartbeat.LanguageCPP,
- },
- "c++ 2": {
- Filepaths: []string{"path/to/file.cxx"},
- Expected: heartbeat.LanguageCPP,
+ Filepaths: []string{
+ "path/to/file.cpp",
+ "path/to/file.cxx",
+ },
+ Expected: heartbeat.LanguageCPP,
},
- "c sharp": {
+ "c#": {
Filepaths: []string{"path/to/file.cs"},
Expected: heartbeat.LanguageCSharp,
},
diff --git a/pkg/language/priority.go b/pkg/language/priority.go
index 6d593dd3..112c1f5c 100644
--- a/pkg/language/priority.go
+++ b/pkg/language/priority.go
@@ -3,8 +3,16 @@ package language
func priority(lang string) (float32, bool) {
prios := map[string]float32{
"FSharp": 0.01,
- "Perl": 0.01,
- // Higher priority than the TypoScriptLexer, as TypeScript is far more
+ // Higher priority than the ca65 assembler and ArmAsm
+ "GAS": 0.1,
+ // Higher priority than Inform 6
+ "INI": 0.1,
+ // TASM uses the same file endings, but TASM is not as common as NASM, so we prioritize NASM higher by default.
+ "NASM": 0.1,
+ "Perl": 0.01,
+ // Higher priority than Rebol
+ "R": 0.1,
+ // Higher priority than the TypoScript lexer, as TypeScript is far more
// common these days
"TypeScript": 0.5,
}
diff --git a/pkg/language/vim.go b/pkg/language/vim.go
index 6aac88c0..2709551f 100644
--- a/pkg/language/vim.go
+++ b/pkg/language/vim.go
@@ -6,8 +6,8 @@ import (
"github.com/wakatime/wakatime-cli/pkg/heartbeat"
- "github.com/alecthomas/chroma"
- "github.com/alecthomas/chroma/lexers"
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
)
var modelineRegex = regexp.MustCompile(`(?m)(?:vi|vim|ex)(?:[<=>]?\d*)?:.*(?:ft|filetype|syn|syntax)=([^:\s]+)`)
diff --git a/pkg/lexer/actionscript3.go b/pkg/lexer/actionscript3.go
new file mode 100644
index 00000000..7e0a41ea
--- /dev/null
+++ b/pkg/lexer/actionscript3.go
@@ -0,0 +1,43 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// nolint:gochecknoglobals
+var actionscript3AnalyserRe = regexp.MustCompile(`\w+\s*:\s*\w`)
+
+// ActionScript3 lexer.
+type ActionScript3 struct{}
+
+// Lexer returns the lexer.
+func (l ActionScript3) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ if actionscript3AnalyserRe.MatchString(text) {
+ return 0.3
+ }
+
+ return 0
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (ActionScript3) Name() string {
+ return heartbeat.LanguageActionScript.StringChroma()
+}
diff --git a/pkg/lexer/actionscript3_test.go b/pkg/lexer/actionscript3_test.go
new file mode 100644
index 00000000..50d676c4
--- /dev/null
+++ b/pkg/lexer/actionscript3_test.go
@@ -0,0 +1,41 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestActionScript3_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "basic": {
+ Filepath: "testdata/actionscript3.as",
+ Expected: 0.3,
+ },
+ "capital letters": {
+ Filepath: "testdata/actionscript3_capital_letter.as",
+ Expected: 0.3,
+ },
+ "spaces": {
+ Filepath: "testdata/actionscript3_spaces.as",
+ Expected: 0.3,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.ActionScript3{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/adl.go b/pkg/lexer/adl.go
new file mode 100644
index 00000000..18c485f6
--- /dev/null
+++ b/pkg/lexer/adl.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// ADL lexer.
+type ADL struct{}
+
+// Lexer returns the lexer.
+func (l ADL) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"adl"},
+ Filenames: []string{"*.adl", "*.adls", "*.adlf", "*.adlx"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (ADL) Name() string {
+ return heartbeat.LanguageADL.StringChroma()
+}
diff --git a/pkg/lexer/agda.go b/pkg/lexer/agda.go
new file mode 100644
index 00000000..711c616f
--- /dev/null
+++ b/pkg/lexer/agda.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Agda lexer.
+type Agda struct{}
+
+// Lexer returns the lexer.
+func (l Agda) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"agda"},
+ Filenames: []string{"*.agda"},
+ MimeTypes: []string{"text/x-agda"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Agda) Name() string {
+ return heartbeat.LanguageAgda.StringChroma()
+}
diff --git a/pkg/lexer/aheui.go b/pkg/lexer/aheui.go
new file mode 100644
index 00000000..a4bf8dda
--- /dev/null
+++ b/pkg/lexer/aheui.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Aheui lexer.
+type Aheui struct{}
+
+// Lexer returns the lexer.
+func (l Aheui) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"aheui"},
+ Filenames: []string{"*.aheui"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Aheui) Name() string {
+ return heartbeat.LanguageAheui.StringChroma()
+}
diff --git a/pkg/lexer/alloy.go b/pkg/lexer/alloy.go
new file mode 100644
index 00000000..303fa857
--- /dev/null
+++ b/pkg/lexer/alloy.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Alloy lexer.
+type Alloy struct{}
+
+// Lexer returns the lexer.
+func (l Alloy) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"alloy"},
+ Filenames: []string{"*.als"},
+ MimeTypes: []string{"text/x-alloy"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Alloy) Name() string {
+ return heartbeat.LanguageAlloy.StringChroma()
+}
diff --git a/pkg/lexer/ambienttalk.go b/pkg/lexer/ambienttalk.go
new file mode 100644
index 00000000..5d001a5c
--- /dev/null
+++ b/pkg/lexer/ambienttalk.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// AmbientTalk lexer.
+type AmbientTalk struct{}
+
+// Lexer returns the lexer.
+func (l AmbientTalk) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"at", "ambienttalk", "ambienttalk/2"},
+ Filenames: []string{"*.at"},
+ MimeTypes: []string{"text/x-ambienttalk"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (AmbientTalk) Name() string {
+ return heartbeat.LanguageAmbientTalk.StringChroma()
+}
diff --git a/pkg/lexer/ampl.go b/pkg/lexer/ampl.go
new file mode 100644
index 00000000..fe08732d
--- /dev/null
+++ b/pkg/lexer/ampl.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// AMPL lexer.
+type AMPL struct{}
+
+// Lexer returns the lexer.
+func (l AMPL) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"ampl"},
+ Filenames: []string{"*.run"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (AMPL) Name() string {
+ return heartbeat.LanguageAMPL.StringChroma()
+}
diff --git a/pkg/lexer/arrow.go b/pkg/lexer/arrow.go
new file mode 100644
index 00000000..43e899cc
--- /dev/null
+++ b/pkg/lexer/arrow.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Arrow lexer.
+type Arrow struct{}
+
+// Lexer returns the lexer.
+func (l Arrow) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"arrow"},
+ Filenames: []string{"*.arw"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Arrow) Name() string {
+ return heartbeat.LanguageArrow.StringChroma()
+}
diff --git a/pkg/lexer/aspectj.go b/pkg/lexer/aspectj.go
new file mode 100644
index 00000000..7d5c5d0c
--- /dev/null
+++ b/pkg/lexer/aspectj.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// AspectJ lexer.
+type AspectJ struct{}
+
+// Lexer returns the lexer.
+func (l AspectJ) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"aspectj"},
+ Filenames: []string{"*.aj"},
+ MimeTypes: []string{"text/x-aspectj"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (AspectJ) Name() string {
+ return heartbeat.LanguageAspectJ.StringChroma()
+}
diff --git a/pkg/lexer/aspxcsharp.go b/pkg/lexer/aspxcsharp.go
new file mode 100644
index 00000000..4f9521de
--- /dev/null
+++ b/pkg/lexer/aspxcsharp.go
@@ -0,0 +1,53 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var (
+ csharpAspxAnalyzerPageLanguageRe = regexp.MustCompile(`(?i)Page\s*Language="C#"`)
+ csharpAspxAnalyzerScriptLanguageRe = regexp.MustCompile(`(?i)script[^>]+language=["\']C#`)
+)
+
+// AspxCSharp lexer.
+type AspxCSharp struct{}
+
+// Lexer returns the lexer.
+func (l AspxCSharp) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"aspx-cs"},
+ Filenames: []string{"*.aspx", "*.asax", "*.ascx", "*.ashx", "*.asmx", "*.axd"},
+ MimeTypes: []string{},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ if csharpAspxAnalyzerPageLanguageRe.MatchString(text) {
+ return 0.2
+ }
+
+ if csharpAspxAnalyzerScriptLanguageRe.MatchString(text) {
+ return 0.15
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (AspxCSharp) Name() string {
+ return heartbeat.LanguageAspxCSharp.StringChroma()
+}
diff --git a/pkg/lexer/aspxcsharp_test.go b/pkg/lexer/aspxcsharp_test.go
new file mode 100644
index 00000000..b6226e4a
--- /dev/null
+++ b/pkg/lexer/aspxcsharp_test.go
@@ -0,0 +1,37 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestAspxCSharp_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "page language": {
+ Filepath: "testdata/aspxcsharp_page_language.aspx",
+ Expected: 0.2,
+ },
+ "script language": {
+ Filepath: "testdata/aspxcsharp_script_language.aspx",
+ Expected: 0.15,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.AspxCSharp{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/aspxvb.go b/pkg/lexer/aspxvb.go
new file mode 100644
index 00000000..789e637a
--- /dev/null
+++ b/pkg/lexer/aspxvb.go
@@ -0,0 +1,54 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// nolint:gochecknoglobals
+var (
+ vbAspxAnalyzerPageLanguageRe = regexp.MustCompile(`(?i)Page\s*Language="Vb"`)
+ vbAspxAnalyzerScriptLanguageRe = regexp.MustCompile(`(?i)script[^>]+language=["\']vb`)
+)
+
+// AspxVBNet lexer.
+type AspxVBNet struct{}
+
+// Lexer returns the lexer.
+func (l AspxVBNet) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"aspx-vb"},
+ Filenames: []string{"*.aspx", "*.asax", "*.ascx", "*.ashx", "*.asmx", "*.axd"},
+ MimeTypes: []string{},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ if vbAspxAnalyzerPageLanguageRe.MatchString(text) {
+ return 0.2
+ }
+
+ if vbAspxAnalyzerScriptLanguageRe.MatchString(text) {
+ return 0.15
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (AspxVBNet) Name() string {
+ return heartbeat.LanguageAspxVBNet.StringChroma()
+}
diff --git a/pkg/lexer/aspxvb_test.go b/pkg/lexer/aspxvb_test.go
new file mode 100644
index 00000000..b0e79caa
--- /dev/null
+++ b/pkg/lexer/aspxvb_test.go
@@ -0,0 +1,37 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestAspxVBNet_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "page language": {
+ Filepath: "testdata/aspxvbnet_page_language.aspx",
+ Expected: 0.2,
+ },
+ "script language": {
+ Filepath: "testdata/aspxvbnet_script_language.aspx",
+ Expected: 0.15,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.AspxVBNet{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/asymptote.go b/pkg/lexer/asymptote.go
new file mode 100644
index 00000000..799b1971
--- /dev/null
+++ b/pkg/lexer/asymptote.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Asymptote lexer.
+type Asymptote struct{}
+
+// Lexer returns the lexer.
+func (l Asymptote) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"asy", "asymptote"},
+ Filenames: []string{"*.asy"},
+ MimeTypes: []string{"text/x-asymptote"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Asymptote) Name() string {
+ return heartbeat.LanguageAsymptote.StringChroma()
+}
diff --git a/pkg/lexer/augeas.go b/pkg/lexer/augeas.go
new file mode 100644
index 00000000..a9cd3a2a
--- /dev/null
+++ b/pkg/lexer/augeas.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Augeas lexer.
+type Augeas struct{}
+
+// Lexer returns the lexer.
+func (l Augeas) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"augeas"},
+ Filenames: []string{"*.aug"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Augeas) Name() string {
+ return heartbeat.LanguageAugeas.StringChroma()
+}
diff --git a/pkg/lexer/bare.go b/pkg/lexer/bare.go
new file mode 100644
index 00000000..51fd1386
--- /dev/null
+++ b/pkg/lexer/bare.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// BARE lexer.
+type BARE struct{}
+
+// Lexer returns the lexer.
+func (l BARE) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"bare"},
+ Filenames: []string{"*.bare"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (BARE) Name() string {
+ return heartbeat.LanguageBARE.StringChroma()
+}
diff --git a/pkg/lexer/bbcbasic.go b/pkg/lexer/bbcbasic.go
new file mode 100644
index 00000000..88d9fe38
--- /dev/null
+++ b/pkg/lexer/bbcbasic.go
@@ -0,0 +1,43 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// BBCBasic lexer.
+type BBCBasic struct{}
+
+// Lexer returns the lexer.
+func (l BBCBasic) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"bbcbasic"},
+ Filenames: []string{"*.bbc"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ if strings.HasPrefix(text, "10REM >") || strings.HasPrefix(text, "REM >") {
+ return 0.9
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (BBCBasic) Name() string {
+ return heartbeat.LanguageBBCBasic.StringChroma()
+}
diff --git a/pkg/lexer/bbcbasic_test.go b/pkg/lexer/bbcbasic_test.go
new file mode 100644
index 00000000..6d0b9862
--- /dev/null
+++ b/pkg/lexer/bbcbasic_test.go
@@ -0,0 +1,37 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestBBCBasic_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "REM": {
+ Filepath: "testdata/bbcbasic_rem.bbc",
+ Expected: 0.9,
+ },
+ "10REM": {
+ Filepath: "testdata/bbcbasic_10rem.bbc",
+ Expected: 0.9,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.BBCBasic{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/bbcode.go b/pkg/lexer/bbcode.go
new file mode 100644
index 00000000..2f5a244a
--- /dev/null
+++ b/pkg/lexer/bbcode.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// BBCode lexer.
+type BBCode struct{}
+
+// Lexer returns the lexer.
+func (l BBCode) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"bbcode"},
+ MimeTypes: []string{"text/x-bbcode"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (BBCode) Name() string {
+ return heartbeat.LanguageBBCode.StringChroma()
+}
diff --git a/pkg/lexer/bc.go b/pkg/lexer/bc.go
new file mode 100644
index 00000000..7f50c37e
--- /dev/null
+++ b/pkg/lexer/bc.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// BC lexer.
+type BC struct{}
+
+// Lexer returns the lexer.
+func (l BC) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"bc"},
+ Filenames: []string{"*.bc"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (BC) Name() string {
+ return heartbeat.LanguageBC.StringChroma()
+}
diff --git a/pkg/lexer/befunge.go b/pkg/lexer/befunge.go
new file mode 100644
index 00000000..caed4c6e
--- /dev/null
+++ b/pkg/lexer/befunge.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Befunge lexer.
+type Befunge struct{}
+
+// Lexer returns the lexer.
+func (l Befunge) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"befunge"},
+ Filenames: []string{"*.befunge"},
+ MimeTypes: []string{"application/x-befunge"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Befunge) Name() string {
+ return heartbeat.LanguageBefunge.StringChroma()
+}
diff --git a/pkg/lexer/blazor.go b/pkg/lexer/blazor.go
new file mode 100644
index 00000000..c8dcef73
--- /dev/null
+++ b/pkg/lexer/blazor.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Blazor lexer.
+type Blazor struct{}
+
+// Lexer returns the lexer.
+func (l Blazor) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"blazor"},
+ Filenames: []string{"*.razor"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Blazor) Name() string {
+ return heartbeat.LanguageBlazor.StringChroma()
+}
diff --git a/pkg/lexer/blitzmax.go b/pkg/lexer/blitzmax.go
new file mode 100644
index 00000000..360bc2cb
--- /dev/null
+++ b/pkg/lexer/blitzmax.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// BlitzMax lexer.
+type BlitzMax struct{}
+
+// Lexer returns the lexer.
+func (l BlitzMax) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"blitzmax", "bmax"},
+ Filenames: []string{"*.bmx"},
+ MimeTypes: []string{"text/x-bmx"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (BlitzMax) Name() string {
+ return heartbeat.LanguageBlitzMax.StringChroma()
+}
diff --git a/pkg/lexer/boa.go b/pkg/lexer/boa.go
new file mode 100644
index 00000000..716b0fa5
--- /dev/null
+++ b/pkg/lexer/boa.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Boa lexer.
+type Boa struct{}
+
+// Lexer returns the lexer.
+func (l Boa) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"boa"},
+ Filenames: []string{"*.boa"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Boa) Name() string {
+ return heartbeat.LanguageBoa.StringChroma()
+}
diff --git a/pkg/lexer/boo.go b/pkg/lexer/boo.go
new file mode 100644
index 00000000..ce53d20d
--- /dev/null
+++ b/pkg/lexer/boo.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Boo lexer.
+type Boo struct{}
+
+// Lexer returns the lexer.
+func (l Boo) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"boo"},
+ Filenames: []string{"*.boo"},
+ MimeTypes: []string{"text/x-boo"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Boo) Name() string {
+ return heartbeat.LanguageBoo.StringChroma()
+}
diff --git a/pkg/lexer/boogie.go b/pkg/lexer/boogie.go
new file mode 100644
index 00000000..fb6aaab7
--- /dev/null
+++ b/pkg/lexer/boogie.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Boogie lexer.
+type Boogie struct{}
+
+// Lexer returns the lexer.
+func (l Boogie) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"boogie"},
+ Filenames: []string{"*.bpl"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Boogie) Name() string {
+ return heartbeat.LanguageBoogie.StringChroma()
+}
diff --git a/pkg/lexer/brainfuck.go b/pkg/lexer/brainfuck.go
new file mode 100644
index 00000000..a6570794
--- /dev/null
+++ b/pkg/lexer/brainfuck.go
@@ -0,0 +1,73 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// Brainfuck lexer.
+type Brainfuck struct{}
+
+// Lexer returns the lexer.
+func (l Brainfuck) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ var (
+ ok bool
+ rgxlexer *chroma.RegexLexer
+ )
+
+ if rgxlexer, ok = lexer.(*chroma.RegexLexer); !ok {
+ return nil
+ }
+
+ rgxlexer.SetAnalyser(func(text string) float32 {
+ // it's safe to assume that a program which mostly consists of + -
+ // and < > is brainfuck.
+ var plusMinusCount float64
+ var greaterLessCount float64
+
+ rangeToCheck := len(text)
+
+ if rangeToCheck > 256 {
+ rangeToCheck = 256
+ }
+
+ for _, c := range text[:rangeToCheck] {
+ if c == '+' || c == '-' {
+ plusMinusCount++
+ }
+ if c == '<' || c == '>' {
+ greaterLessCount++
+ }
+ }
+
+ if plusMinusCount > (0.25 * float64(rangeToCheck)) {
+ return 1.0
+ }
+
+ if greaterLessCount > (0.25 * float64(rangeToCheck)) {
+ return 1.0
+ }
+
+ if strings.Contains(text, "[-]") {
+ return 0.5
+ }
+
+ return 0
+ })
+
+ return rgxlexer
+}
+
+// Name returns the name of the lexer.
+func (Brainfuck) Name() string {
+ return heartbeat.LanguageBrainfuck.StringChroma()
+}
diff --git a/pkg/lexer/brainfuck_test.go b/pkg/lexer/brainfuck_test.go
new file mode 100644
index 00000000..c147602c
--- /dev/null
+++ b/pkg/lexer/brainfuck_test.go
@@ -0,0 +1,41 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestBrainfuck_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "plus minus": {
+ Filepath: "testdata/brainfuck_plus_minus.bf",
+ Expected: 1.0,
+ },
+ "greater less": {
+ Filepath: "testdata/brainfuck_greater_less.bf",
+ Expected: 1.0,
+ },
+ "minus only": {
+ Filepath: "testdata/brainfuck_minus.bf",
+ Expected: 0.5,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.Brainfuck{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/bst.go b/pkg/lexer/bst.go
new file mode 100644
index 00000000..33055d30
--- /dev/null
+++ b/pkg/lexer/bst.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// BST lexer.
+type BST struct{}
+
+// Lexer returns the lexer.
+func (l BST) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"bst", "bst-pybtex"},
+ Filenames: []string{"*.bst"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (BST) Name() string {
+ return heartbeat.LanguageBST.StringChroma()
+}
diff --git a/pkg/lexer/bugs.go b/pkg/lexer/bugs.go
new file mode 100644
index 00000000..dcb4d05e
--- /dev/null
+++ b/pkg/lexer/bugs.go
@@ -0,0 +1,46 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var bugsAnalyzerRe = regexp.MustCompile(`(?m)^\s*model\s*{`)
+
+// BUGS lexer.
+type BUGS struct{}
+
+// Lexer returns the lexer.
+func (l BUGS) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"bugs", "winbugs", "openbugs"},
+ Filenames: []string{"*.bug"},
+ MimeTypes: []string{},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ if bugsAnalyzerRe.MatchString(text) {
+ return 0.7
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (BUGS) Name() string {
+ return heartbeat.LanguageBUGS.StringChroma()
+}
diff --git a/pkg/lexer/bugs_test.go b/pkg/lexer/bugs_test.go
new file mode 100644
index 00000000..950c7a01
--- /dev/null
+++ b/pkg/lexer/bugs_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestBUGS_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/bugs_basic.bug")
+ assert.NoError(t, err)
+
+ l := lexer.BUGS{}.Lexer()
+
+ assert.Equal(t, float32(0.7), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/c_test.go b/pkg/lexer/c_test.go
new file mode 100644
index 00000000..f06aecb4
--- /dev/null
+++ b/pkg/lexer/c_test.go
@@ -0,0 +1,44 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/alecthomas/chroma/v2/lexers"
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestC_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "include": {
+ Filepath: "testdata/c_include.c",
+ Expected: 0.1,
+ },
+ "ifdef": {
+ Filepath: "testdata/c_ifdef.c",
+ Expected: 0.1,
+ },
+ "ifndef": {
+ Filepath: "testdata/c_ifndef.c",
+ Expected: 0.1,
+ },
+ }
+
+ l := lexers.Get(heartbeat.LanguageC.StringChroma())
+ require.NotNil(t, l)
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/ca65assembler.go b/pkg/lexer/ca65assembler.go
new file mode 100644
index 00000000..d7fb5ce2
--- /dev/null
+++ b/pkg/lexer/ca65assembler.go
@@ -0,0 +1,48 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// nolint:gochecknoglobals
+var ca65AnalyserCommentRe = regexp.MustCompile(`(?m)^\s*;`)
+
+// Ca65Assembler lexer.
+type Ca65Assembler struct{}
+
+// Lexer returns the lexer.
+func (l Ca65Assembler) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"ca65"},
+ Filenames: []string{"*.s"},
+ MimeTypes: []string{},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+	// comments in ca65 assembler start with ";".
+ if ca65AnalyserCommentRe.MatchString(text) {
+ return 0.9
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (Ca65Assembler) Name() string {
+ return heartbeat.LanguageCa65Assembler.StringChroma()
+}
diff --git a/pkg/lexer/ca65assembler_test.go b/pkg/lexer/ca65assembler_test.go
new file mode 100644
index 00000000..ba13474e
--- /dev/null
+++ b/pkg/lexer/ca65assembler_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestCa65Assembler_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/ca65assembler_comment.s")
+ assert.NoError(t, err)
+
+ l := lexer.Ca65Assembler{}.Lexer()
+
+ assert.Equal(t, float32(0.9), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/cadl.go b/pkg/lexer/cadl.go
new file mode 100644
index 00000000..b2619964
--- /dev/null
+++ b/pkg/lexer/cadl.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// CADL lexer.
+type CADL struct{}
+
+// Lexer returns the lexer.
+func (l CADL) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"cadl"},
+ Filenames: []string{"*.cadl"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (CADL) Name() string {
+ return heartbeat.LanguageCADL.StringChroma()
+}
diff --git a/pkg/lexer/camkes.go b/pkg/lexer/camkes.go
new file mode 100644
index 00000000..72a4b656
--- /dev/null
+++ b/pkg/lexer/camkes.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// CAmkES lexer.
+type CAmkES struct{}
+
+// Lexer returns the lexer.
+func (l CAmkES) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"camkes", "idl4"},
+ Filenames: []string{"*.camkes", "*.idl4"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (CAmkES) Name() string {
+ return heartbeat.LanguageCAmkES.StringChroma()
+}
diff --git a/pkg/lexer/capdl.go b/pkg/lexer/capdl.go
new file mode 100644
index 00000000..df1937dd
--- /dev/null
+++ b/pkg/lexer/capdl.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// CapDL lexer.
+type CapDL struct{}
+
+// Lexer returns the lexer.
+func (l CapDL) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"capdl"},
+ Filenames: []string{"*.cdl"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (CapDL) Name() string {
+ return heartbeat.LanguageCapDL.StringChroma()
+}
diff --git a/pkg/lexer/cbmbasicv2.go b/pkg/lexer/cbmbasicv2.go
new file mode 100644
index 00000000..ce842385
--- /dev/null
+++ b/pkg/lexer/cbmbasicv2.go
@@ -0,0 +1,48 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var cbmBasicV2AnalyserRe = regexp.MustCompile(`^\d+`)
+
+// CBMBasicV2 lexer for CBM BASIC V2.
+type CBMBasicV2 struct{}
+
+// Lexer returns the lexer.
+func (l CBMBasicV2) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"cbmbas"},
+ Filenames: []string{"*.bas"},
+ MimeTypes: []string{},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // if it starts with a line number, it shouldn't be a "modern" Basic
+ // like VB.net
+ if cbmBasicV2AnalyserRe.MatchString(text) {
+ return 0.2
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (CBMBasicV2) Name() string {
+ return heartbeat.LanguageCBMBasicV2.StringChroma()
+}
diff --git a/pkg/lexer/cbmbasicv2_test.go b/pkg/lexer/cbmbasicv2_test.go
new file mode 100644
index 00000000..1ed24c37
--- /dev/null
+++ b/pkg/lexer/cbmbasicv2_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestCBMBasicV2_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/cbmbasicv2_basic.bas")
+ assert.NoError(t, err)
+
+ l := lexer.CBMBasicV2{}.Lexer()
+
+ assert.Equal(t, float32(0.2), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/charmci.go b/pkg/lexer/charmci.go
new file mode 100644
index 00000000..5268bca4
--- /dev/null
+++ b/pkg/lexer/charmci.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Charmci lexer.
+type Charmci struct{}
+
+// Lexer returns the lexer.
+func (l Charmci) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"charmci"},
+ Filenames: []string{"*.ci"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Charmci) Name() string {
+ return heartbeat.LanguageCharmci.StringChroma()
+}
diff --git a/pkg/lexer/cirru.go b/pkg/lexer/cirru.go
new file mode 100644
index 00000000..f75a93f6
--- /dev/null
+++ b/pkg/lexer/cirru.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Cirru lexer.
+type Cirru struct{}
+
+// Lexer returns the lexer.
+func (l Cirru) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"cirru"},
+ Filenames: []string{"*.cirru"},
+ MimeTypes: []string{"text/x-cirru"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Cirru) Name() string {
+ return heartbeat.LanguageCirru.StringChroma()
+}
diff --git a/pkg/lexer/clay.go b/pkg/lexer/clay.go
new file mode 100644
index 00000000..0d56dce8
--- /dev/null
+++ b/pkg/lexer/clay.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Clay lexer.
+type Clay struct{}
+
+// Lexer returns the lexer.
+func (l Clay) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"clay"},
+ Filenames: []string{"*.clay"},
+ MimeTypes: []string{"text/x-clay"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Clay) Name() string {
+ return heartbeat.LanguageClay.StringChroma()
+}
diff --git a/pkg/lexer/clean.go b/pkg/lexer/clean.go
new file mode 100644
index 00000000..ff429c5c
--- /dev/null
+++ b/pkg/lexer/clean.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Clean lexer.
+type Clean struct{}
+
+// Lexer returns the lexer.
+func (l Clean) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"clean"},
+ Filenames: []string{"*.icl", "*.dcl"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Clean) Name() string {
+ return heartbeat.LanguageClean.StringChroma()
+}
diff --git a/pkg/lexer/clojurescript.go b/pkg/lexer/clojurescript.go
new file mode 100644
index 00000000..a5cc7094
--- /dev/null
+++ b/pkg/lexer/clojurescript.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// ClojureScript lexer.
+type ClojureScript struct{}
+
+// Lexer returns the lexer.
+func (l ClojureScript) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"clojurescript", "cljs"},
+ Filenames: []string{"*.cljs"},
+ MimeTypes: []string{"text/x-clojurescript", "application/x-clojurescript"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (ClojureScript) Name() string {
+ return heartbeat.LanguageClojureScript.StringChroma()
+}
diff --git a/pkg/lexer/cobjdump.go b/pkg/lexer/cobjdump.go
new file mode 100644
index 00000000..827104e8
--- /dev/null
+++ b/pkg/lexer/cobjdump.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// CObjdump lexer.
+type CObjdump struct{}
+
+// Lexer returns the lexer.
+func (l CObjdump) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"c-objdump"},
+ Filenames: []string{"*.c-objdump"},
+ MimeTypes: []string{"text/x-c-objdump"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (CObjdump) Name() string {
+ return heartbeat.LanguageCObjdump.StringChroma()
+}
diff --git a/pkg/lexer/cobolfree.go b/pkg/lexer/cobolfree.go
new file mode 100644
index 00000000..07ef8d9f
--- /dev/null
+++ b/pkg/lexer/cobolfree.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// COBOLFree lexer.
+type COBOLFree struct{}
+
+// Lexer returns the lexer.
+func (l COBOLFree) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"cobolfree"},
+ Filenames: []string{"*.cbl", "*.CBL"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (COBOLFree) Name() string {
+ return heartbeat.LanguageCOBOLFree.StringChroma()
+}
diff --git a/pkg/lexer/coldfusioncfc.go b/pkg/lexer/coldfusioncfc.go
new file mode 100644
index 00000000..5c7adf93
--- /dev/null
+++ b/pkg/lexer/coldfusioncfc.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// ColdfusionCFC lexer.
+type ColdfusionCFC struct{}
+
+// Lexer returns the lexer.
+func (l ColdfusionCFC) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"cfc"},
+ Filenames: []string{"*.cfc"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (ColdfusionCFC) Name() string {
+ return heartbeat.LanguageColdfusionCFC.StringChroma()
+}
diff --git a/pkg/lexer/coldfusionhtml.go b/pkg/lexer/coldfusionhtml.go
new file mode 100644
index 00000000..55cf31d9
--- /dev/null
+++ b/pkg/lexer/coldfusionhtml.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// ColdfusionHTML lexer.
+type ColdfusionHTML struct{}
+
+// Lexer returns the lexer.
+func (l ColdfusionHTML) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"cfm"},
+ Filenames: []string{"*.cfm", "*.cfml"},
+ MimeTypes: []string{"application/x-coldfusion"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (ColdfusionHTML) Name() string {
+ return heartbeat.LanguageColdfusionHTML.StringChroma()
+}
diff --git a/pkg/lexer/componentpascal.go b/pkg/lexer/componentpascal.go
new file mode 100644
index 00000000..aae7fc2c
--- /dev/null
+++ b/pkg/lexer/componentpascal.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// ComponentPascal lexer.
+type ComponentPascal struct{}
+
+// Lexer returns the lexer.
+func (l ComponentPascal) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"componentpascal", "cp"},
+ Filenames: []string{"*.cp", "*.cps"},
+ MimeTypes: []string{"text/x-component-pascal"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (ComponentPascal) Name() string {
+ return heartbeat.LanguageComponentPascal.StringChroma()
+}
diff --git a/pkg/lexer/coq.go b/pkg/lexer/coq.go
new file mode 100644
index 00000000..9f97e369
--- /dev/null
+++ b/pkg/lexer/coq.go
@@ -0,0 +1,40 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// Coq lexer.
+type Coq struct{}
+
+// Lexer returns the lexer.
+func (l Coq) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ if strings.Contains(text, "Qed") && strings.Contains(text, "Proof") {
+ return 1.0
+ }
+
+ return 0
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (Coq) Name() string {
+ return heartbeat.LanguageCoq.StringChroma()
+}
diff --git a/pkg/lexer/coq_test.go b/pkg/lexer/coq_test.go
new file mode 100644
index 00000000..17cebb06
--- /dev/null
+++ b/pkg/lexer/coq_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestCoq_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/coq_reserved_keyword.v")
+ assert.NoError(t, err)
+
+ l := lexer.Coq{}.Lexer()
+
+ assert.Equal(t, float32(1.0), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/cpp_test.go b/pkg/lexer/cpp_test.go
new file mode 100644
index 00000000..70e5a7a0
--- /dev/null
+++ b/pkg/lexer/cpp_test.go
@@ -0,0 +1,40 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/alecthomas/chroma/v2/lexers"
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestCpp_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "include": {
+ Filepath: "testdata/cpp_include.cpp",
+ Expected: 0.2,
+ },
+ "namespace": {
+ Filepath: "testdata/cpp_namespace.cpp",
+ Expected: 0.4,
+ },
+ }
+
+ l := lexers.Get(heartbeat.LanguageCPP.StringChroma())
+ require.NotNil(t, l)
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/cppobjdump.go b/pkg/lexer/cppobjdump.go
new file mode 100644
index 00000000..131c9d64
--- /dev/null
+++ b/pkg/lexer/cppobjdump.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// CppObjdump lexer.
+type CppObjdump struct{}
+
+// Lexer returns the lexer.
+func (l CppObjdump) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"cpp-objdump", "c++-objdumb", "cxx-objdump"},
+ Filenames: []string{"*.cpp-objdump", "*.c++-objdump", "*.cxx-objdump"},
+ MimeTypes: []string{"text/x-cpp-objdump"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (CppObjdump) Name() string {
+ return heartbeat.LanguageCppObjdump.StringChroma()
+}
diff --git a/pkg/lexer/cpsa.go b/pkg/lexer/cpsa.go
new file mode 100644
index 00000000..ac3b2831
--- /dev/null
+++ b/pkg/lexer/cpsa.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// CPSA lexer.
+type CPSA struct{}
+
+// Lexer returns the lexer.
+func (l CPSA) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"cpsa"},
+ Filenames: []string{"*.cpsa"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (CPSA) Name() string {
+ return heartbeat.LanguageCPSA.StringChroma()
+}
diff --git a/pkg/lexer/crmsh.go b/pkg/lexer/crmsh.go
new file mode 100644
index 00000000..289e7607
--- /dev/null
+++ b/pkg/lexer/crmsh.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Crmsh lexer.
+type Crmsh struct{}
+
+// Lexer returns the lexer.
+func (l Crmsh) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"crmsh", "pcmk"},
+ Filenames: []string{"*.crmsh", "*.pcmk"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Crmsh) Name() string {
+ return heartbeat.LanguageCrmsh.StringChroma()
+}
diff --git a/pkg/lexer/croc.go b/pkg/lexer/croc.go
new file mode 100644
index 00000000..9ec370e7
--- /dev/null
+++ b/pkg/lexer/croc.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Croc lexer.
+type Croc struct{}
+
+// Lexer returns the lexer.
+func (l Croc) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"croc"},
+ Filenames: []string{"*.croc"},
+ MimeTypes: []string{"text/x-crocsrc"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Croc) Name() string {
+ return heartbeat.LanguageCroc.StringChroma()
+}
diff --git a/pkg/lexer/crontab.go b/pkg/lexer/crontab.go
new file mode 100644
index 00000000..4e9c92ca
--- /dev/null
+++ b/pkg/lexer/crontab.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Crontab lexer.
+type Crontab struct{}
+
+// Lexer returns the lexer.
+func (l Crontab) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"crontab"},
+ Filenames: []string{"crontab"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Crontab) Name() string {
+ return heartbeat.LanguageCrontab.StringChroma()
+}
diff --git a/pkg/lexer/cryptol.go b/pkg/lexer/cryptol.go
new file mode 100644
index 00000000..7aad62d1
--- /dev/null
+++ b/pkg/lexer/cryptol.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Cryptol lexer.
+type Cryptol struct{}
+
+// Lexer returns the lexer.
+func (l Cryptol) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"cryptol", "cry"},
+ Filenames: []string{"*.cry"},
+ MimeTypes: []string{"text/x-cryptol"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Cryptol) Name() string {
+ return heartbeat.LanguageCryptol.StringChroma()
+}
diff --git a/pkg/lexer/csounddocument.go b/pkg/lexer/csounddocument.go
new file mode 100644
index 00000000..f1de5b5d
--- /dev/null
+++ b/pkg/lexer/csounddocument.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// CsoundDocument lexer.
+type CsoundDocument struct{}
+
+// Lexer returns the lexer.
+func (l CsoundDocument) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"csound-document", "csound-csd"},
+ Filenames: []string{"*.csd"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (CsoundDocument) Name() string {
+ return heartbeat.LanguageCsoundDocument.StringChroma()
+}
diff --git a/pkg/lexer/csoundorchestra.go b/pkg/lexer/csoundorchestra.go
new file mode 100644
index 00000000..8d48cd0b
--- /dev/null
+++ b/pkg/lexer/csoundorchestra.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// CsoundOrchestra lexer.
+type CsoundOrchestra struct{}
+
+// Lexer returns the lexer.
+func (l CsoundOrchestra) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"csound", "csound-orc"},
+ Filenames: []string{"*.orc", "*.udo"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (CsoundOrchestra) Name() string {
+ return heartbeat.LanguageCsoundOrchestra.StringChroma()
+}
diff --git a/pkg/lexer/csoundscore.go b/pkg/lexer/csoundscore.go
new file mode 100644
index 00000000..1afcdc51
--- /dev/null
+++ b/pkg/lexer/csoundscore.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// CsoundScore lexer.
+type CsoundScore struct{}
+
+// Lexer returns the lexer.
+func (l CsoundScore) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"csound-score", "csound-sco"},
+ Filenames: []string{"*.sco"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (CsoundScore) Name() string {
+ return heartbeat.LanguageCsoundScore.StringChroma()
+}
diff --git a/pkg/lexer/cuda.go b/pkg/lexer/cuda.go
new file mode 100644
index 00000000..307f4e02
--- /dev/null
+++ b/pkg/lexer/cuda.go
@@ -0,0 +1,44 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// CUDA lexer.
+type CUDA struct{}
+
+// Lexer returns the lexer.
+func (l CUDA) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"cuda", "cu"},
+ Filenames: []string{"*.cu", "*.cuh"},
+ MimeTypes: []string{"text/x-cuda"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ c := lexers.Get(heartbeat.LanguageC.StringChroma())
+ if c == nil {
+ return 0
+ }
+
+ return c.AnalyseText(text)
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (CUDA) Name() string {
+ return heartbeat.LanguageCUDA.StringChroma()
+}
diff --git a/pkg/lexer/cuda_test.go b/pkg/lexer/cuda_test.go
new file mode 100644
index 00000000..d0777d7c
--- /dev/null
+++ b/pkg/lexer/cuda_test.go
@@ -0,0 +1,41 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestCUDA_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "include": {
+ Filepath: "testdata/cuda_include.cu",
+ Expected: 0.1,
+ },
+ "ifdef": {
+ Filepath: "testdata/cuda_ifdef.cu",
+ Expected: 0.1,
+ },
+ "ifndef": {
+ Filepath: "testdata/cuda_ifndef.cu",
+ Expected: 0.1,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.CUDA{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/cypher.go b/pkg/lexer/cypher.go
new file mode 100644
index 00000000..a148a434
--- /dev/null
+++ b/pkg/lexer/cypher.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Cypher lexer.
+type Cypher struct{}
+
+// Lexer returns the lexer.
+func (l Cypher) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"cypher"},
+ Filenames: []string{"*.cyp", "*.cypher"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Cypher) Name() string {
+ return heartbeat.LanguageCypher.StringChroma()
+}
diff --git a/pkg/lexer/darcspatch.go b/pkg/lexer/darcspatch.go
new file mode 100644
index 00000000..e31158b7
--- /dev/null
+++ b/pkg/lexer/darcspatch.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// DarcsPatch lexer.
+type DarcsPatch struct{}
+
+// Lexer returns the lexer.
+func (l DarcsPatch) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"dpatch"},
+ Filenames: []string{"*.dpatch", "*.darcspatch"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (DarcsPatch) Name() string {
+ return heartbeat.LanguageDarcsPatch.StringChroma()
+}
diff --git a/pkg/lexer/dasm16.go b/pkg/lexer/dasm16.go
new file mode 100644
index 00000000..7fd7d826
--- /dev/null
+++ b/pkg/lexer/dasm16.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// DASM16 lexer.
+type DASM16 struct{}
+
+// Lexer returns the lexer.
+func (l DASM16) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"dasm16"},
+ Filenames: []string{"*.dasm16", "*.dasm"},
+ MimeTypes: []string{"text/x-dasm16"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (DASM16) Name() string {
+ return heartbeat.LanguageDASM16.StringChroma()
+}
diff --git a/pkg/lexer/debiancontrol.go b/pkg/lexer/debiancontrol.go
new file mode 100644
index 00000000..bc5d6587
--- /dev/null
+++ b/pkg/lexer/debiancontrol.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// DebianControlFile lexer.
+type DebianControlFile struct{}
+
+// Lexer returns the lexer.
+func (l DebianControlFile) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"control", "debcontrol"},
+ Filenames: []string{"control"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (DebianControlFile) Name() string {
+ return heartbeat.LanguageDebianControlFile.StringChroma()
+}
diff --git a/pkg/lexer/delphi.go b/pkg/lexer/delphi.go
new file mode 100644
index 00000000..04fcfbe8
--- /dev/null
+++ b/pkg/lexer/delphi.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Delphi lexer.
+type Delphi struct{}
+
+// Lexer returns the lexer.
+func (l Delphi) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"delphi", "pas", "pascal", "objectpascal"},
+ Filenames: []string{"*.pas", "*.dpr"},
+ MimeTypes: []string{"text/x-pascal"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Delphi) Name() string {
+ return heartbeat.LanguageDelphi.StringChroma()
+}
diff --git a/pkg/lexer/devicetree.go b/pkg/lexer/devicetree.go
new file mode 100644
index 00000000..0f92c149
--- /dev/null
+++ b/pkg/lexer/devicetree.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Devicetree lexer.
+type Devicetree struct{}
+
+// Lexer returns the lexer.
+func (l Devicetree) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"devicetree", "dts"},
+ Filenames: []string{"*.dts", "*.dtsi"},
+ MimeTypes: []string{"text/x-c"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Devicetree) Name() string {
+ return heartbeat.LanguageDevicetree.StringChroma()
+}
diff --git a/pkg/lexer/dg.go b/pkg/lexer/dg.go
new file mode 100644
index 00000000..e561b686
--- /dev/null
+++ b/pkg/lexer/dg.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// DG lexer.
+type DG struct{}
+
+// Lexer returns the lexer.
+func (l DG) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"dg"},
+ Filenames: []string{"*.dg"},
+ MimeTypes: []string{"text/x-dg"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (DG) Name() string {
+ return heartbeat.LanguageDG.StringChroma()
+}
diff --git a/pkg/lexer/dobjdump.go b/pkg/lexer/dobjdump.go
new file mode 100644
index 00000000..efc9de99
--- /dev/null
+++ b/pkg/lexer/dobjdump.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// DObjdump lexer.
+type DObjdump struct{}
+
+// Lexer returns the lexer.
+func (l DObjdump) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"d-objdump"},
+ Filenames: []string{"*.d-objdump"},
+ MimeTypes: []string{"text/x-d-objdump"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (DObjdump) Name() string {
+ return heartbeat.LanguageDObjdump.StringChroma()
+}
diff --git a/pkg/lexer/duel.go b/pkg/lexer/duel.go
new file mode 100644
index 00000000..779f4fe5
--- /dev/null
+++ b/pkg/lexer/duel.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Duel lexer.
+type Duel struct{}
+
+// Lexer returns the lexer.
+func (l Duel) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"duel", "jbst", "jsonml+bst"},
+ Filenames: []string{"*.duel", "*.jbst"},
+ MimeTypes: []string{"text/x-duel", "text/x-jbst"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Duel) Name() string {
+ return heartbeat.LanguageDuel.StringChroma()
+}
diff --git a/pkg/lexer/dylanlid.go b/pkg/lexer/dylanlid.go
new file mode 100644
index 00000000..b9fce40e
--- /dev/null
+++ b/pkg/lexer/dylanlid.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// DylanLID lexer.
+type DylanLID struct{}
+
+// Lexer returns the lexer.
+func (l DylanLID) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"dylan-lid", "lid"},
+ Filenames: []string{"*.lid", "*.hdp"},
+ MimeTypes: []string{"text/x-dylan-lid"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (DylanLID) Name() string {
+ return heartbeat.LanguageDylanLID.StringChroma()
+}
diff --git a/pkg/lexer/dylansession.go b/pkg/lexer/dylansession.go
new file mode 100644
index 00000000..e2b27875
--- /dev/null
+++ b/pkg/lexer/dylansession.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// DylanSession lexer.
+type DylanSession struct{}
+
+// Lexer returns the lexer.
+func (l DylanSession) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"dylan-console", "dylan-repl"},
+ Filenames: []string{"*.dylan-console"},
+ MimeTypes: []string{"text/x-dylan-console"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (DylanSession) Name() string {
+ return heartbeat.LanguageDylanSession.StringChroma()
+}
diff --git a/pkg/lexer/earlgrey.go b/pkg/lexer/earlgrey.go
new file mode 100644
index 00000000..6d5c0003
--- /dev/null
+++ b/pkg/lexer/earlgrey.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// EarlGrey lexer.
+type EarlGrey struct{}
+
+// Lexer returns the lexer.
+func (l EarlGrey) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"earl-grey", "earlgrey", "eg"},
+ Filenames: []string{"*.eg"},
+ MimeTypes: []string{"text/x-earl-grey"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (EarlGrey) Name() string {
+ return heartbeat.LanguageEarlGrey.StringChroma()
+}
diff --git a/pkg/lexer/easytrieve.go b/pkg/lexer/easytrieve.go
new file mode 100644
index 00000000..b6febb3c
--- /dev/null
+++ b/pkg/lexer/easytrieve.go
@@ -0,0 +1,157 @@
+package lexer
+
+import (
+ "regexp"
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var (
+ easytrieveAnalyserCommentLineRe = regexp.MustCompile(`^\s*\*`)
+ easytrieveAnalyserMacroHeaderRe = regexp.MustCompile(`\s*MACRO`)
+)
+
+// Easytrieve lexer.
+type Easytrieve struct{}
+
+// Lexer returns the lexer.
+// nolint: gocyclo
+func (l Easytrieve) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"easytrieve"},
+ Filenames: []string{"*.ezt", "*.mac"},
+ MimeTypes: []string{"text/x-easytrieve"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // Perform a structural analysis for basic Easytrieve constructs.
+ var (
+ result float32
+ hasEndProc bool
+ hasHeaderComment bool
+ hasFile bool
+ hasJob bool
+ hasProc bool
+ hasParm bool
+ hasReport bool
+ )
+
+ lines := strings.Split(text, "\n")
+
+ // Remove possible empty lines and header comments.
+ for range lines {
+ if len(lines) == 0 {
+ break
+ }
+
+ if len(strings.TrimSpace(lines[0])) > 0 && !easytrieveAnalyserCommentLineRe.MatchString(lines[0]) {
+ break
+ }
+
+ if easytrieveAnalyserCommentLineRe.MatchString(text) {
+ hasHeaderComment = true
+ }
+
+ lines = lines[1:]
+ }
+
+ if len(lines) > 0 && easytrieveAnalyserMacroHeaderRe.MatchString(lines[0]) {
+ // Looks like an Easytrieve macro.
+ result += 0.4
+
+ if hasHeaderComment {
+ result += 0.4
+ }
+
+ return result
+ }
+
+ // Scan the source for lines starting with indicators.
+ for _, line := range lines {
+ splitted := strings.Fields(line)
+
+ if len(splitted) < 2 {
+ continue
+ }
+
+ if !hasReport && !hasJob && !hasFile && !hasParm && splitted[0] == "PARM" {
+ hasParm = true
+ }
+
+ if !hasReport && !hasJob && !hasFile && splitted[0] == "FILE" {
+ hasFile = true
+ }
+
+ if !hasReport && !hasJob && splitted[0] == "JOB" {
+ hasJob = true
+ }
+
+ if !hasReport && splitted[0] == "PROC" {
+ hasProc = true
+ continue
+ }
+
+ if !hasReport && splitted[0] == "END-PROC" {
+ hasEndProc = true
+ continue
+ }
+
+ if !hasReport && splitted[0] == "REPORT" {
+ hasReport = true
+ }
+ }
+
+ // Weight the findings.
+ if hasJob && hasProc == hasEndProc && hasHeaderComment {
+ result += 0.1
+ }
+
+ if hasJob && hasProc == hasEndProc && hasParm && hasProc {
+ // Found PARM, JOB and PROC/END-PROC:
+ // pretty sure this is Easytrieve.
+ result += 0.8
+
+ return result
+ }
+
+ if hasJob && hasProc == hasEndProc && hasParm && !hasProc {
+ // Found PARAM and JOB: probably this is Easytrieve.
+ result += 0.5
+
+ return result
+ }
+
+ if hasJob && hasProc == hasEndProc && !hasParm {
+ // Found JOB and possibly other keywords: might be Easytrieve.
+ result += 0.11
+ }
+
+ if hasJob && hasProc == hasEndProc && !hasParm && hasFile {
+ result += 0.01
+ }
+
+ if hasJob && hasProc == hasEndProc && !hasParm && hasReport {
+ result += 0.01
+ }
+
+ return result
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (Easytrieve) Name() string {
+ return heartbeat.LanguageEasytrieve.StringChroma()
+}
diff --git a/pkg/lexer/easytrieve_test.go b/pkg/lexer/easytrieve_test.go
new file mode 100644
index 00000000..43744b67
--- /dev/null
+++ b/pkg/lexer/easytrieve_test.go
@@ -0,0 +1,37 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestEasytrieve_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "basic": {
+ Filepath: "testdata/easytrieve_basic.ezt",
+ Expected: 0.6,
+ },
+ "macro": {
+ Filepath: "testdata/easytrieve_macro.mac",
+ Expected: 0.8,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.Easytrieve{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/ec.go b/pkg/lexer/ec.go
new file mode 100644
index 00000000..6de0eca3
--- /dev/null
+++ b/pkg/lexer/ec.go
@@ -0,0 +1,44 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// EC lexer.
+type EC struct{}
+
+// Lexer returns the lexer.
+func (l EC) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"ec"},
+ Filenames: []string{"*.ec", "*.eh"},
+ MimeTypes: []string{"text/x-echdr", "text/x-ecsrc"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ c := lexers.Get(heartbeat.LanguageC.StringChroma())
+ if c == nil {
+ return 0
+ }
+
+ return c.AnalyseText(text)
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (EC) Name() string {
+ return heartbeat.LanguageEC.StringChroma()
+}
diff --git a/pkg/lexer/ec_test.go b/pkg/lexer/ec_test.go
new file mode 100644
index 00000000..4128534e
--- /dev/null
+++ b/pkg/lexer/ec_test.go
@@ -0,0 +1,41 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestEC_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "include": {
+ Filepath: "testdata/ec_include.ec",
+ Expected: 0.1,
+ },
+ "ifdef": {
+ Filepath: "testdata/ec_ifdef.ec",
+ Expected: 0.1,
+ },
+ "ifndef": {
+ Filepath: "testdata/ec_ifndef.ec",
+ Expected: 0.1,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.EC{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/ecl.go b/pkg/lexer/ecl.go
new file mode 100644
index 00000000..64fab649
--- /dev/null
+++ b/pkg/lexer/ecl.go
@@ -0,0 +1,57 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// ECL lexer.
+type ECL struct{}
+
+// Lexer returns the lexer.
+func (l ECL) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"ecl"},
+ Filenames: []string{"*.ecl"},
+ MimeTypes: []string{"application/x-ecl"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // This is very difficult to guess relative to other business languages.
+ // -> in conjunction with BEGIN/END seems relatively rare though.
+
+ var result float32
+
+ if strings.Contains(text, "->") {
+ result += 0.01
+ }
+
+ if strings.Contains(text, "BEGIN") {
+ result += 0.01
+ }
+
+ if strings.Contains(text, "END") {
+ result += 0.01
+ }
+
+ return result
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (ECL) Name() string {
+ return heartbeat.LanguageECL.StringChroma()
+}
diff --git a/pkg/lexer/ecl_test.go b/pkg/lexer/ecl_test.go
new file mode 100644
index 00000000..906366e8
--- /dev/null
+++ b/pkg/lexer/ecl_test.go
@@ -0,0 +1,37 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestECL_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "basic": {
+ Filepath: "testdata/ecl_basic.ecl",
+ Expected: 0.02,
+ },
+ "pass variable": {
+ Filepath: "testdata/ecl_pass_var.ecl",
+ Expected: 0.01,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.ECL{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/eiffel.go b/pkg/lexer/eiffel.go
new file mode 100644
index 00000000..d5d64ee2
--- /dev/null
+++ b/pkg/lexer/eiffel.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Eiffel lexer.
+type Eiffel struct{}
+
+// Lexer returns the lexer.
+func (l Eiffel) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"eiffel"},
+ Filenames: []string{"*.e"},
+ MimeTypes: []string{"text/x-eiffel"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Eiffel) Name() string {
+ return heartbeat.LanguageEiffel.StringChroma()
+}
diff --git a/pkg/lexer/elixiriexsession.go b/pkg/lexer/elixiriexsession.go
new file mode 100644
index 00000000..d8076ef7
--- /dev/null
+++ b/pkg/lexer/elixiriexsession.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// ElixirIexSsession lexer.
+type ElixirIexSsession struct{}
+
+// Lexer returns the lexer.
+func (l ElixirIexSsession) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"iex"},
+ MimeTypes: []string{"text/x-elixir-shellsession"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (ElixirIexSsession) Name() string {
+ return heartbeat.LanguageElixirIexSession.StringChroma()
+}
diff --git a/pkg/lexer/email.go b/pkg/lexer/email.go
new file mode 100644
index 00000000..dfadfe9f
--- /dev/null
+++ b/pkg/lexer/email.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// EMail lexer.
+type EMail struct{}
+
+// Lexer returns the lexer.
+func (l EMail) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"email", "eml"},
+ Filenames: []string{"*.eml"},
+ MimeTypes: []string{"message/rfc822"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (EMail) Name() string {
+ return heartbeat.LanguageEMail.StringChroma()
+}
diff --git a/pkg/lexer/erb.go b/pkg/lexer/erb.go
new file mode 100644
index 00000000..b5e4baaf
--- /dev/null
+++ b/pkg/lexer/erb.go
@@ -0,0 +1,43 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// ERB lexer.
+type ERB struct{}
+
+// Lexer returns the lexer.
+func (l ERB) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"erb"},
+ MimeTypes: []string{"application/x-ruby-templating"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ if strings.Contains(text, "<%") && strings.Contains(text, "%>") {
+ return 0.4
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (ERB) Name() string {
+ return heartbeat.LanguageERB.StringChroma()
+}
diff --git a/pkg/lexer/erb_test.go b/pkg/lexer/erb_test.go
new file mode 100644
index 00000000..ccaf4619
--- /dev/null
+++ b/pkg/lexer/erb_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestERB_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/erb_basic.erb")
+ assert.NoError(t, err)
+
+ l := lexer.ERB{}.Lexer()
+
+ assert.Equal(t, float32(0.4), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/erlangerlsession.go b/pkg/lexer/erlangerlsession.go
new file mode 100644
index 00000000..0610e399
--- /dev/null
+++ b/pkg/lexer/erlangerlsession.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// ErlangErlSession lexer.
+type ErlangErlSession struct{}
+
+// Lexer returns the lexer.
+func (l ErlangErlSession) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"erl"},
+ Filenames: []string{"*.erl-sh"},
+ MimeTypes: []string{"text/x-erl-shellsession"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (ErlangErlSession) Name() string {
+ return heartbeat.LanguageErlangErlSession.StringChroma()
+}
diff --git a/pkg/lexer/evoque.go b/pkg/lexer/evoque.go
new file mode 100644
index 00000000..4f671466
--- /dev/null
+++ b/pkg/lexer/evoque.go
@@ -0,0 +1,45 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Evoque lexer.
+type Evoque struct{}
+
+// Lexer returns the lexer.
+func (l Evoque) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"evoque"},
+ Filenames: []string{"*.evoque"},
+ MimeTypes: []string{"application/x-evoque"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // Evoque templates use $evoque, which is unique.
+ if strings.Contains(text, "$evoque") {
+ return 1.0
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (Evoque) Name() string {
+ return heartbeat.LanguageEvoque.StringChroma()
+}
diff --git a/pkg/lexer/evoque_test.go b/pkg/lexer/evoque_test.go
new file mode 100644
index 00000000..7412d838
--- /dev/null
+++ b/pkg/lexer/evoque_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestEvoque_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/evoque_basic.evoque")
+ assert.NoError(t, err)
+
+ l := lexer.Evoque{}.Lexer()
+
+ assert.Equal(t, float32(1.0), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/execline.go b/pkg/lexer/execline.go
new file mode 100644
index 00000000..96e20dd9
--- /dev/null
+++ b/pkg/lexer/execline.go
@@ -0,0 +1,42 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+ "github.com/wakatime/wakatime-cli/pkg/shebang"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Execline lexer.
+type Execline struct{}
+
+// Lexer returns the lexer.
+func (l Execline) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"execline"},
+ Filenames: []string{"*.exec"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ if matched, _ := shebang.MatchString(text, "execlineb"); matched {
+ return 1.0
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (Execline) Name() string {
+ return heartbeat.LanguageExecline.StringChroma()
+}
diff --git a/pkg/lexer/execline_test.go b/pkg/lexer/execline_test.go
new file mode 100644
index 00000000..d35f7d70
--- /dev/null
+++ b/pkg/lexer/execline_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestExecline_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/execline_shebang.exec")
+ assert.NoError(t, err)
+
+ l := lexer.Execline{}.Lexer()
+
+ assert.Equal(t, float32(1.0), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/ezhil.go b/pkg/lexer/ezhil.go
new file mode 100644
index 00000000..35845017
--- /dev/null
+++ b/pkg/lexer/ezhil.go
@@ -0,0 +1,50 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var ezhilAnalyserRe = regexp.MustCompile(`[\x{0b80}-\x{0bff}]`)
+
+// Ezhil lexer.
+type Ezhil struct{}
+
+// Lexer returns the lexer.
+func (l Ezhil) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"ezhil"},
+ Filenames: []string{"*.n"},
+ MimeTypes: []string{"text/x-ezhil"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // this language uses Tamil-script. We'll assume that if there's a
+ // decent amount of Tamil-characters, it's this language. This assumption
+ // is obviously horribly off if someone uses string literals in tamil
+ // in another language.
+ if len(ezhilAnalyserRe.FindAllString(text, -1)) > 10 {
+ return 0.25
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (Ezhil) Name() string {
+ return heartbeat.LanguageEzhil.StringChroma()
+}
diff --git a/pkg/lexer/ezhil_test.go b/pkg/lexer/ezhil_test.go
new file mode 100644
index 00000000..96b55a95
--- /dev/null
+++ b/pkg/lexer/ezhil_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestEzhil_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/ezhil_basic.n")
+ assert.NoError(t, err)
+
+ l := lexer.Ezhil{}.Lexer()
+
+ assert.Equal(t, float32(0.25), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/fancy.go b/pkg/lexer/fancy.go
new file mode 100644
index 00000000..c459e89d
--- /dev/null
+++ b/pkg/lexer/fancy.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Fancy lexer.
+type Fancy struct{}
+
+// Lexer returns the lexer.
+func (l Fancy) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"fancy", "fy"},
+ Filenames: []string{"*.fy", "*.fancypack"},
+ MimeTypes: []string{"text/x-fancysrc"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Fancy) Name() string {
+ return heartbeat.LanguageFancy.StringChroma()
+}
diff --git a/pkg/lexer/fantom.go b/pkg/lexer/fantom.go
new file mode 100644
index 00000000..e1b41826
--- /dev/null
+++ b/pkg/lexer/fantom.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Fantom lexer.
+type Fantom struct{}
+
+// Lexer returns the lexer.
+func (l Fantom) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"fan"},
+ Filenames: []string{"*.fan"},
+ MimeTypes: []string{"application/x-fantom"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Fantom) Name() string {
+ return heartbeat.LanguageFantom.StringChroma()
+}
diff --git a/pkg/lexer/felix.go b/pkg/lexer/felix.go
new file mode 100644
index 00000000..668a93c0
--- /dev/null
+++ b/pkg/lexer/felix.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Felix lexer.
+type Felix struct{}
+
+// Lexer returns the lexer.
+func (l Felix) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"felix", "flx"},
+ Filenames: []string{"*.flx", "*.flxh"},
+ MimeTypes: []string{"text/x-felix"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Felix) Name() string {
+ return heartbeat.LanguageFelix.StringChroma()
+}
diff --git a/pkg/lexer/flatline.go b/pkg/lexer/flatline.go
new file mode 100644
index 00000000..f7683496
--- /dev/null
+++ b/pkg/lexer/flatline.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Flatline lexer.
+type Flatline struct{}
+
+// Lexer returns the lexer.
+func (l Flatline) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"flatline"},
+ MimeTypes: []string{"text/x-flatline"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Flatline) Name() string {
+ return heartbeat.LanguageFlatline.StringChroma()
+}
diff --git a/pkg/lexer/floscript.go b/pkg/lexer/floscript.go
new file mode 100644
index 00000000..f41a742e
--- /dev/null
+++ b/pkg/lexer/floscript.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// FloScript lexer.
+type FloScript struct{}
+
+// Lexer returns the lexer.
+func (l FloScript) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"floscript", "flo"},
+ Filenames: []string{"*.flo"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (FloScript) Name() string {
+ return heartbeat.LanguageFloScript.StringChroma()
+}
diff --git a/pkg/lexer/forth.go b/pkg/lexer/forth.go
new file mode 100644
index 00000000..77a80898
--- /dev/null
+++ b/pkg/lexer/forth.go
@@ -0,0 +1,48 @@
+package lexer
+
+import (
+ "regexp"
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// nolint:gochecknoglobals
+var forthAnalyserRe = regexp.MustCompile(`\n:[^\n]+;\n`)
+
+// Forth lexer.
+type Forth struct{}
+
+// Lexer returns the lexer.
+func (l Forth) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ text = strings.ReplaceAll(text, "\r\n", "\n")
+
+ // Forth uses : COMMAND ; quite a lot in a single line, so we're trying
+ // to find that.
+ if forthAnalyserRe.MatchString(text) {
+ return 0.3
+ }
+
+ return 0
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (Forth) Name() string {
+ return heartbeat.LanguageForth.StringChroma()
+}
diff --git a/pkg/lexer/forth_test.go b/pkg/lexer/forth_test.go
new file mode 100644
index 00000000..a3bbc8da
--- /dev/null
+++ b/pkg/lexer/forth_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestForth_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/forth_command.frt")
+ assert.NoError(t, err)
+
+ l := lexer.Forth{}.Lexer()
+
+ assert.Equal(t, float32(0.3), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/foxpro.go b/pkg/lexer/foxpro.go
new file mode 100644
index 00000000..b2c216b6
--- /dev/null
+++ b/pkg/lexer/foxpro.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// FoxPro lexer.
+type FoxPro struct{}
+
+// Lexer returns the lexer.
+func (l FoxPro) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"foxpro", "vfp", "clipper", "xbase"},
+ Filenames: []string{"*.PRG", "*.prg"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (FoxPro) Name() string {
+ return heartbeat.LanguageFoxPro.StringChroma()
+}
diff --git a/pkg/lexer/freefem.go b/pkg/lexer/freefem.go
new file mode 100644
index 00000000..090b6ec2
--- /dev/null
+++ b/pkg/lexer/freefem.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Freefem lexer.
+type Freefem struct{}
+
+// Lexer returns the lexer.
+func (l Freefem) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"freefem"},
+ Filenames: []string{"*.edp"},
+ MimeTypes: []string{"text/x-freefem"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Freefem) Name() string {
+ return heartbeat.LanguageFreefem.StringChroma()
+}
diff --git a/pkg/lexer/fsharp.go b/pkg/lexer/fsharp.go
new file mode 100644
index 00000000..f9110f09
--- /dev/null
+++ b/pkg/lexer/fsharp.go
@@ -0,0 +1,48 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// FSharp lexer.
+type FSharp struct{}
+
+// Lexer returns the lexer.
+func (l FSharp) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ // F# doesn't have that many unique features -- |> and <| are weak
+ // indicators.
+ var result float32
+
+ if strings.Contains(text, "|>") {
+ result += 0.05
+ }
+
+ if strings.Contains(text, "<|") {
+ result += 0.05
+ }
+
+ return result
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (FSharp) Name() string {
+ return heartbeat.LanguageFSharp.StringChroma()
+}
diff --git a/pkg/lexer/fsharp_test.go b/pkg/lexer/fsharp_test.go
new file mode 100644
index 00000000..1a8ae083
--- /dev/null
+++ b/pkg/lexer/fsharp_test.go
@@ -0,0 +1,41 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestFSharp_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "pipeline operator": {
+ Filepath: "testdata/fsharp_pipeline_operator.fs",
+ Expected: 0.1,
+ },
+ "forward pipeline operator": {
+ Filepath: "testdata/fsharp_forward_pipeline_operator.fs",
+ Expected: 0.05,
+ },
+ "backward pipeline operator": {
+ Filepath: "testdata/fsharp_backward_pipeline_operator.fs",
+ Expected: 0.05,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.FSharp{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/fstar.go b/pkg/lexer/fstar.go
new file mode 100644
index 00000000..eb4d793e
--- /dev/null
+++ b/pkg/lexer/fstar.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// FStar lexer.
+type FStar struct{}
+
+// Lexer returns the lexer.
+func (l FStar) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"fstar"},
+ Filenames: []string{"*.fst", "*.fsti"},
+ MimeTypes: []string{"text/x-fstar"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (FStar) Name() string {
+ return heartbeat.LanguageFStar.StringChroma()
+}
diff --git a/pkg/lexer/gap.go b/pkg/lexer/gap.go
new file mode 100644
index 00000000..3dd9c444
--- /dev/null
+++ b/pkg/lexer/gap.go
@@ -0,0 +1,58 @@
+package lexer
+
+import (
+ "math"
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var (
+ gapAnalyserDeclarationRe = regexp.MustCompile(
+ `(InstallTrueMethod|Declare(Attribute|Category|Filter|Operation|GlobalFunction|Synonym|SynonymAttr|Property))`)
+ gapAnalyserImplementationRe = regexp.MustCompile(
+ `(DeclareRepresentation|Install(GlobalFunction|Method|ImmediateMethod|OtherMethod)|New(Family|Type)|Objectify)`)
+)
+
+// Gap lexer.
+type Gap struct{}
+
+// Lexer returns the lexer.
+func (l Gap) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"gap"},
+ Filenames: []string{"*.g", "*.gd", "*.gi", "*.gap"},
+ MimeTypes: []string{},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ var result float64
+
+ if gapAnalyserDeclarationRe.MatchString(text) {
+ result += 0.7
+ }
+
+ if gapAnalyserImplementationRe.MatchString(text) {
+ result += 0.7
+ }
+
+ return float32(math.Min(result, float64(1.0)))
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (Gap) Name() string {
+ return heartbeat.LanguageGap.StringChroma()
+}
diff --git a/pkg/lexer/gap_test.go b/pkg/lexer/gap_test.go
new file mode 100644
index 00000000..04719648
--- /dev/null
+++ b/pkg/lexer/gap_test.go
@@ -0,0 +1,37 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestGap_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "declaration": {
+ Filepath: "testdata/gap_declaration.g",
+ Expected: 0.7,
+ },
+ "implementation": {
+ Filepath: "testdata/gap_implementation.g",
+ Expected: 0.7,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.Gap{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/gas.go b/pkg/lexer/gas.go
new file mode 100644
index 00000000..83f3ba6f
--- /dev/null
+++ b/pkg/lexer/gas.go
@@ -0,0 +1,49 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+var (
+ // gasAnalyserDirectiveRe matches the common section directives, a strong Gas signal.
+ gasAnalyserDirectiveRe = regexp.MustCompile(`(?m)^\.(text|data|section)`)
+ gasAnalyserOtherDirectiveRe = regexp.MustCompile(`(?m)^\.\w+`)
+)
+
+// Gas lexer.
+type Gas struct{}
+
+// Lexer returns the lexer.
+func (l Gas) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ if gasAnalyserDirectiveRe.MatchString(text) {
+ return 1.0
+ }
+
+ if gasAnalyserOtherDirectiveRe.MatchString(text) {
+ return 0.1
+ }
+
+ return 0
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (Gas) Name() string {
+ return heartbeat.LanguageGas.StringChroma()
+}
diff --git a/pkg/lexer/gas_test.go b/pkg/lexer/gas_test.go
new file mode 100644
index 00000000..acf3bb81
--- /dev/null
+++ b/pkg/lexer/gas_test.go
@@ -0,0 +1,37 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestGas_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "data directive": {
+ Filepath: "testdata/gas_data_directive.S",
+ Expected: 1.0,
+ },
+ "other directive": {
+ Filepath: "testdata/gas_other_directive.S",
+ Expected: 0.1,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.Gas{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/gdscript.go b/pkg/lexer/gdscript.go
new file mode 100644
index 00000000..6a0f685c
--- /dev/null
+++ b/pkg/lexer/gdscript.go
@@ -0,0 +1,57 @@
+package lexer
+
+import (
+ "math"
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+var (
+ gdscriptAnalyserFuncRe = regexp.MustCompile(`func (_ready|_init|_input|_process|_unhandled_input)`)
+ gdscriptAnalyserKeywordRe = regexp.MustCompile(`(extends |class_name |onready |preload|load|setget|func [^_])`)
+ gdscriptAnalyserKeyword2Re = regexp.MustCompile(`(var|const|enum|export|signal|tool)`)
+)
+
+// GDScript lexer.
+type GDScript struct{}
+
+// Lexer returns the lexer.
+func (l GDScript) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ var result float64
+
+ if gdscriptAnalyserFuncRe.MatchString(text) {
+ result += 0.8
+ }
+
+ if gdscriptAnalyserKeywordRe.MatchString(text) {
+ result += 0.4
+ }
+
+ if gdscriptAnalyserKeyword2Re.MatchString(text) {
+ result += 0.2
+ }
+
+ return float32(math.Min(result, float64(1.0)))
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (GDScript) Name() string {
+ return heartbeat.LanguageGDScript.StringChroma()
+}
diff --git a/pkg/lexer/gdscript_test.go b/pkg/lexer/gdscript_test.go
new file mode 100644
index 00000000..44dc9b5d
--- /dev/null
+++ b/pkg/lexer/gdscript_test.go
@@ -0,0 +1,45 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestGDScript_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "func": {
+ Filepath: "testdata/gdscript_func.gd",
+ Expected: 0.8,
+ },
+ "keyword first group": {
+ Filepath: "testdata/gdscript_keyword.gd",
+ Expected: 0.4,
+ },
+ "keyword second group": {
+ Filepath: "testdata/gdscript_keyword2.gd",
+ Expected: 0.2,
+ },
+ "full": {
+ Filepath: "testdata/gdscript_full.gd",
+ Expected: 1.0,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.GDScript{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/gettext.go b/pkg/lexer/gettext.go
new file mode 100644
index 00000000..f77fb5fe
--- /dev/null
+++ b/pkg/lexer/gettext.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// GettextCatalog lexer.
+type GettextCatalog struct{}
+
+// Lexer returns the lexer.
+func (l GettextCatalog) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"pot", "po"},
+ Filenames: []string{"*.pot", "*.po"},
+ MimeTypes: []string{"application/x-gettext", "text/x-gettext", "text/gettext"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (GettextCatalog) Name() string {
+ return heartbeat.LanguageGettextCatalog.StringChroma()
+}
diff --git a/pkg/lexer/golo.go b/pkg/lexer/golo.go
new file mode 100644
index 00000000..e361885b
--- /dev/null
+++ b/pkg/lexer/golo.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Golo lexer.
+type Golo struct{}
+
+// Lexer returns the lexer.
+func (l Golo) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"golo"},
+ Filenames: []string{"*.golo"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Golo) Name() string {
+ return heartbeat.LanguageGolo.StringChroma()
+}
diff --git a/pkg/lexer/gooddatacl.go b/pkg/lexer/gooddatacl.go
new file mode 100644
index 00000000..608fe342
--- /dev/null
+++ b/pkg/lexer/gooddatacl.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// GoodDataCL lexer.
+type GoodDataCL struct{}
+
+// Lexer returns the lexer.
+func (l GoodDataCL) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"gooddata-cl"},
+ Filenames: []string{"*.gdc"},
+ MimeTypes: []string{"text/x-gooddata-cl"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (GoodDataCL) Name() string {
+ return heartbeat.LanguageGoodDataCL.StringChroma()
+}
diff --git a/pkg/lexer/gosu.go b/pkg/lexer/gosu.go
new file mode 100644
index 00000000..5f7e0e38
--- /dev/null
+++ b/pkg/lexer/gosu.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Gosu lexer.
+type Gosu struct{}
+
+// Lexer returns the lexer.
+func (l Gosu) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"gosu"},
+ Filenames: []string{"*.gs", "*.gsx", "*.gsp", "*.vark"},
+ MimeTypes: []string{"text/x-gosu"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Gosu) Name() string {
+ return heartbeat.LanguageGosu.StringChroma()
+}
diff --git a/pkg/lexer/gosutemplate.go b/pkg/lexer/gosutemplate.go
new file mode 100644
index 00000000..db1b1e67
--- /dev/null
+++ b/pkg/lexer/gosutemplate.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// GosuTemplate lexer.
+type GosuTemplate struct{}
+
+// Lexer returns the lexer.
+func (l GosuTemplate) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"gst"},
+ Filenames: []string{"*.gst"},
+ MimeTypes: []string{"text/x-gosu-template"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (GosuTemplate) Name() string {
+ return heartbeat.LanguageGosuTemplate.StringChroma()
+}
diff --git a/pkg/lexer/groff.go b/pkg/lexer/groff.go
new file mode 100644
index 00000000..6ee152b7
--- /dev/null
+++ b/pkg/lexer/groff.go
@@ -0,0 +1,73 @@
+package lexer
+
+import (
+ "regexp"
+ "unicode"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// nolint:gochecknoglobals
+var groffAlphanumericRe = regexp.MustCompile(`^[a-zA-Z0-9]+$`)
+
+// Groff lexer.
+type Groff struct{}
+
+// Lexer returns the lexer.
+func (l Groff) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ var (
+ ok bool
+ rgxlexer *chroma.RegexLexer
+ )
+
+ if rgxlexer, ok = lexer.(*chroma.RegexLexer); !ok {
+ return nil
+ }
+
+ rgxlexer.SetAnalyser(func(text string) float32 {
+ if len(text) <= 1 {
+ return 0
+ }
+
+ if text[:1] != "." {
+ return 0
+ }
+
+ if len(text) < 3 { // need 3 bytes for the comment-marker check; <= wrongly rejected an exact `.\"`
+ return 0
+ }
+
+ if text[:3] == `.\"` {
+ return 1.0
+ }
+
+ if len(text) < 4 { // need 4 bytes for ".TH " and the macro check; also guarantees text[3] below is safe
+ return 0
+ }
+
+ if text[:4] == ".TH " {
+ return 1.0
+ }
+
+ if groffAlphanumericRe.MatchString(text[1:3]) && unicode.IsSpace(rune(text[3])) {
+ return 0.9
+ }
+
+ return 0
+ })
+
+ return rgxlexer
+}
+
+// Name returns the name of the lexer.
+func (Groff) Name() string {
+ return heartbeat.LanguageGroff.StringChroma()
+}
diff --git a/pkg/lexer/groff_test.go b/pkg/lexer/groff_test.go
new file mode 100644
index 00000000..68793c67
--- /dev/null
+++ b/pkg/lexer/groff_test.go
@@ -0,0 +1,41 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestGroff_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "comment": {
+ Filepath: "testdata/groff_comment.man",
+ Expected: 1.0,
+ },
+ "title head": {
+ Filepath: "testdata/groff_title_head.man",
+ Expected: 1.0,
+ },
+ "macro": {
+ Filepath: "testdata/groff_macro.man",
+ Expected: 0.9,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.Groff{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/haml.go b/pkg/lexer/haml.go
new file mode 100644
index 00000000..005f6d3c
--- /dev/null
+++ b/pkg/lexer/haml.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Haml lexer.
+type Haml struct{}
+
+// Lexer returns the lexer.
+func (l Haml) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"haml"},
+ Filenames: []string{"*.haml"},
+ MimeTypes: []string{"text/x-haml"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Haml) Name() string {
+ return heartbeat.LanguageHaml.StringChroma()
+}
diff --git a/pkg/lexer/hsail.go b/pkg/lexer/hsail.go
new file mode 100644
index 00000000..fcfd14ea
--- /dev/null
+++ b/pkg/lexer/hsail.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// HSAIL lexer.
+type HSAIL struct{}
+
+// Lexer returns the lexer.
+func (l HSAIL) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"hsail", "hsa"},
+ Filenames: []string{"*.hsail"},
+ MimeTypes: []string{"text/x-hsail"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (HSAIL) Name() string {
+ return heartbeat.LanguageHSAIL.StringChroma()
+}
diff --git a/pkg/lexer/hspec.go b/pkg/lexer/hspec.go
new file mode 100644
index 00000000..27050080
--- /dev/null
+++ b/pkg/lexer/hspec.go
@@ -0,0 +1,30 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Hspec lexer.
+type Hspec struct{}
+
+// Lexer returns the lexer.
+func (l Hspec) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"hspec"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Hspec) Name() string {
+ return heartbeat.LanguageHspec.StringChroma()
+}
diff --git a/pkg/lexer/html.go b/pkg/lexer/html.go
new file mode 100644
index 00000000..e70af6d6
--- /dev/null
+++ b/pkg/lexer/html.go
@@ -0,0 +1,39 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/doctype"
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// HTML lexer.
+type HTML struct{}
+
+// Lexer returns the lexer.
+func (l HTML) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ if matched, _ := doctype.MatchString(text, "html"); matched {
+ return 0.5
+ }
+
+ return 0
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (HTML) Name() string {
+ return heartbeat.LanguageHTML.StringChroma()
+}
diff --git a/pkg/lexer/html_test.go b/pkg/lexer/html_test.go
new file mode 100644
index 00000000..0a698aad
--- /dev/null
+++ b/pkg/lexer/html_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestHTML_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/html_doctype.html")
+ assert.NoError(t, err)
+
+ l := lexer.HTML{}.Lexer()
+
+ assert.Equal(t, float32(0.5), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/http.go b/pkg/lexer/http.go
new file mode 100644
index 00000000..19c6e12e
--- /dev/null
+++ b/pkg/lexer/http.go
@@ -0,0 +1,36 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// HTTP lexer.
+type HTTP struct{}
+
+// Lexer returns the lexer.
+func (HTTP) Lexer() chroma.Lexer {
+ return lexers.HTTP.SetAnalyser(func(text string) float32 {
+ if strings.HasPrefix(text, "GET") ||
+ strings.HasPrefix(text, "POST") ||
+ strings.HasPrefix(text, "PUT") ||
+ strings.HasPrefix(text, "DELETE") ||
+ strings.HasPrefix(text, "HEAD") ||
+ strings.HasPrefix(text, "OPTIONS") ||
+ strings.HasPrefix(text, "TRACE") ||
+ strings.HasPrefix(text, "PATCH") {
+ return 1.0
+ }
+
+ return 0
+ })
+}
+
+// Name returns the name of the lexer.
+func (HTTP) Name() string {
+ return heartbeat.LanguageHTTP.StringChroma()
+}
diff --git a/pkg/lexer/http_test.go b/pkg/lexer/http_test.go
new file mode 100644
index 00000000..08f30871
--- /dev/null
+++ b/pkg/lexer/http_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestHTTP_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/http_request.http")
+ assert.NoError(t, err)
+
+ l := lexer.HTTP{}.Lexer()
+
+ assert.Equal(t, float32(1.0), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/hxml.go b/pkg/lexer/hxml.go
new file mode 100644
index 00000000..18da24b0
--- /dev/null
+++ b/pkg/lexer/hxml.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Hxml lexer.
+type Hxml struct{}
+
+// Lexer returns the lexer.
+func (l Hxml) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"haxeml", "hxml"},
+ Filenames: []string{"*.hxml"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Hxml) Name() string {
+ return heartbeat.LanguageHxml.StringChroma()
+}
diff --git a/pkg/lexer/hy.go b/pkg/lexer/hy.go
new file mode 100644
index 00000000..e612e148
--- /dev/null
+++ b/pkg/lexer/hy.go
@@ -0,0 +1,40 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// Hy lexer.
+type Hy struct{}
+
+// Lexer returns the lexer.
+func (l Hy) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ if strings.Contains(text, "(import ") || strings.Contains(text, "(defn ") {
+ return 0.9
+ }
+
+ return 0
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (Hy) Name() string {
+ return heartbeat.LanguageHy.StringChroma()
+}
diff --git a/pkg/lexer/hy_test.go b/pkg/lexer/hy_test.go
new file mode 100644
index 00000000..f93dcedd
--- /dev/null
+++ b/pkg/lexer/hy_test.go
@@ -0,0 +1,37 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestHy_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "import": {
+ Filepath: "testdata/hy_import.hy",
+ Expected: 0.9,
+ },
+ "defn": {
+ Filepath: "testdata/hy_defn.hy",
+ Expected: 0.9,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.Hy{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/hybris.go b/pkg/lexer/hybris.go
new file mode 100644
index 00000000..0b610145
--- /dev/null
+++ b/pkg/lexer/hybris.go
@@ -0,0 +1,48 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var hybrisAnalyserRe = regexp.MustCompile(`\b(?:public|private)\s+method\b`)
+
+// Hybris lexer.
+type Hybris struct{}
+
+// Lexer returns the lexer.
+func (l Hybris) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"hybris", "hy"},
+ Filenames: []string{"*.hy", "*.hyb"},
+ MimeTypes: []string{"text/x-hybris", "application/x-hybris"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // public method and private method don't seem to be quite common
+ // elsewhere.
+ if hybrisAnalyserRe.MatchString(text) {
+ return 0.01
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (Hybris) Name() string {
+ return heartbeat.LanguageHybris.StringChroma()
+}
diff --git a/pkg/lexer/hybris_test.go b/pkg/lexer/hybris_test.go
new file mode 100644
index 00000000..8f8ed28e
--- /dev/null
+++ b/pkg/lexer/hybris_test.go
@@ -0,0 +1,37 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestHybris_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "public method": {
+ Filepath: "testdata/hybris_public.hyb",
+ Expected: 0.01,
+ },
+ "private method": {
+ Filepath: "testdata/hybris_private.hyb",
+ Expected: 0.01,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.Hybris{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/icon.go b/pkg/lexer/icon.go
new file mode 100644
index 00000000..9cf12660
--- /dev/null
+++ b/pkg/lexer/icon.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Icon lexer.
+type Icon struct{}
+
+// Lexer returns the lexer.
+func (l Icon) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"icon"},
+ Filenames: []string{"*.icon", "*.ICON"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Icon) Name() string {
+ return heartbeat.LanguageIcon.StringChroma()
+}
diff --git a/pkg/lexer/idl.go b/pkg/lexer/idl.go
new file mode 100644
index 00000000..0c46e56a
--- /dev/null
+++ b/pkg/lexer/idl.go
@@ -0,0 +1,51 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// IDL lexer.
+type IDL struct{}
+
+// Lexer returns the lexer.
+func (l IDL) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"idl"},
+ Filenames: []string{"*.pro"},
+ MimeTypes: []string{"text/idl"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // endelse seems to be unique to IDL, endswitch is rare at least.
+ var result float32
+
+ if strings.Contains(text, "endelse") {
+ result += 0.2
+ }
+
+ if strings.Contains(text, "endswitch") {
+ result += 0.01
+ }
+
+ return result
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (IDL) Name() string {
+ return heartbeat.LanguageIDL.StringChroma()
+}
diff --git a/pkg/lexer/idl_test.go b/pkg/lexer/idl_test.go
new file mode 100644
index 00000000..900ec0c7
--- /dev/null
+++ b/pkg/lexer/idl_test.go
@@ -0,0 +1,37 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestIDL_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "endelse": {
+ Filepath: "testdata/idl_endelse.pro",
+ Expected: 0.2,
+ },
+ "endswitch": {
+ Filepath: "testdata/idl_endswitch.pro",
+ Expected: 0.01,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.IDL{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/inform6.go b/pkg/lexer/inform6.go
new file mode 100644
index 00000000..fc44c632
--- /dev/null
+++ b/pkg/lexer/inform6.go
@@ -0,0 +1,47 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var inform6AnalyserRe = regexp.MustCompile(`(?i)\borigsource\b`)
+
+// Inform6 lexer.
+type Inform6 struct{}
+
+// Lexer returns the lexer.
+func (l Inform6) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"inform6", "i6"},
+ Filenames: []string{"*.inf"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // We try to find a keyword which seem relatively common, unfortunately
+ // there is a decent overlap with Smalltalk keywords otherwise here.
+ if inform6AnalyserRe.MatchString(text) {
+ return 0.05
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (Inform6) Name() string {
+ return heartbeat.LanguageInform6.StringChroma()
+}
diff --git a/pkg/lexer/inform6_test.go b/pkg/lexer/inform6_test.go
new file mode 100644
index 00000000..207bdb71
--- /dev/null
+++ b/pkg/lexer/inform6_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestInform6_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/inform6_basic.inf")
+ assert.NoError(t, err)
+
+ l := lexer.Inform6{}.Lexer()
+
+ assert.Equal(t, float32(0.05), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/inform6template.go b/pkg/lexer/inform6template.go
new file mode 100644
index 00000000..09fc7290
--- /dev/null
+++ b/pkg/lexer/inform6template.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Inform6Template lexer.
+type Inform6Template struct{}
+
+// Lexer returns the lexer.
+func (l Inform6Template) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"i6t"},
+ Filenames: []string{"*.i6t"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Inform6Template) Name() string {
+ return heartbeat.LanguageInform6Template.StringChroma()
+}
diff --git a/pkg/lexer/inform7.go b/pkg/lexer/inform7.go
new file mode 100644
index 00000000..ac51d48c
--- /dev/null
+++ b/pkg/lexer/inform7.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Inform7 lexer.
+type Inform7 struct{}
+
+// Lexer returns the lexer.
+func (l Inform7) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"inform7", "i7"},
+ Filenames: []string{"*.ni", "*.i7x"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Inform7) Name() string {
+ return heartbeat.LanguageInform7.StringChroma()
+}
diff --git a/pkg/lexer/ini.go b/pkg/lexer/ini.go
new file mode 100644
index 00000000..1d02ca1a
--- /dev/null
+++ b/pkg/lexer/ini.go
@@ -0,0 +1,45 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// INI lexer.
+type INI struct{}
+
+// Lexer returns the lexer.
+func (l INI) Lexer() chroma.Lexer {
+	lexer := lexers.Get(l.Name())
+	if lexer == nil {
+		return nil
+	}
+
+	if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+		lexer.SetAnalyser(func(text string) float32 {
+			npos := strings.Index(text, "\n") // index of first newline, i.e. end of the first line
+			if npos < 3 {
+				return 0
+			}
+
+			if text[0] == '[' && text[npos-1] == ']' {
+				return 1
+			}
+
+			return 0
+		})
+
+		return lexer
+	}
+
+	return nil
+}
+
+// Name returns the name of the lexer.
+func (INI) Name() string {
+ return heartbeat.LanguageINI.StringChroma()
+}
diff --git a/pkg/lexer/ini_test.go b/pkg/lexer/ini_test.go
new file mode 100644
index 00000000..eb9612a8
--- /dev/null
+++ b/pkg/lexer/ini_test.go
@@ -0,0 +1,37 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestIni_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "real ini": {
+ Filepath: "testdata/ini_basic.ini",
+ Expected: 1.0,
+ },
+ "less than three line breaks": {
+ Filepath: "testdata/ini_invalid.ini",
+ Expected: 0,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.INI{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/ioke.go b/pkg/lexer/ioke.go
new file mode 100644
index 00000000..cdfb4b53
--- /dev/null
+++ b/pkg/lexer/ioke.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Ioke lexer.
+type Ioke struct{}
+
+// Lexer returns the lexer.
+func (l Ioke) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"ioke", "ik"},
+ Filenames: []string{"*.ik"},
+ MimeTypes: []string{"text/x-iokesrc"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Ioke) Name() string {
+ return heartbeat.LanguageIoke.StringChroma()
+}
diff --git a/pkg/lexer/irclogs.go b/pkg/lexer/irclogs.go
new file mode 100644
index 00000000..4d60b307
--- /dev/null
+++ b/pkg/lexer/irclogs.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// IRCLogs lexer.
+type IRCLogs struct{}
+
+// Lexer returns the lexer.
+func (l IRCLogs) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"irc"},
+ Filenames: []string{"*.weechatlog"},
+ MimeTypes: []string{"text/x-irclog"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (IRCLogs) Name() string {
+ return heartbeat.LanguageIRCLogs.StringChroma()
+}
diff --git a/pkg/lexer/isabelle.go b/pkg/lexer/isabelle.go
new file mode 100644
index 00000000..faf4df6a
--- /dev/null
+++ b/pkg/lexer/isabelle.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Isabelle lexer.
+type Isabelle struct{}
+
+// Lexer returns the lexer.
+func (l Isabelle) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"isabelle"},
+ Filenames: []string{"*.thy"},
+ MimeTypes: []string{"text/x-isabelle"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Isabelle) Name() string {
+ return heartbeat.LanguageIsabelle.StringChroma()
+}
diff --git a/pkg/lexer/jags.go b/pkg/lexer/jags.go
new file mode 100644
index 00000000..a8560117
--- /dev/null
+++ b/pkg/lexer/jags.go
@@ -0,0 +1,58 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var (
+ jagsAnalyserModelRe = regexp.MustCompile(`(?m)^\s*model\s*\{`)
+ jagsAnalyserDataRe = regexp.MustCompile(`(?m)^\s*data\s*\{`)
+ jagsAnalyserVarRe = regexp.MustCompile(`(?m)^\s*var`)
+)
+
+// JAGS lexer.
+type JAGS struct{}
+
+// Lexer returns the lexer.
+func (l JAGS) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"jags"},
+ Filenames: []string{"*.jag", "*.bug"},
+ MimeTypes: []string{},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ if jagsAnalyserModelRe.MatchString(text) {
+ if jagsAnalyserDataRe.MatchString(text) {
+ return 0.9
+ }
+
+ if jagsAnalyserVarRe.MatchString(text) {
+ return 0.9
+ }
+
+ return 0.3
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (JAGS) Name() string {
+ return heartbeat.LanguageJAGS.StringChroma()
+}
diff --git a/pkg/lexer/jags_test.go b/pkg/lexer/jags_test.go
new file mode 100644
index 00000000..6a0ee086
--- /dev/null
+++ b/pkg/lexer/jags_test.go
@@ -0,0 +1,41 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestJAGS_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "model only": {
+ Filepath: "testdata/jags_model.jag",
+ Expected: 0.3,
+ },
+ "model and data": {
+ Filepath: "testdata/jags_data.jag",
+ Expected: 0.9,
+ },
+ "model and var": {
+ Filepath: "testdata/jags_var.jag",
+ Expected: 0.9,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.JAGS{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/jasmin.go b/pkg/lexer/jasmin.go
new file mode 100644
index 00000000..eb438076
--- /dev/null
+++ b/pkg/lexer/jasmin.go
@@ -0,0 +1,61 @@
+package lexer
+
+import (
+ "math"
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var (
+ jasminAnalyserClassRe = regexp.MustCompile(`(?m)^\s*\.class\s`)
+ jasminAnalyserInstructionRe = regexp.MustCompile(`(?m)^\s*[a-z]+_[a-z]+\b`)
+ jasminAnalyserKeywordsRe = regexp.MustCompile(
+ `(?m)^\s*\.(attribute|bytecode|debug|deprecated|enclosing|inner|interface|limit|set|signature|stack)\b`)
+)
+
+// Jasmin lexer.
+type Jasmin struct{}
+
+// Lexer returns the lexer.
+func (l Jasmin) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"jasmin", "jasminxt"},
+ Filenames: []string{"*.j"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ var result float64
+
+ if jasminAnalyserClassRe.MatchString(text) {
+ result += 0.5
+
+ if jasminAnalyserInstructionRe.MatchString(text) {
+ result += 0.3
+ }
+ }
+
+ if jasminAnalyserKeywordsRe.MatchString(text) {
+ result += 0.6
+ }
+
+ return float32(math.Min(result, float64(1.0)))
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (Jasmin) Name() string {
+ return heartbeat.LanguageJasmin.StringChroma()
+}
diff --git a/pkg/lexer/jasmin_test.go b/pkg/lexer/jasmin_test.go
new file mode 100644
index 00000000..61fb6914
--- /dev/null
+++ b/pkg/lexer/jasmin_test.go
@@ -0,0 +1,41 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestJasmin_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "class": {
+ Filepath: "testdata/jasmin_class.j",
+ Expected: 0.5,
+ },
+ "instruction": {
+ Filepath: "testdata/jasmin_instruction.j",
+ Expected: 0.8,
+ },
+ "keyword": {
+ Filepath: "testdata/jasmin_keyword.j",
+ Expected: 0.6,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.Jasmin{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/jcl.go b/pkg/lexer/jcl.go
new file mode 100644
index 00000000..6212fc5d
--- /dev/null
+++ b/pkg/lexer/jcl.go
@@ -0,0 +1,53 @@
+package lexer
+
+import (
+ "regexp"
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var jclAnalyserJobHeaderRe = regexp.MustCompile(`(?i)^//[a-z#$@][a-z0-9#$@]{0,7}\s+job(\s+.*)?$`)
+
+// JCL lexer.
+type JCL struct{}
+
+// Lexer returns the lexer.
+func (l JCL) Lexer() chroma.Lexer {
+	lexer := chroma.MustNewLexer(
+		&chroma.Config{
+			Name:      l.Name(),
+			Aliases:   []string{"jcl"},
+			Filenames: []string{"*.jcl"},
+			MimeTypes: []string{"text/x-jcl"},
+		},
+		func() chroma.Rules {
+			return chroma.Rules{
+				"root": {},
+			}
+		},
+	)
+
+	lexer.SetAnalyser(func(text string) float32 {
+		// Recognize a JCL job by its header, which must appear on
+		// the very first line. Only that line matters, so cut at
+		// the first newline instead of splitting the whole text;
+		// Cut also handles single-line input without a newline.
+		firstLine, _, _ := strings.Cut(text, "\n")
+
+		if jclAnalyserJobHeaderRe.MatchString(firstLine) {
+			return 1.0
+		}
+
+		return 0
+	})
+
+	return lexer
+}
+
+// Name returns the name of the lexer.
+func (JCL) Name() string {
+ return heartbeat.LanguageJCL.StringChroma()
+}
diff --git a/pkg/lexer/jcl_test.go b/pkg/lexer/jcl_test.go
new file mode 100644
index 00000000..273c1575
--- /dev/null
+++ b/pkg/lexer/jcl_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestJCL_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/jcl_job_header.jcl")
+ assert.NoError(t, err)
+
+ l := lexer.JCL{}.Lexer()
+
+ assert.Equal(t, float32(1.0), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/jsgf.go b/pkg/lexer/jsgf.go
new file mode 100644
index 00000000..a1debdcb
--- /dev/null
+++ b/pkg/lexer/jsgf.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// JSGF lexer.
+type JSGF struct{}
+
+// Lexer returns the lexer.
+func (l JSGF) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"jsgf"},
+ Filenames: []string{"*.jsgf"},
+ MimeTypes: []string{"application/jsgf", "application/x-jsgf", "text/jsgf"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (JSGF) Name() string {
+ return heartbeat.LanguageJSGF.StringChroma()
+}
diff --git a/pkg/lexer/jsonld.go b/pkg/lexer/jsonld.go
new file mode 100644
index 00000000..157c2730
--- /dev/null
+++ b/pkg/lexer/jsonld.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// JSONLD lexer.
+type JSONLD struct{}
+
+// Lexer returns the lexer.
+func (l JSONLD) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"jsonld", "json-ld"},
+ Filenames: []string{"*.jsonld"},
+ MimeTypes: []string{"application/ld+json"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (JSONLD) Name() string {
+ return heartbeat.LanguageJSONLD.StringChroma()
+}
diff --git a/pkg/lexer/jsp.go b/pkg/lexer/jsp.go
new file mode 100644
index 00000000..cd03dca6
--- /dev/null
+++ b/pkg/lexer/jsp.go
@@ -0,0 +1,57 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+ "github.com/wakatime/wakatime-cli/pkg/xml"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// JSP lexer.
+type JSP struct{}
+
+// Lexer returns the lexer.
+func (l JSP) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"jsp"},
+ Filenames: []string{"*.jsp"},
+ MimeTypes: []string{"application/x-jsp"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ var result float32
+
+ java := lexers.Get(heartbeat.LanguageJava.StringChroma())
+ if java != nil {
+ result = java.AnalyseText(text) - 0.01
+ }
+
+ if xml.MatchString(text) {
+ result += 0.4
+ }
+
+ if strings.Contains(text, "<%") && strings.Contains(text, "%>") {
+ result += 0.1
+ }
+
+ return result
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (JSP) Name() string {
+ return heartbeat.LanguageJSP.StringChroma()
+}
diff --git a/pkg/lexer/jsp_test.go b/pkg/lexer/jsp_test.go
new file mode 100644
index 00000000..1f994dfa
--- /dev/null
+++ b/pkg/lexer/jsp_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestJSP_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/jsp_basic.jsp")
+ assert.NoError(t, err)
+
+ l := lexer.JSP{}.Lexer()
+
+ assert.Equal(t, float32(0.49), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/juliaconsole.go b/pkg/lexer/juliaconsole.go
new file mode 100644
index 00000000..1e9bfae1
--- /dev/null
+++ b/pkg/lexer/juliaconsole.go
@@ -0,0 +1,30 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// JuliaConsole lexer.
+type JuliaConsole struct{}
+
+// Lexer returns the lexer.
+func (l JuliaConsole) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"jlcon"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (JuliaConsole) Name() string {
+ return heartbeat.LanguageJuliaConsole.StringChroma()
+}
diff --git a/pkg/lexer/juttle.go b/pkg/lexer/juttle.go
new file mode 100644
index 00000000..b3df0f8f
--- /dev/null
+++ b/pkg/lexer/juttle.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Juttle lexer.
+type Juttle struct{}
+
+// Lexer returns the lexer.
+func (l Juttle) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"juttle"},
+ Filenames: []string{"*.juttle"},
+ MimeTypes: []string{"application/juttle", "application/x-juttle", "text/x-juttle", "text/juttle"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Juttle) Name() string {
+ return heartbeat.LanguageJuttle.StringChroma()
+}
diff --git a/pkg/lexer/kal.go b/pkg/lexer/kal.go
new file mode 100644
index 00000000..8a303598
--- /dev/null
+++ b/pkg/lexer/kal.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Kal lexer.
+type Kal struct{}
+
+// Lexer returns the lexer.
+func (l Kal) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"kal"},
+ Filenames: []string{"*.kal"},
+ MimeTypes: []string{"text/kal", "application/kal"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Kal) Name() string {
+ return heartbeat.LanguageKal.StringChroma()
+}
diff --git a/pkg/lexer/kconfig.go b/pkg/lexer/kconfig.go
new file mode 100644
index 00000000..48870158
--- /dev/null
+++ b/pkg/lexer/kconfig.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Kconfig lexer.
+type Kconfig struct{}
+
+// Lexer returns the lexer.
+func (l Kconfig) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"kconfig", "menuconfig", "linux-config", "kernel-config"},
+ Filenames: []string{"Kconfig*", "*Config.in*", "external.in*", "standard-modules.in"},
+ MimeTypes: []string{"text/x-kconfig"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Kconfig) Name() string {
+ return heartbeat.LanguageKconfig.StringChroma()
+}
diff --git a/pkg/lexer/kernellog.go b/pkg/lexer/kernellog.go
new file mode 100644
index 00000000..97898066
--- /dev/null
+++ b/pkg/lexer/kernellog.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// KernelLog lexer.
+type KernelLog struct{}
+
+// Lexer returns the lexer.
+func (l KernelLog) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"kmsg", "dmesg"},
+ Filenames: []string{"*.kmsg", "*.dmesg"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (KernelLog) Name() string {
+ return heartbeat.LanguageKernelLog.StringChroma()
+}
diff --git a/pkg/lexer/koka.go b/pkg/lexer/koka.go
new file mode 100644
index 00000000..bb575932
--- /dev/null
+++ b/pkg/lexer/koka.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Koka lexer.
+type Koka struct{}
+
+// Lexer returns the lexer.
+func (l Koka) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"koka"},
+ Filenames: []string{"*.kk", "*.kki"},
+ MimeTypes: []string{"text/x-koka"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Koka) Name() string {
+ return heartbeat.LanguageKoka.StringChroma()
+}
diff --git a/pkg/lexer/lasso.go b/pkg/lexer/lasso.go
new file mode 100644
index 00000000..f7f87396
--- /dev/null
+++ b/pkg/lexer/lasso.go
@@ -0,0 +1,71 @@
+package lexer
+
+import (
+ "regexp"
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var (
+ lassoAnalyserDelimiterRe = regexp.MustCompile(`(?i)<\?lasso`)
+ lassoAnalyserLocalRe = regexp.MustCompile(`(?i)local\(`)
+)
+
+// Lasso lexer.
+type Lasso struct{}
+
+// Lexer returns the lexer.
+func (l Lasso) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{
+ "lasso",
+ "lassoscript",
+ },
+ Filenames: []string{
+ "*.lasso",
+ "*.lasso[89]",
+ },
+ AliasFilenames: []string{
+ "*.incl",
+ "*.inc",
+ "*.las",
+ },
+ MimeTypes: []string{"text/x-lasso"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ var result float32
+
+ if strings.Contains(text, "bin/lasso9") {
+ result += 0.8
+ }
+
+ if lassoAnalyserDelimiterRe.MatchString(text) {
+ result += 0.4
+ }
+
+ if lassoAnalyserLocalRe.MatchString(text) {
+ result += 0.4
+ }
+
+ return result
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (Lasso) Name() string {
+ return heartbeat.LanguageLasso.StringChroma()
+}
diff --git a/pkg/lexer/lasso_test.go b/pkg/lexer/lasso_test.go
new file mode 100644
index 00000000..0a45ae7c
--- /dev/null
+++ b/pkg/lexer/lasso_test.go
@@ -0,0 +1,41 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestLasso_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "shebang": {
+ Filepath: "testdata/lasso_shebang.lasso",
+ Expected: 0.8,
+ },
+ "delimiter": {
+ Filepath: "testdata/lasso_delimiter.lasso",
+ Expected: 0.4,
+ },
+ "local": {
+ Filepath: "testdata/lasso_local.lasso",
+ Expected: 0.4,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.Lasso{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/lean.go b/pkg/lexer/lean.go
new file mode 100644
index 00000000..07438fde
--- /dev/null
+++ b/pkg/lexer/lean.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Lean lexer.
+type Lean struct{}
+
+// Lexer returns the lexer.
+func (l Lean) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"lean"},
+ Filenames: []string{"*.lean"},
+ MimeTypes: []string{"text/x-lean"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Lean) Name() string {
+ return heartbeat.LanguageLean.StringChroma()
+}
diff --git a/pkg/lexer/less.go b/pkg/lexer/less.go
new file mode 100644
index 00000000..3dead9c6
--- /dev/null
+++ b/pkg/lexer/less.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Less lexer.
+type Less struct{}
+
+// Lexer returns the lexer.
+func (l Less) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"less"},
+ Filenames: []string{"*.less"},
+ MimeTypes: []string{"text/x-less-css"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Less) Name() string {
+ return heartbeat.LanguageLess.StringChroma()
+}
diff --git a/pkg/lexer/lexer.go b/pkg/lexer/lexer.go
new file mode 100644
index 00000000..ddfbf5e4
--- /dev/null
+++ b/pkg/lexer/lexer.go
@@ -0,0 +1,315 @@
+package lexer
+
+import (
+ "fmt"
+
+ "github.com/alecthomas/chroma/v2"
+ l "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// Lexer is an interface that can be implemented by lexers to register them.
+type Lexer interface {
+ Lexer() chroma.Lexer
+ Name() string
+}
+
+// RegisterAll registers all custom lexers.
+func RegisterAll() error {
+ var lexers = []Lexer{
+ ADL{},
+ AMPL{},
+ ActionScript3{},
+ Agda{},
+ Aheui{},
+ Alloy{},
+ AmbientTalk{},
+ Arrow{},
+ AspectJ{},
+ AspxCSharp{},
+ AspxVBNet{},
+ Asymptote{},
+ Augeas{},
+ BARE{},
+ BBCBasic{},
+ BBCode{},
+ BC{},
+ BST{},
+ BUGS{},
+ Befunge{},
+ Blazor{},
+ BlitzMax{},
+ Boa{},
+ Boo{},
+ Boogie{},
+ Brainfuck{},
+ CADL{},
+ CAmkES{},
+ CBMBasicV2{},
+ COBOLFree{},
+ CObjdump{},
+ CPSA{},
+ CUDA{},
+ Ca65Assembler{},
+ CapDL{},
+ Charmci{},
+ Cirru{},
+ Clay{},
+ Clean{},
+ ClojureScript{},
+ ColdfusionCFC{},
+ ColdfusionHTML{},
+ ComponentPascal{},
+ Coq{},
+ CppObjdump{},
+ Crmsh{},
+ Croc{},
+ Crontab{},
+ Cryptol{},
+ CsoundDocument{},
+ CsoundOrchestra{},
+ CsoundScore{},
+ Cypher{},
+ DASM16{},
+ DG{},
+ DObjdump{},
+ DarcsPatch{},
+ DebianControlFile{},
+ Delphi{},
+ Devicetree{},
+ Duel{},
+ DylanLID{},
+ DylanSession{},
+ EC{},
+ ECL{},
+ EMail{},
+ ERB{},
+ EarlGrey{},
+ Easytrieve{},
+ Eiffel{},
+ ElixirIexSsession{},
+ ErlangErlSession{},
+ Evoque{},
+ Execline{},
+ Ezhil{},
+ FSharp{},
+ FStar{},
+ Fancy{},
+ Fantom{},
+ Felix{},
+ Flatline{},
+ FloScript{},
+ Forth{},
+ FoxPro{},
+ Freefem{},
+ GDScript{},
+ Gap{},
+ Gas{},
+ GettextCatalog{},
+ Golo{},
+ GoodDataCL{},
+ Gosu{},
+ GosuTemplate{},
+ Groff{},
+ HSAIL{},
+ HTML{},
+ HTTP{},
+ Haml{},
+ Hspec{},
+ Hxml{},
+ Hy{},
+ Hybris{},
+ IDL{},
+ INI{},
+ IRCLogs{},
+ Icon{},
+ Inform6{},
+ Inform6Template{},
+ Inform7{},
+ Ioke{},
+ Isabelle{},
+ JAGS{},
+ JCL{},
+ JSGF{},
+ JSONLD{},
+ JSP{},
+ Jasmin{},
+ JuliaConsole{},
+ Juttle{},
+ Kal{},
+ Kconfig{},
+ KernelLog{},
+ Koka{},
+ LLVMMIR{},
+ LLVMMIRBODY{},
+ LSL{},
+ Lasso{},
+ Lean{},
+ Less{},
+ Limbo{},
+ Liquid{},
+ LiterateAgda{},
+ LiterateCryptol{},
+ LiterateHaskell{},
+ LiterateIdris{},
+ LiveScript{},
+ Logos{},
+ Logtalk{},
+ MAQL{},
+ MIME{},
+ MOOCode{},
+ MQL{},
+ MSDOSSession{},
+ MXML{},
+ Makefile{},
+ Marko{},
+ Mask{},
+ Matlab{},
+ MatlabSession{},
+ MiniD{},
+ MiniScript{},
+ Modelica{},
+ Modula2{},
+ Monkey{},
+ Monte{},
+ MoonScript{},
+ Mosel{},
+ MozPreprocHash{},
+ MozPreprocPercent{},
+ Mscgen{},
+ MuPAD{},
+ Mustache{},
+ NASM{},
+ NASMObjdump{},
+ NCL{},
+ NSIS{},
+ Nemerle{},
+ NesC{},
+ NewLisp{},
+ Nit{},
+ Notmuch{},
+ NuSMV{},
+ NumPy{},
+ Objdump{},
+ ObjectiveC{},
+ ObjectiveCPP{},
+ ObjectiveJ{},
+ Ooc{},
+ Opa{},
+ OpenEdgeABL{},
+ PEG{},
+ POVRay{},
+ Pan{},
+ ParaSail{},
+ Pawn{},
+ Perl{},
+ Perl6{},
+ Pike{},
+ Pointless{},
+ PostgresConsole{},
+ PowerShellSession{},
+ Praat{},
+ Processing{},
+ Prolog{},
+ PsyShPHP{},
+ Pug{},
+ PyPyLog{},
+ Python{},
+ Python2{},
+ Python2Traceback{},
+ PythonConsole{},
+ PythonTraceback{},
+ QBasic{},
+ QVTO{},
+ R{},
+ RConsole{},
+ REBOL{},
+ RHTML{},
+ RNGCompact{},
+ RPMSpec{},
+ RQL{},
+ RSL{},
+ RagelEmbedded{},
+ RawToken{},
+ Razor{},
+ Rd{},
+ ReScript{},
+ Red{},
+ Redcode{},
+ ResourceBundle{},
+ Ride{},
+ RoboconfGraph{},
+ RoboconfInstances{},
+ RobotFramework{},
+ RubyIRBSession{},
+ SARL{},
+ SSP{},
+ SWIG{},
+ Scaml{},
+ Scdoc{},
+ ShExC{},
+ Shen{},
+ Silver{},
+ Singularity{},
+ SketchDrawing{},
+ Slash{},
+ Slim{},
+ Slint{},
+ Slurm{},
+ Smali{},
+ SmartGameFormat{},
+ Snowball{},
+ SourcesList{},
+ Sqlite3con{},
+ Stan{},
+ Stata{},
+ SublimeTextConfig{},
+ SuperCollider{},
+ TADS3{},
+ TAP{},
+ TASM{},
+ TNT{},
+ TcshSession{},
+ Tea{},
+ TeraTerm{},
+ Tiddler{},
+ Todotxt{},
+ TrafficScript{},
+ TransactSQL{},
+ Treetop{},
+ Turtle{},
+ USD{},
+ Ucode{},
+ Unicon{},
+ UrbiScript{},
+ VBNet{},
+ VBScript{},
+ VCL{},
+ VCLSnippets{},
+ VCTreeStatus{},
+ VGL{},
+ Velocity{},
+ Verilog{},
+ WDiff{},
+ WebIDL{},
+ X10{},
+ XAML{},
+ XML{},
+ XQuery{},
+ XSLT{},
+ Xtend{},
+ Xtlang{},
+ Zeek{},
+ Zephir{},
+ }
+
+	for _, lexer := range lexers {
+		found := lexer.Lexer()
+		if found == nil {
+			return fmt.Errorf("%q lexer not found", lexer.Name())
+		}
+
+		_ = l.Register(found) // reuse the instance constructed above
+	}
+
+	return nil
+}
diff --git a/pkg/lexer/lexer_test.go b/pkg/lexer/lexer_test.go
new file mode 100644
index 00000000..c66a2327
--- /dev/null
+++ b/pkg/lexer/lexer_test.go
@@ -0,0 +1,313 @@
+package lexer_test
+
+import (
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2/lexers"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestChromaLanguagesRegister(t *testing.T) {
+ tests := map[string]heartbeat.Language{
+ "actionscript 3": heartbeat.LanguageActionScript3,
+ "adl": heartbeat.LanguageADL,
+ "agda": heartbeat.LanguageAgda,
+ "aheui": heartbeat.LanguageAheui,
+ "alloy": heartbeat.LanguageAlloy,
+ "ambienttalk": heartbeat.LanguageAmbientTalk,
+ "ampl": heartbeat.LanguageAMPL,
+ "arrow": heartbeat.LanguageArrow,
+ "aspectj": heartbeat.LanguageAspectJ,
+ "aspx-cs": heartbeat.LanguageAspxCSharp,
+ "aspx-vb": heartbeat.LanguageAspxVBNet,
+ "asymptote": heartbeat.LanguageAsymptote,
+ "augeas": heartbeat.LanguageAugeas,
+ "bare": heartbeat.LanguageBARE,
+ "bbc basic": heartbeat.LanguageBBCBasic,
+ "bbcode": heartbeat.LanguageBBCode,
+ "bc": heartbeat.LanguageBC,
+ "befunge": heartbeat.LanguageBefunge,
+ "blazor": heartbeat.LanguageBlazor,
+ "blitzmax": heartbeat.LanguageBlitzMax,
+ "boa": heartbeat.LanguageBoa,
+ "boo": heartbeat.LanguageBoo,
+ "boogie": heartbeat.LanguageBoogie,
+ "brainfuck": heartbeat.LanguageBrainfuck,
+ "bst": heartbeat.LanguageBST,
+ "bugs": heartbeat.LanguageBUGS,
+ "c-objdump": heartbeat.LanguageCObjdump,
+ "ca65 assembler": heartbeat.LanguageCa65Assembler,
+ "cadl": heartbeat.LanguageCADL,
+ "camkes": heartbeat.LanguageCAmkES,
+ "capdl": heartbeat.LanguageCapDL,
+ "cbm basic v2": heartbeat.LanguageCBMBasicV2,
+ "charmci": heartbeat.LanguageCharmci,
+ "cirru": heartbeat.LanguageCirru,
+ "clay": heartbeat.LanguageClay,
+ "clean": heartbeat.LanguageClean,
+ "clojurescript": heartbeat.LanguageClojureScript,
+ "cobolfree": heartbeat.LanguageCOBOLFree,
+ "coldfusion cfc": heartbeat.LanguageColdfusionCFC,
+ "coldfusion html": heartbeat.LanguageColdfusionHTML,
+ "component pascal": heartbeat.LanguageComponentPascal,
+ "coq": heartbeat.LanguageCoq,
+ "cpp-objdump": heartbeat.LanguageCppObjdump,
+ "cpsa": heartbeat.LanguageCPSA,
+ "crmsh": heartbeat.LanguageCrmsh,
+ "croc": heartbeat.LanguageCroc,
+ "crontab": heartbeat.LanguageCrontab,
+ "cryptol": heartbeat.LanguageCryptol,
+ "csound document": heartbeat.LanguageCsoundDocument,
+ "csound orchestra": heartbeat.LanguageCsoundOrchestra,
+ "csound score": heartbeat.LanguageCsoundScore,
+ "cuda": heartbeat.LanguageCUDA,
+ "cypher": heartbeat.LanguageCypher,
+ "d-objdump": heartbeat.LanguageDObjdump,
+ "darcs patch": heartbeat.LanguageDarcsPatch,
+ "dasm16": heartbeat.LanguageDASM16,
+ "debian control file": heartbeat.LanguageDebianControlFile,
+ "debian sourcelist": heartbeat.LanguageSourcesList,
+ "delphi": heartbeat.LanguageDelphi,
+ "devicetree": heartbeat.LanguageDevicetree,
+ "dg": heartbeat.LanguageDG,
+ "duel": heartbeat.LanguageDuel,
+ "dylan session": heartbeat.LanguageDylanSession,
+ "dylanlid": heartbeat.LanguageDylanLID,
+ "e-mail": heartbeat.LanguageEMail,
+ "earl grey": heartbeat.LanguageEarlGrey,
+ "easytrieve": heartbeat.LanguageEasytrieve,
+ "ec": heartbeat.LanguageEC,
+ "ecl": heartbeat.LanguageECL,
+ "eiffel": heartbeat.LanguageEiffel,
+ "elixir iex session": heartbeat.LanguageElixirIexSession,
+ "embedded ragel": heartbeat.LanguageRagelEmbedded,
+ "erb": heartbeat.LanguageERB,
+ "erlang erl session": heartbeat.LanguageErlangErlSession,
+ "evoque": heartbeat.LanguageEvoque,
+ "execline": heartbeat.LanguageExecline,
+ "ezhil": heartbeat.LanguageEzhil,
+ "fancy": heartbeat.LanguageFancy,
+ "fantom": heartbeat.LanguageFantom,
+ "felix": heartbeat.LanguageFelix,
+ "flatline": heartbeat.LanguageFlatline,
+ "floscript": heartbeat.LanguageFloScript,
+ "forth": heartbeat.LanguageForth,
+ "foxpro": heartbeat.LanguageFoxPro,
+ "freefem": heartbeat.LanguageFreefem,
+ "fsharp": heartbeat.LanguageFSharp,
+ "fstar": heartbeat.LanguageFStar,
+ "gap": heartbeat.LanguageGap,
+ "gas": heartbeat.LanguageGas,
+ "gdscript": heartbeat.LanguageGDScript,
+ "gettext catalog": heartbeat.LanguageGettextCatalog,
+ "golo": heartbeat.LanguageGolo,
+ "gooddata-cl": heartbeat.LanguageGoodDataCL,
+ "gosu": heartbeat.LanguageGosu,
+ "gosu template": heartbeat.LanguageGosuTemplate,
+ "groff": heartbeat.LanguageGroff,
+ "haml": heartbeat.LanguageHaml,
+ "hsail": heartbeat.LanguageHSAIL,
+ "hspec": heartbeat.LanguageHspec,
+ "html": heartbeat.LanguageHTML,
+ "http": heartbeat.LanguageHTTP,
+ "hxml": heartbeat.LanguageHxml,
+ "hy": heartbeat.LanguageHy,
+ "hybris": heartbeat.LanguageHybris,
+ "icon": heartbeat.LanguageIcon,
+ "idl": heartbeat.LanguageIDL,
+ "inform 6": heartbeat.LanguageInform6,
+ "inform 6 template": heartbeat.LanguageInform6Template,
+ "inform 7": heartbeat.LanguageInform7,
+ "ini": heartbeat.LanguageINI,
+ "ioke": heartbeat.LanguageIoke,
+ "irc logs": heartbeat.LanguageIRCLogs,
+ "isabelle": heartbeat.LanguageIsabelle,
+ "jags": heartbeat.LanguageJAGS,
+ "jasmin": heartbeat.LanguageJasmin,
+ "java server page": heartbeat.LanguageJSP,
+ "jcl": heartbeat.LanguageJCL,
+ "jsgf": heartbeat.LanguageJSGF,
+ "json-ld": heartbeat.LanguageJSONLD,
+ "julia console": heartbeat.LanguageJuliaConsole,
+ "juttle": heartbeat.LanguageJuttle,
+ "kal": heartbeat.LanguageKal,
+ "kconfig": heartbeat.LanguageKconfig,
+ "kernel log": heartbeat.LanguageKernelLog,
+ "koka": heartbeat.LanguageKoka,
+ "lasso": heartbeat.LanguageLasso,
+ "lean": heartbeat.LanguageLean,
+ "lesscss": heartbeat.LanguageLess,
+ "limbo": heartbeat.LanguageLimbo,
+ "liquid": heartbeat.LanguageLiquid,
+ "literate agda": heartbeat.LanguageLiterateAgda,
+ "literate cryptol": heartbeat.LanguageLiterateCryptol,
+ "literate haskell": heartbeat.LanguageLiterateHaskell,
+ "literate idris": heartbeat.LanguageLiterateIdris,
+ "livescript": heartbeat.LanguageLiveScript,
+ "llvm-mir": heartbeat.LanguageLLVMMIR,
+ "llvm-mir body": heartbeat.LanguageLLVMMIRBody,
+ "logos": heartbeat.LanguageLogos,
+ "logtalk": heartbeat.LanguageLogtalk,
+ "lsl": heartbeat.LanguageLSL,
+ "makefile": heartbeat.LanguageMakefile,
+ "maql": heartbeat.LanguageMAQL,
+ "marko": heartbeat.LanguageMarko,
+ "mask": heartbeat.LanguageMask,
+ "matlab": heartbeat.LanguageMatlab,
+ "matlab session": heartbeat.LanguageMatlabSession,
+ "mime": heartbeat.LanguageMIME,
+ "minid": heartbeat.LanguageMiniD,
+ "miniscript": heartbeat.LanguageMiniScript,
+ "modelica": heartbeat.LanguageModelica,
+ "modula-2": heartbeat.LanguageModula2,
+ "monkey": heartbeat.LanguageMonkey,
+ "monte": heartbeat.LanguageMonte,
+ "moocode": heartbeat.LanguageMOOCode,
+ "moonscript": heartbeat.LanguageMoonScript,
+ "mosel": heartbeat.LanguageMosel,
+ "mozhashpreproc": heartbeat.LanguageMozPreprocHash,
+ "mozpercentpreproc": heartbeat.LanguageMozPreprocPercent,
+ "mql": heartbeat.LanguageMQL,
+ "mscgen": heartbeat.LanguageMscgen,
+ "msdos session": heartbeat.LanguageMSDOSSession,
+ "mupad": heartbeat.LanguageMuPAD,
+ "mustache": heartbeat.LanguageMustache,
+ "mxml": heartbeat.LanguageMXML,
+ "nasm": heartbeat.LanguageNASM,
+ "ncl": heartbeat.LanguageNCL,
+ "nemerle": heartbeat.LanguageNemerle,
+ "nesc": heartbeat.LanguageNesC,
+ "newlisp": heartbeat.LanguageNewLisp,
+ "nit": heartbeat.LanguageNit,
+ "notmuch": heartbeat.LanguageNotmuch,
+ "nsis": heartbeat.LanguageNSIS,
+ "numpy": heartbeat.LanguageNumPy,
+ "nusmv": heartbeat.LanguageNuSMV,
+ "objdump": heartbeat.LanguageObjdump,
+ "objdump-nasm": heartbeat.LanguageNASMObjdump,
+ "objective-c": heartbeat.LanguageObjectiveC,
+ "objective-c++": heartbeat.LanguageObjectiveCPP,
+ "objective-j": heartbeat.LanguageObjectiveJ,
+ "ooc": heartbeat.LanguageOoc,
+ "opa": heartbeat.LanguageOpa,
+ "openedge abl": heartbeat.LanguageOpenEdgeABL,
+ "pan": heartbeat.LanguagePan,
+ "parasail": heartbeat.LanguageParaSail,
+ "pawn": heartbeat.LanguagePawn,
+ "peg": heartbeat.LanguagePEG,
+ "perl": heartbeat.LanguagePerl,
+ "perl6": heartbeat.LanguagePerl6,
+ "pike": heartbeat.LanguagePike,
+ "pointless": heartbeat.LanguagePointless,
+ "postgresql console (psql)": heartbeat.LanguagePostgresConsole,
+ "povray": heartbeat.LanguagePOVRay,
+ "powershell session": heartbeat.LanguagePowerShellSession,
+ "praat": heartbeat.LanguagePraat,
+ "processing": heartbeat.LanguageProcessing,
+ "prolog": heartbeat.LanguageProlog,
+ "psysh console session for php": heartbeat.LanguagePsyShPHP,
+ "pug": heartbeat.LanguagePug,
+ "pypy log": heartbeat.LanguagePyPyLog,
+ "python": heartbeat.LanguagePython,
+ "python 2": heartbeat.LanguagePython2,
+ "python 2.x traceback": heartbeat.LanguagePython2Traceback,
+ "python console session": heartbeat.LanguagePythonConsole,
+ "python traceback": heartbeat.LanguagePythonTraceback,
+ "qbasic": heartbeat.LanguageQBasic,
+ "qvto": heartbeat.LanguageQVTO,
+ "r": heartbeat.LanguageR,
+ "raw token data": heartbeat.LanguageRawToken,
+ "razor": heartbeat.LanguageRazor,
+ "rconsole": heartbeat.LanguageRConsole,
+ "rd": heartbeat.LanguageRd,
+ "rebol": heartbeat.LanguageREBOL,
+ "red": heartbeat.LanguageRed,
+ "redcode": heartbeat.LanguageRedcode,
+ "relax-ng compact": heartbeat.LanguageRNGCompact,
+ "rescript": heartbeat.LanguageReScript,
+ "resourcebundle": heartbeat.LanguageResourceBundle,
+ "rhtml": heartbeat.LanguageRHTML,
+ "ride": heartbeat.LanguageRide,
+ "roboconf graph": heartbeat.LanguageRoboconfGraph,
+ "roboconf instances": heartbeat.LanguageRoboconfInstances,
+ "robotframework": heartbeat.LanguageRobotFramework,
+ "rpmspec": heartbeat.LanguageRPMSpec,
+ "rql": heartbeat.LanguageRQL,
+ "rsl": heartbeat.LanguageRSL,
+ "ruby irb session": heartbeat.LanguageRubyIRBSession,
+ "sarl": heartbeat.LanguageSARL,
+ "scalate server page": heartbeat.LanguageSSP,
+ "scaml": heartbeat.LanguageScaml,
+ "scdoc": heartbeat.LanguageScdoc,
+ "shen": heartbeat.LanguageShen,
+ "shexc": heartbeat.LanguageShExC,
+ "silver": heartbeat.LanguageSilver,
+ "singularity": heartbeat.LanguageSingularity,
+ "sketch drawing": heartbeat.LanguageSketchDrawing,
+ "slash": heartbeat.LanguageSlash,
+ "slim": heartbeat.LanguageSlim,
+ "slint": heartbeat.LanguageSlint,
+ "slurm": heartbeat.LanguageSlurm,
+ "smali": heartbeat.LanguageSmali,
+ "smartgameformat": heartbeat.LanguageSmartGameFormat,
+ "snowball": heartbeat.LanguageSnowball,
+ "sqlite3con": heartbeat.LanguageSqlite3con,
+ "stan": heartbeat.LanguageStan,
+ "stata": heartbeat.LanguageStata,
+ "sublime text config": heartbeat.LanguageSublimeTextConfig,
+ "supercollider": heartbeat.LanguageSuperCollider,
+ "swig": heartbeat.LanguageSWIG,
+ "tads 3": heartbeat.LanguageTADS3,
+ "tap": heartbeat.LanguageTAP,
+ "tasm": heartbeat.LanguageTASM,
+ "tcsh session": heartbeat.LanguageTcshSession,
+ "tea": heartbeat.LanguageTea,
+ "tera term macro": heartbeat.LanguageTeraTerm,
+ "tiddler": heartbeat.LanguageTiddler,
+ "todotxt": heartbeat.LanguageTodotxt,
+ "trafficscript": heartbeat.LanguageTrafficScript,
+ "transact-sql": heartbeat.LanguageTransactSQL,
+ "treetop": heartbeat.LanguageTreetop,
+ "turtle": heartbeat.LanguageTurtle,
+ "typographic number theory": heartbeat.LanguageTNT,
+ "ucode": heartbeat.LanguageUcode,
+ "unicon": heartbeat.LanguageUnicon,
+ "urbiscript": heartbeat.LanguageUrbiScript,
+ "usd": heartbeat.LanguageUSD,
+ "vb.net": heartbeat.LanguageVBNet,
+ "vbscript": heartbeat.LanguageVBScript,
+ "vcl": heartbeat.LanguageVCL,
+ "vclsnippets": heartbeat.LanguageVCLSnippets,
+ "vctreestatus": heartbeat.LanguageVCTreeStatus,
+ "velocity": heartbeat.LanguageVelocity,
+ "verilog": heartbeat.LanguageVerilog,
+ "vgl": heartbeat.LanguageVGL,
+ "wdiff": heartbeat.LanguageWDiff,
+ "web idl": heartbeat.LanguageWebIDL,
+ "x10": heartbeat.LanguageX10,
+ "xaml": heartbeat.LanguageXAML,
+ "xml": heartbeat.LanguageXML,
+ "xquery": heartbeat.LanguageXQuery,
+ "xslt": heartbeat.LanguageXSLT,
+ "xtend": heartbeat.LanguageXtend,
+ "xtlang": heartbeat.LanguageXtlang,
+ "zeek": heartbeat.LanguageZeek,
+ "zephir": heartbeat.LanguageZephir,
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ language := test.StringChroma()
+ require.NotEqual(t, heartbeat.LanguageUnknown.String(), language)
+
+ lexer := lexers.Get(language)
+ if lexer != nil {
+			// assert the lexer is registered but its analyser is not set
+ assert.Nil(t, lexer.Config().Analyse)
+ }
+ })
+ }
+}
diff --git a/pkg/lexer/limbo.go b/pkg/lexer/limbo.go
new file mode 100644
index 00000000..9b6ca657
--- /dev/null
+++ b/pkg/lexer/limbo.go
@@ -0,0 +1,47 @@
+package lexer
+
+import (
+	"regexp"
+
+	"github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+	"github.com/alecthomas/chroma/v2"
+)
+
+var limboAnalyzerRe = regexp.MustCompile(`(?m)^implement \w+;`)
+
+// Limbo lexer. AnalyseText scores files that contain a Limbo module declaration.
+type Limbo struct{}
+
+// Lexer returns the lexer with a custom analyser attached for *.b files.
+func (l Limbo) Lexer() chroma.Lexer {
+	lexer := chroma.MustNewLexer(
+		&chroma.Config{
+			Name:      l.Name(),
+			Aliases:   []string{"limbo"},
+			Filenames: []string{"*.b"},
+			MimeTypes: []string{"text/limbo"},
+		},
+		func() chroma.Rules {
+			return chroma.Rules{
+				"root": {},
+			}
+		},
+	)
+
+	lexer.SetAnalyser(func(text string) float32 {
+		// Every Limbo module contains an "implement <Module>;" declaration.
+		if limboAnalyzerRe.MatchString(text) {
+			return 0.7
+		}
+
+		return 0
+	})
+
+	return lexer
+}
+
+// Name returns the name of the lexer.
+func (Limbo) Name() string {
+	return heartbeat.LanguageLimbo.StringChroma()
+}
diff --git a/pkg/lexer/limbo_test.go b/pkg/lexer/limbo_test.go
new file mode 100644
index 00000000..7d586b9b
--- /dev/null
+++ b/pkg/lexer/limbo_test.go
@@ -0,0 +1,21 @@
+package lexer_test
+
+import (
+	"os"
+	"testing"
+
+	"github.com/wakatime/wakatime-cli/pkg/lexer"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+// TestLimbo_AnalyseText verifies the custom analyser recognizes a Limbo module.
+func TestLimbo_AnalyseText(t *testing.T) {
+	data, err := os.ReadFile("testdata/limbo_basic.b")
+	require.NoError(t, err)
+
+	l := lexer.Limbo{}.Lexer()
+
+	assert.Equal(t, float32(0.7), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/liquid.go b/pkg/lexer/liquid.go
new file mode 100644
index 00000000..00228bb0
--- /dev/null
+++ b/pkg/lexer/liquid.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Liquid lexer.
+type Liquid struct{}
+
+// Lexer returns the lexer.
+func (l Liquid) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"liquid"},
+ Filenames: []string{"*.liquid"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Liquid) Name() string {
+ return heartbeat.LanguageLiquid.StringChroma()
+}
diff --git a/pkg/lexer/literateagda.go b/pkg/lexer/literateagda.go
new file mode 100644
index 00000000..ac2df4dc
--- /dev/null
+++ b/pkg/lexer/literateagda.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// LiterateAgda lexer.
+type LiterateAgda struct{}
+
+// Lexer returns the lexer.
+func (l LiterateAgda) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"lagda", "literate-agda"},
+ Filenames: []string{"*.lagda"},
+ MimeTypes: []string{"text/x-literate-agda"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (LiterateAgda) Name() string {
+ return heartbeat.LanguageLiterateAgda.StringChroma()
+}
diff --git a/pkg/lexer/literatecryptol.go b/pkg/lexer/literatecryptol.go
new file mode 100644
index 00000000..0843345f
--- /dev/null
+++ b/pkg/lexer/literatecryptol.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// LiterateCryptol lexer.
+type LiterateCryptol struct{}
+
+// Lexer returns the lexer.
+func (l LiterateCryptol) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"lcry", "literate-cryptol", "lcryptol"},
+ Filenames: []string{"*.lcry"},
+ MimeTypes: []string{"text/x-literate-cryptol"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (LiterateCryptol) Name() string {
+ return heartbeat.LanguageLiterateCryptol.StringChroma()
+}
diff --git a/pkg/lexer/literatehaskell.go b/pkg/lexer/literatehaskell.go
new file mode 100644
index 00000000..ff86a181
--- /dev/null
+++ b/pkg/lexer/literatehaskell.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// LiterateHaskell lexer.
+type LiterateHaskell struct{}
+
+// Lexer returns the lexer.
+func (l LiterateHaskell) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"lhs", "literate-haskell", "lhaskell"},
+ Filenames: []string{"*.lhs"},
+ MimeTypes: []string{"text/x-literate-haskell"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (LiterateHaskell) Name() string {
+ return heartbeat.LanguageLiterateHaskell.StringChroma()
+}
diff --git a/pkg/lexer/literateidris.go b/pkg/lexer/literateidris.go
new file mode 100644
index 00000000..58c617f2
--- /dev/null
+++ b/pkg/lexer/literateidris.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// LiterateIdris lexer.
+type LiterateIdris struct{}
+
+// Lexer returns the lexer.
+func (l LiterateIdris) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"lidr", "literate-idris", "lidris"},
+ Filenames: []string{"*.lidr"},
+ MimeTypes: []string{"text/x-literate-idris"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (LiterateIdris) Name() string {
+ return heartbeat.LanguageLiterateIdris.StringChroma()
+}
diff --git a/pkg/lexer/livescript.go b/pkg/lexer/livescript.go
new file mode 100644
index 00000000..41480c7a
--- /dev/null
+++ b/pkg/lexer/livescript.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// LiveScript lexer.
+type LiveScript struct{}
+
+// Lexer returns the lexer.
+func (l LiveScript) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"live-script", "livescript"},
+ Filenames: []string{"*.ls"},
+ MimeTypes: []string{"text/livescript"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (LiveScript) Name() string {
+ return heartbeat.LanguageLiveScript.StringChroma()
+}
diff --git a/pkg/lexer/llvmmir.go b/pkg/lexer/llvmmir.go
new file mode 100644
index 00000000..493ec079
--- /dev/null
+++ b/pkg/lexer/llvmmir.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// LLVMMIR lexer.
+type LLVMMIR struct{}
+
+// Lexer returns the lexer.
+func (l LLVMMIR) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"llvm-mir"},
+ Filenames: []string{"*.mir"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (LLVMMIR) Name() string {
+ return heartbeat.LanguageLLVMMIR.StringChroma()
+}
diff --git a/pkg/lexer/llvmmirbody.go b/pkg/lexer/llvmmirbody.go
new file mode 100644
index 00000000..9ad3a7a6
--- /dev/null
+++ b/pkg/lexer/llvmmirbody.go
@@ -0,0 +1,30 @@
+package lexer
+
+import (
+	"github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+	"github.com/alecthomas/chroma/v2"
+)
+
+// LLVMMIRBODY lexer. NOTE(review): name would ideally be LLVMMIRBody per Go initialism convention (cf. heartbeat.LanguageLLVMMIRBody); renaming requires updating the RegisterAll list as well.
+type LLVMMIRBODY struct{}
+
+// Lexer returns the lexer.
+func (l LLVMMIRBODY) Lexer() chroma.Lexer {
+	return chroma.MustNewLexer(
+		&chroma.Config{
+			Name:    l.Name(),
+			Aliases: []string{"llvm-mir-body"},
+		},
+		func() chroma.Rules {
+			return chroma.Rules{
+				"root": {},
+			}
+		},
+	)
+}
+
+// Name returns the name of the lexer.
+func (LLVMMIRBODY) Name() string {
+	return heartbeat.LanguageLLVMMIRBody.StringChroma()
+}
diff --git a/pkg/lexer/logos.go b/pkg/lexer/logos.go
new file mode 100644
index 00000000..2f66a288
--- /dev/null
+++ b/pkg/lexer/logos.go
@@ -0,0 +1,47 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var logosAnalyserKeywordsRe = regexp.MustCompile(`%(?:hook|ctor|init|c\()`)
+
+// Logos lexer.
+type Logos struct{}
+
+// Lexer returns the lexer.
+func (l Logos) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"logos"},
+ Filenames: []string{"*.x", "*.xi", "*.xm", "*.xmi"},
+ MimeTypes: []string{"text/x-logos"},
+ Priority: 0.25,
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ if logosAnalyserKeywordsRe.MatchString(text) {
+ return 1.0
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (Logos) Name() string {
+ return heartbeat.LanguageLogos.StringChroma()
+}
diff --git a/pkg/lexer/logos_test.go b/pkg/lexer/logos_test.go
new file mode 100644
index 00000000..5e940a6a
--- /dev/null
+++ b/pkg/lexer/logos_test.go
@@ -0,0 +1,21 @@
+package lexer_test
+
+import (
+	"os"
+	"testing"
+
+	"github.com/wakatime/wakatime-cli/pkg/lexer"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+// TestLogos_AnalyseText verifies the custom analyser recognizes Logos keywords.
+func TestLogos_AnalyseText(t *testing.T) {
+	data, err := os.ReadFile("testdata/logos_basic.xm")
+	require.NoError(t, err)
+
+	l := lexer.Logos{}.Lexer()
+
+	assert.Equal(t, float32(1.0), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/logtalk.go b/pkg/lexer/logtalk.go
new file mode 100644
index 00000000..0175de02
--- /dev/null
+++ b/pkg/lexer/logtalk.go
@@ -0,0 +1,53 @@
+package lexer
+
+import (
+ "regexp"
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var logtalkAnalyserSyntaxRe = regexp.MustCompile(`(?m)^:-\s[a-z]`)
+
+// Logtalk lexer.
+type Logtalk struct{}
+
+// Lexer returns the lexer.
+func (l Logtalk) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"logtalk"},
+ Filenames: []string{"*.lgt", "*.logtalk"},
+ MimeTypes: []string{"text/x-logtalk"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ if strings.Contains(text, ":- object(") ||
+ strings.Contains(text, ":- protocol(") ||
+ strings.Contains(text, ":- category(") {
+ return 1.0
+ }
+
+ if logtalkAnalyserSyntaxRe.MatchString(text) {
+ return 0.9
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (Logtalk) Name() string {
+ return heartbeat.LanguageLogtalk.StringChroma()
+}
diff --git a/pkg/lexer/logtalk_test.go b/pkg/lexer/logtalk_test.go
new file mode 100644
index 00000000..90a6bc0f
--- /dev/null
+++ b/pkg/lexer/logtalk_test.go
@@ -0,0 +1,39 @@
+package lexer_test
+
+import (
+	"os"
+	"testing"
+
+	"github.com/wakatime/wakatime-cli/pkg/lexer"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+// TestLogtalk_AnalyseText verifies the custom analyser scores Logtalk sources.
+func TestLogtalk_AnalyseText(t *testing.T) {
+	tests := map[string]struct {
+		Filepath string
+		Expected float32
+	}{
+		"object": {
+			Filepath: "testdata/logtalk_object.lgt",
+			Expected: 1.0,
+		},
+		"basic": {
+			Filepath: "testdata/logtalk_basic.lgt",
+			Expected: 0.9,
+		},
+	}
+
+	for name, test := range tests {
+		t.Run(name, func(t *testing.T) {
+			data, err := os.ReadFile(test.Filepath)
+			require.NoError(t, err)
+
+			l := lexer.Logtalk{}.Lexer()
+
+			assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+		})
+	}
+}
diff --git a/pkg/lexer/lsl.go b/pkg/lexer/lsl.go
new file mode 100644
index 00000000..9d516f35
--- /dev/null
+++ b/pkg/lexer/lsl.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// LSL lexer.
+type LSL struct{}
+
+// Lexer returns the lexer.
+func (l LSL) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"lsl"},
+ Filenames: []string{"*.lsl"},
+ MimeTypes: []string{"text/x-lsl"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (LSL) Name() string {
+ return heartbeat.LanguageLSL.StringChroma()
+}
diff --git a/pkg/lexer/make.go b/pkg/lexer/make.go
new file mode 100644
index 00000000..447f16ca
--- /dev/null
+++ b/pkg/lexer/make.go
@@ -0,0 +1,43 @@
+package lexer
+
+import (
+	"regexp"
+
+	"github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+	"github.com/alecthomas/chroma/v2"
+	"github.com/alecthomas/chroma/v2/lexers"
+)
+
+var makefileAnalyserVariableRe = regexp.MustCompile(`\$\([A-Z_]+\)`)
+
+// Makefile lexer. Wraps chroma's built-in Makefile lexer with a custom analyser.
+type Makefile struct{}
+
+// Lexer returns chroma's Makefile lexer with the analyser attached, or nil if it is unavailable or not a RegexLexer.
+func (l Makefile) Lexer() chroma.Lexer {
+	lexer := lexers.Get(l.Name())
+	if lexer == nil {
+		return nil
+	}
+
+	if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+		lexer.SetAnalyser(func(text string) float32 {
+			// Many makefiles have $(BIG_CAPS) style variables.
+			if makefileAnalyserVariableRe.MatchString(text) {
+				return 0.1
+			}
+
+			return 0
+		})
+
+		return lexer
+	}
+
+	return nil
+}
+
+// Name returns the name of the lexer.
+func (Makefile) Name() string {
+	return heartbeat.LanguageMakefile.StringChroma()
+}
diff --git a/pkg/lexer/make_test.go b/pkg/lexer/make_test.go
new file mode 100644
index 00000000..9153c2aa
--- /dev/null
+++ b/pkg/lexer/make_test.go
@@ -0,0 +1,21 @@
+package lexer_test
+
+import (
+	"os"
+	"testing"
+
+	"github.com/wakatime/wakatime-cli/pkg/lexer"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+// TestMakefile_AnalyseText verifies the custom analyser recognizes $(VAR) usage.
+func TestMakefile_AnalyseText(t *testing.T) {
+	data, err := os.ReadFile("testdata/makefile")
+	require.NoError(t, err)
+
+	l := lexer.Makefile{}.Lexer()
+
+	assert.Equal(t, float32(0.1), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/maql.go b/pkg/lexer/maql.go
new file mode 100644
index 00000000..8ebebc45
--- /dev/null
+++ b/pkg/lexer/maql.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// MAQL lexer.
+type MAQL struct{}
+
+// Lexer returns the lexer.
+func (l MAQL) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"maql"},
+ Filenames: []string{"*.maql"},
+ MimeTypes: []string{"text/x-gooddata-maql", "application/x-gooddata-maql"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (MAQL) Name() string {
+ return heartbeat.LanguageMAQL.StringChroma()
+}
diff --git a/pkg/lexer/marko.go b/pkg/lexer/marko.go
new file mode 100644
index 00000000..1261294e
--- /dev/null
+++ b/pkg/lexer/marko.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Marko lexer.
+type Marko struct{}
+
+// Lexer returns the lexer.
+func (l Marko) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"marko"},
+ Filenames: []string{"*.marko"},
+ MimeTypes: []string{"text/x-marko"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Marko) Name() string {
+ return heartbeat.LanguageMarko.StringChroma()
+}
diff --git a/pkg/lexer/mask.go b/pkg/lexer/mask.go
new file mode 100644
index 00000000..295b70f4
--- /dev/null
+++ b/pkg/lexer/mask.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Mask lexer.
+type Mask struct{}
+
+// Lexer returns the lexer.
+func (l Mask) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"mask"},
+ Filenames: []string{"*.mask"},
+ MimeTypes: []string{"text/x-mask"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Mask) Name() string {
+ return heartbeat.LanguageMask.StringChroma()
+}
diff --git a/pkg/lexer/matlab.go b/pkg/lexer/matlab.go
new file mode 100644
index 00000000..5f96fc0f
--- /dev/null
+++ b/pkg/lexer/matlab.go
@@ -0,0 +1,71 @@
+package lexer
+
+import (
+	"regexp"
+	"strings"
+
+	"github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+	"github.com/alecthomas/chroma/v2"
+	"github.com/alecthomas/chroma/v2/lexers"
+)
+
+var (
+	matlabAnalyserCommentRe   = regexp.MustCompile(`^\s*%`)
+	matlabAnalyserSystemCMDRe = regexp.MustCompile(`^!\w+`)
+)
+
+// Matlab lexer. Wraps chroma's built-in MATLAB lexer with a custom analyser.
+type Matlab struct{}
+
+// Lexer returns chroma's MATLAB lexer with the analyser attached, or nil if it is unavailable or not a RegexLexer.
+func (l Matlab) Lexer() chroma.Lexer {
+	lexer := lexers.Get(l.Name())
+	if lexer == nil {
+		return nil
+	}
+
+	if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+		lexer.SetAnalyser(func(text string) float32 {
+			lines := strings.Split(strings.ReplaceAll(text, "\r\n", "\n"), "\n")
+
+			var firstNonComment string
+			for _, line := range lines {
+				if !matlabAnalyserCommentRe.MatchString(line) {
+					firstNonComment = strings.TrimSpace(line)
+					break
+				}
+			}
+
+			// leading function declaration is a strong signal; the "{" check guards against false matches
+			if strings.HasPrefix(firstNonComment, "function") && !strings.Contains(firstNonComment, "{") {
+				return 1.0
+			}
+
+			// any %-style comment line is a weak signal
+			for _, line := range lines {
+				if matlabAnalyserCommentRe.MatchString(line) {
+					return 0.2
+				}
+			}
+
+			// any !shell-escape line is an equally weak signal
+			for _, line := range lines {
+				if matlabAnalyserSystemCMDRe.MatchString(line) {
+					return 0.2
+				}
+			}
+
+			return 0
+		})
+
+		return lexer
+	}
+
+	return nil
+}
+
+// Name returns the name of the lexer.
+func (Matlab) Name() string {
+	return heartbeat.LanguageMatlab.StringChroma()
+}
diff --git a/pkg/lexer/matlab_test.go b/pkg/lexer/matlab_test.go
new file mode 100644
index 00000000..440a10ed
--- /dev/null
+++ b/pkg/lexer/matlab_test.go
@@ -0,0 +1,47 @@
+package lexer_test
+
+import (
+	"os"
+	"testing"
+
+	"github.com/wakatime/wakatime-cli/pkg/lexer"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+// TestMatlab_AnalyseText verifies the custom analyser scores MATLAB sources.
+func TestMatlab_AnalyseText(t *testing.T) {
+	tests := map[string]struct {
+		Filepath string
+		Expected float32
+	}{
+		"function": {
+			Filepath: "testdata/matlab_function.m",
+			Expected: 1.0,
+		},
+		"comment": {
+			Filepath: "testdata/matlab_comment.m",
+			Expected: 0.2,
+		},
+		"systemcmd": {
+			Filepath: "testdata/matlab_systemcmd.m",
+			Expected: 0.2,
+		},
+		"windows": {
+			Filepath: "testdata/matlab_windows.m",
+			Expected: 1.0,
+		},
+	}
+
+	for name, test := range tests {
+		t.Run(name, func(t *testing.T) {
+			data, err := os.ReadFile(test.Filepath)
+			require.NoError(t, err)
+
+			l := lexer.Matlab{}.Lexer()
+
+			assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+		})
+	}
+}
diff --git a/pkg/lexer/matlabsession.go b/pkg/lexer/matlabsession.go
new file mode 100644
index 00000000..01ed80c7
--- /dev/null
+++ b/pkg/lexer/matlabsession.go
@@ -0,0 +1,30 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// MatlabSession lexer.
+type MatlabSession struct{}
+
+// Lexer returns the lexer.
+func (l MatlabSession) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"matlabsession"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (MatlabSession) Name() string {
+ return heartbeat.LanguageMatlabSession.StringChroma()
+}
diff --git a/pkg/lexer/mime.go b/pkg/lexer/mime.go
new file mode 100644
index 00000000..8af6d9be
--- /dev/null
+++ b/pkg/lexer/mime.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// MIME lexer.
+type MIME struct{}
+
+// Lexer returns the lexer.
+func (l MIME) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"mime"},
+ MimeTypes: []string{"multipart/mixed", "multipart/related", "multipart/alternative"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (MIME) Name() string {
+ return heartbeat.LanguageMIME.StringChroma()
+}
diff --git a/pkg/lexer/minid.go b/pkg/lexer/minid.go
new file mode 100644
index 00000000..587cd25e
--- /dev/null
+++ b/pkg/lexer/minid.go
@@ -0,0 +1,33 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// MiniD lexer.
+type MiniD struct{}
+
+// Lexer returns the lexer.
+func (l MiniD) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"minid"},
+ // Don't lex .md as MiniD, reserve for Markdown.
+ Filenames: []string{},
+ MimeTypes: []string{"text/x-minidsrc"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (MiniD) Name() string {
+ return heartbeat.LanguageMiniD.StringChroma()
+}
diff --git a/pkg/lexer/miniscript.go b/pkg/lexer/miniscript.go
new file mode 100644
index 00000000..72d6cf66
--- /dev/null
+++ b/pkg/lexer/miniscript.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// MiniScript lexer.
+type MiniScript struct{}
+
+// Lexer returns the lexer.
+func (l MiniScript) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"ms", "miniscript"},
+ Filenames: []string{"*.ms"},
+ MimeTypes: []string{"text/x-miniscript", "application/x-miniscript"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (MiniScript) Name() string {
+ return heartbeat.LanguageMiniScript.StringChroma()
+}
diff --git a/pkg/lexer/modelica.go b/pkg/lexer/modelica.go
new file mode 100644
index 00000000..9be62124
--- /dev/null
+++ b/pkg/lexer/modelica.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Modelica lexer.
+type Modelica struct{}
+
+// Lexer returns the lexer.
+func (l Modelica) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"modelica"},
+ Filenames: []string{"*.mo"},
+ MimeTypes: []string{"text/x-modelica"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Modelica) Name() string {
+ return heartbeat.LanguageModelica.StringChroma()
+}
diff --git a/pkg/lexer/modula2.go b/pkg/lexer/modula2.go
new file mode 100644
index 00000000..6a2dc589
--- /dev/null
+++ b/pkg/lexer/modula2.go
@@ -0,0 +1,62 @@
+package lexer
+
+import (
+ "regexp"
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+var (
+ modula2AnalyserProcedureRe = regexp.MustCompile(`\bPROCEDURE\b`)
+ modula2AnalyserFunctionRe = regexp.MustCompile(`\bFUNCTION\b`)
+)
+
+// Modula2 lexer.
+type Modula2 struct{}
+
+// Lexer returns the lexer.
+func (l Modula2) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ // It's Pascal-like, but does not use FUNCTION -- uses PROCEDURE
+ // instead.
+
+ // Check if this looks like Pascal, if not, bail out early
+ if !strings.Contains(text, "(*") && !strings.Contains(text, "*)") && !strings.Contains(text, ":=") { // NOTE(review): bails only when ALL three markers are absent; Pygments bails unless all are PRESENT (`not (a and b and c)`) — confirm this inversion is intended
+ return 0
+ }
+
+ var result float32
+
+ // Procedure is in Modula2
+ if modula2AnalyserProcedureRe.MatchString(text) {
+ result += 0.6
+ }
+
+ // FUNCTION is only valid in Pascal, but not in Modula2
+ if modula2AnalyserFunctionRe.MatchString(text) { // FUNCTION overrides any PROCEDURE score: text is treated as Pascal
+ result = 0
+ }
+
+ return result
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (Modula2) Name() string {
+ return heartbeat.LanguageModula2.StringChroma()
+}
diff --git a/pkg/lexer/modula2_test.go b/pkg/lexer/modula2_test.go
new file mode 100644
index 00000000..4ee34805
--- /dev/null
+++ b/pkg/lexer/modula2_test.go
@@ -0,0 +1,41 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestModula2_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "pascal flavour": {
+ Filepath: "testdata/modula2_pascal.def",
+ Expected: 0,
+ },
+ "pascal flavour with function": {
+ Filepath: "testdata/modula2_pascal_function.def",
+ Expected: 0,
+ },
+ "basic": {
+ Filepath: "testdata/modula2_basic.def",
+ Expected: 0.6,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.Modula2{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/monkey.go b/pkg/lexer/monkey.go
new file mode 100644
index 00000000..b99bd11a
--- /dev/null
+++ b/pkg/lexer/monkey.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Monkey lexer.
+type Monkey struct{}
+
+// Lexer returns the lexer.
+func (l Monkey) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"monkey"},
+ Filenames: []string{"*.monkey"},
+ MimeTypes: []string{"text/x-monkey"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Monkey) Name() string {
+ return heartbeat.LanguageMonkey.StringChroma()
+}
diff --git a/pkg/lexer/monte.go b/pkg/lexer/monte.go
new file mode 100644
index 00000000..b821db2f
--- /dev/null
+++ b/pkg/lexer/monte.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Monte lexer.
+type Monte struct{}
+
+// Lexer returns the lexer.
+func (l Monte) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"monte"},
+ Filenames: []string{"*.mt"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Monte) Name() string {
+ return heartbeat.LanguageMonte.StringChroma()
+}
diff --git a/pkg/lexer/moocode.go b/pkg/lexer/moocode.go
new file mode 100644
index 00000000..81777381
--- /dev/null
+++ b/pkg/lexer/moocode.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// MOOCode lexer.
+type MOOCode struct{}
+
+// Lexer returns the lexer.
+func (l MOOCode) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"moocode", "moo"},
+ Filenames: []string{"*.moo"},
+ MimeTypes: []string{"text/x-moocode"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (MOOCode) Name() string {
+ return heartbeat.LanguageMOOCode.StringChroma()
+}
diff --git a/pkg/lexer/moonscript.go b/pkg/lexer/moonscript.go
new file mode 100644
index 00000000..1f812743
--- /dev/null
+++ b/pkg/lexer/moonscript.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// MoonScript lexer.
+type MoonScript struct{}
+
+// Lexer returns the lexer.
+func (l MoonScript) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"moon", "moonscript"},
+ MimeTypes: []string{"text/x-moonscript", "application/x-moonscript"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (MoonScript) Name() string {
+ return heartbeat.LanguageMoonScript.StringChroma()
+}
diff --git a/pkg/lexer/mosel.go b/pkg/lexer/mosel.go
new file mode 100644
index 00000000..bea6589f
--- /dev/null
+++ b/pkg/lexer/mosel.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Mosel lexer.
+type Mosel struct{}
+
+// Lexer returns the lexer.
+func (l Mosel) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"mosel"}, // fix: was "model" (typo) — the language alias is "mosel", matching Pygments' MoselLexer
+ Filenames: []string{"*.mos"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Mosel) Name() string {
+ return heartbeat.LanguageMosel.StringChroma()
+}
diff --git a/pkg/lexer/mozhashpreproc.go b/pkg/lexer/mozhashpreproc.go
new file mode 100644
index 00000000..8a88e64f
--- /dev/null
+++ b/pkg/lexer/mozhashpreproc.go
@@ -0,0 +1,30 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// MozPreprocHash lexer.
+type MozPreprocHash struct{}
+
+// Lexer returns the lexer.
+func (l MozPreprocHash) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"mozhashpreproc"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (MozPreprocHash) Name() string {
+ return heartbeat.LanguageMozPreprocHash.StringChroma()
+}
diff --git a/pkg/lexer/mozpreprocpercent.go b/pkg/lexer/mozpreprocpercent.go
new file mode 100644
index 00000000..ca9057b5
--- /dev/null
+++ b/pkg/lexer/mozpreprocpercent.go
@@ -0,0 +1,30 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// MozPreprocPercent lexer.
+type MozPreprocPercent struct{}
+
+// Lexer returns the lexer.
+func (l MozPreprocPercent) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"mozpercentpreproc"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (MozPreprocPercent) Name() string {
+ return heartbeat.LanguageMozPreprocPercent.StringChroma()
+}
diff --git a/pkg/lexer/mql.go b/pkg/lexer/mql.go
new file mode 100644
index 00000000..02d73530
--- /dev/null
+++ b/pkg/lexer/mql.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// MQL lexer.
+type MQL struct{}
+
+// Lexer returns the lexer.
+func (l MQL) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"mql", "mq4", "mq5", "mql4", "mql5"},
+ Filenames: []string{"*.mq4", "*.mq5", "*.mqh"},
+ MimeTypes: []string{"text/x-mql"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (MQL) Name() string {
+ return heartbeat.LanguageMQL.StringChroma()
+}
diff --git a/pkg/lexer/mscgen.go b/pkg/lexer/mscgen.go
new file mode 100644
index 00000000..c47c3885
--- /dev/null
+++ b/pkg/lexer/mscgen.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Mscgen lexer.
+type Mscgen struct{}
+
+// Lexer returns the lexer.
+func (l Mscgen) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"mscgen", "msc"},
+ Filenames: []string{"*.msc"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Mscgen) Name() string {
+ return heartbeat.LanguageMscgen.StringChroma()
+}
diff --git a/pkg/lexer/msdossession.go b/pkg/lexer/msdossession.go
new file mode 100644
index 00000000..800ab77f
--- /dev/null
+++ b/pkg/lexer/msdossession.go
@@ -0,0 +1,30 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// MSDOSSession lexer.
+type MSDOSSession struct{}
+
+// Lexer returns the lexer.
+func (l MSDOSSession) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"doscon"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (MSDOSSession) Name() string {
+ return heartbeat.LanguageMSDOSSession.StringChroma()
+}
diff --git a/pkg/lexer/mupad.go b/pkg/lexer/mupad.go
new file mode 100644
index 00000000..26e87e3b
--- /dev/null
+++ b/pkg/lexer/mupad.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// MuPAD lexer.
+type MuPAD struct{}
+
+// Lexer returns the lexer.
+func (l MuPAD) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"mupad"},
+ Filenames: []string{"*.mu"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (MuPAD) Name() string {
+ return heartbeat.LanguageMuPAD.StringChroma()
+}
diff --git a/pkg/lexer/mustache.go b/pkg/lexer/mustache.go
new file mode 100644
index 00000000..2a2e6d00
--- /dev/null
+++ b/pkg/lexer/mustache.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Mustache lexer.
+type Mustache struct{}
+
+// Lexer returns the lexer.
+func (l Mustache) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"mustache"},
+ Filenames: []string{"*.mustache"},
+ MimeTypes: []string{"text/x-mustache-template"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Mustache) Name() string {
+ return heartbeat.LanguageMustache.StringChroma()
+}
diff --git a/pkg/lexer/mxml.go b/pkg/lexer/mxml.go
new file mode 100644
index 00000000..8433b555
--- /dev/null
+++ b/pkg/lexer/mxml.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// MXML lexer.
+type MXML struct{}
+
+// Lexer returns the lexer.
+func (l MXML) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"mxml"},
+ Filenames: []string{"*.mxml"},
+ MimeTypes: []string{"text/xml", "application/xml"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (MXML) Name() string {
+ return heartbeat.LanguageMXML.StringChroma()
+}
diff --git a/pkg/lexer/nasm.go b/pkg/lexer/nasm.go
new file mode 100644
index 00000000..e0b7f45e
--- /dev/null
+++ b/pkg/lexer/nasm.go
@@ -0,0 +1,43 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+var nasmAnalyzerRe = regexp.MustCompile(`(?i)PROC`) // NOTE(review): unanchored, unlike Pygments' re.match (start-of-text only) — harmless while both branches below score 0
+
+// NASM lexer.
+type NASM struct{}
+
+// Lexer returns the lexer.
+func (l NASM) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ // Probably TASM
+ if nasmAnalyzerRe.MatchString(text) {
+ return 0
+ }
+
+ return 0 // non-TASM text also scores 0: mirrors Pygments, which yields no score in either case
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (NASM) Name() string {
+ return heartbeat.LanguageNASM.StringChroma()
+}
diff --git a/pkg/lexer/nasm_test.go b/pkg/lexer/nasm_test.go
new file mode 100644
index 00000000..71407b01
--- /dev/null
+++ b/pkg/lexer/nasm_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestNASM_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/nasm.asm")
+ assert.NoError(t, err)
+
+ l := lexer.NASM{}.Lexer()
+
+ assert.Equal(t, float32(0), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/nasmobjdump.go b/pkg/lexer/nasmobjdump.go
new file mode 100644
index 00000000..b4aebeab
--- /dev/null
+++ b/pkg/lexer/nasmobjdump.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// NASMObjdump lexer.
+type NASMObjdump struct{}
+
+// Lexer returns the lexer.
+func (l NASMObjdump) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"objdump-nasm"},
+ Filenames: []string{"*.objdump-intel"},
+ MimeTypes: []string{"text/x-nasm-objdump"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (NASMObjdump) Name() string {
+ return heartbeat.LanguageNASMObjdump.StringChroma()
+}
diff --git a/pkg/lexer/ncl.go b/pkg/lexer/ncl.go
new file mode 100644
index 00000000..cc3b83f6
--- /dev/null
+++ b/pkg/lexer/ncl.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// NCL lexer.
+type NCL struct{}
+
+// Lexer returns the lexer.
+func (l NCL) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"ncl"},
+ Filenames: []string{"*.ncl"},
+ MimeTypes: []string{"text/ncl"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (NCL) Name() string {
+ return heartbeat.LanguageNCL.StringChroma()
+}
diff --git a/pkg/lexer/nemerle.go b/pkg/lexer/nemerle.go
new file mode 100644
index 00000000..6fd5dcc2
--- /dev/null
+++ b/pkg/lexer/nemerle.go
@@ -0,0 +1,47 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Nemerle lexer.
+type Nemerle struct{}
+
+// Lexer returns the lexer.
+func (l Nemerle) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"nemerle"},
+ Filenames: []string{"*.n"},
+ // inferred
+ MimeTypes: []string{"text/x-nemerle"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // Nemerle is quite similar to Python, but @if is relatively uncommon
+ // elsewhere.
+ if strings.Contains(text, "@if") {
+ return 0.1
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (Nemerle) Name() string {
+ return heartbeat.LanguageNemerle.StringChroma()
+}
diff --git a/pkg/lexer/nemerle_test.go b/pkg/lexer/nemerle_test.go
new file mode 100644
index 00000000..b14c048b
--- /dev/null
+++ b/pkg/lexer/nemerle_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestNermerle_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/nemerle_if.n")
+ assert.NoError(t, err)
+
+ l := lexer.Nemerle{}.Lexer()
+
+ assert.Equal(t, float32(0.1), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/nesc.go b/pkg/lexer/nesc.go
new file mode 100644
index 00000000..1c1b1b17
--- /dev/null
+++ b/pkg/lexer/nesc.go
@@ -0,0 +1,44 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// NesC lexer.
+type NesC struct{}
+
+// Lexer returns the lexer.
+func (l NesC) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"nesc"},
+ Filenames: []string{"*.nc"},
+ MimeTypes: []string{"text/x-nescsrc"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 { // delegate scoring to the C lexer's analyser
+ c := lexers.Get(heartbeat.LanguageC.StringChroma())
+ if c == nil {
+ return 0
+ }
+
+ return c.AnalyseText(text)
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (NesC) Name() string {
+ return heartbeat.LanguageNesC.StringChroma()
+}
diff --git a/pkg/lexer/nesc_test.go b/pkg/lexer/nesc_test.go
new file mode 100644
index 00000000..840c3f0e
--- /dev/null
+++ b/pkg/lexer/nesc_test.go
@@ -0,0 +1,41 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestNesc_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "include": {
+ Filepath: "testdata/nesc_include.nc",
+ Expected: 0.1,
+ },
+ "ifdef": {
+ Filepath: "testdata/nesc_ifdef.nc",
+ Expected: 0.1,
+ },
+ "ifndef": {
+ Filepath: "testdata/nesc_ifndef.nc",
+ Expected: 0.1,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.NesC{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/newlisp.go b/pkg/lexer/newlisp.go
new file mode 100644
index 00000000..c941220f
--- /dev/null
+++ b/pkg/lexer/newlisp.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// NewLisp lexer.
+type NewLisp struct{}
+
+// Lexer returns the lexer.
+func (l NewLisp) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"newlisp"},
+ Filenames: []string{"*.lsp", "*.nl", "*.kif"},
+ MimeTypes: []string{"text/x-newlisp", "application/x-newlisp"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (NewLisp) Name() string {
+ return heartbeat.LanguageNewLisp.StringChroma()
+}
diff --git a/pkg/lexer/nit.go b/pkg/lexer/nit.go
new file mode 100644
index 00000000..66d6cd15
--- /dev/null
+++ b/pkg/lexer/nit.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Nit lexer.
+type Nit struct{}
+
+// Lexer returns the lexer.
+func (l Nit) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"nit"},
+ Filenames: []string{"*.nit"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Nit) Name() string {
+ return heartbeat.LanguageNit.StringChroma()
+}
diff --git a/pkg/lexer/notmuch.go b/pkg/lexer/notmuch.go
new file mode 100644
index 00000000..2738a178
--- /dev/null
+++ b/pkg/lexer/notmuch.go
@@ -0,0 +1,42 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Notmuch lexer.
+type Notmuch struct{}
+
+// Lexer returns the lexer.
+func (l Notmuch) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"notmuch"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ if strings.HasPrefix(text, "\fmessage{") {
+ return 1.0
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (Notmuch) Name() string {
+ return heartbeat.LanguageNotmuch.StringChroma()
+}
diff --git a/pkg/lexer/notmuch_test.go b/pkg/lexer/notmuch_test.go
new file mode 100644
index 00000000..1808696c
--- /dev/null
+++ b/pkg/lexer/notmuch_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestNotmuch_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/notmuch")
+ assert.NoError(t, err)
+
+ l := lexer.Notmuch{}.Lexer()
+
+ assert.Equal(t, float32(1.0), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/nsis.go b/pkg/lexer/nsis.go
new file mode 100644
index 00000000..4104d0dc
--- /dev/null
+++ b/pkg/lexer/nsis.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// NSIS lexer.
+type NSIS struct{}
+
+// Lexer returns the lexer.
+func (l NSIS) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"nsis", "nsi", "nsh"},
+ Filenames: []string{"*.nsi", "*.nsh"},
+ MimeTypes: []string{"text/x-nsis"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (NSIS) Name() string {
+ return heartbeat.LanguageNSIS.StringChroma()
+}
diff --git a/pkg/lexer/numpy.go b/pkg/lexer/numpy.go
new file mode 100644
index 00000000..69d22ed0
--- /dev/null
+++ b/pkg/lexer/numpy.go
@@ -0,0 +1,55 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+ "github.com/wakatime/wakatime-cli/pkg/shebang"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// NumPy lexer.
+type NumPy struct{}
+
+// Lexer returns the lexer.
+func (l NumPy) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"numpy"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ hasPythonShebang, _ := shebang.MatchString(text, `pythonw?(3(\.\d)?)?`)
+ containsNumpyImport := strings.Contains(text, "import numpy")
+ containsFromNumpyImport := strings.Contains(text, "from numpy import")
+
+ var containsImport bool
+
+ if len(text) > 1000 { // a generic "import " is only looked for near the top of the file
+ containsImport = strings.Contains(text[:1000], "import ")
+ } else {
+ containsImport = strings.Contains(text, "import ")
+ }
+
+ if (hasPythonShebang || containsImport) && (containsNumpyImport || containsFromNumpyImport) { // must look like Python AND reference numpy
+ return 1.0
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (NumPy) Name() string {
+ return heartbeat.LanguageNumPy.StringChroma()
+}
diff --git a/pkg/lexer/numpy_test.go b/pkg/lexer/numpy_test.go
new file mode 100644
index 00000000..6fcb82c7
--- /dev/null
+++ b/pkg/lexer/numpy_test.go
@@ -0,0 +1,41 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestNumPy_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "basic": {
+ Filepath: "testdata/numpy_basic",
+ Expected: 1.0,
+ },
+ "from numpy import": {
+ Filepath: "testdata/numpy_from_import",
+ Expected: 1.0,
+ },
+ "regular python": {
+ Filepath: "testdata/numpy.py",
+ Expected: 1.0,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.NumPy{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/nusmv.go b/pkg/lexer/nusmv.go
new file mode 100644
index 00000000..b48125f0
--- /dev/null
+++ b/pkg/lexer/nusmv.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// NuSMV lexer.
+type NuSMV struct{}
+
+// Lexer returns the lexer.
+func (l NuSMV) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"nusmv"},
+ Filenames: []string{"*.smv"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (NuSMV) Name() string {
+ return heartbeat.LanguageNuSMV.StringChroma()
+}
diff --git a/pkg/lexer/objdump.go b/pkg/lexer/objdump.go
new file mode 100644
index 00000000..dda8345f
--- /dev/null
+++ b/pkg/lexer/objdump.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Objdump lexer.
+type Objdump struct{}
+
+// Lexer returns the lexer.
+func (l Objdump) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"objdump"},
+ Filenames: []string{"*.objdump"},
+ MimeTypes: []string{"text/x-objdump"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Objdump) Name() string {
+ return heartbeat.LanguageObjdump.StringChroma()
+}
diff --git a/pkg/lexer/objectivec.go b/pkg/lexer/objectivec.go
new file mode 100644
index 00000000..1176c783
--- /dev/null
+++ b/pkg/lexer/objectivec.go
@@ -0,0 +1,66 @@
+package lexer
+
+import (
+ "regexp"
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+var (
+ // Have to be careful not to accidentally match JavaDoc/Doxygen syntax here,
+ // since that's quite common in ordinary C/C++ files. It's OK to match
+ // JavaDoc/Doxygen keywords that only apply to Objective-C, mind.
+ //
+ // The upshot of this is that we CANNOT match @class or @interface.
+ objectiveCAnalyserKeywordsRe = regexp.MustCompile(`@(?:end|implementation|protocol)`)
+ // Matches [ ? identifier ( identifier ? ] | identifier? : )
+ // (note the identifier is *optional* when there is a ':'!)
+ objectiveCAnalyserMessageRe = regexp.MustCompile(`\[\s*[a-zA-Z_]\w*\s+(?:[a-zA-Z_]\w*\s*\]|(?:[a-zA-Z_]\w*)?:)`)
+ objectiveCAnalyserNSNumberRe = regexp.MustCompile(`@[0-9]+`) // NSNumber literals such as @42
+)
+
+// ObjectiveC lexer.
+type ObjectiveC struct{}
+
+// Lexer returns the lexer.
+func (l ObjectiveC) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ if objectiveCAnalyserKeywordsRe.MatchString(text) { // unambiguous Objective-C keywords win outright
+ return 1.0
+ }
+
+ if strings.Contains(text, `@"`) { // NSString literal
+ return 0.8
+ }
+
+ if objectiveCAnalyserNSNumberRe.MatchString(text) {
+ return 0.7
+ }
+
+ if objectiveCAnalyserMessageRe.MatchString(text) { // message-send syntax, e.g. [obj method:arg]
+ return 0.8
+ }
+
+ return 0
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (ObjectiveC) Name() string {
+ return heartbeat.LanguageObjectiveC.StringChroma()
+}
diff --git a/pkg/lexer/objectivec_test.go b/pkg/lexer/objectivec_test.go
new file mode 100644
index 00000000..dbcb5842
--- /dev/null
+++ b/pkg/lexer/objectivec_test.go
@@ -0,0 +1,53 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestObjectiveC_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "keyword_end": {
+ Filepath: "testdata/objectivec_keyword_end.m",
+ Expected: 1.0,
+ },
+ "keyword_implementation": {
+ Filepath: "testdata/objectivec_keyword_implementation.m",
+ Expected: 1.0,
+ },
+ "keyword_protocol": {
+ Filepath: "testdata/objectivec_keyword_protocol.m",
+ Expected: 1.0,
+ },
+ "nsstring": {
+ Filepath: "testdata/objectivec_nsstring.m",
+ Expected: 0.8,
+ },
+ "nsnumber": {
+ Filepath: "testdata/objectivec_nsnumber.m",
+ Expected: 0.7,
+ },
+ "message": {
+ Filepath: "testdata/objectivec_message.m",
+ Expected: 0.8,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.ObjectiveC{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/objectivecpp.go b/pkg/lexer/objectivecpp.go
new file mode 100644
index 00000000..1be95400
--- /dev/null
+++ b/pkg/lexer/objectivecpp.go
@@ -0,0 +1,34 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// ObjectiveCPP lexer.
+type ObjectiveCPP struct{}
+
+// Lexer returns the lexer.
+func (l ObjectiveCPP) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"objective-c++", "objectivec++", "obj-c++", "objc++"},
+ Filenames: []string{"*.mm", "*.hh"},
+ MimeTypes: []string{"text/x-objective-c++"},
+ // Lower than C++.
+ Priority: 0.05,
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (ObjectiveCPP) Name() string {
+ return heartbeat.LanguageObjectiveCPP.StringChroma()
+}
diff --git a/pkg/lexer/objectivej.go b/pkg/lexer/objectivej.go
new file mode 100644
index 00000000..34033ccf
--- /dev/null
+++ b/pkg/lexer/objectivej.go
@@ -0,0 +1,47 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var objectiveJAnalyserImportRe = regexp.MustCompile(`(?m)^\s*@import\s+[<"]`)
+
+// ObjectiveJ lexer.
+type ObjectiveJ struct{}
+
+// Lexer returns the lexer.
+func (l ObjectiveJ) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"objective-j", "objectivej", "obj-j", "objj"},
+ Filenames: []string{"*.j"},
+ MimeTypes: []string{"text/x-objective-j"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // special directive found in most Objective-J files.
+ if objectiveJAnalyserImportRe.MatchString(text) {
+ return 1.0
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (ObjectiveJ) Name() string {
+ return heartbeat.LanguageObjectiveJ.StringChroma()
+}
diff --git a/pkg/lexer/objectivej_test.go b/pkg/lexer/objectivej_test.go
new file mode 100644
index 00000000..0f9920f8
--- /dev/null
+++ b/pkg/lexer/objectivej_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestObjectiveJ_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/objectivej_import.j")
+ assert.NoError(t, err)
+
+ l := lexer.ObjectiveJ{}.Lexer()
+
+ assert.Equal(t, float32(1.0), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/ooc.go b/pkg/lexer/ooc.go
new file mode 100644
index 00000000..9ae3f9ed
--- /dev/null
+++ b/pkg/lexer/ooc.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Ooc lexer.
+type Ooc struct{}
+
+// Lexer returns the lexer.
+func (l Ooc) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"ooc"},
+ Filenames: []string{"*.ooc"},
+ MimeTypes: []string{"text/x-ooc"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Ooc) Name() string {
+ return heartbeat.LanguageOoc.StringChroma()
+}
diff --git a/pkg/lexer/opa.go b/pkg/lexer/opa.go
new file mode 100644
index 00000000..ce1eb10d
--- /dev/null
+++ b/pkg/lexer/opa.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Opa lexer.
+type Opa struct{}
+
+// Lexer returns the lexer.
+func (l Opa) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"opa"},
+ Filenames: []string{"*.opa"},
+ MimeTypes: []string{"text/x-opa"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Opa) Name() string {
+ return heartbeat.LanguageOpa.StringChroma()
+}
diff --git a/pkg/lexer/openedgeabl.go b/pkg/lexer/openedgeabl.go
new file mode 100644
index 00000000..04fb5f1e
--- /dev/null
+++ b/pkg/lexer/openedgeabl.go
@@ -0,0 +1,51 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// OpenEdgeABL lexer.
+type OpenEdgeABL struct{}
+
+// Lexer returns the lexer.
+func (l OpenEdgeABL) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ // try to identify OpenEdge ABL based on a few common constructs.
+ var result float32
+
+ if strings.Contains(text, "END.") {
+ result += 0.05
+ }
+
+ if strings.Contains(text, "END PROCEDURE.") {
+ result += 0.05
+ }
+
+ if strings.Contains(text, "ELSE DO:") {
+ result += 0.05
+ }
+
+ return result
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (OpenEdgeABL) Name() string {
+ return heartbeat.LanguageOpenEdgeABL.StringChroma()
+}
diff --git a/pkg/lexer/openedgeabl_test.go b/pkg/lexer/openedgeabl_test.go
new file mode 100644
index 00000000..90d94651
--- /dev/null
+++ b/pkg/lexer/openedgeabl_test.go
@@ -0,0 +1,41 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestOpenEdge_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "end": {
+ Filepath: "testdata/openedge_end.p",
+ Expected: 0.05,
+ },
+ "end procedure": {
+ Filepath: "testdata/openedge_end_procedure.p",
+ Expected: 0.05,
+ },
+ "else do": {
+ Filepath: "testdata/openedge_else_do.p",
+ Expected: 0.05,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.OpenEdgeABL{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/pan.go b/pkg/lexer/pan.go
new file mode 100644
index 00000000..4f022923
--- /dev/null
+++ b/pkg/lexer/pan.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Pan lexer.
+type Pan struct{}
+
+// Lexer returns the lexer.
+func (l Pan) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"pan"},
+ Filenames: []string{"*.pan"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Pan) Name() string {
+ return heartbeat.LanguagePan.StringChroma()
+}
diff --git a/pkg/lexer/parasail.go b/pkg/lexer/parasail.go
new file mode 100644
index 00000000..48c9b1d9
--- /dev/null
+++ b/pkg/lexer/parasail.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// ParaSail lexer.
+type ParaSail struct{}
+
+// Lexer returns the lexer.
+func (l ParaSail) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"parasail"},
+ Filenames: []string{"*.psi", "*.psl"},
+ MimeTypes: []string{"text/x-parasail"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (ParaSail) Name() string {
+ return heartbeat.LanguageParaSail.StringChroma()
+}
diff --git a/pkg/lexer/pawn.go b/pkg/lexer/pawn.go
new file mode 100644
index 00000000..17608967
--- /dev/null
+++ b/pkg/lexer/pawn.go
@@ -0,0 +1,46 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Pawn lexer.
+type Pawn struct{}
+
+// Lexer returns the lexer.
+func (l Pawn) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"pawn"},
+ Filenames: []string{"*.p", "*.pwn", "*.inc"},
+ MimeTypes: []string{"text/x-pawn"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // This is basically C. There is a keyword which doesn't exist in C
+ // though and is nearly unique to this language.
+ if strings.Contains(text, "tagof") {
+ return 0.01
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (Pawn) Name() string {
+ return heartbeat.LanguagePawn.StringChroma()
+}
diff --git a/pkg/lexer/pawn_test.go b/pkg/lexer/pawn_test.go
new file mode 100644
index 00000000..0d4807a7
--- /dev/null
+++ b/pkg/lexer/pawn_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestPawn_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/pawn_tagof.pwn")
+ assert.NoError(t, err)
+
+ l := lexer.Pawn{}.Lexer()
+
+ assert.Equal(t, float32(0.01), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/peg.go b/pkg/lexer/peg.go
new file mode 100644
index 00000000..d63a701c
--- /dev/null
+++ b/pkg/lexer/peg.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// PEG lexer.
+type PEG struct{}
+
+// Lexer returns the lexer.
+func (l PEG) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"peg"},
+ Filenames: []string{"*.peg"},
+ MimeTypes: []string{"text/x-peg"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (PEG) Name() string {
+ return heartbeat.LanguagePEG.StringChroma()
+}
diff --git a/pkg/lexer/perl.go b/pkg/lexer/perl.go
new file mode 100644
index 00000000..2bdd03ab
--- /dev/null
+++ b/pkg/lexer/perl.go
@@ -0,0 +1,56 @@
+package lexer
+
+import (
+ "regexp"
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+ "github.com/wakatime/wakatime-cli/pkg/shebang"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+var perlAnalyserRe = regexp.MustCompile(`(?:my|our)\s+[$@%(]`)
+
+// Perl lexer.
+type Perl struct{}
+
+// Lexer returns the lexer.
+func (l Perl) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ if matched, _ := shebang.MatchString(text, "perl"); matched {
+ return 1.0
+ }
+
+ var result float32
+
+ if perlAnalyserRe.MatchString(text) {
+ result += 0.9
+ }
+
+ if strings.Contains(text, ":=") {
+ // := is not valid Perl, but it appears in unicon, so we should
+ // become less confident if we think we found Perl with :=
+ result /= 2
+ }
+
+ return result
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (Perl) Name() string {
+ return heartbeat.LanguagePerl.StringChroma()
+}
diff --git a/pkg/lexer/perl6.go b/pkg/lexer/perl6.go
new file mode 100644
index 00000000..9c8f0bd9
--- /dev/null
+++ b/pkg/lexer/perl6.go
@@ -0,0 +1,140 @@
+package lexer
+
+import (
+ "regexp"
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+ "github.com/wakatime/wakatime-cli/pkg/shebang"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var (
+ perl6AnalyserDecl = regexp.MustCompile(`(?:my|our|has)\s+(?:['\w:-]+\s+)?[$@%&(]`)
+	perl6AnalyserDeclScope = regexp.MustCompile(`^\s*(?:(?P<scope>my|our)\s+)?(?:module|class|role|enum|grammar)`)
+ perl6AnalyserOperator = regexp.MustCompile(`#.*`)
+ perl6AnalyserShell = regexp.MustCompile(`^\s*$`)
+ perl6AnalyserV6 = regexp.MustCompile(`^\s*(?:use\s+)?v6(?:\.\d(?:\.\d)?)?;`)
+ perl6BeginPodRe = regexp.MustCompile(`^=\w+`)
+ perl6EndPodRe = regexp.MustCompile(`^=(?:end|cut)`)
+)
+
+// Perl6 lexer.
+type Perl6 struct{}
+
+// Lexer returns the lexer.
+func (l Perl6) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"perl6", "pl6", "raku"},
+ Filenames: []string{"*.pl", "*.pm", "*.nqp", "*.p6", "*.6pl", "*.p6l", "*.pl6",
+ "*.6pm", "*.p6m", "*.pm6", "*.t", "*.raku", "*.rakumod", "*.rakutest", "*.rakudoc"},
+ MimeTypes: []string{"text/x-perl6", "application/x-perl6"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ if matched, _ := shebang.MatchString(text, "perl6|rakudo|niecza|pugs"); matched {
+ return 1.0
+ }
+
+ var (
+ result float32
+ hasPerlDecl bool
+ )
+
+ // Check for my/our/has declarations.
+ if perl6AnalyserDecl.MatchString(text) {
+ result = 0.8
+ hasPerlDecl = true
+ }
+
+ // XXX handle block comments.
+ lines := perl6StripPod(text)
+
+ for _, line := range lines {
+ line = perl6AnalyserOperator.ReplaceAllLiteralString(line, "")
+
+ if perl6AnalyserShell.MatchString(line) {
+ continue
+ }
+
+ // Match v6; use v6; use v6.0; use v6.0.0.
+ if perl6AnalyserV6.MatchString(line) {
+ return 1.0
+ }
+
+ // Match class, module, role, enum, grammar declarations.
+ classDecl := perl6AnalyserDeclScope.FindStringSubmatch(line)
+ if len(classDecl) > 0 {
+ if hasPerlDecl || perl6GetSubgroups(classDecl)["scope"] != "" {
+ return 1.0
+ }
+
+ result = 0.05
+ continue
+ }
+ break
+ }
+
+ if strings.Contains(text, ":=") {
+ // Same logic as Perl lexer.
+ result /= 2
+ }
+
+ return result
+ })
+
+ return lexer
+}
+
+func perl6StripPod(text string) []string {
+ var (
+ inPod bool
+ strippedLines []string
+ )
+
+ lines := strings.Split(text, "\n")
+
+ for _, line := range lines {
+ if perl6EndPodRe.MatchString(line) {
+ inPod = false
+ continue
+ }
+
+ if perl6BeginPodRe.MatchString(line) {
+ inPod = true
+ continue
+ }
+
+ if !inPod {
+ strippedLines = append(strippedLines, line)
+ }
+ }
+
+ return strippedLines
+}
+
+func perl6GetSubgroups(match []string) map[string]string {
+ groups := make(map[string]string)
+
+ for i, name := range perl6AnalyserDeclScope.SubexpNames() {
+ if i > 0 && i < len(match) {
+ groups[name] = match[i]
+ }
+ }
+
+ return groups
+}
+
+// Name returns the name of the lexer.
+func (Perl6) Name() string {
+ return heartbeat.LanguagePerl6.StringChroma()
+}
diff --git a/pkg/lexer/perl6_test.go b/pkg/lexer/perl6_test.go
new file mode 100644
index 00000000..0cc7b469
--- /dev/null
+++ b/pkg/lexer/perl6_test.go
@@ -0,0 +1,53 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestPerl6_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "shebang": {
+ Filepath: "testdata/perl6_shebang.pl6",
+ Expected: 1.0,
+ },
+ "v6": {
+ Filepath: "testdata/perl6_v6.pl6",
+ Expected: 1.0,
+ },
+ "enum": {
+ Filepath: "testdata/perl6_enum.pl6",
+ Expected: 0.05,
+ },
+ "scoped class": {
+ Filepath: "testdata/perl6_scoped_class.pl6",
+ Expected: 1.0,
+ },
+ "assignment": {
+ Filepath: "testdata/perl6_assign.pl6",
+ Expected: 0.4,
+ },
+ "strip pod": {
+ Filepath: "testdata/perl6_pod.pl6",
+ Expected: 0.4,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.Perl6{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/perl_test.go b/pkg/lexer/perl_test.go
new file mode 100644
index 00000000..bc2e208b
--- /dev/null
+++ b/pkg/lexer/perl_test.go
@@ -0,0 +1,41 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestPerl_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "shebang": {
+ Filepath: "testdata/perl_shebang.pl",
+ Expected: 1.0,
+ },
+ "basic": {
+ Filepath: "testdata/perl_basic.pl",
+ Expected: 0.9,
+ },
+ "unicon": {
+ Filepath: "testdata/perl_unicon_like.pl",
+ Expected: 0.0,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.Perl{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/pike.go b/pkg/lexer/pike.go
new file mode 100644
index 00000000..362a5b96
--- /dev/null
+++ b/pkg/lexer/pike.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Pike lexer.
+type Pike struct{}
+
+// Lexer returns the lexer.
+func (l Pike) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"pike"},
+ Filenames: []string{"*.pike", "*.pmod"},
+ MimeTypes: []string{"text/x-pike"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Pike) Name() string {
+ return heartbeat.LanguagePike.StringChroma()
+}
diff --git a/pkg/lexer/pointless.go b/pkg/lexer/pointless.go
new file mode 100644
index 00000000..c5b9d0c0
--- /dev/null
+++ b/pkg/lexer/pointless.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Pointless lexer.
+type Pointless struct{}
+
+// Lexer returns the lexer.
+func (l Pointless) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"pointless"},
+ Filenames: []string{"*.ptls"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Pointless) Name() string {
+ return heartbeat.LanguagePointless.StringChroma()
+}
diff --git a/pkg/lexer/povray.go b/pkg/lexer/povray.go
new file mode 100644
index 00000000..0059ceac
--- /dev/null
+++ b/pkg/lexer/povray.go
@@ -0,0 +1,60 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// POVRay lexer.
+type POVRay struct{}
+
+// Lexer returns the lexer.
+func (l POVRay) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ // POVRAY is similar to JSON/C, but the combination of camera and
+ // light_source is probably not very likely elsewhere. HLSL or GLSL
+ // are similar (GLSL even has #version), but they miss #declare, and
+ // light_source/camera are not keywords anywhere else -- it's fair
+ // to assume though that any POVRAY scene must have a camera and
+ // lightsource.
+ var result float32
+
+ if strings.Contains(text, "#version") {
+ result += 0.05
+ }
+
+ if strings.Contains(text, "#declare") {
+ result += 0.05
+ }
+
+ if strings.Contains(text, "camera") {
+ result += 0.05
+ }
+
+ if strings.Contains(text, "light_source") {
+ result += 0.1
+ }
+
+ return result
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (POVRay) Name() string {
+ return heartbeat.LanguagePOVRay.StringChroma()
+}
diff --git a/pkg/lexer/povray_test.go b/pkg/lexer/povray_test.go
new file mode 100644
index 00000000..3b8cd480
--- /dev/null
+++ b/pkg/lexer/povray_test.go
@@ -0,0 +1,45 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestPovRay_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "camera": {
+ Filepath: "testdata/povray_camera.pov",
+ Expected: 0.05,
+ },
+ "light_source": {
+ Filepath: "testdata/povray_light_source.pov",
+ Expected: 0.1,
+ },
+ "declare": {
+ Filepath: "testdata/povray_declare.pov",
+ Expected: 0.05,
+ },
+ "version": {
+ Filepath: "testdata/povray_version.pov",
+ Expected: 0.05,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.POVRay{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/powershellsession.go b/pkg/lexer/powershellsession.go
new file mode 100644
index 00000000..608ef721
--- /dev/null
+++ b/pkg/lexer/powershellsession.go
@@ -0,0 +1,30 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// PowerShellSession lexer.
+type PowerShellSession struct{}
+
+// Lexer returns the lexer.
+func (l PowerShellSession) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"ps1con"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (PowerShellSession) Name() string {
+ return heartbeat.LanguagePowerShellSession.StringChroma()
+}
diff --git a/pkg/lexer/praat.go b/pkg/lexer/praat.go
new file mode 100644
index 00000000..f20ed4df
--- /dev/null
+++ b/pkg/lexer/praat.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Praat lexer.
+type Praat struct{}
+
+// Lexer returns the lexer.
+func (l Praat) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"praat"},
+ Filenames: []string{"*.praat", "*.proc", "*.psc"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Praat) Name() string {
+ return heartbeat.LanguagePraat.StringChroma()
+}
diff --git a/pkg/lexer/processing.go b/pkg/lexer/processing.go
new file mode 100644
index 00000000..2f900413
--- /dev/null
+++ b/pkg/lexer/processing.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Processing lexer.
+type Processing struct{}
+
+// Lexer returns the lexer.
+func (l Processing) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"processing"},
+ Filenames: []string{"*.pde"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Processing) Name() string {
+ return heartbeat.LanguageProcessing.StringChroma()
+}
diff --git a/pkg/lexer/prolog.go b/pkg/lexer/prolog.go
new file mode 100644
index 00000000..072e391f
--- /dev/null
+++ b/pkg/lexer/prolog.go
@@ -0,0 +1,40 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// Prolog lexer.
+type Prolog struct{}
+
+// Lexer returns the lexer.
+func (l Prolog) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ if strings.Contains(text, ":-") {
+ return 1.0
+ }
+
+ return 0
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (Prolog) Name() string {
+ return heartbeat.LanguageProlog.StringChroma()
+}
diff --git a/pkg/lexer/prolog_test.go b/pkg/lexer/prolog_test.go
new file mode 100644
index 00000000..6989f4ce
--- /dev/null
+++ b/pkg/lexer/prolog_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestProlog_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/prolog.ecl")
+ assert.NoError(t, err)
+
+ l := lexer.Prolog{}.Lexer()
+
+ assert.Equal(t, float32(1.0), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/psql.go b/pkg/lexer/psql.go
new file mode 100644
index 00000000..99663421
--- /dev/null
+++ b/pkg/lexer/psql.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// PostgresConsole lexer.
+type PostgresConsole struct{}
+
+// Lexer returns the lexer.
+func (l PostgresConsole) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"psql", "postgresql-console", "postgres-console"},
+ MimeTypes: []string{"text/x-postgresql-psql"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (PostgresConsole) Name() string {
+ return heartbeat.LanguagePostgresConsole.StringChroma()
+}
diff --git a/pkg/lexer/psyshphp.go b/pkg/lexer/psyshphp.go
new file mode 100644
index 00000000..9cb4c94c
--- /dev/null
+++ b/pkg/lexer/psyshphp.go
@@ -0,0 +1,30 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// PsyShPHP lexer.
+type PsyShPHP struct{}
+
+// Lexer returns the lexer.
+func (l PsyShPHP) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"psysh"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (PsyShPHP) Name() string {
+ return heartbeat.LanguagePsyShPHP.StringChroma()
+}
diff --git a/pkg/lexer/pug.go b/pkg/lexer/pug.go
new file mode 100644
index 00000000..67e3ff2a
--- /dev/null
+++ b/pkg/lexer/pug.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Pug lexer.
+type Pug struct{}
+
+// Lexer returns the lexer.
+func (l Pug) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"pug", "jade"},
+ Filenames: []string{"*.pug", "*.jade"},
+ MimeTypes: []string{"text/x-pug", "text/x-jade"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Pug) Name() string {
+ return heartbeat.LanguagePug.StringChroma()
+}
diff --git a/pkg/lexer/pypylog.go b/pkg/lexer/pypylog.go
new file mode 100644
index 00000000..7c26ca3e
--- /dev/null
+++ b/pkg/lexer/pypylog.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// PyPyLog lexer.
+type PyPyLog struct{}
+
+// Lexer returns the lexer.
+func (l PyPyLog) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"pypylog", "pypy"},
+ Filenames: []string{"*.pypylog"},
+ MimeTypes: []string{"application/x-pypylog"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (PyPyLog) Name() string {
+ return heartbeat.LanguagePyPyLog.StringChroma()
+}
diff --git a/pkg/lexer/python.go b/pkg/lexer/python.go
new file mode 100644
index 00000000..17738441
--- /dev/null
+++ b/pkg/lexer/python.go
@@ -0,0 +1,47 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+ "github.com/wakatime/wakatime-cli/pkg/shebang"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// Python lexer.
+type Python struct{}
+
+// Lexer returns the lexer.
+func (l Python) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ matched, _ := shebang.MatchString(text, `pythonw?(3(\.\d)?)?`)
+
+ if len(text) > 1000 {
+ text = text[:1000]
+ }
+
+ if matched || strings.Contains(text, "import ") {
+ return 1.0
+ }
+
+ return 0
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (Python) Name() string {
+ return heartbeat.LanguagePython.StringChroma()
+}
diff --git a/pkg/lexer/python2.go b/pkg/lexer/python2.go
new file mode 100644
index 00000000..58ac675c
--- /dev/null
+++ b/pkg/lexer/python2.go
@@ -0,0 +1,39 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+ "github.com/wakatime/wakatime-cli/pkg/shebang"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// Python2 lexer.
+type Python2 struct{}
+
+// Lexer returns the lexer.
+func (l Python2) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ if matched, _ := shebang.MatchString(text, `pythonw?2(\.\d)?`); matched {
+ return 1.0
+ }
+
+ return 0
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (Python2) Name() string {
+ return heartbeat.LanguagePython2.StringChroma()
+}
diff --git a/pkg/lexer/python2_test.go b/pkg/lexer/python2_test.go
new file mode 100644
index 00000000..d8881795
--- /dev/null
+++ b/pkg/lexer/python2_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestPython2_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/python2_shebang.py")
+ assert.NoError(t, err)
+
+ l := lexer.Python2{}.Lexer()
+
+ assert.Equal(t, float32(1.0), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/python2traceback.go b/pkg/lexer/python2traceback.go
new file mode 100644
index 00000000..3a55a428
--- /dev/null
+++ b/pkg/lexer/python2traceback.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Python2Traceback lexer.
+type Python2Traceback struct{}
+
+// Lexer returns the lexer.
+func (l Python2Traceback) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"py2tb"},
+ Filenames: []string{"*.py2tb"},
+ MimeTypes: []string{"text/x-python2-traceback"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Python2Traceback) Name() string {
+ return heartbeat.LanguagePython2Traceback.StringChroma()
+}
diff --git a/pkg/lexer/python_test.go b/pkg/lexer/python_test.go
new file mode 100644
index 00000000..de907e10
--- /dev/null
+++ b/pkg/lexer/python_test.go
@@ -0,0 +1,37 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestPython_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "import": {
+ Filepath: "testdata/python3_import.py",
+ Expected: 1.0,
+ },
+ "shebang": {
+ Filepath: "testdata/python3_shebang.py",
+ Expected: 1.0,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.Python{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/pythonconsole.go b/pkg/lexer/pythonconsole.go
new file mode 100644
index 00000000..d4caccbf
--- /dev/null
+++ b/pkg/lexer/pythonconsole.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// PythonConsole lexer.
+type PythonConsole struct{}
+
+// Lexer returns the lexer.
+func (l PythonConsole) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"pycon"},
+ MimeTypes: []string{"text/x-python-doctest"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (PythonConsole) Name() string {
+ return heartbeat.LanguagePythonConsole.StringChroma()
+}
diff --git a/pkg/lexer/pythontraceback.go b/pkg/lexer/pythontraceback.go
new file mode 100644
index 00000000..9b9c765f
--- /dev/null
+++ b/pkg/lexer/pythontraceback.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// PythonTraceback lexer.
+type PythonTraceback struct{}
+
+// Lexer returns the lexer.
+func (l PythonTraceback) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"pytb", "py3tb"},
+ Filenames: []string{"*.pytb", "*.py3tb"},
+ MimeTypes: []string{"text/x-python-traceback", "text/x-python3-traceback"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (PythonTraceback) Name() string {
+ return heartbeat.LanguagePythonTraceback.StringChroma()
+}
diff --git a/pkg/lexer/qbasic.go b/pkg/lexer/qbasic.go
new file mode 100644
index 00000000..8120f2dc
--- /dev/null
+++ b/pkg/lexer/qbasic.go
@@ -0,0 +1,40 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// QBasic lexer.
+type QBasic struct{}
+
+// Lexer returns the lexer.
+func (l QBasic) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ if strings.Contains(text, "$DYNAMIC") || strings.Contains(text, "$STATIC") {
+ return 0.9
+ }
+
+ return 0
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (QBasic) Name() string {
+ return heartbeat.LanguageQBasic.StringChroma()
+}
diff --git a/pkg/lexer/qbasic_test.go b/pkg/lexer/qbasic_test.go
new file mode 100644
index 00000000..2d5c6d5e
--- /dev/null
+++ b/pkg/lexer/qbasic_test.go
@@ -0,0 +1,37 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestQBasic_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "dynamic_cmd": {
+ Filepath: "testdata/qbasic_dynamiccmd.bas",
+ Expected: 0.9,
+ },
+ "static_cmd": {
+ Filepath: "testdata/qbasic_staticcmd.bas",
+ Expected: 0.9,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.QBasic{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/qvto.go b/pkg/lexer/qvto.go
new file mode 100644
index 00000000..010d441d
--- /dev/null
+++ b/pkg/lexer/qvto.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// QVTO lexer. For the QVT Operational Mapping language.
+type QVTO struct{}
+
+// Lexer returns the lexer.
+func (l QVTO) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"qvto", "qvt"},
+ Filenames: []string{"*.qvto"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (QVTO) Name() string {
+ return heartbeat.LanguageQVTO.StringChroma()
+}
diff --git a/pkg/lexer/r.go b/pkg/lexer/r.go
new file mode 100644
index 00000000..2d086036
--- /dev/null
+++ b/pkg/lexer/r.go
@@ -0,0 +1,43 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+ "github.com/dlclark/regexp2"
+)
+
+// nolint:gochecknoglobals
+var rAnalyzerRe = regexp2.MustCompile(`[a-z0-9_\])\s]<-(?!-)`, regexp2.None)
+
+// R and also S lexer.
+type R struct{}
+
+// Lexer returns the lexer.
+func (l R) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ matched, _ := rAnalyzerRe.MatchString(text)
+ if matched {
+ return 0.11
+ }
+
+ return 0
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (R) Name() string {
+ return heartbeat.LanguageR.StringChroma()
+}
diff --git a/pkg/lexer/r_test.go b/pkg/lexer/r_test.go
new file mode 100644
index 00000000..212f4616
--- /dev/null
+++ b/pkg/lexer/r_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestR_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/r_expression.r")
+ assert.NoError(t, err)
+
+ l := lexer.R{}.Lexer()
+
+ assert.Equal(t, float32(0.11), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/ragelembedded.go b/pkg/lexer/ragelembedded.go
new file mode 100644
index 00000000..b615c70c
--- /dev/null
+++ b/pkg/lexer/ragelembedded.go
@@ -0,0 +1,43 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// RagelEmbedded lexer. A lexer for Ragel embedded in a host language file.
+type RagelEmbedded struct{}
+
+// Lexer returns the lexer.
+func (l RagelEmbedded) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"ragel-em"},
+ Filenames: []string{"*.rl"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ if strings.Contains(text, "@LANG: indep") {
+ return 1.0
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (RagelEmbedded) Name() string {
+ return heartbeat.LanguageRagelEmbedded.StringChroma()
+}
diff --git a/pkg/lexer/ragelembedded_test.go b/pkg/lexer/ragelembedded_test.go
new file mode 100644
index 00000000..8fe99ab4
--- /dev/null
+++ b/pkg/lexer/ragelembedded_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestRagelEmbedded_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/ragel.rl")
+ assert.NoError(t, err)
+
+ l := lexer.RagelEmbedded{}.Lexer()
+
+ assert.Equal(t, float32(1.0), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/rawtoken.go b/pkg/lexer/rawtoken.go
new file mode 100644
index 00000000..67c4dfa0
--- /dev/null
+++ b/pkg/lexer/rawtoken.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// RawToken lexer.
+type RawToken struct{}
+
+// Lexer returns the lexer.
+func (l RawToken) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"raw"},
+ MimeTypes: []string{"application/x-pygments-tokens"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (RawToken) Name() string {
+ return heartbeat.LanguageRawToken.StringChroma()
+}
diff --git a/pkg/lexer/razor.go b/pkg/lexer/razor.go
new file mode 100644
index 00000000..2bc5298a
--- /dev/null
+++ b/pkg/lexer/razor.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Razor lexer. Lexer for Blazor's Razor files.
+type Razor struct{}
+
+// Lexer returns the lexer.
+func (l Razor) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"razor"},
+ Filenames: []string{"*.razor"},
+ MimeTypes: []string{"text/html"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Razor) Name() string {
+ return heartbeat.LanguageRazor.StringChroma()
+}
diff --git a/pkg/lexer/rconsole.go b/pkg/lexer/rconsole.go
new file mode 100644
index 00000000..a1222be2
--- /dev/null
+++ b/pkg/lexer/rconsole.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// RConsole lexer. For R console transcripts or R CMD BATCH output files.
+type RConsole struct{}
+
+// Lexer returns the lexer.
+func (l RConsole) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"rconsole", "rout"},
+ Filenames: []string{"*.Rout"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (RConsole) Name() string {
+ return heartbeat.LanguageRConsole.StringChroma()
+}
diff --git a/pkg/lexer/rd.go b/pkg/lexer/rd.go
new file mode 100644
index 00000000..9f694ccc
--- /dev/null
+++ b/pkg/lexer/rd.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Rd lexer. Lexer for R documentation (Rd) files.
+type Rd struct{}
+
+// Lexer returns the lexer.
+func (l Rd) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"rd"},
+ Filenames: []string{"*.Rd"},
+ MimeTypes: []string{"text/x-r-doc"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Rd) Name() string {
+ return heartbeat.LanguageRd.StringChroma()
+}
diff --git a/pkg/lexer/rebol.go b/pkg/lexer/rebol.go
new file mode 100644
index 00000000..25d0212e
--- /dev/null
+++ b/pkg/lexer/rebol.go
@@ -0,0 +1,54 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var (
+ rebolAnalyserHeaderRe = regexp.MustCompile(`^\s*REBOL\s*\[`)
+ rebolAnalyserHeaderPrecedingTextRe = regexp.MustCompile(`\s*REBOL\s*\[`)
+)
+
+// REBOL lexer.
+type REBOL struct{}
+
+// Lexer returns the lexer.
+func (l REBOL) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"rebol"},
+ Filenames: []string{"*.r", "*.r3", "*.reb"},
+ MimeTypes: []string{"text/x-rebol"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // Check if code contains REBOL header, then it's probably not R code
+ if rebolAnalyserHeaderRe.MatchString(text) {
+ return 1.0
+ }
+
+ if rebolAnalyserHeaderPrecedingTextRe.MatchString(text) {
+ return 0.5
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (REBOL) Name() string {
+ return heartbeat.LanguageREBOL.StringChroma()
+}
diff --git a/pkg/lexer/rebol_test.go b/pkg/lexer/rebol_test.go
new file mode 100644
index 00000000..27f8a355
--- /dev/null
+++ b/pkg/lexer/rebol_test.go
@@ -0,0 +1,38 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestREBOL_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "standard": {
+ Filepath: "testdata/rebol.r",
+ Expected: 1.0,
+ },
+
+ "header preceding text": {
+ Filepath: "testdata/rebol_header_preceding_text.r",
+ Expected: 0.5,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.REBOL{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/red.go b/pkg/lexer/red.go
new file mode 100644
index 00000000..f6c8d007
--- /dev/null
+++ b/pkg/lexer/red.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Red lexer. A Red-language lexer.
+type Red struct{}
+
+// Lexer returns the lexer.
+func (l Red) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"red", "red/system"},
+ Filenames: []string{"*.red", "*.reds"},
+ MimeTypes: []string{"text/x-red", "text/x-red-system"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Red) Name() string {
+ return heartbeat.LanguageRed.StringChroma()
+}
diff --git a/pkg/lexer/redcode.go b/pkg/lexer/redcode.go
new file mode 100644
index 00000000..876c4d88
--- /dev/null
+++ b/pkg/lexer/redcode.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Redcode lexer.
+type Redcode struct{}
+
+// Lexer returns the lexer.
+func (l Redcode) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"redcode"},
+ Filenames: []string{"*.cw"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Redcode) Name() string {
+ return heartbeat.LanguageRedcode.StringChroma()
+}
diff --git a/pkg/lexer/rescript.go b/pkg/lexer/rescript.go
new file mode 100644
index 00000000..7476cbea
--- /dev/null
+++ b/pkg/lexer/rescript.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// ReScript lexer.
+type ReScript struct{}
+
+// Lexer returns the lexer.
+func (l ReScript) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"rescript"},
+ Filenames: []string{"*.res", "*.resi"},
+ MimeTypes: []string{"text/x-rescript"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (ReScript) Name() string {
+ return heartbeat.LanguageReScript.StringChroma()
+}
diff --git a/pkg/lexer/resourcebundle.go b/pkg/lexer/resourcebundle.go
new file mode 100644
index 00000000..a0e3800a
--- /dev/null
+++ b/pkg/lexer/resourcebundle.go
@@ -0,0 +1,43 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// ResourceBundle lexer. Lexer for ICU ResourceBundle bundles.
+//
+type ResourceBundle struct{}
+
+// Lexer returns the lexer.
+func (l ResourceBundle) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"resource", "resourcebundle"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ if strings.HasPrefix(text, "root:table") {
+ return 1.0
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (ResourceBundle) Name() string {
+ return heartbeat.LanguageResourceBundle.StringChroma()
+}
diff --git a/pkg/lexer/resourcebundle_test.go b/pkg/lexer/resourcebundle_test.go
new file mode 100644
index 00000000..6ed6f2b1
--- /dev/null
+++ b/pkg/lexer/resourcebundle_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestResourceBundle_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/resource.txt")
+ assert.NoError(t, err)
+
+ l := lexer.ResourceBundle{}.Lexer()
+
+ assert.Equal(t, float32(1.0), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/rhtml.go b/pkg/lexer/rhtml.go
new file mode 100644
index 00000000..0fa0604d
--- /dev/null
+++ b/pkg/lexer/rhtml.go
@@ -0,0 +1,48 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/doctype"
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// RHTML lexer. Subclass of the ERB lexer that highlights the unlexed data
+// with the html lexer.
+type RHTML struct{}
+
+// Lexer returns the lexer.
+func (l RHTML) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"rhtml", "html+erb", "html+ruby"},
+ Filenames: []string{"*.rhtml"},
+ AliasFilenames: []string{"*.html", "*.htm", "*.xhtml"},
+ MimeTypes: []string{"text/html+ruby"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ result := ERB{}.Lexer().AnalyseText(text) - 0.01
+
+ if matched, _ := doctype.MatchString(text, "html"); matched {
+ // one more than the XmlErbLexer returns
+ result += 0.5
+ }
+
+ return result
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (RHTML) Name() string {
+ return heartbeat.LanguageRHTML.StringChroma()
+}
diff --git a/pkg/lexer/rhtml_test.go b/pkg/lexer/rhtml_test.go
new file mode 100644
index 00000000..a53123f2
--- /dev/null
+++ b/pkg/lexer/rhtml_test.go
@@ -0,0 +1,38 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestRHTML_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "standard": {
+ Filepath: "testdata/rhtml.rhtml",
+ Expected: 0.89,
+ },
+
+ "header preceding text": {
+ Filepath: "testdata/html.rhtml",
+ Expected: 0.49,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.RHTML{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/ride.go b/pkg/lexer/ride.go
new file mode 100644
index 00000000..272fcd8b
--- /dev/null
+++ b/pkg/lexer/ride.go
@@ -0,0 +1,33 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Ride lexer. For Ride
+// source code.
+type Ride struct{}
+
+// Lexer returns the lexer.
+func (l Ride) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"ride"},
+ Filenames: []string{"*.ride"},
+ MimeTypes: []string{"text/x-ride"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Ride) Name() string {
+ return heartbeat.LanguageRide.StringChroma()
+}
diff --git a/pkg/lexer/rngcompact.go b/pkg/lexer/rngcompact.go
new file mode 100644
index 00000000..8eb18090
--- /dev/null
+++ b/pkg/lexer/rngcompact.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// RNGCompact lexer. For RelaxNG-compact syntax.
+type RNGCompact struct{}
+
+// Lexer returns the lexer.
+func (l RNGCompact) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"rnc", "rng-compact"},
+ Filenames: []string{"*.rnc"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (RNGCompact) Name() string {
+ return heartbeat.LanguageRNGCompact.StringChroma()
+}
diff --git a/pkg/lexer/roboconfgraph.go b/pkg/lexer/roboconfgraph.go
new file mode 100644
index 00000000..5e095c59
--- /dev/null
+++ b/pkg/lexer/roboconfgraph.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// RoboconfGraph lexer for Roboconf graph files.
+type RoboconfGraph struct{}
+
+// Lexer returns the lexer.
+func (l RoboconfGraph) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"roboconf-graph"},
+ Filenames: []string{"*.graph"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (RoboconfGraph) Name() string {
+ return heartbeat.LanguageRoboconfGraph.StringChroma()
+}
diff --git a/pkg/lexer/roboconfinstances.go b/pkg/lexer/roboconfinstances.go
new file mode 100644
index 00000000..d7962e3f
--- /dev/null
+++ b/pkg/lexer/roboconfinstances.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// RoboconfInstances lexer for Roboconf instances files.
+type RoboconfInstances struct{}
+
+// Lexer returns the lexer.
+func (l RoboconfInstances) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"roboconf-instances"},
+ Filenames: []string{"*.instances"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (RoboconfInstances) Name() string {
+ return heartbeat.LanguageRoboconfInstances.StringChroma()
+}
diff --git a/pkg/lexer/robotframework.go b/pkg/lexer/robotframework.go
new file mode 100644
index 00000000..54352258
--- /dev/null
+++ b/pkg/lexer/robotframework.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// RobotFramework lexer for Robot Framework test data.
+type RobotFramework struct{}
+
+// Lexer returns the lexer.
+func (l RobotFramework) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"robotframework"},
+ Filenames: []string{"*.robot"},
+ MimeTypes: []string{"text/x-robotframework"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (RobotFramework) Name() string {
+ return heartbeat.LanguageRobotFramework.StringChroma()
+}
diff --git a/pkg/lexer/rpmspec.go b/pkg/lexer/rpmspec.go
new file mode 100644
index 00000000..6dd89afa
--- /dev/null
+++ b/pkg/lexer/rpmspec.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// RPMSpec lexer.
+type RPMSpec struct{}
+
+// Lexer returns the lexer.
+func (l RPMSpec) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"spec"},
+ Filenames: []string{"*.spec"},
+ MimeTypes: []string{"text/x-rpm-spec"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (RPMSpec) Name() string {
+ return heartbeat.LanguageRPMSpec.StringChroma()
+}
diff --git a/pkg/lexer/rql.go b/pkg/lexer/rql.go
new file mode 100644
index 00000000..4e38fe6a
--- /dev/null
+++ b/pkg/lexer/rql.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// RQL lexer for Relation Query Language.
+type RQL struct{}
+
+// Lexer returns the lexer.
+func (l RQL) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"rql"},
+ Filenames: []string{"*.rql"},
+ MimeTypes: []string{"text/x-rql"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (RQL) Name() string {
+ return heartbeat.LanguageRQL.StringChroma()
+}
diff --git a/pkg/lexer/rsl.go b/pkg/lexer/rsl.go
new file mode 100644
index 00000000..02178329
--- /dev/null
+++ b/pkg/lexer/rsl.go
@@ -0,0 +1,49 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var rslAnalyserRe = regexp.MustCompile(`(?i)scheme\s*.*?=\s*class\s*type`)
+
+// RSL lexer. RSL is the formal
+// specification language used in RAISE (Rigorous Approach to Industrial
+// Software Engineering) method.
+type RSL struct{}
+
+// Lexer returns the lexer.
+func (l RSL) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"rsl"},
+ Filenames: []string{"*.rsl"},
+ MimeTypes: []string{"text/rsl"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // Check for the most common text in the beginning of a RSL file.
+ if rslAnalyserRe.MatchString(text) {
+ return 1.0
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (RSL) Name() string {
+ return heartbeat.LanguageRSL.StringChroma()
+}
diff --git a/pkg/lexer/rsl_test.go b/pkg/lexer/rsl_test.go
new file mode 100644
index 00000000..106de3fa
--- /dev/null
+++ b/pkg/lexer/rsl_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestRSL_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/raise.rsl")
+ assert.NoError(t, err)
+
+ l := lexer.RSL{}.Lexer()
+
+ assert.Equal(t, float32(1.0), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/rubyirbsession.go b/pkg/lexer/rubyirbsession.go
new file mode 100644
index 00000000..0cb8a52d
--- /dev/null
+++ b/pkg/lexer/rubyirbsession.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// RubyIRBSession lexer. For Ruby interactive console (irb) output.
+type RubyIRBSession struct{}
+
+// Lexer returns the lexer.
+func (l RubyIRBSession) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"rbcon", "irb"},
+ MimeTypes: []string{"text/x-ruby-shellsession"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (RubyIRBSession) Name() string {
+ return heartbeat.LanguageRubyIRBSession.StringChroma()
+}
diff --git a/pkg/lexer/sarl.go b/pkg/lexer/sarl.go
new file mode 100644
index 00000000..49775d4d
--- /dev/null
+++ b/pkg/lexer/sarl.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// SARL lexer. For SARL source code.
+type SARL struct{}
+
+// Lexer returns the lexer.
+func (l SARL) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"sarl"},
+ Filenames: []string{"*.sarl"},
+ MimeTypes: []string{"text/x-sarl"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (SARL) Name() string {
+ return heartbeat.LanguageSARL.StringChroma()
+}
diff --git a/pkg/lexer/scaml.go b/pkg/lexer/scaml.go
new file mode 100644
index 00000000..a16ab4a5
--- /dev/null
+++ b/pkg/lexer/scaml.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Scaml lexer. For Scaml markup. Scaml is Haml for Scala.
+type Scaml struct{}
+
+// Lexer returns the lexer.
+func (l Scaml) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"scaml"},
+ Filenames: []string{"*.scaml"},
+ MimeTypes: []string{"text/x-scaml"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Scaml) Name() string {
+ return heartbeat.LanguageScaml.StringChroma()
+}
diff --git a/pkg/lexer/scdoc.go b/pkg/lexer/scdoc.go
new file mode 100644
index 00000000..6d7d91ad
--- /dev/null
+++ b/pkg/lexer/scdoc.go
@@ -0,0 +1,52 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Scdoc lexer.
+type Scdoc struct{}
+
+// Lexer returns the lexer.
+func (l Scdoc) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"scdoc", "scd"},
+ Filenames: []string{"*.scd", "*.scdoc"},
+ MimeTypes: []string{},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // This is very similar to markdown, save for the escape characters
+ // needed for * and _.
+ var result float32
+
+ if strings.Contains(text, `\*`) {
+ result += 0.01
+ }
+
+ if strings.Contains(text, `\_`) {
+ result += 0.01
+ }
+
+ return result
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (Scdoc) Name() string {
+ return heartbeat.LanguageScdoc.StringChroma()
+}
diff --git a/pkg/lexer/scdoc_test.go b/pkg/lexer/scdoc_test.go
new file mode 100644
index 00000000..af99d2d8
--- /dev/null
+++ b/pkg/lexer/scdoc_test.go
@@ -0,0 +1,37 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestScdoc_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "asterisk": {
+ Filepath: "testdata/scdoc_asterisk.scd",
+ Expected: 0.01,
+ },
+ "underscore": {
+ Filepath: "testdata/scdoc_underscore.scd",
+ Expected: 0.01,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.Scdoc{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/sgf.go b/pkg/lexer/sgf.go
new file mode 100644
index 00000000..da1f52d7
--- /dev/null
+++ b/pkg/lexer/sgf.go
@@ -0,0 +1,35 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// SmartGameFormat lexer. Lexer for Smart Game Format (sgf) file format.
+//
+// The format is used to store game records of board games for two players
+// (mainly Go game). For more information about the definition of the format,
+// see: https://www.red-bean.com/sgf/
+type SmartGameFormat struct{}
+
+// Lexer returns the lexer.
+func (l SmartGameFormat) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"sgf"},
+ Filenames: []string{"*.sgf"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (SmartGameFormat) Name() string {
+ return heartbeat.LanguageSmartGameFormat.StringChroma()
+}
diff --git a/pkg/lexer/shen.go b/pkg/lexer/shen.go
new file mode 100644
index 00000000..ca71a72f
--- /dev/null
+++ b/pkg/lexer/shen.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Shen lexer. Lexer for Shen source code.
+type Shen struct{}
+
+// Lexer returns the lexer.
+func (l Shen) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"shen"},
+ Filenames: []string{"*.shen"},
+ MimeTypes: []string{"text/x-shen", "application/x-shen"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Shen) Name() string {
+ return heartbeat.LanguageShen.StringChroma()
+}
diff --git a/pkg/lexer/shexc.go b/pkg/lexer/shexc.go
new file mode 100644
index 00000000..1886343a
--- /dev/null
+++ b/pkg/lexer/shexc.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// ShExC lexer. Lexer for ShExC shape expressions language syntax.
+type ShExC struct{}
+
+// Lexer returns the lexer.
+func (l ShExC) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"shexc", "shex"},
+ Filenames: []string{"*.shex"},
+ MimeTypes: []string{"text/shex"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (ShExC) Name() string {
+ return heartbeat.LanguageShExC.StringChroma()
+}
diff --git a/pkg/lexer/silver.go b/pkg/lexer/silver.go
new file mode 100644
index 00000000..a32b9d7f
--- /dev/null
+++ b/pkg/lexer/silver.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Silver lexer. For Silver source code.
+type Silver struct{}
+
+// Lexer returns the lexer.
+func (l Silver) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"silver"},
+ Filenames: []string{"*.sil", "*.vpr"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Silver) Name() string {
+ return heartbeat.LanguageSilver.StringChroma()
+}
diff --git a/pkg/lexer/singularity.go b/pkg/lexer/singularity.go
new file mode 100644
index 00000000..412d0b7d
--- /dev/null
+++ b/pkg/lexer/singularity.go
@@ -0,0 +1,57 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+)
+
+var (
+ singularityAnalyserHeaderRe = regexp.MustCompile(`(?i)\b(?:osversion|includecmd|mirrorurl)\b`)
+ singularityAnalyserSectionRe = regexp.MustCompile(
+ `%(?:pre|post|setup|environment|help|labels|test|runscript|files|startscript)\b`)
+)
+
+// Singularity lexer.
+type Singularity struct{}
+
+// Lexer returns the lexer.
+func (l Singularity) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"singularity"},
+ Filenames: []string{"*.def", "Singularity"},
+ MimeTypes: []string{},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // This is quite a simple script file, but there are a few keywords
+ // that seem unique to this language.
+ var result float32
+
+ if singularityAnalyserHeaderRe.MatchString(text) {
+ result += 0.5
+ }
+
+ if singularityAnalyserSectionRe.MatchString(text) {
+ result += 0.49
+ }
+
+ return result
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (Singularity) Name() string {
+ return heartbeat.LanguageSingularity.StringChroma()
+}
diff --git a/pkg/lexer/singularity_test.go b/pkg/lexer/singularity_test.go
new file mode 100644
index 00000000..4bcbc9a3
--- /dev/null
+++ b/pkg/lexer/singularity_test.go
@@ -0,0 +1,41 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestSingularity_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "only header": {
+ Filepath: "testdata/singularity_only_header.def",
+ Expected: 0.5,
+ },
+ "only section": {
+ Filepath: "testdata/singularity_only_section.def",
+ Expected: 0.49,
+ },
+ "full": {
+ Filepath: "testdata/singularity_full.def",
+ Expected: 0.99,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.Singularity{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/sketchdrawing.go b/pkg/lexer/sketchdrawing.go
new file mode 100644
index 00000000..00e81d3c
--- /dev/null
+++ b/pkg/lexer/sketchdrawing.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// SketchDrawing lexer.
+type SketchDrawing struct{}
+
+// Lexer returns the lexer.
+func (l SketchDrawing) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"sketch"},
+ Filenames: []string{"*.sketch"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (SketchDrawing) Name() string {
+ return heartbeat.LanguageSketchDrawing.StringChroma()
+}
diff --git a/pkg/lexer/slash.go b/pkg/lexer/slash.go
new file mode 100644
index 00000000..8bba59c4
--- /dev/null
+++ b/pkg/lexer/slash.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Slash lexer. Lexer for the Slash programming language.
+type Slash struct{}
+
+// Lexer returns the lexer.
+func (l Slash) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"slash"},
+ Filenames: []string{"*.sla"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Slash) Name() string {
+ return heartbeat.LanguageSlash.StringChroma()
+}
diff --git a/pkg/lexer/slim.go b/pkg/lexer/slim.go
new file mode 100644
index 00000000..ac0fd53d
--- /dev/null
+++ b/pkg/lexer/slim.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Slim lexer.
+type Slim struct{}
+
+// Lexer returns the lexer.
+func (l Slim) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"slim"},
+ Filenames: []string{"*.slim"},
+ MimeTypes: []string{"text/x-slim"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Slim) Name() string {
+ return heartbeat.LanguageSlim.StringChroma()
+}
diff --git a/pkg/lexer/slint.go b/pkg/lexer/slint.go
new file mode 100644
index 00000000..04044f69
--- /dev/null
+++ b/pkg/lexer/slint.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Slint lexer. Lexer for the Slint programming language.
+type Slint struct{}
+
+// Lexer returns the lexer.
+func (l Slint) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"slint"},
+ Filenames: []string{"*.slint"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Slint) Name() string {
+ return heartbeat.LanguageSlint.StringChroma()
+}
diff --git a/pkg/lexer/slurm.go b/pkg/lexer/slurm.go
new file mode 100644
index 00000000..4f1b9d6c
--- /dev/null
+++ b/pkg/lexer/slurm.go
@@ -0,0 +1,43 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// Slurm lexer. Lexer for (ba|k|z|)sh Slurm scripts.
+type Slurm struct{}
+
+// Lexer returns the lexer.
+func (l Slurm) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"slurm", "sbatch"},
+ Filenames: []string{"*.sl"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ bash := lexers.Get(heartbeat.LanguageBash.StringChroma())
+ if bash == nil {
+ return 0
+ }
+
+ return bash.AnalyseText(text)
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (Slurm) Name() string {
+ return heartbeat.LanguageSlurm.StringChroma()
+}
diff --git a/pkg/lexer/slurm_test.go b/pkg/lexer/slurm_test.go
new file mode 100644
index 00000000..61432e77
--- /dev/null
+++ b/pkg/lexer/slurm_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestSlurm_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/slurm.sl")
+ assert.NoError(t, err)
+
+ l := lexer.Slurm{}.Lexer()
+
+ assert.Equal(t, float32(1.0), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/smali.go b/pkg/lexer/smali.go
new file mode 100644
index 00000000..7614cc44
--- /dev/null
+++ b/pkg/lexer/smali.go
@@ -0,0 +1,67 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+var (
+ smaliAnalyserClassRe = regexp.MustCompile(`(?m)^\s*\.class\s`)
+ smaliAnalyserClassKeywordsRe = regexp.MustCompile(
+ `(?m)\b((check-cast|instance-of|throw-verification-error` +
+ `)\b|(-to|add|[ais]get|[ais]put|and|cmpl|const|div|` +
+ `if|invoke|move|mul|neg|not|or|rem|return|rsub|shl` +
+ `|shr|sub|ushr)[-/])|{|}`)
+ smaliAnalyserKeywordsRe = regexp.MustCompile(
+ `(?m)(\.(catchall|epilogue|restart local|prologue)|` +
+ `\b(array-data|class-change-error|declared-synchronized|` +
+ `(field|inline|vtable)@0x[0-9a-fA-F]|generic-error|` +
+ `illegal-class-access|illegal-field-access|` +
+ `illegal-method-access|instantiation-error|no-error|` +
+ `no-such-class|no-such-field|no-such-method|` +
+ `packed-switch|sparse-switch))\b`)
+)
+
+// Smali lexer.
+type Smali struct{}
+
+// Lexer returns the lexer.
+func (l Smali) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ var result float32
+
+ if smaliAnalyserClassRe.MatchString(text) {
+ result += 0.5
+
+ if smaliAnalyserClassKeywordsRe.MatchString(text) {
+ result += 0.3
+ }
+ }
+
+ if smaliAnalyserKeywordsRe.MatchString(text) {
+ result += 0.6
+ }
+
+ return result
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (Smali) Name() string {
+ return heartbeat.LanguageSmali.StringChroma()
+}
diff --git a/pkg/lexer/smali_test.go b/pkg/lexer/smali_test.go
new file mode 100644
index 00000000..d4f1a3bf
--- /dev/null
+++ b/pkg/lexer/smali_test.go
@@ -0,0 +1,41 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestSmali_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "class": {
+ Filepath: "testdata/smali_class.smali",
+ Expected: 0.5,
+ },
+ "class with keyword": {
+ Filepath: "testdata/smali_class_keyword.smali",
+ Expected: 0.8,
+ },
+ "keyword": {
+ Filepath: "testdata/smali_keyword.smali",
+ Expected: 0.6,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.Smali{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/snowball.go b/pkg/lexer/snowball.go
new file mode 100644
index 00000000..f78ed8c1
--- /dev/null
+++ b/pkg/lexer/snowball.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Snowball lexer. Lexer for Snowball source code.
+type Snowball struct{}
+
+// Lexer returns the lexer.
+func (l Snowball) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"snowball"},
+ Filenames: []string{"*.sbl"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Snowball) Name() string {
+ return heartbeat.LanguageSnowball.StringChroma()
+}
diff --git a/pkg/lexer/sourceslist.go b/pkg/lexer/sourceslist.go
new file mode 100644
index 00000000..b056045a
--- /dev/null
+++ b/pkg/lexer/sourceslist.go
@@ -0,0 +1,46 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var sourcesListAnalyserRe = regexp.MustCompile(`(?m)^\s*(deb|deb-src) `)
+
+// SourcesList lexer. Lexer that highlights debian sources.list files.
+type SourcesList struct{}
+
+// Lexer returns the lexer.
+func (l SourcesList) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"sourceslist", "sources.list", "debsources"},
+ Filenames: []string{"sources.list"},
+ MimeTypes: []string{"application/x-debian-sourceslist"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ if sourcesListAnalyserRe.MatchString(text) {
+ return 1.0
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (SourcesList) Name() string {
+ return heartbeat.LanguageSourcesList.StringChroma()
+}
diff --git a/pkg/lexer/sourceslist_test.go b/pkg/lexer/sourceslist_test.go
new file mode 100644
index 00000000..ca628d48
--- /dev/null
+++ b/pkg/lexer/sourceslist_test.go
@@ -0,0 +1,41 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestSourcesList_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "standard": {
+ Filepath: "testdata/sources.list",
+ Expected: 1.0,
+ },
+ "indented": {
+ Filepath: "testdata/sources-indented.list",
+ Expected: 1.0,
+ },
+ "invalid": {
+ Filepath: "testdata/sources-invalid.list",
+ Expected: 0.0,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.SourcesList{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/sqlite3con.go b/pkg/lexer/sqlite3con.go
new file mode 100644
index 00000000..9353330b
--- /dev/null
+++ b/pkg/lexer/sqlite3con.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Sqlite3con lexer. Lexer for example sessions using sqlite3.
+type Sqlite3con struct{}
+
+// Lexer returns the lexer.
+func (l Sqlite3con) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"sqlite3"},
+ Filenames: []string{"*.sqlite3-console"},
+ MimeTypes: []string{"text/x-sqlite3-console"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Sqlite3con) Name() string {
+ return heartbeat.LanguageSqlite3con.StringChroma()
+}
diff --git a/pkg/lexer/ssp.go b/pkg/lexer/ssp.go
new file mode 100644
index 00000000..12ec5f3e
--- /dev/null
+++ b/pkg/lexer/ssp.go
@@ -0,0 +1,58 @@
+package lexer
+
+import (
+ "regexp"
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+ "github.com/wakatime/wakatime-cli/pkg/xml"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var sspAnalyserRe = regexp.MustCompile(`val \w+\s*:`)
+
+// SSP lexer. Lexer for Scalate Server Pages.
+type SSP struct{}
+
+// Lexer returns the lexer.
+func (l SSP) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"ssp"},
+ Filenames: []string{"*.ssp"},
+ MimeTypes: []string{"application/x-ssp"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ var result float64
+
+ if sspAnalyserRe.MatchString(text) {
+ result += 0.6
+ }
+
+ if xml.MatchString(text) {
+ result += 0.2
+ }
+
+ if strings.Contains(text, "<%") && strings.Contains(text, "%>") {
+ result += 0.1
+ }
+
+ return float32(result)
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (SSP) Name() string {
+ return heartbeat.LanguageSSP.StringChroma()
+}
diff --git a/pkg/lexer/ssp_test.go b/pkg/lexer/ssp_test.go
new file mode 100644
index 00000000..e3bd31b5
--- /dev/null
+++ b/pkg/lexer/ssp_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestSSP_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/ssp_basic.ssp")
+ assert.NoError(t, err)
+
+ l := lexer.SSP{}.Lexer()
+
+ assert.Equal(t, float32(0.9), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/stan.go b/pkg/lexer/stan.go
new file mode 100644
index 00000000..b5a9cb11
--- /dev/null
+++ b/pkg/lexer/stan.go
@@ -0,0 +1,49 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var stanAnalyserRe = regexp.MustCompile(`(?m)^\s*parameters\s*\{`)
+
+// Stan lexer. Lexer for Stan models.
+//
+// The Stan modeling language is specified in the *Stan Modeling Language
+// User's Guide and Reference Manual, v2.17.0* (available as a PDF from
+// the Stan website).
+type Stan struct{}
+
+// Lexer returns the lexer.
+func (l Stan) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"stan"},
+ Filenames: []string{"*.stan"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ if stanAnalyserRe.MatchString(text) {
+ return 1.0
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (Stan) Name() string {
+ return heartbeat.LanguageStan.StringChroma()
+}
diff --git a/pkg/lexer/stan_test.go b/pkg/lexer/stan_test.go
new file mode 100644
index 00000000..d32b66ea
--- /dev/null
+++ b/pkg/lexer/stan_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestStan_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/stan_basic.stan")
+ assert.NoError(t, err)
+
+ l := lexer.Stan{}.Lexer()
+
+ assert.Equal(t, float32(1.0), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/stata.go b/pkg/lexer/stata.go
new file mode 100644
index 00000000..b6f0054b
--- /dev/null
+++ b/pkg/lexer/stata.go
@@ -0,0 +1,37 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Stata lexer. For Stata do files.
+//
+// Syntax based on
+// - http://fmwww.bc.edu/RePEc/bocode/s/synlightlist.ado
+// - https://github.com/isagalaev/highlight.js/blob/master/src/languages/stata.js
+// - https://github.com/jpitblado/vim-stata/blob/master/syntax/stata.vim
+type Stata struct{}
+
+// Lexer returns the lexer.
+func (l Stata) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"stata", "do"},
+ Filenames: []string{"*.do", "*.ado"},
+ MimeTypes: []string{"text/x-stata", "text/stata", "application/x-stata"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Stata) Name() string {
+ return heartbeat.LanguageStata.StringChroma()
+}
diff --git a/pkg/lexer/sublimetextconfig.go b/pkg/lexer/sublimetextconfig.go
new file mode 100644
index 00000000..db7f5d67
--- /dev/null
+++ b/pkg/lexer/sublimetextconfig.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// SublimeTextConfig lexer.
+type SublimeTextConfig struct{}
+
+// Lexer returns the lexer.
+func (l SublimeTextConfig) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"sublime"},
+ Filenames: []string{"*.sublime-settings"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (SublimeTextConfig) Name() string {
+ return heartbeat.LanguageSublimeTextConfig.StringChroma()
+}
diff --git a/pkg/lexer/supercollider.go b/pkg/lexer/supercollider.go
new file mode 100644
index 00000000..6982f077
--- /dev/null
+++ b/pkg/lexer/supercollider.go
@@ -0,0 +1,45 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// SuperCollider lexer.
+type SuperCollider struct{}
+
+// Lexer returns the lexer.
+func (l SuperCollider) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"sc", "supercollider"},
+ Filenames: []string{"*.sc", "*.scd"},
+ MimeTypes: []string{"application/supercollider", "text/supercollider"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // We're searching for a common function and a unique keyword here.
+ if strings.Contains(text, "SinOsc") || strings.Contains(text, "thisFunctionDef") {
+ return 0.1
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (SuperCollider) Name() string {
+ return heartbeat.LanguageSuperCollider.StringChroma()
+}
diff --git a/pkg/lexer/supercollider_test.go b/pkg/lexer/supercollider_test.go
new file mode 100644
index 00000000..6be2dc7f
--- /dev/null
+++ b/pkg/lexer/supercollider_test.go
@@ -0,0 +1,37 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestSuperCollider_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "sinosc": {
+ Filepath: "testdata/supercollider_sinosc.sc",
+ Expected: 0.1,
+ },
+ "thisFunctionDef": {
+ Filepath: "testdata/supercollider_thisfunctiondef.sc",
+ Expected: 0.1,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.SuperCollider{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/swig.go b/pkg/lexer/swig.go
new file mode 100644
index 00000000..b031238c
--- /dev/null
+++ b/pkg/lexer/swig.go
@@ -0,0 +1,150 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var (
+ swigAnalyserDirectivesRe = regexp.MustCompile(`(?m)^\s*(%[a-z_][a-z0-9_]*)`)
+ // nolint:gochecknoglobals
+ swigAnalyserDirectives = map[string]struct{}{
+ // Most common directives
+ `%apply`: {},
+ `%define`: {},
+ `%director`: {},
+ `%enddef`: {},
+ `%exception`: {},
+ `%extend`: {},
+ `%feature`: {},
+ `%fragment`: {},
+ `%ignore`: {},
+ `%immutable`: {},
+ `%import`: {},
+ `%include`: {},
+ `%inline`: {},
+ `%insert`: {},
+ `%module`: {},
+ `%newobject`: {},
+ `%nspace`: {},
+ `%pragma`: {},
+ `%rename`: {},
+ `%shared_ptr`: {},
+ `%template`: {},
+ `%typecheck`: {},
+ `%typemap`: {},
+ // Less common directives
+ `%arg`: {},
+ `%attribute`: {},
+ `%bang`: {},
+ `%begin`: {},
+ `%callback`: {},
+ `%catches`: {},
+ `%clear`: {},
+ `%constant`: {},
+ `%copyctor`: {},
+ `%csconst`: {},
+ `%csconstvalue`: {},
+ `%csenum`: {},
+ `%csmethodmodifiers`: {},
+ `%csnothrowexception`: {},
+ `%default`: {},
+ `%defaultctor`: {},
+ `%defaultdtor`: {},
+ `%defined`: {},
+ `%delete`: {},
+ `%delobject`: {},
+ `%descriptor`: {},
+ `%exceptionclass`: {},
+ `%exceptionvar`: {},
+ `%extend_smart_pointer`: {},
+ `%fragments`: {},
+ `%header`: {},
+ `%ifcplusplus`: {},
+ `%ignorewarn`: {},
+ `%implicit`: {},
+ `%implicitconv`: {},
+ `%init`: {},
+ `%javaconst`: {},
+ `%javaconstvalue`: {},
+ `%javaenum`: {},
+ `%javaexception`: {},
+ `%javamethodmodifiers`: {},
+ `%kwargs`: {},
+ `%luacode`: {},
+ `%mutable`: {},
+ `%naturalvar`: {},
+ `%nestedworkaround`: {},
+ `%perlcode`: {},
+ `%pythonabc`: {},
+ `%pythonappend`: {},
+ `%pythoncallback`: {},
+ `%pythoncode`: {},
+ `%pythondynamic`: {},
+ `%pythonmaybecall`: {},
+ `%pythonnondynamic`: {},
+ `%pythonprepend`: {},
+ `%refobject`: {},
+ `%shadow`: {},
+ `%sizeof`: {},
+ `%trackobjects`: {},
+ `%types`: {},
+ `%unrefobject`: {},
+ `%varargs`: {},
+ `%warn`: {},
+ `%warnfilter`: {},
+ }
+)
+
+// SWIG lexer.
+type SWIG struct{}
+
+// Lexer returns the lexer.
+func (l SWIG) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"swig"},
+ Filenames: []string{"*.swg", "*.i"},
+ MimeTypes: []string{"text/swig"},
+ // Lower than C/C++ and Objective C/C++
+ Priority: 0.04,
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ var result float32
+
+ // Search for SWIG directives, which are conventionally at the beginning of
+ // a line. The probability of them being within a line is low, so let another
+ // lexer win in this case.
+ matches := swigAnalyserDirectivesRe.FindAllString(text, -1)
+
+ for _, m := range matches {
+ if _, ok := swigAnalyserDirectives[m]; ok {
+ result = 0.98
+ break
+ }
+
+ // Fraction higher than MatlabLexer
+ result = 0.91
+ }
+
+ return result
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (SWIG) Name() string {
+ return heartbeat.LanguageSWIG.StringChroma()
+}
diff --git a/pkg/lexer/swig_test.go b/pkg/lexer/swig_test.go
new file mode 100644
index 00000000..59684362
--- /dev/null
+++ b/pkg/lexer/swig_test.go
@@ -0,0 +1,37 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestSWIG_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "default": {
+ Filepath: "testdata/swig.i",
+ Expected: 0.98,
+ },
+ "unknown directive": {
+ Filepath: "testdata/swig_unknown_directive.i",
+ Expected: 0.91,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.SWIG{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/tads3.go b/pkg/lexer/tads3.go
new file mode 100644
index 00000000..a8ae8f12
--- /dev/null
+++ b/pkg/lexer/tads3.go
@@ -0,0 +1,53 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// TADS3 lexer.
+type TADS3 struct{}
+
+// Lexer returns the lexer.
+func (l TADS3) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"tads3"},
+ Filenames: []string{"*.t"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // This is a rather generic descriptive language without strong
+ // identifiers. It looks like a 'GameMainDef' has to be present,
+ // and/or a 'versionInfo' with an 'IFID' field.
+ var result float32
+
+ if strings.Contains(text, "__TADS") || strings.Contains(text, "GameMainDef") {
+ result += 0.2
+ }
+
+ // This is a fairly unique keyword which is likely used in source as well.
+ if strings.Contains(text, "versionInfo") && strings.Contains(text, "IFID") {
+ result += 0.1
+ }
+
+ return result
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (TADS3) Name() string {
+ return heartbeat.LanguageTADS3.StringChroma()
+}
diff --git a/pkg/lexer/tads3_test.go b/pkg/lexer/tads3_test.go
new file mode 100644
index 00000000..fdc4ba58
--- /dev/null
+++ b/pkg/lexer/tads3_test.go
@@ -0,0 +1,41 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestTADS3_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "GameMainDef": {
+ Filepath: "testdata/tads3_game_main_def.t",
+ Expected: 0.2,
+ },
+ "__TADS keyword": {
+ Filepath: "testdata/tads3_tads_keyword.t",
+ Expected: 0.2,
+ },
+ "version info": {
+ Filepath: "testdata/tads3_version_info.t",
+ Expected: 0.1,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.TADS3{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/tap.go b/pkg/lexer/tap.go
new file mode 100644
index 00000000..8721a89f
--- /dev/null
+++ b/pkg/lexer/tap.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// TAP lexer. For Test Anything Protocol (TAP) output.
+type TAP struct{}
+
+// Lexer returns the lexer.
+func (l TAP) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"tap"},
+ Filenames: []string{"*.tap"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (TAP) Name() string {
+ return heartbeat.LanguageTAP.StringChroma()
+}
diff --git a/pkg/lexer/tasm.go b/pkg/lexer/tasm.go
new file mode 100644
index 00000000..316b8914
--- /dev/null
+++ b/pkg/lexer/tasm.go
@@ -0,0 +1,42 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+var tasmAnalyzerRe = regexp.MustCompile(`(?i)PROC`)
+
+// TASM lexer.
+type TASM struct{}
+
+// Lexer returns the lexer.
+func (l TASM) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ if tasmAnalyzerRe.MatchString(text) {
+ return 1.0
+ }
+
+ return 0
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (TASM) Name() string {
+ return heartbeat.LanguageTASM.StringChroma()
+}
diff --git a/pkg/lexer/tasm_test.go b/pkg/lexer/tasm_test.go
new file mode 100644
index 00000000..417bf2e3
--- /dev/null
+++ b/pkg/lexer/tasm_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestTASM_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/tasm.asm")
+ assert.NoError(t, err)
+
+ l := lexer.TASM{}.Lexer()
+
+ assert.Equal(t, float32(1.0), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/tcshsession.go b/pkg/lexer/tcshsession.go
new file mode 100644
index 00000000..f043bed1
--- /dev/null
+++ b/pkg/lexer/tcshsession.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// TcshSession lexer. Lexer for Tcsh sessions, i.e. command lines, including a
+// prompt, interspersed with output.
+type TcshSession struct{}
+
+// Lexer returns the lexer.
+func (l TcshSession) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"tcshcon"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (TcshSession) Name() string {
+ return heartbeat.LanguageTcshSession.StringChroma()
+}
diff --git a/pkg/lexer/tea.go b/pkg/lexer/tea.go
new file mode 100644
index 00000000..5826fd97
--- /dev/null
+++ b/pkg/lexer/tea.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Tea lexer. Lexer for Tea Templates.
+type Tea struct{}
+
+// Lexer returns the lexer.
+func (l Tea) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"tea"},
+ Filenames: []string{"*.tea"},
+ MimeTypes: []string{"text/x-tea"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Tea) Name() string {
+ return heartbeat.LanguageTea.StringChroma()
+}
diff --git a/pkg/lexer/teraterm.go b/pkg/lexer/teraterm.go
new file mode 100644
index 00000000..c6e70144
--- /dev/null
+++ b/pkg/lexer/teraterm.go
@@ -0,0 +1,249 @@
+package lexer
+
+import (
+ "regexp"
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+var teraTermAnalyserCommandRe = regexp.MustCompile(`(?i)\b(` + strings.Join([]string{
+ "basename",
+ "beep",
+ "bplusrecv",
+ "bplussend",
+ "break",
+ "bringupbox",
+ // 'call' is handled separately.
+ "callmenu",
+ "changedir",
+ "checksum16",
+ "checksum16file",
+ "checksum32",
+ "checksum32file",
+ "checksum8",
+ "checksum8file",
+ "clearscreen",
+ "clipb2var",
+ "closesbox",
+ "closett",
+ "code2str",
+ "connect",
+ "continue",
+ "crc16",
+ "crc16file",
+ "crc32",
+ "crc32file",
+ "cygconnect",
+ "delpassword",
+ "dirname",
+ "dirnamebox",
+ "disconnect",
+ "dispstr",
+ "do",
+ "else",
+ "elseif",
+ "enablekeyb",
+ "end",
+ "endif",
+ "enduntil",
+ "endwhile",
+ "exec",
+ "execcmnd",
+ "exit",
+ "expandenv",
+ "fileclose",
+ "fileconcat",
+ "filecopy",
+ "filecreate",
+ "filedelete",
+ "filelock",
+ "filemarkptr",
+ "filenamebox",
+ "fileopen",
+ "fileread",
+ "filereadln",
+ "filerename",
+ "filesearch",
+ "fileseek",
+ "fileseekback",
+ "filestat",
+ "filestrseek",
+ "filestrseek2",
+ "filetruncate",
+ "fileunlock",
+ "filewrite",
+ "filewriteln",
+ "findclose",
+ "findfirst",
+ "findnext",
+ "flushrecv",
+ "foldercreate",
+ "folderdelete",
+ "foldersearch",
+ "for",
+ "getdate",
+ "getdir",
+ "getenv",
+ "getfileattr",
+ "gethostname",
+ "getipv4addr",
+ "getipv6addr",
+ "getmodemstatus",
+ "getpassword",
+ "getspecialfolder",
+ "gettime",
+ "gettitle",
+ "getttdir",
+ "getver",
+ // 'goto' is handled separately.
+ "if",
+ "ifdefined",
+ "include",
+ "inputbox",
+ "int2str",
+ "intdim",
+ "ispassword",
+ "kmtfinish",
+ "kmtget",
+ "kmtrecv",
+ "kmtsend",
+ "listbox",
+ "loadkeymap",
+ "logautoclosemode",
+ "logclose",
+ "loginfo",
+ "logopen",
+ "logpause",
+ "logrotate",
+ "logstart",
+ "logwrite",
+ "loop",
+ "makepath",
+ "messagebox",
+ "mpause",
+ "next",
+ "passwordbox",
+ "pause",
+ "quickvanrecv",
+ "quickvansend",
+ "random",
+ "recvln",
+ "regexoption",
+ "restoresetup",
+ "return",
+ "rotateleft",
+ "rotateright",
+ "scprecv",
+ "scpsend",
+ "send",
+ "sendbreak",
+ "sendbroadcast",
+ "sendfile",
+ "sendkcode",
+ "sendln",
+ "sendlnbroadcast",
+ "sendlnmulticast",
+ "sendmulticast",
+ "setbaud",
+ "setdate",
+ "setdebug",
+ "setdir",
+ "setdlgpos",
+ "setdtr",
+ "setecho",
+ "setenv",
+ "setexitcode",
+ "setfileattr",
+ "setflowctrl",
+ "setmulticastname",
+ "setpassword",
+ "setrts",
+ "setspeed",
+ "setsync",
+ "settime",
+ "settitle",
+ "show",
+ "showtt",
+ "sprintf",
+ "sprintf2",
+ "statusbox",
+ "str2code",
+ "str2int",
+ "strcompare",
+ "strconcat",
+ "strcopy",
+ "strdim",
+ "strinsert",
+ "strjoin",
+ "strlen",
+ "strmatch",
+ "strremove",
+ "strreplace",
+ "strscan",
+ "strspecial",
+ "strsplit",
+ "strtrim",
+ "testlink",
+ "then",
+ "tolower",
+ "toupper",
+ "unlink",
+ "until",
+ "uptime",
+ "var2clipb",
+ "wait",
+ "wait4all",
+ "waitevent",
+ "waitln",
+ "waitn",
+ "waitrecv",
+ "waitregex",
+ "while",
+ "xmodemrecv",
+ "xmodemsend",
+ "yesnobox",
+ "ymodemrecv",
+ "ymodemsend",
+ "zmodemrecv",
+ "zmodemsend",
+}, "|") + `)\b`)
+
+// TeraTerm macro lexer.
+type TeraTerm struct{}
+
+// Lexer returns the lexer.
+func (l TeraTerm) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"ttl", "teraterm", "teratermmacro"},
+ Filenames: []string{"*.ttl"},
+ MimeTypes: []string{"text/x-teratermmacro"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // Turtle and Tera Term macro files share the same file extension
+ // but each has a recognizable and distinct syntax.
+ if teraTermAnalyserCommandRe.MatchString(text) {
+ return 0.01
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (TeraTerm) Name() string {
+ return heartbeat.LanguageTeraTerm.StringChroma()
+}
diff --git a/pkg/lexer/teraterm_test.go b/pkg/lexer/teraterm_test.go
new file mode 100644
index 00000000..f3c30141
--- /dev/null
+++ b/pkg/lexer/teraterm_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestTeraTerm_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/teraterm_commands.ttl")
+ assert.NoError(t, err)
+
+ l := lexer.TeraTerm{}.Lexer()
+
+ assert.Equal(t, float32(0.01), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/testdata/actionscript3.as b/pkg/lexer/testdata/actionscript3.as
new file mode 100644
index 00000000..a255078d
--- /dev/null
+++ b/pkg/lexer/testdata/actionscript3.as
@@ -0,0 +1 @@
+var circle:Sprite = new Sprite();
diff --git a/pkg/lexer/testdata/actionscript3_capital_letter.as b/pkg/lexer/testdata/actionscript3_capital_letter.as
new file mode 100644
index 00000000..69b82453
--- /dev/null
+++ b/pkg/lexer/testdata/actionscript3_capital_letter.as
@@ -0,0 +1 @@
+var CIRCLE:Sprite = new Sprite();
diff --git a/pkg/lexer/testdata/actionscript3_spaces.as b/pkg/lexer/testdata/actionscript3_spaces.as
new file mode 100644
index 00000000..a9d88726
--- /dev/null
+++ b/pkg/lexer/testdata/actionscript3_spaces.as
@@ -0,0 +1 @@
+var circle: Sprite = new Sprite();
diff --git a/pkg/lexer/testdata/aspxcsharp_page_language.aspx b/pkg/lexer/testdata/aspxcsharp_page_language.aspx
new file mode 100644
index 00000000..24ed2cbb
--- /dev/null
+++ b/pkg/lexer/testdata/aspxcsharp_page_language.aspx
@@ -0,0 +1,8 @@
+<%@ Page Language="C#" AutoEventWireup="true" CodeFile = "WebForm2.aspx.cs" Inherits="NetAdmin.WebForm2"%>
+
+
+ wakatime-cli
+
+
+
+
diff --git a/pkg/lexer/testdata/aspxcsharp_script_language.aspx b/pkg/lexer/testdata/aspxcsharp_script_language.aspx
new file mode 100644
index 00000000..e812a9f3
--- /dev/null
+++ b/pkg/lexer/testdata/aspxcsharp_script_language.aspx
@@ -0,0 +1,3 @@
+
diff --git a/pkg/lexer/testdata/aspxvbnet_page_language.aspx b/pkg/lexer/testdata/aspxvbnet_page_language.aspx
new file mode 100644
index 00000000..563eaf36
--- /dev/null
+++ b/pkg/lexer/testdata/aspxvbnet_page_language.aspx
@@ -0,0 +1,8 @@
+<%@ Page Language="VB" AutoEventWireup="true" CodeFile="WebForm2.aspx.vb" Inherits="NetAdmin.WebForm2"%>
+
+
+ wakatime-cli
+
+
+
+
diff --git a/pkg/lexer/testdata/aspxvbnet_script_language.aspx b/pkg/lexer/testdata/aspxvbnet_script_language.aspx
new file mode 100644
index 00000000..9dc37d85
--- /dev/null
+++ b/pkg/lexer/testdata/aspxvbnet_script_language.aspx
@@ -0,0 +1,3 @@
+
diff --git a/pkg/lexer/testdata/bbcbasic_10rem.bbc b/pkg/lexer/testdata/bbcbasic_10rem.bbc
new file mode 100644
index 00000000..ba7a5417
--- /dev/null
+++ b/pkg/lexer/testdata/bbcbasic_10rem.bbc
@@ -0,0 +1 @@
+10REM >EIRC
diff --git a/pkg/lexer/testdata/bbcbasic_rem.bbc b/pkg/lexer/testdata/bbcbasic_rem.bbc
new file mode 100644
index 00000000..a74f405d
--- /dev/null
+++ b/pkg/lexer/testdata/bbcbasic_rem.bbc
@@ -0,0 +1 @@
+REM >Remove the line
diff --git a/pkg/lexer/testdata/brainfuck_greater_less.bf b/pkg/lexer/testdata/brainfuck_greater_less.bf
new file mode 100644
index 00000000..60de000b
--- /dev/null
+++ b/pkg/lexer/testdata/brainfuck_greater_less.bf
@@ -0,0 +1,8 @@
+>>>++[
+ <++++++++[
+ <[<++>-]>>[>>]+>>+[
+ -[->>+<<<[<[<<]<+>]>[>[>>]]]
+ <[>>[-]]>[>[-<<]>[<+<]]+<<
+ ]<[>+<-]>>-
+ ]<.[-]>>
+]
diff --git a/pkg/lexer/testdata/brainfuck_minus.bf b/pkg/lexer/testdata/brainfuck_minus.bf
new file mode 100644
index 00000000..4cce7dcd
--- /dev/null
+++ b/pkg/lexer/testdata/brainfuck_minus.bf
@@ -0,0 +1 @@
+[-]
diff --git a/pkg/lexer/testdata/brainfuck_plus_minus.bf b/pkg/lexer/testdata/brainfuck_plus_minus.bf
new file mode 100644
index 00000000..e77e7257
--- /dev/null
+++ b/pkg/lexer/testdata/brainfuck_plus_minus.bf
@@ -0,0 +1 @@
+>+>+>+>+>+>+>+[->[>]+[->[>]+>+>+[<]+<]+<]+++++++[>+++++++++++>+<<-]>+.----.>++.
diff --git a/pkg/lexer/testdata/bugs_basic.bug b/pkg/lexer/testdata/bugs_basic.bug
new file mode 100644
index 00000000..9106c868
--- /dev/null
+++ b/pkg/lexer/testdata/bugs_basic.bug
@@ -0,0 +1,25 @@
+model
+ {
+ for( i in 1 : N ) {
+ for( j in 1 : T ) {
+ Y[i , j] ~ dnorm(mu[i , j],tau.c)
+ mu[i , j] <- alpha[i] + beta[i] * (x[j] - xbar)
+ culmative.Y[i , j] <- culmative(Y[i , j], Y[i , j])
+ post.pv.Y[i , j] <- post.p.value(Y[i , j])
+ prior.pv.Y[i , j] <- prior.p.value(Y[i , j])
+ replicate.post.Y[i , j] <- replicate.post(Y[i , j])
+ pv.post.Y[i , j] <- step(Y[i , j] - replicate.post.Y[i , j])
+ replicate.prior.Y[i , j] <- replicate.prior(Y[i , j])
+ pv.prior.Y[i , j] <- step(Y[i , j] - replicate.prior.Y[i , j])
+ }
+ alpha[i] ~ dnorm(alpha.c,alpha.tau)
+ beta[i] ~ dnorm(beta.c,beta.tau)
+ }
+ tau.c ~ dgamma(0.001,0.001)
+ sigma <- 1 / sqrt(tau.c)
+ alpha.c ~ dnorm(0.0,1.0E-6)
+ alpha.tau ~ dgamma(0.001,0.001)
+ beta.c ~ dnorm(0.0,1.0E-6)
+ beta.tau ~ dgamma(0.001,0.001)
+ alpha0 <- alpha.c - xbar * beta.c
+ }
diff --git a/pkg/lexer/testdata/c_ifdef.c b/pkg/lexer/testdata/c_ifdef.c
new file mode 100644
index 00000000..9f278153
--- /dev/null
+++ b/pkg/lexer/testdata/c_ifdef.c
@@ -0,0 +1,2 @@
+
+#ifdef DEBUG
diff --git a/pkg/lexer/testdata/c_ifndef.c b/pkg/lexer/testdata/c_ifndef.c
new file mode 100644
index 00000000..9affdff2
--- /dev/null
+++ b/pkg/lexer/testdata/c_ifndef.c
@@ -0,0 +1,2 @@
+
+#ifndef DEBUG
diff --git a/pkg/lexer/testdata/c_include.c b/pkg/lexer/testdata/c_include.c
new file mode 100644
index 00000000..3b63d088
--- /dev/null
+++ b/pkg/lexer/testdata/c_include.c
@@ -0,0 +1,2 @@
+
+#include
diff --git a/pkg/lexer/testdata/ca65assembler_comment.s b/pkg/lexer/testdata/ca65assembler_comment.s
new file mode 100644
index 00000000..9f613d7a
--- /dev/null
+++ b/pkg/lexer/testdata/ca65assembler_comment.s
@@ -0,0 +1,2 @@
+
+; this is a comment for ca65 assembler
diff --git a/pkg/lexer/testdata/cbmbasicv2_basic.bas b/pkg/lexer/testdata/cbmbasicv2_basic.bas
new file mode 100644
index 00000000..dbf382c7
--- /dev/null
+++ b/pkg/lexer/testdata/cbmbasicv2_basic.bas
@@ -0,0 +1 @@
+10 PRINT "PART 1"
diff --git a/pkg/lexer/testdata/coq_reserved_keyword.v b/pkg/lexer/testdata/coq_reserved_keyword.v
new file mode 100644
index 00000000..aa38d3b0
--- /dev/null
+++ b/pkg/lexer/testdata/coq_reserved_keyword.v
@@ -0,0 +1,5 @@
+Theorem demorgan : forall (P Q : Prop),
+ ~(P \/ Q) -> ~P /\ ~Q.
+Proof.
+ tauto.
+Qed.
diff --git a/pkg/lexer/testdata/cpp_include.cpp b/pkg/lexer/testdata/cpp_include.cpp
new file mode 100644
index 00000000..604782e4
--- /dev/null
+++ b/pkg/lexer/testdata/cpp_include.cpp
@@ -0,0 +1 @@
+#include
diff --git a/pkg/lexer/testdata/cpp_namespace.cpp b/pkg/lexer/testdata/cpp_namespace.cpp
new file mode 100644
index 00000000..253f3bf8
--- /dev/null
+++ b/pkg/lexer/testdata/cpp_namespace.cpp
@@ -0,0 +1 @@
+using namespace std;
diff --git a/pkg/lexer/testdata/cuda_ifdef.cu b/pkg/lexer/testdata/cuda_ifdef.cu
new file mode 100644
index 00000000..9f278153
--- /dev/null
+++ b/pkg/lexer/testdata/cuda_ifdef.cu
@@ -0,0 +1,2 @@
+
+#ifdef DEBUG
diff --git a/pkg/lexer/testdata/cuda_ifndef.cu b/pkg/lexer/testdata/cuda_ifndef.cu
new file mode 100644
index 00000000..9affdff2
--- /dev/null
+++ b/pkg/lexer/testdata/cuda_ifndef.cu
@@ -0,0 +1,2 @@
+
+#ifndef DEBUG
diff --git a/pkg/lexer/testdata/cuda_include.cu b/pkg/lexer/testdata/cuda_include.cu
new file mode 100644
index 00000000..789b69ce
--- /dev/null
+++ b/pkg/lexer/testdata/cuda_include.cu
@@ -0,0 +1,2 @@
+
+#include
\ No newline at end of file
diff --git a/pkg/lexer/testdata/easytrieve_basic.ezt b/pkg/lexer/testdata/easytrieve_basic.ezt
new file mode 100644
index 00000000..fec2aa4c
--- /dev/null
+++ b/pkg/lexer/testdata/easytrieve_basic.ezt
@@ -0,0 +1,32 @@
+* Easytrieve Plus example programm.
+
+* Environtment section.
+PARM DEBUG(FLOW FLDCHK)
+
+* Library Section.
+FILE PERSNL FB(150 1800)
+ NAME 17 8 A
+ EMP# 9 5 N * Note: '#' is a valid character for names.
+ DEPT 98 3 N. GROSS 94 4 P 2
+ * ^ 2 field definitions in 1 line.
+
+* Call macro in example.mac.
+FILE EXAMPLE FB(80 200)
+%EXAMPLE SOMEFILE SOME
+
+* Activity Section.
+JOB INPUT PERSNL NAME FIRST-PROGRAM START AT-START FINISH AT_FINISH
+ PRINT PAY-RPT
+REPORT PAY-RPT LINESIZE 80
+ TITLE 01 'PERSONNEL REPORT EXAMPLE-1'
+ LINE 01 DEPT NAME EMP# GROSS
+
+* Procedure declarations.
+AT-START. PROC
+ DISPLAY 'PROCESSING...'
+END-PROC
+
+AT-FINISH
+PROC
+ DISPLAY 'DONE.'
+END-PROC
diff --git a/pkg/lexer/testdata/easytrieve_macro.mac b/pkg/lexer/testdata/easytrieve_macro.mac
new file mode 100644
index 00000000..1c3831d1
--- /dev/null
+++ b/pkg/lexer/testdata/easytrieve_macro.mac
@@ -0,0 +1,6 @@
+* Example Easytrieve macro declaration. For an example on calling this
+* macro, see example.ezt.
+MACRO FILENAME PREFIX
+&FILENAME.
+&PREFIX.-LINE 1 80 A
+&PREFIX.-KEY 1 8 A
diff --git a/pkg/lexer/testdata/ec_ifdef.ec b/pkg/lexer/testdata/ec_ifdef.ec
new file mode 100644
index 00000000..9f278153
--- /dev/null
+++ b/pkg/lexer/testdata/ec_ifdef.ec
@@ -0,0 +1,2 @@
+
+#ifdef DEBUG
diff --git a/pkg/lexer/testdata/ec_ifndef.ec b/pkg/lexer/testdata/ec_ifndef.ec
new file mode 100644
index 00000000..9affdff2
--- /dev/null
+++ b/pkg/lexer/testdata/ec_ifndef.ec
@@ -0,0 +1,2 @@
+
+#ifndef DEBUG
diff --git a/pkg/lexer/testdata/ec_include.ec b/pkg/lexer/testdata/ec_include.ec
new file mode 100644
index 00000000..b89ce07a
--- /dev/null
+++ b/pkg/lexer/testdata/ec_include.ec
@@ -0,0 +1,2 @@
+
+#include
\ No newline at end of file
diff --git a/pkg/lexer/testdata/ecl_basic.ecl b/pkg/lexer/testdata/ecl_basic.ecl
new file mode 100644
index 00000000..f3bc2744
--- /dev/null
+++ b/pkg/lexer/testdata/ecl_basic.ecl
@@ -0,0 +1,9 @@
+DATA132 ColumnMap(UNICODE str) := BEGINC++
+ size_t pos;
+ unsigned char col = 0;
+ memset(__result, '\0', 132); // init to no column
+ for(pos=0; pos<132; pos++) {
+ if (pos size32_t lenAbc, const char * abc;
diff --git a/pkg/lexer/testdata/erb_basic.erb b/pkg/lexer/testdata/erb_basic.erb
new file mode 100644
index 00000000..f3b98568
--- /dev/null
+++ b/pkg/lexer/testdata/erb_basic.erb
@@ -0,0 +1 @@
+<%# Non-printing tag ↓ -%>
diff --git a/pkg/lexer/testdata/evoque_basic.evoque b/pkg/lexer/testdata/evoque_basic.evoque
new file mode 100644
index 00000000..93732929
--- /dev/null
+++ b/pkg/lexer/testdata/evoque_basic.evoque
@@ -0,0 +1 @@
+$evoque{disclaimer, collection="legals"}
diff --git a/pkg/lexer/testdata/execline_shebang.exec b/pkg/lexer/testdata/execline_shebang.exec
new file mode 100644
index 00000000..dbd83b13
--- /dev/null
+++ b/pkg/lexer/testdata/execline_shebang.exec
@@ -0,0 +1 @@
+#!/usr/bin/execlineb
diff --git a/pkg/lexer/testdata/ezhil_basic.n b/pkg/lexer/testdata/ezhil_basic.n
new file mode 100644
index 00000000..13390611
--- /dev/null
+++ b/pkg/lexer/testdata/ezhil_basic.n
@@ -0,0 +1,152 @@
+# (C) முத்தையா அண்ணாமலை 2013
+# (A) என். சொக்கன்
+# எழில் தமிழ் நிரலாக்க மொழி உதாரணம்
+# Muthu A granted permission for this to be included under the BSD license
+# https://bitbucket.org/birkenfeld/pygments-main/pull-requests/443/ezhil-language-lexer-for-pygments/diff
+
+## Prime Factors Example
+## பகா எண் கூறுகளைக் கண்டறியும் உதாரணம்
+
+## இது நிரல் தரப்பட்ட எண்ணின் பகாஎண் கூறுகளைக் கண்டறியும்
+
+நிரல்பாகம் பகாஎண்ணா(எண்1)
+
+ ## இது நிரல்பாகம் தரப்பட்ட எண் பகு எண்ணா அல்லது பகா எண்ணா என்று கண்டறிந்து சொல்லும்
+ ## பகுஎண் என்றால் 0 திரும்பத் தரப்படும்
+ ## பகாஎண் என்றால் 1 திரும்பத் தரப்படும்
+
+ @(எண்1 < 0) ஆனால்
+
+ ## எதிர்மறை எண்களை நேராக்குதல்
+
+ எண்1 = எண்1 * (-1)
+
+ முடி
+
+ @(எண்1 < 2) ஆனால்
+
+ ## பூஜ்ஜியம், ஒன்று ஆகியவை பகா எண்கள் அல்ல
+
+ பின்கொடு 0
+
+ முடி
+
+ @(எண்1 == 2) ஆனால்
+
+ ## இரண்டு என்ற எண் ஒரு பகா எண்
+
+ பின்கொடு 1
+
+ முடி
+
+ மீதம் = எண்1%2
+
+ @(மீதம் == 0) ஆனால்
+
+ ## இரட்டைப்படை எண், ஆகவே, இது பகா எண் அல்ல
+
+ பின்கொடு 0
+
+ முடி
+
+ எண்1வர்க்கமூலம் = எண்1^0.5
+
+ @(எண்2 = 3, எண்2 <= எண்1வர்க்கமூலம், எண்2 = எண்2 + 2) ஆக
+
+ மீதம்1 = எண்1%எண்2
+
+ @(மீதம்1 == 0) ஆனால்
+
+ ## ஏதேனும் ஓர் எண்ணால் முழுமையாக வகுபட்டுவிட்டது, ஆகவே அது பகா எண் அல்ல
+
+ பின்கொடு 0
+
+ முடி
+
+ முடி
+
+ பின்கொடு 1
+
+முடி
+
+நிரல்பாகம் பகுத்தெடு(எண்1)
+
+ ## இது எண் தரப்பட்ட எண்ணின் பகா எண் கூறுகளைக் கண்டறிந்து பட்டியல் இடும்
+
+ கூறுகள் = பட்டியல்()
+
+ @(எண்1 < 0) ஆனால்
+
+ ## எதிர்மறை எண்களை நேராக்குதல்
+
+ எண்1 = எண்1 * (-1)
+
+ முடி
+
+ @(எண்1 <= 1) ஆனால்
+
+ ## ஒன்று அல்லது அதற்குக் குறைவான எண்களுக்குப் பகா எண் விகிதம் கண்டறியமுடியாது
+
+ பின்கொடு கூறுகள்
+
+ முடி
+
+ @(பகாஎண்ணா(எண்1) == 1) ஆனால்
+
+ ## தரப்பட்ட எண்ணே பகா எண்ணாக அமைந்துவிட்டால், அதற்கு அதுவே பகாஎண் கூறு ஆகும்
+
+ பின்இணை(கூறுகள், எண்1)
+ பின்கொடு கூறுகள்
+
+ முடி
+
+ தாற்காலிகஎண் = எண்1
+
+ எண்2 = 2
+
+ @(எண்2 <= தாற்காலிகஎண்) வரை
+
+ விடை1 = பகாஎண்ணா(எண்2)
+ மீண்டும்தொடங்கு = 0
+
+ @(விடை1 == 1) ஆனால்
+
+ விடை2 = தாற்காலிகஎண்%எண்2
+
+ @(விடை2 == 0) ஆனால்
+
+ ## பகா எண்ணால் முழுமையாக வகுபட்டுள்ளது, அதனைப் பட்டியலில் இணைக்கிறோம்
+
+ பின்இணை(கூறுகள், எண்2)
+ தாற்காலிகஎண் = தாற்காலிகஎண்/எண்2
+
+ ## மீண்டும் இரண்டில் தொடங்கி இதே கணக்கிடுதலைத் தொடரவேண்டும்
+
+ எண்2 = 2
+ மீண்டும்தொடங்கு = 1
+
+ முடி
+
+ முடி
+
+ @(மீண்டும்தொடங்கு == 0) ஆனால்
+
+ ## அடுத்த எண்ணைத் தேர்ந்தெடுத்துக் கணக்கிடுதலைத் தொடரவேண்டும்
+
+ எண்2 = எண்2 + 1
+
+ முடி
+
+ முடி
+
+ பின்கொடு கூறுகள்
+
+முடி
+
+அ = int(உள்ளீடு("உங்களுக்குப் பிடித்த ஓர் எண்ணைத் தாருங்கள்: "))
+
+பகாஎண்கூறுகள் = பட்டியல்()
+
+பகாஎண்கூறுகள் = பகுத்தெடு(அ)
+
+பதிப்பி "நீங்கள் தந்த எண்ணின் பகா எண் கூறுகள் இவை: ", பகாஎண்கூறுகள்
diff --git a/pkg/lexer/testdata/forth_command.frt b/pkg/lexer/testdata/forth_command.frt
new file mode 100644
index 00000000..15d97af9
--- /dev/null
+++ b/pkg/lexer/testdata/forth_command.frt
@@ -0,0 +1,2 @@
+
+: FLOOR5;
diff --git a/pkg/lexer/testdata/fsharp_backward_pipeline_operator.fs b/pkg/lexer/testdata/fsharp_backward_pipeline_operator.fs
new file mode 100644
index 00000000..5bb79d80
--- /dev/null
+++ b/pkg/lexer/testdata/fsharp_backward_pipeline_operator.fs
@@ -0,0 +1 @@
+let Pipeline1 x = addOne <| timesTwo x
diff --git a/pkg/lexer/testdata/fsharp_forward_pipeline_operator.fs b/pkg/lexer/testdata/fsharp_forward_pipeline_operator.fs
new file mode 100644
index 00000000..483616d3
--- /dev/null
+++ b/pkg/lexer/testdata/fsharp_forward_pipeline_operator.fs
@@ -0,0 +1 @@
+let Pipeline2 x = addOne x |> timesTwo
diff --git a/pkg/lexer/testdata/fsharp_pipeline_operator.fs b/pkg/lexer/testdata/fsharp_pipeline_operator.fs
new file mode 100644
index 00000000..50626a2e
--- /dev/null
+++ b/pkg/lexer/testdata/fsharp_pipeline_operator.fs
@@ -0,0 +1 @@
+let Pipeline1 x = addOne <| timesTwo x |> timesThree x
diff --git a/pkg/lexer/testdata/gap_declaration.g b/pkg/lexer/testdata/gap_declaration.g
new file mode 100644
index 00000000..b8af13d0
--- /dev/null
+++ b/pkg/lexer/testdata/gap_declaration.g
@@ -0,0 +1 @@
+InstallTrueMethod( IsCommutative, IsGroup and IsCyclic );
diff --git a/pkg/lexer/testdata/gap_implementation.g b/pkg/lexer/testdata/gap_implementation.g
new file mode 100644
index 00000000..e53f15e8
--- /dev/null
+++ b/pkg/lexer/testdata/gap_implementation.g
@@ -0,0 +1,9 @@
+InstallMethod( Iterator,
+ "method for `Integers'",
+ [ IsIntegers ],
+ function( Integers )
+ return Objectify( NewType( IteratorsFamily,
+ IsIterator
+ and IsIntegersIteratorCompRep ),
+ rec( counter := 0 ) );
+ end );
diff --git a/pkg/lexer/testdata/gas_data_directive.S b/pkg/lexer/testdata/gas_data_directive.S
new file mode 100644
index 00000000..312ed111
--- /dev/null
+++ b/pkg/lexer/testdata/gas_data_directive.S
@@ -0,0 +1,5 @@
+.data
+count: .quad 0
+sum: .quad 0
+format: .asciz "%g\n"
+error: .asciz "There are no command line arguments to average\n"
diff --git a/pkg/lexer/testdata/gas_other_directive.S b/pkg/lexer/testdata/gas_other_directive.S
new file mode 100644
index 00000000..c944dfc9
--- /dev/null
+++ b/pkg/lexer/testdata/gas_other_directive.S
@@ -0,0 +1,2 @@
+message:
+.ascii "Hello, world\n"
diff --git a/pkg/lexer/testdata/gdscript_full.gd b/pkg/lexer/testdata/gdscript_full.gd
new file mode 100644
index 00000000..ce083da3
--- /dev/null
+++ b/pkg/lexer/testdata/gdscript_full.gd
@@ -0,0 +1,10 @@
+class_name StateMachine
+extends Node
+
+signal state_changed(previous, new)
+
+export var initial_state = NodePath()
+
+func _input(event):
+ if event.is_action_pressed("jump"):
+ jump()
diff --git a/pkg/lexer/testdata/gdscript_func.gd b/pkg/lexer/testdata/gdscript_func.gd
new file mode 100644
index 00000000..785e1caa
--- /dev/null
+++ b/pkg/lexer/testdata/gdscript_func.gd
@@ -0,0 +1,3 @@
+func _input(event):
+ if event.is_action_pressed("jump"):
+ jump()
diff --git a/pkg/lexer/testdata/gdscript_keyword.gd b/pkg/lexer/testdata/gdscript_keyword.gd
new file mode 100644
index 00000000..41e3d5ef
--- /dev/null
+++ b/pkg/lexer/testdata/gdscript_keyword.gd
@@ -0,0 +1 @@
+class_name StateMachine
diff --git a/pkg/lexer/testdata/gdscript_keyword2.gd b/pkg/lexer/testdata/gdscript_keyword2.gd
new file mode 100644
index 00000000..ace4a7ba
--- /dev/null
+++ b/pkg/lexer/testdata/gdscript_keyword2.gd
@@ -0,0 +1 @@
+signal state_changed(previous, new)
diff --git a/pkg/lexer/testdata/groff_comment.man b/pkg/lexer/testdata/groff_comment.man
new file mode 100644
index 00000000..f537760a
--- /dev/null
+++ b/pkg/lexer/testdata/groff_comment.man
@@ -0,0 +1 @@
+.\" this a comment
diff --git a/pkg/lexer/testdata/groff_macro.man b/pkg/lexer/testdata/groff_macro.man
new file mode 100644
index 00000000..56cc4693
--- /dev/null
+++ b/pkg/lexer/testdata/groff_macro.man
@@ -0,0 +1 @@
+.SH DESCRIPTION
diff --git a/pkg/lexer/testdata/groff_title_head.man b/pkg/lexer/testdata/groff_title_head.man
new file mode 100644
index 00000000..50b88a79
--- /dev/null
+++ b/pkg/lexer/testdata/groff_title_head.man
@@ -0,0 +1 @@
+.TH COFFEE 1 "23 March 94"
diff --git a/pkg/lexer/testdata/html.rhtml b/pkg/lexer/testdata/html.rhtml
new file mode 100644
index 00000000..c53344b5
--- /dev/null
+++ b/pkg/lexer/testdata/html.rhtml
@@ -0,0 +1,8 @@
+
+
+
+ A minimal rhtml example
+
+
+
+
diff --git a/pkg/lexer/testdata/html_doctype.html b/pkg/lexer/testdata/html_doctype.html
new file mode 100644
index 00000000..1b205ba1
--- /dev/null
+++ b/pkg/lexer/testdata/html_doctype.html
@@ -0,0 +1,2 @@
+
diff --git a/pkg/lexer/testdata/http_request.http b/pkg/lexer/testdata/http_request.http
new file mode 100644
index 00000000..a93cdd94
--- /dev/null
+++ b/pkg/lexer/testdata/http_request.http
@@ -0,0 +1,15 @@
+POST /demo/submit/ HTTP/1.1
+Host: pygments.org
+Connection: keep-alivk
+Cache-Control: max-age=0
+Origin: http://pygments.org
+User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_2)
+ AppleWebKit/535.7 (KHTML, like Gecko) Chrome/16.0.912.63 Safari/535.7
+Content-Type: application/x-www-form-urlencoded
+Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8
+Referer: http://pygments.org/
+Accept-Encoding: gzip,deflate,sdch
+Accept-Language: en-US,en;q=0.8
+Accept-Charset: windows-949,utf-8;q=0.7,*;q=0.3
+
+name=test&lang=text&code=asdf&user=
diff --git a/pkg/lexer/testdata/hy_defn.hy b/pkg/lexer/testdata/hy_defn.hy
new file mode 100644
index 00000000..839232c6
--- /dev/null
+++ b/pkg/lexer/testdata/hy_defn.hy
@@ -0,0 +1 @@
+(defn numeric? [x]
diff --git a/pkg/lexer/testdata/hy_import.hy b/pkg/lexer/testdata/hy_import.hy
new file mode 100644
index 00000000..b577e9ba
--- /dev/null
+++ b/pkg/lexer/testdata/hy_import.hy
@@ -0,0 +1 @@
+(import numbers)
diff --git a/pkg/lexer/testdata/hybris_private.hyb b/pkg/lexer/testdata/hybris_private.hyb
new file mode 100644
index 00000000..959e89fb
--- /dev/null
+++ b/pkg/lexer/testdata/hybris_private.hyb
@@ -0,0 +1,3 @@
+private method isBinary(){
+ return me.mode.find("b") != false;
+}
diff --git a/pkg/lexer/testdata/hybris_public.hyb b/pkg/lexer/testdata/hybris_public.hyb
new file mode 100644
index 00000000..cc4b89bf
--- /dev/null
+++ b/pkg/lexer/testdata/hybris_public.hyb
@@ -0,0 +1,3 @@
+public method File ( file ){
+ me.file = file;
+}
diff --git a/pkg/lexer/testdata/idl_endelse.pro b/pkg/lexer/testdata/idl_endelse.pro
new file mode 100644
index 00000000..b93fe3c5
--- /dev/null
+++ b/pkg/lexer/testdata/idl_endelse.pro
@@ -0,0 +1,6 @@
+if (a eq 2) and (b eq 3) then begin
+ print, 'a = ', a
+ print, 'b = ', b
+endif else begin
+ if a ne 2 then print, 'a != 2' else print, 'b != 3'
+endelse
diff --git a/pkg/lexer/testdata/idl_endswitch.pro b/pkg/lexer/testdata/idl_endswitch.pro
new file mode 100644
index 00000000..f2358f75
--- /dev/null
+++ b/pkg/lexer/testdata/idl_endswitch.pro
@@ -0,0 +1,11 @@
+pro ex_switch, x
+ switch x of
+ 1: print, 'one'
+ 2: print, 'two'
+ else: begin
+ print, 'you entered: ', x
+ print, 'please enter a value between 1 and 4'
+ end
+ endswitch
+end
+ex_switch, 2
diff --git a/pkg/lexer/testdata/inform6_basic.inf b/pkg/lexer/testdata/inform6_basic.inf
new file mode 100644
index 00000000..2e8a1332
--- /dev/null
+++ b/pkg/lexer/testdata/inform6_basic.inf
@@ -0,0 +1 @@
+Origsource "^~@{.inf";
diff --git a/pkg/lexer/testdata/ini_basic.ini b/pkg/lexer/testdata/ini_basic.ini
new file mode 100644
index 00000000..4a310c03
--- /dev/null
+++ b/pkg/lexer/testdata/ini_basic.ini
@@ -0,0 +1,9 @@
+[section]
+
+foo = bar
+continued = foo
+ baz
+conttwo =
+ foo
+; comment
+# comment
diff --git a/pkg/lexer/testdata/ini_invalid.ini b/pkg/lexer/testdata/ini_invalid.ini
new file mode 100644
index 00000000..139597f9
--- /dev/null
+++ b/pkg/lexer/testdata/ini_invalid.ini
@@ -0,0 +1,2 @@
+
+
diff --git a/pkg/lexer/testdata/jags_data.jag b/pkg/lexer/testdata/jags_data.jag
new file mode 100644
index 00000000..f61bf030
--- /dev/null
+++ b/pkg/lexer/testdata/jags_data.jag
@@ -0,0 +1,10 @@
+data {
+ D <- dim(Z)
+}
+model {
+ for (i in 1:D[1]) {
+ for (j in 1:D[2]) {
+ Z[i,j] <- dnorm(alpha[i] + beta[j], tau)
+ }
+ }
+}
diff --git a/pkg/lexer/testdata/jags_model.jag b/pkg/lexer/testdata/jags_model.jag
new file mode 100644
index 00000000..ec084fc8
--- /dev/null
+++ b/pkg/lexer/testdata/jags_model.jag
@@ -0,0 +1,7 @@
+model {
+ for (i in 1:D[1]) {
+ for (j in 1:D[2]) {
+ Z[i,j] <- dnorm(alpha[i] + beta[j], tau)
+ }
+ }
+}
diff --git a/pkg/lexer/testdata/jags_var.jag b/pkg/lexer/testdata/jags_var.jag
new file mode 100644
index 00000000..4c5f5286
--- /dev/null
+++ b/pkg/lexer/testdata/jags_var.jag
@@ -0,0 +1,9 @@
+var x[N];
+
+model {
+ for (i in 1:D[1]) {
+ for (j in 1:D[2]) {
+ Z[i,j] <- dnorm(alpha[i] + beta[j], tau)
+ }
+ }
+}
diff --git a/pkg/lexer/testdata/jasmin_class.j b/pkg/lexer/testdata/jasmin_class.j
new file mode 100644
index 00000000..02e5f344
--- /dev/null
+++ b/pkg/lexer/testdata/jasmin_class.j
@@ -0,0 +1 @@
+ .class public Calculator
diff --git a/pkg/lexer/testdata/jasmin_instruction.j b/pkg/lexer/testdata/jasmin_instruction.j
new file mode 100644
index 00000000..ace5a2b8
--- /dev/null
+++ b/pkg/lexer/testdata/jasmin_instruction.j
@@ -0,0 +1,3 @@
+ .class public Calculator
+
+ ldc_w 3.141592654 ; push PI as a double
diff --git a/pkg/lexer/testdata/jasmin_keyword.j b/pkg/lexer/testdata/jasmin_keyword.j
new file mode 100644
index 00000000..67ce43dc
--- /dev/null
+++ b/pkg/lexer/testdata/jasmin_keyword.j
@@ -0,0 +1 @@
+ .limit locals 3
diff --git a/pkg/lexer/testdata/jcl_job_header.jcl b/pkg/lexer/testdata/jcl_job_header.jcl
new file mode 100644
index 00000000..d10bc307
--- /dev/null
+++ b/pkg/lexer/testdata/jcl_job_header.jcl
@@ -0,0 +1 @@
+//IS198CPY JOB (CHR-TEST-001),'CHROMA TEST JOB',
diff --git a/pkg/lexer/testdata/jsp_basic.jsp b/pkg/lexer/testdata/jsp_basic.jsp
new file mode 100644
index 00000000..1c6664da
--- /dev/null
+++ b/pkg/lexer/testdata/jsp_basic.jsp
@@ -0,0 +1,24 @@
+
+<%= var x = 1;
+%>
+<%! int i = 0; %>
+<%! int a, b, c; %>
+<%! Circle a = new Circle(2.0); %>
+
+<%
+ String name = null;
+ if (request.getParameter("name") == null) {
+%>
+<%@ include file="error.html" %>
+<%
+ } else {
+ foo.setName(request.getParameter("name"));
+ if (foo.getName().equalsIgnoreCase("integra"))
+ name = "acura";
+ if (name.equalsIgnoreCase( "acura" )) {
+%>
+
+
+
+Calendar of
+
diff --git a/pkg/lexer/testdata/lasso_delimiter.lasso b/pkg/lexer/testdata/lasso_delimiter.lasso
new file mode 100644
index 00000000..c85e3438
--- /dev/null
+++ b/pkg/lexer/testdata/lasso_delimiter.lasso
@@ -0,0 +1,4 @@
+
diff --git a/pkg/lexer/testdata/lasso_local.lasso b/pkg/lexer/testdata/lasso_local.lasso
new file mode 100644
index 00000000..48ae7fdb
--- /dev/null
+++ b/pkg/lexer/testdata/lasso_local.lasso
@@ -0,0 +1 @@
+local(one, two, three, four) = (:1, 2, 3, 4, 5, 6)
diff --git a/pkg/lexer/testdata/lasso_shebang.lasso b/pkg/lexer/testdata/lasso_shebang.lasso
new file mode 100644
index 00000000..7f385222
--- /dev/null
+++ b/pkg/lexer/testdata/lasso_shebang.lasso
@@ -0,0 +1,3 @@
+#!/usr/bin/lasso9
+
+'The current date is ' + date
diff --git a/pkg/lexer/testdata/limbo_basic.b b/pkg/lexer/testdata/limbo_basic.b
new file mode 100644
index 00000000..462762c9
--- /dev/null
+++ b/pkg/lexer/testdata/limbo_basic.b
@@ -0,0 +1,18 @@
+implement Values;
+
+include "sys.m";
+
+sys: Sys;
+print, sprint: import sys;
+
+Values: module {
+ init: fn(nil: ref Draw->Context, nil: list of string);
+};
+
+init(nil: ref Draw->Context, nil: list of string) {
+ sys = load Sys Sys->PATH;
+
+ str := "String!";
+
+ exit;
+}
diff --git a/pkg/lexer/testdata/logos_basic.xm b/pkg/lexer/testdata/logos_basic.xm
new file mode 100644
index 00000000..39753e23
--- /dev/null
+++ b/pkg/lexer/testdata/logos_basic.xm
@@ -0,0 +1,28 @@
+%hook ABC
+- (id)a:(B)b {
+ %log;
+ return %orig(nil);
+}
+%end
+
+%subclass DEF: NSObject
+- (id)init {
+ [%c(RuntimeAccessibleClass) alloc];
+ return nil;
+}
+%end
+
+%group OptionalHooks
+%hook ABC
+- (void)release {
+ [self retain];
+ %orig;
+}
+%end
+%end
+
+%ctor {
+ %init;
+ if(OptionalCondition)
+ %init(OptionalHooks);
+}
diff --git a/pkg/lexer/testdata/logtalk_basic.lgt b/pkg/lexer/testdata/logtalk_basic.lgt
new file mode 100644
index 00000000..cff1003f
--- /dev/null
+++ b/pkg/lexer/testdata/logtalk_basic.lgt
@@ -0,0 +1,2 @@
+:- public(p1/0).
+p1 :- write('This is a public predicate'), nl.
diff --git a/pkg/lexer/testdata/logtalk_object.lgt b/pkg/lexer/testdata/logtalk_object.lgt
new file mode 100644
index 00000000..aa010133
--- /dev/null
+++ b/pkg/lexer/testdata/logtalk_object.lgt
@@ -0,0 +1 @@
+:- object(my_first_object).
diff --git a/pkg/lexer/testdata/makefile b/pkg/lexer/testdata/makefile
new file mode 100644
index 00000000..bf272c5a
--- /dev/null
+++ b/pkg/lexer/testdata/makefile
@@ -0,0 +1,1131 @@
+# Generated automatically from Makefile.pre by makesetup.
+# Top-level Makefile for Python
+#
+# As distributed, this file is called Makefile.pre.in; it is processed
+# into the real Makefile by running the script ./configure, which
+# replaces things like @spam@ with values appropriate for your system.
+# This means that if you edit Makefile, your changes get lost the next
+# time you run the configure script. Ideally, you can do:
+#
+# ./configure
+# make
+# make test
+# make install
+#
+# If you have a previous version of Python installed that you don't
+# want to overwrite, you can use "make altinstall" instead of "make
+# install". Refer to the "Installing" section in the README file for
+# additional details.
+#
+# See also the section "Build instructions" in the README file.
+
+# === Variables set by makesetup ===
+
+MODOBJS= Modules/threadmodule.o Modules/signalmodule.o Modules/posixmodule.o Modules/errnomodule.o Modules/pwdmodule.o Modules/_sre.o Modules/_codecsmodule.o Modules/zipimport.o Modules/symtablemodule.o Modules/xxsubtype.o
+MODLIBS= $(LOCALMODLIBS) $(BASEMODLIBS)
+
+# === Variables set by configure
+VERSION= 2.6
+srcdir= .
+
+
+CC= gcc -pthread
+CXX= g++ -pthread
+MAINCC= $(CC)
+LINKCC= $(PURIFY) $(MAINCC)
+AR= ar
+RANLIB= ranlib
+SVNVERSION= svnversion $(srcdir)
+
+# Shell used by make (some versions default to the login shell, which is bad)
+SHELL= /bin/sh
+
+# Use this to make a link between python$(VERSION) and python in $(BINDIR)
+LN= ln
+
+# Portable install script (configure doesn't always guess right)
+INSTALL= /usr/bin/install -c
+INSTALL_PROGRAM=${INSTALL}
+INSTALL_SCRIPT= ${INSTALL}
+INSTALL_DATA= ${INSTALL} -m 644
+# Shared libraries must be installed with executable mode on some systems;
+# rather than figuring out exactly which, we always give them executable mode.
+# Also, making them read-only seems to be a good idea...
+INSTALL_SHARED= ${INSTALL} -m 555
+
+MAKESETUP= $(srcdir)/Modules/makesetup
+
+# Compiler options
+OPT= -g -Wall -Wstrict-prototypes
+BASECFLAGS= -fno-strict-aliasing
+CFLAGS= $(BASECFLAGS) $(OPT) $(EXTRA_CFLAGS)
+# Both CPPFLAGS and LDFLAGS need to contain the shell's value for setup.py to
+# be able to build extension modules using the directories specified in the
+# environment variables
+CPPFLAGS= -I. -I$(srcdir)/Include
+LDFLAGS=
+LDLAST=
+SGI_ABI=
+CCSHARED= -fPIC
+LINKFORSHARED= -Xlinker -export-dynamic
+# Extra C flags added for building the interpreter object files.
+CFLAGSFORSHARED=
+# C flags used for building the interpreter object files
+PY_CFLAGS= $(CFLAGS) $(CPPFLAGS) $(CFLAGSFORSHARED) -DPy_BUILD_CORE
+
+
+# Machine-dependent subdirectories
+MACHDEP= linux2
+
+# Install prefix for architecture-independent files
+prefix= /usr/local
+
+# Install prefix for architecture-dependent files
+exec_prefix= ${prefix}
+
+# Expanded directories
+BINDIR= $(exec_prefix)/bin
+LIBDIR= $(exec_prefix)/lib
+MANDIR= ${prefix}/man
+INCLUDEDIR= ${prefix}/include
+CONFINCLUDEDIR= $(exec_prefix)/include
+SCRIPTDIR= $(prefix)/lib
+
+# Detailed destination directories
+BINLIBDEST= $(LIBDIR)/python$(VERSION)
+LIBDEST= $(SCRIPTDIR)/python$(VERSION)
+INCLUDEPY= $(INCLUDEDIR)/python$(VERSION)
+CONFINCLUDEPY= $(CONFINCLUDEDIR)/python$(VERSION)
+LIBP= $(LIBDIR)/python$(VERSION)
+
+# Symbols used for using shared libraries
+SO= .so
+LDSHARED= $(CC) -shared
+BLDSHARED= $(CC) -shared
+DESTSHARED= $(BINLIBDEST)/lib-dynload
+
+# Executable suffix (.exe on Windows and Mac OS X)
+EXE=
+BUILDEXE=
+
+# Short name and location for Mac OS X Python framework
+UNIVERSALSDK=
+PYTHONFRAMEWORK=
+PYTHONFRAMEWORKDIR= no-framework
+PYTHONFRAMEWORKPREFIX=
+PYTHONFRAMEWORKINSTALLDIR=
+# Deployment target selected during configure, to be checked
+# by distutils. The export statement is needed to ensure that the
+# deployment target is active during build.
+MACOSX_DEPLOYMENT_TARGET=
+#export MACOSX_DEPLOYMENT_TARGET
+
+# Options to enable prebinding (for fast startup prior to Mac OS X 10.3)
+OTHER_LIBTOOL_OPT=
+
+# Environment to run shared python without installed libraries
+RUNSHARED=
+
+# Modes for directories, executables and data files created by the
+# install process. Default to user-only-writable for all file types.
+DIRMODE= 755
+EXEMODE= 755
+FILEMODE= 644
+
+# configure script arguments
+CONFIG_ARGS= '--with-pydebug'
+
+
+# Subdirectories with code
+SRCDIRS= Parser Grammar Objects Python Modules Mac
+
+# Other subdirectories
+SUBDIRSTOO= Include Lib Misc Demo
+
+# Files and directories to be distributed
+CONFIGFILES= configure configure.in acconfig.h pyconfig.h.in Makefile.pre.in
+DISTFILES= README ChangeLog $(CONFIGFILES)
+DISTDIRS= $(SUBDIRS) $(SUBDIRSTOO) Ext-dummy
+DIST= $(DISTFILES) $(DISTDIRS)
+
+
+LIBRARY= libpython$(VERSION).a
+LDLIBRARY= libpython$(VERSION).a
+BLDLIBRARY= $(LDLIBRARY)
+DLLLIBRARY=
+LDLIBRARYDIR=
+INSTSONAME= $(LDLIBRARY)
+
+
+LIBS= -lpthread -ldl -lutil
+LIBM= -lm
+LIBC=
+SYSLIBS= $(LIBM) $(LIBC)
+SHLIBS= $(LIBS)
+
+THREADOBJ= Python/thread.o
+DLINCLDIR= .
+DYNLOADFILE= dynload_shlib.o
+MACHDEP_OBJS=
+UNICODE_OBJS= Objects/unicodeobject.o Objects/unicodectype.o
+
+PYTHON= python$(EXE)
+BUILDPYTHON= python$(BUILDEXE)
+
+# === Definitions added by makesetup ===
+
+LOCALMODLIBS=
+BASEMODLIBS=
+GLHACK=-Dclear=__GLclear
+PYTHONPATH=$(COREPYTHONPATH)
+COREPYTHONPATH=$(DESTPATH)$(SITEPATH)$(TESTPATH)$(MACHDEPPATH)$(EXTRAMACHDEPPATH)$(TKPATH)
+TKPATH=:lib-tk
+EXTRAMACHDEPPATH=
+MACHDEPPATH=:plat-$(MACHDEP)
+TESTPATH=
+SITEPATH=
+DESTPATH=
+MACHDESTLIB=$(BINLIBDEST)
+DESTLIB=$(LIBDEST)
+
+
+
+##########################################################################
+# Modules
+MODULE_OBJS= \
+ Modules/config.o \
+ Modules/getpath.o \
+ Modules/main.o \
+ Modules/gcmodule.o
+
+# Used of signalmodule.o is not available
+SIGNAL_OBJS=
+
+
+##########################################################################
+# Grammar
+GRAMMAR_H= $(srcdir)/Include/graminit.h
+GRAMMAR_C= $(srcdir)/Python/graminit.c
+GRAMMAR_INPUT= $(srcdir)/Grammar/Grammar
+
+
+##########################################################################
+# Parser
+PGEN= Parser/pgen$(EXE)
+
+POBJS= \
+ Parser/acceler.o \
+ Parser/grammar1.o \
+ Parser/listnode.o \
+ Parser/node.o \
+ Parser/parser.o \
+ Parser/parsetok.o \
+ Parser/bitset.o \
+ Parser/metagrammar.o \
+ Parser/firstsets.o \
+ Parser/grammar.o \
+ Parser/pgen.o
+
+PARSER_OBJS= $(POBJS) Parser/myreadline.o Parser/tokenizer.o
+
+PGOBJS= \
+ Objects/obmalloc.o \
+ Python/mysnprintf.o \
+ Parser/tokenizer_pgen.o \
+ Parser/printgrammar.o \
+ Parser/pgenmain.o
+
+PGENOBJS= $(PGENMAIN) $(POBJS) $(PGOBJS)
+
+##########################################################################
+# AST
+AST_H_DIR= $(srcdir)/Include
+AST_H= $(AST_H_DIR)/Python-ast.h
+AST_C_DIR= $(srcdir)/Python
+AST_C= $(AST_C_DIR)/Python-ast.c
+AST_ASDL= $(srcdir)/Parser/Python.asdl
+
+ASDLGEN_FILES= $(srcdir)/Parser/asdl.py $(srcdir)/Parser/asdl_c.py
+# XXX Note that a build now requires Python exist before the build starts
+ASDLGEN= $(srcdir)/Parser/asdl_c.py
+
+##########################################################################
+# Python
+PYTHON_OBJS= \
+ Python/Python-ast.o \
+ Python/asdl.o \
+ Python/ast.o \
+ Python/bltinmodule.o \
+ Python/ceval.o \
+ Python/compile.o \
+ Python/codecs.o \
+ Python/errors.o \
+ Python/frozen.o \
+ Python/frozenmain.o \
+ Python/future.o \
+ Python/getargs.o \
+ Python/getcompiler.o \
+ Python/getcopyright.o \
+ Python/getmtime.o \
+ Python/getplatform.o \
+ Python/getversion.o \
+ Python/graminit.o \
+ Python/import.o \
+ Python/importdl.o \
+ Python/marshal.o \
+ Python/modsupport.o \
+ Python/mystrtoul.o \
+ Python/mysnprintf.o \
+ Python/peephole.o \
+ Python/pyarena.o \
+ Python/pyfpe.o \
+ Python/pystate.o \
+ Python/pythonrun.o \
+ Python/structmember.o \
+ Python/symtable.o \
+ Python/sysmodule.o \
+ Python/traceback.o \
+ Python/getopt.o \
+ Python/pystrtod.o \
+ Python/$(DYNLOADFILE) \
+ $(MACHDEP_OBJS) \
+ $(THREADOBJ)
+
+
+##########################################################################
+# Objects
+OBJECT_OBJS= \
+ Objects/abstract.o \
+ Objects/boolobject.o \
+ Objects/bufferobject.o \
+ Objects/cellobject.o \
+ Objects/classobject.o \
+ Objects/cobject.o \
+ Objects/codeobject.o \
+ Objects/complexobject.o \
+ Objects/descrobject.o \
+ Objects/enumobject.o \
+ Objects/exceptions.o \
+ Objects/genobject.o \
+ Objects/fileobject.o \
+ Objects/floatobject.o \
+ Objects/frameobject.o \
+ Objects/funcobject.o \
+ Objects/intobject.o \
+ Objects/iterobject.o \
+ Objects/listobject.o \
+ Objects/longobject.o \
+ Objects/dictobject.o \
+ Objects/methodobject.o \
+ Objects/moduleobject.o \
+ Objects/object.o \
+ Objects/obmalloc.o \
+ Objects/rangeobject.o \
+ Objects/setobject.o \
+ Objects/sliceobject.o \
+ Objects/stringobject.o \
+ Objects/structseq.o \
+ Objects/tupleobject.o \
+ Objects/typeobject.o \
+ Objects/weakrefobject.o \
+ $(UNICODE_OBJS)
+
+
+##########################################################################
+# objects that get linked into the Python library
+LIBRARY_OBJS= \
+ Modules/_typesmodule.o \
+ Modules/getbuildinfo.o \
+ $(PARSER_OBJS) \
+ $(OBJECT_OBJS) \
+ $(PYTHON_OBJS) \
+ $(MODULE_OBJS) \
+ $(SIGNAL_OBJS) \
+ $(MODOBJS)
+
+#########################################################################
+# Rules
+
+# Default target
+all: $(BUILDPYTHON) oldsharedmods sharedmods
+
+# Build the interpreter
+$(BUILDPYTHON): Modules/python.o $(LIBRARY) $(LDLIBRARY)
+ $(LINKCC) $(LDFLAGS) $(LINKFORSHARED) -o $@ \
+ Modules/python.o \
+ $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST)
+
+platform: $(BUILDPYTHON)
+ $(RUNSHARED) ./$(BUILDPYTHON) -E -c 'import sys ; from distutils.util import get_platform ; print get_platform()+"-"+sys.version[0:3]' >platform
+
+
+# Build the shared modules
+sharedmods: $(BUILDPYTHON)
+ @case $$MAKEFLAGS in \
+ *-s*) $(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' ./$(BUILDPYTHON) -E $(srcdir)/setup.py -q build;; \
+ *) $(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' ./$(BUILDPYTHON) -E $(srcdir)/setup.py build;; \
+ esac
+
+# Build static library
+# avoid long command lines, same as LIBRARY_OBJS
+$(LIBRARY): $(LIBRARY_OBJS)
+ -rm -f $@
+ $(AR) cr $@ Modules/getbuildinfo.o
+ $(AR) cr $@ Modules/_typesmodule.o
+ $(AR) cr $@ $(PARSER_OBJS)
+ $(AR) cr $@ $(OBJECT_OBJS)
+ $(AR) cr $@ $(PYTHON_OBJS)
+ $(AR) cr $@ $(MODULE_OBJS) $(SIGNAL_OBJS)
+ $(AR) cr $@ $(MODOBJS)
+ $(RANLIB) $@
+
+libpython$(VERSION).so: $(LIBRARY_OBJS)
+ if test $(INSTSONAME) != $(LDLIBRARY); then \
+ $(LDSHARED) -Wl,-h$(INSTSONAME) -o $(INSTSONAME) $(LIBRARY_OBJS) $(SHLIBS) $(LIBC) $(LIBM); \
+ $(LN) -f $(INSTSONAME) $@; \
+ else\
+ $(LDSHARED) -o $@ $(LIBRARY_OBJS) $(SHLIBS) $(LIBC) $(LIBM); \
+ fi
+
+libpython$(VERSION).sl: $(LIBRARY_OBJS)
+ $(LDSHARED) -o $@ $(LIBRARY_OBJS) $(SHLIBS) $(LIBC) $(LIBM)
+
+# This rule is here for OPENSTEP/Rhapsody/MacOSX. It builds a temporary
+# minimal framework (not including the Lib directory and such) in the current
+# directory.
+RESSRCDIR=$(srcdir)/Mac/Resources/framework
+$(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK): \
+ $(LIBRARY) \
+ $(RESSRCDIR)/Info.plist \
+ $(RESSRCDIR)/version.plist \
+ $(RESSRCDIR)/English.lproj/InfoPlist.strings
+ $(INSTALL) -d -m $(DIRMODE) $(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)
+ if test "${UNIVERSALSDK}"; then \
+ $(CC) -o $(LDLIBRARY) -arch i386 -arch ppc -dynamiclib \
+ -isysroot "${UNIVERSALSDK}" \
+ -all_load $(LIBRARY) -Wl,-single_module \
+ -install_name $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Versions/$(VERSION)/Python \
+ -compatibility_version $(VERSION) \
+ -current_version $(VERSION); \
+ else \
+ libtool -o $(LDLIBRARY) -dynamic $(OTHER_LIBTOOL_OPT) $(LIBRARY) \
+ ;\
+ fi
+ $(INSTALL) -d -m $(DIRMODE) \
+ $(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/Resources/English.lproj
+ $(INSTALL_DATA) $(RESSRCDIR)/Info.plist \
+ $(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/Resources/Info.plist
+ $(INSTALL_DATA) $(RESSRCDIR)/version.plist \
+ $(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/Resources/version.plist
+ $(INSTALL_DATA) $(RESSRCDIR)/English.lproj/InfoPlist.strings \
+ $(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/Resources/English.lproj/InfoPlist.strings
+ $(LN) -fsn $(VERSION) $(PYTHONFRAMEWORKDIR)/Versions/Current
+ $(LN) -fsn Versions/Current/$(PYTHONFRAMEWORK) $(PYTHONFRAMEWORKDIR)/$(PYTHONFRAMEWORK)
+ $(LN) -fsn Versions/Current/Headers $(PYTHONFRAMEWORKDIR)/Headers
+ $(LN) -fsn Versions/Current/Resources $(PYTHONFRAMEWORKDIR)/Resources
+
+# This rule builds the Cygwin Python DLL and import library if configured
+# for a shared core library; otherwise, this rule is a noop.
+$(DLLLIBRARY) libpython$(VERSION).dll.a: $(LIBRARY_OBJS)
+ if test -n "$(DLLLIBRARY)"; then \
+ $(LDSHARED) -Wl,--out-implib=$@ -o $(DLLLIBRARY) $^ \
+ $(LIBS) $(MODLIBS) $(SYSLIBS); \
+ else true; \
+ fi
+
+
+oldsharedmods: $(SHAREDMODS)
+
+
+Makefile Modules/config.c: Makefile.pre \
+ $(srcdir)/Modules/config.c.in \
+ $(MAKESETUP) \
+ Modules/Setup.config \
+ Modules/Setup \
+ Modules/Setup.local
+ $(SHELL) $(MAKESETUP) -c $(srcdir)/Modules/config.c.in \
+ -s Modules \
+ Modules/Setup.config \
+ Modules/Setup.local \
+ Modules/Setup
+ @mv config.c Modules
+ @echo "The Makefile was updated, you may need to re-run make."
+
+
+Modules/Setup: $(srcdir)/Modules/Setup.dist
+ @if test -f Modules/Setup; then \
+ echo "-----------------------------------------------"; \
+ echo "Modules/Setup.dist is newer than Modules/Setup;"; \
+ echo "check to make sure you have all the updates you"; \
+ echo "need in your Modules/Setup file."; \
+ echo "Usually, copying Setup.dist to Setup will work."; \
+ echo "-----------------------------------------------"; \
+ fi
+
+############################################################################
+# Special rules for object files
+
+Modules/getbuildinfo.o: $(PARSER_OBJS) \
+ $(OBJECT_OBJS) \
+ $(PYTHON_OBJS) \
+ $(MODULE_OBJS) \
+ $(SIGNAL_OBJS) \
+ $(MODOBJS) \
+ $(srcdir)/Modules/getbuildinfo.c
+ $(CC) -c $(PY_CFLAGS) -DSVNVERSION=\"`LC_ALL=C $(SVNVERSION)`\" -o $@ $(srcdir)/Modules/getbuildinfo.c
+
+Modules/getpath.o: $(srcdir)/Modules/getpath.c Makefile
+ $(CC) -c $(PY_CFLAGS) -DPYTHONPATH='"$(PYTHONPATH)"' \
+ -DPREFIX='"$(prefix)"' \
+ -DEXEC_PREFIX='"$(exec_prefix)"' \
+ -DVERSION='"$(VERSION)"' \
+ -DVPATH='"$(VPATH)"' \
+ -o $@ $(srcdir)/Modules/getpath.c
+
+Modules/python.o: $(srcdir)/Modules/python.c
+ $(MAINCC) -c $(PY_CFLAGS) -o $@ $(srcdir)/Modules/python.c
+
+
+$(GRAMMAR_H) $(GRAMMAR_C): $(PGEN) $(GRAMMAR_INPUT)
+ -$(PGEN) $(GRAMMAR_INPUT) $(GRAMMAR_H) $(GRAMMAR_C)
+
+$(PGEN): $(PGENOBJS)
+ $(CC) $(OPT) $(LDFLAGS) $(PGENOBJS) $(LIBS) -o $(PGEN)
+
+Parser/grammar.o: $(srcdir)/Parser/grammar.c \
+ $(srcdir)/Include/token.h \
+ $(srcdir)/Include/grammar.h
+Parser/metagrammar.o: $(srcdir)/Parser/metagrammar.c
+
+Parser/tokenizer_pgen.o: $(srcdir)/Parser/tokenizer.c
+
+Parser/pgenmain.o: $(srcdir)/Include/parsetok.h
+
+$(AST_H): $(AST_ASDL) $(ASDLGEN_FILES)
+ $(ASDLGEN) -h $(AST_H_DIR) $(AST_ASDL)
+
+$(AST_C): $(AST_ASDL) $(ASDLGEN_FILES)
+ $(ASDLGEN) -c $(AST_C_DIR) $(AST_ASDL)
+
+Python/compile.o Python/symtable.o: $(GRAMMAR_H) $(AST_H)
+
+Python/getplatform.o: $(srcdir)/Python/getplatform.c
+ $(CC) -c $(PY_CFLAGS) -DPLATFORM='"$(MACHDEP)"' -o $@ $(srcdir)/Python/getplatform.c
+
+Python/importdl.o: $(srcdir)/Python/importdl.c
+ $(CC) -c $(PY_CFLAGS) -I$(DLINCLDIR) -o $@ $(srcdir)/Python/importdl.c
+
+Objects/unicodectype.o: $(srcdir)/Objects/unicodectype.c \
+ $(srcdir)/Objects/unicodetype_db.h
+
+############################################################################
+# Header files
+
+PYTHON_HEADERS= \
+ Include/Python.h \
+ Include/Python-ast.h \
+ Include/asdl.h \
+ Include/abstract.h \
+ Include/boolobject.h \
+ Include/bufferobject.h \
+ Include/ceval.h \
+ Include/classobject.h \
+ Include/cobject.h \
+ Include/code.h \
+ Include/codecs.h \
+ Include/compile.h \
+ Include/complexobject.h \
+ Include/descrobject.h \
+ Include/dictobject.h \
+ Include/enumobject.h \
+ Include/genobject.h \
+ Include/fileobject.h \
+ Include/floatobject.h \
+ Include/funcobject.h \
+ Include/import.h \
+ Include/intobject.h \
+ Include/intrcheck.h \
+ Include/iterobject.h \
+ Include/listobject.h \
+ Include/longobject.h \
+ Include/methodobject.h \
+ Include/modsupport.h \
+ Include/moduleobject.h \
+ Include/object.h \
+ Include/objimpl.h \
+ Include/parsetok.h \
+ Include/patchlevel.h \
+ Include/pyarena.h \
+ Include/pydebug.h \
+ Include/pyerrors.h \
+ Include/pyfpe.h \
+ Include/pymem.h \
+ Include/pyport.h \
+ Include/pystate.h \
+ Include/pythonrun.h \
+ Include/rangeobject.h \
+ Include/setobject.h \
+ Include/sliceobject.h \
+ Include/stringobject.h \
+ Include/structseq.h \
+ Include/structmember.h \
+ Include/symtable.h \
+ Include/sysmodule.h \
+ Include/traceback.h \
+ Include/tupleobject.h \
+ Include/unicodeobject.h \
+ Include/weakrefobject.h \
+ pyconfig.h
+
+$(LIBRARY_OBJS) $(MODOBJS) Modules/python.o: $(PYTHON_HEADERS)
+
+
+######################################################################
+
+# Test the interpreter (twice, once without .pyc files, once with)
+# In the past, we've had problems where bugs in the marshalling or
+# elsewhere caused bytecode read from .pyc files to behave differently
+# than bytecode generated directly from a .py source file. Sometimes
+# the bytecode read from a .pyc file had the bug, somtimes the directly
+# generated bytecode. This is sometimes a very shy bug needing a lot of
+# sample data.
+
+TESTOPTS= -l $(EXTRATESTOPTS)
+TESTPROG= $(srcdir)/Lib/test/regrtest.py
+TESTPYTHON= $(RUNSHARED) ./$(BUILDPYTHON) -E -tt
+test: all platform
+ -find $(srcdir)/Lib -name '*.py[co]' -print | xargs rm -f
+ -$(TESTPYTHON) $(TESTPROG) $(TESTOPTS)
+ $(TESTPYTHON) $(TESTPROG) $(TESTOPTS)
+
+testall: all platform
+ -find $(srcdir)/Lib -name '*.py[co]' -print | xargs rm -f
+ -$(TESTPYTHON) $(TESTPROG) $(TESTOPTS) -uall
+ $(TESTPYTHON) $(TESTPROG) $(TESTOPTS) -uall
+
+# Run the unitests for both architectures in a Universal build on OSX
+# Must be run on an Intel box.
+testuniversal: all platform
+ if [ `arch` != 'i386' ];then \
+ echo "This can only be used on OSX/i386" ;\
+ exit 1 ;\
+ fi
+ -find $(srcdir)/Lib -name '*.py[co]' -print | xargs rm -f
+ -$(TESTPYTHON) $(TESTPROG) $(TESTOPTS) -uall
+ $(TESTPYTHON) $(TESTPROG) $(TESTOPTS) -uall
+ $(RUNSHARED) /usr/libexec/oah/translate ./$(BUILDPYTHON) -E -tt $(TESTPROG) $(TESTOPTS) -uall
+
+
+# Like testall, but with a single pass only
+buildbottest: all platform
+ $(TESTPYTHON) $(TESTPROG) $(TESTOPTS) -uall -rw
+
+QUICKTESTOPTS= $(TESTOPTS) -x test_thread test_signal test_strftime \
+ test_unicodedata test_re test_sre test_select test_poll \
+ test_linuxaudiodev test_struct test_sunaudiodev test_zlib
+quicktest: all platform
+ -find $(srcdir)/Lib -name '*.py[co]' -print | xargs rm -f
+ -$(TESTPYTHON) $(TESTPROG) $(QUICKTESTOPTS)
+ $(TESTPYTHON) $(TESTPROG) $(QUICKTESTOPTS)
+
+MEMTESTOPTS= $(QUICKTESTOPTS) -x test_dl test___all__ test_fork1 \
+ test_longexp
+memtest: all platform
+ -rm -f $(srcdir)/Lib/test/*.py[co]
+ -$(TESTPYTHON) $(TESTPROG) $(MEMTESTOPTS)
+ $(TESTPYTHON) $(TESTPROG) $(MEMTESTOPTS)
+
+# Install everything
+install: altinstall bininstall maninstall
+
+# Install almost everything without disturbing previous versions
+altinstall: altbininstall libinstall inclinstall libainstall \
+ sharedinstall oldsharedinstall
+
+# Install shared libraries enabled by Setup
+DESTDIRS= $(exec_prefix) $(LIBDIR) $(BINLIBDEST) $(DESTSHARED)
+
+oldsharedinstall: $(DESTSHARED) $(SHAREDMODS)
+ @for i in X $(SHAREDMODS); do \
+ if test $$i != X; then \
+ echo $(INSTALL_SHARED) $$i $(DESTSHARED)/`basename $$i`; \
+ $(INSTALL_SHARED) $$i $(DESTDIR)$(DESTSHARED)/`basename $$i`; \
+ fi; \
+ done
+
+$(DESTSHARED):
+ @for i in $(DESTDIRS); \
+ do \
+ if test ! -d $(DESTDIR)$$i; then \
+ echo "Creating directory $$i"; \
+ $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$i; \
+ else true; \
+ fi; \
+ done
+
+
+# Install the interpreter (by creating a hard link to python$(VERSION))
+bininstall: altbininstall
+ -if test -f $(DESTDIR)$(BINDIR)/$(PYTHON) -o -h $(DESTDIR)$(BINDIR)/$(PYTHON); \
+ then rm -f $(DESTDIR)$(BINDIR)/$(PYTHON); \
+ else true; \
+ fi
+ (cd $(DESTDIR)$(BINDIR); $(LN) python$(VERSION)$(EXE) $(PYTHON))
+ (cd $(DESTDIR)$(BINDIR); $(LN) -sf python$(VERSION)-config python-config)
+
+# Install the interpreter with $(VERSION) affixed
+# This goes into $(exec_prefix)
+altbininstall: $(BUILDPYTHON)
+ @for i in $(BINDIR) $(LIBDIR); \
+ do \
+ if test ! -d $(DESTDIR)$$i; then \
+ echo "Creating directory $$i"; \
+ $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$i; \
+ else true; \
+ fi; \
+ done
+ $(INSTALL_PROGRAM) $(BUILDPYTHON) $(DESTDIR)$(BINDIR)/python$(VERSION)$(EXE)
+ if test -f libpython$(VERSION)$(SO); then \
+ if test "$(SO)" = .dll; then \
+ $(INSTALL_SHARED) libpython$(VERSION)$(SO) $(DESTDIR)$(BINDIR); \
+ else \
+ $(INSTALL_SHARED) libpython$(VERSION)$(SO) $(DESTDIR)$(LIBDIR)/$(INSTSONAME); \
+ if test libpython$(VERSION)$(SO) != $(INSTSONAME); then \
+ (cd $(DESTDIR)$(LIBDIR); $(LN) -sf $(INSTSONAME) libpython$(VERSION)$(SO)); \
+ fi \
+ fi; \
+ else true; \
+ fi
+
+# Install the manual page
+maninstall:
+ @for i in $(MANDIR) $(MANDIR)/man1; \
+ do \
+ if test ! -d $(DESTDIR)$$i; then \
+ echo "Creating directory $$i"; \
+ $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$i; \
+ else true; \
+ fi; \
+ done
+ $(INSTALL_DATA) $(srcdir)/Misc/python.man \
+ $(DESTDIR)$(MANDIR)/man1/python.1
+
+# Install the library
+PLATDIR= plat-$(MACHDEP)
+EXTRAPLATDIR=
+EXTRAMACHDEPPATH=
+MACHDEPS= $(PLATDIR) $(EXTRAPLATDIR)
+XMLLIBSUBDIRS= xml xml/dom xml/etree xml/parsers xml/sax
+PLATMACDIRS= plat-mac plat-mac/Carbon plat-mac/lib-scriptpackages \
+ plat-mac/lib-scriptpackages/_builtinSuites \
+ plat-mac/lib-scriptpackages/CodeWarrior \
+ plat-mac/lib-scriptpackages/Explorer \
+ plat-mac/lib-scriptpackages/Finder \
+ plat-mac/lib-scriptpackages/Netscape \
+ plat-mac/lib-scriptpackages/StdSuites \
+ plat-mac/lib-scriptpackages/SystemEvents \
+ plat-mac/lib-scriptpackages/Terminal
+PLATMACPATH=:plat-mac:plat-mac/lib-scriptpackages
+LIBSUBDIRS= lib-tk site-packages test test/output test/data \
+ test/decimaltestdata \
+ encodings compiler hotshot \
+ email email/mime email/test email/test/data \
+ sqlite3 sqlite3/test \
+ logging bsddb bsddb/test csv wsgiref \
+ ctypes ctypes/test ctypes/macholib idlelib idlelib/Icons \
+ distutils distutils/command distutils/tests $(XMLLIBSUBDIRS) \
+ setuptools setuptools/command setuptools/tests setuptools.egg-info \
+ curses $(MACHDEPS)
+libinstall: $(BUILDPYTHON) $(srcdir)/Lib/$(PLATDIR)
+ @for i in $(SCRIPTDIR) $(LIBDEST); \
+ do \
+ if test ! -d $(DESTDIR)$$i; then \
+ echo "Creating directory $$i"; \
+ $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$i; \
+ else true; \
+ fi; \
+ done
+ @for d in $(LIBSUBDIRS); \
+ do \
+ a=$(srcdir)/Lib/$$d; \
+ if test ! -d $$a; then continue; else true; fi; \
+ b=$(LIBDEST)/$$d; \
+ if test ! -d $(DESTDIR)$$b; then \
+ echo "Creating directory $$b"; \
+ $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$b; \
+ else true; \
+ fi; \
+ done
+ @for i in $(srcdir)/Lib/*.py $(srcdir)/Lib/*.doc $(srcdir)/Lib/*.egg-info ; \
+ do \
+ if test -x $$i; then \
+ $(INSTALL_SCRIPT) $$i $(DESTDIR)$(LIBDEST); \
+ echo $(INSTALL_SCRIPT) $$i $(LIBDEST); \
+ else \
+ $(INSTALL_DATA) $$i $(DESTDIR)$(LIBDEST); \
+ echo $(INSTALL_DATA) $$i $(LIBDEST); \
+ fi; \
+ done
+ @for d in $(LIBSUBDIRS); \
+ do \
+ a=$(srcdir)/Lib/$$d; \
+ if test ! -d $$a; then continue; else true; fi; \
+ if test `ls $$a | wc -l` -lt 1; then continue; fi; \
+ b=$(LIBDEST)/$$d; \
+ for i in $$a/*; \
+ do \
+ case $$i in \
+ *CVS) ;; \
+ *.py[co]) ;; \
+ *.orig) ;; \
+ *~) ;; \
+ *) \
+ if test -d $$i; then continue; fi; \
+ if test -x $$i; then \
+ echo $(INSTALL_SCRIPT) $$i $$b; \
+ $(INSTALL_SCRIPT) $$i $(DESTDIR)$$b; \
+ else \
+ echo $(INSTALL_DATA) $$i $$b; \
+ $(INSTALL_DATA) $$i $(DESTDIR)$$b; \
+ fi;; \
+ esac; \
+ done; \
+ done
+ $(INSTALL_DATA) $(srcdir)/LICENSE $(DESTDIR)$(LIBDEST)/LICENSE.txt
+ PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \
+ ./$(BUILDPYTHON) -Wi -tt $(DESTDIR)$(LIBDEST)/compileall.py \
+ -d $(LIBDEST) -f \
+ -x 'bad_coding|badsyntax|site-packages' $(DESTDIR)$(LIBDEST)
+ PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \
+ ./$(BUILDPYTHON) -Wi -tt -O $(DESTDIR)$(LIBDEST)/compileall.py \
+ -d $(LIBDEST) -f \
+ -x 'bad_coding|badsyntax|site-packages' $(DESTDIR)$(LIBDEST)
+ -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \
+ ./$(BUILDPYTHON) -Wi -t $(DESTDIR)$(LIBDEST)/compileall.py \
+ -d $(LIBDEST)/site-packages -f \
+ -x badsyntax $(DESTDIR)$(LIBDEST)/site-packages
+ -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \
+ ./$(BUILDPYTHON) -Wi -t -O $(DESTDIR)$(LIBDEST)/compileall.py \
+ -d $(LIBDEST)/site-packages -f \
+ -x badsyntax $(DESTDIR)$(LIBDEST)/site-packages
+
+# Create the PLATDIR source directory, if one wasn't distributed..
+$(srcdir)/Lib/$(PLATDIR):
+ mkdir $(srcdir)/Lib/$(PLATDIR)
+ cp $(srcdir)/Lib/plat-generic/regen $(srcdir)/Lib/$(PLATDIR)/regen
+ export PATH; PATH="`pwd`:$$PATH"; \
+ export PYTHONPATH; PYTHONPATH="`pwd`/Lib"; \
+ export DYLD_FRAMEWORK_PATH; DYLD_FRAMEWORK_PATH="`pwd`"; \
+ export EXE; EXE="$(BUILDEXE)"; \
+ cd $(srcdir)/Lib/$(PLATDIR); ./regen
+
+# Install the include files
+INCLDIRSTOMAKE=$(INCLUDEDIR) $(CONFINCLUDEDIR) $(INCLUDEPY) $(CONFINCLUDEPY)
+inclinstall:
+ @for i in $(INCLDIRSTOMAKE); \
+ do \
+ if test ! -d $(DESTDIR)$$i; then \
+ echo "Creating directory $$i"; \
+ $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$i; \
+ else true; \
+ fi; \
+ done
+ @for i in $(srcdir)/Include/*.h; \
+ do \
+ echo $(INSTALL_DATA) $$i $(INCLUDEPY); \
+ $(INSTALL_DATA) $$i $(DESTDIR)$(INCLUDEPY); \
+ done
+ $(INSTALL_DATA) pyconfig.h $(DESTDIR)$(CONFINCLUDEPY)/pyconfig.h
+
+# Install the library and miscellaneous stuff needed for extending/embedding
+# This goes into $(exec_prefix)
+LIBPL= $(LIBP)/config
+libainstall: all
+ @for i in $(LIBDIR) $(LIBP) $(LIBPL); \
+ do \
+ if test ! -d $(DESTDIR)$$i; then \
+ echo "Creating directory $$i"; \
+ $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$i; \
+ else true; \
+ fi; \
+ done
+ @if test -d $(LIBRARY); then :; else \
+ if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \
+ if test "$(SO)" = .dll; then \
+ $(INSTALL_DATA) $(LDLIBRARY) $(DESTDIR)$(LIBPL) ; \
+ else \
+ $(INSTALL_DATA) $(LIBRARY) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \
+ $(RANLIB) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \
+ fi; \
+ else \
+ echo Skip install of $(LIBRARY) - use make frameworkinstall; \
+ fi; \
+ fi
+ $(INSTALL_DATA) Modules/config.c $(DESTDIR)$(LIBPL)/config.c
+ $(INSTALL_DATA) Modules/python.o $(DESTDIR)$(LIBPL)/python.o
+ $(INSTALL_DATA) $(srcdir)/Modules/config.c.in $(DESTDIR)$(LIBPL)/config.c.in
+ $(INSTALL_DATA) Makefile $(DESTDIR)$(LIBPL)/Makefile
+ $(INSTALL_DATA) Modules/Setup $(DESTDIR)$(LIBPL)/Setup
+ $(INSTALL_DATA) Modules/Setup.local $(DESTDIR)$(LIBPL)/Setup.local
+ $(INSTALL_DATA) Modules/Setup.config $(DESTDIR)$(LIBPL)/Setup.config
+ $(INSTALL_SCRIPT) $(srcdir)/Modules/makesetup $(DESTDIR)$(LIBPL)/makesetup
+ $(INSTALL_SCRIPT) $(srcdir)/install-sh $(DESTDIR)$(LIBPL)/install-sh
+ # Substitution happens here, as the completely-expanded BINDIR
+ # is not available in configure
+ sed -e "s,@EXENAME@,$(BINDIR)/python$(VERSION)$(EXE)," < $(srcdir)/Misc/python-config.in >python-config
+ $(INSTALL_SCRIPT) python-config $(DESTDIR)$(BINDIR)/python$(VERSION)-config
+ rm python-config
+ @if [ -s Modules/python.exp -a \
+ "`echo $(MACHDEP) | sed 's/^\(...\).*/\1/'`" = "aix" ]; then \
+ echo; echo "Installing support files for building shared extension modules on AIX:"; \
+ $(INSTALL_DATA) Modules/python.exp \
+ $(DESTDIR)$(LIBPL)/python.exp; \
+ echo; echo "$(LIBPL)/python.exp"; \
+ $(INSTALL_SCRIPT) $(srcdir)/Modules/makexp_aix \
+ $(DESTDIR)$(LIBPL)/makexp_aix; \
+ echo "$(LIBPL)/makexp_aix"; \
+ $(INSTALL_SCRIPT) $(srcdir)/Modules/ld_so_aix \
+ $(DESTDIR)$(LIBPL)/ld_so_aix; \
+ echo "$(LIBPL)/ld_so_aix"; \
+ echo; echo "See Misc/AIX-NOTES for details."; \
+ else true; \
+ fi
+ @case "$(MACHDEP)" in beos*) \
+ echo; echo "Installing support files for building shared extension modules on BeOS:"; \
+ $(INSTALL_DATA) Misc/BeOS-NOTES $(DESTDIR)$(LIBPL)/README; \
+ echo; echo "$(LIBPL)/README"; \
+ $(INSTALL_SCRIPT) Modules/ar_beos $(DESTDIR)$(LIBPL)/ar_beos; \
+ echo "$(LIBPL)/ar_beos"; \
+ $(INSTALL_SCRIPT) Modules/ld_so_beos $(DESTDIR)$(LIBPL)/ld_so_beos; \
+ echo "$(LIBPL)/ld_so_beos"; \
+ echo; echo "See Misc/BeOS-NOTES for details."; \
+ ;; \
+ esac
+
+# Install the dynamically loadable modules
+# This goes into $(exec_prefix)
+sharedinstall:
+ $(RUNSHARED) ./$(BUILDPYTHON) -E $(srcdir)/setup.py install \
+ --prefix=$(prefix) \
+ --install-scripts=$(BINDIR) \
+ --install-platlib=$(DESTSHARED) \
+ --root=/$(DESTDIR)
+
+# Here are a couple of targets for MacOSX again, to install a full
+# framework-based Python. frameworkinstall installs everything, the
+# subtargets install specific parts. Much of the actual work is offloaded to
+# the Makefile in Mac
+#
+#
+# This target is here for backward compatiblity, previous versions of Python
+# hadn't integrated framework installation in the normal install process.
+frameworkinstall: install
+
+# On install, we re-make the framework
+# structure in the install location, /Library/Frameworks/ or the argument to
+# --enable-framework. If --enable-framework has been specified then we have
+# automatically set prefix to the location deep down in the framework, so we
+# only have to cater for the structural bits of the framework.
+
+frameworkinstallframework: frameworkinstallstructure install frameworkinstallmaclib
+
+frameworkinstallstructure: $(LDLIBRARY)
+ @if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \
+ echo Not configured with --enable-framework; \
+ exit 1; \
+ else true; \
+ fi
+ @for i in $(prefix)/Resources/English.lproj $(prefix)/lib; do\
+ if test ! -d $(DESTDIR)$$i; then \
+ echo "Creating directory $(DESTDIR)$$i"; \
+ $(INSTALL) -d -m $(DIRMODE) $(DESTDIR)$$i; \
+ else true; \
+ fi; \
+ done
+ $(LN) -fsn include/python$(VERSION) $(DESTDIR)$(prefix)/Headers
+ $(INSTALL_DATA) $(RESSRCDIR)/Info.plist $(DESTDIR)$(prefix)/Resources/Info.plist
+ $(INSTALL_DATA) $(RESSRCDIR)/version.plist $(DESTDIR)$(prefix)/Resources/version.plist
+ $(INSTALL_DATA) $(RESSRCDIR)/English.lproj/InfoPlist.strings \
+ $(DESTDIR)$(prefix)/Resources/English.lproj/InfoPlist.strings
+ $(LN) -fsn $(VERSION) $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Versions/Current
+ $(LN) -fsn Versions/Current/Python $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Python
+ $(LN) -fsn Versions/Current/Headers $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Headers
+ $(LN) -fsn Versions/Current/Resources $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Resources
+ $(INSTALL_SHARED) $(LDLIBRARY) $(DESTDIR)$(PYTHONFRAMEWORKPREFIX)/$(LDLIBRARY)
+
+# This installs Mac/Lib into the framework
+# Install a number of symlinks to keep software that expects a normal unix
+# install (which includes python-config) happy.
+frameworkinstallmaclib:
+ ln -fs "../../../Python" "$(DESTDIR)$(prefix)/lib/python$(VERSION)/config/libpython$(VERSION).a"
+ cd Mac && $(MAKE) installmacsubtree DESTDIR="$(DESTDIR)"
+
+# This installs the IDE, the Launcher and other apps into /Applications
+frameworkinstallapps:
+ cd Mac && $(MAKE) installapps DESTDIR="$(DESTDIR)"
+
+# This install the unix python and pythonw tools in /usr/local/bin
+frameworkinstallunixtools:
+ cd Mac && $(MAKE) installunixtools DESTDIR="$(DESTDIR)"
+
+frameworkaltinstallunixtools:
+ cd Mac && $(MAKE) altinstallunixtools DESTDIR="$(DESTDIR)"
+
+# This installs the Demos and Tools into the applications directory.
+# It is not part of a normal frameworkinstall
+frameworkinstallextras:
+ cd Mac && Make installextras DESTDIR="$(DESTDIR)"
+
+# This installs a few of the useful scripts in Tools/scripts
+scriptsinstall:
+ SRCDIR=$(srcdir) $(RUNSHARED) \
+ ./$(BUILDPYTHON) $(srcdir)/Tools/scripts/setup.py install \
+ --prefix=$(prefix) \
+ --install-scripts=$(BINDIR) \
+ --root=/$(DESTDIR)
+
+# Build the toplevel Makefile
+Makefile.pre: Makefile.pre.in config.status
+ CONFIG_FILES=Makefile.pre CONFIG_HEADERS= $(SHELL) config.status
+ $(MAKE) -f Makefile.pre Makefile
+
+# Run the configure script.
+config.status: $(srcdir)/configure
+ $(SHELL) $(srcdir)/configure $(CONFIG_ARGS)
+
+.PRECIOUS: config.status $(BUILDPYTHON) Makefile Makefile.pre
+
+# Some make's put the object file in the current directory
+.c.o:
+ $(CC) -c $(PY_CFLAGS) -o $@ $<
+
+# Run reindent on the library
+reindent:
+ ./python$(EXEEXT) $(srcdir)/Tools/scripts/reindent.py -r $(srcdir)/Lib
+
+# Rerun configure with the same options as it was run last time,
+# provided the config.status script exists
+recheck:
+ $(SHELL) config.status --recheck
+ $(SHELL) config.status
+
+# Rebuild the configure script from configure.in; also rebuild pyconfig.h.in
+autoconf:
+ (cd $(srcdir); autoconf)
+ (cd $(srcdir); autoheader)
+
+# Create a tags file for vi
+tags::
+ cd $(srcdir); \
+ ctags -w -t Include/*.h; \
+ for i in $(SRCDIRS); do ctags -w -t -a $$i/*.[ch]; \
+ done; \
+ sort -o tags tags
+
+# Create a tags file for GNU Emacs
+TAGS::
+ cd $(srcdir); \
+ etags Include/*.h; \
+ for i in $(SRCDIRS); do etags -a $$i/*.[ch]; done
+
+# Sanitation targets -- clean leaves libraries, executables and tags
+# files, which clobber removes those as well
+pycremoval:
+ find $(srcdir) -name '*.py[co]' -exec rm -f {} ';'
+
+clean: pycremoval
+ find . -name '*.o' -exec rm -f {} ';'
+ find . -name '*.s[ol]' -exec rm -f {} ';'
+ find $(srcdir)/build -name 'fficonfig.h' -exec rm -f {} ';' || true
+ find $(srcdir)/build -name 'fficonfig.py' -exec rm -f {} ';' || true
+
+clobber: clean
+ -rm -f $(BUILDPYTHON) $(PGEN) $(LIBRARY) $(LDLIBRARY) $(DLLLIBRARY) \
+ tags TAGS \
+ config.cache config.log pyconfig.h Modules/config.c
+ -rm -rf build platform
+ -rm -rf $(PYTHONFRAMEWORKDIR)
+
+# Make things extra clean, before making a distribution:
+# remove all generated files, even Makefile[.pre]
+# Keep configure and Python-ast.[ch], it's possible they can't be generated
+distclean: clobber
+ -rm -f core Makefile Makefile.pre config.status \
+ Modules/Setup Modules/Setup.local Modules/Setup.config
+ find $(srcdir) '(' -name '*.fdc' -o -name '*~' \
+ -o -name '[@,#]*' -o -name '*.old' \
+ -o -name '*.orig' -o -name '*.rej' \
+ -o -name '*.bak' ')' \
+ -exec rm -f {} ';'
+
+# Check for smelly exported symbols (not starting with Py/_Py)
+smelly: all
+ nm -p $(LIBRARY) | \
+ sed -n "/ [TDB] /s/.* //p" | grep -v "^_*Py" | sort -u; \
+
+# Find files with funny names
+funny:
+ find $(DISTDIRS) -type d \
+ -o -name '*.[chs]' \
+ -o -name '*.py' \
+ -o -name '*.doc' \
+ -o -name '*.sty' \
+ -o -name '*.bib' \
+ -o -name '*.dat' \
+ -o -name '*.el' \
+ -o -name '*.fd' \
+ -o -name '*.in' \
+ -o -name '*.tex' \
+ -o -name '*,[vpt]' \
+ -o -name 'Setup' \
+ -o -name 'Setup.*' \
+ -o -name README \
+ -o -name Makefile \
+ -o -name ChangeLog \
+ -o -name Repository \
+ -o -name Root \
+ -o -name Entries \
+ -o -name Tag \
+ -o -name tags \
+ -o -name TAGS \
+ -o -name .cvsignore \
+ -o -name MANIFEST \
+ -o -print
+
+# Dependencies
+
+Python/thread.o: $(srcdir)/Python/thread_atheos.h $(srcdir)/Python/thread_beos.h $(srcdir)/Python/thread_cthread.h $(srcdir)/Python/thread_foobar.h $(srcdir)/Python/thread_lwp.h $(srcdir)/Python/thread_nt.h $(srcdir)/Python/thread_os2.h $(srcdir)/Python/thread_pth.h $(srcdir)/Python/thread_pthread.h $(srcdir)/Python/thread_sgi.h $(srcdir)/Python/thread_solaris.h $(srcdir)/Python/thread_wince.h
+
+# Declare targets that aren't real files
+.PHONY: all sharedmods oldsharedmods test quicktest memtest
+.PHONY: install altinstall oldsharedinstall bininstall altbininstall
+.PHONY: maninstall libinstall inclinstall libainstall sharedinstall
+.PHONY: frameworkinstall frameworkinstallframework frameworkinstallstructure
+.PHONY: frameworkinstallmaclib frameworkinstallapps frameworkinstallunixtools
+.PHONY: frameworkaltinstallunixtools recheck autoconf clean clobber distclean
+.PHONY: smelly funny
+
+# IF YOU PUT ANYTHING HERE IT WILL GO AWAY
+
+# Rules appended by makedepend
+
+Modules/threadmodule.o: $(srcdir)/Modules/threadmodule.c; $(CC) $(PY_CFLAGS) -c $(srcdir)/Modules/threadmodule.c -o Modules/threadmodule.o
+Modules/threadmodule$(SO): Modules/threadmodule.o; $(LDSHARED) Modules/threadmodule.o -o Modules/threadmodule$(SO)
+Modules/signalmodule.o: $(srcdir)/Modules/signalmodule.c; $(CC) $(PY_CFLAGS) -c $(srcdir)/Modules/signalmodule.c -o Modules/signalmodule.o
+Modules/signalmodule$(SO): Modules/signalmodule.o; $(LDSHARED) Modules/signalmodule.o -o Modules/signalmodule$(SO)
+Modules/posixmodule.o: $(srcdir)/Modules/posixmodule.c; $(CC) $(PY_CFLAGS) -c $(srcdir)/Modules/posixmodule.c -o Modules/posixmodule.o
+Modules/posixmodule$(SO): Modules/posixmodule.o; $(LDSHARED) Modules/posixmodule.o -o Modules/posixmodule$(SO)
+Modules/errnomodule.o: $(srcdir)/Modules/errnomodule.c; $(CC) $(PY_CFLAGS) -c $(srcdir)/Modules/errnomodule.c -o Modules/errnomodule.o
+Modules/errnomodule$(SO): Modules/errnomodule.o; $(LDSHARED) Modules/errnomodule.o -o Modules/errnomodule$(SO)
+Modules/pwdmodule.o: $(srcdir)/Modules/pwdmodule.c; $(CC) $(PY_CFLAGS) -c $(srcdir)/Modules/pwdmodule.c -o Modules/pwdmodule.o
+Modules/pwdmodule$(SO): Modules/pwdmodule.o; $(LDSHARED) Modules/pwdmodule.o -o Modules/pwdmodule$(SO)
+Modules/_sre.o: $(srcdir)/Modules/_sre.c; $(CC) $(PY_CFLAGS) -c $(srcdir)/Modules/_sre.c -o Modules/_sre.o
+Modules/_sre$(SO): Modules/_sre.o; $(LDSHARED) Modules/_sre.o -o Modules/_sre$(SO)
+Modules/_codecsmodule.o: $(srcdir)/Modules/_codecsmodule.c; $(CC) $(PY_CFLAGS) -c $(srcdir)/Modules/_codecsmodule.c -o Modules/_codecsmodule.o
+Modules/_codecsmodule$(SO): Modules/_codecsmodule.o; $(LDSHARED) Modules/_codecsmodule.o -o Modules/_codecsmodule$(SO)
+Modules/zipimport.o: $(srcdir)/Modules/zipimport.c; $(CC) $(PY_CFLAGS) -c $(srcdir)/Modules/zipimport.c -o Modules/zipimport.o
+Modules/zipimport$(SO): Modules/zipimport.o; $(LDSHARED) Modules/zipimport.o -o Modules/zipimport$(SO)
+Modules/symtablemodule.o: $(srcdir)/Modules/symtablemodule.c; $(CC) $(PY_CFLAGS) -c $(srcdir)/Modules/symtablemodule.c -o Modules/symtablemodule.o
+Modules/_symtablemodule$(SO): Modules/symtablemodule.o; $(LDSHARED) Modules/symtablemodule.o -o Modules/_symtablemodule$(SO)
+Modules/xxsubtype.o: $(srcdir)/Modules/xxsubtype.c; $(CC) $(PY_CFLAGS) -c $(srcdir)/Modules/xxsubtype.c -o Modules/xxsubtype.o
+Modules/xxsubtype$(SO): Modules/xxsubtype.o; $(LDSHARED) Modules/xxsubtype.o -o Modules/xxsubtype$(SO)
diff --git a/pkg/lexer/testdata/mason_calling_component.m b/pkg/lexer/testdata/mason_calling_component.m
new file mode 100644
index 00000000..41fd4bfa
--- /dev/null
+++ b/pkg/lexer/testdata/mason_calling_component.m
@@ -0,0 +1,4 @@
+<&
+ /path/to/comp.mi,
+ name=>value
+&>
diff --git a/pkg/lexer/testdata/mason_unnamed_block.m b/pkg/lexer/testdata/mason_unnamed_block.m
new file mode 100644
index 00000000..a00f89a8
--- /dev/null
+++ b/pkg/lexer/testdata/mason_unnamed_block.m
@@ -0,0 +1,5 @@
+<%class>
+has 'foo';
+has 'bar' => (required => 1);
+has 'baz' => (isa => 'Int', default => 17);
+%class>
diff --git a/pkg/lexer/testdata/matlab_comment.m b/pkg/lexer/testdata/matlab_comment.m
new file mode 100644
index 00000000..db46851f
--- /dev/null
+++ b/pkg/lexer/testdata/matlab_comment.m
@@ -0,0 +1,2 @@
+
+% comment
diff --git a/pkg/lexer/testdata/matlab_function.m b/pkg/lexer/testdata/matlab_function.m
new file mode 100644
index 00000000..003ba1f7
--- /dev/null
+++ b/pkg/lexer/testdata/matlab_function.m
@@ -0,0 +1,2 @@
+% comment
+function foo = bar(a, b, c)
diff --git a/pkg/lexer/testdata/matlab_systemcmd.m b/pkg/lexer/testdata/matlab_systemcmd.m
new file mode 100644
index 00000000..e1b662d0
--- /dev/null
+++ b/pkg/lexer/testdata/matlab_systemcmd.m
@@ -0,0 +1,3 @@
+
+!rmdir oldtests
+
diff --git a/pkg/lexer/testdata/matlab_windows.m b/pkg/lexer/testdata/matlab_windows.m
new file mode 100644
index 00000000..90fa3314
--- /dev/null
+++ b/pkg/lexer/testdata/matlab_windows.m
@@ -0,0 +1,2 @@
+% comment
+function foo = bar(a, b, c)
diff --git a/pkg/lexer/testdata/modula2_basic.def b/pkg/lexer/testdata/modula2_basic.def
new file mode 100644
index 00000000..499db8ee
--- /dev/null
+++ b/pkg/lexer/testdata/modula2_basic.def
@@ -0,0 +1,18 @@
+MODULE Areas;
+
+FROM Terminal2 IMPORT WriteString, WriteChar, WriteLn, ReadChar,
+ WriteReal, ReadReal;
+
+VAR InChar, CapInChar : CHAR;
+
+PROCEDURE AreaOfSquare;
+VAR Length, Area : REAL;
+BEGIN
+ WriteString("Square Enter length of a side : ");
+ ReadReal(Length);
+ Area := Length * Length;
+ WriteLn;
+ WriteString("The area is ");
+ WriteReal(Area,15);
+ WriteLn;
+END AreaOfSquare;
diff --git a/pkg/lexer/testdata/modula2_pascal.def b/pkg/lexer/testdata/modula2_pascal.def
new file mode 100644
index 00000000..33f969c8
--- /dev/null
+++ b/pkg/lexer/testdata/modula2_pascal.def
@@ -0,0 +1,14 @@
+Program WakaTime_Cli;
+Var
+ Num1, Num2, Sum : Integer;
+
+(* Here the main program block starts *)
+Begin {no semicolon}
+ Write('Input number 1:');
+ Readln(Num1);
+ Writeln('Input number 2:');
+ Readln(Num2);
+ Sum := Num1 + Num2; {addition}
+ Writeln(Sum);
+ Readln;
+End.
diff --git a/pkg/lexer/testdata/modula2_pascal_function.def b/pkg/lexer/testdata/modula2_pascal_function.def
new file mode 100644
index 00000000..aab52e80
--- /dev/null
+++ b/pkg/lexer/testdata/modula2_pascal_function.def
@@ -0,0 +1,13 @@
+function max(num1, num2: integer): integer;
+
+var
+ result: integer;
+
+begin
+ if (num1 > num2) then
+ result := num1
+
+ else
+ result := num2;
+ max := result;
+end;
diff --git a/pkg/lexer/testdata/mysql_backtick.sql b/pkg/lexer/testdata/mysql_backtick.sql
new file mode 100644
index 00000000..d8927346
--- /dev/null
+++ b/pkg/lexer/testdata/mysql_backtick.sql
@@ -0,0 +1 @@
+CREATE TABLE `my_table` (id INT);
diff --git a/pkg/lexer/testdata/nasm.asm b/pkg/lexer/testdata/nasm.asm
new file mode 100644
index 00000000..252dc14c
--- /dev/null
+++ b/pkg/lexer/testdata/nasm.asm
@@ -0,0 +1,15 @@
+.model small
+.stack 100h
+.data
+msg db "Merry Christmas!",'$'
+.code
+main proc
+ mov ax, SEG msg
+ mov ds, ax
+ mov dx, offset msg
+ mov ah, 9
+ int 21h
+ mov ax, 4c00h
+ int 21h
+main endp
+end main
diff --git a/pkg/lexer/testdata/nemerle_if.n b/pkg/lexer/testdata/nemerle_if.n
new file mode 100644
index 00000000..781d6337
--- /dev/null
+++ b/pkg/lexer/testdata/nemerle_if.n
@@ -0,0 +1 @@
+@if(ok, 0, -1)
diff --git a/pkg/lexer/testdata/nesc_ifdef.nc b/pkg/lexer/testdata/nesc_ifdef.nc
new file mode 100644
index 00000000..2cc51856
--- /dev/null
+++ b/pkg/lexer/testdata/nesc_ifdef.nc
@@ -0,0 +1,2 @@
+
+#ifdef LOW_POWER_LISTENING
diff --git a/pkg/lexer/testdata/nesc_ifndef.nc b/pkg/lexer/testdata/nesc_ifndef.nc
new file mode 100644
index 00000000..07926d53
--- /dev/null
+++ b/pkg/lexer/testdata/nesc_ifndef.nc
@@ -0,0 +1,2 @@
+
+#ifndef LPL_SLEEP_INTERVAL
diff --git a/pkg/lexer/testdata/nesc_include.nc b/pkg/lexer/testdata/nesc_include.nc
new file mode 100644
index 00000000..313720d5
--- /dev/null
+++ b/pkg/lexer/testdata/nesc_include.nc
@@ -0,0 +1,2 @@
+
+#include
diff --git a/pkg/lexer/testdata/notmuch b/pkg/lexer/testdata/notmuch
new file mode 100644
index 00000000..61be8c6a
--- /dev/null
+++ b/pkg/lexer/testdata/notmuch
@@ -0,0 +1,15 @@
+message{ id:5d0693e2.1c69fb81.d5fc9.1f6e@mx.google.com depth:0 match:1 excluded:0 filename:/home/user/mail/INBOX/new/1560712171_0.11014.blue,U=20254,FMD5=7e33429f656f1e6e9d79b29c3f82c57e:2,
+header{
+John Doe (1 mins. ago) (inbox unread)
+Subject: Hello world!
+From: john.doe@example.com
+Date: Sun, 16 Jun 2019 16:00:00 -0300
+header}
+body{
+part{ ID: 1, Content-type: text/plain
+#!/bin/sh
+
+echo 'Hello world!'
+part}
+body}
+message}
diff --git a/pkg/lexer/testdata/numpy.py b/pkg/lexer/testdata/numpy.py
new file mode 100644
index 00000000..ed389a97
--- /dev/null
+++ b/pkg/lexer/testdata/numpy.py
@@ -0,0 +1,6 @@
+import re
+import numpy as np
+
+arr = np.array([1, 2, 3, 4, 5])
+
+print('NumPy')
diff --git a/pkg/lexer/testdata/numpy_basic b/pkg/lexer/testdata/numpy_basic
new file mode 100644
index 00000000..ee4a05cc
--- /dev/null
+++ b/pkg/lexer/testdata/numpy_basic
@@ -0,0 +1,7 @@
+#!/usr/bin/env python3
+
+import numpy as np
+
+arr = np.array([1, 2, 3, 4, 5])
+
+print(arr)
diff --git a/pkg/lexer/testdata/numpy_from_import b/pkg/lexer/testdata/numpy_from_import
new file mode 100644
index 00000000..1eb6aa67
--- /dev/null
+++ b/pkg/lexer/testdata/numpy_from_import
@@ -0,0 +1,5 @@
+#!/usr/bin/env python3
+
+from numpy import pi
+
+print(pi)
diff --git a/pkg/lexer/testdata/objectivec_keyword_end.m b/pkg/lexer/testdata/objectivec_keyword_end.m
new file mode 100644
index 00000000..5b2538b8
--- /dev/null
+++ b/pkg/lexer/testdata/objectivec_keyword_end.m
@@ -0,0 +1 @@
+@end
diff --git a/pkg/lexer/testdata/objectivec_keyword_implementation.m b/pkg/lexer/testdata/objectivec_keyword_implementation.m
new file mode 100644
index 00000000..87e62f18
--- /dev/null
+++ b/pkg/lexer/testdata/objectivec_keyword_implementation.m
@@ -0,0 +1 @@
+@implementation
diff --git a/pkg/lexer/testdata/objectivec_keyword_protocol.m b/pkg/lexer/testdata/objectivec_keyword_protocol.m
new file mode 100644
index 00000000..fc2ac85c
--- /dev/null
+++ b/pkg/lexer/testdata/objectivec_keyword_protocol.m
@@ -0,0 +1 @@
+@protocol
diff --git a/pkg/lexer/testdata/objectivec_message.m b/pkg/lexer/testdata/objectivec_message.m
new file mode 100644
index 00000000..d9b520f9
--- /dev/null
+++ b/pkg/lexer/testdata/objectivec_message.m
@@ -0,0 +1 @@
+[ WakaTime wakatime ]
diff --git a/pkg/lexer/testdata/objectivec_nsnumber.m b/pkg/lexer/testdata/objectivec_nsnumber.m
new file mode 100644
index 00000000..12edb821
--- /dev/null
+++ b/pkg/lexer/testdata/objectivec_nsnumber.m
@@ -0,0 +1 @@
+@1234
diff --git a/pkg/lexer/testdata/objectivec_nsstring.m b/pkg/lexer/testdata/objectivec_nsstring.m
new file mode 100644
index 00000000..4401240e
--- /dev/null
+++ b/pkg/lexer/testdata/objectivec_nsstring.m
@@ -0,0 +1 @@
+@"WakaTime"
diff --git a/pkg/lexer/testdata/objectivej_import copy.j b/pkg/lexer/testdata/objectivej_import copy.j
new file mode 100644
index 00000000..62e83234
--- /dev/null
+++ b/pkg/lexer/testdata/objectivej_import copy.j
@@ -0,0 +1 @@
+@import
diff --git a/pkg/lexer/testdata/objectivej_import.j b/pkg/lexer/testdata/objectivej_import.j
new file mode 100644
index 00000000..62e83234
--- /dev/null
+++ b/pkg/lexer/testdata/objectivej_import.j
@@ -0,0 +1 @@
+@import
diff --git a/pkg/lexer/testdata/openedge_else_do.p b/pkg/lexer/testdata/openedge_else_do.p
new file mode 100644
index 00000000..23713b27
--- /dev/null
+++ b/pkg/lexer/testdata/openedge_else_do.p
@@ -0,0 +1,3 @@
+ELSE DO:
+ ans = TRUE.
+ MESSAGE "Has this order been paid?" UPDATE ans.
diff --git a/pkg/lexer/testdata/openedge_end.p b/pkg/lexer/testdata/openedge_end.p
new file mode 100644
index 00000000..0e546cfb
--- /dev/null
+++ b/pkg/lexer/testdata/openedge_end.p
@@ -0,0 +1,6 @@
+ON CHOOSE OF send-button DO:
+ RUN runRemoteProc.
+ S1 = "Ran proc(" + STRING(xmtcnt) + ")".
+ DISPLAY S1 WITH FRAME foo 1 DOWN.
+ HIDE FRAME bar.
+END.
diff --git a/pkg/lexer/testdata/openedge_end_procedure.p b/pkg/lexer/testdata/openedge_end_procedure.p
new file mode 100644
index 00000000..382a567f
--- /dev/null
+++ b/pkg/lexer/testdata/openedge_end_procedure.p
@@ -0,0 +1,7 @@
+PROCEDURE runRemoteProc:
+ DEFINE VARIABLE ix AS CHARACTER NO-UNDO.
+ ASSIGN
+ xmtcnt = xmtcnt + 1
+ ix = FILL("X", 30).
+ sh:CANCEL-REQUESTS-AFTER(10).
+END PROCEDURE.
diff --git a/pkg/lexer/testdata/pawn_tagof.pwn b/pkg/lexer/testdata/pawn_tagof.pwn
new file mode 100644
index 00000000..219d67c6
--- /dev/null
+++ b/pkg/lexer/testdata/pawn_tagof.pwn
@@ -0,0 +1,10 @@
+stock Print({_, Float, bool}:arg, arg_tag=tagof(arg))
+{
+ switch(arg_tag)
+ {
+ case (tagof(Float:)):
+ PrintFloat(Float:arg);
+ default:
+ PrintInt(_:arg);
+ }
+}
diff --git a/pkg/lexer/testdata/perl6_assign.pl6 b/pkg/lexer/testdata/perl6_assign.pl6
new file mode 100644
index 00000000..de4816b6
--- /dev/null
+++ b/pkg/lexer/testdata/perl6_assign.pl6
@@ -0,0 +1 @@
+my %hash := Hash.new;
diff --git a/pkg/lexer/testdata/perl6_enum.pl6 b/pkg/lexer/testdata/perl6_enum.pl6
new file mode 100644
index 00000000..d872c3ee
--- /dev/null
+++ b/pkg/lexer/testdata/perl6_enum.pl6
@@ -0,0 +1,3 @@
+# a sample comment
+
+enum DirStat ;
diff --git a/pkg/lexer/testdata/perl6_pod.pl6 b/pkg/lexer/testdata/perl6_pod.pl6
new file mode 100644
index 00000000..db73246f
--- /dev/null
+++ b/pkg/lexer/testdata/perl6_pod.pl6
@@ -0,0 +1,7 @@
+=begin pod
+
+Here's some POD! Wooo
+
+=end pod
+
+my %hash := Hash.new;
diff --git a/pkg/lexer/testdata/perl6_scoped_class.pl6 b/pkg/lexer/testdata/perl6_scoped_class.pl6
new file mode 100644
index 00000000..75254d9c
--- /dev/null
+++ b/pkg/lexer/testdata/perl6_scoped_class.pl6
@@ -0,0 +1,4 @@
+my class Point {
+ has Int $.x;
+ has Int $.y;
+}
diff --git a/pkg/lexer/testdata/perl6_shebang.pl6 b/pkg/lexer/testdata/perl6_shebang.pl6
new file mode 100644
index 00000000..ec460af4
--- /dev/null
+++ b/pkg/lexer/testdata/perl6_shebang.pl6
@@ -0,0 +1 @@
+#!/usr/bin/env perl6
diff --git a/pkg/lexer/testdata/perl6_v6.pl6 b/pkg/lexer/testdata/perl6_v6.pl6
new file mode 100644
index 00000000..96a23abc
--- /dev/null
+++ b/pkg/lexer/testdata/perl6_v6.pl6
@@ -0,0 +1 @@
+use v6;
diff --git a/pkg/lexer/testdata/perl_basic.pl b/pkg/lexer/testdata/perl_basic.pl
new file mode 100644
index 00000000..786cf623
--- /dev/null
+++ b/pkg/lexer/testdata/perl_basic.pl
@@ -0,0 +1 @@
+my $string = "wakatime-cli";
diff --git a/pkg/lexer/testdata/perl_shebang.pl b/pkg/lexer/testdata/perl_shebang.pl
new file mode 100644
index 00000000..5ba39a26
--- /dev/null
+++ b/pkg/lexer/testdata/perl_shebang.pl
@@ -0,0 +1 @@
+#!/usr/bin/env perl
diff --git a/pkg/lexer/testdata/perl_unicon_like.pl b/pkg/lexer/testdata/perl_unicon_like.pl
new file mode 100644
index 00000000..8ba51658
--- /dev/null
+++ b/pkg/lexer/testdata/perl_unicon_like.pl
@@ -0,0 +1,2 @@
+while line := read(f) do
+ write(line)
diff --git a/pkg/lexer/testdata/povray_camera.pov b/pkg/lexer/testdata/povray_camera.pov
new file mode 100644
index 00000000..34068d6e
--- /dev/null
+++ b/pkg/lexer/testdata/povray_camera.pov
@@ -0,0 +1,9 @@
+//Place the camera
+camera {
+ sky <0,0,1> //Don't change this
+ direction <-1,0,0> //Don't change this
+ right <-4/3,0,0> //Don't change this
+ location <30,10,1.5> //Camera location
+ look_at <0,0,0> //Where camera is pointing
+ angle 15 //Angle of the view--increase to see more, decrease to see less
+}
diff --git a/pkg/lexer/testdata/povray_declare.pov b/pkg/lexer/testdata/povray_declare.pov
new file mode 100644
index 00000000..840474c2
--- /dev/null
+++ b/pkg/lexer/testdata/povray_declare.pov
@@ -0,0 +1,5 @@
+//Create a box that extends between the 2 specified points
+#declare mycube = box {
+ <0,0,0> // one corner position
+ <1,1,1> // other corner position
+}
diff --git a/pkg/lexer/testdata/povray_light_source.pov b/pkg/lexer/testdata/povray_light_source.pov
new file mode 100644
index 00000000..10631e12
--- /dev/null
+++ b/pkg/lexer/testdata/povray_light_source.pov
@@ -0,0 +1,5 @@
+//Place a light--you can have more than one!
+light_source {
+ <10,-10,20> //Change this if you want to put the light at a different point
+ color White*2 //Multiplying by 2 doubles the brightness
+}
diff --git a/pkg/lexer/testdata/povray_version.pov b/pkg/lexer/testdata/povray_version.pov
new file mode 100644
index 00000000..d3a5ce06
--- /dev/null
+++ b/pkg/lexer/testdata/povray_version.pov
@@ -0,0 +1 @@
+#version 1.0; // Change to 1.0 mode
diff --git a/pkg/lexer/testdata/prolog.ecl b/pkg/lexer/testdata/prolog.ecl
new file mode 100644
index 00000000..c30bf303
--- /dev/null
+++ b/pkg/lexer/testdata/prolog.ecl
@@ -0,0 +1,6 @@
+%% Sorted is a sorted version of List if Sorted is
+%% a permutation of List (same elements in possibly
+%% different order) and Sorted is sorted (second rule).
+sorted(List, Sorted) :-
+ perm(List, Sorted),
+ sorted(Sorted).
diff --git a/pkg/lexer/testdata/python2_shebang.py b/pkg/lexer/testdata/python2_shebang.py
new file mode 100644
index 00000000..18ff5363
--- /dev/null
+++ b/pkg/lexer/testdata/python2_shebang.py
@@ -0,0 +1 @@
+#!/usr/bin/env python2
diff --git a/pkg/lexer/testdata/python3_import.py b/pkg/lexer/testdata/python3_import.py
new file mode 100644
index 00000000..fe1ab764
--- /dev/null
+++ b/pkg/lexer/testdata/python3_import.py
@@ -0,0 +1,3 @@
+import math
+
+print(math.pi)
diff --git a/pkg/lexer/testdata/python3_shebang.py b/pkg/lexer/testdata/python3_shebang.py
new file mode 100644
index 00000000..e5a0d9b4
--- /dev/null
+++ b/pkg/lexer/testdata/python3_shebang.py
@@ -0,0 +1 @@
+#!/usr/bin/env python3
diff --git a/pkg/lexer/testdata/qbasic_dynamiccmd.bas b/pkg/lexer/testdata/qbasic_dynamiccmd.bas
new file mode 100644
index 00000000..8ee6d63c
--- /dev/null
+++ b/pkg/lexer/testdata/qbasic_dynamiccmd.bas
@@ -0,0 +1,2 @@
+REM $DYNAMIC 'create dynamic arrays only
+DIM array(10) 'create array with 11 elements
diff --git a/pkg/lexer/testdata/qbasic_staticcmd.bas b/pkg/lexer/testdata/qbasic_staticcmd.bas
new file mode 100644
index 00000000..67f4fb25
--- /dev/null
+++ b/pkg/lexer/testdata/qbasic_staticcmd.bas
@@ -0,0 +1,4 @@
+REM $STATIC
+
+INPUT "Enter array size: ", size
+DIM array(size) 'using an actual number instead of the variable will create an error!
diff --git a/pkg/lexer/testdata/r_expression.r b/pkg/lexer/testdata/r_expression.r
new file mode 100644
index 00000000..fe4e2a28
--- /dev/null
+++ b/pkg/lexer/testdata/r_expression.r
@@ -0,0 +1 @@
+x <- 1:12
diff --git a/pkg/lexer/testdata/ragel.rl b/pkg/lexer/testdata/ragel.rl
new file mode 100644
index 00000000..b291770a
--- /dev/null
+++ b/pkg/lexer/testdata/ragel.rl
@@ -0,0 +1,17 @@
+/*
+ * @LANG: indep
+ */
+
+%%{
+ machine any1;
+ main := any;
+}%%
+
+##### INPUT #####
+""
+"x"
+"xx"
+##### OUTPUT #####
+FAIL
+ACCEPT
+FAIL
\ No newline at end of file
diff --git a/pkg/lexer/testdata/raise.rsl b/pkg/lexer/testdata/raise.rsl
new file mode 100644
index 00000000..4f033988
--- /dev/null
+++ b/pkg/lexer/testdata/raise.rsl
@@ -0,0 +1,12 @@
+scheme SET_DATABASE =
+ class
+ type
+ Database = Person-set,
+ Person = Text
+ value
+ empty : Database = {} ,
+ register : Person × Database → Database
+ register(p,db) ≡ db ∪ { p } ,
+ is_in : Person × Database → Bool
+ is_in(p,db) ≡ p ∈ db
+ end
diff --git a/pkg/lexer/testdata/rebol.r b/pkg/lexer/testdata/rebol.r
new file mode 100644
index 00000000..d7a0e7ca
--- /dev/null
+++ b/pkg/lexer/testdata/rebol.r
@@ -0,0 +1,30 @@
+REBOL [
+ Title: "Resizable Digital Clock"
+ Version: 1.3.3
+ Author: "Carl Sassenrath"
+]
+
+f: layout [
+ origin 0
+ b: banner 140x32 rate 1
+ effect [gradient 0x1 0.0.150 0.0.50]
+ feel [engage: func [f a e] [set-face b now/time]]
+]
+
+resize: does [
+ b/size: max 20x20 min 1000x200 f/size
+ b/font/size: max 24 f/size/y - 40
+ b/text: "Resize Me"
+ b/size/x: 1024 ; for size-text
+ b/size/x: 20 + first size-text b
+ f/size: b/size
+ show f
+]
+
+view/options/new f 'resize
+resize
+insert-event-func [
+ if event/type = 'resize [resize]
+ event
+]
+do-events
diff --git a/pkg/lexer/testdata/rebol_header_preceding_text.r b/pkg/lexer/testdata/rebol_header_preceding_text.r
new file mode 100644
index 00000000..999a235b
--- /dev/null
+++ b/pkg/lexer/testdata/rebol_header_preceding_text.r
@@ -0,0 +1,25 @@
+preface.... everything what is before header is not evaluated
+so this should not be colorized:
+1 + 2
+
+REBOL [] ;<- this is minimal header, everything behind it must be colorized
+
+;## String tests ##
+print "Hello ^"World" ;<- with escaped char
+multiline-string: {
+ bla bla "bla" {bla}
+}
+char-a: #"a"
+escaped-a: #"^(61)"
+new-line: #"^/"
+
+;## Binaries ##
+print decompress 64#{eJzLSM3JyQcABiwCFQUAAAA=}
+;2#{0000 00000} ;<- this one is invalid!
+2#{}
+#{FF00}
+
+;##Date + time ##
+1-Feb-2009
+1-Feb-2009/2:24:46+1:0
+1:0 1:1:1 -0:1.1
diff --git a/pkg/lexer/testdata/resource.txt b/pkg/lexer/testdata/resource.txt
new file mode 100644
index 00000000..219fdc05
--- /dev/null
+++ b/pkg/lexer/testdata/resource.txt
@@ -0,0 +1,8 @@
+root:table {
+ usage:string { "Usage: genrb [Options] files" }
+ version:int { 122 }
+ errorcodes:array {
+ :string { "Invalid argument" }
+ :string { "File not found" }
+ }
+}
diff --git a/pkg/lexer/testdata/rhtml.rhtml b/pkg/lexer/testdata/rhtml.rhtml
new file mode 100644
index 00000000..27fa75c3
--- /dev/null
+++ b/pkg/lexer/testdata/rhtml.rhtml
@@ -0,0 +1,13 @@
+
+
+
+ A minimal rhtml example
+
+
+
+ <% @products.each do |p| %>
+ - <%= @p.name %>
+ <% end %>
+
+
+
diff --git a/pkg/lexer/testdata/scdoc_asterisk.scd b/pkg/lexer/testdata/scdoc_asterisk.scd
new file mode 100644
index 00000000..73b734a1
--- /dev/null
+++ b/pkg/lexer/testdata/scdoc_asterisk.scd
@@ -0,0 +1 @@
+\_This formatting\_ will not be interpreted by scdoc.
diff --git a/pkg/lexer/testdata/scdoc_underscore.scd b/pkg/lexer/testdata/scdoc_underscore.scd
new file mode 100644
index 00000000..da721711
--- /dev/null
+++ b/pkg/lexer/testdata/scdoc_underscore.scd
@@ -0,0 +1 @@
+\*This formatting\* will not be interpreted by scdoc.
diff --git a/pkg/lexer/testdata/singularity_full.def b/pkg/lexer/testdata/singularity_full.def
new file mode 100644
index 00000000..788bd1b5
--- /dev/null
+++ b/pkg/lexer/testdata/singularity_full.def
@@ -0,0 +1,12 @@
+BoOtStRaP: library # pass: headers are case-insensitive
+# pass: do not highlight '%'
+MirrorURL: http://mirror.centos.org/centos-%{OSVERSION}/%{OSVERSION}/os/$basearch/
+ From: ubuntu:18.04 # pass: leading whitespace allowed
+
+%setup
+ touch /file1
+touch ${SINGULARITY_ROOTFS}/file2 # pass: leading whitespace optional
+
+%files
+ /file1
+ /file1 /opt
diff --git a/pkg/lexer/testdata/singularity_only_header.def b/pkg/lexer/testdata/singularity_only_header.def
new file mode 100644
index 00000000..d477c551
--- /dev/null
+++ b/pkg/lexer/testdata/singularity_only_header.def
@@ -0,0 +1,4 @@
+BoOtStRaP: library # pass: headers are case-insensitive
+# pass: do not highlight '%'
+MirrorURL: http://mirror.centos.org/centos-%{OSVERSION}/%{OSVERSION}/os/$basearch/
+ From: ubuntu:18.04 # pass: leading whitespace allowed
diff --git a/pkg/lexer/testdata/singularity_only_section.def b/pkg/lexer/testdata/singularity_only_section.def
new file mode 100644
index 00000000..fef5c3ca
--- /dev/null
+++ b/pkg/lexer/testdata/singularity_only_section.def
@@ -0,0 +1,16 @@
+%setup
+ touch /file1
+touch ${SINGULARITY_ROOTFS}/file2 # pass: leading whitespace optional
+
+%files
+ /file1
+ /file1 /opt
+
+%environment
+ export LISTEN_PORT=12345
+ export LC_ALL=C
+
+%runscript
+ echo "Container was created $NOW"
+ echo "Arguments received: $*"
+ exec echo "$@"
diff --git a/pkg/lexer/testdata/slurm.sl b/pkg/lexer/testdata/slurm.sl
new file mode 100644
index 00000000..3d70d1bd
--- /dev/null
+++ b/pkg/lexer/testdata/slurm.sl
@@ -0,0 +1,17 @@
+#!/bin/bash
+#SBATCH --job-name=serial_job_test # Job name
+#SBATCH --mail-type=END,FAIL # Mail events (NONE, BEGIN, END, FAIL, ALL)
+#SBATCH --mail-user=email@ufl.edu # Where to send mail
+#SBATCH --ntasks=1 # Run on a single CPU
+#SBATCH --mem=1gb # Job memory request
+#SBATCH --time=00:05:00 # Time limit hrs:min:sec
+#SBATCH --output=serial_test_%j.log # Standard output and error log
+pwd; hostname; date
+
+module load python
+
+echo "Running plot script on a single CPU core"
+
+python /ufrc/data/training/SLURM/plot_template.py
+
+date
diff --git a/pkg/lexer/testdata/smali_class.smali b/pkg/lexer/testdata/smali_class.smali
new file mode 100644
index 00000000..2b5d7d07
--- /dev/null
+++ b/pkg/lexer/testdata/smali_class.smali
@@ -0,0 +1 @@
+.class public LHelloWorld;
diff --git a/pkg/lexer/testdata/smali_class_keyword.smali b/pkg/lexer/testdata/smali_class_keyword.smali
new file mode 100644
index 00000000..cfc07845
--- /dev/null
+++ b/pkg/lexer/testdata/smali_class_keyword.smali
@@ -0,0 +1,27 @@
+.class public LHelloWorld;
+
+#Ye olde hello world application
+#To assemble and run this on a phone or emulator:
+#
+#java -jar smali.jar -o classes.dex HelloWorld.smali
+#zip HelloWorld.zip classes.dex
+#adb push HelloWorld.zip /data/local
+#adb shell dalvikvm -cp /data/local/HelloWorld.zip HelloWorld
+#
+#if you get out of memory type errors when running smali.jar, try
+#java -Xmx512m -jar smali.jar HelloWorld.smali
+#instead
+
+.super Ljava/lang/Object;
+
+.method public static main([Ljava/lang/String;)V
+ .registers 2
+
+ sget-object v0, Ljava/lang/System;->out:Ljava/io/PrintStream;
+
+ const-string v1, "Hello World!"
+
+ invoke-virtual {v0, v1}, Ljava/io/PrintStream;->println(Ljava/lang/String;)V
+
+ return-void
+.end method
diff --git a/pkg/lexer/testdata/smali_keyword.smali b/pkg/lexer/testdata/smali_keyword.smali
new file mode 100644
index 00000000..cf300f88
--- /dev/null
+++ b/pkg/lexer/testdata/smali_keyword.smali
@@ -0,0 +1,34 @@
+.method public getTokens(I)I
+ .locals 2
+ .param p1, "amt" # I
+
+ .prologue
+ const/4 v0, 0x0
+
+ .line 2
+ iget-boolean v1, p0, Lcom/limbenjamin/Example;->isPaid:Z
+
+ if-nez v1, :cond_1
+
+ .line 5
+ :cond_0
+ :goto_0
+ return v0
+
+ .line 2
+ :cond_1
+ iget-object v1, p0, Lcom/limbenjamin/Example;->handler:Lcom/limbenjamin/ExampleHandler;
+
+ if-eqz v1, :cond_0
+
+ .line 3
+ move v3, p1
+
+ iget-object v0, p0, Lcom/limbenjamin/Example;->handler:Lcom/limbenjamin/ExampleHandler;
+
+ invoke-interface {v0, v3}, Lcom/limbenjamin/ExampleHandler;->creditTokens(I)V
+
+ move-result v0
+
+ goto :goto_0
+.end method
diff --git a/pkg/lexer/testdata/sources-indented.list b/pkg/lexer/testdata/sources-indented.list
new file mode 100644
index 00000000..3609f013
--- /dev/null
+++ b/pkg/lexer/testdata/sources-indented.list
@@ -0,0 +1,3 @@
+
+ deb http://deb.debian.org/debian buster main
+ deb-src http://deb.debian.org/debian buster main
diff --git a/pkg/lexer/testdata/sources-invalid.list b/pkg/lexer/testdata/sources-invalid.list
new file mode 100644
index 00000000..2cf16e14
--- /dev/null
+++ b/pkg/lexer/testdata/sources-invalid.list
@@ -0,0 +1,3 @@
+
+ xxx deb http://deb.debian.org/debian buster main
+ xxx deb-src http://deb.debian.org/debian buster main
diff --git a/pkg/lexer/testdata/sources.list b/pkg/lexer/testdata/sources.list
new file mode 100644
index 00000000..37177509
--- /dev/null
+++ b/pkg/lexer/testdata/sources.list
@@ -0,0 +1,8 @@
+deb http://deb.debian.org/debian buster main
+deb-src http://deb.debian.org/debian buster main
+
+deb http://deb.debian.org/debian-security/ buster/updates main
+deb-src http://deb.debian.org/debian-security/ buster/updates main
+
+deb http://deb.debian.org/debian buster-updates main
+deb-src http://deb.debian.org/debian buster-updates main
diff --git a/pkg/lexer/testdata/ssp_basic.ssp b/pkg/lexer/testdata/ssp_basic.ssp
new file mode 100644
index 00000000..08d80c9c
--- /dev/null
+++ b/pkg/lexer/testdata/ssp_basic.ssp
@@ -0,0 +1,2 @@
+<%@ import val model: Person %>
+Hello ${name}, what is the weather like in ${city}
diff --git a/pkg/lexer/testdata/stan_basic.stan b/pkg/lexer/testdata/stan_basic.stan
new file mode 100644
index 00000000..83f06fc5
--- /dev/null
+++ b/pkg/lexer/testdata/stan_basic.stan
@@ -0,0 +1,18 @@
+data {
+ int n; //number of schools
+ real y[n]; // effect of coaching
+ real sigma[n]; // standard errors of effects
+}
+parameters {
+ real mu; // the overall mean effect
+ real tau; // the inverse variance of the effect
+ vector[n] eta; // standardized school-level effects (see below)
+}
+transformed parameters {
+ vector[n] theta;
+ theta = mu + tau * eta; // find theta from mu, tau, and eta
+}
+model {
+ target += normal_lpdf(eta | 0, 1); // eta follows standard normal
+ target += normal_lpdf(y | theta, sigma); // y follows normal with mean theta and sd sigma
+}
diff --git a/pkg/lexer/testdata/supercollider_sinosc.sc b/pkg/lexer/testdata/supercollider_sinosc.sc
new file mode 100644
index 00000000..8b595117
--- /dev/null
+++ b/pkg/lexer/testdata/supercollider_sinosc.sc
@@ -0,0 +1 @@
+{ [SinOsc.ar(440, 0, 0.2), SinOsc.ar(442, 0, 0.2)] }.play;
diff --git a/pkg/lexer/testdata/supercollider_thisfunctiondef.sc b/pkg/lexer/testdata/supercollider_thisfunctiondef.sc
new file mode 100644
index 00000000..2ed5be84
--- /dev/null
+++ b/pkg/lexer/testdata/supercollider_thisfunctiondef.sc
@@ -0,0 +1 @@
+[thisFunctionDef.varNames, thisFunctionDef.prototypeFrame[thisFunctionDef.numArgs ..]].flop.flatten;
diff --git a/pkg/lexer/testdata/swig copy.i b/pkg/lexer/testdata/swig copy.i
new file mode 100644
index 00000000..63204631
--- /dev/null
+++ b/pkg/lexer/testdata/swig copy.i
@@ -0,0 +1,9 @@
+%module swig_example
+
+// Add necessary symbols to generated header
+%{
+#include "swig-example.h"
+%}
+
+// Process symbols in header
+%include "swig-example.h"
diff --git a/pkg/lexer/testdata/swig.i b/pkg/lexer/testdata/swig.i
new file mode 100644
index 00000000..63204631
--- /dev/null
+++ b/pkg/lexer/testdata/swig.i
@@ -0,0 +1,9 @@
+%module swig_example
+
+// Add necessary symbols to generated header
+%{
+#include "swig-example.h"
+%}
+
+// Process symbols in header
+%include "swig-example.h"
diff --git a/pkg/lexer/testdata/swig_unknown_directive.i b/pkg/lexer/testdata/swig_unknown_directive.i
new file mode 100644
index 00000000..4062f19d
--- /dev/null
+++ b/pkg/lexer/testdata/swig_unknown_directive.i
@@ -0,0 +1 @@
+%unknown
diff --git a/pkg/lexer/testdata/tads3_game_main_def.t b/pkg/lexer/testdata/tads3_game_main_def.t
new file mode 100644
index 00000000..fdc55195
--- /dev/null
+++ b/pkg/lexer/testdata/tads3_game_main_def.t
@@ -0,0 +1,9 @@
+gameMain: GameMainDef
+ initialPlayerChar: Actor {
+ desc = "You look the same as usual, but you feel unusually sentimental."
+ }
+ showIntro
+ {
+ "wakatime-cli";
+ }
+;
diff --git a/pkg/lexer/testdata/tads3_tads_keyword.t b/pkg/lexer/testdata/tads3_tads_keyword.t
new file mode 100644
index 00000000..54648f7a
--- /dev/null
+++ b/pkg/lexer/testdata/tads3_tads_keyword.t
@@ -0,0 +1,4 @@
+for (local i in 0 .. __TADS3)
+ word += concat(
+ rand(rand('', clusters, consonants)), rand('"h"?'),
+ rand(vowels...), rand('','', 'i', 'u', rand(ends)));
diff --git a/pkg/lexer/testdata/tads3_version_info.t b/pkg/lexer/testdata/tads3_version_info.t
new file mode 100644
index 00000000..cda51148
--- /dev/null
+++ b/pkg/lexer/testdata/tads3_version_info.t
@@ -0,0 +1,4 @@
+versionInfo: GameID
+ IFID = '17d8efc3-07da-4dde-a837-ff7c4e386a77'
+ name = 'Chromalion'
+;
diff --git a/pkg/lexer/testdata/tasm.asm b/pkg/lexer/testdata/tasm.asm
new file mode 100644
index 00000000..252dc14c
--- /dev/null
+++ b/pkg/lexer/testdata/tasm.asm
@@ -0,0 +1,15 @@
+.model small
+.stack 100h
+.data
+msg db "Merry Christmas!",'$'
+.code
+main proc
+ mov ax, SEG msg
+ mov ds, ax
+ mov dx, offset msg
+ mov ah, 9
+ int 21h
+ mov ax, 4c00h
+ int 21h
+main endp
+end main
diff --git a/pkg/lexer/testdata/teraterm_commands.ttl b/pkg/lexer/testdata/teraterm_commands.ttl
new file mode 100644
index 00000000..e4ee0c81
--- /dev/null
+++ b/pkg/lexer/testdata/teraterm_commands.ttl
@@ -0,0 +1,10 @@
+strcompare c "thing"
+if result = 1 then
+ goto label_
+elseif result > -1 then
+ goto 10
+elseif d > (1+2*3)/7 then
+ messagebox "thing"
+else
+ messagebox "done"
+endif
diff --git a/pkg/lexer/testdata/transactsql_bracket.sql b/pkg/lexer/testdata/transactsql_bracket.sql
new file mode 100644
index 00000000..68cddaa6
--- /dev/null
+++ b/pkg/lexer/testdata/transactsql_bracket.sql
@@ -0,0 +1,3 @@
+SELECT *
+FROM [TableX]
+WHERE [KeyCol] = 124
diff --git a/pkg/lexer/testdata/transactsql_declare.sql b/pkg/lexer/testdata/transactsql_declare.sql
new file mode 100644
index 00000000..1ab71363
--- /dev/null
+++ b/pkg/lexer/testdata/transactsql_declare.sql
@@ -0,0 +1 @@
+DECLARE @find VARCHAR(30);
diff --git a/pkg/lexer/testdata/transactsql_go.sql b/pkg/lexer/testdata/transactsql_go.sql
new file mode 100644
index 00000000..9e19232f
--- /dev/null
+++ b/pkg/lexer/testdata/transactsql_go.sql
@@ -0,0 +1,2 @@
+DROP TABLE TestTable;
+GO
diff --git a/pkg/lexer/testdata/transactsql_variable.sql b/pkg/lexer/testdata/transactsql_variable.sql
new file mode 100644
index 00000000..ee747e8f
--- /dev/null
+++ b/pkg/lexer/testdata/transactsql_variable.sql
@@ -0,0 +1 @@
+SET @MyCounter = 0;
diff --git a/pkg/lexer/testdata/turtle_basic.ttl b/pkg/lexer/testdata/turtle_basic.ttl
new file mode 100644
index 00000000..2c9ec56a
--- /dev/null
+++ b/pkg/lexer/testdata/turtle_basic.ttl
@@ -0,0 +1,6 @@
+@base .
+@prefix dcterms: . @prefix xs: .
+@prefix mads: .
+@prefix skos: .
+PREFIX dc: # SPARQL-like syntax is OK
+@prefix : . # empty prefix is OK
diff --git a/pkg/lexer/testdata/ucode_endrepeat.u b/pkg/lexer/testdata/ucode_endrepeat.u
new file mode 100644
index 00000000..c268006f
--- /dev/null
+++ b/pkg/lexer/testdata/ucode_endrepeat.u
@@ -0,0 +1,4 @@
+repeat {
+ write(i)
+}
+endrepeat
diff --git a/pkg/lexer/testdata/ucode_endsuspend.u b/pkg/lexer/testdata/ucode_endsuspend.u
new file mode 100644
index 00000000..e8f1bd01
--- /dev/null
+++ b/pkg/lexer/testdata/ucode_endsuspend.u
@@ -0,0 +1,2 @@
+suspend |writes(" e1")\3 do writes(" e2")
+endsuspend
diff --git a/pkg/lexer/testdata/ucode_procedure.u b/pkg/lexer/testdata/ucode_procedure.u
new file mode 100644
index 00000000..9604e523
--- /dev/null
+++ b/pkg/lexer/testdata/ucode_procedure.u
@@ -0,0 +1,3 @@
+procedure main()
+write("hello, world")
+end
diff --git a/pkg/lexer/testdata/ucode_self.u b/pkg/lexer/testdata/ucode_self.u
new file mode 100644
index 00000000..6ab4775d
--- /dev/null
+++ b/pkg/lexer/testdata/ucode_self.u
@@ -0,0 +1 @@
+\self /self
diff --git a/pkg/lexer/testdata/ucode_varset.u b/pkg/lexer/testdata/ucode_varset.u
new file mode 100644
index 00000000..201a96c9
--- /dev/null
+++ b/pkg/lexer/testdata/ucode_varset.u
@@ -0,0 +1 @@
+x := "Example"
diff --git a/pkg/lexer/testdata/urbiscript_freezeif.u b/pkg/lexer/testdata/urbiscript_freezeif.u
new file mode 100644
index 00000000..90f4c084
--- /dev/null
+++ b/pkg/lexer/testdata/urbiscript_freezeif.u
@@ -0,0 +1,4 @@
+timeout(3.2s) detach({
+ freezeif(b) every(500ms) echo("tick"),
+ freezeif(!b) every(500ms) echo("tack")
+ })|;
diff --git a/pkg/lexer/testdata/urbiscript_waituntil.u b/pkg/lexer/testdata/urbiscript_waituntil.u
new file mode 100644
index 00000000..2cc64631
--- /dev/null
+++ b/pkg/lexer/testdata/urbiscript_waituntil.u
@@ -0,0 +1 @@
+waituntil (e?(1, var b));
diff --git a/pkg/lexer/testdata/vb_if.vb b/pkg/lexer/testdata/vb_if.vb
new file mode 100644
index 00000000..63e3ea71
--- /dev/null
+++ b/pkg/lexer/testdata/vb_if.vb
@@ -0,0 +1,2 @@
+
+#If DEBUG Then
diff --git a/pkg/lexer/testdata/vb_module.vb b/pkg/lexer/testdata/vb_module.vb
new file mode 100644
index 00000000..4eda5609
--- /dev/null
+++ b/pkg/lexer/testdata/vb_module.vb
@@ -0,0 +1,2 @@
+
+Module Module1
diff --git a/pkg/lexer/testdata/vb_namespace.vb b/pkg/lexer/testdata/vb_namespace.vb
new file mode 100644
index 00000000..95bdf8f8
--- /dev/null
+++ b/pkg/lexer/testdata/vb_namespace.vb
@@ -0,0 +1,2 @@
+
+Namespace WakaTime
diff --git a/pkg/lexer/testdata/vcl_preceding_comments.vcl b/pkg/lexer/testdata/vcl_preceding_comments.vcl
new file mode 100644
index 00000000..a29101d1
--- /dev/null
+++ b/pkg/lexer/testdata/vcl_preceding_comments.vcl
@@ -0,0 +1,20 @@
+#########################################################################
+# This is an example VCL file for Varnish 4.0. #
+# From: https://gist.github.com/davidthingsaker/6b0997b641fdd370a395 #
+# LICENSE: If this could help you in any way, you are obliged to use it #
+# for free with no limitations. #
+#########################################################################
+
+
+# Marker to tell the VCL compiler that this VCL has been adapted to the
+# new 4.0 format.
+vcl 4.0;
+
+import std;
+
+# Default backend definition. Set this to point to your content server.
+backend default {
+ .host = "127.0.0.1";
+ .port = "8080";
+}
+
diff --git a/pkg/lexer/testdata/vcl_top_line.vcl b/pkg/lexer/testdata/vcl_top_line.vcl
new file mode 100644
index 00000000..63566a8d
--- /dev/null
+++ b/pkg/lexer/testdata/vcl_top_line.vcl
@@ -0,0 +1,9 @@
+vcl 4.0;
+
+import std;
+
+# Default backend definition. Set this to point to your content server.
+backend default {
+ .host = "127.0.0.1";
+ .port = "8080";
+}
diff --git a/pkg/lexer/testdata/velocity_all.vm b/pkg/lexer/testdata/velocity_all.vm
new file mode 100644
index 00000000..343aa893
--- /dev/null
+++ b/pkg/lexer/testdata/velocity_all.vm
@@ -0,0 +1,27 @@
+#macro (writeTable $productList)
+ #set ($rowCount = 1)
+ #foreach($product in $productList)
+ #if ($rowCount % 2 == 0)
+ #set ($bgcolor = "#FFFFFF")
+ #else
+ #set ($bgcolor = "#CCCCCC")
+ #end
+
+ $product.name |
+ $product.price |
+
+ #set ($rowCount = $rowCount + 1)
+ #end
+#end
+
+
+
+
+ Macros Test
+
+
+
+ #writeTable($products)
+
+
+
diff --git a/pkg/lexer/testdata/velocity_foreach.vm b/pkg/lexer/testdata/velocity_foreach.vm
new file mode 100644
index 00000000..af9b1c74
--- /dev/null
+++ b/pkg/lexer/testdata/velocity_foreach.vm
@@ -0,0 +1,5 @@
+
+ #foreach( $product in $allProducts )
+ - $product
+ #end
+
diff --git a/pkg/lexer/testdata/velocity_if.vm b/pkg/lexer/testdata/velocity_if.vm
new file mode 100644
index 00000000..d545f281
--- /dev/null
+++ b/pkg/lexer/testdata/velocity_if.vm
@@ -0,0 +1,4 @@
+
+#if( $display )
+ Velocity!
+#end
diff --git a/pkg/lexer/testdata/velocity_macro.vm b/pkg/lexer/testdata/velocity_macro.vm
new file mode 100644
index 00000000..d1e2b1ef
--- /dev/null
+++ b/pkg/lexer/testdata/velocity_macro.vm
@@ -0,0 +1,4 @@
+
+#macro(getBookListLink, $readingTrackerResult)
+ $readingTrackerResult.getBookListLink()
+#end
diff --git a/pkg/lexer/testdata/velocity_reference.vm b/pkg/lexer/testdata/velocity_reference.vm
new file mode 100644
index 00000000..b2fd0f8a
--- /dev/null
+++ b/pkg/lexer/testdata/velocity_reference.vm
@@ -0,0 +1,2 @@
+
+Hello $name! Welcome to Velocity!
diff --git a/pkg/lexer/testdata/verilog_all.v b/pkg/lexer/testdata/verilog_all.v
new file mode 100644
index 00000000..e8dc1b61
--- /dev/null
+++ b/pkg/lexer/testdata/verilog_all.v
@@ -0,0 +1,10 @@
+wire A , B , C , D , E ; // simple 1 -bit wide wires
+wire [8:0] Wide ; // a 9 -bit wide wire
+reg I ;
+
+assign A = B & C ; // using a wire with an assign statement
+
+always @ ( B or C ) begin
+I = B | C ; // using wires on the right - hand side of an always@
+// assignment
+end
diff --git a/pkg/lexer/testdata/verilog_assign.v b/pkg/lexer/testdata/verilog_assign.v
new file mode 100644
index 00000000..d67f624f
--- /dev/null
+++ b/pkg/lexer/testdata/verilog_assign.v
@@ -0,0 +1 @@
+assign A = B & C ;
diff --git a/pkg/lexer/testdata/verilog_reg.v b/pkg/lexer/testdata/verilog_reg.v
new file mode 100644
index 00000000..37f585eb
--- /dev/null
+++ b/pkg/lexer/testdata/verilog_reg.v
@@ -0,0 +1 @@
+reg [3:0] binary_out ;
diff --git a/pkg/lexer/testdata/verilog_wire.v b/pkg/lexer/testdata/verilog_wire.v
new file mode 100644
index 00000000..18c22638
--- /dev/null
+++ b/pkg/lexer/testdata/verilog_wire.v
@@ -0,0 +1 @@
+wire [8:0] Wide ;
diff --git a/pkg/lexer/testdata/xml_doctype_html.xml b/pkg/lexer/testdata/xml_doctype_html.xml
new file mode 100644
index 00000000..4d415ee3
--- /dev/null
+++ b/pkg/lexer/testdata/xml_doctype_html.xml
@@ -0,0 +1 @@
+
diff --git a/pkg/lexer/testdata/xslt.xsl b/pkg/lexer/testdata/xslt.xsl
new file mode 100644
index 00000000..05d9f92a
--- /dev/null
+++ b/pkg/lexer/testdata/xslt.xsl
@@ -0,0 +1,15 @@
+
+
+
+
+
+
+ Article -
+ Authors:
+
+
+
+ -
+
+
+
diff --git a/pkg/lexer/tiddler.go b/pkg/lexer/tiddler.go
new file mode 100644
index 00000000..3a7aa31a
--- /dev/null
+++ b/pkg/lexer/tiddler.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Tiddler lexer. For TiddlyWiki5 markup.
+type Tiddler struct{}
+
+// Lexer returns the lexer.
+func (l Tiddler) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"tid"},
+ Filenames: []string{"*.tid"},
+ MimeTypes: []string{"text/vnd.tiddlywiki"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Tiddler) Name() string {
+ return heartbeat.LanguageTiddler.StringChroma()
+}
diff --git a/pkg/lexer/tnt.go b/pkg/lexer/tnt.go
new file mode 100644
index 00000000..30562333
--- /dev/null
+++ b/pkg/lexer/tnt.go
@@ -0,0 +1,33 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// TNT lexer. Lexer for Typographic Number Theory, as described in the book
+// Gödel, Escher, Bach, by Douglas R. Hofstadter, or as summarized here:
+// https://github.com/Kenny2github/language-tnt/blob/master/README.md#summary-of-tnt
+type TNT struct{}
+
+// Lexer returns the lexer.
+func (l TNT) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"tnt"},
+ Filenames: []string{"*.tnt"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (TNT) Name() string {
+ return heartbeat.LanguageTNT.StringChroma()
+}
diff --git a/pkg/lexer/todotxt.go b/pkg/lexer/todotxt.go
new file mode 100644
index 00000000..9d7a02e4
--- /dev/null
+++ b/pkg/lexer/todotxt.go
@@ -0,0 +1,34 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Todotxt lexer. Lexer for Todo.txt todo list format.
+type Todotxt struct{}
+
+// Lexer returns the lexer.
+func (l Todotxt) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"todotxt"},
+ // *.todotxt is not a standard extension for Todo.txt files; including it
+ // makes testing easier, and also makes autodetecting file type easier.
+ Filenames: []string{"todo.txt", "*.todotxt"},
+ MimeTypes: []string{"text/x-todo"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Todotxt) Name() string {
+ return heartbeat.LanguageTodotxt.StringChroma()
+}
diff --git a/pkg/lexer/trafficscript.go b/pkg/lexer/trafficscript.go
new file mode 100644
index 00000000..ff438a5f
--- /dev/null
+++ b/pkg/lexer/trafficscript.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// TrafficScript lexer. For `Riverbed Stingray Traffic Manager
+// `
+type TrafficScript struct{}
+
+// Lexer returns the lexer.
+func (l TrafficScript) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"rts", "trafficscript"},
+ Filenames: []string{"*.rts"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (TrafficScript) Name() string {
+ return heartbeat.LanguageTrafficScript.StringChroma()
+}
diff --git a/pkg/lexer/transactsql.go b/pkg/lexer/transactsql.go
new file mode 100644
index 00000000..415d0a7e
--- /dev/null
+++ b/pkg/lexer/transactsql.go
@@ -0,0 +1,82 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+var (
+ tSQLAnalyserGoRe = regexp.MustCompile(`(?i)\bgo\b`)
+ tSQLAnalyserDeclareRe = regexp.MustCompile(`(?i)\bdeclare\s+@`)
+ tSQLAnalyserVariableRe = regexp.MustCompile(`@[a-zA-Z_]\w*\b`)
+ tSQLAnalyserNameBetweenBacktickRe = regexp.MustCompile("`[a-zA-Z_]\\w*`")
+ tSQLAnalyserNameBetweenBracketRe = regexp.MustCompile(`\[[a-zA-Z_]\w*\]`)
+)
+
+// TransactSQL lexer.
+type TransactSQL struct{}
+
+// Lexer returns the lexer.
+func (l TransactSQL) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ var (
+ ok bool
+ rgxlexer *chroma.RegexLexer
+ )
+
+ if rgxlexer, ok = lexer.(*chroma.RegexLexer); !ok {
+ return nil
+ }
+
+ rgxlexer.SetAnalyser(func(text string) float32 {
+ if tSQLAnalyserDeclareRe.MatchString(text) {
+ // Found T-SQL variable declaration.
+ return 1.0
+ }
+
+ nameBetweenBacktickCount := len(tSQLAnalyserNameBetweenBacktickRe.FindAllString(text, -1))
+ nameBetweenBracketCount := len(tSQLAnalyserNameBetweenBracketRe.FindAllString(text, -1))
+
+ var result float32
+
+ // We need to check if there are any names using
+ // backticks or brackets, as otherwise both are 0
+ // and 0 >= 2 * 0, so we would always assume it's true
+ dialectNameCount := nameBetweenBacktickCount + nameBetweenBracketCount
+
+ // nolint: gocritic
+ if dialectNameCount >= 1 && nameBetweenBracketCount >= (2*nameBetweenBacktickCount) {
+ // Found at least twice as many [name] as `name`.
+ result += 0.5
+ } else if nameBetweenBracketCount > nameBetweenBacktickCount {
+ result += 0.2
+ } else if nameBetweenBracketCount > 0 {
+ result += 0.1
+ }
+
+ if tSQLAnalyserVariableRe.MatchString(text) {
+ result += 0.1
+ }
+
+ if tSQLAnalyserGoRe.MatchString(text) {
+ result += 0.1
+ }
+
+ return result
+ })
+
+ return rgxlexer
+}
+
+// Name returns the name of the lexer.
+func (TransactSQL) Name() string {
+ return heartbeat.LanguageTransactSQL.StringChroma()
+}
diff --git a/pkg/lexer/transactsql_test.go b/pkg/lexer/transactsql_test.go
new file mode 100644
index 00000000..0563bb8e
--- /dev/null
+++ b/pkg/lexer/transactsql_test.go
@@ -0,0 +1,45 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestTransactSQL_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "declare": {
+ Filepath: "testdata/transactsql_declare.sql",
+ Expected: 1.0,
+ },
+ "bracket": {
+ Filepath: "testdata/transactsql_bracket.sql",
+ Expected: 0.5,
+ },
+ "variable": {
+ Filepath: "testdata/transactsql_variable.sql",
+ Expected: 0.1,
+ },
+ "go": {
+ Filepath: "testdata/transactsql_go.sql",
+ Expected: 0.1,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.TransactSQL{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/treetop.go b/pkg/lexer/treetop.go
new file mode 100644
index 00000000..fbd6ed8b
--- /dev/null
+++ b/pkg/lexer/treetop.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Treetop lexer. A lexer for Treetop grammars.
+type Treetop struct{}
+
+// Lexer returns the lexer.
+func (l Treetop) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"treetop"},
+ Filenames: []string{"*.treetop", "*.tt"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Treetop) Name() string {
+ return heartbeat.LanguageTreetop.StringChroma()
+}
diff --git a/pkg/lexer/turtle.go b/pkg/lexer/turtle.go
new file mode 100644
index 00000000..3e926bf7
--- /dev/null
+++ b/pkg/lexer/turtle.go
@@ -0,0 +1,44 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+var turtleAnalyserRe = regexp.MustCompile(`^\s*(@base|BASE|@prefix|PREFIX)`)
+
+// Turtle lexer.
+type Turtle struct{}
+
+// Lexer returns the lexer.
+func (l Turtle) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ // Turtle and Tera Term macro files share the same file extension
+ // but each has a recognizable and distinct syntax.
+ if turtleAnalyserRe.MatchString(text) {
+ return 0.8
+ }
+
+ return 0
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (Turtle) Name() string {
+ return heartbeat.LanguageTurtle.StringChroma()
+}
diff --git a/pkg/lexer/turtle_test.go b/pkg/lexer/turtle_test.go
new file mode 100644
index 00000000..c4693b28
--- /dev/null
+++ b/pkg/lexer/turtle_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestTurtle_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/turtle_basic.ttl")
+ assert.NoError(t, err)
+
+ l := lexer.Turtle{}.Lexer()
+
+ assert.Equal(t, float32(0.8), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/ucode.go b/pkg/lexer/ucode.go
new file mode 100644
index 00000000..a6bae316
--- /dev/null
+++ b/pkg/lexer/ucode.go
@@ -0,0 +1,66 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Ucode lexer.
+type Ucode struct{}
+
+// Lexer returns the lexer.
+func (l Ucode) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"ucode"},
+ Filenames: []string{"*.u", "*.u1", "*.u2"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // endsuspend and endrepeat are unique to this language, and
+ // \self, /self doesn't seem to get used anywhere else either.
+ var result float32
+
+ if strings.Contains(text, "endsuspend") {
+ result += 0.1
+ }
+
+ if strings.Contains(text, "endrepeat") {
+ result += 0.1
+ }
+
+ if strings.Contains(text, ":=") {
+ result += 0.01
+ }
+
+ if strings.Contains(text, "procedure") && strings.Contains(text, "end") {
+ result += 0.01
+ }
+
+ // This seems quite unique to unicon -- doesn't appear in any other
+ // example source we have (A quick search reveals that \SELF appears in
+ // Perl/Raku code)
+ if strings.Contains(text, `\self`) && strings.Contains(text, "/self") {
+ result += 0.5
+ }
+
+ return result
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (Ucode) Name() string {
+ return heartbeat.LanguageUcode.StringChroma()
+}
diff --git a/pkg/lexer/ucode_test.go b/pkg/lexer/ucode_test.go
new file mode 100644
index 00000000..014146db
--- /dev/null
+++ b/pkg/lexer/ucode_test.go
@@ -0,0 +1,49 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestUcode_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "endsuspend": {
+ Filepath: "testdata/ucode_endsuspend.u",
+ Expected: 0.1,
+ },
+ "endrepeat": {
+ Filepath: "testdata/ucode_endrepeat.u",
+ Expected: 0.1,
+ },
+ "variable set": {
+ Filepath: "testdata/ucode_varset.u",
+ Expected: 0.01,
+ },
+ "procedure": {
+ Filepath: "testdata/ucode_procedure.u",
+ Expected: 0.01,
+ },
+ "self": {
+ Filepath: "testdata/ucode_self.u",
+ Expected: 0.5,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.Ucode{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/unicon.go b/pkg/lexer/unicon.go
new file mode 100644
index 00000000..a0ea0634
--- /dev/null
+++ b/pkg/lexer/unicon.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Unicon lexer.
+type Unicon struct{}
+
+// Lexer returns the lexer.
+func (l Unicon) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"unicon"},
+ Filenames: []string{"*.icn"},
+ MimeTypes: []string{"text/unicon"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (Unicon) Name() string {
+ return heartbeat.LanguageUnicon.StringChroma()
+}
diff --git a/pkg/lexer/urbiscript.go b/pkg/lexer/urbiscript.go
new file mode 100644
index 00000000..c4197cf9
--- /dev/null
+++ b/pkg/lexer/urbiscript.go
@@ -0,0 +1,52 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// UrbiScript lexer.
+type UrbiScript struct{}
+
+// Lexer returns the lexer.
+func (l UrbiScript) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"urbiscript"},
+ Filenames: []string{"*.u"},
+ MimeTypes: []string{"application/x-urbiscript"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // This is fairly similar to C and others, but freezeif and
+ // waituntil are unique keywords.
+ var result float32
+
+ if strings.Contains(text, "freezeif") {
+ result += 0.05
+ }
+
+ if strings.Contains(text, "waituntil") {
+ result += 0.05
+ }
+
+ return result
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (UrbiScript) Name() string {
+ return heartbeat.LanguageUrbiScript.StringChroma()
+}
diff --git a/pkg/lexer/urbiscript_test.go b/pkg/lexer/urbiscript_test.go
new file mode 100644
index 00000000..b403fbee
--- /dev/null
+++ b/pkg/lexer/urbiscript_test.go
@@ -0,0 +1,37 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestUrbiScript_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "freezeif": {
+ Filepath: "testdata/urbiscript_freezeif.u",
+ Expected: 0.05,
+ },
+ "waituntil": {
+ Filepath: "testdata/urbiscript_waituntil.u",
+ Expected: 0.05,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.UrbiScript{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/usd.go b/pkg/lexer/usd.go
new file mode 100644
index 00000000..74d4f44b
--- /dev/null
+++ b/pkg/lexer/usd.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// USD lexer.
+type USD struct{}
+
+// Lexer returns the lexer.
+func (l USD) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"usd", "usda"},
+ Filenames: []string{"*.usd", "*.usda"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (USD) Name() string {
+ return heartbeat.LanguageUSD.StringChroma()
+}
diff --git a/pkg/lexer/vbnet.go b/pkg/lexer/vbnet.go
new file mode 100644
index 00000000..9c3fb602
--- /dev/null
+++ b/pkg/lexer/vbnet.go
@@ -0,0 +1,42 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+var vbnetAnalyserRe = regexp.MustCompile(`(?m)^\s*(#If|Module|Namespace)`)
+
+// VBNet lexer.
+type VBNet struct{}
+
+// Lexer returns the lexer.
+func (l VBNet) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ if vbnetAnalyserRe.MatchString(text) {
+ return 0.5
+ }
+
+ return 0
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (VBNet) Name() string {
+ return heartbeat.LanguageVBNet.StringChroma()
+}
diff --git a/pkg/lexer/vbnet_test.go b/pkg/lexer/vbnet_test.go
new file mode 100644
index 00000000..65b121e0
--- /dev/null
+++ b/pkg/lexer/vbnet_test.go
@@ -0,0 +1,41 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestVBNet_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "module": {
+ Filepath: "testdata/vb_module.vb",
+ Expected: 0.5,
+ },
+ "namespace": {
+ Filepath: "testdata/vb_namespace.vb",
+ Expected: 0.5,
+ },
+ "if": {
+ Filepath: "testdata/vb_if.vb",
+ Expected: 0.5,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.VBNet{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/vbscript.go b/pkg/lexer/vbscript.go
new file mode 100644
index 00000000..345ad9fc
--- /dev/null
+++ b/pkg/lexer/vbscript.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// VBScript lexer.
+type VBScript struct{}
+
+// Lexer returns the lexer.
+func (l VBScript) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"vbscript"},
+ Filenames: []string{"*.vbs", "*.VBS"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (VBScript) Name() string {
+ return heartbeat.LanguageVBScript.StringChroma()
+}
diff --git a/pkg/lexer/vcl.go b/pkg/lexer/vcl.go
new file mode 100644
index 00000000..6b064bd7
--- /dev/null
+++ b/pkg/lexer/vcl.go
@@ -0,0 +1,57 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// VCL lexer.
+type VCL struct{}
+
+// Lexer returns the lexer.
+func (l VCL) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"vcl"},
+ Filenames: []string{"*.vcl"},
+ MimeTypes: []string{"text/x-vclsrc"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // If the very first line is 'vcl 4.0;' it's pretty much guaranteed
+ // that this is VCL
+ if strings.HasPrefix(text, "vcl 4.0;") {
+ return 1.0
+ }
+
+ if len(text) > 1000 {
+ text = text[:1000]
+ }
+
+ // Skip over comments and blank lines
+ // This is accurate enough that returning 0.9 is reasonable.
+ // Almost no VCL files start without some comments.
+ if strings.Contains(text, "\nvcl 4.0;") {
+ return 0.9
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (VCL) Name() string {
+ return heartbeat.LanguageVCL.StringChroma()
+}
diff --git a/pkg/lexer/vcl_test.go b/pkg/lexer/vcl_test.go
new file mode 100644
index 00000000..91a66d89
--- /dev/null
+++ b/pkg/lexer/vcl_test.go
@@ -0,0 +1,37 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestVCL_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "top line": {
+ Filepath: "testdata/vcl_top_line.vcl",
+ Expected: 1.0,
+ },
+ "with preceding comments": {
+ Filepath: "testdata/vcl_preceding_comments.vcl",
+ Expected: 0.9,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.VCL{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/vclsnippet.go b/pkg/lexer/vclsnippet.go
new file mode 100644
index 00000000..c57f7a3e
--- /dev/null
+++ b/pkg/lexer/vclsnippet.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// VCLSnippets lexer.
+type VCLSnippets struct{}
+
+// Lexer returns the lexer. No tokenisation rules are defined ("root" is
+// empty); the lexer only carries identification metadata used for
+// language detection. Note there are no filename patterns for snippets.
+func (l VCLSnippets) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"vclsnippets", "vclsnippet"},
+ MimeTypes: []string{"text/x-vclsnippet"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (VCLSnippets) Name() string {
+ return heartbeat.LanguageVCLSnippets.StringChroma()
+}
diff --git a/pkg/lexer/vctreestatus.go b/pkg/lexer/vctreestatus.go
new file mode 100644
index 00000000..2bc8b80a
--- /dev/null
+++ b/pkg/lexer/vctreestatus.go
@@ -0,0 +1,30 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// VCTreeStatus lexer.
+type VCTreeStatus struct{}
+
+// Lexer returns the lexer. No tokenisation rules are defined ("root" is
+// empty) and no filenames/mimetypes are registered; only the name and
+// alias are used for language detection.
+func (l VCTreeStatus) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"vctreestatus"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (VCTreeStatus) Name() string {
+ return heartbeat.LanguageVCTreeStatus.StringChroma()
+}
diff --git a/pkg/lexer/velocity.go b/pkg/lexer/velocity.go
new file mode 100644
index 00000000..c7e4b827
--- /dev/null
+++ b/pkg/lexer/velocity.go
@@ -0,0 +1,64 @@
+package lexer
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// Analyser regexes. (?s) lets .*? span newlines so a directive and its
+// matching #end may sit on different lines. Directives may be written
+// either as #name(...) or #{name}(...).
+var (
+ velocityAnalyserMacroRe = regexp.MustCompile(`(?s)#\{?macro\}?\(.*?\).*?#\{?end\}?`)
+ velocityAnalyserIfRe = regexp.MustCompile(`(?s)#\{?if\}?\(.+?\).*?#\{?end\}?`)
+ velocityAnalyserForeachRe = regexp.MustCompile(`(?s)#\{?foreach\}?\(.+?\).*?#\{?end\}?`)
+ velocityAnalyserReferenceRe = regexp.MustCompile(`\$!?\{?[a-zA-Z_]\w*(\([^)]*\))?(\.\w+(\([^)]*\))?)*\}?`)
+)
+
+// Velocity lexer.
+type Velocity struct{}
+
+// Lexer returns the lexer. No tokenisation rules are defined ("root" is
+// empty); the lexer carries identification metadata plus the additive
+// text analyser registered below.
+func (l Velocity) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"velocity"},
+ Filenames: []string{"*.vm", "*.fhtml"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ // Confidence is the sum of matched hints: macro 0.25, if 0.15,
+ // foreach 0.15, variable reference 0.01. Summed in float64 and
+ // converted once to avoid accumulating float32 rounding error.
+ lexer.SetAnalyser(func(text string) float32 {
+ var result float64
+
+ if velocityAnalyserMacroRe.MatchString(text) {
+ result += 0.25
+ }
+
+ if velocityAnalyserIfRe.MatchString(text) {
+ result += 0.15
+ }
+
+ if velocityAnalyserForeachRe.MatchString(text) {
+ result += 0.15
+ }
+
+ if velocityAnalyserReferenceRe.MatchString(text) {
+ result += 0.01
+ }
+
+ return float32(result)
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (Velocity) Name() string {
+ return heartbeat.LanguageVelocity.StringChroma()
+}
diff --git a/pkg/lexer/velocity_test.go b/pkg/lexer/velocity_test.go
new file mode 100644
index 00000000..b6eaead2
--- /dev/null
+++ b/pkg/lexer/velocity_test.go
@@ -0,0 +1,49 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+// TestVelocity_AnalyseText checks the additive confidence score returned
+// by the Velocity text analyser for each directive kind.
+func TestVelocity_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "macro": {
+ Filepath: "testdata/velocity_macro.vm",
+ Expected: 0.26,
+ },
+ "if": {
+ Filepath: "testdata/velocity_if.vm",
+ Expected: 0.16,
+ },
+ "foreach": {
+ Filepath: "testdata/velocity_foreach.vm",
+ Expected: 0.16,
+ },
+ "reference": {
+ Filepath: "testdata/velocity_reference.vm",
+ Expected: 0.01,
+ },
+ // NOTE(review): 0.16 means the fixture matches only one 0.15 hint
+ // plus a reference, not every regex as the name suggests — confirm
+ // against testdata/velocity_all.vm.
+ "all": {
+ Filepath: "testdata/velocity_all.vm",
+ Expected: 0.16,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.Velocity{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/verilog.go b/pkg/lexer/verilog.go
new file mode 100644
index 00000000..63daecea
--- /dev/null
+++ b/pkg/lexer/verilog.go
@@ -0,0 +1,52 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// Verilog lexer.
+type Verilog struct{}
+
+// Lexer returns chroma's registered Verilog lexer with a custom analyser
+// attached, or nil if the lexer is missing or is not a *chroma.RegexLexer
+// (SetAnalyser is only available on that concrete type).
+func (l Verilog) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ // Verilog code will use one of reg/wire/assign for sure, and that
+ // is not common elsewhere.
+ // NOTE: Contains matches substrings, so e.g. "register" or
+ // "assignment" anywhere in the text also counts — acceptable
+ // for a coarse 0.1-per-hint heuristic.
+ var result float32
+
+ if strings.Contains(text, "reg") {
+ result += 0.1
+ }
+
+ if strings.Contains(text, "wire") {
+ result += 0.1
+ }
+
+ if strings.Contains(text, "assign") {
+ result += 0.1
+ }
+
+ return result
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (Verilog) Name() string {
+ return heartbeat.LanguageVerilog.StringChroma()
+}
diff --git a/pkg/lexer/verilog_test.go b/pkg/lexer/verilog_test.go
new file mode 100644
index 00000000..c175d524
--- /dev/null
+++ b/pkg/lexer/verilog_test.go
@@ -0,0 +1,45 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+// TestVerilog_AnalyseText checks that each of the reg/wire/assign hints
+// contributes 0.1 to the confidence and that all three sum to 0.3.
+func TestVerilog_AnalyseText(t *testing.T) {
+ tests := map[string]struct {
+ Filepath string
+ Expected float32
+ }{
+ "reg": {
+ Filepath: "testdata/verilog_reg.v",
+ Expected: 0.1,
+ },
+ "wire": {
+ Filepath: "testdata/verilog_wire.v",
+ Expected: 0.1,
+ },
+ "assign": {
+ Filepath: "testdata/verilog_assign.v",
+ Expected: 0.1,
+ },
+ "all": {
+ Filepath: "testdata/verilog_all.v",
+ Expected: 0.3,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ data, err := os.ReadFile(test.Filepath)
+ assert.NoError(t, err)
+
+ l := lexer.Verilog{}.Lexer()
+
+ assert.Equal(t, test.Expected, l.AnalyseText(string(data)))
+ })
+ }
+}
diff --git a/pkg/lexer/vgl.go b/pkg/lexer/vgl.go
new file mode 100644
index 00000000..17488942
--- /dev/null
+++ b/pkg/lexer/vgl.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// VGL lexer.
+type VGL struct{}
+
+// Lexer returns the lexer. No tokenisation rules are defined ("root" is
+// empty); the lexer only carries identification metadata used for
+// language detection.
+func (l VGL) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"vgl"},
+ Filenames: []string{"*.rpf"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (VGL) Name() string {
+ return heartbeat.LanguageVGL.StringChroma()
+}
diff --git a/pkg/lexer/wdiff.go b/pkg/lexer/wdiff.go
new file mode 100644
index 00000000..bd61f409
--- /dev/null
+++ b/pkg/lexer/wdiff.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// WDiff lexer.
+type WDiff struct{}
+
+// Lexer returns the lexer. No tokenisation rules are defined ("root" is
+// empty); the lexer only carries identification metadata used for
+// language detection.
+func (l WDiff) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"wdiff"},
+ Filenames: []string{"*.wdiff"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (WDiff) Name() string {
+ return heartbeat.LanguageWDiff.StringChroma()
+}
diff --git a/pkg/lexer/webidl.go b/pkg/lexer/webidl.go
new file mode 100644
index 00000000..f47a66ce
--- /dev/null
+++ b/pkg/lexer/webidl.go
@@ -0,0 +1,31 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// WebIDL lexer.
+type WebIDL struct{}
+
+// Lexer returns the lexer. No tokenisation rules are defined ("root" is
+// empty); the lexer only carries identification metadata used for
+// language detection.
+func (l WebIDL) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"webidl"},
+ Filenames: []string{"*.webidl"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (WebIDL) Name() string {
+ return heartbeat.LanguageWebIDL.StringChroma()
+}
diff --git a/pkg/lexer/x10.go b/pkg/lexer/x10.go
new file mode 100644
index 00000000..82913d68
--- /dev/null
+++ b/pkg/lexer/x10.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// X10 lexer.
+type X10 struct{}
+
+// Lexer returns the lexer. No tokenisation rules are defined ("root" is
+// empty); the lexer only carries identification metadata used for
+// language detection.
+func (l X10) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"x10", "xten"},
+ Filenames: []string{"*.x10"},
+ MimeTypes: []string{"text/x-x10"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (X10) Name() string {
+ return heartbeat.LanguageX10.StringChroma()
+}
diff --git a/pkg/lexer/xaml.go b/pkg/lexer/xaml.go
new file mode 100644
index 00000000..928570f1
--- /dev/null
+++ b/pkg/lexer/xaml.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// XAML lexer.
+type XAML struct{}
+
+// Lexer returns the lexer.
+func (l XAML) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"xaml"},
+ Filenames: []string{"*.xaml"},
+ MimeTypes: []string{"application/xaml+xml"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (XAML) Name() string {
+ return heartbeat.LanguageXAML.StringChroma()
+}
diff --git a/pkg/lexer/xml.go b/pkg/lexer/xml.go
new file mode 100644
index 00000000..f6e3862b
--- /dev/null
+++ b/pkg/lexer/xml.go
@@ -0,0 +1,39 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+ "github.com/wakatime/wakatime-cli/pkg/xml"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/lexers"
+)
+
+// XML lexer.
+type XML struct{}
+
+// Lexer returns chroma's registered XML lexer with a custom analyser
+// attached, or nil if the lexer is missing or is not a *chroma.RegexLexer
+// (SetAnalyser is only available on that concrete type).
+func (l XML) Lexer() chroma.Lexer {
+ lexer := lexers.Get(l.Name())
+ if lexer == nil {
+ return nil
+ }
+
+ if lexer, ok := lexer.(*chroma.RegexLexer); ok {
+ lexer.SetAnalyser(func(text string) float32 {
+ if xml.MatchString(text) {
+ return 0.45 // less than HTML.
+ }
+
+ return 0
+ })
+
+ return lexer
+ }
+
+ return nil
+}
+
+// Name returns the name of the lexer.
+func (XML) Name() string {
+ return heartbeat.LanguageXML.StringChroma()
+}
diff --git a/pkg/lexer/xml_test.go b/pkg/lexer/xml_test.go
new file mode 100644
index 00000000..c973ac86
--- /dev/null
+++ b/pkg/lexer/xml_test.go
@@ -0,0 +1,19 @@
+package lexer_test
+
+import (
+ "os"
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/lexer"
+
+ "github.com/stretchr/testify/assert"
+)
+
+// TestXML_AnalyseText checks that XML-looking input scores 0.45 — the
+// fixed confidence the analyser returns, chosen to rank below HTML.
+func TestXML_AnalyseText(t *testing.T) {
+ data, err := os.ReadFile("testdata/xml_doctype_html.xml")
+ assert.NoError(t, err)
+
+ l := lexer.XML{}.Lexer()
+
+ assert.Equal(t, float32(0.45), l.AnalyseText(string(data)))
+}
diff --git a/pkg/lexer/xquery.go b/pkg/lexer/xquery.go
new file mode 100644
index 00000000..7bfeaf82
--- /dev/null
+++ b/pkg/lexer/xquery.go
@@ -0,0 +1,32 @@
+package lexer
+
+import (
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// XQuery lexer.
+type XQuery struct{}
+
+// Lexer returns the lexer. No tokenisation rules are defined ("root" is
+// empty); the lexer only carries identification metadata used for
+// language detection.
+func (l XQuery) Lexer() chroma.Lexer {
+ return chroma.MustNewLexer(
+ &chroma.Config{
+ Name: l.Name(),
+ Aliases: []string{"xquery", "xqy", "xq", "xql", "xqm"},
+ Filenames: []string{"*.xqy", "*.xquery", "*.xq", "*.xql", "*.xqm"},
+ MimeTypes: []string{"text/xquery", "application/xquery"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+}
+
+// Name returns the name of the lexer.
+func (XQuery) Name() string {
+ return heartbeat.LanguageXQuery.StringChroma()
+}
diff --git a/pkg/lexer/xslt.go b/pkg/lexer/xslt.go
new file mode 100644
index 00000000..92ec63c5
--- /dev/null
+++ b/pkg/lexer/xslt.go
@@ -0,0 +1,46 @@
+package lexer
+
+import (
+ "strings"
+
+ "github.com/wakatime/wakatime-cli/pkg/heartbeat"
+ "github.com/wakatime/wakatime-cli/pkg/xml"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
+// XSLT lexer.
+type XSLT struct{}
+
+// Lexer returns the lexer.
+func (XSLT) Lexer() chroma.Lexer {
+ lexer := chroma.MustNewLexer(
+ &chroma.Config{
+ Name: "XSLT",
+ Aliases: []string{"xslt"},
+ // xpl is XProc
+ Filenames: []string{"*.xsl", "*.xslt", "*.xpl"},
+ MimeTypes: []string{"application/xsl+xml", "application/xslt+xml"},
+ },
+ func() chroma.Rules {
+ return chroma.Rules{
+ "root": {},
+ }
+ },
+ )
+
+ lexer.SetAnalyser(func(text string) float32 {
+ // NOTE(review): this span of the diff was corrupted (angle-bracketed
+ // text stripped, the tail of xslt.go fused with the head of
+ // pkg/xml/xml.go); reconstructed below — verify against upstream.
+ if xml.MatchString(text) && strings.Contains(text, "<xsl:") {
+ return 0.8
+ }
+
+ return 0
+ })
+
+ return lexer
+}
+
+// Name returns the name of the lexer.
+func (XSLT) Name() string {
+ return heartbeat.LanguageXSLT.StringChroma()
+}
diff --git a/pkg/xml/xml.go b/pkg/xml/xml.go
new file mode 100644
--- /dev/null
+++ b/pkg/xml/xml.go
+package xml
+
+import (
+ "regexp"
+
+ "github.com/wakatime/wakatime-cli/pkg/doctype"
+
+ "github.com/dlclark/regexp2"
+)
+
+var (
+ tagRe = regexp2.MustCompile(`(?s)<(.+?)(\s.*?)?>.*?</(.+?)>`, regexp2.None)
+ xmlDeclarationRe = regexp.MustCompile(`(?i)\s*<\?xml[^>]*\?>`)
+)
+
+// MatchString check if a text looks like XML: an XML declaration, a
+// doctype declaration, or at least one matched open/close tag pair.
+func MatchString(text string) bool {
+ // Check if a doctype exists or if we have some tags.
+ if xmlDeclarationRe.MatchString(text) {
+ return true
+ }
+
+ // NOTE(review): presumably an empty pattern matches any doctype —
+ // confirm against the doctype package. The error is deliberately
+ // ignored; any failure is treated as "no match".
+ if matched, _ := doctype.MatchString(text, ""); matched {
+ return true
+ }
+
+ // Only the first 1000 bytes are scanned for tags.
+ if len(text) > 1000 {
+ text = text[:1000]
+ }
+
+ // Error deliberately ignored: a regex failure counts as no match.
+ if matched, _ := tagRe.MatchString(text); matched {
+ return true
+ }
+
+ return false
+}
diff --git a/pkg/xml/xml_test.go b/pkg/xml/xml_test.go
new file mode 100644
index 00000000..7d2224f9
--- /dev/null
+++ b/pkg/xml/xml_test.go
@@ -0,0 +1,37 @@
+package xml_test
+
+import (
+ "testing"
+
+ "github.com/wakatime/wakatime-cli/pkg/xml"
+
+ "github.com/stretchr/testify/assert"
+)
+
+// TestXML_MatchString exercises the three detection paths of
+// xml.MatchString: XML declaration, matched tag pair, and a non-XML
+// fragment that matches neither.
+// NOTE(review): the Text literals were corrupted (markup-stripped) in this
+// diff; they are reconstructed from the case names — verify upstream.
+func TestXML_MatchString(t *testing.T) {
+ tests := map[string]struct {
+ Text string
+ Expected bool
+ }{
+ "simple xml match": {
+ Text: `<?xml version="1.0" encoding="UTF-8"?>`,
+ Expected: true,
+ },
+ "xmlns": {
+ Text: `<tag xmlns="http://example.com">abc</tag>`,
+ Expected: true,
+ },
+ "html": {
+ Text: `<html>`,
+ Expected: false,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ res := xml.MatchString(test.Text)
+ assert.Equal(t, test.Expected, res)
+ })
+ }
+}