packages/docutils/highlite


Source highlighter for programming or markup languages. Currently only a few languages are supported; other languages may be added. The interface supports one language nested in another.

You can use this to build your own syntax highlighting; see this example:

let code = """for x in $int.high: echo x.ord mod 2 == 0"""
var toknizr: GeneralTokenizer
initGeneralTokenizer(toknizr, code)
while true:
  getNextToken(toknizr, langNim)
  case toknizr.kind
  of gtEof: break  # End Of File (or string)
  of gtWhitespace:
    echo gtWhitespace # Maybe you want "visible" whitespace?
    echo substr(code, toknizr.start, toknizr.length + toknizr.start - 1)
  of gtOperator:
    echo gtOperator # Maybe you want operators to use a specific color?
    echo substr(code, toknizr.start, toknizr.length + toknizr.start - 1)
  # of gtSomeSymbol: syntaxHighlight("Comic Sans", "bold", "99px", "pink")
  else:
    echo toknizr.kind # All the kinds of tokens can be processed here.
    echo substr(code, toknizr.start, toknizr.length + toknizr.start - 1)
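
When the tokenizer is no longer needed, it can be released with deinitGeneralTokenizer (listed under Procs below):

deinitGeneralTokenizer(toknizr)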

The proc getSourceLanguage returns the language enum for a given name string:

for l in ["C", "jAvA", "Nim", "c#"]: echo getSourceLanguage(l)
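
The lookup is case-insensitive, so (assuming the alias tables listed under Consts below) this should print:

langC
langJava
langNim
langCsharp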

There is also a Cmd pseudo-language, a simple generic shell/command-line tokenizer (Unix shell/PowerShell/Windows Command Prompt): it has no escaping and no programming language constructs besides variable definitions at the beginning of a line. It supports these operators:

&  &&  |  ||  (  )  ''  ""  ;  # for comments

Instead of escaping, always use quotes: nimgrep --ext:'nim|nims' file.name, for example, shows how to pass |. Any argument that contains ., /, or \ is treated as a file or directory.
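
The tokenize proc listed under Procs below can be used to see how such a command line is split. A minimal sketch, assuming the module is importable as packages/docutils/highlite (the Nim repository layout); the exact token kinds printed are not guaranteed here:

import packages/docutils/highlite  # path as in the Nim repository layout

# Tokenize a shell-like command with the Cmd pseudo-language.
for (txt, kind) in tokenize("nimgrep --ext:'nim|nims' file.name", langCmd):
  echo kind, " -> '", txt, "'"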

In addition to Cmd there is also a Console language for displaying interactive sessions. Lines with a command should start with $; other lines are treated as program output.
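
A small sketch of tokenizing a session with langConsole (again using the tokenize proc below; one would expect kinds such as gtPrompt or gtProgramOutput, but the exact output is not guaranteed here):

import packages/docutils/highlite  # path as in the Nim repository layout

let session = """$ echo hello
hello"""
# Lines starting with '$' are commands, the rest is program output.
for (txt, kind) in tokenize(session, langConsole):
  echo kind, " -> '", txt, "'"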

Types

GeneralTokenizer = object of RootObj
  kind*: TokenClass
  start*, length*: int
  buf: cstring
  pos: int
  state: TokenClass
  lang: SourceLanguage
SourceLanguage = enum
  langNone, langNim, langCsharp, langC, langJava, langYaml, langPython, langCmd,
  langConsole
TokenClass = enum
  gtEof, gtNone, gtWhitespace, gtDecNumber, gtBinNumber, gtHexNumber,
  gtOctNumber, gtFloatNumber, gtIdentifier, gtKeyword, gtStringLit,
  gtLongStringLit, gtCharLit, gtEscapeSequence, gtOperator, gtPunctuation,
  gtComment, gtLongComment, gtRegularExpression, gtTagStart, gtTagEnd, gtKey,
  gtValue, gtRawData, gtAssembler, gtPreprocessor, gtDirective, gtCommand,
  gtRule, gtHyperlink, gtLabel, gtReference, gtPrompt, gtProgramOutput,
  gtProgram, gtOption, gtOther

Consts

sourceLanguageToAlpha: array[SourceLanguage, string] = ["none", "Nim", "csharp",
    "C", "Java", "Yaml", "Python", "Cmd", "Console"]
list of languages spelled with alphabetic characters
sourceLanguageToStr: array[SourceLanguage, string] = ["none", "Nim", "C#", "C",
    "Java", "Yaml", "Python", "Cmd", "Console"]
tokenClassToStr: array[TokenClass, string] = ["Eof", "None", "Whitespace",
    "DecNumber", "BinNumber", "HexNumber", "OctNumber", "FloatNumber",
    "Identifier", "Keyword", "StringLit", "LongStringLit", "CharLit",
    "EscapeSequence", "Operator", "Punctuation", "Comment", "LongComment",
    "RegularExpression", "TagStart", "TagEnd", "Key", "Value", "RawData",
    "Assembler", "Preprocessor", "Directive", "Command", "Rule", "Hyperlink",
    "Label", "Reference", "Prompt", "ProgramOutput", "program", "option",
    "Other"]
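
These tables map enum values to display names, for example (values taken from the arrays above):

import packages/docutils/highlite  # path as in the Nim repository layout

echo sourceLanguageToStr[langCsharp]   # C#
echo sourceLanguageToAlpha[langCsharp] # csharp
echo tokenClassToStr[gtKeyword]        # Keyword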

Procs

proc deinitGeneralTokenizer(g: var GeneralTokenizer) {....raises: [], tags: [].}
proc getNextToken(g: var GeneralTokenizer; lang: SourceLanguage) {....raises: [],
    tags: [].}
proc getSourceLanguage(name: string): SourceLanguage {....raises: [], tags: [].}
proc initGeneralTokenizer(g: var GeneralTokenizer; buf: cstring) {....raises: [],
    tags: [].}
proc initGeneralTokenizer(g: var GeneralTokenizer; buf: string) {....raises: [],
    tags: [].}
proc nimKeywordsSynchronizationCheck(keywordFileContent: string) {....raises: [],
    tags: [].}

Used to ensure that keywords are synchronized between this module and wherever the keyword content resides for the docs.

At the time of writing this was docs/keywords.txt.

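A hedged sketch of how such a check might be invoked, assuming the keyword file is still at docs/keywords.txt in a Nim source checkout (the path and call site are assumptions, not part of this module's documented usage):

import packages/docutils/highlite  # path as in the Nim repository layout

# Hypothetical path, per the note above; adjust if the file has moved.
nimKeywordsSynchronizationCheck(readFile("docs/keywords.txt"))
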
proc tokenize(text: string; lang: SourceLanguage): seq[(string, TokenClass)] {.
    ...raises: [], tags: [].}
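
A minimal usage sketch for tokenize, which runs the tokenizer over a whole string and collects (substring, TokenClass) pairs (import path assumed as in the Nim repository layout):

import packages/docutils/highlite

for (txt, kind) in tokenize("echo 1 + 1", langNim):
  echo kind, ": '", txt, "'"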