Skip to content
Snippets Groups Projects
Commit 5eee4fe0 authored by Ted Nyman
Browse files

Latest pygments

parent e87424cb
No related branches found
No related tags found
No related merge requests found
Showing
with 2964 additions and 379 deletions
Loading
Loading
@@ -14,7 +14,8 @@ import re
from pygments.lexer import RegexLexer, include, bygroups, using, \
this, combined, ExtendedRegexLexer
from pygments.token import Error, Punctuation, Literal, Token, \
Text, Comment, Operator, Keyword, Name, String, Number, Generic
Text, Comment, Operator, Keyword, Name, String, Number, Generic, \
Whitespace
from pygments.util import get_bool_opt
from pygments.lexers.web import HtmlLexer
 
Loading
Loading
@@ -35,7 +36,7 @@ __all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer',
'ECLLexer', 'UrbiscriptLexer', 'OpenEdgeLexer', 'BroLexer',
'MscgenLexer', 'KconfigLexer', 'VGLLexer', 'SourcePawnLexer',
'RobotFrameworkLexer', 'PuppetLexer', 'NSISLexer', 'RPMSpecLexer',
'CbmBasicV2Lexer', 'AutoItLexer']
'CbmBasicV2Lexer', 'AutoItLexer', 'RexxLexer']
 
 
class ECLLexer(RegexLexer):
Loading
Loading
@@ -245,7 +246,7 @@ class MOOCodeLexer(RegexLexer):
"""
name = 'MOOCode'
filenames = ['*.moo']
aliases = ['moocode']
aliases = ['moocode', 'moo']
mimetypes = ['text/x-moocode']
 
tokens = {
Loading
Loading
@@ -289,7 +290,7 @@ class SmalltalkLexer(RegexLexer):
"""
name = 'Smalltalk'
filenames = ['*.st']
aliases = ['smalltalk', 'squeak']
aliases = ['smalltalk', 'squeak', 'st']
mimetypes = ['text/x-smalltalk']
 
tokens = {
Loading
Loading
@@ -363,7 +364,7 @@ class SmalltalkLexer(RegexLexer):
include('literals'),
],
'afterobject' : [
(r'! !$', Keyword , '#pop'), # squeak chunk delimeter
(r'! !$', Keyword , '#pop'), # squeak chunk delimiter
include('whitespaces'),
(r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)',
Name.Builtin, '#pop'),
Loading
Loading
@@ -1397,8 +1398,6 @@ class RebolLexer(RegexLexer):
(r';.*\n', Comment),
(r'%"', Name.Decorator, 'stringFile'),
(r'%[^(\^{^")\s\[\]]+', Name.Decorator),
(r'<[a-zA-Z0-9:._-]*>', Name.Tag),
(r'<[^(<>\s")]+', Name.Tag, 'tag'),
(r'[+-]?([a-zA-Z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
(r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
(r'\d+\-[0-9a-zA-Z]+\-\d+(\/\d+\:\d+(\:\d+)?'
Loading
Loading
@@ -1415,6 +1414,8 @@ class RebolLexer(RegexLexer):
(r'comment\s', Comment, 'comment'),
(r'/[^(\^{^")\s/[\]]*', Name.Attribute),
(r'([^(\^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
(r'<[a-zA-Z0-9:._-]*>', Name.Tag),
(r'<[^(<>\s")]+', Name.Tag, 'tag'),
(r'([^(\^{^")\s]+)', Text),
],
'string': [
Loading
Loading
@@ -1961,11 +1962,11 @@ class AsymptoteLexer(RegexLexer):
from pygments.lexers._asybuiltins import ASYFUNCNAME, ASYVARNAME
for index, token, value in \
RegexLexer.get_tokens_unprocessed(self, text):
if token is Name and value in ASYFUNCNAME:
token = Name.Function
elif token is Name and value in ASYVARNAME:
token = Name.Variable
yield index, token, value
if token is Name and value in ASYFUNCNAME:
token = Name.Function
elif token is Name and value in ASYVARNAME:
token = Name.Variable
yield index, token, value
 
 
class PostScriptLexer(RegexLexer):
Loading
Loading
@@ -1979,7 +1980,7 @@ class PostScriptLexer(RegexLexer):
*New in Pygments 1.4.*
"""
name = 'PostScript'
aliases = ['postscript']
aliases = ['postscript', 'postscr']
filenames = ['*.ps', '*.eps']
mimetypes = ['application/postscript']
 
Loading
Loading
@@ -2067,7 +2068,7 @@ class AutohotkeyLexer(RegexLexer):
*New in Pygments 1.4.*
"""
name = 'autohotkey'
aliases = ['ahk']
aliases = ['ahk', 'autohotkey']
filenames = ['*.ahk', '*.ahkl']
mimetypes = ['text/x-autohotkey']
 
Loading
Loading
@@ -2352,7 +2353,7 @@ class ProtoBufLexer(RegexLexer):
"""
 
name = 'Protocol Buffer'
aliases = ['protobuf']
aliases = ['protobuf', 'proto']
filenames = ['*.proto']
 
tokens = {
Loading
Loading
@@ -2839,8 +2840,8 @@ class BroLexer(RegexLexer):
(r'\\\n', Text),
# Keywords
(r'(add|alarm|break|case|const|continue|delete|do|else|enum|event'
r'|export|for|function|if|global|local|module|next'
r'|of|print|redef|return|schedule|type|when|while)\b', Keyword),
r'|export|for|function|if|global|hook|local|module|next'
r'|of|print|redef|return|schedule|switch|type|when|while)\b', Keyword),
(r'(addr|any|bool|count|counter|double|file|int|interval|net'
r'|pattern|port|record|set|string|subnet|table|time|timer'
r'|vector)\b', Keyword.Type),
Loading
Loading
@@ -3306,7 +3307,7 @@ class NSISLexer(RegexLexer):
tokens = {
'root': [
(r'[;\#].*\n', Comment),
(r"'.*'", String.Single),
(r"'.*?'", String.Single),
(r'"', String.Double, 'str_double'),
(r'`', String.Backtick, 'str_backtick'),
include('macro'),
Loading
Loading
@@ -3457,7 +3458,7 @@ class RPMSpecLexer(RegexLexer):
include('macro'),
(r'(?i)^(Name|Version|Release|Epoch|Summary|Group|License|Packager|'
r'Vendor|Icon|URL|Distribution|Prefix|Patch[0-9]*|Source[0-9]*|'
r'Requires\(?[a-z]*\)?|[a-z]+Req|Obsoletes|Provides|Conflicts|'
r'Requires\(?[a-z]*\)?|[a-z]+Req|Obsoletes|Suggests|Provides|Conflicts|'
r'Build[a-z]+|[a-z]+Arch|Auto[a-z]+)(:)(.*)$',
bygroups(Generic.Heading, Punctuation, using(this))),
(r'^%description', Name.Decorator, 'description'),
Loading
Loading
@@ -3467,7 +3468,7 @@ class RPMSpecLexer(RegexLexer):
r'make(?:install)|ghost|patch[0-9]+|find_lang|exclude|verify)',
Keyword),
include('interpol'),
(r"'.*'", String.Single),
(r"'.*?'", String.Single),
(r'"', String.Double, 'string'),
(r'.', Text),
],
Loading
Loading
@@ -3624,7 +3625,7 @@ class AutoItLexer(RegexLexer):
(r'[a-zA-Z_#@$][a-zA-Z0-9_#@$]*', Name),
(r'\\|\'', Text),
(r'\`([\,\%\`abfnrtv\-\+;])', String.Escape),
(r'_\n', Text), # Line continuation
(r'_\n', Text), # Line continuation
include('garbage'),
],
'commands': [
Loading
Loading
@@ -3665,3 +3666,113 @@ class AutoItLexer(RegexLexer):
(r'[^\S\n]', Text),
],
}
class RexxLexer(RegexLexer):
"""
`Rexx <http://www.rexxinfo.org/>`_ is a scripting language available for
a wide range of different platforms with its roots found on mainframe
systems. It is popular for I/O- and data based tasks and can act as glue
language to bind different applications together.
*New in Pygments 1.7.*
"""
name = 'Rexx'
aliases = ['rexx', 'ARexx', 'arexx']
filenames = ['*.rexx', '*.rex', '*.rx', '*.arexx']
mimetypes = ['text/x-rexx']
flags = re.IGNORECASE
tokens = {
'root': [
(r'\s', Whitespace),
(r'/\*', Comment.Multiline, 'comment'),
(r'"', String, 'string_double'),
(r"'", String, 'string_single'),
(r'[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?', Number),
(r'([a-z_][a-z0-9_]*)(\s*)(:)(\s*)(procedure)\b',
bygroups(Name.Function, Whitespace, Operator, Whitespace,
Keyword.Declaration)),
(r'([a-z_][a-z0-9_]*)(\s*)(:)',
bygroups(Name.Label, Whitespace, Operator)),
include('function'),
include('keyword'),
include('operator'),
(r'[a-z_][a-z0-9_]*', Text),
],
'function': [
(r'(abbrev|abs|address|arg|b2x|bitand|bitor|bitxor|c2d|c2x|'
r'center|charin|charout|chars|compare|condition|copies|d2c|'
r'd2x|datatype|date|delstr|delword|digits|errortext|form|'
r'format|fuzz|insert|lastpos|left|length|linein|lineout|lines|'
r'max|min|overlay|pos|queued|random|reverse|right|sign|'
r'sourceline|space|stream|strip|substr|subword|symbol|time|'
r'trace|translate|trunc|value|verify|word|wordindex|'
r'wordlength|wordpos|words|x2b|x2c|x2d|xrange)(\s*)(\()',
bygroups(Name.Builtin, Whitespace, Operator)),
],
'keyword': [
(r'(address|arg|by|call|do|drop|else|end|exit|for|forever|if|'
r'interpret|iterate|leave|nop|numeric|off|on|options|parse|'
r'pull|push|queue|return|say|select|signal|to|then|trace|until|'
r'while)\b', Keyword.Reserved),
],
'operator': [
(ur'(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||'
ur'&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|'
ur'¬>>|¬>|¬|\.|,)', Operator),
],
'string_double': [
(r'[^"\n]+', String),
(r'""', String),
(r'"', String, '#pop'),
(r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
],
'string_single': [
(r'[^\'\n]', String),
(r'\'\'', String),
(r'\'', String, '#pop'),
(r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
],
'comment': [
(r'[^*]+', Comment.Multiline),
(r'\*/', Comment.Multiline, '#pop'),
(r'\*', Comment.Multiline),
]
}
_c = lambda s: re.compile(s, re.MULTILINE)
_ADDRESS_COMMAND_PATTERN = _c(r'^\s*address\s+command\b')
_ADDRESS_PATTERN = _c(r'^\s*address\s+')
_DO_WHILE_PATTERN = _c(r'^\s*do\s+while\b')
_IF_THEN_DO_PATTERN = _c(r'^\s*if\b.+\bthen\s+do\s*$')
_PROCEDURE_PATTERN = _c(r'^\s*([a-z_][a-z0-9_]*)(\s*)(:)(\s*)(procedure)\b')
_ELSE_DO_PATTERN = _c(r'\belse\s+do\s*$')
_PARSE_ARG_PATTERN = _c(r'^\s*parse\s+(upper\s+)?(arg|value)\b')
PATTERNS_AND_WEIGHTS = (
(_ADDRESS_COMMAND_PATTERN, 0.2),
(_ADDRESS_PATTERN, 0.05),
(_DO_WHILE_PATTERN, 0.1),
(_ELSE_DO_PATTERN, 0.1),
(_IF_THEN_DO_PATTERN, 0.1),
(_PROCEDURE_PATTERN, 0.5),
(_PARSE_ARG_PATTERN, 0.2),
)
def analyse_text(text):
"""
Check for inital comment and patterns that distinguish Rexx from other
C-like languages.
"""
if re.search(r'/\*\**\s*rexx', text, re.IGNORECASE):
# Header matches MVS Rexx requirements, this is certainly a Rexx
# script.
return 1.0
elif text.startswith('/*'):
# Header matches general Rexx requirements; the source code might
# still be any language using C comments such as C++, C# or Java.
lowerText = text.lower()
result = sum(weight
for (pattern, weight) in RexxLexer.PATTERNS_AND_WEIGHTS
if pattern.search(lowerText)) + 0.01
return min(result, 1.0)
Loading
Loading
@@ -67,9 +67,11 @@ class BashLexer(RegexLexer):
'data': [
(r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
(r"(?s)\$?'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
(r';', Text),
(r';', Punctuation),
(r'&', Punctuation),
(r'\|', Punctuation),
(r'\s+', Text),
(r'[^=\s\[\]{}()$"\'`\\<]+', Text),
(r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
(r'\d+(?= |\Z)', Number),
(r'\$#?(\w+|.)', Name.Variable),
(r'<', Text),
Loading
Loading
@@ -99,7 +101,10 @@ class BashLexer(RegexLexer):
}
 
def analyse_text(text):
return shebang_matches(text, r'(ba|z|)sh')
if shebang_matches(text, r'(ba|z|)sh'):
return 1
if text.startswith('$ '):
return 0.2
 
 
class BashSessionLexer(Lexer):
Loading
Loading
@@ -206,7 +211,7 @@ class BatchLexer(RegexLexer):
*New in Pygments 0.7.*
"""
name = 'Batchfile'
aliases = ['bat']
aliases = ['bat', 'dosbatch', 'winbatch']
filenames = ['*.bat', '*.cmd']
mimetypes = ['application/x-dos-batch']
 
Loading
Loading
@@ -329,8 +334,8 @@ class PowerShellLexer(RegexLexer):
*New in Pygments 1.5.*
"""
name = 'PowerShell'
aliases = ['powershell', 'posh', 'ps1']
filenames = ['*.ps1']
aliases = ['powershell', 'posh', 'ps1', 'psm1']
filenames = ['*.ps1','*.psm1']
mimetypes = ['text/x-powershell']
 
flags = re.DOTALL | re.IGNORECASE | re.MULTILINE
Loading
Loading
@@ -342,7 +347,7 @@ class PowerShellLexer(RegexLexer):
'dynamicparam do default continue cmdletbinding break begin alias \\? '
'% #script #private #local #global mandatory parametersetname position '
'valuefrompipeline valuefrompipelinebypropertyname '
'valuefromremainingarguments helpmessage try catch').split()
'valuefromremainingarguments helpmessage try catch throw').split()
 
operators = (
'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
Loading
Loading
@@ -368,12 +373,15 @@ class PowerShellLexer(RegexLexer):
 
tokens = {
'root': [
# we need to count pairs of parentheses for correct highlight
# of '$(...)' blocks in strings
(r'\(', Punctuation, 'child'),
(r'\s+', Text),
(r'^(\s*#[#\s]*)(\.(?:%s))([^\n]*$)' % '|'.join(commenthelp),
bygroups(Comment, String.Doc, Comment)),
(r'#[^\n]*?$', Comment),
(r'(&lt;|<)#', Comment.Multiline, 'multline'),
(r'@"\n.*?\n"@', String.Heredoc),
(r'@"\n', String.Heredoc, 'heredoc-double'),
(r"@'\n.*?\n'@", String.Heredoc),
# escaped syntax
(r'`[\'"$@-]', Punctuation),
Loading
Loading
@@ -387,7 +395,11 @@ class PowerShellLexer(RegexLexer):
(r'\[[a-z_\[][a-z0-9_. `,\[\]]*\]', Name.Constant), # .net [type]s
(r'-[a-z_][a-z0-9_]*', Name),
(r'\w+', Name),
(r'[.,{}\[\]$()=+*/\\&%!~?^`|<>-]', Punctuation),
(r'[.,;@{}\[\]$()=+*/\\&%!~?^`|<>-]|::', Punctuation),
],
'child': [
(r'\)', Punctuation, '#pop'),
include('root'),
],
'multline': [
(r'[^#&.]+', Comment.Multiline),
Loading
Loading
@@ -396,15 +408,17 @@ class PowerShellLexer(RegexLexer):
(r'[#&.]', Comment.Multiline),
],
'string': [
(r"`[0abfnrtv'\"\$]", String.Escape),
(r'[^$`"]+', String.Double),
(r'\$\(', String.Interpol, 'interpol'),
(r'`"|""', String.Double),
(r'\$\(', Punctuation, 'child'),
(r'""', String.Double),
(r'[`$]', String.Double),
(r'"', String.Double, '#pop'),
],
'interpol': [
(r'[^$)]+', String.Interpol),
(r'\$\(', String.Interpol, '#push'),
(r'\)', String.Interpol, '#pop'),
'heredoc-double': [
(r'\n"@', String.Heredoc, '#pop'),
(r'\$\(', Punctuation, 'child'),
(r'[^@\n]+"]', String.Heredoc),
(r".", String.Heredoc),
]
}
Loading
Loading
@@ -375,7 +375,7 @@ class SqlLexer(RegexLexer):
r'DIAGNOSTICS|DICTIONARY|DISCONNECT|DISPATCH|DISTINCT|DO|'
r'DOMAIN|DROP|DYNAMIC|DYNAMIC_FUNCTION|DYNAMIC_FUNCTION_CODE|'
r'EACH|ELSE|ENCODING|ENCRYPTED|END|END-EXEC|EQUALS|ESCAPE|EVERY|'
r'EXCEPT|ESCEPTION|EXCLUDING|EXCLUSIVE|EXEC|EXECUTE|EXISTING|'
r'EXCEPTION|EXCEPT|EXCLUDING|EXCLUSIVE|EXEC|EXECUTE|EXISTING|'
r'EXISTS|EXPLAIN|EXTERNAL|EXTRACT|FALSE|FETCH|FINAL|FIRST|FOR|'
r'FORCE|FOREIGN|FORTRAN|FORWARD|FOUND|FREE|FREEZE|FROM|FULL|'
r'FUNCTION|G|GENERAL|GENERATED|GET|GLOBAL|GO|GOTO|GRANT|GRANTED|'
Loading
Loading
Loading
Loading
@@ -766,7 +766,7 @@ class CheetahHtmlLexer(DelegatingLexer):
"""
 
name = 'HTML+Cheetah'
aliases = ['html+cheetah', 'html+spitfire']
aliases = ['html+cheetah', 'html+spitfire', 'htmlcheetah']
mimetypes = ['text/html+cheetah', 'text/html+spitfire']
 
def __init__(self, **options):
Loading
Loading
@@ -1258,7 +1258,7 @@ class HtmlDjangoLexer(DelegatingLexer):
"""
 
name = 'HTML+Django/Jinja'
aliases = ['html+django', 'html+jinja']
aliases = ['html+django', 'html+jinja', 'htmldjango']
alias_filenames = ['*.html', '*.htm', '*.xhtml']
mimetypes = ['text/html+django', 'text/html+jinja']
 
Loading
Loading
@@ -1657,7 +1657,7 @@ class LassoHtmlLexer(DelegatingLexer):
super(LassoHtmlLexer, self).__init__(HtmlLexer, LassoLexer, **options)
 
def analyse_text(text):
rv = LassoLexer.analyse_text(text)
rv = LassoLexer.analyse_text(text) - 0.01
if re.search(r'<\w+>', text, re.I):
rv += 0.2
if html_doctype_matches(text):
Loading
Loading
@@ -1683,9 +1683,9 @@ class LassoXmlLexer(DelegatingLexer):
super(LassoXmlLexer, self).__init__(XmlLexer, LassoLexer, **options)
 
def analyse_text(text):
rv = LassoLexer.analyse_text(text)
rv = LassoLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.5
rv += 0.4
return rv
 
 
Loading
Loading
@@ -1707,8 +1707,8 @@ class LassoCssLexer(DelegatingLexer):
super(LassoCssLexer, self).__init__(CssLexer, LassoLexer, **options)
 
def analyse_text(text):
rv = LassoLexer.analyse_text(text)
if re.search(r'\w+:.+;', text):
rv = LassoLexer.analyse_text(text) - 0.05
if re.search(r'\w+:.+?;', text):
rv += 0.1
if 'padding:' in text:
rv += 0.1
Loading
Loading
@@ -1736,7 +1736,7 @@ class LassoJavascriptLexer(DelegatingLexer):
**options)
 
def analyse_text(text):
rv = LassoLexer.analyse_text(text)
rv = LassoLexer.analyse_text(text) - 0.05
if 'function' in text:
rv += 0.2
return rv
Loading
Loading
@@ -25,7 +25,7 @@ __all__ = ['IniLexer', 'PropertiesLexer', 'SourcesListLexer', 'BaseMakefileLexer
'RstLexer', 'VimLexer', 'GettextLexer', 'SquidConfLexer',
'DebianControlLexer', 'DarcsPatchLexer', 'YamlLexer',
'LighttpdConfLexer', 'NginxConfLexer', 'CMakeLexer', 'HttpLexer',
'PyPyLogLexer', 'RegeditLexer', 'HxmlLexer']
'PyPyLogLexer', 'RegeditLexer', 'HxmlLexer', 'EbnfLexer']
 
 
class IniLexer(RegexLexer):
Loading
Loading
@@ -34,7 +34,7 @@ class IniLexer(RegexLexer):
"""
 
name = 'INI'
aliases = ['ini', 'cfg']
aliases = ['ini', 'cfg', 'dosini']
filenames = ['*.ini', '*.cfg']
mimetypes = ['text/x-ini']
 
Loading
Loading
@@ -106,7 +106,7 @@ class PropertiesLexer(RegexLexer):
"""
 
name = 'Properties'
aliases = ['properties']
aliases = ['properties', 'jproperties']
filenames = ['*.properties']
mimetypes = ['text/x-java-properties']
 
Loading
Loading
@@ -128,7 +128,7 @@ class SourcesListLexer(RegexLexer):
"""
 
name = 'Debian Sourcelist'
aliases = ['sourceslist', 'sources.list']
aliases = ['sourceslist', 'sources.list', 'debsources']
filenames = ['sources.list']
mimetype = ['application/x-debian-sourceslist']
 
Loading
Loading
@@ -1053,7 +1053,7 @@ class DebianControlLexer(RegexLexer):
*New in Pygments 0.9.*
"""
name = 'Debian Control file'
aliases = ['control']
aliases = ['control', 'debcontrol']
filenames = ['control']
 
tokens = {
Loading
Loading
@@ -1631,7 +1631,7 @@ class CMakeLexer(RegexLexer):
# r'VTK_MAKE_INSTANTIATOR|VTK_WRAP_JAVA|VTK_WRAP_PYTHON|'
# r'VTK_WRAP_TCL|WHILE|WRITE_FILE|'
# r'COUNTARGS)\b', Name.Builtin, 'args'),
(r'\b([A-Za-z_]+)([ \t]*)(\()', bygroups(Name.Builtin, Text,
(r'\b(\w+)([ \t]*)(\()', bygroups(Name.Builtin, Text,
Punctuation), 'args'),
include('keywords'),
include('ws')
Loading
Loading
@@ -1709,12 +1709,12 @@ class HttpLexer(RegexLexer):
 
tokens = {
'root': [
(r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE)( +)([^ ]+)( +)'
r'(HTTPS?)(/)(1\.[01])(\r?\n|$)',
(r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH)( +)([^ ]+)( +)'
r'(HTTP)(/)(1\.[01])(\r?\n|$)',
bygroups(Name.Function, Text, Name.Namespace, Text,
Keyword.Reserved, Operator, Number, Text),
'headers'),
(r'(HTTPS?)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)',
(r'(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)',
bygroups(Keyword.Reserved, Operator, Number, Text, Number,
Text, Name.Exception, Text),
'headers'),
Loading
Loading
@@ -1841,3 +1841,53 @@ class HxmlLexer(RegexLexer):
(r'#.*', Comment.Single)
]
}
class EbnfLexer(RegexLexer):
    """
    Lexer for `ISO/IEC 14977 EBNF
    <http://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form>`_
    grammars.

    *New in Pygments 1.7.*
    """

    name = 'EBNF'
    aliases = ['ebnf']
    filenames = ['*.ebnf']
    mimetypes = ['text/x-ebnf']

    tokens = {
        # Outside a production only meta-identifiers and comments appear;
        # '=' starts the right-hand side of a rule.
        'root': [
            include('whitespace'),
            include('comment_start'),
            include('identifier'),
            (r'=', Operator, 'production'),
        ],
        # Right-hand side of a rule, terminated by ';'.
        'production': [
            include('whitespace'),
            include('comment_start'),
            include('identifier'),
            (r'"[^"]*"', String.Double),
            (r"'[^']*'", String.Single),
            # '?...?' is an ISO 14977 "special sequence"
            # (implementation-defined extension).
            (r'(\?[^?]*\?)', Name.Entity),
            (r'[\[\]{}(),|]', Punctuation),
            (r'-', Operator),  # the 'except' operator
            (r';', Punctuation, '#pop'),
        ],
        'whitespace': [
            (r'\s+', Text),
        ],
        # Kept as a separate state so 'comment' can include it,
        # which makes '(* ... *)' comments nest correctly.
        'comment_start': [
            (r'\(\*', Comment.Multiline, 'comment'),
        ],
        'comment': [
            (r'[^*)]', Comment.Multiline),
            include('comment_start'),  # nested comment
            (r'\*\)', Comment.Multiline, '#pop'),
            (r'[*)]', Comment.Multiline),
        ],
        # Meta-identifiers may contain letters, digits, blanks and dashes.
        'identifier': [
            (r'([a-zA-Z][a-zA-Z0-9 \-]*)', Keyword),
        ],
    }
This diff is collapsed.
# -*- coding: utf-8 -*-
"""
    pygments.modeline
    ~~~~~~~~~~~~~~~~~

    A simple modeline parser (based on pymodeline).

    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import re

__all__ = ['get_filetype_from_buffer']

# Matches vi/vim/ex modelines such as ``vim: set ft=python:`` and captures
# the filetype value.
modeline_re = re.compile(r'''
    (?: vi | vim | ex ) (?: [<=>]? \d* )? :
    .* (?: ft | filetype | syn | syntax ) = ( [^:\s]+ )
''', re.VERBOSE)


def get_filetype_from_line(line):
    """Return the filetype named in a modeline on *line*, or None."""
    m = modeline_re.search(line)
    if m:
        return m.group(1)


def get_filetype_from_buffer(buf, max_lines=5):
    """
    Scan the buffer for modelines and return filetype if one is found.

    Editors only honour modelines near the top or bottom of a file, so
    only the last and first *max_lines* lines are examined.
    """
    lines = buf.splitlines()
    # Bottom of the file first, scanning upwards.
    for line in lines[-1:-max_lines-1:-1]:
        ret = get_filetype_from_line(line)
        if ret:
            return ret
    # Then the top of the file.  The previous slice ``lines[max_lines:0:-1]``
    # stopped short of index 0 and therefore never saw a modeline on the
    # very first line; iterate the head inclusively instead.
    for line in lines[:max_lines]:
        ret = get_filetype_from_line(line)
        if ret:
            return ret
    return None
This diff is collapsed.
# Demo script: renders an HTML table describing every parameter set of a
# command.  Shows advanced-function features ([CmdletBinding], parameter
# validation) and nested here-strings with $() subexpression interpolation.
# NOTE: whitespace inside the @"..."@ here-strings is literal output, so
# those lines must not be re-indented.
function Get-CommandDefinitionHtml {
# this tells powershell to allow advanced features,
# like the [validatenotnullorempty()] attribute below.
[CmdletBinding()]
param(
[ValidateNotNullOrEmpty()]
[string]$name
)
# Resolve the command object whose parameter sets we will render.
$command = get-command $name
# Look mom! I'm a cmdlet!
$PSCmdlet.WriteVerbose("Dumping HTML for " + $command)
@"
<html>
<head>
<title>$($command.name)</title>
</head>
<body>
<table border="1">
$(
$command.parametersets | % {
@"
<tr>
<td>$($_.name)</td>
<td>
<table border="1">
<tr>
<th colspan="8">Parameters</th>
$(
$count = 0
$_.parameters | % {
if (0 -eq ($count % 8)) {
@'
</tr>
<tr>
'@
}
@"
<td>$($_.name)</td>
"@
$count++
}
)
</tr>
</table>
</td>
</tr>
"@
}
)
</table>
</body>
</html>
"@
}
# NOTE(review): assumes 'get-item' resolves and the current directory is
# writable; the generated page lands in out.html.
Get-CommandDefinitionHtml get-item > out.html
# show in browser
invoke-item out.html
This diff is collapsed.
This diff is collapsed.
# Grammar role for q (single-quote) string semantics: the stopper is a
# single quote, and only a handful of backslash sequences are recognised.
role q {
    token stopper { \' }

    token escape:sym<\\> { <sym> <item=.backslash> }

    # \q... re-enters the main language's quote parser.
    token backslash:sym<qq> { <?before 'q'> <quote=.LANG('MAIN','quote')> }
    token backslash:sym<\\> { <text=.sym> }
    token backslash:sym<stopper> { <text=.stopper> }

    # Any other backslashed character is passed through unchanged.
    token backslash:sym<miscq> { {} . }

    # Quote adverbs cannot be applied once inside the construct.
    method tweak_q($v) { self.panic("Too late for :q") }
    method tweak_qq($v) { self.panic("Too late for :qq") }
}
# Grammar role for qq (double-quote) string semantics.  It composes the
# roles b1 c1 s1 a1 h1 f1 — presumably the individual escape-sequence
# roles defined elsewhere in this grammar; verify against the full file.
role qq does b1 does c1 does s1 does a1 does h1 does f1 {
    token stopper { \" }

    # A word character after '\' that no composed role handled is an error.
    token backslash:sym<unrec> { {} (\w) { self.throw_unrecog_backslash_seq: $/[0].Str } }
    # Backslash before a non-word character yields that character.
    token backslash:sym<misc> { \W }

    # Quote adverbs cannot be applied once inside the construct.
    method tweak_q($v) { self.panic("Too late for :q") }
    method tweak_qq($v) { self.panic("Too late for :qq") }
}
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
\documentclass{article}
% this is a LaTeX comment
% The agda package typesets literate Agda: the body of the `code'
% environment below is real Agda source, not verbatim text.
\usepackage{agda}
\begin{document}
Here's how you can define \emph{RGB} colors in Agda:
\begin{code}
module example where
open import Data.Fin
open import Data.Nat
data Color : Set where
  RGB : Fin 256 → Fin 256 → Fin 256 → Color
\end{code}
\end{document}
\ No newline at end of file
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment