From 182532f69052c75d3621ff214ede196cbeed16e7 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Mon, 25 Jan 2021 12:54:13 +0100 Subject: [PATCH 01/35] Use wheels embedded in virtualenv.pyz Signed-off-by: Philippe Ombredanne --- configure | 2 +- configure.bat | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/configure b/configure index 8f3a68e..d41bf8e 100755 --- a/configure +++ b/configure @@ -27,7 +27,7 @@ function setup { # create a virtualenv on Python mkdir -p $CONFIGURE_ROOT_DIR/tmp wget -O $CONFIGURE_ROOT_DIR/tmp/virtualenv.pyz https://bootstrap.pypa.io/virtualenv.pyz - $PYTHON_EXE $CONFIGURE_ROOT_DIR/tmp/virtualenv.pyz $CONFIGURE_ROOT_DIR/tmp + $PYTHON_EXE $CONFIGURE_ROOT_DIR/tmp/virtualenv.pyz --wheel embed --pip embed --setuptools embed --seeder pip $CONFIGURE_ROOT_DIR/tmp source $CONFIGURE_ROOT_DIR/tmp/bin/activate $CONFIGURE_ROOT_DIR/tmp/bin/pip install --upgrade pip virtualenv setuptools wheel } diff --git a/configure.bat b/configure.bat index f03ea07..ee68f9e 100644 --- a/configure.bat +++ b/configure.bat @@ -87,7 +87,7 @@ set PYTHONDONTWRITEBYTECODE=1 call mkdir "%CFG_ROOT_DIR%tmp" call curl -o "%CFG_ROOT_DIR%tmp\virtualenv.pyz" https://bootstrap.pypa.io/virtualenv.pyz -call %PYTHON_EXECUTABLE% "%CFG_ROOT_DIR%tmp\virtualenv.pyz" "%CFG_ROOT_DIR%tmp" +call %PYTHON_EXECUTABLE% "%CFG_ROOT_DIR%tmp\virtualenv.pyz" --wheel embed --pip embed --setuptools embed --seeder pip "%CFG_ROOT_DIR%tmp" call "%CFG_ROOT_DIR%tmp\Scripts\activate" call "%CFG_ROOT_DIR%tmp\Scripts\pip" install --upgrade pip virtualenv setuptools wheel call "%CFG_ROOT_DIR%tmp\Scripts\pip" install -e .[testing] From cd4e87beb91ea5e9380dfeb19c3530c0a92ff192 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Mon, 25 Jan 2021 12:56:18 +0100 Subject: [PATCH 02/35] Do not force an upgrade on virtualenv.pyz embeds Signed-off-by: Philippe Ombredanne --- configure | 1 - configure.bat | 1 - 2 files changed, 2 deletions(-) diff --git a/configure b/configure index 
d41bf8e..78e7498 100755 --- a/configure +++ b/configure @@ -29,7 +29,6 @@ function setup { wget -O $CONFIGURE_ROOT_DIR/tmp/virtualenv.pyz https://bootstrap.pypa.io/virtualenv.pyz $PYTHON_EXE $CONFIGURE_ROOT_DIR/tmp/virtualenv.pyz --wheel embed --pip embed --setuptools embed --seeder pip $CONFIGURE_ROOT_DIR/tmp source $CONFIGURE_ROOT_DIR/tmp/bin/activate - $CONFIGURE_ROOT_DIR/tmp/bin/pip install --upgrade pip virtualenv setuptools wheel } diff --git a/configure.bat b/configure.bat index ee68f9e..00cb101 100644 --- a/configure.bat +++ b/configure.bat @@ -89,7 +89,6 @@ call mkdir "%CFG_ROOT_DIR%tmp" call curl -o "%CFG_ROOT_DIR%tmp\virtualenv.pyz" https://bootstrap.pypa.io/virtualenv.pyz call %PYTHON_EXECUTABLE% "%CFG_ROOT_DIR%tmp\virtualenv.pyz" --wheel embed --pip embed --setuptools embed --seeder pip "%CFG_ROOT_DIR%tmp" call "%CFG_ROOT_DIR%tmp\Scripts\activate" -call "%CFG_ROOT_DIR%tmp\Scripts\pip" install --upgrade pip virtualenv setuptools wheel call "%CFG_ROOT_DIR%tmp\Scripts\pip" install -e .[testing] @rem Return a proper return code on failure From 51510cbdb2f2d066d6652695aed40175a37d88a4 Mon Sep 17 00:00:00 2001 From: Steven Esser Date: Thu, 11 Feb 2021 15:56:55 -0500 Subject: [PATCH 03/35] Fix .gitattributes Signed-off-by: Steven Esser --- .gitattributes | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitattributes b/.gitattributes index c446d38..b79df5c 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,2 +1,2 @@ # Ignore all Git auto CR/LF line endings conversions -* binary +* -text From e7efce4f4334de910e61751053bac07bbaab3ed0 Mon Sep 17 00:00:00 2001 From: Pierre Tardy Date: Wed, 31 Mar 2021 15:44:33 +0200 Subject: [PATCH 04/35] failover to python-magic detection if vendored library not present This allows scancode to work on platforms not supported by typecode-libmagic e.g. 
M1 Macs or windows WSL Most of this code is from https://github.com/ahupp/python-magic/blob/e0ccc6d/magic/loader.py by Adam Hupp, MIT License, compatible with typecode's Apache Signed-off-by: Pierre Tardy --- src/typecode/magic2.py | 59 +++++++++++++++++++++++++++++++++---- tests/test_libmagic_load.py | 27 +++++++++++++++++ 2 files changed, 81 insertions(+), 5 deletions(-) create mode 100644 tests/test_libmagic_load.py diff --git a/src/typecode/magic2.py b/src/typecode/magic2.py index d23aef7..b9a16b7 100644 --- a/src/typecode/magic2.py +++ b/src/typecode/magic2.py @@ -43,6 +43,7 @@ import ctypes import os +import warnings from commoncode import command from plugincode.location_provider import get_location @@ -79,6 +80,43 @@ TYPECODE_LIBMAGIC_DATABASE = 'typecode.libmagic.db' +def load_lib_failover(): + # loader from python-magic + libmagic = None + # Let's try to find magic or magic1 + dll = (ctypes.util.find_library('magic') + or ctypes.util.find_library('magic1') + or ctypes.util.find_library('cygmagic-1') + or ctypes.util.find_library('libmagic-1') + or ctypes.util.find_library('msys-magic-1') # for MSYS2 + ) + # necessary because find_library returns None if it doesn't find the library + if dll: + libmagic = ctypes.CDLL(dll) + + if not libmagic or not libmagic._name: + windows_dlls = ['magic1.dll', 'cygmagic-1.dll', 'libmagic-1.dll', 'msys-magic-1.dll'] + platform_to_lib = {'darwin': ['/opt/local/lib/libmagic.dylib', + '/usr/local/lib/libmagic.dylib'] + + # Assumes there will only be one version installed + glob.glob('/usr/local/Cellar/libmagic/*/lib/libmagic.dylib'), # flake8:noqa + 'win32': windows_dlls, + 'cygwin': windows_dlls, + 'linux': ['libmagic.so.1'], + # fallback for some Linuxes (e.g. 
Alpine) where library search does not work # flake8:noqa + } + platform = 'linux' if sys.platform.startswith('linux') else sys.platform + for dll in platform_to_lib.get(platform, []): + try: + libmagic = ctypes.CDLL(dll) + break + except OSError: + pass + + if not libmagic or not libmagic._name: + return None + return libmagic + def load_lib(): """ Return the loaded libmagic shared library object from plugin-provided path. @@ -86,11 +124,15 @@ def load_lib(): dll = get_location(TYPECODE_LIBMAGIC_DLL) libdir = get_location(TYPECODE_LIBMAGIC_LIBDIR) if not (dll and libdir) or not os.path.isfile(dll) or not os.path.isdir(libdir): - raise Exception( - 'CRITICAL: libmagic DLL and is magic database are not installed. ' - 'Unable to continue: you need to install a valid typecode-libmagic ' - 'plugin with a valid and proper libmagic and magic DB available.' - ) + ret = load_lib_failover() + if ret is None: + raise ImportError( + 'CRITICAL: libmagic DLL and is magic database are not installed. ' + 'Unable to continue: you need to install a valid typecode-libmagic ' + 'plugin with a valid and proper libmagic and magic DB available.' + ) + warnings.warn("System libmagic is used. 
Install typecode-libmagic for best consitency") + return ret return command.load_shared_library(dll, libdir) @@ -164,6 +206,7 @@ def __init__(self, flags, magic_db_location=None): self.flags = flags self.cookie = _magic_open(self.flags) if not magic_db_location: + # if no plugin, None is returned, and libmagic will load the default db magic_db_location = get_location(TYPECODE_LIBMAGIC_DATABASE) # Note: this location must always be bytes on Python2 and 3, all OSes @@ -257,3 +300,9 @@ def check_error(result, func, args): # NOQA _magic_load.restype = ctypes.c_int _magic_load.argtypes = [ctypes.c_void_p, ctypes.c_char_p] _magic_load.errcheck = check_error + +_magic_version = libmagic.magic_version +_magic_version.restype = ctypes.c_int +_magic_version.argtypes = [] + +libmagic_version = _magic_version() diff --git a/tests/test_libmagic_load.py b/tests/test_libmagic_load.py new file mode 100644 index 0000000..84b6995 --- /dev/null +++ b/tests/test_libmagic_load.py @@ -0,0 +1,27 @@ +# +# Copyright (c) nexB Inc. and others. +# SPDX-License-Identifier: Apache-2.0 +# +# Visit https://aboutcode.org and https://github.com/nexB/ for support and download. +# ScanCode is a trademark of nexB Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import os + +from typecode.magic2 import libmagic_version + + +def test_load_lib(): + assert libmagic_version > 0 \ No newline at end of file From d6fe59fd2e832075905ecb27235640a2776dad7a Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Fri, 7 May 2021 14:56:42 +0200 Subject: [PATCH 05/35] Update markers syntax for pytest Signed-off-by: Philippe Ombredanne --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 55fb92c..a3bda44 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,6 +41,6 @@ python_functions = "test" addopts = [ "-rfExXw", - "--strict", + "--strict-markers", "--doctest-modules" ] From ca6ab2189a6ff6fd093dc9424aa17183a05e6988 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Fri, 7 May 2021 14:59:17 +0200 Subject: [PATCH 06/35] Add fallback version for setuptools_scm This will work even from a git archive or when git is not installed. Signed-off-by: Philippe Ombredanne --- .gitattributes | 1 + pyproject.toml | 1 + 2 files changed, 2 insertions(+) diff --git a/.gitattributes b/.gitattributes index b79df5c..96c89ce 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,2 +1,3 @@ # Ignore all Git auto CR/LF line endings conversions * -text +pyproject.toml export-subst diff --git a/pyproject.toml b/pyproject.toml index a3bda44..52caac4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,6 +3,7 @@ requires = ["setuptools >= 50", "wheel", "setuptools_scm[toml] >= 4"] build-backend = "setuptools.build_meta" [tool.setuptools_scm] +fallback_version = "v9999.$Format:%h-%cs$" [tool.pytest.ini_options] norecursedirs = [ From 1364bbbb9c399bd535686ea4ec6bfc241eb0e689 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 11 May 2021 10:57:19 +0200 Subject: [PATCH 07/35] Add note for setuptools_scam fallback version Signed-off-by: Philippe Ombredanne --- pyproject.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 
52caac4..8eebe91 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,6 +3,8 @@ requires = ["setuptools >= 50", "wheel", "setuptools_scm[toml] >= 4"] build-backend = "setuptools.build_meta" [tool.setuptools_scm] +# this is used populated when creating a git archive +# and when there is .git dir and/or there is no git installed fallback_version = "v9999.$Format:%h-%cs$" [tool.pytest.ini_options] From be851b017a6e5c98ad85a84cda8b3f070e7acf34 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 11 May 2021 11:00:26 +0200 Subject: [PATCH 08/35] Use azure-posix.yml for linux and macOS Signed-off-by: Philippe Ombredanne --- azure-pipelines.yml | 10 +++--- etc/ci/azure-mac.yml | 36 --------------------- etc/ci/{azure-linux.yml => azure-posix.yml} | 0 3 files changed, 5 insertions(+), 41 deletions(-) delete mode 100644 etc/ci/azure-mac.yml rename etc/ci/{azure-linux.yml => azure-posix.yml} (100%) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 9a4c950..31ef36f 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -7,7 +7,7 @@ jobs: - - template: etc/ci/azure-linux.yml + - template: etc/ci/azure-posix.yml parameters: job_name: ubuntu16_cpython image_name: ubuntu-16.04 @@ -15,7 +15,7 @@ jobs: test_suites: all: tmp/bin/pytest -vvs - - template: etc/ci/azure-linux.yml + - template: etc/ci/azure-posix.yml parameters: job_name: ubuntu18_cpython image_name: ubuntu-18.04 @@ -23,7 +23,7 @@ jobs: test_suites: all: tmp/bin/pytest -n 2 -vvs - - template: etc/ci/azure-linux.yml + - template: etc/ci/azure-posix.yml parameters: job_name: ubuntu20_cpython image_name: ubuntu-20.04 @@ -31,7 +31,7 @@ jobs: test_suites: all: tmp/bin/pytest -n 2 -vvs - - template: etc/ci/azure-mac.yml + - template: etc/ci/azure-posix.yml parameters: job_name: macos1014_cpython image_name: macos-10.14 @@ -39,7 +39,7 @@ jobs: test_suites: all: tmp/bin/pytest -n 2 -vvs - - template: etc/ci/azure-mac.yml + - template: etc/ci/azure-posix.yml parameters: job_name: macos1015_cpython 
image_name: macos-10.15 diff --git a/etc/ci/azure-mac.yml b/etc/ci/azure-mac.yml deleted file mode 100644 index 752ae2e..0000000 --- a/etc/ci/azure-mac.yml +++ /dev/null @@ -1,36 +0,0 @@ -parameters: - job_name: '' - image_name: '' - python_versions: [] - test_suites: {} - python_architecture: x64 - -jobs: - - job: ${{ parameters.job_name }} - - pool: - vmImage: ${{ parameters.image_name }} - - strategy: - matrix: - ${{ each pyver in parameters.python_versions }}: - ${{ each tsuite in parameters.test_suites }}: - ${{ format('py{0} {1}', pyver, tsuite.key) }}: - python_version: ${{ pyver }} - test_suite_label: ${{ tsuite.key }} - test_suite: ${{ tsuite.value }} - steps: - - checkout: self - fetchDepth: 10 - - - task: UsePythonVersion@0 - inputs: - versionSpec: '$(python_version)' - architecture: '${{ parameters.python_architecture }}' - displayName: 'Install Python $(python_version)' - - - script: ./configure - displayName: 'Run Configure' - - - script: $(test_suite) - displayName: 'Run $(test_suite_label) tests with py$(python_version) on ${{ parameters.job_name }}' diff --git a/etc/ci/azure-linux.yml b/etc/ci/azure-posix.yml similarity index 100% rename from etc/ci/azure-linux.yml rename to etc/ci/azure-posix.yml From 4f0aecf4f2a01c71b8d0f54987cd68de5f7922c2 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 11 May 2021 11:14:23 +0200 Subject: [PATCH 09/35] Adopt new configure script derived from ScanCode Signed-off-by: Philippe Ombredanne --- configure | 164 ++++++++++++++++++++++++---- configure.bat | 238 ++++++++++++++++++++++++++--------------- etc/ci/azure-posix.yml | 7 +- etc/ci/azure-win.yml | 5 +- 4 files changed, 304 insertions(+), 110 deletions(-) diff --git a/configure b/configure index 78e7498..25ab0ce 100755 --- a/configure +++ b/configure @@ -1,43 +1,169 @@ #!/usr/bin/env bash # -# Copyright (c) nexB Inc. and others. +# Copyright (c) nexB Inc. and others. All rights reserved. 
# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/nexB/ for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. # set -e #set -x -# source this script for a basic setup and configuration for local development +################################ +# A configuration script to set things up: +# create a virtualenv and install or update thirdparty packages. +# Source this script for initial configuration +# Use configure --help for details +# +# This script will search for a virtualenv.pyz app in etc/thirdparty/virtualenv.pyz +# Otherwise it will download the latest from the VIRTUALENV_PYZ_URL default +################################ +CLI_ARGS=$1 -CONFIGURE_ROOT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +################################ +# Defaults. Change these variables to customize this script +################################ +# Requirement arguments passed to pip and used by default or with --dev. +REQUIREMENTS="--editable ." 
+DEV_REQUIREMENTS="--editable .[testing]" -if [[ "$1" == "--clean" ]]; then - rm -rf "$CONFIGURE_ROOT_DIR/tmp" - exit +# where we create a virtualenv +VIRTUALENV_DIR=tmp + +# Cleanable files and directories with the --clean option +CLEANABLE=" + build + tmp" + +# extra arguments passed to pip +PIP_EXTRA_ARGS=" " + +# the URL to download virtualenv.pyz if needed +VIRTUALENV_PYZ_URL=https://bootstrap.pypa.io/virtualenv.pyz +################################ + + +################################ +# Current directory where this script lives +CFG_ROOT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +CFG_BIN_DIR=$CFG_ROOT_DIR/$VIRTUALENV_DIR/bin + + +################################ +# Set the quiet flag to empty if not defined +if [[ "$CFG_QUIET" == "" ]]; then + CFG_QUIET=" " fi -if [[ "$PYTHON_EXE" == "" ]]; then - PYTHON_EXE=python3 +################################ +# find a proper Python to run +# Use environment variables or a file if available. +# Otherwise the latest Python by default. +if [[ "$PYTHON_EXECUTABLE" == "" ]]; then + # check for a file named PYTHON_EXECUTABLE + if [ -f "$CFG_ROOT_DIR/PYTHON_EXECUTABLE" ]; then + PYTHON_EXECUTABLE=$(cat "$CFG_ROOT_DIR/PYTHON_EXECUTABLE") + else + PYTHON_EXECUTABLE=python3 + fi fi -function setup { - # create a virtualenv on Python - mkdir -p $CONFIGURE_ROOT_DIR/tmp - wget -O $CONFIGURE_ROOT_DIR/tmp/virtualenv.pyz https://bootstrap.pypa.io/virtualenv.pyz - $PYTHON_EXE $CONFIGURE_ROOT_DIR/tmp/virtualenv.pyz --wheel embed --pip embed --setuptools embed --seeder pip $CONFIGURE_ROOT_DIR/tmp - source $CONFIGURE_ROOT_DIR/tmp/bin/activate +################################ +cli_help() { + echo An initial configuration script + echo " usage: ./configure [options]" + echo + echo The default is to configure for regular use. Use --dev for development. + echo + echo The options are: + echo " --clean: clean built and installed files and exit." + echo " --dev: configure the environment for development." 
+ echo " --help: display this help message and exit." + echo + echo By default, the python interpreter version found in the path is used. + echo Alternatively, the PYTHON_EXECUTABLE environment variable can be set to + echo configure another Python executable interpreter to use. If this is not + echo set, a file named PYTHON_EXECUTABLE containing a single line with the + echo path of the Python executable to use will be checked last. + set +e + exit } -setup +clean() { + # Remove cleanable file and directories and files from the root dir. + echo "* Cleaning ..." + for cln in $CLEANABLE; + do rm -rf "${CFG_ROOT_DIR:?}/${cln:?}"; + done + set +e + exit +} -$CONFIGURE_ROOT_DIR/tmp/bin/pip install -e .[testing] -if [ -f "$CONFIGURE_ROOT_DIR/tmp/bin/activate" ]; then - source "$CONFIGURE_ROOT_DIR/tmp/bin/activate" -fi +create_virtualenv() { + # create a virtualenv for Python + # Note: we do not use the bundled Python 3 "venv" because its behavior and + # presence is not consistent across Linux distro and sometimes pip is not + # included either by default. The virtualenv.pyz app cures all these issues. + + VENV_DIR="$1" + if [ ! -f "$CFG_BIN_DIR/python" ]; then + + mkdir -p "$CFG_ROOT_DIR/$VENV_DIR" + + if [ -f "$CFG_ROOT_DIR/etc/thirdparty/virtualenv.pyz" ]; then + VIRTUALENV_PYZ="$CFG_ROOT_DIR/etc/thirdparty/virtualenv.pyz" + else + VIRTUALENV_PYZ="$CFG_ROOT_DIR/$VENV_DIR/virtualenv.pyz" + wget -O "$VIRTUALENV_PYZ" "$VIRTUALENV_PYZ_URL" + fi + + $PYTHON_EXECUTABLE "$VIRTUALENV_PYZ" \ + --wheel embed --pip embed --setuptools embed \ + --seeder pip \ + --never-download \ + --no-periodic-update \ + --no-vcs-ignore \ + $CFG_QUIET \ + "$CFG_ROOT_DIR/$VENV_DIR" + fi +} + + +install_packages() { + # install requirements in virtualenv + # note: --no-build-isolation means that pip/wheel/setuptools will not + # be reinstalled a second time and reused from the virtualenv and this + # speeds up the installation. + # We always have the PEP517 build dependencies installed already. 
+ + "$CFG_BIN_DIR/pip" install \ + --upgrade \ + --no-build-isolation \ + $CFG_QUIET \ + $PIP_EXTRA_ARGS \ + $1 +} + + +################################ +# Main command line entry point +CFG_DEV_MODE=0 +CFG_REQUIREMENTS=$REQUIREMENTS + +case "$CLI_ARGS" in + --help) cli_help;; + --clean) clean;; + --dev) CFG_REQUIREMENTS="$DEV_REQUIREMENTS" && CFG_DEV_MODE=1;; +esac + +create_virtualenv "$VIRTUALENV_DIR" +install_packages "$CFG_REQUIREMENTS" +. "$CFG_BIN_DIR/activate" set +e diff --git a/configure.bat b/configure.bat index 00cb101..8c497ba 100644 --- a/configure.bat +++ b/configure.bat @@ -1,120 +1,180 @@ @echo OFF @setlocal -@rem Copyright (c) nexB Inc. http://www.nexb.com/ - All rights reserved. + +@rem Copyright (c) nexB Inc. and others. All rights reserved. +@rem SPDX-License-Identifier: Apache-2.0 +@rem See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +@rem See https://github.com/nexB/ for support or download. +@rem See https://aboutcode.org for more information about nexB OSS projects. + @rem ################################ -@rem # A configuration script for Windows -@rem # -@rem # The options and (optional) arguments are: -@rem # --clean : this is exclusive of anything else and cleans the environment -@rem # from built and installed files -@rem # -@rem # --python < path to python.exe> : this must be the first argument and set -@rem # the path to the Python executable to use. If < path to python.exe> is -@rem # set to "path", then the executable will be the python.exe available -@rem # in the PATH. +@rem # A configuration script to set things up: +@rem # create a virtualenv and install or update thirdparty packages. 
+@rem # Source this script for initial configuration +@rem # Use configure --help for details + +@rem # This script will search for a virtualenv.pyz app in etc\thirdparty\virtualenv.pyz +@rem # Otherwise it will download the latest from the VIRTUALENV_PYZ_URL default @rem ################################ -@rem Current directory where this .bat files lives -set CFG_ROOT_DIR=%~dp0 -@rem path where a configured Python should live in the current virtualenv if installed -set CONFIGURED_PYTHON=%CFG_ROOT_DIR%tmp\Scripts\python.exe -set PYTHON_EXECUTABLE= +@rem ################################ +@rem # Defaults. Change these variables to customize this script +@rem ################################ + +@rem # Requirement arguments passed to pip and used by default or with --dev. +set "REQUIREMENTS=--editable ." +set "DEV_REQUIREMENTS=--editable .[testing]" + +@rem # where we create a virtualenv +set "VIRTUALENV_DIR=tmp" + +@rem # Cleanable files and directories to delete with the --clean option +set "CLEANABLE=build tmp" -@rem parse command line options and arguments -:collectopts -if "%1" EQU "--help" (goto cli_help) -if "%1" EQU "--clean" (call rmdir /s /q "%CFG_ROOT_DIR%tmp") && call exit /b -if "%1" EQU "--python" (set PROVIDED_PYTHON=%~2) && shift && shift && goto collectopts +@rem # extra arguments passed to pip +set "PIP_EXTRA_ARGS= " -@rem If we have a pre-configured Python in our virtualenv, reuse this as-is and run -if exist ""%CONFIGURED_PYTHON%"" ( - set PYTHON_EXECUTABLE=%CONFIGURED_PYTHON% - goto run +@rem # the URL to download virtualenv.pyz if needed +set VIRTUALENV_PYZ_URL=https://bootstrap.pypa.io/virtualenv.pyz +@rem ################################ + + +@rem ################################ +@rem # Current directory where this script lives +set CFG_ROOT_DIR=%~dp0 +set "CFG_BIN_DIR=%CFG_ROOT_DIR%\%VIRTUALENV_DIR%\Scripts" + + +@rem ################################ +@rem # Set the quiet flag to empty if not defined +if not defined CFG_QUIET ( + set 
"CFG_QUIET= " ) -@rem If we have a command arg for Python use this as-is -if ""%PROVIDED_PYTHON%""==""path"" ( - @rem use a bare python available in the PATH - set PYTHON_EXECUTABLE=python - goto run + +@rem ################################ +@rem # Main command line entry point +set CFG_DEV_MODE=0 +set "CFG_REQUIREMENTS=%REQUIREMENTS%" + +if "%1" EQU "--help" (goto cli_help) +if "%1" EQU "--clean" (goto clean) +if "%1" EQU "--dev" ( + set "CFG_REQUIREMENTS=%DEV_REQUIREMENTS%" + set CFG_DEV_MODE=1 ) -if exist ""%PROVIDED_PYTHON%"" ( - set PYTHON_EXECUTABLE=%PROVIDED_PYTHON% - goto run +if "%1" EQU "--python" ( + echo "The --python is now DEPRECATED. Use the PYTHON_EXECUTABLE environment + echo "variable instead. Run configure --help for details." + exit /b 0 ) +@rem ################################ +@rem # find a proper Python to run +@rem # Use environment variables or a file if available. +@rem # Otherwise the latest Python by default. +if not defined PYTHON_EXECUTABLE ( + @rem # check for a file named PYTHON_EXECUTABLE + if exist ""%CFG_ROOT_DIR%\PYTHON_EXECUTABLE"" ( + set /p PYTHON_EXECUTABLE=<""%CFG_ROOT_DIR%\PYTHON_EXECUTABLE"" + ) else ( + set "PYTHON_EXECUTABLE=py" + ) +) -@rem otherwise we search for a suitable Python interpreter -:find_python -@rem First check the existence of the "py" launcher (available in Python 3) -@rem if we have it, check if we have a py -3 installed with the good version or a py 2.7 -@rem if not, check if we have an old py 2.7 -@rem exist if all fails +:create_virtualenv +@rem # create a virtualenv for Python +@rem # Note: we do not use the bundled Python 3 "venv" because its behavior and +@rem # presence is not consistent across Linux distro and sometimes pip is not +@rem # included either by default. The virtualenv.pyz app cures all these issues. 
-where py >nul 2>nul -if %ERRORLEVEL% == 0 ( - @rem we have a py launcher, check for the availability of our required Python 3 version - py -3.6 --version >nul 2>nul - if %ERRORLEVEL% == 0 ( - set PYTHON_EXECUTABLE=py -3.6 - ) else ( - @rem we have no required python 3, let's try python 2: - py -2 --version >nul 2>nul - if %ERRORLEVEL% == 0 ( - set PYTHON_EXECUTABLE=py -2 - ) else ( - @rem we have py and no python 3 and 2, exit - echo * Unable to find an installation of Python. - exit /b 1 - ) +if not exist ""%CFG_BIN_DIR%\python.exe"" ( + if not exist "%CFG_BIN_DIR%" ( + mkdir %CFG_BIN_DIR% ) -) else ( - @rem we have no py launcher, check for a default Python 2 installation - if not exist ""%DEFAULT_PYTHON2%"" ( - echo * Unable to find an installation of Python. - exit /b 1 + + if exist ""%CFG_ROOT_DIR%\etc\thirdparty\virtualenv.pyz"" ( + %PYTHON_EXECUTABLE% "%CFG_ROOT_DIR%\etc\thirdparty\virtualenv.pyz" ^ + --wheel embed --pip embed --setuptools embed ^ + --seeder pip ^ + --never-download ^ + --no-periodic-update ^ + --no-vcs-ignore ^ + %CFG_QUIET% ^ + %CFG_ROOT_DIR%\%VIRTUALENV_DIR% ) else ( - set PYTHON_EXECUTABLE=%DEFAULT_PYTHON2% + if not exist ""%CFG_ROOT_DIR%\%VIRTUALENV_DIR%\virtualenv.pyz"" ( + curl -o "%CFG_ROOT_DIR%\%VIRTUALENV_DIR%\virtualenv.pyz" %VIRTUALENV_PYZ_URL% + + if %ERRORLEVEL% neq 0 ( + exit /b %ERRORLEVEL% + ) + ) + %PYTHON_EXECUTABLE% "%CFG_ROOT_DIR%\%VIRTUALENV_DIR%\virtualenv.pyz" ^ + --wheel embed --pip embed --setuptools embed ^ + --seeder pip ^ + --never-download ^ + --no-periodic-update ^ + --no-vcs-ignore ^ + %CFG_QUIET% ^ + %CFG_ROOT_DIR%\%VIRTUALENV_DIR% ) ) +if %ERRORLEVEL% neq 0 ( + exit /b %ERRORLEVEL% +) + -:run -@rem without this things may not always work on Windows 10, but this makes things slower -set PYTHONDONTWRITEBYTECODE=1 +:install_packages +@rem # install requirements in virtualenv +@rem # note: --no-build-isolation means that pip/wheel/setuptools will not +@rem # be reinstalled a second time and reused from the 
virtualenv and this +@rem # speeds up the installation. +@rem # We always have the PEP517 build dependencies installed already. -call mkdir "%CFG_ROOT_DIR%tmp" -call curl -o "%CFG_ROOT_DIR%tmp\virtualenv.pyz" https://bootstrap.pypa.io/virtualenv.pyz -call %PYTHON_EXECUTABLE% "%CFG_ROOT_DIR%tmp\virtualenv.pyz" --wheel embed --pip embed --setuptools embed --seeder pip "%CFG_ROOT_DIR%tmp" -call "%CFG_ROOT_DIR%tmp\Scripts\activate" -call "%CFG_ROOT_DIR%tmp\Scripts\pip" install -e .[testing] +%CFG_BIN_DIR%\pip install ^ + --upgrade ^ + --no-build-isolation ^ + %CFG_QUIET% ^ + %PIP_EXTRA_ARGS% ^ + %CFG_REQUIREMENTS% -@rem Return a proper return code on failure if %ERRORLEVEL% neq 0 ( exit /b %ERRORLEVEL% ) -endlocal -goto activate +exit /b 0 + + +@rem ################################ :cli_help -echo A configuration script for Windows -echo usage: configure [options] [path/to/config/directory] -echo. -echo The options and arguments are: -echo --clean : this is exclusive of anything else and cleans the environment -echo from built and installed files -echo. -echo --python path/to/python.exe : this is set to the path of an alternative -echo Python executable to use. If path/to/python.exe is set to "path", -echo then the executable will be the python.exe available in the PATH. -echo. - - -:activate -@rem Activate the virtualenv -if exist "%CFG_ROOT_DIR%tmp\Scripts\activate" ( - "%CFG_ROOT_DIR%tmp\Scripts\activate" + echo An initial configuration script + echo " usage: configure [options]" + echo " " + echo The default is to configure for regular use. Use --dev for development. + echo " " + echo The options are: + echo " --clean: clean built and installed files and exit." + echo " --dev: configure the environment for development." + echo " --help: display this help message and exit." + echo " " + echo By default, the python interpreter version found in the path is used. 
+ echo Alternatively, the PYTHON_EXECUTABLE environment variable can be set to + echo configure another Python executable interpreter to use. If this is not + echo set, a file named PYTHON_EXECUTABLE containing a single line with the + echo path of the Python executable to use will be checked last. + exit /b 0 + + +:clean +@rem # Remove cleanable file and directories and files from the root dir. +echo "* Cleaning ..." +for %%F in (%CLEANABLE%) do ( + rmdir /s /q "%CFG_ROOT_DIR%\%%F" >nul 2>&1 + del /f /q "%CFG_ROOT_DIR%\%%F" >nul 2>&1 ) +exit /b 0 diff --git a/etc/ci/azure-posix.yml b/etc/ci/azure-posix.yml index 752ae2e..0921d9b 100644 --- a/etc/ci/azure-posix.yml +++ b/etc/ci/azure-posix.yml @@ -19,6 +19,7 @@ jobs: python_version: ${{ pyver }} test_suite_label: ${{ tsuite.key }} test_suite: ${{ tsuite.value }} + steps: - checkout: self fetchDepth: 10 @@ -29,7 +30,11 @@ jobs: architecture: '${{ parameters.python_architecture }}' displayName: 'Install Python $(python_version)' - - script: ./configure + - script: | + python3 --version + python$(python_version) --version + echo "python$(python_version)" > PYTHON_EXECUTABLE + ./configure --dev displayName: 'Run Configure' - script: $(test_suite) diff --git a/etc/ci/azure-win.yml b/etc/ci/azure-win.yml index afe1686..03d8927 100644 --- a/etc/ci/azure-win.yml +++ b/etc/ci/azure-win.yml @@ -29,7 +29,10 @@ jobs: architecture: '${{ parameters.python_architecture }}' displayName: 'Install Python $(python_version)' - - script: configure --python path + - script: | + python --version + echo | set /p=python> PYTHON_EXECUTABLE + configure --dev displayName: 'Run Configure' - script: $(test_suite) From aa04429ae6e5d05ef8ee2a0fbad9872014463a25 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 11 May 2021 11:17:09 +0200 Subject: [PATCH 10/35] Add notes on customization Signed-off-by: Philippe Ombredanne --- README.rst | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/README.rst b/README.rst index 
a0e682f..a291173 100644 --- a/README.rst +++ b/README.rst @@ -32,3 +32,12 @@ Update an existing project git merge skeleton/main --allow-unrelated-histories This is also the workflow to use when updating the skeleton files in any given repository. + + +Customizing +----------- + +You typically want to perform these customizations: + +- remove or update the src/README.rst and tests/README.rst files +- check the configure and configure.bat defaults From 56ada8fffacac14140bf016fd3f6bee4f4615fcc Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 11 May 2021 11:19:12 +0200 Subject: [PATCH 11/35] Adopt new configure --dev convention Signed-off-by: Philippe Ombredanne --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 7a342df..1b52eb2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,7 +15,7 @@ python: - "3.8" # Scripts to run at install stage -install: ./configure +install: ./configure --dev # Scripts to run at script stage script: tmp/bin/pytest From 0dbcdc9f6c929b3d030910a69e5566c149e15d7a Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 11 May 2021 11:21:48 +0200 Subject: [PATCH 12/35] Clarify CHANGELOG to be Rst Signed-off-by: Philippe Ombredanne --- CHANGELOG.rst | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 5f8bc8d..fc2b6e3 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,5 +1,8 @@ -Release notes -------------- -### Version 0.0.0 +Changelog +========= + + +v0.0.0 +------ *xxxx-xx-xx* -- Initial release. 
From d21aef35a61675289bbebf963030b539c10a7b28 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 11 May 2021 11:22:22 +0200 Subject: [PATCH 13/35] Add skeleton release notes to README.rst This way they do not end up in the template CHANGELOG.rst Signed-off-by: Philippe Ombredanne --- README.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/README.rst b/README.rst index a291173..b84a049 100644 --- a/README.rst +++ b/README.rst @@ -41,3 +41,10 @@ You typically want to perform these customizations: - remove or update the src/README.rst and tests/README.rst files - check the configure and configure.bat defaults + + +Release Notes +------------- + +- 2021-05-11: adopt new configure scripts from ScanCode TK that allows correct + configuration of which Python version is used. From ae78d4377ea7df14527b7c2e743f38596086e23c Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 12 May 2021 09:30:49 +0200 Subject: [PATCH 14/35] Implement new envt. variables approach We now load native libraries and executables from: 1. an envt. variable path 2. OR a location provider plugin 3. 
OR the PATH or we fail Signed-off-by: Philippe Ombredanne --- src/typecode/magic2.py | 81 +++++++++++++++++++++++++++++++++--------- 1 file changed, 65 insertions(+), 16 deletions(-) diff --git a/src/typecode/magic2.py b/src/typecode/magic2.py index d23aef7..3011782 100644 --- a/src/typecode/magic2.py +++ b/src/typecode/magic2.py @@ -45,9 +45,7 @@ import os from commoncode import command -from plugincode.location_provider import get_location - -from os import fsencode +from commoncode.system import on_windows TRACE = False @@ -74,24 +72,75 @@ DETECT_ENC = MAGIC_NONE | MAGIC_MIME | MAGIC_MIME_ENCODING # keys for plugin-provided locations -TYPECODE_LIBMAGIC_LIBDIR = 'typecode.libmagic.libdir' TYPECODE_LIBMAGIC_DLL = 'typecode.libmagic.dll' -TYPECODE_LIBMAGIC_DATABASE = 'typecode.libmagic.db' +TYPECODE_LIBMAGIC_DB = 'typecode.libmagic.db' + +TYPECODE_LIBMAGIC_PATH_ENVVAR = 'TYPECODE_LIBMAGIC_PATH' +TYPECODE_LIBMAGIC_DB_PATH_ENVVAR = 'TYPECODE_LIBMAGIC_DB_PATH' def load_lib(): """ - Return the loaded libmagic shared library object from plugin-provided path. + Return the libmagic shared library object loaded from either: + - an environment variable ``TYPECODE_LIBMAGIC_PATH`` + - a plugin-provided path, + - the system PATH. + Raise an Exception if no libmagic can be found. """ - dll = get_location(TYPECODE_LIBMAGIC_DLL) - libdir = get_location(TYPECODE_LIBMAGIC_LIBDIR) - if not (dll and libdir) or not os.path.isfile(dll) or not os.path.isdir(libdir): + from plugincode.location_provider import get_location + + # try the environment first + dll_loc = os.environ.get(TYPECODE_LIBMAGIC_PATH_ENVVAR) + + # try the PATH + if not dll_loc: + dll = 'libmagic.dll' if on_windows else 'libmagic.so' + dll_loc = command.find_in_path(dll) + + if not dll_loc or not os.path.isfile(dll_loc): + raise Exception( + 'CRITICAL: libmagic DLL and its magic database are not installed. 
' + 'Unable to continue: you need to install a valid typecode-libmagic ' + 'plugin with a valid and proper libmagic and magic DB available. ' + f'OR set the {TYPECODE_LIBMAGIC_PATH_ENVVAR} environment variable.' + ) + return command.load_shared_library(dll_loc) + + +def get_magicdb_location(_cache=[]): + """ + Return the location of the magicdb loaded from either: + - an environment variable ``TYPECODE_LIBMAGIC_DB_PATH``, + - a plugin-provided path, + - the system PATH. + Raise an Exception if no magicdb command can be found. + """ + if _cache: + return _cache[0] + + from plugincode.location_provider import get_location + + # try the environment first + magicdb_loc = os.environ.get(TYPECODE_LIBMAGIC_DB_PATH_ENVVAR) + + # try a plugin-provided path second + if not magicdb_loc: + magicdb_loc = get_location(TYPECODE_LIBMAGIC_DB) + + # try the PATH + if not magicdb_loc: + db = 'magic.mgc' + magicdb_loc = command.find_in_path(db) + + if not magicdb_loc or not os.path.isfile(magicdb_loc): raise Exception( - 'CRITICAL: libmagic DLL and is magic database are not installed. ' + 'CRITICAL: Libmagic magic database is not installed. ' 'Unable to continue: you need to install a valid typecode-libmagic ' - 'plugin with a valid and proper libmagic and magic DB available.' + 'plugin with a valid magic database available. ' + 'OR set the TYPECODE_LIBMAGIC_DB_PATH environment variable.' 
) - return command.load_shared_library(dll, libdir) + _cache.append(magicdb_loc) + return magicdb_loc if TRACE: @@ -164,11 +213,11 @@ def __init__(self, flags, magic_db_location=None): self.flags = flags self.cookie = _magic_open(self.flags) if not magic_db_location: - magic_db_location = get_location(TYPECODE_LIBMAGIC_DATABASE) + magic_db_location = get_magicdb_location() - # Note: this location must always be bytes on Python2 and 3, all OSes + # Note: this location must always be FS-encoded bytes on all OSes if isinstance(magic_db_location, str): - magic_db_location = fsencode(magic_db_location) + magic_db_location = os.fsencode(magic_db_location) _magic_load(self.cookie, magic_db_location) @@ -190,7 +239,7 @@ def get(self, location): # location string may therefore be mangled and the file not accessible # anymore by libmagic in some cases. try: - uloc = fsencode(location) + uloc = os.fsencode(location) return _magic_file(self.cookie, uloc) except: # if all fails, read the start of the file instead From d77ff06e266c1eac1183d443e21a548e4a0363db Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 May 2021 18:48:49 +0200 Subject: [PATCH 15/35] Require latest commoncode Signed-off-by: Philippe Ombredanne --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 5fc51f2..a267a33 100644 --- a/setup.cfg +++ b/setup.cfg @@ -36,7 +36,7 @@ zip_safe = false install_requires = attrs >= 18.1, !=20.1.0 binaryornot - commoncode >= 21.1.21 + commoncode >= 21.5.25 pdfminer.six plugincode From 7e9287ac649ffd71e0eb78f49e92056c7a38be25 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 May 2021 18:49:55 +0200 Subject: [PATCH 16/35] Update CHANGELOG and README Signed-off-by: Philippe Ombredanne --- CHANGELOG.rst | 9 +++++++++ README.rst | 27 ++++++++++++++++++++++++++- 2 files changed, 35 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 000d521..c390ec4 100644 --- a/CHANGELOG.rst 
+++ b/CHANGELOG.rst @@ -5,6 +5,15 @@ vNext ----- +Version 21.5.29 +--------------- + +- Update vendored pygments to 2.7.4 +- Update commoncode to latest version +- Use new libmagic configuration based on a plugin, and environment variable + or the system path. + + Version 21.2.24 --------------- diff --git a/README.rst b/README.rst index ff9dfbb..740c71c 100644 --- a/README.rst +++ b/README.rst @@ -22,10 +22,35 @@ system, use the `minimal` option:: pip install typecode +In this case, you will need to provide a working libmagic and its database +available in one of these ways: + +- **a typecode-libmagic plugin**: See the standard ones at + https://github.com/nexB/scancode-plugins/tree/main/builtins + These can either bundle a libmagic library and its magic database or expose a + system-installed libmagic. + They do so by providing a plugin entry point as a ``scancode_location_provider`` + for ``typecode_libmagic`` which points to a callable that must return a mapping + with these two keys: + - 'typecode.libmagic.dll': the absolute path to a libmagic DLL + - 'typecode.libmagic.db': the absolute path to a libmagic 'magic.mgc' database + See for example: + - https://github.com/nexB/scancode-plugins/blob/4da5fe8a5ab1c87b9b4af9e54d7ad60e289747f5/builtins/typecode_libmagic-linux/setup.py#L42 + - https://github.com/nexB/scancode-plugins/blob/4da5fe8a5ab1c87b9b4af9e54d7ad60e289747f5/builtins/typecode_libmagic-linux/src/typecode_libmagic/__init__.py#L32 + +- **environment variables**: + - TYPECODE_LIBMAGIC_PATH: the absolute path to a libmagic DLL + - TYPECODE_LIBMAGIC_DB_PATH: the absolute path to a libmagic 'magic.mgc' database + +- **a system-installed libmagic and its database availale in the system PATH**: + + +The supported libmagic version is 5.39. 
+ To set up the development environment:: - source configure + source configure --dev To run unit tests:: From a2f1535c5559f9bf3e95293f840b4cf50cfdb11b Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 May 2021 19:01:46 +0200 Subject: [PATCH 17/35] Add note to vendor Pygments Signed-off-by: Philippe Ombredanne --- README.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.rst b/README.rst index 740c71c..8a30dcb 100644 --- a/README.rst +++ b/README.rst @@ -60,3 +60,5 @@ To clean up development environment:: ./configure --clean + +To update Pygment to a newer vendored version use vendy. From 9dfe6d4813153568ad373289c7afd6d39c310f6b Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 May 2021 19:10:44 +0200 Subject: [PATCH 18/35] Add vendy to testing/dev extras Signed-off-by: Philippe Ombredanne --- setup.cfg | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.cfg b/setup.cfg index a267a33..b9a9af2 100644 --- a/setup.cfg +++ b/setup.cfg @@ -54,6 +54,7 @@ testing = # upstream pytest >= 6 pytest-xdist >= 2 + vendy docs= Sphinx>=3.3.1 sphinx-rtd-theme>=0.5.0 From f4d096043085d2dca1ed0785cf31f6cc081f4dfc Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sat, 29 May 2021 22:24:59 +0200 Subject: [PATCH 19/35] Bump vendored pygments to 2.9.0 Signed-off-by: Philippe Ombredanne --- README.rst | 11 +- pyproject.toml | 2 +- src/typecode/_vendor/apache-2.0.LICENSE | 201 ++ src/typecode/_vendor/bin/pygmentize | 8 - src/typecode/_vendor/bsd-new.LICENSE | 9 + src/typecode/_vendor/pygments.ABOUT | 22 + src/typecode/_vendor/pygments.NOTICE | 40 + src/typecode/_vendor/pygments/LICENSE | 2 +- src/typecode/_vendor/pygments/__init__.py | 17 +- src/typecode/_vendor/pygments/__main__.py | 18 + src/typecode/_vendor/pygments/cmdline.py | 67 +- src/typecode/_vendor/pygments/console.py | 2 +- src/typecode/_vendor/pygments/filter.py | 10 +- .../_vendor/pygments/filters/__init__.py | 630 ++++++- src/typecode/_vendor/pygments/formatter.py | 8 +- 
.../_vendor/pygments/formatters/__init__.py | 14 +- .../_vendor/pygments/formatters/_mapping.py | 4 +- .../_vendor/pygments/formatters/bbcode.py | 2 +- .../_vendor/pygments/formatters/html.py | 324 ++-- .../_vendor/pygments/formatters/img.py | 109 +- .../_vendor/pygments/formatters/irc.py | 2 +- .../_vendor/pygments/formatters/latex.py | 82 +- .../_vendor/pygments/formatters/other.py | 20 +- .../_vendor/pygments/formatters/rtf.py | 58 +- .../_vendor/pygments/formatters/svg.py | 42 +- .../_vendor/pygments/formatters/terminal.py | 8 +- .../pygments/formatters/terminal256.py | 22 +- src/typecode/_vendor/pygments/lexer.py | 68 +- .../_vendor/pygments/lexers/__init__.py | 37 +- .../_vendor/pygments/lexers/_asy_builtins.py | 10 +- .../_vendor/pygments/lexers/_cl_builtins.py | 30 +- .../pygments/lexers/_cocoa_builtins.py | 10 +- .../pygments/lexers/_csound_builtins.py | 169 +- .../pygments/lexers/_lasso_builtins.py | 2 +- .../_vendor/pygments/lexers/_lua_builtins.py | 6 +- .../_vendor/pygments/lexers/_mapping.py | 947 +++++----- .../_vendor/pygments/lexers/_mql_builtins.py | 4 +- .../pygments/lexers/_mysql_builtins.py | 1282 +++++++++++++ .../pygments/lexers/_openedge_builtins.py | 2 +- .../_vendor/pygments/lexers/_php_builtins.py | 7 +- .../pygments/lexers/_postgres_builtins.py | 89 +- .../pygments/lexers/_scilab_builtins.py | 4 +- .../pygments/lexers/_sourcemod_builtins.py | 4 +- .../_vendor/pygments/lexers/_stan_builtins.py | 2 +- .../pygments/lexers/_stata_builtins.py | 2 +- .../_vendor/pygments/lexers/_tsql_builtins.py | 2 +- .../_vendor/pygments/lexers/_usd_builtins.py | 113 ++ .../pygments/lexers/_vbscript_builtins.py | 2 +- .../_vendor/pygments/lexers/_vim_builtins.py | 2 +- .../_vendor/pygments/lexers/actionscript.py | 19 +- src/typecode/_vendor/pygments/lexers/agile.py | 2 +- .../_vendor/pygments/lexers/algebra.py | 21 +- .../_vendor/pygments/lexers/ambient.py | 4 +- src/typecode/_vendor/pygments/lexers/ampl.py | 2 +- src/typecode/_vendor/pygments/lexers/apl.py 
| 30 +- .../_vendor/pygments/lexers/archetype.py | 8 +- src/typecode/_vendor/pygments/lexers/arrow.py | 117 ++ src/typecode/_vendor/pygments/lexers/asm.py | 317 +++- .../_vendor/pygments/lexers/automation.py | 2 +- src/typecode/_vendor/pygments/lexers/bare.py | 104 ++ src/typecode/_vendor/pygments/lexers/basic.py | 19 +- .../_vendor/pygments/lexers/bibtex.py | 4 +- src/typecode/_vendor/pygments/lexers/boa.py | 8 +- .../_vendor/pygments/lexers/business.py | 23 +- src/typecode/_vendor/pygments/lexers/c_cpp.py | 176 +- .../_vendor/pygments/lexers/c_like.py | 63 +- .../_vendor/pygments/lexers/capnproto.py | 2 +- .../_vendor/pygments/lexers/chapel.py | 22 +- src/typecode/_vendor/pygments/lexers/clean.py | 15 +- .../_vendor/pygments/lexers/compiled.py | 2 +- .../_vendor/pygments/lexers/configs.py | 118 +- .../_vendor/pygments/lexers/console.py | 2 +- .../_vendor/pygments/lexers/crystal.py | 134 +- .../_vendor/pygments/lexers/csound.py | 93 +- src/typecode/_vendor/pygments/lexers/css.py | 26 +- src/typecode/_vendor/pygments/lexers/d.py | 19 +- .../_vendor/pygments/lexers/dalvik.py | 2 +- src/typecode/_vendor/pygments/lexers/data.py | 327 +++- .../_vendor/pygments/lexers/devicetree.py | 109 ++ src/typecode/_vendor/pygments/lexers/diff.py | 8 +- .../_vendor/pygments/lexers/dotnet.py | 47 +- src/typecode/_vendor/pygments/lexers/dsls.py | 204 ++- src/typecode/_vendor/pygments/lexers/dylan.py | 28 +- src/typecode/_vendor/pygments/lexers/ecl.py | 18 +- .../_vendor/pygments/lexers/eiffel.py | 2 +- src/typecode/_vendor/pygments/lexers/elm.py | 8 +- src/typecode/_vendor/pygments/lexers/email.py | 151 ++ .../_vendor/pygments/lexers/erlang.py | 59 +- .../_vendor/pygments/lexers/esoteric.py | 63 +- src/typecode/_vendor/pygments/lexers/ezhil.py | 34 +- .../_vendor/pygments/lexers/factor.py | 6 +- .../_vendor/pygments/lexers/fantom.py | 2 +- src/typecode/_vendor/pygments/lexers/felix.py | 2 +- .../_vendor/pygments/lexers/floscript.py | 2 +- src/typecode/_vendor/pygments/lexers/forth.py 
| 79 +- .../_vendor/pygments/lexers/fortran.py | 20 +- .../_vendor/pygments/lexers/foxpro.py | 4 +- .../_vendor/pygments/lexers/freefem.py | 44 +- .../_vendor/pygments/lexers/functional.py | 2 +- .../_vendor/pygments/lexers/gdscript.py | 346 ++++ src/typecode/_vendor/pygments/lexers/go.py | 4 +- .../pygments/lexers/grammar_notation.py | 65 +- src/typecode/_vendor/pygments/lexers/graph.py | 2 +- .../_vendor/pygments/lexers/graphics.py | 25 +- .../_vendor/pygments/lexers/haskell.py | 21 +- src/typecode/_vendor/pygments/lexers/haxe.py | 6 +- src/typecode/_vendor/pygments/lexers/hdl.py | 262 ++- .../_vendor/pygments/lexers/hexdump.py | 2 +- src/typecode/_vendor/pygments/lexers/html.py | 30 +- src/typecode/_vendor/pygments/lexers/idl.py | 15 +- src/typecode/_vendor/pygments/lexers/igor.py | 15 +- .../_vendor/pygments/lexers/inferno.py | 2 +- .../_vendor/pygments/lexers/installers.py | 2 +- .../_vendor/pygments/lexers/int_fiction.py | 53 +- .../_vendor/pygments/lexers/iolang.py | 4 +- src/typecode/_vendor/pygments/lexers/j.py | 2 +- .../_vendor/pygments/lexers/javascript.py | 190 +- src/typecode/_vendor/pygments/lexers/julia.py | 86 +- src/typecode/_vendor/pygments/lexers/jvm.py | 473 ++--- src/typecode/_vendor/pygments/lexers/lisp.py | 1632 +++++++++-------- src/typecode/_vendor/pygments/lexers/make.py | 16 +- .../_vendor/pygments/lexers/markup.py | 254 ++- src/typecode/_vendor/pygments/lexers/math.py | 2 +- .../_vendor/pygments/lexers/matlab.py | 145 +- src/typecode/_vendor/pygments/lexers/mime.py | 226 +++ src/typecode/_vendor/pygments/lexers/ml.py | 215 ++- .../_vendor/pygments/lexers/modeling.py | 8 +- .../_vendor/pygments/lexers/modula2.py | 35 +- src/typecode/_vendor/pygments/lexers/monte.py | 2 +- src/typecode/_vendor/pygments/lexers/mosel.py | 448 +++++ src/typecode/_vendor/pygments/lexers/ncl.py | 2 +- .../_vendor/pygments/lexers/nimrod.py | 8 +- src/typecode/_vendor/pygments/lexers/nit.py | 2 +- src/typecode/_vendor/pygments/lexers/nix.py | 2 +- 
.../_vendor/pygments/lexers/oberon.py | 18 +- .../_vendor/pygments/lexers/objective.py | 6 +- src/typecode/_vendor/pygments/lexers/ooc.py | 2 +- src/typecode/_vendor/pygments/lexers/other.py | 2 +- .../_vendor/pygments/lexers/parasail.py | 2 +- .../_vendor/pygments/lexers/parsers.py | 117 +- .../_vendor/pygments/lexers/pascal.py | 34 +- src/typecode/_vendor/pygments/lexers/pawn.py | 46 +- src/typecode/_vendor/pygments/lexers/perl.py | 392 ++-- src/typecode/_vendor/pygments/lexers/php.py | 75 +- .../_vendor/pygments/lexers/pointless.py | 71 + src/typecode/_vendor/pygments/lexers/pony.py | 4 +- src/typecode/_vendor/pygments/lexers/praat.py | 68 +- .../_vendor/pygments/lexers/prolog.py | 73 +- .../_vendor/pygments/lexers/promql.py | 183 ++ .../_vendor/pygments/lexers/python.py | 656 ++++--- src/typecode/_vendor/pygments/lexers/qvt.py | 4 +- src/typecode/_vendor/pygments/lexers/r.py | 14 +- src/typecode/_vendor/pygments/lexers/rdf.py | 256 ++- src/typecode/_vendor/pygments/lexers/rebol.py | 2 +- .../_vendor/pygments/lexers/resource.py | 4 +- src/typecode/_vendor/pygments/lexers/ride.py | 139 ++ src/typecode/_vendor/pygments/lexers/rnc.py | 2 +- .../_vendor/pygments/lexers/roboconf.py | 2 +- .../_vendor/pygments/lexers/robotframework.py | 52 +- src/typecode/_vendor/pygments/lexers/ruby.py | 73 +- src/typecode/_vendor/pygments/lexers/rust.py | 146 +- src/typecode/_vendor/pygments/lexers/sas.py | 2 +- src/typecode/_vendor/pygments/lexers/scdoc.py | 83 + .../_vendor/pygments/lexers/scripting.py | 92 +- src/typecode/_vendor/pygments/lexers/sgf.py | 2 +- src/typecode/_vendor/pygments/lexers/shell.py | 147 +- src/typecode/_vendor/pygments/lexers/sieve.py | 69 + src/typecode/_vendor/pygments/lexers/slash.py | 8 +- .../_vendor/pygments/lexers/smalltalk.py | 4 +- src/typecode/_vendor/pygments/lexers/smv.py | 6 +- .../_vendor/pygments/lexers/snobol.py | 2 +- .../_vendor/pygments/lexers/solidity.py | 92 + .../_vendor/pygments/lexers/special.py | 42 +- 
src/typecode/_vendor/pygments/lexers/sql.py | 444 +++-- src/typecode/_vendor/pygments/lexers/stata.py | 18 +- .../_vendor/pygments/lexers/supercollider.py | 11 +- src/typecode/_vendor/pygments/lexers/tcl.py | 2 +- .../_vendor/pygments/lexers/templates.py | 207 +-- .../_vendor/pygments/lexers/teraterm.py | 289 ++- .../_vendor/pygments/lexers/testing.py | 12 +- src/typecode/_vendor/pygments/lexers/text.py | 2 +- .../_vendor/pygments/lexers/textedit.py | 12 +- .../_vendor/pygments/lexers/textfmts.py | 149 +- .../_vendor/pygments/lexers/theorem.py | 127 +- src/typecode/_vendor/pygments/lexers/tnt.py | 263 +++ .../_vendor/pygments/lexers/trafficscript.py | 2 +- .../_vendor/pygments/lexers/typoscript.py | 5 +- .../_vendor/pygments/lexers/unicon.py | 46 +- src/typecode/_vendor/pygments/lexers/urbi.py | 19 +- src/typecode/_vendor/pygments/lexers/usd.py | 90 + .../_vendor/pygments/lexers/varnish.py | 4 +- .../_vendor/pygments/lexers/verification.py | 23 +- src/typecode/_vendor/pygments/lexers/web.py | 2 +- .../_vendor/pygments/lexers/webidl.py | 299 +++ .../_vendor/pygments/lexers/webmisc.py | 53 +- .../_vendor/pygments/lexers/whiley.py | 10 +- src/typecode/_vendor/pygments/lexers/x10.py | 4 +- src/typecode/_vendor/pygments/lexers/xorg.py | 4 +- src/typecode/_vendor/pygments/lexers/yang.py | 104 ++ src/typecode/_vendor/pygments/lexers/zig.py | 124 ++ src/typecode/_vendor/pygments/modeline.py | 2 +- src/typecode/_vendor/pygments/plugin.py | 2 +- src/typecode/_vendor/pygments/regexopt.py | 2 +- src/typecode/_vendor/pygments/scanner.py | 4 +- src/typecode/_vendor/pygments/sphinxext.py | 4 +- src/typecode/_vendor/pygments/style.py | 18 +- .../_vendor/pygments/styles/__init__.py | 8 +- src/typecode/_vendor/pygments/styles/abap.py | 2 +- src/typecode/_vendor/pygments/styles/algol.py | 2 +- .../_vendor/pygments/styles/algol_nu.py | 2 +- .../_vendor/pygments/styles/arduino.py | 4 +- .../_vendor/pygments/styles/autumn.py | 2 +- .../_vendor/pygments/styles/borland.py | 2 +- 
src/typecode/_vendor/pygments/styles/bw.py | 2 +- .../_vendor/pygments/styles/colorful.py | 2 +- .../_vendor/pygments/styles/default.py | 2 +- src/typecode/_vendor/pygments/styles/emacs.py | 2 +- .../_vendor/pygments/styles/friendly.py | 2 +- .../_vendor/pygments/styles/fruity.py | 2 +- src/typecode/_vendor/pygments/styles/igor.py | 2 +- .../_vendor/pygments/styles/inkpot.py | 67 + .../_vendor/pygments/styles/lovelace.py | 2 +- src/typecode/_vendor/pygments/styles/manni.py | 2 +- .../_vendor/pygments/styles/monokai.py | 7 +- .../_vendor/pygments/styles/murphy.py | 2 +- .../_vendor/pygments/styles/native.py | 2 +- .../_vendor/pygments/styles/paraiso_dark.py | 2 +- .../_vendor/pygments/styles/paraiso_light.py | 2 +- .../_vendor/pygments/styles/pastie.py | 2 +- .../_vendor/pygments/styles/perldoc.py | 2 +- .../_vendor/pygments/styles/rainbow_dash.py | 2 +- src/typecode/_vendor/pygments/styles/rrt.py | 2 +- src/typecode/_vendor/pygments/styles/sas.py | 2 +- .../_vendor/pygments/styles/solarized.py | 6 +- .../_vendor/pygments/styles/stata_dark.py | 2 +- .../_vendor/pygments/styles/stata_light.py | 2 +- src/typecode/_vendor/pygments/styles/tango.py | 2 +- src/typecode/_vendor/pygments/styles/trac.py | 2 +- src/typecode/_vendor/pygments/styles/vim.py | 2 +- src/typecode/_vendor/pygments/styles/vs.py | 2 +- src/typecode/_vendor/pygments/styles/xcode.py | 2 +- src/typecode/_vendor/pygments/token.py | 2 +- src/typecode/_vendor/pygments/unistring.py | 157 +- src/typecode/_vendor/pygments/util.py | 113 +- src/typecode/pygments_lexers.py | 37 +- src/typecode/pygments_lexers.py.ABOUT | 24 +- src/typecode/pygments_lexers_mapping.py | 96 +- src/typecode/pygments_lexers_mapping.py.ABOUT | 24 +- 248 files changed, 12877 insertions(+), 4780 deletions(-) create mode 100644 src/typecode/_vendor/apache-2.0.LICENSE delete mode 100755 src/typecode/_vendor/bin/pygmentize create mode 100644 src/typecode/_vendor/bsd-new.LICENSE create mode 100644 src/typecode/_vendor/pygments.ABOUT create 
mode 100644 src/typecode/_vendor/pygments.NOTICE create mode 100644 src/typecode/_vendor/pygments/__main__.py create mode 100644 src/typecode/_vendor/pygments/lexers/_mysql_builtins.py create mode 100644 src/typecode/_vendor/pygments/lexers/_usd_builtins.py create mode 100644 src/typecode/_vendor/pygments/lexers/arrow.py create mode 100644 src/typecode/_vendor/pygments/lexers/bare.py create mode 100644 src/typecode/_vendor/pygments/lexers/devicetree.py create mode 100644 src/typecode/_vendor/pygments/lexers/email.py create mode 100644 src/typecode/_vendor/pygments/lexers/gdscript.py create mode 100644 src/typecode/_vendor/pygments/lexers/mime.py create mode 100644 src/typecode/_vendor/pygments/lexers/mosel.py create mode 100644 src/typecode/_vendor/pygments/lexers/pointless.py create mode 100644 src/typecode/_vendor/pygments/lexers/promql.py create mode 100644 src/typecode/_vendor/pygments/lexers/ride.py create mode 100644 src/typecode/_vendor/pygments/lexers/scdoc.py create mode 100644 src/typecode/_vendor/pygments/lexers/sieve.py create mode 100644 src/typecode/_vendor/pygments/lexers/solidity.py create mode 100644 src/typecode/_vendor/pygments/lexers/tnt.py create mode 100644 src/typecode/_vendor/pygments/lexers/usd.py create mode 100644 src/typecode/_vendor/pygments/lexers/webidl.py create mode 100644 src/typecode/_vendor/pygments/lexers/yang.py create mode 100644 src/typecode/_vendor/pygments/lexers/zig.py create mode 100644 src/typecode/_vendor/pygments/styles/inkpot.py diff --git a/README.rst b/README.rst index a41ffbd..ee3cc9a 100644 --- a/README.rst +++ b/README.rst @@ -60,4 +60,13 @@ To clean up development environment:: ./configure --clean -To update Pygment to a newer vendored version use vendy:: + +To update Pygment to a newer vendored version use vendy: + + - Update the version of pygments in ``pyproject.toml`` + - Run ``vendy`` + - Update the src/typecpde/pygments_lexers_mapping.py + and src/typecode/pygments_lexers.py scripts accordingly, including 
their + ABOUT files + + \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index a1b9c8c..96a0320 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,5 +55,5 @@ addopts = [ [tool.vendy] target = 'typecode' packages = [ - "pygments==2.4.2", + "pygments==2.9.0", ] diff --git a/src/typecode/_vendor/apache-2.0.LICENSE b/src/typecode/_vendor/apache-2.0.LICENSE new file mode 100644 index 0000000..f49a4e1 --- /dev/null +++ b/src/typecode/_vendor/apache-2.0.LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/src/typecode/_vendor/bin/pygmentize b/src/typecode/_vendor/bin/pygmentize deleted file mode 100755 index 3c8d0ca..0000000 --- a/src/typecode/_vendor/bin/pygmentize +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/jono/nexb/typecode/tmp/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from typecode._vendor.pygments.cmdline import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/src/typecode/_vendor/bsd-new.LICENSE b/src/typecode/_vendor/bsd-new.LICENSE new file mode 100644 index 0000000..57326dd --- /dev/null +++ b/src/typecode/_vendor/bsd-new.LICENSE @@ -0,0 +1,9 @@ +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/src/typecode/_vendor/pygments.ABOUT b/src/typecode/_vendor/pygments.ABOUT new file mode 100644 index 0000000..398df37 --- /dev/null +++ b/src/typecode/_vendor/pygments.ABOUT @@ -0,0 +1,22 @@ +about_resource: pygments +attribute: yes +checksum_md5: 665516d1d1c0099241ab6e4c057e26be +checksum_sha1: e0277b8dd2ebce5121a68bec62173b9e0b057742 +contact: georg@python.org +copyright: Copyright by the Pygments team +description: Pygments is a generic syntax highlighter suitable for use in code hosting, forums, + wikis or other applications that need to prettify source code. 
+download_url: https://files.pythonhosted.org/packages/ba/6e/7a7c13c21d8a4a7f82ccbfe257a045890d4dbf18c023f985f565f97393e3/Pygments-2.9.0.tar.gz +homepage_url: http://pygments.org/ +license_expression: bsd-simplified AND (bsd-new AND apache-2.0 AND public-domain) +name: Pygments +notice_file: pygments.NOTICE +owner: Pocoo Team +owner_url: http://www.pocoo.org/ +package_url: pkg:pypi/pygments@2.9.0 +primary_language: Python +track_changes: yes +type: pypi +version: 2.9.0 +vcs_url: git+https://github.com/pygments/pygments.git +notes: this has been vendored with vendy \ No newline at end of file diff --git a/src/typecode/_vendor/pygments.NOTICE b/src/typecode/_vendor/pygments.NOTICE new file mode 100644 index 0000000..fe5bac9 --- /dev/null +++ b/src/typecode/_vendor/pygments.NOTICE @@ -0,0 +1,40 @@ +Copyright (c) by the respective authors (see AUTHORS file). +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +* Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +# Copyright 2012 Nokia Siemens Networks Oyj +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. \ No newline at end of file diff --git a/src/typecode/_vendor/pygments/LICENSE b/src/typecode/_vendor/pygments/LICENSE index 13d1c74..e1b1566 100644 --- a/src/typecode/_vendor/pygments/LICENSE +++ b/src/typecode/_vendor/pygments/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2006-2019 by the respective authors (see AUTHORS file). +Copyright (c) 2006-2021 by the respective authors (see AUTHORS file). All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/src/typecode/_vendor/pygments/__init__.py b/src/typecode/_vendor/pygments/__init__.py index 3541a04..8652335 100644 --- a/src/typecode/_vendor/pygments/__init__.py +++ b/src/typecode/_vendor/pygments/__init__.py @@ -17,19 +17,18 @@ * it is usable as a command-line tool and as a library * ... and it highlights even Brainfuck! 
- The `Pygments tip`_ is installable with ``easy_install Pygments==dev``. + The `Pygments master branch`_ is installable with ``easy_install Pygments==dev``. - .. _Pygments tip: - http://bitbucket.org/birkenfeld/pygments-main/get/tip.zip#egg=Pygments-dev + .. _Pygments master branch: + https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import sys +from io import StringIO, BytesIO -from typecode._vendor.pygments.util import StringIO, BytesIO - -__version__ = '2.4.2' +__version__ = '2.7.4' __docformat__ = 'restructuredtext' __all__ = ['lex', 'format', 'highlight'] @@ -84,7 +83,3 @@ def highlight(code, lexer, formatter, outfile=None): """ return format(lex(code, lexer), formatter, outfile) - -if __name__ == '__main__': # pragma: no cover - from typecode._vendor.pygments.cmdline import main - sys.exit(main(sys.argv)) diff --git a/src/typecode/_vendor/pygments/__main__.py b/src/typecode/_vendor/pygments/__main__.py new file mode 100644 index 0000000..1e65c89 --- /dev/null +++ b/src/typecode/_vendor/pygments/__main__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +""" + pygments.__main__ + ~~~~~~~~~~~~~~~~~ + + Main entry point for ``python -m pygments``. + + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import sys +import typecode._vendor.pygments.cmdline + +try: + sys.exit(typecode._vendor.pygments.cmdline.main(sys.argv)) +except KeyboardInterrupt: + sys.exit(1) diff --git a/src/typecode/_vendor/pygments/cmdline.py b/src/typecode/_vendor/pygments/cmdline.py index ed416dc..a3e7366 100644 --- a/src/typecode/_vendor/pygments/cmdline.py +++ b/src/typecode/_vendor/pygments/cmdline.py @@ -5,12 +5,10 @@ Command line interface. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. 
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ -from __future__ import print_function - import os import sys import getopt @@ -18,7 +16,8 @@ from typecode._vendor.pygments import __version__, highlight from typecode._vendor.pygments.util import ClassNotFound, OptionError, docstring_headline, \ - guess_decode, guess_decode_from_terminal, terminal_encoding + guess_decode, guess_decode_from_terminal, terminal_encoding, \ + UnclosingTextIOWrapper from typecode._vendor.pygments.lexers import get_all_lexers, get_lexer_by_name, guess_lexer, \ load_lexer_from_file, get_lexer_for_filename, find_lexer_class_for_filename from typecode._vendor.pygments.lexers.special import TextLexer @@ -233,7 +232,7 @@ def main_inner(popts, args, usage): return 0 if opts.pop('-V', None) is not None: - print('Pygments version %s, (c) 2006-2019 by Georg Brandl.' % __version__) + print('Pygments version %s, (c) 2006-2021 by Georg Brandl.' % __version__) return 0 # handle ``pygmentize -L`` @@ -338,8 +337,17 @@ def main_inner(popts, args, usage): # custom lexer, located relative to user's cwd if allow_custom_lexer_formatter and '.py' in lexername: try: + filename = None + name = None if ':' in lexername: filename, name = lexername.rsplit(':', 1) + + if '.py' in name: + # This can happen on Windows: If the lexername is + # C:\lexer.py -- return to normal load path in that case + name = None + + if filename and name: lexer = load_lexer_from_file(filename, name, **parsed_opts) else: @@ -397,11 +405,7 @@ def main_inner(popts, args, usage): elif '-s' not in opts: # treat stdin as full file (-s support is later) # read code from terminal, always in binary mode since we want to # decode ourselves and be tolerant with it - if sys.version_info > (3,): - # Python 3: we have to use .buffer to get a binary stream - code = sys.stdin.buffer.read() - else: - code = sys.stdin.read() + code = sys.stdin.buffer.read() # use .buffer to get a binary 
stream if not inencoding: code, inencoding = guess_decode_from_terminal(code, sys.stdin) # else the lexer will do the decoding @@ -432,10 +436,18 @@ def main_inner(popts, args, usage): # custom formatter, located relative to user's cwd if allow_custom_lexer_formatter and '.py' in fmter: try: + filename = None + name = None if ':' in fmter: - file, fmtername = fmter.rsplit(':', 1) - fmter = load_formatter_from_file(file, fmtername, - **parsed_opts) + # Same logic as above for custom lexer + filename, name = fmter.rsplit(':', 1) + + if '.py' in name: + name = None + + if filename and name: + fmter = load_formatter_from_file(filename, name, + **parsed_opts) else: fmter = load_formatter_from_file(fmter, **parsed_opts) except ClassNotFound as err: @@ -466,11 +478,7 @@ def main_inner(popts, args, usage): fmter = Terminal256Formatter(**parsed_opts) else: fmter = TerminalFormatter(**parsed_opts) - if sys.version_info > (3,): - # Python 3: we have to use .buffer to get a binary stream - outfile = sys.stdout.buffer - else: - outfile = sys.stdout + outfile = sys.stdout.buffer # determine output encoding if not explicitly selected if not outencoding: @@ -485,10 +493,8 @@ def main_inner(popts, args, usage): if not outfn and sys.platform in ('win32', 'cygwin') and \ fmter.name in ('Terminal', 'Terminal256'): # pragma: no cover # unfortunately colorama doesn't support binary streams on Py3 - if sys.version_info > (3,): - from typecode._vendor.pygments.util import UnclosingTextIOWrapper - outfile = UnclosingTextIOWrapper(outfile, encoding=fmter.encoding) - fmter.encoding = None + outfile = UnclosingTextIOWrapper(outfile, encoding=fmter.encoding) + fmter.encoding = None try: import colorama.initialise except ImportError: @@ -509,17 +515,17 @@ def main_inner(popts, args, usage): # ... and do it! if '-s' not in opts: # process whole input as per normal... 
- highlight(code, lexer, fmter, outfile) + try: + highlight(code, lexer, fmter, outfile) + finally: + if outfn: + outfile.close() return 0 else: # line by line processing of stdin (eg: for 'tail -f')... try: while 1: - if sys.version_info > (3,): - # Python 3: we have to use .buffer to get a binary stream - line = sys.stdin.buffer.readline() - else: - line = sys.stdin.readline() + line = sys.stdin.buffer.readline() if not line: break if not inencoding: @@ -530,6 +536,9 @@ def main_inner(popts, args, usage): return 0 except KeyboardInterrupt: # pragma: no cover return 0 + finally: + if outfn: + outfile.close() def main(args=sys.argv): @@ -554,7 +563,7 @@ def main(args=sys.argv): file=sys.stderr) print('Please report the whole traceback to the issue tracker at', file=sys.stderr) - print('.', + print('.', file=sys.stderr) print('*' * 65, file=sys.stderr) print(file=sys.stderr) diff --git a/src/typecode/_vendor/pygments/console.py b/src/typecode/_vendor/pygments/console.py index a05b256..6e1d13e 100644 --- a/src/typecode/_vendor/pygments/console.py +++ b/src/typecode/_vendor/pygments/console.py @@ -5,7 +5,7 @@ Format colored console output. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/filter.py b/src/typecode/_vendor/pygments/filter.py index 7f81920..36dbc7c 100644 --- a/src/typecode/_vendor/pygments/filter.py +++ b/src/typecode/_vendor/pygments/filter.py @@ -5,7 +5,7 @@ Module that implements the default filter. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -17,8 +17,7 @@ def apply_filters(stream, filters, lexer=None): filter, otherwise the filter receives `None`. 
""" def _apply(filter_, stream): - for token in filter_.filter(lexer, stream): - yield token + yield from filter_.filter(lexer, stream) for filter_ in filters: stream = _apply(filter_, stream) return stream @@ -40,7 +39,7 @@ def lowercase(self, lexer, stream, options): }) -class Filter(object): +class Filter: """ Default filter. Subclass this class or use the `simplefilter` decorator to create own filters. @@ -70,5 +69,4 @@ def __init__(self, **options): def filter(self, lexer, stream): # pylint: disable=not-callable - for ttype, value in self.function(lexer, stream, self.options): - yield ttype, value + yield from self.function(lexer, stream, self.options) diff --git a/src/typecode/_vendor/pygments/filters/__init__.py b/src/typecode/_vendor/pygments/filters/__init__.py index 514bbe9..828f0b2 100644 --- a/src/typecode/_vendor/pygments/filters/__init__.py +++ b/src/typecode/_vendor/pygments/filters/__init__.py @@ -6,7 +6,7 @@ Module containing filter lookup functions and default filters. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -16,7 +16,7 @@ string_to_tokentype from typecode._vendor.pygments.filter import Filter from typecode._vendor.pygments.util import get_list_opt, get_int_opt, get_bool_opt, \ - get_choice_opt, ClassNotFound, OptionError, text_type, string_types + get_choice_opt, ClassNotFound, OptionError from typecode._vendor.pygments.plugin import find_plugin_filters @@ -45,8 +45,7 @@ def get_filter_by_name(filtername, **options): def get_all_filters(): """Return a generator of all filter names.""" - for name in FILTERS: - yield name + yield from FILTERS for name, _ in find_plugin_filters(): yield name @@ -88,9 +87,597 @@ def filter(self, lexer, stream): if ttype in String.Doc or \ ttype in Comment and \ ttype not in Comment.Preproc: - for sttype, svalue in _replace_special(ttype, value, regex, - Comment.Special): - yield sttype, svalue + yield from _replace_special(ttype, value, regex, Comment.Special) + else: + yield ttype, value + + +class SymbolFilter(Filter): + """Convert mathematical symbols such as \\ in Isabelle + or \\longrightarrow in LaTeX into Unicode characters. + + This is mostly useful for HTML or console output when you want to + approximate the source rendering you'd see in an IDE. + + Options accepted: + + `lang` : string + The symbol language. Must be one of ``'isabelle'`` or + ``'latex'``. The default is ``'isabelle'``. 
+ """ + + latex_symbols = { + '\\alpha' : '\U000003b1', + '\\beta' : '\U000003b2', + '\\gamma' : '\U000003b3', + '\\delta' : '\U000003b4', + '\\varepsilon' : '\U000003b5', + '\\zeta' : '\U000003b6', + '\\eta' : '\U000003b7', + '\\vartheta' : '\U000003b8', + '\\iota' : '\U000003b9', + '\\kappa' : '\U000003ba', + '\\lambda' : '\U000003bb', + '\\mu' : '\U000003bc', + '\\nu' : '\U000003bd', + '\\xi' : '\U000003be', + '\\pi' : '\U000003c0', + '\\varrho' : '\U000003c1', + '\\sigma' : '\U000003c3', + '\\tau' : '\U000003c4', + '\\upsilon' : '\U000003c5', + '\\varphi' : '\U000003c6', + '\\chi' : '\U000003c7', + '\\psi' : '\U000003c8', + '\\omega' : '\U000003c9', + '\\Gamma' : '\U00000393', + '\\Delta' : '\U00000394', + '\\Theta' : '\U00000398', + '\\Lambda' : '\U0000039b', + '\\Xi' : '\U0000039e', + '\\Pi' : '\U000003a0', + '\\Sigma' : '\U000003a3', + '\\Upsilon' : '\U000003a5', + '\\Phi' : '\U000003a6', + '\\Psi' : '\U000003a8', + '\\Omega' : '\U000003a9', + '\\leftarrow' : '\U00002190', + '\\longleftarrow' : '\U000027f5', + '\\rightarrow' : '\U00002192', + '\\longrightarrow' : '\U000027f6', + '\\Leftarrow' : '\U000021d0', + '\\Longleftarrow' : '\U000027f8', + '\\Rightarrow' : '\U000021d2', + '\\Longrightarrow' : '\U000027f9', + '\\leftrightarrow' : '\U00002194', + '\\longleftrightarrow' : '\U000027f7', + '\\Leftrightarrow' : '\U000021d4', + '\\Longleftrightarrow' : '\U000027fa', + '\\mapsto' : '\U000021a6', + '\\longmapsto' : '\U000027fc', + '\\relbar' : '\U00002500', + '\\Relbar' : '\U00002550', + '\\hookleftarrow' : '\U000021a9', + '\\hookrightarrow' : '\U000021aa', + '\\leftharpoondown' : '\U000021bd', + '\\rightharpoondown' : '\U000021c1', + '\\leftharpoonup' : '\U000021bc', + '\\rightharpoonup' : '\U000021c0', + '\\rightleftharpoons' : '\U000021cc', + '\\leadsto' : '\U0000219d', + '\\downharpoonleft' : '\U000021c3', + '\\downharpoonright' : '\U000021c2', + '\\upharpoonleft' : '\U000021bf', + '\\upharpoonright' : '\U000021be', + '\\restriction' : '\U000021be', + 
'\\uparrow' : '\U00002191', + '\\Uparrow' : '\U000021d1', + '\\downarrow' : '\U00002193', + '\\Downarrow' : '\U000021d3', + '\\updownarrow' : '\U00002195', + '\\Updownarrow' : '\U000021d5', + '\\langle' : '\U000027e8', + '\\rangle' : '\U000027e9', + '\\lceil' : '\U00002308', + '\\rceil' : '\U00002309', + '\\lfloor' : '\U0000230a', + '\\rfloor' : '\U0000230b', + '\\flqq' : '\U000000ab', + '\\frqq' : '\U000000bb', + '\\bot' : '\U000022a5', + '\\top' : '\U000022a4', + '\\wedge' : '\U00002227', + '\\bigwedge' : '\U000022c0', + '\\vee' : '\U00002228', + '\\bigvee' : '\U000022c1', + '\\forall' : '\U00002200', + '\\exists' : '\U00002203', + '\\nexists' : '\U00002204', + '\\neg' : '\U000000ac', + '\\Box' : '\U000025a1', + '\\Diamond' : '\U000025c7', + '\\vdash' : '\U000022a2', + '\\models' : '\U000022a8', + '\\dashv' : '\U000022a3', + '\\surd' : '\U0000221a', + '\\le' : '\U00002264', + '\\ge' : '\U00002265', + '\\ll' : '\U0000226a', + '\\gg' : '\U0000226b', + '\\lesssim' : '\U00002272', + '\\gtrsim' : '\U00002273', + '\\lessapprox' : '\U00002a85', + '\\gtrapprox' : '\U00002a86', + '\\in' : '\U00002208', + '\\notin' : '\U00002209', + '\\subset' : '\U00002282', + '\\supset' : '\U00002283', + '\\subseteq' : '\U00002286', + '\\supseteq' : '\U00002287', + '\\sqsubset' : '\U0000228f', + '\\sqsupset' : '\U00002290', + '\\sqsubseteq' : '\U00002291', + '\\sqsupseteq' : '\U00002292', + '\\cap' : '\U00002229', + '\\bigcap' : '\U000022c2', + '\\cup' : '\U0000222a', + '\\bigcup' : '\U000022c3', + '\\sqcup' : '\U00002294', + '\\bigsqcup' : '\U00002a06', + '\\sqcap' : '\U00002293', + '\\Bigsqcap' : '\U00002a05', + '\\setminus' : '\U00002216', + '\\propto' : '\U0000221d', + '\\uplus' : '\U0000228e', + '\\bigplus' : '\U00002a04', + '\\sim' : '\U0000223c', + '\\doteq' : '\U00002250', + '\\simeq' : '\U00002243', + '\\approx' : '\U00002248', + '\\asymp' : '\U0000224d', + '\\cong' : '\U00002245', + '\\equiv' : '\U00002261', + '\\Join' : '\U000022c8', + '\\bowtie' : '\U00002a1d', + '\\prec' : 
'\U0000227a', + '\\succ' : '\U0000227b', + '\\preceq' : '\U0000227c', + '\\succeq' : '\U0000227d', + '\\parallel' : '\U00002225', + '\\mid' : '\U000000a6', + '\\pm' : '\U000000b1', + '\\mp' : '\U00002213', + '\\times' : '\U000000d7', + '\\div' : '\U000000f7', + '\\cdot' : '\U000022c5', + '\\star' : '\U000022c6', + '\\circ' : '\U00002218', + '\\dagger' : '\U00002020', + '\\ddagger' : '\U00002021', + '\\lhd' : '\U000022b2', + '\\rhd' : '\U000022b3', + '\\unlhd' : '\U000022b4', + '\\unrhd' : '\U000022b5', + '\\triangleleft' : '\U000025c3', + '\\triangleright' : '\U000025b9', + '\\triangle' : '\U000025b3', + '\\triangleq' : '\U0000225c', + '\\oplus' : '\U00002295', + '\\bigoplus' : '\U00002a01', + '\\otimes' : '\U00002297', + '\\bigotimes' : '\U00002a02', + '\\odot' : '\U00002299', + '\\bigodot' : '\U00002a00', + '\\ominus' : '\U00002296', + '\\oslash' : '\U00002298', + '\\dots' : '\U00002026', + '\\cdots' : '\U000022ef', + '\\sum' : '\U00002211', + '\\prod' : '\U0000220f', + '\\coprod' : '\U00002210', + '\\infty' : '\U0000221e', + '\\int' : '\U0000222b', + '\\oint' : '\U0000222e', + '\\clubsuit' : '\U00002663', + '\\diamondsuit' : '\U00002662', + '\\heartsuit' : '\U00002661', + '\\spadesuit' : '\U00002660', + '\\aleph' : '\U00002135', + '\\emptyset' : '\U00002205', + '\\nabla' : '\U00002207', + '\\partial' : '\U00002202', + '\\flat' : '\U0000266d', + '\\natural' : '\U0000266e', + '\\sharp' : '\U0000266f', + '\\angle' : '\U00002220', + '\\copyright' : '\U000000a9', + '\\textregistered' : '\U000000ae', + '\\textonequarter' : '\U000000bc', + '\\textonehalf' : '\U000000bd', + '\\textthreequarters' : '\U000000be', + '\\textordfeminine' : '\U000000aa', + '\\textordmasculine' : '\U000000ba', + '\\euro' : '\U000020ac', + '\\pounds' : '\U000000a3', + '\\yen' : '\U000000a5', + '\\textcent' : '\U000000a2', + '\\textcurrency' : '\U000000a4', + '\\textdegree' : '\U000000b0', + } + + isabelle_symbols = { + '\\' : '\U0001d7ec', + '\\' : '\U0001d7ed', + '\\' : '\U0001d7ee', + '\\' : 
'\U0001d7ef', + '\\' : '\U0001d7f0', + '\\' : '\U0001d7f1', + '\\' : '\U0001d7f2', + '\\' : '\U0001d7f3', + '\\' : '\U0001d7f4', + '\\' : '\U0001d7f5', + '\\' : '\U0001d49c', + '\\' : '\U0000212c', + '\\' : '\U0001d49e', + '\\' : '\U0001d49f', + '\\' : '\U00002130', + '\\' : '\U00002131', + '\\' : '\U0001d4a2', + '\\' : '\U0000210b', + '\\' : '\U00002110', + '\\' : '\U0001d4a5', + '\\' : '\U0001d4a6', + '\\' : '\U00002112', + '\\' : '\U00002133', + '\\' : '\U0001d4a9', + '\\' : '\U0001d4aa', + '\\

' : '\U0001d5c9', + '\\' : '\U0001d5ca', + '\\' : '\U0001d5cb', + '\\' : '\U0001d5cc', + '\\' : '\U0001d5cd', + '\\' : '\U0001d5ce', + '\\' : '\U0001d5cf', + '\\' : '\U0001d5d0', + '\\' : '\U0001d5d1', + '\\' : '\U0001d5d2', + '\\' : '\U0001d5d3', + '\\' : '\U0001d504', + '\\' : '\U0001d505', + '\\' : '\U0000212d', + '\\

' : '\U0001d507', + '\\' : '\U0001d508', + '\\' : '\U0001d509', + '\\' : '\U0001d50a', + '\\' : '\U0000210c', + '\\' : '\U00002111', + '\\' : '\U0001d50d', + '\\' : '\U0001d50e', + '\\' : '\U0001d50f', + '\\' : '\U0001d510', + '\\' : '\U0001d511', + '\\' : '\U0001d512', + '\\' : '\U0001d513', + '\\' : '\U0001d514', + '\\' : '\U0000211c', + '\\' : '\U0001d516', + '\\' : '\U0001d517', + '\\' : '\U0001d518', + '\\' : '\U0001d519', + '\\' : '\U0001d51a', + '\\' : '\U0001d51b', + '\\' : '\U0001d51c', + '\\' : '\U00002128', + '\\' : '\U0001d51e', + '\\' : '\U0001d51f', + '\\' : '\U0001d520', + '\\
' : '\U0001d521', + '\\' : '\U0001d522', + '\\' : '\U0001d523', + '\\' : '\U0001d524', + '\\' : '\U0001d525', + '\\' : '\U0001d526', + '\\' : '\U0001d527', + '\\' : '\U0001d528', + '\\' : '\U0001d529', + '\\' : '\U0001d52a', + '\\' : '\U0001d52b', + '\\' : '\U0001d52c', + '\\' : '\U0001d52d', + '\\' : '\U0001d52e', + '\\' : '\U0001d52f', + '\\' : '\U0001d530', + '\\' : '\U0001d531', + '\\' : '\U0001d532', + '\\' : '\U0001d533', + '\\' : '\U0001d534', + '\\' : '\U0001d535', + '\\' : '\U0001d536', + '\\' : '\U0001d537', + '\\' : '\U000003b1', + '\\' : '\U000003b2', + '\\' : '\U000003b3', + '\\' : '\U000003b4', + '\\' : '\U000003b5', + '\\' : '\U000003b6', + '\\' : '\U000003b7', + '\\' : '\U000003b8', + '\\' : '\U000003b9', + '\\' : '\U000003ba', + '\\' : '\U000003bb', + '\\' : '\U000003bc', + '\\' : '\U000003bd', + '\\' : '\U000003be', + '\\' : '\U000003c0', + '\\' : '\U000003c1', + '\\' : '\U000003c3', + '\\' : '\U000003c4', + '\\' : '\U000003c5', + '\\' : '\U000003c6', + '\\' : '\U000003c7', + '\\' : '\U000003c8', + '\\' : '\U000003c9', + '\\' : '\U00000393', + '\\' : '\U00000394', + '\\' : '\U00000398', + '\\' : '\U0000039b', + '\\' : '\U0000039e', + '\\' : '\U000003a0', + '\\' : '\U000003a3', + '\\' : '\U000003a5', + '\\' : '\U000003a6', + '\\' : '\U000003a8', + '\\' : '\U000003a9', + '\\' : '\U0001d539', + '\\' : '\U00002102', + '\\' : '\U00002115', + '\\' : '\U0000211a', + '\\' : '\U0000211d', + '\\' : '\U00002124', + '\\' : '\U00002190', + '\\' : '\U000027f5', + '\\' : '\U00002192', + '\\' : '\U000027f6', + '\\' : '\U000021d0', + '\\' : '\U000027f8', + '\\' : '\U000021d2', + '\\' : '\U000027f9', + '\\' : '\U00002194', + '\\' : '\U000027f7', + '\\' : '\U000021d4', + '\\' : '\U000027fa', + '\\' : '\U000021a6', + '\\' : '\U000027fc', + '\\' : '\U00002500', + '\\' : '\U00002550', + '\\' : '\U000021a9', + '\\' : '\U000021aa', + '\\' : '\U000021bd', + '\\' : '\U000021c1', + '\\' : '\U000021bc', + '\\' : '\U000021c0', + '\\' : '\U000021cc', + '\\' : '\U0000219d', + 
'\\' : '\U000021c3', + '\\' : '\U000021c2', + '\\' : '\U000021bf', + '\\' : '\U000021be', + '\\' : '\U000021be', + '\\' : '\U00002237', + '\\' : '\U00002191', + '\\' : '\U000021d1', + '\\' : '\U00002193', + '\\' : '\U000021d3', + '\\' : '\U00002195', + '\\' : '\U000021d5', + '\\' : '\U000027e8', + '\\' : '\U000027e9', + '\\' : '\U00002308', + '\\' : '\U00002309', + '\\' : '\U0000230a', + '\\' : '\U0000230b', + '\\' : '\U00002987', + '\\' : '\U00002988', + '\\' : '\U000027e6', + '\\' : '\U000027e7', + '\\' : '\U00002983', + '\\' : '\U00002984', + '\\' : '\U000000ab', + '\\' : '\U000000bb', + '\\' : '\U000022a5', + '\\' : '\U000022a4', + '\\' : '\U00002227', + '\\' : '\U000022c0', + '\\' : '\U00002228', + '\\' : '\U000022c1', + '\\' : '\U00002200', + '\\' : '\U00002203', + '\\' : '\U00002204', + '\\' : '\U000000ac', + '\\' : '\U000025a1', + '\\' : '\U000025c7', + '\\' : '\U000022a2', + '\\' : '\U000022a8', + '\\' : '\U000022a9', + '\\' : '\U000022ab', + '\\' : '\U000022a3', + '\\' : '\U0000221a', + '\\' : '\U00002264', + '\\' : '\U00002265', + '\\' : '\U0000226a', + '\\' : '\U0000226b', + '\\' : '\U00002272', + '\\' : '\U00002273', + '\\' : '\U00002a85', + '\\' : '\U00002a86', + '\\' : '\U00002208', + '\\' : '\U00002209', + '\\' : '\U00002282', + '\\' : '\U00002283', + '\\' : '\U00002286', + '\\' : '\U00002287', + '\\' : '\U0000228f', + '\\' : '\U00002290', + '\\' : '\U00002291', + '\\' : '\U00002292', + '\\' : '\U00002229', + '\\' : '\U000022c2', + '\\' : '\U0000222a', + '\\' : '\U000022c3', + '\\' : '\U00002294', + '\\' : '\U00002a06', + '\\' : '\U00002293', + '\\' : '\U00002a05', + '\\' : '\U00002216', + '\\' : '\U0000221d', + '\\' : '\U0000228e', + '\\' : '\U00002a04', + '\\' : '\U00002260', + '\\' : '\U0000223c', + '\\' : '\U00002250', + '\\' : '\U00002243', + '\\' : '\U00002248', + '\\' : '\U0000224d', + '\\' : '\U00002245', + '\\' : '\U00002323', + '\\' : '\U00002261', + '\\' : '\U00002322', + '\\' : '\U000022c8', + '\\' : '\U00002a1d', + '\\' : '\U0000227a', 
+ '\\' : '\U0000227b', + '\\' : '\U0000227c', + '\\' : '\U0000227d', + '\\' : '\U00002225', + '\\' : '\U000000a6', + '\\' : '\U000000b1', + '\\' : '\U00002213', + '\\' : '\U000000d7', + '\\
' : '\U000000f7', + '\\' : '\U000022c5', + '\\' : '\U000022c6', + '\\' : '\U00002219', + '\\' : '\U00002218', + '\\' : '\U00002020', + '\\' : '\U00002021', + '\\' : '\U000022b2', + '\\' : '\U000022b3', + '\\' : '\U000022b4', + '\\' : '\U000022b5', + '\\' : '\U000025c3', + '\\' : '\U000025b9', + '\\' : '\U000025b3', + '\\' : '\U0000225c', + '\\' : '\U00002295', + '\\' : '\U00002a01', + '\\' : '\U00002297', + '\\' : '\U00002a02', + '\\' : '\U00002299', + '\\' : '\U00002a00', + '\\' : '\U00002296', + '\\' : '\U00002298', + '\\' : '\U00002026', + '\\' : '\U000022ef', + '\\' : '\U00002211', + '\\' : '\U0000220f', + '\\' : '\U00002210', + '\\' : '\U0000221e', + '\\' : '\U0000222b', + '\\' : '\U0000222e', + '\\' : '\U00002663', + '\\' : '\U00002662', + '\\' : '\U00002661', + '\\' : '\U00002660', + '\\' : '\U00002135', + '\\' : '\U00002205', + '\\' : '\U00002207', + '\\' : '\U00002202', + '\\' : '\U0000266d', + '\\' : '\U0000266e', + '\\' : '\U0000266f', + '\\' : '\U00002220', + '\\' : '\U000000a9', + '\\' : '\U000000ae', + '\\' : '\U000000ad', + '\\' : '\U000000af', + '\\' : '\U000000bc', + '\\' : '\U000000bd', + '\\' : '\U000000be', + '\\' : '\U000000aa', + '\\' : '\U000000ba', + '\\
' : '\U000000a7', + '\\' : '\U000000b6', + '\\' : '\U000000a1', + '\\' : '\U000000bf', + '\\' : '\U000020ac', + '\\' : '\U000000a3', + '\\' : '\U000000a5', + '\\' : '\U000000a2', + '\\' : '\U000000a4', + '\\' : '\U000000b0', + '\\' : '\U00002a3f', + '\\' : '\U00002127', + '\\' : '\U000025ca', + '\\' : '\U00002118', + '\\' : '\U00002240', + '\\' : '\U000022c4', + '\\' : '\U000000b4', + '\\' : '\U00000131', + '\\' : '\U000000a8', + '\\' : '\U000000b8', + '\\' : '\U000002dd', + '\\' : '\U000003f5', + '\\' : '\U000023ce', + '\\' : '\U00002039', + '\\' : '\U0000203a', + '\\' : '\U00002302', + '\\<^sub>' : '\U000021e9', + '\\<^sup>' : '\U000021e7', + '\\<^bold>' : '\U00002759', + '\\<^bsub>' : '\U000021d8', + '\\<^esub>' : '\U000021d9', + '\\<^bsup>' : '\U000021d7', + '\\<^esup>' : '\U000021d6', + } + + lang_map = {'isabelle' : isabelle_symbols, 'latex' : latex_symbols} + + def __init__(self, **options): + Filter.__init__(self, **options) + lang = get_choice_opt(options, 'lang', + ['isabelle', 'latex'], 'isabelle') + self.symbols = self.lang_map[lang] + + def filter(self, lexer, stream): + for ttype, value in stream: + if value in self.symbols: + yield ttype, self.symbols[value] else: yield ttype, value @@ -113,7 +700,7 @@ def __init__(self, **options): Filter.__init__(self, **options) case = get_choice_opt(options, 'case', ['lower', 'upper', 'capitalize'], 'lower') - self.convert = getattr(text_type, case) + self.convert = getattr(str, case) def filter(self, lexer, stream): for ttype, value in stream: @@ -229,11 +816,11 @@ class VisibleWhitespaceFilter(Filter): def __init__(self, **options): Filter.__init__(self, **options) - for name, default in [('spaces', u'·'), - ('tabs', u'»'), - ('newlines', u'¶')]: + for name, default in [('spaces', '·'), + ('tabs', '»'), + ('newlines', '¶')]: opt = options.get(name, False) - if isinstance(opt, string_types) and len(opt) == 1: + if isinstance(opt, str) and len(opt) == 1: setattr(self, name, opt) else: setattr(self, name, (opt and 
default or '')) @@ -246,10 +833,11 @@ def __init__(self, **options): def filter(self, lexer, stream): if self.wstt: - spaces = self.spaces or u' ' - tabs = self.tabs or u'\t' - newlines = self.newlines or u'\n' + spaces = self.spaces or ' ' + tabs = self.tabs or '\t' + newlines = self.newlines or '\n' regex = re.compile(r'\s') + def replacefunc(wschar): if wschar == ' ': return spaces @@ -260,9 +848,8 @@ def replacefunc(wschar): return wschar for ttype, value in stream: - for sttype, svalue in _replace_special(ttype, value, regex, - Whitespace, replacefunc): - yield sttype, svalue + yield from _replace_special(ttype, value, regex, Whitespace, + replacefunc) else: spaces, tabs, newlines = self.spaces, self.tabs, self.newlines # simpler processing @@ -298,18 +885,18 @@ def gobble(self, value, left): if left < len(value): return value[left:], 0 else: - return u'', left - len(value) + return '', left - len(value) def filter(self, lexer, stream): n = self.n - left = n # How many characters left to gobble. + left = n # How many characters left to gobble. for ttype, value in stream: # Remove ``left`` tokens from first line, ``n`` from all others. parts = value.split('\n') (parts[0], left) = self.gobble(parts[0], left) for i in range(1, len(parts)): (parts[i], left) = self.gobble(parts[i], n) - value = u'\n'.join(parts) + value = '\n'.join(parts) if value != '': yield ttype, value @@ -347,4 +934,5 @@ def filter(self, lexer, stream): 'whitespace': VisibleWhitespaceFilter, 'gobble': GobbleFilter, 'tokenmerge': TokenMergeFilter, + 'symbols': SymbolFilter, } diff --git a/src/typecode/_vendor/pygments/formatter.py b/src/typecode/_vendor/pygments/formatter.py index 325fc1e..d35f918 100644 --- a/src/typecode/_vendor/pygments/formatter.py +++ b/src/typecode/_vendor/pygments/formatter.py @@ -5,25 +5,25 @@ Base formatter class. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. 
:license: BSD, see LICENSE for details. """ import codecs -from typecode._vendor.pygments.util import get_bool_opt, string_types +from typecode._vendor.pygments.util import get_bool_opt from typecode._vendor.pygments.styles import get_style_by_name __all__ = ['Formatter'] def _lookup_style(style): - if isinstance(style, string_types): + if isinstance(style, str): return get_style_by_name(style) return style -class Formatter(object): +class Formatter: """ Converts a token stream to text. diff --git a/src/typecode/_vendor/pygments/formatters/__init__.py b/src/typecode/_vendor/pygments/formatters/__init__.py index ad64544..a7dfb2c 100644 --- a/src/typecode/_vendor/pygments/formatters/__init__.py +++ b/src/typecode/_vendor/pygments/formatters/__init__.py @@ -5,7 +5,7 @@ Pygments formatters. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -17,7 +17,7 @@ from typecode._vendor.pygments.formatters._mapping import FORMATTERS from typecode._vendor.pygments.plugin import find_plugin_formatters -from typecode._vendor.pygments.util import ClassNotFound, itervalues +from typecode._vendor.pygments.util import ClassNotFound __all__ = ['get_formatter_by_name', 'get_formatter_for_filename', 'get_all_formatters', 'load_formatter_from_file'] + list(FORMATTERS) @@ -45,7 +45,7 @@ def _load_formatters(module_name): def get_all_formatters(): """Return a generator for all formatter classes.""" # NB: this returns formatter classes, not info like get_all_lexers(). - for info in itervalues(FORMATTERS): + for info in FORMATTERS.values(): if info[1] not in _formatter_cache: _load_formatters(info[0]) yield _formatter_cache[info[1]] @@ -58,7 +58,7 @@ def find_formatter_class(alias): Returns None if not found. 
""" - for module_name, name, aliases, _, _ in itervalues(FORMATTERS): + for module_name, name, aliases, _, _ in FORMATTERS.values(): if alias in aliases: if name not in _formatter_cache: _load_formatters(module_name) @@ -108,8 +108,8 @@ def load_formatter_from_file(filename, formattername="CustomFormatter", # And finally instantiate it with the options return formatter_class(**options) except IOError as err: - raise ClassNotFound('cannot read %s' % filename) - except ClassNotFound as err: + raise ClassNotFound('cannot read %s: %s' % (filename, err)) + except ClassNotFound: raise except Exception as err: raise ClassNotFound('error when loading custom formatter: %s' % err) @@ -121,7 +121,7 @@ def get_formatter_for_filename(fn, **options): Raises ClassNotFound if not found. """ fn = basename(fn) - for modname, name, _, filenames, _ in itervalues(FORMATTERS): + for modname, name, _, filenames, _ in FORMATTERS.values(): for filename in filenames: if _fn_matches(fn, filename): if name not in _formatter_cache: diff --git a/src/typecode/_vendor/pygments/formatters/_mapping.py b/src/typecode/_vendor/pygments/formatters/_mapping.py index b700b5c..a310bb5 100644 --- a/src/typecode/_vendor/pygments/formatters/_mapping.py +++ b/src/typecode/_vendor/pygments/formatters/_mapping.py @@ -9,12 +9,10 @@ Do not alter the FORMATTERS dictionary by hand. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ -from __future__ import print_function - FORMATTERS = { 'BBCodeFormatter': ('pygments.formatters.bbcode', 'BBCode', ('bbcode', 'bb'), (), 'Format tokens with BBcodes. These formatting codes are used by many bulletin boards, so you can highlight your sourcecode with pygments before posting it there.'), 'BmpImageFormatter': ('pygments.formatters.img', 'img_bmp', ('bmp', 'bitmap'), ('*.bmp',), 'Create a bitmap image from source code. 
This uses the Python Imaging Library to generate a pixmap from the source code.'), diff --git a/src/typecode/_vendor/pygments/formatters/bbcode.py b/src/typecode/_vendor/pygments/formatters/bbcode.py index f0a1a78..a556778 100644 --- a/src/typecode/_vendor/pygments/formatters/bbcode.py +++ b/src/typecode/_vendor/pygments/formatters/bbcode.py @@ -5,7 +5,7 @@ BBcode formatter. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/formatters/html.py b/src/typecode/_vendor/pygments/formatters/html.py index 2e1f739..8c73541 100644 --- a/src/typecode/_vendor/pygments/formatters/html.py +++ b/src/typecode/_vendor/pygments/formatters/html.py @@ -5,20 +5,19 @@ Formatter for HTML output. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" -from __future__ import print_function - +import functools import os import sys import os.path +from io import StringIO from typecode._vendor.pygments.formatter import Formatter from typecode._vendor.pygments.token import Token, Text, STANDARD_TYPES -from typecode._vendor.pygments.util import get_bool_opt, get_int_opt, get_list_opt, \ - StringIO, string_types, iteritems +from typecode._vendor.pygments.util import get_bool_opt, get_int_opt, get_list_opt try: import ctags @@ -29,11 +28,11 @@ _escape_html_table = { - ord('&'): u'&', - ord('<'): u'<', - ord('>'): u'>', - ord('"'): u'"', - ord("'"): u''', + ord('&'): '&', + ord('<'): '<', + ord('>'): '>', + ord('"'): '"', + ord("'"): ''', } @@ -41,12 +40,14 @@ def escape_html(text, table=_escape_html_table): """Escape &, <, > as well as single and double quotes for HTML.""" return text.translate(table) + def webify(color): if color.startswith('calc') or color.startswith('var'): return color else: return '#' + color + def _get_ttype_class(ttype): fname = STANDARD_TYPES.get(ttype) if fname: @@ -61,13 +62,10 @@ def _get_ttype_class(ttype): CSSFILE_TEMPLATE = '''\ /* -generated by Pygments -Copyright 2006-2019 by the Pygments team. +generated by Pygments +Copyright 2006-2021 by the Pygments team. Licensed under the BSD license, see LICENSE for details. */ -td.linenos { background-color: #f0f0f0; padding-right: 10px; } -span.lineno { background-color: #f0f0f0; padding: 0 5px 0 5px; } -pre { line-height: 125%%; } %(styledefs)s ''' @@ -75,8 +73,8 @@ def _get_ttype_class(ttype): @@ -208,9 +206,10 @@ class HtmlFormatter(Formatter): `cssfile` exists. `noclasses` - If set to true, token ```` tags will not use CSS classes, but - inline styles. This is not recommended for larger pieces of code since - it increases output size by quite a bit (default: ``False``). + If set to true, token ```` tags (as well as line number elements) + will not use CSS classes, but inline styles. 
This is not recommended + for larger pieces of code since it increases output size by quite a bit + (default: ``False``). `classprefix` Since the token types use relatively short class names, they may clash @@ -416,6 +415,7 @@ def __init__(self, **options): self.tagurlformat = self._decodeifneeded(options.get('tagurlformat', '')) self.filename = self._decodeifneeded(options.get('filename', '')) self.wrapcode = get_bool_opt(options, 'wrapcode', False) + self.span_element_openers = {} if self.tagsfile: if not ctags: @@ -438,7 +438,7 @@ def __init__(self, **options): self.lineseparator = options.get('lineseparator', '\n') self.lineanchors = options.get('lineanchors', '') self.linespans = options.get('linespans', '') - self.anchorlinenos = options.get('anchorlinenos', False) + self.anchorlinenos = get_bool_opt(options, 'anchorlinenos', False) self.hl_lines = set() for lineno in get_list_opt(options, 'hl_lines', []): try: @@ -457,13 +457,20 @@ def _get_css_class(self, ttype): return '' def _get_css_classes(self, ttype): - """Return the css classes of this token type prefixed with - the classprefix option.""" + """Return the CSS classes of this token type prefixed with the classprefix option.""" cls = self._get_css_class(ttype) while ttype not in STANDARD_TYPES: ttype = ttype.parent cls = self._get_css_class(ttype) + ' ' + cls - return cls + return cls or '' + + def _get_css_inline_styles(self, ttype): + """Return the inline CSS styles for this token type.""" + cclass = self.ttype2class.get(ttype) + while cclass is None: + ttype = ttype.parent + cclass = self.ttype2class.get(ttype) + return cclass or '' def _create_stylesheet(self): t2c = self.ttype2class = {Token: ''} @@ -495,9 +502,69 @@ def get_style_defs(self, arg=None): highlighting style. ``arg`` can be a string or list of selectors to insert before the token type classes. 
""" + style_lines = [] + + style_lines.extend(self.get_linenos_style_defs()) + style_lines.extend(self.get_background_style_defs(arg)) + style_lines.extend(self.get_token_style_defs(arg)) + + return '\n'.join(style_lines) + + def get_token_style_defs(self, arg=None): + prefix = self.get_css_prefix(arg) + + styles = [ + (level, ttype, cls, style) + for cls, (style, ttype, level) in self.class2style.items() + if cls and style + ] + styles.sort() + + lines = [ + '%s { %s } /* %s */' % (prefix(cls), style, repr(ttype)[6:]) + for (level, ttype, cls, style) in styles + ] + + return lines + + def get_background_style_defs(self, arg=None): + prefix = self.get_css_prefix(arg) + bg_color = self.style.background_color + hl_color = self.style.highlight_color + + lines = [] + + if arg and not self.nobackground and bg_color is not None: + text_style = '' + if Text in self.ttype2class: + text_style = ' ' + self.class2style[self.ttype2class[Text]][0] + lines.insert( + 0, '%s{ background: %s;%s }' % ( + prefix(''), bg_color, text_style + ) + ) + if hl_color is not None: + lines.insert( + 0, '%s { background-color: %s }' % (prefix('hll'), hl_color) + ) + + return lines + + def get_linenos_style_defs(self): + lines = [ + 'pre { %s }' % self._pre_style, + 'td.linenos pre { %s }' % self._linenos_style, + 'span.linenos { %s }' % self._linenos_style, + 'td.linenos pre.special { %s }' % self._linenos_special_style, + 'span.linenos.special { %s }' % self._linenos_special_style, + ] + + return lines + + def get_css_prefix(self, arg): if arg is None: arg = ('cssclass' in self.options and '.'+self.cssclass or '') - if isinstance(arg, string_types): + if isinstance(arg, str): args = [arg] else: args = list(arg) @@ -510,23 +577,25 @@ def prefix(cls): tmp.append((arg and arg + ' ' or '') + cls) return ', '.join(tmp) - styles = [(level, ttype, cls, style) - for cls, (style, ttype, level) in iteritems(self.class2style) - if cls and style] - styles.sort() - lines = ['%s { %s } /* %s */' % 
(prefix(cls), style, repr(ttype)[6:]) - for (level, ttype, cls, style) in styles] - if arg and not self.nobackground and \ - self.style.background_color is not None: - text_style = '' - if Text in self.ttype2class: - text_style = ' ' + self.class2style[self.ttype2class[Text]][0] - lines.insert(0, '%s { background: %s;%s }' % - (prefix(''), self.style.background_color, text_style)) - if self.style.highlight_color is not None: - lines.insert(0, '%s.hll { background-color: %s }' % - (prefix(''), self.style.highlight_color)) - return '\n'.join(lines) + return prefix + + @property + def _pre_style(self): + return 'line-height: 125%;' + + @property + def _linenos_style(self): + return 'color: %s; background-color: %s; padding-left: 5px; padding-right: 5px;' % ( + self.style.line_number_color, + self.style.line_number_background_color + ) + + @property + def _linenos_special_style(self): + return 'color: %s; background-color: %s; padding-left: 5px; padding-right: 5px;' % ( + self.style.line_number_special_color, + self.style.line_number_special_background_color + ) def _decodeifneeded(self, value): if isinstance(value, bytes): @@ -573,8 +642,7 @@ def _wrap_full(self, inner, outfile): styledefs=self.get_style_defs('body'), encoding=self.encoding)) - for t, line in inner: - yield t, line + yield from inner yield 0, DOC_FOOTER def _wrap_tablelinenos(self, inner): @@ -592,88 +660,83 @@ def _wrap_tablelinenos(self, inner): la = self.lineanchors aln = self.anchorlinenos nocls = self.noclasses - if sp: - lines = [] - - for i in range(fl, fl+lncount): - if i % st == 0: - if i % sp == 0: - if aln: - lines.append('%*d' % - (la, i, mw, i)) - else: - lines.append('%*d' % (mw, i)) - else: - if aln: - lines.append('%*d' % (la, i, mw, i)) - else: - lines.append('%*d' % (mw, i)) + + lines = [] + + for i in range(fl, fl+lncount): + print_line = i % st == 0 + special_line = sp and i % sp == 0 + + if print_line: + line = '%*d' % (mw, i) + if aln: + line = '%s' % (la, i, line) + else: + line 
= ' ' * mw + + if nocls: + if special_line: + style = ' style="%s"' % self._linenos_special_style else: - lines.append('') - ls = '\n'.join(lines) - else: - lines = [] - for i in range(fl, fl+lncount): - if i % st == 0: - if aln: - lines.append('%*d' % (la, i, mw, i)) - else: - lines.append('%*d' % (mw, i)) + style = ' style="%s"' % self._linenos_style + else: + if special_line: + style = ' class="special"' else: - lines.append('') - ls = '\n'.join(lines) + style = '' + + if style: + line = '%s' % (style, line) + + lines.append(line) + + ls = '\n'.join(lines) # in case you wonder about the seemingly redundant
here: since the # content in the other cell also is wrapped in a div, some browsers in # some configurations seem to mess up the formatting... - if nocls: - yield 0, ('' % self.cssclass + - '
' - '
' +
-                      ls + '
') - else: - yield 0, ('' % self.cssclass + - '
' +
-                      ls + '
') + yield 0, ( + '' % self.cssclass + + '
' +
+            ls + '
' + ) yield 0, dummyoutfile.getvalue() yield 0, '
' def _wrap_inlinelinenos(self, inner): # need a list of lines since we need the width of a single number :( - lines = list(inner) + inner_lines = list(inner) sp = self.linenospecial st = self.linenostep num = self.linenostart - mw = len(str(len(lines) + num - 1)) + mw = len(str(len(inner_lines) + num - 1)) + nocls = self.noclasses - if self.noclasses: - if sp: - for t, line in lines: - if num % sp == 0: - style = 'background-color: #ffffc0; padding: 0 5px 0 5px' - else: - style = 'background-color: #f0f0f0; padding: 0 5px 0 5px' - yield 1, '%*s ' % ( - style, mw, (num % st and ' ' or num)) + line - num += 1 + for _, inner_line in inner_lines: + print_line = num % st == 0 + special_line = sp and num % sp == 0 + + if print_line: + line = '%*d' % (mw, num) else: - for t, line in lines: - yield 1, ('%*s ' % ( - mw, (num % st and ' ' or num)) + line) - num += 1 - elif sp: - for t, line in lines: - yield 1, '%*s ' % ( - num % sp == 0 and ' special' or '', mw, - (num % st and ' ' or num)) + line - num += 1 - else: - for t, line in lines: - yield 1, '%*s ' % ( - mw, (num % st and ' ' or num)) + line - num += 1 + line = ' ' * mw + + if nocls: + if special_line: + style = ' style="%s"' % self._linenos_special_style + else: + style = ' style="%s"' % self._linenos_style + else: + if special_line: + style = ' class="linenos special"' + else: + style = ' class="linenos"' + + if style: + yield 1, '%s' % (style, line) + inner_line + else: + yield 1, line + inner_line + num += 1 def _wrap_lineanchors(self, inner): s = self.lineanchors @@ -707,8 +770,7 @@ def _wrap_div(self, inner): yield 0, ('') - for tup in inner: - yield tup + yield from inner yield 0, '\n' def _wrap_pre(self, inner): @@ -716,7 +778,7 @@ def _wrap_pre(self, inner): if self.prestyles: style.append(self.prestyles) if self.noclasses: - style.append('line-height: 125%') + style.append(self._pre_style) style = '; '.join(style) if self.filename: @@ -725,16 +787,19 @@ def _wrap_pre(self, inner): # the empty span here 
is to keep leading empty lines from being # ignored by HTML parsers yield 0, ('') - for tup in inner: - yield tup + yield from inner yield 0, '' def _wrap_code(self, inner): yield 0, '' - for tup in inner: - yield tup + yield from inner yield 0, '' + @functools.lru_cache(maxsize=100) + def _translate_parts(self, value): + """HTML-escape a value and split it by newlines.""" + return value.translate(_escape_html_table).split('\n') + def _format_lines(self, tokensource): """ Just format the tokens, without any wrapping tags. @@ -742,26 +807,23 @@ def _format_lines(self, tokensource): """ nocls = self.noclasses lsep = self.lineseparator - # for lookup only - getcls = self.ttype2class.get - c2s = self.class2style - escape_table = _escape_html_table tagsfile = self.tagsfile lspan = '' line = [] for ttype, value in tokensource: - if nocls: - cclass = getcls(ttype) - while cclass is None: - ttype = ttype.parent - cclass = getcls(ttype) - cspan = cclass and '' % c2s[cclass][0] or '' - else: - cls = self._get_css_classes(ttype) - cspan = cls and '' % cls or '' + try: + cspan = self.span_element_openers[ttype] + except KeyError: + if nocls: + css_style = self._get_css_inline_styles(ttype) + cspan = css_style and '' % self.class2style[css_style][0] or '' + else: + css_class = self._get_css_classes(ttype) + cspan = css_class and '' % css_class or '' + self.span_element_openers[ttype] = cspan - parts = value.translate(escape_table).split('\n') + parts = self._translate_parts(value) if tagsfile and ttype in Token.Name: filename, linenumber = self._lookup_ctag(value) diff --git a/src/typecode/_vendor/pygments/formatters/img.py b/src/typecode/_vendor/pygments/formatters/img.py index c640147..7479cf6 100644 --- a/src/typecode/_vendor/pygments/formatters/img.py +++ b/src/typecode/_vendor/pygments/formatters/img.py @@ -5,7 +5,7 @@ Formatter for Pixmap output. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. 
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -14,7 +14,7 @@ from typecode._vendor.pygments.formatter import Formatter from typecode._vendor.pygments.util import get_bool_opt, get_int_opt, get_list_opt, \ - get_choice_opt, xrange + get_choice_opt import subprocess @@ -46,9 +46,9 @@ } # A sane default for modern systems -DEFAULT_FONT_NAME_NIX = 'Bitstream Vera Sans Mono' +DEFAULT_FONT_NAME_NIX = 'DejaVu Sans Mono' DEFAULT_FONT_NAME_WIN = 'Courier New' -DEFAULT_FONT_NAME_MAC = 'Courier New' +DEFAULT_FONT_NAME_MAC = 'Menlo' class PilNotAvailable(ImportError): @@ -59,7 +59,7 @@ class FontNotFound(Exception): """When there are no usable fonts specified""" -class FontManager(object): +class FontManager: """ Manages a set of fonts: normal, italic, bold, etc... """ @@ -125,8 +125,9 @@ def _create_mac(self): for font_dir in (os.path.join(os.getenv("HOME"), 'Library/Fonts/'), '/Library/Fonts/', '/System/Library/Fonts/'): font_map.update( - ((os.path.splitext(f)[0].lower(), os.path.join(font_dir, f)) - for f in os.listdir(font_dir) if f.lower().endswith('ttf'))) + (os.path.splitext(f)[0].lower(), os.path.join(font_dir, f)) + for f in os.listdir(font_dir) + if f.lower().endswith(('ttf', 'ttc'))) for name in STYLES['NORMAL']: path = self._get_mac_font_path(font_map, self.font_name, name) @@ -164,31 +165,43 @@ def _lookup_win(self, key, basename, styles, fail=False): return None def _create_win(self): - try: - key = _winreg.OpenKey( - _winreg.HKEY_LOCAL_MACHINE, - r'Software\Microsoft\Windows NT\CurrentVersion\Fonts') - except EnvironmentError: + lookuperror = None + keynames = [ (_winreg.HKEY_CURRENT_USER, r'Software\Microsoft\Windows NT\CurrentVersion\Fonts'), + (_winreg.HKEY_CURRENT_USER, r'Software\Microsoft\Windows\CurrentVersion\Fonts'), + (_winreg.HKEY_LOCAL_MACHINE, r'Software\Microsoft\Windows NT\CurrentVersion\Fonts'), + (_winreg.HKEY_LOCAL_MACHINE, r'Software\Microsoft\Windows\CurrentVersion\Fonts') 
] + for keyname in keynames: try: - key = _winreg.OpenKey( - _winreg.HKEY_LOCAL_MACHINE, - r'Software\Microsoft\Windows\CurrentVersion\Fonts') + key = _winreg.OpenKey(*keyname) + try: + path = self._lookup_win(key, self.font_name, STYLES['NORMAL'], True) + self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size) + for style in ('ITALIC', 'BOLD', 'BOLDITALIC'): + path = self._lookup_win(key, self.font_name, STYLES[style]) + if path: + self.fonts[style] = ImageFont.truetype(path, self.font_size) + else: + if style == 'BOLDITALIC': + self.fonts[style] = self.fonts['BOLD'] + else: + self.fonts[style] = self.fonts['NORMAL'] + return + except FontNotFound as err: + lookuperror = err + finally: + _winreg.CloseKey(key) except EnvironmentError: - raise FontNotFound('Can\'t open Windows font registry key') - try: - path = self._lookup_win(key, self.font_name, STYLES['NORMAL'], True) - self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size) - for style in ('ITALIC', 'BOLD', 'BOLDITALIC'): - path = self._lookup_win(key, self.font_name, STYLES[style]) - if path: - self.fonts[style] = ImageFont.truetype(path, self.font_size) - else: - if style == 'BOLDITALIC': - self.fonts[style] = self.fonts['BOLD'] - else: - self.fonts[style] = self.fonts['NORMAL'] - finally: - _winreg.CloseKey(key) + pass + else: + # If we get here, we checked all registry keys and had no luck + # We can be in one of two situations now: + # * All key lookups failed. In this case lookuperror is None and we + # will raise a generic error + # * At least one lookup failed with a FontNotFound error. In this + # case, we will raise that as a more specific error + if lookuperror: + raise lookuperror + raise FontNotFound('Can\'t open Windows font registry key') def get_char_size(self): """ @@ -196,6 +209,12 @@ def get_char_size(self): """ return self.fonts['NORMAL'].getsize('M') + def get_text_size(self, text): + """ + Get the text size(width, height). 
+ """ + return self.fonts['NORMAL'].getsize(text) + def get_font(self, bold, oblique): """ Get the font based on bold and italic flags. @@ -237,7 +256,8 @@ class ImageFormatter(Formatter): bold and italic fonts will be generated. This really should be a monospace font to look sane. - Default: "Bitstream Vera Sans Mono" on Windows, Courier New on \\*nix + Default: "Courier New" on Windows, "Menlo" on Mac OS, and + "DejaVu Sans Mono" on \\*nix `font_size` The font size in points to be used. @@ -405,17 +425,17 @@ def _get_char_width(self): """ return self.fontw - def _get_char_x(self, charno): + def _get_char_x(self, linelength): """ Get the X coordinate of a character position. """ - return charno * self.fontw + self.image_pad + self.line_number_width + return linelength + self.image_pad + self.line_number_width - def _get_text_pos(self, charno, lineno): + def _get_text_pos(self, linelength, lineno): """ Get the actual position for a character and line position. """ - return self._get_char_x(charno), self._get_line_y(lineno) + return self._get_char_x(linelength), self._get_line_y(lineno) def _get_linenumber_pos(self, lineno): """ @@ -439,11 +459,11 @@ def _get_style_font(self, style): """ return self.fonts.get_font(style['bold'], style['italic']) - def _get_image_size(self, maxcharno, maxlineno): + def _get_image_size(self, maxlinelength, maxlineno): """ Get the required image size. """ - return (self._get_char_x(maxcharno) + self.image_pad, + return (self._get_char_x(maxlinelength) + self.image_pad, self._get_line_y(maxlineno + 0) + self.image_pad) def _draw_linenumber(self, posno, lineno): @@ -469,6 +489,7 @@ def _create_drawables(self, tokensource): Create drawables for the token content. 
""" lineno = charno = maxcharno = 0 + maxlinelength = linelength = 0 for ttype, value in tokensource: while ttype not in self.styles: ttype = ttype.parent @@ -483,17 +504,22 @@ def _create_drawables(self, tokensource): temp = line.rstrip('\n') if temp: self._draw_text( - self._get_text_pos(charno, lineno), + self._get_text_pos(linelength, lineno), temp, font = self._get_style_font(style), fill = self._get_text_color(style) ) + temp_width, temp_hight = self.fonts.get_text_size(temp) + linelength += temp_width + maxlinelength = max(maxlinelength, linelength) charno += len(temp) maxcharno = max(maxcharno, charno) if line.endswith('\n'): # add a line for each extra line in the value + linelength = 0 charno = 0 lineno += 1 + self.maxlinelength = maxlinelength self.maxcharno = maxcharno self.maxlineno = lineno @@ -503,7 +529,7 @@ def _draw_line_numbers(self): """ if not self.line_numbers: return - for p in xrange(self.maxlineno): + for p in range(self.maxlineno): n = p + self.line_number_start if (n % self.line_number_step) == 0: self._draw_linenumber(p, n) @@ -521,7 +547,8 @@ def _paint_line_number_bg(self, im): rectw = self.image_pad + self.line_number_width - self.line_number_pad draw.rectangle([(0, 0), (rectw, recth)], fill=self.line_number_bg) - draw.line([(rectw, 0), (rectw, recth)], fill=self.line_number_fg) + if self.line_number_separator: + draw.line([(rectw, 0), (rectw, recth)], fill=self.line_number_fg) del draw def format(self, tokensource, outfile): @@ -536,7 +563,7 @@ def format(self, tokensource, outfile): self._draw_line_numbers() im = Image.new( 'RGB', - self._get_image_size(self.maxcharno, self.maxlineno), + self._get_image_size(self.maxlinelength, self.maxlineno), self.background_color ) self._paint_line_number_bg(im) diff --git a/src/typecode/_vendor/pygments/formatters/irc.py b/src/typecode/_vendor/pygments/formatters/irc.py index 51f6791..62a2056 100644 --- a/src/typecode/_vendor/pygments/formatters/irc.py +++ 
b/src/typecode/_vendor/pygments/formatters/irc.py @@ -5,7 +5,7 @@ Formatter for IRC output - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/formatters/latex.py b/src/typecode/_vendor/pygments/formatters/latex.py index 85685ec..ade88fb 100644 --- a/src/typecode/_vendor/pygments/formatters/latex.py +++ b/src/typecode/_vendor/pygments/formatters/latex.py @@ -5,17 +5,16 @@ Formatter for LaTeX fancyvrb output. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ -from __future__ import division +from io import StringIO from typecode._vendor.pygments.formatter import Formatter -from typecode._vendor.pygments.lexer import Lexer +from typecode._vendor.pygments.lexer import Lexer, do_insertions from typecode._vendor.pygments.token import Token, STANDARD_TYPES -from typecode._vendor.pygments.util import get_bool_opt, get_int_opt, StringIO, xrange, \ - iteritems +from typecode._vendor.pygments.util import get_bool_opt, get_int_opt __all__ = ['LatexFormatter'] @@ -266,7 +265,7 @@ def __init__(self, **options): self.right = self.escapeinside[1] else: self.escapeinside = '' - self.envname = options.get('envname', u'Verbatim') + self.envname = options.get('envname', 'Verbatim') self._create_stylesheet() @@ -322,7 +321,7 @@ def get_style_defs(self, arg=''): """ cp = self.commandprefix styles = [] - for name, definition in iteritems(self.cmd2def): + for name, definition in self.cmd2def.items(): styles.append(r'\expandafter\def\csname %s@tok@%s\endcsname{%s}' % (cp, name, definition)) return STYLE_TEMPLATE % {'cp': self.commandprefix, @@ -337,24 +336,24 @@ def format_unencoded(self, tokensource, outfile): realoutfile = outfile outfile = StringIO() - outfile.write(u'\\begin{' + 
self.envname + u'}[commandchars=\\\\\\{\\}') + outfile.write('\\begin{' + self.envname + '}[commandchars=\\\\\\{\\}') if self.linenos: start, step = self.linenostart, self.linenostep - outfile.write(u',numbers=left' + - (start and u',firstnumber=%d' % start or u'') + - (step and u',stepnumber=%d' % step or u'')) + outfile.write(',numbers=left' + + (start and ',firstnumber=%d' % start or '') + + (step and ',stepnumber=%d' % step or '')) if self.mathescape or self.texcomments or self.escapeinside: - outfile.write(u',codes={\\catcode`\\$=3\\catcode`\\^=7\\catcode`\\_=8}') + outfile.write(',codes={\\catcode`\\$=3\\catcode`\\^=7\\catcode`\\_=8}') if self.verboptions: - outfile.write(u',' + self.verboptions) - outfile.write(u']\n') + outfile.write(',' + self.verboptions) + outfile.write(']\n') for ttype, value in tokensource: if ttype in Token.Comment: if self.texcomments: # Try to guess comment starting lexeme and escape it ... start = value[0:1] - for i in xrange(1, len(value)): + for i in range(1, len(value)): if start[0] != value[i]: break start += value[i] @@ -410,7 +409,7 @@ def format_unencoded(self, tokensource, outfile): else: outfile.write(value) - outfile.write(u'\\end{' + self.envname + u'}\n') + outfile.write('\\end{' + self.envname + '}\n') if self.full: encoding = self.encoding or 'utf8' @@ -447,13 +446,44 @@ def __init__(self, left, right, lang, **options): Lexer.__init__(self, **options) def get_tokens_unprocessed(self, text): + # find and remove all the escape tokens (replace with an empty string) + # this is very similar to DelegatingLexer.get_tokens_unprocessed. 
+ buffered = '' + insertions = [] + insertion_buf = [] + for i, t, v in self._find_safe_escape_tokens(text): + if t is None: + if insertion_buf: + insertions.append((len(buffered), insertion_buf)) + insertion_buf = [] + buffered += v + else: + insertion_buf.append((i, t, v)) + if insertion_buf: + insertions.append((len(buffered), insertion_buf)) + return do_insertions(insertions, + self.lang.get_tokens_unprocessed(buffered)) + + def _find_safe_escape_tokens(self, text): + """ find escape tokens that are not in strings or comments """ + for i, t, v in self._filter_to( + self.lang.get_tokens_unprocessed(text), + lambda t: t in Token.Comment or t in Token.String + ): + if t is None: + for i2, t2, v2 in self._find_escape_tokens(v): + yield i + i2, t2, v2 + else: + yield i, None, v + + def _filter_to(self, it, pred): + """ Keep only the tokens that match `pred`, merge the others together """ buf = '' idx = 0 - for i, t, v in self.lang.get_tokens_unprocessed(text): - if t in Token.Comment or t in Token.String: + for i, t, v in it: + if pred(t): if buf: - for x in self.get_tokens_aux(idx, buf): - yield x + yield idx, None, buf buf = '' yield i, t, v else: @@ -461,16 +491,16 @@ def get_tokens_unprocessed(self, text): idx = i buf += v if buf: - for x in self.get_tokens_aux(idx, buf): - yield x + yield idx, None, buf - def get_tokens_aux(self, index, text): + def _find_escape_tokens(self, text): + """ Find escape tokens within text, give token=None otherwise """ + index = 0 while text: a, sep1, text = text.partition(self.left) if a: - for i, t, v in self.lang.get_tokens_unprocessed(a): - yield index + i, t, v - index += len(a) + yield index, None, a + index += len(a) if sep1: b, sep2, text = text.partition(self.right) if sep2: diff --git a/src/typecode/_vendor/pygments/formatters/other.py b/src/typecode/_vendor/pygments/formatters/other.py index 330bdbc..6ebc1be 100644 --- a/src/typecode/_vendor/pygments/formatters/other.py +++ 
b/src/typecode/_vendor/pygments/formatters/other.py @@ -5,12 +5,12 @@ Other formatters: NullFormatter, RawTokenFormatter. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ from typecode._vendor.pygments.formatter import Formatter -from typecode._vendor.pygments.util import OptionError, get_choice_opt +from typecode._vendor.pygments.util import get_choice_opt from typecode._vendor.pygments.token import Token from typecode._vendor.pygments.console import colorize @@ -87,14 +87,17 @@ def format(self, tokensource, outfile): if self.compress == 'gz': import gzip outfile = gzip.GzipFile('', 'wb', 9, outfile) + def write(text): outfile.write(text.encode()) flush = outfile.flush elif self.compress == 'bz2': import bz2 compressor = bz2.BZ2Compressor(9) + def write(text): outfile.write(compressor.compress(text.encode())) + def flush(): outfile.write(compressor.flush()) outfile.flush() @@ -115,14 +118,15 @@ def write(text): write("%s\t%r\n" % (ttype, value)) flush() -TESTCASE_BEFORE = u'''\ - def testNeedsName(self): + +TESTCASE_BEFORE = '''\ + def testNeedsName(lexer): fragment = %r tokens = [ ''' -TESTCASE_AFTER = u'''\ +TESTCASE_AFTER = '''\ ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) + assert list(lexer.get_tokens(fragment)) == tokens ''' @@ -148,8 +152,8 @@ def format(self, tokensource, outfile): rawbuf.append(value) outbuf.append('%s(%s, %r),\n' % (indentation, ttype, value)) - before = TESTCASE_BEFORE % (u''.join(rawbuf),) - during = u''.join(outbuf) + before = TESTCASE_BEFORE % (''.join(rawbuf),) + during = ''.join(outbuf) after = TESTCASE_AFTER if self.encoding is None: outfile.write(before + during + after) diff --git a/src/typecode/_vendor/pygments/formatters/rtf.py b/src/typecode/_vendor/pygments/formatters/rtf.py index 4aedeb1..a369052 100644 --- a/src/typecode/_vendor/pygments/formatters/rtf.py +++ 
b/src/typecode/_vendor/pygments/formatters/rtf.py @@ -5,12 +5,12 @@ A formatter that generates RTF files. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ from typecode._vendor.pygments.formatter import Formatter -from typecode._vendor.pygments.util import get_int_opt, _surrogatepair +from typecode._vendor.pygments.util import get_int_opt, surrogatepair __all__ = ['RtfFormatter'] @@ -35,7 +35,7 @@ class RtfFormatter(Formatter): ``'default'``). `fontface` - The used font famliy, for example ``Bitstream Vera Sans``. Defaults to + The used font family, for example ``Bitstream Vera Sans``. Defaults to some generic font which is supposed to have fixed width. `fontsize` @@ -65,14 +65,14 @@ def __init__(self, **options): self.fontsize = get_int_opt(options, 'fontsize', 0) def _escape(self, text): - return text.replace(u'\\', u'\\\\') \ - .replace(u'{', u'\\{') \ - .replace(u'}', u'\\}') + return text.replace('\\', '\\\\') \ + .replace('{', '\\{') \ + .replace('}', '\\}') def _escape_text(self, text): - # empty strings, should give a small performance improvment + # empty strings, should give a small performance improvement if not text: - return u'' + return '' # escape text text = self._escape(text) @@ -85,21 +85,21 @@ def _escape_text(self, text): buf.append(str(c)) elif (2**7) <= cn < (2**16): # single unicode escape sequence - buf.append(u'{\\u%d}' % cn) + buf.append('{\\u%d}' % cn) elif (2**16) <= cn: # RTF limits unicode to 16 bits. 
# Force surrogate pairs - buf.append(u'{\\u%d}{\\u%d}' % _surrogatepair(cn)) + buf.append('{\\u%d}{\\u%d}' % surrogatepair(cn)) - return u''.join(buf).replace(u'\n', u'\\par\n') + return ''.join(buf).replace('\n', '\\par\n') def format_unencoded(self, tokensource, outfile): # rtf 1.8 header - outfile.write(u'{\\rtf1\\ansi\\uc0\\deff0' - u'{\\fonttbl{\\f0\\fmodern\\fprq1\\fcharset0%s;}}' - u'{\\colortbl;' % (self.fontface and - u' ' + self._escape(self.fontface) or - u'')) + outfile.write('{\\rtf1\\ansi\\uc0\\deff0' + '{\\fonttbl{\\f0\\fmodern\\fprq1\\fcharset0%s;}}' + '{\\colortbl;' % (self.fontface and + ' ' + self._escape(self.fontface) or + '')) # convert colors and save them in a mapping to access them later. color_mapping = {} @@ -108,15 +108,15 @@ def format_unencoded(self, tokensource, outfile): for color in style['color'], style['bgcolor'], style['border']: if color and color not in color_mapping: color_mapping[color] = offset - outfile.write(u'\\red%d\\green%d\\blue%d;' % ( + outfile.write('\\red%d\\green%d\\blue%d;' % ( int(color[0:2], 16), int(color[2:4], 16), int(color[4:6], 16) )) offset += 1 - outfile.write(u'}\\f0 ') + outfile.write('}\\f0 ') if self.fontsize: - outfile.write(u'\\fs%d' % (self.fontsize)) + outfile.write('\\fs%d' % self.fontsize) # highlight stream for ttype, value in tokensource: @@ -125,23 +125,23 @@ def format_unencoded(self, tokensource, outfile): style = self.style.style_for_token(ttype) buf = [] if style['bgcolor']: - buf.append(u'\\cb%d' % color_mapping[style['bgcolor']]) + buf.append('\\cb%d' % color_mapping[style['bgcolor']]) if style['color']: - buf.append(u'\\cf%d' % color_mapping[style['color']]) + buf.append('\\cf%d' % color_mapping[style['color']]) if style['bold']: - buf.append(u'\\b') + buf.append('\\b') if style['italic']: - buf.append(u'\\i') + buf.append('\\i') if style['underline']: - buf.append(u'\\ul') + buf.append('\\ul') if style['border']: - buf.append(u'\\chbrdr\\chcfpat%d' % + 
buf.append('\\chbrdr\\chcfpat%d' % color_mapping[style['border']]) - start = u''.join(buf) + start = ''.join(buf) if start: - outfile.write(u'{%s ' % start) + outfile.write('{%s ' % start) outfile.write(self._escape_text(value)) if start: - outfile.write(u'}') + outfile.write('}') - outfile.write(u'}') + outfile.write('}') diff --git a/src/typecode/_vendor/pygments/formatters/svg.py b/src/typecode/_vendor/pygments/formatters/svg.py index 9cf36e1..c9a19cf 100644 --- a/src/typecode/_vendor/pygments/formatters/svg.py +++ b/src/typecode/_vendor/pygments/formatters/svg.py @@ -5,11 +5,12 @@ Formatter for SVG output. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ from typecode._vendor.pygments.formatter import Formatter +from typecode._vendor.pygments.token import Comment from typecode._vendor.pygments.util import get_bool_opt, get_int_opt __all__ = ['SvgFormatter'] @@ -52,6 +53,19 @@ class SvgFormatter(Formatter): The value to give the wrapping ```` element's ``font-size`` attribute, defaults to ``"14px"``. + `linenos` + If ``True``, add line numbers (default: ``False``). + + `linenostart` + The line number for the first line (default: ``1``). + + `linenostep` + If set to a number n > 1, only every nth line number is printed. + + `linenowidth` + Maximum width devoted to line numbers (default: ``3*ystep``, sufficient + for up to 4-digit line numbers. Increase width for longer code blocks). + `xoffset` Starting offset in X direction, defaults to ``0``. 
@@ -92,6 +106,10 @@ def __init__(self, **options): self.yoffset = get_int_opt(options, 'yoffset', int_fs) self.ystep = get_int_opt(options, 'ystep', int_fs + 5) self.spacehack = get_bool_opt(options, 'spacehack', True) + self.linenos = get_bool_opt(options,'linenos',False) + self.linenostart = get_int_opt(options,'linenostart',1) + self.linenostep = get_int_opt(options,'linenostep',1) + self.linenowidth = get_int_opt(options,'linenowidth', 3*self.ystep) self._stylecache = {} def format_unencoded(self, tokensource, outfile): @@ -115,7 +133,19 @@ def format_unencoded(self, tokensource, outfile): outfile.write('\n') outfile.write('\n' % (self.fontfamily, self.fontsize)) - outfile.write('' % (x, y)) + + counter = self.linenostart + counter_step = self.linenostep + counter_style = self._get_style(Comment) + line_x = x + + if self.linenos: + if counter % counter_step == 0: + outfile.write('%s' % (x+self.linenowidth,y,counter_style,counter)) + line_x += self.linenowidth + self.ystep + counter += 1 + + outfile.write('' % (line_x, y)) for ttype, value in tokensource: style = self._get_style(ttype) tspan = style and '' or '' @@ -127,8 +157,12 @@ def format_unencoded(self, tokensource, outfile): for part in parts[:-1]: outfile.write(tspan + part + tspanend) y += self.ystep - outfile.write('\n' % (x, y)) + outfile.write('\n') + if self.linenos and counter % counter_step == 0: + outfile.write('%s' % (x+self.linenowidth,y,counter_style,counter)) + + counter += 1 + outfile.write('' % (line_x,y)) outfile.write(tspan + parts[-1] + tspanend) outfile.write('') diff --git a/src/typecode/_vendor/pygments/formatters/terminal.py b/src/typecode/_vendor/pygments/formatters/terminal.py index 2801bdc..c82e093 100644 --- a/src/typecode/_vendor/pygments/formatters/terminal.py +++ b/src/typecode/_vendor/pygments/formatters/terminal.py @@ -5,7 +5,7 @@ Formatter for terminal output with ANSI sequences. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. 
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -92,12 +92,6 @@ def __init__(self, **options): self._lineno = 0 def format(self, tokensource, outfile): - # hack: if the output is a terminal and has an encoding set, - # use that to avoid unicode encode problems - if not self.encoding and hasattr(outfile, "encoding") and \ - hasattr(outfile, "isatty") and outfile.isatty() and \ - sys.version_info < (3,): - self.encoding = outfile.encoding return Formatter.format(self, tokensource, outfile) def _write_lineno(self, outfile): diff --git a/src/typecode/_vendor/pygments/formatters/terminal256.py b/src/typecode/_vendor/pygments/formatters/terminal256.py index ca8e195..ebe374d 100644 --- a/src/typecode/_vendor/pygments/formatters/terminal256.py +++ b/src/typecode/_vendor/pygments/formatters/terminal256.py @@ -11,7 +11,7 @@ Formatter version 1. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -35,11 +35,12 @@ class EscapeSequence: - def __init__(self, fg=None, bg=None, bold=False, underline=False): + def __init__(self, fg=None, bg=None, bold=False, underline=False, italic=False): self.fg = fg self.bg = bg self.bold = bold self.underline = underline + self.italic = italic def escape(self, attrs): if len(attrs): @@ -68,6 +69,8 @@ def color_string(self): attrs.append("01") if self.underline: attrs.append("04") + if self.italic: + attrs.append("03") return self.escape(attrs) def true_color_string(self): @@ -80,6 +83,8 @@ def true_color_string(self): attrs.append("01") if self.underline: attrs.append("04") + if self.italic: + attrs.append("03") return self.escape(attrs) def reset_string(self): @@ -88,7 +93,7 @@ def reset_string(self): attrs.append("39") if self.bg is not None: attrs.append("49") - if self.bold or self.underline: + if self.bold or self.underline or self.italic: attrs.append("00") return self.escape(attrs) @@ -135,6 +140,7 @@ def __init__(self, **options): self.usebold = 'nobold' not in options self.useunderline = 'nounderline' not in options + self.useitalic = 'noitalic' not in options self._build_color_table() # build an RGB-to-256 color conversion table self._setup_styles() # convert selected style's colors to term. 
colors @@ -227,16 +233,12 @@ def _setup_styles(self): escape.bold = True if self.useunderline and ndef['underline']: escape.underline = True + if self.useitalic and ndef['italic']: + escape.italic = True self.style_string[str(ttype)] = (escape.color_string(), escape.reset_string()) def format(self, tokensource, outfile): - # hack: if the output is a terminal and has an encoding set, - # use that to avoid unicode encode problems - if not self.encoding and hasattr(outfile, "encoding") and \ - hasattr(outfile, "isatty") and outfile.isatty() and \ - sys.version_info < (3,): - self.encoding = outfile.encoding return Formatter.format(self, tokensource, outfile) def format_unencoded(self, tokensource, outfile): @@ -311,5 +313,7 @@ def _setup_styles(self): escape.bold = True if self.useunderline and ndef['underline']: escape.underline = True + if self.useitalic and ndef['italic']: + escape.italic = True self.style_string[str(ttype)] = (escape.true_color_string(), escape.reset_string()) diff --git a/src/typecode/_vendor/pygments/lexer.py b/src/typecode/_vendor/pygments/lexer.py index 0af5e1f..a897fcd 100644 --- a/src/typecode/_vendor/pygments/lexer.py +++ b/src/typecode/_vendor/pygments/lexer.py @@ -5,12 +5,10 @@ Base lexer classes. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" -from __future__ import print_function - import re import sys import time @@ -19,7 +17,7 @@ from typecode._vendor.pygments.filters import get_filter_by_name from typecode._vendor.pygments.token import Error, Text, Other, _TokenType from typecode._vendor.pygments.util import get_bool_opt, get_int_opt, get_list_opt, \ - make_analysator, text_type, add_metaclass, iteritems, Future, guess_decode + make_analysator, Future, guess_decode from typecode._vendor.pygments.regexopt import regex_opt __all__ = ['Lexer', 'RegexLexer', 'ExtendedRegexLexer', 'DelegatingLexer', @@ -48,8 +46,7 @@ def __new__(mcs, name, bases, d): return type.__new__(mcs, name, bases, d) -@add_metaclass(LexerMeta) -class Lexer(object): +class Lexer(metaclass=LexerMeta): """ Lexer for a specific language. @@ -145,16 +142,16 @@ def get_tokens(self, text, unfiltered=False): Also preprocess the text, i.e. expand tabs and strip it if wanted and applies registered filters. """ - if not isinstance(text, text_type): + if not isinstance(text, str): if self.encoding == 'guess': text, _ = guess_decode(text) elif self.encoding == 'chardet': try: import chardet - except ImportError: + except ImportError as e: raise ImportError('To enable chardet encoding guessing, ' 'please install the chardet library ' - 'from http://chardet.feedparser.org/') + 'from http://chardet.feedparser.org/') from e # check for BOM first decoded = None for bom, encoding in _encoding_map: @@ -169,11 +166,11 @@ def get_tokens(self, text, unfiltered=False): text = decoded else: text = text.decode(self.encoding) - if text.startswith(u'\ufeff'): - text = text[len(u'\ufeff'):] + if text.startswith('\ufeff'): + text = text[len('\ufeff'):] else: - if text.startswith(u'\ufeff'): - text = text[len(u'\ufeff'):] + if text.startswith('\ufeff'): + text = text[len('\ufeff'):] # text now *is* a unicode string text = text.replace('\r\n', '\n') @@ -252,7 +249,7 @@ class include(str): # pylint: disable=invalid-name pass -class _inherit(object): +class 
_inherit: """ Indicates the a state should inherit from its superclass. """ @@ -275,7 +272,7 @@ def __init__(self, *args): pass -class _PseudoMatch(object): +class _PseudoMatch: """ A pseudo match object constructed from a string. """ @@ -328,11 +325,12 @@ def callback(lexer, match, ctx=None): return callback -class _This(object): +class _This: """ Special singleton used for indicating the caller class. Used by ``using``. """ + this = _This() @@ -498,7 +496,7 @@ def _process_state(cls, unprocessed, processed, state): rex = cls._process_regex(tdef[0], rflags, state) except Exception as err: raise ValueError("uncompilable regex %r in state %r of %r: %s" % - (tdef[0], state, cls, err)) + (tdef[0], state, cls, err)) from err token = cls._process_token(tdef[1]) @@ -536,7 +534,7 @@ def get_tokendefs(cls): for c in cls.__mro__: toks = c.__dict__.get('tokens', {}) - for state, items in iteritems(toks): + for state, items in toks.items(): curitems = tokens.get(state) if curitems is None: # N.b. because this is assigned by reference, sufficiently @@ -582,8 +580,7 @@ def __call__(cls, *args, **kwds): return type.__call__(cls, *args, **kwds) -@add_metaclass(RegexLexerMeta) -class RegexLexer(Lexer): +class RegexLexer(Lexer, metaclass=RegexLexerMeta): """ Base for simple stateful regular expression-based lexers. 
Simplifies the lexing process so that you need only @@ -631,8 +628,7 @@ def get_tokens_unprocessed(self, text, stack=('root',)): if type(action) is _TokenType: yield pos, action, m.group() else: - for item in action(self, m): - yield item + yield from action(self, m) pos = m.end() if new_state is not None: # state transition @@ -667,7 +663,7 @@ def get_tokens_unprocessed(self, text, stack=('root',)): # at EOL, reset state to "root" statestack = ['root'] statetokens = tokendefs['root'] - yield pos, Text, u'\n' + yield pos, Text, '\n' pos += 1 continue yield pos, Error, text[pos] @@ -676,7 +672,7 @@ def get_tokens_unprocessed(self, text, stack=('root',)): break -class LexerContext(object): +class LexerContext: """ A helper object that holds lexer position data. """ @@ -719,8 +715,7 @@ def get_tokens_unprocessed(self, text=None, context=None): yield ctx.pos, action, m.group() ctx.pos = m.end() else: - for item in action(self, m, ctx): - yield item + yield from action(self, m, ctx) if not new_state: # altered the state stack? 
statetokens = tokendefs[ctx.stack[-1]] @@ -756,7 +751,7 @@ def get_tokens_unprocessed(self, text=None, context=None): # at EOL, reset state to "root" ctx.stack = ['root'] statetokens = tokendefs['root'] - yield ctx.pos, Text, u'\n' + yield ctx.pos, Text, '\n' ctx.pos += 1 continue yield ctx.pos, Error, text[ctx.pos] @@ -784,8 +779,7 @@ def do_insertions(insertions, tokens): index, itokens = next(insertions) except StopIteration: # no insertions - for item in tokens: - yield item + yield from tokens return realpos = None @@ -800,8 +794,9 @@ def do_insertions(insertions, tokens): oldi = 0 while insleft and i + len(v) >= index: tmpval = v[oldi:index - i] - yield realpos, t, tmpval - realpos += len(tmpval) + if tmpval: + yield realpos, t, tmpval + realpos += len(tmpval) for it_index, it_token, it_value in itokens: yield realpos, it_token, it_value realpos += len(it_value) @@ -811,8 +806,9 @@ def do_insertions(insertions, tokens): except StopIteration: insleft = False break # not strictly necessary - yield realpos, t, v[oldi:] - realpos += len(v) - oldi + if oldi < len(v): + yield realpos, t, v[oldi:] + realpos += len(v) - oldi # leftover tokens while insleft: @@ -850,8 +846,7 @@ def match_func(text, pos, endpos=sys.maxsize): return match_func -@add_metaclass(ProfilingRegexLexerMeta) -class ProfilingRegexLexer(RegexLexer): +class ProfilingRegexLexer(RegexLexer, metaclass=ProfilingRegexLexerMeta): """Drop-in replacement for RegexLexer that does profiling of its regexes.""" _prof_data = [] @@ -860,8 +855,7 @@ class ProfilingRegexLexer(RegexLexer): def get_tokens_unprocessed(self, text, stack=('root',)): # this needs to be a stack, since using(this) will produce nested calls self.__class__._prof_data.append({}) - for tok in RegexLexer.get_tokens_unprocessed(self, text, stack): - yield tok + yield from RegexLexer.get_tokens_unprocessed(self, text, stack) rawdata = self.__class__._prof_data.pop() data = sorted(((s, repr(r).strip('u\'').replace('\\\\', '\\')[:65], n, 1000 * 
t, 1000 * t / n) diff --git a/src/typecode/_vendor/pygments/lexers/__init__.py b/src/typecode/_vendor/pygments/lexers/__init__.py index db55ab5..173ac3b 100644 --- a/src/typecode/_vendor/pygments/lexers/__init__.py +++ b/src/typecode/_vendor/pygments/lexers/__init__.py @@ -5,7 +5,7 @@ Pygments lexers. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -18,11 +18,15 @@ from typecode._vendor.pygments.lexers._mapping import LEXERS from typecode._vendor.pygments.modeline import get_filetype_from_buffer from typecode._vendor.pygments.plugin import find_plugin_lexers -from typecode._vendor.pygments.util import ClassNotFound, itervalues, guess_decode, text_type +from typecode._vendor.pygments.util import ClassNotFound, guess_decode +COMPAT = { + 'Python3Lexer': 'PythonLexer', + 'Python3TracebackLexer': 'PythonTracebackLexer', +} __all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class', - 'guess_lexer', 'load_lexer_from_file'] + list(LEXERS) + 'guess_lexer', 'load_lexer_from_file'] + list(LEXERS) + list(COMPAT) _lexer_cache = {} _pattern_cache = {} @@ -48,7 +52,7 @@ def get_all_lexers(): """Return a generator of tuples in the form ``(name, aliases, filenames, mimetypes)`` of all know lexers. 
""" - for item in itervalues(LEXERS): + for item in LEXERS.values(): yield item[1:] for lexer in find_plugin_lexers(): yield lexer.name, lexer.aliases, lexer.filenames, lexer.mimetypes @@ -62,7 +66,7 @@ def find_lexer_class(name): if name in _lexer_cache: return _lexer_cache[name] # lookup builtin lexers - for module_name, lname, aliases, _, _ in itervalues(LEXERS): + for module_name, lname, aliases, _, _ in LEXERS.values(): if name == lname: _load_lexers(module_name) return _lexer_cache[name] @@ -82,7 +86,7 @@ def find_lexer_class_by_name(_alias): if not _alias: raise ClassNotFound('no lexer for alias %r found' % _alias) # lookup builtin lexers - for module_name, name, aliases, _, _ in itervalues(LEXERS): + for module_name, name, aliases, _, _ in LEXERS.values(): if _alias.lower() in aliases: if name not in _lexer_cache: _load_lexers(module_name) @@ -103,7 +107,7 @@ def get_lexer_by_name(_alias, **options): raise ClassNotFound('no lexer for alias %r found' % _alias) # lookup builtin lexers - for module_name, name, aliases, _, _ in itervalues(LEXERS): + for module_name, name, aliases, _, _ in LEXERS.values(): if _alias.lower() in aliases: if name not in _lexer_cache: _load_lexers(module_name) @@ -143,8 +147,8 @@ def load_lexer_from_file(filename, lexername="CustomLexer", **options): # And finally instantiate it with the options return lexer_class(**options) except IOError as err: - raise ClassNotFound('cannot read %s' % filename) - except ClassNotFound as err: + raise ClassNotFound('cannot read %s: %s' % (filename, err)) + except ClassNotFound: raise except Exception as err: raise ClassNotFound('error when loading custom lexer: %s' % err) @@ -160,7 +164,7 @@ def find_lexer_class_for_filename(_fn, code=None): """ matches = [] fn = basename(_fn) - for modname, name, _, filenames, _ in itervalues(LEXERS): + for modname, name, _, filenames, _ in LEXERS.values(): for filename in filenames: if _fn_matches(fn, filename): if name not in _lexer_cache: @@ -171,7 +175,7 @@ 
def find_lexer_class_for_filename(_fn, code=None): if _fn_matches(fn, filename): matches.append((cls, filename)) - if sys.version_info > (3,) and isinstance(code, bytes): + if isinstance(code, bytes): # decode it, since all analyse_text functions expect unicode code = guess_decode(code) @@ -212,7 +216,7 @@ def get_lexer_for_mimetype(_mime, **options): Raises ClassNotFound if not found. """ - for modname, name, _, _, mimetypes in itervalues(LEXERS): + for modname, name, _, _, mimetypes in LEXERS.values(): if _mime in mimetypes: if name not in _lexer_cache: _load_lexers(modname) @@ -231,8 +235,7 @@ def _iter_lexerclasses(plugins=True): _load_lexers(module_name) yield _lexer_cache[name] if plugins: - for lexer in find_plugin_lexers(): - yield lexer + yield from find_plugin_lexers() def guess_lexer_for_filename(_fn, _text, **options): @@ -243,7 +246,7 @@ def guess_lexer_for_filename(_fn, _text, **options): usage:: - >>> from typecode._vendor.pygments.lexers import guess_lexer_for_filename + >>> from pygments.lexers import guess_lexer_for_filename >>> guess_lexer_for_filename('hello.html', '<%= @foo %>') >>> guess_lexer_for_filename('hello.html', '

{{ title|e }}

') @@ -289,7 +292,7 @@ def type_sort(t): def guess_lexer(_text, **options): """Guess a lexer by strong distinctions in the text (eg, shebang).""" - if not isinstance(_text, text_type): + if not isinstance(_text, str): inencoding = options.get('inencoding', options.get('encoding')) if inencoding: _text = _text.decode(inencoding or 'utf8') @@ -327,6 +330,8 @@ def __getattr__(self, name): cls = _lexer_cache[info[1]] setattr(self, name, cls) return cls + if name in COMPAT: + return getattr(self, COMPAT[name]) raise AttributeError(name) diff --git a/src/typecode/_vendor/pygments/lexers/_asy_builtins.py b/src/typecode/_vendor/pygments/lexers/_asy_builtins.py index 6ac79ad..884463a 100644 --- a/src/typecode/_vendor/pygments/lexers/_asy_builtins.py +++ b/src/typecode/_vendor/pygments/lexers/_asy_builtins.py @@ -10,11 +10,11 @@ TODO: perl/python script in Asymptote SVN similar to asy-list.pl but only for function and variable names. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ -ASYFUNCNAME = set(( +ASYFUNCNAME = { 'AND', 'Arc', 'ArcArrow', @@ -1038,9 +1038,9 @@ 'ztick', 'ztick3', 'ztrans' -)) +} -ASYVARNAME = set(( +ASYVARNAME = { 'AliceBlue', 'Align', 'Allow', @@ -1642,4 +1642,4 @@ 'ylabelwidth', 'zerotickfuzz', 'zerowinding' -)) +} diff --git a/src/typecode/_vendor/pygments/lexers/_cl_builtins.py b/src/typecode/_vendor/pygments/lexers/_cl_builtins.py index d0306fa..ef4612e 100644 --- a/src/typecode/_vendor/pygments/lexers/_cl_builtins.py +++ b/src/typecode/_vendor/pygments/lexers/_cl_builtins.py @@ -5,11 +5,11 @@ ANSI Common Lisp builtins. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" -BUILTIN_FUNCTIONS = set(( # 638 functions +BUILTIN_FUNCTIONS = { # 638 functions '<', '<=', '=', '>', '>=', '-', '/', '/=', '*', '+', '1-', '1+', 'abort', 'abs', 'acons', 'acos', 'acosh', 'add-method', 'adjoin', 'adjustable-array-p', 'adjust-array', 'allocate-instance', @@ -157,17 +157,17 @@ 'wild-pathname-p', 'write', 'write-byte', 'write-char', 'write-line', 'write-sequence', 'write-string', 'write-to-string', 'yes-or-no-p', 'y-or-n-p', 'zerop', -)) +} -SPECIAL_FORMS = set(( +SPECIAL_FORMS = { 'block', 'catch', 'declare', 'eval-when', 'flet', 'function', 'go', 'if', 'labels', 'lambda', 'let', 'let*', 'load-time-value', 'locally', 'macrolet', 'multiple-value-call', 'multiple-value-prog1', 'progn', 'progv', 'quote', 'return-from', 'setq', 'symbol-macrolet', 'tagbody', 'the', 'throw', 'unwind-protect', -)) +} -MACROS = set(( +MACROS = { 'and', 'assert', 'call-method', 'case', 'ccase', 'check-type', 'cond', 'ctypecase', 'decf', 'declaim', 'defclass', 'defconstant', 'defgeneric', 'define-compiler-macro', 'define-condition', 'define-method-combination', @@ -188,19 +188,19 @@ 'with-input-from-string', 'with-open-file', 'with-open-stream', 'with-output-to-string', 'with-package-iterator', 'with-simple-restart', 'with-slots', 'with-standard-io-syntax', -)) +} -LAMBDA_LIST_KEYWORDS = set(( +LAMBDA_LIST_KEYWORDS = { '&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional', '&rest', '&whole', -)) +} -DECLARATIONS = set(( +DECLARATIONS = { 'dynamic-extent', 'ignore', 'optimize', 'ftype', 'inline', 'special', 'ignorable', 'notinline', 'type', -)) +} -BUILTIN_TYPES = set(( +BUILTIN_TYPES = { 'atom', 'boolean', 'base-char', 'base-string', 'bignum', 'bit', 'compiled-function', 'extended-char', 'fixnum', 'keyword', 'nil', 'signed-byte', 'short-float', 'single-float', 'double-float', 'long-float', @@ -217,9 +217,9 @@ 'simple-type-error', 'simple-warning', 'stream-error', 'storage-condition', 'style-warning', 'type-error', 'unbound-variable', 'unbound-slot', 
'undefined-function', 'warning', -)) +} -BUILTIN_CLASSES = set(( +BUILTIN_CLASSES = { 'array', 'broadcast-stream', 'bit-vector', 'built-in-class', 'character', 'class', 'complex', 'concatenated-stream', 'cons', 'echo-stream', 'file-stream', 'float', 'function', 'generic-function', 'hash-table', @@ -229,4 +229,4 @@ 'standard-generic-function', 'standard-method', 'standard-object', 'string-stream', 'stream', 'string', 'structure-class', 'structure-object', 'symbol', 'synonym-stream', 't', 'two-way-stream', 'vector', -)) +} diff --git a/src/typecode/_vendor/pygments/lexers/_cocoa_builtins.py b/src/typecode/_vendor/pygments/lexers/_cocoa_builtins.py index f7c55c6..a542277 100644 --- a/src/typecode/_vendor/pygments/lexers/_cocoa_builtins.py +++ b/src/typecode/_vendor/pygments/lexers/_cocoa_builtins.py @@ -8,15 +8,13 @@ File may be also used as standalone generator for aboves. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" -from __future__ import print_function - -COCOA_INTERFACES = set(['UITableViewCell', 'HKCorrelationQuery', 'NSURLSessionDataTask', 'PHFetchOptions', 'NSLinguisticTagger', 'NSStream', 'AVAudioUnitDelay', 'GCMotion', 'SKPhysicsWorld', 'NSString', 'CMAttitude', 'AVAudioEnvironmentDistanceAttenuationParameters', 'HKStatisticsCollection', 'SCNPlane', 'CBPeer', 'JSContext', 'SCNTransaction', 'SCNTorus', 'AVAudioUnitEffect', 'UICollectionReusableView', 'MTLSamplerDescriptor', 'AVAssetReaderSampleReferenceOutput', 'AVMutableCompositionTrack', 'GKLeaderboard', 'NSFetchedResultsController', 'SKRange', 'MKTileOverlayRenderer', 'MIDINetworkSession', 'UIVisualEffectView', 'CIWarpKernel', 'PKObject', 'MKRoute', 'MPVolumeView', 'UIPrintInfo', 'SCNText', 'ADClient', 'PKPayment', 'AVMutableAudioMix', 'GLKEffectPropertyLight', 'WKScriptMessage', 'AVMIDIPlayer', 'PHCollectionListChangeRequest', 'UICollectionViewLayout', 'NSMutableCharacterSet', 'SKPaymentTransaction', 'NEOnDemandRuleConnect', 'NSShadow', 'SCNView', 'NSURLSessionConfiguration', 'MTLVertexAttributeDescriptor', 'CBCharacteristic', 'HKQuantityType', 'CKLocationSortDescriptor', 'NEVPNIKEv2SecurityAssociationParameters', 'CMStepCounter', 'NSNetService', 'AVAssetWriterInputMetadataAdaptor', 'UICollectionView', 'UIViewPrintFormatter', 'SCNLevelOfDetail', 'CAShapeLayer', 'MCPeerID', 'MPRatingCommand', 'WKNavigation', 'NSDictionary', 'NSFileVersion', 'CMGyroData', 'AVAudioUnitDistortion', 'CKFetchRecordsOperation', 'SKPhysicsJointSpring', 'SCNHitTestResult', 'AVAudioTime', 'CIFilter', 'UIView', 'SCNConstraint', 'CAPropertyAnimation', 'MKMapItem', 'MPRemoteCommandCenter', 'PKPaymentSummaryItem', 'UICollectionViewFlowLayoutInvalidationContext', 'UIInputViewController', 'PKPass', 'SCNPhysicsBehavior', 'MTLRenderPassColorAttachmentDescriptor', 'MKPolygonRenderer', 'CKNotification', 'JSValue', 'PHCollectionList', 'CLGeocoder', 'NSByteCountFormatter', 'AVCaptureScreenInput', 'MPFeedbackCommand', 'CAAnimation', 
'MKOverlayPathView', 'UIActionSheet', 'UIMotionEffectGroup', 'NSLengthFormatter', 'UIBarItem', 'SKProduct', 'AVAssetExportSession', 'NSKeyedUnarchiver', 'NSMutableSet', 'SCNPyramid', 'PHAssetCollection', 'MKMapView', 'HMHomeManager', 'CATransition', 'MTLCompileOptions', 'UIVibrancyEffect', 'CLCircularRegion', 'MKTileOverlay', 'SCNShape', 'ACAccountCredential', 'SKPhysicsJointLimit', 'MKMapSnapshotter', 'AVMediaSelectionGroup', 'NSIndexSet', 'CBPeripheralManager', 'CKRecordZone', 'AVAudioRecorder', 'NSURL', 'CBCentral', 'NSNumber', 'AVAudioOutputNode', 'MTLVertexAttributeDescriptorArray', 'MKETAResponse', 'SKTransition', 'SSReadingList', 'HKSourceQuery', 'UITableViewRowAction', 'UITableView', 'SCNParticlePropertyController', 'AVCaptureStillImageOutput', 'GCController', 'AVAudioPlayerNode', 'AVAudioSessionPortDescription', 'NSHTTPURLResponse', 'NEOnDemandRuleEvaluateConnection', 'SKEffectNode', 'HKQuantity', 'GCControllerElement', 'AVPlayerItemAccessLogEvent', 'SCNBox', 'NSExtensionContext', 'MKOverlayRenderer', 'SCNPhysicsVehicle', 'NSDecimalNumber', 'EKReminder', 'MKPolylineView', 'CKQuery', 'AVAudioMixerNode', 'GKAchievementDescription', 'EKParticipant', 'NSBlockOperation', 'UIActivityItemProvider', 'CLLocation', 'NSBatchUpdateRequest', 'PHContentEditingOutput', 'PHObjectChangeDetails', 'HKWorkoutType', 'MPMoviePlayerController', 'AVAudioFormat', 'HMTrigger', 'MTLRenderPassDepthAttachmentDescriptor', 'SCNRenderer', 'GKScore', 'UISplitViewController', 'HKSource', 'NSURLConnection', 'ABUnknownPersonViewController', 'SCNTechnique', 'UIMenuController', 'NSEvent', 'SKTextureAtlas', 'NSKeyedArchiver', 'GKLeaderboardSet', 'NSSimpleCString', 'AVAudioPCMBuffer', 'CBATTRequest', 'GKMatchRequest', 'AVMetadataObject', 'SKProductsRequest', 'UIAlertView', 'NSIncrementalStore', 'MFMailComposeViewController', 'SCNFloor', 'NSSortDescriptor', 'CKFetchNotificationChangesOperation', 'MPMovieAccessLog', 'NSManagedObjectContext', 'AVAudioUnitGenerator', 'WKBackForwardList', 
'SKMutableTexture', 'AVCaptureAudioDataOutput', 'ACAccount', 'AVMetadataItem', 'MPRatingCommandEvent', 'AVCaptureDeviceInputSource', 'CLLocationManager', 'MPRemoteCommand', 'AVCaptureSession', 'UIStepper', 'UIRefreshControl', 'NEEvaluateConnectionRule', 'CKModifyRecordsOperation', 'UICollectionViewTransitionLayout', 'CBCentralManager', 'NSPurgeableData', 'PKShippingMethod', 'SLComposeViewController', 'NSHashTable', 'MKUserTrackingBarButtonItem', 'UILexiconEntry', 'CMMotionActivity', 'SKAction', 'SKShader', 'AVPlayerItemOutput', 'MTLRenderPassAttachmentDescriptor', 'UIDocumentInteractionController', 'UIDynamicItemBehavior', 'NSMutableDictionary', 'UILabel', 'AVCaptureInputPort', 'NSExpression', 'CAInterAppAudioTransportView', 'SKMutablePayment', 'UIImage', 'PHCachingImageManager', 'SCNTransformConstraint', 'HKCorrelationType', 'UIColor', 'SCNGeometrySource', 'AVCaptureAutoExposureBracketedStillImageSettings', 'UIPopoverBackgroundView', 'UIToolbar', 'NSNotificationCenter', 'UICollectionViewLayoutAttributes', 'AVAssetReaderOutputMetadataAdaptor', 'NSEntityMigrationPolicy', 'HMUser', 'NSLocale', 'NSURLSession', 'SCNCamera', 'NSTimeZone', 'UIManagedDocument', 'AVMutableVideoCompositionLayerInstruction', 'AVAssetTrackGroup', 'NSInvocationOperation', 'ALAssetRepresentation', 'AVQueuePlayer', 'HMServiceGroup', 'UIPasteboard', 'PHContentEditingInput', 'NSLayoutManager', 'EKCalendarChooser', 'EKObject', 'CATiledLayer', 'GLKReflectionMapEffect', 'NSManagedObjectID', 'NSEnergyFormatter', 'SLRequest', 'HMCharacteristic', 'AVPlayerLayer', 'MTLRenderPassDescriptor', 'SKPayment', 'NSPointerArray', 'AVAudioMix', 'SCNLight', 'MCAdvertiserAssistant', 'MKMapSnapshotOptions', 'HKCategorySample', 'AVAudioEnvironmentReverbParameters', 'SCNMorpher', 'AVTimedMetadataGroup', 'CBMutableCharacteristic', 'NSFetchRequest', 'UIDevice', 'NSManagedObject', 'NKAssetDownload', 'AVOutputSettingsAssistant', 'SKPhysicsJointPin', 'UITabBar', 'UITextInputMode', 'NSFetchRequestExpression', 'HMActionSet', 
'CTSubscriber', 'PHAssetChangeRequest', 'NSPersistentStoreRequest', 'UITabBarController', 'HKQuantitySample', 'AVPlayerItem', 'AVSynchronizedLayer', 'MKDirectionsRequest', 'NSMetadataItem', 'UIPresentationController', 'UINavigationItem', 'PHFetchResultChangeDetails', 'PHImageManager', 'AVCaptureManualExposureBracketedStillImageSettings', 'UIStoryboardPopoverSegue', 'SCNLookAtConstraint', 'UIGravityBehavior', 'UIWindow', 'CBMutableDescriptor', 'NEOnDemandRuleDisconnect', 'UIBezierPath', 'UINavigationController', 'ABPeoplePickerNavigationController', 'EKSource', 'AVAssetWriterInput', 'AVPlayerItemTrack', 'GLKEffectPropertyTexture', 'NSHTTPCookie', 'NSURLResponse', 'SKPaymentQueue', 'NSAssertionHandler', 'MKReverseGeocoder', 'GCControllerAxisInput', 'NSArray', 'NSOrthography', 'NSURLSessionUploadTask', 'NSCharacterSet', 'AVMutableVideoCompositionInstruction', 'AVAssetReaderOutput', 'EAGLContext', 'WKFrameInfo', 'CMPedometer', 'MyClass', 'CKModifyBadgeOperation', 'AVCaptureAudioFileOutput', 'SKEmitterNode', 'NSMachPort', 'AVVideoCompositionCoreAnimationTool', 'PHCollection', 'SCNPhysicsWorld', 'NSURLRequest', 'CMAccelerometerData', 'NSNetServiceBrowser', 'CLFloor', 'AVAsynchronousVideoCompositionRequest', 'SCNGeometry', 'SCNIKConstraint', 'CIKernel', 'CAGradientLayer', 'HKCharacteristicType', 'NSFormatter', 'SCNAction', 'CATransaction', 'CBUUID', 'UIStoryboard', 'MPMediaLibrary', 'UITapGestureRecognizer', 'MPMediaItemArtwork', 'NSURLSessionTask', 'AVAudioUnit', 'MCBrowserViewController', 'UIFontDescriptor', 'NSRelationshipDescription', 'HKSample', 'WKWebView', 'NSMutableAttributedString', 'NSPersistentStoreAsynchronousResult', 'MPNowPlayingInfoCenter', 'MKLocalSearch', 'EAAccessory', 'HKCorrelation', 'CATextLayer', 'NSNotificationQueue', 'UINib', 'GLKTextureLoader', 'HKObjectType', 'NSValue', 'NSMutableIndexSet', 'SKPhysicsContact', 'NSProgress', 'AVPlayerViewController', 'CAScrollLayer', 'GKSavedGame', 'NSTextCheckingResult', 'PHObjectPlaceholder', 'SKConstraint', 
'EKEventEditViewController', 'NSEntityDescription', 'NSURLCredentialStorage', 'UIApplication', 'SKDownload', 'SCNNode', 'MKLocalSearchRequest', 'SKScene', 'UISearchDisplayController', 'NEOnDemandRule', 'MTLRenderPassStencilAttachmentDescriptor', 'CAReplicatorLayer', 'UIPrintPageRenderer', 'EKCalendarItem', 'NSUUID', 'EAAccessoryManager', 'NEOnDemandRuleIgnore', 'SKRegion', 'AVAssetResourceLoader', 'EAWiFiUnconfiguredAccessoryBrowser', 'NSUserActivity', 'CTCall', 'UIPrinterPickerController', 'CIVector', 'UINavigationBar', 'UIPanGestureRecognizer', 'MPMediaQuery', 'ABNewPersonViewController', 'CKRecordZoneID', 'HKAnchoredObjectQuery', 'CKFetchRecordZonesOperation', 'UIStoryboardSegue', 'ACAccountType', 'GKSession', 'SKVideoNode', 'PHChange', 'SKReceiptRefreshRequest', 'GCExtendedGamepadSnapshot', 'MPSeekCommandEvent', 'GCExtendedGamepad', 'CAValueFunction', 'SCNCylinder', 'NSNotification', 'NSBatchUpdateResult', 'PKPushCredentials', 'SCNPhysicsSliderJoint', 'AVCaptureDeviceFormat', 'AVPlayerItemErrorLog', 'NSMapTable', 'NSSet', 'CMMotionManager', 'GKVoiceChatService', 'UIPageControl', 'UILexicon', 'MTLArrayType', 'AVAudioUnitReverb', 'MKGeodesicPolyline', 'AVMutableComposition', 'NSLayoutConstraint', 'UIPrinter', 'NSOrderedSet', 'CBAttribute', 'PKPushPayload', 'NSIncrementalStoreNode', 'EKEventStore', 'MPRemoteCommandEvent', 'UISlider', 'UIBlurEffect', 'CKAsset', 'AVCaptureInput', 'AVAudioEngine', 'MTLVertexDescriptor', 'SKPhysicsBody', 'NSOperation', 'PKPaymentPass', 'UIImageAsset', 'MKMapCamera', 'SKProductsResponse', 'GLKEffectPropertyMaterial', 'AVCaptureDevice', 'CTCallCenter', 'CABTMIDILocalPeripheralViewController', 'NEVPNManager', 'HKQuery', 'SCNPhysicsContact', 'CBMutableService', 'AVSampleBufferDisplayLayer', 'SCNSceneSource', 'SKLightNode', 'CKDiscoveredUserInfo', 'NSMutableArray', 'MTLDepthStencilDescriptor', 'MTLArgument', 'NSMassFormatter', 'CIRectangleFeature', 'PKPushRegistry', 'NEVPNConnection', 'MCNearbyServiceBrowser', 'NSOperationQueue', 
'MKPolylineRenderer', 'HKWorkout', 'NSValueTransformer', 'UICollectionViewFlowLayout', 'MPChangePlaybackRateCommandEvent', 'NSEntityMapping', 'SKTexture', 'NSMergePolicy', 'UITextInputStringTokenizer', 'NSRecursiveLock', 'AVAsset', 'NSUndoManager', 'AVAudioUnitSampler', 'NSItemProvider', 'SKUniform', 'MPMediaPickerController', 'CKOperation', 'MTLRenderPipelineDescriptor', 'EAWiFiUnconfiguredAccessory', 'NSFileCoordinator', 'SKRequest', 'NSFileHandle', 'NSConditionLock', 'UISegmentedControl', 'NSManagedObjectModel', 'UITabBarItem', 'SCNCone', 'MPMediaItem', 'SCNMaterial', 'EKRecurrenceRule', 'UIEvent', 'UITouch', 'UIPrintInteractionController', 'CMDeviceMotion', 'NEVPNProtocol', 'NSCompoundPredicate', 'HKHealthStore', 'MKMultiPoint', 'HKSampleType', 'UIPrintFormatter', 'AVAudioUnitEQFilterParameters', 'SKView', 'NSConstantString', 'UIPopoverController', 'CKDatabase', 'AVMetadataFaceObject', 'UIAccelerometer', 'EKEventViewController', 'CMAltitudeData', 'MTLStencilDescriptor', 'UISwipeGestureRecognizer', 'NSPort', 'MKCircleRenderer', 'AVCompositionTrack', 'NSAsynchronousFetchRequest', 'NSUbiquitousKeyValueStore', 'NSMetadataQueryResultGroup', 'AVAssetResourceLoadingDataRequest', 'UITableViewHeaderFooterView', 'CKNotificationID', 'AVAudioSession', 'HKUnit', 'NSNull', 'NSPersistentStoreResult', 'MKCircleView', 'AVAudioChannelLayout', 'NEVPNProtocolIKEv2', 'WKProcessPool', 'UIAttachmentBehavior', 'CLBeacon', 'NSInputStream', 'NSURLCache', 'GKPlayer', 'NSMappingModel', 'CIQRCodeFeature', 'AVMutableVideoComposition', 'PHFetchResult', 'NSAttributeDescription', 'AVPlayer', 'MKAnnotationView', 'PKPaymentRequest', 'NSTimer', 'CBDescriptor', 'MKOverlayView', 'AVAudioUnitTimePitch', 'NSSaveChangesRequest', 'UIReferenceLibraryViewController', 'SKPhysicsJointFixed', 'UILocalizedIndexedCollation', 'UIInterpolatingMotionEffect', 'UIDocumentPickerViewController', 'AVAssetWriter', 'NSBundle', 'SKStoreProductViewController', 'GLKViewController', 'NSMetadataQueryAttributeValueTuple', 
'GKTurnBasedMatch', 'AVAudioFile', 'UIActivity', 'NSPipe', 'MKShape', 'NSMergeConflict', 'CIImage', 'HKObject', 'UIRotationGestureRecognizer', 'AVPlayerItemLegibleOutput', 'AVAssetImageGenerator', 'GCControllerButtonInput', 'CKMarkNotificationsReadOperation', 'CKSubscription', 'MPTimedMetadata', 'NKIssue', 'UIScreenMode', 'HMAccessoryBrowser', 'GKTurnBasedEventHandler', 'UIWebView', 'MKPolyline', 'JSVirtualMachine', 'AVAssetReader', 'NSAttributedString', 'GKMatchmakerViewController', 'NSCountedSet', 'UIButton', 'WKNavigationResponse', 'GKLocalPlayer', 'MPMovieErrorLog', 'AVSpeechUtterance', 'HKStatistics', 'UILocalNotification', 'HKBiologicalSexObject', 'AVURLAsset', 'CBPeripheral', 'NSDateComponentsFormatter', 'SKSpriteNode', 'UIAccessibilityElement', 'AVAssetWriterInputGroup', 'HMZone', 'AVAssetReaderAudioMixOutput', 'NSEnumerator', 'UIDocument', 'MKLocalSearchResponse', 'UISimpleTextPrintFormatter', 'PHPhotoLibrary', 'CBService', 'UIDocumentMenuViewController', 'MCSession', 'QLPreviewController', 'CAMediaTimingFunction', 'UITextPosition', 'ASIdentifierManager', 'AVAssetResourceLoadingRequest', 'SLComposeServiceViewController', 'UIPinchGestureRecognizer', 'PHObject', 'NSExtensionItem', 'HKSampleQuery', 'MTLRenderPipelineColorAttachmentDescriptorArray', 'MKRouteStep', 'SCNCapsule', 'NSMetadataQuery', 'AVAssetResourceLoadingContentInformationRequest', 'UITraitCollection', 'CTCarrier', 'NSFileSecurity', 'UIAcceleration', 'UIMotionEffect', 'MTLRenderPipelineReflection', 'CLHeading', 'CLVisit', 'MKDirectionsResponse', 'HMAccessory', 'MTLStructType', 'UITextView', 'CMMagnetometerData', 'UICollisionBehavior', 'UIProgressView', 'CKServerChangeToken', 'UISearchBar', 'MKPlacemark', 'AVCaptureConnection', 'NSPropertyMapping', 'ALAssetsFilter', 'SK3DNode', 'AVPlayerItemErrorLogEvent', 'NSJSONSerialization', 'AVAssetReaderVideoCompositionOutput', 'ABPersonViewController', 'CIDetector', 'GKTurnBasedMatchmakerViewController', 'MPMediaItemCollection', 'SCNSphere', 'NSCondition', 
'NSURLCredential', 'MIDINetworkConnection', 'NSFileProviderExtension', 'NSDecimalNumberHandler', 'NSAtomicStoreCacheNode', 'NSAtomicStore', 'EKAlarm', 'CKNotificationInfo', 'AVAudioUnitEQ', 'UIPercentDrivenInteractiveTransition', 'MKPolygon', 'AVAssetTrackSegment', 'MTLVertexAttribute', 'NSExpressionDescription', 'HKStatisticsCollectionQuery', 'NSURLAuthenticationChallenge', 'NSDirectoryEnumerator', 'MKDistanceFormatter', 'UIAlertAction', 'NSPropertyListSerialization', 'GKPeerPickerController', 'UIUserNotificationSettings', 'UITableViewController', 'GKNotificationBanner', 'MKPointAnnotation', 'MTLRenderPassColorAttachmentDescriptorArray', 'NSCache', 'SKPhysicsJoint', 'NSXMLParser', 'UIViewController', 'PKPaymentToken', 'MFMessageComposeViewController', 'AVAudioInputNode', 'NSDataDetector', 'CABTMIDICentralViewController', 'AVAudioUnitMIDIInstrument', 'AVCaptureVideoPreviewLayer', 'AVAssetWriterInputPassDescription', 'MPChangePlaybackRateCommand', 'NSURLComponents', 'CAMetalLayer', 'UISnapBehavior', 'AVMetadataMachineReadableCodeObject', 'CKDiscoverUserInfosOperation', 'NSTextAttachment', 'NSException', 'UIMenuItem', 'CMMotionActivityManager', 'SCNGeometryElement', 'NCWidgetController', 'CAEmitterLayer', 'MKUserLocation', 'UIImagePickerController', 'CIFeature', 'AVCaptureDeviceInput', 'ALAsset', 'NSURLSessionDownloadTask', 'SCNPhysicsHingeJoint', 'MPMoviePlayerViewController', 'NSMutableOrderedSet', 'SCNMaterialProperty', 'UIFont', 'AVCaptureVideoDataOutput', 'NSCachedURLResponse', 'ALAssetsLibrary', 'NSInvocation', 'UILongPressGestureRecognizer', 'NSTextStorage', 'WKWebViewConfiguration', 'CIFaceFeature', 'MKMapSnapshot', 'GLKEffectPropertyFog', 'AVComposition', 'CKDiscoverAllContactsOperation', 'AVAudioMixInputParameters', 'CAEmitterBehavior', 'PKPassLibrary', 'UIMutableUserNotificationCategory', 'NSLock', 'NEVPNProtocolIPSec', 'ADBannerView', 'UIDocumentPickerExtensionViewController', 'UIActivityIndicatorView', 'AVPlayerMediaSelectionCriteria', 'CALayer', 
'UIAccessibilityCustomAction', 'UIBarButtonItem', 'AVAudioSessionRouteDescription', 'CLBeaconRegion', 'HKBloodTypeObject', 'MTLVertexBufferLayoutDescriptorArray', 'CABasicAnimation', 'AVVideoCompositionInstruction', 'AVMutableTimedMetadataGroup', 'EKRecurrenceEnd', 'NSTextContainer', 'TWTweetComposeViewController', 'PKPaymentAuthorizationViewController', 'UIScrollView', 'WKNavigationAction', 'AVPlayerItemMetadataOutput', 'EKRecurrenceDayOfWeek', 'NSNumberFormatter', 'MTLComputePipelineReflection', 'UIScreen', 'CLRegion', 'NSProcessInfo', 'GLKTextureInfo', 'SCNSkinner', 'AVCaptureMetadataOutput', 'SCNAnimationEvent', 'NSTextTab', 'JSManagedValue', 'NSDate', 'UITextChecker', 'WKBackForwardListItem', 'NSData', 'NSParagraphStyle', 'AVMutableMetadataItem', 'EKCalendar', 'HKWorkoutEvent', 'NSMutableURLRequest', 'UIVideoEditorController', 'HMTimerTrigger', 'AVAudioUnitVarispeed', 'UIDynamicAnimator', 'AVCompositionTrackSegment', 'GCGamepadSnapshot', 'MPMediaEntity', 'GLKSkyboxEffect', 'UISwitch', 'EKStructuredLocation', 'UIGestureRecognizer', 'NSProxy', 'GLKBaseEffect', 'UIPushBehavior', 'GKScoreChallenge', 'NSCoder', 'MPMediaPlaylist', 'NSDateComponents', 'WKUserScript', 'EKEvent', 'NSDateFormatter', 'NSAsynchronousFetchResult', 'AVAssetWriterInputPixelBufferAdaptor', 'UIVisualEffect', 'UICollectionViewCell', 'UITextField', 'CLPlacemark', 'MPPlayableContentManager', 'AVCaptureOutput', 'HMCharacteristicWriteAction', 'CKModifySubscriptionsOperation', 'NSPropertyDescription', 'GCGamepad', 'UIMarkupTextPrintFormatter', 'SCNTube', 'NSPersistentStoreCoordinator', 'AVAudioEnvironmentNode', 'GKMatchmaker', 'CIContext', 'NSThread', 'SLComposeSheetConfigurationItem', 'SKPhysicsJointSliding', 'NSPredicate', 'GKVoiceChat', 'SKCropNode', 'AVCaptureAudioPreviewOutput', 'NSStringDrawingContext', 'GKGameCenterViewController', 'UIPrintPaper', 'SCNPhysicsBallSocketJoint', 'UICollectionViewLayoutInvalidationContext', 'GLKEffectPropertyTransform', 'AVAudioIONode', 'UIDatePicker', 
'MKDirections', 'ALAssetsGroup', 'CKRecordZoneNotification', 'SCNScene', 'MPMovieAccessLogEvent', 'CKFetchSubscriptionsOperation', 'CAEmitterCell', 'AVAudioUnitTimeEffect', 'HMCharacteristicMetadata', 'MKPinAnnotationView', 'UIPickerView', 'UIImageView', 'UIUserNotificationCategory', 'SCNPhysicsVehicleWheel', 'HKCategoryType', 'MPMediaQuerySection', 'GKFriendRequestComposeViewController', 'NSError', 'MTLRenderPipelineColorAttachmentDescriptor', 'SCNPhysicsShape', 'UISearchController', 'SCNPhysicsBody', 'CTSubscriberInfo', 'AVPlayerItemAccessLog', 'MPMediaPropertyPredicate', 'CMLogItem', 'NSAutoreleasePool', 'NSSocketPort', 'AVAssetReaderTrackOutput', 'SKNode', 'UIMutableUserNotificationAction', 'SCNProgram', 'AVSpeechSynthesisVoice', 'CMAltimeter', 'AVCaptureAudioChannel', 'GKTurnBasedExchangeReply', 'AVVideoCompositionLayerInstruction', 'AVSpeechSynthesizer', 'GKChallengeEventHandler', 'AVCaptureFileOutput', 'UIControl', 'SCNPhysicsField', 'CKReference', 'LAContext', 'CKRecordID', 'ADInterstitialAd', 'AVAudioSessionDataSourceDescription', 'AVAudioBuffer', 'CIColorKernel', 'GCControllerDirectionPad', 'NSFileManager', 'AVMutableAudioMixInputParameters', 'UIScreenEdgePanGestureRecognizer', 'CAKeyframeAnimation', 'CKQueryNotification', 'PHAdjustmentData', 'EASession', 'AVAssetResourceRenewalRequest', 'UIInputView', 'NSFileWrapper', 'UIResponder', 'NSPointerFunctions', 'UIKeyCommand', 'NSHTTPCookieStorage', 'AVMediaSelectionOption', 'NSRunLoop', 'NSFileAccessIntent', 'CAAnimationGroup', 'MKCircle', 'UIAlertController', 'NSMigrationManager', 'NSDateIntervalFormatter', 'UICollectionViewUpdateItem', 'CKDatabaseOperation', 'PHImageRequestOptions', 'SKReachConstraints', 'CKRecord', 'CAInterAppAudioSwitcherView', 'WKWindowFeatures', 'GKInvite', 'NSMutableData', 'PHAssetCollectionChangeRequest', 'NSMutableParagraphStyle', 'UIDynamicBehavior', 'GLKEffectProperty', 'CKFetchRecordChangesOperation', 'SKShapeNode', 'MPMovieErrorLogEvent', 'MKPolygonView', 'MPContentItem', 
'HMAction', 'NSScanner', 'GKAchievementChallenge', 'AVAudioPlayer', 'CKContainer', 'AVVideoComposition', 'NKLibrary', 'NSPersistentStore', 'AVCaptureMovieFileOutput', 'HMRoom', 'GKChallenge', 'UITextRange', 'NSURLProtectionSpace', 'ACAccountStore', 'MPSkipIntervalCommand', 'NSComparisonPredicate', 'HMHome', 'PHVideoRequestOptions', 'NSOutputStream', 'MPSkipIntervalCommandEvent', 'PKAddPassesViewController', 'UITextSelectionRect', 'CTTelephonyNetworkInfo', 'AVTextStyleRule', 'NSFetchedPropertyDescription', 'UIPageViewController', 'CATransformLayer', 'UICollectionViewController', 'AVAudioNode', 'MCNearbyServiceAdvertiser', 'NSObject', 'PHAsset', 'GKLeaderboardViewController', 'CKQueryCursor', 'MPMusicPlayerController', 'MKOverlayPathRenderer', 'CMPedometerData', 'HMService', 'SKFieldNode', 'GKAchievement', 'WKUserContentController', 'AVAssetTrack', 'TWRequest', 'SKLabelNode', 'AVCaptureBracketedStillImageSettings', 'MIDINetworkHost', 'MPMediaPredicate', 'AVFrameRateRange', 'MTLTextureDescriptor', 'MTLVertexBufferLayoutDescriptor', 'MPFeedbackCommandEvent', 'UIUserNotificationAction', 'HKStatisticsQuery', 'SCNParticleSystem', 'NSIndexPath', 'AVVideoCompositionRenderContext', 'CADisplayLink', 'HKObserverQuery', 'UIPopoverPresentationController', 'CKQueryOperation', 'CAEAGLLayer', 'NSMutableString', 'NSMessagePort', 'NSURLQueryItem', 'MTLStructMember', 'AVAudioSessionChannelDescription', 'GLKView', 'UIActivityViewController', 'GKAchievementViewController', 'GKTurnBasedParticipant', 'NSURLProtocol', 'NSUserDefaults', 'NSCalendar', 'SKKeyframeSequence', 'AVMetadataItemFilter', 'CKModifyRecordZonesOperation', 'WKPreferences', 'NSMethodSignature', 'NSRegularExpression', 'EAGLSharegroup', 'AVPlayerItemVideoOutput', 'PHContentEditingInputRequestOptions', 'GKMatch', 'CIColor', 'UIDictationPhrase']) -COCOA_PROTOCOLS = set(['SKStoreProductViewControllerDelegate', 'AVVideoCompositionInstruction', 'AVAudioSessionDelegate', 'GKMatchDelegate', 'NSFileManagerDelegate', 
'UILayoutSupport', 'NSCopying', 'UIPrintInteractionControllerDelegate', 'QLPreviewControllerDataSource', 'SKProductsRequestDelegate', 'NSTextStorageDelegate', 'MCBrowserViewControllerDelegate', 'MTLComputeCommandEncoder', 'SCNSceneExportDelegate', 'UISearchResultsUpdating', 'MFMailComposeViewControllerDelegate', 'MTLBlitCommandEncoder', 'NSDecimalNumberBehaviors', 'PHContentEditingController', 'NSMutableCopying', 'UIActionSheetDelegate', 'UIViewControllerTransitioningDelegate', 'UIAlertViewDelegate', 'AVAudioPlayerDelegate', 'MKReverseGeocoderDelegate', 'NSCoding', 'UITextInputTokenizer', 'GKFriendRequestComposeViewControllerDelegate', 'UIActivityItemSource', 'NSCacheDelegate', 'UIAdaptivePresentationControllerDelegate', 'GKAchievementViewControllerDelegate', 'UIViewControllerTransitionCoordinator', 'EKEventEditViewDelegate', 'NSURLConnectionDelegate', 'UITableViewDelegate', 'GKPeerPickerControllerDelegate', 'UIGuidedAccessRestrictionDelegate', 'AVSpeechSynthesizerDelegate', 'AVAudio3DMixing', 'AVPlayerItemLegibleOutputPushDelegate', 'ADInterstitialAdDelegate', 'HMAccessoryBrowserDelegate', 'AVAssetResourceLoaderDelegate', 'UITabBarControllerDelegate', 'CKRecordValue', 'SKPaymentTransactionObserver', 'AVCaptureAudioDataOutputSampleBufferDelegate', 'UIInputViewAudioFeedback', 'GKChallengeListener', 'SKSceneDelegate', 'UIPickerViewDelegate', 'UIWebViewDelegate', 'UIApplicationDelegate', 'GKInviteEventListener', 'MPMediaPlayback', 'MyClassJavaScriptMethods', 'AVAsynchronousKeyValueLoading', 'QLPreviewItem', 'SCNBoundingVolume', 'NSPortDelegate', 'UIContentContainer', 'SCNNodeRendererDelegate', 'SKRequestDelegate', 'SKPhysicsContactDelegate', 'HMAccessoryDelegate', 'UIPageViewControllerDataSource', 'SCNSceneRendererDelegate', 'SCNPhysicsContactDelegate', 'MKMapViewDelegate', 'AVPlayerItemOutputPushDelegate', 'UICollectionViewDelegate', 'UIImagePickerControllerDelegate', 'MTLRenderCommandEncoder', 'PKPaymentAuthorizationViewControllerDelegate', 'UIToolbarDelegate', 
'WKUIDelegate', 'SCNActionable', 'NSURLConnectionDataDelegate', 'MKOverlay', 'CBCentralManagerDelegate', 'JSExport', 'NSTextLayoutOrientationProvider', 'UIPickerViewDataSource', 'PKPushRegistryDelegate', 'UIViewControllerTransitionCoordinatorContext', 'NSLayoutManagerDelegate', 'MTLLibrary', 'NSFetchedResultsControllerDelegate', 'ABPeoplePickerNavigationControllerDelegate', 'MTLResource', 'NSDiscardableContent', 'UITextFieldDelegate', 'MTLBuffer', 'MTLSamplerState', 'GKGameCenterControllerDelegate', 'MPMediaPickerControllerDelegate', 'UISplitViewControllerDelegate', 'UIAppearance', 'UIPickerViewAccessibilityDelegate', 'UITraitEnvironment', 'UIScrollViewAccessibilityDelegate', 'ADBannerViewDelegate', 'MPPlayableContentDataSource', 'MTLComputePipelineState', 'NSURLSessionDelegate', 'MTLCommandBuffer', 'NSXMLParserDelegate', 'UIViewControllerRestoration', 'UISearchBarDelegate', 'UIBarPositioning', 'CBPeripheralDelegate', 'UISearchDisplayDelegate', 'CAAction', 'PKAddPassesViewControllerDelegate', 'MCNearbyServiceAdvertiserDelegate', 'MTLDepthStencilState', 'GKTurnBasedMatchmakerViewControllerDelegate', 'MPPlayableContentDelegate', 'AVCaptureVideoDataOutputSampleBufferDelegate', 'UIAppearanceContainer', 'UIStateRestoring', 'UITextDocumentProxy', 'MTLDrawable', 'NSURLSessionTaskDelegate', 'NSFilePresenter', 'AVAudioStereoMixing', 'UIViewControllerContextTransitioning', 'UITextInput', 'CBPeripheralManagerDelegate', 'UITextInputDelegate', 'NSFastEnumeration', 'NSURLAuthenticationChallengeSender', 'SCNProgramDelegate', 'AVVideoCompositing', 'SCNAnimatable', 'NSSecureCoding', 'MCAdvertiserAssistantDelegate', 'GKLocalPlayerListener', 'GLKNamedEffect', 'UIPopoverControllerDelegate', 'AVCaptureMetadataOutputObjectsDelegate', 'NSExtensionRequestHandling', 'UITextSelecting', 'UIPrinterPickerControllerDelegate', 'NCWidgetProviding', 'MTLCommandEncoder', 'NSURLProtocolClient', 'MFMessageComposeViewControllerDelegate', 'UIVideoEditorControllerDelegate', 'WKNavigationDelegate', 
'GKSavedGameListener', 'UITableViewDataSource', 'MTLFunction', 'EKCalendarChooserDelegate', 'NSUserActivityDelegate', 'UICollisionBehaviorDelegate', 'NSStreamDelegate', 'MCNearbyServiceBrowserDelegate', 'HMHomeDelegate', 'UINavigationControllerDelegate', 'MCSessionDelegate', 'UIDocumentPickerDelegate', 'UIViewControllerInteractiveTransitioning', 'GKTurnBasedEventListener', 'SCNSceneRenderer', 'MTLTexture', 'GLKViewDelegate', 'EAAccessoryDelegate', 'WKScriptMessageHandler', 'PHPhotoLibraryChangeObserver', 'NSKeyedUnarchiverDelegate', 'AVPlayerItemMetadataOutputPushDelegate', 'NSMachPortDelegate', 'SCNShadable', 'UIPopoverBackgroundViewMethods', 'UIDocumentMenuDelegate', 'UIBarPositioningDelegate', 'ABPersonViewControllerDelegate', 'NSNetServiceBrowserDelegate', 'EKEventViewDelegate', 'UIScrollViewDelegate', 'NSURLConnectionDownloadDelegate', 'UIGestureRecognizerDelegate', 'UINavigationBarDelegate', 'AVAudioMixing', 'NSFetchedResultsSectionInfo', 'UIDocumentInteractionControllerDelegate', 'MTLParallelRenderCommandEncoder', 'QLPreviewControllerDelegate', 'UIAccessibilityReadingContent', 'ABUnknownPersonViewControllerDelegate', 'GLKViewControllerDelegate', 'UICollectionViewDelegateFlowLayout', 'UIPopoverPresentationControllerDelegate', 'UIDynamicAnimatorDelegate', 'NSTextAttachmentContainer', 'MKAnnotation', 'UIAccessibilityIdentification', 'UICoordinateSpace', 'ABNewPersonViewControllerDelegate', 'MTLDevice', 'CAMediaTiming', 'AVCaptureFileOutputRecordingDelegate', 'HMHomeManagerDelegate', 'UITextViewDelegate', 'UITabBarDelegate', 'GKLeaderboardViewControllerDelegate', 'UISearchControllerDelegate', 'EAWiFiUnconfiguredAccessoryBrowserDelegate', 'UITextInputTraits', 'MTLRenderPipelineState', 'GKVoiceChatClient', 'UIKeyInput', 'UICollectionViewDataSource', 'SCNTechniqueSupport', 'NSLocking', 'AVCaptureFileOutputDelegate', 'GKChallengeEventHandlerDelegate', 'UIObjectRestoration', 'CIFilterConstructor', 'AVPlayerItemOutputPullDelegate', 'EAGLDrawable', 
'AVVideoCompositionValidationHandling', 'UIViewControllerAnimatedTransitioning', 'NSURLSessionDownloadDelegate', 'UIAccelerometerDelegate', 'UIPageViewControllerDelegate', 'MTLCommandQueue', 'UIDataSourceModelAssociation', 'AVAudioRecorderDelegate', 'GKSessionDelegate', 'NSKeyedArchiverDelegate', 'CAMetalDrawable', 'UIDynamicItem', 'CLLocationManagerDelegate', 'NSMetadataQueryDelegate', 'NSNetServiceDelegate', 'GKMatchmakerViewControllerDelegate', 'NSURLSessionDataDelegate']) -COCOA_PRIMITIVES = set(['ROTAHeader', '__CFBundle', 'MortSubtable', 'AudioFilePacketTableInfo', 'CGPDFOperatorTable', 'KerxStateEntry', 'ExtendedTempoEvent', 'CTParagraphStyleSetting', 'OpaqueMIDIPort', '_GLKMatrix3', '_GLKMatrix2', '_GLKMatrix4', 'ExtendedControlEvent', 'CAFAudioDescription', 'OpaqueCMBlockBuffer', 'CGTextDrawingMode', 'EKErrorCode', 'gss_buffer_desc_struct', 'AudioUnitParameterInfo', '__SCPreferences', '__CTFrame', '__CTLine', 'AudioFile_SMPTE_Time', 'gss_krb5_lucid_context_v1', 'OpaqueJSValue', 'TrakTableEntry', 'AudioFramePacketTranslation', 'CGImageSource', 'OpaqueJSPropertyNameAccumulator', 'JustPCGlyphRepeatAddAction', '__CFBinaryHeap', 'OpaqueMIDIThruConnection', 'opaqueCMBufferQueue', 'OpaqueMusicSequence', 'MortRearrangementSubtable', 'MixerDistanceParams', 'MorxSubtable', 'MIDIObjectPropertyChangeNotification', 'SFNTLookupSegment', 'CGImageMetadataErrors', 'CGPath', 'OpaqueMIDIEndpoint', 'AudioComponentPlugInInterface', 'gss_ctx_id_t_desc_struct', 'sfntFontFeatureSetting', 'OpaqueJSContextGroup', '__SCNetworkConnection', 'AudioUnitParameterValueTranslation', 'CGImageMetadataType', 'CGPattern', 'AudioFileTypeAndFormatID', 'CGContext', 'AUNodeInteraction', 'SFNTLookupTable', 'JustPCDecompositionAction', 'KerxControlPointHeader', 'AudioStreamPacketDescription', 'KernSubtableHeader', '__SecCertificate', 'AUMIDIOutputCallbackStruct', 'MIDIMetaEvent', 'AudioQueueChannelAssignment', 'AnchorPoint', 'JustTable', '__CFNetService', 'CF_BRIDGED_TYPE', 'gss_krb5_lucid_key', 
'CGPDFDictionary', 'KerxSubtableHeader', 'CAF_UUID_ChunkHeader', 'gss_krb5_cfx_keydata', 'OpaqueJSClass', 'CGGradient', 'OpaqueMIDISetup', 'JustPostcompTable', '__CTParagraphStyle', 'AudioUnitParameterHistoryInfo', 'OpaqueJSContext', 'CGShading', 'MIDIThruConnectionParams', 'BslnFormat0Part', 'SFNTLookupSingle', '__CFHost', '__SecRandom', '__CTFontDescriptor', '_NSRange', 'sfntDirectory', 'AudioQueueLevelMeterState', 'CAFPositionPeak', 'PropLookupSegment', '__CVOpenGLESTextureCache', 'sfntInstance', '_GLKQuaternion', 'AnkrTable', '__SCNetworkProtocol', 'CAFFileHeader', 'KerxOrderedListHeader', 'CGBlendMode', 'STXEntryOne', 'CAFRegion', 'SFNTLookupTrimmedArrayHeader', 'SCNMatrix4', 'KerxControlPointEntry', 'OpaqueMusicTrack', '_GLKVector4', 'gss_OID_set_desc_struct', 'OpaqueMusicPlayer', '_CFHTTPAuthentication', 'CGAffineTransform', 'CAFMarkerChunk', 'AUHostIdentifier', 'ROTAGlyphEntry', 'BslnTable', 'gss_krb5_lucid_context_version', '_GLKMatrixStack', 'CGImage', 'KernStateEntry', 'SFNTLookupSingleHeader', 'MortLigatureSubtable', 'CAFUMIDChunk', 'SMPTETime', 'CAFDataChunk', 'CGPDFStream', 'AudioFileRegionList', 'STEntryTwo', 'SFNTLookupBinarySearchHeader', 'OpbdTable', '__CTGlyphInfo', 'BslnFormat2Part', 'KerxIndexArrayHeader', 'TrakTable', 'KerxKerningPair', '__CFBitVector', 'KernVersion0SubtableHeader', 'OpaqueAudioComponentInstance', 'AudioChannelLayout', '__CFUUID', 'MIDISysexSendRequest', '__CFNumberFormatter', 'CGImageSourceStatus', 'AudioFileMarkerList', 'AUSamplerBankPresetData', 'CGDataProvider', 'AudioFormatInfo', '__SecIdentity', 'sfntCMapExtendedSubHeader', 'MIDIChannelMessage', 'KernOffsetTable', 'CGColorSpaceModel', 'MFMailComposeErrorCode', 'CGFunction', '__SecTrust', 'AVAudio3DAngularOrientation', 'CGFontPostScriptFormat', 'KernStateHeader', 'AudioUnitCocoaViewInfo', 'CGDataConsumer', 'OpaqueMIDIDevice', 'KernVersion0Header', 'AnchorPointTable', 'CGImageDestination', 'CAFInstrumentChunk', 'AudioUnitMeterClipping', 'MorxChain', '__CTFontCollection', 
'STEntryOne', 'STXEntryTwo', 'ExtendedNoteOnEvent', 'CGColorRenderingIntent', 'KerxSimpleArrayHeader', 'MorxTable', '_GLKVector3', '_GLKVector2', 'MortTable', 'CGPDFBox', 'AudioUnitParameterValueFromString', '__CFSocket', 'ALCdevice_struct', 'MIDINoteMessage', 'sfntFeatureHeader', 'CGRect', '__SCNetworkInterface', '__CFTree', 'MusicEventUserData', 'TrakTableData', 'GCQuaternion', 'MortContextualSubtable', '__CTRun', 'AudioUnitFrequencyResponseBin', 'MortChain', 'MorxInsertionSubtable', 'CGImageMetadata', 'gss_auth_identity', 'AudioUnitMIDIControlMapping', 'CAFChunkHeader', 'CGImagePropertyOrientation', 'CGPDFScanner', 'OpaqueMusicEventIterator', 'sfntDescriptorHeader', 'AudioUnitNodeConnection', 'OpaqueMIDIDeviceList', 'ExtendedAudioFormatInfo', 'BslnFormat1Part', 'sfntFontDescriptor', 'KernSimpleArrayHeader', '__CFRunLoopObserver', 'CGPatternTiling', 'MIDINotification', 'MorxLigatureSubtable', 'MessageComposeResult', 'MIDIThruConnectionEndpoint', 'MusicDeviceStdNoteParams', 'opaqueCMSimpleQueue', 'ALCcontext_struct', 'OpaqueAudioQueue', 'PropLookupSingle', 'CGInterpolationQuality', 'CGColor', 'AudioOutputUnitStartAtTimeParams', 'gss_name_t_desc_struct', 'CGFunctionCallbacks', 'CAFPacketTableHeader', 'AudioChannelDescription', 'sfntFeatureName', 'MorxContextualSubtable', 'CVSMPTETime', 'AudioValueRange', 'CGTextEncoding', 'AudioStreamBasicDescription', 'AUNodeRenderCallback', 'AudioPanningInfo', 'KerxOrderedListEntry', '__CFAllocator', 'OpaqueJSPropertyNameArray', '__SCDynamicStore', 'OpaqueMIDIEntity', '__CTRubyAnnotation', 'SCNVector4', 'CFHostClientContext', 'CFNetServiceClientContext', 'AudioUnitPresetMAS_SettingData', 'opaqueCMBufferQueueTriggerToken', 'AudioUnitProperty', 'CAFRegionChunk', 'CGPDFString', '__GLsync', '__CFStringTokenizer', 'JustWidthDeltaEntry', 'sfntVariationAxis', '__CFNetDiagnostic', 'CAFOverviewSample', 'sfntCMapEncoding', 'CGVector', '__SCNetworkService', 'opaqueCMSampleBuffer', 'AUHostVersionIdentifier', 'AudioBalanceFade', 
'sfntFontRunFeature', 'KerxCoordinateAction', 'sfntCMapSubHeader', 'CVPlanarPixelBufferInfo', 'AUNumVersion', 'AUSamplerInstrumentData', 'AUPreset', '__CTRunDelegate', 'OpaqueAudioQueueProcessingTap', 'KerxTableHeader', '_NSZone', 'OpaqueExtAudioFile', '__CFRunLoopSource', '__CVMetalTextureCache', 'KerxAnchorPointAction', 'OpaqueJSString', 'AudioQueueParameterEvent', '__CFHTTPMessage', 'OpaqueCMClock', 'ScheduledAudioFileRegion', 'STEntryZero', 'AVAudio3DPoint', 'gss_channel_bindings_struct', 'sfntVariationHeader', 'AUChannelInfo', 'UIOffset', 'GLKEffectPropertyPrv', 'KerxStateHeader', 'CGLineJoin', 'CGPDFDocument', '__CFBag', 'KernOrderedListHeader', '__SCNetworkSet', '__SecKey', 'MIDIObjectAddRemoveNotification', 'AudioUnitParameter', 'JustPCActionSubrecord', 'AudioComponentDescription', 'AudioUnitParameterValueName', 'AudioUnitParameterEvent', 'KerxControlPointAction', 'AudioTimeStamp', 'KernKerningPair', 'gss_buffer_set_desc_struct', 'MortFeatureEntry', 'FontVariation', 'CAFStringID', 'LcarCaretClassEntry', 'AudioUnitParameterStringFromValue', 'ACErrorCode', 'ALMXGlyphEntry', 'LtagTable', '__CTTypesetter', 'AuthorizationOpaqueRef', 'UIEdgeInsets', 'CGPathElement', 'CAFMarker', 'KernTableHeader', 'NoteParamsControlValue', 'SSLContext', 'gss_cred_id_t_desc_struct', 'AudioUnitParameterNameInfo', 'CGDataConsumerCallbacks', 'ALMXHeader', 'CGLineCap', 'MIDIControlTransform', 'CGPDFArray', '__SecPolicy', 'AudioConverterPrimeInfo', '__CTTextTab', '__CFNetServiceMonitor', 'AUInputSamplesInOutputCallbackStruct', '__CTFramesetter', 'CGPDFDataFormat', 'STHeader', 'CVPlanarPixelBufferInfo_YCbCrPlanar', 'MIDIValueMap', 'JustDirectionTable', '__SCBondStatus', 'SFNTLookupSegmentHeader', 'OpaqueCMMemoryPool', 'CGPathDrawingMode', 'CGFont', '__SCNetworkReachability', 'AudioClassDescription', 'CGPoint', 'AVAudio3DVectorOrientation', 'CAFStrings', '__CFNetServiceBrowser', 'opaqueMTAudioProcessingTap', 'sfntNameRecord', 'CGPDFPage', 'CGLayer', 'ComponentInstanceRecord', 
'CAFInfoStrings', 'HostCallbackInfo', 'MusicDeviceNoteParams', 'OpaqueVTCompressionSession', 'KernIndexArrayHeader', 'CVPlanarPixelBufferInfo_YCbCrBiPlanar', 'MusicTrackLoopInfo', 'opaqueCMFormatDescription', 'STClassTable', 'sfntDirectoryEntry', 'OpaqueCMTimebase', 'CGDataProviderDirectCallbacks', 'MIDIPacketList', 'CAFOverviewChunk', 'MIDIPacket', 'ScheduledAudioSlice', 'CGDataProviderSequentialCallbacks', 'AudioBuffer', 'MorxRearrangementSubtable', 'CGPatternCallbacks', 'AUDistanceAttenuationData', 'MIDIIOErrorNotification', 'CGPDFContentStream', 'IUnknownVTbl', 'MIDITransform', 'MortInsertionSubtable', 'CABarBeatTime', 'AudioBufferList', '__CVBuffer', 'AURenderCallbackStruct', 'STXEntryZero', 'JustPCDuctilityAction', 'OpaqueAudioQueueTimeline', 'VTDecompressionOutputCallbackRecord', 'OpaqueMIDIClient', '__CFPlugInInstance', 'AudioQueueBuffer', '__CFFileDescriptor', 'AudioUnitConnection', '_GKTurnBasedExchangeStatus', 'LcarCaretTable', 'CVPlanarComponentInfo', 'JustWidthDeltaGroup', 'OpaqueAudioComponent', 'ParameterEvent', '__CVPixelBufferPool', '__CTFont', 'CGColorSpace', 'CGSize', 'AUDependentParameter', 'MIDIDriverInterface', 'gss_krb5_rfc1964_keydata', '__CFDateFormatter', 'LtagStringRange', 'OpaqueVTDecompressionSession', 'gss_iov_buffer_desc_struct', 'AUPresetEvent', 'PropTable', 'KernOrderedListEntry', 'CF_BRIDGED_MUTABLE_TYPE', 'gss_OID_desc_struct', 'AudioUnitPresetMAS_Settings', 'AudioFileMarker', 'JustPCConditionalAddAction', 'BslnFormat3Part', '__CFNotificationCenter', 'MortSwashSubtable', 'AUParameterMIDIMapping', 'SCNVector3', 'OpaqueAudioConverter', 'MIDIRawData', 'sfntNameHeader', '__CFRunLoop', 'MFMailComposeResult', 'CATransform3D', 'OpbdSideValues', 'CAF_SMPTE_Time', '__SecAccessControl', 'JustPCAction', 'OpaqueVTFrameSilo', 'OpaqueVTMultiPassStorage', 'CGPathElementType', 'AudioFormatListItem', 'AudioUnitExternalBuffer', 'AudioFileRegion', 'AudioValueTranslation', 'CGImageMetadataTag', 'CAFPeakChunk', 'AudioBytePacketTranslation', 
'sfntCMapHeader', '__CFURLEnumerator', 'STXHeader', 'CGPDFObjectType', 'SFNTLookupArrayHeader']) +COCOA_INTERFACES = {'UITableViewCell', 'HKCorrelationQuery', 'NSURLSessionDataTask', 'PHFetchOptions', 'NSLinguisticTagger', 'NSStream', 'AVAudioUnitDelay', 'GCMotion', 'SKPhysicsWorld', 'NSString', 'CMAttitude', 'AVAudioEnvironmentDistanceAttenuationParameters', 'HKStatisticsCollection', 'SCNPlane', 'CBPeer', 'JSContext', 'SCNTransaction', 'SCNTorus', 'AVAudioUnitEffect', 'UICollectionReusableView', 'MTLSamplerDescriptor', 'AVAssetReaderSampleReferenceOutput', 'AVMutableCompositionTrack', 'GKLeaderboard', 'NSFetchedResultsController', 'SKRange', 'MKTileOverlayRenderer', 'MIDINetworkSession', 'UIVisualEffectView', 'CIWarpKernel', 'PKObject', 'MKRoute', 'MPVolumeView', 'UIPrintInfo', 'SCNText', 'ADClient', 'PKPayment', 'AVMutableAudioMix', 'GLKEffectPropertyLight', 'WKScriptMessage', 'AVMIDIPlayer', 'PHCollectionListChangeRequest', 'UICollectionViewLayout', 'NSMutableCharacterSet', 'SKPaymentTransaction', 'NEOnDemandRuleConnect', 'NSShadow', 'SCNView', 'NSURLSessionConfiguration', 'MTLVertexAttributeDescriptor', 'CBCharacteristic', 'HKQuantityType', 'CKLocationSortDescriptor', 'NEVPNIKEv2SecurityAssociationParameters', 'CMStepCounter', 'NSNetService', 'AVAssetWriterInputMetadataAdaptor', 'UICollectionView', 'UIViewPrintFormatter', 'SCNLevelOfDetail', 'CAShapeLayer', 'MCPeerID', 'MPRatingCommand', 'WKNavigation', 'NSDictionary', 'NSFileVersion', 'CMGyroData', 'AVAudioUnitDistortion', 'CKFetchRecordsOperation', 'SKPhysicsJointSpring', 'SCNHitTestResult', 'AVAudioTime', 'CIFilter', 'UIView', 'SCNConstraint', 'CAPropertyAnimation', 'MKMapItem', 'MPRemoteCommandCenter', 'PKPaymentSummaryItem', 'UICollectionViewFlowLayoutInvalidationContext', 'UIInputViewController', 'PKPass', 'SCNPhysicsBehavior', 'MTLRenderPassColorAttachmentDescriptor', 'MKPolygonRenderer', 'CKNotification', 'JSValue', 'PHCollectionList', 'CLGeocoder', 'NSByteCountFormatter', 'AVCaptureScreenInput', 
'MPFeedbackCommand', 'CAAnimation', 'MKOverlayPathView', 'UIActionSheet', 'UIMotionEffectGroup', 'NSLengthFormatter', 'UIBarItem', 'SKProduct', 'AVAssetExportSession', 'NSKeyedUnarchiver', 'NSMutableSet', 'SCNPyramid', 'PHAssetCollection', 'MKMapView', 'HMHomeManager', 'CATransition', 'MTLCompileOptions', 'UIVibrancyEffect', 'CLCircularRegion', 'MKTileOverlay', 'SCNShape', 'ACAccountCredential', 'SKPhysicsJointLimit', 'MKMapSnapshotter', 'AVMediaSelectionGroup', 'NSIndexSet', 'CBPeripheralManager', 'CKRecordZone', 'AVAudioRecorder', 'NSURL', 'CBCentral', 'NSNumber', 'AVAudioOutputNode', 'MTLVertexAttributeDescriptorArray', 'MKETAResponse', 'SKTransition', 'SSReadingList', 'HKSourceQuery', 'UITableViewRowAction', 'UITableView', 'SCNParticlePropertyController', 'AVCaptureStillImageOutput', 'GCController', 'AVAudioPlayerNode', 'AVAudioSessionPortDescription', 'NSHTTPURLResponse', 'NEOnDemandRuleEvaluateConnection', 'SKEffectNode', 'HKQuantity', 'GCControllerElement', 'AVPlayerItemAccessLogEvent', 'SCNBox', 'NSExtensionContext', 'MKOverlayRenderer', 'SCNPhysicsVehicle', 'NSDecimalNumber', 'EKReminder', 'MKPolylineView', 'CKQuery', 'AVAudioMixerNode', 'GKAchievementDescription', 'EKParticipant', 'NSBlockOperation', 'UIActivityItemProvider', 'CLLocation', 'NSBatchUpdateRequest', 'PHContentEditingOutput', 'PHObjectChangeDetails', 'HKWorkoutType', 'MPMoviePlayerController', 'AVAudioFormat', 'HMTrigger', 'MTLRenderPassDepthAttachmentDescriptor', 'SCNRenderer', 'GKScore', 'UISplitViewController', 'HKSource', 'NSURLConnection', 'ABUnknownPersonViewController', 'SCNTechnique', 'UIMenuController', 'NSEvent', 'SKTextureAtlas', 'NSKeyedArchiver', 'GKLeaderboardSet', 'NSSimpleCString', 'AVAudioPCMBuffer', 'CBATTRequest', 'GKMatchRequest', 'AVMetadataObject', 'SKProductsRequest', 'UIAlertView', 'NSIncrementalStore', 'MFMailComposeViewController', 'SCNFloor', 'NSSortDescriptor', 'CKFetchNotificationChangesOperation', 'MPMovieAccessLog', 'NSManagedObjectContext', 
'AVAudioUnitGenerator', 'WKBackForwardList', 'SKMutableTexture', 'AVCaptureAudioDataOutput', 'ACAccount', 'AVMetadataItem', 'MPRatingCommandEvent', 'AVCaptureDeviceInputSource', 'CLLocationManager', 'MPRemoteCommand', 'AVCaptureSession', 'UIStepper', 'UIRefreshControl', 'NEEvaluateConnectionRule', 'CKModifyRecordsOperation', 'UICollectionViewTransitionLayout', 'CBCentralManager', 'NSPurgeableData', 'PKShippingMethod', 'SLComposeViewController', 'NSHashTable', 'MKUserTrackingBarButtonItem', 'UILexiconEntry', 'CMMotionActivity', 'SKAction', 'SKShader', 'AVPlayerItemOutput', 'MTLRenderPassAttachmentDescriptor', 'UIDocumentInteractionController', 'UIDynamicItemBehavior', 'NSMutableDictionary', 'UILabel', 'AVCaptureInputPort', 'NSExpression', 'CAInterAppAudioTransportView', 'SKMutablePayment', 'UIImage', 'PHCachingImageManager', 'SCNTransformConstraint', 'HKCorrelationType', 'UIColor', 'SCNGeometrySource', 'AVCaptureAutoExposureBracketedStillImageSettings', 'UIPopoverBackgroundView', 'UIToolbar', 'NSNotificationCenter', 'UICollectionViewLayoutAttributes', 'AVAssetReaderOutputMetadataAdaptor', 'NSEntityMigrationPolicy', 'HMUser', 'NSLocale', 'NSURLSession', 'SCNCamera', 'NSTimeZone', 'UIManagedDocument', 'AVMutableVideoCompositionLayerInstruction', 'AVAssetTrackGroup', 'NSInvocationOperation', 'ALAssetRepresentation', 'AVQueuePlayer', 'HMServiceGroup', 'UIPasteboard', 'PHContentEditingInput', 'NSLayoutManager', 'EKCalendarChooser', 'EKObject', 'CATiledLayer', 'GLKReflectionMapEffect', 'NSManagedObjectID', 'NSEnergyFormatter', 'SLRequest', 'HMCharacteristic', 'AVPlayerLayer', 'MTLRenderPassDescriptor', 'SKPayment', 'NSPointerArray', 'AVAudioMix', 'SCNLight', 'MCAdvertiserAssistant', 'MKMapSnapshotOptions', 'HKCategorySample', 'AVAudioEnvironmentReverbParameters', 'SCNMorpher', 'AVTimedMetadataGroup', 'CBMutableCharacteristic', 'NSFetchRequest', 'UIDevice', 'NSManagedObject', 'NKAssetDownload', 'AVOutputSettingsAssistant', 'SKPhysicsJointPin', 'UITabBar', 
'UITextInputMode', 'NSFetchRequestExpression', 'HMActionSet', 'CTSubscriber', 'PHAssetChangeRequest', 'NSPersistentStoreRequest', 'UITabBarController', 'HKQuantitySample', 'AVPlayerItem', 'AVSynchronizedLayer', 'MKDirectionsRequest', 'NSMetadataItem', 'UIPresentationController', 'UINavigationItem', 'PHFetchResultChangeDetails', 'PHImageManager', 'AVCaptureManualExposureBracketedStillImageSettings', 'UIStoryboardPopoverSegue', 'SCNLookAtConstraint', 'UIGravityBehavior', 'UIWindow', 'CBMutableDescriptor', 'NEOnDemandRuleDisconnect', 'UIBezierPath', 'UINavigationController', 'ABPeoplePickerNavigationController', 'EKSource', 'AVAssetWriterInput', 'AVPlayerItemTrack', 'GLKEffectPropertyTexture', 'NSHTTPCookie', 'NSURLResponse', 'SKPaymentQueue', 'NSAssertionHandler', 'MKReverseGeocoder', 'GCControllerAxisInput', 'NSArray', 'NSOrthography', 'NSURLSessionUploadTask', 'NSCharacterSet', 'AVMutableVideoCompositionInstruction', 'AVAssetReaderOutput', 'EAGLContext', 'WKFrameInfo', 'CMPedometer', 'MyClass', 'CKModifyBadgeOperation', 'AVCaptureAudioFileOutput', 'SKEmitterNode', 'NSMachPort', 'AVVideoCompositionCoreAnimationTool', 'PHCollection', 'SCNPhysicsWorld', 'NSURLRequest', 'CMAccelerometerData', 'NSNetServiceBrowser', 'CLFloor', 'AVAsynchronousVideoCompositionRequest', 'SCNGeometry', 'SCNIKConstraint', 'CIKernel', 'CAGradientLayer', 'HKCharacteristicType', 'NSFormatter', 'SCNAction', 'CATransaction', 'CBUUID', 'UIStoryboard', 'MPMediaLibrary', 'UITapGestureRecognizer', 'MPMediaItemArtwork', 'NSURLSessionTask', 'AVAudioUnit', 'MCBrowserViewController', 'UIFontDescriptor', 'NSRelationshipDescription', 'HKSample', 'WKWebView', 'NSMutableAttributedString', 'NSPersistentStoreAsynchronousResult', 'MPNowPlayingInfoCenter', 'MKLocalSearch', 'EAAccessory', 'HKCorrelation', 'CATextLayer', 'NSNotificationQueue', 'UINib', 'GLKTextureLoader', 'HKObjectType', 'NSValue', 'NSMutableIndexSet', 'SKPhysicsContact', 'NSProgress', 'AVPlayerViewController', 'CAScrollLayer', 'GKSavedGame', 
'NSTextCheckingResult', 'PHObjectPlaceholder', 'SKConstraint', 'EKEventEditViewController', 'NSEntityDescription', 'NSURLCredentialStorage', 'UIApplication', 'SKDownload', 'SCNNode', 'MKLocalSearchRequest', 'SKScene', 'UISearchDisplayController', 'NEOnDemandRule', 'MTLRenderPassStencilAttachmentDescriptor', 'CAReplicatorLayer', 'UIPrintPageRenderer', 'EKCalendarItem', 'NSUUID', 'EAAccessoryManager', 'NEOnDemandRuleIgnore', 'SKRegion', 'AVAssetResourceLoader', 'EAWiFiUnconfiguredAccessoryBrowser', 'NSUserActivity', 'CTCall', 'UIPrinterPickerController', 'CIVector', 'UINavigationBar', 'UIPanGestureRecognizer', 'MPMediaQuery', 'ABNewPersonViewController', 'CKRecordZoneID', 'HKAnchoredObjectQuery', 'CKFetchRecordZonesOperation', 'UIStoryboardSegue', 'ACAccountType', 'GKSession', 'SKVideoNode', 'PHChange', 'SKReceiptRefreshRequest', 'GCExtendedGamepadSnapshot', 'MPSeekCommandEvent', 'GCExtendedGamepad', 'CAValueFunction', 'SCNCylinder', 'NSNotification', 'NSBatchUpdateResult', 'PKPushCredentials', 'SCNPhysicsSliderJoint', 'AVCaptureDeviceFormat', 'AVPlayerItemErrorLog', 'NSMapTable', 'NSSet', 'CMMotionManager', 'GKVoiceChatService', 'UIPageControl', 'UILexicon', 'MTLArrayType', 'AVAudioUnitReverb', 'MKGeodesicPolyline', 'AVMutableComposition', 'NSLayoutConstraint', 'UIPrinter', 'NSOrderedSet', 'CBAttribute', 'PKPushPayload', 'NSIncrementalStoreNode', 'EKEventStore', 'MPRemoteCommandEvent', 'UISlider', 'UIBlurEffect', 'CKAsset', 'AVCaptureInput', 'AVAudioEngine', 'MTLVertexDescriptor', 'SKPhysicsBody', 'NSOperation', 'PKPaymentPass', 'UIImageAsset', 'MKMapCamera', 'SKProductsResponse', 'GLKEffectPropertyMaterial', 'AVCaptureDevice', 'CTCallCenter', 'CABTMIDILocalPeripheralViewController', 'NEVPNManager', 'HKQuery', 'SCNPhysicsContact', 'CBMutableService', 'AVSampleBufferDisplayLayer', 'SCNSceneSource', 'SKLightNode', 'CKDiscoveredUserInfo', 'NSMutableArray', 'MTLDepthStencilDescriptor', 'MTLArgument', 'NSMassFormatter', 'CIRectangleFeature', 'PKPushRegistry', 
'NEVPNConnection', 'MCNearbyServiceBrowser', 'NSOperationQueue', 'MKPolylineRenderer', 'HKWorkout', 'NSValueTransformer', 'UICollectionViewFlowLayout', 'MPChangePlaybackRateCommandEvent', 'NSEntityMapping', 'SKTexture', 'NSMergePolicy', 'UITextInputStringTokenizer', 'NSRecursiveLock', 'AVAsset', 'NSUndoManager', 'AVAudioUnitSampler', 'NSItemProvider', 'SKUniform', 'MPMediaPickerController', 'CKOperation', 'MTLRenderPipelineDescriptor', 'EAWiFiUnconfiguredAccessory', 'NSFileCoordinator', 'SKRequest', 'NSFileHandle', 'NSConditionLock', 'UISegmentedControl', 'NSManagedObjectModel', 'UITabBarItem', 'SCNCone', 'MPMediaItem', 'SCNMaterial', 'EKRecurrenceRule', 'UIEvent', 'UITouch', 'UIPrintInteractionController', 'CMDeviceMotion', 'NEVPNProtocol', 'NSCompoundPredicate', 'HKHealthStore', 'MKMultiPoint', 'HKSampleType', 'UIPrintFormatter', 'AVAudioUnitEQFilterParameters', 'SKView', 'NSConstantString', 'UIPopoverController', 'CKDatabase', 'AVMetadataFaceObject', 'UIAccelerometer', 'EKEventViewController', 'CMAltitudeData', 'MTLStencilDescriptor', 'UISwipeGestureRecognizer', 'NSPort', 'MKCircleRenderer', 'AVCompositionTrack', 'NSAsynchronousFetchRequest', 'NSUbiquitousKeyValueStore', 'NSMetadataQueryResultGroup', 'AVAssetResourceLoadingDataRequest', 'UITableViewHeaderFooterView', 'CKNotificationID', 'AVAudioSession', 'HKUnit', 'NSNull', 'NSPersistentStoreResult', 'MKCircleView', 'AVAudioChannelLayout', 'NEVPNProtocolIKEv2', 'WKProcessPool', 'UIAttachmentBehavior', 'CLBeacon', 'NSInputStream', 'NSURLCache', 'GKPlayer', 'NSMappingModel', 'CIQRCodeFeature', 'AVMutableVideoComposition', 'PHFetchResult', 'NSAttributeDescription', 'AVPlayer', 'MKAnnotationView', 'PKPaymentRequest', 'NSTimer', 'CBDescriptor', 'MKOverlayView', 'AVAudioUnitTimePitch', 'NSSaveChangesRequest', 'UIReferenceLibraryViewController', 'SKPhysicsJointFixed', 'UILocalizedIndexedCollation', 'UIInterpolatingMotionEffect', 'UIDocumentPickerViewController', 'AVAssetWriter', 'NSBundle', 
'SKStoreProductViewController', 'GLKViewController', 'NSMetadataQueryAttributeValueTuple', 'GKTurnBasedMatch', 'AVAudioFile', 'UIActivity', 'NSPipe', 'MKShape', 'NSMergeConflict', 'CIImage', 'HKObject', 'UIRotationGestureRecognizer', 'AVPlayerItemLegibleOutput', 'AVAssetImageGenerator', 'GCControllerButtonInput', 'CKMarkNotificationsReadOperation', 'CKSubscription', 'MPTimedMetadata', 'NKIssue', 'UIScreenMode', 'HMAccessoryBrowser', 'GKTurnBasedEventHandler', 'UIWebView', 'MKPolyline', 'JSVirtualMachine', 'AVAssetReader', 'NSAttributedString', 'GKMatchmakerViewController', 'NSCountedSet', 'UIButton', 'WKNavigationResponse', 'GKLocalPlayer', 'MPMovieErrorLog', 'AVSpeechUtterance', 'HKStatistics', 'UILocalNotification', 'HKBiologicalSexObject', 'AVURLAsset', 'CBPeripheral', 'NSDateComponentsFormatter', 'SKSpriteNode', 'UIAccessibilityElement', 'AVAssetWriterInputGroup', 'HMZone', 'AVAssetReaderAudioMixOutput', 'NSEnumerator', 'UIDocument', 'MKLocalSearchResponse', 'UISimpleTextPrintFormatter', 'PHPhotoLibrary', 'CBService', 'UIDocumentMenuViewController', 'MCSession', 'QLPreviewController', 'CAMediaTimingFunction', 'UITextPosition', 'ASIdentifierManager', 'AVAssetResourceLoadingRequest', 'SLComposeServiceViewController', 'UIPinchGestureRecognizer', 'PHObject', 'NSExtensionItem', 'HKSampleQuery', 'MTLRenderPipelineColorAttachmentDescriptorArray', 'MKRouteStep', 'SCNCapsule', 'NSMetadataQuery', 'AVAssetResourceLoadingContentInformationRequest', 'UITraitCollection', 'CTCarrier', 'NSFileSecurity', 'UIAcceleration', 'UIMotionEffect', 'MTLRenderPipelineReflection', 'CLHeading', 'CLVisit', 'MKDirectionsResponse', 'HMAccessory', 'MTLStructType', 'UITextView', 'CMMagnetometerData', 'UICollisionBehavior', 'UIProgressView', 'CKServerChangeToken', 'UISearchBar', 'MKPlacemark', 'AVCaptureConnection', 'NSPropertyMapping', 'ALAssetsFilter', 'SK3DNode', 'AVPlayerItemErrorLogEvent', 'NSJSONSerialization', 'AVAssetReaderVideoCompositionOutput', 'ABPersonViewController', 'CIDetector', 
'GKTurnBasedMatchmakerViewController', 'MPMediaItemCollection', 'SCNSphere', 'NSCondition', 'NSURLCredential', 'MIDINetworkConnection', 'NSFileProviderExtension', 'NSDecimalNumberHandler', 'NSAtomicStoreCacheNode', 'NSAtomicStore', 'EKAlarm', 'CKNotificationInfo', 'AVAudioUnitEQ', 'UIPercentDrivenInteractiveTransition', 'MKPolygon', 'AVAssetTrackSegment', 'MTLVertexAttribute', 'NSExpressionDescription', 'HKStatisticsCollectionQuery', 'NSURLAuthenticationChallenge', 'NSDirectoryEnumerator', 'MKDistanceFormatter', 'UIAlertAction', 'NSPropertyListSerialization', 'GKPeerPickerController', 'UIUserNotificationSettings', 'UITableViewController', 'GKNotificationBanner', 'MKPointAnnotation', 'MTLRenderPassColorAttachmentDescriptorArray', 'NSCache', 'SKPhysicsJoint', 'NSXMLParser', 'UIViewController', 'PKPaymentToken', 'MFMessageComposeViewController', 'AVAudioInputNode', 'NSDataDetector', 'CABTMIDICentralViewController', 'AVAudioUnitMIDIInstrument', 'AVCaptureVideoPreviewLayer', 'AVAssetWriterInputPassDescription', 'MPChangePlaybackRateCommand', 'NSURLComponents', 'CAMetalLayer', 'UISnapBehavior', 'AVMetadataMachineReadableCodeObject', 'CKDiscoverUserInfosOperation', 'NSTextAttachment', 'NSException', 'UIMenuItem', 'CMMotionActivityManager', 'SCNGeometryElement', 'NCWidgetController', 'CAEmitterLayer', 'MKUserLocation', 'UIImagePickerController', 'CIFeature', 'AVCaptureDeviceInput', 'ALAsset', 'NSURLSessionDownloadTask', 'SCNPhysicsHingeJoint', 'MPMoviePlayerViewController', 'NSMutableOrderedSet', 'SCNMaterialProperty', 'UIFont', 'AVCaptureVideoDataOutput', 'NSCachedURLResponse', 'ALAssetsLibrary', 'NSInvocation', 'UILongPressGestureRecognizer', 'NSTextStorage', 'WKWebViewConfiguration', 'CIFaceFeature', 'MKMapSnapshot', 'GLKEffectPropertyFog', 'AVComposition', 'CKDiscoverAllContactsOperation', 'AVAudioMixInputParameters', 'CAEmitterBehavior', 'PKPassLibrary', 'UIMutableUserNotificationCategory', 'NSLock', 'NEVPNProtocolIPSec', 'ADBannerView', 
'UIDocumentPickerExtensionViewController', 'UIActivityIndicatorView', 'AVPlayerMediaSelectionCriteria', 'CALayer', 'UIAccessibilityCustomAction', 'UIBarButtonItem', 'AVAudioSessionRouteDescription', 'CLBeaconRegion', 'HKBloodTypeObject', 'MTLVertexBufferLayoutDescriptorArray', 'CABasicAnimation', 'AVVideoCompositionInstruction', 'AVMutableTimedMetadataGroup', 'EKRecurrenceEnd', 'NSTextContainer', 'TWTweetComposeViewController', 'PKPaymentAuthorizationViewController', 'UIScrollView', 'WKNavigationAction', 'AVPlayerItemMetadataOutput', 'EKRecurrenceDayOfWeek', 'NSNumberFormatter', 'MTLComputePipelineReflection', 'UIScreen', 'CLRegion', 'NSProcessInfo', 'GLKTextureInfo', 'SCNSkinner', 'AVCaptureMetadataOutput', 'SCNAnimationEvent', 'NSTextTab', 'JSManagedValue', 'NSDate', 'UITextChecker', 'WKBackForwardListItem', 'NSData', 'NSParagraphStyle', 'AVMutableMetadataItem', 'EKCalendar', 'HKWorkoutEvent', 'NSMutableURLRequest', 'UIVideoEditorController', 'HMTimerTrigger', 'AVAudioUnitVarispeed', 'UIDynamicAnimator', 'AVCompositionTrackSegment', 'GCGamepadSnapshot', 'MPMediaEntity', 'GLKSkyboxEffect', 'UISwitch', 'EKStructuredLocation', 'UIGestureRecognizer', 'NSProxy', 'GLKBaseEffect', 'UIPushBehavior', 'GKScoreChallenge', 'NSCoder', 'MPMediaPlaylist', 'NSDateComponents', 'WKUserScript', 'EKEvent', 'NSDateFormatter', 'NSAsynchronousFetchResult', 'AVAssetWriterInputPixelBufferAdaptor', 'UIVisualEffect', 'UICollectionViewCell', 'UITextField', 'CLPlacemark', 'MPPlayableContentManager', 'AVCaptureOutput', 'HMCharacteristicWriteAction', 'CKModifySubscriptionsOperation', 'NSPropertyDescription', 'GCGamepad', 'UIMarkupTextPrintFormatter', 'SCNTube', 'NSPersistentStoreCoordinator', 'AVAudioEnvironmentNode', 'GKMatchmaker', 'CIContext', 'NSThread', 'SLComposeSheetConfigurationItem', 'SKPhysicsJointSliding', 'NSPredicate', 'GKVoiceChat', 'SKCropNode', 'AVCaptureAudioPreviewOutput', 'NSStringDrawingContext', 'GKGameCenterViewController', 'UIPrintPaper', 'SCNPhysicsBallSocketJoint', 
'UICollectionViewLayoutInvalidationContext', 'GLKEffectPropertyTransform', 'AVAudioIONode', 'UIDatePicker', 'MKDirections', 'ALAssetsGroup', 'CKRecordZoneNotification', 'SCNScene', 'MPMovieAccessLogEvent', 'CKFetchSubscriptionsOperation', 'CAEmitterCell', 'AVAudioUnitTimeEffect', 'HMCharacteristicMetadata', 'MKPinAnnotationView', 'UIPickerView', 'UIImageView', 'UIUserNotificationCategory', 'SCNPhysicsVehicleWheel', 'HKCategoryType', 'MPMediaQuerySection', 'GKFriendRequestComposeViewController', 'NSError', 'MTLRenderPipelineColorAttachmentDescriptor', 'SCNPhysicsShape', 'UISearchController', 'SCNPhysicsBody', 'CTSubscriberInfo', 'AVPlayerItemAccessLog', 'MPMediaPropertyPredicate', 'CMLogItem', 'NSAutoreleasePool', 'NSSocketPort', 'AVAssetReaderTrackOutput', 'SKNode', 'UIMutableUserNotificationAction', 'SCNProgram', 'AVSpeechSynthesisVoice', 'CMAltimeter', 'AVCaptureAudioChannel', 'GKTurnBasedExchangeReply', 'AVVideoCompositionLayerInstruction', 'AVSpeechSynthesizer', 'GKChallengeEventHandler', 'AVCaptureFileOutput', 'UIControl', 'SCNPhysicsField', 'CKReference', 'LAContext', 'CKRecordID', 'ADInterstitialAd', 'AVAudioSessionDataSourceDescription', 'AVAudioBuffer', 'CIColorKernel', 'GCControllerDirectionPad', 'NSFileManager', 'AVMutableAudioMixInputParameters', 'UIScreenEdgePanGestureRecognizer', 'CAKeyframeAnimation', 'CKQueryNotification', 'PHAdjustmentData', 'EASession', 'AVAssetResourceRenewalRequest', 'UIInputView', 'NSFileWrapper', 'UIResponder', 'NSPointerFunctions', 'UIKeyCommand', 'NSHTTPCookieStorage', 'AVMediaSelectionOption', 'NSRunLoop', 'NSFileAccessIntent', 'CAAnimationGroup', 'MKCircle', 'UIAlertController', 'NSMigrationManager', 'NSDateIntervalFormatter', 'UICollectionViewUpdateItem', 'CKDatabaseOperation', 'PHImageRequestOptions', 'SKReachConstraints', 'CKRecord', 'CAInterAppAudioSwitcherView', 'WKWindowFeatures', 'GKInvite', 'NSMutableData', 'PHAssetCollectionChangeRequest', 'NSMutableParagraphStyle', 'UIDynamicBehavior', 'GLKEffectProperty', 
'CKFetchRecordChangesOperation', 'SKShapeNode', 'MPMovieErrorLogEvent', 'MKPolygonView', 'MPContentItem', 'HMAction', 'NSScanner', 'GKAchievementChallenge', 'AVAudioPlayer', 'CKContainer', 'AVVideoComposition', 'NKLibrary', 'NSPersistentStore', 'AVCaptureMovieFileOutput', 'HMRoom', 'GKChallenge', 'UITextRange', 'NSURLProtectionSpace', 'ACAccountStore', 'MPSkipIntervalCommand', 'NSComparisonPredicate', 'HMHome', 'PHVideoRequestOptions', 'NSOutputStream', 'MPSkipIntervalCommandEvent', 'PKAddPassesViewController', 'UITextSelectionRect', 'CTTelephonyNetworkInfo', 'AVTextStyleRule', 'NSFetchedPropertyDescription', 'UIPageViewController', 'CATransformLayer', 'UICollectionViewController', 'AVAudioNode', 'MCNearbyServiceAdvertiser', 'NSObject', 'PHAsset', 'GKLeaderboardViewController', 'CKQueryCursor', 'MPMusicPlayerController', 'MKOverlayPathRenderer', 'CMPedometerData', 'HMService', 'SKFieldNode', 'GKAchievement', 'WKUserContentController', 'AVAssetTrack', 'TWRequest', 'SKLabelNode', 'AVCaptureBracketedStillImageSettings', 'MIDINetworkHost', 'MPMediaPredicate', 'AVFrameRateRange', 'MTLTextureDescriptor', 'MTLVertexBufferLayoutDescriptor', 'MPFeedbackCommandEvent', 'UIUserNotificationAction', 'HKStatisticsQuery', 'SCNParticleSystem', 'NSIndexPath', 'AVVideoCompositionRenderContext', 'CADisplayLink', 'HKObserverQuery', 'UIPopoverPresentationController', 'CKQueryOperation', 'CAEAGLLayer', 'NSMutableString', 'NSMessagePort', 'NSURLQueryItem', 'MTLStructMember', 'AVAudioSessionChannelDescription', 'GLKView', 'UIActivityViewController', 'GKAchievementViewController', 'GKTurnBasedParticipant', 'NSURLProtocol', 'NSUserDefaults', 'NSCalendar', 'SKKeyframeSequence', 'AVMetadataItemFilter', 'CKModifyRecordZonesOperation', 'WKPreferences', 'NSMethodSignature', 'NSRegularExpression', 'EAGLSharegroup', 'AVPlayerItemVideoOutput', 'PHContentEditingInputRequestOptions', 'GKMatch', 'CIColor', 'UIDictationPhrase'} +COCOA_PROTOCOLS = {'SKStoreProductViewControllerDelegate', 
'AVVideoCompositionInstruction', 'AVAudioSessionDelegate', 'GKMatchDelegate', 'NSFileManagerDelegate', 'UILayoutSupport', 'NSCopying', 'UIPrintInteractionControllerDelegate', 'QLPreviewControllerDataSource', 'SKProductsRequestDelegate', 'NSTextStorageDelegate', 'MCBrowserViewControllerDelegate', 'MTLComputeCommandEncoder', 'SCNSceneExportDelegate', 'UISearchResultsUpdating', 'MFMailComposeViewControllerDelegate', 'MTLBlitCommandEncoder', 'NSDecimalNumberBehaviors', 'PHContentEditingController', 'NSMutableCopying', 'UIActionSheetDelegate', 'UIViewControllerTransitioningDelegate', 'UIAlertViewDelegate', 'AVAudioPlayerDelegate', 'MKReverseGeocoderDelegate', 'NSCoding', 'UITextInputTokenizer', 'GKFriendRequestComposeViewControllerDelegate', 'UIActivityItemSource', 'NSCacheDelegate', 'UIAdaptivePresentationControllerDelegate', 'GKAchievementViewControllerDelegate', 'UIViewControllerTransitionCoordinator', 'EKEventEditViewDelegate', 'NSURLConnectionDelegate', 'UITableViewDelegate', 'GKPeerPickerControllerDelegate', 'UIGuidedAccessRestrictionDelegate', 'AVSpeechSynthesizerDelegate', 'AVAudio3DMixing', 'AVPlayerItemLegibleOutputPushDelegate', 'ADInterstitialAdDelegate', 'HMAccessoryBrowserDelegate', 'AVAssetResourceLoaderDelegate', 'UITabBarControllerDelegate', 'CKRecordValue', 'SKPaymentTransactionObserver', 'AVCaptureAudioDataOutputSampleBufferDelegate', 'UIInputViewAudioFeedback', 'GKChallengeListener', 'SKSceneDelegate', 'UIPickerViewDelegate', 'UIWebViewDelegate', 'UIApplicationDelegate', 'GKInviteEventListener', 'MPMediaPlayback', 'MyClassJavaScriptMethods', 'AVAsynchronousKeyValueLoading', 'QLPreviewItem', 'SCNBoundingVolume', 'NSPortDelegate', 'UIContentContainer', 'SCNNodeRendererDelegate', 'SKRequestDelegate', 'SKPhysicsContactDelegate', 'HMAccessoryDelegate', 'UIPageViewControllerDataSource', 'SCNSceneRendererDelegate', 'SCNPhysicsContactDelegate', 'MKMapViewDelegate', 'AVPlayerItemOutputPushDelegate', 'UICollectionViewDelegate', 
'UIImagePickerControllerDelegate', 'MTLRenderCommandEncoder', 'PKPaymentAuthorizationViewControllerDelegate', 'UIToolbarDelegate', 'WKUIDelegate', 'SCNActionable', 'NSURLConnectionDataDelegate', 'MKOverlay', 'CBCentralManagerDelegate', 'JSExport', 'NSTextLayoutOrientationProvider', 'UIPickerViewDataSource', 'PKPushRegistryDelegate', 'UIViewControllerTransitionCoordinatorContext', 'NSLayoutManagerDelegate', 'MTLLibrary', 'NSFetchedResultsControllerDelegate', 'ABPeoplePickerNavigationControllerDelegate', 'MTLResource', 'NSDiscardableContent', 'UITextFieldDelegate', 'MTLBuffer', 'MTLSamplerState', 'GKGameCenterControllerDelegate', 'MPMediaPickerControllerDelegate', 'UISplitViewControllerDelegate', 'UIAppearance', 'UIPickerViewAccessibilityDelegate', 'UITraitEnvironment', 'UIScrollViewAccessibilityDelegate', 'ADBannerViewDelegate', 'MPPlayableContentDataSource', 'MTLComputePipelineState', 'NSURLSessionDelegate', 'MTLCommandBuffer', 'NSXMLParserDelegate', 'UIViewControllerRestoration', 'UISearchBarDelegate', 'UIBarPositioning', 'CBPeripheralDelegate', 'UISearchDisplayDelegate', 'CAAction', 'PKAddPassesViewControllerDelegate', 'MCNearbyServiceAdvertiserDelegate', 'MTLDepthStencilState', 'GKTurnBasedMatchmakerViewControllerDelegate', 'MPPlayableContentDelegate', 'AVCaptureVideoDataOutputSampleBufferDelegate', 'UIAppearanceContainer', 'UIStateRestoring', 'UITextDocumentProxy', 'MTLDrawable', 'NSURLSessionTaskDelegate', 'NSFilePresenter', 'AVAudioStereoMixing', 'UIViewControllerContextTransitioning', 'UITextInput', 'CBPeripheralManagerDelegate', 'UITextInputDelegate', 'NSFastEnumeration', 'NSURLAuthenticationChallengeSender', 'SCNProgramDelegate', 'AVVideoCompositing', 'SCNAnimatable', 'NSSecureCoding', 'MCAdvertiserAssistantDelegate', 'GKLocalPlayerListener', 'GLKNamedEffect', 'UIPopoverControllerDelegate', 'AVCaptureMetadataOutputObjectsDelegate', 'NSExtensionRequestHandling', 'UITextSelecting', 'UIPrinterPickerControllerDelegate', 'NCWidgetProviding', 
'MTLCommandEncoder', 'NSURLProtocolClient', 'MFMessageComposeViewControllerDelegate', 'UIVideoEditorControllerDelegate', 'WKNavigationDelegate', 'GKSavedGameListener', 'UITableViewDataSource', 'MTLFunction', 'EKCalendarChooserDelegate', 'NSUserActivityDelegate', 'UICollisionBehaviorDelegate', 'NSStreamDelegate', 'MCNearbyServiceBrowserDelegate', 'HMHomeDelegate', 'UINavigationControllerDelegate', 'MCSessionDelegate', 'UIDocumentPickerDelegate', 'UIViewControllerInteractiveTransitioning', 'GKTurnBasedEventListener', 'SCNSceneRenderer', 'MTLTexture', 'GLKViewDelegate', 'EAAccessoryDelegate', 'WKScriptMessageHandler', 'PHPhotoLibraryChangeObserver', 'NSKeyedUnarchiverDelegate', 'AVPlayerItemMetadataOutputPushDelegate', 'NSMachPortDelegate', 'SCNShadable', 'UIPopoverBackgroundViewMethods', 'UIDocumentMenuDelegate', 'UIBarPositioningDelegate', 'ABPersonViewControllerDelegate', 'NSNetServiceBrowserDelegate', 'EKEventViewDelegate', 'UIScrollViewDelegate', 'NSURLConnectionDownloadDelegate', 'UIGestureRecognizerDelegate', 'UINavigationBarDelegate', 'AVAudioMixing', 'NSFetchedResultsSectionInfo', 'UIDocumentInteractionControllerDelegate', 'MTLParallelRenderCommandEncoder', 'QLPreviewControllerDelegate', 'UIAccessibilityReadingContent', 'ABUnknownPersonViewControllerDelegate', 'GLKViewControllerDelegate', 'UICollectionViewDelegateFlowLayout', 'UIPopoverPresentationControllerDelegate', 'UIDynamicAnimatorDelegate', 'NSTextAttachmentContainer', 'MKAnnotation', 'UIAccessibilityIdentification', 'UICoordinateSpace', 'ABNewPersonViewControllerDelegate', 'MTLDevice', 'CAMediaTiming', 'AVCaptureFileOutputRecordingDelegate', 'HMHomeManagerDelegate', 'UITextViewDelegate', 'UITabBarDelegate', 'GKLeaderboardViewControllerDelegate', 'UISearchControllerDelegate', 'EAWiFiUnconfiguredAccessoryBrowserDelegate', 'UITextInputTraits', 'MTLRenderPipelineState', 'GKVoiceChatClient', 'UIKeyInput', 'UICollectionViewDataSource', 'SCNTechniqueSupport', 'NSLocking', 'AVCaptureFileOutputDelegate', 
'GKChallengeEventHandlerDelegate', 'UIObjectRestoration', 'CIFilterConstructor', 'AVPlayerItemOutputPullDelegate', 'EAGLDrawable', 'AVVideoCompositionValidationHandling', 'UIViewControllerAnimatedTransitioning', 'NSURLSessionDownloadDelegate', 'UIAccelerometerDelegate', 'UIPageViewControllerDelegate', 'MTLCommandQueue', 'UIDataSourceModelAssociation', 'AVAudioRecorderDelegate', 'GKSessionDelegate', 'NSKeyedArchiverDelegate', 'CAMetalDrawable', 'UIDynamicItem', 'CLLocationManagerDelegate', 'NSMetadataQueryDelegate', 'NSNetServiceDelegate', 'GKMatchmakerViewControllerDelegate', 'NSURLSessionDataDelegate'} +COCOA_PRIMITIVES = {'ROTAHeader', '__CFBundle', 'MortSubtable', 'AudioFilePacketTableInfo', 'CGPDFOperatorTable', 'KerxStateEntry', 'ExtendedTempoEvent', 'CTParagraphStyleSetting', 'OpaqueMIDIPort', '_GLKMatrix3', '_GLKMatrix2', '_GLKMatrix4', 'ExtendedControlEvent', 'CAFAudioDescription', 'OpaqueCMBlockBuffer', 'CGTextDrawingMode', 'EKErrorCode', 'gss_buffer_desc_struct', 'AudioUnitParameterInfo', '__SCPreferences', '__CTFrame', '__CTLine', 'AudioFile_SMPTE_Time', 'gss_krb5_lucid_context_v1', 'OpaqueJSValue', 'TrakTableEntry', 'AudioFramePacketTranslation', 'CGImageSource', 'OpaqueJSPropertyNameAccumulator', 'JustPCGlyphRepeatAddAction', '__CFBinaryHeap', 'OpaqueMIDIThruConnection', 'opaqueCMBufferQueue', 'OpaqueMusicSequence', 'MortRearrangementSubtable', 'MixerDistanceParams', 'MorxSubtable', 'MIDIObjectPropertyChangeNotification', 'SFNTLookupSegment', 'CGImageMetadataErrors', 'CGPath', 'OpaqueMIDIEndpoint', 'AudioComponentPlugInInterface', 'gss_ctx_id_t_desc_struct', 'sfntFontFeatureSetting', 'OpaqueJSContextGroup', '__SCNetworkConnection', 'AudioUnitParameterValueTranslation', 'CGImageMetadataType', 'CGPattern', 'AudioFileTypeAndFormatID', 'CGContext', 'AUNodeInteraction', 'SFNTLookupTable', 'JustPCDecompositionAction', 'KerxControlPointHeader', 'AudioStreamPacketDescription', 'KernSubtableHeader', '__SecCertificate', 'AUMIDIOutputCallbackStruct', 
'MIDIMetaEvent', 'AudioQueueChannelAssignment', 'AnchorPoint', 'JustTable', '__CFNetService', 'CF_BRIDGED_TYPE', 'gss_krb5_lucid_key', 'CGPDFDictionary', 'KerxSubtableHeader', 'CAF_UUID_ChunkHeader', 'gss_krb5_cfx_keydata', 'OpaqueJSClass', 'CGGradient', 'OpaqueMIDISetup', 'JustPostcompTable', '__CTParagraphStyle', 'AudioUnitParameterHistoryInfo', 'OpaqueJSContext', 'CGShading', 'MIDIThruConnectionParams', 'BslnFormat0Part', 'SFNTLookupSingle', '__CFHost', '__SecRandom', '__CTFontDescriptor', '_NSRange', 'sfntDirectory', 'AudioQueueLevelMeterState', 'CAFPositionPeak', 'PropLookupSegment', '__CVOpenGLESTextureCache', 'sfntInstance', '_GLKQuaternion', 'AnkrTable', '__SCNetworkProtocol', 'CAFFileHeader', 'KerxOrderedListHeader', 'CGBlendMode', 'STXEntryOne', 'CAFRegion', 'SFNTLookupTrimmedArrayHeader', 'SCNMatrix4', 'KerxControlPointEntry', 'OpaqueMusicTrack', '_GLKVector4', 'gss_OID_set_desc_struct', 'OpaqueMusicPlayer', '_CFHTTPAuthentication', 'CGAffineTransform', 'CAFMarkerChunk', 'AUHostIdentifier', 'ROTAGlyphEntry', 'BslnTable', 'gss_krb5_lucid_context_version', '_GLKMatrixStack', 'CGImage', 'KernStateEntry', 'SFNTLookupSingleHeader', 'MortLigatureSubtable', 'CAFUMIDChunk', 'SMPTETime', 'CAFDataChunk', 'CGPDFStream', 'AudioFileRegionList', 'STEntryTwo', 'SFNTLookupBinarySearchHeader', 'OpbdTable', '__CTGlyphInfo', 'BslnFormat2Part', 'KerxIndexArrayHeader', 'TrakTable', 'KerxKerningPair', '__CFBitVector', 'KernVersion0SubtableHeader', 'OpaqueAudioComponentInstance', 'AudioChannelLayout', '__CFUUID', 'MIDISysexSendRequest', '__CFNumberFormatter', 'CGImageSourceStatus', 'AudioFileMarkerList', 'AUSamplerBankPresetData', 'CGDataProvider', 'AudioFormatInfo', '__SecIdentity', 'sfntCMapExtendedSubHeader', 'MIDIChannelMessage', 'KernOffsetTable', 'CGColorSpaceModel', 'MFMailComposeErrorCode', 'CGFunction', '__SecTrust', 'AVAudio3DAngularOrientation', 'CGFontPostScriptFormat', 'KernStateHeader', 'AudioUnitCocoaViewInfo', 'CGDataConsumer', 'OpaqueMIDIDevice', 
'KernVersion0Header', 'AnchorPointTable', 'CGImageDestination', 'CAFInstrumentChunk', 'AudioUnitMeterClipping', 'MorxChain', '__CTFontCollection', 'STEntryOne', 'STXEntryTwo', 'ExtendedNoteOnEvent', 'CGColorRenderingIntent', 'KerxSimpleArrayHeader', 'MorxTable', '_GLKVector3', '_GLKVector2', 'MortTable', 'CGPDFBox', 'AudioUnitParameterValueFromString', '__CFSocket', 'ALCdevice_struct', 'MIDINoteMessage', 'sfntFeatureHeader', 'CGRect', '__SCNetworkInterface', '__CFTree', 'MusicEventUserData', 'TrakTableData', 'GCQuaternion', 'MortContextualSubtable', '__CTRun', 'AudioUnitFrequencyResponseBin', 'MortChain', 'MorxInsertionSubtable', 'CGImageMetadata', 'gss_auth_identity', 'AudioUnitMIDIControlMapping', 'CAFChunkHeader', 'CGImagePropertyOrientation', 'CGPDFScanner', 'OpaqueMusicEventIterator', 'sfntDescriptorHeader', 'AudioUnitNodeConnection', 'OpaqueMIDIDeviceList', 'ExtendedAudioFormatInfo', 'BslnFormat1Part', 'sfntFontDescriptor', 'KernSimpleArrayHeader', '__CFRunLoopObserver', 'CGPatternTiling', 'MIDINotification', 'MorxLigatureSubtable', 'MessageComposeResult', 'MIDIThruConnectionEndpoint', 'MusicDeviceStdNoteParams', 'opaqueCMSimpleQueue', 'ALCcontext_struct', 'OpaqueAudioQueue', 'PropLookupSingle', 'CGInterpolationQuality', 'CGColor', 'AudioOutputUnitStartAtTimeParams', 'gss_name_t_desc_struct', 'CGFunctionCallbacks', 'CAFPacketTableHeader', 'AudioChannelDescription', 'sfntFeatureName', 'MorxContextualSubtable', 'CVSMPTETime', 'AudioValueRange', 'CGTextEncoding', 'AudioStreamBasicDescription', 'AUNodeRenderCallback', 'AudioPanningInfo', 'KerxOrderedListEntry', '__CFAllocator', 'OpaqueJSPropertyNameArray', '__SCDynamicStore', 'OpaqueMIDIEntity', '__CTRubyAnnotation', 'SCNVector4', 'CFHostClientContext', 'CFNetServiceClientContext', 'AudioUnitPresetMAS_SettingData', 'opaqueCMBufferQueueTriggerToken', 'AudioUnitProperty', 'CAFRegionChunk', 'CGPDFString', '__GLsync', '__CFStringTokenizer', 'JustWidthDeltaEntry', 'sfntVariationAxis', '__CFNetDiagnostic', 
'CAFOverviewSample', 'sfntCMapEncoding', 'CGVector', '__SCNetworkService', 'opaqueCMSampleBuffer', 'AUHostVersionIdentifier', 'AudioBalanceFade', 'sfntFontRunFeature', 'KerxCoordinateAction', 'sfntCMapSubHeader', 'CVPlanarPixelBufferInfo', 'AUNumVersion', 'AUSamplerInstrumentData', 'AUPreset', '__CTRunDelegate', 'OpaqueAudioQueueProcessingTap', 'KerxTableHeader', '_NSZone', 'OpaqueExtAudioFile', '__CFRunLoopSource', '__CVMetalTextureCache', 'KerxAnchorPointAction', 'OpaqueJSString', 'AudioQueueParameterEvent', '__CFHTTPMessage', 'OpaqueCMClock', 'ScheduledAudioFileRegion', 'STEntryZero', 'AVAudio3DPoint', 'gss_channel_bindings_struct', 'sfntVariationHeader', 'AUChannelInfo', 'UIOffset', 'GLKEffectPropertyPrv', 'KerxStateHeader', 'CGLineJoin', 'CGPDFDocument', '__CFBag', 'KernOrderedListHeader', '__SCNetworkSet', '__SecKey', 'MIDIObjectAddRemoveNotification', 'AudioUnitParameter', 'JustPCActionSubrecord', 'AudioComponentDescription', 'AudioUnitParameterValueName', 'AudioUnitParameterEvent', 'KerxControlPointAction', 'AudioTimeStamp', 'KernKerningPair', 'gss_buffer_set_desc_struct', 'MortFeatureEntry', 'FontVariation', 'CAFStringID', 'LcarCaretClassEntry', 'AudioUnitParameterStringFromValue', 'ACErrorCode', 'ALMXGlyphEntry', 'LtagTable', '__CTTypesetter', 'AuthorizationOpaqueRef', 'UIEdgeInsets', 'CGPathElement', 'CAFMarker', 'KernTableHeader', 'NoteParamsControlValue', 'SSLContext', 'gss_cred_id_t_desc_struct', 'AudioUnitParameterNameInfo', 'CGDataConsumerCallbacks', 'ALMXHeader', 'CGLineCap', 'MIDIControlTransform', 'CGPDFArray', '__SecPolicy', 'AudioConverterPrimeInfo', '__CTTextTab', '__CFNetServiceMonitor', 'AUInputSamplesInOutputCallbackStruct', '__CTFramesetter', 'CGPDFDataFormat', 'STHeader', 'CVPlanarPixelBufferInfo_YCbCrPlanar', 'MIDIValueMap', 'JustDirectionTable', '__SCBondStatus', 'SFNTLookupSegmentHeader', 'OpaqueCMMemoryPool', 'CGPathDrawingMode', 'CGFont', '__SCNetworkReachability', 'AudioClassDescription', 'CGPoint', 'AVAudio3DVectorOrientation', 
'CAFStrings', '__CFNetServiceBrowser', 'opaqueMTAudioProcessingTap', 'sfntNameRecord', 'CGPDFPage', 'CGLayer', 'ComponentInstanceRecord', 'CAFInfoStrings', 'HostCallbackInfo', 'MusicDeviceNoteParams', 'OpaqueVTCompressionSession', 'KernIndexArrayHeader', 'CVPlanarPixelBufferInfo_YCbCrBiPlanar', 'MusicTrackLoopInfo', 'opaqueCMFormatDescription', 'STClassTable', 'sfntDirectoryEntry', 'OpaqueCMTimebase', 'CGDataProviderDirectCallbacks', 'MIDIPacketList', 'CAFOverviewChunk', 'MIDIPacket', 'ScheduledAudioSlice', 'CGDataProviderSequentialCallbacks', 'AudioBuffer', 'MorxRearrangementSubtable', 'CGPatternCallbacks', 'AUDistanceAttenuationData', 'MIDIIOErrorNotification', 'CGPDFContentStream', 'IUnknownVTbl', 'MIDITransform', 'MortInsertionSubtable', 'CABarBeatTime', 'AudioBufferList', '__CVBuffer', 'AURenderCallbackStruct', 'STXEntryZero', 'JustPCDuctilityAction', 'OpaqueAudioQueueTimeline', 'VTDecompressionOutputCallbackRecord', 'OpaqueMIDIClient', '__CFPlugInInstance', 'AudioQueueBuffer', '__CFFileDescriptor', 'AudioUnitConnection', '_GKTurnBasedExchangeStatus', 'LcarCaretTable', 'CVPlanarComponentInfo', 'JustWidthDeltaGroup', 'OpaqueAudioComponent', 'ParameterEvent', '__CVPixelBufferPool', '__CTFont', 'CGColorSpace', 'CGSize', 'AUDependentParameter', 'MIDIDriverInterface', 'gss_krb5_rfc1964_keydata', '__CFDateFormatter', 'LtagStringRange', 'OpaqueVTDecompressionSession', 'gss_iov_buffer_desc_struct', 'AUPresetEvent', 'PropTable', 'KernOrderedListEntry', 'CF_BRIDGED_MUTABLE_TYPE', 'gss_OID_desc_struct', 'AudioUnitPresetMAS_Settings', 'AudioFileMarker', 'JustPCConditionalAddAction', 'BslnFormat3Part', '__CFNotificationCenter', 'MortSwashSubtable', 'AUParameterMIDIMapping', 'SCNVector3', 'OpaqueAudioConverter', 'MIDIRawData', 'sfntNameHeader', '__CFRunLoop', 'MFMailComposeResult', 'CATransform3D', 'OpbdSideValues', 'CAF_SMPTE_Time', '__SecAccessControl', 'JustPCAction', 'OpaqueVTFrameSilo', 'OpaqueVTMultiPassStorage', 'CGPathElementType', 'AudioFormatListItem', 
'AudioUnitExternalBuffer', 'AudioFileRegion', 'AudioValueTranslation', 'CGImageMetadataTag', 'CAFPeakChunk', 'AudioBytePacketTranslation', 'sfntCMapHeader', '__CFURLEnumerator', 'STXHeader', 'CGPDFObjectType', 'SFNTLookupArrayHeader'} if __name__ == '__main__': # pragma: no cover import os diff --git a/src/typecode/_vendor/pygments/lexers/_csound_builtins.py b/src/typecode/_vendor/pygments/lexers/_csound_builtins.py index 16a4839..461a35f 100644 --- a/src/typecode/_vendor/pygments/lexers/_csound_builtins.py +++ b/src/typecode/_vendor/pygments/lexers/_csound_builtins.py @@ -3,52 +3,56 @@ pygments.lexers._csound_builtins ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ -# Opcodes in Csound 6.12.0 at commit 6ca322bd31f1ca907c008616b40a5f237ff449db using -# python -c " -# import re, subprocess -# output = subprocess.Popen(['csound', '--list-opcodes0'], stderr=subprocess.PIPE).communicate()[1] -# opcodes = output[re.search(r'^$', output, re.M).end():re.search(r'^\d+ opcodes$', output, re.M).start()].split() -# output = subprocess.Popen(['csound', '--list-opcodes2'], stderr=subprocess.PIPE).communicate()[1] -# all_opcodes = output[re.search(r'^$', output, re.M).end():re.search(r'^\d+ opcodes$', output, re.M).start()].split() +# Opcodes in Csound 6.14.0 using: +# python3 -c " +# import re +# from subprocess import Popen, PIPE +# output = Popen(['csound', '--list-opcodes0'], stderr=PIPE, text=True).communicate()[1] +# opcodes = output[re.search(r'^\$', output, re.M).end() : re.search(r'^\d+ opcodes\$', output, re.M).start()].split() +# output = Popen(['csound', '--list-opcodes2'], stderr=PIPE, text=True).communicate()[1] +# all_opcodes = output[re.search(r'^\$', output, re.M).end() : re.search(r'^\d+ opcodes\$', output, re.M).start()].split() # deprecated_opcodes = [opcode for opcode in all_opcodes if opcode 
not in opcodes] -# print '''OPCODES = set(\''' -# {} +# # Remove opcodes that csound.py treats as keywords. +# keyword_opcodes = [ +# 'cggoto', # https://csound.com/docs/manual/cggoto.html +# 'cigoto', # https://csound.com/docs/manual/cigoto.html +# 'cingoto', # (undocumented) +# 'ckgoto', # https://csound.com/docs/manual/ckgoto.html +# 'cngoto', # https://csound.com/docs/manual/cngoto.html +# 'cnkgoto', # (undocumented) +# 'endin', # https://csound.com/docs/manual/endin.html +# 'endop', # https://csound.com/docs/manual/endop.html +# 'goto', # https://csound.com/docs/manual/goto.html +# 'igoto', # https://csound.com/docs/manual/igoto.html +# 'instr', # https://csound.com/docs/manual/instr.html +# 'kgoto', # https://csound.com/docs/manual/kgoto.html +# 'loop_ge', # https://csound.com/docs/manual/loop_ge.html +# 'loop_gt', # https://csound.com/docs/manual/loop_gt.html +# 'loop_le', # https://csound.com/docs/manual/loop_le.html +# 'loop_lt', # https://csound.com/docs/manual/loop_lt.html +# 'opcode', # https://csound.com/docs/manual/opcode.html +# 'reinit', # https://csound.com/docs/manual/reinit.html +# 'return', # https://csound.com/docs/manual/return.html +# 'rireturn', # https://csound.com/docs/manual/rireturn.html +# 'rigoto', # https://csound.com/docs/manual/rigoto.html +# 'tigoto', # https://csound.com/docs/manual/tigoto.html +# 'timout' # https://csound.com/docs/manual/timout.html +# ] +# opcodes = [opcode for opcode in opcodes if opcode not in keyword_opcodes] +# newline = '\n' +# print(f'''OPCODES = set(\''' +# {newline.join(opcodes)} # \'''.split()) # # DEPRECATED_OPCODES = set(\''' -# {} +# {newline.join(deprecated_opcodes)} # \'''.split()) -# '''.format('\n'.join(opcodes), '\n'.join(deprecated_opcodes)) +# ''') # " -# except for -# cggoto csound.com/docs/manual/cggoto.html -# cigoto csound.com/docs/manual/cigoto.html -# cingoto (undocumented) -# ckgoto csound.com/docs/manual/ckgoto.html -# cngoto csound.com/docs/manual/cngoto.html -# cnkgoto (undocumented) 
-# endin csound.com/docs/manual/endin.html -# endop csound.com/docs/manual/endop.html -# goto csound.com/docs/manual/goto.html -# igoto csound.com/docs/manual/igoto.html -# instr csound.com/docs/manual/instr.html -# kgoto csound.com/docs/manual/kgoto.html -# loop_ge csound.com/docs/manual/loop_ge.html -# loop_gt csound.com/docs/manual/loop_gt.html -# loop_le csound.com/docs/manual/loop_le.html -# loop_lt csound.com/docs/manual/loop_lt.html -# opcode csound.com/docs/manual/opcode.html -# reinit csound.com/docs/manual/reinit.html -# return csound.com/docs/manual/return.html -# rireturn csound.com/docs/manual/rireturn.html -# rigoto csound.com/docs/manual/rigoto.html -# tigoto csound.com/docs/manual/tigoto.html -# timout csound.com/docs/manual/timout.html -# which are treated as keywords in csound.py. OPCODES = set(''' ATSadd @@ -169,8 +173,8 @@ STKBrass STKClarinet STKDrummer -STKFlute STKFMVoices +STKFlute STKHevyMetl STKMandolin STKModalBar @@ -196,12 +200,18 @@ adsynt adsynt2 aftouch +allpole alpass alwayson ampdb ampdbfs ampmidi +ampmidicurve ampmidid +apoleparams +arduinoRead +arduinoStart +arduinoStop areson aresonk atone @@ -224,6 +234,7 @@ biquad biquada birnd +bob bpf bpfcos bqrez @@ -249,7 +260,6 @@ ceps cepsinv chanctrl -changed changed2 chani chano @@ -261,11 +271,19 @@ chnclear chnexport chnget +chngeta +chngeti +chngetk chngetks +chngets chnmix chnparams chnset +chnseta +chnseti +chnsetk chnsetks +chnsets chuap clear clfilt @@ -274,6 +292,11 @@ clockon cmp cmplxprod +cntCreate +cntCycles +cntRead +cntReset +cntState comb combinv compilecsd @@ -293,6 +316,8 @@ cosseg cossegb cossegr +count +count_i cps2pch cpsmidi cpsmidib @@ -418,6 +443,17 @@ flooper flooper2 floor +fluidAllOut +fluidCCi +fluidCCk +fluidControl +fluidEngine +fluidInfo +fluidLoad +fluidNote +fluidOut +fluidProgramSelect +fluidSetInterpMethod fmanal fmax fmb3 @@ -452,6 +488,7 @@ ftchnls ftconv ftcps +ftexists ftfree ftgen ftgenonce @@ -468,7 +505,9 @@ ftsamplebank ftsave ftsavek +ftset 
ftslice +ftslicei ftsr gain gainslider @@ -492,6 +531,7 @@ grain2 grain3 granule +gtf guiro harmon harmon2 @@ -599,6 +639,10 @@ la_i_multiply_mr la_i_multiply_vc la_i_multiply_vr +la_i_norm1_mc +la_i_norm1_mr +la_i_norm1_vc +la_i_norm1_vr la_i_norm_euclid_mc la_i_norm_euclid_mr la_i_norm_euclid_vc @@ -609,10 +653,6 @@ la_i_norm_inf_vr la_i_norm_max_mc la_i_norm_max_mr -la_i_norm1_mc -la_i_norm1_mr -la_i_norm1_vc -la_i_norm1_vr la_i_print_mc la_i_print_mr la_i_print_vc @@ -697,6 +737,10 @@ la_k_multiply_mr la_k_multiply_vc la_k_multiply_vr +la_k_norm1_mc +la_k_norm1_mr +la_k_norm1_vc +la_k_norm1_vr la_k_norm_euclid_mc la_k_norm_euclid_mr la_k_norm_euclid_vc @@ -707,10 +751,6 @@ la_k_norm_inf_vr la_k_norm_max_mc la_k_norm_max_mr -la_k_norm1_mc -la_k_norm1_mr -la_k_norm1_vc -la_k_norm1_vr la_k_qr_eigen_mc la_k_qr_eigen_mr la_k_qr_factor_mc @@ -732,6 +772,9 @@ la_k_upper_solve_mr la_k_vc_set la_k_vr_set +lag +lagud +lastcycle lenarray lfo limit @@ -777,6 +820,8 @@ lowpass2 lowres lowresx +lpcanal +lpcfilter lpf18 lpform lpfreson @@ -800,6 +845,7 @@ lua_iopcall lua_iopcall_off lua_opdef +lufs mac maca madsr @@ -822,6 +868,7 @@ median mediank metro +metro2 mfb midglobal midiarp @@ -900,6 +947,8 @@ nsamp nstance nstrnum +nstrstr +ntof ntom ntrpol nxtpow2 @@ -1024,13 +1073,11 @@ printk2 printks printks2 +println prints +printsk product pset -ptable -ptable3 -ptablei -ptableiw ptablew ptrack puts @@ -1047,6 +1094,7 @@ pvsarp pvsbandp pvsbandr +pvsbandwidth pvsbin pvsblur pvsbuffer @@ -1055,6 +1103,7 @@ pvscale pvscent pvsceps +pvscfs pvscross pvsdemix pvsdiskin @@ -1074,6 +1123,7 @@ pvsinfo pvsinit pvslock +pvslpc pvsmaska pvsmix pvsmooth @@ -1175,6 +1225,7 @@ qnan r2c rand +randc randh randi random @@ -1198,6 +1249,7 @@ repluck reshapearray reson +resonbnk resonk resonr resonx @@ -1215,6 +1267,7 @@ rms rnd rnd31 +rndseed round rspline rtclock @@ -1235,6 +1288,7 @@ schedkwhen schedkwhennamed schedule +schedulek schedwhen scoreline scoreline_i @@ -1322,6 +1376,7 @@ sqrt 
squinewave statevar +sterrain stix strcat strcatk @@ -1337,6 +1392,7 @@ strget strindex strindexk +string2array strlen strlenk strlower @@ -1344,6 +1400,7 @@ strrindex strrindexk strset +strstrip strsub strsubk strtod @@ -1380,7 +1437,6 @@ tableigpw tableikt tableimix -tableiw tablekt tablemix tableng @@ -1432,6 +1488,8 @@ trfilter trhighest trigger +trighold +trigphasor trigseq trim trim_i @@ -1514,6 +1572,7 @@ vpow_i vpowv vpowv_i +vps vpvoc vrandh vrandi @@ -1589,6 +1648,7 @@ array bformdec bformenc +changed copy2ftab copy2ttab hrtfer @@ -1598,6 +1658,10 @@ mintab pop pop_f +ptable +ptable3 +ptablei +ptableiw push push_f scalet @@ -1616,6 +1680,7 @@ stack sumtab tabgen +tableiw tabmap tabmap_i tabslice diff --git a/src/typecode/_vendor/pygments/lexers/_lasso_builtins.py b/src/typecode/_vendor/pygments/lexers/_lasso_builtins.py index 1d2719d..2d37252 100644 --- a/src/typecode/_vendor/pygments/lexers/_lasso_builtins.py +++ b/src/typecode/_vendor/pygments/lexers/_lasso_builtins.py @@ -5,7 +5,7 @@ Built-in Lasso types, traits, methods, and members. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/_lua_builtins.py b/src/typecode/_vendor/pygments/lexers/_lua_builtins.py index b2b46a6..a9f4c5c 100644 --- a/src/typecode/_vendor/pygments/lexers/_lua_builtins.py +++ b/src/typecode/_vendor/pygments/lexers/_lua_builtins.py @@ -9,12 +9,10 @@ Do not edit the MODULES dict by hand. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" -from __future__ import print_function - MODULES = {'basic': ('_G', '_VERSION', 'assert', @@ -288,7 +286,7 @@ def run(): print('>> %s' % full_function_name) m = get_function_module(full_function_name) modules.setdefault(m, []).append(full_function_name) - modules = dict((k, tuple(v)) for k, v in modules.iteritems()) + modules = {k: tuple(v) for k, v in modules.items()} regenerate(__file__, modules) diff --git a/src/typecode/_vendor/pygments/lexers/_mapping.py b/src/typecode/_vendor/pygments/lexers/_mapping.py index ce1b6df..d40d915 100644 --- a/src/typecode/_vendor/pygments/lexers/_mapping.py +++ b/src/typecode/_vendor/pygments/lexers/_mapping.py @@ -13,465 +13,494 @@ :license: BSD, see LICENSE for details. """ -from __future__ import print_function - LEXERS = { - 'ABAPLexer': ('pygments.lexers.business', 'ABAP', ('abap',), ('*.abap', '*.ABAP'), ('text/x-abap',)), - 'APLLexer': ('pygments.lexers.apl', 'APL', ('apl',), ('*.apl',), ()), - 'AbnfLexer': ('pygments.lexers.grammar_notation', 'ABNF', ('abnf',), ('*.abnf',), ('text/x-abnf',)), - 'ActionScript3Lexer': ('pygments.lexers.actionscript', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')), - 'ActionScriptLexer': ('pygments.lexers.actionscript', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')), - 'AdaLexer': ('pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)), - 'AdlLexer': ('pygments.lexers.archetype', 'ADL', ('adl',), ('*.adl', '*.adls', '*.adlf', '*.adlx'), ()), - 'AgdaLexer': ('pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)), - 'AheuiLexer': ('pygments.lexers.esoteric', 'Aheui', ('aheui',), ('*.aheui',), ()), - 'AlloyLexer': ('pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)), - 'AmbientTalkLexer': ('pygments.lexers.ambient', 
'AmbientTalk', ('at', 'ambienttalk', 'ambienttalk/2'), ('*.at',), ('text/x-ambienttalk',)), - 'AmplLexer': ('pygments.lexers.ampl', 'Ampl', ('ampl',), ('*.run',), ()), - 'Angular2HtmlLexer': ('pygments.lexers.templates', 'HTML + Angular2', ('html+ng2',), ('*.ng2',), ()), - 'Angular2Lexer': ('pygments.lexers.templates', 'Angular2', ('ng2',), (), ()), - 'AntlrActionScriptLexer': ('pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-as', 'antlr-actionscript'), ('*.G', '*.g'), ()), - 'AntlrCSharpLexer': ('pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()), - 'AntlrCppLexer': ('pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()), - 'AntlrJavaLexer': ('pygments.lexers.parsers', 'ANTLR With Java Target', ('antlr-java',), ('*.G', '*.g'), ()), - 'AntlrLexer': ('pygments.lexers.parsers', 'ANTLR', ('antlr',), (), ()), - 'AntlrObjectiveCLexer': ('pygments.lexers.parsers', 'ANTLR With ObjectiveC Target', ('antlr-objc',), ('*.G', '*.g'), ()), - 'AntlrPerlLexer': ('pygments.lexers.parsers', 'ANTLR With Perl Target', ('antlr-perl',), ('*.G', '*.g'), ()), - 'AntlrPythonLexer': ('pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()), - 'AntlrRubyLexer': ('pygments.lexers.parsers', 'ANTLR With Ruby Target', ('antlr-ruby', 'antlr-rb'), ('*.G', '*.g'), ()), - 'ApacheConfLexer': ('pygments.lexers.configs', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)), - 'AppleScriptLexer': ('pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()), - 'ArduinoLexer': ('pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)), - 'AspectJLexer': ('pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)), - 'AsymptoteLexer': ('pygments.lexers.graphics', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)), 
- 'AugeasLexer': ('pygments.lexers.configs', 'Augeas', ('augeas',), ('*.aug',), ()), - 'AutoItLexer': ('pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)), - 'AutohotkeyLexer': ('pygments.lexers.automation', 'autohotkey', ('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)), - 'AwkLexer': ('pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)), - 'BBCBasicLexer': ('pygments.lexers.basic', 'BBC Basic', ('bbcbasic',), ('*.bbc',), ()), - 'BBCodeLexer': ('pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)), - 'BCLexer': ('pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()), - 'BSTLexer': ('pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()), - 'BaseMakefileLexer': ('pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()), - 'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript', 'text/x-shellscript')), - 'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')), - 'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat', 'batch', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)), - 'BefungeLexer': ('pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)), - 'BibTeXLexer': ('pygments.lexers.bibtex', 'BibTeX', ('bib', 'bibtex'), ('*.bib',), ('text/x-bibtex',)), - 'BlitzBasicLexer': ('pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)), - 'BlitzMaxLexer': ('pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), 
('text/x-bmx',)), - 'BnfLexer': ('pygments.lexers.grammar_notation', 'BNF', ('bnf',), ('*.bnf',), ('text/x-bnf',)), - 'BoaLexer': ('pygments.lexers.boa', 'Boa', ('boa',), ('*.boa',), ()), - 'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)), - 'BoogieLexer': ('pygments.lexers.verification', 'Boogie', ('boogie',), ('*.bpl',), ()), - 'BrainfuckLexer': ('pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)), - 'BroLexer': ('pygments.lexers.dsls', 'Bro', ('bro',), ('*.bro',), ()), - 'BugsLexer': ('pygments.lexers.modeling', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()), - 'CAmkESLexer': ('pygments.lexers.esoteric', 'CAmkES', ('camkes', 'idl4'), ('*.camkes', '*.idl4'), ()), - 'CLexer': ('pygments.lexers.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')), - 'CMakeLexer': ('pygments.lexers.make', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)), - 'CObjdumpLexer': ('pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)), - 'CPSALexer': ('pygments.lexers.lisp', 'CPSA', ('cpsa',), ('*.cpsa',), ()), - 'CSharpAspxLexer': ('pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()), - 'CSharpLexer': ('pygments.lexers.dotnet', 'C#', ('csharp', 'c#'), ('*.cs',), ('text/x-csharp',)), - 'Ca65Lexer': ('pygments.lexers.asm', 'ca65 assembler', ('ca65',), ('*.s',), ()), - 'CadlLexer': ('pygments.lexers.archetype', 'cADL', ('cadl',), ('*.cadl',), ()), - 'CapDLLexer': ('pygments.lexers.esoteric', 'CapDL', ('capdl',), ('*.cdl',), ()), - 'CapnProtoLexer': ('pygments.lexers.capnproto', "Cap'n Proto", ('capnp',), ('*.capnp',), ()), - 'CbmBasicV2Lexer': ('pygments.lexers.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()), - 'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)), - 'Cfengine3Lexer': 
('pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()), - 'ChaiscriptLexer': ('pygments.lexers.scripting', 'ChaiScript', ('chai', 'chaiscript'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')), - 'ChapelLexer': ('pygments.lexers.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()), - 'CharmciLexer': ('pygments.lexers.c_like', 'Charmci', ('charmci',), ('*.ci',), ()), - 'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')), - 'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')), - 'CheetahLexer': ('pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')), - 'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')), - 'CirruLexer': ('pygments.lexers.webmisc', 'Cirru', ('cirru',), ('*.cirru',), ('text/x-cirru',)), - 'ClayLexer': ('pygments.lexers.c_like', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)), - 'CleanLexer': ('pygments.lexers.clean', 'Clean', ('clean',), ('*.icl', '*.dcl'), ()), - 'ClojureLexer': ('pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')), - 'ClojureScriptLexer': ('pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')), - 'CobolFreeformatLexer': ('pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()), - 'CobolLexer': ('pygments.lexers.business', 'COBOL', ('cobol',), 
('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)), - 'CoffeeScriptLexer': ('pygments.lexers.javascript', 'CoffeeScript', ('coffee-script', 'coffeescript', 'coffee'), ('*.coffee',), ('text/coffeescript',)), - 'ColdfusionCFCLexer': ('pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()), - 'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)), - 'ColdfusionLexer': ('pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()), - 'CommonLispLexer': ('pygments.lexers.lisp', 'Common Lisp', ('common-lisp', 'cl', 'lisp'), ('*.cl', '*.lisp'), ('text/x-common-lisp',)), - 'ComponentPascalLexer': ('pygments.lexers.oberon', 'Component Pascal', ('componentpascal', 'cp'), ('*.cp', '*.cps'), ('text/x-component-pascal',)), - 'CoqLexer': ('pygments.lexers.theorem', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)), - 'CppLexer': ('pygments.lexers.c_cpp', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP'), ('text/x-c++hdr', 'text/x-c++src')), - 'CppObjdumpLexer': ('pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)), - 'CrmshLexer': ('pygments.lexers.dsls', 'Crmsh', ('crmsh', 'pcmk'), ('*.crmsh', '*.pcmk'), ()), - 'CrocLexer': ('pygments.lexers.d', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)), - 'CryptolLexer': ('pygments.lexers.haskell', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)), - 'CrystalLexer': ('pygments.lexers.crystal', 'Crystal', ('cr', 'crystal'), ('*.cr',), ('text/x-crystal',)), - 'CsoundDocumentLexer': ('pygments.lexers.csound', 'Csound Document', ('csound-document', 'csound-csd'), ('*.csd',), ()), - 'CsoundOrchestraLexer': ('pygments.lexers.csound', 'Csound Orchestra', ('csound', 'csound-orc'), ('*.orc', '*.udo'), ()), - 'CsoundScoreLexer': 
('pygments.lexers.csound', 'Csound Score', ('csound-score', 'csound-sco'), ('*.sco',), ()), - 'CssDjangoLexer': ('pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')), - 'CssErbLexer': ('pygments.lexers.templates', 'CSS+Ruby', ('css+erb', 'css+ruby'), (), ('text/css+ruby',)), - 'CssGenshiLexer': ('pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)), - 'CssLexer': ('pygments.lexers.css', 'CSS', ('css',), ('*.css',), ('text/css',)), - 'CssPhpLexer': ('pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)), - 'CssSmartyLexer': ('pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)), - 'CudaLexer': ('pygments.lexers.c_like', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)), - 'CypherLexer': ('pygments.lexers.graph', 'Cypher', ('cypher',), ('*.cyp', '*.cypher'), ()), - 'CythonLexer': ('pygments.lexers.python', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')), - 'DLexer': ('pygments.lexers.d', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)), - 'DObjdumpLexer': ('pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)), - 'DarcsPatchLexer': ('pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()), - 'DartLexer': ('pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)), - 'Dasm16Lexer': ('pygments.lexers.asm', 'DASM16', ('dasm16',), ('*.dasm16', '*.dasm'), ('text/x-dasm16',)), - 'DebianControlLexer': ('pygments.lexers.installers', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()), - 'DelphiLexer': ('pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)), - 'DgLexer': ('pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)), - 'DiffLexer': 
('pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')), - 'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')), - 'DockerLexer': ('pygments.lexers.configs', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)), - 'DtdLexer': ('pygments.lexers.html', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)), - 'DuelLexer': ('pygments.lexers.webmisc', 'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')), - 'DylanConsoleLexer': ('pygments.lexers.dylan', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)), - 'DylanLexer': ('pygments.lexers.dylan', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)), - 'DylanLidLexer': ('pygments.lexers.dylan', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)), - 'ECLLexer': ('pygments.lexers.ecl', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)), - 'ECLexer': ('pygments.lexers.c_like', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')), - 'EarlGreyLexer': ('pygments.lexers.javascript', 'Earl Grey', ('earl-grey', 'earlgrey', 'eg'), ('*.eg',), ('text/x-earl-grey',)), - 'EasytrieveLexer': ('pygments.lexers.scripting', 'Easytrieve', ('easytrieve',), ('*.ezt', '*.mac'), ('text/x-easytrieve',)), - 'EbnfLexer': ('pygments.lexers.parsers', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)), - 'EiffelLexer': ('pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)), - 'ElixirConsoleLexer': ('pygments.lexers.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)), - 'ElixirLexer': ('pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)), - 'ElmLexer': ('pygments.lexers.elm', 'Elm', ('elm',), ('*.elm',), ('text/x-elm',)), - 
'EmacsLispLexer': ('pygments.lexers.lisp', 'EmacsLisp', ('emacs', 'elisp', 'emacs-lisp'), ('*.el',), ('text/x-elisp', 'application/x-elisp')), - 'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)), - 'ErlangLexer': ('pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)), - 'ErlangShellLexer': ('pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)), - 'EvoqueHtmlLexer': ('pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)), - 'EvoqueLexer': ('pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)), - 'EvoqueXmlLexer': ('pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)), - 'EzhilLexer': ('pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)), - 'FSharpLexer': ('pygments.lexers.dotnet', 'F#', ('fsharp', 'f#'), ('*.fs', '*.fsi'), ('text/x-fsharp',)), - 'FactorLexer': ('pygments.lexers.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)), - 'FancyLexer': ('pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)), - 'FantomLexer': ('pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)), - 'FelixLexer': ('pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)), - 'FennelLexer': ('pygments.lexers.lisp', 'Fennel', ('fennel', 'fnl'), ('*.fnl',), ()), - 'FishShellLexer': ('pygments.lexers.shell', 'Fish', ('fish', 'fishshell'), ('*.fish', '*.load'), ('application/x-fish',)), - 'FlatlineLexer': ('pygments.lexers.dsls', 'Flatline', ('flatline',), (), ('text/x-flatline',)), - 'FloScriptLexer': ('pygments.lexers.floscript', 'FloScript', ('floscript', 'flo'), ('*.flo',), ()), - 'ForthLexer': ('pygments.lexers.forth', 'Forth', ('forth',), ('*.frt', '*.fs'), 
('application/x-forth',)), - 'FortranFixedLexer': ('pygments.lexers.fortran', 'FortranFixed', ('fortranfixed',), ('*.f', '*.F'), ()), - 'FortranLexer': ('pygments.lexers.fortran', 'Fortran', ('fortran',), ('*.f03', '*.f90', '*.F03', '*.F90'), ('text/x-fortran',)), - 'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()), - 'FreeFemLexer': ('pygments.lexers.freefem', 'Freefem', ('freefem',), ('*.edp',), ('text/x-freefem',)), - 'GAPLexer': ('pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()), - 'GLShaderLexer': ('pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)), - 'GasLexer': ('pygments.lexers.asm', 'GAS', ('gas', 'asm'), ('*.s', '*.S'), ('text/x-gas',)), - 'GenshiLexer': ('pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')), - 'GenshiTextLexer': ('pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')), - 'GettextLexer': ('pygments.lexers.textfmts', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')), - 'GherkinLexer': ('pygments.lexers.testing', 'Gherkin', ('cucumber', 'gherkin'), ('*.feature',), ('text/x-gherkin',)), - 'GnuplotLexer': ('pygments.lexers.graphics', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)), - 'GoLexer': ('pygments.lexers.go', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)), - 'GoloLexer': ('pygments.lexers.jvm', 'Golo', ('golo',), ('*.golo',), ()), - 'GoodDataCLLexer': ('pygments.lexers.business', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)), - 'GosuLexer': ('pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)), - 'GosuTemplateLexer': ('pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), 
('text/x-gosu-template',)), - 'GroffLexer': ('pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')), - 'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy', '*.gradle'), ('text/x-groovy',)), - 'HLSLShaderLexer': ('pygments.lexers.graphics', 'HLSL', ('hlsl',), ('*.hlsl', '*.hlsli'), ('text/x-hlsl',)), - 'HamlLexer': ('pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)), - 'HandlebarsHtmlLexer': ('pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')), - 'HandlebarsLexer': ('pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()), - 'HaskellLexer': ('pygments.lexers.haskell', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)), - 'HaxeLexer': ('pygments.lexers.haxe', 'Haxe', ('hx', 'haxe', 'hxsl'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')), - 'HexdumpLexer': ('pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()), - 'HsailLexer': ('pygments.lexers.asm', 'HSAIL', ('hsail', 'hsa'), ('*.hsail',), ('text/x-hsail',)), - 'HspecLexer': ('pygments.lexers.haskell', 'Hspec', ('hspec',), (), ()), - 'HtmlDjangoLexer': ('pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), (), ('text/html+django', 'text/html+jinja')), - 'HtmlGenshiLexer': ('pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)), - 'HtmlLexer': ('pygments.lexers.html', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')), - 'HtmlPhpLexer': ('pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')), - 'HtmlSmartyLexer': ('pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), 
('text/html+smarty',)), - 'HttpLexer': ('pygments.lexers.textfmts', 'HTTP', ('http',), (), ()), - 'HxmlLexer': ('pygments.lexers.haxe', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()), - 'HyLexer': ('pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')), - 'HybrisLexer': ('pygments.lexers.scripting', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')), - 'IDLLexer': ('pygments.lexers.idl', 'IDL', ('idl',), ('*.pro',), ('text/idl',)), - 'IconLexer': ('pygments.lexers.unicon', 'Icon', ('icon',), ('*.icon', '*.ICON'), ()), - 'IdrisLexer': ('pygments.lexers.haskell', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)), - 'IgorLexer': ('pygments.lexers.igor', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)), - 'Inform6Lexer': ('pygments.lexers.int_fiction', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()), - 'Inform6TemplateLexer': ('pygments.lexers.int_fiction', 'Inform 6 template', ('i6t',), ('*.i6t',), ()), - 'Inform7Lexer': ('pygments.lexers.int_fiction', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()), - 'IniLexer': ('pygments.lexers.configs', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg', '*.inf'), ('text/x-ini', 'text/inf')), - 'IoLexer': ('pygments.lexers.iolang', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)), - 'IokeLexer': ('pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)), - 'IrcLogsLexer': ('pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)), - 'IsabelleLexer': ('pygments.lexers.theorem', 'Isabelle', ('isabelle',), ('*.thy',), ('text/x-isabelle',)), - 'JLexer': ('pygments.lexers.j', 'J', ('j',), ('*.ijs',), ('text/x-j',)), - 'JagsLexer': ('pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()), - 'JasminLexer': ('pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()), - 'JavaLexer': ('pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)), - 
'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django/Jinja', ('js+django', 'javascript+django', 'js+jinja', 'javascript+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')), - 'JavascriptErbLexer': ('pygments.lexers.templates', 'JavaScript+Ruby', ('js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')), - 'JavascriptGenshiLexer': ('pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')), - 'JavascriptLexer': ('pygments.lexers.javascript', 'JavaScript', ('js', 'javascript'), ('*.js', '*.jsm'), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')), - 'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')), - 'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')), - 'JclLexer': ('pygments.lexers.scripting', 'JCL', ('jcl',), ('*.jcl',), ('text/x-jcl',)), - 'JsgfLexer': ('pygments.lexers.grammar_notation', 'JSGF', ('jsgf',), ('*.jsgf',), ('application/jsgf', 'application/x-jsgf', 'text/jsgf')), - 'JsonBareObjectLexer': ('pygments.lexers.data', 'JSONBareObject', ('json-object',), (), ('application/json-object',)), - 'JsonLdLexer': ('pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)), - 'JsonLexer': ('pygments.lexers.data', 'JSON', ('json',), ('*.json',), ('application/json',)), - 'JspLexer': 
('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)), - 'JuliaConsoleLexer': ('pygments.lexers.julia', 'Julia console', ('jlcon',), (), ()), - 'JuliaLexer': ('pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')), - 'JuttleLexer': ('pygments.lexers.javascript', 'Juttle', ('juttle', 'juttle'), ('*.juttle',), ('application/juttle', 'application/x-juttle', 'text/x-juttle', 'text/juttle')), - 'KalLexer': ('pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')), - 'KconfigLexer': ('pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)), - 'KokaLexer': ('pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)), - 'KotlinLexer': ('pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt',), ('text/x-kotlin',)), - 'LSLLexer': ('pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)), - 'LassoCssLexer': ('pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)), - 'LassoHtmlLexer': ('pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')), - 'LassoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Lasso', ('js+lasso', 'javascript+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')), - 'LassoLexer': ('pygments.lexers.javascript', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)), - 'LassoXmlLexer': ('pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)), - 'LeanLexer': ('pygments.lexers.theorem', 'Lean', ('lean',), ('*.lean',), ('text/x-lean',)), - 'LessCssLexer': ('pygments.lexers.css', 'LessCss', ('less',), ('*.less',), ('text/x-less-css',)), - 
'LighttpdConfLexer': ('pygments.lexers.configs', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)), - 'LimboLexer': ('pygments.lexers.inferno', 'Limbo', ('limbo',), ('*.b',), ('text/limbo',)), - 'LiquidLexer': ('pygments.lexers.templates', 'liquid', ('liquid',), ('*.liquid',), ()), - 'LiterateAgdaLexer': ('pygments.lexers.haskell', 'Literate Agda', ('lagda', 'literate-agda'), ('*.lagda',), ('text/x-literate-agda',)), - 'LiterateCryptolLexer': ('pygments.lexers.haskell', 'Literate Cryptol', ('lcry', 'literate-cryptol', 'lcryptol'), ('*.lcry',), ('text/x-literate-cryptol',)), - 'LiterateHaskellLexer': ('pygments.lexers.haskell', 'Literate Haskell', ('lhs', 'literate-haskell', 'lhaskell'), ('*.lhs',), ('text/x-literate-haskell',)), - 'LiterateIdrisLexer': ('pygments.lexers.haskell', 'Literate Idris', ('lidr', 'literate-idris', 'lidris'), ('*.lidr',), ('text/x-literate-idris',)), - 'LiveScriptLexer': ('pygments.lexers.javascript', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)), - 'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)), - 'LogosLexer': ('pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)), - 'LogtalkLexer': ('pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)), - 'LuaLexer': ('pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')), - 'MOOCodeLexer': ('pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)), - 'MSDOSSessionLexer': ('pygments.lexers.shell', 'MSDOS Session', ('doscon',), (), ()), - 'MakefileLexer': ('pygments.lexers.make', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)), - 'MakoCssLexer': ('pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)), - 
'MakoHtmlLexer': ('pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)), - 'MakoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Mako', ('js+mako', 'javascript+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')), - 'MakoLexer': ('pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)), - 'MakoXmlLexer': ('pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)), - 'MaqlLexer': ('pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')), - 'MarkdownLexer': ('pygments.lexers.markup', 'markdown', ('md',), ('*.md',), ('text/x-markdown',)), - 'MaskLexer': ('pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)), - 'MasonLexer': ('pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)), - 'MathematicaLexer': ('pygments.lexers.algebra', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')), - 'MatlabLexer': ('pygments.lexers.matlab', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)), - 'MatlabSessionLexer': ('pygments.lexers.matlab', 'Matlab session', ('matlabsession',), (), ()), - 'MiniDLexer': ('pygments.lexers.d', 'MiniD', ('minid',), (), ('text/x-minidsrc',)), - 'ModelicaLexer': ('pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)), - 'Modula2Lexer': ('pygments.lexers.modula2', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)), - 'MoinWikiLexer': ('pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)), - 'MonkeyLexer': ('pygments.lexers.basic', 'Monkey', ('monkey',), ('*.monkey',), 
('text/x-monkey',)), - 'MonteLexer': ('pygments.lexers.monte', 'Monte', ('monte',), ('*.mt',), ()), - 'MoonScriptLexer': ('pygments.lexers.scripting', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')), - 'MozPreprocCssLexer': ('pygments.lexers.markup', 'CSS+mozpreproc', ('css+mozpreproc',), ('*.css.in',), ()), - 'MozPreprocHashLexer': ('pygments.lexers.markup', 'mozhashpreproc', ('mozhashpreproc',), (), ()), - 'MozPreprocJavascriptLexer': ('pygments.lexers.markup', 'Javascript+mozpreproc', ('javascript+mozpreproc',), ('*.js.in',), ()), - 'MozPreprocPercentLexer': ('pygments.lexers.markup', 'mozpercentpreproc', ('mozpercentpreproc',), (), ()), - 'MozPreprocXulLexer': ('pygments.lexers.markup', 'XUL+mozpreproc', ('xul+mozpreproc',), ('*.xul.in',), ()), - 'MqlLexer': ('pygments.lexers.c_like', 'MQL', ('mql', 'mq4', 'mq5', 'mql4', 'mql5'), ('*.mq4', '*.mq5', '*.mqh'), ('text/x-mql',)), - 'MscgenLexer': ('pygments.lexers.dsls', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()), - 'MuPADLexer': ('pygments.lexers.algebra', 'MuPAD', ('mupad',), ('*.mu',), ()), - 'MxmlLexer': ('pygments.lexers.actionscript', 'MXML', ('mxml',), ('*.mxml',), ()), - 'MySqlLexer': ('pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)), - 'MyghtyCssLexer': ('pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)), - 'MyghtyHtmlLexer': ('pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), ('text/html+myghty',)), - 'MyghtyJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Myghty', ('js+myghty', 'javascript+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')), - 'MyghtyLexer': ('pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)), - 'MyghtyXmlLexer': ('pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)), - 'NCLLexer': ('pygments.lexers.ncl', 
'NCL', ('ncl',), ('*.ncl',), ('text/ncl',)), - 'NSISLexer': ('pygments.lexers.installers', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)), - 'NasmLexer': ('pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM'), ('text/x-nasm',)), - 'NasmObjdumpLexer': ('pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)), - 'NemerleLexer': ('pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)), - 'NesCLexer': ('pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)), - 'NewLispLexer': ('pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl', '*.kif'), ('text/x-newlisp', 'application/x-newlisp')), - 'NewspeakLexer': ('pygments.lexers.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)), - 'NginxConfLexer': ('pygments.lexers.configs', 'Nginx configuration file', ('nginx',), ('nginx.conf',), ('text/x-nginx-conf',)), - 'NimrodLexer': ('pygments.lexers.nimrod', 'Nimrod', ('nim', 'nimrod'), ('*.nim', '*.nimrod'), ('text/x-nim',)), - 'NitLexer': ('pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()), - 'NixLexer': ('pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)), - 'NuSMVLexer': ('pygments.lexers.smv', 'NuSMV', ('nusmv',), ('*.smv',), ()), - 'NumPyLexer': ('pygments.lexers.python', 'NumPy', ('numpy',), (), ()), - 'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)), - 'ObjectiveCLexer': ('pygments.lexers.objective', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)), - 'ObjectiveCppLexer': ('pygments.lexers.objective', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)), - 'ObjectiveJLexer': ('pygments.lexers.javascript', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)), - 'OcamlLexer': 
('pygments.lexers.ml', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)), - 'OctaveLexer': ('pygments.lexers.matlab', 'Octave', ('octave',), ('*.m',), ('text/octave',)), - 'OdinLexer': ('pygments.lexers.archetype', 'ODIN', ('odin',), ('*.odin',), ('text/odin',)), - 'OocLexer': ('pygments.lexers.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)), - 'OpaLexer': ('pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)), - 'OpenEdgeLexer': ('pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')), - 'PacmanConfLexer': ('pygments.lexers.configs', 'PacmanConf', ('pacmanconf',), ('pacman.conf',), ()), - 'PanLexer': ('pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()), - 'ParaSailLexer': ('pygments.lexers.parasail', 'ParaSail', ('parasail',), ('*.psi', '*.psl'), ('text/x-parasail',)), - 'PawnLexer': ('pygments.lexers.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)), - 'Perl6Lexer': ('pygments.lexers.perl', 'Perl6', ('perl6', 'pl6'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t'), ('text/x-perl6', 'application/x-perl6')), - 'PerlLexer': ('pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t'), ('text/x-perl', 'application/x-perl')), - 'PhpLexer': ('pygments.lexers.php', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)), - 'PigLexer': ('pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)), - 'PikeLexer': ('pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)), - 'PkgConfigLexer': ('pygments.lexers.configs', 'PkgConfig', ('pkgconfig',), ('*.pc',), ()), - 'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)), - 'PonyLexer': ('pygments.lexers.pony', 'Pony', ('pony',), ('*.pony',), ()), - 'PostScriptLexer': ('pygments.lexers.graphics', 
'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)), - 'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)), - 'PostgresLexer': ('pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)), - 'PovrayLexer': ('pygments.lexers.graphics', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)), - 'PowerShellLexer': ('pygments.lexers.shell', 'PowerShell', ('powershell', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)), - 'PowerShellSessionLexer': ('pygments.lexers.shell', 'PowerShell Session', ('ps1con',), (), ()), - 'PraatLexer': ('pygments.lexers.praat', 'Praat', ('praat',), ('*.praat', '*.proc', '*.psc'), ()), - 'PrologLexer': ('pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)), - 'PropertiesLexer': ('pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)), - 'ProtoBufLexer': ('pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()), - 'PugLexer': ('pygments.lexers.html', 'Pug', ('pug', 'jade'), ('*.pug', '*.jade'), ('text/x-pug', 'text/x-jade')), - 'PuppetLexer': ('pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()), - 'PyPyLogLexer': ('pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)), - 'Python3Lexer': ('pygments.lexers.python', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')), - 'Python3TracebackLexer': ('pygments.lexers.python', 'Python 3.0 Traceback', ('py3tb',), ('*.py3tb',), ('text/x-python3-traceback',)), - 'PythonConsoleLexer': ('pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)), - 'PythonLexer': ('pygments.lexers.python', 'Python', ('python', 'py', 'sage'), 
('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'), ('text/x-python', 'application/x-python')), - 'PythonTracebackLexer': ('pygments.lexers.python', 'Python Traceback', ('pytb',), ('*.pytb',), ('text/x-python-traceback',)), - 'QBasicLexer': ('pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)), - 'QVToLexer': ('pygments.lexers.qvt', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()), - 'QmlLexer': ('pygments.lexers.webmisc', 'QML', ('qml', 'qbs'), ('*.qml', '*.qbs'), ('application/x-qml', 'application/x-qt.qbs+qml')), - 'RConsoleLexer': ('pygments.lexers.r', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()), - 'RNCCompactLexer': ('pygments.lexers.rnc', 'Relax-NG Compact', ('rnc', 'rng-compact'), ('*.rnc',), ()), - 'RPMSpecLexer': ('pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)), - 'RacketLexer': ('pygments.lexers.lisp', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')), - 'RagelCLexer': ('pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()), - 'RagelCppLexer': ('pygments.lexers.parsers', 'Ragel in CPP Host', ('ragel-cpp',), ('*.rl',), ()), - 'RagelDLexer': ('pygments.lexers.parsers', 'Ragel in D Host', ('ragel-d',), ('*.rl',), ()), - 'RagelEmbeddedLexer': ('pygments.lexers.parsers', 'Embedded Ragel', ('ragel-em',), ('*.rl',), ()), - 'RagelJavaLexer': ('pygments.lexers.parsers', 'Ragel in Java Host', ('ragel-java',), ('*.rl',), ()), - 'RagelLexer': ('pygments.lexers.parsers', 'Ragel', ('ragel',), (), ()), - 'RagelObjectiveCLexer': ('pygments.lexers.parsers', 'Ragel in Objective C Host', ('ragel-objc',), ('*.rl',), ()), - 'RagelRubyLexer': ('pygments.lexers.parsers', 'Ragel in Ruby Host', ('ragel-ruby', 'ragel-rb'), ('*.rl',), ()), - 'RawTokenLexer': ('pygments.lexers.special', 'Raw token data', ('raw',), (), ('application/x-pygments-tokens',)), - 'RdLexer': ('pygments.lexers.r', 'Rd', ('rd',), 
('*.Rd',), ('text/x-r-doc',)), - 'RebolLexer': ('pygments.lexers.rebol', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)), - 'RedLexer': ('pygments.lexers.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')), - 'RedcodeLexer': ('pygments.lexers.esoteric', 'Redcode', ('redcode',), ('*.cw',), ()), - 'RegeditLexer': ('pygments.lexers.configs', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)), - 'ResourceLexer': ('pygments.lexers.resource', 'ResourceBundle', ('resource', 'resourcebundle'), ('*.txt',), ()), - 'RexxLexer': ('pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)), - 'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)), - 'RoboconfGraphLexer': ('pygments.lexers.roboconf', 'Roboconf Graph', ('roboconf-graph',), ('*.graph',), ()), - 'RoboconfInstancesLexer': ('pygments.lexers.roboconf', 'Roboconf Instances', ('roboconf-instances',), ('*.instances',), ()), - 'RobotFrameworkLexer': ('pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.txt', '*.robot'), ('text/x-robotframework',)), - 'RqlLexer': ('pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)), - 'RslLexer': ('pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)), - 'RstLexer': ('pygments.lexers.markup', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')), - 'RtsLexer': ('pygments.lexers.trafficscript', 'TrafficScript', ('rts', 'trafficscript'), ('*.rts',), ()), - 'RubyConsoleLexer': ('pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)), - 'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile'), ('text/x-ruby', 'application/x-ruby')), - 
'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust', 'rs'), ('*.rs', '*.rs.in'), ('text/rust',)), - 'SASLexer': ('pygments.lexers.sas', 'SAS', ('sas',), ('*.SAS', '*.sas'), ('text/x-sas', 'text/sas', 'application/x-sas')), - 'SLexer': ('pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')), - 'SMLLexer': ('pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')), - 'SarlLexer': ('pygments.lexers.jvm', 'SARL', ('sarl',), ('*.sarl',), ('text/x-sarl',)), - 'SassLexer': ('pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)), - 'ScalaLexer': ('pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)), - 'ScamlLexer': ('pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)), - 'SchemeLexer': ('pygments.lexers.lisp', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')), - 'ScilabLexer': ('pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)), - 'ScssLexer': ('pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)), - 'ShenLexer': ('pygments.lexers.lisp', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')), - 'SilverLexer': ('pygments.lexers.verification', 'Silver', ('silver',), ('*.sil', '*.vpr'), ()), - 'SlashLexer': ('pygments.lexers.slash', 'Slash', ('slash',), ('*.sl',), ()), - 'SlimLexer': ('pygments.lexers.webmisc', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)), - 'SlurmBashLexer': ('pygments.lexers.shell', 'Slurm', ('slurm', 'sbatch'), ('*.sl',), ()), - 'SmaliLexer': ('pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)), - 'SmalltalkLexer': ('pygments.lexers.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), 
('text/x-smalltalk',)), - 'SmartGameFormatLexer': ('pygments.lexers.sgf', 'SmartGameFormat', ('sgf',), ('*.sgf',), ()), - 'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)), - 'SnobolLexer': ('pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)), - 'SnowballLexer': ('pygments.lexers.dsls', 'Snowball', ('snowball',), ('*.sbl',), ()), - 'SourcePawnLexer': ('pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)), - 'SourcesListLexer': ('pygments.lexers.installers', 'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()), - 'SparqlLexer': ('pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)), - 'SqlLexer': ('pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)), - 'SqliteConsoleLexer': ('pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)), - 'SquidConfLexer': ('pygments.lexers.configs', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)), - 'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)), - 'StanLexer': ('pygments.lexers.modeling', 'Stan', ('stan',), ('*.stan',), ()), - 'StataLexer': ('pygments.lexers.stata', 'Stata', ('stata', 'do'), ('*.do', '*.ado'), ('text/x-stata', 'text/stata', 'application/x-stata')), - 'SuperColliderLexer': ('pygments.lexers.supercollider', 'SuperCollider', ('sc', 'supercollider'), ('*.sc', '*.scd'), ('application/supercollider', 'text/supercollider')), - 'SwiftLexer': ('pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)), - 'SwigLexer': ('pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)), - 'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)), - 
'TAPLexer': ('pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()), - 'TOMLLexer': ('pygments.lexers.configs', 'TOML', ('toml',), ('*.toml',), ()), - 'Tads3Lexer': ('pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()), - 'TasmLexer': ('pygments.lexers.asm', 'TASM', ('tasm',), ('*.asm', '*.ASM', '*.tasm'), ('text/x-tasm',)), - 'TclLexer': ('pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')), - 'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)), - 'TcshSessionLexer': ('pygments.lexers.shell', 'Tcsh Session', ('tcshcon',), (), ()), - 'TeaTemplateLexer': ('pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)), - 'TeraTermLexer': ('pygments.lexers.teraterm', 'Tera Term macro', ('ttl', 'teraterm', 'teratermmacro'), ('*.ttl',), ('text/x-teratermmacro',)), - 'TermcapLexer': ('pygments.lexers.configs', 'Termcap', ('termcap',), ('termcap', 'termcap.src'), ()), - 'TerminfoLexer': ('pygments.lexers.configs', 'Terminfo', ('terminfo',), ('terminfo', 'terminfo.src'), ()), - 'TerraformLexer': ('pygments.lexers.configs', 'Terraform', ('terraform', 'tf'), ('*.tf',), ('application/x-tf', 'application/x-terraform')), - 'TexLexer': ('pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')), - 'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)), - 'ThriftLexer': ('pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)), - 'TodotxtLexer': ('pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)), - 'TransactSqlLexer': ('pygments.lexers.sql', 'Transact-SQL', ('tsql', 't-sql'), ('*.sql',), ('text/x-tsql',)), - 'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()), - 'TurtleLexer': ('pygments.lexers.rdf', 'Turtle', 
('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')), - 'TwigHtmlLexer': ('pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)), - 'TwigLexer': ('pygments.lexers.templates', 'Twig', ('twig',), (), ('application/x-twig',)), - 'TypeScriptLexer': ('pygments.lexers.javascript', 'TypeScript', ('ts', 'typescript'), ('*.ts', '*.tsx'), ('text/x-typescript',)), - 'TypoScriptCssDataLexer': ('pygments.lexers.typoscript', 'TypoScriptCssData', ('typoscriptcssdata',), (), ()), - 'TypoScriptHtmlDataLexer': ('pygments.lexers.typoscript', 'TypoScriptHtmlData', ('typoscripthtmldata',), (), ()), - 'TypoScriptLexer': ('pygments.lexers.typoscript', 'TypoScript', ('typoscript',), ('*.typoscript',), ('text/x-typoscript',)), - 'UcodeLexer': ('pygments.lexers.unicon', 'ucode', ('ucode',), ('*.u', '*.u1', '*.u2'), ()), - 'UniconLexer': ('pygments.lexers.unicon', 'Unicon', ('unicon',), ('*.icn',), ('text/unicon',)), - 'UrbiscriptLexer': ('pygments.lexers.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)), - 'VBScriptLexer': ('pygments.lexers.basic', 'VBScript', ('vbscript',), ('*.vbs', '*.VBS'), ()), - 'VCLLexer': ('pygments.lexers.varnish', 'VCL', ('vcl',), ('*.vcl',), ('text/x-vclsrc',)), - 'VCLSnippetLexer': ('pygments.lexers.varnish', 'VCLSnippets', ('vclsnippets', 'vclsnippet'), (), ('text/x-vclsnippet',)), - 'VCTreeStatusLexer': ('pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()), - 'VGLLexer': ('pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()), - 'ValaLexer': ('pygments.lexers.c_like', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)), - 'VbNetAspxLexer': ('pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()), - 'VbNetLexer': ('pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')), - 'VelocityHtmlLexer': ('pygments.lexers.templates', 'HTML+Velocity', 
('html+velocity',), (), ('text/html+velocity',)), - 'VelocityLexer': ('pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()), - 'VelocityXmlLexer': ('pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)), - 'VerilogLexer': ('pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)), - 'VhdlLexer': ('pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)), - 'VimLexer': ('pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)), - 'WDiffLexer': ('pygments.lexers.diff', 'WDiff', ('wdiff',), ('*.wdiff',), ()), - 'WhileyLexer': ('pygments.lexers.whiley', 'Whiley', ('whiley',), ('*.whiley',), ('text/x-whiley',)), - 'X10Lexer': ('pygments.lexers.x10', 'X10', ('x10', 'xten'), ('*.x10',), ('text/x-x10',)), - 'XQueryLexer': ('pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')), - 'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')), - 'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+erb', 'xml+ruby'), (), ('application/xml+ruby',)), - 'XmlLexer': ('pygments.lexers.html', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')), - 'XmlPhpLexer': ('pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)), - 'XmlSmartyLexer': ('pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)), - 'XorgLexer': ('pygments.lexers.xorg', 'Xorg', ('xorg.conf',), ('xorg.conf',), ()), - 'XsltLexer': ('pygments.lexers.html', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), 
('application/xsl+xml', 'application/xslt+xml')), - 'XtendLexer': ('pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)), - 'XtlangLexer': ('pygments.lexers.lisp', 'xtlang', ('extempore',), ('*.xtm',), ()), - 'YamlJinjaLexer': ('pygments.lexers.templates', 'YAML+Jinja', ('yaml+jinja', 'salt', 'sls'), ('*.sls',), ('text/x-yaml+jinja', 'text/x-sls')), - 'YamlLexer': ('pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)), - 'ZephirLexer': ('pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()), + 'ABAPLexer': ('typecode._vendor.pygments.lexers.business', 'ABAP', ('abap',), ('*.abap', '*.ABAP'), ('text/x-abap',)), + 'APLLexer': ('typecode._vendor.pygments.lexers.apl', 'APL', ('apl',), ('*.apl',), ()), + 'AbnfLexer': ('typecode._vendor.pygments.lexers.grammar_notation', 'ABNF', ('abnf',), ('*.abnf',), ('text/x-abnf',)), + 'ActionScript3Lexer': ('typecode._vendor.pygments.lexers.actionscript', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')), + 'ActionScriptLexer': ('typecode._vendor.pygments.lexers.actionscript', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')), + 'AdaLexer': ('typecode._vendor.pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)), + 'AdlLexer': ('typecode._vendor.pygments.lexers.archetype', 'ADL', ('adl',), ('*.adl', '*.adls', '*.adlf', '*.adlx'), ()), + 'AgdaLexer': ('typecode._vendor.pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)), + 'AheuiLexer': ('typecode._vendor.pygments.lexers.esoteric', 'Aheui', ('aheui',), ('*.aheui',), ()), + 'AlloyLexer': ('typecode._vendor.pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)), + 'AmbientTalkLexer': ('typecode._vendor.pygments.lexers.ambient', 'AmbientTalk', ('at', 'ambienttalk', 
'ambienttalk/2'), ('*.at',), ('text/x-ambienttalk',)), + 'AmplLexer': ('typecode._vendor.pygments.lexers.ampl', 'Ampl', ('ampl',), ('*.run',), ()), + 'Angular2HtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML + Angular2', ('html+ng2',), ('*.ng2',), ()), + 'Angular2Lexer': ('typecode._vendor.pygments.lexers.templates', 'Angular2', ('ng2',), (), ()), + 'AntlrActionScriptLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-as', 'antlr-actionscript'), ('*.G', '*.g'), ()), + 'AntlrCSharpLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()), + 'AntlrCppLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()), + 'AntlrJavaLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With Java Target', ('antlr-java',), ('*.G', '*.g'), ()), + 'AntlrLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR', ('antlr',), (), ()), + 'AntlrObjectiveCLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With ObjectiveC Target', ('antlr-objc',), ('*.G', '*.g'), ()), + 'AntlrPerlLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With Perl Target', ('antlr-perl',), ('*.G', '*.g'), ()), + 'AntlrPythonLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()), + 'AntlrRubyLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With Ruby Target', ('antlr-ruby', 'antlr-rb'), ('*.G', '*.g'), ()), + 'ApacheConfLexer': ('typecode._vendor.pygments.lexers.configs', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)), + 'AppleScriptLexer': ('typecode._vendor.pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()), + 'ArduinoLexer': ('typecode._vendor.pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)), + 
'ArrowLexer': ('typecode._vendor.pygments.lexers.arrow', 'Arrow', ('arrow',), ('*.arw',), ()), + 'AspectJLexer': ('typecode._vendor.pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)), + 'AsymptoteLexer': ('typecode._vendor.pygments.lexers.graphics', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)), + 'AugeasLexer': ('typecode._vendor.pygments.lexers.configs', 'Augeas', ('augeas',), ('*.aug',), ()), + 'AutoItLexer': ('typecode._vendor.pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)), + 'AutohotkeyLexer': ('typecode._vendor.pygments.lexers.automation', 'autohotkey', ('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)), + 'AwkLexer': ('typecode._vendor.pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)), + 'BBCBasicLexer': ('typecode._vendor.pygments.lexers.basic', 'BBC Basic', ('bbcbasic',), ('*.bbc',), ()), + 'BBCodeLexer': ('typecode._vendor.pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)), + 'BCLexer': ('typecode._vendor.pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()), + 'BSTLexer': ('typecode._vendor.pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()), + 'BareLexer': ('typecode._vendor.pygments.lexers.bare', 'BARE', ('bare',), ('*.bare',), ()), + 'BaseMakefileLexer': ('typecode._vendor.pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()), + 'BashLexer': ('typecode._vendor.pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript', 'text/x-shellscript')), + 'BashSessionLexer': ('typecode._vendor.pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 
'application/x-sh-session')), + 'BatchLexer': ('typecode._vendor.pygments.lexers.shell', 'Batchfile', ('bat', 'batch', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)), + 'BefungeLexer': ('typecode._vendor.pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)), + 'BibTeXLexer': ('typecode._vendor.pygments.lexers.bibtex', 'BibTeX', ('bib', 'bibtex'), ('*.bib',), ('text/x-bibtex',)), + 'BlitzBasicLexer': ('typecode._vendor.pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)), + 'BlitzMaxLexer': ('typecode._vendor.pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)), + 'BnfLexer': ('typecode._vendor.pygments.lexers.grammar_notation', 'BNF', ('bnf',), ('*.bnf',), ('text/x-bnf',)), + 'BoaLexer': ('typecode._vendor.pygments.lexers.boa', 'Boa', ('boa',), ('*.boa',), ()), + 'BooLexer': ('typecode._vendor.pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)), + 'BoogieLexer': ('typecode._vendor.pygments.lexers.verification', 'Boogie', ('boogie',), ('*.bpl',), ()), + 'BrainfuckLexer': ('typecode._vendor.pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)), + 'BugsLexer': ('typecode._vendor.pygments.lexers.modeling', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()), + 'CAmkESLexer': ('typecode._vendor.pygments.lexers.esoteric', 'CAmkES', ('camkes', 'idl4'), ('*.camkes', '*.idl4'), ()), + 'CLexer': ('typecode._vendor.pygments.lexers.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')), + 'CMakeLexer': ('typecode._vendor.pygments.lexers.make', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)), + 'CObjdumpLexer': ('typecode._vendor.pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)), + 'CPSALexer': ('typecode._vendor.pygments.lexers.lisp', 'CPSA', ('cpsa',), 
('*.cpsa',), ()), + 'CSharpAspxLexer': ('typecode._vendor.pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()), + 'CSharpLexer': ('typecode._vendor.pygments.lexers.dotnet', 'C#', ('csharp', 'c#'), ('*.cs',), ('text/x-csharp',)), + 'Ca65Lexer': ('typecode._vendor.pygments.lexers.asm', 'ca65 assembler', ('ca65',), ('*.s',), ()), + 'CadlLexer': ('typecode._vendor.pygments.lexers.archetype', 'cADL', ('cadl',), ('*.cadl',), ()), + 'CapDLLexer': ('typecode._vendor.pygments.lexers.esoteric', 'CapDL', ('capdl',), ('*.cdl',), ()), + 'CapnProtoLexer': ('typecode._vendor.pygments.lexers.capnproto', "Cap'n Proto", ('capnp',), ('*.capnp',), ()), + 'CbmBasicV2Lexer': ('typecode._vendor.pygments.lexers.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()), + 'CeylonLexer': ('typecode._vendor.pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)), + 'Cfengine3Lexer': ('typecode._vendor.pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()), + 'ChaiscriptLexer': ('typecode._vendor.pygments.lexers.scripting', 'ChaiScript', ('chai', 'chaiscript'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')), + 'ChapelLexer': ('typecode._vendor.pygments.lexers.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()), + 'CharmciLexer': ('typecode._vendor.pygments.lexers.c_like', 'Charmci', ('charmci',), ('*.ci',), ()), + 'CheetahHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')), + 'CheetahJavascriptLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')), + 'CheetahLexer': 
('typecode._vendor.pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')), + 'CheetahXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')), + 'CirruLexer': ('typecode._vendor.pygments.lexers.webmisc', 'Cirru', ('cirru',), ('*.cirru',), ('text/x-cirru',)), + 'ClayLexer': ('typecode._vendor.pygments.lexers.c_like', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)), + 'CleanLexer': ('typecode._vendor.pygments.lexers.clean', 'Clean', ('clean',), ('*.icl', '*.dcl'), ()), + 'ClojureLexer': ('typecode._vendor.pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')), + 'ClojureScriptLexer': ('typecode._vendor.pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')), + 'CobolFreeformatLexer': ('typecode._vendor.pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()), + 'CobolLexer': ('typecode._vendor.pygments.lexers.business', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)), + 'CoffeeScriptLexer': ('typecode._vendor.pygments.lexers.javascript', 'CoffeeScript', ('coffee-script', 'coffeescript', 'coffee'), ('*.coffee',), ('text/coffeescript',)), + 'ColdfusionCFCLexer': ('typecode._vendor.pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()), + 'ColdfusionHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)), + 'ColdfusionLexer': ('typecode._vendor.pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()), + 'CommonLispLexer': ('typecode._vendor.pygments.lexers.lisp', 'Common Lisp', ('common-lisp', 'cl', 'lisp'), ('*.cl', '*.lisp'), ('text/x-common-lisp',)), + 'ComponentPascalLexer': 
('typecode._vendor.pygments.lexers.oberon', 'Component Pascal', ('componentpascal', 'cp'), ('*.cp', '*.cps'), ('text/x-component-pascal',)), + 'CoqLexer': ('typecode._vendor.pygments.lexers.theorem', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)), + 'CppLexer': ('typecode._vendor.pygments.lexers.c_cpp', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP'), ('text/x-c++hdr', 'text/x-c++src')), + 'CppObjdumpLexer': ('typecode._vendor.pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)), + 'CrmshLexer': ('typecode._vendor.pygments.lexers.dsls', 'Crmsh', ('crmsh', 'pcmk'), ('*.crmsh', '*.pcmk'), ()), + 'CrocLexer': ('typecode._vendor.pygments.lexers.d', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)), + 'CryptolLexer': ('typecode._vendor.pygments.lexers.haskell', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)), + 'CrystalLexer': ('typecode._vendor.pygments.lexers.crystal', 'Crystal', ('cr', 'crystal'), ('*.cr',), ('text/x-crystal',)), + 'CsoundDocumentLexer': ('typecode._vendor.pygments.lexers.csound', 'Csound Document', ('csound-document', 'csound-csd'), ('*.csd',), ()), + 'CsoundOrchestraLexer': ('typecode._vendor.pygments.lexers.csound', 'Csound Orchestra', ('csound', 'csound-orc'), ('*.orc', '*.udo'), ()), + 'CsoundScoreLexer': ('typecode._vendor.pygments.lexers.csound', 'Csound Score', ('csound-score', 'csound-sco'), ('*.sco',), ()), + 'CssDjangoLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')), + 'CssErbLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Ruby', ('css+erb', 'css+ruby'), (), ('text/css+ruby',)), + 'CssGenshiLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)), + 
'CssLexer': ('typecode._vendor.pygments.lexers.css', 'CSS', ('css',), ('*.css',), ('text/css',)), + 'CssPhpLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)), + 'CssSmartyLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)), + 'CudaLexer': ('typecode._vendor.pygments.lexers.c_like', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)), + 'CypherLexer': ('typecode._vendor.pygments.lexers.graph', 'Cypher', ('cypher',), ('*.cyp', '*.cypher'), ()), + 'CythonLexer': ('typecode._vendor.pygments.lexers.python', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')), + 'DLexer': ('typecode._vendor.pygments.lexers.d', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)), + 'DObjdumpLexer': ('typecode._vendor.pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)), + 'DarcsPatchLexer': ('typecode._vendor.pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()), + 'DartLexer': ('typecode._vendor.pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)), + 'Dasm16Lexer': ('typecode._vendor.pygments.lexers.asm', 'DASM16', ('dasm16',), ('*.dasm16', '*.dasm'), ('text/x-dasm16',)), + 'DebianControlLexer': ('typecode._vendor.pygments.lexers.installers', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()), + 'DelphiLexer': ('typecode._vendor.pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)), + 'DevicetreeLexer': ('typecode._vendor.pygments.lexers.devicetree', 'Devicetree', ('devicetree', 'dts'), ('*.dts', '*.dtsi'), ('text/x-c',)), + 'DgLexer': ('typecode._vendor.pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)), + 'DiffLexer': ('typecode._vendor.pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), 
('text/x-diff', 'text/x-patch')), + 'DjangoLexer': ('typecode._vendor.pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')), + 'DockerLexer': ('typecode._vendor.pygments.lexers.configs', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)), + 'DtdLexer': ('typecode._vendor.pygments.lexers.html', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)), + 'DuelLexer': ('typecode._vendor.pygments.lexers.webmisc', 'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')), + 'DylanConsoleLexer': ('typecode._vendor.pygments.lexers.dylan', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)), + 'DylanLexer': ('typecode._vendor.pygments.lexers.dylan', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)), + 'DylanLidLexer': ('typecode._vendor.pygments.lexers.dylan', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)), + 'ECLLexer': ('typecode._vendor.pygments.lexers.ecl', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)), + 'ECLexer': ('typecode._vendor.pygments.lexers.c_like', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')), + 'EarlGreyLexer': ('typecode._vendor.pygments.lexers.javascript', 'Earl Grey', ('earl-grey', 'earlgrey', 'eg'), ('*.eg',), ('text/x-earl-grey',)), + 'EasytrieveLexer': ('typecode._vendor.pygments.lexers.scripting', 'Easytrieve', ('easytrieve',), ('*.ezt', '*.mac'), ('text/x-easytrieve',)), + 'EbnfLexer': ('typecode._vendor.pygments.lexers.parsers', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)), + 'EiffelLexer': ('typecode._vendor.pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)), + 'ElixirConsoleLexer': ('typecode._vendor.pygments.lexers.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)), + 'ElixirLexer': 
('typecode._vendor.pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.eex', '*.exs'), ('text/x-elixir',)), + 'ElmLexer': ('typecode._vendor.pygments.lexers.elm', 'Elm', ('elm',), ('*.elm',), ('text/x-elm',)), + 'EmacsLispLexer': ('typecode._vendor.pygments.lexers.lisp', 'EmacsLisp', ('emacs', 'elisp', 'emacs-lisp'), ('*.el',), ('text/x-elisp', 'application/x-elisp')), + 'EmailLexer': ('typecode._vendor.pygments.lexers.email', 'E-mail', ('email', 'eml'), ('*.eml',), ('message/rfc822',)), + 'ErbLexer': ('typecode._vendor.pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)), + 'ErlangLexer': ('typecode._vendor.pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)), + 'ErlangShellLexer': ('typecode._vendor.pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)), + 'EvoqueHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)), + 'EvoqueLexer': ('typecode._vendor.pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)), + 'EvoqueXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)), + 'ExeclineLexer': ('typecode._vendor.pygments.lexers.shell', 'execline', ('execline',), ('*.exec',), ()), + 'EzhilLexer': ('typecode._vendor.pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)), + 'FSharpLexer': ('typecode._vendor.pygments.lexers.dotnet', 'F#', ('fsharp', 'f#'), ('*.fs', '*.fsi'), ('text/x-fsharp',)), + 'FStarLexer': ('typecode._vendor.pygments.lexers.ml', 'FStar', ('fstar',), ('*.fst', '*.fsti'), ('text/x-fstar',)), + 'FactorLexer': ('typecode._vendor.pygments.lexers.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)), + 'FancyLexer': ('typecode._vendor.pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), 
('*.fy', '*.fancypack'), ('text/x-fancysrc',)), + 'FantomLexer': ('typecode._vendor.pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)), + 'FelixLexer': ('typecode._vendor.pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)), + 'FennelLexer': ('typecode._vendor.pygments.lexers.lisp', 'Fennel', ('fennel', 'fnl'), ('*.fnl',), ()), + 'FishShellLexer': ('typecode._vendor.pygments.lexers.shell', 'Fish', ('fish', 'fishshell'), ('*.fish', '*.load'), ('application/x-fish',)), + 'FlatlineLexer': ('typecode._vendor.pygments.lexers.dsls', 'Flatline', ('flatline',), (), ('text/x-flatline',)), + 'FloScriptLexer': ('typecode._vendor.pygments.lexers.floscript', 'FloScript', ('floscript', 'flo'), ('*.flo',), ()), + 'ForthLexer': ('typecode._vendor.pygments.lexers.forth', 'Forth', ('forth',), ('*.frt', '*.fs'), ('application/x-forth',)), + 'FortranFixedLexer': ('typecode._vendor.pygments.lexers.fortran', 'FortranFixed', ('fortranfixed',), ('*.f', '*.F'), ()), + 'FortranLexer': ('typecode._vendor.pygments.lexers.fortran', 'Fortran', ('fortran',), ('*.f03', '*.f90', '*.F03', '*.F90'), ('text/x-fortran',)), + 'FoxProLexer': ('typecode._vendor.pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()), + 'FreeFemLexer': ('typecode._vendor.pygments.lexers.freefem', 'Freefem', ('freefem',), ('*.edp',), ('text/x-freefem',)), + 'GAPLexer': ('typecode._vendor.pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()), + 'GDScriptLexer': ('typecode._vendor.pygments.lexers.gdscript', 'GDScript', ('gdscript', 'gd'), ('*.gd',), ('text/x-gdscript', 'application/x-gdscript')), + 'GLShaderLexer': ('typecode._vendor.pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)), + 'GasLexer': ('typecode._vendor.pygments.lexers.asm', 'GAS', ('gas', 'asm'), ('*.s', '*.S'), ('text/x-gas',)), + 'GenshiLexer': 
('typecode._vendor.pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')), + 'GenshiTextLexer': ('typecode._vendor.pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')), + 'GettextLexer': ('typecode._vendor.pygments.lexers.textfmts', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')), + 'GherkinLexer': ('typecode._vendor.pygments.lexers.testing', 'Gherkin', ('cucumber', 'gherkin'), ('*.feature',), ('text/x-gherkin',)), + 'GnuplotLexer': ('typecode._vendor.pygments.lexers.graphics', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)), + 'GoLexer': ('typecode._vendor.pygments.lexers.go', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)), + 'GoloLexer': ('typecode._vendor.pygments.lexers.jvm', 'Golo', ('golo',), ('*.golo',), ()), + 'GoodDataCLLexer': ('typecode._vendor.pygments.lexers.business', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)), + 'GosuLexer': ('typecode._vendor.pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)), + 'GosuTemplateLexer': ('typecode._vendor.pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)), + 'GroffLexer': ('typecode._vendor.pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')), + 'GroovyLexer': ('typecode._vendor.pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy', '*.gradle'), ('text/x-groovy',)), + 'HLSLShaderLexer': ('typecode._vendor.pygments.lexers.graphics', 'HLSL', ('hlsl',), ('*.hlsl', '*.hlsli'), ('text/x-hlsl',)), + 'HamlLexer': ('typecode._vendor.pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)), + 'HandlebarsHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), 
('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')), + 'HandlebarsLexer': ('typecode._vendor.pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()), + 'HaskellLexer': ('typecode._vendor.pygments.lexers.haskell', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)), + 'HaxeLexer': ('typecode._vendor.pygments.lexers.haxe', 'Haxe', ('hx', 'haxe', 'hxsl'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')), + 'HexdumpLexer': ('typecode._vendor.pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()), + 'HsailLexer': ('typecode._vendor.pygments.lexers.asm', 'HSAIL', ('hsail', 'hsa'), ('*.hsail',), ('text/x-hsail',)), + 'HspecLexer': ('typecode._vendor.pygments.lexers.haskell', 'Hspec', ('hspec',), (), ()), + 'HtmlDjangoLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), (), ('text/html+django', 'text/html+jinja')), + 'HtmlGenshiLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)), + 'HtmlLexer': ('typecode._vendor.pygments.lexers.html', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')), + 'HtmlPhpLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')), + 'HtmlSmartyLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)), + 'HttpLexer': ('typecode._vendor.pygments.lexers.textfmts', 'HTTP', ('http',), (), ()), + 'HxmlLexer': ('typecode._vendor.pygments.lexers.haxe', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()), + 'HyLexer': ('typecode._vendor.pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')), + 'HybrisLexer': 
('typecode._vendor.pygments.lexers.scripting', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')), + 'IDLLexer': ('typecode._vendor.pygments.lexers.idl', 'IDL', ('idl',), ('*.pro',), ('text/idl',)), + 'IconLexer': ('typecode._vendor.pygments.lexers.unicon', 'Icon', ('icon',), ('*.icon', '*.ICON'), ()), + 'IdrisLexer': ('typecode._vendor.pygments.lexers.haskell', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)), + 'IgorLexer': ('typecode._vendor.pygments.lexers.igor', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)), + 'Inform6Lexer': ('typecode._vendor.pygments.lexers.int_fiction', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()), + 'Inform6TemplateLexer': ('typecode._vendor.pygments.lexers.int_fiction', 'Inform 6 template', ('i6t',), ('*.i6t',), ()), + 'Inform7Lexer': ('typecode._vendor.pygments.lexers.int_fiction', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()), + 'IniLexer': ('typecode._vendor.pygments.lexers.configs', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg', '*.inf'), ('text/x-ini', 'text/inf')), + 'IoLexer': ('typecode._vendor.pygments.lexers.iolang', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)), + 'IokeLexer': ('typecode._vendor.pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)), + 'IrcLogsLexer': ('typecode._vendor.pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)), + 'IsabelleLexer': ('typecode._vendor.pygments.lexers.theorem', 'Isabelle', ('isabelle',), ('*.thy',), ('text/x-isabelle',)), + 'JLexer': ('typecode._vendor.pygments.lexers.j', 'J', ('j',), ('*.ijs',), ('text/x-j',)), + 'JagsLexer': ('typecode._vendor.pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()), + 'JasminLexer': ('typecode._vendor.pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()), + 'JavaLexer': ('typecode._vendor.pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)), + 'JavascriptDjangoLexer': 
('typecode._vendor.pygments.lexers.templates', 'JavaScript+Django/Jinja', ('js+django', 'javascript+django', 'js+jinja', 'javascript+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')), + 'JavascriptErbLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Ruby', ('js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')), + 'JavascriptGenshiLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')), + 'JavascriptLexer': ('typecode._vendor.pygments.lexers.javascript', 'JavaScript', ('js', 'javascript'), ('*.js', '*.jsm', '*.mjs'), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')), + 'JavascriptPhpLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')), + 'JavascriptSmartyLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')), + 'JclLexer': ('typecode._vendor.pygments.lexers.scripting', 'JCL', ('jcl',), ('*.jcl',), ('text/x-jcl',)), + 'JsgfLexer': ('typecode._vendor.pygments.lexers.grammar_notation', 'JSGF', ('jsgf',), ('*.jsgf',), ('application/jsgf', 'application/x-jsgf', 'text/jsgf')), + 'JsonBareObjectLexer': ('typecode._vendor.pygments.lexers.data', 'JSONBareObject', (), (), ()), + 'JsonLdLexer': ('typecode._vendor.pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), 
('application/ld+json',)), + 'JsonLexer': ('typecode._vendor.pygments.lexers.data', 'JSON', ('json', 'json-object'), ('*.json', 'Pipfile.lock'), ('application/json', 'application/json-object')), + 'JspLexer': ('typecode._vendor.pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)), + 'JuliaConsoleLexer': ('typecode._vendor.pygments.lexers.julia', 'Julia console', ('jlcon',), (), ()), + 'JuliaLexer': ('typecode._vendor.pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')), + 'JuttleLexer': ('typecode._vendor.pygments.lexers.javascript', 'Juttle', ('juttle',), ('*.juttle',), ('application/juttle', 'application/x-juttle', 'text/x-juttle', 'text/juttle')), + 'KalLexer': ('typecode._vendor.pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')), + 'KconfigLexer': ('typecode._vendor.pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig*', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)), + 'KernelLogLexer': ('typecode._vendor.pygments.lexers.textfmts', 'Kernel log', ('kmsg', 'dmesg'), ('*.kmsg', '*.dmesg'), ()), + 'KokaLexer': ('typecode._vendor.pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)), + 'KotlinLexer': ('typecode._vendor.pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt', '*.kts'), ('text/x-kotlin',)), + 'LSLLexer': ('typecode._vendor.pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)), + 'LassoCssLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)), + 'LassoHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')), + 'LassoJavascriptLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Lasso', ('js+lasso', 
'javascript+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')), + 'LassoLexer': ('typecode._vendor.pygments.lexers.javascript', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)), + 'LassoXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)), + 'LeanLexer': ('typecode._vendor.pygments.lexers.theorem', 'Lean', ('lean',), ('*.lean',), ('text/x-lean',)), + 'LessCssLexer': ('typecode._vendor.pygments.lexers.css', 'LessCss', ('less',), ('*.less',), ('text/x-less-css',)), + 'LighttpdConfLexer': ('typecode._vendor.pygments.lexers.configs', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)), + 'LimboLexer': ('typecode._vendor.pygments.lexers.inferno', 'Limbo', ('limbo',), ('*.b',), ('text/limbo',)), + 'LiquidLexer': ('typecode._vendor.pygments.lexers.templates', 'liquid', ('liquid',), ('*.liquid',), ()), + 'LiterateAgdaLexer': ('typecode._vendor.pygments.lexers.haskell', 'Literate Agda', ('lagda', 'literate-agda'), ('*.lagda',), ('text/x-literate-agda',)), + 'LiterateCryptolLexer': ('typecode._vendor.pygments.lexers.haskell', 'Literate Cryptol', ('lcry', 'literate-cryptol', 'lcryptol'), ('*.lcry',), ('text/x-literate-cryptol',)), + 'LiterateHaskellLexer': ('typecode._vendor.pygments.lexers.haskell', 'Literate Haskell', ('lhs', 'literate-haskell', 'lhaskell'), ('*.lhs',), ('text/x-literate-haskell',)), + 'LiterateIdrisLexer': ('typecode._vendor.pygments.lexers.haskell', 'Literate Idris', ('lidr', 'literate-idris', 'lidris'), ('*.lidr',), ('text/x-literate-idris',)), + 'LiveScriptLexer': ('typecode._vendor.pygments.lexers.javascript', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)), + 'LlvmLexer': ('typecode._vendor.pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)), + 'LlvmMirBodyLexer': ('typecode._vendor.pygments.lexers.asm', 'LLVM-MIR Body', 
('llvm-mir-body',), (), ()), + 'LlvmMirLexer': ('typecode._vendor.pygments.lexers.asm', 'LLVM-MIR', ('llvm-mir',), ('*.mir',), ()), + 'LogosLexer': ('typecode._vendor.pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)), + 'LogtalkLexer': ('typecode._vendor.pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)), + 'LuaLexer': ('typecode._vendor.pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')), + 'MIMELexer': ('typecode._vendor.pygments.lexers.mime', 'MIME', ('mime',), (), ('multipart/mixed', 'multipart/related', 'multipart/alternative')), + 'MOOCodeLexer': ('typecode._vendor.pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)), + 'MSDOSSessionLexer': ('typecode._vendor.pygments.lexers.shell', 'MSDOS Session', ('doscon',), (), ()), + 'MakefileLexer': ('typecode._vendor.pygments.lexers.make', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)), + 'MakoCssLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)), + 'MakoHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)), + 'MakoJavascriptLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Mako', ('js+mako', 'javascript+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')), + 'MakoLexer': ('typecode._vendor.pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)), + 'MakoXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)), + 'MaqlLexer': ('typecode._vendor.pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')), + 'MarkdownLexer': 
('typecode._vendor.pygments.lexers.markup', 'markdown', ('md',), ('*.md', '*.markdown'), ('text/x-markdown',)), + 'MaskLexer': ('typecode._vendor.pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)), + 'MasonLexer': ('typecode._vendor.pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)), + 'MathematicaLexer': ('typecode._vendor.pygments.lexers.algebra', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')), + 'MatlabLexer': ('typecode._vendor.pygments.lexers.matlab', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)), + 'MatlabSessionLexer': ('typecode._vendor.pygments.lexers.matlab', 'Matlab session', ('matlabsession',), (), ()), + 'MiniDLexer': ('typecode._vendor.pygments.lexers.d', 'MiniD', ('minid',), (), ('text/x-minidsrc',)), + 'MiniScriptLexer': ('typecode._vendor.pygments.lexers.scripting', 'MiniScript', ('ms', 'miniscript'), ('*.ms',), ('text/x-minicript', 'application/x-miniscript')), + 'ModelicaLexer': ('typecode._vendor.pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)), + 'Modula2Lexer': ('typecode._vendor.pygments.lexers.modula2', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)), + 'MoinWikiLexer': ('typecode._vendor.pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)), + 'MonkeyLexer': ('typecode._vendor.pygments.lexers.basic', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)), + 'MonteLexer': ('typecode._vendor.pygments.lexers.monte', 'Monte', ('monte',), ('*.mt',), ()), + 'MoonScriptLexer': ('typecode._vendor.pygments.lexers.scripting', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')), + 'MoselLexer': 
('typecode._vendor.pygments.lexers.mosel', 'Mosel', ('mosel',), ('*.mos',), ()), + 'MozPreprocCssLexer': ('typecode._vendor.pygments.lexers.markup', 'CSS+mozpreproc', ('css+mozpreproc',), ('*.css.in',), ()), + 'MozPreprocHashLexer': ('typecode._vendor.pygments.lexers.markup', 'mozhashpreproc', ('mozhashpreproc',), (), ()), + 'MozPreprocJavascriptLexer': ('typecode._vendor.pygments.lexers.markup', 'Javascript+mozpreproc', ('javascript+mozpreproc',), ('*.js.in',), ()), + 'MozPreprocPercentLexer': ('typecode._vendor.pygments.lexers.markup', 'mozpercentpreproc', ('mozpercentpreproc',), (), ()), + 'MozPreprocXulLexer': ('typecode._vendor.pygments.lexers.markup', 'XUL+mozpreproc', ('xul+mozpreproc',), ('*.xul.in',), ()), + 'MqlLexer': ('typecode._vendor.pygments.lexers.c_like', 'MQL', ('mql', 'mq4', 'mq5', 'mql4', 'mql5'), ('*.mq4', '*.mq5', '*.mqh'), ('text/x-mql',)), + 'MscgenLexer': ('typecode._vendor.pygments.lexers.dsls', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()), + 'MuPADLexer': ('typecode._vendor.pygments.lexers.algebra', 'MuPAD', ('mupad',), ('*.mu',), ()), + 'MxmlLexer': ('typecode._vendor.pygments.lexers.actionscript', 'MXML', ('mxml',), ('*.mxml',), ()), + 'MySqlLexer': ('typecode._vendor.pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)), + 'MyghtyCssLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)), + 'MyghtyHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), ('text/html+myghty',)), + 'MyghtyJavascriptLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Myghty', ('js+myghty', 'javascript+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')), + 'MyghtyLexer': ('typecode._vendor.pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)), + 'MyghtyXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Myghty', 
('xml+myghty',), (), ('application/xml+myghty',)), + 'NCLLexer': ('typecode._vendor.pygments.lexers.ncl', 'NCL', ('ncl',), ('*.ncl',), ('text/ncl',)), + 'NSISLexer': ('typecode._vendor.pygments.lexers.installers', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)), + 'NasmLexer': ('typecode._vendor.pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM'), ('text/x-nasm',)), + 'NasmObjdumpLexer': ('typecode._vendor.pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)), + 'NemerleLexer': ('typecode._vendor.pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)), + 'NesCLexer': ('typecode._vendor.pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)), + 'NewLispLexer': ('typecode._vendor.pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl', '*.kif'), ('text/x-newlisp', 'application/x-newlisp')), + 'NewspeakLexer': ('typecode._vendor.pygments.lexers.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)), + 'NginxConfLexer': ('typecode._vendor.pygments.lexers.configs', 'Nginx configuration file', ('nginx',), ('nginx.conf',), ('text/x-nginx-conf',)), + 'NimrodLexer': ('typecode._vendor.pygments.lexers.nimrod', 'Nimrod', ('nim', 'nimrod'), ('*.nim', '*.nimrod'), ('text/x-nim',)), + 'NitLexer': ('typecode._vendor.pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()), + 'NixLexer': ('typecode._vendor.pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)), + 'NotmuchLexer': ('typecode._vendor.pygments.lexers.textfmts', 'Notmuch', ('notmuch',), (), ()), + 'NuSMVLexer': ('typecode._vendor.pygments.lexers.smv', 'NuSMV', ('nusmv',), ('*.smv',), ()), + 'NumPyLexer': ('typecode._vendor.pygments.lexers.python', 'NumPy', ('numpy',), (), ()), + 'ObjdumpLexer': ('typecode._vendor.pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)), + 'ObjectiveCLexer': 
('typecode._vendor.pygments.lexers.objective', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)), + 'ObjectiveCppLexer': ('typecode._vendor.pygments.lexers.objective', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)), + 'ObjectiveJLexer': ('typecode._vendor.pygments.lexers.javascript', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)), + 'OcamlLexer': ('typecode._vendor.pygments.lexers.ml', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)), + 'OctaveLexer': ('typecode._vendor.pygments.lexers.matlab', 'Octave', ('octave',), ('*.m',), ('text/octave',)), + 'OdinLexer': ('typecode._vendor.pygments.lexers.archetype', 'ODIN', ('odin',), ('*.odin',), ('text/odin',)), + 'OocLexer': ('typecode._vendor.pygments.lexers.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)), + 'OpaLexer': ('typecode._vendor.pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)), + 'OpenEdgeLexer': ('typecode._vendor.pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')), + 'PacmanConfLexer': ('typecode._vendor.pygments.lexers.configs', 'PacmanConf', ('pacmanconf',), ('pacman.conf',), ()), + 'PanLexer': ('typecode._vendor.pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()), + 'ParaSailLexer': ('typecode._vendor.pygments.lexers.parasail', 'ParaSail', ('parasail',), ('*.psi', '*.psl'), ('text/x-parasail',)), + 'PawnLexer': ('typecode._vendor.pygments.lexers.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)), + 'PegLexer': ('typecode._vendor.pygments.lexers.grammar_notation', 'PEG', ('peg',), ('*.peg',), ('text/x-peg',)), + 'Perl6Lexer': ('typecode._vendor.pygments.lexers.perl', 'Perl6', ('perl6', 'pl6', 'raku'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', 
'*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod', '*.rakutest', '*.rakudoc'), ('text/x-perl6', 'application/x-perl6')), + 'PerlLexer': ('typecode._vendor.pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t', '*.perl'), ('text/x-perl', 'application/x-perl')), + 'PhpLexer': ('typecode._vendor.pygments.lexers.php', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)), + 'PigLexer': ('typecode._vendor.pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)), + 'PikeLexer': ('typecode._vendor.pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)), + 'PkgConfigLexer': ('typecode._vendor.pygments.lexers.configs', 'PkgConfig', ('pkgconfig',), ('*.pc',), ()), + 'PlPgsqlLexer': ('typecode._vendor.pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)), + 'PointlessLexer': ('typecode._vendor.pygments.lexers.pointless', 'Pointless', ('pointless',), ('*.ptls',), ()), + 'PonyLexer': ('typecode._vendor.pygments.lexers.pony', 'Pony', ('pony',), ('*.pony',), ()), + 'PostScriptLexer': ('typecode._vendor.pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)), + 'PostgresConsoleLexer': ('typecode._vendor.pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)), + 'PostgresLexer': ('typecode._vendor.pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)), + 'PovrayLexer': ('typecode._vendor.pygments.lexers.graphics', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)), + 'PowerShellLexer': ('typecode._vendor.pygments.lexers.shell', 'PowerShell', ('powershell', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)), + 'PowerShellSessionLexer': ('typecode._vendor.pygments.lexers.shell', 'PowerShell Session', ('ps1con',), (), ()), + 'PraatLexer': 
('typecode._vendor.pygments.lexers.praat', 'Praat', ('praat',), ('*.praat', '*.proc', '*.psc'), ()), + 'PrologLexer': ('typecode._vendor.pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)), + 'PromQLLexer': ('typecode._vendor.pygments.lexers.promql', 'PromQL', ('promql',), ('*.promql',), ()), + 'PropertiesLexer': ('typecode._vendor.pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)), + 'ProtoBufLexer': ('typecode._vendor.pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()), + 'PsyshConsoleLexer': ('typecode._vendor.pygments.lexers.php', 'PsySH console session for PHP', ('psysh',), (), ()), + 'PugLexer': ('typecode._vendor.pygments.lexers.html', 'Pug', ('pug', 'jade'), ('*.pug', '*.jade'), ('text/x-pug', 'text/x-jade')), + 'PuppetLexer': ('typecode._vendor.pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()), + 'PyPyLogLexer': ('typecode._vendor.pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)), + 'Python2Lexer': ('typecode._vendor.pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 'application/x-python2')), + 'Python2TracebackLexer': ('typecode._vendor.pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)), + 'PythonConsoleLexer': ('typecode._vendor.pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)), + 'PythonLexer': ('typecode._vendor.pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3'), ('*.py', '*.pyw', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')), + 'PythonTracebackLexer': ('typecode._vendor.pygments.lexers.python', 'Python Traceback', ('pytb', 
'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')), + 'QBasicLexer': ('typecode._vendor.pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)), + 'QVToLexer': ('typecode._vendor.pygments.lexers.qvt', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()), + 'QmlLexer': ('typecode._vendor.pygments.lexers.webmisc', 'QML', ('qml', 'qbs'), ('*.qml', '*.qbs'), ('application/x-qml', 'application/x-qt.qbs+qml')), + 'RConsoleLexer': ('typecode._vendor.pygments.lexers.r', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()), + 'RNCCompactLexer': ('typecode._vendor.pygments.lexers.rnc', 'Relax-NG Compact', ('rnc', 'rng-compact'), ('*.rnc',), ()), + 'RPMSpecLexer': ('typecode._vendor.pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)), + 'RacketLexer': ('typecode._vendor.pygments.lexers.lisp', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')), + 'RagelCLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()), + 'RagelCppLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel in CPP Host', ('ragel-cpp',), ('*.rl',), ()), + 'RagelDLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel in D Host', ('ragel-d',), ('*.rl',), ()), + 'RagelEmbeddedLexer': ('typecode._vendor.pygments.lexers.parsers', 'Embedded Ragel', ('ragel-em',), ('*.rl',), ()), + 'RagelJavaLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel in Java Host', ('ragel-java',), ('*.rl',), ()), + 'RagelLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel', ('ragel',), (), ()), + 'RagelObjectiveCLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel in Objective C Host', ('ragel-objc',), ('*.rl',), ()), + 'RagelRubyLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel in Ruby Host', ('ragel-ruby', 'ragel-rb'), ('*.rl',), ()), + 'RawTokenLexer': ('typecode._vendor.pygments.lexers.special', 'Raw token 
data', ('raw',), (), ('application/x-pygments-tokens',)), + 'RdLexer': ('typecode._vendor.pygments.lexers.r', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)), + 'ReasonLexer': ('typecode._vendor.pygments.lexers.ml', 'ReasonML', ('reason', 'reasonml'), ('*.re', '*.rei'), ('text/x-reasonml',)), + 'RebolLexer': ('typecode._vendor.pygments.lexers.rebol', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)), + 'RedLexer': ('typecode._vendor.pygments.lexers.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')), + 'RedcodeLexer': ('typecode._vendor.pygments.lexers.esoteric', 'Redcode', ('redcode',), ('*.cw',), ()), + 'RegeditLexer': ('typecode._vendor.pygments.lexers.configs', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)), + 'ResourceLexer': ('typecode._vendor.pygments.lexers.resource', 'ResourceBundle', ('resource', 'resourcebundle'), (), ()), + 'RexxLexer': ('typecode._vendor.pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)), + 'RhtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)), + 'RideLexer': ('typecode._vendor.pygments.lexers.ride', 'Ride', ('ride',), ('*.ride',), ('text/x-ride',)), + 'RoboconfGraphLexer': ('typecode._vendor.pygments.lexers.roboconf', 'Roboconf Graph', ('roboconf-graph',), ('*.graph',), ()), + 'RoboconfInstancesLexer': ('typecode._vendor.pygments.lexers.roboconf', 'Roboconf Instances', ('roboconf-instances',), ('*.instances',), ()), + 'RobotFrameworkLexer': ('typecode._vendor.pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.robot',), ('text/x-robotframework',)), + 'RqlLexer': ('typecode._vendor.pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)), + 'RslLexer': ('typecode._vendor.pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)), + 'RstLexer': 
('typecode._vendor.pygments.lexers.markup', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')), + 'RtsLexer': ('typecode._vendor.pygments.lexers.trafficscript', 'TrafficScript', ('rts', 'trafficscript'), ('*.rts',), ()), + 'RubyConsoleLexer': ('typecode._vendor.pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)), + 'RubyLexer': ('typecode._vendor.pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile'), ('text/x-ruby', 'application/x-ruby')), + 'RustLexer': ('typecode._vendor.pygments.lexers.rust', 'Rust', ('rust', 'rs'), ('*.rs', '*.rs.in'), ('text/rust', 'text/x-rust')), + 'SASLexer': ('typecode._vendor.pygments.lexers.sas', 'SAS', ('sas',), ('*.SAS', '*.sas'), ('text/x-sas', 'text/sas', 'application/x-sas')), + 'SLexer': ('typecode._vendor.pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')), + 'SMLLexer': ('typecode._vendor.pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')), + 'SarlLexer': ('typecode._vendor.pygments.lexers.jvm', 'SARL', ('sarl',), ('*.sarl',), ('text/x-sarl',)), + 'SassLexer': ('typecode._vendor.pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)), + 'ScalaLexer': ('typecode._vendor.pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)), + 'ScamlLexer': ('typecode._vendor.pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)), + 'ScdocLexer': ('typecode._vendor.pygments.lexers.scdoc', 'scdoc', ('scdoc', 'scd'), ('*.scd', '*.scdoc'), ()), + 'SchemeLexer': ('typecode._vendor.pygments.lexers.lisp', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 
'application/x-scheme')), + 'ScilabLexer': ('typecode._vendor.pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)), + 'ScssLexer': ('typecode._vendor.pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)), + 'ShExCLexer': ('typecode._vendor.pygments.lexers.rdf', 'ShExC', ('shexc', 'shex'), ('*.shex',), ('text/shex',)), + 'ShenLexer': ('typecode._vendor.pygments.lexers.lisp', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')), + 'SieveLexer': ('typecode._vendor.pygments.lexers.sieve', 'Sieve', ('sieve',), ('*.siv', '*.sieve'), ()), + 'SilverLexer': ('typecode._vendor.pygments.lexers.verification', 'Silver', ('silver',), ('*.sil', '*.vpr'), ()), + 'SingularityLexer': ('typecode._vendor.pygments.lexers.configs', 'Singularity', ('singularity',), ('*.def', 'Singularity'), ()), + 'SlashLexer': ('typecode._vendor.pygments.lexers.slash', 'Slash', ('slash',), ('*.sla',), ()), + 'SlimLexer': ('typecode._vendor.pygments.lexers.webmisc', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)), + 'SlurmBashLexer': ('typecode._vendor.pygments.lexers.shell', 'Slurm', ('slurm', 'sbatch'), ('*.sl',), ()), + 'SmaliLexer': ('typecode._vendor.pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)), + 'SmalltalkLexer': ('typecode._vendor.pygments.lexers.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)), + 'SmartGameFormatLexer': ('typecode._vendor.pygments.lexers.sgf', 'SmartGameFormat', ('sgf',), ('*.sgf',), ()), + 'SmartyLexer': ('typecode._vendor.pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)), + 'SnobolLexer': ('typecode._vendor.pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)), + 'SnowballLexer': ('typecode._vendor.pygments.lexers.dsls', 'Snowball', ('snowball',), ('*.sbl',), ()), + 'SolidityLexer': ('typecode._vendor.pygments.lexers.solidity', 'Solidity', ('solidity',), 
('*.sol',), ()), + 'SourcePawnLexer': ('typecode._vendor.pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)), + 'SourcesListLexer': ('typecode._vendor.pygments.lexers.installers', 'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()), + 'SparqlLexer': ('typecode._vendor.pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)), + 'SqlLexer': ('typecode._vendor.pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)), + 'SqliteConsoleLexer': ('typecode._vendor.pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)), + 'SquidConfLexer': ('typecode._vendor.pygments.lexers.configs', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)), + 'SspLexer': ('typecode._vendor.pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)), + 'StanLexer': ('typecode._vendor.pygments.lexers.modeling', 'Stan', ('stan',), ('*.stan',), ()), + 'StataLexer': ('typecode._vendor.pygments.lexers.stata', 'Stata', ('stata', 'do'), ('*.do', '*.ado'), ('text/x-stata', 'text/stata', 'application/x-stata')), + 'SuperColliderLexer': ('typecode._vendor.pygments.lexers.supercollider', 'SuperCollider', ('sc', 'supercollider'), ('*.sc', '*.scd'), ('application/supercollider', 'text/supercollider')), + 'SwiftLexer': ('typecode._vendor.pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)), + 'SwigLexer': ('typecode._vendor.pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)), + 'SystemVerilogLexer': ('typecode._vendor.pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)), + 'TAPLexer': ('typecode._vendor.pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()), + 'TNTLexer': ('typecode._vendor.pygments.lexers.tnt', 'Typographic Number Theory', ('tnt',), 
('*.tnt',), ()), + 'TOMLLexer': ('typecode._vendor.pygments.lexers.configs', 'TOML', ('toml',), ('*.toml', 'Pipfile', 'poetry.lock'), ()), + 'Tads3Lexer': ('typecode._vendor.pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()), + 'TasmLexer': ('typecode._vendor.pygments.lexers.asm', 'TASM', ('tasm',), ('*.asm', '*.ASM', '*.tasm'), ('text/x-tasm',)), + 'TclLexer': ('typecode._vendor.pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')), + 'TcshLexer': ('typecode._vendor.pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)), + 'TcshSessionLexer': ('typecode._vendor.pygments.lexers.shell', 'Tcsh Session', ('tcshcon',), (), ()), + 'TeaTemplateLexer': ('typecode._vendor.pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)), + 'TeraTermLexer': ('typecode._vendor.pygments.lexers.teraterm', 'Tera Term macro', ('ttl', 'teraterm', 'teratermmacro'), ('*.ttl',), ('text/x-teratermmacro',)), + 'TermcapLexer': ('typecode._vendor.pygments.lexers.configs', 'Termcap', ('termcap',), ('termcap', 'termcap.src'), ()), + 'TerminfoLexer': ('typecode._vendor.pygments.lexers.configs', 'Terminfo', ('terminfo',), ('terminfo', 'terminfo.src'), ()), + 'TerraformLexer': ('typecode._vendor.pygments.lexers.configs', 'Terraform', ('terraform', 'tf'), ('*.tf',), ('application/x-tf', 'application/x-terraform')), + 'TexLexer': ('typecode._vendor.pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')), + 'TextLexer': ('typecode._vendor.pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)), + 'ThriftLexer': ('typecode._vendor.pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)), + 'TiddlyWiki5Lexer': ('typecode._vendor.pygments.lexers.markup', 'tiddler', ('tid',), ('*.tid',), ('text/vnd.tiddlywiki',)), + 'TodotxtLexer': 
('typecode._vendor.pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)), + 'TransactSqlLexer': ('typecode._vendor.pygments.lexers.sql', 'Transact-SQL', ('tsql', 't-sql'), ('*.sql',), ('text/x-tsql',)), + 'TreetopLexer': ('typecode._vendor.pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()), + 'TurtleLexer': ('typecode._vendor.pygments.lexers.rdf', 'Turtle', ('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')), + 'TwigHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)), + 'TwigLexer': ('typecode._vendor.pygments.lexers.templates', 'Twig', ('twig',), (), ('application/x-twig',)), + 'TypeScriptLexer': ('typecode._vendor.pygments.lexers.javascript', 'TypeScript', ('ts', 'typescript'), ('*.ts', '*.tsx'), ('text/x-typescript',)), + 'TypoScriptCssDataLexer': ('typecode._vendor.pygments.lexers.typoscript', 'TypoScriptCssData', ('typoscriptcssdata',), (), ()), + 'TypoScriptHtmlDataLexer': ('typecode._vendor.pygments.lexers.typoscript', 'TypoScriptHtmlData', ('typoscripthtmldata',), (), ()), + 'TypoScriptLexer': ('typecode._vendor.pygments.lexers.typoscript', 'TypoScript', ('typoscript',), ('*.typoscript',), ('text/x-typoscript',)), + 'UcodeLexer': ('typecode._vendor.pygments.lexers.unicon', 'ucode', ('ucode',), ('*.u', '*.u1', '*.u2'), ()), + 'UniconLexer': ('typecode._vendor.pygments.lexers.unicon', 'Unicon', ('unicon',), ('*.icn',), ('text/unicon',)), + 'UrbiscriptLexer': ('typecode._vendor.pygments.lexers.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)), + 'UsdLexer': ('typecode._vendor.pygments.lexers.usd', 'USD', ('usd', 'usda'), ('*.usd', '*.usda'), ()), + 'VBScriptLexer': ('typecode._vendor.pygments.lexers.basic', 'VBScript', ('vbscript',), ('*.vbs', '*.VBS'), ()), + 'VCLLexer': ('typecode._vendor.pygments.lexers.varnish', 'VCL', ('vcl',), ('*.vcl',), ('text/x-vclsrc',)), + 
'VCLSnippetLexer': ('typecode._vendor.pygments.lexers.varnish', 'VCLSnippets', ('vclsnippets', 'vclsnippet'), (), ('text/x-vclsnippet',)), + 'VCTreeStatusLexer': ('typecode._vendor.pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()), + 'VGLLexer': ('typecode._vendor.pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()), + 'ValaLexer': ('typecode._vendor.pygments.lexers.c_like', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)), + 'VbNetAspxLexer': ('typecode._vendor.pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()), + 'VbNetLexer': ('typecode._vendor.pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')), + 'VelocityHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)), + 'VelocityLexer': ('typecode._vendor.pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()), + 'VelocityXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)), + 'VerilogLexer': ('typecode._vendor.pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)), + 'VhdlLexer': ('typecode._vendor.pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)), + 'VimLexer': ('typecode._vendor.pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)), + 'WDiffLexer': ('typecode._vendor.pygments.lexers.diff', 'WDiff', ('wdiff',), ('*.wdiff',), ()), + 'WebIDLLexer': ('typecode._vendor.pygments.lexers.webidl', 'Web IDL', ('webidl',), ('*.webidl',), ()), + 'WhileyLexer': ('typecode._vendor.pygments.lexers.whiley', 'Whiley', ('whiley',), ('*.whiley',), ('text/x-whiley',)), + 'X10Lexer': ('typecode._vendor.pygments.lexers.x10', 'X10', ('x10', 'xten'), ('*.x10',), 
('text/x-x10',)), + 'XQueryLexer': ('typecode._vendor.pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')), + 'XmlDjangoLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')), + 'XmlErbLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Ruby', ('xml+erb', 'xml+ruby'), (), ('application/xml+ruby',)), + 'XmlLexer': ('typecode._vendor.pygments.lexers.html', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')), + 'XmlPhpLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)), + 'XmlSmartyLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)), + 'XorgLexer': ('typecode._vendor.pygments.lexers.xorg', 'Xorg', ('xorg.conf',), ('xorg.conf',), ()), + 'XsltLexer': ('typecode._vendor.pygments.lexers.html', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')), + 'XtendLexer': ('typecode._vendor.pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)), + 'XtlangLexer': ('typecode._vendor.pygments.lexers.lisp', 'xtlang', ('extempore',), ('*.xtm',), ()), + 'YamlJinjaLexer': ('typecode._vendor.pygments.lexers.templates', 'YAML+Jinja', ('yaml+jinja', 'salt', 'sls'), ('*.sls',), ('text/x-yaml+jinja', 'text/x-sls')), + 'YamlLexer': ('typecode._vendor.pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)), + 'YangLexer': ('typecode._vendor.pygments.lexers.yang', 'YANG', ('yang',), ('*.yang',), ('application/yang',)), + 'ZeekLexer': ('typecode._vendor.pygments.lexers.dsls', 'Zeek', ('zeek', 'bro'), ('*.zeek', '*.bro'), ()), + 
'ZephirLexer': ('typecode._vendor.pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()), + 'ZigLexer': ('typecode._vendor.pygments.lexers.zig', 'Zig', ('zig',), ('*.zig',), ('text/zig',)), } if __name__ == '__main__': # pragma: no cover @@ -484,7 +513,7 @@ for root, dirs, files in os.walk('.'): for filename in files: if filename.endswith('.py') and not filename.startswith('_'): - module_name = 'pygments.lexers%s.%s' % ( + module_name = 'typecode._vendor.pygments.lexers%s.%s' % ( root[1:].replace('/', '.'), filename[:-3]) print(module_name) module = __import__(module_name, None, None, ['']) diff --git a/src/typecode/_vendor/pygments/lexers/_mql_builtins.py b/src/typecode/_vendor/pygments/lexers/_mql_builtins.py index 8c80379..997cdc2 100644 --- a/src/typecode/_vendor/pygments/lexers/_mql_builtins.py +++ b/src/typecode/_vendor/pygments/lexers/_mql_builtins.py @@ -5,7 +5,7 @@ Builtins for the MqlLexer. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ types = ( @@ -884,7 +884,7 @@ 'PERIOD_W1', 'POINTER_AUTOMATIC', 'POINTER_DYNAMIC', - 'POINTER_INVALID' + 'POINTER_INVALID', 'PRICE_CLOSE', 'PRICE_HIGH', 'PRICE_LOW', diff --git a/src/typecode/_vendor/pygments/lexers/_mysql_builtins.py b/src/typecode/_vendor/pygments/lexers/_mysql_builtins.py new file mode 100644 index 0000000..40a1ceb --- /dev/null +++ b/src/typecode/_vendor/pygments/lexers/_mysql_builtins.py @@ -0,0 +1,1282 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers._mysql_builtins + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Self-updating data files for the MySQL lexer. + + :copyright: Copyright 2020 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + + +MYSQL_CONSTANTS = ( + 'false', + 'null', + 'true', + 'unknown', +) + + +# At this time, no easily-parsed, definitive list of data types +# has been found in the MySQL source code or documentation. 
(The +# `sql/sql_yacc.yy` file is definitive but is difficult to parse.) +# Therefore these types are currently maintained manually. +# +# Some words in this list -- like "long", "national", "precision", +# and "varying" -- appear to only occur in combination with other +# data type keywords. Therefore they are included as separate words +# even though they do not naturally occur in syntax separately. +# +# This list is also used to strip data types out of the list of +# MySQL keywords, which is automatically updated later in the file. +# +MYSQL_DATATYPES = ( + # Numeric data types + 'bigint', + 'bit', + 'bool', + 'boolean', + 'dec', + 'decimal', + 'double', + 'fixed', + 'float', + 'float4', + 'float8', + 'int', + 'int1', + 'int2', + 'int3', + 'int4', + 'int8', + 'integer', + 'mediumint', + 'middleint', + 'numeric', + 'precision', + 'real', + 'serial', + 'smallint', + 'tinyint', + + # Date and time data types + 'date', + 'datetime', + 'time', + 'timestamp', + 'year', + + # String data types + 'binary', + 'blob', + 'char', + 'enum', + 'long', + 'longblob', + 'longtext', + 'mediumblob', + 'mediumtext', + 'national', + 'nchar', + 'nvarchar', + 'set', + 'text', + 'tinyblob', + 'tinytext', + 'varbinary', + 'varchar', + 'varcharacter', + 'varying', + + # Spatial data types + 'geometry', + 'geometrycollection', + 'linestring', + 'multilinestring', + 'multipoint', + 'multipolygon', + 'point', + 'polygon', + + # JSON data types + 'json', +) + +# Everything below this line is auto-generated from the MySQL source code. +# Run this file in Python and it will update itself. 
+# ----------------------------------------------------------------------------- + +MYSQL_FUNCTIONS = ( + 'abs', + 'acos', + 'adddate', + 'addtime', + 'aes_decrypt', + 'aes_encrypt', + 'any_value', + 'asin', + 'atan', + 'atan2', + 'benchmark', + 'bin', + 'bin_to_uuid', + 'bit_and', + 'bit_count', + 'bit_length', + 'bit_or', + 'bit_xor', + 'can_access_column', + 'can_access_database', + 'can_access_event', + 'can_access_resource_group', + 'can_access_routine', + 'can_access_table', + 'can_access_trigger', + 'can_access_view', + 'cast', + 'ceil', + 'ceiling', + 'char_length', + 'character_length', + 'coercibility', + 'compress', + 'concat', + 'concat_ws', + 'connection_id', + 'conv', + 'convert_cpu_id_mask', + 'convert_interval_to_user_interval', + 'convert_tz', + 'cos', + 'cot', + 'count', + 'crc32', + 'curdate', + 'current_role', + 'curtime', + 'date_add', + 'date_format', + 'date_sub', + 'datediff', + 'dayname', + 'dayofmonth', + 'dayofweek', + 'dayofyear', + 'degrees', + 'elt', + 'exp', + 'export_set', + 'extract', + 'extractvalue', + 'field', + 'find_in_set', + 'floor', + 'format_bytes', + 'format_pico_time', + 'found_rows', + 'from_base64', + 'from_days', + 'from_unixtime', + 'get_dd_column_privileges', + 'get_dd_create_options', + 'get_dd_index_private_data', + 'get_dd_index_sub_part_length', + 'get_dd_property_key_value', + 'get_dd_tablespace_private_data', + 'get_lock', + 'greatest', + 'group_concat', + 'gtid_subset', + 'gtid_subtract', + 'hex', + 'icu_version', + 'ifnull', + 'inet6_aton', + 'inet6_ntoa', + 'inet_aton', + 'inet_ntoa', + 'instr', + 'internal_auto_increment', + 'internal_avg_row_length', + 'internal_check_time', + 'internal_checksum', + 'internal_data_free', + 'internal_data_length', + 'internal_dd_char_length', + 'internal_get_comment_or_error', + 'internal_get_dd_column_extra', + 'internal_get_enabled_role_json', + 'internal_get_hostname', + 'internal_get_mandatory_roles_json', + 'internal_get_partition_nodegroup', + 'internal_get_username', 
+ 'internal_get_view_warning_or_error', + 'internal_index_column_cardinality', + 'internal_index_length', + 'internal_is_enabled_role', + 'internal_is_mandatory_role', + 'internal_keys_disabled', + 'internal_max_data_length', + 'internal_table_rows', + 'internal_tablespace_autoextend_size', + 'internal_tablespace_data_free', + 'internal_tablespace_extent_size', + 'internal_tablespace_extra', + 'internal_tablespace_free_extents', + 'internal_tablespace_id', + 'internal_tablespace_initial_size', + 'internal_tablespace_logfile_group_name', + 'internal_tablespace_logfile_group_number', + 'internal_tablespace_maximum_size', + 'internal_tablespace_row_format', + 'internal_tablespace_status', + 'internal_tablespace_total_extents', + 'internal_tablespace_type', + 'internal_tablespace_version', + 'internal_update_time', + 'is_free_lock', + 'is_ipv4', + 'is_ipv4_compat', + 'is_ipv4_mapped', + 'is_ipv6', + 'is_used_lock', + 'is_uuid', + 'is_visible_dd_object', + 'isnull', + 'json_array', + 'json_array_append', + 'json_array_insert', + 'json_arrayagg', + 'json_contains', + 'json_contains_path', + 'json_depth', + 'json_extract', + 'json_insert', + 'json_keys', + 'json_length', + 'json_merge', + 'json_merge_patch', + 'json_merge_preserve', + 'json_object', + 'json_objectagg', + 'json_overlaps', + 'json_pretty', + 'json_quote', + 'json_remove', + 'json_replace', + 'json_schema_valid', + 'json_schema_validation_report', + 'json_search', + 'json_set', + 'json_storage_free', + 'json_storage_size', + 'json_type', + 'json_unquote', + 'json_valid', + 'last_day', + 'last_insert_id', + 'lcase', + 'least', + 'length', + 'like_range_max', + 'like_range_min', + 'ln', + 'load_file', + 'locate', + 'log', + 'log10', + 'log2', + 'lower', + 'lpad', + 'ltrim', + 'make_set', + 'makedate', + 'maketime', + 'master_pos_wait', + 'max', + 'mbrcontains', + 'mbrcoveredby', + 'mbrcovers', + 'mbrdisjoint', + 'mbrequals', + 'mbrintersects', + 'mbroverlaps', + 'mbrtouches', + 'mbrwithin', + 'md5', + 'mid', + 
'min', + 'monthname', + 'name_const', + 'now', + 'nullif', + 'oct', + 'octet_length', + 'ord', + 'period_add', + 'period_diff', + 'pi', + 'position', + 'pow', + 'power', + 'ps_current_thread_id', + 'ps_thread_id', + 'quote', + 'radians', + 'rand', + 'random_bytes', + 'regexp_instr', + 'regexp_like', + 'regexp_replace', + 'regexp_substr', + 'release_all_locks', + 'release_lock', + 'remove_dd_property_key', + 'reverse', + 'roles_graphml', + 'round', + 'rpad', + 'rtrim', + 'sec_to_time', + 'session_user', + 'sha', + 'sha1', + 'sha2', + 'sign', + 'sin', + 'sleep', + 'soundex', + 'space', + 'sqrt', + 'st_area', + 'st_asbinary', + 'st_asgeojson', + 'st_astext', + 'st_aswkb', + 'st_aswkt', + 'st_buffer', + 'st_buffer_strategy', + 'st_centroid', + 'st_contains', + 'st_convexhull', + 'st_crosses', + 'st_difference', + 'st_dimension', + 'st_disjoint', + 'st_distance', + 'st_distance_sphere', + 'st_endpoint', + 'st_envelope', + 'st_equals', + 'st_exteriorring', + 'st_geohash', + 'st_geomcollfromtext', + 'st_geomcollfromtxt', + 'st_geomcollfromwkb', + 'st_geometrycollectionfromtext', + 'st_geometrycollectionfromwkb', + 'st_geometryfromtext', + 'st_geometryfromwkb', + 'st_geometryn', + 'st_geometrytype', + 'st_geomfromgeojson', + 'st_geomfromtext', + 'st_geomfromwkb', + 'st_interiorringn', + 'st_intersection', + 'st_intersects', + 'st_isclosed', + 'st_isempty', + 'st_issimple', + 'st_isvalid', + 'st_latfromgeohash', + 'st_latitude', + 'st_length', + 'st_linefromtext', + 'st_linefromwkb', + 'st_linestringfromtext', + 'st_linestringfromwkb', + 'st_longfromgeohash', + 'st_longitude', + 'st_makeenvelope', + 'st_mlinefromtext', + 'st_mlinefromwkb', + 'st_mpointfromtext', + 'st_mpointfromwkb', + 'st_mpolyfromtext', + 'st_mpolyfromwkb', + 'st_multilinestringfromtext', + 'st_multilinestringfromwkb', + 'st_multipointfromtext', + 'st_multipointfromwkb', + 'st_multipolygonfromtext', + 'st_multipolygonfromwkb', + 'st_numgeometries', + 'st_numinteriorring', + 'st_numinteriorrings', + 
'st_numpoints', + 'st_overlaps', + 'st_pointfromgeohash', + 'st_pointfromtext', + 'st_pointfromwkb', + 'st_pointn', + 'st_polyfromtext', + 'st_polyfromwkb', + 'st_polygonfromtext', + 'st_polygonfromwkb', + 'st_simplify', + 'st_srid', + 'st_startpoint', + 'st_swapxy', + 'st_symdifference', + 'st_touches', + 'st_transform', + 'st_union', + 'st_validate', + 'st_within', + 'st_x', + 'st_y', + 'statement_digest', + 'statement_digest_text', + 'std', + 'stddev', + 'stddev_pop', + 'stddev_samp', + 'str_to_date', + 'strcmp', + 'subdate', + 'substr', + 'substring', + 'substring_index', + 'subtime', + 'sum', + 'sysdate', + 'system_user', + 'tan', + 'time_format', + 'time_to_sec', + 'timediff', + 'to_base64', + 'to_days', + 'to_seconds', + 'trim', + 'ucase', + 'uncompress', + 'uncompressed_length', + 'unhex', + 'unix_timestamp', + 'updatexml', + 'upper', + 'uuid', + 'uuid_short', + 'uuid_to_bin', + 'validate_password_strength', + 'var_pop', + 'var_samp', + 'variance', + 'version', + 'wait_for_executed_gtid_set', + 'wait_until_sql_thread_after_gtids', + 'weekday', + 'weekofyear', + 'yearweek', +) + + +MYSQL_OPTIMIZER_HINTS = ( + 'bka', + 'bnl', + 'dupsweedout', + 'firstmatch', + 'group_index', + 'hash_join', + 'index', + 'index_merge', + 'intoexists', + 'join_fixed_order', + 'join_index', + 'join_order', + 'join_prefix', + 'join_suffix', + 'loosescan', + 'materialization', + 'max_execution_time', + 'merge', + 'mrr', + 'no_bka', + 'no_bnl', + 'no_group_index', + 'no_hash_join', + 'no_icp', + 'no_index', + 'no_index_merge', + 'no_join_index', + 'no_merge', + 'no_mrr', + 'no_order_index', + 'no_range_optimization', + 'no_semijoin', + 'no_skip_scan', + 'order_index', + 'qb_name', + 'resource_group', + 'semijoin', + 'set_var', + 'skip_scan', + 'subquery', +) + + +MYSQL_KEYWORDS = ( + 'accessible', + 'account', + 'action', + 'active', + 'add', + 'admin', + 'after', + 'against', + 'aggregate', + 'algorithm', + 'all', + 'alter', + 'always', + 'analyze', + 'and', + 'any', + 'array', + 
'as', + 'asc', + 'ascii', + 'asensitive', + 'at', + 'attribute', + 'auto_increment', + 'autoextend_size', + 'avg', + 'avg_row_length', + 'backup', + 'before', + 'begin', + 'between', + 'binlog', + 'block', + 'both', + 'btree', + 'buckets', + 'by', + 'byte', + 'cache', + 'call', + 'cascade', + 'cascaded', + 'case', + 'catalog_name', + 'chain', + 'change', + 'changed', + 'channel', + 'character', + 'charset', + 'check', + 'checksum', + 'cipher', + 'class_origin', + 'client', + 'clone', + 'close', + 'coalesce', + 'code', + 'collate', + 'collation', + 'column', + 'column_format', + 'column_name', + 'columns', + 'comment', + 'commit', + 'committed', + 'compact', + 'completion', + 'component', + 'compressed', + 'compression', + 'concurrent', + 'condition', + 'connection', + 'consistent', + 'constraint', + 'constraint_catalog', + 'constraint_name', + 'constraint_schema', + 'contains', + 'context', + 'continue', + 'convert', + 'cpu', + 'create', + 'cross', + 'cube', + 'cume_dist', + 'current', + 'current_date', + 'current_time', + 'current_timestamp', + 'current_user', + 'cursor', + 'cursor_name', + 'data', + 'database', + 'databases', + 'datafile', + 'day', + 'day_hour', + 'day_microsecond', + 'day_minute', + 'day_second', + 'deallocate', + 'declare', + 'default', + 'default_auth', + 'definer', + 'definition', + 'delay_key_write', + 'delayed', + 'delete', + 'dense_rank', + 'desc', + 'describe', + 'description', + 'deterministic', + 'diagnostics', + 'directory', + 'disable', + 'discard', + 'disk', + 'distinct', + 'distinctrow', + 'div', + 'do', + 'drop', + 'dual', + 'dumpfile', + 'duplicate', + 'dynamic', + 'each', + 'else', + 'elseif', + 'empty', + 'enable', + 'enclosed', + 'encryption', + 'end', + 'ends', + 'enforced', + 'engine', + 'engine_attribute', + 'engines', + 'error', + 'errors', + 'escape', + 'escaped', + 'event', + 'events', + 'every', + 'except', + 'exchange', + 'exclude', + 'execute', + 'exists', + 'exit', + 'expansion', + 'expire', + 'explain', + 'export', + 
'extended', + 'extent_size', + 'failed_login_attempts', + 'false', + 'fast', + 'faults', + 'fetch', + 'fields', + 'file', + 'file_block_size', + 'filter', + 'first', + 'first_value', + 'flush', + 'following', + 'follows', + 'for', + 'force', + 'foreign', + 'format', + 'found', + 'from', + 'full', + 'fulltext', + 'function', + 'general', + 'generated', + 'geomcollection', + 'get', + 'get_format', + 'get_master_public_key', + 'global', + 'grant', + 'grants', + 'group', + 'group_replication', + 'grouping', + 'groups', + 'handler', + 'hash', + 'having', + 'help', + 'high_priority', + 'histogram', + 'history', + 'host', + 'hosts', + 'hour', + 'hour_microsecond', + 'hour_minute', + 'hour_second', + 'identified', + 'if', + 'ignore', + 'ignore_server_ids', + 'import', + 'in', + 'inactive', + 'index', + 'indexes', + 'infile', + 'initial_size', + 'inner', + 'inout', + 'insensitive', + 'insert', + 'insert_method', + 'install', + 'instance', + 'interval', + 'into', + 'invisible', + 'invoker', + 'io', + 'io_after_gtids', + 'io_before_gtids', + 'io_thread', + 'ipc', + 'is', + 'isolation', + 'issuer', + 'iterate', + 'join', + 'json_table', + 'json_value', + 'key', + 'key_block_size', + 'keys', + 'kill', + 'lag', + 'language', + 'last', + 'last_value', + 'lateral', + 'lead', + 'leading', + 'leave', + 'leaves', + 'left', + 'less', + 'level', + 'like', + 'limit', + 'linear', + 'lines', + 'list', + 'load', + 'local', + 'localtime', + 'localtimestamp', + 'lock', + 'locked', + 'locks', + 'logfile', + 'logs', + 'loop', + 'low_priority', + 'master', + 'master_auto_position', + 'master_bind', + 'master_compression_algorithms', + 'master_connect_retry', + 'master_delay', + 'master_heartbeat_period', + 'master_host', + 'master_log_file', + 'master_log_pos', + 'master_password', + 'master_port', + 'master_public_key_path', + 'master_retry_count', + 'master_server_id', + 'master_ssl', + 'master_ssl_ca', + 'master_ssl_capath', + 'master_ssl_cert', + 'master_ssl_cipher', + 'master_ssl_crl', + 
'master_ssl_crlpath', + 'master_ssl_key', + 'master_ssl_verify_server_cert', + 'master_tls_ciphersuites', + 'master_tls_version', + 'master_user', + 'master_zstd_compression_level', + 'match', + 'max_connections_per_hour', + 'max_queries_per_hour', + 'max_rows', + 'max_size', + 'max_updates_per_hour', + 'max_user_connections', + 'maxvalue', + 'medium', + 'member', + 'memory', + 'merge', + 'message_text', + 'microsecond', + 'migrate', + 'min_rows', + 'minute', + 'minute_microsecond', + 'minute_second', + 'mod', + 'mode', + 'modifies', + 'modify', + 'month', + 'mutex', + 'mysql_errno', + 'name', + 'names', + 'natural', + 'ndb', + 'ndbcluster', + 'nested', + 'network_namespace', + 'never', + 'new', + 'next', + 'no', + 'no_wait', + 'no_write_to_binlog', + 'nodegroup', + 'none', + 'not', + 'nowait', + 'nth_value', + 'ntile', + 'null', + 'nulls', + 'number', + 'of', + 'off', + 'offset', + 'oj', + 'old', + 'on', + 'one', + 'only', + 'open', + 'optimize', + 'optimizer_costs', + 'option', + 'optional', + 'optionally', + 'options', + 'or', + 'order', + 'ordinality', + 'organization', + 'others', + 'out', + 'outer', + 'outfile', + 'over', + 'owner', + 'pack_keys', + 'page', + 'parser', + 'partial', + 'partition', + 'partitioning', + 'partitions', + 'password', + 'password_lock_time', + 'path', + 'percent_rank', + 'persist', + 'persist_only', + 'phase', + 'plugin', + 'plugin_dir', + 'plugins', + 'port', + 'precedes', + 'preceding', + 'prepare', + 'preserve', + 'prev', + 'primary', + 'privilege_checks_user', + 'privileges', + 'procedure', + 'process', + 'processlist', + 'profile', + 'profiles', + 'proxy', + 'purge', + 'quarter', + 'query', + 'quick', + 'random', + 'range', + 'rank', + 'read', + 'read_only', + 'read_write', + 'reads', + 'rebuild', + 'recover', + 'recursive', + 'redo_buffer_size', + 'redundant', + 'reference', + 'references', + 'regexp', + 'relay', + 'relay_log_file', + 'relay_log_pos', + 'relay_thread', + 'relaylog', + 'release', + 'reload', + 'remove', + 
'rename', + 'reorganize', + 'repair', + 'repeat', + 'repeatable', + 'replace', + 'replicate_do_db', + 'replicate_do_table', + 'replicate_ignore_db', + 'replicate_ignore_table', + 'replicate_rewrite_db', + 'replicate_wild_do_table', + 'replicate_wild_ignore_table', + 'replication', + 'require', + 'require_row_format', + 'require_table_primary_key_check', + 'reset', + 'resignal', + 'resource', + 'respect', + 'restart', + 'restore', + 'restrict', + 'resume', + 'retain', + 'return', + 'returned_sqlstate', + 'returning', + 'returns', + 'reuse', + 'reverse', + 'revoke', + 'right', + 'rlike', + 'role', + 'rollback', + 'rollup', + 'rotate', + 'routine', + 'row', + 'row_count', + 'row_format', + 'row_number', + 'rows', + 'rtree', + 'savepoint', + 'schedule', + 'schema', + 'schema_name', + 'schemas', + 'second', + 'second_microsecond', + 'secondary', + 'secondary_engine', + 'secondary_engine_attribute', + 'secondary_load', + 'secondary_unload', + 'security', + 'select', + 'sensitive', + 'separator', + 'serializable', + 'server', + 'session', + 'share', + 'show', + 'shutdown', + 'signal', + 'signed', + 'simple', + 'skip', + 'slave', + 'slow', + 'snapshot', + 'socket', + 'some', + 'soname', + 'sounds', + 'source', + 'spatial', + 'specific', + 'sql', + 'sql_after_gtids', + 'sql_after_mts_gaps', + 'sql_before_gtids', + 'sql_big_result', + 'sql_buffer_result', + 'sql_calc_found_rows', + 'sql_no_cache', + 'sql_small_result', + 'sql_thread', + 'sql_tsi_day', + 'sql_tsi_hour', + 'sql_tsi_minute', + 'sql_tsi_month', + 'sql_tsi_quarter', + 'sql_tsi_second', + 'sql_tsi_week', + 'sql_tsi_year', + 'sqlexception', + 'sqlstate', + 'sqlwarning', + 'srid', + 'ssl', + 'stacked', + 'start', + 'starting', + 'starts', + 'stats_auto_recalc', + 'stats_persistent', + 'stats_sample_pages', + 'status', + 'stop', + 'storage', + 'stored', + 'straight_join', + 'stream', + 'string', + 'subclass_origin', + 'subject', + 'subpartition', + 'subpartitions', + 'super', + 'suspend', + 'swaps', + 'switches', + 
'system', + 'table', + 'table_checksum', + 'table_name', + 'tables', + 'tablespace', + 'temporary', + 'temptable', + 'terminated', + 'than', + 'then', + 'thread_priority', + 'ties', + 'timestampadd', + 'timestampdiff', + 'tls', + 'to', + 'trailing', + 'transaction', + 'trigger', + 'triggers', + 'true', + 'truncate', + 'type', + 'types', + 'unbounded', + 'uncommitted', + 'undefined', + 'undo', + 'undo_buffer_size', + 'undofile', + 'unicode', + 'uninstall', + 'union', + 'unique', + 'unknown', + 'unlock', + 'unsigned', + 'until', + 'update', + 'upgrade', + 'usage', + 'use', + 'use_frm', + 'user', + 'user_resources', + 'using', + 'utc_date', + 'utc_time', + 'utc_timestamp', + 'validation', + 'value', + 'values', + 'variables', + 'vcpu', + 'view', + 'virtual', + 'visible', + 'wait', + 'warnings', + 'week', + 'weight_string', + 'when', + 'where', + 'while', + 'window', + 'with', + 'without', + 'work', + 'wrapper', + 'write', + 'x509', + 'xa', + 'xid', + 'xml', + 'xor', + 'year_month', + 'zerofill', +) + + +if __name__ == '__main__': # pragma: no cover + import re + from urllib.request import urlopen + + from typecode._vendor.pygments.util import format_lines + + # MySQL source code + SOURCE_URL = 'https://github.com/mysql/mysql-server/raw/8.0' + LEX_URL = SOURCE_URL + '/sql/lex.h' + ITEM_CREATE_URL = SOURCE_URL + '/sql/item_create.cc' + + + def update_myself(): + # Pull content from lex.h. + lex_file = urlopen(LEX_URL).read().decode('utf8', errors='ignore') + keywords = parse_lex_keywords(lex_file) + functions = parse_lex_functions(lex_file) + optimizer_hints = parse_lex_optimizer_hints(lex_file) + + # Parse content in item_create.cc. + item_create_file = urlopen(ITEM_CREATE_URL).read().decode('utf8', errors='ignore') + functions.update(parse_item_create_functions(item_create_file)) + + # Remove data types from the set of keywords. 
+ keywords -= set(MYSQL_DATATYPES) + + update_content('MYSQL_FUNCTIONS', tuple(sorted(functions))) + update_content('MYSQL_KEYWORDS', tuple(sorted(keywords))) + update_content('MYSQL_OPTIMIZER_HINTS', tuple(sorted(optimizer_hints))) + + + def parse_lex_keywords(f): + """Parse keywords in lex.h.""" + + results = set() + for m in re.finditer(r'{SYM(?:_HK)?\("(?P[a-z0-9_]+)",', f, flags=re.I): + results.add(m.group('keyword').lower()) + + if not results: + raise ValueError('No keywords found') + + return results + + + def parse_lex_optimizer_hints(f): + """Parse optimizer hints in lex.h.""" + + results = set() + for m in re.finditer(r'{SYM_H\("(?P[a-z0-9_]+)",', f, flags=re.I): + results.add(m.group('keyword').lower()) + + if not results: + raise ValueError('No optimizer hints found') + + return results + + + def parse_lex_functions(f): + """Parse MySQL function names from lex.h.""" + + results = set() + for m in re.finditer(r'{SYM_FN?\("(?P[a-z0-9_]+)",', f, flags=re.I): + results.add(m.group('function').lower()) + + if not results: + raise ValueError('No lex functions found') + + return results + + + def parse_item_create_functions(f): + """Parse MySQL function names from item_create.cc.""" + + results = set() + for m in re.finditer(r'{"(?P[^"]+?)",\s*SQL_F[^(]+?\(', f, flags=re.I): + results.add(m.group('function').lower()) + + if not results: + raise ValueError('No item_create functions found') + + return results + + + def update_content(field_name, content): + """Overwrite this file with content parsed from MySQL's source code.""" + + with open(__file__) as f: + data = f.read() + + # Line to start/end inserting + re_match = re.compile(r'^%s\s*=\s*\($.*?^\s*\)$' % field_name, re.M | re.S) + m = re_match.search(data) + if not m: + raise ValueError('Could not find an existing definition for %s' % field_name) + + new_block = format_lines(field_name, content) + data = data[:m.start()] + new_block + data[m.end():] + + with open(__file__, 'w', newline='\n') as f: + 
f.write(data) + + update_myself() diff --git a/src/typecode/_vendor/pygments/lexers/_openedge_builtins.py b/src/typecode/_vendor/pygments/lexers/_openedge_builtins.py index 09587e0..65a73ad 100644 --- a/src/typecode/_vendor/pygments/lexers/_openedge_builtins.py +++ b/src/typecode/_vendor/pygments/lexers/_openedge_builtins.py @@ -5,7 +5,7 @@ Builtin list for the OpenEdgeLexer. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/_php_builtins.py b/src/typecode/_vendor/pygments/lexers/_php_builtins.py index 44ef205..783ba4a 100644 --- a/src/typecode/_vendor/pygments/lexers/_php_builtins.py +++ b/src/typecode/_vendor/pygments/lexers/_php_builtins.py @@ -12,12 +12,10 @@ internet connection. don't run that at home, use a server ;-) - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ -from __future__ import print_function - MODULES = {'.NET': ('dotnet_load',), 'APC': ('apc_add', 'apc_bin_dump', @@ -4729,8 +4727,7 @@ def get_php_references(): download = urlretrieve(PHP_MANUAL_URL) with tarfile.open(download[0]) as tar: tar.extractall() - for file in glob.glob("%s%s" % (PHP_MANUAL_DIR, PHP_REFERENCE_GLOB)): - yield file + yield from glob.glob("%s%s" % (PHP_MANUAL_DIR, PHP_REFERENCE_GLOB)) os.remove(download[0]) def regenerate(filename, modules): diff --git a/src/typecode/_vendor/pygments/lexers/_postgres_builtins.py b/src/typecode/_vendor/pygments/lexers/_postgres_builtins.py index 7acaddd..77d4b58 100644 --- a/src/typecode/_vendor/pygments/lexers/_postgres_builtins.py +++ b/src/typecode/_vendor/pygments/lexers/_postgres_builtins.py @@ -5,7 +5,7 @@ Self-updating data files for PostgreSQL lexer. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. 
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -36,6 +36,7 @@ 'ASSIGNMENT', 'ASYMMETRIC', 'AT', + 'ATTACH', 'ATTRIBUTE', 'AUTHORIZATION', 'BACKWARD', @@ -49,6 +50,7 @@ 'BOTH', 'BY', 'CACHE', + 'CALL', 'CALLED', 'CASCADE', 'CASCADED', @@ -68,12 +70,14 @@ 'COLLATE', 'COLLATION', 'COLUMN', + 'COLUMNS', 'COMMENT', 'COMMENTS', 'COMMIT', 'COMMITTED', 'CONCURRENTLY', 'CONFIGURATION', + 'CONFLICT', 'CONNECTION', 'CONSTRAINT', 'CONSTRAINTS', @@ -85,6 +89,7 @@ 'CREATE', 'CROSS', 'CSV', + 'CUBE', 'CURRENT', 'CURRENT_CATALOG', 'CURRENT_DATE', @@ -110,7 +115,9 @@ 'DELETE', 'DELIMITER', 'DELIMITERS', + 'DEPENDS', 'DESC', + 'DETACH', 'DICTIONARY', 'DISABLE', 'DISCARD', @@ -136,6 +143,7 @@ 'EXECUTE', 'EXISTS', 'EXPLAIN', + 'EXPRESSION', 'EXTENSION', 'EXTERNAL', 'EXTRACT', @@ -155,11 +163,14 @@ 'FULL', 'FUNCTION', 'FUNCTIONS', + 'GENERATED', 'GLOBAL', 'GRANT', 'GRANTED', 'GREATEST', 'GROUP', + 'GROUPING', + 'GROUPS', 'HANDLER', 'HAVING', 'HEADER', @@ -171,7 +182,9 @@ 'IMMEDIATE', 'IMMUTABLE', 'IMPLICIT', + 'IMPORT', 'IN', + 'INCLUDE', 'INCLUDING', 'INCREMENT', 'INDEX', @@ -202,8 +215,6 @@ 'LARGE', 'LAST', 'LATERAL', - 'LC_COLLATE', - 'LC_CTYPE', 'LEADING', 'LEAKPROOF', 'LEAST', @@ -218,10 +229,13 @@ 'LOCALTIMESTAMP', 'LOCATION', 'LOCK', + 'LOCKED', + 'LOGGED', 'MAPPING', 'MATCH', 'MATERIALIZED', 'MAXVALUE', + 'METHOD', 'MINUTE', 'MINVALUE', 'MODE', @@ -232,9 +246,16 @@ 'NATIONAL', 'NATURAL', 'NCHAR', + 'NEW', 'NEXT', + 'NFC', + 'NFD', + 'NFKC', + 'NFKD', 'NO', 'NONE', + 'NORMALIZE', + 'NORMALIZED', 'NOT', 'NOTHING', 'NOTIFY', @@ -249,6 +270,7 @@ 'OFF', 'OFFSET', 'OIDS', + 'OLD', 'ON', 'ONLY', 'OPERATOR', @@ -257,13 +279,16 @@ 'OR', 'ORDER', 'ORDINALITY', + 'OTHERS', 'OUT', 'OUTER', 'OVER', 'OVERLAPS', 'OVERLAY', + 'OVERRIDING', 'OWNED', 'OWNER', + 'PARALLEL', 'PARSER', 'PARTIAL', 'PARTITION', @@ -283,7 +308,9 @@ 'PRIVILEGES', 'PROCEDURAL', 'PROCEDURE', + 'PROCEDURES', 'PROGRAM', + 'PUBLICATION', 'QUOTE', 'RANGE', 
'READ', @@ -293,6 +320,7 @@ 'RECURSIVE', 'REF', 'REFERENCES', + 'REFERENCING', 'REFRESH', 'REINDEX', 'RELATIVE', @@ -310,11 +338,15 @@ 'RIGHT', 'ROLE', 'ROLLBACK', + 'ROLLUP', + 'ROUTINE', + 'ROUTINES', 'ROW', 'ROWS', 'RULE', 'SAVEPOINT', 'SCHEMA', + 'SCHEMAS', 'SCROLL', 'SEARCH', 'SECOND', @@ -328,13 +360,16 @@ 'SESSION_USER', 'SET', 'SETOF', + 'SETS', 'SHARE', 'SHOW', 'SIMILAR', 'SIMPLE', + 'SKIP', 'SMALLINT', 'SNAPSHOT', 'SOME', + 'SQL', 'STABLE', 'STANDALONE', 'START', @@ -343,25 +378,31 @@ 'STDIN', 'STDOUT', 'STORAGE', + 'STORED', 'STRICT', 'STRIP', + 'SUBSCRIPTION', 'SUBSTRING', + 'SUPPORT', 'SYMMETRIC', 'SYSID', 'SYSTEM', 'TABLE', 'TABLES', + 'TABLESAMPLE', 'TABLESPACE', 'TEMP', 'TEMPLATE', 'TEMPORARY', 'TEXT', 'THEN', + 'TIES', 'TIME', 'TIMESTAMP', 'TO', 'TRAILING', 'TRANSACTION', + 'TRANSFORM', 'TREAT', 'TRIGGER', 'TRIM', @@ -370,6 +411,7 @@ 'TRUSTED', 'TYPE', 'TYPES', + 'UESCAPE', 'UNBOUNDED', 'UNCOMMITTED', 'UNENCRYPTED', @@ -412,10 +454,12 @@ 'XMLELEMENT', 'XMLEXISTS', 'XMLFOREST', + 'XMLNAMESPACES', 'XMLPARSE', 'XMLPI', 'XMLROOT', 'XMLSERIALIZE', + 'XMLTABLE', 'YEAR', 'YES', 'ZONE', @@ -452,10 +496,12 @@ 'line', 'lseg', 'macaddr', + 'macaddr8', 'money', 'numeric', 'path', 'pg_lsn', + 'pg_snapshot', 'point', 'polygon', 'real', @@ -483,19 +529,28 @@ PSEUDO_TYPES = ( 'any', - 'anyelement', 'anyarray', - 'anynonarray', + 'anycompatible', + 'anycompatiblearray', + 'anycompatiblenonarray', + 'anycompatiblerange', + 'anyelement', 'anyenum', + 'anynonarray', 'anyrange', 'cstring', + 'event_trigger', + 'fdw_handler', + 'index_am_handler', 'internal', 'language_handler', - 'fdw_handler', + 'pg_ddl_command', 'record', + 'table_am_handler', 'trigger', + 'tsm_handler', + 'unknown', 'void', - 'opaque', ) # Remove 'trigger' from types @@ -519,25 +574,26 @@ # One man's constant is another man's variable. 
SOURCE_URL = 'https://github.com/postgres/postgres/raw/master' - KEYWORDS_URL = SOURCE_URL + '/doc/src/sgml/keywords.sgml' + KEYWORDS_URL = SOURCE_URL + '/src/include/parser/kwlist.h' DATATYPES_URL = SOURCE_URL + '/doc/src/sgml/datatype.sgml' def update_myself(): - data_file = list(urlopen(DATATYPES_URL)) + content = urlopen(DATATYPES_URL).read().decode('utf-8', errors='ignore') + data_file = list(content.splitlines()) datatypes = parse_datatypes(data_file) pseudos = parse_pseudos(data_file) - keywords = parse_keywords(urlopen(KEYWORDS_URL)) + content = urlopen(KEYWORDS_URL).read().decode('utf-8', errors='ignore') + keywords = parse_keywords(content) + update_consts(__file__, 'DATATYPES', datatypes) update_consts(__file__, 'PSEUDO_TYPES', pseudos) update_consts(__file__, 'KEYWORDS', keywords) def parse_keywords(f): kw = [] - for m in re.finditer( - r'\s*([^<]+)\s*' - r'([^<]+)', f.read()): - kw.append(m.group(1)) + for m in re.finditer(r'PG_KEYWORD\("(.+?)"', f): + kw.append(m.group(1).upper()) if not kw: raise ValueError('no keyword found') @@ -576,7 +632,7 @@ def parse_datatypes(f): def parse_pseudos(f): dt = [] re_start = re.compile(r'\s*') - re_entry = re.compile(r'\s*([^<]+)') + re_entry = re.compile(r'\s*(.+?)') re_end = re.compile(r'\s*
') f = iter(f) @@ -599,6 +655,7 @@ def parse_pseudos(f): if not dt: raise ValueError('pseudo datatypes not found') + dt.sort() return dt def update_consts(filename, constname, content): @@ -615,7 +672,7 @@ def update_consts(filename, constname, content): new_block = format_lines(constname, content) data = data[:m.start()] + new_block + data[m.end():] - with open(filename, 'w') as f: + with open(filename, 'w', newline='\n') as f: f.write(data) update_myself() diff --git a/src/typecode/_vendor/pygments/lexers/_scilab_builtins.py b/src/typecode/_vendor/pygments/lexers/_scilab_builtins.py index 097757c..fc1030e 100644 --- a/src/typecode/_vendor/pygments/lexers/_scilab_builtins.py +++ b/src/typecode/_vendor/pygments/lexers/_scilab_builtins.py @@ -5,7 +5,7 @@ Builtin list for the ScilabLexer. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -3089,6 +3089,6 @@ def extract_completion(var_type): with open(__file__, 'w') as f: f.write(header) f.write('# Autogenerated\n\n') - for k, v in sorted(new_data.iteritems()): + for k, v in sorted(new_data.items()): f.write(format_lines(k + '_kw', v) + '\n\n') f.write(footer) diff --git a/src/typecode/_vendor/pygments/lexers/_sourcemod_builtins.py b/src/typecode/_vendor/pygments/lexers/_sourcemod_builtins.py index 617ea7c..ff6994a 100644 --- a/src/typecode/_vendor/pygments/lexers/_sourcemod_builtins.py +++ b/src/typecode/_vendor/pygments/lexers/_sourcemod_builtins.py @@ -8,12 +8,10 @@ Do not edit the FUNCTIONS list by hand. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" -from __future__ import print_function - FUNCTIONS = ( 'OnEntityCreated', 'OnEntityDestroyed', diff --git a/src/typecode/_vendor/pygments/lexers/_stan_builtins.py b/src/typecode/_vendor/pygments/lexers/_stan_builtins.py index e95f5b1..c695539 100644 --- a/src/typecode/_vendor/pygments/lexers/_stan_builtins.py +++ b/src/typecode/_vendor/pygments/lexers/_stan_builtins.py @@ -6,7 +6,7 @@ This file contains the names of functions for Stan used by ``pygments.lexers.math.StanLexer. This is for Stan language version 2.17.0. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/_stata_builtins.py b/src/typecode/_vendor/pygments/lexers/_stata_builtins.py index 3f4abdc..2e43090 100644 --- a/src/typecode/_vendor/pygments/lexers/_stata_builtins.py +++ b/src/typecode/_vendor/pygments/lexers/_stata_builtins.py @@ -5,7 +5,7 @@ Builtins for Stata - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/_tsql_builtins.py b/src/typecode/_vendor/pygments/lexers/_tsql_builtins.py index dfc5f61..ebf1edc 100644 --- a/src/typecode/_vendor/pygments/lexers/_tsql_builtins.py +++ b/src/typecode/_vendor/pygments/lexers/_tsql_builtins.py @@ -5,7 +5,7 @@ These are manually translated lists from https://msdn.microsoft.com. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" diff --git a/src/typecode/_vendor/pygments/lexers/_usd_builtins.py b/src/typecode/_vendor/pygments/lexers/_usd_builtins.py new file mode 100644 index 0000000..64e4884 --- /dev/null +++ b/src/typecode/_vendor/pygments/lexers/_usd_builtins.py @@ -0,0 +1,113 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers._usd_builtins + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + A collection of known USD-related keywords, attributes, and types. + + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +COMMON_ATTRIBUTES = [ + "extent", + "xformOpOrder", +] + +KEYWORDS = [ + "class", + "clips", + "custom", + "customData", + "def", + "dictionary", + "inherits", + "over", + "payload", + "references", + "rel", + "subLayers", + "timeSamples", + "uniform", + "variantSet", + "variantSets", + "variants", +] + +OPERATORS = [ + "add", + "append", + "delete", + "prepend", + "reorder", +] + +SPECIAL_NAMES = [ + "active", + "apiSchemas", + "defaultPrim", + "elementSize", + "endTimeCode", + "hidden", + "instanceable", + "interpolation", + "kind", + "startTimeCode", + "upAxis", +] + +TYPES = [ + "asset", + "bool", + "color3d", + "color3f", + "color3h", + "color4d", + "color4f", + "color4h", + "double", + "double2", + "double3", + "double4", + "float", + "float2", + "float3", + "float4", + "frame4d", + "half", + "half2", + "half3", + "half4", + "int", + "int2", + "int3", + "int4", + "keyword", + "matrix2d", + "matrix3d", + "matrix4d", + "normal3d", + "normal3f", + "normal3h", + "point3d", + "point3f", + "point3h", + "quatd", + "quatf", + "quath", + "string", + "syn", + "token", + "uchar", + "uchar2", + "uchar3", + "uchar4", + "uint", + "uint2", + "uint3", + "uint4", + "usdaType", + "vector3d", + "vector3f", + "vector3h", +] diff --git a/src/typecode/_vendor/pygments/lexers/_vbscript_builtins.py b/src/typecode/_vendor/pygments/lexers/_vbscript_builtins.py index e752007..0c19d72 100644 --- 
a/src/typecode/_vendor/pygments/lexers/_vbscript_builtins.py +++ b/src/typecode/_vendor/pygments/lexers/_vbscript_builtins.py @@ -6,7 +6,7 @@ These are manually translated lists from http://www.indusoft.com/pdf/VBScript%20Reference.pdf. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/_vim_builtins.py b/src/typecode/_vendor/pygments/lexers/_vim_builtins.py index 39c9ed1..55941ed 100644 --- a/src/typecode/_vendor/pygments/lexers/_vim_builtins.py +++ b/src/typecode/_vendor/pygments/lexers/_vim_builtins.py @@ -5,7 +5,7 @@ This file is autogenerated by scripts/get_vimkw.py - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/actionscript.py b/src/typecode/_vendor/pygments/lexers/actionscript.py index 0fb4085..d48c695 100644 --- a/src/typecode/_vendor/pygments/lexers/actionscript.py +++ b/src/typecode/_vendor/pygments/lexers/actionscript.py @@ -5,7 +5,7 @@ Lexers for ActionScript and MXML. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -37,7 +37,7 @@ class ActionScriptLexer(RegexLexer): (r'\s+', Text), (r'//.*?\n', Comment.Single), (r'/\*.*?\*/', Comment.Multiline), - (r'/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex), + (r'/(\\\\|\\[^\\]|[^/\\\n])*/[gim]*', String.Regex), (r'[~^*!%&<>|+=:;,/?\\-]+', Operator), (r'[{}\[\]();.]+', Punctuation), (words(( @@ -105,11 +105,16 @@ class ActionScriptLexer(RegexLexer): (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), (r'0x[0-9a-f]+', Number.Hex), (r'[0-9]+', Number.Integer), - (r'"(\\\\|\\"|[^"])*"', String.Double), - (r"'(\\\\|\\'|[^'])*'", String.Single), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), ] } + def analyse_text(text): + """This is only used to disambiguate between ActionScript and + ActionScript3. We return 0 here; the ActionScript3 lexer will match + AS3 variable definitions and that will hopefully suffice.""" + return 0 class ActionScript3Lexer(RegexLexer): """ @@ -144,7 +149,7 @@ class ActionScript3Lexer(RegexLexer): bygroups(Keyword, Text, Keyword.Type, Text, Operator)), (r'//.*?\n', Comment.Single), (r'/\*.*?\*/', Comment.Multiline), - (r'/(\\\\|\\/|[^\n])*/[gisx]*', String.Regex), + (r'/(\\\\|\\[^\\]|[^\\\n])*/[gisx]*', String.Regex), (r'(\.)(' + identifier + r')', bygroups(Operator, Name.Attribute)), (r'(case|default|for|each|in|while|do|break|return|continue|if|else|' r'throw|try|catch|with|new|typeof|arguments|instanceof|this|' @@ -164,8 +169,8 @@ class ActionScript3Lexer(RegexLexer): (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), (r'0x[0-9a-f]+', Number.Hex), (r'[0-9]+', Number.Integer), - (r'"(\\\\|\\"|[^"])*"', String.Double), - (r"'(\\\\|\\'|[^'])*'", String.Single), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), (r'[~^*!%&<>|+=:;,/?\\{}\[\]().-]+', Operator), ], 'funcparams': [ diff --git a/src/typecode/_vendor/pygments/lexers/agile.py b/src/typecode/_vendor/pygments/lexers/agile.py index 3b1be93..ba1e436 
100644 --- a/src/typecode/_vendor/pygments/lexers/agile.py +++ b/src/typecode/_vendor/pygments/lexers/agile.py @@ -5,7 +5,7 @@ Just export lexer classes previously contained in this module. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/algebra.py b/src/typecode/_vendor/pygments/lexers/algebra.py index 508ff71..6117fc1 100644 --- a/src/typecode/_vendor/pygments/lexers/algebra.py +++ b/src/typecode/_vendor/pygments/lexers/algebra.py @@ -5,7 +5,7 @@ Lexers for computer algebra systems. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -68,6 +68,25 @@ class GAPLexer(RegexLexer): ], } + def analyse_text(text): + score = 0.0 + + # Declaration part + if re.search( + r"(InstallTrueMethod|Declare(Attribute|Category|Filter|Operation" + + r"|GlobalFunction|Synonym|SynonymAttr|Property))", text + ): + score += 0.7 + + # Implementation part + if re.search( + r"(DeclareRepresentation|Install(GlobalFunction|Method|" + + r"ImmediateMethod|OtherMethod)|New(Family|Type)|Objectify)", text + ): + score += 0.7 + + return min(score, 1.0) + class MathematicaLexer(RegexLexer): """ diff --git a/src/typecode/_vendor/pygments/lexers/ambient.py b/src/typecode/_vendor/pygments/lexers/ambient.py index ed0bf61..96b646f 100644 --- a/src/typecode/_vendor/pygments/lexers/ambient.py +++ b/src/typecode/_vendor/pygments/lexers/ambient.py @@ -5,7 +5,7 @@ Lexers for AmbientTalk language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -44,7 +44,7 @@ class AmbientTalkLexer(RegexLexer): (builtin, Name.Builtin), (r'(true|false|nil)\b', Keyword.Constant), (r'(~|lobby|jlobby|/)\.', Keyword.Constant, 'namespace'), - (r'"(\\\\|\\"|[^"])*"', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), (r'\|', Punctuation, 'arglist'), (r'<:|[*^!%&<>+=,./?-]|:=', Operator), (r"`[a-zA-Z_]\w*", String.Symbol), diff --git a/src/typecode/_vendor/pygments/lexers/ampl.py b/src/typecode/_vendor/pygments/lexers/ampl.py index 46b44f8..cbf0162 100644 --- a/src/typecode/_vendor/pygments/lexers/ampl.py +++ b/src/typecode/_vendor/pygments/lexers/ampl.py @@ -5,7 +5,7 @@ Lexers for the AMPL language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/apl.py b/src/typecode/_vendor/pygments/lexers/apl.py index 91e528c..a610a47 100644 --- a/src/typecode/_vendor/pygments/lexers/apl.py +++ b/src/typecode/_vendor/pygments/lexers/apl.py @@ -5,7 +5,7 @@ Lexers for APL. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -18,7 +18,7 @@ class APLLexer(RegexLexer): """ - A simple APL lexer. + A simple `APL `_ lexer. .. 
versionadded:: 2.0 """ @@ -35,7 +35,7 @@ class APLLexer(RegexLexer): # Comment # ======= # '⍝' is traditional; '#' is supported by GNU APL and NGN (but not Dyalog) - (u'[⍝#].*$', Comment.Single), + (r'[⍝#].*$', Comment.Single), # # Strings # ======= @@ -46,7 +46,7 @@ class APLLexer(RegexLexer): # =========== # This token type is used for diamond and parenthesis # but not for bracket and ; (see below) - (u'[⋄◇()]', Punctuation), + (r'[⋄◇()]', Punctuation), # # Array indexing # ============== @@ -57,45 +57,45 @@ class APLLexer(RegexLexer): # Distinguished names # =================== # following IBM APL2 standard - (u'⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Function), + (r'⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Function), # # Labels # ====== # following IBM APL2 standard - # (u'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*:', Name.Label), + # (r'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*:', Name.Label), # # Variables # ========= # following IBM APL2 standard - (u'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Variable), + (r'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Variable), # # Numbers # ======= - (u'¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)' - u'([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?', + (r'¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)' + r'([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?', Number), # # Operators # ========== - (u'[\\.\\\\\\/⌿⍀¨⍣⍨⍠⍤∘]', Name.Attribute), # closest token type - (u'[+\\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗]', + (r'[\.\\\/⌿⍀¨⍣⍨⍠⍤∘⌸&⌶@⌺⍥⍛⍢]', Name.Attribute), # closest token type + (r'[+\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗⊆⊇⍸√⌾…⍮]', Operator), # # Constant # ======== - (u'⍬', Name.Constant), + (r'⍬', Name.Constant), # # Quad symbol # =========== - (u'[⎕⍞]', Name.Variable.Global), + (r'[⎕⍞]', Name.Variable.Global), # # Arrows left/right # ================= - (u'[←→]', Keyword.Declaration), + (r'[←→]', Keyword.Declaration), # # D-Fn # ==== - (u'[⍺⍵⍶⍹∇:]', 
Name.Builtin.Pseudo), + (r'[⍺⍵⍶⍹∇:]', Name.Builtin.Pseudo), (r'[{}]', Keyword.Type), ], } diff --git a/src/typecode/_vendor/pygments/lexers/archetype.py b/src/typecode/_vendor/pygments/lexers/archetype.py index fc877b5..f55a6d1 100644 --- a/src/typecode/_vendor/pygments/lexers/archetype.py +++ b/src/typecode/_vendor/pygments/lexers/archetype.py @@ -14,7 +14,7 @@ Contributed by Thomas Beale , . - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -58,7 +58,7 @@ class AtomsLexer(RegexLexer): (r'P((\d*(\.\d+)?[YyMmWwDd]){1,3}(T(\d*(\.\d+)?[HhMmSs]){,3})?|' r'T(\d*(\.\d+)?[HhMmSs]){,3})', Literal.Date), (r'[+-]?(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float), - (r'[+-]?(\d+)*\.\d+%?', Number.Float), + (r'[+-]?\d*\.\d+%?', Number.Float), (r'0x[0-9a-fA-F]+', Number.Hex), (r'[+-]?\d+%?', Number.Integer), ], @@ -212,9 +212,9 @@ class CadlLexer(AtomsLexer): (r'(not)\W', Operator), (r'(matches|is_in)\W', Operator), # is_in / not is_in char - (u'(\u2208|\u2209)', Operator), + ('(\u2208|\u2209)', Operator), # there_exists / not there_exists / for_all / and / or - (u'(\u2203|\u2204|\u2200|\u2227|\u2228|\u22BB|\223C)', + ('(\u2203|\u2204|\u2200|\u2227|\u2228|\u22BB|\223C)', Operator), # regex in slot or as string constraint (r'(\{)(\s*/[^}]+/\s*)(\})', diff --git a/src/typecode/_vendor/pygments/lexers/arrow.py b/src/typecode/_vendor/pygments/lexers/arrow.py new file mode 100644 index 0000000..a4f4d4b --- /dev/null +++ b/src/typecode/_vendor/pygments/lexers/arrow.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.arrow + ~~~~~~~~~~~~~~~~~~~~~ + + Lexer for Arrow. + + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +from typecode._vendor.pygments.lexer import RegexLexer, bygroups, default, include +from typecode._vendor.pygments.token import Text, Operator, Keyword, Punctuation, Name, \ + String, Number + +__all__ = ['ArrowLexer'] + +TYPES = r'\b(int|bool|char)((?:\[\])*)(?=\s+)' +IDENT = r'([a-zA-Z_][a-zA-Z0-9_]*)' +DECL = TYPES + r'(\s+)' + IDENT + + +class ArrowLexer(RegexLexer): + """ + Lexer for Arrow: https://pypi.org/project/py-arrow-lang/ + + .. versionadded:: 2.7 + """ + + name = 'Arrow' + aliases = ['arrow'] + filenames = ['*.arw'] + + tokens = { + 'root': [ + (r'\s+', Text), + (r'^[|\s]+', Punctuation), + include('blocks'), + include('statements'), + include('expressions'), + ], + 'blocks': [ + (r'(function)(\n+)(/-->)(\s*)' + + DECL + # 4 groups + r'(\()', bygroups( + Keyword.Reserved, Text, Punctuation, + Text, Keyword.Type, Punctuation, Text, + Name.Function, Punctuation + ), 'fparams'), + (r'/-->$|\\-->$|/--<|\\--<|\^', Punctuation), + ], + 'statements': [ + (DECL, bygroups(Keyword.Type, Punctuation, Text, Name.Variable)), + (r'\[', Punctuation, 'index'), + (r'=', Operator), + (r'require|main', Keyword.Reserved), + (r'print', Keyword.Reserved, 'print'), + ], + 'expressions': [ + (r'\s+', Text), + (r'[0-9]+', Number.Integer), + (r'true|false', Keyword.Constant), + (r"'", String.Char, 'char'), + (r'"', String.Double, 'string'), + (r'\{', Punctuation, 'array'), + (r'==|!=|<|>|\+|-|\*|/|%', Operator), + (r'and|or|not|length', Operator.Word), + (r'(input)(\s+)(int|char\[\])', bygroups( + Keyword.Reserved, Text, Keyword.Type + )), + (IDENT + r'(\()', bygroups( + Name.Function, Punctuation + ), 'fargs'), + (IDENT, Name.Variable), + (r'\[', Punctuation, 'index'), + (r'\(', Punctuation, 'expressions'), + (r'\)', Punctuation, '#pop'), + ], + 'print': [ + include('expressions'), + (r',', Punctuation), + default('#pop'), + ], + 'fparams': [ + (DECL, bygroups(Keyword.Type, Punctuation, Text, Name.Variable)), + (r',', Punctuation), + (r'\)', Punctuation, '#pop'), + ], 
+ 'escape': [ + (r'\\(["\\/abfnrtv]|[0-9]{1,3}|x[0-9a-fA-F]{2}|u[0-9a-fA-F]{4})', + String.Escape), + ], + 'char': [ + (r"'", String.Char, '#pop'), + include('escape'), + (r"[^'\\]", String.Char), + ], + 'string': [ + (r'"', String.Double, '#pop'), + include('escape'), + (r'[^"\\]+', String.Double), + ], + 'array': [ + include('expressions'), + (r'\}', Punctuation, '#pop'), + (r',', Punctuation), + ], + 'fargs': [ + include('expressions'), + (r'\)', Punctuation, '#pop'), + (r',', Punctuation), + ], + 'index': [ + include('expressions'), + (r'\]', Punctuation, '#pop'), + ], + } diff --git a/src/typecode/_vendor/pygments/lexers/asm.py b/src/typecode/_vendor/pygments/lexers/asm.py index a3f334b..44277dd 100644 --- a/src/typecode/_vendor/pygments/lexers/asm.py +++ b/src/typecode/_vendor/pygments/lexers/asm.py @@ -5,22 +5,23 @@ Lexers for assembly languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import re from typecode._vendor.pygments.lexer import RegexLexer, include, bygroups, using, words, \ - DelegatingLexer + DelegatingLexer, default from typecode._vendor.pygments.lexers.c_cpp import CppLexer, CLexer from typecode._vendor.pygments.lexers.d import DLexer from typecode._vendor.pygments.token import Text, Name, Number, String, Comment, Punctuation, \ - Other, Keyword, Operator + Other, Keyword, Operator, Literal __all__ = ['GasLexer', 'ObjdumpLexer', 'DObjdumpLexer', 'CppObjdumpLexer', - 'CObjdumpLexer', 'HsailLexer', 'LlvmLexer', 'NasmLexer', - 'NasmObjdumpLexer', 'TasmLexer', 'Ca65Lexer', 'Dasm16Lexer'] + 'CObjdumpLexer', 'HsailLexer', 'LlvmLexer', 'LlvmMirBodyLexer', + 'LlvmMirLexer', 'NasmLexer', 'NasmObjdumpLexer', 'TasmLexer', + 'Ca65Lexer', 'Dasm16Lexer'] class GasLexer(RegexLexer): @@ -36,7 +37,8 @@ class GasLexer(RegexLexer): string = r'"(\\"|[^"])*"' char = r'[\w$.@-]' identifier = r'(?:[a-zA-Z$_]' + char + r'*|\.' 
+ char + '+)' - number = r'(?:0[xX][a-zA-Z0-9]+|\d+)' + number = r'(?:0[xX][a-fA-F0-9]+|#?-?\d+)' + register = '%' + identifier tokens = { 'root': [ @@ -52,8 +54,11 @@ class GasLexer(RegexLexer): (string, String), ('@' + identifier, Name.Attribute), (number, Number.Integer), + (register, Name.Variable), (r'[\r\n]+', Text, '#pop'), - (r'[;#].*?\n', Comment, '#pop'), + (r'([;#]|//).*?\n', Comment.Single, '#pop'), + (r'/[*].*?[*]/', Comment.Multiline), + (r'/[*].*?\n[\w\W]*?[*]/', Comment.Multiline, '#pop'), include('punctuation'), include('whitespace') @@ -72,12 +77,14 @@ class GasLexer(RegexLexer): (identifier, Name.Constant), (number, Number.Integer), # Registers - ('%' + identifier, Name.Variable), + (register, Name.Variable), # Numeric constants ('$'+number, Number.Integer), (r"$'(.|\\')'", String.Char), (r'[\r\n]+', Text, '#pop'), - (r'[;#].*?\n', Comment, '#pop'), + (r'([;#]|//).*?\n', Comment.Single, '#pop'), + (r'/[*].*?[*]/', Comment.Multiline), + (r'/[*].*?\n[\w\W]*?[*]/', Comment.Multiline, '#pop'), include('punctuation'), include('whitespace') @@ -85,7 +92,8 @@ class GasLexer(RegexLexer): 'whitespace': [ (r'\n', Text), (r'\s+', Text), - (r'[;#].*?\n', Comment) + (r'([;#]|//).*?\n', Comment.Single), + (r'/[*][\w\W]*?[*]/', Comment.Multiline) ], 'punctuation': [ (r'[-*,.()\[\]!:]+', Punctuation) @@ -93,9 +101,9 @@ class GasLexer(RegexLexer): } def analyse_text(text): - if re.match(r'^\.(text|data|section)', text, re.M): + if re.search(r'^\.(text|data|section)', text, re.M): return True - elif re.match(r'^\.\w+', text, re.M): + elif re.search(r'^\.\w+', text, re.M): return 0.1 @@ -150,7 +158,7 @@ def _objdump_lexer_tokens(asm_lexer): class ObjdumpLexer(RegexLexer): """ - For the output of 'objdump -dr' + For the output of ``objdump -dr``. 
""" name = 'objdump' aliases = ['objdump'] @@ -162,7 +170,7 @@ class ObjdumpLexer(RegexLexer): class DObjdumpLexer(DelegatingLexer): """ - For the output of 'objdump -Sr on compiled D files' + For the output of ``objdump -Sr`` on compiled D files. """ name = 'd-objdump' aliases = ['d-objdump'] @@ -170,12 +178,12 @@ class DObjdumpLexer(DelegatingLexer): mimetypes = ['text/x-d-objdump'] def __init__(self, **options): - super(DObjdumpLexer, self).__init__(DLexer, ObjdumpLexer, **options) + super().__init__(DLexer, ObjdumpLexer, **options) class CppObjdumpLexer(DelegatingLexer): """ - For the output of 'objdump -Sr on compiled C++ files' + For the output of ``objdump -Sr`` on compiled C++ files. """ name = 'cpp-objdump' aliases = ['cpp-objdump', 'c++-objdumb', 'cxx-objdump'] @@ -183,12 +191,12 @@ class CppObjdumpLexer(DelegatingLexer): mimetypes = ['text/x-cpp-objdump'] def __init__(self, **options): - super(CppObjdumpLexer, self).__init__(CppLexer, ObjdumpLexer, **options) + super().__init__(CppLexer, ObjdumpLexer, **options) class CObjdumpLexer(DelegatingLexer): """ - For the output of 'objdump -Sr on compiled C files' + For the output of ``objdump -Sr`` on compiled C files. 
""" name = 'c-objdump' aliases = ['c-objdump'] @@ -196,7 +204,7 @@ class CObjdumpLexer(DelegatingLexer): mimetypes = ['text/x-c-objdump'] def __init__(self, **options): - super(CObjdumpLexer, self).__init__(CLexer, ObjdumpLexer, **options) + super().__init__(CLexer, ObjdumpLexer, **options) class HsailLexer(RegexLexer): @@ -390,18 +398,18 @@ class LlvmLexer(RegexLexer): 'cleanupret', 'cmpxchg', 'cold', 'coldcc', 'comdat', 'common', 'constant', 'contract', 'convergent', 'critical', 'cxx_fast_tlscc', 'datalayout', 'declare', 'default', 'define', 'deplibs', 'dereferenceable', 'dereferenceable_or_null', - 'distinct', 'dllexport', 'dllimport', 'double', 'dso_local', 'dso_preemptable', + 'distinct', 'dllexport', 'dllimport', 'dso_local', 'dso_preemptable', 'dsoLocal', 'eq', 'exact', 'exactmatch', 'extern_weak', 'external', 'externally_initialized', 'extractelement', 'extractvalue', 'fadd', 'false', - 'fast', 'fastcc', 'fcmp', 'fdiv', 'fence', 'filter', 'flags', 'float', 'fmul', - 'fp128', 'fpext', 'fptosi', 'fptoui', 'fptrunc', 'frem', 'from', 'fsub', + 'fast', 'fastcc', 'fcmp', 'fdiv', 'fence', 'filter', 'flags', 'fmul', + 'fpext', 'fptosi', 'fptoui', 'fptrunc', 'freeze', 'frem', 'from', 'fsub', 'funcFlags', 'function', 'gc', 'getelementptr', 'ghccc', 'global', 'guid', 'gv', - 'half', 'hash', 'hhvm_ccc', 'hhvmcc', 'hidden', 'hot', 'hotness', 'icmp', + 'hash', 'hhvm_ccc', 'hhvmcc', 'hidden', 'hot', 'hotness', 'icmp', 'ifunc', 'inaccessiblemem_or_argmemonly', 'inaccessiblememonly', 'inalloca', 'inbounds', 'indir', 'indirectbr', 'info', 'initialexec', 'inline', 'inlineBits', 'inlinehint', 'inrange', 'inreg', 'insertelement', 'insertvalue', 'insts', 'intel_ocl_bicc', 'inteldialect', 'internal', 'inttoptr', 'invoke', - 'jumptable', 'kind', 'label', 'landingpad', 'largest', 'linkage', 'linkonce', + 'jumptable', 'kind', 'landingpad', 'largest', 'linkage', 'linkonce', 'linkonce_odr', 'live', 'load', 'local_unnamed_addr', 'localdynamic', 'localexec', 'lshr', 'max', 'metadata', 
'min', 'minsize', 'module', 'monotonic', 'msp430_intrcc', 'mul', 'musttail', 'naked', 'name', 'nand', 'ne', 'nest', @@ -410,7 +418,7 @@ class LlvmLexer(RegexLexer): 'nonlazybind', 'nonnull', 'norecurse', 'noRecurse', 'noredzone', 'noreturn', 'notail', 'notEligibleToImport', 'nounwind', 'nsw', 'nsz', 'null', 'nuw', 'oeq', 'offset', 'oge', 'ogt', 'ole', 'olt', 'one', 'opaque', 'optforfuzzing', - 'optnone', 'optsize', 'or', 'ord', 'path', 'personality', 'phi', 'ppc_fp128', + 'optnone', 'optsize', 'or', 'ord', 'path', 'personality', 'phi', 'poison', 'prefix', 'preserve_allcc', 'preserve_mostcc', 'private', 'prologue', 'protected', 'ptrtoint', 'ptx_device', 'ptx_kernel', 'readnone', 'readNone', 'readonly', 'readOnly', 'reassoc', 'refs', 'relbf', 'release', 'resByArg', @@ -430,22 +438,245 @@ class LlvmLexer(RegexLexer): 'unordered', 'unreachable', 'unsat', 'unwind', 'urem', 'uselistorder', 'uselistorder_bb', 'uwtable', 'va_arg', 'variable', 'vFuncId', 'virtualConstProp', 'void', 'volatile', 'weak', 'weak_odr', 'webkit_jscc', - 'win64cc', 'within', 'wpdRes', 'wpdResolutions', 'writeonly', 'x', - 'x86_64_sysvcc', 'x86_fastcallcc', 'x86_fp80', 'x86_intrcc', 'x86_mmx', + 'win64cc', 'within', 'wpdRes', 'wpdResolutions', 'writeonly', + 'x86_64_sysvcc', 'x86_fastcallcc', 'x86_intrcc', 'x86_mmx', 'x86_regcallcc', 'x86_stdcallcc', 'x86_thiscallcc', 'x86_vectorcallcc', 'xchg', - 'xor', 'zeroext', 'zeroinitializer', 'zext'), + 'xor', 'zeroext', 'zeroinitializer', 'zext', 'immarg', 'willreturn'), suffix=r'\b'), Keyword), # Types - (words(('void', 'half', 'float', 'double', 'x86_fp80', 'fp128', - 'ppc_fp128', 'label', 'metadata', 'token')), Keyword.Type), + (words(('void', 'half', 'bfloat', 'float', 'double', 'fp128', + 'x86_fp80', 'ppc_fp128', 'label', 'metadata', 'token')), + Keyword.Type), # Integer types - (r'i[1-9]\d*', Keyword) + (r'i[1-9]\d*', Keyword.Type) ] } +class LlvmMirBodyLexer(RegexLexer): + """ + For LLVM MIR examples without the YAML wrapper. 
+ + For more information on LLVM MIR see https://llvm.org/docs/MIRLangRef.html. + + .. versionadded:: 2.6 + """ + name = 'LLVM-MIR Body' + aliases = ['llvm-mir-body'] + filenames = [] + mimetypes = [] + + tokens = { + 'root': [ + # Attributes on basic blocks + (words(('liveins', 'successors'), suffix=':'), Keyword), + # Basic Block Labels + (r'bb\.[0-9]+(\.[a-zA-Z0-9_.-]+)?( \(address-taken\))?:', Name.Label), + (r'bb\.[0-9]+ \(%[a-zA-Z0-9_.-]+\)( \(address-taken\))?:', Name.Label), + (r'%bb\.[0-9]+(\.\w+)?', Name.Label), + # Stack references + (r'%stack\.[0-9]+(\.\w+\.addr)?', Name), + # Subreg indices + (r'%subreg\.\w+', Name), + # Virtual registers + (r'%[a-zA-Z0-9_]+ *', Name.Variable, 'vreg'), + # Reference to LLVM-IR global + include('global'), + # Reference to Intrinsic + (r'intrinsic\(\@[a-zA-Z0-9_.]+\)', Name.Variable.Global), + # Comparison predicates + (words(('eq', 'ne', 'sgt', 'sge', 'slt', 'sle', 'ugt', 'uge', 'ult', + 'ule'), prefix=r'intpred\(', suffix=r'\)'), Name.Builtin), + (words(('oeq', 'one', 'ogt', 'oge', 'olt', 'ole', 'ugt', 'uge', + 'ult', 'ule'), prefix=r'floatpred\(', suffix=r'\)'), + Name.Builtin), + # Physical registers + (r'\$\w+', String.Single), + # Assignment operator + (r'=', Operator), + # gMIR Opcodes + (r'(G_ANYEXT|G_[SZ]EXT|G_SEXT_INREG|G_TRUNC|G_IMPLICIT_DEF|G_PHI|' + r'G_FRAME_INDEX|G_GLOBAL_VALUE|G_INTTOPTR|G_PTRTOINT|G_BITCAST|' + r'G_CONSTANT|G_FCONSTANT|G_VASTART|G_VAARG|G_CTLZ|G_CTLZ_ZERO_UNDEF|' + r'G_CTTZ|G_CTTZ_ZERO_UNDEF|G_CTPOP|G_BSWAP|G_BITREVERSE|' + r'G_ADDRSPACE_CAST|G_BLOCK_ADDR|G_JUMP_TABLE|G_DYN_STACKALLOC|' + r'G_ADD|G_SUB|G_MUL|G_[SU]DIV|G_[SU]REM|G_AND|G_OR|G_XOR|G_SHL|' + r'G_[LA]SHR|G_[IF]CMP|G_SELECT|G_GEP|G_PTR_MASK|G_SMIN|G_SMAX|' + r'G_UMIN|G_UMAX|G_[US]ADDO|G_[US]ADDE|G_[US]SUBO|G_[US]SUBE|' + r'G_[US]MULO|G_[US]MULH|G_FNEG|G_FPEXT|G_FPTRUNC|G_FPTO[US]I|' + r'G_[US]ITOFP|G_FABS|G_FCOPYSIGN|G_FCANONICALIZE|G_FMINNUM|' + r'G_FMAXNUM|G_FMINNUM_IEEE|G_FMAXNUM_IEEE|G_FMINIMUM|G_FMAXIMUM|' + 
r'G_FADD|G_FSUB|G_FMUL|G_FMA|G_FMAD|G_FDIV|G_FREM|G_FPOW|G_FEXP|' + r'G_FEXP2|G_FLOG|G_FLOG2|G_FLOG10|G_FCEIL|G_FCOS|G_FSIN|G_FSQRT|' + r'G_FFLOOR|G_FRINT|G_FNEARBYINT|G_INTRINSIC_TRUNC|' + r'G_INTRINSIC_ROUND|G_LOAD|G_[ZS]EXTLOAD|G_INDEXED_LOAD|' + r'G_INDEXED_[ZS]EXTLOAD|G_STORE|G_INDEXED_STORE|' + r'G_ATOMIC_CMPXCHG_WITH_SUCCESS|G_ATOMIC_CMPXCHG|' + r'G_ATOMICRMW_(XCHG|ADD|SUB|AND|NAND|OR|XOR|MAX|MIN|UMAX|UMIN|FADD|' + r'FSUB)' + r'|G_FENCE|G_EXTRACT|G_UNMERGE_VALUES|G_INSERT|G_MERGE_VALUES|' + r'G_BUILD_VECTOR|G_BUILD_VECTOR_TRUNC|G_CONCAT_VECTORS|' + r'G_INTRINSIC|G_INTRINSIC_W_SIDE_EFFECTS|G_BR|G_BRCOND|' + r'G_BRINDIRECT|G_BRJT|G_INSERT_VECTOR_ELT|G_EXTRACT_VECTOR_ELT|' + r'G_SHUFFLE_VECTOR)\b', + Name.Builtin), + # Target independent opcodes + (r'(COPY|PHI|INSERT_SUBREG|EXTRACT_SUBREG|REG_SEQUENCE)\b', + Name.Builtin), + # Flags + (words(('killed', 'implicit')), Keyword), + # ConstantInt values + (r'i[0-9]+ +', Keyword.Type, 'constantint'), + # ConstantFloat values + (r'(half|float|double) +', Keyword.Type, 'constantfloat'), + # Bare immediates + include('integer'), + # MMO's + (r':: *', Operator, 'mmo'), + # MIR Comments + (r';.*', Comment), + # If we get here, assume it's a target instruction + (r'[a-zA-Z0-9_]+', Name), + # Everything else that isn't highlighted + (r'[(), \n]+', Text), + ], + # The integer constant from a ConstantInt value + 'constantint': [ + include('integer'), + (r'(?=.)', Text, '#pop'), + ], + # The floating point constant from a ConstantFloat value + 'constantfloat': [ + include('float'), + (r'(?=.)', Text, '#pop'), + ], + 'vreg': [ + # The bank or class if there is one + (r' *:(?!:)', Keyword, ('#pop', 'vreg_bank_or_class')), + # The LLT if there is one + (r' *\(', Text, 'vreg_type'), + (r'(?=.)', Text, '#pop'), + ], + 'vreg_bank_or_class': [ + # The unassigned bank/class + (r' *_', Name.Variable.Magic), + (r' *[a-zA-Z0-9_]+', Name.Variable), + # The LLT if there is one + (r' *\(', Text, 'vreg_type'), + (r'(?=.)', Text, '#pop'), + 
], + 'vreg_type': [ + # Scalar and pointer types + (r' *[sp][0-9]+', Keyword.Type), + (r' *<[0-9]+ *x *[sp][0-9]+>', Keyword.Type), + (r'\)', Text, '#pop'), + (r'(?=.)', Text, '#pop'), + ], + 'mmo': [ + (r'\(', Text), + (r' +', Text), + (words(('load', 'store', 'on', 'into', 'from', 'align', 'monotonic', + 'acquire', 'release', 'acq_rel', 'seq_cst')), + Keyword), + # IR references + (r'%ir\.[a-zA-Z0-9_.-]+', Name), + (r'%ir-block\.[a-zA-Z0-9_.-]+', Name), + (r'[-+]', Operator), + include('integer'), + include('global'), + (r',', Punctuation), + (r'\), \(', Text), + (r'\)', Text, '#pop'), + ], + 'integer': [(r'-?[0-9]+', Number.Integer),], + 'float': [(r'-?[0-9]+\.[0-9]+(e[+-][0-9]+)?', Number.Float)], + 'global': [(r'\@[a-zA-Z0-9_.]+', Name.Variable.Global)], + } + + +class LlvmMirLexer(RegexLexer): + """ + Lexer for the overall LLVM MIR document format. + + MIR is a human readable serialization format that's used to represent LLVM's + machine specific intermediate representation. It allows LLVM's developers to + see the state of the compilation process at various points, as well as test + individual pieces of the compiler. + + For more information on LLVM MIR see https://llvm.org/docs/MIRLangRef.html. + + .. versionadded:: 2.6 + """ + name = 'LLVM-MIR' + aliases = ['llvm-mir'] + filenames = ['*.mir'] + + tokens = { + 'root': [ + # Comments are hashes at the YAML level + (r'#.*', Comment), + # Documents starting with | are LLVM-IR + (r'--- \|$', Keyword, 'llvm_ir'), + # Other documents are MIR + (r'---', Keyword, 'llvm_mir'), + # Consume everything else in one token for efficiency + (r'[^-#]+|.', Text), + ], + 'llvm_ir': [ + # Documents end with '...' or '---' + (r'(\.\.\.|(?=---))', Keyword, '#pop'), + # Delegate to the LlvmLexer + (r'((?:.|\n)+?)(?=(\.\.\.|---))', bygroups(using(LlvmLexer))), + ], + 'llvm_mir': [ + # Comments are hashes at the YAML level + (r'#.*', Comment), + # Documents end with '...' 
or '---' + (r'(\.\.\.|(?=---))', Keyword, '#pop'), + # Handle the simple attributes + (r'name:', Keyword, 'name'), + (words(('alignment', ), + suffix=':'), Keyword, 'number'), + (words(('legalized', 'regBankSelected', 'tracksRegLiveness', + 'selected', 'exposesReturnsTwice'), + suffix=':'), Keyword, 'boolean'), + # Handle the attributes don't highlight inside + (words(('registers', 'stack', 'fixedStack', 'liveins', 'frameInfo', + 'machineFunctionInfo'), + suffix=':'), Keyword), + # Delegate the body block to the LlvmMirBodyLexer + (r'body: *\|', Keyword, 'llvm_mir_body'), + # Consume everything else + (r'.+', Text), + (r'\n', Text), + ], + 'name': [ + (r'[^\n]+', Name), + default('#pop'), + ], + 'boolean': [ + (r' *(true|false)', Name.Builtin), + default('#pop'), + ], + 'number': [ + (r' *[0-9]+', Number), + default('#pop'), + ], + 'llvm_mir_body': [ + # Documents end with '...' or '---'. + # We have to pop llvm_mir_body and llvm_mir + (r'(\.\.\.|(?=---))', Keyword, '#pop:2'), + # Delegate the body block to the LlvmMirBodyLexer + (r'((?:.|\n)+?)(?=\.\.\.|---)', bygroups(using(LlvmMirBodyLexer))), + # The '...' is optional. If we didn't already find it then it isn't + # there. There might be a '---' instead though. + (r'(?!\.\.\.|---)((?:.|\n)+)', bygroups(using(LlvmMirBodyLexer))), + ], + } + + class NasmLexer(RegexLexer): """ For Nasm (Intel) assembly code. @@ -455,6 +686,10 @@ class NasmLexer(RegexLexer): filenames = ['*.asm', '*.ASM'] mimetypes = ['text/x-nasm'] + # Tasm uses the same file endings, but TASM is not as common as NASM, so + # we prioritize NASM higher by default + priority = 1.0 + identifier = r'[a-z$._?][\w$.?#@~]*' hexn = r'(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)' octn = r'[0-7]+q' @@ -463,7 +698,7 @@ class NasmLexer(RegexLexer): floatn = decn + r'\.e?' 
+ decn string = r'"(\\"|[^"\n])*"|' + r"'(\\'|[^'\n])*'|" + r"`(\\`|[^`\n])*`" declkw = r'(?:res|d)[bwdqt]|times' - register = (r'r[0-9][0-5]?[bwd]|' + register = (r'r[0-9][0-5]?[bwd]?|' r'[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|' r'mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]') wordop = r'seg|wrt|strict' @@ -520,10 +755,15 @@ class NasmLexer(RegexLexer): ], } + def analyse_text(text): + # Probably TASM + if re.match(r'PROC', text, re.IGNORECASE): + return False + class NasmObjdumpLexer(ObjdumpLexer): """ - For the output of 'objdump -d -M intel'. + For the output of ``objdump -d -M intel``. .. versionadded:: 2.0 """ @@ -614,6 +854,11 @@ class TasmLexer(RegexLexer): ], } + def analyse_text(text): + # See above + if re.match(r'PROC', text, re.I): + return True + class Ca65Lexer(RegexLexer): """ @@ -650,13 +895,13 @@ class Ca65Lexer(RegexLexer): def analyse_text(self, text): # comments in GAS start with "#" - if re.match(r'^\s*;', text, re.MULTILINE): + if re.search(r'^\s*;', text, re.MULTILINE): return 0.9 class Dasm16Lexer(RegexLexer): """ - Simple lexer for DCPU-16 Assembly + For DCPU-16 Assembly. Check http://0x10c.com/doc/dcpu-16.txt @@ -691,7 +936,7 @@ class Dasm16Lexer(RegexLexer): ] # Regexes yo - char = r'[a-zA-Z$._0-9@]' + char = r'[a-zA-Z0-9_$@.]' identifier = r'(?:[a-zA-Z$_]' + char + r'*|\.' + char + '+)' number = r'[+-]?(?:0[xX][a-zA-Z0-9]+|\d+)' binary_number = r'0b[01_]+' diff --git a/src/typecode/_vendor/pygments/lexers/automation.py b/src/typecode/_vendor/pygments/lexers/automation.py index 93db2b4..e59e959 100644 --- a/src/typecode/_vendor/pygments/lexers/automation.py +++ b/src/typecode/_vendor/pygments/lexers/automation.py @@ -5,7 +5,7 @@ Lexers for automation scripting languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" diff --git a/src/typecode/_vendor/pygments/lexers/bare.py b/src/typecode/_vendor/pygments/lexers/bare.py new file mode 100644 index 0000000..e274295 --- /dev/null +++ b/src/typecode/_vendor/pygments/lexers/bare.py @@ -0,0 +1,104 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.bare + ~~~~~~~~~~~~~~~~~~~~ + + Lexer for the BARE schema. + + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from typecode._vendor.pygments.lexer import RegexLexer, words, bygroups +from typecode._vendor.pygments.token import Text, Comment, Keyword, Name, Literal + +__all__ = ['BareLexer'] + + +class BareLexer(RegexLexer): + """ + For `BARE schema `_ schema source. + + .. versionadded:: 2.7 + """ + name = 'BARE' + filenames = ['*.bare'] + aliases = ['bare'] + + flags = re.MULTILINE | re.UNICODE + + keywords = [ + 'type', + 'enum', + 'u8', + 'u16', + 'u32', + 'u64', + 'uint', + 'i8', + 'i16', + 'i32', + 'i64', + 'int', + 'f32', + 'f64', + 'bool', + 'void', + 'data', + 'string', + 'optional', + 'map', + ] + + tokens = { + 'root': [ + (r'(type)(\s+)([A-Z][a-zA-Z0-9]+)(\s+\{)', + bygroups(Keyword, Text, Name.Class, Text), 'struct'), + (r'(type)(\s+)([A-Z][a-zA-Z0-9]+)(\s+\()', + bygroups(Keyword, Text, Name.Class, Text), 'union'), + (r'(type)(\s+)([A-Z][a-zA-Z0-9]+)(\s+)', + bygroups(Keyword, Text, Name, Text), 'typedef'), + (r'(enum)(\s+)([A-Z][a-zA-Z0-9]+)(\s+\{)', + bygroups(Keyword, Text, Name.Class, Text), 'enum'), + (r'#.*?$', Comment), + (r'\s+', Text), + ], + 'struct': [ + (r'\{', Text, '#push'), + (r'\}', Text, '#pop'), + (r'([a-zA-Z0-9]+)(:\s*)', bygroups(Name.Attribute, Text), 'typedef'), + (r'\s+', Text), + ], + 'union': [ + (r'\)', Text, '#pop'), + (r'\s*\|\s*', Text), + (r'[A-Z][a-zA-Z0-9]+', Name.Class), + (words(keywords), Keyword), + (r'\s+', Text), + ], + 'typedef': [ + (r'\[\]', Text), + (r'#.*?$', Comment, '#pop'), + (r'(\[)(\d+)(\])', bygroups(Text, Literal, Text)), + (r'<|>', Text), + 
(r'\(', Text, 'union'), + (r'(\[)([a-z][a-z-A-Z0-9]+)(\])', bygroups(Text, Keyword, Text)), + (r'(\[)([A-Z][a-z-A-Z0-9]+)(\])', bygroups(Text, Name.Class, Text)), + (r'([A-Z][a-z-A-Z0-9]+)', Name.Class), + (words(keywords), Keyword), + (r'\n', Text, '#pop'), + (r'\{', Text, 'struct'), + (r'\s+', Text), + (r'\d+', Literal), + ], + 'enum': [ + (r'\{', Text, '#push'), + (r'\}', Text, '#pop'), + (r'([A-Z][A-Z0-9_]*)(\s*=\s*)(\d+)', bygroups(Name.Attribute, Text, Literal)), + (r'([A-Z][A-Z0-9_]*)', bygroups(Name.Attribute)), + (r'#.*?$', Comment), + (r'\s+', Text), + ], + } diff --git a/src/typecode/_vendor/pygments/lexers/basic.py b/src/typecode/_vendor/pygments/lexers/basic.py index eba981b..440d9ba 100644 --- a/src/typecode/_vendor/pygments/lexers/basic.py +++ b/src/typecode/_vendor/pygments/lexers/basic.py @@ -5,7 +5,7 @@ Lexers for BASIC like languages (other than VB.net). - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -21,7 +21,6 @@ 'QBasicLexer', 'VBScriptLexer', 'BBCBasicLexer'] - class BlitzMaxLexer(RegexLexer): """ For `BlitzMax `_ source code. 
@@ -524,15 +523,18 @@ class VBScriptLexer(RegexLexer): (r'[0-9]+\.[0-9]*(e[+-]?[0-9]+)?', Number.Float), (r'\.[0-9]+(e[+-]?[0-9]+)?', Number.Float), # Float variant 2, for example: .1, .1e2 (r'[0-9]+e[+-]?[0-9]+', Number.Float), # Float variant 3, for example: 123e45 - (r'\d+', Number.Integer), + (r'[0-9]+', Number.Integer), ('#.+#', String), # date or time value (r'(dim)(\s+)([a-z_][a-z0-9_]*)', bygroups(Keyword.Declaration, Whitespace, Name.Variable), 'dim_more'), (r'(function|sub)(\s+)([a-z_][a-z0-9_]*)', bygroups(Keyword.Declaration, Whitespace, Name.Function)), - (r'(class)(\s+)([a-z_][a-z0-9_]*)', bygroups(Keyword.Declaration, Whitespace, Name.Class)), - (r'(const)(\s+)([a-z_][a-z0-9_]*)', bygroups(Keyword.Declaration, Whitespace, Name.Constant)), - (r'(end)(\s+)(class|function|if|property|sub|with)', bygroups(Keyword, Whitespace, Keyword)), + (r'(class)(\s+)([a-z_][a-z0-9_]*)', + bygroups(Keyword.Declaration, Whitespace, Name.Class)), + (r'(const)(\s+)([a-z_][a-z0-9_]*)', + bygroups(Keyword.Declaration, Whitespace, Name.Constant)), + (r'(end)(\s+)(class|function|if|property|sub|with)', + bygroups(Keyword, Whitespace, Keyword)), (r'(on)(\s+)(error)(\s+)(goto)(\s+)(0)', bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword, Whitespace, Number.Integer)), (r'(on)(\s+)(error)(\s+)(resume)(\s+)(next)', @@ -553,7 +555,8 @@ class VBScriptLexer(RegexLexer): (r'.+(\n)?', Error) ], 'dim_more': [ - (r'(\s*)(,)(\s*)([a-z_][a-z0-9]*)', bygroups(Whitespace, Punctuation, Whitespace, Name.Variable)), + (r'(\s*)(,)(\s*)([a-z_][a-z0-9]*)', + bygroups(Whitespace, Punctuation, Whitespace, Name.Variable)), default('#pop'), ], 'string': [ @@ -609,7 +612,7 @@ class BBCBasicLexer(RegexLexer): (r"[0-9]+", Name.Label), (r"(\*)([^\n]*)", bygroups(Keyword.Pseudo, Comment.Special)), - (r"", Whitespace, 'code'), + default('code'), ], 'code': [ diff --git a/src/typecode/_vendor/pygments/lexers/bibtex.py b/src/typecode/_vendor/pygments/lexers/bibtex.py index 531d98c..b02be82 100644 --- 
a/src/typecode/_vendor/pygments/lexers/bibtex.py +++ b/src/typecode/_vendor/pygments/lexers/bibtex.py @@ -5,7 +5,7 @@ Lexers for BibTeX bibliography data and styles - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -33,7 +33,7 @@ class BibTeXLexer(ExtendedRegexLexer): flags = re.IGNORECASE ALLOWED_CHARS = r'@!$&*+\-./:;<>?\[\\\]^`|~' - IDENTIFIER = '[{0}][{1}]*'.format('a-z_' + ALLOWED_CHARS, r'\w' + ALLOWED_CHARS) + IDENTIFIER = '[{}][{}]*'.format('a-z_' + ALLOWED_CHARS, r'\w' + ALLOWED_CHARS) def open_brace_callback(self, match, ctx): opening_brace = match.group() diff --git a/src/typecode/_vendor/pygments/lexers/boa.py b/src/typecode/_vendor/pygments/lexers/boa.py index 1c1623d..44a847d 100644 --- a/src/typecode/_vendor/pygments/lexers/boa.py +++ b/src/typecode/_vendor/pygments/lexers/boa.py @@ -5,7 +5,7 @@ Lexers for the Boa language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -92,9 +92,9 @@ class BoaLexer(RegexLexer): (classes, Name.Classes), (words(operators), Operator), (r'[][(),;{}\\.]', Punctuation), - (r'"(\\\\|\\"|[^"])*"', String), - (r'`(\\\\|\\`|[^`])*`', String), - (words(string_sep), String.Delimeter), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"`(\\\\|\\[^\\]|[^`\\])*`", String.Backtick), + (words(string_sep), String.Delimiter), (r'[a-zA-Z_]+', Name.Variable), (r'[0-9]+', Number.Integer), (r'\s+?', Text), # Whitespace diff --git a/src/typecode/_vendor/pygments/lexers/business.py b/src/typecode/_vendor/pygments/lexers/business.py index 5ad62ea..7ae59a2 100644 --- a/src/typecode/_vendor/pygments/lexers/business.py +++ b/src/typecode/_vendor/pygments/lexers/business.py @@ -5,7 +5,7 @@ Lexers for "business-oriented" languages. 
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -478,8 +478,8 @@ class OpenEdgeLexer(RegexLexer): (r'(?i)(DEFINE|DEF|DEFI|DEFIN)\b', Keyword.Declaration), (types, Keyword.Type), (keywords, Name.Builtin), - (r'"(\\\\|\\"|[^"])*"', String.Double), - (r"'(\\\\|\\'|[^'])*'", String.Single), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), (r'[0-9]+', Number.Integer), (r'\s+', Text), @@ -500,11 +500,26 @@ class OpenEdgeLexer(RegexLexer): ], } + def analyse_text(text): + """Try to identify OpenEdge ABL based on a few common constructs.""" + result = 0 + + if 'END.' in text: + result += 0.05 + + if 'END PROCEDURE.' in text: + result += 0.05 + + if 'ELSE DO:' in text: + result += 0.05 + + return result + class GoodDataCLLexer(RegexLexer): """ Lexer for `GoodData-CL - `_ script files. diff --git a/src/typecode/_vendor/pygments/lexers/c_cpp.py b/src/typecode/_vendor/pygments/lexers/c_cpp.py index 69e09d1..681bcb2 100644 --- a/src/typecode/_vendor/pygments/lexers/c_cpp.py +++ b/src/typecode/_vendor/pygments/lexers/c_cpp.py @@ -5,7 +5,7 @@ Lexers for C/C++ languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -33,6 +33,18 @@ class CFamilyLexer(RegexLexer): #: only one /* */ style comment _ws1 = r'\s*(?:/[*].*?[*]/\s*)?' + # Hexadecimal part in an hexadecimal integer/floating-point literal. + # This includes decimal separators matching. + _hexpart = r'[0-9a-fA-F](\'?[0-9a-fA-F])*' + # Decimal part in an decimal integer/floating-point literal. + # This includes decimal separators matching. + _decpart = r'\d(\'?\d)*' + # Integer literal suffix (e.g. 'ull' or 'll'). 
+ _intsuffix = r'(([uU][lL]{0,2})|[lL]{1,2}[uU]?)?' + + # Identifier regex with C and C++ Universal Character Name (UCN) support. + _ident = r'(?:[a-zA-Z_$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})*' + tokens = { 'whitespace': [ # preprocessor directives: without whitespace @@ -52,27 +64,33 @@ class CFamilyLexer(RegexLexer): (r'/(\\\n)?[*][\w\W]*', Comment.Multiline), ], 'statements': [ - (r'(L?)(")', bygroups(String.Affix, String), 'string'), - (r"(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')", + (r'([LuU]|u8)?(")', bygroups(String.Affix, String), 'string'), + (r"([LuU]|u8)?(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')", bygroups(String.Affix, String.Char, String.Char, String.Char)), - (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float), - (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), - (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex), - (r'0[0-7]+[LlUu]*', Number.Oct), - (r'\d+[LlUu]*', Number.Integer), + + # Hexadecimal floating-point literals (C11, C++17) + (r'0[xX](' + _hexpart + r'\.' + _hexpart + r'|\.' + _hexpart + r'|' + _hexpart + r')[pP][+-]?' + _hexpart + r'[lL]?', Number.Float), + + (r'(-)?(' + _decpart + r'\.' + _decpart + r'|\.' + _decpart + r'|' + _decpart + r')[eE][+-]?' + _decpart + r'[fFlL]?', Number.Float), + (r'(-)?((' + _decpart + r'\.(' + _decpart + r')?|\.' + _decpart + r')[fFlL]?)|(' + _decpart + r'[fFlL])', Number.Float), + (r'(-)?0[xX]' + _hexpart + _intsuffix, Number.Hex), + (r'(-)?0[bB][01](\'?[01])*' + _intsuffix, Number.Bin), + (r'(-)?0(\'?[0-7])+' + _intsuffix, Number.Oct), + (r'(-)?' 
+ _decpart + _intsuffix, Number.Integer), (r'\*/', Error), (r'[~!%^&*+=|?:<>/-]', Operator), (r'[()\[\],.]', Punctuation), + (r'(struct|union)(\s+)', bygroups(Keyword, Text), 'classname'), (words(('asm', 'auto', 'break', 'case', 'const', 'continue', 'default', 'do', 'else', 'enum', 'extern', 'for', 'goto', - 'if', 'register', 'restricted', 'return', 'sizeof', - 'static', 'struct', 'switch', 'typedef', 'union', - 'volatile', 'while'), + 'if', 'register', 'restricted', 'return', 'sizeof', 'struct', + 'static', 'switch', 'typedef', 'volatile', 'while', 'union', + 'thread_local', 'alignas', 'alignof', 'static_assert', '_Pragma'), suffix=r'\b'), Keyword), (r'(bool|int|long|float|short|double|char|unsigned|signed|void)\b', Keyword.Type), (words(('inline', '_inline', '__inline', 'naked', 'restrict', - 'thread', 'typename'), suffix=r'\b'), Keyword.Reserved), + 'thread'), suffix=r'\b'), Keyword.Reserved), # Vector intrinsics (r'(__m(128i|128d|128|64))\b', Keyword.Reserved), # Microsoft-isms @@ -83,22 +101,22 @@ class CFamilyLexer(RegexLexer): 'identifier', 'forceinline', 'assume'), prefix=r'__', suffix=r'\b'), Keyword.Reserved), (r'(true|false|NULL)\b', Name.Builtin), - (r'([a-zA-Z_]\w*)(\s*)(:)(?!:)', bygroups(Name.Label, Text, Punctuation)), - (r'[a-zA-Z_]\w*', Name), + (r'(' + _ident + r')(\s*)(:)(?!:)', bygroups(Name.Label, Text, Punctuation)), + (_ident, Name) ], 'root': [ include('whitespace'), # functions - (r'((?:[\w*\s])+?(?:\s|[*]))' # return arguments - r'([a-zA-Z_]\w*)' # method name + (r'((?:' + _ident + r'(?:[&*\s])+))' # return arguments + r'(' + _ident + r')' # method name r'(\s*\([^;]*?\))' # signature r'([^;{]*)(\{)', bygroups(using(this), Name.Function, using(this), using(this), Punctuation), 'function'), # function declarations - (r'((?:[\w*\s])+?(?:\s|[*]))' # return arguments - r'([a-zA-Z_]\w*)' # method name + (r'((?:' + _ident + r'(?:[&*\s])+))' # return arguments + r'(' + _ident + r')' # method name r'(\s*\([^;]*?\))' # signature r'([^;]*)(;)', 
bygroups(using(this), Name.Function, using(this), using(this), @@ -108,8 +126,8 @@ class CFamilyLexer(RegexLexer): 'statement': [ include('whitespace'), include('statements'), - ('[{}]', Punctuation), - (';', Punctuation, '#pop'), + (r'\}', Punctuation), + (r'[{;]', Punctuation, '#pop'), ], 'function': [ include('whitespace'), @@ -127,8 +145,8 @@ class CFamilyLexer(RegexLexer): (r'\\', String), # stray backslash ], 'macro': [ - (r'(include)(' + _ws1 + r')([^\n]+)', - bygroups(Comment.Preproc, Text, Comment.PreprocFile)), + (r'(include)('+_ws1+r')("[^"]+")([^\n]*)', bygroups(Comment.Preproc, using(this), Comment.PreprocFile, Comment.Single)), + (r'(include)('+_ws1+r')(<[^>]+>)([^\n]*)', bygroups(Comment.Preproc, using(this), Comment.PreprocFile, Comment.Single)), (r'[^/\n]+', Comment.Preproc), (r'/[*](.|\n)*?[*]/', Comment.Multiline), (r'//.*?\n', Comment.Single, '#pop'), @@ -141,28 +159,46 @@ class CFamilyLexer(RegexLexer): (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'), (r'^\s*#endif.*?(?)', Text, '#pop'), + default('#pop') ] } - stdlib_types = set(( + stdlib_types = { 'size_t', 'ssize_t', 'off_t', 'wchar_t', 'ptrdiff_t', 'sig_atomic_t', 'fpos_t', 'clock_t', 'time_t', 'va_list', 'jmp_buf', 'FILE', 'DIR', 'div_t', 'ldiv_t', - 'mbstate_t', 'wctrans_t', 'wint_t', 'wctype_t')) - c99_types = set(( - '_Bool', '_Complex', 'int8_t', 'int16_t', 'int32_t', 'int64_t', 'uint8_t', + 'mbstate_t', 'wctrans_t', 'wint_t', 'wctype_t'} + c99_types = { + 'int8_t', 'int16_t', 'int32_t', 'int64_t', 'uint8_t', 'uint16_t', 'uint32_t', 'uint64_t', 'int_least8_t', 'int_least16_t', 'int_least32_t', 'int_least64_t', 'uint_least8_t', 'uint_least16_t', 'uint_least32_t', 'uint_least64_t', 'int_fast8_t', 'int_fast16_t', 'int_fast32_t', 'int_fast64_t', 'uint_fast8_t', 'uint_fast16_t', 'uint_fast32_t', 'uint_fast64_t', - 'intptr_t', 'uintptr_t', 'intmax_t', 'uintmax_t')) - linux_types = set(( + 'intptr_t', 'uintptr_t', 'intmax_t', 'uintmax_t'} + linux_types = { 'clockid_t', 'cpu_set_t', 
'cpumask_t', 'dev_t', 'gid_t', 'id_t', 'ino_t', 'key_t', 'mode_t', 'nfds_t', 'pid_t', 'rlim_t', 'sig_t', 'sighandler_t', 'siginfo_t', - 'sigset_t', 'sigval_t', 'socklen_t', 'timer_t', 'uid_t')) + 'sigset_t', 'sigval_t', 'socklen_t', 'timer_t', 'uid_t'} + c11_atomic_types = { + 'atomic_bool', 'atomic_char', 'atomic_schar', 'atomic_uchar', 'atomic_short', + 'atomic_ushort', 'atomic_int', 'atomic_uint', 'atomic_long', 'atomic_ulong', + 'atomic_llong', 'atomic_ullong', 'atomic_char16_t', 'atomic_char32_t', 'atomic_wchar_t', + 'atomic_int_least8_t', 'atomic_uint_least8_t', 'atomic_int_least16_t', + 'atomic_uint_least16_t', 'atomic_int_least32_t', 'atomic_uint_least32_t', + 'atomic_int_least64_t', 'atomic_uint_least64_t', 'atomic_int_fast8_t', + 'atomic_uint_fast8_t', 'atomic_int_fast16_t', 'atomic_uint_fast16_t', + 'atomic_int_fast32_t', 'atomic_uint_fast32_t', 'atomic_int_fast64_t', + 'atomic_uint_fast64_t', 'atomic_intptr_t', 'atomic_uintptr_t', 'atomic_size_t', + 'atomic_ptrdiff_t', 'atomic_intmax_t', 'atomic_uintmax_t'} def __init__(self, **options): self.stdlibhighlighting = get_bool_opt(options, 'stdlibhighlighting', True) self.c99highlighting = get_bool_opt(options, 'c99highlighting', True) + self.c11highlighting = get_bool_opt(options, 'c11highlighting', True) self.platformhighlighting = get_bool_opt(options, 'platformhighlighting', True) RegexLexer.__init__(self, **options) @@ -174,6 +210,8 @@ def get_tokens_unprocessed(self, text): token = Keyword.Type elif self.c99highlighting and value in self.c99_types: token = Keyword.Type + elif self.c11highlighting and value in self.c11_atomic_types: + token = Keyword.Type elif self.platformhighlighting and value in self.linux_types: token = Keyword.Type yield index, token, value @@ -182,6 +220,25 @@ def get_tokens_unprocessed(self, text): class CLexer(CFamilyLexer): """ For C source code with preprocessor directives. 
+ + Additional options accepted: + + `stdlibhighlighting` + Highlight common types found in the C/C++ standard library (e.g. `size_t`). + (default: ``True``). + + `c99highlighting` + Highlight common types found in the C99 standard library (e.g. `int8_t`). + Actually, this includes all fixed-width integer types. + (default: ``True``). + + `c11highlighting` + Highlight atomic types found in the C11 standard library (e.g. `atomic_bool`). + (default: ``True``). + + `platformhighlighting` + Highlight common types found in the platform SDK headers (e.g. `clockid_t` on Linux). + (default: ``True``). """ name = 'C' aliases = ['c'] @@ -189,6 +246,17 @@ class CLexer(CFamilyLexer): mimetypes = ['text/x-chdr', 'text/x-csrc'] priority = 0.1 + tokens = { + 'statements': [ + (words(( + '_Alignas', '_Alignof', '_Noreturn', '_Generic', '_Thread_local', + '_Static_assert', '_Imaginary', 'noreturn', 'imaginary', 'complex'), + suffix=r'\b'), Keyword), + (words(('_Bool', '_Complex', '_Atomic'), suffix=r'\b'), Keyword.Type), + inherit + ] + } + def analyse_text(text): if re.search(r'^\s*#include [<"]', text, re.MULTILINE): return 0.1 @@ -199,6 +267,25 @@ def analyse_text(text): class CppLexer(CFamilyLexer): """ For C++ source code with preprocessor directives. + + Additional options accepted: + + `stdlibhighlighting` + Highlight common types found in the C/C++ standard library (e.g. `size_t`). + (default: ``True``). + + `c99highlighting` + Highlight common types found in the C99 standard library (e.g. `int8_t`). + Actually, this includes all fixed-width integer types. + (default: ``True``). + + `c11highlighting` + Highlight atomic types found in the C11 standard library (e.g. `atomic_bool`). + (default: ``True``). + + `platformhighlighting` + Highlight common types found in the platform SDK headers (e.g. `clockid_t` on Linux). + (default: ``True``). 
""" name = 'C++' aliases = ['cpp', 'c++'] @@ -210,23 +297,24 @@ class CppLexer(CFamilyLexer): tokens = { 'statements': [ + (r'(class|concept|typename)(\s+)', bygroups(Keyword, Text), 'classname'), (words(( 'catch', 'const_cast', 'delete', 'dynamic_cast', 'explicit', 'export', 'friend', 'mutable', 'namespace', 'new', 'operator', - 'private', 'protected', 'public', 'reinterpret_cast', + 'private', 'protected', 'public', 'reinterpret_cast', 'class', 'restrict', 'static_cast', 'template', 'this', 'throw', 'throws', - 'try', 'typeid', 'typename', 'using', 'virtual', - 'constexpr', 'nullptr', 'decltype', 'thread_local', - 'alignas', 'alignof', 'static_assert', 'noexcept', 'override', - 'final'), suffix=r'\b'), Keyword), - (r'char(16_t|32_t)\b', Keyword.Type), - (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'), + 'try', 'typeid', 'using', 'virtual', 'constexpr', 'nullptr', 'concept', + 'decltype', 'noexcept', 'override', 'final', 'constinit', 'consteval', + 'co_await', 'co_return', 'co_yield', 'requires', 'import', 'module', + 'typename'), + suffix=r'\b'), Keyword), + (r'char(16_t|32_t|8_t)\b', Keyword.Type), + (r'(enum)(\s+)', bygroups(Keyword, Text), 'enumname'), + # C++11 raw strings - (r'(R)(")([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(")', + (r'((?:[LuU]|u8)?R)(")([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(")', bygroups(String.Affix, String, String.Delimiter, String.Delimiter, String, String.Delimiter, String)), - # C++11 UTF-8/16/32 strings - (r'(u8|u|U)(")', bygroups(String.Affix, String), 'string'), inherit, ], 'root': [ @@ -238,11 +326,15 @@ class CppLexer(CFamilyLexer): # Offload C++ extensions, http://offload.codeplay.com/ (r'__(offload|blockingoffload|outer)\b', Keyword.Pseudo), ], - 'classname': [ - (r'[a-zA-Z_]\w*', Name.Class, '#pop'), + 'enumname': [ + include('whitespace'), + # 'enum class' and 'enum struct' C++11 support + (words(('class', 'struct'), suffix=r'\b'), Keyword), + (CFamilyLexer._ident, Name.Class, '#pop'), # template specification 
(r'\s*(?=>)', Text, '#pop'), - ], + default('#pop') + ] } def analyse_text(text): diff --git a/src/typecode/_vendor/pygments/lexers/c_like.py b/src/typecode/_vendor/pygments/lexers/c_like.py index d5dacef..3c7bb02 100644 --- a/src/typecode/_vendor/pygments/lexers/c_like.py +++ b/src/typecode/_vendor/pygments/lexers/c_like.py @@ -5,7 +5,7 @@ Lexers for other C-like languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -172,12 +172,7 @@ class ECLexer(CLexer): (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'), (r'(null|value|this)\b', Name.Builtin), inherit, - ], - 'classname': [ - (r'[a-zA-Z_]\w*', Name.Class, '#pop'), - # template specification - (r'\s*(?=>)', Text, '#pop'), - ], + ] } @@ -291,23 +286,23 @@ class CudaLexer(CLexer): aliases = ['cuda', 'cu'] mimetypes = ['text/x-cuda'] - function_qualifiers = set(('__device__', '__global__', '__host__', - '__noinline__', '__forceinline__')) - variable_qualifiers = set(('__device__', '__constant__', '__shared__', - '__restrict__')) - vector_types = set(('char1', 'uchar1', 'char2', 'uchar2', 'char3', 'uchar3', - 'char4', 'uchar4', 'short1', 'ushort1', 'short2', 'ushort2', - 'short3', 'ushort3', 'short4', 'ushort4', 'int1', 'uint1', - 'int2', 'uint2', 'int3', 'uint3', 'int4', 'uint4', 'long1', - 'ulong1', 'long2', 'ulong2', 'long3', 'ulong3', 'long4', - 'ulong4', 'longlong1', 'ulonglong1', 'longlong2', - 'ulonglong2', 'float1', 'float2', 'float3', 'float4', - 'double1', 'double2', 'dim3')) - variables = set(('gridDim', 'blockIdx', 'blockDim', 'threadIdx', 'warpSize')) - functions = set(('__threadfence_block', '__threadfence', '__threadfence_system', - '__syncthreads', '__syncthreads_count', '__syncthreads_and', - '__syncthreads_or')) - execution_confs = set(('<<<', '>>>')) + function_qualifiers = {'__device__', '__global__', '__host__', + '__noinline__', '__forceinline__'} + 
variable_qualifiers = {'__device__', '__constant__', '__shared__', + '__restrict__'} + vector_types = {'char1', 'uchar1', 'char2', 'uchar2', 'char3', 'uchar3', + 'char4', 'uchar4', 'short1', 'ushort1', 'short2', 'ushort2', + 'short3', 'ushort3', 'short4', 'ushort4', 'int1', 'uint1', + 'int2', 'uint2', 'int3', 'uint3', 'int4', 'uint4', 'long1', + 'ulong1', 'long2', 'ulong2', 'long3', 'ulong3', 'long4', + 'ulong4', 'longlong1', 'ulonglong1', 'longlong2', + 'ulonglong2', 'float1', 'float2', 'float3', 'float4', + 'double1', 'double2', 'dim3'} + variables = {'gridDim', 'blockIdx', 'blockDim', 'threadIdx', 'warpSize'} + functions = {'__threadfence_block', '__threadfence', '__threadfence_system', + '__syncthreads', '__syncthreads_count', '__syncthreads_and', + '__syncthreads_or'} + execution_confs = {'<<<', '>>>'} def get_tokens_unprocessed(self, text): for index, token, value in CLexer.get_tokens_unprocessed(self, text): @@ -352,7 +347,7 @@ class SwigLexer(CppLexer): } # This is a far from complete set of SWIG directives - swig_directives = set(( + swig_directives = { # Most common directives '%apply', '%define', '%director', '%enddef', '%exception', '%extend', '%feature', '%fragment', '%ignore', '%immutable', '%import', '%include', @@ -371,7 +366,7 @@ class SwigLexer(CppLexer): '%pythoncallback', '%pythoncode', '%pythondynamic', '%pythonmaybecall', '%pythonnondynamic', '%pythonprepend', '%refobject', '%shadow', '%sizeof', '%trackobjects', '%types', '%unrefobject', '%varargs', '%warn', - '%warnfilter')) + '%warnfilter'} def analyse_text(text): rv = 0 @@ -429,13 +424,13 @@ class ArduinoLexer(CppLexer): mimetypes = ['text/x-arduino'] # Language sketch main structure functions - structure = set(('setup', 'loop')) + structure = {'setup', 'loop'} # Language operators - operators = set(('not', 'or', 'and', 'xor')) + operators = {'not', 'or', 'and', 'xor'} # Language 'variables' - variables = set(( + variables = { 'DIGITAL_MESSAGE', 'FIRMATA_STRING', 'ANALOG_MESSAGE', 
'REPORT_DIGITAL', 'REPORT_ANALOG', 'INPUT_PULLUP', 'SET_PIN_MODE', 'INTERNAL2V56', 'SYSTEM_RESET', 'LED_BUILTIN', 'INTERNAL1V1', 'SYSEX_START', 'INTERNAL', 'EXTERNAL', 'HIGH', @@ -452,10 +447,10 @@ class ArduinoLexer(CppLexer): 'signed', 'inline', 'delete', '_Bool', 'complex', '_Complex', '_Imaginary', 'atomic_bool', 'atomic_char', 'atomic_schar', 'atomic_uchar', 'atomic_short', 'atomic_ushort', 'atomic_int', 'atomic_uint', 'atomic_long', 'atomic_ulong', - 'atomic_llong', 'atomic_ullong', 'PROGMEM')) + 'atomic_llong', 'atomic_ullong', 'PROGMEM'} # Language shipped functions and class ( ) - functions = set(( + functions = { 'KeyboardController', 'MouseController', 'SoftwareSerial', 'EthernetServer', 'EthernetClient', 'LiquidCrystal', 'RobotControl', 'GSMVoiceCall', 'EthernetUDP', 'EsploraTFT', 'HttpClient', 'RobotMotor', 'WiFiClient', @@ -517,13 +512,13 @@ class ArduinoLexer(CppLexer): 'cos', 'sin', 'pow', 'map', 'abs', 'max', 'min', 'get', 'run', 'put', 'isAlphaNumeric', 'isAlpha', 'isAscii', 'isWhitespace', 'isControl', 'isDigit', 'isGraph', 'isLowerCase', 'isPrintable', 'isPunct', 'isSpace', 'isUpperCase', - 'isHexadecimalDigit')) + 'isHexadecimalDigit'} # do not highlight - suppress_highlight = set(( + suppress_highlight = { 'namespace', 'template', 'mutable', 'using', 'asm', 'typeid', 'typename', 'this', 'alignof', 'constexpr', 'decltype', 'noexcept', - 'static_assert', 'thread_local', 'restrict')) + 'static_assert', 'thread_local', 'restrict'} def get_tokens_unprocessed(self, text): for index, token, value in CppLexer.get_tokens_unprocessed(self, text): diff --git a/src/typecode/_vendor/pygments/lexers/capnproto.py b/src/typecode/_vendor/pygments/lexers/capnproto.py index 98adbdd..070d4ac 100644 --- a/src/typecode/_vendor/pygments/lexers/capnproto.py +++ b/src/typecode/_vendor/pygments/lexers/capnproto.py @@ -5,7 +5,7 @@ Lexers for the Cap'n Proto schema language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. 
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/chapel.py b/src/typecode/_vendor/pygments/lexers/chapel.py index a764e5b..4482414 100644 --- a/src/typecode/_vendor/pygments/lexers/chapel.py +++ b/src/typecode/_vendor/pygments/lexers/chapel.py @@ -5,7 +5,7 @@ Lexer for the Chapel language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -18,7 +18,7 @@ class ChapelLexer(RegexLexer): """ - For `Chapel `_ source. + For `Chapel `_ source. .. versionadded:: 2.0 """ @@ -38,24 +38,24 @@ class ChapelLexer(RegexLexer): (r'(config|const|in|inout|out|param|ref|type|var)\b', Keyword.Declaration), - (r'(false|nil|true)\b', Keyword.Constant), - (r'(bool|complex|imag|int|opaque|range|real|string|uint)\b', + (r'(false|nil|none|true)\b', Keyword.Constant), + (r'(bool|bytes|complex|imag|int|nothing|opaque|range|real|string|uint|void)\b', Keyword.Type), (words(( 'align', 'as', 'atomic', 'begin', 'borrowed', 'break', 'by', 'catch', 'cobegin', 'coforall', 'continue', - 'delete', 'dmapped', 'do', 'domain', + 'defer', 'delete', 'dmapped', 'do', 'domain', 'else', 'enum', 'except', 'export', 'extern', - 'for', 'forall', - 'if', 'index', 'inline', - 'label', 'lambda', 'let', 'local', + 'for', 'forall', 'forwarding', + 'if', 'import', 'index', 'init', 'inline', + 'label', 'lambda', 'let', 'lifetime', 'local', 'locale' 'new', 'noinit', 'on', 'only', 'otherwise', 'override', 'owned', 'pragma', 'private', 'prototype', 'public', 'reduce', 'require', 'return', 'scan', 'select', 'serial', 'shared', 'single', 'sparse', 'subdomain', 'sync', - 'then', 'throw', 'throws', 'try', + 'then', 'this', 'throw', 'throws', 'try', 'unmanaged', 'use', 'when', 'where', 'while', 'with', 'yield', @@ -88,8 +88,8 @@ class ChapelLexer(RegexLexer): (r'[0-9]+', 
Number.Integer), # strings - (r'"(\\\\|\\"|[^"])*"', String), - (r"'(\\\\|\\'|[^'])*'", String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), # tokens (r'(=|\+=|-=|\*=|/=|\*\*=|%=|&=|\|=|\^=|&&=|\|\|=|<<=|>>=|' diff --git a/src/typecode/_vendor/pygments/lexers/clean.py b/src/typecode/_vendor/pygments/lexers/clean.py index 4e1b4f3..99fbe0f 100644 --- a/src/typecode/_vendor/pygments/lexers/clean.py +++ b/src/typecode/_vendor/pygments/lexers/clean.py @@ -5,11 +5,11 @@ Lexer for the Clean language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ -from typecode._vendor.pygments.lexer import ExtendedRegexLexer, words, include, bygroups +from typecode._vendor.pygments.lexer import ExtendedRegexLexer, words, default, include, bygroups from typecode._vendor.pygments.token import Comment, Error, Keyword, Literal, Name, Number, \ Operator, Punctuation, String, Whitespace @@ -35,9 +35,9 @@ class CleanLexer(ExtendedRegexLexer): modulewords = ('implementation', 'definition', 'system') - lowerId = r'[a-z`][\w\d`]*' - upperId = r'[A-Z`][\w\d`]*' - funnyId = r'[~@#\$%\^?!+\-*<>\\/|&=:]+' + lowerId = r'[a-z`][\w`]*' + upperId = r'[A-Z`][\w`]*' + funnyId = r'[~@#$%\^?!+\-*<>\\/|&=:]+' scoreUpperId = r'_' + upperId scoreLowerId = r'_' + lowerId moduleId = r'[a-zA-Z_][a-zA-Z0-9_.`]+' @@ -92,7 +92,8 @@ class CleanLexer(ExtendedRegexLexer): (r'(\s*)\b(as)\b', bygroups(Whitespace, Keyword), ('#pop', 'import.module.as')), (moduleId, Name.Class), (r'(\s*)(,)(\s*)', bygroups(Whitespace, Punctuation, Whitespace)), - (r'\s*', Whitespace, '#pop'), + (r'\s+', Whitespace), + default('#pop'), ], 'import.module.as': [ include('whitespace'), @@ -160,7 +161,7 @@ class CleanLexer(ExtendedRegexLexer): (r'[$\n]', Error, '#pop'), ], 'operators': [ - (r'[-~@#\$%\^?!+*<>\\/|&=:\.]+', Operator), + 
(r'[-~@#$%\^?!+*<>\\/|&=:.]+', Operator), (r'\b_+\b', Operator), ], 'delimiters': [ diff --git a/src/typecode/_vendor/pygments/lexers/compiled.py b/src/typecode/_vendor/pygments/lexers/compiled.py index 2b984d0..996d5e4 100644 --- a/src/typecode/_vendor/pygments/lexers/compiled.py +++ b/src/typecode/_vendor/pygments/lexers/compiled.py @@ -5,7 +5,7 @@ Just export lexer classes previously contained in this module. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/configs.py b/src/typecode/_vendor/pygments/lexers/configs.py index 883ce24..c17c7a2 100644 --- a/src/typecode/_vendor/pygments/lexers/configs.py +++ b/src/typecode/_vendor/pygments/lexers/configs.py @@ -5,7 +5,7 @@ Lexers for configuration file formats. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -13,7 +13,7 @@ from typecode._vendor.pygments.lexer import RegexLexer, default, words, bygroups, include, using from typecode._vendor.pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Number, Punctuation, Whitespace, Literal + Number, Punctuation, Whitespace, Literal, Generic from typecode._vendor.pygments.lexers.shell import BashLexer from typecode._vendor.pygments.lexers.data import JsonLexer @@ -21,7 +21,8 @@ 'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer', 'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer', 'TerraformLexer', 'TermcapLexer', 'TerminfoLexer', - 'PkgConfigLexer', 'PacmanConfLexer', 'AugeasLexer', 'TOMLLexer'] + 'PkgConfigLexer', 'PacmanConfLexer', 'AugeasLexer', 'TOMLLexer', + 'SingularityLexer'] class IniLexer(RegexLexer): @@ -39,7 +40,7 @@ class IniLexer(RegexLexer): (r'\s+', Text), (r'[;#].*', Comment.Single), (r'\[.*?\]$', Keyword), - (r'(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)', + (r'(.*?)([ \t]*)(=)([ \t]*)([^\t\n]*)', bygroups(Name.Attribute, Text, Operator, Text, String)), # standalone option, supported by some INI parsers (r'(.+?)$', Name.Attribute), @@ -155,7 +156,7 @@ class KconfigLexer(RegexLexer): name = 'Kconfig' aliases = ['kconfig', 'menuconfig', 'linux-config', 'kernel-config'] # Adjust this if new kconfig file names appear in your environment - filenames = ['Kconfig', '*Config.in*', 'external.in*', + filenames = ['Kconfig*', '*Config.in*', 'external.in*', 'standard-modules.in'] mimetypes = ['text/x-kconfig'] # No re.MULTILINE, indentation-aware help text needs line-by-line handling @@ -300,11 +301,12 @@ class ApacheConfLexer(RegexLexer): tokens = { 'root': [ (r'\s+', Text), - (r'(#.*?)$', Comment), - (r'(<[^\s>]+)(?:(\s+)(.*))?(>)', + (r'#(.*\\\n)+.*$|(#.*?)$', Comment), + (r'(<[^\s>/][^\s>]*)(?:(\s+)(.*))?(>)', bygroups(Name.Tag, Text, String, Name.Tag)), - (r'([a-z]\w*)(\s+)', - bygroups(Name.Builtin, Text), 'value'), + (r'(]+)(>)', + bygroups(Name.Tag, Name.Tag)), + (r'[a-z]\w*', 
Name.Builtin, 'value'), (r'\.+', Text), ], 'value': [ @@ -314,12 +316,12 @@ class ApacheConfLexer(RegexLexer): (r'[^\S\n]+', Text), (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number), (r'\d+', Number), - (r'/([a-z0-9][\w./-]+)', String.Other), + (r'/([*a-z0-9][*\w./-]+)', String.Other), (r'(on|off|none|any|all|double|email|dns|min|minimal|' r'os|productonly|full|emerg|alert|crit|error|warn|' r'notice|info|debug|registry|script|inetd|standalone|' r'user|group)\b', Keyword), - (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double), + (r'"([^"\\]*(?:\\(.|\n)[^"\\]*)*)"', String.Double), (r'[^\s"\\]+', Text) ], } @@ -540,14 +542,16 @@ class DockerLexer(RegexLexer): filenames = ['Dockerfile', '*.docker'] mimetypes = ['text/x-dockerfile-config'] - _keywords = (r'(?:FROM|MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)') + _keywords = (r'(?:MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)') _bash_keywords = (r'(?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY)') - _lb = r'(?:\s*\\?\s*)' # dockerfile line break regex + _lb = r'(?:\s*\\?\s*)' # dockerfile line break regex flags = re.IGNORECASE | re.MULTILINE tokens = { 'root': [ (r'#.*', Comment), + (r'(FROM)([ \t]*)(\S*)([ \t]*)(?:(AS)([ \t]*)(\S*))?', + bygroups(Keyword, Text, String, Text, Keyword, Text, String)), (r'(ONBUILD)(%s)' % (_lb,), bygroups(Keyword, using(BashLexer))), (r'(HEALTHCHECK)((%s--\w+=\w+%s)*)' % (_lb, _lb), bygroups(Keyword, using(BashLexer))), @@ -574,31 +578,35 @@ class TerraformLexer(RegexLexer): filenames = ['*.tf'] mimetypes = ['application/x-tf', 'application/x-terraform'] - embedded_keywords = ('ingress', 'egress', 'listener', 'default', 'connection', 'alias', 'tags', 'lifecycle', 'timeouts') + embedded_keywords = ('ingress', 'egress', 'listener', 'default', + 'connection', 'alias', 'terraform', 'tags', 'vars', + 'config', 'lifecycle', 'timeouts') tokens = { 'root': [ - include('string'), - include('punctuation'), - include('curly'), - include('basic'), - include('whitespace'), - (r'[0-9]+', Number), + include('string'), + 
include('punctuation'), + include('curly'), + include('basic'), + include('whitespace'), + (r'[0-9]+', Number), ], 'basic': [ - (words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Keyword.Type), - (r'\s*/\*', Comment.Multiline, 'comment'), - (r'\s*#.*\n', Comment.Single), - (r'(.*?)(\s*)(=)', bygroups(Name.Attribute, Text, Operator)), - (words(('variable', 'resource', 'provider', 'provisioner', 'module'), - prefix=r'\b', suffix=r'\b'), Keyword.Reserved, 'function'), - (words(embedded_keywords, prefix=r'\b', suffix=r'\b'), Keyword.Declaration), - (r'\$\{', String.Interpol, 'var_builtin'), + (words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Keyword.Type), + (r'\s*/\*', Comment.Multiline, 'comment'), + (r'\s*#.*\n', Comment.Single), + (r'(.*?)(\s*)(=)', bygroups(Name.Attribute, Text, Operator)), + (words(('variable', 'resource', 'provider', 'provisioner', 'module', + 'backend', 'data', 'output'), prefix=r'\b', suffix=r'\b'), + Keyword.Reserved, 'function'), + (words(embedded_keywords, prefix=r'\b', suffix=r'\b'), + Keyword.Declaration), + (r'\$\{', String.Interpol, 'var_builtin'), ], 'function': [ - (r'(\s+)(".*")(\s+)', bygroups(Text, String, Text)), - include('punctuation'), - include('curly'), + (r'(\s+)(".*")(\s+)', bygroups(Text, String, Text)), + include('punctuation'), + include('curly'), ], 'var_builtin': [ (r'\$\{', String.Interpol, '#push'), @@ -894,7 +902,7 @@ class TOMLLexer(RegexLexer): name = 'TOML' aliases = ['toml'] - filenames = ['*.toml'] + filenames = ['*.toml', 'Pipfile', 'poetry.lock'] tokens = { 'root': [ @@ -903,7 +911,7 @@ class TOMLLexer(RegexLexer): (r'\s+', Text), (r'#.*?$', Comment.Single), # Basic string - (r'"(\\\\|\\"|[^"])*"', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), # Literal string (r'\'\'\'(.*)\'\'\'', String), (r'\'[^\']*\'', String), @@ -932,3 +940,47 @@ class TOMLLexer(RegexLexer): ] } + + +class SingularityLexer(RegexLexer): + """ + Lexer for `Singularity definition files + `_. + + .. 
versionadded:: 2.6 + """ + + name = 'Singularity' + aliases = ['singularity'] + filenames = ['*.def', 'Singularity'] + flags = re.IGNORECASE | re.MULTILINE | re.DOTALL + + _headers = r'^(\s*)(bootstrap|from|osversion|mirrorurl|include|registry|namespace|includecmd)(:)' + _section = r'^%(?:pre|post|setup|environment|help|labels|test|runscript|files|startscript)\b' + _appsect = r'^%app(?:install|help|run|labels|env|test|files)\b' + + tokens = { + 'root': [ + (_section, Generic.Heading, 'script'), + (_appsect, Generic.Heading, 'script'), + (_headers, bygroups(Text, Keyword, Text)), + (r'\s*#.*?\n', Comment), + (r'\b(([0-9]+\.?[0-9]*)|(\.[0-9]+))\b', Number), + (r'(?!^\s*%).', Text), + ], + 'script': [ + (r'(.+?(?=^\s*%))|(.*)', using(BashLexer), '#pop'), + ], + } + + def analyse_text(text): + """This is a quite simple script file, but there are a few keywords + which seem unique to this language.""" + result = 0 + if re.search(r'\b(?:osversion|includecmd|mirrorurl)\b', text, re.IGNORECASE): + result += 0.5 + + if re.search(SingularityLexer._section[1:], text): + result += 0.49 + + return result diff --git a/src/typecode/_vendor/pygments/lexers/console.py b/src/typecode/_vendor/pygments/lexers/console.py index d93e522..e8c2011 100644 --- a/src/typecode/_vendor/pygments/lexers/console.py +++ b/src/typecode/_vendor/pygments/lexers/console.py @@ -5,7 +5,7 @@ Lexers for misc console output. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/crystal.py b/src/typecode/_vendor/pygments/lexers/crystal.py index 33ad7bb..4ca0edf 100644 --- a/src/typecode/_vendor/pygments/lexers/crystal.py +++ b/src/typecode/_vendor/pygments/lexers/crystal.py @@ -5,7 +5,7 @@ Lexer for Crystal. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. 
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -57,9 +57,11 @@ def heredoc_callback(self, match, ctx): ctx.pos = match.start(5) ctx.end = match.end(5) - # this may find other heredocs - for i, t, v in self.get_tokens_unprocessed(context=ctx): - yield i, t, v + # this may find other heredocs, so limit the recursion depth + if len(heredocstack) < 100: + yield from self.get_tokens_unprocessed(context=ctx) + else: + yield ctx.pos, String.Heredoc, match.group(5) ctx.pos = match.end() if outermost: @@ -87,27 +89,11 @@ def heredoc_callback(self, match, ctx): del heredocstack[:] def gen_crystalstrings_rules(): - def intp_regex_callback(self, match, ctx): - yield match.start(1), String.Regex, match.group(1) # begin - nctx = LexerContext(match.group(3), 0, ['interpolated-regex']) - for i, t, v in self.get_tokens_unprocessed(context=nctx): - yield match.start(3)+i, t, v - yield match.start(4), String.Regex, match.group(4) # end[imsx]* - ctx.pos = match.end() - - def intp_string_callback(self, match, ctx): - yield match.start(1), String.Other, match.group(1) - nctx = LexerContext(match.group(3), 0, ['interpolated-string']) - for i, t, v in self.get_tokens_unprocessed(context=nctx): - yield match.start(3)+i, t, v - yield match.start(4), String.Other, match.group(4) # end - ctx.pos = match.end() - states = {} states['strings'] = [ - (r'\:@{0,2}[a-zA-Z_]\w*[!?]?', String.Symbol), - (words(CRYSTAL_OPERATORS, prefix=r'\:@{0,2}'), String.Symbol), - (r":'(\\\\|\\'|[^'])*'", String.Symbol), + (r'\:\w+[!?]?', String.Symbol), + (words(CRYSTAL_OPERATORS, prefix=r'\:'), String.Symbol), + (r":'(\\\\|\\[^\\]|[^'\\])*'", String.Symbol), # This allows arbitrary text after '\ for simplicity (r"'(\\\\|\\'|[^']|\\[^'\\]+)'", String.Char), (r':"', String.Symbol, 'simple-sym'), @@ -128,35 +114,42 @@ def intp_string_callback(self, match, ctx): (end, ttype, '#pop'), ] - # braced quoted strings + # 
https://crystal-lang.org/docs/syntax_and_semantics/literals/string.html#percent-string-literals for lbrace, rbrace, bracecc, name in \ ('\\{', '\\}', '{}', 'cb'), \ ('\\[', '\\]', '\\[\\]', 'sb'), \ ('\\(', '\\)', '()', 'pa'), \ - ('<', '>', '<>', 'ab'): + ('<', '>', '<>', 'ab'), \ + ('\\|', '\\|', '\\|', 'pi'): states[name+'-intp-string'] = [ - (r'\\[' + lbrace + ']', String.Other), + (r'\\' + lbrace, String.Other), + ] + (lbrace != rbrace) * [ (lbrace, String.Other, '#push'), + ] + [ (rbrace, String.Other, '#pop'), include('string-intp-escaped'), (r'[\\#' + bracecc + ']', String.Other), (r'[^\\#' + bracecc + ']+', String.Other), ] - states['strings'].append((r'%' + lbrace, String.Other, + states['strings'].append((r'%Q?' + lbrace, String.Other, name+'-intp-string')) states[name+'-string'] = [ (r'\\[\\' + bracecc + ']', String.Other), + ] + (lbrace != rbrace) * [ (lbrace, String.Other, '#push'), + ] + [ (rbrace, String.Other, '#pop'), (r'[\\#' + bracecc + ']', String.Other), (r'[^\\#' + bracecc + ']+', String.Other), ] - # http://crystal-lang.org/docs/syntax_and_semantics/literals/array.html - states['strings'].append((r'%[wi]' + lbrace, String.Other, + # https://crystal-lang.org/docs/syntax_and_semantics/literals/array.html#percent-array-literals + states['strings'].append((r'%[qwi]' + lbrace, String.Other, name+'-string')) states[name+'-regex'] = [ (r'\\[\\' + bracecc + ']', String.Regex), + ] + (lbrace != rbrace) * [ (lbrace, String.Regex, '#push'), + ] + [ (rbrace + '[imsx]*', String.Regex, '#pop'), include('string-intp'), (r'[\\#' + bracecc + ']', String.Regex), @@ -165,27 +158,6 @@ def intp_string_callback(self, match, ctx): states['strings'].append((r'%r' + lbrace, String.Regex, name+'-regex')) - # these must come after %! 
- states['strings'] += [ - # %r regex - (r'(%r([\W_]))((?:\\\2|(?!\2).)*)(\2[imsx]*)', - intp_regex_callback), - # regular fancy strings with qsw - (r'(%[wi]([\W_]))((?:\\\2|(?!\2).)*)(\2)', - intp_string_callback), - # special forms of fancy strings after operators or - # in method calls with braces - (r'(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)', - bygroups(Text, String.Other, None)), - # and because of fixed width lookbehinds the whole thing a - # second time for line startings... - (r'^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)', - bygroups(Text, String.Other, None)), - # all regular fancy strings without qsw - (r'(%([\[{(<]))((?:\\\2|(?!\2).)*)(\2)', - intp_string_callback), - ] - return states tokens = { @@ -193,10 +165,16 @@ def intp_string_callback(self, match, ctx): (r'#.*?$', Comment.Single), # keywords (words(''' - abstract asm as begin break case do else elsif end ensure extend ifdef if - include instance_sizeof next of pointerof private protected rescue return - require sizeof super then typeof unless until when while with yield + abstract asm begin break case do else elsif end ensure extend if in + include next of private protected require rescue return select self super + then unless until when while with yield '''.split(), suffix=r'\b'), Keyword), + (words(''' + previous_def forall out uninitialized __DIR__ __FILE__ __LINE__ + __END_LINE__ + '''.split(), prefix=r'(?=])', Keyword, 'funcname'), - (r'(class|struct|union|type|alias|enum)(\s+)((?:[a-zA-Z_]\w*::)*)', + (r'(annotation|class|struct|union|type|alias|enum)(\s+)((?:[a-zA-Z_]\w*::)*)', bygroups(Keyword, Text, Name.Namespace), 'classname'), - (r'(self|out|uninitialized)\b|(is_a|responds_to)\?', Keyword.Pseudo), - # macros + # https://crystal-lang.org/api/toplevel.html (words(''' - debugger record pp assert_responds_to spawn parallel - getter setter property delegate def_hash def_equals def_equals_and_hash - forward_missing_to - '''.split(), suffix=r'\b'), Name.Builtin.Pseudo), - 
(r'getter[!?]|property[!?]|__(DIR|FILE|LINE)__\b', Name.Builtin.Pseudo), + instance_sizeof offsetof pointerof sizeof typeof + '''.split(), prefix=r'(?`_ scores. + For `Csound `_ scores. .. versionadded:: 2.1 """ @@ -137,14 +149,14 @@ class CsoundScoreLexer(CsoundLexer): include('whitespace and macro uses'), include('preprocessor directives'), - (r'[abCdefiqstvxy]', Keyword), + (r'[aBbCdefiqstvxy]', Keyword), # There is also a w statement that is generated internally and should not be # used; see https://github.com/csound/csound/issues/750. (r'z', Keyword.Constant), # z is a constant equal to 800,000,000,000. 800 billion seconds is about # 25,367.8 years. See also - # https://csound.github.io/docs/manual/ScoreTop.html and + # https://csound.com/docs/manual/ScoreTop.html and # https://github.com/csound/csound/search?q=stof+path%3AEngine+filename%3Asread.c. (r'([nNpP][pP])(\d+)', bygroups(Keyword, Number.Integer)), @@ -164,13 +176,6 @@ class CsoundScoreLexer(CsoundLexer): (r'\n', Text, '#pop') ], - 'quoted string': [ - (r'"', String, '#pop'), - (r'[^"$]+', String), - include('macro uses'), - (r'[$]', String) - ], - 'loop after left brace': [ include('whitespace and macro uses'), (r'\d+', Number.Integer, ('#pop', 'loop after repeat count')), @@ -184,8 +189,8 @@ class CsoundScoreLexer(CsoundLexer): include('root') ], - # Braced strings are not allowed in Csound scores, but this is needed - # because the superclass includes it. + # Braced strings are not allowed in Csound scores, but this is needed because the + # superclass includes it. 'braced string': [ (r'\}\}', String, '#pop'), (r'[^}]|\}(?!\})', String) @@ -195,7 +200,7 @@ class CsoundScoreLexer(CsoundLexer): class CsoundOrchestraLexer(CsoundLexer): """ - For `Csound `_ orchestras. + For `Csound `_ orchestras. .. 
versionadded:: 2.1 """ @@ -212,34 +217,31 @@ def opcode_name_callback(lexer, match): yield match.start(), Name.Function, opcode def name_callback(lexer, match): + type_annotation_token = Keyword.Type + name = match.group(1) if name in OPCODES or name in DEPRECATED_OPCODES: yield match.start(), Name.Builtin, name - if match.group(2): - yield match.start(2), Punctuation, match.group(2) - yield match.start(3), Keyword.Type, match.group(3) elif name in lexer.user_defined_opcodes: yield match.start(), Name.Function, name else: - nameMatch = re.search(r'^(g?[afikSw])(\w+)', name) - if nameMatch: - yield nameMatch.start(1), Keyword.Type, nameMatch.group(1) - yield nameMatch.start(2), Name, nameMatch.group(2) + type_annotation_token = Name + name_match = re.search(r'^(g?[afikSw])(\w+)', name) + if name_match: + yield name_match.start(1), Keyword.Type, name_match.group(1) + yield name_match.start(2), Name, name_match.group(2) else: yield match.start(), Name, name - # If there's a trailing :V, for example, we want to keep this around - # and emit it as well, otherwise this lexer will not pass round-trip - # testing - if match.group(2): - yield match.start(2), Punctuation, match.group(2) - yield match.start(3), Name, match.group(3) + if match.group(2): + yield match.start(2), Punctuation, match.group(2) + yield match.start(3), type_annotation_token, match.group(3) tokens = { 'root': [ (r'\n', Text), - (r'^([ \t]*)(\w+)(:)(?:[ \t]+|$)', bygroups(Text, Name.Label, Punctuation)), + (r'^([ \t]*)(\w+)(:)([ \t]+|$)', bygroups(Text, Name.Label, Punctuation, Text)), include('whitespace and macro uses'), include('preprocessor directives'), @@ -328,22 +330,24 @@ def name_callback(lexer, match): (r'\\(?:[\\abnrt"]|[0-7]{1,3})', String.Escape) ], # Format specifiers are highlighted in all strings, even though only - # fprintks https://csound.github.io/docs/manual/fprintks.html - # fprints https://csound.github.io/docs/manual/fprints.html - # printf/printf_i 
https://csound.github.io/docs/manual/printf.html - # printks https://csound.github.io/docs/manual/printks.html - # prints https://csound.github.io/docs/manual/prints.html - # sprintf https://csound.github.io/docs/manual/sprintf.html - # sprintfk https://csound.github.io/docs/manual/sprintfk.html - # work with strings that contain format specifiers. In addition, these - # opcodes’ handling of format specifiers is inconsistent: - # - fprintks, fprints, printks, and prints do accept %a and %A - # specifiers, but can’t accept %s specifiers. - # - printf, printf_i, sprintf, and sprintfk don’t accept %a and %A - # specifiers, but can accept %s specifiers. + # fprintks https://csound.com/docs/manual/fprintks.html + # fprints https://csound.com/docs/manual/fprints.html + # printf/printf_i https://csound.com/docs/manual/printf.html + # printks https://csound.com/docs/manual/printks.html + # prints https://csound.com/docs/manual/prints.html + # sprintf https://csound.com/docs/manual/sprintf.html + # sprintfk https://csound.com/docs/manual/sprintfk.html + # work with strings that contain format specifiers. In addition, these opcodes’ + # handling of format specifiers is inconsistent: + # - fprintks and fprints accept %a and %A specifiers, and accept %s specifiers + # starting in Csound 6.15.0. + # - printks and prints accept %a and %A specifiers, but don’t accept %s + # specifiers. + # - printf, printf_i, sprintf, and sprintfk don’t accept %a and %A specifiers, + # but accept %s specifiers. # See https://github.com/csound/csound/issues/747 for more information. 
'format specifiers': [ - (r'%[#0\- +]*\d*(?:\.\d+)?[diuoxXfFeEgGaAcs]', String.Interpol), + (r'%[#0\- +]*\d*(?:\.\d+)?[AE-GXac-giosux]', String.Interpol), (r'%%', String.Escape) ], @@ -371,6 +375,7 @@ def name_callback(lexer, match): 'Csound score opcode': [ include('whitespace and macro uses'), + (r'"', String, 'quoted string'), (r'\{\{', String, 'Csound score'), (r'\n', Text, '#pop') ], @@ -381,6 +386,7 @@ def name_callback(lexer, match): 'Python opcode': [ include('whitespace and macro uses'), + (r'"', String, 'quoted string'), (r'\{\{', String, 'Python'), (r'\n', Text, '#pop') ], @@ -391,6 +397,7 @@ def name_callback(lexer, match): 'Lua opcode': [ include('whitespace and macro uses'), + (r'"', String, 'quoted string'), (r'\{\{', String, 'Lua'), (r'\n', Text, '#pop') ], @@ -403,7 +410,7 @@ def name_callback(lexer, match): class CsoundDocumentLexer(RegexLexer): """ - For `Csound `_ documents. + For `Csound `_ documents. .. versionadded:: 2.1 """ diff --git a/src/typecode/_vendor/pygments/lexers/css.py b/src/typecode/_vendor/pygments/lexers/css.py index 0101ef9..63fe0a5 100644 --- a/src/typecode/_vendor/pygments/lexers/css.py +++ b/src/typecode/_vendor/pygments/lexers/css.py @@ -5,7 +5,7 @@ Lexers for CSS and related stylesheet formats. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -16,7 +16,6 @@ default, words, inherit from typecode._vendor.pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation -from typecode._vendor.pygments.util import iteritems __all__ = ['CssLexer', 'SassLexer', 'ScssLexer', 'LessCssLexer'] @@ -290,8 +289,8 @@ class CssLexer(RegexLexer): (r'(@)([\w-]+)', bygroups(Punctuation, Keyword), 'atrule'), (r'[\w-]+', Name.Tag), (r'[~^*!%&$\[\]()<>|+=@:;,./?-]', Operator), - (r'"(\\\\|\\"|[^"])*"', String.Double), - (r"'(\\\\|\\'|[^'])*'", String.Single) + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), ], 'atrule': [ (r'\{', Punctuation, 'atcontent'), @@ -311,6 +310,8 @@ class CssLexer(RegexLexer): (words(_vendor_prefixes,), Keyword.Pseudo), (r'('+r'|'.join(_css_properties)+r')(\s*)(\:)', bygroups(Keyword, Text, Punctuation), 'value-start'), + (r'([-]+[a-zA-Z_][\w-]*)(\s*)(\:)', bygroups(Name.Variable, Text, Punctuation), + 'value-start'), (r'([a-zA-Z_][\w-]*)(\s*)(\:)', bygroups(Name, Text, Punctuation), 'value-start'), @@ -336,14 +337,15 @@ class CssLexer(RegexLexer): (r'[~^*!%&<>|+=@:./?-]+', Operator), (r'[\[\](),]+', Punctuation), - (r'"(\\\\|\\"|[^"])*"', String.Double), - (r"'(\\\\|\\'|[^'])*'", String.Single), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), (r'[a-zA-Z_][\w-]*', Name), (r';', Punctuation, '#pop'), (r'\}', Punctuation, '#pop:2'), ], 'function-start': [ (r'\s+', Text), + (r'[-]+([\w+]+[-]*)+', Name.Variable), include('urls'), (words(_vendor_prefixes,), Keyword.Pseudo), (words(_keyword_values, suffix=r'\b'), Keyword.Constant), @@ -359,9 +361,9 @@ class CssLexer(RegexLexer): (r'/\*(?:.|\n)*?\*/', Comment), include('numeric-values'), (r'[*+/-]', Operator), - (r'[,]', Punctuation), - (r'"(\\\\|\\"|[^"])*"', String.Double), - (r"'(\\\\|\\'|[^'])*'", String.Single), + (r',', Punctuation), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", 
String.Single), (r'[a-zA-Z_-]\w*', Name), (r'\)', Punctuation, '#pop'), ], @@ -397,7 +399,7 @@ class CssLexer(RegexLexer): 'behind', 'below', 'bidi-override', 'blink', 'block', 'bold', 'bolder', 'both', 'capitalize', 'center-left', 'center-right', 'center', 'circle', 'cjk-ideographic', 'close-quote', 'collapse', 'condensed', 'continuous', - 'crop', 'crosshair', 'cross', 'cursive', 'dashed', 'decimal-leading-zero', + 'crosshair', 'cross', 'cursive', 'dashed', 'decimal-leading-zero', 'decimal', 'default', 'digits', 'disc', 'dotted', 'double', 'e-resize', 'embed', 'extra-condensed', 'extra-expanded', 'expanded', 'fantasy', 'far-left', 'far-right', 'faster', 'fast', 'fixed', 'georgian', 'groove', 'hebrew', 'help', @@ -612,7 +614,7 @@ class SassLexer(ExtendedRegexLexer): (r"\*/", Comment, '#pop'), ], } - for group, common in iteritems(common_sass_tokens): + for group, common in common_sass_tokens.items(): tokens[group] = copy.copy(common) tokens['value'].append((r'\n', Text, 'root')) tokens['selector'].append((r'\n', Text, 'root')) @@ -662,7 +664,7 @@ class ScssLexer(RegexLexer): (r"\*/", Comment, '#pop'), ], } - for group, common in iteritems(common_sass_tokens): + for group, common in common_sass_tokens.items(): tokens[group] = copy.copy(common) tokens['value'].extend([(r'\n', Text), (r'[;{}]', Punctuation, '#pop')]) tokens['selector'].extend([(r'\n', Text), (r'[;{}]', Punctuation, '#pop')]) diff --git a/src/typecode/_vendor/pygments/lexers/d.py b/src/typecode/_vendor/pygments/lexers/d.py index 4ca3d4e..003e7a5 100644 --- a/src/typecode/_vendor/pygments/lexers/d.py +++ b/src/typecode/_vendor/pygments/lexers/d.py @@ -5,7 +5,7 @@ Lexers for D languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -47,11 +47,16 @@ class DLexer(RegexLexer): 'module', 'new', 'nothrow', 'out', 'override', 'package', 'pragma', 'private', 'protected', 'public', 'pure', 'ref', 'return', 'scope', 'shared', 'static', 'struct', 'super', 'switch', 'synchronized', - 'template', 'this', 'throw', 'try', 'typedef', 'typeid', 'typeof', + 'template', 'this', 'throw', 'try', 'typeid', 'typeof', 'union', 'unittest', 'version', 'volatile', 'while', 'with', '__gshared', '__traits', '__vector', '__parameters'), suffix=r'\b'), Keyword), + (words(( + # Removed in 2.072 + 'typedef', ), + suffix=r'\b'), + Keyword.Removed), (words(( 'bool', 'byte', 'cdouble', 'cent', 'cfloat', 'char', 'creal', 'dchar', 'double', 'float', 'idouble', 'ifloat', 'int', 'ireal', @@ -60,9 +65,9 @@ class DLexer(RegexLexer): Keyword.Type), (r'(false|true|null)\b', Keyword.Constant), (words(( - '__FILE__', '__MODULE__', '__LINE__', '__FUNCTION__', '__PRETTY_FUNCTION__' - '', '__DATE__', '__EOF__', '__TIME__', '__TIMESTAMP__', '__VENDOR__', - '__VERSION__'), suffix=r'\b'), + '__FILE__', '__FILE_FULL_PATH__', '__MODULE__', '__LINE__', '__FUNCTION__', + '__PRETTY_FUNCTION__', '__DATE__', '__EOF__', '__TIME__', '__TIMESTAMP__', + '__VENDOR__', '__VERSION__'), suffix=r'\b'), Keyword.Pseudo), (r'macro\b', Keyword.Reserved), (r'(string|wstring|dstring|size_t|ptrdiff_t)\b', Name.Builtin), @@ -93,7 +98,7 @@ class DLexer(RegexLexer): # -- AlternateWysiwygString (r'`[^`]*`[cwd]?', String), # -- DoubleQuotedString - (r'"(\\\\|\\"|[^"])*"[cwd]?', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"[cwd]?', String), # -- EscapeSequence (r"\\(['\"?\\abfnrtv]|x[0-9a-fA-F]{2}|[0-7]{1,3}" r"|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8}|&\w+;)", @@ -224,7 +229,7 @@ class CrocLexer(RegexLexer): (r'@`(``|[^`])*`', String), (r"@'(''|[^'])*'", String), # -- DoubleQuotedString - (r'"(\\\\|\\"|[^"])*"', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), # Tokens (r'(~=|\^=|%=|\*=|==|!=|>>>=|>>>|>>=|>>|>=|<=>|\?=|-\>' r'|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.|/=)' 
diff --git a/src/typecode/_vendor/pygments/lexers/dalvik.py b/src/typecode/_vendor/pygments/lexers/dalvik.py index 2df81d5..09d7805 100644 --- a/src/typecode/_vendor/pygments/lexers/dalvik.py +++ b/src/typecode/_vendor/pygments/lexers/dalvik.py @@ -5,7 +5,7 @@ Pygments lexers for Dalvik VM-related languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/data.py b/src/typecode/_vendor/pygments/lexers/data.py index 1580bce..3ea83bc 100644 --- a/src/typecode/_vendor/pygments/lexers/data.py +++ b/src/typecode/_vendor/pygments/lexers/data.py @@ -5,13 +5,13 @@ Lexers for data file format. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import re -from typecode._vendor.pygments.lexer import RegexLexer, ExtendedRegexLexer, LexerContext, \ +from typecode._vendor.pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, LexerContext, \ include, bygroups, inherit from typecode._vendor.pygments.token import Text, Comment, Keyword, Name, String, Number, \ Punctuation, Literal, Error @@ -23,7 +23,7 @@ class YamlLexerContext(LexerContext): """Indentation context for the YAML lexer.""" def __init__(self, *args, **kwds): - super(YamlLexerContext, self).__init__(*args, **kwds) + super().__init__(*args, **kwds) self.indent_stack = [] self.indent = -1 self.next_indent = 0 @@ -233,7 +233,7 @@ def callback(lexer, match, context): # whitespaces separating tokens (r'[ ]+', Text), # key with colon - (r'''([^,:?\[\]{}"'\n]+)(:)(?=[ ]|$)''', + (r'''([^#,:?\[\]{}"'\n]+)(:)(?=[ ]|$)''', bygroups(Name.Tag, set_indent(Punctuation, implicit=True))), # tags, anchors and aliases, include('descriptors'), @@ -433,10 +433,10 @@ def callback(lexer, match, context): def 
get_tokens_unprocessed(self, text=None, context=None): if context is None: context = YamlLexerContext(text, 0) - return super(YamlLexer, self).get_tokens_unprocessed(text, context) + return super().get_tokens_unprocessed(text, context) -class JsonLexer(RegexLexer): +class JsonLexer(Lexer): """ For JSON data structures. @@ -444,75 +444,192 @@ class JsonLexer(RegexLexer): """ name = 'JSON' - aliases = ['json'] - filenames = ['*.json'] - mimetypes = ['application/json'] - - flags = re.DOTALL - - # integer part of a number - int_part = r'-?(0|[1-9]\d*)' - - # fractional part of a number - frac_part = r'\.\d+' - - # exponential part of a number - exp_part = r'[eE](\+|-)?\d+' - - tokens = { - 'whitespace': [ - (r'\s+', Text), - ], - - # represents a simple terminal value - 'simplevalue': [ - (r'(true|false|null)\b', Keyword.Constant), - (('%(int_part)s(%(frac_part)s%(exp_part)s|' - '%(exp_part)s|%(frac_part)s)') % vars(), - Number.Float), - (int_part, Number.Integer), - (r'"(\\\\|\\"|[^"])*"', String.Double), - ], - + aliases = ['json', 'json-object'] + filenames = ['*.json', 'Pipfile.lock'] + mimetypes = ['application/json', 'application/json-object'] + + # No validation of integers, floats, or constants is done. + # As long as the characters are members of the following + # sets, the token will be considered valid. 
For example, + # + # "--1--" is parsed as an integer + # "1...eee" is parsed as a float + # "trustful" is parsed as a constant + # + integers = set('-0123456789') + floats = set('.eE+') + constants = set('truefalsenull') # true|false|null + hexadecimals = set('0123456789abcdefABCDEF') + punctuations = set('{}[],') + whitespaces = {'\u0020', '\u000a', '\u000d', '\u0009'} + + def get_tokens_unprocessed(self, text): + """Parse JSON data.""" + + in_string = False + in_escape = False + in_unicode_escape = 0 + in_whitespace = False + in_constant = False + in_number = False + in_float = False + in_punctuation = False + + start = 0 + + # The queue is used to store data that may need to be tokenized + # differently based on what follows. In particular, JSON object + # keys are tokenized differently than string values, but cannot + # be distinguished until punctuation is encountered outside the + # string. + # + # A ":" character after the string indicates that the string is + # an object key; any other character indicates the string is a + # regular string value. + # + # The queue holds tuples that contain the following data: + # + # (start_index, token_type, text) + # + # By default the token type of text in double quotes is + # String.Double. The token type will be replaced if a colon + # is encountered after the string closes. 
+ # + queue = [] + + for stop, character in enumerate(text): + if in_string: + if in_unicode_escape: + if character in self.hexadecimals: + in_unicode_escape -= 1 + if not in_unicode_escape: + in_escape = False + else: + in_unicode_escape = 0 + in_escape = False + + elif in_escape: + if character == 'u': + in_unicode_escape = 4 + else: + in_escape = False + + elif character == '\\': + in_escape = True + + elif character == '"': + queue.append((start, String.Double, text[start:stop + 1])) + in_string = False + in_escape = False + in_unicode_escape = 0 + + continue + + elif in_whitespace: + if character in self.whitespaces: + continue + + if queue: + queue.append((start, Text, text[start:stop])) + else: + yield start, Text, text[start:stop] + in_whitespace = False + # Fall through so the new character can be evaluated. + + elif in_constant: + if character in self.constants: + continue + + yield start, Keyword.Constant, text[start:stop] + in_constant = False + # Fall through so the new character can be evaluated. + + elif in_number: + if character in self.integers: + continue + elif character in self.floats: + in_float = True + continue + + if in_float: + yield start, Number.Float, text[start:stop] + else: + yield start, Number.Integer, text[start:stop] + in_number = False + in_float = False + # Fall through so the new character can be evaluated. + + elif in_punctuation: + if character in self.punctuations: + continue + + yield start, Punctuation, text[start:stop] + in_punctuation = False + # Fall through so the new character can be evaluated. + + start = stop + + if character == '"': + in_string = True + + elif character in self.whitespaces: + in_whitespace = True + + elif character in {'f', 'n', 't'}: # The first letters of true|false|null + # Exhaust the queue. Accept the existing token types. + yield from queue + queue.clear() + + in_constant = True + + elif character in self.integers: + # Exhaust the queue. Accept the existing token types. 
+ yield from queue + queue.clear() + + in_number = True + + elif character == ':': + # Yield from the queue. Replace string token types. + for _start, _token, _text in queue: + if _token is Text: + yield _start, _token, _text + elif _token is String.Double: + yield _start, Name.Tag, _text + else: + yield _start, Error, _text + queue.clear() + + in_punctuation = True + + elif character in self.punctuations: + # Exhaust the queue. Accept the existing token types. + yield from queue + queue.clear() + + in_punctuation = True - # the right hand side of an object, after the attribute name - 'objectattribute': [ - include('value'), - (r':', Punctuation), - # comma terminates the attribute but expects more - (r',', Punctuation, '#pop'), - # a closing bracket terminates the entire object, so pop twice - (r'\}', Punctuation, '#pop:2'), - ], - - # a json object - { attr, attr, ... } - 'objectvalue': [ - include('whitespace'), - (r'"(\\\\|\\"|[^"])*"', Name.Tag, 'objectattribute'), - (r'\}', Punctuation, '#pop'), - ], - - # json array - [ value, value, ... } - 'arrayvalue': [ - include('whitespace'), - include('value'), - (r',', Punctuation), - (r'\]', Punctuation, '#pop'), - ], - - # a json value - either a simple value or a complex value (object or array) - 'value': [ - include('whitespace'), - include('simplevalue'), - (r'\{', Punctuation, 'objectvalue'), - (r'\[', Punctuation, 'arrayvalue'), - ], - - # the root of a json document whould be a value - 'root': [ - include('value'), - ], - } + else: + # Exhaust the queue. Accept the existing token types. + yield from queue + queue.clear() + + yield start, Error, character + + # Yield any remaining text. 
+ yield from queue + if in_string: + yield start, Error, text[start:] + elif in_float: + yield start, Number.Float, text[start:] + elif in_number: + yield start, Number.Integer, text[start:] + elif in_constant: + yield start, Keyword.Constant, text[start:] + elif in_whitespace: + yield start, Text, text[start:] + elif in_punctuation: + yield start, Punctuation, text[start:] class JsonBareObjectLexer(JsonLexer): @@ -520,28 +637,21 @@ class JsonBareObjectLexer(JsonLexer): For JSON data structures (with missing object curly braces). .. versionadded:: 2.2 + + .. deprecated:: 2.8.0 + + Behaves the same as `JsonLexer` now. """ name = 'JSONBareObject' - aliases = ['json-object'] + aliases = [] filenames = [] - mimetypes = ['application/json-object'] - - tokens = { - 'root': [ - (r'\}', Error), - include('objectvalue'), - ], - 'objectattribute': [ - (r'\}', Error), - inherit, - ], - } + mimetypes = [] class JsonLdLexer(JsonLexer): """ - For `JSON-LD `_ linked data. + For `JSON-LD `_ linked data. .. 
versionadded:: 2.0 """ @@ -551,11 +661,38 @@ class JsonLdLexer(JsonLexer): filenames = ['*.jsonld'] mimetypes = ['application/ld+json'] - tokens = { - 'objectvalue': [ - (r'"@(context|id|value|language|type|container|list|set|' - r'reverse|index|base|vocab|graph)"', Name.Decorator, - 'objectattribute'), - inherit, - ], + json_ld_keywords = { + '"@%s"' % keyword + for keyword in ( + 'base', + 'container', + 'context', + 'direction', + 'graph', + 'id', + 'import', + 'included', + 'index', + 'json', + 'language', + 'list', + 'nest', + 'none', + 'prefix', + 'propagate', + 'protected', + 'reverse', + 'set', + 'type', + 'value', + 'version', + 'vocab', + ) } + + def get_tokens_unprocessed(self, text): + for start, token, value in super(JsonLdLexer, self).get_tokens_unprocessed(text): + if token is Name.Tag and value in self.json_ld_keywords: + yield start, Name.Decorator, value + else: + yield start, token, value diff --git a/src/typecode/_vendor/pygments/lexers/devicetree.py b/src/typecode/_vendor/pygments/lexers/devicetree.py new file mode 100644 index 0000000..76e8068 --- /dev/null +++ b/src/typecode/_vendor/pygments/lexers/devicetree.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.devicetree + ~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for Devicetree language. + + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from typecode._vendor.pygments.lexer import RegexLexer, bygroups, include, default, words +from typecode._vendor.pygments.token import Comment, Keyword, Name, Number, Operator, \ + Punctuation, String, Text + +__all__ = ['DevicetreeLexer'] + + +class DevicetreeLexer(RegexLexer): + """ + Lexer for `Devicetree `_ files. + + .. 
versionadded:: 2.7 + """ + + name = 'Devicetree' + aliases = ['devicetree', 'dts'] + filenames = ['*.dts', '*.dtsi'] + mimetypes = ['text/x-c'] + + #: optional Whitespace or /*...*/ style comment + _ws = r'\s*(?:/[*][^*/]*?[*]/\s*)*' + + tokens = { + 'macro': [ + # Include preprocessor directives (C style): + (r'(#include)(' + _ws + r')([^\n]+)', + bygroups(Comment.Preproc, Comment.Multiline, Comment.PreprocFile)), + # Define preprocessor directives (C style): + (r'(#define)(' + _ws + r')([^\n]+)', + bygroups(Comment.Preproc, Comment.Multiline, Comment.Preproc)), + # devicetree style with file: + (r'(/[^*/{]+/)(' + _ws + r')("[^\n{]+")', + bygroups(Comment.Preproc, Comment.Multiline, Comment.PreprocFile)), + # devicetree style with property: + (r'(/[^*/{]+/)(' + _ws + r')([^\n;{]*)([;]?)', + bygroups(Comment.Preproc, Comment.Multiline, Comment.Preproc, Punctuation)), + ], + 'whitespace': [ + (r'\n', Text), + (r'\s+', Text), + (r'\\\n', Text), # line continuation + (r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single), + (r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline), + # Open until EOF, so no ending delimeter + (r'/(\\\n)?[*][\w\W]*', Comment.Multiline), + ], + 'statements': [ + (r'(L?)(")', bygroups(String.Affix, String), 'string'), + (r'0x[0-9a-fA-F]+', Number.Hex), + (r'\d+', Number.Integer), + (r'([^\s{}/*]*)(\s*)(:)', bygroups(Name.Label, Text, Punctuation)), + (words(('compatible', 'model', 'phandle', 'status', '#address-cells', + '#size-cells', 'reg', 'virtual-reg', 'ranges', 'dma-ranges', + 'device_type', 'name'), suffix=r'\b'), Keyword.Reserved), + (r'([~!%^&*+=|?:<>/#-])', Operator), + (r'[()\[\]{},.]', Punctuation), + (r'[a-zA-Z_][\w-]*(?=(?:\s*,\s*[a-zA-Z_][\w-]*|(?:' + _ws + r'))*\s*[=;])', + Name), + (r'[a-zA-Z_]\w*', Name.Attribute), + ], + 'root': [ + include('whitespace'), + include('macro'), + + # Nodes + (r'([^/*@\s&]+|/)(@?)([0-9a-fA-F,]*)(' + _ws + r')(\{)', + bygroups(Name.Function, Operator, Number.Integer, + Comment.Multiline, Punctuation), 
'node'), + + default('statement'), + ], + 'statement': [ + include('whitespace'), + include('statements'), + (';', Punctuation, '#pop'), + ], + 'node': [ + include('whitespace'), + include('macro'), + + (r'([^/*@\s&]+|/)(@?)([0-9a-fA-F,]*)(' + _ws + r')(\{)', + bygroups(Name.Function, Operator, Number.Integer, + Comment.Multiline, Punctuation), '#push'), + + include('statements'), + + (r'\};', Punctuation, '#pop'), + (';', Punctuation), + ], + 'string': [ + (r'"', String, '#pop'), + (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|' + r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape), + (r'[^\\"\n]+', String), # all other characters + (r'\\\n', String), # line continuation + (r'\\', String), # stray backslash + ], + } diff --git a/src/typecode/_vendor/pygments/lexers/diff.py b/src/typecode/_vendor/pygments/lexers/diff.py index a74d804..0fd6dc4 100644 --- a/src/typecode/_vendor/pygments/lexers/diff.py +++ b/src/typecode/_vendor/pygments/lexers/diff.py @@ -5,7 +5,7 @@ Lexers for diff/patch formats. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -114,9 +114,9 @@ class WDiffLexer(RegexLexer): Note that: - * only to normal output (without option like -l). - * if target files of wdiff contain "[-", "-]", "{+", "+}", - especially they are unbalanced, this lexer will get confusing. + * It only works with normal output (without options like ``-l``). + * If the target files contain "[-", "-]", "{+", or "+}", + especially they are unbalanced, the lexer will get confused. .. versionadded:: 2.2 """ diff --git a/src/typecode/_vendor/pygments/lexers/dotnet.py b/src/typecode/_vendor/pygments/lexers/dotnet.py index d07f043..011f240 100644 --- a/src/typecode/_vendor/pygments/lexers/dotnet.py +++ b/src/typecode/_vendor/pygments/lexers/dotnet.py @@ -5,7 +5,7 @@ Lexers for .net languages. 
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import re @@ -14,7 +14,7 @@ using, this, default, words from typecode._vendor.pygments.token import Punctuation, \ Text, Comment, Operator, Keyword, Name, String, Number, Literal, Other -from typecode._vendor.pygments.util import get_choice_opt, iteritems +from typecode._vendor.pygments.util import get_choice_opt from typecode._vendor.pygments import unistring as uni from typecode._vendor.pygments.lexers.html import XmlLexer @@ -71,7 +71,7 @@ class CSharpLexer(RegexLexer): tokens = {} token_variants = True - for levelname, cs_ident in iteritems(levels): + for levelname, cs_ident in levels.items(): tokens[levelname] = { 'root': [ # method names @@ -88,7 +88,7 @@ class CSharpLexer(RegexLexer): (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation), (r'[{}]', Punctuation), (r'@"(""|[^"])*"', String), - (r'"(\\\\|\\"|[^"\n])*["\n]', String), + (r'"(\\\\|\\[^\\]|[^"\\\n])*["\n]', String), (r"'\\.'|'[^\\]'", String.Char), (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?" 
r"[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?", Number), @@ -184,7 +184,7 @@ class NemerleLexer(RegexLexer): tokens = {} token_variants = True - for levelname, cs_ident in iteritems(levels): + for levelname, cs_ident in levels.items(): tokens[levelname] = { 'root': [ # method names @@ -213,7 +213,7 @@ class NemerleLexer(RegexLexer): (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation), (r'[{}]', Punctuation), (r'@"(""|[^"])*"', String), - (r'"(\\\\|\\"|[^"\n])*["\n]', String), + (r'"(\\\\|\\[^\\]|[^"\\\n])*["\n]', String), (r"'\\.'|'[^\\]'", String.Char), (r"0[xX][0-9a-fA-F]+[Ll]?", Number), (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFLdD]?", Number), @@ -295,6 +295,16 @@ def __init__(self, **options): RegexLexer.__init__(self, **options) + def analyse_text(text): + """Nemerle is quite similar to Python, but @if is relatively uncommon + elsewhere.""" + result = 0 + + if '@if' in text: + result += 0.1 + + return result + class BooLexer(RegexLexer): """ @@ -315,8 +325,8 @@ class BooLexer(RegexLexer): (r'\\\n', Text), (r'\\', Text), (r'(in|is|and|or|not)\b', Operator.Word), - (r'/(\\\\|\\/|[^/\s])/', String.Regex), - (r'@/(\\\\|\\/|[^/])*/', String.Regex), + (r'/(\\\\|\\[^\\]|[^/\\\s])/', String.Regex), + (r'@/(\\\\|\\[^\\]|[^/\\])*/', String.Regex), (r'=~|!=|==|<<|>>|[-+/*%=<>&^|]', Operator), (r'(as|abstract|callable|constructor|destructor|do|import|' r'enum|event|final|get|interface|internal|of|override|' @@ -335,8 +345,8 @@ class BooLexer(RegexLexer): r'rawArrayIndexing|required|typeof|unchecked|using|' r'yieldAll|zip)\b', Name.Builtin), (r'"""(\\\\|\\"|.*?)"""', String.Double), - (r'"(\\\\|\\"|[^"]*?)"', String.Double), - (r"'(\\\\|\\'|[^']*?)'", String.Single), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), (r'[a-zA-Z_]\w*', Name), (r'(\d+\.\d*|\d*\.\d+)([fF][+-]?[0-9]+)?', Number.Float), (r'[0-9][0-9.]*(ms?|d|h|s)', Number), @@ -507,8 +517,7 @@ class CSharpAspxLexer(DelegatingLexer): mimetypes = [] def __init__(self, **options): - 
super(CSharpAspxLexer, self).__init__(CSharpLexer, GenericAspxLexer, - **options) + super().__init__(CSharpLexer, GenericAspxLexer, **options) def analyse_text(text): if re.search(r'Page\s*Language="C#"', text, re.I) is not None: @@ -528,8 +537,7 @@ class VbNetAspxLexer(DelegatingLexer): mimetypes = [] def __init__(self, **options): - super(VbNetAspxLexer, self).__init__(VbNetLexer, GenericAspxLexer, - **options) + super().__init__(VbNetLexer, GenericAspxLexer, **options) def analyse_text(text): if re.search(r'Page\s*Language="Vb"', text, re.I) is not None: @@ -686,3 +694,14 @@ class FSharpLexer(RegexLexer): (r'"', String), ], } + + def analyse_text(text): + """F# doesn't have that many unique features -- |> and <| are weak + indicators.""" + result = 0 + if '|>' in text: + result += 0.05 + if '<|' in text: + result += 0.05 + + return result diff --git a/src/typecode/_vendor/pygments/lexers/dsls.py b/src/typecode/_vendor/pygments/lexers/dsls.py index 33d3d3c..c2c2074 100644 --- a/src/typecode/_vendor/pygments/lexers/dsls.py +++ b/src/typecode/_vendor/pygments/lexers/dsls.py @@ -5,7 +5,7 @@ Lexers for various domain-specific languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -14,9 +14,9 @@ from typecode._vendor.pygments.lexer import ExtendedRegexLexer, RegexLexer, bygroups, words, \ include, default, this, using, combined from typecode._vendor.pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Number, Punctuation, Literal, Whitespace + Number, Punctuation, Whitespace -__all__ = ['ProtoBufLexer', 'BroLexer', 'PuppetLexer', 'RslLexer', +__all__ = ['ProtoBufLexer', 'ZeekLexer', 'PuppetLexer', 'RslLexer', 'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer', 'CrmshLexer', 'ThriftLexer', 'FlatlineLexer', 'SnowballLexer'] @@ -40,9 +40,9 @@ class ProtoBufLexer(RegexLexer): (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single), (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline), (words(( - 'import', 'option', 'optional', 'required', 'repeated', 'default', - 'packed', 'ctype', 'extensions', 'to', 'max', 'rpc', 'returns', - 'oneof'), prefix=r'\b', suffix=r'\b'), + 'import', 'option', 'optional', 'required', 'repeated', + 'reserved', 'default', 'packed', 'ctype', 'extensions', 'to', + 'max', 'rpc', 'returns', 'oneof', 'syntax'), prefix=r'\b', suffix=r'\b'), Keyword), (words(( 'int32', 'int64', 'uint32', 'uint64', 'sint32', 'sint64', @@ -188,84 +188,166 @@ class ThriftLexer(RegexLexer): } -class BroLexer(RegexLexer): +class ZeekLexer(RegexLexer): """ - For `Bro `_ scripts. + For `Zeek `_ scripts. - .. versionadded:: 1.5 + .. versionadded:: 2.5 """ - name = 'Bro' - aliases = ['bro'] - filenames = ['*.bro'] + name = 'Zeek' + aliases = ['zeek', 'bro'] + filenames = ['*.zeek', '*.bro'] - _hex = r'[0-9a-fA-F_]' + _hex = r'[0-9a-fA-F]' _float = r'((\d*\.?\d+)|(\d+\.?\d*))([eE][-+]?\d+)?' 
_h = r'[A-Za-z0-9][-A-Za-z0-9]*' tokens = { 'root': [ - # Whitespace - (r'^@.*?\n', Comment.Preproc), - (r'#.*?\n', Comment.Single), + include('whitespace'), + include('comments'), + include('directives'), + include('attributes'), + include('types'), + include('keywords'), + include('literals'), + include('operators'), + include('punctuation'), + (r'((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(?=\s*\()', + Name.Function), + include('identifiers'), + ], + + 'whitespace': [ (r'\n', Text), (r'\s+', Text), (r'\\\n', Text), - # Keywords - (r'(add|alarm|break|case|const|continue|delete|do|else|enum|event' - r'|export|for|function|if|global|hook|local|module|next' - r'|of|print|redef|return|schedule|switch|type|when|while)\b', Keyword), - (r'(addr|any|bool|count|counter|double|file|int|interval|net' - r'|pattern|port|record|set|string|subnet|table|time|timer' - r'|vector)\b', Keyword.Type), + ], + + 'comments': [ + (r'#.*$', Comment), + ], + + 'directives': [ + (r'@(load-plugin|load-sigs|load|unload)\b.*$', Comment.Preproc), + (r'@(DEBUG|DIR|FILENAME|deprecated|if|ifdef|ifndef|else|endif)\b', Comment.Preproc), + (r'(@prefixes)\s*(\+?=).*$', Comment.Preproc), + ], + + 'attributes': [ + (words(('redef', 'priority', 'log', 'optional', 'default', 'add_func', + 'delete_func', 'expire_func', 'read_expire', 'write_expire', + 'create_expire', 'synchronized', 'persistent', 'rotate_interval', + 'rotate_size', 'encrypt', 'raw_output', 'mergeable', 'error_handler', + 'type_column', 'deprecated'), + prefix=r'&', suffix=r'\b'), + Keyword.Pseudo), + ], + + 'types': [ + (words(('any', + 'enum', 'record', 'set', 'table', 'vector', + 'function', 'hook', 'event', + 'addr', 'bool', 'count', 'double', 'file', 'int', 'interval', + 'pattern', 'port', 'string', 'subnet', 'time'), + suffix=r'\b'), + Keyword.Type), + + (r'(opaque)(\s+)(of)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)\b', + bygroups(Keyword.Type, Text, Operator.Word, Text, Keyword.Type)), + + 
(r'(type)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(\s*)(:)(\s*)\b(record|enum)\b', + bygroups(Keyword, Text, Name.Class, Text, Operator, Text, Keyword.Type)), + + (r'(type)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)(\s*)(:)', + bygroups(Keyword, Text, Name, Text, Operator)), + + (r'(redef)(\s+)(record|enum)(\s+)((?:[A-Za-z_]\w*)(?:::(?:[A-Za-z_]\w*))*)\b', + bygroups(Keyword, Text, Keyword.Type, Text, Name.Class)), + ], + + 'keywords': [ + (words(('redef', 'export', 'if', 'else', 'for', 'while', + 'return', 'break', 'next', 'continue', 'fallthrough', + 'switch', 'default', 'case', + 'add', 'delete', + 'when', 'timeout', 'schedule'), + suffix=r'\b'), + Keyword), + (r'(print)\b', Keyword), + (r'(global|local|const|option)\b', Keyword.Declaration), + (r'(module)(\s+)(([A-Za-z_]\w*)(?:::([A-Za-z_]\w*))*)\b', + bygroups(Keyword.Namespace, Text, Name.Namespace)), + ], + + 'literals': [ + (r'"', String, 'string'), + + # Not the greatest match for patterns, but generally helps + # disambiguate between start of a pattern and just a division + # operator. 
+ (r'/(?=.*/)', String.Regex, 'regex'), + (r'(T|F)\b', Keyword.Constant), - (r'(&)((?:add|delete|expire)_func|attr|(?:create|read|write)_expire' - r'|default|disable_print_hook|raw_output|encrypt|group|log' - r'|mergeable|optional|persistent|priority|redef' - r'|rotate_(?:interval|size)|synchronized)\b', - bygroups(Punctuation, Keyword)), - (r'\s+module\b', Keyword.Namespace), - # Addresses, ports and networks - (r'\d+/(tcp|udp|icmp|unknown)\b', Number), - (r'(\d+\.){3}\d+', Number), - (r'(' + _hex + r'){7}' + _hex, Number), - (r'0x' + _hex + r'(' + _hex + r'|:)*::(' + _hex + r'|:)*', Number), - (r'((\d+|:)(' + _hex + r'|:)*)?::(' + _hex + r'|:)*', Number), - (r'(\d+\.\d+\.|(\d+\.){2}\d+)', Number), + + # Port + (r'\d{1,5}/(udp|tcp|icmp|unknown)\b', Number), + + # IPv4 Address + (r'(\d{1,3}.){3}(\d{1,3})\b', Number), + + # IPv6 Address + (r'\[([0-9a-fA-F]{0,4}:){2,7}([0-9a-fA-F]{0,4})?((\d{1,3}.){3}(\d{1,3}))?\]', Number), + + # Numeric + (r'0[xX]' + _hex + r'+\b', Number.Hex), + (_float + r'\s*(day|hr|min|sec|msec|usec)s?\b', Number.Float), + (_float + r'\b', Number.Float), + (r'(\d+)\b', Number.Integer), + # Hostnames (_h + r'(\.' + _h + r')+', String), - # Numeric - (_float + r'\s+(day|hr|min|sec|msec|usec)s?\b', Literal.Date), - (r'0[xX]' + _hex, Number.Hex), - (_float, Number.Float), - (r'\d+', Number.Integer), - (r'/', String.Regex, 'regex'), - (r'"', String, 'string'), - # Operators - (r'[!%*/+:<=>?~|-]', Operator), + ], + + 'operators': [ + (r'[!%*/+<=>~|&^-]', Operator), (r'([-+=&|]{2}|[+=!><-]=)', Operator), - (r'(in|match)\b', Operator.Word), - (r'[{}()\[\]$.,;]', Punctuation), - # Identfier - (r'([_a-zA-Z]\w*)(::)', bygroups(Name, Name.Namespace)), + (r'(in|as|is|of)\b', Operator.Word), + (r'\??\$', Operator), + ], + + 'punctuation': [ + (r'[{}()\[\],;.]', Punctuation), + # The "ternary if", which uses '?' 
and ':', could instead be + # treated as an Operator, but colons are more frequently used to + # separate field/identifier names from their types, so the (often) + # less-prominent Punctuation is used even with '?' for consistency. + (r'[?:]', Punctuation), + ], + + 'identifiers': [ + (r'([a-zA-Z_]\w*)(::)', bygroups(Name, Punctuation)), (r'[a-zA-Z_]\w*', Name) ], + 'string': [ + (r'\\.', String.Escape), + (r'%-?[0-9]*(\.[0-9]+)?[DTd-gsx]', String.Escape), (r'"', String, '#pop'), - (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), - (r'[^\\"\n]+', String), - (r'\\\n', String), - (r'\\', String) + (r'.', String), ], + 'regex': [ + (r'\\.', String.Escape), (r'/', String.Regex, '#pop'), - (r'\\[\\nt/]', String.Regex), # String.Escape is too intense here. - (r'[^\\/\n]+', String.Regex), - (r'\\\n', String.Regex), - (r'\\', String.Regex) - ] + (r'.', String.Regex), + ], } +BroLexer = ZeekLexer + + class PuppetLexer(RegexLexer): """ For `Puppet `__ configuration DSL. @@ -550,7 +632,7 @@ class AlloyLexer(RegexLexer): (iden_rex, Name), (r'[:,]', Punctuation), (r'[0-9]+', Number.Integer), - (r'"(\\\\|\\"|[^"])*"', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), (r'\n', Text), ] } @@ -558,7 +640,7 @@ class AlloyLexer(RegexLexer): class PanLexer(RegexLexer): """ - Lexer for `pan `_ source files. + Lexer for `pan `_ source files. Based on tcsh lexer. @@ -745,7 +827,7 @@ class FlatlineLexer(RegexLexer): (r'0x-?[a-f\d]+', Number.Hex), # strings, symbols and characters - (r'"(\\\\|\\"|[^"])*"', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), (r"\\(.|[a-z]+)", String.Char), # expression template placeholder diff --git a/src/typecode/_vendor/pygments/lexers/dylan.py b/src/typecode/_vendor/pygments/lexers/dylan.py index 9788f29..b678f2a 100644 --- a/src/typecode/_vendor/pygments/lexers/dylan.py +++ b/src/typecode/_vendor/pygments/lexers/dylan.py @@ -5,7 +5,7 @@ Lexers for the Dylan language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. 
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -32,27 +32,27 @@ class DylanLexer(RegexLexer): flags = re.IGNORECASE - builtins = set(( + builtins = { 'subclass', 'abstract', 'block', 'concrete', 'constant', 'class', 'compiler-open', 'compiler-sideways', 'domain', 'dynamic', 'each-subclass', 'exception', 'exclude', 'function', 'generic', 'handler', 'inherited', 'inline', 'inline-only', 'instance', 'interface', 'import', 'keyword', 'library', 'macro', 'method', 'module', 'open', 'primary', 'required', 'sealed', 'sideways', - 'singleton', 'slot', 'thread', 'variable', 'virtual')) + 'singleton', 'slot', 'thread', 'variable', 'virtual'} - keywords = set(( + keywords = { 'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup', 'create', 'define', 'else', 'elseif', 'end', 'export', 'finally', 'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename', 'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when', - 'while')) + 'while'} - operators = set(( + operators = { '~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=', - '>', '>=', '&', '|')) + '>', '>=', '&', '|'} - functions = set(( + functions = { 'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!', 'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply', 'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!', @@ -86,7 +86,7 @@ class DylanLexer(RegexLexer): 'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third', 'third-setter', 'truncate', 'truncate/', 'type-error-expected-type', 'type-error-value', 'type-for-copy', 'type-union', 'union', 'values', - 'vector', 'zero?')) + 'vector', 'zero?'} valid_name = '\\\\?[\\w!&*<>|^$%@\\-+~?/=]+' @@ -277,13 +277,11 @@ def get_tokens_unprocessed(self, text): curcode += line[end:] else: if curcode: - for item in do_insertions(insertions, - dylexer.get_tokens_unprocessed(curcode)): - yield item + yield from do_insertions(insertions, + 
dylexer.get_tokens_unprocessed(curcode)) curcode = '' insertions = [] yield match.start(), Generic.Output, line if curcode: - for item in do_insertions(insertions, - dylexer.get_tokens_unprocessed(curcode)): - yield item + yield from do_insertions(insertions, + dylexer.get_tokens_unprocessed(curcode)) diff --git a/src/typecode/_vendor/pygments/lexers/ecl.py b/src/typecode/_vendor/pygments/lexers/ecl.py index 591f7e3..5677f9c 100644 --- a/src/typecode/_vendor/pygments/lexers/ecl.py +++ b/src/typecode/_vendor/pygments/lexers/ecl.py @@ -5,7 +5,7 @@ Lexers for the ECL language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -21,7 +21,7 @@ class ECLLexer(RegexLexer): """ Lexer for the declarative big-data `ECL - `_ + `_ language. .. versionadded:: 1.5 @@ -123,3 +123,17 @@ class ECLLexer(RegexLexer): (r'[^"\']+', String), ], } + + def analyse_text(text): + """This is very difficult to guess relative to other business languages. + <- in conjuction with BEGIN/END seems relatively rare though.""" + result = 0 + + if '<-' in text: + result += 0.01 + if 'BEGIN' in text: + result += 0.01 + if 'END' in text: + result += 0.01 + + return result diff --git a/src/typecode/_vendor/pygments/lexers/eiffel.py b/src/typecode/_vendor/pygments/lexers/eiffel.py index a8d73aa..e3e5768 100644 --- a/src/typecode/_vendor/pygments/lexers/eiffel.py +++ b/src/typecode/_vendor/pygments/lexers/eiffel.py @@ -5,7 +5,7 @@ Lexer for the Eiffel language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" diff --git a/src/typecode/_vendor/pygments/lexers/elm.py b/src/typecode/_vendor/pygments/lexers/elm.py index ae4a51b..b3b5823 100644 --- a/src/typecode/_vendor/pygments/lexers/elm.py +++ b/src/typecode/_vendor/pygments/lexers/elm.py @@ -5,7 +5,7 @@ Lexer for the Elm programming language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -40,7 +40,7 @@ class ElmLexer(RegexLexer): reservedWords = words(( 'alias', 'as', 'case', 'else', 'if', 'import', 'in', 'let', 'module', 'of', 'port', 'then', 'type', 'where', - ), suffix=r'\b') + ), suffix=r'\b') tokens = { 'root': [ @@ -68,7 +68,7 @@ class ElmLexer(RegexLexer): (reservedWords, Keyword.Reserved), # Types - (r'[A-Z]\w*', Keyword.Type), + (r'[A-Z][a-zA-Z0-9_]*', Keyword.Type), # Main (specialName, Keyword.Reserved), @@ -77,7 +77,7 @@ class ElmLexer(RegexLexer): (words((builtinOps), prefix=r'\(', suffix=r'\)'), Name.Function), # Infix Operators - (words((builtinOps)), Name.Function), + (words(builtinOps), Name.Function), # Numbers include('numbers'), diff --git a/src/typecode/_vendor/pygments/lexers/email.py b/src/typecode/_vendor/pygments/lexers/email.py new file mode 100644 index 0000000..0fef6d3 --- /dev/null +++ b/src/typecode/_vendor/pygments/lexers/email.py @@ -0,0 +1,151 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.email + ~~~~~~~~~~~~~~~~~~~~~ + + Lexer for the raw E-mail. + + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +from typecode._vendor.pygments.lexer import RegexLexer, DelegatingLexer, bygroups +from typecode._vendor.pygments.lexers.mime import MIMELexer +from typecode._vendor.pygments.token import Text, Keyword, Name, String, Number, Comment +from typecode._vendor.pygments.util import get_bool_opt + +__all__ = ["EmailLexer"] + + +class EmailHeaderLexer(RegexLexer): + """ + Sub-lexer for raw E-mail. This lexer only process header part of e-mail. + + .. versionadded:: 2.5 + """ + + def __init__(self, **options): + super().__init__(**options) + self.highlight_x = get_bool_opt(options, "highlight-X-header", False) + + def get_x_header_tokens(self, match): + if self.highlight_x: + # field + yield match.start(1), Name.Tag, match.group(1) + + # content + default_actions = self.get_tokens_unprocessed( + match.group(2), stack=("root", "header")) + yield from default_actions + else: + # lowlight + yield match.start(1), Comment.Special, match.group(1) + yield match.start(2), Comment.Multiline, match.group(2) + + tokens = { + "root": [ + (r"^(?:[A-WYZ]|X400)[\w\-]*:", Name.Tag, "header"), + (r"^(X-(?:\w[\w\-]*:))([\s\S]*?\n)(?![ \t])", get_x_header_tokens), + ], + "header": [ + # folding + (r"\n[ \t]", Text.Whitespace), + (r"\n(?![ \t])", Text.Whitespace, "#pop"), + + # keywords + (r"\bE?SMTPS?\b", Keyword), + (r"\b(?:HE|EH)LO\b", Keyword), + + # mailbox + (r"[\w\.\-\+=]+@[\w\.\-]+", Name.Label), + (r"<[\w\.\-\+=]+@[\w\.\-]+>", Name.Label), + + # domain + (r"\b(\w[\w\.-]*\.[\w\.-]*\w[a-zA-Z]+)\b", Name.Function), + + # IPv4 + ( + r"(?<=\b)(?:(?:25[0-5]|2[0-4][0-9]|1?[0-9][0-9]?)\.){3}(?:25[0" + r"-5]|2[0-4][0-9]|1?[0-9][0-9]?)(?=\b)", + Number.Integer, + ), + + # IPv6 + (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,7}:(?!\b)", Number.Hex), + (r"(?<=\b):((:[0-9a-fA-F]{1,4}){1,7}|:)(?=\b)", Number.Hex), + (r"(?<=\b)([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}(?=\b)", Number.Hex), + (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}(?=\b)", Number.Hex), + 
(r"(?<=\b)[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})(?=\b)", Number.Hex), + (r"(?<=\b)fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}(?=\b)", Number.Hex), + (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}(?=\b)", Number.Hex), + (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}(?=\b)", + Number.Hex), + (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}(?=\b)", + Number.Hex), + (r"(?<=\b)([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}(?=\b)", + Number.Hex), + ( + r"(?<=\b)::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}" + r"[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}" + r"[0-9])(?=\b)", + Number.Hex, + ), + ( + r"(?<=\b)([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-" + r"9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-" + r"9])(?=\b)", + Number.Hex, + ), + + # Date time + ( + r"(?:(Sun|Mon|Tue|Wed|Thu|Fri|Sat),\s+)?(0[1-9]|[1-2]?[0-9]|3[" + r"01])\s+(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s+(" + r"19[0-9]{2}|[2-9][0-9]{3})\s+(2[0-3]|[0-1][0-9]):([0-5][0-9])" + r"(?::(60|[0-5][0-9]))?(?:\.\d{1,5})?\s+([-\+][0-9]{2}[0-5][0-" + r"9]|\(?(?:UTC?|GMT|(?:E|C|M|P)(?:ST|ET|DT)|[A-IK-Z])\)?)", + Name.Decorator, + ), + + # RFC-2047 encoded string + ( + r"(=\?)([\w-]+)(\?)([BbQq])(\?)([\[\w!\"#$%&\'()*+,-./:;<=>@[\\" + r"\]^_`{|}~]+)(\?=)", + bygroups( + String.Affix, + Name.Constant, + String.Affix, + Keyword.Constant, + String.Affix, + Number.Hex, + String.Affix + ) + ), + + # others + (r'[\s]+', Text.Whitespace), + (r'[\S]', Text), + ], + } + + +class EmailLexer(DelegatingLexer): + """ + Lexer for raw E-mail. + + Additional options accepted: + + `highlight-X-header` + Highlight the fields of ``X-`` user-defined email header. (default: + ``False``). + + .. 
versionadded:: 2.5 + """ + + name = "E-mail" + aliases = ["email", "eml"] + filenames = ["*.eml"] + mimetypes = ["message/rfc822"] + + def __init__(self, **options): + super().__init__(EmailHeaderLexer, MIMELexer, Comment, **options) diff --git a/src/typecode/_vendor/pygments/lexers/erlang.py b/src/typecode/_vendor/pygments/lexers/erlang.py index ae2b922..d29c4a1 100644 --- a/src/typecode/_vendor/pygments/lexers/erlang.py +++ b/src/typecode/_vendor/pygments/lexers/erlang.py @@ -5,7 +5,7 @@ Lexers for Erlang. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -163,7 +163,7 @@ class ErlangShellLexer(Lexer): filenames = ['*.erl-sh'] mimetypes = ['text/x-erl-shellsession'] - _prompt_re = re.compile(r'\d+>(?=\s|\Z)') + _prompt_re = re.compile(r'(?:\([\w@_.]+\))?\d+>(?=\s|\Z)') def get_tokens_unprocessed(self, text): erlexer = ErlangLexer(**self.options) @@ -180,9 +180,8 @@ def get_tokens_unprocessed(self, text): curcode += line[end:] else: if curcode: - for item in do_insertions(insertions, - erlexer.get_tokens_unprocessed(curcode)): - yield item + yield from do_insertions(insertions, + erlexer.get_tokens_unprocessed(curcode)) curcode = '' insertions = [] if line.startswith('*'): @@ -190,9 +189,8 @@ def get_tokens_unprocessed(self, text): else: yield match.start(), Generic.Output, line if curcode: - for item in do_insertions(insertions, - erlexer.get_tokens_unprocessed(curcode)): - yield item + yield from do_insertions(insertions, + erlexer.get_tokens_unprocessed(curcode)) def gen_elixir_string_rules(name, symbol, token): @@ -207,10 +205,10 @@ def gen_elixir_string_rules(name, symbol, token): return states -def gen_elixir_sigstr_rules(term, token, interpol=True): +def gen_elixir_sigstr_rules(term, term_class, token, interpol=True): if interpol: return [ - (r'[^#%s\\]+' % (term,), token), + (r'[^#%s\\]+' % (term_class,), token), 
include('escapes'), (r'\\.', token), (r'%s[a-zA-Z]*' % (term,), token, '#pop'), @@ -218,7 +216,7 @@ def gen_elixir_sigstr_rules(term, token, interpol=True): ] else: return [ - (r'[^%s\\]+' % (term,), token), + (r'[^%s\\]+' % (term_class,), token), (r'\\.', token), (r'%s[a-zA-Z]*' % (term,), token, '#pop'), ] @@ -233,7 +231,7 @@ class ElixirLexer(RegexLexer): name = 'Elixir' aliases = ['elixir', 'ex', 'exs'] - filenames = ['*.ex', '*.exs'] + filenames = ['*.ex', '*.eex', '*.exs'] mimetypes = ['text/x-elixir'] KEYWORD = ('fn', 'do', 'end', 'after', 'else', 'rescue', 'catch') @@ -291,14 +289,14 @@ def get_tokens_unprocessed(self, text): def gen_elixir_sigil_rules(): # all valid sigil terminators (excluding heredocs) terminators = [ - (r'\{', r'\}', 'cb'), - (r'\[', r'\]', 'sb'), - (r'\(', r'\)', 'pa'), - (r'<', r'>', 'ab'), - (r'/', r'/', 'slas'), - (r'\|', r'\|', 'pipe'), - ('"', '"', 'quot'), - ("'", "'", 'apos'), + (r'\{', r'\}', '}', 'cb'), + (r'\[', r'\]', r'\]', 'sb'), + (r'\(', r'\)', ')', 'pa'), + ('<', '>', '>', 'ab'), + ('/', '/', '/', 'slas'), + (r'\|', r'\|', '|', 'pipe'), + ('"', '"', '"', 'quot'), + ("'", "'", "'", 'apos'), ] # heredocs have slightly different rules @@ -328,14 +326,15 @@ def gen_elixir_sigil_rules(): include('heredoc_no_interpol'), ] - for lterm, rterm, name in terminators: + for lterm, rterm, rterm_class, name in terminators: states['sigils'] += [ (r'~[a-z]' + lterm, token, name + '-intp'), (r'~[A-Z]' + lterm, token, name + '-no-intp'), ] - states[name + '-intp'] = gen_elixir_sigstr_rules(rterm, token) + states[name + '-intp'] = \ + gen_elixir_sigstr_rules(rterm, rterm_class, token) states[name + '-no-intp'] = \ - gen_elixir_sigstr_rules(rterm, token, interpol=False) + gen_elixir_sigstr_rules(rterm, rterm_class, token, interpol=False) return states @@ -495,7 +494,7 @@ class ElixirConsoleLexer(Lexer): aliases = ['iex'] mimetypes = ['text/x-elixir-shellsession'] - _prompt_re = re.compile(r'(iex|\.{3})(\(\d+\))?> ') + _prompt_re = 
re.compile(r'(iex|\.{3})((?:\([\w@_.]+\))?\d+|\(\d+\))?> ') def get_tokens_unprocessed(self, text): exlexer = ElixirLexer(**self.options) @@ -505,7 +504,7 @@ def get_tokens_unprocessed(self, text): insertions = [] for match in line_re.finditer(text): line = match.group() - if line.startswith(u'** '): + if line.startswith('** '): in_error = True insertions.append((len(curcode), [(0, Generic.Error, line[:-1])])) @@ -520,14 +519,12 @@ def get_tokens_unprocessed(self, text): curcode += line[end:] else: if curcode: - for item in do_insertions( - insertions, exlexer.get_tokens_unprocessed(curcode)): - yield item + yield from do_insertions( + insertions, exlexer.get_tokens_unprocessed(curcode)) curcode = '' insertions = [] token = Generic.Error if in_error else Generic.Output yield match.start(), token, line if curcode: - for item in do_insertions( - insertions, exlexer.get_tokens_unprocessed(curcode)): - yield item + yield from do_insertions( + insertions, exlexer.get_tokens_unprocessed(curcode)) diff --git a/src/typecode/_vendor/pygments/lexers/esoteric.py b/src/typecode/_vendor/pygments/lexers/esoteric.py index 60fa4c8..fbf2cc6 100644 --- a/src/typecode/_vendor/pygments/lexers/esoteric.py +++ b/src/typecode/_vendor/pygments/lexers/esoteric.py @@ -5,7 +5,7 @@ Lexers for esoteric languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -13,6 +13,8 @@ from typecode._vendor.pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation, Error +import re + __all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'CAmkESLexer', 'CapDLLexer', 'AheuiLexer'] @@ -48,6 +50,31 @@ class BrainfuckLexer(RegexLexer): ] } + def analyse_text(text): + """It's safe to assume that a program which mostly consists of + - + and < > is brainfuck.""" + plus_minus_count = 0 + greater_less_count = 0 + + range_to_check = max(256, len(text)) + + for c in text[:range_to_check]: + if c == '+' or c == '-': + plus_minus_count += 1 + if c == '<' or c == '>': + greater_less_count += 1 + + if plus_minus_count > (0.25 * range_to_check): + return 1.0 + if greater_less_count > (0.25 * range_to_check): + return 1.0 + + result = 0 + if '[-]' in text: + result += 0.5 + + return result + class BefungeLexer(RegexLexer): """ @@ -255,23 +282,23 @@ class AheuiLexer(RegexLexer): tokens = { 'root': [ - (u'[' - u'나-낳냐-냫너-넣녀-녛노-놓뇨-눟뉴-닇' - u'다-닿댜-댷더-덯뎌-뎧도-돟됴-둫듀-딓' - u'따-땋땨-떃떠-떻뗘-뗳또-똫뚀-뚷뜌-띟' - u'라-랗랴-럏러-렇려-렿로-롷료-뤃류-릫' - u'마-맣먀-먛머-멓며-몋모-뫃묘-뭏뮤-믷' - u'바-밯뱌-뱧버-벟벼-볗보-봏뵤-붛뷰-빃' - u'빠-빻뺘-뺳뻐-뻫뼈-뼣뽀-뽛뾰-뿧쀼-삏' - u'사-샇샤-샿서-섷셔-셯소-솧쇼-숳슈-싛' - u'싸-쌓쌰-썋써-쎃쎠-쎻쏘-쏳쑈-쑿쓔-씧' - u'자-잫쟈-쟣저-젛져-졓조-좋죠-줗쥬-즿' - u'차-챃챠-챻처-첳쳐-쳫초-촣쵸-춯츄-칗' - u'카-캏캬-컇커-컿켜-켷코-콯쿄-쿻큐-킣' - u'타-탛탸-턓터-텋텨-톃토-톻툐-퉇튜-틯' - u'파-팧퍄-퍟퍼-펗펴-폏포-퐇표-풓퓨-픻' - u'하-핳햐-햫허-헣혀-혛호-홓효-훟휴-힇' - u']', Operator), + ('[' + '나-낳냐-냫너-넣녀-녛노-놓뇨-눟뉴-닇' + '다-닿댜-댷더-덯뎌-뎧도-돟됴-둫듀-딓' + '따-땋땨-떃떠-떻뗘-뗳또-똫뚀-뚷뜌-띟' + '라-랗랴-럏러-렇려-렿로-롷료-뤃류-릫' + '마-맣먀-먛머-멓며-몋모-뫃묘-뭏뮤-믷' + '바-밯뱌-뱧버-벟벼-볗보-봏뵤-붛뷰-빃' + '빠-빻뺘-뺳뻐-뻫뼈-뼣뽀-뽛뾰-뿧쀼-삏' + '사-샇샤-샿서-섷셔-셯소-솧쇼-숳슈-싛' + '싸-쌓쌰-썋써-쎃쎠-쎻쏘-쏳쑈-쑿쓔-씧' + '자-잫쟈-쟣저-젛져-졓조-좋죠-줗쥬-즿' + '차-챃챠-챻처-첳쳐-쳫초-촣쵸-춯츄-칗' + '카-캏캬-컇커-컿켜-켷코-콯쿄-쿻큐-킣' + '타-탛탸-턓터-텋텨-톃토-톻툐-퉇튜-틯' + '파-팧퍄-퍟퍼-펗펴-폏포-퐇표-풓퓨-픻' + '하-핳햐-햫허-헣혀-혛호-홓효-훟휴-힇' + ']', Operator), ('.', Comment), ], } diff --git a/src/typecode/_vendor/pygments/lexers/ezhil.py b/src/typecode/_vendor/pygments/lexers/ezhil.py index 9d3503a..d34cea4 100644 --- 
a/src/typecode/_vendor/pygments/lexers/ezhil.py +++ b/src/typecode/_vendor/pygments/lexers/ezhil.py @@ -5,7 +5,7 @@ Pygments lexers for Ezhil language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -30,20 +30,20 @@ class EzhilLexer(RegexLexer): flags = re.MULTILINE | re.UNICODE # Refer to tamil.utf8.tamil_letters from open-tamil for a stricter version of this. # This much simpler version is close enough, and includes combining marks. - _TALETTERS = u'[a-zA-Z_]|[\u0b80-\u0bff]' + _TALETTERS = '[a-zA-Z_]|[\u0b80-\u0bff]' tokens = { 'root': [ include('keywords'), (r'#.*\n', Comment.Single), (r'[@+/*,^\-%]|[!<>=]=?|&&?|\|\|?', Operator), - (u'இல்', Operator.Word), - (words((u'assert', u'max', u'min', - u'நீளம்', u'சரம்_இடமாற்று', u'சரம்_கண்டுபிடி', - u'பட்டியல்', u'பின்இணை', u'வரிசைப்படுத்து', - u'எடு', u'தலைகீழ்', u'நீட்டிக்க', u'நுழைக்க', u'வை', - u'கோப்பை_திற', u'கோப்பை_எழுது', u'கோப்பை_மூடு', - u'pi', u'sin', u'cos', u'tan', u'sqrt', u'hypot', u'pow', - u'exp', u'log', u'log10', u'exit', + ('இல்', Operator.Word), + (words(('assert', 'max', 'min', + 'நீளம்', 'சரம்_இடமாற்று', 'சரம்_கண்டுபிடி', + 'பட்டியல்', 'பின்இணை', 'வரிசைப்படுத்து', + 'எடு', 'தலைகீழ்', 'நீட்டிக்க', 'நுழைக்க', 'வை', + 'கோப்பை_திற', 'கோப்பை_எழுது', 'கோப்பை_மூடு', + 'pi', 'sin', 'cos', 'tan', 'sqrt', 'hypot', 'pow', + 'exp', 'log', 'log10', 'exit', ), suffix=r'\b'), Name.Builtin), (r'(True|False)\b', Keyword.Constant), (r'[^\S\n]+', Text), @@ -52,10 +52,10 @@ class EzhilLexer(RegexLexer): (r'[(){}\[\]:;.]', Punctuation), ], 'keywords': [ - (u'பதிப்பி|தேர்ந்தெடு|தேர்வு|ஏதேனில்|ஆனால்|இல்லைஆனால்|இல்லை|ஆக|ஒவ்வொன்றாக|இல்|வரை|செய்|முடியேனில்|பின்கொடு|முடி|நிரல்பாகம்|தொடர்|நிறுத்து|நிரல்பாகம்', Keyword), + ('பதிப்பி|தேர்ந்தெடு|தேர்வு|ஏதேனில்|ஆனால்|இல்லைஆனால்|இல்லை|ஆக|ஒவ்வொன்றாக|இல்|வரை|செய்|முடியேனில்|பின்கொடு|முடி|நிரல்பாகம்|தொடர்|நிறுத்து|நிரல்பாகம்', Keyword), ], 
'identifier': [ - (u'(?:'+_TALETTERS+u')(?:[0-9]|'+_TALETTERS+u')*', Name), + ('(?:'+_TALETTERS+')(?:[0-9]|'+_TALETTERS+')*', Name), ], 'literal': [ (r'".*?"', String), @@ -64,6 +64,14 @@ class EzhilLexer(RegexLexer): ] } + def analyse_text(text): + """This language uses Tamil-script. We'll assume that if there's a + decent amount of Tamil-characters, it's this language. This assumption + is obviously horribly off if someone uses string literals in tamil + in another language.""" + if len(re.findall(r'[\u0b80-\u0bff]', text)) > 10: + return 0.25 + def __init__(self, **options): - super(EzhilLexer, self).__init__(**options) + super().__init__(**options) self.encoding = options.get('encoding', 'utf-8') diff --git a/src/typecode/_vendor/pygments/lexers/factor.py b/src/typecode/_vendor/pygments/lexers/factor.py index 481cf57..ccd1608 100644 --- a/src/typecode/_vendor/pygments/lexers/factor.py +++ b/src/typecode/_vendor/pygments/lexers/factor.py @@ -5,7 +5,7 @@ Lexers for the Factor language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -265,7 +265,7 @@ class FactorLexer(RegexLexer): (r'(?:)\s', Keyword.Namespace), # strings - (r'"""\s+(?:.|\n)*?\s+"""', String), + (r'"""\s(?:.|\n)*?\s"""', String), (r'"(?:\\\\|\\"|[^"])*"', String), (r'\S+"\s+(?:\\\\|\\"|[^"])*"', String), (r'CHAR:\s+(?:\\[\\abfnrstv]|[^\\]\S*)\s', String.Char), @@ -322,7 +322,7 @@ class FactorLexer(RegexLexer): 'slots': [ (r'\s+', Text), (r';\s', Keyword, '#pop'), - (r'(\{\s+)(\S+)(\s+[^}]+\s+\}\s)', + (r'(\{\s+)(\S+)(\s[^}]+\s\}\s)', bygroups(Text, Name.Variable, Text)), (r'\S+', Name.Variable), ], diff --git a/src/typecode/_vendor/pygments/lexers/fantom.py b/src/typecode/_vendor/pygments/lexers/fantom.py index b436ad3..139c6d7 100644 --- a/src/typecode/_vendor/pygments/lexers/fantom.py +++ b/src/typecode/_vendor/pygments/lexers/fantom.py @@ -5,7 +5,7 @@ Lexer for the Fantom language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/felix.py b/src/typecode/_vendor/pygments/lexers/felix.py index 7aa0da3..d634e69 100644 --- a/src/typecode/_vendor/pygments/lexers/felix.py +++ b/src/typecode/_vendor/pygments/lexers/felix.py @@ -5,7 +5,7 @@ Lexer for the Felix language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/floscript.py b/src/typecode/_vendor/pygments/lexers/floscript.py index d638ec4..57a0e17 100644 --- a/src/typecode/_vendor/pygments/lexers/floscript.py +++ b/src/typecode/_vendor/pygments/lexers/floscript.py @@ -5,7 +5,7 @@ Lexer for FloScript - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" diff --git a/src/typecode/_vendor/pygments/lexers/forth.py b/src/typecode/_vendor/pygments/lexers/forth.py index e31b680..af2043c 100644 --- a/src/typecode/_vendor/pygments/lexers/forth.py +++ b/src/typecode/_vendor/pygments/lexers/forth.py @@ -5,15 +5,14 @@ Lexer for the Forth language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import re -from typecode._vendor.pygments.lexer import RegexLexer, include, bygroups -from typecode._vendor.pygments.token import Error, Punctuation, Literal, Token, \ - Text, Comment, Operator, Keyword, Name, String, Number, Generic +from typecode._vendor.pygments.lexer import RegexLexer, bygroups +from typecode._vendor.pygments.token import Text, Comment, Keyword, Name, String, Number __all__ = ['ForthLexer'] @@ -30,12 +29,6 @@ class ForthLexer(RegexLexer): filenames = ['*.frt', '*.fs'] mimetypes = ['application/x-forth'] - delimiter = r'\s' - delimiter_end = r'(?=[%s])' % delimiter - - valid_name_chars = r'[^%s]' % delimiter - valid_name = r"%s+%s" % (valid_name_chars, delimiter_end) - flags = re.IGNORECASE | re.MULTILINE tokens = { @@ -71,7 +64,7 @@ class ForthLexer(RegexLexer): r'then|type|u\.|u\<|um\*|um\/mod|unloop|until|' r'variable|while|word|xor|\[char\]|\[\'\]|' r'@|!|\#|<\#|\#>|:|;|\+|-|\*|\/|,|<|>|\|1\+|1-|\.|' - # *** Wordset CORE-EXT + # *** Wordset CORE-EXT r'\.r|0<>|' r'0>|2>r|2r>|2r@|:noname|\?do|again|c\"|' r'case|compile,|endcase|endof|erase|false|' @@ -79,38 +72,38 @@ class ForthLexer(RegexLexer): r'restore-input|roll|save-input|source-id|to|' r'true|tuck|u\.r|u>|unused|value|within|' r'\[compile\]|' - # *** Wordset CORE-EXT-obsolescent + # *** Wordset CORE-EXT-obsolescent r'\#tib|convert|expect|query|span|' r'tib|' - # *** Wordset DOUBLE + # *** Wordset DOUBLE r'2constant|2literal|2variable|d\+|d-|' r'd\.|d\.r|d0<|d0=|d2\*|d2\/|d<|d=|d>s|' r'dabs|dmax|dmin|dnegate|m\*\/|m\+|' - 
# *** Wordset DOUBLE-EXT + # *** Wordset DOUBLE-EXT r'2rot|du<|' - # *** Wordset EXCEPTION + # *** Wordset EXCEPTION r'catch|throw|' - # *** Wordset EXCEPTION-EXT + # *** Wordset EXCEPTION-EXT r'abort|abort\"|' - # *** Wordset FACILITY + # *** Wordset FACILITY r'at-xy|key\?|page|' - # *** Wordset FACILITY-EXT + # *** Wordset FACILITY-EXT r'ekey|ekey>char|ekey\?|emit\?|ms|time&date|' - # *** Wordset FILE + # *** Wordset FILE r'BIN|CLOSE-FILE|CREATE-FILE|DELETE-FILE|FILE-POSITION|' r'FILE-SIZE|INCLUDE-FILE|INCLUDED|OPEN-FILE|R\/O|' r'R\/W|READ-FILE|READ-LINE|REPOSITION-FILE|RESIZE-FILE|' r'S\"|SOURCE-ID|W/O|WRITE-FILE|WRITE-LINE|' - # *** Wordset FILE-EXT + # *** Wordset FILE-EXT r'FILE-STATUS|FLUSH-FILE|REFILL|RENAME-FILE|' - # *** Wordset FLOAT + # *** Wordset FLOAT r'>float|d>f|' r'f!|f\*|f\+|f-|f\/|f0<|f0=|f<|f>d|f@|' r'falign|faligned|fconstant|fdepth|fdrop|fdup|' r'fliteral|float\+|floats|floor|fmax|fmin|' r'fnegate|fover|frot|fround|fswap|fvariable|' r'represent|' - # *** Wordset FLOAT-EXT + # *** Wordset FLOAT-EXT r'df!|df@|dfalign|dfaligned|dfloat\+|' r'dfloats|f\*\*|f\.|fabs|facos|facosh|falog|' r'fasin|fasinh|fatan|fatan2|fatanh|fcos|fcosh|' @@ -118,34 +111,34 @@ class ForthLexer(RegexLexer): r'fsincos|fsinh|fsqrt|ftan|ftanh|f~|precision|' r'set-precision|sf!|sf@|sfalign|sfaligned|sfloat\+|' r'sfloats|' - # *** Wordset LOCAL + # *** Wordset LOCAL r'\(local\)|to|' - # *** Wordset LOCAL-EXT + # *** Wordset LOCAL-EXT r'locals\||' - # *** Wordset MEMORY + # *** Wordset MEMORY r'allocate|free|resize|' - # *** Wordset SEARCH + # *** Wordset SEARCH r'definitions|find|forth-wordlist|get-current|' r'get-order|search-wordlist|set-current|set-order|' r'wordlist|' - # *** Wordset SEARCH-EXT + # *** Wordset SEARCH-EXT r'also|forth|only|order|previous|' - # *** Wordset STRING + # *** Wordset STRING r'-trailing|\/string|blank|cmove|cmove>|compare|' r'search|sliteral|' - # *** Wordset TOOLS + # *** Wordset TOOLS r'.s|dump|see|words|' - # *** Wordset TOOLS-EXT + # *** 
Wordset TOOLS-EXT r';code|' r'ahead|assembler|bye|code|cs-pick|cs-roll|' r'editor|state|\[else\]|\[if\]|\[then\]|' - # *** Wordset TOOLS-EXT-obsolescent - r'forget|' - # Forth 2012 - r'defer|defer@|defer!|action-of|begin-structure|field:|buffer:|' - r'parse-name|buffer:|traverse-wordlist|n>r|nr>|2value|fvalue|' - r'name>interpret|name>compile|name>string|' - r'cfield:|end-structure)'+delimiter, Keyword), + # *** Wordset TOOLS-EXT-obsolescent + r'forget|' + # Forth 2012 + r'defer|defer@|defer!|action-of|begin-structure|field:|buffer:|' + r'parse-name|buffer:|traverse-wordlist|n>r|nr>|2value|fvalue|' + r'name>interpret|name>compile|name>string|' + r'cfield:|end-structure)(?!\S)', Keyword), # Numbers (r'(\$[0-9A-F]+)', Number.Hex), @@ -156,18 +149,18 @@ class ForthLexer(RegexLexer): r'itype|icompare|sp@|sp!|rp@|rp!|up@|up!|' r'>a|a>|a@|a!|a@+|a@-|>b|b>|b@|b!|b@+|b@-|' r'find-name|1ms|' - r'sp0|rp0|\(evaluate\)|int-trap|int!)' + delimiter, + r'sp0|rp0|\(evaluate\)|int-trap|int!)(?!\S)', Name.Constant), # a proposal (r'(do-recognizer|r:fail|recognizer:|get-recognizers|' r'set-recognizers|r:float|r>comp|r>int|r>post|' r'r:name|r:word|r:dnum|r:num|recognizer|forth-recognizer|' - r'rec:num|rec:float|rec:word)' + delimiter, Name.Decorator), + r'rec:num|rec:float|rec:word)(?!\S)', Name.Decorator), # defining words. 
The next word is a new command name (r'(Evalue|Rvalue|Uvalue|Edefer|Rdefer|Udefer)(\s+)', bygroups(Keyword.Namespace, Text), 'worddef'), - (valid_name, Name.Function), # Anything else is executed + (r'\S+', Name.Function), # Anything else is executed ], 'worddef': [ @@ -177,3 +170,9 @@ class ForthLexer(RegexLexer): (r'[^"]+', String, '#pop'), ], } + + def analyse_text(text): + """Forth uses : COMMAND ; quite a lot in a single line, so we're trying + to find that.""" + if re.search('\n:[^\n]+;\n', text): + return 0.1 diff --git a/src/typecode/_vendor/pygments/lexers/fortran.py b/src/typecode/_vendor/pygments/lexers/fortran.py index 545664c..f9b6f38 100644 --- a/src/typecode/_vendor/pygments/lexers/fortran.py +++ b/src/typecode/_vendor/pygments/lexers/fortran.py @@ -5,7 +5,7 @@ Lexers for Fortran languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -56,14 +56,14 @@ class FortranLexer(RegexLexer): 'CODIMENSION', 'COMMON', 'CONCURRRENT', 'CONTIGUOUS', 'CONTAINS', 'CONTINUE', 'CRITICAL', 'CYCLE', 'DATA', 'DEALLOCATE', 'DECODE', 'DEFERRED', 'DIMENSION', 'DO', 'ELEMENTAL', 'ELSE', 'ENCODE', 'END', - 'ENTRY', 'ENUM', 'ENUMERATOR', 'EQUIVALENCE', 'EXIT', 'EXTENDS', - 'EXTERNAL', 'EXTRINSIC', 'FILE', 'FINAL', 'FORALL', 'FORMAT', + 'ENTRY', 'ENUM', 'ENUMERATOR', 'EQUIVALENCE', 'ERROR STOP', 'EXIT', + 'EXTENDS', 'EXTERNAL', 'EXTRINSIC', 'FILE', 'FINAL', 'FORALL', 'FORMAT', 'FUNCTION', 'GENERIC', 'GOTO', 'IF', 'IMAGES', 'IMPLICIT', 'IMPORT', 'IMPURE', 'INCLUDE', 'INQUIRE', 'INTENT', 'INTERFACE', 'INTRINSIC', 'IS', 'LOCK', 'MEMORY', 'MODULE', 'NAMELIST', 'NULLIFY', - 'NONE', 'NON_INTRINSIC', 'NON_OVERRIDABLE', 'NOPASS', 'OPEN', 'OPTIONAL', - 'OPTIONS', 'PARAMETER', 'PASS', 'PAUSE', 'POINTER', 'PRINT', 'PRIVATE', - 'PROGRAM', 'PROCEDURE', 'PROTECTED', 'PUBLIC', 'PURE', 'READ', + 'NONE', 'NON_INTRINSIC', 'NON_OVERRIDABLE', 'NOPASS', 'ONLY', 
'OPEN', + 'OPTIONAL', 'OPTIONS', 'PARAMETER', 'PASS', 'PAUSE', 'POINTER', 'PRINT', + 'PRIVATE', 'PROGRAM', 'PROCEDURE', 'PROTECTED', 'PUBLIC', 'PURE', 'READ', 'RECURSIVE', 'RESULT', 'RETURN', 'REWIND', 'SAVE', 'SELECT', 'SEQUENCE', 'STOP', 'SUBMODULE', 'SUBROUTINE', 'SYNC', 'SYNCALL', 'SYNCIMAGES', 'SYNCMEMORY', 'TARGET', 'THEN', 'TYPE', 'UNLOCK', 'USE', 'VALUE', @@ -155,10 +155,10 @@ class FortranLexer(RegexLexer): ], 'nums': [ - (r'\d+(?![.e])(_[a-z]\w+)?', Number.Integer), - (r'[+-]?\d*\.\d+([ed][-+]?\d+)?(_[a-z]\w+)?', Number.Float), - (r'[+-]?\d+\.\d*([ed][-+]?\d+)?(_[a-z]\w+)?', Number.Float), - (r'[+-]?\d+(\.\d*)?[ed][-+]?\d+(_[a-z]\w+)?', Number.Float), + (r'\d+(?![.e])(_([1-9]|[a-z]\w*))?', Number.Integer), + (r'[+-]?\d*\.\d+([ed][-+]?\d+)?(_([1-9]|[a-z]\w*))?', Number.Float), + (r'[+-]?\d+\.\d*([ed][-+]?\d+)?(_([1-9]|[a-z]\w*))?', Number.Float), + (r'[+-]?\d+(\.\d*)?[ed][-+]?\d+(_([1-9]|[a-z]\w*))?', Number.Float), ], } diff --git a/src/typecode/_vendor/pygments/lexers/foxpro.py b/src/typecode/_vendor/pygments/lexers/foxpro.py index 2c249ad..2a50900 100644 --- a/src/typecode/_vendor/pygments/lexers/foxpro.py +++ b/src/typecode/_vendor/pygments/lexers/foxpro.py @@ -5,7 +5,7 @@ Simple lexer for Microsoft Visual FoxPro source code. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -36,7 +36,7 @@ class FoxProLexer(RegexLexer): tokens = { 'root': [ - (r';\s*\n', Punctuation), # consume newline + (r';\s*\n', Punctuation), # consume newline (r'(^|\n)\s*', Text, 'newline'), # Square brackets may be used for array indices diff --git a/src/typecode/_vendor/pygments/lexers/freefem.py b/src/typecode/_vendor/pygments/lexers/freefem.py index 5a3cec5..0d391f3 100644 --- a/src/typecode/_vendor/pygments/lexers/freefem.py +++ b/src/typecode/_vendor/pygments/lexers/freefem.py @@ -5,7 +5,7 @@ Lexer for FreeFem++ language. 
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -36,27 +36,27 @@ class FreeFemLexer(CppLexer): mimetypes = ['text/x-freefem'] # Language operators - operators = set(('+', '-', '*', '.*', '/', './', '%', '^', '^-1', ':', '\'')) + operators = {'+', '-', '*', '.*', '/', './', '%', '^', '^-1', ':', '\''} # types - types = set(('bool', 'border', 'complex', 'dmatrix', 'fespace', 'func', 'gslspline', - 'ifstream', 'int', 'macro', 'matrix', 'mesh', 'mesh3', 'mpiComm', - 'mpiGroup', 'mpiRequest', 'NewMacro', 'EndMacro', 'ofstream', 'Pmmap', - 'problem', 'Psemaphore', 'real', 'solve', 'string', 'varf')) + types = {'bool', 'border', 'complex', 'dmatrix', 'fespace', 'func', 'gslspline', + 'ifstream', 'int', 'macro', 'matrix', 'mesh', 'mesh3', 'mpiComm', + 'mpiGroup', 'mpiRequest', 'NewMacro', 'EndMacro', 'ofstream', 'Pmmap', + 'problem', 'Psemaphore', 'real', 'solve', 'string', 'varf'} # finite element spaces - fespaces = set(('BDM1', 'BDM1Ortho', 'Edge03d', 'Edge13d', 'Edge23d', 'FEQF', 'HCT', - 'P0', 'P03d', 'P0Edge', 'P1', 'P13d', 'P1b', 'P1b3d', 'P1bl', 'P1bl3d', - 'P1dc', 'P1Edge', 'P1nc', 'P2', 'P23d', 'P2b', 'P2BR', 'P2dc', 'P2Edge', - 'P2h', 'P2Morley', 'P2pnc', 'P3', 'P3dc', 'P3Edge', 'P4', 'P4dc', - 'P4Edge', 'P5Edge', 'RT0', 'RT03d', 'RT0Ortho', 'RT1', 'RT1Ortho', - 'RT2', 'RT2Ortho')) + fespaces = {'BDM1', 'BDM1Ortho', 'Edge03d', 'Edge13d', 'Edge23d', 'FEQF', 'HCT', + 'P0', 'P03d', 'P0Edge', 'P1', 'P13d', 'P1b', 'P1b3d', 'P1bl', 'P1bl3d', + 'P1dc', 'P1Edge', 'P1nc', 'P2', 'P23d', 'P2b', 'P2BR', 'P2dc', 'P2Edge', + 'P2h', 'P2Morley', 'P2pnc', 'P3', 'P3dc', 'P3Edge', 'P4', 'P4dc', + 'P4Edge', 'P5Edge', 'RT0', 'RT03d', 'RT0Ortho', 'RT1', 'RT1Ortho', + 'RT2', 'RT2Ortho'} # preprocessor - preprocessor = set(('ENDIFMACRO', 'include', 'IFMACRO', 'load')) + preprocessor = {'ENDIFMACRO', 'include', 'IFMACRO', 'load'} # Language keywords - 
keywords = set(( + keywords = { 'adj', 'append', 'area', @@ -169,10 +169,10 @@ class FreeFemLexer(CppLexer): 'x', 'y', 'z' - )) + } # Language shipped functions and class ( ) - functions = set(( + functions = { 'abs', 'acos', 'acosh', @@ -702,10 +702,10 @@ class FreeFemLexer(CppLexer): 'y0', 'y1', 'yn' - )) + } # function parameters - parameters = set(( + parameters = { 'A', 'A1', 'abserror', @@ -849,13 +849,13 @@ class FreeFemLexer(CppLexer): 'WindowIndex', 'which', 'zbound' - )) + } # deprecated - deprecated = set(('fixeborder',)) + deprecated = {'fixeborder'} # do not highlight - suppress_highlight = set(( + suppress_highlight = { 'alignof', 'asm', 'constexpr', @@ -874,7 +874,7 @@ class FreeFemLexer(CppLexer): 'typeid', 'typename', 'using' - )) + } def get_tokens_unprocessed(self, text): for index, token, value in CppLexer.get_tokens_unprocessed(self, text): diff --git a/src/typecode/_vendor/pygments/lexers/functional.py b/src/typecode/_vendor/pygments/lexers/functional.py index 47deede..1fd3976 100644 --- a/src/typecode/_vendor/pygments/lexers/functional.py +++ b/src/typecode/_vendor/pygments/lexers/functional.py @@ -5,7 +5,7 @@ Just export lexer classes previously contained in this module. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/gdscript.py b/src/typecode/_vendor/pygments/lexers/gdscript.py new file mode 100644 index 0000000..217a22e --- /dev/null +++ b/src/typecode/_vendor/pygments/lexers/gdscript.py @@ -0,0 +1,346 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.gdscript + ~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexer for GDScript. + + Modified by Daniel J. Ramirez based on the original + python.py. + + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +import re + +from typecode._vendor.pygments.lexer import RegexLexer, include, bygroups, default, words, \ + combined +from typecode._vendor.pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ["GDScriptLexer"] + +line_re = re.compile(".*?\n") + + +class GDScriptLexer(RegexLexer): + """ + For `GDScript source code `_. + """ + + name = "GDScript" + aliases = ["gdscript", "gd"] + filenames = ["*.gd"] + mimetypes = ["text/x-gdscript", "application/x-gdscript"] + + def innerstring_rules(ttype): + return [ + # the old style '%s' % (...) string formatting + ( + r"%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?" + "[hlL]?[E-GXc-giorsux%]", + String.Interpol, + ), + # backslashes, quotes and formatting signs must be parsed one at a time + (r'[^\\\'"%\n]+', ttype), + (r'[\'"\\]', ttype), + # unhandled string formatting sign + (r"%", ttype), + # newlines are an error (use "nl" state) + ] + + tokens = { + "root": [ + (r"\n", Text), + ( + r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")', + bygroups(Text, String.Affix, String.Doc), + ), + ( + r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')", + bygroups(Text, String.Affix, String.Doc), + ), + (r"[^\S\n]+", Text), + (r"#.*$", Comment.Single), + (r"[]{}:(),;[]", Punctuation), + (r"\\\n", Text), + (r"\\", Text), + (r"(in|and|or|not)\b", Operator.Word), + ( + r"!=|==|<<|>>|&&|\+=|-=|\*=|/=|%=|&=|\|=|\|\||[-~+/*%=<>&^.!|$]", + Operator, + ), + include("keywords"), + (r"(func)((?:\s|\\\s)+)", bygroups(Keyword, Text), "funcname"), + (r"(class)((?:\s|\\\s)+)", bygroups(Keyword, Text), "classname"), + include("builtins"), + ( + '([rR]|[uUbB][rR]|[rR][uUbB])(""")', + bygroups(String.Affix, String.Double), + "tdqs", + ), + ( + "([rR]|[uUbB][rR]|[rR][uUbB])(''')", + bygroups(String.Affix, String.Single), + "tsqs", + ), + ( + '([rR]|[uUbB][rR]|[rR][uUbB])(")', + bygroups(String.Affix, String.Double), + "dqs", + ), + ( + "([rR]|[uUbB][rR]|[rR][uUbB])(')", + bygroups(String.Affix, 
String.Single), + "sqs", + ), + ( + '([uUbB]?)(""")', + bygroups(String.Affix, String.Double), + combined("stringescape", "tdqs"), + ), + ( + "([uUbB]?)(''')", + bygroups(String.Affix, String.Single), + combined("stringescape", "tsqs"), + ), + ( + '([uUbB]?)(")', + bygroups(String.Affix, String.Double), + combined("stringescape", "dqs"), + ), + ( + "([uUbB]?)(')", + bygroups(String.Affix, String.Single), + combined("stringescape", "sqs"), + ), + include("name"), + include("numbers"), + ], + "keywords": [ + ( + words( + ( + "and", + "in", + "not", + "or", + "as", + "breakpoint", + "class", + "class_name", + "extends", + "is", + "func", + "setget", + "signal", + "tool", + "const", + "enum", + "export", + "onready", + "static", + "var", + "break", + "continue", + "if", + "elif", + "else", + "for", + "pass", + "return", + "match", + "while", + "remote", + "master", + "puppet", + "remotesync", + "mastersync", + "puppetsync", + ), + suffix=r"\b", + ), + Keyword, + ), + ], + "builtins": [ + ( + words( + ( + "Color8", + "ColorN", + "abs", + "acos", + "asin", + "assert", + "atan", + "atan2", + "bytes2var", + "ceil", + "char", + "clamp", + "convert", + "cos", + "cosh", + "db2linear", + "decimals", + "dectime", + "deg2rad", + "dict2inst", + "ease", + "exp", + "floor", + "fmod", + "fposmod", + "funcref", + "hash", + "inst2dict", + "instance_from_id", + "is_inf", + "is_nan", + "lerp", + "linear2db", + "load", + "log", + "max", + "min", + "nearest_po2", + "pow", + "preload", + "print", + "print_stack", + "printerr", + "printraw", + "prints", + "printt", + "rad2deg", + "rand_range", + "rand_seed", + "randf", + "randi", + "randomize", + "range", + "round", + "seed", + "sign", + "sin", + "sinh", + "sqrt", + "stepify", + "str", + "str2var", + "tan", + "tan", + "tanh", + "type_exist", + "typeof", + "var2bytes", + "var2str", + "weakref", + "yield", + ), + prefix=r"(?>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\|' r'|<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])', Operator), diff 
--git a/src/typecode/_vendor/pygments/lexers/grammar_notation.py b/src/typecode/_vendor/pygments/lexers/grammar_notation.py index 145e4e2..dd51c43 100644 --- a/src/typecode/_vendor/pygments/lexers/grammar_notation.py +++ b/src/typecode/_vendor/pygments/lexers/grammar_notation.py @@ -3,9 +3,9 @@ pygments.lexers.grammar_notation ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - Lexers for grammer notations like BNF. + Lexers for grammar notations like BNF. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -15,12 +15,12 @@ from typecode._vendor.pygments.token import Comment, Keyword, Literal, Name, Number, \ Operator, Punctuation, String, Text -__all__ = ['BnfLexer', 'AbnfLexer', 'JsgfLexer'] +__all__ = ['BnfLexer', 'AbnfLexer', 'JsgfLexer', 'PegLexer'] class BnfLexer(RegexLexer): """ - This lexer is for grammer notations which are similar to + This lexer is for grammar notations which are similar to original BNF. In order to maximize a number of targets of this lexer, @@ -211,3 +211,60 @@ class JsgfLexer(RegexLexer): (r'.', Comment.Multiline), ], } + + +class PegLexer(RegexLexer): + """ + This lexer is for `Parsing Expression Grammars + `_ (PEG). + + Various implementations of PEG have made different decisions + regarding the syntax, so let's try to be accommodating: + + * `<-`, `←`, `:`, and `=` are all accepted as rule operators. + + * Both `|` and `/` are choice operators. + + * `^`, `↑`, and `~` are cut operators. + + * A single `a-z` character immediately before a string, or + multiple `a-z` characters following a string, are part of the + string (e.g., `r"..."` or `"..."ilmsuxa`). + + .. 
versionadded:: 2.6 + """ + + name = 'PEG' + aliases = ['peg'] + filenames = ['*.peg'] + mimetypes = ['text/x-peg'] + + tokens = { + 'root': [ + # Comments + (r'#.*', Comment.Single), + + # All operators + (r'<-|[←:=/|&!?*+^↑~]', Operator), + + # Other punctuation + (r'[()]', Punctuation), + + # Keywords + (r'\.', Keyword), + + # Character classes + (r'(\[)([^\]]*(?:\\.[^\]\\]*)*)(\])', + bygroups(Punctuation, String, Punctuation)), + + # Single and double quoted strings (with optional modifiers) + (r'[a-z]?"[^"\\]*(?:\\.[^"\\]*)*"[a-z]*', String.Double), + (r"[a-z]?'[^'\\]*(?:\\.[^'\\]*)*'[a-z]*", String.Single), + + # Nonterminals are not whitespace, operators, or punctuation + (r'[^\s<←:=/|&!?*+\^↑~()\[\]"\'#]+', Name.Class), + + # Fallback + (r'.', Text), + ], + } diff --git a/src/typecode/_vendor/pygments/lexers/graph.py b/src/typecode/_vendor/pygments/lexers/graph.py index 22bae20..e3cfdf9 100644 --- a/src/typecode/_vendor/pygments/lexers/graph.py +++ b/src/typecode/_vendor/pygments/lexers/graph.py @@ -5,7 +5,7 @@ Lexers for graph query languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/graphics.py b/src/typecode/_vendor/pygments/lexers/graphics.py index 016e8f4..0fb74cb 100644 --- a/src/typecode/_vendor/pygments/lexers/graphics.py +++ b/src/typecode/_vendor/pygments/lexers/graphics.py @@ -5,7 +5,7 @@ Lexers for computer graphics and plotting related languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -425,7 +425,7 @@ class AsymptoteLexer(RegexLexer): ], 'statements': [ # simple string (TeX friendly) - (r'"(\\\\|\\"|[^"])*"', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), # C style string (with character escapes) (r"'", String, 'string'), (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float), @@ -775,7 +775,26 @@ class PovrayLexer(RegexLexer): (r'[0-9]+\.[0-9]*', Number.Float), (r'\.[0-9]+', Number.Float), (r'[0-9]+', Number.Integer), - (r'"(\\\\|\\"|[^"])*"', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), (r'\s+', Text), ] } + + def analyse_text(text): + """POVRAY is similar to JSON/C, but the combination of camera and + light_source is probably not very likely elsewhere. HLSL or GLSL + are similar (GLSL even has #version), but they miss #declare, and + light_source/camera are not keywords anywhere else -- it's fair + to assume though that any POVRAY scene must have a camera and + lightsource.""" + result = 0 + if '#version' in text: + result += 0.05 + if '#declare' in text: + result += 0.05 + if 'camera' in text: + result += 0.05 + if 'light_source' in text: + result += 0.1 + + return result diff --git a/src/typecode/_vendor/pygments/lexers/haskell.py b/src/typecode/_vendor/pygments/lexers/haskell.py index 6b4d38a..da936f2 100644 --- a/src/typecode/_vendor/pygments/lexers/haskell.py +++ b/src/typecode/_vendor/pygments/lexers/haskell.py @@ -5,7 +5,7 @@ Lexers for Haskell and related languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -66,6 +66,7 @@ class HaskellLexer(RegexLexer): (r"(')[" + uni.Lu + r"][\w\']*", Keyword.Type), (r"(')\[[^\]]*\]", Keyword.Type), # tuples and lists get special treatment in GHC (r"(')\([^)]*\)", Keyword.Type), # .. 
+ (r"(')[:!#$%&*+.\\/<=>?@^|~-]+", Keyword.Type), # promoted type operators # Operators (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials @@ -197,6 +198,7 @@ class IdrisLexer(RegexLexer): 'if', 'in', 'infix[lr]?', 'instance', 'rewrite', 'auto', 'namespace', 'codata', 'mutual', 'private', 'public', 'abstract', 'total', 'partial', + 'interface', 'implementation', 'export', 'covering', 'constructor', 'let', 'proof', 'of', 'then', 'static', 'where', '_', 'with', 'pattern', 'term', 'syntax', 'prefix', 'postulate', 'parameters', 'record', 'dsl', 'impossible', 'implicit', @@ -213,7 +215,7 @@ class IdrisLexer(RegexLexer): tokens = { 'root': [ # Comments - (r'^(\s*)(%%%s)' % '|'.join(directives), + (r'^(\s*)(%%(%s))' % '|'.join(directives), bygroups(Text, Keyword.Reserved)), (r'(\s*)(--(?![!#$%&*+./<=>?@^|_~:\\]).*?)$', bygroups(Text, Comment.Single)), (r'(\s*)(\|{3}.*?)$', bygroups(Text, Comment.Single)), @@ -325,10 +327,10 @@ class AgdaLexer(RegexLexer): # Identifiers (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved), (r'(import|module)(\s+)', bygroups(Keyword.Reserved, Text), 'module'), - (r'\b(Set|Prop)\b', Keyword.Type), + (r'\b(Set|Prop)[\u2080-\u2089]*\b', Keyword.Type), # Special Symbols (r'(\(|\)|\{|\})', Operator), - (u'(\\.{1,3}|\\||\u039B|\u2200|\u2192|:|=|->)', Operator.Word), + (r'(\.{1,3}|\||\u03BB|\u2200|\u2192|:|=|->)', Operator.Word), # Numbers (r'\d+[eE][+-]?\d+', Number.Float), (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float), @@ -481,10 +483,10 @@ class CryptolLexer(RegexLexer): ], } - EXTRA_KEYWORDS = set(('join', 'split', 'reverse', 'transpose', 'width', - 'length', 'tail', '<<', '>>', '<<<', '>>>', 'const', - 'reg', 'par', 'seq', 'ASSERT', 'undefined', 'error', - 'trace')) + EXTRA_KEYWORDS = {'join', 'split', 'reverse', 'transpose', 'width', + 'length', 'tail', '<<', '>>', '<<<', '>>>', 'const', + 'reg', 'par', 'seq', 'ASSERT', 'undefined', 'error', + 
'trace'} def get_tokens_unprocessed(self, text): stack = ['root'] @@ -557,8 +559,7 @@ def get_tokens_unprocessed(self, text): latex += line insertions.append((len(code), list(lxlexer.get_tokens_unprocessed(latex)))) - for item in do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code)): - yield item + yield from do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code)) class LiterateHaskellLexer(LiterateLexer): diff --git a/src/typecode/_vendor/pygments/lexers/haxe.py b/src/typecode/_vendor/pygments/lexers/haxe.py index 0b01a45..e641608 100644 --- a/src/typecode/_vendor/pygments/lexers/haxe.py +++ b/src/typecode/_vendor/pygments/lexers/haxe.py @@ -5,7 +5,7 @@ Lexers for Haxe and related stuff. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -467,7 +467,7 @@ def preproc_callback(self, match, ctx): (r'"', String.Double, ('#pop', 'expr-chain', 'string-double')), # EReg - (r'~/(\\\\|\\/|[^/\n])*/[gimsu]*', String.Regex, ('#pop', 'expr-chain')), + (r'~/(\\\\|\\[^\\]|[^/\\\n])*/[gimsu]*', String.Regex, ('#pop', 'expr-chain')), # Array (r'\[', Punctuation, ('#pop', 'expr-chain', 'array-decl')), @@ -722,7 +722,7 @@ def preproc_callback(self, match, ctx): (r'"', String.Double, ('#pop', 'string-double')), # EReg - (r'~/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex, '#pop'), + (r'~/(\\\\|\\[^\\]|[^/\\\n])*/[gim]*', String.Regex, '#pop'), # Array (r'\[', Operator, ('#pop', 'array-decl')), diff --git a/src/typecode/_vendor/pygments/lexers/hdl.py b/src/typecode/_vendor/pygments/lexers/hdl.py index 2865608..b9749d2 100644 --- a/src/typecode/_vendor/pygments/lexers/hdl.py +++ b/src/typecode/_vendor/pygments/lexers/hdl.py @@ -5,7 +5,7 @@ Lexers for hardware descriptor languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. 
:license: BSD, see LICENSE for details. """ @@ -105,11 +105,12 @@ class VerilogLexer(RegexLexer): (words(( 'byte', 'shortint', 'int', 'longint', 'integer', 'time', 'bit', 'logic', 'reg', 'supply0', 'supply1', 'tri', 'triand', - 'trior', 'tri0', 'tri1', 'trireg', 'uwire', 'wire', 'wand', 'wo' + 'trior', 'tri0', 'tri1', 'trireg', 'uwire', 'wire', 'wand', 'wor' 'shortreal', 'real', 'realtime'), suffix=r'\b'), Keyword.Type), (r'[a-zA-Z_]\w*:(?!:)', Name.Label), (r'\$?[a-zA-Z_]\w*', Name), + (r'\\(\S+)', Name), ], 'string': [ (r'"', String, '#pop'), @@ -131,14 +132,18 @@ class VerilogLexer(RegexLexer): ] } - def get_tokens_unprocessed(self, text): - for index, token, value in \ - RegexLexer.get_tokens_unprocessed(self, text): - # Convention: mark all upper case names as constants - if token is Name: - if value.isupper(): - token = Name.Constant - yield index, token, value + def analyse_text(text): + """Verilog code will use one of reg/wire/assign for sure, and that + is not common elsewhere.""" + result = 0 + if 'reg' in text: + result += 0.1 + if 'wire' in text: + result += 0.1 + if 'assign' in text: + result += 0.1 + + return result class SystemVerilogLexer(RegexLexer): @@ -170,91 +175,185 @@ class SystemVerilogLexer(RegexLexer): (r'[{}#@]', Punctuation), (r'L?"', String, 'string'), (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char), + (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float), (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), - (r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex), - (r'([0-9]+)|(\'b)[01]+', Number.Bin), - (r'([0-9]+)|(\'d)[0-9]+', Number.Integer), - (r'([0-9]+)|(\'o)[0-7]+', Number.Oct), - (r'\'[01xz]', Number), - (r'\d+[Ll]?', Number.Integer), + + (r'([1-9][_0-9]*)?\s*\'[sS]?[bB]\s*[xXzZ?01][_xXzZ?01]*', + Number.Bin), + (r'([1-9][_0-9]*)?\s*\'[sS]?[oO]\s*[xXzZ?0-7][_xXzZ?0-7]*', + Number.Oct), + (r'([1-9][_0-9]*)?\s*\'[sS]?[dD]\s*[xXzZ?0-9][_xXzZ?0-9]*', + Number.Integer), + 
(r'([1-9][_0-9]*)?\s*\'[sS]?[hH]\s*[xXzZ?0-9a-fA-F][_xXzZ?0-9a-fA-F]*', + Number.Hex), + + (r'\'[01xXzZ]', Number), + (r'[0-9][_0-9]*', Number.Integer), + (r'\*/', Error), + (r'[~!%^&*+=|?:<>/-]', Operator), - (r'[()\[\],.;\']', Punctuation), + (words(('inside', 'dist'), suffix=r'\b'), Operator.Word), + + (r'[()\[\],.;\'$]', Punctuation), (r'`[a-zA-Z_]\w*', Name.Constant), (words(( - 'accept_on', 'alias', 'always', 'always_comb', 'always_ff', 'always_latch', - 'and', 'assert', 'assign', 'assume', 'automatic', 'before', 'begin', 'bind', 'bins', - 'binsof', 'bit', 'break', 'buf', 'bufif0', 'bufif1', 'byte', 'case', 'casex', 'casez', - 'cell', 'chandle', 'checker', 'class', 'clocking', 'cmos', 'config', 'const', 'constraint', - 'context', 'continue', 'cover', 'covergroup', 'coverpoint', 'cross', 'deassign', - 'default', 'defparam', 'design', 'disable', 'dist', 'do', 'edge', 'else', 'end', 'endcase', - 'endchecker', 'endclass', 'endclocking', 'endconfig', 'endfunction', 'endgenerate', - 'endgroup', 'endinterface', 'endmodule', 'endpackage', 'endprimitive', - 'endprogram', 'endproperty', 'endsequence', 'endspecify', 'endtable', - 'endtask', 'enum', 'event', 'eventually', 'expect', 'export', 'extends', 'extern', - 'final', 'first_match', 'for', 'force', 'foreach', 'forever', 'fork', 'forkjoin', - 'function', 'generate', 'genvar', 'global', 'highz0', 'highz1', 'if', 'iff', 'ifnone', - 'ignore_bins', 'illegal_bins', 'implies', 'import', 'incdir', 'include', - 'initial', 'inout', 'input', 'inside', 'instance', 'int', 'integer', 'interface', - 'intersect', 'join', 'join_any', 'join_none', 'large', 'let', 'liblist', 'library', - 'local', 'localparam', 'logic', 'longint', 'macromodule', 'matches', 'medium', - 'modport', 'module', 'nand', 'negedge', 'new', 'nexttime', 'nmos', 'nor', 'noshowcancelled', - 'not', 'notif0', 'notif1', 'null', 'or', 'output', 'package', 'packed', 'parameter', - 'pmos', 'posedge', 'primitive', 'priority', 'program', 'property', 'protected', - 'pull0', 
'pull1', 'pulldown', 'pullup', 'pulsestyle_ondetect', 'pulsestyle_onevent', - 'pure', 'rand', 'randc', 'randcase', 'randsequence', 'rcmos', 'real', 'realtime', - 'ref', 'reg', 'reject_on', 'release', 'repeat', 'restrict', 'return', 'rnmos', - 'rpmos', 'rtran', 'rtranif0', 'rtranif1', 's_always', 's_eventually', 's_nexttime', - 's_until', 's_until_with', 'scalared', 'sequence', 'shortint', 'shortreal', - 'showcancelled', 'signed', 'small', 'solve', 'specify', 'specparam', 'static', - 'string', 'strong', 'strong0', 'strong1', 'struct', 'super', 'supply0', 'supply1', - 'sync_accept_on', 'sync_reject_on', 'table', 'tagged', 'task', 'this', 'throughout', - 'time', 'timeprecision', 'timeunit', 'tran', 'tranif0', 'tranif1', 'tri', 'tri0', - 'tri1', 'triand', 'trior', 'trireg', 'type', 'typedef', 'union', 'unique', 'unique0', - 'unsigned', 'until', 'until_with', 'untyped', 'use', 'uwire', 'var', 'vectored', - 'virtual', 'void', 'wait', 'wait_order', 'wand', 'weak', 'weak0', 'weak1', 'while', - 'wildcard', 'wire', 'with', 'within', 'wor', 'xnor', 'xor'), suffix=r'\b'), + 'accept_on', 'alias', 'always', 'always_comb', 'always_ff', + 'always_latch', 'and', 'assert', 'assign', 'assume', 'automatic', + 'before', 'begin', 'bind', 'bins', 'binsof', 'break', 'buf', + 'bufif0', 'bufif1', 'case', 'casex', 'casez', 'cell', + 'checker', 'clocking', 'cmos', 'config', + 'constraint', 'context', 'continue', 'cover', 'covergroup', + 'coverpoint', 'cross', 'deassign', 'default', 'defparam', 'design', + 'disable', 'do', 'edge', 'else', 'end', 'endcase', + 'endchecker', 'endclocking', 'endconfig', 'endfunction', + 'endgenerate', 'endgroup', 'endinterface', 'endmodule', 'endpackage', + 'endprimitive', 'endprogram', 'endproperty', 'endsequence', + 'endspecify', 'endtable', 'endtask', 'enum', 'eventually', + 'expect', 'export', 'extern', 'final', 'first_match', + 'for', 'force', 'foreach', 'forever', 'fork', 'forkjoin', 'function', + 'generate', 'genvar', 'global', 'highz0', 'highz1', 'if', 
'iff', + 'ifnone', 'ignore_bins', 'illegal_bins', 'implies', 'implements', 'import', + 'incdir', 'include', 'initial', 'inout', 'input', + 'instance', 'interconnect', 'interface', 'intersect', 'join', + 'join_any', 'join_none', 'large', 'let', 'liblist', 'library', + 'local', 'localparam', 'macromodule', 'matches', + 'medium', 'modport', 'module', 'nand', 'negedge', 'nettype', 'new', 'nexttime', + 'nmos', 'nor', 'noshowcancelled', 'not', 'notif0', 'notif1', 'null', + 'or', 'output', 'package', 'packed', 'parameter', 'pmos', 'posedge', + 'primitive', 'priority', 'program', 'property', 'protected', 'pull0', + 'pull1', 'pulldown', 'pullup', 'pulsestyle_ondetect', + 'pulsestyle_onevent', 'pure', 'rand', 'randc', 'randcase', + 'randsequence', 'rcmos', 'ref', + 'reject_on', 'release', 'repeat', 'restrict', 'return', 'rnmos', + 'rpmos', 'rtran', 'rtranif0', 'rtranif1', 's_always', 's_eventually', + 's_nexttime', 's_until', 's_until_with', 'scalared', 'sequence', + 'showcancelled', 'small', 'soft', 'solve', + 'specify', 'specparam', 'static', 'strong', 'strong0', + 'strong1', 'struct', 'super', 'sync_accept_on', + 'sync_reject_on', 'table', 'tagged', 'task', 'this', 'throughout', + 'timeprecision', 'timeunit', 'tran', 'tranif0', 'tranif1', + 'typedef', 'union', 'unique', 'unique0', 'until', + 'until_with', 'untyped', 'use', 'vectored', + 'virtual', 'wait', 'wait_order', 'weak', 'weak0', + 'weak1', 'while', 'wildcard', 'with', 'within', + 'xnor', 'xor'), + suffix=r'\b'), Keyword), + (r'(class)(\s+)([a-zA-Z_]\w*)', + bygroups(Keyword.Declaration, Text, Name.Class)), + (r'(extends)(\s+)([a-zA-Z_]\w*)', + bygroups(Keyword.Declaration, Text, Name.Class)), + (r'(endclass\b)(?:(\s*)(:)(\s*)([a-zA-Z_]\w*))?', + bygroups(Keyword.Declaration, Text, Punctuation, Text, Name.Class)), + (words(( - '`__FILE__', '`__LINE__', '`begin_keywords', '`celldefine', '`default_nettype', - '`define', '`else', '`elsif', '`end_keywords', '`endcelldefine', '`endif', - '`ifdef', '`ifndef', '`include', 
'`line', '`nounconnected_drive', '`pragma', - '`resetall', '`timescale', '`unconnected_drive', '`undef', '`undefineall'), + # Variable types + 'bit', 'byte', 'chandle', 'const', 'event', 'int', 'integer', + 'logic', 'longint', 'real', 'realtime', 'reg', 'shortint', + 'shortreal', 'signed', 'string', 'time', 'type', 'unsigned', + 'var', 'void', + # Net types + 'supply0', 'supply1', 'tri', 'triand', 'trior', 'trireg', + 'tri0', 'tri1', 'uwire', 'wand', 'wire', 'wor'), + suffix=r'\b'), + Keyword.Type), + + (words(( + '`__FILE__', '`__LINE__', '`begin_keywords', '`celldefine', + '`default_nettype', '`define', '`else', '`elsif', '`end_keywords', + '`endcelldefine', '`endif', '`ifdef', '`ifndef', '`include', + '`line', '`nounconnected_drive', '`pragma', '`resetall', + '`timescale', '`unconnected_drive', '`undef', '`undefineall'), suffix=r'\b'), Comment.Preproc), (words(( - '$display', '$displayb', '$displayh', '$displayo', '$dumpall', '$dumpfile', - '$dumpflush', '$dumplimit', '$dumpoff', '$dumpon', '$dumpports', - '$dumpportsall', '$dumpportsflush', '$dumpportslimit', '$dumpportsoff', - '$dumpportson', '$dumpvars', '$fclose', '$fdisplay', '$fdisplayb', - '$fdisplayh', '$fdisplayo', '$feof', '$ferror', '$fflush', '$fgetc', - '$fgets', '$finish', '$fmonitor', '$fmonitorb', '$fmonitorh', '$fmonitoro', - '$fopen', '$fread', '$fscanf', '$fseek', '$fstrobe', '$fstrobeb', '$fstrobeh', - '$fstrobeo', '$ftell', '$fwrite', '$fwriteb', '$fwriteh', '$fwriteo', - '$monitor', '$monitorb', '$monitorh', '$monitoro', '$monitoroff', - '$monitoron', '$plusargs', '$random', '$readmemb', '$readmemh', '$rewind', - '$sformat', '$sformatf', '$sscanf', '$strobe', '$strobeb', '$strobeh', '$strobeo', - '$swrite', '$swriteb', '$swriteh', '$swriteo', '$test', '$ungetc', - '$value$plusargs', '$write', '$writeb', '$writeh', '$writememb', - '$writememh', '$writeo'), suffix=r'\b'), + # Simulation control tasks (20.2) + '$exit', '$finish', '$stop', + # Simulation time functions (20.3) + '$realtime', 
'$stime', '$time', + # Timescale tasks (20.4) + '$printtimescale', '$timeformat', + # Conversion functions + '$bitstoreal', '$bitstoshortreal', '$cast', '$itor', + '$realtobits', '$rtoi', '$shortrealtobits', '$signed', + '$unsigned', + # Data query functions (20.6) + '$bits', '$isunbounded', '$typename', + # Array query functions (20.7) + '$dimensions', '$high', '$increment', '$left', '$low', '$right', + '$size', '$unpacked_dimensions', + # Math functions (20.8) + '$acos', '$acosh', '$asin', '$asinh', '$atan', '$atan2', + '$atanh', '$ceil', '$clog2', '$cos', '$cosh', '$exp', '$floor', + '$hypot', '$ln', '$log10', '$pow', '$sin', '$sinh', '$sqrt', + '$tan', '$tanh', + # Bit vector system functions (20.9) + '$countbits', '$countones', '$isunknown', '$onehot', '$onehot0', + # Severity tasks (20.10) + '$info', '$error', '$fatal', '$warning', + # Assertion control tasks (20.12) + '$assertcontrol', '$assertfailoff', '$assertfailon', + '$assertkill', '$assertnonvacuouson', '$assertoff', '$asserton', + '$assertpassoff', '$assertpasson', '$assertvacuousoff', + # Sampled value system functions (20.13) + '$changed', '$changed_gclk', '$changing_gclk', '$falling_gclk', + '$fell', '$fell_gclk', '$future_gclk', '$past', '$past_gclk', + '$rising_gclk', '$rose', '$rose_gclk', '$sampled', '$stable', + '$stable_gclk', '$steady_gclk', + # Coverage control functions (20.14) + '$coverage_control', '$coverage_get', '$coverage_get_max', + '$coverage_merge', '$coverage_save', '$get_coverage', + '$load_coverage_db', '$set_coverage_db_name', + # Probabilistic distribution functions (20.15) + '$dist_chi_square', '$dist_erlang', '$dist_exponential', + '$dist_normal', '$dist_poisson', '$dist_t', '$dist_uniform', + '$random', + # Stochastic analysis tasks and functions (20.16) + '$q_add', '$q_exam', '$q_full', '$q_initialize', '$q_remove', + # PLA modeling tasks (20.17) + '$async$and$array', '$async$and$plane', '$async$nand$array', + '$async$nand$plane', '$async$nor$array', '$async$nor$plane', + 
'$async$or$array', '$async$or$plane', '$sync$and$array', + '$sync$and$plane', '$sync$nand$array', '$sync$nand$plane', + '$sync$nor$array', '$sync$nor$plane', '$sync$or$array', + '$sync$or$plane', + # Miscellaneous tasks and functions (20.18) + '$system', + # Display tasks (21.2) + '$display', '$displayb', '$displayh', '$displayo', '$monitor', + '$monitorb', '$monitorh', '$monitoro', '$monitoroff', + '$monitoron', '$strobe', '$strobeb', '$strobeh', '$strobeo', + '$write', '$writeb', '$writeh', '$writeo', + # File I/O tasks and functions (21.3) + '$fclose', '$fdisplay', '$fdisplayb', '$fdisplayh', + '$fdisplayo', '$feof', '$ferror', '$fflush', '$fgetc', '$fgets', + '$fmonitor', '$fmonitorb', '$fmonitorh', '$fmonitoro', '$fopen', + '$fread', '$fscanf', '$fseek', '$fstrobe', '$fstrobeb', + '$fstrobeh', '$fstrobeo', '$ftell', '$fwrite', '$fwriteb', + '$fwriteh', '$fwriteo', '$rewind', '$sformat', '$sformatf', + '$sscanf', '$swrite', '$swriteb', '$swriteh', '$swriteo', + '$ungetc', + # Memory load tasks (21.4) + '$readmemb', '$readmemh', + # Memory dump tasks (21.5) + '$writememb', '$writememh', + # Command line input (21.6) + '$test$plusargs', '$value$plusargs', + # VCD tasks (21.7) + '$dumpall', '$dumpfile', '$dumpflush', '$dumplimit', '$dumpoff', + '$dumpon', '$dumpports', '$dumpportsall', '$dumpportsflush', + '$dumpportslimit', '$dumpportsoff', '$dumpportson', '$dumpvars', + ), suffix=r'\b'), Name.Builtin), - (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'), - (words(( - 'byte', 'shortint', 'int', 'longint', 'integer', 'time', - 'bit', 'logic', 'reg', 'supply0', 'supply1', 'tri', 'triand', - 'trior', 'tri0', 'tri1', 'trireg', 'uwire', 'wire', 'wand', 'wo' - 'shortreal', 'real', 'realtime'), suffix=r'\b'), - Keyword.Type), (r'[a-zA-Z_]\w*:(?!:)', Name.Label), (r'\$?[a-zA-Z_]\w*', Name), - ], - 'classname': [ - (r'[a-zA-Z_]\w*', Name.Class, '#pop'), + (r'\\(\S+)', Name), ], 'string': [ (r'"', String, '#pop'), @@ -276,15 +375,6 @@ class 
SystemVerilogLexer(RegexLexer): ] } - def get_tokens_unprocessed(self, text): - for index, token, value in \ - RegexLexer.get_tokens_unprocessed(self, text): - # Convention: mark all upper case names as constants - if token is Name: - if value.isupper(): - token = Name.Constant - yield index, token, value - class VhdlLexer(RegexLexer): """ diff --git a/src/typecode/_vendor/pygments/lexers/hexdump.py b/src/typecode/_vendor/pygments/lexers/hexdump.py index 88dc745..ba44a80 100644 --- a/src/typecode/_vendor/pygments/lexers/hexdump.py +++ b/src/typecode/_vendor/pygments/lexers/hexdump.py @@ -5,7 +5,7 @@ Lexers for hexadecimal dumps. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/html.py b/src/typecode/_vendor/pygments/lexers/html.py index d211349..ef98c89 100644 --- a/src/typecode/_vendor/pygments/lexers/html.py +++ b/src/typecode/_vendor/pygments/lexers/html.py @@ -5,7 +5,7 @@ Lexers for HTML, XML and related markup. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -77,12 +77,24 @@ class HtmlLexer(RegexLexer): bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text, Punctuation), '#pop'), (r'.+?(?=<\s*/\s*script\s*>)', using(JavascriptLexer)), + # fallback cases for when there is no closing script tag + # first look for newline and then go back into root state + # if that fails just read the rest of the file + # this is similar to the error handling logic in lexer.py + (r'.+?\n', using(JavascriptLexer), '#pop'), + (r'.+', using(JavascriptLexer), '#pop'), ], 'style-content': [ (r'(<)(\s*)(/)(\s*)(style)(\s*)(>)', bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text, Punctuation),'#pop'), (r'.+?(?=<\s*/\s*style\s*>)', using(CssLexer)), + # fallback cases for when there is no closing style tag + # first look for newline and then go back into root state + # if that fails just read the rest of the file + # this is similar to the error handling logic in lexer.py + (r'.+?\n', using(CssLexer), '#pop'), + (r'.+', using(CssLexer), '#pop'), ], 'attr': [ ('".*?"', String, '#pop'), @@ -244,7 +256,7 @@ class XsltLexer(XmlLexer): filenames = ['*.xsl', '*.xslt', '*.xpl'] # xpl is XProc mimetypes = ['application/xsl+xml', 'application/xslt+xml'] - EXTRA_KEYWORDS = set(( + EXTRA_KEYWORDS = { 'apply-imports', 'apply-templates', 'attribute', 'attribute-set', 'call-template', 'choose', 'comment', 'copy', 'copy-of', 'decimal-format', 'element', 'fallback', @@ -253,7 +265,7 @@ class XsltLexer(XmlLexer): 'preserve-space', 'processing-instruction', 'sort', 'strip-space', 'stylesheet', 'template', 'text', 'transform', 'value-of', 'variable', 'when', 'with-param' - )) + } def get_tokens_unprocessed(self, text): for index, token, value in XmlLexer.get_tokens_unprocessed(self, text): @@ -357,8 +369,8 @@ class HamlLexer(ExtendedRegexLexer): (r'\w+', Name.Variable, '#pop'), (r'@\w+', Name.Variable.Instance, '#pop'), (r'\$\w+', Name.Variable.Global, '#pop'), - (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'), - (r'"(\\\\|\\"|[^"\n])*"', 
String, '#pop'), + (r"'(\\\\|\\[^\\]|[^'\\\n])*'", String, '#pop'), + (r'"(\\\\|\\[^\\]|[^"\\\n])*"', String, '#pop'), ], 'html-comment-block': [ @@ -469,8 +481,8 @@ class ScamlLexer(ExtendedRegexLexer): (r'\w+', Name.Variable, '#pop'), (r'@\w+', Name.Variable.Instance, '#pop'), (r'\$\w+', Name.Variable.Global, '#pop'), - (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'), - (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'), + (r"'(\\\\|\\[^\\]|[^'\\\n])*'", String, '#pop'), + (r'"(\\\\|\\[^\\]|[^"\\\n])*"', String, '#pop'), ], 'html-comment-block': [ @@ -578,8 +590,8 @@ class PugLexer(ExtendedRegexLexer): (r'\w+', Name.Variable, '#pop'), (r'@\w+', Name.Variable.Instance, '#pop'), (r'\$\w+', Name.Variable.Global, '#pop'), - (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'), - (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'), + (r"'(\\\\|\\[^\\]|[^'\\\n])*'", String, '#pop'), + (r'"(\\\\|\\[^\\]|[^"\\\n])*"', String, '#pop'), ], 'html-comment-block': [ diff --git a/src/typecode/_vendor/pygments/lexers/idl.py b/src/typecode/_vendor/pygments/lexers/idl.py index 273da14..abb6091 100644 --- a/src/typecode/_vendor/pygments/lexers/idl.py +++ b/src/typecode/_vendor/pygments/lexers/idl.py @@ -5,7 +5,7 @@ Lexers for IDL. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -32,7 +32,7 @@ class IDLLexer(RegexLexer): _RESERVED = ( 'and', 'begin', 'break', 'case', 'common', 'compile_opt', - 'continue', 'do', 'else', 'end', 'endcase', 'elseelse', + 'continue', 'do', 'else', 'end', 'endcase', 'endelse', 'endfor', 'endforeach', 'endif', 'endrep', 'endswitch', 'endwhile', 'eq', 'for', 'foreach', 'forward_function', 'function', 'ge', 'goto', 'gt', 'if', 'inherits', 'le', @@ -268,3 +268,14 @@ class IDLLexer(RegexLexer): (r'.', Text), ] } + + def analyse_text(text): + """endelse seems to be unique to IDL, endswitch is rare at least.""" + result = 0 + + if 'endelse' in text: + result += 0.2 + if 'endswitch' in text: + result += 0.01 + + return result \ No newline at end of file diff --git a/src/typecode/_vendor/pygments/lexers/igor.py b/src/typecode/_vendor/pygments/lexers/igor.py index 680f2d9..273f5a1 100644 --- a/src/typecode/_vendor/pygments/lexers/igor.py +++ b/src/typecode/_vendor/pygments/lexers/igor.py @@ -5,7 +5,7 @@ Lexers for Igor Pro. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -391,15 +391,10 @@ class IgorLexer(RegexLexer): 'WaveRefIndexedDFR', 'WaveRefsEqual', 'WaveRefWaveToList', 'WaveTextEncoding', 'WaveType', 'WaveUnits', 'WhichListItem', 'WinList', 'WinName', 'WinRecreation', 'WinType', 'wnoise', 'xcsr', 'XWaveName', 'XWaveRefFromTrace', 'x2pnt', 'zcsr', - 'ZernikeR', 'zeromq_client_connect', 'zeromq_client_connect', - 'zeromq_client_recv', 'zeromq_client_recv', 'zeromq_client_send', - 'zeromq_client_send', 'zeromq_handler_start', 'zeromq_handler_start', - 'zeromq_handler_stop', 'zeromq_handler_stop', 'zeromq_server_bind', - 'zeromq_server_bind', 'zeromq_server_recv', 'zeromq_server_recv', - 'zeromq_server_send', 'zeromq_server_send', 'zeromq_set', 'zeromq_set', - 'zeromq_stop', 'zeromq_stop', 'zeromq_test_callfunction', - 'zeromq_test_callfunction', 'zeromq_test_serializeWave', - 'zeromq_test_serializeWave', 'zeta' + 'ZernikeR', 'zeromq_client_connect', 'zeromq_client_recv', + 'zeromq_client_send', 'zeromq_handler_start', 'zeromq_handler_stop', + 'zeromq_server_bind', 'zeromq_server_recv', 'zeromq_server_send', 'zeromq_set', + 'zeromq_stop', 'zeromq_test_callfunction', 'zeromq_test_serializeWave', 'zeta' ) tokens = { diff --git a/src/typecode/_vendor/pygments/lexers/inferno.py b/src/typecode/_vendor/pygments/lexers/inferno.py index 3bf0616..402b756 100644 --- a/src/typecode/_vendor/pygments/lexers/inferno.py +++ b/src/typecode/_vendor/pygments/lexers/inferno.py @@ -5,7 +5,7 @@ Lexers for Inferno os and all the related stuff. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/installers.py b/src/typecode/_vendor/pygments/lexers/installers.py index 4395410..a1bb9fc 100644 --- a/src/typecode/_vendor/pygments/lexers/installers.py +++ b/src/typecode/_vendor/pygments/lexers/installers.py @@ -5,7 +5,7 @@ Lexers for installer/packager DSLs and formats. 
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/int_fiction.py b/src/typecode/_vendor/pygments/lexers/int_fiction.py index 7d4f546..2bcda1c 100644 --- a/src/typecode/_vendor/pygments/lexers/int_fiction.py +++ b/src/typecode/_vendor/pygments/lexers/int_fiction.py @@ -5,7 +5,7 @@ Lexers for interactive fiction languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -38,10 +38,10 @@ class Inform6Lexer(RegexLexer): # Inform 7 maps these four character classes to their ASCII # equivalents. To support Inform 6 inclusions within Inform 7, # Inform6Lexer maps them too. - _dash = u'\\-\u2010-\u2014' - _dquote = u'"\u201c\u201d' - _squote = u"'\u2018\u2019" - _newline = u'\\n\u0085\u2028\u2029' + _dash = '\\-\u2010-\u2014' + _dquote = '"\u201c\u201d' + _squote = "'\u2018\u2019" + _newline = '\\n\u0085\u2028\u2029' tokens = { 'root': [ @@ -118,7 +118,7 @@ class Inform6Lexer(RegexLexer): include('_whitespace'), # Strings (r'[%s][^@][%s]' % (_squote, _squote), String.Char, '#pop'), - (r'([%s])(@\{[0-9a-fA-F]{1,4}\})([%s])' % (_squote, _squote), + (r'([%s])(@\{[0-9a-fA-F]*\})([%s])' % (_squote, _squote), bygroups(String.Char, String.Escape, String.Char), '#pop'), (r'([%s])(@.{2})([%s])' % (_squote, _squote), bygroups(String.Char, String.Escape, String.Char), '#pop'), @@ -180,7 +180,7 @@ class Inform6Lexer(RegexLexer): (r'[~^]+', String.Escape), (r'[^~^\\@({%s]+' % _squote, String.Single), (r'[({]', String.Single), - (r'@\{[0-9a-fA-F]{,4}\}', String.Escape), + (r'@\{[0-9a-fA-F]*\}', String.Escape), (r'@.{2}', String.Escape), (r'[%s]' % _squote, String.Single, '#pop') ], @@ -191,7 +191,7 @@ class Inform6Lexer(RegexLexer): (r'\\', String.Escape), 
(r'@(\\\s*[%s]\s*)*@((\\\s*[%s]\s*)*[0-9])*' % (_newline, _newline), String.Escape), - (r'@(\\\s*[%s]\s*)*\{((\\\s*[%s]\s*)*[0-9a-fA-F]){,4}' + (r'@(\\\s*[%s]\s*)*\{((\\\s*[%s]\s*)*[0-9a-fA-F])*' r'(\\\s*[%s]\s*)*\}' % (_newline, _newline, _newline), String.Escape), (r'@(\\\s*[%s]\s*)*.(\\\s*[%s]\s*)*.' % (_newline, _newline), @@ -257,8 +257,8 @@ class Inform6Lexer(RegexLexer): (r'(?i)(extend|verb)\b', Keyword, 'grammar'), (r'(?i)fake_action\b', Keyword, ('default', '_constant')), (r'(?i)import\b', Keyword, 'manifest'), - (r'(?i)(include|link)\b', Keyword, - ('default', 'before-plain-string')), + (r'(?i)(include|link|origsource)\b', Keyword, + ('default', 'before-plain-string?')), (r'(?i)(lowstring|undef)\b', Keyword, ('default', '_constant')), (r'(?i)message\b', Keyword, ('default', 'diagnostic')), (r'(?i)(nearby|object)\b', Keyword, @@ -365,11 +365,12 @@ class Inform6Lexer(RegexLexer): 'diagnostic': [ include('_whitespace'), (r'[%s]' % _dquote, String.Double, ('#pop', 'message-string')), - default(('#pop', 'before-plain-string', 'directive-keyword?')) + default(('#pop', 'before-plain-string?', 'directive-keyword?')) ], - 'before-plain-string': [ + 'before-plain-string?': [ include('_whitespace'), - (r'[%s]' % _dquote, String.Double, ('#pop', 'plain-string')) + (r'[%s]' % _dquote, String.Double, ('#pop', 'plain-string')), + default('#pop') ], 'message-string': [ (r'[~^]+', String.Escape), @@ -386,6 +387,7 @@ class Inform6Lexer(RegexLexer): 'replace', 'reverse', 'scope', 'score', 'special', 'string', 'table', 'terminating', 'time', 'topic', 'warning', 'with'), suffix=r'\b'), Keyword, '#pop'), + (r'static\b', Keyword), (r'[%s]{1,2}>|[+=]' % _dash, Punctuation, '#pop') ], '_directive-keyword': [ @@ -514,6 +516,15 @@ def get_tokens_unprocessed(self, text): while objectloop_queue: yield objectloop_queue.pop(0) + def analyse_text(text): + """We try to find a keyword which seem relatively common, unfortunately + there is a decent overlap with Smalltalk keywords otherwise 
here..""" + result = 0 + if re.search('\borigsource\b', text, re.IGNORECASE): + result += 0.05 + + return result + class Inform7Lexer(RegexLexer): """ @@ -856,7 +867,7 @@ def _make_attribute_value_state(terminator, host_triple, host_double, tokens = { 'root': [ - (u'\ufeff', Text), + ('\ufeff', Text), (r'\{', Punctuation, 'object-body'), (r';+', Punctuation), (r'(?=(argcount|break|case|catch|continue|default|definingobj|' @@ -1341,3 +1352,17 @@ def get_tokens_unprocessed(self, text, **kwargs): else: token = Comment yield index, token, value + + def analyse_text(text): + """This is a rather generic descriptive language without strong + identifiers. It looks like a 'GameMainDef' has to be present, + and/or a 'versionInfo' with an 'IFID' field.""" + result = 0 + if '__TADS' in text or 'GameMainDef' in text: + result += 0.2 + + # This is a fairly unique keyword which is likely used in source as well + if 'versionInfo' in text and 'IFID' in text: + result += 0.1 + + return result diff --git a/src/typecode/_vendor/pygments/lexers/iolang.py b/src/typecode/_vendor/pygments/lexers/iolang.py index 0fe8afd..d544096 100644 --- a/src/typecode/_vendor/pygments/lexers/iolang.py +++ b/src/typecode/_vendor/pygments/lexers/iolang.py @@ -5,7 +5,7 @@ Lexers for the Io language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -37,7 +37,7 @@ class IoLexer(RegexLexer): (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), (r'/\+', Comment.Multiline, 'nestedcomment'), # DoubleQuotedString - (r'"(\\\\|\\"|[^"])*"', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), # Operators (r'::=|:=|=|\(|\)|;|,|\*|-|\+|>|<|@|!|/|\||\^|\.|%|&|\[|\]|\{|\}', Operator), diff --git a/src/typecode/_vendor/pygments/lexers/j.py b/src/typecode/_vendor/pygments/lexers/j.py index a397c5e..05e3bed 100644 --- a/src/typecode/_vendor/pygments/lexers/j.py +++ b/src/typecode/_vendor/pygments/lexers/j.py @@ -5,7 +5,7 @@ Lexer for the J programming language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/javascript.py b/src/typecode/_vendor/pygments/lexers/javascript.py index 7019af6..3a333bd 100644 --- a/src/typecode/_vendor/pygments/lexers/javascript.py +++ b/src/typecode/_vendor/pygments/lexers/javascript.py @@ -5,7 +5,7 @@ Lexers for JavaScript and related languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -15,7 +15,7 @@ this, words, combined from typecode._vendor.pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation, Other -from typecode._vendor.pygments.util import get_bool_opt, iteritems +from typecode._vendor.pygments.util import get_bool_opt import typecode._vendor.pygments.unistring as uni __all__ = ['JavascriptLexer', 'KalLexer', 'LiveScriptLexer', 'DartLexer', @@ -26,7 +26,7 @@ ']|\\\\u[a-fA-F0-9]{4})') JS_IDENT_PART = ('(?:[$' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl', 'Mn', 'Mc', 'Nd', 'Pc') + - u'\u200c\u200d]|\\\\u[a-fA-F0-9]{4})') + '\u200c\u200d]|\\\\u[a-fA-F0-9]{4})') JS_IDENT = JS_IDENT_START + '(?:' + JS_IDENT_PART + ')*' @@ -37,7 +37,7 @@ class JavascriptLexer(RegexLexer): name = 'JavaScript' aliases = ['js', 'javascript'] - filenames = ['*.js', '*.jsm'] + filenames = ['*.js', '*.jsm', '*.mjs'] mimetypes = ['application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript'] @@ -53,7 +53,7 @@ class JavascriptLexer(RegexLexer): 'slashstartsregex': [ include('commentsandwhitespace'), (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/' - r'([gimuy]+\b|\B)', String.Regex, '#pop'), + r'([gimuys]+\b|\B)', String.Regex, '#pop'), (r'(?=/)', Text, ('#pop', 'badregex')), default('#pop') ], @@ -64,33 +64,38 @@ class JavascriptLexer(RegexLexer): (r'\A#! 
?/.*?\n', Comment.Hashbang), # recognized by node.js (r'^(?=\s|/|', + '--', '-->', # prec-lazy-or - u'||', + '||', # prec-lazy-and - u'&&', + '&&', # prec-comparison - u'>', u'<', u'>=', u'≥', u'<=', u'≤', u'==', u'===', u'≡', u'!=', u'≠', - u'!==', u'≢', u'.>', u'.<', u'.>=', u'.≥', u'.<=', u'.≤', u'.==', u'.!=', - u'.≠', u'.=', u'.!', u'<:', u'>:', u'∈', u'∉', u'∋', u'∌', u'⊆', - u'⊈', u'⊂', - u'⊄', u'⊊', + '>', '<', '>=', '≥', '<=', '≤', '==', '===', '≡', '!=', '≠', + '!==', '≢', '.>', '.<', '.>=', '.≥', '.<=', '.≤', '.==', '.!=', + '.≠', '.=', '.!', '<:', '>:', '∈', '∉', '∋', '∌', '⊆', + '⊈', '⊂', + '⊄', '⊊', # prec-pipe - u'|>', u'<|', + '|>', '<|', # prec-colon - u':', + ':', # prec-plus - u'+', u'-', u'.+', u'.-', u'|', u'∪', u'$', + '.+', '.-', '|', '∪', '$', # prec-bitshift - u'<<', u'>>', u'>>>', u'.<<', u'.>>', u'.>>>', + '<<', '>>', '>>>', '.<<', '.>>', '.>>>', # prec-times - u'*', u'/', u'./', u'÷', u'.÷', u'%', u'⋅', u'.%', u'.*', u'\\', u'.\\', u'&', u'∩', + '*', '/', './', '÷', '.÷', '%', '⋅', '.%', '.*', '\\', '.\\', '&', '∩', # prec-rational - u'//', u'.//', + '//', './/', # prec-power - u'^', u'.^', + '^', '.^', # prec-decl - u'::', + '::', # prec-dot - u'.', + '.', # unary op - u'+', u'-', u'!', u'√', u'∛', u'∜' - ]), Operator), + '+', '-', '!', '√', '∛', '∜', + )), Operator), # chars (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|" @@ -316,9 +314,8 @@ def get_tokens_unprocessed(self, text): curcode += line[6:] else: if curcode: - for item in do_insertions( - insertions, jllexer.get_tokens_unprocessed(curcode)): - yield item + yield from do_insertions( + insertions, jllexer.get_tokens_unprocessed(curcode)) curcode = '' insertions = [] if line.startswith('ERROR: ') or error: @@ -330,6 +327,5 @@ def get_tokens_unprocessed(self, text): start += len(line) if curcode: - for item in do_insertions( - insertions, jllexer.get_tokens_unprocessed(curcode)): - yield item + yield from do_insertions( + insertions, 
jllexer.get_tokens_unprocessed(curcode)) diff --git a/src/typecode/_vendor/pygments/lexers/jvm.py b/src/typecode/_vendor/pygments/lexers/jvm.py index 50b90d1..e889f0c 100644 --- a/src/typecode/_vendor/pygments/lexers/jvm.py +++ b/src/typecode/_vendor/pygments/lexers/jvm.py @@ -5,7 +5,7 @@ Pygments lexers for JVM languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -26,7 +26,7 @@ class JavaLexer(RegexLexer): """ - For `Java `_ source code. + For `Java `_ source code. """ name = 'Java' @@ -50,7 +50,7 @@ class JavaLexer(RegexLexer): (r'((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)' # return arguments r'((?:[^\W\d]|\$)[\w$]*)' # method name r'(\s*)(\()', # signature start - bygroups(using(this), Name.Function, Text, Operator)), + bygroups(using(this), Name.Function, Text, Punctuation)), (r'@[^\W\d][\w.]*', Name.Decorator), (r'(abstract|const|enum|extends|final|implements|native|private|' r'protected|public|static|strictfp|super|synchronized|throws|' @@ -61,11 +61,14 @@ class JavaLexer(RegexLexer): (r'(true|false|null)\b', Keyword.Constant), (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text), 'class'), + (r'(var)(\s+)', bygroups(Keyword.Declaration, Text), + 'var'), (r'(import(?:\s+static)?)(\s+)', bygroups(Keyword.Namespace, Text), 'import'), - (r'"(\\\\|\\"|[^"])*"', String), + (r'"', String, 'string'), (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char), - (r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Operator, Name.Attribute)), + (r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Punctuation, + Name.Attribute)), (r'^\s*([^\W\d]|\$)[\w$]*:', Name.Label), (r'([^\W\d]|\$)[\w$]*', Name), (r'([0-9][0-9_]*\.([0-9][0-9_]*)?|' @@ -80,15 +83,26 @@ class JavaLexer(RegexLexer): (r'0[bB][01][01_]*[lL]?', Number.Bin), (r'0[0-7_]+[lL]?', Number.Oct), (r'0|[1-9][0-9_]*[lL]?', Number.Integer), - (r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator), + 
(r'[~^*!%&\[\]<>|+=/?-]', Operator), + (r'[{}();:.,]', Punctuation), (r'\n', Text) ], 'class': [ (r'([^\W\d]|\$)[\w$]*', Name.Class, '#pop') ], + 'var': [ + (r'([^\W\d]|\$)[\w$]*', Name, '#pop') + ], 'import': [ (r'[\w.]+\*?', Name.Namespace, '#pop') ], + 'string': [ + (r'[^\\"]+', String), + (r'\\\\', String), # Escaped backslash + (r'\\"', String), # Escaped quote + (r'\\', String), # Bare backslash + (r'"', String, '#pop'), # Closing quote + ], } @@ -104,7 +118,7 @@ class AspectJLexer(JavaLexer): filenames = ['*.aj'] mimetypes = ['text/x-aspectj'] - aj_keywords = set(( + aj_keywords = { 'aspect', 'pointcut', 'privileged', 'call', 'execution', 'initialization', 'preinitialization', 'handler', 'get', 'set', 'staticinitialization', 'target', 'args', 'within', 'withincode', @@ -114,9 +128,9 @@ class AspectJLexer(JavaLexer): 'thisJoinPointStaticPart', 'thisEnclosingJoinPointStaticPart', 'issingleton', 'perthis', 'pertarget', 'percflow', 'percflowbelow', 'pertypewithin', 'lock', 'unlock', 'thisAspectInstance' - )) - aj_inter_type = set(('parents:', 'warning:', 'error:', 'soft:', 'precedence:')) - aj_inter_type_annotation = set(('@type', '@method', '@constructor', '@field')) + } + aj_inter_type = {'parents:', 'warning:', 'error:', 'soft:', 'precedence:'} + aj_inter_type_annotation = {'@type', '@method', '@constructor', '@field'} def get_tokens_unprocessed(self, text): for index, token, value in JavaLexer.get_tokens_unprocessed(self, text): @@ -144,148 +158,147 @@ class ScalaLexer(RegexLexer): flags = re.MULTILINE | re.DOTALL # don't use raw unicode strings! 
- op = (u'[-~\\^\\*!%&\\\\<>\\|+=:/?@\u00a6-\u00a7\u00a9\u00ac\u00ae\u00b0-\u00b1' - u'\u00b6\u00d7\u00f7\u03f6\u0482\u0606-\u0608\u060e-\u060f\u06e9' - u'\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0cf1-\u0cf2' - u'\u0d79\u0f01-\u0f03\u0f13-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38' - u'\u0fbe-\u0fc5\u0fc7-\u0fcf\u109e-\u109f\u1360\u1390-\u1399\u1940' - u'\u19e0-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2044\u2052\u207a-\u207c' - u'\u208a-\u208c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2118' - u'\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u2140-\u2144' - u'\u214a-\u214d\u214f\u2190-\u2328\u232b-\u244a\u249c-\u24e9\u2500-\u2767' - u'\u2794-\u27c4\u27c7-\u27e5\u27f0-\u2982\u2999-\u29d7\u29dc-\u29fb' - u'\u29fe-\u2b54\u2ce5-\u2cea\u2e80-\u2ffb\u3004\u3012-\u3013\u3020' - u'\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3' - u'\u3200-\u321e\u322a-\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u33ff' - u'\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ufb29\ufdfd\ufe62\ufe64-\ufe66' - u'\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe4\uffe8-\uffee\ufffc-\ufffd]+') - - letter = (u'[a-zA-Z\\$_\u00aa\u00b5\u00ba\u00c0-\u00d6\u00d8-\u00f6' - u'\u00f8-\u02af\u0370-\u0373\u0376-\u0377\u037b-\u037d\u0386' - u'\u0388-\u03f5\u03f7-\u0481\u048a-\u0556\u0561-\u0587\u05d0-\u05f2' - u'\u0621-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5' - u'\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5' - u'\u07b1\u07ca-\u07ea\u0904-\u0939\u093d\u0950\u0958-\u0961' - u'\u0972-\u097f\u0985-\u09b9\u09bd\u09ce\u09dc-\u09e1\u09f0-\u09f1' - u'\u0a05-\u0a39\u0a59-\u0a5e\u0a72-\u0a74\u0a85-\u0ab9\u0abd' - u'\u0ad0-\u0ae1\u0b05-\u0b39\u0b3d\u0b5c-\u0b61\u0b71\u0b83-\u0bb9' - u'\u0bd0\u0c05-\u0c3d\u0c58-\u0c61\u0c85-\u0cb9\u0cbd\u0cde-\u0ce1' - u'\u0d05-\u0d3d\u0d60-\u0d61\u0d7a-\u0d7f\u0d85-\u0dc6\u0e01-\u0e30' - u'\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0eb0\u0eb2-\u0eb3\u0ebd-\u0ec4' - u'\u0edc-\u0f00\u0f40-\u0f6c\u0f88-\u0f8b\u1000-\u102a\u103f' - 
u'\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070' - u'\u1075-\u1081\u108e\u10a0-\u10fa\u1100-\u135a\u1380-\u138f' - u'\u13a0-\u166c\u166f-\u1676\u1681-\u169a\u16a0-\u16ea\u16ee-\u1711' - u'\u1720-\u1731\u1740-\u1751\u1760-\u1770\u1780-\u17b3\u17dc' - u'\u1820-\u1842\u1844-\u18a8\u18aa-\u191c\u1950-\u19a9\u19c1-\u19c7' - u'\u1a00-\u1a16\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf' - u'\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1d00-\u1d2b\u1d62-\u1d77' - u'\u1d79-\u1d9a\u1e00-\u1fbc\u1fbe\u1fc2-\u1fcc\u1fd0-\u1fdb' - u'\u1fe0-\u1fec\u1ff2-\u1ffc\u2071\u207f\u2102\u2107\u210a-\u2113' - u'\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u212f-\u2139' - u'\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c7c' - u'\u2c80-\u2ce4\u2d00-\u2d65\u2d80-\u2dde\u3006-\u3007\u3021-\u3029' - u'\u3038-\u303a\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff-\u318e' - u'\u31a0-\u31b7\u31f0-\u31ff\u3400-\u4db5\u4e00-\ua014\ua016-\ua48c' - u'\ua500-\ua60b\ua610-\ua61f\ua62a-\ua66e\ua680-\ua697\ua722-\ua76f' - u'\ua771-\ua787\ua78b-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822' - u'\ua840-\ua873\ua882-\ua8b3\ua90a-\ua925\ua930-\ua946\uaa00-\uaa28' - u'\uaa40-\uaa42\uaa44-\uaa4b\uac00-\ud7a3\uf900-\ufb1d\ufb1f-\ufb28' - u'\ufb2a-\ufd3d\ufd50-\ufdfb\ufe70-\ufefc\uff21-\uff3a\uff41-\uff5a' - u'\uff66-\uff6f\uff71-\uff9d\uffa0-\uffdc]') - - upper = (u'[A-Z\\$_\u00c0-\u00d6\u00d8-\u00de\u0100\u0102\u0104\u0106\u0108' - u'\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c' - u'\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130' - u'\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145' - u'\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a' - u'\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e' - u'\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182' - u'\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194' - u'\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7' - 
u'\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc' - u'\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9' - u'\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee' - u'\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204' - u'\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218' - u'\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c' - u'\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246' - u'\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u0386\u0388-\u038f' - u'\u0391-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0' - u'\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7' - u'\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a' - u'\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e' - u'\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a' - u'\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae' - u'\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1' - u'\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6' - u'\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea' - u'\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe' - u'\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512' - u'\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0531-\u0556' - u'\u10a0-\u10c5\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e' - u'\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22' - u'\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36' - u'\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a' - u'\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e' - u'\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72' - u'\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86' - u'\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2' - u'\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6' - u'\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca' - 
u'\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede' - u'\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2' - u'\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d' - u'\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59-\u1f5f' - u'\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb' - u'\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112' - u'\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133' - u'\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67' - u'\u2c69\u2c6b\u2c6d-\u2c6f\u2c72\u2c75\u2c80\u2c82\u2c84\u2c86' - u'\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a' - u'\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae' - u'\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2' - u'\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6' - u'\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\ua640\ua642\ua644\ua646' - u'\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a' - u'\ua65c\ua65e\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682' - u'\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696' - u'\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736' - u'\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a' - u'\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e' - u'\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b' - u'\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\uff21-\uff3a]') - - idrest = u'%s(?:%s|[0-9])*(?:(?<=_)%s)?' 
% (letter, letter, op) - letter_letter_digit = u'%s(?:%s|\\d)*' % (letter, letter) + op = ('[-~\\^\\*!%&\\\\<>\\|+=:/?@\u00a6-\u00a7\u00a9\u00ac\u00ae\u00b0-\u00b1' + '\u00b6\u00d7\u00f7\u03f6\u0482\u0606-\u0608\u060e-\u060f\u06e9' + '\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0cf1-\u0cf2' + '\u0d79\u0f01-\u0f03\u0f13-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38' + '\u0fbe-\u0fc5\u0fc7-\u0fcf\u109e-\u109f\u1360\u1390-\u1399\u1940' + '\u19e0-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2044\u2052\u207a-\u207c' + '\u208a-\u208c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2118' + '\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u2140-\u2144' + '\u214a-\u214d\u214f\u2190-\u2328\u232b-\u244a\u249c-\u24e9\u2500-\u2767' + '\u2794-\u27c4\u27c7-\u27e5\u27f0-\u2982\u2999-\u29d7\u29dc-\u29fb' + '\u29fe-\u2b54\u2ce5-\u2cea\u2e80-\u2ffb\u3004\u3012-\u3013\u3020' + '\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3' + '\u3200-\u321e\u322a-\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u33ff' + '\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ufb29\ufdfd\ufe62\ufe64-\ufe66' + '\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe4\uffe8-\uffee\ufffc-\ufffd]+') + + letter = ('[a-zA-Z\\$_\u00aa\u00b5\u00ba\u00c0-\u00d6\u00d8-\u00f6' + '\u00f8-\u02af\u0370-\u0373\u0376-\u0377\u037b-\u037d\u0386' + '\u0388-\u03f5\u03f7-\u0481\u048a-\u0556\u0561-\u0587\u05d0-\u05f2' + '\u0621-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5' + '\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5' + '\u07b1\u07ca-\u07ea\u0904-\u0939\u093d\u0950\u0958-\u0961' + '\u0972-\u097f\u0985-\u09b9\u09bd\u09ce\u09dc-\u09e1\u09f0-\u09f1' + '\u0a05-\u0a39\u0a59-\u0a5e\u0a72-\u0a74\u0a85-\u0ab9\u0abd' + '\u0ad0-\u0ae1\u0b05-\u0b39\u0b3d\u0b5c-\u0b61\u0b71\u0b83-\u0bb9' + '\u0bd0\u0c05-\u0c3d\u0c58-\u0c61\u0c85-\u0cb9\u0cbd\u0cde-\u0ce1' + '\u0d05-\u0d3d\u0d60-\u0d61\u0d7a-\u0d7f\u0d85-\u0dc6\u0e01-\u0e30' + '\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0eb0\u0eb2-\u0eb3\u0ebd-\u0ec4' + 
'\u0edc-\u0f00\u0f40-\u0f6c\u0f88-\u0f8b\u1000-\u102a\u103f' + '\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070' + '\u1075-\u1081\u108e\u10a0-\u10fa\u1100-\u135a\u1380-\u138f' + '\u13a0-\u166c\u166f-\u1676\u1681-\u169a\u16a0-\u16ea\u16ee-\u1711' + '\u1720-\u1731\u1740-\u1751\u1760-\u1770\u1780-\u17b3\u17dc' + '\u1820-\u1842\u1844-\u18a8\u18aa-\u191c\u1950-\u19a9\u19c1-\u19c7' + '\u1a00-\u1a16\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf' + '\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1d00-\u1d2b\u1d62-\u1d77' + '\u1d79-\u1d9a\u1e00-\u1fbc\u1fbe\u1fc2-\u1fcc\u1fd0-\u1fdb' + '\u1fe0-\u1fec\u1ff2-\u1ffc\u2071\u207f\u2102\u2107\u210a-\u2113' + '\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u212f-\u2139' + '\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c7c' + '\u2c80-\u2ce4\u2d00-\u2d65\u2d80-\u2dde\u3006-\u3007\u3021-\u3029' + '\u3038-\u303a\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff-\u318e' + '\u31a0-\u31b7\u31f0-\u31ff\u3400-\u4db5\u4e00-\ua014\ua016-\ua48c' + '\ua500-\ua60b\ua610-\ua61f\ua62a-\ua66e\ua680-\ua697\ua722-\ua76f' + '\ua771-\ua787\ua78b-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822' + '\ua840-\ua873\ua882-\ua8b3\ua90a-\ua925\ua930-\ua946\uaa00-\uaa28' + '\uaa40-\uaa42\uaa44-\uaa4b\uac00-\ud7a3\uf900-\ufb1d\ufb1f-\ufb28' + '\ufb2a-\ufd3d\ufd50-\ufdfb\ufe70-\ufefc\uff21-\uff3a\uff41-\uff5a' + '\uff66-\uff6f\uff71-\uff9d\uffa0-\uffdc]') + + upper = ('[A-Z\\$_\u00c0-\u00d6\u00d8-\u00de\u0100\u0102\u0104\u0106\u0108' + '\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c' + '\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130' + '\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145' + '\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a' + '\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e' + '\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182' + '\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194' + 
'\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7' + '\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc' + '\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9' + '\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee' + '\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204' + '\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218' + '\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c' + '\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246' + '\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u0386\u0388-\u038f' + '\u0391-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0' + '\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7' + '\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a' + '\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e' + '\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a' + '\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae' + '\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1' + '\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6' + '\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea' + '\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe' + '\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512' + '\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0531-\u0556' + '\u10a0-\u10c5\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e' + '\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22' + '\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36' + '\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a' + '\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e' + '\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72' + '\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86' + '\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2' + '\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6' + 
'\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca' + '\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede' + '\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2' + '\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d' + '\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59-\u1f5f' + '\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb' + '\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112' + '\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133' + '\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67' + '\u2c69\u2c6b\u2c6d-\u2c6f\u2c72\u2c75\u2c80\u2c82\u2c84\u2c86' + '\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a' + '\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae' + '\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2' + '\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6' + '\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\ua640\ua642\ua644\ua646' + '\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a' + '\ua65c\ua65e\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682' + '\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696' + '\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736' + '\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a' + '\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e' + '\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b' + '\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\uff21-\uff3a]') + + idrest = '%s(?:%s|[0-9])*(?:(?<=_)%s)?' 
% (letter, letter, op) + letter_letter_digit = '%s(?:%s|\\d)*' % (letter, letter) tokens = { 'root': [ # method names (r'(class|trait|object)(\s+)', bygroups(Keyword, Text), 'class'), (r'[^\S\n]+', Text), - (r'//.*?\n', Comment.Single), - (r'/\*', Comment.Multiline, 'comment'), - (u'@%s' % idrest, Name.Decorator), - (u'(abstract|ca(?:se|tch)|d(?:ef|o)|e(?:lse|xtends)|' - u'f(?:inal(?:ly)?|or(?:Some)?)|i(?:f|mplicit)|' - u'lazy|match|new|override|pr(?:ivate|otected)' - u'|re(?:quires|turn)|s(?:ealed|uper)|' - u't(?:h(?:is|row)|ry)|va[lr]|w(?:hile|ith)|yield)\\b|' - u'(<[%:-]|=>|>:|[#=@_\u21D2\u2190])(\\b|(?=\\s)|$)', Keyword), - (u':(?!%s)' % op, Keyword, 'type'), - (u'%s%s\\b' % (upper, idrest), Name.Class), + include('comments'), + (r'@%s' % idrest, Name.Decorator), + (r'(abstract|ca(?:se|tch)|d(?:ef|o)|e(?:lse|xtends)|' + r'f(?:inal(?:ly)?|or(?:Some)?)|i(?:f|mplicit)|' + r'lazy|match|new|override|pr(?:ivate|otected)' + r'|re(?:quires|turn)|s(?:ealed|uper)|' + r't(?:h(?:is|row)|ry)|va[lr]|w(?:hile|ith)|yield)\b|' + r'(<[%:-]|=>|>:|[#=@_\u21D2\u2190])\b', Keyword), + (r':(?!%s)' % op, Keyword, 'type'), + (r'%s%s\b' % (upper, idrest), Name.Class), (r'(true|false|null)\b', Keyword.Constant), (r'(import|package)(\s+)', bygroups(Keyword, Text), 'import'), (r'(type)(\s+)', bygroups(Keyword, Text), 'type'), (r'""".*?"""(?!")', String), - (r'"(\\\\|\\"|[^"])*"', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char), - (u"'%s" % idrest, Text.Symbol), + (r"'%s" % idrest, Text.Symbol), (r'[fs]"""', String, 'interptriplestring'), # interpolated strings (r'[fs]"', String, 'interpstring'), # interpolated strings - (r'raw"(\\\\|\\"|[^"])*"', String), # raw strings - # (ur'(\.)(%s|%s|`[^`]+`)' % (idrest, op), bygroups(Operator, + (r'raw"(\\\\|\\[^\\]|[^"\\])*"', String), # raw strings + # (r'(\.)(%s|%s|`[^`]+`)' % (idrest, op), bygroups(Operator, # Name.Attribute)), (idrest, Name), (r'`[^`]+`', Name), @@ -299,34 +312,40 @@ class 
ScalaLexer(RegexLexer): (r'\n', Text) ], 'class': [ - (u'(%s|%s|`[^`]+`)(\\s*)(\\[)' % (idrest, op), - bygroups(Name.Class, Text, Operator), 'typeparam'), + (r'(%s|%s|`[^`]+`)(\s*)(\[)' % (idrest, op), + bygroups(Name.Class, Text, Operator), ('#pop', 'typeparam')), (r'\s+', Text), + include('comments'), (r'\{', Operator, '#pop'), (r'\(', Operator, '#pop'), - (r'//.*?\n', Comment.Single, '#pop'), - (u'%s|%s|`[^`]+`' % (idrest, op), Name.Class, '#pop'), + (r'%s|%s|`[^`]+`' % (idrest, op), Name.Class, '#pop'), ], 'type': [ (r'\s+', Text), - (r'<[%:]|>:|[#_]|forSome|type', Keyword), - (u'([,);}]|=>|=|\u21d2)(\\s*)', bygroups(Operator, Text), '#pop'), + include('comments'), + (r'<[%:]|>:|[#_]|\bforSome\b|\btype\b', Keyword), + (r'([,);}]|=>|=|\u21d2)(\s*)', bygroups(Operator, Text), '#pop'), (r'[({]', Operator, '#push'), - (u'((?:%s|%s|`[^`]+`)(?:\\.(?:%s|%s|`[^`]+`))*)(\\s*)(\\[)' % + (r'((?:%s|%s|`[^`]+`)(?:\.(?:%s|%s|`[^`]+`))*)(\s*)(\[)' % (idrest, op, idrest, op), bygroups(Keyword.Type, Text, Operator), ('#pop', 'typeparam')), - (u'((?:%s|%s|`[^`]+`)(?:\\.(?:%s|%s|`[^`]+`))*)(\\s*)$' % + (r'((?:%s|%s|`[^`]+`)(?:\.(?:%s|%s|`[^`]+`))*)(\s*)$' % (idrest, op, idrest, op), bygroups(Keyword.Type, Text), '#pop'), - (r'//.*?\n', Comment.Single, '#pop'), - (u'\\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type) + (r'\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type) ], 'typeparam': [ - (r'[\s,]+', Text), - (u'<[%:]|=>|>:|[#_\u21D2]|forSome|type', Keyword), + (r'\s+', Text), + include('comments'), + (r',+', Punctuation), + (r'<[%:]|=>|>:|[#_\u21D2]|\bforSome\b|\btype\b', Keyword), (r'([\])}])', Operator, '#pop'), (r'[(\[{]', Operator, '#push'), - (u'\\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type) + (r'\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type) + ], + 'comments': [ + (r'//.*?\n', Comment.Single), + (r'/\*', Comment.Multiline, 'comment'), ], 'comment': [ (r'[^/*]+', Comment.Multiline), @@ -335,7 +354,7 @@ class ScalaLexer(RegexLexer): (r'[*/]', Comment.Multiline) ], 'import': [ 
- (u'(%s|\\.)+' % idrest, Name.Namespace, '#pop') + (r'(%s|\.)+' % idrest, Name.Namespace, '#pop') ], 'interpstringcommon': [ (r'[^"$\\]+', String), @@ -455,8 +474,7 @@ class GosuTemplateLexer(Lexer): def get_tokens_unprocessed(self, text): lexer = GosuLexer() stack = ['templateText'] - for item in lexer.get_tokens_unprocessed(text, stack): - yield item + yield from lexer.get_tokens_unprocessed(text, stack) class GroovyLexer(RegexLexer): @@ -504,10 +522,10 @@ class GroovyLexer(RegexLexer): (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'), (r'""".*?"""', String.Double), (r"'''.*?'''", String.Single), - (r'"(\\\\|\\"|[^"])*"', String.Double), - (r"'(\\\\|\\'|[^'])*'", String.Single), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), (r'\$/((?!/\$).)*/\$', String), - (r'/(\\\\|\\"|[^/])*/', String), + (r'/(\\\\|\\[^\\]|[^/\\])*/', String), (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char), (r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)), (r'[a-zA-Z_]\w*:', Name.Label), @@ -600,7 +618,7 @@ class IokeLexer(RegexLexer): # Symbols (r':[\w!:?]+', String.Symbol), (r'[\w!:?]+:(?![\w!?])', String.Other), - (r':"(\\\\|\\"|[^"])*"', String.Symbol), + (r':"(\\\\|\\[^\\]|[^"\\])*"', String.Symbol), # Documentation (r'((?<=fn\()|(?<=fnx\()|(?<=method\()|(?<=macro\()|(?<=lecro\()' @@ -687,9 +705,9 @@ class IokeLexer(RegexLexer): r'System|Text|Tuple)(?![\w!:?])', Name.Builtin), # functions - (u'(generateMatchMethod|aliasMethod|\u03bb|\u028E|fnx|fn|method|' - u'dmacro|dlecro|syntax|macro|dlecrox|lecrox|lecro|syntax)' - u'(?![\\w!:?])', Name.Function), + ('(generateMatchMethod|aliasMethod|\u03bb|\u028E|fnx|fn|method|' + 'dmacro|dlecro|syntax|macro|dlecrox|lecrox|lecro|syntax)' + '(?![\\w!:?])', Name.Function), # Numbers (r'-?0[xX][0-9a-fA-F]+', Number.Hex), @@ -705,7 +723,7 @@ class IokeLexer(RegexLexer): r'\-\-|<=|>=|==|!=|&&|\.\.|\+=|\-=|\*=|\/=|%=|&=|\^=|\|=|<\-|' 
r'\+>|!>|<>|&>|%>|#>|\@>|\/>|\*>|\?>|\|>|\^>|~>|\$>|<\->|\->|' r'<<|>>|\*\*|\?\||\?&|\|\||>|<|\*|\/|%|\+|\-|&|\^|\||=|\$|!|~|' - u'\\?|#|\u2260|\u2218|\u2208|\u2209)', Operator), + r'\?|#|\u2260|\u2218|\u2208|\u2209)', Operator), (r'(and|nand|or|xor|nor|return|import)(?![\w!?])', Operator), @@ -818,7 +836,7 @@ class ClojureLexer(RegexLexer): (r'0x-?[abcdef\d]+', Number.Hex), # strings, symbols and characters - (r'"(\\\\|\\"|[^"])*"', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), (r"'" + valid_name, String.Symbol), (r"\\(.|[a-z]+)", String.Char), @@ -899,8 +917,8 @@ class TeaLangLexer(RegexLexer): (r'(true|false|null)\b', Keyword.Constant), (r'(template)(\s+)', bygroups(Keyword.Declaration, Text), 'template'), (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'), - (r'"(\\\\|\\"|[^"])*"', String), - (r'\'(\\\\|\\\'|[^\'])*\'', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), (r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)), (r'[a-zA-Z_]\w*:', Name.Label), (r'[a-zA-Z_\$]\w*', Name), @@ -961,9 +979,8 @@ class CeylonLexer(RegexLexer): (r'(class|interface|object|alias)(\s+)', bygroups(Keyword.Declaration, Text), 'class'), (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'), - (r'"(\\\\|\\"|[^"])*"', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), (r"'\\.'|'[^\\]'|'\\\{#[0-9a-fA-F]{4}\}'", String.Char), - (r'".*``.*``.*"', String.Interpol), (r'(\.)([a-z_]\w*)', bygroups(Operator, Name.Attribute)), (r'[a-zA-Z_]\w*:', Name.Label), @@ -1009,7 +1026,7 @@ class KotlinLexer(RegexLexer): name = 'Kotlin' aliases = ['kotlin'] - filenames = ['*.kt'] + filenames = ['*.kt', '*.kts'] mimetypes = ['text/x-kotlin'] flags = re.MULTILINE | re.DOTALL | re.UNICODE @@ -1031,6 +1048,7 @@ class KotlinLexer(RegexLexer): (r'\s+', Text), (r'\\\n', Text), # line continuation (r'//.*?\n', Comment.Single), + (r'^#!/.+?\n', Comment.Single), # shebang for kotlin scripts (r'/[*].*?[*]/', Comment.Multiline), 
(r'""".*?"""', String), (r'\n', Text), @@ -1038,7 +1056,7 @@ class KotlinLexer(RegexLexer): (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation), (r'[{}]', Punctuation), (r'@"(""|[^"])*"', String), - (r'"(\\\\|\\"|[^"\n])*["\n]', String), + (r'"(\\\\|\\[^\\]|[^"\\\n])*["\n]', String), (r"'\\.'|'[^\\]'", String.Char), (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFL]?|" r"0[xX][0-9a-fA-F]+[Ll]?", Number), @@ -1131,9 +1149,9 @@ class XtendLexer(RegexLexer): 'class'), (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'), (r"(''')", String, 'template'), - (u'(\u00BB)', String, 'template'), - (r'"(\\\\|\\"|[^"])*"', String), - (r"'(\\\\|\\'|[^'])*'", String), + (r'(\u00BB)', String, 'template'), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), (r'[a-zA-Z_]\w*:', Name.Label), (r'[a-zA-Z_$]\w*', Name), (r'[~^*!%&\[\](){}<>\|+=:;,./?-]', Operator), @@ -1150,7 +1168,7 @@ class XtendLexer(RegexLexer): ], 'template': [ (r"'''", String, '#pop'), - (u'\u00AB', String, '#pop'), + (r'\u00AB', String, '#pop'), (r'.', String) ], } @@ -1601,54 +1619,59 @@ def analyse_text(text): class SarlLexer(RegexLexer): - """ - For `SARL `_ source code. - - .. 
versionadded:: 2.4 - """ - - name = 'SARL' - aliases = ['sarl'] - filenames = ['*.sarl'] - mimetypes = ['text/x-sarl'] - - flags = re.MULTILINE | re.DOTALL - - tokens = { - 'root': [ - # method names - (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments - r'([a-zA-Z_$][\w$]*)' # method name - r'(\s*)(\()', # signature start - bygroups(using(this), Name.Function, Text, Operator)), - (r'[^\S\n]+', Text), - (r'//.*?\n', Comment.Single), - (r'/\*.*?\*/', Comment.Multiline), - (r'@[a-zA-Z_][\w.]*', Name.Decorator), - (r'(as|break|case|catch|default|do|else|extends|extension|finally|fires|for|if|implements|instanceof|new|on|requires|return|super|switch|throw|throws|try|typeof|uses|while|with)\b', - Keyword), - (r'(abstract|def|dispatch|final|native|override|private|protected|public|static|strictfp|synchronized|transient|val|var|volatile)\b', Keyword.Declaration), - (r'(boolean|byte|char|double|float|int|long|short|void)\b', - Keyword.Type), - (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)), - (r'(false|it|null|occurrence|this|true|void)\b', Keyword.Constant), - (r'(agent|annotation|artifact|behavior|capacity|class|enum|event|interface|skill|space)(\s+)', bygroups(Keyword.Declaration, Text), - 'class'), - (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'), - (r'"(\\\\|\\"|[^"])*"', String), - (r"'(\\\\|\\'|[^'])*'", String), - (r'[a-zA-Z_]\w*:', Name.Label), - (r'[a-zA-Z_$]\w*', Name), - (r'[~^*!%&\[\](){}<>\|+=:;,./?-]', Operator), - (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), - (r'0x[0-9a-fA-F]+', Number.Hex), - (r'[0-9]+L?', Number.Integer), - (r'\n', Text) - ], - 'class': [ - (r'[a-zA-Z_]\w*', Name.Class, '#pop') - ], - 'import': [ - (r'[\w.]+\*?', Name.Namespace, '#pop') - ], - } + """ + For `SARL `_ source code. + + .. 
versionadded:: 2.4 + """ + + name = 'SARL' + aliases = ['sarl'] + filenames = ['*.sarl'] + mimetypes = ['text/x-sarl'] + + flags = re.MULTILINE | re.DOTALL + + tokens = { + 'root': [ + # method names + (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments + r'([a-zA-Z_$][\w$]*)' # method name + r'(\s*)(\()', # signature start + bygroups(using(this), Name.Function, Text, Operator)), + (r'[^\S\n]+', Text), + (r'//.*?\n', Comment.Single), + (r'/\*.*?\*/', Comment.Multiline), + (r'@[a-zA-Z_][\w.]*', Name.Decorator), + (r'(as|break|case|catch|default|do|else|extends|extension|finally|' + r'fires|for|if|implements|instanceof|new|on|requires|return|super|' + r'switch|throw|throws|try|typeof|uses|while|with)\b', + Keyword), + (r'(abstract|def|dispatch|final|native|override|private|protected|' + r'public|static|strictfp|synchronized|transient|val|var|volatile)\b', + Keyword.Declaration), + (r'(boolean|byte|char|double|float|int|long|short|void)\b', + Keyword.Type), + (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)), + (r'(false|it|null|occurrence|this|true|void)\b', Keyword.Constant), + (r'(agent|annotation|artifact|behavior|capacity|class|enum|event|' + r'interface|skill|space)(\s+)', bygroups(Keyword.Declaration, Text), + 'class'), + (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), + (r'[a-zA-Z_]\w*:', Name.Label), + (r'[a-zA-Z_$]\w*', Name), + (r'[~^*!%&\[\](){}<>\|+=:;,./?-]', Operator), + (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), + (r'0x[0-9a-fA-F]+', Number.Hex), + (r'[0-9]+L?', Number.Integer), + (r'\n', Text) + ], + 'class': [ + (r'[a-zA-Z_]\w*', Name.Class, '#pop') + ], + 'import': [ + (r'[\w.]+\*?', Name.Namespace, '#pop') + ], + } diff --git a/src/typecode/_vendor/pygments/lexers/lisp.py b/src/typecode/_vendor/pygments/lexers/lisp.py index 83f250b..528d414 100644 --- a/src/typecode/_vendor/pygments/lexers/lisp.py +++ 
b/src/typecode/_vendor/pygments/lexers/lisp.py @@ -5,7 +5,7 @@ Lexers for Lispy languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -119,7 +119,7 @@ class SchemeLexer(RegexLexer): # (r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number), # strings, symbols and characters - (r'"(\\\\|\\"|[^"])*"', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), (r"'" + valid_name, String.Symbol), (r"#\\([()/'\"._!§$%& ?=+-]|[a-zA-Z0-9]+)", String.Char), @@ -382,7 +382,7 @@ class HyLexer(RegexLexer): # valid names for identifiers # well, names can only not consist fully of numbers # but this should be good enough for now - valid_name = r'(?!#)[\w!$%*+<=>?/.#-:]+' + valid_name = r'(?!#)[\w!$%*+<=>?/.#:-]+' def _multi_escape(entries): return words(entries, suffix=' ') @@ -403,7 +403,7 @@ def _multi_escape(entries): (r'0[xX][a-fA-F0-9]+', Number.Hex), # strings, symbols and characters - (r'"(\\\\|\\"|[^"])*"', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), (r"'" + valid_name, String.Symbol), (r"\\(.|[a-z]+)", String.Char), (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)), @@ -471,779 +471,779 @@ class RacketLexer(RegexLexer): # Generated by example.rkt _keywords = ( - u'#%app', u'#%datum', u'#%declare', u'#%expression', u'#%module-begin', - u'#%plain-app', u'#%plain-lambda', u'#%plain-module-begin', - u'#%printing-module-begin', u'#%provide', u'#%require', - u'#%stratified-body', u'#%top', u'#%top-interaction', - u'#%variable-reference', u'->', u'->*', u'->*m', u'->d', u'->dm', u'->i', - u'->m', u'...', u':do-in', u'==', u'=>', u'_', u'absent', u'abstract', - u'all-defined-out', u'all-from-out', u'and', u'any', u'augment', u'augment*', - u'augment-final', u'augment-final*', u'augride', u'augride*', u'begin', - u'begin-for-syntax', u'begin0', u'case', u'case->', u'case->m', - u'case-lambda', u'class', u'class*', u'class-field-accessor', - 
u'class-field-mutator', u'class/c', u'class/derived', u'combine-in', - u'combine-out', u'command-line', u'compound-unit', u'compound-unit/infer', - u'cond', u'cons/dc', u'contract', u'contract-out', u'contract-struct', - u'contracted', u'define', u'define-compound-unit', - u'define-compound-unit/infer', u'define-contract-struct', - u'define-custom-hash-types', u'define-custom-set-types', - u'define-for-syntax', u'define-local-member-name', u'define-logger', - u'define-match-expander', u'define-member-name', - u'define-module-boundary-contract', u'define-namespace-anchor', - u'define-opt/c', u'define-sequence-syntax', u'define-serializable-class', - u'define-serializable-class*', u'define-signature', - u'define-signature-form', u'define-struct', u'define-struct/contract', - u'define-struct/derived', u'define-syntax', u'define-syntax-rule', - u'define-syntaxes', u'define-unit', u'define-unit-binding', - u'define-unit-from-context', u'define-unit/contract', - u'define-unit/new-import-export', u'define-unit/s', u'define-values', - u'define-values-for-export', u'define-values-for-syntax', - u'define-values/invoke-unit', u'define-values/invoke-unit/infer', - u'define/augment', u'define/augment-final', u'define/augride', - u'define/contract', u'define/final-prop', u'define/match', - u'define/overment', u'define/override', u'define/override-final', - u'define/private', u'define/public', u'define/public-final', - u'define/pubment', u'define/subexpression-pos-prop', - u'define/subexpression-pos-prop/name', u'delay', u'delay/idle', - u'delay/name', u'delay/strict', u'delay/sync', u'delay/thread', u'do', - u'else', u'except', u'except-in', u'except-out', u'export', u'extends', - u'failure-cont', u'false', u'false/c', u'field', u'field-bound?', u'file', - u'flat-murec-contract', u'flat-rec-contract', u'for', u'for*', u'for*/and', - u'for*/async', u'for*/first', u'for*/fold', u'for*/fold/derived', - u'for*/hash', u'for*/hasheq', u'for*/hasheqv', u'for*/last', u'for*/list', - 
u'for*/lists', u'for*/mutable-set', u'for*/mutable-seteq', - u'for*/mutable-seteqv', u'for*/or', u'for*/product', u'for*/set', - u'for*/seteq', u'for*/seteqv', u'for*/stream', u'for*/sum', u'for*/vector', - u'for*/weak-set', u'for*/weak-seteq', u'for*/weak-seteqv', u'for-label', - u'for-meta', u'for-syntax', u'for-template', u'for/and', u'for/async', - u'for/first', u'for/fold', u'for/fold/derived', u'for/hash', u'for/hasheq', - u'for/hasheqv', u'for/last', u'for/list', u'for/lists', u'for/mutable-set', - u'for/mutable-seteq', u'for/mutable-seteqv', u'for/or', u'for/product', - u'for/set', u'for/seteq', u'for/seteqv', u'for/stream', u'for/sum', - u'for/vector', u'for/weak-set', u'for/weak-seteq', u'for/weak-seteqv', - u'gen:custom-write', u'gen:dict', u'gen:equal+hash', u'gen:set', - u'gen:stream', u'generic', u'get-field', u'hash/dc', u'if', u'implies', - u'import', u'include', u'include-at/relative-to', - u'include-at/relative-to/reader', u'include/reader', u'inherit', - u'inherit-field', u'inherit/inner', u'inherit/super', u'init', - u'init-depend', u'init-field', u'init-rest', u'inner', u'inspect', - u'instantiate', u'interface', u'interface*', u'invariant-assertion', - u'invoke-unit', u'invoke-unit/infer', u'lambda', u'lazy', u'let', u'let*', - u'let*-values', u'let-syntax', u'let-syntaxes', u'let-values', u'let/cc', - u'let/ec', u'letrec', u'letrec-syntax', u'letrec-syntaxes', - u'letrec-syntaxes+values', u'letrec-values', u'lib', u'link', u'local', - u'local-require', u'log-debug', u'log-error', u'log-fatal', u'log-info', - u'log-warning', u'match', u'match*', u'match*/derived', u'match-define', - u'match-define-values', u'match-lambda', u'match-lambda*', - u'match-lambda**', u'match-let', u'match-let*', u'match-let*-values', - u'match-let-values', u'match-letrec', u'match-letrec-values', - u'match/derived', u'match/values', u'member-name-key', u'mixin', u'module', - u'module*', u'module+', u'nand', u'new', u'nor', u'object-contract', - u'object/c', u'only', 
u'only-in', u'only-meta-in', u'open', u'opt/c', u'or', - u'overment', u'overment*', u'override', u'override*', u'override-final', - u'override-final*', u'parameterize', u'parameterize*', - u'parameterize-break', u'parametric->/c', u'place', u'place*', - u'place/context', u'planet', u'prefix', u'prefix-in', u'prefix-out', - u'private', u'private*', u'prompt-tag/c', u'protect-out', u'provide', - u'provide-signature-elements', u'provide/contract', u'public', u'public*', - u'public-final', u'public-final*', u'pubment', u'pubment*', u'quasiquote', - u'quasisyntax', u'quasisyntax/loc', u'quote', u'quote-syntax', - u'quote-syntax/prune', u'recontract-out', u'recursive-contract', - u'relative-in', u'rename', u'rename-in', u'rename-inner', u'rename-out', - u'rename-super', u'require', u'send', u'send*', u'send+', u'send-generic', - u'send/apply', u'send/keyword-apply', u'set!', u'set!-values', - u'set-field!', u'shared', u'stream', u'stream*', u'stream-cons', u'struct', - u'struct*', u'struct-copy', u'struct-field-index', u'struct-out', - u'struct/c', u'struct/ctc', u'struct/dc', u'submod', u'super', - u'super-instantiate', u'super-make-object', u'super-new', u'syntax', - u'syntax-case', u'syntax-case*', u'syntax-id-rules', u'syntax-rules', - u'syntax/loc', u'tag', u'this', u'this%', u'thunk', u'thunk*', u'time', - u'unconstrained-domain->', u'unit', u'unit-from-context', u'unit/c', - u'unit/new-import-export', u'unit/s', u'unless', u'unquote', - u'unquote-splicing', u'unsyntax', u'unsyntax-splicing', u'values/drop', - u'when', u'with-continuation-mark', u'with-contract', - u'with-contract-continuation-mark', u'with-handlers', u'with-handlers*', - u'with-method', u'with-syntax', u'λ' + '#%app', '#%datum', '#%declare', '#%expression', '#%module-begin', + '#%plain-app', '#%plain-lambda', '#%plain-module-begin', + '#%printing-module-begin', '#%provide', '#%require', + '#%stratified-body', '#%top', '#%top-interaction', + '#%variable-reference', '->', '->*', '->*m', '->d', 
'->dm', '->i', + '->m', '...', ':do-in', '==', '=>', '_', 'absent', 'abstract', + 'all-defined-out', 'all-from-out', 'and', 'any', 'augment', 'augment*', + 'augment-final', 'augment-final*', 'augride', 'augride*', 'begin', + 'begin-for-syntax', 'begin0', 'case', 'case->', 'case->m', + 'case-lambda', 'class', 'class*', 'class-field-accessor', + 'class-field-mutator', 'class/c', 'class/derived', 'combine-in', + 'combine-out', 'command-line', 'compound-unit', 'compound-unit/infer', + 'cond', 'cons/dc', 'contract', 'contract-out', 'contract-struct', + 'contracted', 'define', 'define-compound-unit', + 'define-compound-unit/infer', 'define-contract-struct', + 'define-custom-hash-types', 'define-custom-set-types', + 'define-for-syntax', 'define-local-member-name', 'define-logger', + 'define-match-expander', 'define-member-name', + 'define-module-boundary-contract', 'define-namespace-anchor', + 'define-opt/c', 'define-sequence-syntax', 'define-serializable-class', + 'define-serializable-class*', 'define-signature', + 'define-signature-form', 'define-struct', 'define-struct/contract', + 'define-struct/derived', 'define-syntax', 'define-syntax-rule', + 'define-syntaxes', 'define-unit', 'define-unit-binding', + 'define-unit-from-context', 'define-unit/contract', + 'define-unit/new-import-export', 'define-unit/s', 'define-values', + 'define-values-for-export', 'define-values-for-syntax', + 'define-values/invoke-unit', 'define-values/invoke-unit/infer', + 'define/augment', 'define/augment-final', 'define/augride', + 'define/contract', 'define/final-prop', 'define/match', + 'define/overment', 'define/override', 'define/override-final', + 'define/private', 'define/public', 'define/public-final', + 'define/pubment', 'define/subexpression-pos-prop', + 'define/subexpression-pos-prop/name', 'delay', 'delay/idle', + 'delay/name', 'delay/strict', 'delay/sync', 'delay/thread', 'do', + 'else', 'except', 'except-in', 'except-out', 'export', 'extends', + 'failure-cont', 'false', 'false/c', 
'field', 'field-bound?', 'file', + 'flat-murec-contract', 'flat-rec-contract', 'for', 'for*', 'for*/and', + 'for*/async', 'for*/first', 'for*/fold', 'for*/fold/derived', + 'for*/hash', 'for*/hasheq', 'for*/hasheqv', 'for*/last', 'for*/list', + 'for*/lists', 'for*/mutable-set', 'for*/mutable-seteq', + 'for*/mutable-seteqv', 'for*/or', 'for*/product', 'for*/set', + 'for*/seteq', 'for*/seteqv', 'for*/stream', 'for*/sum', 'for*/vector', + 'for*/weak-set', 'for*/weak-seteq', 'for*/weak-seteqv', 'for-label', + 'for-meta', 'for-syntax', 'for-template', 'for/and', 'for/async', + 'for/first', 'for/fold', 'for/fold/derived', 'for/hash', 'for/hasheq', + 'for/hasheqv', 'for/last', 'for/list', 'for/lists', 'for/mutable-set', + 'for/mutable-seteq', 'for/mutable-seteqv', 'for/or', 'for/product', + 'for/set', 'for/seteq', 'for/seteqv', 'for/stream', 'for/sum', + 'for/vector', 'for/weak-set', 'for/weak-seteq', 'for/weak-seteqv', + 'gen:custom-write', 'gen:dict', 'gen:equal+hash', 'gen:set', + 'gen:stream', 'generic', 'get-field', 'hash/dc', 'if', 'implies', + 'import', 'include', 'include-at/relative-to', + 'include-at/relative-to/reader', 'include/reader', 'inherit', + 'inherit-field', 'inherit/inner', 'inherit/super', 'init', + 'init-depend', 'init-field', 'init-rest', 'inner', 'inspect', + 'instantiate', 'interface', 'interface*', 'invariant-assertion', + 'invoke-unit', 'invoke-unit/infer', 'lambda', 'lazy', 'let', 'let*', + 'let*-values', 'let-syntax', 'let-syntaxes', 'let-values', 'let/cc', + 'let/ec', 'letrec', 'letrec-syntax', 'letrec-syntaxes', + 'letrec-syntaxes+values', 'letrec-values', 'lib', 'link', 'local', + 'local-require', 'log-debug', 'log-error', 'log-fatal', 'log-info', + 'log-warning', 'match', 'match*', 'match*/derived', 'match-define', + 'match-define-values', 'match-lambda', 'match-lambda*', + 'match-lambda**', 'match-let', 'match-let*', 'match-let*-values', + 'match-let-values', 'match-letrec', 'match-letrec-values', + 'match/derived', 'match/values', 
'member-name-key', 'mixin', 'module', + 'module*', 'module+', 'nand', 'new', 'nor', 'object-contract', + 'object/c', 'only', 'only-in', 'only-meta-in', 'open', 'opt/c', 'or', + 'overment', 'overment*', 'override', 'override*', 'override-final', + 'override-final*', 'parameterize', 'parameterize*', + 'parameterize-break', 'parametric->/c', 'place', 'place*', + 'place/context', 'planet', 'prefix', 'prefix-in', 'prefix-out', + 'private', 'private*', 'prompt-tag/c', 'protect-out', 'provide', + 'provide-signature-elements', 'provide/contract', 'public', 'public*', + 'public-final', 'public-final*', 'pubment', 'pubment*', 'quasiquote', + 'quasisyntax', 'quasisyntax/loc', 'quote', 'quote-syntax', + 'quote-syntax/prune', 'recontract-out', 'recursive-contract', + 'relative-in', 'rename', 'rename-in', 'rename-inner', 'rename-out', + 'rename-super', 'require', 'send', 'send*', 'send+', 'send-generic', + 'send/apply', 'send/keyword-apply', 'set!', 'set!-values', + 'set-field!', 'shared', 'stream', 'stream*', 'stream-cons', 'struct', + 'struct*', 'struct-copy', 'struct-field-index', 'struct-out', + 'struct/c', 'struct/ctc', 'struct/dc', 'submod', 'super', + 'super-instantiate', 'super-make-object', 'super-new', 'syntax', + 'syntax-case', 'syntax-case*', 'syntax-id-rules', 'syntax-rules', + 'syntax/loc', 'tag', 'this', 'this%', 'thunk', 'thunk*', 'time', + 'unconstrained-domain->', 'unit', 'unit-from-context', 'unit/c', + 'unit/new-import-export', 'unit/s', 'unless', 'unquote', + 'unquote-splicing', 'unsyntax', 'unsyntax-splicing', 'values/drop', + 'when', 'with-continuation-mark', 'with-contract', + 'with-contract-continuation-mark', 'with-handlers', 'with-handlers*', + 'with-method', 'with-syntax', 'λ' ) # Generated by example.rkt _builtins = ( - u'*', u'*list/c', u'+', u'-', u'/', u'<', u'', u'>/c', u'>=', u'>=/c', u'abort-current-continuation', u'abs', - u'absolute-path?', u'acos', u'add-between', u'add1', u'alarm-evt', - u'always-evt', u'and/c', u'andmap', u'angle', 
u'any/c', u'append', u'append*', - u'append-map', u'apply', u'argmax', u'argmin', u'arithmetic-shift', - u'arity-at-least', u'arity-at-least-value', u'arity-at-least?', - u'arity-checking-wrapper', u'arity-includes?', u'arity=?', - u'arrow-contract-info', u'arrow-contract-info-accepts-arglist', - u'arrow-contract-info-chaperone-procedure', - u'arrow-contract-info-check-first-order', u'arrow-contract-info?', - u'asin', u'assf', u'assoc', u'assq', u'assv', u'atan', - u'bad-number-of-results', u'banner', u'base->-doms/c', u'base->-rngs/c', - u'base->?', u'between/c', u'bitwise-and', u'bitwise-bit-field', - u'bitwise-bit-set?', u'bitwise-ior', u'bitwise-not', u'bitwise-xor', - u'blame-add-car-context', u'blame-add-cdr-context', u'blame-add-context', - u'blame-add-missing-party', u'blame-add-nth-arg-context', - u'blame-add-range-context', u'blame-add-unknown-context', - u'blame-context', u'blame-contract', u'blame-fmt->-string', - u'blame-missing-party?', u'blame-negative', u'blame-original?', - u'blame-positive', u'blame-replace-negative', u'blame-source', - u'blame-swap', u'blame-swapped?', u'blame-update', u'blame-value', - u'blame?', u'boolean=?', u'boolean?', u'bound-identifier=?', u'box', - u'box-cas!', u'box-immutable', u'box-immutable/c', u'box/c', u'box?', - u'break-enabled', u'break-parameterization?', u'break-thread', - u'build-chaperone-contract-property', u'build-compound-type-name', - u'build-contract-property', u'build-flat-contract-property', - u'build-list', u'build-path', u'build-path/convention-type', - u'build-string', u'build-vector', u'byte-pregexp', u'byte-pregexp?', - u'byte-ready?', u'byte-regexp', u'byte-regexp?', u'byte?', u'bytes', - u'bytes->immutable-bytes', u'bytes->list', u'bytes->path', - u'bytes->path-element', u'bytes->string/latin-1', u'bytes->string/locale', - u'bytes->string/utf-8', u'bytes-append', u'bytes-append*', - u'bytes-close-converter', u'bytes-convert', u'bytes-convert-end', - u'bytes-converter?', u'bytes-copy', 
u'bytes-copy!', - u'bytes-environment-variable-name?', u'bytes-fill!', u'bytes-join', - u'bytes-length', u'bytes-no-nuls?', u'bytes-open-converter', u'bytes-ref', - u'bytes-set!', u'bytes-utf-8-index', u'bytes-utf-8-length', - u'bytes-utf-8-ref', u'bytes?', u'bytes?', u'caaaar', - u'caaadr', u'caaar', u'caadar', u'caaddr', u'caadr', u'caar', u'cadaar', - u'cadadr', u'cadar', u'caddar', u'cadddr', u'caddr', u'cadr', - u'call-in-nested-thread', u'call-with-atomic-output-file', - u'call-with-break-parameterization', - u'call-with-composable-continuation', u'call-with-continuation-barrier', - u'call-with-continuation-prompt', u'call-with-current-continuation', - u'call-with-default-reading-parameterization', - u'call-with-escape-continuation', u'call-with-exception-handler', - u'call-with-file-lock/timeout', u'call-with-immediate-continuation-mark', - u'call-with-input-bytes', u'call-with-input-file', - u'call-with-input-file*', u'call-with-input-string', - u'call-with-output-bytes', u'call-with-output-file', - u'call-with-output-file*', u'call-with-output-string', - u'call-with-parameterization', u'call-with-semaphore', - u'call-with-semaphore/enable-break', u'call-with-values', u'call/cc', - u'call/ec', u'car', u'cartesian-product', u'cdaaar', u'cdaadr', u'cdaar', - u'cdadar', u'cdaddr', u'cdadr', u'cdar', u'cddaar', u'cddadr', u'cddar', - u'cdddar', u'cddddr', u'cdddr', u'cddr', u'cdr', u'ceiling', u'channel-get', - u'channel-put', u'channel-put-evt', u'channel-put-evt?', - u'channel-try-get', u'channel/c', u'channel?', u'chaperone-box', - u'chaperone-channel', u'chaperone-continuation-mark-key', - u'chaperone-contract-property?', u'chaperone-contract?', u'chaperone-evt', - u'chaperone-hash', u'chaperone-hash-set', u'chaperone-of?', - u'chaperone-procedure', u'chaperone-procedure*', u'chaperone-prompt-tag', - u'chaperone-struct', u'chaperone-struct-type', u'chaperone-vector', - u'chaperone?', u'char->integer', u'char-alphabetic?', u'char-blank?', - u'char-ci<=?', 
u'char-ci=?', u'char-ci>?', - u'char-downcase', u'char-foldcase', u'char-general-category', - u'char-graphic?', u'char-in', u'char-in/c', u'char-iso-control?', - u'char-lower-case?', u'char-numeric?', u'char-punctuation?', - u'char-ready?', u'char-symbolic?', u'char-title-case?', u'char-titlecase', - u'char-upcase', u'char-upper-case?', u'char-utf-8-length', - u'char-whitespace?', u'char<=?', u'char=?', u'char>?', - u'char?', u'check-duplicate-identifier', u'check-duplicates', - u'checked-procedure-check-and-extract', u'choice-evt', - u'class->interface', u'class-info', u'class-seal', u'class-unseal', - u'class?', u'cleanse-path', u'close-input-port', u'close-output-port', - u'coerce-chaperone-contract', u'coerce-chaperone-contracts', - u'coerce-contract', u'coerce-contract/f', u'coerce-contracts', - u'coerce-flat-contract', u'coerce-flat-contracts', u'collect-garbage', - u'collection-file-path', u'collection-path', u'combinations', u'compile', - u'compile-allow-set!-undefined', u'compile-context-preservation-enabled', - u'compile-enforce-module-constants', u'compile-syntax', - u'compiled-expression-recompile', u'compiled-expression?', - u'compiled-module-expression?', u'complete-path?', u'complex?', u'compose', - u'compose1', u'conjoin', u'conjugate', u'cons', u'cons/c', u'cons?', u'const', - u'continuation-mark-key/c', u'continuation-mark-key?', - u'continuation-mark-set->context', u'continuation-mark-set->list', - u'continuation-mark-set->list*', u'continuation-mark-set-first', - u'continuation-mark-set?', u'continuation-marks', - u'continuation-prompt-available?', u'continuation-prompt-tag?', - u'continuation?', u'contract-continuation-mark-key', - u'contract-custom-write-property-proc', u'contract-exercise', - u'contract-first-order', u'contract-first-order-passes?', - u'contract-late-neg-projection', u'contract-name', u'contract-proc', - u'contract-projection', u'contract-property?', - u'contract-random-generate', u'contract-random-generate-fail', - 
u'contract-random-generate-fail?', - u'contract-random-generate-get-current-environment', - u'contract-random-generate-stash', u'contract-random-generate/choose', - u'contract-stronger?', u'contract-struct-exercise', - u'contract-struct-generate', u'contract-struct-late-neg-projection', - u'contract-struct-list-contract?', u'contract-val-first-projection', - u'contract?', u'convert-stream', u'copy-directory/files', u'copy-file', - u'copy-port', u'cos', u'cosh', u'count', u'current-blame-format', - u'current-break-parameterization', u'current-code-inspector', - u'current-command-line-arguments', u'current-compile', - u'current-compiled-file-roots', u'current-continuation-marks', - u'current-contract-region', u'current-custodian', u'current-directory', - u'current-directory-for-user', u'current-drive', - u'current-environment-variables', u'current-error-port', u'current-eval', - u'current-evt-pseudo-random-generator', - u'current-force-delete-permissions', u'current-future', - u'current-gc-milliseconds', u'current-get-interaction-input-port', - u'current-inexact-milliseconds', u'current-input-port', - u'current-inspector', u'current-library-collection-links', - u'current-library-collection-paths', u'current-load', - u'current-load-extension', u'current-load-relative-directory', - u'current-load/use-compiled', u'current-locale', u'current-logger', - u'current-memory-use', u'current-milliseconds', - u'current-module-declare-name', u'current-module-declare-source', - u'current-module-name-resolver', u'current-module-path-for-load', - u'current-namespace', u'current-output-port', u'current-parameterization', - u'current-plumber', u'current-preserved-thread-cell-values', - u'current-print', u'current-process-milliseconds', u'current-prompt-read', - u'current-pseudo-random-generator', u'current-read-interaction', - u'current-reader-guard', u'current-readtable', u'current-seconds', - u'current-security-guard', u'current-subprocess-custodian-mode', - u'current-thread', 
u'current-thread-group', - u'current-thread-initial-stack-size', - u'current-write-relative-directory', u'curry', u'curryr', - u'custodian-box-value', u'custodian-box?', u'custodian-limit-memory', - u'custodian-managed-list', u'custodian-memory-accounting-available?', - u'custodian-require-memory', u'custodian-shutdown-all', u'custodian?', - u'custom-print-quotable-accessor', u'custom-print-quotable?', - u'custom-write-accessor', u'custom-write-property-proc', u'custom-write?', - u'date', u'date*', u'date*-nanosecond', u'date*-time-zone-name', u'date*?', - u'date-day', u'date-dst?', u'date-hour', u'date-minute', u'date-month', - u'date-second', u'date-time-zone-offset', u'date-week-day', u'date-year', - u'date-year-day', u'date?', u'datum->syntax', u'datum-intern-literal', - u'default-continuation-prompt-tag', u'degrees->radians', - u'delete-directory', u'delete-directory/files', u'delete-file', - u'denominator', u'dict->list', u'dict-can-functional-set?', - u'dict-can-remove-keys?', u'dict-clear', u'dict-clear!', u'dict-copy', - u'dict-count', u'dict-empty?', u'dict-for-each', u'dict-has-key?', - u'dict-implements/c', u'dict-implements?', u'dict-iter-contract', - u'dict-iterate-first', u'dict-iterate-key', u'dict-iterate-next', - u'dict-iterate-value', u'dict-key-contract', u'dict-keys', u'dict-map', - u'dict-mutable?', u'dict-ref', u'dict-ref!', u'dict-remove', - u'dict-remove!', u'dict-set', u'dict-set!', u'dict-set*', u'dict-set*!', - u'dict-update', u'dict-update!', u'dict-value-contract', u'dict-values', - u'dict?', u'directory-exists?', u'directory-list', u'disjoin', u'display', - u'display-lines', u'display-lines-to-file', u'display-to-file', - u'displayln', u'double-flonum?', u'drop', u'drop-common-prefix', - u'drop-right', u'dropf', u'dropf-right', u'dump-memory-stats', - u'dup-input-port', u'dup-output-port', u'dynamic->*', u'dynamic-get-field', - u'dynamic-object/c', u'dynamic-place', u'dynamic-place*', - u'dynamic-require', 
u'dynamic-require-for-syntax', u'dynamic-send', - u'dynamic-set-field!', u'dynamic-wind', u'eighth', u'empty', - u'empty-sequence', u'empty-stream', u'empty?', - u'environment-variables-copy', u'environment-variables-names', - u'environment-variables-ref', u'environment-variables-set!', - u'environment-variables?', u'eof', u'eof-evt', u'eof-object?', - u'ephemeron-value', u'ephemeron?', u'eprintf', u'eq-contract-val', - u'eq-contract?', u'eq-hash-code', u'eq?', u'equal-contract-val', - u'equal-contract?', u'equal-hash-code', u'equal-secondary-hash-code', - u'equal<%>', u'equal?', u'equal?/recur', u'eqv-hash-code', u'eqv?', u'error', - u'error-display-handler', u'error-escape-handler', - u'error-print-context-length', u'error-print-source-location', - u'error-print-width', u'error-value->string-handler', u'eval', - u'eval-jit-enabled', u'eval-syntax', u'even?', u'evt/c', u'evt?', - u'exact->inexact', u'exact-ceiling', u'exact-floor', u'exact-integer?', - u'exact-nonnegative-integer?', u'exact-positive-integer?', u'exact-round', - u'exact-truncate', u'exact?', u'executable-yield-handler', u'exit', - u'exit-handler', u'exn', u'exn-continuation-marks', u'exn-message', - u'exn:break', u'exn:break-continuation', u'exn:break:hang-up', - u'exn:break:hang-up?', u'exn:break:terminate', u'exn:break:terminate?', - u'exn:break?', u'exn:fail', u'exn:fail:contract', - u'exn:fail:contract:arity', u'exn:fail:contract:arity?', - u'exn:fail:contract:blame', u'exn:fail:contract:blame-object', - u'exn:fail:contract:blame?', u'exn:fail:contract:continuation', - u'exn:fail:contract:continuation?', u'exn:fail:contract:divide-by-zero', - u'exn:fail:contract:divide-by-zero?', - u'exn:fail:contract:non-fixnum-result', - u'exn:fail:contract:non-fixnum-result?', u'exn:fail:contract:variable', - u'exn:fail:contract:variable-id', u'exn:fail:contract:variable?', - u'exn:fail:contract?', u'exn:fail:filesystem', - u'exn:fail:filesystem:errno', u'exn:fail:filesystem:errno-errno', - 
u'exn:fail:filesystem:errno?', u'exn:fail:filesystem:exists', - u'exn:fail:filesystem:exists?', u'exn:fail:filesystem:missing-module', - u'exn:fail:filesystem:missing-module-path', - u'exn:fail:filesystem:missing-module?', u'exn:fail:filesystem:version', - u'exn:fail:filesystem:version?', u'exn:fail:filesystem?', - u'exn:fail:network', u'exn:fail:network:errno', - u'exn:fail:network:errno-errno', u'exn:fail:network:errno?', - u'exn:fail:network?', u'exn:fail:object', u'exn:fail:object?', - u'exn:fail:out-of-memory', u'exn:fail:out-of-memory?', u'exn:fail:read', - u'exn:fail:read-srclocs', u'exn:fail:read:eof', u'exn:fail:read:eof?', - u'exn:fail:read:non-char', u'exn:fail:read:non-char?', u'exn:fail:read?', - u'exn:fail:syntax', u'exn:fail:syntax-exprs', - u'exn:fail:syntax:missing-module', - u'exn:fail:syntax:missing-module-path', - u'exn:fail:syntax:missing-module?', u'exn:fail:syntax:unbound', - u'exn:fail:syntax:unbound?', u'exn:fail:syntax?', u'exn:fail:unsupported', - u'exn:fail:unsupported?', u'exn:fail:user', u'exn:fail:user?', - u'exn:fail?', u'exn:misc:match?', u'exn:missing-module-accessor', - u'exn:missing-module?', u'exn:srclocs-accessor', u'exn:srclocs?', u'exn?', - u'exp', u'expand', u'expand-once', u'expand-syntax', u'expand-syntax-once', - u'expand-syntax-to-top-form', u'expand-to-top-form', u'expand-user-path', - u'explode-path', u'expt', u'externalizable<%>', u'failure-result/c', - u'false?', u'field-names', u'fifth', u'file->bytes', u'file->bytes-lines', - u'file->lines', u'file->list', u'file->string', u'file->value', - u'file-exists?', u'file-name-from-path', u'file-or-directory-identity', - u'file-or-directory-modify-seconds', u'file-or-directory-permissions', - u'file-position', u'file-position*', u'file-size', - u'file-stream-buffer-mode', u'file-stream-port?', u'file-truncate', - u'filename-extension', u'filesystem-change-evt', - u'filesystem-change-evt-cancel', u'filesystem-change-evt?', - u'filesystem-root-list', u'filter', 
u'filter-map', u'filter-not', - u'filter-read-input-port', u'find-executable-path', u'find-files', - u'find-library-collection-links', u'find-library-collection-paths', - u'find-relative-path', u'find-system-path', u'findf', u'first', - u'first-or/c', u'fixnum?', u'flat-contract', u'flat-contract-predicate', - u'flat-contract-property?', u'flat-contract?', u'flat-named-contract', - u'flatten', u'floating-point-bytes->real', u'flonum?', u'floor', - u'flush-output', u'fold-files', u'foldl', u'foldr', u'for-each', u'force', - u'format', u'fourth', u'fprintf', u'free-identifier=?', - u'free-label-identifier=?', u'free-template-identifier=?', - u'free-transformer-identifier=?', u'fsemaphore-count', u'fsemaphore-post', - u'fsemaphore-try-wait?', u'fsemaphore-wait', u'fsemaphore?', u'future', - u'future?', u'futures-enabled?', u'gcd', u'generate-member-key', - u'generate-temporaries', u'generic-set?', u'generic?', u'gensym', - u'get-output-bytes', u'get-output-string', u'get-preference', - u'get/build-late-neg-projection', u'get/build-val-first-projection', - u'getenv', u'global-port-print-handler', u'group-by', u'group-execute-bit', - u'group-read-bit', u'group-write-bit', u'guard-evt', u'handle-evt', - u'handle-evt?', u'has-blame?', u'has-contract?', u'hash', u'hash->list', - u'hash-clear', u'hash-clear!', u'hash-copy', u'hash-copy-clear', - u'hash-count', u'hash-empty?', u'hash-eq?', u'hash-equal?', u'hash-eqv?', - u'hash-for-each', u'hash-has-key?', u'hash-iterate-first', - u'hash-iterate-key', u'hash-iterate-key+value', u'hash-iterate-next', - u'hash-iterate-pair', u'hash-iterate-value', u'hash-keys', u'hash-map', - u'hash-placeholder?', u'hash-ref', u'hash-ref!', u'hash-remove', - u'hash-remove!', u'hash-set', u'hash-set!', u'hash-set*', u'hash-set*!', - u'hash-update', u'hash-update!', u'hash-values', u'hash-weak?', u'hash/c', - u'hash?', u'hasheq', u'hasheqv', u'identifier-binding', - u'identifier-binding-symbol', u'identifier-label-binding', - 
u'identifier-prune-lexical-context', - u'identifier-prune-to-source-module', - u'identifier-remove-from-definition-context', - u'identifier-template-binding', u'identifier-transformer-binding', - u'identifier?', u'identity', u'if/c', u'imag-part', u'immutable?', - u'impersonate-box', u'impersonate-channel', - u'impersonate-continuation-mark-key', u'impersonate-hash', - u'impersonate-hash-set', u'impersonate-procedure', - u'impersonate-procedure*', u'impersonate-prompt-tag', - u'impersonate-struct', u'impersonate-vector', u'impersonator-contract?', - u'impersonator-ephemeron', u'impersonator-of?', - u'impersonator-prop:application-mark', u'impersonator-prop:blame', - u'impersonator-prop:contracted', - u'impersonator-property-accessor-procedure?', u'impersonator-property?', - u'impersonator?', u'implementation?', u'implementation?/c', u'in-bytes', - u'in-bytes-lines', u'in-combinations', u'in-cycle', u'in-dict', - u'in-dict-keys', u'in-dict-pairs', u'in-dict-values', u'in-directory', - u'in-hash', u'in-hash-keys', u'in-hash-pairs', u'in-hash-values', - u'in-immutable-hash', u'in-immutable-hash-keys', - u'in-immutable-hash-pairs', u'in-immutable-hash-values', - u'in-immutable-set', u'in-indexed', u'in-input-port-bytes', - u'in-input-port-chars', u'in-lines', u'in-list', u'in-mlist', - u'in-mutable-hash', u'in-mutable-hash-keys', u'in-mutable-hash-pairs', - u'in-mutable-hash-values', u'in-mutable-set', u'in-naturals', - u'in-parallel', u'in-permutations', u'in-port', u'in-producer', u'in-range', - u'in-sequences', u'in-set', u'in-slice', u'in-stream', u'in-string', - u'in-syntax', u'in-value', u'in-values*-sequence', u'in-values-sequence', - u'in-vector', u'in-weak-hash', u'in-weak-hash-keys', u'in-weak-hash-pairs', - u'in-weak-hash-values', u'in-weak-set', u'inexact->exact', - u'inexact-real?', u'inexact?', u'infinite?', u'input-port-append', - u'input-port?', u'inspector?', u'instanceof/c', u'integer->char', - u'integer->integer-bytes', u'integer-bytes->integer', 
u'integer-in', - u'integer-length', u'integer-sqrt', u'integer-sqrt/remainder', u'integer?', - u'interface->method-names', u'interface-extension?', u'interface?', - u'internal-definition-context-binding-identifiers', - u'internal-definition-context-introduce', - u'internal-definition-context-seal', u'internal-definition-context?', - u'is-a?', u'is-a?/c', u'keyword->string', u'keyword-apply', u'keywordbytes', u'list->mutable-set', - u'list->mutable-seteq', u'list->mutable-seteqv', u'list->set', - u'list->seteq', u'list->seteqv', u'list->string', u'list->vector', - u'list->weak-set', u'list->weak-seteq', u'list->weak-seteqv', - u'list-contract?', u'list-prefix?', u'list-ref', u'list-set', u'list-tail', - u'list-update', u'list/c', u'list?', u'listen-port-number?', u'listof', - u'load', u'load-extension', u'load-on-demand-enabled', u'load-relative', - u'load-relative-extension', u'load/cd', u'load/use-compiled', - u'local-expand', u'local-expand/capture-lifts', - u'local-transformer-expand', u'local-transformer-expand/capture-lifts', - u'locale-string-encoding', u'log', u'log-all-levels', u'log-level-evt', - u'log-level?', u'log-max-level', u'log-message', u'log-receiver?', - u'logger-name', u'logger?', u'magnitude', u'make-arity-at-least', - u'make-base-empty-namespace', u'make-base-namespace', u'make-bytes', - u'make-channel', u'make-chaperone-contract', - u'make-continuation-mark-key', u'make-continuation-prompt-tag', - u'make-contract', u'make-custodian', u'make-custodian-box', - u'make-custom-hash', u'make-custom-hash-types', u'make-custom-set', - u'make-custom-set-types', u'make-date', u'make-date*', - u'make-derived-parameter', u'make-directory', u'make-directory*', - u'make-do-sequence', u'make-empty-namespace', - u'make-environment-variables', u'make-ephemeron', u'make-exn', - u'make-exn:break', u'make-exn:break:hang-up', u'make-exn:break:terminate', - u'make-exn:fail', u'make-exn:fail:contract', - u'make-exn:fail:contract:arity', 
u'make-exn:fail:contract:blame', - u'make-exn:fail:contract:continuation', - u'make-exn:fail:contract:divide-by-zero', - u'make-exn:fail:contract:non-fixnum-result', - u'make-exn:fail:contract:variable', u'make-exn:fail:filesystem', - u'make-exn:fail:filesystem:errno', u'make-exn:fail:filesystem:exists', - u'make-exn:fail:filesystem:missing-module', - u'make-exn:fail:filesystem:version', u'make-exn:fail:network', - u'make-exn:fail:network:errno', u'make-exn:fail:object', - u'make-exn:fail:out-of-memory', u'make-exn:fail:read', - u'make-exn:fail:read:eof', u'make-exn:fail:read:non-char', - u'make-exn:fail:syntax', u'make-exn:fail:syntax:missing-module', - u'make-exn:fail:syntax:unbound', u'make-exn:fail:unsupported', - u'make-exn:fail:user', u'make-file-or-directory-link', - u'make-flat-contract', u'make-fsemaphore', u'make-generic', - u'make-handle-get-preference-locked', u'make-hash', - u'make-hash-placeholder', u'make-hasheq', u'make-hasheq-placeholder', - u'make-hasheqv', u'make-hasheqv-placeholder', - u'make-immutable-custom-hash', u'make-immutable-hash', - u'make-immutable-hasheq', u'make-immutable-hasheqv', - u'make-impersonator-property', u'make-input-port', - u'make-input-port/read-to-peek', u'make-inspector', - u'make-keyword-procedure', u'make-known-char-range-list', - u'make-limited-input-port', u'make-list', u'make-lock-file-name', - u'make-log-receiver', u'make-logger', u'make-mixin-contract', - u'make-mutable-custom-set', u'make-none/c', u'make-object', - u'make-output-port', u'make-parameter', u'make-parent-directory*', - u'make-phantom-bytes', u'make-pipe', u'make-pipe-with-specials', - u'make-placeholder', u'make-plumber', u'make-polar', u'make-prefab-struct', - u'make-primitive-class', u'make-proj-contract', - u'make-pseudo-random-generator', u'make-reader-graph', u'make-readtable', - u'make-rectangular', u'make-rename-transformer', - u'make-resolved-module-path', u'make-security-guard', u'make-semaphore', - u'make-set!-transformer', 
u'make-shared-bytes', u'make-sibling-inspector', - u'make-special-comment', u'make-srcloc', u'make-string', - u'make-struct-field-accessor', u'make-struct-field-mutator', - u'make-struct-type', u'make-struct-type-property', - u'make-syntax-delta-introducer', u'make-syntax-introducer', - u'make-temporary-file', u'make-tentative-pretty-print-output-port', - u'make-thread-cell', u'make-thread-group', u'make-vector', - u'make-weak-box', u'make-weak-custom-hash', u'make-weak-custom-set', - u'make-weak-hash', u'make-weak-hasheq', u'make-weak-hasheqv', - u'make-will-executor', u'map', u'match-equality-test', - u'matches-arity-exactly?', u'max', u'mcar', u'mcdr', u'mcons', u'member', - u'member-name-key-hash-code', u'member-name-key=?', u'member-name-key?', - u'memf', u'memq', u'memv', u'merge-input', u'method-in-interface?', u'min', - u'mixin-contract', u'module->exports', u'module->imports', - u'module->language-info', u'module->namespace', - u'module-compiled-cross-phase-persistent?', u'module-compiled-exports', - u'module-compiled-imports', u'module-compiled-language-info', - u'module-compiled-name', u'module-compiled-submodules', - u'module-declared?', u'module-path-index-join', - u'module-path-index-resolve', u'module-path-index-split', - u'module-path-index-submodule', u'module-path-index?', u'module-path?', - u'module-predefined?', u'module-provide-protected?', u'modulo', u'mpair?', - u'mutable-set', u'mutable-seteq', u'mutable-seteqv', u'n->th', - u'nack-guard-evt', u'namespace-anchor->empty-namespace', - u'namespace-anchor->namespace', u'namespace-anchor?', - u'namespace-attach-module', u'namespace-attach-module-declaration', - u'namespace-base-phase', u'namespace-mapped-symbols', - u'namespace-module-identifier', u'namespace-module-registry', - u'namespace-require', u'namespace-require/constant', - u'namespace-require/copy', u'namespace-require/expansion-time', - u'namespace-set-variable-value!', u'namespace-symbol->identifier', - u'namespace-syntax-introduce', 
u'namespace-undefine-variable!', - u'namespace-unprotect-module', u'namespace-variable-value', u'namespace?', - u'nan?', u'natural-number/c', u'negate', u'negative?', u'never-evt', - u'new-∀/c', u'new-∃/c', u'newline', u'ninth', u'non-empty-listof', - u'non-empty-string?', u'none/c', u'normal-case-path', u'normalize-arity', - u'normalize-path', u'normalized-arity?', u'not', u'not/c', u'null', u'null?', - u'number->string', u'number?', u'numerator', u'object%', u'object->vector', - u'object-info', u'object-interface', u'object-method-arity-includes?', - u'object-name', u'object-or-false=?', u'object=?', u'object?', u'odd?', - u'one-of/c', u'open-input-bytes', u'open-input-file', - u'open-input-output-file', u'open-input-string', u'open-output-bytes', - u'open-output-file', u'open-output-nowhere', u'open-output-string', - u'or/c', u'order-of-magnitude', u'ormap', u'other-execute-bit', - u'other-read-bit', u'other-write-bit', u'output-port?', u'pair?', - u'parameter-procedure=?', u'parameter/c', u'parameter?', - u'parameterization?', u'parse-command-line', u'partition', u'path->bytes', - u'path->complete-path', u'path->directory-path', u'path->string', - u'path-add-suffix', u'path-convention-type', u'path-element->bytes', - u'path-element->string', u'path-element?', u'path-for-some-system?', - u'path-list-string->path-list', u'path-only', u'path-replace-suffix', - u'path-string?', u'pathbytes', u'port->bytes-lines', u'port->lines', - u'port->list', u'port->string', u'port-closed-evt', u'port-closed?', - u'port-commit-peeked', u'port-count-lines!', u'port-count-lines-enabled', - u'port-counts-lines?', u'port-display-handler', u'port-file-identity', - u'port-file-unlock', u'port-next-location', u'port-number?', - u'port-print-handler', u'port-progress-evt', - u'port-provides-progress-evts?', u'port-read-handler', - u'port-try-file-lock?', u'port-write-handler', u'port-writes-atomic?', - u'port-writes-special?', u'port?', u'positive?', u'predicate/c', - 
u'prefab-key->struct-type', u'prefab-key?', u'prefab-struct-key', - u'preferences-lock-file-mode', u'pregexp', u'pregexp?', u'pretty-display', - u'pretty-format', u'pretty-print', u'pretty-print-.-symbol-without-bars', - u'pretty-print-abbreviate-read-macros', u'pretty-print-columns', - u'pretty-print-current-style-table', u'pretty-print-depth', - u'pretty-print-exact-as-decimal', u'pretty-print-extend-style-table', - u'pretty-print-handler', u'pretty-print-newline', - u'pretty-print-post-print-hook', u'pretty-print-pre-print-hook', - u'pretty-print-print-hook', u'pretty-print-print-line', - u'pretty-print-remap-stylable', u'pretty-print-show-inexactness', - u'pretty-print-size-hook', u'pretty-print-style-table?', - u'pretty-printing', u'pretty-write', u'primitive-closure?', - u'primitive-result-arity', u'primitive?', u'print', u'print-as-expression', - u'print-boolean-long-form', u'print-box', u'print-graph', - u'print-hash-table', u'print-mpair-curly-braces', - u'print-pair-curly-braces', u'print-reader-abbreviations', - u'print-struct', u'print-syntax-width', u'print-unreadable', - u'print-vector-length', u'printable/c', u'printable<%>', u'printf', - u'println', u'procedure->method', u'procedure-arity', - u'procedure-arity-includes/c', u'procedure-arity-includes?', - u'procedure-arity?', u'procedure-closure-contents-eq?', - u'procedure-extract-target', u'procedure-keywords', - u'procedure-reduce-arity', u'procedure-reduce-keyword-arity', - u'procedure-rename', u'procedure-result-arity', u'procedure-specialize', - u'procedure-struct-type?', u'procedure?', u'process', u'process*', - u'process*/ports', u'process/ports', u'processor-count', u'progress-evt?', - u'promise-forced?', u'promise-running?', u'promise/c', u'promise/name?', - u'promise?', u'prop:arity-string', u'prop:arrow-contract', - u'prop:arrow-contract-get-info', u'prop:arrow-contract?', u'prop:blame', - u'prop:chaperone-contract', u'prop:checked-procedure', u'prop:contract', - u'prop:contracted', 
u'prop:custom-print-quotable', u'prop:custom-write', - u'prop:dict', u'prop:dict/contract', u'prop:equal+hash', u'prop:evt', - u'prop:exn:missing-module', u'prop:exn:srclocs', - u'prop:expansion-contexts', u'prop:flat-contract', - u'prop:impersonator-of', u'prop:input-port', - u'prop:liberal-define-context', u'prop:object-name', - u'prop:opt-chaperone-contract', u'prop:opt-chaperone-contract-get-test', - u'prop:opt-chaperone-contract?', u'prop:orc-contract', - u'prop:orc-contract-get-subcontracts', u'prop:orc-contract?', - u'prop:output-port', u'prop:place-location', u'prop:procedure', - u'prop:recursive-contract', u'prop:recursive-contract-unroll', - u'prop:recursive-contract?', u'prop:rename-transformer', u'prop:sequence', - u'prop:set!-transformer', u'prop:stream', u'proper-subset?', - u'pseudo-random-generator->vector', u'pseudo-random-generator-vector?', - u'pseudo-random-generator?', u'put-preferences', u'putenv', u'quotient', - u'quotient/remainder', u'radians->degrees', u'raise', - u'raise-argument-error', u'raise-arguments-error', u'raise-arity-error', - u'raise-blame-error', u'raise-contract-error', u'raise-mismatch-error', - u'raise-not-cons-blame-error', u'raise-range-error', - u'raise-result-error', u'raise-syntax-error', u'raise-type-error', - u'raise-user-error', u'random', u'random-seed', u'range', u'rational?', - u'rationalize', u'read', u'read-accept-bar-quote', u'read-accept-box', - u'read-accept-compiled', u'read-accept-dot', u'read-accept-graph', - u'read-accept-infix-dot', u'read-accept-lang', u'read-accept-quasiquote', - u'read-accept-reader', u'read-byte', u'read-byte-or-special', - u'read-bytes', u'read-bytes!', u'read-bytes!-evt', u'read-bytes-avail!', - u'read-bytes-avail!*', u'read-bytes-avail!-evt', - u'read-bytes-avail!/enable-break', u'read-bytes-evt', u'read-bytes-line', - u'read-bytes-line-evt', u'read-case-sensitive', u'read-cdot', u'read-char', - u'read-char-or-special', u'read-curly-brace-as-paren', - 
u'read-curly-brace-with-tag', u'read-decimal-as-inexact', - u'read-eval-print-loop', u'read-language', u'read-line', u'read-line-evt', - u'read-on-demand-source', u'read-square-bracket-as-paren', - u'read-square-bracket-with-tag', u'read-string', u'read-string!', - u'read-string!-evt', u'read-string-evt', u'read-syntax', - u'read-syntax/recursive', u'read/recursive', u'readtable-mapping', - u'readtable?', u'real->decimal-string', u'real->double-flonum', - u'real->floating-point-bytes', u'real->single-flonum', u'real-in', - u'real-part', u'real?', u'reencode-input-port', u'reencode-output-port', - u'regexp', u'regexp-match', u'regexp-match*', u'regexp-match-evt', - u'regexp-match-exact?', u'regexp-match-peek', - u'regexp-match-peek-immediate', u'regexp-match-peek-positions', - u'regexp-match-peek-positions*', - u'regexp-match-peek-positions-immediate', - u'regexp-match-peek-positions-immediate/end', - u'regexp-match-peek-positions/end', u'regexp-match-positions', - u'regexp-match-positions*', u'regexp-match-positions/end', - u'regexp-match/end', u'regexp-match?', u'regexp-max-lookbehind', - u'regexp-quote', u'regexp-replace', u'regexp-replace*', - u'regexp-replace-quote', u'regexp-replaces', u'regexp-split', - u'regexp-try-match', u'regexp?', u'relative-path?', u'relocate-input-port', - u'relocate-output-port', u'remainder', u'remf', u'remf*', u'remove', - u'remove*', u'remove-duplicates', u'remq', u'remq*', u'remv', u'remv*', - u'rename-contract', u'rename-file-or-directory', - u'rename-transformer-target', u'rename-transformer?', u'replace-evt', - u'reroot-path', u'resolve-path', u'resolved-module-path-name', - u'resolved-module-path?', u'rest', u'reverse', u'round', u'second', - u'seconds->date', u'security-guard?', u'semaphore-peek-evt', - u'semaphore-peek-evt?', u'semaphore-post', u'semaphore-try-wait?', - u'semaphore-wait', u'semaphore-wait/enable-break', u'semaphore?', - u'sequence->list', u'sequence->stream', u'sequence-add-between', - u'sequence-andmap', 
u'sequence-append', u'sequence-count', - u'sequence-filter', u'sequence-fold', u'sequence-for-each', - u'sequence-generate', u'sequence-generate*', u'sequence-length', - u'sequence-map', u'sequence-ormap', u'sequence-ref', u'sequence-tail', - u'sequence/c', u'sequence?', u'set', u'set!-transformer-procedure', - u'set!-transformer?', u'set->list', u'set->stream', u'set-add', u'set-add!', - u'set-box!', u'set-clear', u'set-clear!', u'set-copy', u'set-copy-clear', - u'set-count', u'set-empty?', u'set-eq?', u'set-equal?', u'set-eqv?', - u'set-first', u'set-for-each', u'set-implements/c', u'set-implements?', - u'set-intersect', u'set-intersect!', u'set-map', u'set-mcar!', u'set-mcdr!', - u'set-member?', u'set-mutable?', u'set-phantom-bytes!', - u'set-port-next-location!', u'set-remove', u'set-remove!', u'set-rest', - u'set-some-basic-contracts!', u'set-subtract', u'set-subtract!', - u'set-symmetric-difference', u'set-symmetric-difference!', u'set-union', - u'set-union!', u'set-weak?', u'set/c', u'set=?', u'set?', u'seteq', u'seteqv', - u'seventh', u'sgn', u'shared-bytes', u'shell-execute', u'shrink-path-wrt', - u'shuffle', u'simple-form-path', u'simplify-path', u'sin', - u'single-flonum?', u'sinh', u'sixth', u'skip-projection-wrapper?', u'sleep', - u'some-system-path->string', u'sort', u'special-comment-value', - u'special-comment?', u'special-filter-input-port', u'split-at', - u'split-at-right', u'split-common-prefix', u'split-path', u'splitf-at', - u'splitf-at-right', u'sqr', u'sqrt', u'srcloc', u'srcloc->string', - u'srcloc-column', u'srcloc-line', u'srcloc-position', u'srcloc-source', - u'srcloc-span', u'srcloc?', u'stop-after', u'stop-before', u'stream->list', - u'stream-add-between', u'stream-andmap', u'stream-append', u'stream-count', - u'stream-empty?', u'stream-filter', u'stream-first', u'stream-fold', - u'stream-for-each', u'stream-length', u'stream-map', u'stream-ormap', - u'stream-ref', u'stream-rest', u'stream-tail', u'stream/c', u'stream?', - u'string', 
u'string->bytes/latin-1', u'string->bytes/locale', - u'string->bytes/utf-8', u'string->immutable-string', u'string->keyword', - u'string->list', u'string->number', u'string->path', - u'string->path-element', u'string->some-system-path', u'string->symbol', - u'string->uninterned-symbol', u'string->unreadable-symbol', - u'string-append', u'string-append*', u'string-ci<=?', u'string-ci=?', u'string-ci>?', u'string-contains?', - u'string-copy', u'string-copy!', u'string-downcase', - u'string-environment-variable-name?', u'string-fill!', u'string-foldcase', - u'string-join', u'string-len/c', u'string-length', u'string-locale-ci?', u'string-locale-downcase', - u'string-locale-upcase', u'string-locale?', u'string-no-nuls?', u'string-normalize-nfc', - u'string-normalize-nfd', u'string-normalize-nfkc', - u'string-normalize-nfkd', u'string-normalize-spaces', u'string-port?', - u'string-prefix?', u'string-ref', u'string-replace', u'string-set!', - u'string-split', u'string-suffix?', u'string-titlecase', u'string-trim', - u'string-upcase', u'string-utf-8-length', u'string<=?', u'string=?', u'string>?', u'string?', u'struct->vector', - u'struct-accessor-procedure?', u'struct-constructor-procedure?', - u'struct-info', u'struct-mutator-procedure?', - u'struct-predicate-procedure?', u'struct-type-info', - u'struct-type-make-constructor', u'struct-type-make-predicate', - u'struct-type-property-accessor-procedure?', u'struct-type-property/c', - u'struct-type-property?', u'struct-type?', u'struct:arity-at-least', - u'struct:arrow-contract-info', u'struct:date', u'struct:date*', - u'struct:exn', u'struct:exn:break', u'struct:exn:break:hang-up', - u'struct:exn:break:terminate', u'struct:exn:fail', - u'struct:exn:fail:contract', u'struct:exn:fail:contract:arity', - u'struct:exn:fail:contract:blame', - u'struct:exn:fail:contract:continuation', - u'struct:exn:fail:contract:divide-by-zero', - u'struct:exn:fail:contract:non-fixnum-result', - u'struct:exn:fail:contract:variable', 
u'struct:exn:fail:filesystem', - u'struct:exn:fail:filesystem:errno', - u'struct:exn:fail:filesystem:exists', - u'struct:exn:fail:filesystem:missing-module', - u'struct:exn:fail:filesystem:version', u'struct:exn:fail:network', - u'struct:exn:fail:network:errno', u'struct:exn:fail:object', - u'struct:exn:fail:out-of-memory', u'struct:exn:fail:read', - u'struct:exn:fail:read:eof', u'struct:exn:fail:read:non-char', - u'struct:exn:fail:syntax', u'struct:exn:fail:syntax:missing-module', - u'struct:exn:fail:syntax:unbound', u'struct:exn:fail:unsupported', - u'struct:exn:fail:user', u'struct:srcloc', - u'struct:wrapped-extra-arg-arrow', u'struct?', u'sub1', u'subbytes', - u'subclass?', u'subclass?/c', u'subprocess', u'subprocess-group-enabled', - u'subprocess-kill', u'subprocess-pid', u'subprocess-status', - u'subprocess-wait', u'subprocess?', u'subset?', u'substring', u'suggest/c', - u'symbol->string', u'symbol-interned?', u'symbol-unreadable?', u'symboldatum', - u'syntax->list', u'syntax-arm', u'syntax-column', u'syntax-debug-info', - u'syntax-disarm', u'syntax-e', u'syntax-line', - u'syntax-local-bind-syntaxes', u'syntax-local-certifier', - u'syntax-local-context', u'syntax-local-expand-expression', - u'syntax-local-get-shadower', u'syntax-local-identifier-as-binding', - u'syntax-local-introduce', u'syntax-local-lift-context', - u'syntax-local-lift-expression', u'syntax-local-lift-module', - u'syntax-local-lift-module-end-declaration', - u'syntax-local-lift-provide', u'syntax-local-lift-require', - u'syntax-local-lift-values-expression', - u'syntax-local-make-definition-context', - u'syntax-local-make-delta-introducer', - u'syntax-local-module-defined-identifiers', - u'syntax-local-module-exports', - u'syntax-local-module-required-identifiers', u'syntax-local-name', - u'syntax-local-phase-level', u'syntax-local-submodules', - u'syntax-local-transforming-module-provides?', u'syntax-local-value', - u'syntax-local-value/immediate', u'syntax-original?', u'syntax-position', 
- u'syntax-property', u'syntax-property-preserved?', - u'syntax-property-symbol-keys', u'syntax-protect', u'syntax-rearm', - u'syntax-recertify', u'syntax-shift-phase-level', u'syntax-source', - u'syntax-source-module', u'syntax-span', u'syntax-taint', - u'syntax-tainted?', u'syntax-track-origin', - u'syntax-transforming-module-expression?', - u'syntax-transforming-with-lifts?', u'syntax-transforming?', u'syntax/c', - u'syntax?', u'system', u'system*', u'system*/exit-code', - u'system-big-endian?', u'system-idle-evt', u'system-language+country', - u'system-library-subpath', u'system-path-convention-type', u'system-type', - u'system/exit-code', u'tail-marks-match?', u'take', u'take-common-prefix', - u'take-right', u'takef', u'takef-right', u'tan', u'tanh', - u'tcp-abandon-port', u'tcp-accept', u'tcp-accept-evt', - u'tcp-accept-ready?', u'tcp-accept/enable-break', u'tcp-addresses', - u'tcp-close', u'tcp-connect', u'tcp-connect/enable-break', u'tcp-listen', - u'tcp-listener?', u'tcp-port?', u'tentative-pretty-print-port-cancel', - u'tentative-pretty-print-port-transfer', u'tenth', u'terminal-port?', - u'the-unsupplied-arg', u'third', u'thread', u'thread-cell-ref', - u'thread-cell-set!', u'thread-cell-values?', u'thread-cell?', - u'thread-dead-evt', u'thread-dead?', u'thread-group?', u'thread-receive', - u'thread-receive-evt', u'thread-resume', u'thread-resume-evt', - u'thread-rewind-receive', u'thread-running?', u'thread-send', - u'thread-suspend', u'thread-suspend-evt', u'thread-try-receive', - u'thread-wait', u'thread/suspend-to-kill', u'thread?', u'time-apply', - u'touch', u'transplant-input-port', u'transplant-output-port', u'true', - u'truncate', u'udp-addresses', u'udp-bind!', u'udp-bound?', u'udp-close', - u'udp-connect!', u'udp-connected?', u'udp-multicast-interface', - u'udp-multicast-join-group!', u'udp-multicast-leave-group!', - u'udp-multicast-loopback?', u'udp-multicast-set-interface!', - u'udp-multicast-set-loopback!', u'udp-multicast-set-ttl!', - 
u'udp-multicast-ttl', u'udp-open-socket', u'udp-receive!', - u'udp-receive!*', u'udp-receive!-evt', u'udp-receive!/enable-break', - u'udp-receive-ready-evt', u'udp-send', u'udp-send*', u'udp-send-evt', - u'udp-send-ready-evt', u'udp-send-to', u'udp-send-to*', u'udp-send-to-evt', - u'udp-send-to/enable-break', u'udp-send/enable-break', u'udp?', u'unbox', - u'uncaught-exception-handler', u'unit?', u'unspecified-dom', - u'unsupplied-arg?', u'use-collection-link-paths', - u'use-compiled-file-paths', u'use-user-specific-search-paths', - u'user-execute-bit', u'user-read-bit', u'user-write-bit', u'value-blame', - u'value-contract', u'values', u'variable-reference->empty-namespace', - u'variable-reference->module-base-phase', - u'variable-reference->module-declaration-inspector', - u'variable-reference->module-path-index', - u'variable-reference->module-source', u'variable-reference->namespace', - u'variable-reference->phase', - u'variable-reference->resolved-module-path', - u'variable-reference-constant?', u'variable-reference?', u'vector', - u'vector->immutable-vector', u'vector->list', - u'vector->pseudo-random-generator', u'vector->pseudo-random-generator!', - u'vector->values', u'vector-append', u'vector-argmax', u'vector-argmin', - u'vector-copy', u'vector-copy!', u'vector-count', u'vector-drop', - u'vector-drop-right', u'vector-fill!', u'vector-filter', - u'vector-filter-not', u'vector-immutable', u'vector-immutable/c', - u'vector-immutableof', u'vector-length', u'vector-map', u'vector-map!', - u'vector-member', u'vector-memq', u'vector-memv', u'vector-ref', - u'vector-set!', u'vector-set*!', u'vector-set-performance-stats!', - u'vector-split-at', u'vector-split-at-right', u'vector-take', - u'vector-take-right', u'vector/c', u'vector?', u'vectorof', u'version', - u'void', u'void?', u'weak-box-value', u'weak-box?', u'weak-set', - u'weak-seteq', u'weak-seteqv', u'will-execute', u'will-executor?', - u'will-register', u'will-try-execute', u'with-input-from-bytes', - 
u'with-input-from-file', u'with-input-from-string', - u'with-output-to-bytes', u'with-output-to-file', u'with-output-to-string', - u'would-be-future', u'wrap-evt', u'wrapped-extra-arg-arrow', - u'wrapped-extra-arg-arrow-extra-neg-party-argument', - u'wrapped-extra-arg-arrow-real-func', u'wrapped-extra-arg-arrow?', - u'writable<%>', u'write', u'write-byte', u'write-bytes', - u'write-bytes-avail', u'write-bytes-avail*', u'write-bytes-avail-evt', - u'write-bytes-avail/enable-break', u'write-char', u'write-special', - u'write-special-avail*', u'write-special-evt', u'write-string', - u'write-to-file', u'writeln', u'xor', u'zero?', u'~.a', u'~.s', u'~.v', u'~a', - u'~e', u'~r', u'~s', u'~v' + '*', '*list/c', '+', '-', '/', '<', '', '>/c', '>=', '>=/c', 'abort-current-continuation', 'abs', + 'absolute-path?', 'acos', 'add-between', 'add1', 'alarm-evt', + 'always-evt', 'and/c', 'andmap', 'angle', 'any/c', 'append', 'append*', + 'append-map', 'apply', 'argmax', 'argmin', 'arithmetic-shift', + 'arity-at-least', 'arity-at-least-value', 'arity-at-least?', + 'arity-checking-wrapper', 'arity-includes?', 'arity=?', + 'arrow-contract-info', 'arrow-contract-info-accepts-arglist', + 'arrow-contract-info-chaperone-procedure', + 'arrow-contract-info-check-first-order', 'arrow-contract-info?', + 'asin', 'assf', 'assoc', 'assq', 'assv', 'atan', + 'bad-number-of-results', 'banner', 'base->-doms/c', 'base->-rngs/c', + 'base->?', 'between/c', 'bitwise-and', 'bitwise-bit-field', + 'bitwise-bit-set?', 'bitwise-ior', 'bitwise-not', 'bitwise-xor', + 'blame-add-car-context', 'blame-add-cdr-context', 'blame-add-context', + 'blame-add-missing-party', 'blame-add-nth-arg-context', + 'blame-add-range-context', 'blame-add-unknown-context', + 'blame-context', 'blame-contract', 'blame-fmt->-string', + 'blame-missing-party?', 'blame-negative', 'blame-original?', + 'blame-positive', 'blame-replace-negative', 'blame-source', + 'blame-swap', 'blame-swapped?', 'blame-update', 'blame-value', + 'blame?', 
'boolean=?', 'boolean?', 'bound-identifier=?', 'box', + 'box-cas!', 'box-immutable', 'box-immutable/c', 'box/c', 'box?', + 'break-enabled', 'break-parameterization?', 'break-thread', + 'build-chaperone-contract-property', 'build-compound-type-name', + 'build-contract-property', 'build-flat-contract-property', + 'build-list', 'build-path', 'build-path/convention-type', + 'build-string', 'build-vector', 'byte-pregexp', 'byte-pregexp?', + 'byte-ready?', 'byte-regexp', 'byte-regexp?', 'byte?', 'bytes', + 'bytes->immutable-bytes', 'bytes->list', 'bytes->path', + 'bytes->path-element', 'bytes->string/latin-1', 'bytes->string/locale', + 'bytes->string/utf-8', 'bytes-append', 'bytes-append*', + 'bytes-close-converter', 'bytes-convert', 'bytes-convert-end', + 'bytes-converter?', 'bytes-copy', 'bytes-copy!', + 'bytes-environment-variable-name?', 'bytes-fill!', 'bytes-join', + 'bytes-length', 'bytes-no-nuls?', 'bytes-open-converter', 'bytes-ref', + 'bytes-set!', 'bytes-utf-8-index', 'bytes-utf-8-length', + 'bytes-utf-8-ref', 'bytes?', 'bytes?', 'caaaar', + 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar', + 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr', 'cadr', + 'call-in-nested-thread', 'call-with-atomic-output-file', + 'call-with-break-parameterization', + 'call-with-composable-continuation', 'call-with-continuation-barrier', + 'call-with-continuation-prompt', 'call-with-current-continuation', + 'call-with-default-reading-parameterization', + 'call-with-escape-continuation', 'call-with-exception-handler', + 'call-with-file-lock/timeout', 'call-with-immediate-continuation-mark', + 'call-with-input-bytes', 'call-with-input-file', + 'call-with-input-file*', 'call-with-input-string', + 'call-with-output-bytes', 'call-with-output-file', + 'call-with-output-file*', 'call-with-output-string', + 'call-with-parameterization', 'call-with-semaphore', + 'call-with-semaphore/enable-break', 'call-with-values', 'call/cc', + 'call/ec', 'car', 'cartesian-product', 'cdaaar', 
'cdaadr', 'cdaar', + 'cdadar', 'cdaddr', 'cdadr', 'cdar', 'cddaar', 'cddadr', 'cddar', + 'cdddar', 'cddddr', 'cdddr', 'cddr', 'cdr', 'ceiling', 'channel-get', + 'channel-put', 'channel-put-evt', 'channel-put-evt?', + 'channel-try-get', 'channel/c', 'channel?', 'chaperone-box', + 'chaperone-channel', 'chaperone-continuation-mark-key', + 'chaperone-contract-property?', 'chaperone-contract?', 'chaperone-evt', + 'chaperone-hash', 'chaperone-hash-set', 'chaperone-of?', + 'chaperone-procedure', 'chaperone-procedure*', 'chaperone-prompt-tag', + 'chaperone-struct', 'chaperone-struct-type', 'chaperone-vector', + 'chaperone?', 'char->integer', 'char-alphabetic?', 'char-blank?', + 'char-ci<=?', 'char-ci=?', 'char-ci>?', + 'char-downcase', 'char-foldcase', 'char-general-category', + 'char-graphic?', 'char-in', 'char-in/c', 'char-iso-control?', + 'char-lower-case?', 'char-numeric?', 'char-punctuation?', + 'char-ready?', 'char-symbolic?', 'char-title-case?', 'char-titlecase', + 'char-upcase', 'char-upper-case?', 'char-utf-8-length', + 'char-whitespace?', 'char<=?', 'char=?', 'char>?', + 'char?', 'check-duplicate-identifier', 'check-duplicates', + 'checked-procedure-check-and-extract', 'choice-evt', + 'class->interface', 'class-info', 'class-seal', 'class-unseal', + 'class?', 'cleanse-path', 'close-input-port', 'close-output-port', + 'coerce-chaperone-contract', 'coerce-chaperone-contracts', + 'coerce-contract', 'coerce-contract/f', 'coerce-contracts', + 'coerce-flat-contract', 'coerce-flat-contracts', 'collect-garbage', + 'collection-file-path', 'collection-path', 'combinations', 'compile', + 'compile-allow-set!-undefined', 'compile-context-preservation-enabled', + 'compile-enforce-module-constants', 'compile-syntax', + 'compiled-expression-recompile', 'compiled-expression?', + 'compiled-module-expression?', 'complete-path?', 'complex?', 'compose', + 'compose1', 'conjoin', 'conjugate', 'cons', 'cons/c', 'cons?', 'const', + 'continuation-mark-key/c', 'continuation-mark-key?', + 
'continuation-mark-set->context', 'continuation-mark-set->list', + 'continuation-mark-set->list*', 'continuation-mark-set-first', + 'continuation-mark-set?', 'continuation-marks', + 'continuation-prompt-available?', 'continuation-prompt-tag?', + 'continuation?', 'contract-continuation-mark-key', + 'contract-custom-write-property-proc', 'contract-exercise', + 'contract-first-order', 'contract-first-order-passes?', + 'contract-late-neg-projection', 'contract-name', 'contract-proc', + 'contract-projection', 'contract-property?', + 'contract-random-generate', 'contract-random-generate-fail', + 'contract-random-generate-fail?', + 'contract-random-generate-get-current-environment', + 'contract-random-generate-stash', 'contract-random-generate/choose', + 'contract-stronger?', 'contract-struct-exercise', + 'contract-struct-generate', 'contract-struct-late-neg-projection', + 'contract-struct-list-contract?', 'contract-val-first-projection', + 'contract?', 'convert-stream', 'copy-directory/files', 'copy-file', + 'copy-port', 'cos', 'cosh', 'count', 'current-blame-format', + 'current-break-parameterization', 'current-code-inspector', + 'current-command-line-arguments', 'current-compile', + 'current-compiled-file-roots', 'current-continuation-marks', + 'current-contract-region', 'current-custodian', 'current-directory', + 'current-directory-for-user', 'current-drive', + 'current-environment-variables', 'current-error-port', 'current-eval', + 'current-evt-pseudo-random-generator', + 'current-force-delete-permissions', 'current-future', + 'current-gc-milliseconds', 'current-get-interaction-input-port', + 'current-inexact-milliseconds', 'current-input-port', + 'current-inspector', 'current-library-collection-links', + 'current-library-collection-paths', 'current-load', + 'current-load-extension', 'current-load-relative-directory', + 'current-load/use-compiled', 'current-locale', 'current-logger', + 'current-memory-use', 'current-milliseconds', + 'current-module-declare-name', 
'current-module-declare-source', + 'current-module-name-resolver', 'current-module-path-for-load', + 'current-namespace', 'current-output-port', 'current-parameterization', + 'current-plumber', 'current-preserved-thread-cell-values', + 'current-print', 'current-process-milliseconds', 'current-prompt-read', + 'current-pseudo-random-generator', 'current-read-interaction', + 'current-reader-guard', 'current-readtable', 'current-seconds', + 'current-security-guard', 'current-subprocess-custodian-mode', + 'current-thread', 'current-thread-group', + 'current-thread-initial-stack-size', + 'current-write-relative-directory', 'curry', 'curryr', + 'custodian-box-value', 'custodian-box?', 'custodian-limit-memory', + 'custodian-managed-list', 'custodian-memory-accounting-available?', + 'custodian-require-memory', 'custodian-shutdown-all', 'custodian?', + 'custom-print-quotable-accessor', 'custom-print-quotable?', + 'custom-write-accessor', 'custom-write-property-proc', 'custom-write?', + 'date', 'date*', 'date*-nanosecond', 'date*-time-zone-name', 'date*?', + 'date-day', 'date-dst?', 'date-hour', 'date-minute', 'date-month', + 'date-second', 'date-time-zone-offset', 'date-week-day', 'date-year', + 'date-year-day', 'date?', 'datum->syntax', 'datum-intern-literal', + 'default-continuation-prompt-tag', 'degrees->radians', + 'delete-directory', 'delete-directory/files', 'delete-file', + 'denominator', 'dict->list', 'dict-can-functional-set?', + 'dict-can-remove-keys?', 'dict-clear', 'dict-clear!', 'dict-copy', + 'dict-count', 'dict-empty?', 'dict-for-each', 'dict-has-key?', + 'dict-implements/c', 'dict-implements?', 'dict-iter-contract', + 'dict-iterate-first', 'dict-iterate-key', 'dict-iterate-next', + 'dict-iterate-value', 'dict-key-contract', 'dict-keys', 'dict-map', + 'dict-mutable?', 'dict-ref', 'dict-ref!', 'dict-remove', + 'dict-remove!', 'dict-set', 'dict-set!', 'dict-set*', 'dict-set*!', + 'dict-update', 'dict-update!', 'dict-value-contract', 'dict-values', + 'dict?', 
'directory-exists?', 'directory-list', 'disjoin', 'display', + 'display-lines', 'display-lines-to-file', 'display-to-file', + 'displayln', 'double-flonum?', 'drop', 'drop-common-prefix', + 'drop-right', 'dropf', 'dropf-right', 'dump-memory-stats', + 'dup-input-port', 'dup-output-port', 'dynamic->*', 'dynamic-get-field', + 'dynamic-object/c', 'dynamic-place', 'dynamic-place*', + 'dynamic-require', 'dynamic-require-for-syntax', 'dynamic-send', + 'dynamic-set-field!', 'dynamic-wind', 'eighth', 'empty', + 'empty-sequence', 'empty-stream', 'empty?', + 'environment-variables-copy', 'environment-variables-names', + 'environment-variables-ref', 'environment-variables-set!', + 'environment-variables?', 'eof', 'eof-evt', 'eof-object?', + 'ephemeron-value', 'ephemeron?', 'eprintf', 'eq-contract-val', + 'eq-contract?', 'eq-hash-code', 'eq?', 'equal-contract-val', + 'equal-contract?', 'equal-hash-code', 'equal-secondary-hash-code', + 'equal<%>', 'equal?', 'equal?/recur', 'eqv-hash-code', 'eqv?', 'error', + 'error-display-handler', 'error-escape-handler', + 'error-print-context-length', 'error-print-source-location', + 'error-print-width', 'error-value->string-handler', 'eval', + 'eval-jit-enabled', 'eval-syntax', 'even?', 'evt/c', 'evt?', + 'exact->inexact', 'exact-ceiling', 'exact-floor', 'exact-integer?', + 'exact-nonnegative-integer?', 'exact-positive-integer?', 'exact-round', + 'exact-truncate', 'exact?', 'executable-yield-handler', 'exit', + 'exit-handler', 'exn', 'exn-continuation-marks', 'exn-message', + 'exn:break', 'exn:break-continuation', 'exn:break:hang-up', + 'exn:break:hang-up?', 'exn:break:terminate', 'exn:break:terminate?', + 'exn:break?', 'exn:fail', 'exn:fail:contract', + 'exn:fail:contract:arity', 'exn:fail:contract:arity?', + 'exn:fail:contract:blame', 'exn:fail:contract:blame-object', + 'exn:fail:contract:blame?', 'exn:fail:contract:continuation', + 'exn:fail:contract:continuation?', 'exn:fail:contract:divide-by-zero', + 'exn:fail:contract:divide-by-zero?', 
+ 'exn:fail:contract:non-fixnum-result', + 'exn:fail:contract:non-fixnum-result?', 'exn:fail:contract:variable', + 'exn:fail:contract:variable-id', 'exn:fail:contract:variable?', + 'exn:fail:contract?', 'exn:fail:filesystem', + 'exn:fail:filesystem:errno', 'exn:fail:filesystem:errno-errno', + 'exn:fail:filesystem:errno?', 'exn:fail:filesystem:exists', + 'exn:fail:filesystem:exists?', 'exn:fail:filesystem:missing-module', + 'exn:fail:filesystem:missing-module-path', + 'exn:fail:filesystem:missing-module?', 'exn:fail:filesystem:version', + 'exn:fail:filesystem:version?', 'exn:fail:filesystem?', + 'exn:fail:network', 'exn:fail:network:errno', + 'exn:fail:network:errno-errno', 'exn:fail:network:errno?', + 'exn:fail:network?', 'exn:fail:object', 'exn:fail:object?', + 'exn:fail:out-of-memory', 'exn:fail:out-of-memory?', 'exn:fail:read', + 'exn:fail:read-srclocs', 'exn:fail:read:eof', 'exn:fail:read:eof?', + 'exn:fail:read:non-char', 'exn:fail:read:non-char?', 'exn:fail:read?', + 'exn:fail:syntax', 'exn:fail:syntax-exprs', + 'exn:fail:syntax:missing-module', + 'exn:fail:syntax:missing-module-path', + 'exn:fail:syntax:missing-module?', 'exn:fail:syntax:unbound', + 'exn:fail:syntax:unbound?', 'exn:fail:syntax?', 'exn:fail:unsupported', + 'exn:fail:unsupported?', 'exn:fail:user', 'exn:fail:user?', + 'exn:fail?', 'exn:misc:match?', 'exn:missing-module-accessor', + 'exn:missing-module?', 'exn:srclocs-accessor', 'exn:srclocs?', 'exn?', + 'exp', 'expand', 'expand-once', 'expand-syntax', 'expand-syntax-once', + 'expand-syntax-to-top-form', 'expand-to-top-form', 'expand-user-path', + 'explode-path', 'expt', 'externalizable<%>', 'failure-result/c', + 'false?', 'field-names', 'fifth', 'file->bytes', 'file->bytes-lines', + 'file->lines', 'file->list', 'file->string', 'file->value', + 'file-exists?', 'file-name-from-path', 'file-or-directory-identity', + 'file-or-directory-modify-seconds', 'file-or-directory-permissions', + 'file-position', 'file-position*', 'file-size', + 
'file-stream-buffer-mode', 'file-stream-port?', 'file-truncate', + 'filename-extension', 'filesystem-change-evt', + 'filesystem-change-evt-cancel', 'filesystem-change-evt?', + 'filesystem-root-list', 'filter', 'filter-map', 'filter-not', + 'filter-read-input-port', 'find-executable-path', 'find-files', + 'find-library-collection-links', 'find-library-collection-paths', + 'find-relative-path', 'find-system-path', 'findf', 'first', + 'first-or/c', 'fixnum?', 'flat-contract', 'flat-contract-predicate', + 'flat-contract-property?', 'flat-contract?', 'flat-named-contract', + 'flatten', 'floating-point-bytes->real', 'flonum?', 'floor', + 'flush-output', 'fold-files', 'foldl', 'foldr', 'for-each', 'force', + 'format', 'fourth', 'fprintf', 'free-identifier=?', + 'free-label-identifier=?', 'free-template-identifier=?', + 'free-transformer-identifier=?', 'fsemaphore-count', 'fsemaphore-post', + 'fsemaphore-try-wait?', 'fsemaphore-wait', 'fsemaphore?', 'future', + 'future?', 'futures-enabled?', 'gcd', 'generate-member-key', + 'generate-temporaries', 'generic-set?', 'generic?', 'gensym', + 'get-output-bytes', 'get-output-string', 'get-preference', + 'get/build-late-neg-projection', 'get/build-val-first-projection', + 'getenv', 'global-port-print-handler', 'group-by', 'group-execute-bit', + 'group-read-bit', 'group-write-bit', 'guard-evt', 'handle-evt', + 'handle-evt?', 'has-blame?', 'has-contract?', 'hash', 'hash->list', + 'hash-clear', 'hash-clear!', 'hash-copy', 'hash-copy-clear', + 'hash-count', 'hash-empty?', 'hash-eq?', 'hash-equal?', 'hash-eqv?', + 'hash-for-each', 'hash-has-key?', 'hash-iterate-first', + 'hash-iterate-key', 'hash-iterate-key+value', 'hash-iterate-next', + 'hash-iterate-pair', 'hash-iterate-value', 'hash-keys', 'hash-map', + 'hash-placeholder?', 'hash-ref', 'hash-ref!', 'hash-remove', + 'hash-remove!', 'hash-set', 'hash-set!', 'hash-set*', 'hash-set*!', + 'hash-update', 'hash-update!', 'hash-values', 'hash-weak?', 'hash/c', + 'hash?', 'hasheq', 
'hasheqv', 'identifier-binding', + 'identifier-binding-symbol', 'identifier-label-binding', + 'identifier-prune-lexical-context', + 'identifier-prune-to-source-module', + 'identifier-remove-from-definition-context', + 'identifier-template-binding', 'identifier-transformer-binding', + 'identifier?', 'identity', 'if/c', 'imag-part', 'immutable?', + 'impersonate-box', 'impersonate-channel', + 'impersonate-continuation-mark-key', 'impersonate-hash', + 'impersonate-hash-set', 'impersonate-procedure', + 'impersonate-procedure*', 'impersonate-prompt-tag', + 'impersonate-struct', 'impersonate-vector', 'impersonator-contract?', + 'impersonator-ephemeron', 'impersonator-of?', + 'impersonator-prop:application-mark', 'impersonator-prop:blame', + 'impersonator-prop:contracted', + 'impersonator-property-accessor-procedure?', 'impersonator-property?', + 'impersonator?', 'implementation?', 'implementation?/c', 'in-bytes', + 'in-bytes-lines', 'in-combinations', 'in-cycle', 'in-dict', + 'in-dict-keys', 'in-dict-pairs', 'in-dict-values', 'in-directory', + 'in-hash', 'in-hash-keys', 'in-hash-pairs', 'in-hash-values', + 'in-immutable-hash', 'in-immutable-hash-keys', + 'in-immutable-hash-pairs', 'in-immutable-hash-values', + 'in-immutable-set', 'in-indexed', 'in-input-port-bytes', + 'in-input-port-chars', 'in-lines', 'in-list', 'in-mlist', + 'in-mutable-hash', 'in-mutable-hash-keys', 'in-mutable-hash-pairs', + 'in-mutable-hash-values', 'in-mutable-set', 'in-naturals', + 'in-parallel', 'in-permutations', 'in-port', 'in-producer', 'in-range', + 'in-sequences', 'in-set', 'in-slice', 'in-stream', 'in-string', + 'in-syntax', 'in-value', 'in-values*-sequence', 'in-values-sequence', + 'in-vector', 'in-weak-hash', 'in-weak-hash-keys', 'in-weak-hash-pairs', + 'in-weak-hash-values', 'in-weak-set', 'inexact->exact', + 'inexact-real?', 'inexact?', 'infinite?', 'input-port-append', + 'input-port?', 'inspector?', 'instanceof/c', 'integer->char', + 'integer->integer-bytes', 'integer-bytes->integer', 
'integer-in', + 'integer-length', 'integer-sqrt', 'integer-sqrt/remainder', 'integer?', + 'interface->method-names', 'interface-extension?', 'interface?', + 'internal-definition-context-binding-identifiers', + 'internal-definition-context-introduce', + 'internal-definition-context-seal', 'internal-definition-context?', + 'is-a?', 'is-a?/c', 'keyword->string', 'keyword-apply', 'keywordbytes', 'list->mutable-set', + 'list->mutable-seteq', 'list->mutable-seteqv', 'list->set', + 'list->seteq', 'list->seteqv', 'list->string', 'list->vector', + 'list->weak-set', 'list->weak-seteq', 'list->weak-seteqv', + 'list-contract?', 'list-prefix?', 'list-ref', 'list-set', 'list-tail', + 'list-update', 'list/c', 'list?', 'listen-port-number?', 'listof', + 'load', 'load-extension', 'load-on-demand-enabled', 'load-relative', + 'load-relative-extension', 'load/cd', 'load/use-compiled', + 'local-expand', 'local-expand/capture-lifts', + 'local-transformer-expand', 'local-transformer-expand/capture-lifts', + 'locale-string-encoding', 'log', 'log-all-levels', 'log-level-evt', + 'log-level?', 'log-max-level', 'log-message', 'log-receiver?', + 'logger-name', 'logger?', 'magnitude', 'make-arity-at-least', + 'make-base-empty-namespace', 'make-base-namespace', 'make-bytes', + 'make-channel', 'make-chaperone-contract', + 'make-continuation-mark-key', 'make-continuation-prompt-tag', + 'make-contract', 'make-custodian', 'make-custodian-box', + 'make-custom-hash', 'make-custom-hash-types', 'make-custom-set', + 'make-custom-set-types', 'make-date', 'make-date*', + 'make-derived-parameter', 'make-directory', 'make-directory*', + 'make-do-sequence', 'make-empty-namespace', + 'make-environment-variables', 'make-ephemeron', 'make-exn', + 'make-exn:break', 'make-exn:break:hang-up', 'make-exn:break:terminate', + 'make-exn:fail', 'make-exn:fail:contract', + 'make-exn:fail:contract:arity', 'make-exn:fail:contract:blame', + 'make-exn:fail:contract:continuation', + 'make-exn:fail:contract:divide-by-zero', + 
'make-exn:fail:contract:non-fixnum-result', + 'make-exn:fail:contract:variable', 'make-exn:fail:filesystem', + 'make-exn:fail:filesystem:errno', 'make-exn:fail:filesystem:exists', + 'make-exn:fail:filesystem:missing-module', + 'make-exn:fail:filesystem:version', 'make-exn:fail:network', + 'make-exn:fail:network:errno', 'make-exn:fail:object', + 'make-exn:fail:out-of-memory', 'make-exn:fail:read', + 'make-exn:fail:read:eof', 'make-exn:fail:read:non-char', + 'make-exn:fail:syntax', 'make-exn:fail:syntax:missing-module', + 'make-exn:fail:syntax:unbound', 'make-exn:fail:unsupported', + 'make-exn:fail:user', 'make-file-or-directory-link', + 'make-flat-contract', 'make-fsemaphore', 'make-generic', + 'make-handle-get-preference-locked', 'make-hash', + 'make-hash-placeholder', 'make-hasheq', 'make-hasheq-placeholder', + 'make-hasheqv', 'make-hasheqv-placeholder', + 'make-immutable-custom-hash', 'make-immutable-hash', + 'make-immutable-hasheq', 'make-immutable-hasheqv', + 'make-impersonator-property', 'make-input-port', + 'make-input-port/read-to-peek', 'make-inspector', + 'make-keyword-procedure', 'make-known-char-range-list', + 'make-limited-input-port', 'make-list', 'make-lock-file-name', + 'make-log-receiver', 'make-logger', 'make-mixin-contract', + 'make-mutable-custom-set', 'make-none/c', 'make-object', + 'make-output-port', 'make-parameter', 'make-parent-directory*', + 'make-phantom-bytes', 'make-pipe', 'make-pipe-with-specials', + 'make-placeholder', 'make-plumber', 'make-polar', 'make-prefab-struct', + 'make-primitive-class', 'make-proj-contract', + 'make-pseudo-random-generator', 'make-reader-graph', 'make-readtable', + 'make-rectangular', 'make-rename-transformer', + 'make-resolved-module-path', 'make-security-guard', 'make-semaphore', + 'make-set!-transformer', 'make-shared-bytes', 'make-sibling-inspector', + 'make-special-comment', 'make-srcloc', 'make-string', + 'make-struct-field-accessor', 'make-struct-field-mutator', + 'make-struct-type', 
'make-struct-type-property', + 'make-syntax-delta-introducer', 'make-syntax-introducer', + 'make-temporary-file', 'make-tentative-pretty-print-output-port', + 'make-thread-cell', 'make-thread-group', 'make-vector', + 'make-weak-box', 'make-weak-custom-hash', 'make-weak-custom-set', + 'make-weak-hash', 'make-weak-hasheq', 'make-weak-hasheqv', + 'make-will-executor', 'map', 'match-equality-test', + 'matches-arity-exactly?', 'max', 'mcar', 'mcdr', 'mcons', 'member', + 'member-name-key-hash-code', 'member-name-key=?', 'member-name-key?', + 'memf', 'memq', 'memv', 'merge-input', 'method-in-interface?', 'min', + 'mixin-contract', 'module->exports', 'module->imports', + 'module->language-info', 'module->namespace', + 'module-compiled-cross-phase-persistent?', 'module-compiled-exports', + 'module-compiled-imports', 'module-compiled-language-info', + 'module-compiled-name', 'module-compiled-submodules', + 'module-declared?', 'module-path-index-join', + 'module-path-index-resolve', 'module-path-index-split', + 'module-path-index-submodule', 'module-path-index?', 'module-path?', + 'module-predefined?', 'module-provide-protected?', 'modulo', 'mpair?', + 'mutable-set', 'mutable-seteq', 'mutable-seteqv', 'n->th', + 'nack-guard-evt', 'namespace-anchor->empty-namespace', + 'namespace-anchor->namespace', 'namespace-anchor?', + 'namespace-attach-module', 'namespace-attach-module-declaration', + 'namespace-base-phase', 'namespace-mapped-symbols', + 'namespace-module-identifier', 'namespace-module-registry', + 'namespace-require', 'namespace-require/constant', + 'namespace-require/copy', 'namespace-require/expansion-time', + 'namespace-set-variable-value!', 'namespace-symbol->identifier', + 'namespace-syntax-introduce', 'namespace-undefine-variable!', + 'namespace-unprotect-module', 'namespace-variable-value', 'namespace?', + 'nan?', 'natural-number/c', 'negate', 'negative?', 'never-evt', + 'new-∀/c', 'new-∃/c', 'newline', 'ninth', 'non-empty-listof', + 'non-empty-string?', 'none/c', 
'normal-case-path', 'normalize-arity', + 'normalize-path', 'normalized-arity?', 'not', 'not/c', 'null', 'null?', + 'number->string', 'number?', 'numerator', 'object%', 'object->vector', + 'object-info', 'object-interface', 'object-method-arity-includes?', + 'object-name', 'object-or-false=?', 'object=?', 'object?', 'odd?', + 'one-of/c', 'open-input-bytes', 'open-input-file', + 'open-input-output-file', 'open-input-string', 'open-output-bytes', + 'open-output-file', 'open-output-nowhere', 'open-output-string', + 'or/c', 'order-of-magnitude', 'ormap', 'other-execute-bit', + 'other-read-bit', 'other-write-bit', 'output-port?', 'pair?', + 'parameter-procedure=?', 'parameter/c', 'parameter?', + 'parameterization?', 'parse-command-line', 'partition', 'path->bytes', + 'path->complete-path', 'path->directory-path', 'path->string', + 'path-add-suffix', 'path-convention-type', 'path-element->bytes', + 'path-element->string', 'path-element?', 'path-for-some-system?', + 'path-list-string->path-list', 'path-only', 'path-replace-suffix', + 'path-string?', 'pathbytes', 'port->bytes-lines', 'port->lines', + 'port->list', 'port->string', 'port-closed-evt', 'port-closed?', + 'port-commit-peeked', 'port-count-lines!', 'port-count-lines-enabled', + 'port-counts-lines?', 'port-display-handler', 'port-file-identity', + 'port-file-unlock', 'port-next-location', 'port-number?', + 'port-print-handler', 'port-progress-evt', + 'port-provides-progress-evts?', 'port-read-handler', + 'port-try-file-lock?', 'port-write-handler', 'port-writes-atomic?', + 'port-writes-special?', 'port?', 'positive?', 'predicate/c', + 'prefab-key->struct-type', 'prefab-key?', 'prefab-struct-key', + 'preferences-lock-file-mode', 'pregexp', 'pregexp?', 'pretty-display', + 'pretty-format', 'pretty-print', 'pretty-print-.-symbol-without-bars', + 'pretty-print-abbreviate-read-macros', 'pretty-print-columns', + 'pretty-print-current-style-table', 'pretty-print-depth', + 'pretty-print-exact-as-decimal', 
'pretty-print-extend-style-table', + 'pretty-print-handler', 'pretty-print-newline', + 'pretty-print-post-print-hook', 'pretty-print-pre-print-hook', + 'pretty-print-print-hook', 'pretty-print-print-line', + 'pretty-print-remap-stylable', 'pretty-print-show-inexactness', + 'pretty-print-size-hook', 'pretty-print-style-table?', + 'pretty-printing', 'pretty-write', 'primitive-closure?', + 'primitive-result-arity', 'primitive?', 'print', 'print-as-expression', + 'print-boolean-long-form', 'print-box', 'print-graph', + 'print-hash-table', 'print-mpair-curly-braces', + 'print-pair-curly-braces', 'print-reader-abbreviations', + 'print-struct', 'print-syntax-width', 'print-unreadable', + 'print-vector-length', 'printable/c', 'printable<%>', 'printf', + 'println', 'procedure->method', 'procedure-arity', + 'procedure-arity-includes/c', 'procedure-arity-includes?', + 'procedure-arity?', 'procedure-closure-contents-eq?', + 'procedure-extract-target', 'procedure-keywords', + 'procedure-reduce-arity', 'procedure-reduce-keyword-arity', + 'procedure-rename', 'procedure-result-arity', 'procedure-specialize', + 'procedure-struct-type?', 'procedure?', 'process', 'process*', + 'process*/ports', 'process/ports', 'processor-count', 'progress-evt?', + 'promise-forced?', 'promise-running?', 'promise/c', 'promise/name?', + 'promise?', 'prop:arity-string', 'prop:arrow-contract', + 'prop:arrow-contract-get-info', 'prop:arrow-contract?', 'prop:blame', + 'prop:chaperone-contract', 'prop:checked-procedure', 'prop:contract', + 'prop:contracted', 'prop:custom-print-quotable', 'prop:custom-write', + 'prop:dict', 'prop:dict/contract', 'prop:equal+hash', 'prop:evt', + 'prop:exn:missing-module', 'prop:exn:srclocs', + 'prop:expansion-contexts', 'prop:flat-contract', + 'prop:impersonator-of', 'prop:input-port', + 'prop:liberal-define-context', 'prop:object-name', + 'prop:opt-chaperone-contract', 'prop:opt-chaperone-contract-get-test', + 'prop:opt-chaperone-contract?', 'prop:orc-contract', + 
'prop:orc-contract-get-subcontracts', 'prop:orc-contract?', + 'prop:output-port', 'prop:place-location', 'prop:procedure', + 'prop:recursive-contract', 'prop:recursive-contract-unroll', + 'prop:recursive-contract?', 'prop:rename-transformer', 'prop:sequence', + 'prop:set!-transformer', 'prop:stream', 'proper-subset?', + 'pseudo-random-generator->vector', 'pseudo-random-generator-vector?', + 'pseudo-random-generator?', 'put-preferences', 'putenv', 'quotient', + 'quotient/remainder', 'radians->degrees', 'raise', + 'raise-argument-error', 'raise-arguments-error', 'raise-arity-error', + 'raise-blame-error', 'raise-contract-error', 'raise-mismatch-error', + 'raise-not-cons-blame-error', 'raise-range-error', + 'raise-result-error', 'raise-syntax-error', 'raise-type-error', + 'raise-user-error', 'random', 'random-seed', 'range', 'rational?', + 'rationalize', 'read', 'read-accept-bar-quote', 'read-accept-box', + 'read-accept-compiled', 'read-accept-dot', 'read-accept-graph', + 'read-accept-infix-dot', 'read-accept-lang', 'read-accept-quasiquote', + 'read-accept-reader', 'read-byte', 'read-byte-or-special', + 'read-bytes', 'read-bytes!', 'read-bytes!-evt', 'read-bytes-avail!', + 'read-bytes-avail!*', 'read-bytes-avail!-evt', + 'read-bytes-avail!/enable-break', 'read-bytes-evt', 'read-bytes-line', + 'read-bytes-line-evt', 'read-case-sensitive', 'read-cdot', 'read-char', + 'read-char-or-special', 'read-curly-brace-as-paren', + 'read-curly-brace-with-tag', 'read-decimal-as-inexact', + 'read-eval-print-loop', 'read-language', 'read-line', 'read-line-evt', + 'read-on-demand-source', 'read-square-bracket-as-paren', + 'read-square-bracket-with-tag', 'read-string', 'read-string!', + 'read-string!-evt', 'read-string-evt', 'read-syntax', + 'read-syntax/recursive', 'read/recursive', 'readtable-mapping', + 'readtable?', 'real->decimal-string', 'real->double-flonum', + 'real->floating-point-bytes', 'real->single-flonum', 'real-in', + 'real-part', 'real?', 'reencode-input-port', 
'reencode-output-port', + 'regexp', 'regexp-match', 'regexp-match*', 'regexp-match-evt', + 'regexp-match-exact?', 'regexp-match-peek', + 'regexp-match-peek-immediate', 'regexp-match-peek-positions', + 'regexp-match-peek-positions*', + 'regexp-match-peek-positions-immediate', + 'regexp-match-peek-positions-immediate/end', + 'regexp-match-peek-positions/end', 'regexp-match-positions', + 'regexp-match-positions*', 'regexp-match-positions/end', + 'regexp-match/end', 'regexp-match?', 'regexp-max-lookbehind', + 'regexp-quote', 'regexp-replace', 'regexp-replace*', + 'regexp-replace-quote', 'regexp-replaces', 'regexp-split', + 'regexp-try-match', 'regexp?', 'relative-path?', 'relocate-input-port', + 'relocate-output-port', 'remainder', 'remf', 'remf*', 'remove', + 'remove*', 'remove-duplicates', 'remq', 'remq*', 'remv', 'remv*', + 'rename-contract', 'rename-file-or-directory', + 'rename-transformer-target', 'rename-transformer?', 'replace-evt', + 'reroot-path', 'resolve-path', 'resolved-module-path-name', + 'resolved-module-path?', 'rest', 'reverse', 'round', 'second', + 'seconds->date', 'security-guard?', 'semaphore-peek-evt', + 'semaphore-peek-evt?', 'semaphore-post', 'semaphore-try-wait?', + 'semaphore-wait', 'semaphore-wait/enable-break', 'semaphore?', + 'sequence->list', 'sequence->stream', 'sequence-add-between', + 'sequence-andmap', 'sequence-append', 'sequence-count', + 'sequence-filter', 'sequence-fold', 'sequence-for-each', + 'sequence-generate', 'sequence-generate*', 'sequence-length', + 'sequence-map', 'sequence-ormap', 'sequence-ref', 'sequence-tail', + 'sequence/c', 'sequence?', 'set', 'set!-transformer-procedure', + 'set!-transformer?', 'set->list', 'set->stream', 'set-add', 'set-add!', + 'set-box!', 'set-clear', 'set-clear!', 'set-copy', 'set-copy-clear', + 'set-count', 'set-empty?', 'set-eq?', 'set-equal?', 'set-eqv?', + 'set-first', 'set-for-each', 'set-implements/c', 'set-implements?', + 'set-intersect', 'set-intersect!', 'set-map', 'set-mcar!', 
'set-mcdr!', + 'set-member?', 'set-mutable?', 'set-phantom-bytes!', + 'set-port-next-location!', 'set-remove', 'set-remove!', 'set-rest', + 'set-some-basic-contracts!', 'set-subtract', 'set-subtract!', + 'set-symmetric-difference', 'set-symmetric-difference!', 'set-union', + 'set-union!', 'set-weak?', 'set/c', 'set=?', 'set?', 'seteq', 'seteqv', + 'seventh', 'sgn', 'shared-bytes', 'shell-execute', 'shrink-path-wrt', + 'shuffle', 'simple-form-path', 'simplify-path', 'sin', + 'single-flonum?', 'sinh', 'sixth', 'skip-projection-wrapper?', 'sleep', + 'some-system-path->string', 'sort', 'special-comment-value', + 'special-comment?', 'special-filter-input-port', 'split-at', + 'split-at-right', 'split-common-prefix', 'split-path', 'splitf-at', + 'splitf-at-right', 'sqr', 'sqrt', 'srcloc', 'srcloc->string', + 'srcloc-column', 'srcloc-line', 'srcloc-position', 'srcloc-source', + 'srcloc-span', 'srcloc?', 'stop-after', 'stop-before', 'stream->list', + 'stream-add-between', 'stream-andmap', 'stream-append', 'stream-count', + 'stream-empty?', 'stream-filter', 'stream-first', 'stream-fold', + 'stream-for-each', 'stream-length', 'stream-map', 'stream-ormap', + 'stream-ref', 'stream-rest', 'stream-tail', 'stream/c', 'stream?', + 'string', 'string->bytes/latin-1', 'string->bytes/locale', + 'string->bytes/utf-8', 'string->immutable-string', 'string->keyword', + 'string->list', 'string->number', 'string->path', + 'string->path-element', 'string->some-system-path', 'string->symbol', + 'string->uninterned-symbol', 'string->unreadable-symbol', + 'string-append', 'string-append*', 'string-ci<=?', 'string-ci=?', 'string-ci>?', 'string-contains?', + 'string-copy', 'string-copy!', 'string-downcase', + 'string-environment-variable-name?', 'string-fill!', 'string-foldcase', + 'string-join', 'string-len/c', 'string-length', 'string-locale-ci?', 'string-locale-downcase', + 'string-locale-upcase', 'string-locale?', 'string-no-nuls?', 'string-normalize-nfc', + 'string-normalize-nfd', 
'string-normalize-nfkc', + 'string-normalize-nfkd', 'string-normalize-spaces', 'string-port?', + 'string-prefix?', 'string-ref', 'string-replace', 'string-set!', + 'string-split', 'string-suffix?', 'string-titlecase', 'string-trim', + 'string-upcase', 'string-utf-8-length', 'string<=?', 'string=?', 'string>?', 'string?', 'struct->vector', + 'struct-accessor-procedure?', 'struct-constructor-procedure?', + 'struct-info', 'struct-mutator-procedure?', + 'struct-predicate-procedure?', 'struct-type-info', + 'struct-type-make-constructor', 'struct-type-make-predicate', + 'struct-type-property-accessor-procedure?', 'struct-type-property/c', + 'struct-type-property?', 'struct-type?', 'struct:arity-at-least', + 'struct:arrow-contract-info', 'struct:date', 'struct:date*', + 'struct:exn', 'struct:exn:break', 'struct:exn:break:hang-up', + 'struct:exn:break:terminate', 'struct:exn:fail', + 'struct:exn:fail:contract', 'struct:exn:fail:contract:arity', + 'struct:exn:fail:contract:blame', + 'struct:exn:fail:contract:continuation', + 'struct:exn:fail:contract:divide-by-zero', + 'struct:exn:fail:contract:non-fixnum-result', + 'struct:exn:fail:contract:variable', 'struct:exn:fail:filesystem', + 'struct:exn:fail:filesystem:errno', + 'struct:exn:fail:filesystem:exists', + 'struct:exn:fail:filesystem:missing-module', + 'struct:exn:fail:filesystem:version', 'struct:exn:fail:network', + 'struct:exn:fail:network:errno', 'struct:exn:fail:object', + 'struct:exn:fail:out-of-memory', 'struct:exn:fail:read', + 'struct:exn:fail:read:eof', 'struct:exn:fail:read:non-char', + 'struct:exn:fail:syntax', 'struct:exn:fail:syntax:missing-module', + 'struct:exn:fail:syntax:unbound', 'struct:exn:fail:unsupported', + 'struct:exn:fail:user', 'struct:srcloc', + 'struct:wrapped-extra-arg-arrow', 'struct?', 'sub1', 'subbytes', + 'subclass?', 'subclass?/c', 'subprocess', 'subprocess-group-enabled', + 'subprocess-kill', 'subprocess-pid', 'subprocess-status', + 'subprocess-wait', 'subprocess?', 'subset?', 
'substring', 'suggest/c', + 'symbol->string', 'symbol-interned?', 'symbol-unreadable?', 'symboldatum', + 'syntax->list', 'syntax-arm', 'syntax-column', 'syntax-debug-info', + 'syntax-disarm', 'syntax-e', 'syntax-line', + 'syntax-local-bind-syntaxes', 'syntax-local-certifier', + 'syntax-local-context', 'syntax-local-expand-expression', + 'syntax-local-get-shadower', 'syntax-local-identifier-as-binding', + 'syntax-local-introduce', 'syntax-local-lift-context', + 'syntax-local-lift-expression', 'syntax-local-lift-module', + 'syntax-local-lift-module-end-declaration', + 'syntax-local-lift-provide', 'syntax-local-lift-require', + 'syntax-local-lift-values-expression', + 'syntax-local-make-definition-context', + 'syntax-local-make-delta-introducer', + 'syntax-local-module-defined-identifiers', + 'syntax-local-module-exports', + 'syntax-local-module-required-identifiers', 'syntax-local-name', + 'syntax-local-phase-level', 'syntax-local-submodules', + 'syntax-local-transforming-module-provides?', 'syntax-local-value', + 'syntax-local-value/immediate', 'syntax-original?', 'syntax-position', + 'syntax-property', 'syntax-property-preserved?', + 'syntax-property-symbol-keys', 'syntax-protect', 'syntax-rearm', + 'syntax-recertify', 'syntax-shift-phase-level', 'syntax-source', + 'syntax-source-module', 'syntax-span', 'syntax-taint', + 'syntax-tainted?', 'syntax-track-origin', + 'syntax-transforming-module-expression?', + 'syntax-transforming-with-lifts?', 'syntax-transforming?', 'syntax/c', + 'syntax?', 'system', 'system*', 'system*/exit-code', + 'system-big-endian?', 'system-idle-evt', 'system-language+country', + 'system-library-subpath', 'system-path-convention-type', 'system-type', + 'system/exit-code', 'tail-marks-match?', 'take', 'take-common-prefix', + 'take-right', 'takef', 'takef-right', 'tan', 'tanh', + 'tcp-abandon-port', 'tcp-accept', 'tcp-accept-evt', + 'tcp-accept-ready?', 'tcp-accept/enable-break', 'tcp-addresses', + 'tcp-close', 'tcp-connect', 
'tcp-connect/enable-break', 'tcp-listen', + 'tcp-listener?', 'tcp-port?', 'tentative-pretty-print-port-cancel', + 'tentative-pretty-print-port-transfer', 'tenth', 'terminal-port?', + 'the-unsupplied-arg', 'third', 'thread', 'thread-cell-ref', + 'thread-cell-set!', 'thread-cell-values?', 'thread-cell?', + 'thread-dead-evt', 'thread-dead?', 'thread-group?', 'thread-receive', + 'thread-receive-evt', 'thread-resume', 'thread-resume-evt', + 'thread-rewind-receive', 'thread-running?', 'thread-send', + 'thread-suspend', 'thread-suspend-evt', 'thread-try-receive', + 'thread-wait', 'thread/suspend-to-kill', 'thread?', 'time-apply', + 'touch', 'transplant-input-port', 'transplant-output-port', 'true', + 'truncate', 'udp-addresses', 'udp-bind!', 'udp-bound?', 'udp-close', + 'udp-connect!', 'udp-connected?', 'udp-multicast-interface', + 'udp-multicast-join-group!', 'udp-multicast-leave-group!', + 'udp-multicast-loopback?', 'udp-multicast-set-interface!', + 'udp-multicast-set-loopback!', 'udp-multicast-set-ttl!', + 'udp-multicast-ttl', 'udp-open-socket', 'udp-receive!', + 'udp-receive!*', 'udp-receive!-evt', 'udp-receive!/enable-break', + 'udp-receive-ready-evt', 'udp-send', 'udp-send*', 'udp-send-evt', + 'udp-send-ready-evt', 'udp-send-to', 'udp-send-to*', 'udp-send-to-evt', + 'udp-send-to/enable-break', 'udp-send/enable-break', 'udp?', 'unbox', + 'uncaught-exception-handler', 'unit?', 'unspecified-dom', + 'unsupplied-arg?', 'use-collection-link-paths', + 'use-compiled-file-paths', 'use-user-specific-search-paths', + 'user-execute-bit', 'user-read-bit', 'user-write-bit', 'value-blame', + 'value-contract', 'values', 'variable-reference->empty-namespace', + 'variable-reference->module-base-phase', + 'variable-reference->module-declaration-inspector', + 'variable-reference->module-path-index', + 'variable-reference->module-source', 'variable-reference->namespace', + 'variable-reference->phase', + 'variable-reference->resolved-module-path', + 'variable-reference-constant?', 
'variable-reference?', 'vector', + 'vector->immutable-vector', 'vector->list', + 'vector->pseudo-random-generator', 'vector->pseudo-random-generator!', + 'vector->values', 'vector-append', 'vector-argmax', 'vector-argmin', + 'vector-copy', 'vector-copy!', 'vector-count', 'vector-drop', + 'vector-drop-right', 'vector-fill!', 'vector-filter', + 'vector-filter-not', 'vector-immutable', 'vector-immutable/c', + 'vector-immutableof', 'vector-length', 'vector-map', 'vector-map!', + 'vector-member', 'vector-memq', 'vector-memv', 'vector-ref', + 'vector-set!', 'vector-set*!', 'vector-set-performance-stats!', + 'vector-split-at', 'vector-split-at-right', 'vector-take', + 'vector-take-right', 'vector/c', 'vector?', 'vectorof', 'version', + 'void', 'void?', 'weak-box-value', 'weak-box?', 'weak-set', + 'weak-seteq', 'weak-seteqv', 'will-execute', 'will-executor?', + 'will-register', 'will-try-execute', 'with-input-from-bytes', + 'with-input-from-file', 'with-input-from-string', + 'with-output-to-bytes', 'with-output-to-file', 'with-output-to-string', + 'would-be-future', 'wrap-evt', 'wrapped-extra-arg-arrow', + 'wrapped-extra-arg-arrow-extra-neg-party-argument', + 'wrapped-extra-arg-arrow-real-func', 'wrapped-extra-arg-arrow?', + 'writable<%>', 'write', 'write-byte', 'write-bytes', + 'write-bytes-avail', 'write-bytes-avail*', 'write-bytes-avail-evt', + 'write-bytes-avail/enable-break', 'write-char', 'write-special', + 'write-special-avail*', 'write-special-evt', 'write-string', + 'write-to-file', 'writeln', 'xor', 'zero?', '~.a', '~.s', '~.v', '~a', + '~e', '~r', '~s', '~v' ) _opening_parenthesis = r'[([{]' @@ -1270,7 +1270,7 @@ class RacketLexer(RegexLexer): ], 'datum': [ (r'(?s)#;|#![ /]([^\\\n]|\\.)*', Comment), - (u';[^\\n\\r\x85\u2028\u2029]*', Comment.Single), + (r';[^\n\r\x85\u2028\u2029]*', Comment.Single), (r'#\|', Comment.Multiline, 'block-comment'), # Whitespaces @@ -1490,7 +1490,7 @@ class NewLispLexer(RegexLexer): (r'\s+', Text), # strings, symbols and characters - 
(r'"(\\\\|\\"|[^"])*"', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), # braces (r'\{', String, "bracestring"), @@ -1554,7 +1554,7 @@ class EmacsLispLexer(RegexLexer): # Take a deep breath... symbol = r'((?:%s)(?:%s)*)' % (nonmacro, constituent) - macros = set(( + macros = { 'atomic-change-group', 'case', 'block', 'cl-block', 'cl-callf', 'cl-callf2', 'cl-case', 'cl-decf', 'cl-declaim', 'cl-declare', 'cl-define-compiler-macro', 'cl-defmacro', 'cl-defstruct', @@ -1601,17 +1601,17 @@ class EmacsLispLexer(RegexLexer): 'with-tramp-file-property', 'with-tramp-progress-reporter', 'with-wrapper-hook', 'load-time-value', 'locally', 'macrolet', 'progv', 'return-from', - )) + } - special_forms = set(( + special_forms = { 'and', 'catch', 'cond', 'condition-case', 'defconst', 'defvar', 'function', 'if', 'interactive', 'let', 'let*', 'or', 'prog1', 'prog2', 'progn', 'quote', 'save-current-buffer', 'save-excursion', 'save-restriction', 'setq', 'setq-default', 'subr-arity', 'unwind-protect', 'while', - )) + } - builtin_function = set(( + builtin_function = { '%', '*', '+', '-', '/', '/=', '1+', '1-', '<', '<=', '=', '>', '>=', 'Snarf-documentation', 'abort-recursive-edit', 'abs', 'accept-process-output', 'access-file', 'accessible-keymaps', 'acos', @@ -1937,8 +1937,9 @@ class EmacsLispLexer(RegexLexer): 'split-window-internal', 'sqrt', 'standard-case-table', 'standard-category-table', 'standard-syntax-table', 'start-kbd-macro', 'start-process', 'stop-process', 'store-kbd-macro-event', 'string', - 'string-as-multibyte', 'string-as-unibyte', 'string-bytes', - 'string-collate-equalp', 'string-collate-lessp', 'string-equal', + 'string=', 'string<', 'string>', 'string-as-multibyte', + 'string-as-unibyte', 'string-bytes', 'string-collate-equalp', + 'string-collate-lessp', 'string-equal', 'string-greaterp', 'string-lessp', 'string-make-multibyte', 'string-make-unibyte', 'string-match', 'string-to-char', 'string-to-multibyte', 'string-to-number', 'string-to-syntax', 
'string-to-unibyte', @@ -2050,23 +2051,23 @@ class EmacsLispLexer(RegexLexer): 'xw-color-values', 'xw-display-color-p', 'xw-display-color-p', 'yes-or-no-p', 'zlib-available-p', 'zlib-decompress-region', 'forward-point', - )) + } - builtin_function_highlighted = set(( + builtin_function_highlighted = { 'defvaralias', 'provide', 'require', 'with-no-warnings', 'define-widget', 'with-electric-help', 'throw', 'defalias', 'featurep' - )) + } - lambda_list_keywords = set(( + lambda_list_keywords = { '&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional', '&rest', '&whole', - )) + } - error_keywords = set(( + error_keywords = { 'cl-assert', 'cl-check-type', 'error', 'signal', 'user-error', 'warn', - )) + } def get_tokens_unprocessed(self, text): stack = ['root'] @@ -2225,7 +2226,7 @@ class ShenLexer(RegexLexer): BUILTINS_ANYWHERE = ('where', 'skip', '>>', '_', '!', '', '') - MAPPINGS = dict((s, Keyword) for s in DECLARATIONS) + MAPPINGS = {s: Keyword for s in DECLARATIONS} MAPPINGS.update((s, Name.Builtin) for s in BUILTINS) MAPPINGS.update((s, Keyword) for s in SPECIAL_FORMS) @@ -2276,9 +2277,7 @@ def _process_declarations(self, tokens): if self._relevant(token): if opening_paren and token == Keyword and value in self.DECLARATIONS: declaration = value - for index, token, value in \ - self._process_declaration(declaration, tokens): - yield index, token, value + yield from self._process_declaration(declaration, tokens) opening_paren = value == '(' and token == Punctuation def _process_symbols(self, tokens): @@ -2384,7 +2383,7 @@ class CPSALexer(SchemeLexer): # (r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number), # strings, symbols and characters - (r'"(\\\\|\\"|[^"])*"', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), (r"'" + valid_name, String.Symbol), (r"#\\([()/'\"._!§$%& ?=+-]|[a-zA-Z0-9]+)", String.Char), @@ -2597,7 +2596,7 @@ class XtlangLexer(RegexLexer): (r'(#b|#o|#x)[\d.]+', Number), # strings - (r'"(\\\\|\\"|[^"])*"', String), + 
(r'"(\\\\|\\[^\\]|[^"\\])*"', String), # true/false constants (r'(#t|#f)', Name.Constant), @@ -2635,29 +2634,34 @@ class FennelLexer(RegexLexer): # these two lists are taken from fennel-mode.el: # https://gitlab.com/technomancy/fennel-mode - # this list is current as of Fennel version 0.1.0. + # this list is current as of Fennel version 0.6.0. special_forms = ( - u'require-macros', u'eval-compiler', - u'do', u'values', u'if', u'when', u'each', u'for', u'fn', u'lambda', - u'λ', u'set', u'global', u'var', u'local', u'let', u'tset', u'doto', - u'set-forcibly!', u'defn', u'partial', u'while', u'or', u'and', u'true', - u'false', u'nil', u'.', u'+', u'..', u'^', u'-', u'*', u'%', u'/', u'>', - u'<', u'>=', u'<=', u'=', u'~=', u'#', u'...', u':', u'->', u'->>', + 'require-macros', 'eval-compiler', 'doc', 'lua', 'hashfn', + 'macro', 'macros', 'import-macros', 'pick-args', 'pick-values', + 'macroexpand', 'macrodebug', 'do', 'values', 'if', 'when', + 'each', 'for', 'fn', 'lambda', 'λ', 'partial', 'while', + 'set', 'global', 'var', 'local', 'let', 'tset', 'set-forcibly!', + 'doto', 'match', 'or', 'and', 'true', 'false', 'nil', 'not', + 'not=', '.', '+', '..', '^', '-', '*', '%', '/', '>', + '<', '>=', '<=', '=', '...', ':', '->', '->>', '-?>', + '-?>>', 'rshift', 'lshift', 'bor', 'band', 'bnot', 'bxor', + 'with-open', 'length' ) # Might be nicer to use the list from _lua_builtins.py but it's unclear how? 
builtins = ( - u'_G', u'_VERSION', u'arg', u'assert', u'bit32', u'collectgarbage', - u'coroutine', u'debug', u'dofile', u'error', u'getfenv', - u'getmetatable', u'io', u'ipairs', u'load', u'loadfile', u'loadstring', - u'math', u'next', u'os', u'package', u'pairs', u'pcall', u'print', - u'rawequal', u'rawget', u'rawlen', u'rawset', u'require', u'select', - u'setfenv', u'setmetatable', u'string', u'table', u'tonumber', - u'tostring', u'type', u'unpack', u'xpcall' + '_G', '_VERSION', 'arg', 'assert', 'bit32', 'collectgarbage', + 'coroutine', 'debug', 'dofile', 'error', 'getfenv', + 'getmetatable', 'io', 'ipairs', 'load', 'loadfile', 'loadstring', + 'math', 'next', 'os', 'package', 'pairs', 'pcall', 'print', + 'rawequal', 'rawget', 'rawlen', 'rawset', 'require', 'select', + 'setfenv', 'setmetatable', 'string', 'table', 'tonumber', + 'tostring', 'type', 'unpack', 'xpcall' ) - # based on the scheme definition, but disallowing leading digits and commas - valid_name = r'[a-zA-Z_!$%&*+/:<=>?@^~|-][\w!$%&*+/:<=>?@^~|\.-]*' + # based on the scheme definition, but disallowing leading digits and + # commas, and @ is not allowed. 
+ valid_name = r'[a-zA-Z_!$%&*+/:<=>?^~|-][\w!$%&*+/:<=>?^~|\.-]*' tokens = { 'root': [ @@ -2668,8 +2672,7 @@ class FennelLexer(RegexLexer): (r'-?\d+\.\d+', Number.Float), (r'-?\d+', Number.Integer), - (r'"(\\\\|\\"|[^"])*"', String), - (r"'(\\\\|\\'|[^'])*'", String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), # these are technically strings, but it's worth visually # distinguishing them because their intent is different @@ -2689,5 +2692,8 @@ class FennelLexer(RegexLexer): (r'(\(|\))', Punctuation), (r'(\[|\])', Punctuation), (r'(\{|\})', Punctuation), + + # the # symbol is shorthand for a lambda function + (r'#', Punctuation), ] } diff --git a/src/typecode/_vendor/pygments/lexers/make.py b/src/typecode/_vendor/pygments/lexers/make.py index 911ffc1..9fe8eda 100644 --- a/src/typecode/_vendor/pygments/lexers/make.py +++ b/src/typecode/_vendor/pygments/lexers/make.py @@ -5,7 +5,7 @@ Lexers for Makefiles and similar. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -57,8 +57,7 @@ def get_tokens_unprocessed(self, text): ins.append((len(done), [(0, Comment, line)])) else: done += line - for item in do_insertions(ins, lex.get_tokens_unprocessed(done)): - yield item + yield from do_insertions(ins, lex.get_tokens_unprocessed(done)) def analyse_text(text): # Many makefiles have $(BIG_CAPS) style variables @@ -93,8 +92,8 @@ class BaseMakefileLexer(RegexLexer): (r'([\w${}().-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)', bygroups(Name.Variable, Text, Operator, Text, using(BashLexer))), # strings - (r'(?s)"(\\\\|\\.|[^"\\])*"', String.Double), - (r"(?s)'(\\\\|\\.|[^'\\])*'", String.Single), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), # targets (r'([^\n:]+)(:+)([ \t]*)', bygroups(Name.Function, Operator, Text), 'block-header'), @@ -196,7 +195,12 @@ class CMakeLexer(RegexLexer): } def analyse_text(text): - exp = r'^ *CMAKE_MINIMUM_REQUIRED *\( *VERSION *\d(\.\d)* *( FATAL_ERROR)? *\) *$' + exp = ( + r'^[ \t]*CMAKE_MINIMUM_REQUIRED[ \t]*' + r'\([ \t]*VERSION[ \t]*\d+(\.\d+)*[ \t]*' + r'([ \t]FATAL_ERROR)?[ \t]*\)[ \t]*' + r'(#[^\n]*)?$' + ) if re.search(exp, text, flags=re.MULTILINE | re.IGNORECASE): return 0.8 return 0.0 diff --git a/src/typecode/_vendor/pygments/lexers/markup.py b/src/typecode/_vendor/pygments/lexers/markup.py index 0876241..75a3777 100644 --- a/src/typecode/_vendor/pygments/lexers/markup.py +++ b/src/typecode/_vendor/pygments/lexers/markup.py @@ -5,7 +5,7 @@ Lexers for non-HTML markup languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -24,7 +24,7 @@ __all__ = ['BBCodeLexer', 'MoinWikiLexer', 'RstLexer', 'TexLexer', 'GroffLexer', 'MozPreprocHashLexer', 'MozPreprocPercentLexer', 'MozPreprocXulLexer', 'MozPreprocJavascriptLexer', - 'MozPreprocCssLexer', 'MarkdownLexer'] + 'MozPreprocCssLexer', 'MarkdownLexer', 'TiddlyWiki5Lexer'] class BBCodeLexer(RegexLexer): @@ -165,12 +165,11 @@ def _handle_sourcecode(self, match): code += line[indention_size:] else: code += line - for item in do_insertions(ins, lexer.get_tokens_unprocessed(code)): - yield item + yield from do_insertions(ins, lexer.get_tokens_unprocessed(code)) # from docutils.parsers.rst.states - closers = u'\'")]}>\u2019\u201d\xbb!?' - unicode_delimiters = u'\u2010\u2011\u2012\u2013\u2014\u00a0' + closers = '\'")]}>\u2019\u201d\xbb!?' + unicode_delimiters = '\u2010\u2011\u2012\u2013\u2014\u00a0' end_string_suffix = (r'((?=$)|(?=[-/:.,; \n\x00%s%s]))' % (re.escape(unicode_delimiters), re.escape(closers))) @@ -204,7 +203,7 @@ def _handle_sourcecode(self, match): bygroups(Text, Operator, using(this, state='inline'))), # Sourcecode directives (r'^( *\.\.)(\s*)((?:source)?code(?:-block)?)(::)([ \t]*)([^\n]+)' - r'(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\8.*|)\n)+)', + r'(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\8.*)?\n)+)', _handle_sourcecode), # A directive (r'^( *\.\.)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))', @@ -222,15 +221,14 @@ def _handle_sourcecode(self, match): Punctuation, Text, using(this, state='inline'))), # Comments (r'^ *\.\..*(\n( +.*\n|\n)+)?', Comment.Preproc), - # Field list - (r'^( *)(:[a-zA-Z-]+:)(\s*)$', bygroups(Text, Name.Class, Text)), - (r'^( *)(:.*?:)([ \t]+)(.*?)$', - bygroups(Text, Name.Class, Text, Name.Function)), + # Field list marker + (r'^( *)(:(?:\\\\|\\:|[^:\n])+:(?=\s))([ \t]*)', + bygroups(Text, Name.Class, Text)), # Definition list (r'^(\S.*(?\s)(.+\n)', bygroups(Keyword, Generic.Emph)), - # text block - (r'^(```\n)([\w\W]*?)(^```$)', bygroups(String, Text, String)), + # code block fenced by 3 backticks + 
(r'^(\s*```\n[\w\W]*?^\s*```$\n)', String.Backtick), # code block with language - (r'^(```)(\w+)(\n)([\w\W]*?)(^```$)', _handle_codeblock), + (r'^(\s*```)(\w+)(\n)([\w\W]*?)(^\s*```$\n)', _handle_codeblock), include('inline'), ], 'inline': [ # escape (r'\\.', Text), - # italics - (r'(\s)([*_][^*_]+[*_])(\W|\n)', bygroups(Text, Generic.Emph, Text)), - # bold - # warning: the following rule eats internal tags. eg. **foo _bar_ baz** bar is not italics - (r'(\s)((\*\*|__).*\3)((?=\W|\n))', bygroups(Text, Generic.Strong, None, Text)), - # "proper way" (r'(\s)([*_]{2}[^*_]+[*_]{2})((?=\W|\n))', bygroups(Text, Generic.Strong, Text)), - # strikethrough - (r'(\s)(~~[^~]+~~)((?=\W|\n))', bygroups(Text, Generic.Deleted, Text)), # inline code - (r'`[^`]+`', String.Backtick), + (r'([^`]?)(`[^`\n]+`)', bygroups(Text, String.Backtick)), + # warning: the following rules eat outer tags. + # eg. **foo _bar_ baz** => foo and baz are not recognized as bold + # bold fenced by '**' + (r'([^\*]?)(\*\*[^* \n][^*\n]*\*\*)', bygroups(Text, Generic.Strong)), + # bold fenced by '__' + (r'([^_]?)(__[^_ \n][^_\n]*__)', bygroups(Text, Generic.Strong)), + # italics fenced by '*' + (r'([^\*]?)(\*[^* \n][^*\n]*\*)', bygroups(Text, Generic.Emph)), + # italics fenced by '_' + (r'([^_]?)(_[^_ \n][^_\n]*_)', bygroups(Text, Generic.Emph)), + # strikethrough + (r'([^~]?)(~~[^~ \n][^~\n]*~~)', bygroups(Text, Generic.Deleted)), # mentions and topics (twitter and github stuff) (r'[@#][\w/:]+', Name.Entity), # (image?) 
links eg: ![Image of Yaktocat](https://octodex.github.com/images/yaktocat.png) - (r'(!?\[)([^]]+)(\])(\()([^)]+)(\))', bygroups(Text, Name.Tag, Text, Text, Name.Attribute, Text)), + (r'(!?\[)([^]]+)(\])(\()([^)]+)(\))', + bygroups(Text, Name.Tag, Text, Text, Name.Attribute, Text)), # reference-style links, e.g.: # [an example][id] # [id]: http://example.com/ - (r'(\[)([^]]+)(\])(\[)([^]]*)(\])', bygroups(Text, Name.Tag, Text, Text, Name.Label, Text)), - (r'^(\s*\[)([^]]*)(\]:\s*)(.+)', bygroups(Text, Name.Label, Text, Name.Attribute)), + (r'(\[)([^]]+)(\])(\[)([^]]*)(\])', + bygroups(Text, Name.Tag, Text, Text, Name.Label, Text)), + (r'^(\s*\[)([^]]*)(\]:\s*)(.+)', + bygroups(Text, Name.Label, Text, Name.Attribute)), # general text, must come last! (r'[^\\\s]+', Text), @@ -597,3 +603,161 @@ def _handle_codeblock(self, match): def __init__(self, **options): self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True) RegexLexer.__init__(self, **options) + + +class TiddlyWiki5Lexer(RegexLexer): + """ + For `TiddlyWiki5 `_ markup. + + .. versionadded:: 2.7 + """ + name = 'tiddler' + aliases = ['tid'] + filenames = ['*.tid'] + mimetypes = ["text/vnd.tiddlywiki"] + flags = re.MULTILINE + + def _handle_codeblock(self, match): + """ + match args: 1:backticks, 2:lang_name, 3:newline, 4:code, 5:backticks + """ + from typecode._vendor.pygments.lexers import get_lexer_by_name + + # section header + yield match.start(1), String, match.group(1) + yield match.start(2), String, match.group(2) + yield match.start(3), Text, match.group(3) + + # lookup lexer if wanted and existing + lexer = None + if self.handlecodeblocks: + try: + lexer = get_lexer_by_name(match.group(2).strip()) + except ClassNotFound: + pass + code = match.group(4) + + # no lexer for this language. 
handle it like it was a code block + if lexer is None: + yield match.start(4), String, code + return + + yield from do_insertions([], lexer.get_tokens_unprocessed(code)) + + yield match.start(5), String, match.group(5) + + def _handle_cssblock(self, match): + """ + match args: 1:style tag 2:newline, 3:code, 4:closing style tag + """ + from typecode._vendor.pygments.lexers import get_lexer_by_name + + # section header + yield match.start(1), String, match.group(1) + yield match.start(2), String, match.group(2) + + lexer = None + if self.handlecodeblocks: + try: + lexer = get_lexer_by_name('css') + except ClassNotFound: + pass + code = match.group(3) + + # no lexer for this language. handle it like it was a code block + if lexer is None: + yield match.start(3), String, code + return + + yield from do_insertions([], lexer.get_tokens_unprocessed(code)) + + yield match.start(4), String, match.group(4) + + tokens = { + 'root': [ + # title in metadata section + (r'^(title)(:\s)(.+\n)', bygroups(Keyword, Text, Generic.Heading)), + # headings + (r'^(!)([^!].+\n)', bygroups(Generic.Heading, Text)), + (r'^(!{2,6})(.+\n)', bygroups(Generic.Subheading, Text)), + # bulleted or numbered lists or single-line block quotes + # (can be mixed) + (r'^(\s*)([*#>]+)(\s*)(.+\n)', + bygroups(Text, Keyword, Text, using(this, state='inline'))), + # multi-line block quotes + (r'^(<<<.*\n)([\w\W]*?)(^<<<.*$)', bygroups(String, Text, String)), + # table header + (r'^(\|.*?\|h)$', bygroups(Generic.Strong)), + # table footer or caption + (r'^(\|.*?\|[cf])$', bygroups(Generic.Emph)), + # table class + (r'^(\|.*?\|k)$', bygroups(Name.Tag)), + # definitions + (r'^(;.*)$', bygroups(Generic.Strong)), + # text block + (r'^(```\n)([\w\W]*?)(^```$)', bygroups(String, Text, String)), + # code block with language + (r'^(```)(\w+)(\n)([\w\W]*?)(^```$)', _handle_codeblock), + # CSS style block + (r'^($)', _handle_cssblock), + + include('keywords'), + include('inline'), + ], + 'keywords': [ + (words(( + 
'\\define', '\\end', 'caption', 'created', 'modified', 'tags', + 'title', 'type'), prefix=r'^', suffix=r'\b'), + Keyword), + ], + 'inline': [ + # escape + (r'\\.', Text), + # created or modified date + (r'\d{17}', Number.Integer), + # italics + (r'(\s)(//[^/]+//)((?=\W|\n))', + bygroups(Text, Generic.Emph, Text)), + # superscript + (r'(\s)(\^\^[^\^]+\^\^)', bygroups(Text, Generic.Emph)), + # subscript + (r'(\s)(,,[^,]+,,)', bygroups(Text, Generic.Emph)), + # underscore + (r'(\s)(__[^_]+__)', bygroups(Text, Generic.Strong)), + # bold + (r"(\s)(''[^']+'')((?=\W|\n))", + bygroups(Text, Generic.Strong, Text)), + # strikethrough + (r'(\s)(~~[^~]+~~)((?=\W|\n))', + bygroups(Text, Generic.Deleted, Text)), + # TiddlyWiki variables + (r'<<[^>]+>>', Name.Tag), + (r'\$\$[^$]+\$\$', Name.Tag), + (r'\$\([^)]+\)\$', Name.Tag), + # TiddlyWiki style or class + (r'^@@.*$', Name.Tag), + # HTML tags + (r']+>', Name.Tag), + # inline code + (r'`[^`]+`', String.Backtick), + # HTML escaped symbols + (r'&\S*?;', String.Regex), + # Wiki links + (r'(\[{2})([^]\|]+)(\]{2})', bygroups(Text, Name.Tag, Text)), + # External links + (r'(\[{2})([^]\|]+)(\|)([^]\|]+)(\]{2})', + bygroups(Text, Name.Tag, Text, Name.Attribute, Text)), + # Transclusion + (r'(\{{2})([^}]+)(\}{2})', bygroups(Text, Name.Tag, Text)), + # URLs + (r'(\b.?.?tps?://[^\s"]+)', bygroups(Name.Attribute)), + + # general text, must come last! + (r'[\w]+', Text), + (r'.', Text) + ], + } + + def __init__(self, **options): + self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True) + RegexLexer.__init__(self, **options) diff --git a/src/typecode/_vendor/pygments/lexers/math.py b/src/typecode/_vendor/pygments/lexers/math.py index 02d9965..16278ce 100644 --- a/src/typecode/_vendor/pygments/lexers/math.py +++ b/src/typecode/_vendor/pygments/lexers/math.py @@ -5,7 +5,7 @@ Just export lexers that were contained in this module. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. 
+ :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/matlab.py b/src/typecode/_vendor/pygments/lexers/matlab.py index 1294c4c..98031ca 100644 --- a/src/typecode/_vendor/pygments/lexers/matlab.py +++ b/src/typecode/_vendor/pygments/lexers/matlab.py @@ -5,13 +5,14 @@ Lexers for Matlab and related languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import re -from typecode._vendor.pygments.lexer import Lexer, RegexLexer, bygroups, words, do_insertions +from typecode._vendor.pygments.lexer import Lexer, RegexLexer, bygroups, default, words, \ + do_insertions from typecode._vendor.pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation, Generic, Whitespace @@ -45,33 +46,35 @@ class MatlabLexer(RegexLexer): # specfun: Special Math functions # elmat: Elementary matrices and matrix manipulation # - # taken from Matlab version 7.4.0.336 (R2007a) + # taken from Matlab version 9.4 (R2018a) # elfun = ("sin", "sind", "sinh", "asin", "asind", "asinh", "cos", "cosd", "cosh", "acos", "acosd", "acosh", "tan", "tand", "tanh", "atan", "atand", "atan2", - "atanh", "sec", "secd", "sech", "asec", "asecd", "asech", "csc", "cscd", + "atan2d", "atanh", "sec", "secd", "sech", "asec", "asecd", "asech", "csc", "cscd", "csch", "acsc", "acscd", "acsch", "cot", "cotd", "coth", "acot", "acotd", - "acoth", "hypot", "exp", "expm1", "log", "log1p", "log10", "log2", "pow2", + "acoth", "hypot", "deg2rad", "rad2deg", "exp", "expm1", "log", "log1p", "log10", "log2", "pow2", "realpow", "reallog", "realsqrt", "sqrt", "nthroot", "nextpow2", "abs", "angle", "complex", "conj", "imag", "real", "unwrap", "isreal", "cplxpair", "fix", "floor", "ceil", "round", "mod", "rem", "sign") specfun = ("airy", "besselj", "bessely", 
"besselh", "besseli", "besselk", "beta", - "betainc", "betaln", "ellipj", "ellipke", "erf", "erfc", "erfcx", - "erfinv", "expint", "gamma", "gammainc", "gammaln", "psi", "legendre", + "betainc", "betaincinv", "betaln", "ellipj", "ellipke", "erf", "erfc", "erfcx", + "erfinv", "erfcinv", "expint", "gamma", "gammainc", "gammaincinv", "gammaln", "psi", "legendre", "cross", "dot", "factor", "isprime", "primes", "gcd", "lcm", "rat", "rats", "perms", "nchoosek", "factorial", "cart2sph", "cart2pol", "pol2cart", "sph2cart", "hsv2rgb", "rgb2hsv") - elmat = ("zeros", "ones", "eye", "repmat", "rand", "randn", "linspace", "logspace", + elmat = ("zeros", "ones", "eye", "repmat", "repelem", "linspace", "logspace", "freqspace", "meshgrid", "accumarray", "size", "length", "ndims", "numel", - "disp", "isempty", "isequal", "isequalwithequalnans", "cat", "reshape", - "diag", "blkdiag", "tril", "triu", "fliplr", "flipud", "flipdim", "rot90", + "disp", "isempty", "isequal", "isequaln", "cat", "reshape", + "diag", "blkdiag", "tril", "triu", "fliplr", "flipud", "flip", "rot90", "find", "end", "sub2ind", "ind2sub", "bsxfun", "ndgrid", "permute", "ipermute", "shiftdim", "circshift", "squeeze", "isscalar", "isvector", - "ans", "eps", "realmax", "realmin", "pi", "i", "inf", "nan", "isnan", - "isinf", "isfinite", "j", "why", "compan", "gallery", "hadamard", "hankel", + "isrow", "iscolumn", "ismatrix", "eps", "realmax", "realmin", "intmax", "intmin", "flintmax", "pi", "i", "inf", "nan", "isnan", + "isinf", "isfinite", "j", "true", "false", "compan", "gallery", "hadamard", "hankel", "hilb", "invhilb", "magic", "pascal", "rosser", "toeplitz", "vander", "wilkinson") + _operators = r'-|==|~=|<=|>=|<|>|&&|&|~|\|\|?|\.\*|\*|\+|\.\^|\.\\|\./|/|\\' + tokens = { 'root': [ # line starting with '!' is sent as a system command. 
not sure what @@ -79,25 +82,40 @@ class MatlabLexer(RegexLexer): (r'^!.*', String.Other), (r'%\{\s*\n', Comment.Multiline, 'blockcomment'), (r'%.*$', Comment), - (r'^\s*function', Keyword, 'deffunc'), - - # from 'iskeyword' on version 7.11 (R2010): - (words(( - 'break', 'case', 'catch', 'classdef', 'continue', 'else', 'elseif', - 'end', 'enumerated', 'events', 'for', 'function', 'global', 'if', - 'methods', 'otherwise', 'parfor', 'persistent', 'properties', - 'return', 'spmd', 'switch', 'try', 'while'), suffix=r'\b'), + (r'^\s*function\b', Keyword, 'deffunc'), + + # from 'iskeyword' on version 9.4 (R2018a): + # Check that there is no preceding dot, as keywords are valid field + # names. + (words(('break', 'case', 'catch', 'classdef', 'continue', 'else', + 'elseif', 'end', 'for', 'function', + 'global', 'if', 'otherwise', 'parfor', + 'persistent', 'return', 'spmd', 'switch', + 'try', 'while'), + prefix=r'(?|<=|>=|&&|&|~|\|\|?', Operator), - # operators requiring escape for re: - (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator), + (_operators, Operator), + + # numbers (must come before punctuation to handle `.5`; cannot use + # `\b` due to e.g. `5. + .5`). 
+ (r'(?', '->', '#', # Modules ':>', - )) + } - nonid_reserved = set(('(', ')', '[', ']', '{', '}', ',', ';', '...', '_')) + nonid_reserved = {'(', ')', '[', ']', '{', '}', ',', ';', '...', '_'} alphanumid_re = r"[a-zA-Z][\w']*" symbolicid_re = r"[!%&$#+\-/:<=>?@\\~`^|*]+" @@ -142,7 +142,7 @@ def id_callback(self, match): (r'#\s+(%s)' % symbolicid_re, Name.Label), # Some reserved words trigger a special, local lexer state change (r'\b(datatype|abstype)\b(?!\')', Keyword.Reserved, 'dname'), - (r'(?=\b(exception)\b(?!\'))', Text, ('ename')), + (r'\b(exception)\b(?!\')', Keyword.Reserved, 'ename'), (r'\b(functor|include|open|signature|structure)\b(?!\')', Keyword.Reserved, 'sname'), (r'\b(type|eqtype)\b(?!\')', Keyword.Reserved, 'tname'), @@ -315,15 +315,14 @@ def id_callback(self, match): 'ename': [ include('whitespace'), - (r'(exception|and)\b(\s+)(%s)' % alphanumid_re, + (r'(and\b)(\s+)(%s)' % alphanumid_re, bygroups(Keyword.Reserved, Text, Name.Class)), - (r'(exception|and)\b(\s*)(%s)' % symbolicid_re, + (r'(and\b)(\s*)(%s)' % symbolicid_re, bygroups(Keyword.Reserved, Text, Name.Class)), (r'\b(of)\b(?!\')', Keyword.Reserved), + (r'(%s)|(%s)' % (alphanumid_re, symbolicid_re), Name.Class), - include('breakout'), - include('core'), - (r'\S+', Error), + default('#pop'), ], 'datcon': [ @@ -767,3 +766,193 @@ class OpaLexer(RegexLexer): (r'[^\-]+|-', Comment), ], } + + +class ReasonLexer(RegexLexer): + """ + For the ReasonML language (https://reasonml.github.io/). + + .. 
versionadded:: 2.6 + """ + + name = 'ReasonML' + aliases = ['reason', "reasonml"] + filenames = ['*.re', '*.rei'] + mimetypes = ['text/x-reasonml'] + + keywords = ( + 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done', 'downto', + 'else', 'end', 'exception', 'external', 'false', 'for', 'fun', 'esfun', + 'function', 'functor', 'if', 'in', 'include', 'inherit', 'initializer', 'lazy', + 'let', 'switch', 'module', 'pub', 'mutable', 'new', 'nonrec', 'object', 'of', + 'open', 'pri', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try', + 'type', 'val', 'virtual', 'when', 'while', 'with', + ) + keyopts = ( + '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-', + r'-\.', '=>', r'\.', r'\.\.', r'\.\.\.', ':', '::', ':=', ':>', ';', ';;', '<', + '<-', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>', + r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|\|', r'\|]', r'\}', '~' + ) + + operators = r'[!$%&*+\./:<=>?@^|~-]' + word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lsr', 'lxor', 'mod', 'or') + prefix_syms = r'[!?~]' + infix_syms = r'[=<>@^|&+\*/$%-]' + primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array') + + tokens = { + 'escape-sequence': [ + (r'\\[\\"\'ntbr]', String.Escape), + (r'\\[0-9]{3}', String.Escape), + (r'\\x[0-9a-fA-F]{2}', String.Escape), + ], + 'root': [ + (r'\s+', Text), + (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo), + (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'), + (r'\b([A-Z][\w\']*)', Name.Class), + (r'//.*?\n', Comment.Single), + (r'\/\*(?!/)', Comment.Multiline, 'comment'), + (r'\b(%s)\b' % '|'.join(keywords), Keyword), + (r'(%s)' % '|'.join(keyopts[::-1]), Operator.Word), + (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator), + (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word), + (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type), + + (r"[^\W\d][\w']*", Name), + + (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float), + 
(r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex), + (r'0[oO][0-7][0-7_]*', Number.Oct), + (r'0[bB][01][01_]*', Number.Bin), + (r'\d[\d_]*', Number.Integer), + + (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'", + String.Char), + (r"'.'", String.Char), + (r"'", Keyword), + + (r'"', String.Double, 'string'), + + (r'[~?][a-z][\w\']*:', Name.Variable), + ], + 'comment': [ + (r'[^/*]+', Comment.Multiline), + (r'\/\*', Comment.Multiline, '#push'), + (r'\*\/', Comment.Multiline, '#pop'), + (r'\*', Comment.Multiline), + ], + 'string': [ + (r'[^\\"]+', String.Double), + include('escape-sequence'), + (r'\\\n', String.Double), + (r'"', String.Double, '#pop'), + ], + 'dotted': [ + (r'\s+', Text), + (r'\.', Punctuation), + (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace), + (r'[A-Z][\w\']*', Name.Class, '#pop'), + (r'[a-z_][\w\']*', Name, '#pop'), + default('#pop'), + ], + } + + +class FStarLexer(RegexLexer): + """ + For the F* language (https://www.fstar-lang.org/). + .. versionadded:: 2.7 + """ + + name = 'FStar' + aliases = ['fstar'] + filenames = ['*.fst', '*.fsti'] + mimetypes = ['text/x-fstar'] + + keywords = ( + 'abstract', 'attributes', 'noeq', 'unopteq', 'and' + 'begin', 'by', 'default', 'effect', 'else', 'end', 'ensures', + 'exception', 'exists', 'false', 'forall', 'fun', 'function', 'if', + 'in', 'include', 'inline', 'inline_for_extraction', 'irreducible', + 'logic', 'match', 'module', 'mutable', 'new', 'new_effect', 'noextract', + 'of', 'open', 'opaque', 'private', 'range_of', 'reifiable', + 'reify', 'reflectable', 'requires', 'set_range_of', 'sub_effect', + 'synth', 'then', 'total', 'true', 'try', 'type', 'unfold', 'unfoldable', + 'val', 'when', 'with', 'not' + ) + decl_keywords = ('let', 'rec') + assume_keywords = ('assume', 'admit', 'assert', 'calc') + keyopts = ( + r'~', r'-', r'/\\', r'\\/', r'<:', r'<@', r'\(\|', r'\|\)', r'#', r'u#', + r'&', r'\(', r'\)', r'\(\)', r',', r'~>', r'->', r'<-', r'<--', r'<==>', + r'==>', r'\.', r'\?', r'\?\.', r'\.\[', r'\.\(', 
r'\.\(\|', r'\.\[\|', + r'\{:pattern', r':', r'::', r':=', r';', r';;', r'=', r'%\[', r'!\{', + r'\[', r'\[@', r'\[\|', r'\|>', r'\]', r'\|\]', r'\{', r'\|', r'\}', r'\$' + ) + + operators = r'[!$%&*+\./:<=>?@^|~-]' + prefix_syms = r'[!?~]' + infix_syms = r'[=<>@^|&+\*/$%-]' + primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array') + + tokens = { + 'escape-sequence': [ + (r'\\[\\"\'ntbr]', String.Escape), + (r'\\[0-9]{3}', String.Escape), + (r'\\x[0-9a-fA-F]{2}', String.Escape), + ], + 'root': [ + (r'\s+', Text), + (r'false|true|False|True|\(\)|\[\]', Name.Builtin.Pseudo), + (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'), + (r'\b([A-Z][\w\']*)', Name.Class), + (r'\(\*(?![)])', Comment, 'comment'), + (r'^\/\/.+$', Comment), + (r'\b(%s)\b' % '|'.join(keywords), Keyword), + (r'\b(%s)\b' % '|'.join(assume_keywords), Name.Exception), + (r'\b(%s)\b' % '|'.join(decl_keywords), Keyword.Declaration), + (r'(%s)' % '|'.join(keyopts[::-1]), Operator), + (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator), + (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type), + + (r"[^\W\d][\w']*", Name), + + (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float), + (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex), + (r'0[oO][0-7][0-7_]*', Number.Oct), + (r'0[bB][01][01_]*', Number.Bin), + (r'\d[\d_]*', Number.Integer), + + (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'", + String.Char), + (r"'.'", String.Char), + (r"'", Keyword), # a stray quote is another syntax element + (r"\`([\w\'.]+)\`", Operator.Word), # for infix applications + (r"\`", Keyword), # for quoting + (r'"', String.Double, 'string'), + + (r'[~?][a-z][\w\']*:', Name.Variable), + ], + 'comment': [ + (r'[^(*)]+', Comment), + (r'\(\*', Comment, '#push'), + (r'\*\)', Comment, '#pop'), + (r'[(*)]', Comment), + ], + 'string': [ + (r'[^\\"]+', String.Double), + include('escape-sequence'), + (r'\\\n', String.Double), + (r'"', String.Double, '#pop'), + ], + 'dotted': [ + 
(r'\s+', Text), + (r'\.', Punctuation), + (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace), + (r'[A-Z][\w\']*', Name.Class, '#pop'), + (r'[a-z_][\w\']*', Name, '#pop'), + default('#pop'), + ], + } diff --git a/src/typecode/_vendor/pygments/lexers/modeling.py b/src/typecode/_vendor/pygments/lexers/modeling.py index 93c7ac7..b50fe45 100644 --- a/src/typecode/_vendor/pygments/lexers/modeling.py +++ b/src/typecode/_vendor/pygments/lexers/modeling.py @@ -5,7 +5,7 @@ Lexers for modeling languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -38,7 +38,7 @@ class ModelicaLexer(RegexLexer): tokens = { 'whitespace': [ - (u'[\\s\ufeff]+', Text), + (r'[\s\ufeff]+', Text), (r'//[^\n]*\n?', Comment.Single), (r'/\*.*?\*/', Comment.Multiline) ], @@ -62,8 +62,8 @@ class ModelicaLexer(RegexLexer): r'transpose|vector|zeros)\b', Name.Builtin), (r'(algorithm|annotation|break|connect|constant|constrainedby|der|' r'discrete|each|else|elseif|elsewhen|encapsulated|enumeration|' - r'equation|exit|expandable|extends|external|final|flow|for|if|' - r'import|impure|in|initial|inner|input|loop|nondiscrete|outer|' + r'equation|exit|expandable|extends|external|firstTick|final|flow|for|if|' + r'import|impure|in|initial|inner|input|interval|loop|nondiscrete|outer|' r'output|parameter|partial|protected|public|pure|redeclare|' r'replaceable|return|stream|then|when|while)\b', Keyword.Reserved), diff --git a/src/typecode/_vendor/pygments/lexers/modula2.py b/src/typecode/_vendor/pygments/lexers/modula2.py index 22ef95a..23abf8e 100644 --- a/src/typecode/_vendor/pygments/lexers/modula2.py +++ b/src/typecode/_vendor/pygments/lexers/modula2.py @@ -5,7 +5,7 @@ Multi-Dialect Lexer for Modula-2. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -228,8 +228,8 @@ class Modula2Lexer(RegexLexer): (r'[0-9A-F]+H', Number.Hex), ], 'string_literals': [ - (r"'(\\\\|\\'|[^'])*'", String), # single quoted string - (r'"(\\\\|\\"|[^"])*"', String), # double quoted string + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), ], 'digraph_operators': [ # Dot Product Operator @@ -1547,15 +1547,34 @@ def get_tokens_unprocessed(self, text): # substitute lexemes when in Algol mode if self.algol_publication_mode: if value == '#': - value = u'≠' + value = '≠' elif value == '<=': - value = u'≤' + value = '≤' elif value == '>=': - value = u'≥' + value = '≥' elif value == '==': - value = u'≡' + value = '≡' elif value == '*.': - value = u'•' + value = '•' # return result yield index, token, value + + def analyse_text(text): + """It's Pascal-like, but does not use FUNCTION -- uses PROCEDURE + instead.""" + + # Check if this looks like Pascal, if not, bail out early + if not ('(*' in text and '*)' in text and ':=' in text): + return + + result = 0 + # Procedure is in Modula2 + if re.search(r'\bPROCEDURE\b', text): + result += 0.6 + + # FUNCTION is only valid in Pascal, but not in Modula2 + if re.search(r'\bFUNCTION\b', text): + result = 0.0 + + return result diff --git a/src/typecode/_vendor/pygments/lexers/monte.py b/src/typecode/_vendor/pygments/lexers/monte.py index ef15139..d7e8a6c 100644 --- a/src/typecode/_vendor/pygments/lexers/monte.py +++ b/src/typecode/_vendor/pygments/lexers/monte.py @@ -5,7 +5,7 @@ Lexer for the Monte programming language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" diff --git a/src/typecode/_vendor/pygments/lexers/mosel.py b/src/typecode/_vendor/pygments/lexers/mosel.py new file mode 100644 index 0000000..026dbf2 --- /dev/null +++ b/src/typecode/_vendor/pygments/lexers/mosel.py @@ -0,0 +1,448 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.mosel + ~~~~~~~~~~~~~~~~~~~~~ + + Lexers for the mosel language. + http://www.fico.com/en/products/fico-xpress-optimization + + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from typecode._vendor.pygments.lexer import RegexLexer, words +from typecode._vendor.pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ['MoselLexer'] + +FUNCTIONS = ( + # core functions + '_', + 'abs', + 'arctan', + 'asproc', + 'assert', + 'bitflip', + 'bitneg', + 'bitset', + 'bitshift', + 'bittest', + 'bitval', + 'ceil', + 'cos', + 'create', + 'currentdate', + 'currenttime', + 'cutelt', + 'cutfirst', + 'cuthead', + 'cutlast', + 'cuttail', + 'datablock', + 'delcell', + 'exists', + 'exit', + 'exp', + 'exportprob', + 'fclose', + 'fflush', + 'finalize', + 'findfirst', + 'findlast', + 'floor', + 'fopen', + 'fselect', + 'fskipline', + 'fwrite', + 'fwrite_', + 'fwriteln', + 'fwriteln_', + 'getact', + 'getcoeff', + 'getcoeffs', + 'getdual', + 'getelt', + 'getfid', + 'getfirst', + 'getfname', + 'gethead', + 'getlast', + 'getobjval', + 'getparam', + 'getrcost', + 'getreadcnt', + 'getreverse', + 'getsize', + 'getslack', + 'getsol', + 'gettail', + 'gettype', + 'getvars', + 'isdynamic', + 'iseof', + 'isfinite', + 'ishidden', + 'isinf', + 'isnan', + 'isodd', + 'ln', + 'localsetparam', + 'log', + 'makesos1', + 'makesos2', + 'maxlist', + 'memoryuse', + 'minlist', + 'newmuid', + 'publish', + 'random', + 'read', + 'readln', + 'reset', + 'restoreparam', + 'reverse', + 'round', + 'setcoeff', + 'sethidden', + 'setioerr', + 'setmatherr', + 'setname', + 'setparam', + 'setrandseed', + 'setrange', + 'settype', 
+ 'sin', + 'splithead', + 'splittail', + 'sqrt', + 'strfmt', + 'substr', + 'timestamp', + 'unpublish', + 'versionnum', + 'versionstr', + 'write', + 'write_', + 'writeln', + 'writeln_', + + # mosel exam mmxprs | sed -n -e "s/ [pf][a-z]* \([a-zA-Z0-9_]*\).*/'\1',/p" | sort -u + 'addcut', + 'addcuts', + 'addmipsol', + 'basisstability', + 'calcsolinfo', + 'clearmipdir', + 'clearmodcut', + 'command', + 'copysoltoinit', + 'crossoverlpsol', + 'defdelayedrows', + 'defsecurevecs', + 'delcuts', + 'dropcuts', + 'estimatemarginals', + 'fixglobal', + 'flushmsgq', + 'getbstat', + 'getcnlist', + 'getcplist', + 'getdualray', + 'getiis', + 'getiissense', + 'getiistype', + 'getinfcause', + 'getinfeas', + 'getlb', + 'getlct', + 'getleft', + 'getloadedlinctrs', + 'getloadedmpvars', + 'getname', + 'getprimalray', + 'getprobstat', + 'getrange', + 'getright', + 'getsensrng', + 'getsize', + 'getsol', + 'gettype', + 'getub', + 'getvars', + 'gety', + 'hasfeature', + 'implies', + 'indicator', + 'initglobal', + 'ishidden', + 'isiisvalid', + 'isintegral', + 'loadbasis', + 'loadcuts', + 'loadlpsol', + 'loadmipsol', + 'loadprob', + 'maximise', + 'maximize', + 'minimise', + 'minimize', + 'postsolve', + 'readbasis', + 'readdirs', + 'readsol', + 'refinemipsol', + 'rejectintsol', + 'repairinfeas', + 'repairinfeas_deprec', + 'resetbasis', + 'resetiis', + 'resetsol', + 'savebasis', + 'savemipsol', + 'savesol', + 'savestate', + 'selectsol', + 'setarchconsistency', + 'setbstat', + 'setcallback', + 'setcbcutoff', + 'setgndata', + 'sethidden', + 'setlb', + 'setmipdir', + 'setmodcut', + 'setsol', + 'setub', + 'setucbdata', + 'stopoptimise', + 'stopoptimize', + 'storecut', + 'storecuts', + 'unloadprob', + 'uselastbarsol', + 'writebasis', + 'writedirs', + 'writeprob', + 'writesol', + 'xor', + 'xprs_addctr', + 'xprs_addindic', + + # mosel exam mmsystem | sed -n -e "s/ [pf][a-z]* \([a-zA-Z0-9_]*\).*/'\1',/p" | sort -u + 'addmonths', + 'copytext', + 'cuttext', + 'deltext', + 'endswith', + 'erase', + 
'expandpath', + 'fcopy', + 'fdelete', + 'findfiles', + 'findtext', + 'fmove', + 'formattext', + 'getasnumber', + 'getchar', + 'getcwd', + 'getdate', + 'getday', + 'getdaynum', + 'getdays', + 'getdirsep', + 'getdsoparam', + 'getendparse', + 'getenv', + 'getfsize', + 'getfstat', + 'getftime', + 'gethour', + 'getminute', + 'getmonth', + 'getmsec', + 'getoserrmsg', + 'getoserror', + 'getpathsep', + 'getqtype', + 'getsecond', + 'getsepchar', + 'getsize', + 'getstart', + 'getsucc', + 'getsysinfo', + 'getsysstat', + 'gettime', + 'gettmpdir', + 'gettrim', + 'getweekday', + 'getyear', + 'inserttext', + 'isvalid', + 'jointext', + 'makedir', + 'makepath', + 'newtar', + 'newzip', + 'nextfield', + 'openpipe', + 'parseextn', + 'parseint', + 'parsereal', + 'parsetext', + 'pastetext', + 'pathmatch', + 'pathsplit', + 'qsort', + 'quote', + 'readtextline', + 'regmatch', + 'regreplace', + 'removedir', + 'removefiles', + 'setchar', + 'setdate', + 'setday', + 'setdsoparam', + 'setendparse', + 'setenv', + 'sethour', + 'setminute', + 'setmonth', + 'setmsec', + 'setoserror', + 'setqtype', + 'setsecond', + 'setsepchar', + 'setstart', + 'setsucc', + 'settime', + 'settrim', + 'setyear', + 'sleep', + 'splittext', + 'startswith', + 'system', + 'tarlist', + 'textfmt', + 'tolower', + 'toupper', + 'trim', + 'untar', + 'unzip', + 'ziplist', + + # mosel exam mmjobs | sed -n -e "s/ [pf][a-z]* \([a-zA-Z0-9_]*\).*/'\1',/p" | sort -u + 'canceltimer', + 'clearaliases', + 'compile', + 'connect', + 'detach', + 'disconnect', + 'dropnextevent', + 'findxsrvs', + 'getaliases', + 'getannidents', + 'getannotations', + 'getbanner', + 'getclass', + 'getdsoprop', + 'getdsopropnum', + 'getexitcode', + 'getfromgid', + 'getfromid', + 'getfromuid', + 'getgid', + 'gethostalias', + 'getid', + 'getmodprop', + 'getmodpropnum', + 'getnextevent', + 'getnode', + 'getrmtid', + 'getstatus', + 'getsysinfo', + 'gettimer', + 'getuid', + 'getvalue', + 'isqueueempty', + 'load', + 'nullevent', + 'peeknextevent', + 'resetmodpar', + 
'run', + 'send', + 'setcontrol', + 'setdefstream', + 'setgid', + 'sethostalias', + 'setmodpar', + 'settimer', + 'setuid', + 'setworkdir', + 'stop', + 'unload', + 'wait', + 'waitexpired', + 'waitfor', + 'waitforend', +) + + +class MoselLexer(RegexLexer): + """ + For the Mosel optimization language. + + .. versionadded:: 2.6 + """ + name = 'Mosel' + aliases = ['mosel'] + filenames = ['*.mos'] + + tokens = { + 'root': [ + (r'\n', Text), + (r'\s+', Text.Whitespace), + (r'!.*?\n', Comment.Single), + (r'\(!(.|\n)*?!\)', Comment.Multiline), + (words(( + 'and', 'as', 'break', 'case', 'count', 'declarations', 'do', + 'dynamic', 'elif', 'else', 'end-', 'end', 'evaluation', 'false', + 'forall', 'forward', 'from', 'function', 'hashmap', 'if', + 'imports', 'include', 'initialisations', 'initializations', 'inter', + 'max', 'min', 'model', 'namespace', 'next', 'not', 'nsgroup', + 'nssearch', 'of', 'options', 'or', 'package', 'parameters', + 'procedure', 'public', 'prod', 'record', 'repeat', 'requirements', + 'return', 'sum', 'then', 'to', 'true', 'union', 'until', 'uses', + 'version', 'while', 'with'), prefix=r'\b', suffix=r'\b'), + Keyword.Builtin), + (words(( + 'range', 'array', 'set', 'list', 'mpvar', 'mpproblem', 'linctr', + 'nlctr', 'integer', 'string', 'real', 'boolean', 'text', 'time', + 'date', 'datetime', 'returned', 'Model', 'Mosel', 'counter', + 'xmldoc', 'is_sos1', 'is_sos2', 'is_integer', 'is_binary', + 'is_continuous', 'is_free', 'is_semcont', 'is_semint', + 'is_partint'), prefix=r'\b', suffix=r'\b'), + Keyword.Type), + (r'(\+|\-|\*|/|=|<=|>=|\||\^|<|>|<>|\.\.|\.|:=|::|:|in|mod|div)', + Operator), + (r'[()\[\]{},;]+', Punctuation), + (words(FUNCTIONS, prefix=r'\b', suffix=r'\b'), Name.Function), + (r'(\d+\.(?!\.)\d*|\.(?!.)\d+)([eE][+-]?\d+)?', Number.Float), + (r'\d+([eE][+-]?\d+)?', Number.Integer), + (r'[+-]?Infinity', Number.Integer), + (r'0[xX][0-9a-fA-F]+', Number), + (r'"', String.Double, 'double_quote'), + (r'\'', String.Single, 'single_quote'), + 
(r'(\w+|(\.(?!\.)))', Text), + ], + 'single_quote': [ + (r'\'', String.Single, '#pop'), + (r'[^\']+', String.Single), + ], + 'double_quote': [ + (r'(\\"|\\[0-7]{1,3}\D|\\[abfnrtv]|\\\\)', String.Escape), + (r'\"', String.Double, '#pop'), + (r'[^"\\]+', String.Double), + ], + } diff --git a/src/typecode/_vendor/pygments/lexers/ncl.py b/src/typecode/_vendor/pygments/lexers/ncl.py index b36d0a1..51f090c 100644 --- a/src/typecode/_vendor/pygments/lexers/ncl.py +++ b/src/typecode/_vendor/pygments/lexers/ncl.py @@ -5,7 +5,7 @@ Lexers for NCAR Command Language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/nimrod.py b/src/typecode/_vendor/pygments/lexers/nimrod.py index b618888..1de7831 100644 --- a/src/typecode/_vendor/pygments/lexers/nimrod.py +++ b/src/typecode/_vendor/pygments/lexers/nimrod.py @@ -5,7 +5,7 @@ Lexer for the Nim language (formerly known as Nimrod). - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -43,14 +43,14 @@ def underscorize(words): return "|".join(newWords) keywords = [ - 'addr', 'and', 'as', 'asm', 'atomic', 'bind', 'block', 'break', 'case', + 'addr', 'and', 'as', 'asm', 'bind', 'block', 'break', 'case', 'cast', 'concept', 'const', 'continue', 'converter', 'defer', 'discard', 'distinct', 'div', 'do', 'elif', 'else', 'end', 'enum', 'except', 'export', 'finally', 'for', 'func', 'if', 'in', 'yield', 'interface', 'is', 'isnot', 'iterator', 'let', 'macro', 'method', 'mixin', 'mod', 'not', 'notin', 'object', 'of', 'or', 'out', 'proc', 'ptr', 'raise', - 'ref', 'return', 'shared', 'shl', 'shr', 'static', 'template', 'try', - 'tuple', 'type', 'when', 'while', 'with', 'without', 'xor' + 'ref', 'return', 'shl', 'shr', 'static', 'template', 'try', + 'tuple', 'type', 'using', 'when', 'while', 'xor' ] keywordsPseudo = [ diff --git a/src/typecode/_vendor/pygments/lexers/nit.py b/src/typecode/_vendor/pygments/lexers/nit.py index 144d38f..65c7edd 100644 --- a/src/typecode/_vendor/pygments/lexers/nit.py +++ b/src/typecode/_vendor/pygments/lexers/nit.py @@ -5,7 +5,7 @@ Lexer for the Nit language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/nix.py b/src/typecode/_vendor/pygments/lexers/nix.py index 19bf682..cf1e327 100644 --- a/src/typecode/_vendor/pygments/lexers/nix.py +++ b/src/typecode/_vendor/pygments/lexers/nix.py @@ -5,7 +5,7 @@ Lexers for the NixOS Nix language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" diff --git a/src/typecode/_vendor/pygments/lexers/oberon.py b/src/typecode/_vendor/pygments/lexers/oberon.py index ef8c7f7..42395dc 100644 --- a/src/typecode/_vendor/pygments/lexers/oberon.py +++ b/src/typecode/_vendor/pygments/lexers/oberon.py @@ -5,7 +5,7 @@ Lexers for Oberon family languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -103,3 +103,19 @@ class ComponentPascalLexer(RegexLexer): (r'(TRUE|FALSE|NIL|INF)\b', Keyword.Constant), ] } + + def analyse_text(text): + """The only other lexer using .cp is the C++ one, so we check if for + a few common Pascal keywords here. Those are unfortunately quite + common across various business languages as well.""" + result = 0 + if 'BEGIN' in text: + result += 0.01 + if 'END' in text: + result += 0.01 + if 'PROCEDURE' in text: + result += 0.01 + if 'END' in text: + result += 0.01 + + return result diff --git a/src/typecode/_vendor/pygments/lexers/objective.py b/src/typecode/_vendor/pygments/lexers/objective.py index 6376845..07a4ab2 100644 --- a/src/typecode/_vendor/pygments/lexers/objective.py +++ b/src/typecode/_vendor/pygments/lexers/objective.py @@ -5,7 +5,7 @@ Lexers for Objective-C family languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -261,11 +261,11 @@ class LogosLexer(ObjectiveCppLexer): 'logos_classname'), (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)', bygroups(Keyword, Text, Name.Class)), - (r'(%config)(\s*\(\s*)(\w+)(\s*=\s*)(.*?)(\s*\)\s*)', + (r'(%config)(\s*\(\s*)(\w+)(\s*=)(.*?)(\)\s*)', bygroups(Keyword, Text, Name.Variable, Text, String, Text)), (r'(%ctor)(\s*)(\{)', bygroups(Keyword, Text, Punctuation), 'function'), - (r'(%new)(\s*)(\()(\s*.*?\s*)(\))', + (r'(%new)(\s*)(\()(.*?)(\))', bygroups(Keyword, Text, Keyword, String, Keyword)), (r'(\s*)(%end)(\s*)', bygroups(Text, Keyword, Text)), inherit, diff --git a/src/typecode/_vendor/pygments/lexers/ooc.py b/src/typecode/_vendor/pygments/lexers/ooc.py index 2a6a56e..e2e61ce 100644 --- a/src/typecode/_vendor/pygments/lexers/ooc.py +++ b/src/typecode/_vendor/pygments/lexers/ooc.py @@ -5,7 +5,7 @@ Lexers for the Ooc language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/other.py b/src/typecode/_vendor/pygments/lexers/other.py index ac2e41b..6273210 100644 --- a/src/typecode/_vendor/pygments/lexers/other.py +++ b/src/typecode/_vendor/pygments/lexers/other.py @@ -5,7 +5,7 @@ Just export lexer classes previously contained in this module. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/parasail.py b/src/typecode/_vendor/pygments/lexers/parasail.py index b7f8d1c..fb1b579 100644 --- a/src/typecode/_vendor/pygments/lexers/parasail.py +++ b/src/typecode/_vendor/pygments/lexers/parasail.py @@ -5,7 +5,7 @@ Lexer for ParaSail. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. 
:license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/parsers.py b/src/typecode/_vendor/pygments/lexers/parsers.py index 03b42bc..d01d700 100644 --- a/src/typecode/_vendor/pygments/lexers/parsers.py +++ b/src/typecode/_vendor/pygments/lexers/parsers.py @@ -5,7 +5,7 @@ Lexers for parser generators. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -28,7 +28,6 @@ 'RagelCppLexer', 'RagelObjectiveCLexer', 'RagelRubyLexer', 'RagelJavaLexer', 'AntlrLexer', 'AntlrPythonLexer', 'AntlrPerlLexer', 'AntlrRubyLexer', 'AntlrCppLexer', - # 'AntlrCLexer', 'AntlrCSharpLexer', 'AntlrObjectiveCLexer', 'AntlrJavaLexer', 'AntlrActionScriptLexer', 'TreetopLexer', 'EbnfLexer'] @@ -65,10 +64,10 @@ class RagelLexer(RegexLexer): (r'[+-]?[0-9]+', Number.Integer), ], 'literals': [ - (r'"(\\\\|\\"|[^"])*"', String), # double quote string - (r"'(\\\\|\\'|[^'])*'", String), # single quote string - (r'\[(\\\\|\\\]|[^\]])*\]', String), # square bracket literals - (r'/(?!\*)(\\\\|\\/|[^/])*/', String.Regex), # regular expressions + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), + (r'\[(\\\\|\\[^\\]|[^\\\]])*\]', String), # square bracket literals + (r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/', String.Regex), # regular expressions ], 'identifiers': [ (r'[a-zA-Z_]\w*', Name.Variable), @@ -107,15 +106,15 @@ class RagelLexer(RegexLexer): r'[^\\]\\[{}]', # allow escaped { or } # strings and comments may safely contain unsafe characters - r'"(\\\\|\\"|[^"])*"', # double quote string - r"'(\\\\|\\'|[^'])*'", # single quote string + r'"(\\\\|\\[^\\]|[^"\\])*"', + r"'(\\\\|\\[^\\]|[^'\\])*'", r'//.*$\n?', # single line comment r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment r'\#.*$\n?', # ruby comment # regular expression: There's no reason for it to start # with a * and this stops confusion 
with comments. - r'/(?!\*)(\\\\|\\/|[^/])*/', + r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/', # / is safe now that we've handled regex and javadoc comments r'/', @@ -148,12 +147,12 @@ class RagelEmbeddedLexer(RegexLexer): r'%(?=[^%]|$)', # a single % sign is okay, just not 2 of them # strings and comments may safely contain unsafe characters - r'"(\\\\|\\"|[^"])*"', # double quote string - r"'(\\\\|\\'|[^'])*'", # single quote string + r'"(\\\\|\\[^\\]|[^"\\])*"', + r"'(\\\\|\\[^\\]|[^'\\])*'", r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment r'//.*$\n?', # single line comment r'\#.*$\n?', # ruby/ragel comment - r'/(?!\*)(\\\\|\\/|[^/])*/', # regular expression + r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/', # regular expression # / is safe now that we've handled regex and javadoc comments r'/', @@ -183,7 +182,7 @@ class RagelEmbeddedLexer(RegexLexer): # specifically allow regex followed immediately by * # so it doesn't get mistaken for a comment - r'/(?!\*)(\\\\|\\/|[^/])*/\*', + r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/\*', # allow / as long as it's not followed by another / or by a * r'/(?=[^/*]|$)', @@ -194,9 +193,9 @@ class RagelEmbeddedLexer(RegexLexer): )) + r')+', # strings and comments may safely contain unsafe characters - r'"(\\\\|\\"|[^"])*"', # double quote string - r"'(\\\\|\\'|[^'])*'", # single quote string - r"\[(\\\\|\\\]|[^\]])*\]", # square bracket literal + r'"(\\\\|\\[^\\]|[^"\\])*"', + r"'(\\\\|\\[^\\]|[^'\\])*'", + r"\[(\\\\|\\[^\\]|[^\]\\])*\]", # square bracket literal r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment r'//.*$\n?', # single line comment r'\#.*$\n?', # ruby/ragel comment @@ -222,8 +221,7 @@ class RagelRubyLexer(DelegatingLexer): filenames = ['*.rl'] def __init__(self, **options): - super(RagelRubyLexer, self).__init__(RubyLexer, RagelEmbeddedLexer, - **options) + super().__init__(RubyLexer, RagelEmbeddedLexer, **options) def analyse_text(text): return '@LANG: ruby' in text @@ -241,8 +239,7 @@ class RagelCLexer(DelegatingLexer): filenames = 
['*.rl'] def __init__(self, **options): - super(RagelCLexer, self).__init__(CLexer, RagelEmbeddedLexer, - **options) + super().__init__(CLexer, RagelEmbeddedLexer, **options) def analyse_text(text): return '@LANG: c' in text @@ -260,7 +257,7 @@ class RagelDLexer(DelegatingLexer): filenames = ['*.rl'] def __init__(self, **options): - super(RagelDLexer, self).__init__(DLexer, RagelEmbeddedLexer, **options) + super().__init__(DLexer, RagelEmbeddedLexer, **options) def analyse_text(text): return '@LANG: d' in text @@ -278,7 +275,7 @@ class RagelCppLexer(DelegatingLexer): filenames = ['*.rl'] def __init__(self, **options): - super(RagelCppLexer, self).__init__(CppLexer, RagelEmbeddedLexer, **options) + super().__init__(CppLexer, RagelEmbeddedLexer, **options) def analyse_text(text): return '@LANG: c++' in text @@ -296,9 +293,7 @@ class RagelObjectiveCLexer(DelegatingLexer): filenames = ['*.rl'] def __init__(self, **options): - super(RagelObjectiveCLexer, self).__init__(ObjectiveCLexer, - RagelEmbeddedLexer, - **options) + super().__init__(ObjectiveCLexer, RagelEmbeddedLexer, **options) def analyse_text(text): return '@LANG: objc' in text @@ -316,8 +311,7 @@ class RagelJavaLexer(DelegatingLexer): filenames = ['*.rl'] def __init__(self, **options): - super(RagelJavaLexer, self).__init__(JavaLexer, RagelEmbeddedLexer, - **options) + super().__init__(JavaLexer, RagelEmbeddedLexer, **options) def analyse_text(text): return '@LANG: java' in text @@ -422,8 +416,8 @@ class AntlrLexer(RegexLexer): (r':', Punctuation), # literals - (r"'(\\\\|\\'|[^'])*'", String), - (r'"(\\\\|\\"|[^"])*"', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), (r'<<([^>]|>[^>])>>', String), # identifiers # Tokens start with capital letter. 
@@ -462,14 +456,14 @@ class AntlrLexer(RegexLexer): r'[^${}\'"/\\]+', # exclude unsafe characters # strings and comments may safely contain unsafe characters - r'"(\\\\|\\"|[^"])*"', # double quote string - r"'(\\\\|\\'|[^'])*'", # single quote string + r'"(\\\\|\\[^\\]|[^"\\])*"', + r"'(\\\\|\\[^\\]|[^'\\])*'", r'//.*$\n?', # single line comment r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment # regular expression: There's no reason for it to start # with a * and this stops confusion with comments. - r'/(?!\*)(\\\\|\\/|[^/])*/', + r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/', # backslashes are okay, as long as we are not backslashing a % r'\\(?!%)', @@ -489,14 +483,14 @@ class AntlrLexer(RegexLexer): r'[^$\[\]\'"/]+', # exclude unsafe characters # strings and comments may safely contain unsafe characters - r'"(\\\\|\\"|[^"])*"', # double quote string - r"'(\\\\|\\'|[^'])*'", # single quote string + r'"(\\\\|\\[^\\]|[^"\\])*"', + r"'(\\\\|\\[^\\]|[^'\\])*'", r'//.*$\n?', # single line comment r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment # regular expression: There's no reason for it to start # with a * and this stops confusion with comments. - r'/(?!\*)(\\\\|\\/|[^/])*/', + r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/', # Now that we've handled regex and javadoc comments # it's safe to let / through. @@ -515,30 +509,8 @@ class AntlrLexer(RegexLexer): def analyse_text(text): return re.search(r'^\s*grammar\s+[a-zA-Z0-9]+\s*;', text, re.M) -# http://www.antlr.org/wiki/display/ANTLR3/Code+Generation+Targets - -# TH: I'm not aware of any language features of C++ that will cause -# incorrect lexing of C files. Antlr doesn't appear to make a distinction, -# so just assume they're C++. No idea how to make Objective C work in the -# future. - -# class AntlrCLexer(DelegatingLexer): -# """ -# ANTLR with C Target -# -# .. 
versionadded:: 1.1 -# """ -# -# name = 'ANTLR With C Target' -# aliases = ['antlr-c'] -# filenames = ['*.G', '*.g'] -# -# def __init__(self, **options): -# super(AntlrCLexer, self).__init__(CLexer, AntlrLexer, **options) -# -# def analyse_text(text): -# return re.match(r'^\s*language\s*=\s*C\s*;', text) +# http://www.antlr.org/wiki/display/ANTLR3/Code+Generation+Targets class AntlrCppLexer(DelegatingLexer): """ @@ -552,7 +524,7 @@ class AntlrCppLexer(DelegatingLexer): filenames = ['*.G', '*.g'] def __init__(self, **options): - super(AntlrCppLexer, self).__init__(CppLexer, AntlrLexer, **options) + super().__init__(CppLexer, AntlrLexer, **options) def analyse_text(text): return AntlrLexer.analyse_text(text) and \ @@ -571,8 +543,7 @@ class AntlrObjectiveCLexer(DelegatingLexer): filenames = ['*.G', '*.g'] def __init__(self, **options): - super(AntlrObjectiveCLexer, self).__init__(ObjectiveCLexer, - AntlrLexer, **options) + super().__init__(ObjectiveCLexer, AntlrLexer, **options) def analyse_text(text): return AntlrLexer.analyse_text(text) and \ @@ -591,8 +562,7 @@ class AntlrCSharpLexer(DelegatingLexer): filenames = ['*.G', '*.g'] def __init__(self, **options): - super(AntlrCSharpLexer, self).__init__(CSharpLexer, AntlrLexer, - **options) + super().__init__(CSharpLexer, AntlrLexer, **options) def analyse_text(text): return AntlrLexer.analyse_text(text) and \ @@ -611,8 +581,7 @@ class AntlrPythonLexer(DelegatingLexer): filenames = ['*.G', '*.g'] def __init__(self, **options): - super(AntlrPythonLexer, self).__init__(PythonLexer, AntlrLexer, - **options) + super().__init__(PythonLexer, AntlrLexer, **options) def analyse_text(text): return AntlrLexer.analyse_text(text) and \ @@ -631,8 +600,7 @@ class AntlrJavaLexer(DelegatingLexer): filenames = ['*.G', '*.g'] def __init__(self, **options): - super(AntlrJavaLexer, self).__init__(JavaLexer, AntlrLexer, - **options) + super().__init__(JavaLexer, AntlrLexer, **options) def analyse_text(text): # Antlr language is Java by 
default @@ -651,8 +619,7 @@ class AntlrRubyLexer(DelegatingLexer): filenames = ['*.G', '*.g'] def __init__(self, **options): - super(AntlrRubyLexer, self).__init__(RubyLexer, AntlrLexer, - **options) + super().__init__(RubyLexer, AntlrLexer, **options) def analyse_text(text): return AntlrLexer.analyse_text(text) and \ @@ -671,8 +638,7 @@ class AntlrPerlLexer(DelegatingLexer): filenames = ['*.G', '*.g'] def __init__(self, **options): - super(AntlrPerlLexer, self).__init__(PerlLexer, AntlrLexer, - **options) + super().__init__(PerlLexer, AntlrLexer, **options) def analyse_text(text): return AntlrLexer.analyse_text(text) and \ @@ -692,8 +658,7 @@ class AntlrActionScriptLexer(DelegatingLexer): def __init__(self, **options): from typecode._vendor.pygments.lexers.actionscript import ActionScriptLexer - super(AntlrActionScriptLexer, self).__init__(ActionScriptLexer, - AntlrLexer, **options) + super().__init__(ActionScriptLexer, AntlrLexer, **options) def analyse_text(text): return AntlrLexer.analyse_text(text) and \ @@ -736,8 +701,8 @@ class TreetopBaseLexer(RegexLexer): 'rule': [ include('space'), include('end'), - (r'"(\\\\|\\"|[^"])*"', String.Double), - (r"'(\\\\|\\'|[^'])*'", String.Single), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), (r'([A-Za-z_]\w*)(:)', bygroups(Name.Label, Punctuation)), (r'[A-Za-z_]\w*', Name), (r'[()]', Punctuation), @@ -781,7 +746,7 @@ class TreetopLexer(DelegatingLexer): filenames = ['*.treetop', '*.tt'] def __init__(self, **options): - super(TreetopLexer, self).__init__(RubyLexer, TreetopBaseLexer, **options) + super().__init__(RubyLexer, TreetopBaseLexer, **options) class EbnfLexer(RegexLexer): diff --git a/src/typecode/_vendor/pygments/lexers/pascal.py b/src/typecode/_vendor/pygments/lexers/pascal.py index 6c610a0..d239854 100644 --- a/src/typecode/_vendor/pygments/lexers/pascal.py +++ b/src/typecode/_vendor/pygments/lexers/pascal.py @@ -5,7 +5,7 @@ Lexers for Pascal family languages. 
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -68,29 +68,29 @@ class DelphiLexer(Lexer): 'dispose', 'exit', 'false', 'new', 'true' ) - BLOCK_KEYWORDS = set(( + BLOCK_KEYWORDS = { 'begin', 'class', 'const', 'constructor', 'destructor', 'end', 'finalization', 'function', 'implementation', 'initialization', 'label', 'library', 'operator', 'procedure', 'program', 'property', 'record', 'threadvar', 'type', 'unit', 'uses', 'var' - )) + } - FUNCTION_MODIFIERS = set(( + FUNCTION_MODIFIERS = { 'alias', 'cdecl', 'export', 'inline', 'interrupt', 'nostackframe', 'pascal', 'register', 'safecall', 'softfloat', 'stdcall', 'varargs', 'name', 'dynamic', 'near', 'virtual', 'external', 'override', 'assembler' - )) + } # XXX: those aren't global. but currently we know no way for defining # them just for the type context. - DIRECTIVES = set(( + DIRECTIVES = { 'absolute', 'abstract', 'assembler', 'cppdecl', 'default', 'far', 'far16', 'forward', 'index', 'oldfpccall', 'private', 'protected', 'published', 'public' - )) + } - BUILTIN_TYPES = set(( + BUILTIN_TYPES = { 'ansichar', 'ansistring', 'bool', 'boolean', 'byte', 'bytebool', 'cardinal', 'char', 'comp', 'currency', 'double', 'dword', 'extended', 'int64', 'integer', 'iunknown', 'longbool', 'longint', @@ -104,7 +104,7 @@ class DelphiLexer(Lexer): 'shortstring', 'single', 'smallint', 'string', 'tclass', 'tdate', 'tdatetime', 'textfile', 'thandle', 'tobject', 'ttime', 'variant', 'widechar', 'widestring', 'word', 'wordbool' - )) + } BUILTIN_UNITS = { 'System': ( @@ -246,7 +246,7 @@ class DelphiLexer(Lexer): ) } - ASM_REGISTERS = set(( + ASM_REGISTERS = { 'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cr0', 'cr1', 'cr2', 'cr3', 'cr4', 'cs', 'cx', 'dh', 'di', 'dl', 'dr0', 'dr1', 'dr2', 'dr3', 'dr4', 'dr5', 'dr6', 'dr7', 'ds', 'dx', @@ -255,9 +255,9 @@ class DelphiLexer(Lexer): 'mm7', 'si', 'sp', 
'ss', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5', 'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', 'xmm3', 'xmm4', 'xmm5', 'xmm6', 'xmm7' - )) + } - ASM_INSTRUCTIONS = set(( + ASM_INSTRUCTIONS = { 'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bound', 'bsf', 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw', 'cdq', 'clc', 'cld', 'cli', 'clts', 'cmc', 'cmova', 'cmovae', @@ -296,7 +296,7 @@ class DelphiLexer(Lexer): 'sysret', 'test', 'ud1', 'ud2', 'umov', 'verr', 'verw', 'wait', 'wbinvd', 'wrmsr', 'wrshr', 'xadd', 'xbts', 'xchg', 'xlat', 'xlatb', 'xor' - )) + } def __init__(self, **options): Lexer.__init__(self, **options) @@ -563,9 +563,9 @@ class AdaLexer(RegexLexer): 'exception', 'exit', 'interface', 'for', 'goto', 'if', 'is', 'limited', 'loop', 'new', 'null', 'of', 'or', 'others', 'out', 'overriding', 'pragma', 'protected', 'raise', 'range', 'record', 'renames', 'requeue', - 'return', 'reverse', 'select', 'separate', 'subtype', 'synchronized', - 'task', 'tagged', 'terminate', 'then', 'type', 'until', 'when', - 'while', 'xor'), prefix=r'\b', suffix=r'\b'), + 'return', 'reverse', 'select', 'separate', 'some', 'subtype', + 'synchronized', 'task', 'tagged', 'terminate', 'then', 'type', 'until', + 'when', 'while', 'xor'), prefix=r'\b', suffix=r'\b'), Keyword.Reserved), (r'"[^"]*"', String), include('attribute'), @@ -577,7 +577,7 @@ class AdaLexer(RegexLexer): (r'\n+', Text), ], 'numbers': [ - (r'[0-9_]+#[0-9a-f]+#', Number.Hex), + (r'[0-9_]+#[0-9a-f_\.]+#', Number.Hex), (r'[0-9_]+\.[0-9_]*', Number.Float), (r'[0-9_]+', Number.Integer), ], diff --git a/src/typecode/_vendor/pygments/lexers/pawn.py b/src/typecode/_vendor/pygments/lexers/pawn.py index fcd1a0a..301637a 100644 --- a/src/typecode/_vendor/pygments/lexers/pawn.py +++ b/src/typecode/_vendor/pygments/lexers/pawn.py @@ -5,7 +5,7 @@ Lexers for the Pawn languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. 
:license: BSD, see LICENSE for details. """ @@ -86,25 +86,25 @@ class SourcePawnLexer(RegexLexer): ] } - SM_TYPES = set(('Action', 'bool', 'Float', 'Plugin', 'String', 'any', - 'AdminFlag', 'OverrideType', 'OverrideRule', 'ImmunityType', - 'GroupId', 'AdminId', 'AdmAccessMode', 'AdminCachePart', - 'CookieAccess', 'CookieMenu', 'CookieMenuAction', 'NetFlow', - 'ConVarBounds', 'QueryCookie', 'ReplySource', - 'ConVarQueryResult', 'ConVarQueryFinished', 'Function', - 'Action', 'Identity', 'PluginStatus', 'PluginInfo', 'DBResult', - 'DBBindType', 'DBPriority', 'PropType', 'PropFieldType', - 'MoveType', 'RenderMode', 'RenderFx', 'EventHookMode', - 'EventHook', 'FileType', 'FileTimeMode', 'PathType', - 'ParamType', 'ExecType', 'DialogType', 'Handle', 'KvDataTypes', - 'NominateResult', 'MapChange', 'MenuStyle', 'MenuAction', - 'MenuSource', 'RegexError', 'SDKCallType', 'SDKLibrary', - 'SDKFuncConfSource', 'SDKType', 'SDKPassMethod', 'RayType', - 'TraceEntityFilter', 'ListenOverride', 'SortOrder', 'SortType', - 'SortFunc2D', 'APLRes', 'FeatureType', 'FeatureStatus', - 'SMCResult', 'SMCError', 'TFClassType', 'TFTeam', 'TFCond', - 'TFResourceType', 'Timer', 'TopMenuAction', 'TopMenuObjectType', - 'TopMenuPosition', 'TopMenuObject', 'UserMsg')) + SM_TYPES = {'Action', 'bool', 'Float', 'Plugin', 'String', 'any', + 'AdminFlag', 'OverrideType', 'OverrideRule', 'ImmunityType', + 'GroupId', 'AdminId', 'AdmAccessMode', 'AdminCachePart', + 'CookieAccess', 'CookieMenu', 'CookieMenuAction', 'NetFlow', + 'ConVarBounds', 'QueryCookie', 'ReplySource', + 'ConVarQueryResult', 'ConVarQueryFinished', 'Function', + 'Action', 'Identity', 'PluginStatus', 'PluginInfo', 'DBResult', + 'DBBindType', 'DBPriority', 'PropType', 'PropFieldType', + 'MoveType', 'RenderMode', 'RenderFx', 'EventHookMode', + 'EventHook', 'FileType', 'FileTimeMode', 'PathType', + 'ParamType', 'ExecType', 'DialogType', 'Handle', 'KvDataTypes', + 'NominateResult', 'MapChange', 'MenuStyle', 'MenuAction', + 'MenuSource', 
'RegexError', 'SDKCallType', 'SDKLibrary', + 'SDKFuncConfSource', 'SDKType', 'SDKPassMethod', 'RayType', + 'TraceEntityFilter', 'ListenOverride', 'SortOrder', 'SortType', + 'SortFunc2D', 'APLRes', 'FeatureType', 'FeatureStatus', + 'SMCResult', 'SMCError', 'TFClassType', 'TFTeam', 'TFCond', + 'TFResourceType', 'Timer', 'TopMenuAction', 'TopMenuObjectType', + 'TopMenuPosition', 'TopMenuObject', 'UserMsg'} def __init__(self, **options): self.smhighlighting = get_bool_opt(options, @@ -197,3 +197,9 @@ class PawnLexer(RegexLexer): (r'.*?\n', Comment), ] } + + def analyse_text(text): + """This is basically C. There is a keyword which doesn't exist in C + though and is nearly unique to this language.""" + if 'tagof' in text: + return 0.01 diff --git a/src/typecode/_vendor/pygments/lexers/perl.py b/src/typecode/_vendor/pygments/lexers/perl.py index b517821..c3ed032 100644 --- a/src/typecode/_vendor/pygments/lexers/perl.py +++ b/src/typecode/_vendor/pygments/lexers/perl.py @@ -3,9 +3,9 @@ pygments.lexers.perl ~~~~~~~~~~~~~~~~~~~~ - Lexers for Perl and related languages. + Lexers for Perl, Raku and related languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -22,12 +22,12 @@ class PerlLexer(RegexLexer): """ - For `Perl `_ source code. + For `Perl `_ source code. 
""" name = 'Perl' aliases = ['perl', 'pl'] - filenames = ['*.pl', '*.pm', '*.t'] + filenames = ['*.pl', '*.pm', '*.t', '*.perl'] mimetypes = ['text/x-perl', 'application/x-perl'] flags = re.DOTALL | re.MULTILINE @@ -208,97 +208,205 @@ class PerlLexer(RegexLexer): def analyse_text(text): if shebang_matches(text, r'perl'): return True + + result = 0 + if re.search(r'(?:my|our)\s+[$@%(]', text): - return 0.9 + result += 0.9 + + if ':=' in text: + # := is not valid Perl, but it appears in unicon, so we should + # become less confident if we think we found Perl with := + result /= 2 + + return result class Perl6Lexer(ExtendedRegexLexer): """ - For `Perl 6 `_ source code. + For `Raku `_ (a.k.a. Perl 6) source code. .. versionadded:: 2.0 """ name = 'Perl6' - aliases = ['perl6', 'pl6'] + aliases = ['perl6', 'pl6', 'raku'] filenames = ['*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', - '*.6pm', '*.p6m', '*.pm6', '*.t'] + '*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod', + '*.rakutest', '*.rakudoc'] mimetypes = ['text/x-perl6', 'application/x-perl6'] flags = re.MULTILINE | re.DOTALL | re.UNICODE PERL6_IDENTIFIER_RANGE = r"['\w:-]" PERL6_KEYWORDS = ( - 'BEGIN', 'CATCH', 'CHECK', 'CONTROL', 'END', 'ENTER', 'FIRST', 'INIT', - 'KEEP', 'LAST', 'LEAVE', 'NEXT', 'POST', 'PRE', 'START', 'TEMP', - 'UNDO', 'as', 'assoc', 'async', 'augment', 'binary', 'break', 'but', - 'cached', 'category', 'class', 'constant', 'contend', 'continue', - 'copy', 'deep', 'default', 'defequiv', 'defer', 'die', 'do', 'else', - 'elsif', 'enum', 'equiv', 'exit', 'export', 'fail', 'fatal', 'for', - 'gather', 'given', 'goto', 'grammar', 'handles', 'has', 'if', 'inline', - 'irs', 'is', 'last', 'leave', 'let', 'lift', 'loop', 'looser', 'macro', - 'make', 'maybe', 'method', 'module', 'multi', 'my', 'next', 'of', - 'ofs', 'only', 'oo', 'ors', 'our', 'package', 'parsed', 'prec', - 'proto', 'readonly', 'redo', 'ref', 'regex', 'reparsed', 'repeat', - 'require', 'required', 'return', 'returns', 
'role', 'rule', 'rw', - 'self', 'slang', 'state', 'sub', 'submethod', 'subset', 'supersede', - 'take', 'temp', 'tighter', 'token', 'trusts', 'try', 'unary', - 'unless', 'until', 'use', 'warn', 'when', 'where', 'while', 'will', + #Phasers + 'BEGIN','CATCH','CHECK','CLOSE','CONTROL','DOC','END','ENTER','FIRST', + 'INIT','KEEP','LAST','LEAVE','NEXT','POST','PRE','QUIT','UNDO', + #Keywords + 'anon','augment','but','class','constant','default','does','else', + 'elsif','enum','for','gather','given','grammar','has','if','import', + 'is','let','loop','made','make','method','module','multi','my','need', + 'orwith','our','proceed','proto','repeat','require','return', + 'return-rw','returns','role','rule','state','sub','submethod','subset', + 'succeed','supersede','token','try','unit','unless','until','use', + 'when','while','with','without', + #Traits + 'export','native','repr','required','rw','symbol', ) PERL6_BUILTINS = ( - 'ACCEPTS', 'HOW', 'REJECTS', 'VAR', 'WHAT', 'WHENCE', 'WHERE', 'WHICH', - 'WHO', 'abs', 'acos', 'acosec', 'acosech', 'acosh', 'acotan', 'acotanh', - 'all', 'any', 'approx', 'arity', 'asec', 'asech', 'asin', 'asinh', - 'assuming', 'atan', 'atan2', 'atanh', 'attr', 'bless', 'body', 'by', - 'bytes', 'caller', 'callsame', 'callwith', 'can', 'capitalize', 'cat', - 'ceiling', 'chars', 'chmod', 'chomp', 'chop', 'chr', 'chroot', - 'circumfix', 'cis', 'classify', 'clone', 'close', 'cmp_ok', 'codes', - 'comb', 'connect', 'contains', 'context', 'cos', 'cosec', 'cosech', - 'cosh', 'cotan', 'cotanh', 'count', 'defined', 'delete', 'diag', - 'dies_ok', 'does', 'e', 'each', 'eager', 'elems', 'end', 'eof', 'eval', - 'eval_dies_ok', 'eval_elsewhere', 'eval_lives_ok', 'evalfile', 'exists', - 'exp', 'first', 'flip', 'floor', 'flunk', 'flush', 'fmt', 'force_todo', - 'fork', 'from', 'getc', 'gethost', 'getlogin', 'getpeername', 'getpw', - 'gmtime', 'graphs', 'grep', 'hints', 'hyper', 'im', 'index', 'infix', - 'invert', 'is_approx', 'is_deeply', 'isa', 'isa_ok', 'isnt', 
'iterator', - 'join', 'key', 'keys', 'kill', 'kv', 'lastcall', 'lazy', 'lc', 'lcfirst', - 'like', 'lines', 'link', 'lives_ok', 'localtime', 'log', 'log10', 'map', - 'max', 'min', 'minmax', 'name', 'new', 'nextsame', 'nextwith', 'nfc', - 'nfd', 'nfkc', 'nfkd', 'nok_error', 'nonce', 'none', 'normalize', 'not', - 'nothing', 'ok', 'once', 'one', 'open', 'opendir', 'operator', 'ord', - 'p5chomp', 'p5chop', 'pack', 'pair', 'pairs', 'pass', 'perl', 'pi', - 'pick', 'plan', 'plan_ok', 'polar', 'pop', 'pos', 'postcircumfix', - 'postfix', 'pred', 'prefix', 'print', 'printf', 'push', 'quasi', - 'quotemeta', 'rand', 're', 'read', 'readdir', 'readline', 'reduce', - 'reverse', 'rewind', 'rewinddir', 'rindex', 'roots', 'round', - 'roundrobin', 'run', 'runinstead', 'sameaccent', 'samecase', 'say', - 'sec', 'sech', 'sech', 'seek', 'shape', 'shift', 'sign', 'signature', - 'sin', 'sinh', 'skip', 'skip_rest', 'sleep', 'slurp', 'sort', 'splice', - 'split', 'sprintf', 'sqrt', 'srand', 'strand', 'subst', 'substr', 'succ', - 'sum', 'symlink', 'tan', 'tanh', 'throws_ok', 'time', 'times', 'to', - 'todo', 'trim', 'trim_end', 'trim_start', 'true', 'truncate', 'uc', - 'ucfirst', 'undef', 'undefine', 'uniq', 'unlike', 'unlink', 'unpack', - 'unpolar', 'unshift', 'unwrap', 'use_ok', 'value', 'values', 'vec', - 'version_lt', 'void', 'wait', 'want', 'wrap', 'write', 'zip', + 'ACCEPTS','abs','abs2rel','absolute','accept','accessed','acos', + 'acosec','acosech','acosh','acotan','acotanh','acquire','act','action', + 'actions','add','add_attribute','add_enum_value','add_fallback', + 'add_method','add_parent','add_private_method','add_role','add_trustee', + 'adverb','after','all','allocate','allof','allowed','alternative-names', + 'annotations','antipair','antipairs','any','anyof','app_lifetime', + 'append','arch','archname','args','arity','Array','asec','asech','asin', + 'asinh','ASSIGN-KEY','ASSIGN-POS','assuming','ast','at','atan','atan2', + 
'atanh','AT-KEY','atomic-assign','atomic-dec-fetch','atomic-fetch', + 'atomic-fetch-add','atomic-fetch-dec','atomic-fetch-inc', + 'atomic-fetch-sub','atomic-inc-fetch','AT-POS','attributes','auth', + 'await','backtrace','Bag','BagHash','bail-out','base','basename', + 'base-repeating','batch','BIND-KEY','BIND-POS','bind-stderr', + 'bind-stdin','bind-stdout','bind-udp','bits','bless','block','Bool', + 'bool-only','bounds','break','Bridge','broken','BUILD','build-date', + 'bytes','cache','callframe','calling-package','CALL-ME','callsame', + 'callwith','can','cancel','candidates','cando','can-ok','canonpath', + 'caps','caption','Capture','cas','catdir','categorize','categorize-list', + 'catfile','catpath','cause','ceiling','cglobal','changed','Channel', + 'chars','chdir','child','child-name','child-typename','chmod','chomp', + 'chop','chr','chrs','chunks','cis','classify','classify-list','cleanup', + 'clone','close','closed','close-stdin','cmp-ok','code','codes','collate', + 'column','comb','combinations','command','comment','compiler','Complex', + 'compose','compose_type','composer','condition','config', + 'configure_destroy','configure_type_checking','conj','connect', + 'constraints','construct','contains','contents','copy','cos','cosec', + 'cosech','cosh','cotan','cotanh','count','count-only','cpu-cores', + 'cpu-usage','CREATE','create_type','cross','cue','curdir','curupdir','d', + 'Date','DateTime','day','daycount','day-of-month','day-of-week', + 'day-of-year','days-in-month','declaration','decode','decoder','deepmap', + 'default','defined','DEFINITE','delayed','DELETE-KEY','DELETE-POS', + 'denominator','desc','DESTROY','destroyers','devnull','diag', + 'did-you-mean','die','dies-ok','dir','dirname','dir-sep','DISTROnames', + 'do','does','does-ok','done','done-testing','duckmap','dynamic','e', + 'eager','earlier','elems','emit','enclosing','encode','encoder', + 'encoding','end','ends-with','enum_from_value','enum_value_list', + 
'enum_values','enums','eof','EVAL','eval-dies-ok','EVALFILE', + 'eval-lives-ok','exception','excludes-max','excludes-min','EXISTS-KEY', + 'EXISTS-POS','exit','exitcode','exp','expected','explicitly-manage', + 'expmod','extension','f','fail','fails-like','fc','feature','file', + 'filename','find_method','find_method_qualified','finish','first','flat', + 'flatmap','flip','floor','flunk','flush','fmt','format','formatter', + 'freeze','from','from-list','from-loop','from-posix','full', + 'full-barrier','get','get_value','getc','gist','got','grab','grabpairs', + 'grep','handle','handled','handles','hardware','has_accessor','Hash', + 'head','headers','hh-mm-ss','hidden','hides','hour','how','hyper','id', + 'illegal','im','in','indent','index','indices','indir','infinite', + 'infix','infix:<+>','infix:<->','install_method_cache','Instant', + 'instead','Int','int-bounds','interval','in-timezone','invalid-str', + 'invert','invocant','IO','IO::Notification.watch-path','is_trusted', + 'is_type','isa','is-absolute','isa-ok','is-approx','is-deeply', + 'is-hidden','is-initial-thread','is-int','is-lazy','is-leap-year', + 'isNaN','isnt','is-prime','is-relative','is-routine','is-setting', + 'is-win','item','iterator','join','keep','kept','KERNELnames','key', + 'keyof','keys','kill','kv','kxxv','l','lang','last','lastcall','later', + 'lazy','lc','leading','level','like','line','lines','link','List', + 'listen','live','lives-ok','local','lock','log','log10','lookup','lsb', + 'made','MAIN','make','Map','match','max','maxpairs','merge','message', + 'method','method_table','methods','migrate','min','minmax','minpairs', + 'minute','misplaced','Mix','MixHash','mkdir','mode','modified','month', + 'move','mro','msb','multi','multiness','my','name','named','named_names', + 'narrow','nativecast','native-descriptor','nativesizeof','new','new_type', + 'new-from-daycount','new-from-pairs','next','nextcallee','next-handle', + 'nextsame','nextwith','NFC','NFD','NFKC','NFKD','nl-in','nl-out', + 
'nodemap','nok','none','norm','not','note','now','nude','Num', + 'numerator','Numeric','of','offset','offset-in-hours','offset-in-minutes', + 'ok','old','on-close','one','on-switch','open','opened','operation', + 'optional','ord','ords','orig','os-error','osname','out-buffer','pack', + 'package','package-kind','package-name','packages','pair','pairs', + 'pairup','parameter','params','parent','parent-name','parents','parse', + 'parse-base','parsefile','parse-names','parts','pass','path','path-sep', + 'payload','peer-host','peer-port','periods','perl','permutations','phaser', + 'pick','pickpairs','pid','placeholder','plan','plus','polar','poll', + 'polymod','pop','pos','positional','posix','postfix','postmatch', + 'precomp-ext','precomp-target','pred','prefix','prematch','prepend', + 'print','printf','print-nl','print-to','private','private_method_table', + 'proc','produce','Promise','prompt','protect','pull-one','push', + 'push-all','push-at-least','push-exactly','push-until-lazy','put', + 'qualifier-type','quit','r','race','radix','rand','range','Rat','raw', + 're','read','readchars','readonly','ready','Real','reallocate','reals', + 'reason','rebless','receive','recv','redispatcher','redo','reduce', + 'rel2abs','relative','release','rename','repeated','replacement', + 'report','reserved','resolve','restore','result','resume','rethrow', + 'reverse','right','rindex','rmdir','role','roles_to_compose','rolish', + 'roll','rootdir','roots','rotate','rotor','round','roundrobin', + 'routine-type','run','rwx','s','samecase','samemark','samewith','say', + 'schedule-on','scheduler','scope','sec','sech','second','seek','self', + 'send','Set','set_hidden','set_name','set_package','set_rw','set_value', + 'SetHash','set-instruments','setup_finalization','shape','share','shell', + 'shift','sibling','sigil','sign','signal','signals','signature','sin', + 'sinh','sink','sink-all','skip','skip-at-least','skip-at-least-pull-one', + 
'skip-one','skip-rest','sleep','sleep-timer','sleep-until','Slip','slurp', + 'slurp-rest','slurpy','snap','snapper','so','socket-host','socket-port', + 'sort','source','source-package','spawn','SPEC','splice','split', + 'splitdir','splitpath','sprintf','spurt','sqrt','squish','srand','stable', + 'start','started','starts-with','status','stderr','stdout','Str', + 'sub_signature','subbuf','subbuf-rw','subname','subparse','subst', + 'subst-mutate','substr','substr-eq','substr-rw','subtest','succ','sum', + 'Supply','symlink','t','tail','take','take-rw','tan','tanh','tap', + 'target','target-name','tc','tclc','tell','then','throttle','throw', + 'throws-like','timezone','tmpdir','to','today','todo','toggle','to-posix', + 'total','trailing','trans','tree','trim','trim-leading','trim-trailing', + 'truncate','truncated-to','trusts','try_acquire','trying','twigil','type', + 'type_captures','typename','uc','udp','uncaught_handler','unimatch', + 'uniname','uninames','uniparse','uniprop','uniprops','unique','unival', + 'univals','unlike','unlink','unlock','unpack','unpolar','unshift', + 'unwrap','updir','USAGE','use-ok','utc','val','value','values','VAR', + 'variable','verbose-config','version','VMnames','volume','vow','w','wait', + 'warn','watch','watch-path','week','weekday-of-month','week-number', + 'week-year','WHAT','when','WHERE','WHEREFORE','WHICH','WHO', + 'whole-second','WHY','wordcase','words','workaround','wrap','write', + 'write-to','x','yada','year','yield','yyyy-mm-dd','z','zip','zip-latest', + ) PERL6_BUILTIN_CLASSES = ( - 'Abstraction', 'Any', 'AnyChar', 'Array', 'Associative', 'Bag', 'Bit', - 'Blob', 'Block', 'Bool', 'Buf', 'Byte', 'Callable', 'Capture', 'Char', 'Class', - 'Code', 'Codepoint', 'Comparator', 'Complex', 'Decreasing', 'Exception', - 'Failure', 'False', 'Grammar', 'Grapheme', 'Hash', 'IO', 'Increasing', - 'Int', 'Junction', 'KeyBag', 'KeyExtractor', 'KeyHash', 'KeySet', - 'KitchenSink', 'List', 'Macro', 'Mapping', 'Match', 'Matcher', 'Method', - 
'Module', 'Num', 'Object', 'Ordered', 'Ordering', 'OrderingPair', - 'Package', 'Pair', 'Positional', 'Proxy', 'Range', 'Rat', 'Regex', - 'Role', 'Routine', 'Scalar', 'Seq', 'Set', 'Signature', 'Str', 'StrLen', - 'StrPos', 'Sub', 'Submethod', 'True', 'UInt', 'Undef', 'Version', 'Void', - 'Whatever', 'bit', 'bool', 'buf', 'buf1', 'buf16', 'buf2', 'buf32', - 'buf4', 'buf64', 'buf8', 'complex', 'int', 'int1', 'int16', 'int2', - 'int32', 'int4', 'int64', 'int8', 'num', 'rat', 'rat1', 'rat16', 'rat2', - 'rat32', 'rat4', 'rat64', 'rat8', 'uint', 'uint1', 'uint16', 'uint2', - 'uint32', 'uint4', 'uint64', 'uint8', 'utf16', 'utf32', 'utf8', + #Booleans + 'False','True', + #Classes + 'Any','Array','Associative','AST','atomicint','Attribute','Backtrace', + 'Backtrace::Frame','Bag','Baggy','BagHash','Blob','Block','Bool','Buf', + 'Callable','CallFrame','Cancellation','Capture','CArray','Channel','Code', + 'compiler','Complex','ComplexStr','Cool','CurrentThreadScheduler', + 'Cursor','Date','Dateish','DateTime','Distro','Duration','Encoding', + 'Exception','Failure','FatRat','Grammar','Hash','HyperWhatever','Instant', + 'Int','int16','int32','int64','int8','IntStr','IO','IO::ArgFiles', + 'IO::CatHandle','IO::Handle','IO::Notification','IO::Path', + 'IO::Path::Cygwin','IO::Path::QNX','IO::Path::Unix','IO::Path::Win32', + 'IO::Pipe','IO::Socket','IO::Socket::Async','IO::Socket::INET','IO::Spec', + 'IO::Spec::Cygwin','IO::Spec::QNX','IO::Spec::Unix','IO::Spec::Win32', + 'IO::Special','Iterable','Iterator','Junction','Kernel','Label','List', + 'Lock','Lock::Async','long','longlong','Macro','Map','Match', + 'Metamodel::AttributeContainer','Metamodel::C3MRO','Metamodel::ClassHOW', + 'Metamodel::EnumHOW','Metamodel::Finalization','Metamodel::MethodContainer', + 'Metamodel::MROBasedMethodDispatch','Metamodel::MultipleInheritance', + 'Metamodel::Naming','Metamodel::Primitives','Metamodel::PrivateMethodContainer', + 'Metamodel::RoleContainer','Metamodel::Trusting','Method','Mix','MixHash', 
+ 'Mixy','Mu','NFC','NFD','NFKC','NFKD','Nil','Num','num32','num64', + 'Numeric','NumStr','ObjAt','Order','Pair','Parameter','Perl','Pod::Block', + 'Pod::Block::Code','Pod::Block::Comment','Pod::Block::Declarator', + 'Pod::Block::Named','Pod::Block::Para','Pod::Block::Table','Pod::Heading', + 'Pod::Item','Pointer','Positional','PositionalBindFailover','Proc', + 'Proc::Async','Promise','Proxy','PseudoStash','QuantHash','Range','Rat', + 'Rational','RatStr','Real','Regex','Routine','Scalar','Scheduler', + 'Semaphore','Seq','Set','SetHash','Setty','Signature','size_t','Slip', + 'Stash','Str','StrDistance','Stringy','Sub','Submethod','Supplier', + 'Supplier::Preserving','Supply','Systemic','Tap','Telemetry', + 'Telemetry::Instrument::Thread','Telemetry::Instrument::Usage', + 'Telemetry::Period','Telemetry::Sampler','Thread','ThreadPoolScheduler', + 'UInt','uint16','uint32','uint64','uint8','Uni','utf8','Variable', + 'Version','VM','Whatever','WhateverCode','WrapHandle' ) PERL6_OPERATORS = ( @@ -311,76 +419,76 @@ class Perl6Lexer(ExtendedRegexLexer): '~', '&', '^', 'but', 'does', '<=>', '..', '..^', '^..', '^..^', '!=', '==', '<', '<=', '>', '>=', '~~', '===', '!eqv', '&&', '||', '^^', '//', 'min', 'max', '??', '!!', 'ff', 'fff', 'so', - 'not', '<==', '==>', '<<==', '==>>', + 'not', '<==', '==>', '<<==', '==>>','unicmp', ) # Perl 6 has a *lot* of possible bracketing characters # this list was lifted from STD.pm6 (https://github.com/perl6/std) PERL6_BRACKETS = { - u'\u0028': u'\u0029', u'\u003c': u'\u003e', u'\u005b': u'\u005d', - u'\u007b': u'\u007d', u'\u00ab': u'\u00bb', u'\u0f3a': u'\u0f3b', - u'\u0f3c': u'\u0f3d', u'\u169b': u'\u169c', u'\u2018': u'\u2019', - u'\u201a': u'\u2019', u'\u201b': u'\u2019', u'\u201c': u'\u201d', - u'\u201e': u'\u201d', u'\u201f': u'\u201d', u'\u2039': u'\u203a', - u'\u2045': u'\u2046', u'\u207d': u'\u207e', u'\u208d': u'\u208e', - u'\u2208': u'\u220b', u'\u2209': u'\u220c', u'\u220a': u'\u220d', - u'\u2215': u'\u29f5', u'\u223c': 
u'\u223d', u'\u2243': u'\u22cd', - u'\u2252': u'\u2253', u'\u2254': u'\u2255', u'\u2264': u'\u2265', - u'\u2266': u'\u2267', u'\u2268': u'\u2269', u'\u226a': u'\u226b', - u'\u226e': u'\u226f', u'\u2270': u'\u2271', u'\u2272': u'\u2273', - u'\u2274': u'\u2275', u'\u2276': u'\u2277', u'\u2278': u'\u2279', - u'\u227a': u'\u227b', u'\u227c': u'\u227d', u'\u227e': u'\u227f', - u'\u2280': u'\u2281', u'\u2282': u'\u2283', u'\u2284': u'\u2285', - u'\u2286': u'\u2287', u'\u2288': u'\u2289', u'\u228a': u'\u228b', - u'\u228f': u'\u2290', u'\u2291': u'\u2292', u'\u2298': u'\u29b8', - u'\u22a2': u'\u22a3', u'\u22a6': u'\u2ade', u'\u22a8': u'\u2ae4', - u'\u22a9': u'\u2ae3', u'\u22ab': u'\u2ae5', u'\u22b0': u'\u22b1', - u'\u22b2': u'\u22b3', u'\u22b4': u'\u22b5', u'\u22b6': u'\u22b7', - u'\u22c9': u'\u22ca', u'\u22cb': u'\u22cc', u'\u22d0': u'\u22d1', - u'\u22d6': u'\u22d7', u'\u22d8': u'\u22d9', u'\u22da': u'\u22db', - u'\u22dc': u'\u22dd', u'\u22de': u'\u22df', u'\u22e0': u'\u22e1', - u'\u22e2': u'\u22e3', u'\u22e4': u'\u22e5', u'\u22e6': u'\u22e7', - u'\u22e8': u'\u22e9', u'\u22ea': u'\u22eb', u'\u22ec': u'\u22ed', - u'\u22f0': u'\u22f1', u'\u22f2': u'\u22fa', u'\u22f3': u'\u22fb', - u'\u22f4': u'\u22fc', u'\u22f6': u'\u22fd', u'\u22f7': u'\u22fe', - u'\u2308': u'\u2309', u'\u230a': u'\u230b', u'\u2329': u'\u232a', - u'\u23b4': u'\u23b5', u'\u2768': u'\u2769', u'\u276a': u'\u276b', - u'\u276c': u'\u276d', u'\u276e': u'\u276f', u'\u2770': u'\u2771', - u'\u2772': u'\u2773', u'\u2774': u'\u2775', u'\u27c3': u'\u27c4', - u'\u27c5': u'\u27c6', u'\u27d5': u'\u27d6', u'\u27dd': u'\u27de', - u'\u27e2': u'\u27e3', u'\u27e4': u'\u27e5', u'\u27e6': u'\u27e7', - u'\u27e8': u'\u27e9', u'\u27ea': u'\u27eb', u'\u2983': u'\u2984', - u'\u2985': u'\u2986', u'\u2987': u'\u2988', u'\u2989': u'\u298a', - u'\u298b': u'\u298c', u'\u298d': u'\u298e', u'\u298f': u'\u2990', - u'\u2991': u'\u2992', u'\u2993': u'\u2994', u'\u2995': u'\u2996', - u'\u2997': u'\u2998', u'\u29c0': u'\u29c1', u'\u29c4': 
u'\u29c5', - u'\u29cf': u'\u29d0', u'\u29d1': u'\u29d2', u'\u29d4': u'\u29d5', - u'\u29d8': u'\u29d9', u'\u29da': u'\u29db', u'\u29f8': u'\u29f9', - u'\u29fc': u'\u29fd', u'\u2a2b': u'\u2a2c', u'\u2a2d': u'\u2a2e', - u'\u2a34': u'\u2a35', u'\u2a3c': u'\u2a3d', u'\u2a64': u'\u2a65', - u'\u2a79': u'\u2a7a', u'\u2a7d': u'\u2a7e', u'\u2a7f': u'\u2a80', - u'\u2a81': u'\u2a82', u'\u2a83': u'\u2a84', u'\u2a8b': u'\u2a8c', - u'\u2a91': u'\u2a92', u'\u2a93': u'\u2a94', u'\u2a95': u'\u2a96', - u'\u2a97': u'\u2a98', u'\u2a99': u'\u2a9a', u'\u2a9b': u'\u2a9c', - u'\u2aa1': u'\u2aa2', u'\u2aa6': u'\u2aa7', u'\u2aa8': u'\u2aa9', - u'\u2aaa': u'\u2aab', u'\u2aac': u'\u2aad', u'\u2aaf': u'\u2ab0', - u'\u2ab3': u'\u2ab4', u'\u2abb': u'\u2abc', u'\u2abd': u'\u2abe', - u'\u2abf': u'\u2ac0', u'\u2ac1': u'\u2ac2', u'\u2ac3': u'\u2ac4', - u'\u2ac5': u'\u2ac6', u'\u2acd': u'\u2ace', u'\u2acf': u'\u2ad0', - u'\u2ad1': u'\u2ad2', u'\u2ad3': u'\u2ad4', u'\u2ad5': u'\u2ad6', - u'\u2aec': u'\u2aed', u'\u2af7': u'\u2af8', u'\u2af9': u'\u2afa', - u'\u2e02': u'\u2e03', u'\u2e04': u'\u2e05', u'\u2e09': u'\u2e0a', - u'\u2e0c': u'\u2e0d', u'\u2e1c': u'\u2e1d', u'\u2e20': u'\u2e21', - u'\u3008': u'\u3009', u'\u300a': u'\u300b', u'\u300c': u'\u300d', - u'\u300e': u'\u300f', u'\u3010': u'\u3011', u'\u3014': u'\u3015', - u'\u3016': u'\u3017', u'\u3018': u'\u3019', u'\u301a': u'\u301b', - u'\u301d': u'\u301e', u'\ufd3e': u'\ufd3f', u'\ufe17': u'\ufe18', - u'\ufe35': u'\ufe36', u'\ufe37': u'\ufe38', u'\ufe39': u'\ufe3a', - u'\ufe3b': u'\ufe3c', u'\ufe3d': u'\ufe3e', u'\ufe3f': u'\ufe40', - u'\ufe41': u'\ufe42', u'\ufe43': u'\ufe44', u'\ufe47': u'\ufe48', - u'\ufe59': u'\ufe5a', u'\ufe5b': u'\ufe5c', u'\ufe5d': u'\ufe5e', - u'\uff08': u'\uff09', u'\uff1c': u'\uff1e', u'\uff3b': u'\uff3d', - u'\uff5b': u'\uff5d', u'\uff5f': u'\uff60', u'\uff62': u'\uff63', + '\u0028': '\u0029', '\u003c': '\u003e', '\u005b': '\u005d', + '\u007b': '\u007d', '\u00ab': '\u00bb', '\u0f3a': '\u0f3b', + '\u0f3c': '\u0f3d', 
'\u169b': '\u169c', '\u2018': '\u2019', + '\u201a': '\u2019', '\u201b': '\u2019', '\u201c': '\u201d', + '\u201e': '\u201d', '\u201f': '\u201d', '\u2039': '\u203a', + '\u2045': '\u2046', '\u207d': '\u207e', '\u208d': '\u208e', + '\u2208': '\u220b', '\u2209': '\u220c', '\u220a': '\u220d', + '\u2215': '\u29f5', '\u223c': '\u223d', '\u2243': '\u22cd', + '\u2252': '\u2253', '\u2254': '\u2255', '\u2264': '\u2265', + '\u2266': '\u2267', '\u2268': '\u2269', '\u226a': '\u226b', + '\u226e': '\u226f', '\u2270': '\u2271', '\u2272': '\u2273', + '\u2274': '\u2275', '\u2276': '\u2277', '\u2278': '\u2279', + '\u227a': '\u227b', '\u227c': '\u227d', '\u227e': '\u227f', + '\u2280': '\u2281', '\u2282': '\u2283', '\u2284': '\u2285', + '\u2286': '\u2287', '\u2288': '\u2289', '\u228a': '\u228b', + '\u228f': '\u2290', '\u2291': '\u2292', '\u2298': '\u29b8', + '\u22a2': '\u22a3', '\u22a6': '\u2ade', '\u22a8': '\u2ae4', + '\u22a9': '\u2ae3', '\u22ab': '\u2ae5', '\u22b0': '\u22b1', + '\u22b2': '\u22b3', '\u22b4': '\u22b5', '\u22b6': '\u22b7', + '\u22c9': '\u22ca', '\u22cb': '\u22cc', '\u22d0': '\u22d1', + '\u22d6': '\u22d7', '\u22d8': '\u22d9', '\u22da': '\u22db', + '\u22dc': '\u22dd', '\u22de': '\u22df', '\u22e0': '\u22e1', + '\u22e2': '\u22e3', '\u22e4': '\u22e5', '\u22e6': '\u22e7', + '\u22e8': '\u22e9', '\u22ea': '\u22eb', '\u22ec': '\u22ed', + '\u22f0': '\u22f1', '\u22f2': '\u22fa', '\u22f3': '\u22fb', + '\u22f4': '\u22fc', '\u22f6': '\u22fd', '\u22f7': '\u22fe', + '\u2308': '\u2309', '\u230a': '\u230b', '\u2329': '\u232a', + '\u23b4': '\u23b5', '\u2768': '\u2769', '\u276a': '\u276b', + '\u276c': '\u276d', '\u276e': '\u276f', '\u2770': '\u2771', + '\u2772': '\u2773', '\u2774': '\u2775', '\u27c3': '\u27c4', + '\u27c5': '\u27c6', '\u27d5': '\u27d6', '\u27dd': '\u27de', + '\u27e2': '\u27e3', '\u27e4': '\u27e5', '\u27e6': '\u27e7', + '\u27e8': '\u27e9', '\u27ea': '\u27eb', '\u2983': '\u2984', + '\u2985': '\u2986', '\u2987': '\u2988', '\u2989': '\u298a', + '\u298b': '\u298c', '\u298d': 
'\u298e', '\u298f': '\u2990', + '\u2991': '\u2992', '\u2993': '\u2994', '\u2995': '\u2996', + '\u2997': '\u2998', '\u29c0': '\u29c1', '\u29c4': '\u29c5', + '\u29cf': '\u29d0', '\u29d1': '\u29d2', '\u29d4': '\u29d5', + '\u29d8': '\u29d9', '\u29da': '\u29db', '\u29f8': '\u29f9', + '\u29fc': '\u29fd', '\u2a2b': '\u2a2c', '\u2a2d': '\u2a2e', + '\u2a34': '\u2a35', '\u2a3c': '\u2a3d', '\u2a64': '\u2a65', + '\u2a79': '\u2a7a', '\u2a7d': '\u2a7e', '\u2a7f': '\u2a80', + '\u2a81': '\u2a82', '\u2a83': '\u2a84', '\u2a8b': '\u2a8c', + '\u2a91': '\u2a92', '\u2a93': '\u2a94', '\u2a95': '\u2a96', + '\u2a97': '\u2a98', '\u2a99': '\u2a9a', '\u2a9b': '\u2a9c', + '\u2aa1': '\u2aa2', '\u2aa6': '\u2aa7', '\u2aa8': '\u2aa9', + '\u2aaa': '\u2aab', '\u2aac': '\u2aad', '\u2aaf': '\u2ab0', + '\u2ab3': '\u2ab4', '\u2abb': '\u2abc', '\u2abd': '\u2abe', + '\u2abf': '\u2ac0', '\u2ac1': '\u2ac2', '\u2ac3': '\u2ac4', + '\u2ac5': '\u2ac6', '\u2acd': '\u2ace', '\u2acf': '\u2ad0', + '\u2ad1': '\u2ad2', '\u2ad3': '\u2ad4', '\u2ad5': '\u2ad6', + '\u2aec': '\u2aed', '\u2af7': '\u2af8', '\u2af9': '\u2afa', + '\u2e02': '\u2e03', '\u2e04': '\u2e05', '\u2e09': '\u2e0a', + '\u2e0c': '\u2e0d', '\u2e1c': '\u2e1d', '\u2e20': '\u2e21', + '\u3008': '\u3009', '\u300a': '\u300b', '\u300c': '\u300d', + '\u300e': '\u300f', '\u3010': '\u3011', '\u3014': '\u3015', + '\u3016': '\u3017', '\u3018': '\u3019', '\u301a': '\u301b', + '\u301d': '\u301e', '\ufd3e': '\ufd3f', '\ufe17': '\ufe18', + '\ufe35': '\ufe36', '\ufe37': '\ufe38', '\ufe39': '\ufe3a', + '\ufe3b': '\ufe3c', '\ufe3d': '\ufe3e', '\ufe3f': '\ufe40', + '\ufe41': '\ufe42', '\ufe43': '\ufe44', '\ufe47': '\ufe48', + '\ufe59': '\ufe5a', '\ufe5b': '\ufe5c', '\ufe5d': '\ufe5e', + '\uff08': '\uff09', '\uff1c': '\uff1e', '\uff3b': '\uff3d', + '\uff5b': '\uff5d', '\uff5f': '\uff60', '\uff62': '\uff63', } def _build_word_match(words, boundary_regex_fragment=None, prefix='', suffix=''): @@ -504,11 +612,11 @@ def embedded_perl6_callback(lexer, match, context): 
Name.Builtin), (_build_word_match(PERL6_BUILTINS, PERL6_IDENTIFIER_RANGE), Name.Builtin), # copied from PerlLexer - (r'[$@%&][.^:?=!~]?' + PERL6_IDENTIFIER_RANGE + u'+(?:<<.*?>>|<.*?>|«.*?»)*', + (r'[$@%&][.^:?=!~]?' + PERL6_IDENTIFIER_RANGE + '+(?:<<.*?>>|<.*?>|«.*?»)*', Name.Variable), (r'\$[!/](?:<<.*?>>|<.*?>|«.*?»)*', Name.Variable.Global), (r'::\?\w+', Name.Variable.Global), - (r'[$@%&]\*' + PERL6_IDENTIFIER_RANGE + u'+(?:<<.*?>>|<.*?>|«.*?»)*', + (r'[$@%&]\*' + PERL6_IDENTIFIER_RANGE + '+(?:<<.*?>>|<.*?>|«.*?»)*', Name.Variable.Global), (r'\$(?:<.*?>)+', Name.Variable), (r'(?:q|qq|Q)[a-zA-Z]?\s*(?P:[\w\s:]+)?\s*(?P(?P[^0-9a-zA-Z:\s])' @@ -613,8 +721,12 @@ def strip_pod(lines): continue break + if ':=' in text: + # Same logic as above for PerlLexer + rating /= 2 + return rating def __init__(self, **options): - super(Perl6Lexer, self).__init__(**options) + super().__init__(**options) self.encoding = options.get('encoding', 'utf-8') diff --git a/src/typecode/_vendor/pygments/lexers/php.py b/src/typecode/_vendor/pygments/lexers/php.py index d2da3d5..979a883 100644 --- a/src/typecode/_vendor/pygments/lexers/php.py +++ b/src/typecode/_vendor/pygments/lexers/php.py @@ -5,20 +5,21 @@ Lexers for PHP and related languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" import re -from typecode._vendor.pygments.lexer import RegexLexer, include, bygroups, default, using, \ - this, words +from typecode._vendor.pygments.lexer import Lexer, RegexLexer, include, bygroups, default, \ + using, this, words, do_insertions from typecode._vendor.pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Number, Punctuation, Other -from typecode._vendor.pygments.util import get_bool_opt, get_list_opt, iteritems, \ - shebang_matches + Number, Punctuation, Other, Generic +from typecode._vendor.pygments.util import get_bool_opt, get_list_opt, shebang_matches -__all__ = ['ZephirLexer', 'PhpLexer'] +__all__ = ['ZephirLexer', 'PsyshConsoleLexer', 'PhpLexer'] + +line_re = re.compile('.*?\n') class ZephirLexer(RegexLexer): @@ -50,13 +51,14 @@ class ZephirLexer(RegexLexer): include('commentsandwhitespace'), (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/' r'([gim]+\b|\B)', String.Regex, '#pop'), + (r'/', Operator, '#pop'), default('#pop') ], 'badregex': [ (r'\n', Text, '#pop') ], 'root': [ - (r'^(?=\s|/|)', + (r'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]' + r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)' + r'(\s*)(:-|-->)', bygroups(Name.Function, Text, Operator)), # function defn - (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]' - u'[\\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)' - u'(\\s*)(\\()', + (r'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]' + r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)' + r'(\s*)(\()', bygroups(Name.Function, Text, Punctuation)), - (u'[a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]' - u'[\\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*', + (r'[a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]' + r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*', String.Atom), # atom, characters # This one includes ! 
- (u'[#&*+\\-./:<=>?@\\\\^~\u00a1-\u00bf\u2010-\u303f]+', + (r'[#&*+\-./:<=>?@\\^~\u00a1-\u00bf\u2010-\u303f]+', String.Atom), # atom, graphics (r'[A-Z_]\w*', Name.Variable), - (u'\\s+|[\u2000-\u200f\ufff0-\ufffe\uffef]', Text), + (r'\s+|[\u2000-\u200f\ufff0-\ufffe\uffef]', Text), ], 'nested-comment': [ (r'\*/', Comment.Multiline, '#pop'), @@ -107,19 +107,19 @@ class LogtalkLexer(RegexLexer): (r'\n', Text), (r'\s+', Text), # Numbers - (r"0'.", Number), + (r"0'[\\]?.", Number), (r'0b[01]+', Number.Bin), (r'0o[0-7]+', Number.Oct), (r'0x[0-9a-fA-F]+', Number.Hex), (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number), # Variables - (r'([A-Z_]\w*)', Name.Variable), + (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable), # Event handlers (r'(after|before)(?=[(])', Keyword), # Message forwarding handler (r'forward(?=[(])', Keyword), # Execution-context methods - (r'(parameter|this|se(lf|nder))(?=[(])', Keyword), + (r'(context|parameter|this|se(lf|nder))(?=[(])', Keyword), # Reflection (r'(current_predicate|predicate_property)(?=[(])', Keyword), # DCGs and term expansion @@ -135,20 +135,23 @@ class LogtalkLexer(RegexLexer): # Events (r'(current_event|(abolish|define)_events)(?=[(])', Keyword), # Flags - (r'(current|set)_logtalk_flag(?=[(])', Keyword), + (r'(create|current|set)_logtalk_flag(?=[(])', Keyword), # Compiling, loading, and library paths - (r'logtalk_(compile|l(ibrary_path|oad|oad_context)|make)(?=[(])', Keyword), + (r'logtalk_(compile|l(ibrary_path|oad|oad_context)|make(_target_action)?)(?=[(])', Keyword), (r'\blogtalk_make\b', Keyword), # Database (r'(clause|retract(all)?)(?=[(])', Keyword), (r'a(bolish|ssert(a|z))(?=[(])', Keyword), # Control constructs (r'(ca(ll|tch)|throw)(?=[(])', Keyword), - (r'(fa(il|lse)|true)\b', Keyword), + (r'(fa(il|lse)|true|(instantiation|system)_error)\b', Keyword), + (r'(type|domain|existence|permission|representation|evaluation|resource|syntax)_error(?=[(])', Keyword), # All solutions (r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword), - # Multi-threading 
meta-predicates - (r'threaded(_(call|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword), + # Multi-threading predicates + (r'threaded(_(ca(ll|ncel)|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword), + # Engine predicates + (r'threaded_engine(_(create|destroy|self|next|next_reified|yield|post|fetch))?(?=[(])', Keyword), # Term unification (r'(subsumes_term|unify_with_occurs_check)(?=[(])', Keyword), # Term creation and decomposition @@ -160,8 +163,7 @@ class LogtalkLexer(RegexLexer): # Other arithmetic functors (r'(cos|a(cos|sin|tan|tan2)|exp|log|s(in|qrt)|xor)(?=[(])', Keyword), # Term testing - (r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|' - r'ground|acyclic_term)(?=[(])', Keyword), + (r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|ground|acyclic_term)(?=[(])', Keyword), # Term comparison (r'compare(?=[(])', Keyword), # Stream selection and control @@ -225,11 +227,11 @@ class LogtalkLexer(RegexLexer): # Existential quantifier (r'\^', Operator), # Strings - (r'"(\\\\|\\"|[^"])*"', String), - # Ponctuation + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), + # Punctuation (r'[()\[\],.|]', Text), # Atoms - (r"[a-z]\w*", Text), + (r"[a-z][a-zA-Z0-9_]*", Text), (r"'", String, 'quoted_atom'), ], @@ -244,44 +246,43 @@ class LogtalkLexer(RegexLexer): 'directive': [ # Conditional compilation directives (r'(el)?if(?=[(])', Keyword, 'root'), - (r'(e(lse|ndif))[.]', Keyword, 'root'), + (r'(e(lse|ndif))(?=[.])', Keyword, 'root'), # Entity directives (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'), - (r'(end_(category|object|protocol))[.]', Keyword, 'root'), + (r'(end_(category|object|protocol))(?=[.])', Keyword, 'root'), # Predicate scope directives (r'(public|protected|private)(?=[(])', Keyword, 'root'), # Other directives (r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'), (r'in(clude|itialization|fo)(?=[(])', Keyword, 'root'), - (r'(built_in|dynamic|synchronized|threaded)[.]', Keyword, 'root'), - 
(r'(alias|d(ynamic|iscontiguous)|m(eta_(non_terminal|predicate)|ode|ultifile)|' - r's(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'), + (r'(built_in|dynamic|synchronized|threaded)(?=[.])', Keyword, 'root'), + (r'(alias|d(ynamic|iscontiguous)|m(eta_(non_terminal|predicate)|ode|ultifile)|s(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'), (r'op(?=[(])', Keyword, 'root'), (r'(c(alls|oinductive)|module|reexport|use(s|_module))(?=[(])', Keyword, 'root'), - (r'[a-z]\w*(?=[(])', Text, 'root'), - (r'[a-z]\w*[.]', Text, 'root'), + (r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'), + (r'[a-z][a-zA-Z0-9_]*(?=[.])', Text, 'root'), ], 'entityrelations': [ (r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword), # Numbers - (r"0'.", Number), + (r"0'[\\]?.", Number), (r'0b[01]+', Number.Bin), (r'0o[0-7]+', Number.Oct), (r'0x[0-9a-fA-F]+', Number.Hex), (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number), # Variables - (r'([A-Z_]\w*)', Name.Variable), + (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable), # Atoms - (r"[a-z]\w*", Text), + (r"[a-z][a-zA-Z0-9_]*", Text), (r"'", String, 'quoted_atom'), # Strings - (r'"(\\\\|\\"|[^"])*"', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), # End of entity-opening directive (r'([)]\.)', Text, 'root'), # Scope operator (r'(::)', Operator), - # Ponctuation + # Punctuation (r'[()\[\],.|]', Text), # Comments (r'%.*?\n', Comment), diff --git a/src/typecode/_vendor/pygments/lexers/promql.py b/src/typecode/_vendor/pygments/lexers/promql.py new file mode 100644 index 0000000..45818df --- /dev/null +++ b/src/typecode/_vendor/pygments/lexers/promql.py @@ -0,0 +1,183 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.promql + ~~~~~~~~~~~~~~~~~~~~~~ + + Lexer for Prometheus Query Language. + + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +from typecode._vendor.pygments.lexer import RegexLexer, bygroups, default, words +from typecode._vendor.pygments.token import ( + Comment, + Keyword, + Name, + Number, + Operator, + Punctuation, + String, + Whitespace, +) + +__all__ = ["PromQLLexer"] + + +class PromQLLexer(RegexLexer): + """ + For `PromQL `_ queries. + + For details about the grammar see: + https://github.com/prometheus/prometheus/tree/master/promql/parser + + .. versionadded: 2.7 + """ + + name = "PromQL" + aliases = ["promql"] + filenames = ["*.promql"] + + base_keywords = ( + words( + ( + "bool", + "by", + "group_left", + "group_right", + "ignoring", + "offset", + "on", + "without", + ), + suffix=r"\b", + ), + Keyword, + ) + + aggregator_keywords = ( + words( + ( + "sum", + "min", + "max", + "avg", + "group", + "stddev", + "stdvar", + "count", + "count_values", + "bottomk", + "topk", + "quantile", + ), + suffix=r"\b", + ), + Keyword, + ) + + function_keywords = ( + words( + ( + "abs", + "absent", + "absent_over_time", + "avg_over_time", + "ceil", + "changes", + "clamp_max", + "clamp_min", + "count_over_time", + "day_of_month", + "day_of_week", + "days_in_month", + "delta", + "deriv", + "exp", + "floor", + "histogram_quantile", + "holt_winters", + "hour", + "idelta", + "increase", + "irate", + "label_join", + "label_replace", + "ln", + "log10", + "log2", + "max_over_time", + "min_over_time", + "minute", + "month", + "predict_linear", + "quantile_over_time", + "rate", + "resets", + "round", + "scalar", + "sort", + "sort_desc", + "sqrt", + "stddev_over_time", + "stdvar_over_time", + "sum_over_time", + "time", + "timestamp", + "vector", + "year", + ), + suffix=r"\b", + ), + Keyword.Reserved, + ) + + tokens = { + "root": [ + (r"\n", Whitespace), + (r"\s+", Whitespace), + (r",", Punctuation), + # Keywords + base_keywords, + aggregator_keywords, + function_keywords, + # Offsets + (r"[1-9][0-9]*[smhdwy]", String), + # Numbers + (r"-?[0-9]+\.[0-9]+", Number.Float), + (r"-?[0-9]+", 
Number.Integer), + # Comments + (r"#.*?$", Comment.Single), + # Operators + (r"(\+|\-|\*|\/|\%|\^)", Operator), + (r"==|!=|>=|<=|<|>", Operator), + (r"and|or|unless", Operator.Word), + # Metrics + (r"[_a-zA-Z][a-zA-Z0-9_]+", Name.Variable), + # Params + (r'(["\'])(.*?)(["\'])', bygroups(Punctuation, String, Punctuation)), + # Other states + (r"\(", Operator, "function"), + (r"\)", Operator), + (r"\{", Punctuation, "labels"), + (r"\[", Punctuation, "range"), + ], + "labels": [ + (r"\}", Punctuation, "#pop"), + (r"\n", Whitespace), + (r"\s+", Whitespace), + (r",", Punctuation), + (r'([_a-zA-Z][a-zA-Z0-9_]*?)(\s*?)(=~|!=|=|~!)(\s*?)(")(.*?)(")', + bygroups(Name.Label, Whitespace, Operator, Whitespace, + Punctuation, String, Punctuation)), + ], + "range": [ + (r"\]", Punctuation, "#pop"), + (r"[1-9][0-9]*[smhdwy]", String), + ], + "function": [ + (r"\)", Operator, "#pop"), + (r"\(", Operator, "#push"), + default("#pop"), + ], + } diff --git a/src/typecode/_vendor/pygments/lexers/python.py b/src/typecode/_vendor/pygments/lexers/python.py index 00c618c..a06a55e 100644 --- a/src/typecode/_vendor/pygments/lexers/python.py +++ b/src/typecode/_vendor/pygments/lexers/python.py @@ -5,7 +5,7 @@ Lexers for Python and related languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -19,21 +19,379 @@ from typecode._vendor.pygments import unistring as uni __all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer', - 'Python3Lexer', 'Python3TracebackLexer', 'CythonLexer', - 'DgLexer', 'NumPyLexer'] + 'Python2Lexer', 'Python2TracebackLexer', + 'CythonLexer', 'DgLexer', 'NumPyLexer'] line_re = re.compile('.*?\n') class PythonLexer(RegexLexer): """ - For `Python `_ source code. + For `Python `_ source code (version 3.x). + + .. versionadded:: 0.10 + + .. versionchanged:: 2.5 + This is now the default ``PythonLexer``. 
It is still available as the + alias ``Python3Lexer``. """ name = 'Python' - aliases = ['python', 'py', 'sage'] - filenames = ['*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'] - mimetypes = ['text/x-python', 'application/x-python'] + aliases = ['python', 'py', 'sage', 'python3', 'py3'] + filenames = [ + '*.py', + '*.pyw', + # Jython + '*.jy', + # Sage + '*.sage', + # SCons + '*.sc', + 'SConstruct', + 'SConscript', + # Skylark/Starlark (used by Bazel, Buck, and Pants) + '*.bzl', + 'BUCK', + 'BUILD', + 'BUILD.bazel', + 'WORKSPACE', + # Twisted Application infrastructure + '*.tac', + ] + mimetypes = ['text/x-python', 'application/x-python', + 'text/x-python3', 'application/x-python3'] + + flags = re.MULTILINE | re.UNICODE + + uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue) + + def innerstring_rules(ttype): + return [ + # the old style '%s' % (...) string formatting (still valid in Py3) + (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?' + '[hlL]?[E-GXc-giorsaux%]', String.Interpol), + # the new style '{}'.format(...) string formatting + (r'\{' + r'((\w+)((\.\w+)|(\[[^\]]+\]))*)?' # field name + r'(\![sra])?' # conversion + r'(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?' + r'\}', String.Interpol), + + # backslashes, quotes and formatting signs must be parsed one at a time + (r'[^\\\'"%{\n]+', ttype), + (r'[\'"\\]', ttype), + # unhandled string formatting sign + (r'%|(\{{1,2})', ttype) + # newlines are an error (use "nl" state) + ] + + def fstring_rules(ttype): + return [ + # Assuming that a '}' is the closing brace after format specifier. + # Sadly, this means that we won't detect syntax error. But it's + # more important to parse correct syntax correctly, than to + # highlight invalid syntax. 
+ (r'\}', String.Interpol), + (r'\{', String.Interpol, 'expr-inside-fstring'), + # backslashes, quotes and formatting signs must be parsed one at a time + (r'[^\\\'"{}\n]+', ttype), + (r'[\'"\\]', ttype), + # newlines are an error (use "nl" state) + ] + + tokens = { + 'root': [ + (r'\n', Text), + (r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")', + bygroups(Text, String.Affix, String.Doc)), + (r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')", + bygroups(Text, String.Affix, String.Doc)), + (r'\A#!.+$', Comment.Hashbang), + (r'#.*$', Comment.Single), + (r'\\\n', Text), + (r'\\', Text), + include('keywords'), + (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'), + (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'), + (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text), + 'fromimport'), + (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text), + 'import'), + include('expr'), + ], + 'expr': [ + # raw f-strings + ('(?i)(rf|fr)(""")', + bygroups(String.Affix, String.Double), 'tdqf'), + ("(?i)(rf|fr)(''')", + bygroups(String.Affix, String.Single), 'tsqf'), + ('(?i)(rf|fr)(")', + bygroups(String.Affix, String.Double), 'dqf'), + ("(?i)(rf|fr)(')", + bygroups(String.Affix, String.Single), 'sqf'), + # non-raw f-strings + ('([fF])(""")', bygroups(String.Affix, String.Double), + combined('fstringescape', 'tdqf')), + ("([fF])(''')", bygroups(String.Affix, String.Single), + combined('fstringescape', 'tsqf')), + ('([fF])(")', bygroups(String.Affix, String.Double), + combined('fstringescape', 'dqf')), + ("([fF])(')", bygroups(String.Affix, String.Single), + combined('fstringescape', 'sqf')), + # raw strings + ('(?i)(rb|br|r)(""")', + bygroups(String.Affix, String.Double), 'tdqs'), + ("(?i)(rb|br|r)(''')", + bygroups(String.Affix, String.Single), 'tsqs'), + ('(?i)(rb|br|r)(")', + bygroups(String.Affix, String.Double), 'dqs'), + ("(?i)(rb|br|r)(')", + bygroups(String.Affix, String.Single), 'sqs'), + # non-raw strings + ('([uUbB]?)(""")', 
bygroups(String.Affix, String.Double), + combined('stringescape', 'tdqs')), + ("([uUbB]?)(''')", bygroups(String.Affix, String.Single), + combined('stringescape', 'tsqs')), + ('([uUbB]?)(")', bygroups(String.Affix, String.Double), + combined('stringescape', 'dqs')), + ("([uUbB]?)(')", bygroups(String.Affix, String.Single), + combined('stringescape', 'sqs')), + (r'[^\S\n]+', Text), + (r'!=|==|<<|>>|:=|[-~+/*%=<>&^|.]', Operator), + (r'[]{}:(),;[]', Punctuation), + (r'(in|is|and|or|not)\b', Operator.Word), + include('expr-keywords'), + include('builtins'), + include('magicfuncs'), + include('magicvars'), + include('name'), + include('numbers'), + ], + 'expr-inside-fstring': [ + (r'[{([]', Punctuation, 'expr-inside-fstring-inner'), + # without format specifier + (r'(=\s*)?' # debug (https://bugs.python.org/issue36817) + r'(\![sraf])?' # conversion + r'\}', String.Interpol, '#pop'), + # with format specifier + # we'll catch the remaining '}' in the outer scope + (r'(=\s*)?' # debug (https://bugs.python.org/issue36817) + r'(\![sraf])?' 
# conversion + r':', String.Interpol, '#pop'), + (r'\s+', Text), # allow new lines + include('expr'), + ], + 'expr-inside-fstring-inner': [ + (r'[{([]', Punctuation, 'expr-inside-fstring-inner'), + (r'[])}]', Punctuation, '#pop'), + (r'\s+', Text), # allow new lines + include('expr'), + ], + 'expr-keywords': [ + # Based on https://docs.python.org/3/reference/expressions.html + (words(( + 'async for', 'await', 'else', 'for', 'if', 'lambda', + 'yield', 'yield from'), suffix=r'\b'), + Keyword), + (words(('True', 'False', 'None'), suffix=r'\b'), Keyword.Constant), + ], + 'keywords': [ + (words(( + 'assert', 'async', 'await', 'break', 'continue', 'del', 'elif', + 'else', 'except', 'finally', 'for', 'global', 'if', 'lambda', + 'pass', 'raise', 'nonlocal', 'return', 'try', 'while', 'yield', + 'yield from', 'as', 'with'), suffix=r'\b'), + Keyword), + (words(('True', 'False', 'None'), suffix=r'\b'), Keyword.Constant), + ], + 'builtins': [ + (words(( + '__import__', 'abs', 'all', 'any', 'bin', 'bool', 'bytearray', + 'bytes', 'chr', 'classmethod', 'compile', 'complex', + 'delattr', 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'filter', + 'float', 'format', 'frozenset', 'getattr', 'globals', 'hasattr', + 'hash', 'hex', 'id', 'input', 'int', 'isinstance', 'issubclass', + 'iter', 'len', 'list', 'locals', 'map', 'max', 'memoryview', + 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'print', + 'property', 'range', 'repr', 'reversed', 'round', 'set', 'setattr', + 'slice', 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', + 'type', 'vars', 'zip'), prefix=r'(?`_ source code. + + .. versionchanged:: 2.5 + This class has been renamed from ``PythonLexer``. ``PythonLexer`` now + refers to the Python 3 variant. File name patterns like ``*.py`` have + been moved to Python 3 as well. 
+ """ + + name = 'Python 2.x' + aliases = ['python2', 'py2'] + filenames = [] # now taken over by PythonLexer (3.x) + mimetypes = ['text/x-python2', 'application/x-python2'] def innerstring_rules(ttype): return [ @@ -124,15 +482,15 @@ def innerstring_rules(ttype): 'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit', 'IOError', 'ImportError', 'ImportWarning', 'IndentationError', 'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError', - 'MemoryError', 'ModuleNotFoundError', 'NameError', 'NotImplemented', 'NotImplementedError', - 'OSError', 'OverflowError', 'OverflowWarning', 'PendingDeprecationWarning', - 'RecursionError', 'ReferenceError', 'RuntimeError', 'RuntimeWarning', 'StandardError', - 'StopIteration', 'StopAsyncIteration', 'SyntaxError', 'SyntaxWarning', 'SystemError', - 'SystemExit', 'TabError', 'TypeError', 'UnboundLocalError', - 'UnicodeDecodeError', 'UnicodeEncodeError', 'UnicodeError', - 'UnicodeTranslateError', 'UnicodeWarning', 'UserWarning', - 'ValueError', 'VMSError', 'Warning', 'WindowsError', - 'ZeroDivisionError'), prefix=r'(?`_ source code (version 3.0). - - .. versionadded:: 0.10 - """ - - name = 'Python 3' - aliases = ['python3', 'py3'] - filenames = [] # Nothing until Python 3 gets widespread - mimetypes = ['text/x-python3', 'application/x-python3'] - - flags = re.MULTILINE | re.UNICODE - - uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue) - - def innerstring_rules(ttype): - return [ - # the old style '%s' % (...) string formatting (still valid in Py3) - (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?' - '[hlL]?[E-GXc-giorsaux%]', String.Interpol), - # the new style '{}'.format(...) string formatting - (r'\{' - r'((\w+)((\.\w+)|(\[[^\]]+\]))*)?' # field name - r'(\![sra])?' # conversion - r'(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?' 
- r'\}', String.Interpol), - - # backslashes, quotes and formatting signs must be parsed one at a time - (r'[^\\\'"%{\n]+', ttype), - (r'[\'"\\]', ttype), - # unhandled string formatting sign - (r'%|(\{{1,2})', ttype) - # newlines are an error (use "nl" state) - ] - - tokens = PythonLexer.tokens.copy() - tokens['keywords'] = [ - (words(( - 'assert', 'async', 'await', 'break', 'continue', 'del', 'elif', - 'else', 'except', 'finally', 'for', 'global', 'if', 'lambda', 'pass', - 'raise', 'nonlocal', 'return', 'try', 'while', 'yield', 'yield from', - 'as', 'with'), suffix=r'\b'), - Keyword), - (words(( - 'True', 'False', 'None'), suffix=r'\b'), - Keyword.Constant), - ] - tokens['builtins'] = [ - (words(( - '__import__', 'abs', 'all', 'any', 'bin', 'bool', 'bytearray', 'bytes', - 'chr', 'classmethod', 'cmp', 'compile', 'complex', 'delattr', 'dict', - 'dir', 'divmod', 'enumerate', 'eval', 'filter', 'float', 'format', - 'frozenset', 'getattr', 'globals', 'hasattr', 'hash', 'hex', 'id', - 'input', 'int', 'isinstance', 'issubclass', 'iter', 'len', 'list', - 'locals', 'map', 'max', 'memoryview', 'min', 'next', 'object', 'oct', - 'open', 'ord', 'pow', 'print', 'property', 'range', 'repr', 'reversed', - 'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod', 'str', - 'sum', 'super', 'tuple', 'type', 'vars', 'zip'), prefix=r'(?>> ') or line.startswith(u'... '): + if line.startswith('>>> ') or line.startswith('... '): tb = 0 insertions.append((len(curcode), [(0, Generic.Prompt, line[:4])])) curcode += line[4:] - elif line.rstrip() == u'...' and not tb: + elif line.rstrip() == '...' 
and not tb: # only a new >>> prompt can end an exception block # otherwise an ellipsis in place of the traceback frames # will be mishandled insertions.append((len(curcode), - [(0, Generic.Prompt, u'...')])) + [(0, Generic.Prompt, '...')])) curcode += line[3:] else: if curcode: - for item in do_insertions( - insertions, pylexer.get_tokens_unprocessed(curcode)): - yield item + yield from do_insertions( + insertions, pylexer.get_tokens_unprocessed(curcode)) curcode = '' insertions = [] - if (line.startswith(u'Traceback (most recent call last):') or - re.match(u' File "[^"]+", line \\d+\\n$', line)): + if (line.startswith('Traceback (most recent call last):') or + re.match(' File "[^"]+", line \\d+\\n$', line)): tb = 1 curtb = line tbindex = match.start() @@ -474,7 +673,7 @@ def get_tokens_unprocessed(self, text): yield match.start(), Name.Class, line elif tb: curtb += line - if not (line.startswith(' ') or line.strip() == u'...'): + if not (line.startswith(' ') or line.strip() == '...'): tb = 0 for i, t, v in tblexer.get_tokens_unprocessed(curtb): yield tbindex+i, t, v @@ -482,9 +681,8 @@ def get_tokens_unprocessed(self, text): else: yield match.start(), Generic.Output, line if curcode: - for item in do_insertions(insertions, - pylexer.get_tokens_unprocessed(curcode)): - yield item + yield from do_insertions(insertions, + pylexer.get_tokens_unprocessed(curcode)) if curtb: for i, t, v in tblexer.get_tokens_unprocessed(curtb): yield tbindex+i, t, v @@ -492,23 +690,28 @@ def get_tokens_unprocessed(self, text): class PythonTracebackLexer(RegexLexer): """ - For Python tracebacks. + For Python 3.x tracebacks, with support for chained exceptions. - .. versionadded:: 0.7 + .. versionadded:: 1.0 + + .. versionchanged:: 2.5 + This is now the default ``PythonTracebackLexer``. It is still available + as the alias ``Python3TracebackLexer``. 
""" name = 'Python Traceback' - aliases = ['pytb'] - filenames = ['*.pytb'] - mimetypes = ['text/x-python-traceback'] + aliases = ['pytb', 'py3tb'] + filenames = ['*.pytb', '*.py3tb'] + mimetypes = ['text/x-python-traceback', 'text/x-python3-traceback'] tokens = { 'root': [ - # Cover both (most recent call last) and (innermost last) - # The optional ^C allows us to catch keyboard interrupt signals. - (r'^(\^C)?(Traceback.*\n)', - bygroups(Text, Generic.Traceback), 'intb'), - # SyntaxError starts with this. + (r'\n', Text), + (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'), + (r'^During handling of the above exception, another ' + r'exception occurred:\n\n', Generic.Traceback), + (r'^The above exception was the direct cause of the ' + r'following exception:\n\n', Generic.Traceback), (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'), (r'^.*\n', Other), ], @@ -523,33 +726,40 @@ class PythonTracebackLexer(RegexLexer): bygroups(Text, Comment, Text)), # for doctests... (r'^([^:]+)(: )(.+)(\n)', bygroups(Generic.Error, Text, Name, Text), '#pop'), - (r'^([a-zA-Z_]\w*)(:?\n)', + (r'^([a-zA-Z_][\w.]*)(:?\n)', bygroups(Generic.Error, Text), '#pop') ], } -class Python3TracebackLexer(RegexLexer): +Python3TracebackLexer = PythonTracebackLexer + + +class Python2TracebackLexer(RegexLexer): """ - For Python 3.0 tracebacks, with support for chained exceptions. + For Python tracebacks. - .. versionadded:: 1.0 + .. versionadded:: 0.7 + + .. versionchanged:: 2.5 + This class has been renamed from ``PythonTracebackLexer``. + ``PythonTracebackLexer`` now refers to the Python 3 variant. 
""" - name = 'Python 3.0 Traceback' - aliases = ['py3tb'] - filenames = ['*.py3tb'] - mimetypes = ['text/x-python3-traceback'] + name = 'Python 2.x Traceback' + aliases = ['py2tb'] + filenames = ['*.py2tb'] + mimetypes = ['text/x-python2-traceback'] tokens = { 'root': [ - (r'\n', Text), - (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'), - (r'^During handling of the above exception, another ' - r'exception occurred:\n\n', Generic.Traceback), - (r'^The above exception was the direct cause of the ' - r'following exception:\n\n', Generic.Traceback), + # Cover both (most recent call last) and (innermost last) + # The optional ^C allows us to catch keyboard interrupt signals. + (r'^(\^C)?(Traceback.*\n)', + bygroups(Text, Generic.Traceback), 'intb'), + # SyntaxError starts with this. (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'), + (r'^.*\n', Other), ], 'intb': [ (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)', @@ -557,7 +767,7 @@ class Python3TracebackLexer(RegexLexer): (r'^( File )("[^"]+")(, line )(\d+)(\n)', bygroups(Text, Name.Builtin, Text, Number, Text)), (r'^( )(.+)(\n)', - bygroups(Text, using(Python3Lexer), Text)), + bygroups(Text, using(Python2Lexer), Text)), (r'^([ \t]*)(\.\.\.)(\n)', bygroups(Text, Comment, Text)), # for doctests... 
(r'^([^:]+)(: )(.+)(\n)', @@ -620,7 +830,7 @@ class CythonLexer(RegexLexer): ], 'keywords': [ (words(( - 'assert', 'break', 'by', 'continue', 'ctypedef', 'del', 'elif', + 'assert', 'async', 'await', 'break', 'by', 'continue', 'ctypedef', 'del', 'elif', 'else', 'except', 'except?', 'exec', 'finally', 'for', 'fused', 'gil', 'global', 'if', 'include', 'lambda', 'nogil', 'pass', 'print', 'raise', 'return', 'try', 'while', 'yield', 'as', 'with'), suffix=r'\b'), @@ -860,7 +1070,7 @@ class NumPyLexer(PythonLexer): mimetypes = [] filenames = [] - EXTRA_KEYWORDS = set(( + EXTRA_KEYWORDS = { 'abs', 'absolute', 'accumulate', 'add', 'alen', 'all', 'allclose', 'alltrue', 'alterdot', 'amax', 'amin', 'angle', 'any', 'append', 'apply_along_axis', 'apply_over_axes', 'arange', 'arccos', 'arccosh', @@ -925,7 +1135,7 @@ class NumPyLexer(PythonLexer): 'typename', 'uniform', 'union1d', 'unique', 'unique1d', 'unravel_index', 'unwrap', 'vander', 'var', 'vdot', 'vectorize', 'view', 'vonmises', 'vsplit', 'vstack', 'weibull', 'where', 'who', 'zeros', 'zeros_like' - )) + } def get_tokens_unprocessed(self, text): for index, token, value in \ @@ -936,6 +1146,6 @@ def get_tokens_unprocessed(self, text): yield index, token, value def analyse_text(text): - return (shebang_matches(text, r'pythonw?(2(\.\d)?)?') or + return (shebang_matches(text, r'pythonw?(3(\.\d)?)?') or 'import ' in text[:1000]) \ and ('import numpy' in text or 'from numpy import' in text) diff --git a/src/typecode/_vendor/pygments/lexers/qvt.py b/src/typecode/_vendor/pygments/lexers/qvt.py index 46a5aae..497aaad 100644 --- a/src/typecode/_vendor/pygments/lexers/qvt.py +++ b/src/typecode/_vendor/pygments/lexers/qvt.py @@ -5,7 +5,7 @@ Lexer for QVT Operational language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -18,7 +18,7 @@ class QVToLexer(RegexLexer): - u""" + """ For the `QVT Operational Mapping language `_. Reference for implementing this: «Meta Object Facility (MOF) 2.0 diff --git a/src/typecode/_vendor/pygments/lexers/r.py b/src/typecode/_vendor/pygments/lexers/r.py index 2ae5dd9..6196d46 100644 --- a/src/typecode/_vendor/pygments/lexers/r.py +++ b/src/typecode/_vendor/pygments/lexers/r.py @@ -5,7 +5,7 @@ Lexers for the R/S languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -49,9 +49,8 @@ def get_tokens_unprocessed(self, text): # If we have stored prompt lines, need to process them first. if current_code_block: # Weave together the prompts and highlight code. - for item in do_insertions( - insertions, slexer.get_tokens_unprocessed(current_code_block)): - yield item + yield from do_insertions( + insertions, slexer.get_tokens_unprocessed(current_code_block)) # Reset vars for next code block. current_code_block = '' insertions = [] @@ -62,9 +61,8 @@ def get_tokens_unprocessed(self, text): # process the last code block. This is neither elegant nor DRY so # should be changed. if current_code_block: - for item in do_insertions( - insertions, slexer.get_tokens_unprocessed(current_code_block)): - yield item + yield from do_insertions( + insertions, slexer.get_tokens_unprocessed(current_code_block)) class SLexer(RegexLexer): @@ -80,7 +78,7 @@ class SLexer(RegexLexer): mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile'] - valid_name = r'(?:`[^`\\]*(?:\\.[^`\\]*)*`)|(?:(?:[a-zA-z]|[_.][^0-9])[\w_.]*)' + valid_name = r'`[^`\\]*(?:\\.[^`\\]*)*`|(?:[a-zA-Z]|\.[A-Za-z_.])[\w.]*|\.' 
tokens = { 'comments': [ (r'#.*$', Comment.Single), diff --git a/src/typecode/_vendor/pygments/lexers/rdf.py b/src/typecode/_vendor/pygments/lexers/rdf.py index 6be4bbf..d7f2f66 100644 --- a/src/typecode/_vendor/pygments/lexers/rdf.py +++ b/src/typecode/_vendor/pygments/lexers/rdf.py @@ -5,7 +5,7 @@ Lexers for semantic web and RDF query languages and markup. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -15,12 +15,12 @@ from typecode._vendor.pygments.token import Keyword, Punctuation, String, Number, Operator, Generic, \ Whitespace, Name, Literal, Comment, Text -__all__ = ['SparqlLexer', 'TurtleLexer'] +__all__ = ['SparqlLexer', 'TurtleLexer', 'ShExCLexer'] class SparqlLexer(RegexLexer): """ - Lexer for `SPARQL `_ query language. + Lexer for `SPARQL `_ query language. .. versionadded:: 2.0 """ @@ -31,27 +31,27 @@ class SparqlLexer(RegexLexer): # character group definitions :: - PN_CHARS_BASE_GRP = (u'a-zA-Z' - u'\u00c0-\u00d6' - u'\u00d8-\u00f6' - u'\u00f8-\u02ff' - u'\u0370-\u037d' - u'\u037f-\u1fff' - u'\u200c-\u200d' - u'\u2070-\u218f' - u'\u2c00-\u2fef' - u'\u3001-\ud7ff' - u'\uf900-\ufdcf' - u'\ufdf0-\ufffd') + PN_CHARS_BASE_GRP = ('a-zA-Z' + '\u00c0-\u00d6' + '\u00d8-\u00f6' + '\u00f8-\u02ff' + '\u0370-\u037d' + '\u037f-\u1fff' + '\u200c-\u200d' + '\u2070-\u218f' + '\u2c00-\u2fef' + '\u3001-\ud7ff' + '\uf900-\ufdcf' + '\ufdf0-\ufffd') PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_') PN_CHARS_GRP = (PN_CHARS_U_GRP + r'\-' + r'0-9' + - u'\u00b7' + - u'\u0300-\u036f' + - u'\u203f-\u2040') + '\u00b7' + + '\u0300-\u036f' + + '\u203f-\u2040') HEX_GRP = '0-9A-Fa-f' @@ -76,8 +76,8 @@ class SparqlLexer(RegexLexer): PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?' 
- VARNAME = u'[0-9' + PN_CHARS_U_GRP + '][' + PN_CHARS_U_GRP + \ - u'0-9\u00b7\u0300-\u036f\u203f-\u2040]*' + VARNAME = '[0-9' + PN_CHARS_U_GRP + '][' + PN_CHARS_U_GRP + \ + '0-9\u00b7\u0300-\u036f\u203f-\u2040]*' PERCENT = '%' + HEX + HEX @@ -99,10 +99,10 @@ class SparqlLexer(RegexLexer): # keywords :: (r'(?i)(select|construct|describe|ask|where|filter|group\s+by|minus|' r'distinct|reduced|from\s+named|from|order\s+by|desc|asc|limit|' - r'offset|bindings|load|clear|drop|create|add|move|copy|' - r'insert\s+data|delete\s+data|delete\s+where|delete|insert|' + r'offset|values|bindings|load|into|clear|drop|create|add|move|copy|' + r'insert\s+data|delete\s+data|delete\s+where|with|delete|insert|' r'using\s+named|using|graph|default|named|all|optional|service|' - r'silent|bind|union|not\s+in|in|as|having|to|prefix|base)\b', Keyword), + r'silent|bind|undef|union|not\s+in|in|as|having|to|prefix|base)\b', Keyword), (r'(a)\b', Keyword), # IRIs :: ('(' + IRIREF + ')', Name.Label), @@ -117,7 +117,7 @@ class SparqlLexer(RegexLexer): (r'(?i)(str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|' r'ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|' r'contains|strstarts|strends|strbefore|strafter|year|month|day|' - r'hours|minutes|seconds|timezone|tz|now|md5|sha1|sha256|sha384|' + r'hours|minutes|seconds|timezone|tz|now|uuid|struuid|md5|sha1|sha256|sha384|' r'sha512|coalesce|if|strlang|strdt|sameterm|isiri|isuri|isblank|' r'isliteral|isnumeric|regex|substr|replace|exists|not\s+exists|' r'count|sum|min|max|avg|sample|group_concat|separator)\b', @@ -187,19 +187,61 @@ class TurtleLexer(RegexLexer): filenames = ['*.ttl'] mimetypes = ['text/turtle', 'application/x-turtle'] - flags = re.IGNORECASE + # character group definitions :: + PN_CHARS_BASE_GRP = ('a-zA-Z' + '\u00c0-\u00d6' + '\u00d8-\u00f6' + '\u00f8-\u02ff' + '\u0370-\u037d' + '\u037f-\u1fff' + '\u200c-\u200d' + '\u2070-\u218f' + '\u2c00-\u2fef' + '\u3001-\ud7ff' + '\uf900-\ufdcf' + '\ufdf0-\ufffd') + + 
PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_') + + PN_CHARS_GRP = (PN_CHARS_U_GRP + + r'\-' + + r'0-9' + + '\u00b7' + + '\u0300-\u036f' + + '\u203f-\u2040') + + PN_CHARS = '[' + PN_CHARS_GRP + ']' + + PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']' + + PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?' + + HEX_GRP = '0-9A-Fa-f' + + HEX = '[' + HEX_GRP + ']' + + PERCENT = '%' + HEX + HEX + + PN_LOCAL_ESC_CHARS_GRP = r' _~.\-!$&"()*+,;=/?#@%' + + PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']' + + PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS + + PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')' + + PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' + + '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' + + PN_CHARS_GRP + ':]|' + PLX + '))?') patterns = { - 'PNAME_NS': r'((?:[a-z][\w-]*)?\:)', # Simplified character range + 'PNAME_NS': r'((?:[a-zA-Z][\w-]*)?\:)', # Simplified character range 'IRIREF': r'(<[^<>"{}|^`\\\x00-\x20]*>)' } - # PNAME_NS PN_LOCAL (with simplified character range) - patterns['PrefixedName'] = r'%(PNAME_NS)s([a-z][\w-]*)' % patterns - tokens = { 'root': [ - (r'\s+', Whitespace), + (r'\s+', Text), # Base / prefix (r'(@base|BASE)(\s+)%(IRIREF)s(\s*)(\.?)' % patterns, @@ -216,8 +258,8 @@ class TurtleLexer(RegexLexer): (r'%(IRIREF)s' % patterns, Name.Variable), # PrefixedName - (r'%(PrefixedName)s' % patterns, - bygroups(Name.Namespace, Name.Tag)), + (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + r')?', + bygroups(Name.Namespace, Punctuation, Name.Tag)), # Comment (r'#[^\n]+', Comment), @@ -257,12 +299,10 @@ class TurtleLexer(RegexLexer): (r'.', String, '#pop'), ], 'end-of-string': [ - (r'(@)([a-z]+(:?-[a-z0-9]+)*)', + (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)', bygroups(Operator, Generic.Emph), '#pop:2'), (r'(\^\^)%(IRIREF)s' % patterns, bygroups(Operator, Generic.Emph), '#pop:2'), - (r'(\^\^)%(PrefixedName)s' % patterns, - bygroups(Operator, Generic.Emph, Generic.Emph), '#pop:2'), default('#pop:2'), @@ -275,3 
+315,149 @@ def analyse_text(text): for t in ('@base ', 'BASE ', '@prefix ', 'PREFIX '): if re.search(r'^\s*%s' % t, text): return 0.80 + + +class ShExCLexer(RegexLexer): + """ + Lexer for `ShExC `_ shape expressions language syntax. + """ + name = 'ShExC' + aliases = ['shexc', 'shex'] + filenames = ['*.shex'] + mimetypes = ['text/shex'] + + # character group definitions :: + + PN_CHARS_BASE_GRP = ('a-zA-Z' + '\u00c0-\u00d6' + '\u00d8-\u00f6' + '\u00f8-\u02ff' + '\u0370-\u037d' + '\u037f-\u1fff' + '\u200c-\u200d' + '\u2070-\u218f' + '\u2c00-\u2fef' + '\u3001-\ud7ff' + '\uf900-\ufdcf' + '\ufdf0-\ufffd') + + PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_') + + PN_CHARS_GRP = (PN_CHARS_U_GRP + + r'\-' + + r'0-9' + + '\u00b7' + + '\u0300-\u036f' + + '\u203f-\u2040') + + HEX_GRP = '0-9A-Fa-f' + + PN_LOCAL_ESC_CHARS_GRP = r"_~.\-!$&'()*+,;=/?#@%" + + # terminal productions :: + + PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']' + + PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']' + + PN_CHARS = '[' + PN_CHARS_GRP + ']' + + HEX = '[' + HEX_GRP + ']' + + PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']' + + UCHAR_NO_BACKSLASH = '(?:u' + HEX + '{4}|U' + HEX + '{8})' + + UCHAR = r'\\' + UCHAR_NO_BACKSLASH + + IRIREF = r'<(?:[^\x00-\x20<>"{}|^`\\]|' + UCHAR + ')*>' + + BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \ + '.]*' + PN_CHARS + ')?' + + PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?' 
+ + PERCENT = '%' + HEX + HEX + + PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS + + PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')' + + PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' + + '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' + + PN_CHARS_GRP + ':]|' + PLX + '))?') + + EXPONENT = r'[eE][+-]?\d+' + + # Lexer token definitions :: + + tokens = { + 'root': [ + (r'\s+', Text), + # keywords :: + (r'(?i)(base|prefix|start|external|' + r'literal|iri|bnode|nonliteral|length|minlength|maxlength|' + r'mininclusive|minexclusive|maxinclusive|maxexclusive|' + r'totaldigits|fractiondigits|' + r'closed|extra)\b', Keyword), + (r'(a)\b', Keyword), + # IRIs :: + ('(' + IRIREF + ')', Name.Label), + # blank nodes :: + ('(' + BLANK_NODE_LABEL + ')', Name.Label), + # prefixed names :: + (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + ')?', + bygroups(Name.Namespace, Punctuation, Name.Tag)), + # boolean literals :: + (r'(true|false)', Keyword.Constant), + # double literals :: + (r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float), + # decimal literals :: + (r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float), + # integer literals :: + (r'[+\-]?\d+', Number.Integer), + # operators :: + (r'[@|$&=*+?^\-~]', Operator), + # operator keywords :: + (r'(?i)(and|or|not)\b', Operator.Word), + # punctuation characters :: + (r'[(){}.;,:^\[\]]', Punctuation), + # line comments :: + (r'#[^\n]*', Comment), + # strings :: + (r'"""', String, 'triple-double-quoted-string'), + (r'"', String, 'single-double-quoted-string'), + (r"'''", String, 'triple-single-quoted-string'), + (r"'", String, 'single-single-quoted-string'), + ], + 'triple-double-quoted-string': [ + (r'"""', String, 'end-of-string'), + (r'[^\\]+', String), + (r'\\', String, 'string-escape'), + ], + 'single-double-quoted-string': [ + (r'"', String, 'end-of-string'), + (r'[^"\\\n]+', String), + (r'\\', String, 'string-escape'), + ], + 'triple-single-quoted-string': [ + (r"'''", String, 'end-of-string'), + 
(r'[^\\]+', String), + (r'\\', String.Escape, 'string-escape'), + ], + 'single-single-quoted-string': [ + (r"'", String, 'end-of-string'), + (r"[^'\\\n]+", String), + (r'\\', String, 'string-escape'), + ], + 'string-escape': [ + (UCHAR_NO_BACKSLASH, String.Escape, '#pop'), + (r'.', String.Escape, '#pop'), + ], + 'end-of-string': [ + (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)', + bygroups(Operator, Name.Function), '#pop:2'), + (r'\^\^', Operator, '#pop:2'), + default('#pop:2'), + ], + } diff --git a/src/typecode/_vendor/pygments/lexers/rebol.py b/src/typecode/_vendor/pygments/lexers/rebol.py index e81cee9..4b99a57 100644 --- a/src/typecode/_vendor/pygments/lexers/rebol.py +++ b/src/typecode/_vendor/pygments/lexers/rebol.py @@ -5,7 +5,7 @@ Lexers for the REBOL and related languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/resource.py b/src/typecode/_vendor/pygments/lexers/resource.py index 14d5a56..120d929 100644 --- a/src/typecode/_vendor/pygments/lexers/resource.py +++ b/src/typecode/_vendor/pygments/lexers/resource.py @@ -5,7 +5,7 @@ Lexer for resource definition files. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -26,7 +26,7 @@ class ResourceLexer(RegexLexer): """ name = 'ResourceBundle' aliases = ['resource', 'resourcebundle'] - filenames = ['*.txt'] + filenames = [] _types = (':table', ':array', ':string', ':bin', ':import', ':intvector', ':int', ':alias') diff --git a/src/typecode/_vendor/pygments/lexers/ride.py b/src/typecode/_vendor/pygments/lexers/ride.py new file mode 100644 index 0000000..3c1c12d --- /dev/null +++ b/src/typecode/_vendor/pygments/lexers/ride.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.ride + ~~~~~~~~~~~~~~~~~~~~ + + Lexer for the Ride programming language. + + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from typecode._vendor.pygments.lexer import RegexLexer, words, include +from typecode._vendor.pygments.token import Comment, Keyword, Name, Number, Punctuation, String, Text + +__all__ = ['RideLexer'] + + +class RideLexer(RegexLexer): + """ + For `Ride `_ + source code. + + .. 
versionadded:: 2.6 + """ + + name = 'Ride' + aliases = ['ride'] + filenames = ['*.ride'] + mimetypes = ['text/x-ride'] + + validName = r'[a-zA-Z_][a-zA-Z0-9_\']*' + + builtinOps = ( + '||', '|', '>=', '>', '==', '!', + '=', '<=', '<', '::', ':+', ':', '!=', '/', + '.', '=>', '-', '+', '*', '&&', '%', '++', + ) + + globalVariablesName = ( + 'NOALG', 'MD5', 'SHA1', 'SHA224', 'SHA256', 'SHA384', 'SHA512', + 'SHA3224', 'SHA3256', 'SHA3384', 'SHA3512', 'nil', 'this', 'unit', + 'height', 'lastBlock', 'Buy', 'Sell', 'CEILING', 'FLOOR', 'DOWN', + 'HALFDOWN', 'HALFEVEN', 'HALFUP', 'UP', + ) + + typesName = ( + 'Unit', 'Int', 'Boolean', 'ByteVector', 'String', 'Address', 'Alias', + 'Transfer', 'AssetPair', 'DataEntry', 'Order', 'Transaction', + 'GenesisTransaction', 'PaymentTransaction', 'ReissueTransaction', + 'BurnTransaction', 'MassTransferTransaction', 'ExchangeTransaction', + 'TransferTransaction', 'SetAssetScriptTransaction', + 'InvokeScriptTransaction', 'IssueTransaction', 'LeaseTransaction', + 'LeaseCancelTransaction', 'CreateAliasTransaction', + 'SetScriptTransaction', 'SponsorFeeTransaction', 'DataTransaction', + 'WriteSet', 'AttachedPayment', 'ScriptTransfer', 'TransferSet', + 'ScriptResult', 'Invocation', 'Asset', 'BlockInfo', 'Issue', 'Reissue', + 'Burn', 'NoAlg', 'Md5', 'Sha1', 'Sha224', 'Sha256', 'Sha384', 'Sha512', + 'Sha3224', 'Sha3256', 'Sha3384', 'Sha3512', 'BinaryEntry', + 'BooleanEntry', 'IntegerEntry', 'StringEntry', 'List', 'Ceiling', + 'Down', 'Floor', 'HalfDown', 'HalfEven', 'HalfUp', 'Up', + ) + + functionsName = ( + 'fraction', 'size', 'toBytes', 'take', 'drop', 'takeRight', 'dropRight', + 'toString', 'isDefined', 'extract', 'throw', 'getElement', 'value', + 'cons', 'toUtf8String', 'toInt', 'indexOf', 'lastIndexOf', 'split', + 'parseInt', 'parseIntValue', 'keccak256', 'blake2b256', 'sha256', + 'sigVerify', 'toBase58String', 'fromBase58String', 'toBase64String', + 'fromBase64String', 'transactionById', 'transactionHeightById', + 'getInteger', 
'getBoolean', 'getBinary', 'getString', + 'addressFromPublicKey', 'addressFromString', 'addressFromRecipient', + 'assetBalance', 'wavesBalance', 'getIntegerValue', 'getBooleanValue', + 'getBinaryValue', 'getStringValue', 'addressFromStringValue', + 'assetInfo', 'rsaVerify', 'checkMerkleProof', 'median', + 'valueOrElse', 'valueOrErrorMessage', 'contains', 'log', 'pow', + 'toBase16String', 'fromBase16String', 'blockInfoByHeight', + 'transferTransactionById', + ) + + reservedWords = words(( + 'match', 'case', 'else', 'func', 'if', + 'let', 'then', '@Callable', '@Verifier', + ), suffix=r'\b') + + tokens = { + 'root': [ + # Comments + (r'#.*', Comment.Single), + # Whitespace + (r'\s+', Text), + # Strings + (r'"', String, 'doublequote'), + (r'utf8\'', String, 'utf8quote'), + (r'base(58|64|16)\'', String, 'singlequote'), + # Keywords + (reservedWords, Keyword.Reserved), + (r'\{-#.*?#-\}', Keyword.Reserved), + (r'FOLD<\d+>', Keyword.Reserved), + # Types + (words(typesName), Keyword.Type), + # Main + # (specialName, Keyword.Reserved), + # Prefix Operators + (words(builtinOps, prefix=r'\(', suffix=r'\)'), Name.Function), + # Infix Operators + (words(builtinOps), Name.Function), + (words(globalVariablesName), Name.Function), + (words(functionsName), Name.Function), + # Numbers + include('numbers'), + # Variable Names + (validName, Name.Variable), + # Parens + (r'[,()\[\]{}]', Punctuation), + ], + + 'doublequote': [ + (r'\\u[0-9a-fA-F]{4}', String.Escape), + (r'\\[nrfvb\\"]', String.Escape), + (r'[^"]', String), + (r'"', String, '#pop'), + ], + + 'utf8quote': [ + (r'\\u[0-9a-fA-F]{4}', String.Escape), + (r'\\[nrfvb\\\']', String.Escape), + (r'[^\']', String), + (r'\'', String, '#pop'), + ], + + 'singlequote': [ + (r'[^\']', String), + (r'\'', String, '#pop'), + ], + + 'numbers': [ + (r'_?\d+', Number.Integer), + ], + } diff --git a/src/typecode/_vendor/pygments/lexers/rnc.py b/src/typecode/_vendor/pygments/lexers/rnc.py index c85ae35..9b3d263 100644 --- 
a/src/typecode/_vendor/pygments/lexers/rnc.py +++ b/src/typecode/_vendor/pygments/lexers/rnc.py @@ -5,7 +5,7 @@ Lexer for Relax-NG Compact syntax - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/roboconf.py b/src/typecode/_vendor/pygments/lexers/roboconf.py index f34c5b3..9775854 100644 --- a/src/typecode/_vendor/pygments/lexers/roboconf.py +++ b/src/typecode/_vendor/pygments/lexers/roboconf.py @@ -5,7 +5,7 @@ Lexers for Roboconf DSL. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/robotframework.py b/src/typecode/_vendor/pygments/lexers/robotframework.py index 1c960b4..f9f10d8 100644 --- a/src/typecode/_vendor/pygments/lexers/robotframework.py +++ b/src/typecode/_vendor/pygments/lexers/robotframework.py @@ -5,7 +5,7 @@ Lexer for Robot Framework. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -27,7 +27,6 @@ from typecode._vendor.pygments.lexer import Lexer from typecode._vendor.pygments.token import Token -from typecode._vendor.pygments.util import text_type __all__ = ['RobotFrameworkLexer'] @@ -64,7 +63,7 @@ class RobotFrameworkLexer(Lexer): """ name = 'RobotFramework' aliases = ['robotframework'] - filenames = ['*.txt', '*.robot'] + filenames = ['*.robot'] mimetypes = ['text/x-robotframework'] def __init__(self, **options): @@ -80,11 +79,11 @@ def get_tokens_unprocessed(self, text): for value, token in row_tokenizer.tokenize(row): for value, token in var_tokenizer.tokenize(value, token): if value: - yield index, token, text_type(value) + yield index, token, str(value) index += len(value) -class VariableTokenizer(object): +class VariableTokenizer: def tokenize(self, string, token): var = VariableSplitter(string, identifiers='$@%&') @@ -99,19 +98,16 @@ def _tokenize(self, var, string, orig_token): before = string[:var.start] yield before, orig_token yield var.identifier + '{', SYNTAX - for value, token in self.tokenize(var.base, VARIABLE): - yield value, token + yield from self.tokenize(var.base, VARIABLE) yield '}', SYNTAX if var.index: yield '[', SYNTAX - for value, token in self.tokenize(var.index, VARIABLE): - yield value, token + yield from self.tokenize(var.index, VARIABLE) yield ']', SYNTAX - for value, token in self.tokenize(string[var.end:], orig_token): - yield value, token + yield from self.tokenize(string[var.end:], orig_token) -class RowTokenizer(object): +class RowTokenizer: def __init__(self): self._table = UnknownTable() @@ -124,6 +120,7 @@ def __init__(self): 'metadata': settings, 'variables': variables, 'variable': variables, 'testcases': testcases, 'testcase': testcases, + 'tasks': testcases, 'task': testcases, 'keywords': keywords, 'keyword': keywords, 'userkeywords': keywords, 'userkeyword': keywords} @@ -138,9 +135,8 @@ def tokenize(self, row): elif index == 0 and value.startswith('*'): self._table = self._start_table(value) 
heading = True - for value, token in self._tokenize(value, index, commented, - separator, heading): - yield value, token + yield from self._tokenize(value, index, commented, + separator, heading) self._table.end_row() def _start_table(self, header): @@ -155,25 +151,22 @@ def _tokenize(self, value, index, commented, separator, heading): elif heading: yield value, HEADING else: - for value, token in self._table.tokenize(value, index): - yield value, token + yield from self._table.tokenize(value, index) -class RowSplitter(object): +class RowSplitter: _space_splitter = re.compile('( {2,})') _pipe_splitter = re.compile(r'((?:^| +)\|(?: +|$))') def split(self, row): splitter = (row.startswith('| ') and self._split_from_pipes or self._split_from_spaces) - for value in splitter(row): - yield value + yield from splitter(row) yield '\n' def _split_from_spaces(self, row): yield '' # Start with (pseudo)separator similarly as with pipes - for value in self._space_splitter.split(row): - yield value + yield from self._space_splitter.split(row) def _split_from_pipes(self, row): _, separator, rest = self._pipe_splitter.split(row, 1) @@ -185,7 +178,7 @@ def _split_from_pipes(self, row): yield rest -class Tokenizer(object): +class Tokenizer: _tokens = None def __init__(self): @@ -216,11 +209,11 @@ class Comment(Tokenizer): class Setting(Tokenizer): _tokens = (SETTING, ARGUMENT) _keyword_settings = ('suitesetup', 'suiteprecondition', 'suiteteardown', - 'suitepostcondition', 'testsetup', 'testprecondition', - 'testteardown', 'testpostcondition', 'testtemplate') + 'suitepostcondition', 'testsetup', 'tasksetup', 'testprecondition', + 'testteardown','taskteardown', 'testpostcondition', 'testtemplate', 'tasktemplate') _import_settings = ('library', 'resource', 'variables') _other_settings = ('documentation', 'metadata', 'forcetags', 'defaulttags', - 'testtimeout') + 'testtimeout','tasktimeout') _custom_tokenizer = None def __init__(self, template_setter=None): @@ -292,7 +285,7 @@ def 
_tokenize(self, value, index): return GherkinTokenizer().tokenize(value, KEYWORD) -class GherkinTokenizer(object): +class GherkinTokenizer: _gherkin_prefix = re.compile('^(Given|When|Then|And) ', re.IGNORECASE) def tokenize(self, value, token): @@ -320,7 +313,7 @@ def _tokenize(self, value, index): return token -class _Table(object): +class _Table: _tokenizer_class = None def __init__(self, prev_tokenizer=None): @@ -333,8 +326,7 @@ def tokenize(self, value, index): self._tokenizer = self._prev_tokenizer yield value, SYNTAX else: - for value_and_token in self._tokenize(value, index): - yield value_and_token + yield from self._tokenize(value, index) self._prev_values_on_row.append(value) def _continues(self, value, index): diff --git a/src/typecode/_vendor/pygments/lexers/ruby.py b/src/typecode/_vendor/pygments/lexers/ruby.py index cebc532..dec7dc4 100644 --- a/src/typecode/_vendor/pygments/lexers/ruby.py +++ b/src/typecode/_vendor/pygments/lexers/ruby.py @@ -5,7 +5,7 @@ Lexers for Ruby and related languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -43,23 +43,25 @@ class RubyLexer(ExtendedRegexLexer): def heredoc_callback(self, match, ctx): # okay, this is the hardest part of parsing Ruby... - # match: 1 = <<-?, 2 = quote? 3 = name 4 = quote? 5 = rest of line + # match: 1 = <<[-~]?, 2 = quote? 3 = name 4 = quote? 5 = rest of line start = match.start(1) - yield start, Operator, match.group(1) # <<-? + yield start, Operator, match.group(1) # <<[-~]? 
yield match.start(2), String.Heredoc, match.group(2) # quote ", ', ` yield match.start(3), String.Delimiter, match.group(3) # heredoc name yield match.start(4), String.Heredoc, match.group(4) # quote again heredocstack = ctx.__dict__.setdefault('heredocstack', []) outermost = not bool(heredocstack) - heredocstack.append((match.group(1) == '<<-', match.group(3))) + heredocstack.append((match.group(1) in ('<<-', '<<~'), match.group(3))) ctx.pos = match.start(5) ctx.end = match.end(5) - # this may find other heredocs - for i, t, v in self.get_tokens_unprocessed(context=ctx): - yield i, t, v + # this may find other heredocs, so limit the recursion depth + if len(heredocstack) < 100: + yield from self.get_tokens_unprocessed(context=ctx) + else: + yield ctx.pos, String.Heredoc, match.group(5) ctx.pos = match.end() if outermost: @@ -108,17 +110,18 @@ def intp_string_callback(self, match, ctx): # easy ones (r'\:@{0,2}[a-zA-Z_]\w*[!?]?', String.Symbol), (words(RUBY_OPERATORS, prefix=r'\:@{0,2}'), String.Symbol), - (r":'(\\\\|\\'|[^'])*'", String.Symbol), - (r"'(\\\\|\\'|[^'])*'", String.Single), + (r":'(\\\\|\\[^\\]|[^'\\])*'", String.Symbol), (r':"', String.Symbol, 'simple-sym'), (r'([a-zA-Z_]\w*)(:)(?!:)', bygroups(String.Symbol, Punctuation)), # Since Ruby 1.9 - (r'"', String.Double, 'simple-string'), + (r'"', String.Double, 'simple-string-double'), + (r"'", String.Single, 'simple-string-single'), (r'(?~!:])|' @@ -421,16 +424,14 @@ def get_tokens_unprocessed(self, text): curcode += line[end:] else: if curcode: - for item in do_insertions( - insertions, rblexer.get_tokens_unprocessed(curcode)): - yield item + yield from do_insertions( + insertions, rblexer.get_tokens_unprocessed(curcode)) curcode = '' insertions = [] yield match.start(), Generic.Output, line if curcode: - for item in do_insertions( - insertions, rblexer.get_tokens_unprocessed(curcode)): - yield item + yield from do_insertions( + insertions, rblexer.get_tokens_unprocessed(curcode)) class 
FancyLexer(RegexLexer): @@ -451,26 +452,26 @@ class FancyLexer(RegexLexer): tokens = { # copied from PerlLexer: 'balanced-regex': [ - (r'/(\\\\|\\/|[^/])*/[egimosx]*', String.Regex, '#pop'), - (r'!(\\\\|\\!|[^!])*![egimosx]*', String.Regex, '#pop'), + (r'/(\\\\|\\[^\\]|[^/\\])*/[egimosx]*', String.Regex, '#pop'), + (r'!(\\\\|\\[^\\]|[^!\\])*![egimosx]*', String.Regex, '#pop'), (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'), - (r'\{(\\\\|\\\}|[^}])*\}[egimosx]*', String.Regex, '#pop'), - (r'<(\\\\|\\>|[^>])*>[egimosx]*', String.Regex, '#pop'), - (r'\[(\\\\|\\\]|[^\]])*\][egimosx]*', String.Regex, '#pop'), - (r'\((\\\\|\\\)|[^)])*\)[egimosx]*', String.Regex, '#pop'), - (r'@(\\\\|\\@|[^@])*@[egimosx]*', String.Regex, '#pop'), - (r'%(\\\\|\\%|[^%])*%[egimosx]*', String.Regex, '#pop'), - (r'\$(\\\\|\\\$|[^$])*\$[egimosx]*', String.Regex, '#pop'), + (r'\{(\\\\|\\[^\\]|[^}\\])*\}[egimosx]*', String.Regex, '#pop'), + (r'<(\\\\|\\[^\\]|[^>\\])*>[egimosx]*', String.Regex, '#pop'), + (r'\[(\\\\|\\[^\\]|[^\]\\])*\][egimosx]*', String.Regex, '#pop'), + (r'\((\\\\|\\[^\\]|[^)\\])*\)[egimosx]*', String.Regex, '#pop'), + (r'@(\\\\|\\[^\\]|[^@\\])*@[egimosx]*', String.Regex, '#pop'), + (r'%(\\\\|\\[^\\]|[^%\\])*%[egimosx]*', String.Regex, '#pop'), + (r'\$(\\\\|\\[^\\]|[^$\\])*\$[egimosx]*', String.Regex, '#pop'), ], 'root': [ (r'\s+', Text), # balanced delimiters (copied from PerlLexer): - (r's\{(\\\\|\\\}|[^}])*\}\s*', String.Regex, 'balanced-regex'), - (r's<(\\\\|\\>|[^>])*>\s*', String.Regex, 'balanced-regex'), - (r's\[(\\\\|\\\]|[^\]])*\]\s*', String.Regex, 'balanced-regex'), - (r's\((\\\\|\\\)|[^)])*\)\s*', String.Regex, 'balanced-regex'), - (r'm?/(\\\\|\\/|[^/\n])*/[gcimosx]*', String.Regex), + (r's\{(\\\\|\\[^\\]|[^}\\])*\}\s*', String.Regex, 'balanced-regex'), + (r's<(\\\\|\\[^\\]|[^>\\])*>\s*', String.Regex, 'balanced-regex'), + (r's\[(\\\\|\\[^\\]|[^\]\\])*\]\s*', String.Regex, 'balanced-regex'), + (r's\((\\\\|\\[^\\]|[^)\\])*\)\s*', String.Regex, 
'balanced-regex'), + (r'm?/(\\\\|\\[^\\]|[^///\n])*/[gcimosx]*', String.Regex), (r'm(?=[/!\\{<\[(@%$])', String.Regex, 'balanced-regex'), # Comments @@ -478,9 +479,9 @@ class FancyLexer(RegexLexer): # Symbols (r'\'([^\'\s\[\](){}]+|\[\])', String.Symbol), # Multi-line DoubleQuotedString - (r'"""(\\\\|\\"|[^"])*"""', String), + (r'"""(\\\\|\\[^\\]|[^\\])*?"""', String), # DoubleQuotedString - (r'"(\\\\|\\"|[^"])*"', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), # keywords (r'(def|class|try|catch|finally|retry|return|return_local|match|' r'case|->|=>)\b', Keyword), diff --git a/src/typecode/_vendor/pygments/lexers/rust.py b/src/typecode/_vendor/pygments/lexers/rust.py index 7eeeb76..8a6538d 100644 --- a/src/typecode/_vendor/pygments/lexers/rust.py +++ b/src/typecode/_vendor/pygments/lexers/rust.py @@ -5,7 +5,7 @@ Lexers for the Rust language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -18,48 +18,56 @@ class RustLexer(RegexLexer): """ - Lexer for the Rust programming language (version 1.10). + Lexer for the Rust programming language (version 1.47). .. 
versionadded:: 1.6 """ name = 'Rust' filenames = ['*.rs', '*.rs.in'] aliases = ['rust', 'rs'] - mimetypes = ['text/rust'] - - keyword_types = ( - words(('u8', 'u16', 'u32', 'u64', 'i8', 'i16', 'i32', 'i64', - 'i128', 'u128', 'usize', 'isize', 'f32', 'f64', 'str', 'bool'), - suffix=r'\b'), - Keyword.Type) - - builtin_types = (words(( - # Reexported core operators - 'Copy', 'Send', 'Sized', 'Sync', - 'Drop', 'Fn', 'FnMut', 'FnOnce', - - # Reexported types and traits - 'Box', - 'ToOwned', - 'Clone', + mimetypes = ['text/rust', 'text/x-rust'] + + keyword_types = (words(( + 'u8', 'u16', 'u32', 'u64', 'u128', 'i8', 'i16', 'i32', 'i64', 'i128', + 'usize', 'isize', 'f32', 'f64', 'char', 'str', 'bool', + ), suffix=r'\b'), Keyword.Type) + + builtin_funcs_types = (words(( + 'Copy', 'Send', 'Sized', 'Sync', 'Unpin', + 'Drop', 'Fn', 'FnMut', 'FnOnce', 'drop', + 'Box', 'ToOwned', 'Clone', 'PartialEq', 'PartialOrd', 'Eq', 'Ord', - 'AsRef', 'AsMut', 'Into', 'From', - 'Default', - 'Iterator', 'Extend', 'IntoIterator', - 'DoubleEndedIterator', 'ExactSizeIterator', - 'Option', - 'Some', 'None', - 'Result', - 'Ok', 'Err', - 'SliceConcatExt', - 'String', 'ToString', - 'Vec'), suffix=r'\b'), - Name.Builtin) + 'AsRef', 'AsMut', 'Into', 'From', 'Default', + 'Iterator', 'Extend', 'IntoIterator', 'DoubleEndedIterator', + 'ExactSizeIterator', + 'Option', 'Some', 'None', + 'Result', 'Ok', 'Err', + 'String', 'ToString', 'Vec', + ), suffix=r'\b'), Name.Builtin) + + builtin_macros = (words(( + 'asm', 'assert', 'assert_eq', 'assert_ne', 'cfg', 'column', + 'compile_error', 'concat', 'concat_idents', 'dbg', 'debug_assert', + 'debug_assert_eq', 'debug_assert_ne', 'env', 'eprint', 'eprintln', + 'file', 'format', 'format_args', 'format_args_nl', 'global_asm', + 'include', 'include_bytes', 'include_str', + 'is_aarch64_feature_detected', + 'is_arm_feature_detected', + 'is_mips64_feature_detected', + 'is_mips_feature_detected', + 'is_powerpc64_feature_detected', + 'is_powerpc_feature_detected', + 
'is_x86_feature_detected', + 'line', 'llvm_asm', 'log_syntax', 'macro_rules', 'matches', + 'module_path', 'option_env', 'panic', 'print', 'println', 'stringify', + 'thread_local', 'todo', 'trace_macros', 'unimplemented', 'unreachable', + 'vec', 'write', 'writeln', + ), suffix=r'!'), Name.Function.Magic) tokens = { 'root': [ # rust allows a file to start with a shebang, but if the first line - # starts with #![ then it’s not a shebang but a crate attribute. + # starts with #![ then it's not a shebang but a crate attribute. (r'#![^[\r\n].*$', Comment.Preproc), default('base'), ], @@ -77,26 +85,26 @@ class RustLexer(RegexLexer): # Macro parameters (r"""\$([a-zA-Z_]\w*|\(,?|\),?|,?)""", Comment.Preproc), # Keywords - (words(( - 'as', 'box', 'const', 'crate', 'else', 'extern', - 'for', 'if', 'impl', 'in', 'loop', 'match', 'move', - 'mut', 'pub', 'ref', 'return', 'static', 'super', - 'trait', 'unsafe', 'use', 'where', 'while'), suffix=r'\b'), - Keyword), - (words(('abstract', 'alignof', 'become', 'do', 'final', 'macro', - 'offsetof', 'override', 'priv', 'proc', 'pure', 'sizeof', - 'typeof', 'unsized', 'virtual', 'yield'), suffix=r'\b'), - Keyword.Reserved), + (words(('as', 'async', 'await', 'box', 'const', 'crate', 'dyn', + 'else', 'extern', 'for', 'if', 'impl', 'in', 'loop', + 'match', 'move', 'mut', 'pub', 'ref', 'return', 'static', + 'super', 'trait', 'unsafe', 'use', 'where', 'while'), + suffix=r'\b'), Keyword), + (words(('abstract', 'become', 'do', 'final', 'macro', 'override', + 'priv', 'typeof', 'try', 'unsized', 'virtual', 'yield'), + suffix=r'\b'), Keyword.Reserved), (r'(true|false)\b', Keyword.Constant), + (r'self\b', Name.Builtin.Pseudo), (r'mod\b', Keyword, 'modname'), (r'let\b', Keyword.Declaration), (r'fn\b', Keyword, 'funcname'), (r'(struct|enum|type|union)\b', Keyword, 'typename'), (r'(default)(\s+)(type|fn)\b', bygroups(Keyword, Text, Keyword)), keyword_types, - (r'self\b', Name.Builtin.Pseudo), - # Prelude (taken from Rust’s src/libstd/prelude.rs) - 
builtin_types, + (r'[sS]elf\b', Name.Builtin.Pseudo), + # Prelude (taken from Rust's src/libstd/prelude.rs) + builtin_funcs_types, + builtin_macros, # Path seperators, so types don't catch them. (r'::\b', Text), # Types in positions. @@ -104,49 +112,47 @@ class RustLexer(RegexLexer): # Labels (r'(break|continue)(\s*)(\'[A-Za-z_]\w*)?', bygroups(Keyword, Text.Whitespace, Name.Label)), - # Character Literal + + # Character literals (r"""'(\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0""" r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""", String.Char), (r"""b'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\0""" r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""", String.Char), - # Binary Literal + + # Binary literals (r'0b[01_]+', Number.Bin, 'number_lit'), - # Octal Literal + # Octal literals (r'0o[0-7_]+', Number.Oct, 'number_lit'), - # Hexadecimal Literal + # Hexadecimal literals (r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'), - # Decimal Literal + # Decimal literals (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|' r'\.[0-9_]*(?!\.)|[eE][+\-]?[0-9_]+)', Number.Float, 'number_lit'), (r'[0-9][0-9_]*', Number.Integer, 'number_lit'), - # String Literal + + # String literals (r'b"', String, 'bytestring'), (r'"', String, 'string'), (r'b?r(#*)".*?"\1', String), - # Lifetime - (r"""'static""", Name.Builtin), - (r"""'[a-zA-Z_]\w*""", Name.Attribute), + # Lifetime names + (r"'", Operator, 'lifetime'), # Operators and Punctuation + (r'\.\.=?', Operator), (r'[{}()\[\],.;]', Punctuation), (r'[+\-*/%&|<>^!~@=:?]', Operator), - # Identifier + # Identifiers (r'[a-zA-Z_]\w*', Name), + # Raw identifiers + (r'r#[a-zA-Z_]\w*', Name), # Attributes (r'#!?\[', Comment.Preproc, 'attribute['), - # Macros - (r'([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\s*)(\{)', - bygroups(Comment.Preproc, Punctuation, Whitespace, Name, - Whitespace, Punctuation), 'macro{'), - (r'([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\()', - bygroups(Comment.Preproc, Punctuation, Whitespace, Name, - Punctuation), 'macro('), ], 'comment': [ (r'[^*/]+', Comment.Multiline), @@ -173,11 
+179,17 @@ class RustLexer(RegexLexer): 'typename': [ (r'\s+', Text), (r'&', Keyword.Pseudo), - builtin_types, + (r"'", Operator, 'lifetime'), + builtin_funcs_types, keyword_types, (r'[a-zA-Z_]\w*', Name.Class, '#pop'), default('#pop'), ], + 'lifetime': [ + (r"(static|_)", Name.Builtin), + (r"[a-zA-Z_]+\w*", Name.Attribute), + default('#pop'), + ], 'number_lit': [ (r'[ui](8|16|32|64|size)', Keyword, '#pop'), (r'f(32|64)', Keyword, '#pop'), @@ -194,14 +206,6 @@ class RustLexer(RegexLexer): (r"""\\x[89a-fA-F][0-9a-fA-F]""", String.Escape), include('string'), ], - 'macro{': [ - (r'\{', Operator, '#push'), - (r'\}', Operator, '#pop'), - ], - 'macro(': [ - (r'\(', Operator, '#push'), - (r'\)', Operator, '#pop'), - ], 'attribute_common': [ (r'"', String, 'string'), (r'\[', Comment.Preproc, 'attribute['), diff --git a/src/typecode/_vendor/pygments/lexers/sas.py b/src/typecode/_vendor/pygments/lexers/sas.py index 80013df..9be8262 100644 --- a/src/typecode/_vendor/pygments/lexers/sas.py +++ b/src/typecode/_vendor/pygments/lexers/sas.py @@ -5,7 +5,7 @@ Lexer for SAS. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/scdoc.py b/src/typecode/_vendor/pygments/lexers/scdoc.py new file mode 100644 index 0000000..aa9e35e --- /dev/null +++ b/src/typecode/_vendor/pygments/lexers/scdoc.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.scdoc + ~~~~~~~~~~~~~~~~~~~~~ + + Lexer for scdoc, a simple man page generator. + + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +import re + +from typecode._vendor.pygments.lexer import RegexLexer, include, bygroups, \ + using, this +from typecode._vendor.pygments.token import Text, Comment, Keyword, String, \ + Generic + + +__all__ = ['ScdocLexer'] + + +class ScdocLexer(RegexLexer): + """ + `scdoc` is a simple man page generator for POSIX systems written in C99. + https://git.sr.ht/~sircmpwn/scdoc + + .. versionadded:: 2.5 + """ + name = 'scdoc' + aliases = ['scdoc', 'scd'] + filenames = ['*.scd', '*.scdoc'] + flags = re.MULTILINE + + tokens = { + 'root': [ + # comment + (r'^(;.+\n)', bygroups(Comment)), + + # heading with pound prefix + (r'^(#)([^#].+\n)', bygroups(Generic.Heading, Text)), + (r'^(#{2})(.+\n)', bygroups(Generic.Subheading, Text)), + # bulleted lists + (r'^(\s*)([*-])(\s)(.+\n)', + bygroups(Text, Keyword, Text, using(this, state='inline'))), + # numbered lists + (r'^(\s*)(\.+\.)( .+\n)', + bygroups(Text, Keyword, using(this, state='inline'))), + # quote + (r'^(\s*>\s)(.+\n)', bygroups(Keyword, Generic.Emph)), + # text block + (r'^(```\n)([\w\W]*?)(^```$)', bygroups(String, Text, String)), + + include('inline'), + ], + 'inline': [ + # escape + (r'\\.', Text), + # underlines + (r'(\s)(_[^_]+_)(\W|\n)', bygroups(Text, Generic.Emph, Text)), + # bold + (r'(\s)(\*[^*]+\*)(\W|\n)', bygroups(Text, Generic.Strong, Text)), + # inline code + (r'`[^`]+`', String.Backtick), + + # general text, must come last! 
+ (r'[^\\\s]+', Text), + (r'.', Text), + ], + } + + def analyse_text(text): + """This is very similar to markdown, save for the escape characters + needed for * and _.""" + result = 0 + + if '\\*' in text: + result += 0.01 + + if '\\_' in text: + result += 0.01 + + return result diff --git a/src/typecode/_vendor/pygments/lexers/scripting.py b/src/typecode/_vendor/pygments/lexers/scripting.py index e8b3756..da41761 100644 --- a/src/typecode/_vendor/pygments/lexers/scripting.py +++ b/src/typecode/_vendor/pygments/lexers/scripting.py @@ -5,7 +5,7 @@ Lexer for scripting and embedded languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -15,11 +15,11 @@ words from typecode._vendor.pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation, Error, Whitespace, Other -from typecode._vendor.pygments.util import get_bool_opt, get_list_opt, iteritems +from typecode._vendor.pygments.util import get_bool_opt, get_list_opt __all__ = ['LuaLexer', 'MoonScriptLexer', 'ChaiscriptLexer', 'LSLLexer', 'AppleScriptLexer', 'RexxLexer', 'MOOCodeLexer', 'HybrisLexer', - 'EasytrieveLexer', 'JclLexer'] + 'EasytrieveLexer', 'JclLexer', 'MiniScriptLexer'] class LuaLexer(RegexLexer): @@ -40,7 +40,7 @@ class LuaLexer(RegexLexer): .. sourcecode:: pycon - >>> from typecode._vendor.pygments.lexers._lua_builtins import MODULES + >>> from pygments.lexers._lua_builtins import MODULES >>> MODULES.keys() ['string', 'coroutine', 'modules', 'io', 'basic', ...] 
""" @@ -142,7 +142,7 @@ def __init__(self, **options): self._functions = set() if self.func_name_highlighting: from typecode._vendor.pygments.lexers._lua_builtins import MODULES - for mod, func in iteritems(MODULES): + for mod, func in MODULES.items(): if mod not in self.disabled_modules: self._functions.update(func) RegexLexer.__init__(self, **options) @@ -157,12 +157,11 @@ def get_tokens_unprocessed(self, text): elif '.' in value: a, b = value.split('.') yield index, Name, a - yield index + len(a), Punctuation, u'.' + yield index + len(a), Punctuation, '.' yield index + len(a) + 1, Name, b continue yield index, token, value - class MoonScriptLexer(LuaLexer): """ For `MoonScript `_ source code. @@ -284,7 +283,7 @@ class ChaiscriptLexer(RegexLexer): (r'0x[0-9a-fA-F]+', Number.Hex), (r'[0-9]+', Number.Integer), (r'"', String.Double, 'dqstring'), - (r"'(\\\\|\\'|[^'])*'", String.Single), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), ], 'dqstring': [ (r'\$\{[^"}]+?\}', String.Interpol), @@ -660,18 +659,18 @@ class AppleScriptLexer(RegexLexer): tokens = { 'root': [ (r'\s+', Text), - (u'¬\\n', String.Escape), + (r'¬\n', String.Escape), (r"'s\s+", Text), # This is a possessive, consider moving (r'(--|#).*?$', Comment), (r'\(\*', Comment.Multiline, 'comment'), (r'[(){}!,.:]', Punctuation), - (u'(«)([^»]+)(»)', + (r'(«)([^»]+)(»)', bygroups(Text, Name.Builtin, Text)), (r'\b((?:considering|ignoring)\s*)' r'(application responses|case|diacriticals|hyphens|' r'numeric strings|punctuation|white space)', bygroups(Keyword, Name.Builtin)), - (u'(-|\\*|\\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\\^)', Operator), + (r'(-|\*|\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\^)', Operator), (r"\b(%s)\b" % '|'.join(Operators), Operator.Word), (r'^(\s*(?:on|end)\s+)' r'(%s)' % '|'.join(StudioEvents[::-1]), @@ -690,7 +689,7 @@ class AppleScriptLexer(RegexLexer): (r'\b(%s)s?\b' % '|'.join(StudioClasses), Name.Builtin), (r'\b(%s)\b' % '|'.join(StudioCommands), Name.Builtin), (r'\b(%s)\b' % '|'.join(References), Name.Builtin), 
- (r'"(\\\\|\\"|[^"])*"', String.Double), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), (r'\b(%s)\b' % Identifiers, Name.Variable), (r'[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?', Number.Float), (r'[-+]?\d+', Number.Integer), @@ -834,7 +833,7 @@ class MOOCodeLexer(RegexLexer): # Numbers (r'(0|[1-9][0-9_]*)', Number.Integer), # Strings - (r'"(\\\\|\\"|[^"])*"', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), # exceptions (r'(E_PERM|E_DIV)', Name.Exception), # db-refs @@ -925,7 +924,7 @@ class HybrisLexer(RegexLexer): 'Runnable', 'CGI', 'ClientSocket', 'Socket', 'ServerSocket', 'File', 'Console', 'Directory', 'Exception'), suffix=r'\b'), Keyword.Type), - (r'"(\\\\|\\"|[^"])*"', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char), (r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)), @@ -945,6 +944,15 @@ class HybrisLexer(RegexLexer): ], } + def analyse_text(text): + """public method and private method don't seem to be quite common + elsewhere.""" + result = 0 + if re.search(r'\b(?:public|private)\s+method\b', text): + result += 0.01 + return result + + class EasytrieveLexer(RegexLexer): """ @@ -977,7 +985,7 @@ class EasytrieveLexer(RegexLexer): _DELIMITER_PATTERN = '[' + _DELIMITERS + ']' _DELIMITER_PATTERN_CAPTURE = '(' + _DELIMITER_PATTERN + ')' _NON_DELIMITER_OR_COMMENT_PATTERN = '[^' + _DELIMITERS_OR_COMENT + ']' - _OPERATORS_PATTERN = u'[.+\\-/=\\[\\](){}<>;,&%¬]' + _OPERATORS_PATTERN = '[.+\\-/=\\[\\](){}<>;,&%¬]' _KEYWORDS = [ 'AFTER-BREAK', 'AFTER-LINE', 'AFTER-SCREEN', 'AIM', 'AND', 'ATTR', 'BEFORE', 'BEFORE-BREAK', 'BEFORE-LINE', 'BEFORE-SCREEN', 'BUSHU', @@ -1220,3 +1228,57 @@ def analyse_text(text): result = 1.0 assert 0.0 <= result <= 1.0 return result + + +class MiniScriptLexer(RegexLexer): + """ + For `MiniScript `_ source code. + + .. 
versionadded:: 2.6 + """ + + name = "MiniScript" + aliases = ["ms", "miniscript"] + filenames = ["*.ms"] + mimetypes = ['text/x-minicript', 'application/x-miniscript'] + + tokens = { + 'root': [ + (r'#!(.*?)$', Comment.Preproc), + default('base'), + ], + 'base': [ + ('//.*$', Comment.Single), + (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number), + (r'(?i)\d+e[+-]?\d+', Number), + (r'\d+', Number), + (r'\n', Text), + (r'[^\S\n]+', Text), + (r'"', String, 'string_double'), + (r'(==|!=|<=|>=|[=+\-*/%^<>.:])', Operator), + (r'[;,\[\]{}()]', Punctuation), + (words(( + 'break', 'continue', 'else', 'end', 'for', 'function', 'if', + 'in', 'isa', 'then', 'repeat', 'return', 'while'), suffix=r'\b'), + Keyword), + (words(( + 'abs', 'acos', 'asin', 'atan', 'ceil', 'char', 'cos', 'floor', + 'log', 'round', 'rnd', 'pi', 'sign', 'sin', 'sqrt', 'str', 'tan', + 'hasIndex', 'indexOf', 'len', 'val', 'code', 'remove', 'lower', + 'upper', 'replace', 'split', 'indexes', 'values', 'join', 'sum', + 'sort', 'shuffle', 'push', 'pop', 'pull', 'range', + 'print', 'input', 'time', 'wait', 'locals', 'globals', 'outer', + 'yield'), suffix=r'\b'), + Name.Builtin), + (r'(true|false|null)\b', Keyword.Constant), + (r'(and|or|not|new)\b', Operator.Word), + (r'(self|super|__isa)\b', Name.Builtin.Pseudo), + (r'[a-zA-Z_]\w*', Name.Variable) + ], + 'string_double': [ + (r'[^"\n]+', String), + (r'""', String), + (r'"', String, '#pop'), + (r'\n', Text, '#pop'), # Stray linefeed also terminates strings. + ] + } diff --git a/src/typecode/_vendor/pygments/lexers/sgf.py b/src/typecode/_vendor/pygments/lexers/sgf.py index cfe6336..d423757 100644 --- a/src/typecode/_vendor/pygments/lexers/sgf.py +++ b/src/typecode/_vendor/pygments/lexers/sgf.py @@ -5,7 +5,7 @@ Lexer for Smart Game Format (sgf) file format. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" diff --git a/src/typecode/_vendor/pygments/lexers/shell.py b/src/typecode/_vendor/pygments/lexers/shell.py index e9caf39..2b15969 100644 --- a/src/typecode/_vendor/pygments/lexers/shell.py +++ b/src/typecode/_vendor/pygments/lexers/shell.py @@ -5,7 +5,7 @@ Lexers for various shells. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -20,7 +20,8 @@ __all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer', 'SlurmBashLexer', 'MSDOSSessionLexer', 'PowerShellLexer', - 'PowerShellSessionLexer', 'TcshSessionLexer', 'FishShellLexer'] + 'PowerShellSessionLexer', 'TcshSessionLexer', 'FishShellLexer', + 'ExeclineLexer'] line_re = re.compile('.*?\n') @@ -56,7 +57,7 @@ class BashLexer(RegexLexer): (r'\$', Text), ], 'basic': [ - (r'\b(if|fi|else|while|do|done|for|then|return|function|case|' + (r'\b(if|fi|else|while|in|do|done|for|then|return|function|case|' r'select|continue|until|esac|elif)(\s*)\b', bygroups(Keyword, Text)), (r'\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|' @@ -148,12 +149,16 @@ def get_tokens_unprocessed(self, text): else: yield index, token, value + class ShellSessionBaseLexer(Lexer): """ - Base lexer for simplistic shell sessions. + Base lexer for shell sessions. .. 
versionadded:: 2.1 """ + + _venv = re.compile(r'^(\([^)]*\))(\s*)') + def get_tokens_unprocessed(self, text): innerlexer = self._innerLexerCls(**self.options) @@ -164,11 +169,24 @@ def get_tokens_unprocessed(self, text): for match in line_re.finditer(text): line = match.group() - m = re.match(self._ps1rgx, line) if backslash_continuation: curcode += line backslash_continuation = curcode.endswith('\\\n') - elif m: + continue + + venv_match = self._venv.match(line) + if venv_match: + venv = venv_match.group(1) + venv_whitespace = venv_match.group(2) + insertions.append((len(curcode), + [(0, Generic.Prompt.VirtualEnv, venv)])) + if venv_whitespace: + insertions.append((len(curcode), + [(0, Text, venv_whitespace)])) + line = line[venv_match.end():] + + m = self._ps1rgx.match(line) + if m: # To support output lexers (say diff output), the output # needs to be broken by prompts whenever the output lexer # changes. @@ -200,7 +218,8 @@ def get_tokens_unprocessed(self, text): class BashSessionLexer(ShellSessionBaseLexer): """ - Lexer for simplistic shell sessions. + Lexer for Bash shell sessions, i.e. command lines, including a + prompt, interspersed with output. .. 
versionadded:: 1.1 """ @@ -211,9 +230,9 @@ class BashSessionLexer(ShellSessionBaseLexer): mimetypes = ['application/x-shell-session', 'application/x-sh-session'] _innerLexerCls = BashLexer - _ps1rgx = \ + _ps1rgx = re.compile( r'^((?:(?:\[.*?\])|(?:\(\S+\))?(?:| |sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)' \ - r'?|\[\S+[@:][^\n]+\].+))\s*[$#%])(.*\n?)' + r'?|\[\S+[@:][^\n]+\].+))\s*[$#%]\s*)(.*\n?)') _ps2 = '>' @@ -233,14 +252,14 @@ class BatchLexer(RegexLexer): _nl = r'\n\x1a' _punct = r'&<>|' _ws = r'\t\v\f\r ,;=\xa0' + _nlws = r'\s\x1a\xa0,;=' _space = r'(?:(?:(?:\^[%s])?[%s])+)' % (_nl, _ws) _keyword_terminator = (r'(?=(?:\^[%s]?)?[%s+./:[\\\]]|[%s%s(])' % (_nl, _ws, _nl, _punct)) _token_terminator = r'(?=\^?[%s]|[%s%s])' % (_ws, _punct, _nl) _start_label = r'((?:(?<=^[^:])|^[^:]?)[%s]*)(:)' % _ws - _label = r'(?:(?:[^%s%s%s+:^]|\^[%s]?[\w\W])*)' % (_nl, _punct, _ws, _nl) - _label_compound = (r'(?:(?:[^%s%s%s+:^)]|\^[%s]?[^)])*)' % - (_nl, _punct, _ws, _nl)) + _label = r'(?:(?:[^%s%s+:^]|\^[%s]?[\w\W])*)' % (_nlws, _punct, _nl) + _label_compound = r'(?:(?:[^%s%s+:^)]|\^[%s]?[^)])*)' % (_nlws, _punct, _nl) _number = r'(?:-?(?:0[0-7]+|0x[\da-f]+|\d+)%s)' % _token_terminator _opword = r'(?:equ|geq|gtr|leq|lss|neq)' _string = r'(?:"[^%s"]*(?:"|(?=[%s])))' % (_nl, _nl) @@ -250,9 +269,8 @@ class BatchLexer(RegexLexer): r'(?:\^?![^!:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:' r'[^!%s^]|\^[^!%s])[^=%s]*=(?:[^!%s^]|\^[^!%s])*)?)?\^?!))' % (_nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl)) - _core_token = r'(?:(?:(?:\^[%s]?)?[^"%s%s%s])+)' % (_nl, _nl, _punct, _ws) - _core_token_compound = r'(?:(?:(?:\^[%s]?)?[^"%s%s%s)])+)' % (_nl, _nl, - _punct, _ws) + _core_token = r'(?:(?:(?:\^[%s]?)?[^"%s%s])+)' % (_nl, _nlws, _punct) + _core_token_compound = r'(?:(?:(?:\^[%s]?)?[^"%s%s)])+)' % (_nl, _nlws, _punct) _token = r'(?:[%s]+|%s)' % (_punct, _core_token) _token_compound = r'(?:[%s]+|%s)' % (_punct, _core_token_compound) _stoken = (r'(?:[%s]+|(?:%s|%s|%s)+)' % @@ -363,7 
+381,8 @@ def _make_follow_state(compound, _label=_label, return state def _make_arithmetic_state(compound, _nl=_nl, _punct=_punct, - _string=_string, _variable=_variable, _ws=_ws): + _string=_string, _variable=_variable, + _ws=_ws, _nlws=_nlws): op = r'=+\-*/!~' state = [] if compound: @@ -374,8 +393,8 @@ def _make_arithmetic_state(compound, _nl=_nl, _punct=_punct, (r'\d+', Number.Integer), (r'[(),]+', Punctuation), (r'([%s]|%%|\^\^)+' % op, Operator), - (r'(%s|%s|(\^[%s]?)?[^()%s%%^"%s%s%s]|\^[%s%s]?%s)+' % - (_string, _variable, _nl, op, _nl, _punct, _ws, _nl, _ws, + (r'(%s|%s|(\^[%s]?)?[^()%s%%\^"%s%s]|\^[%s]?%s)+' % + (_string, _variable, _nl, op, _nlws, _punct, _nlws, r'[^)]' if compound else r'[\w\W]'), using(this, state='variable')), (r'(?=[\x00|&])', Text, '#pop'), @@ -409,15 +428,15 @@ def _make_redirect_state(compound, _core_token_compound=_core_token_compound, _nl=_nl, _punct=_punct, _stoken=_stoken, _string=_string, _space=_space, - _variable=_variable, _ws=_ws): + _variable=_variable, _nlws=_nlws): stoken_compound = (r'(?:[%s]+|(?:%s|%s|%s)+)' % (_punct, _string, _variable, _core_token_compound)) return [ - (r'((?:(?<=[%s%s])\d)?)(>>?&|<&)([%s%s]*)(\d)' % - (_nl, _ws, _nl, _ws), + (r'((?:(?<=[%s])\d)?)(>>?&|<&)([%s]*)(\d)' % + (_nlws, _nlws), bygroups(Number.Integer, Punctuation, Text, Number.Integer)), - (r'((?:(?<=[%s%s])(?>?|<)(%s?%s)' % - (_nl, _ws, _nl, _space, stoken_compound if compound else _stoken), + (r'((?:(?<=[%s])(?>?|<)(%s?%s)' % + (_nlws, _nl, _space, stoken_compound if compound else _stoken), bygroups(Number.Integer, Punctuation, using(this, state='text'))) ] @@ -456,7 +475,7 @@ def _make_redirect_state(compound, 'text': [ (r'"', String.Double, 'string'), include('variable-or-escape'), - (r'[^"%%^%s%s%s\d)]+|.' % (_nl, _punct, _ws), Text) + (r'[^"%%^%s%s\d)]+|.' 
% (_nlws, _punct), Text) ], 'variable': [ (r'"', String.Double, 'string'), @@ -477,13 +496,13 @@ def _make_redirect_state(compound, include('follow') ], 'for/f': [ - (r'(")((?:%s|[^"])*?")([%s%s]*)(\))' % (_variable, _nl, _ws), + (r'(")((?:%s|[^"])*?")([%s]*)(\))' % (_variable, _nlws), bygroups(String.Double, using(this, state='string'), Text, Punctuation)), (r'"', String.Double, ('#pop', 'for2', 'string')), - (r"('(?:%%%%|%s|[\w\W])*?')([%s%s]*)(\))" % (_variable, _nl, _ws), + (r"('(?:%%%%|%s|[\w\W])*?')([%s]*)(\))" % (_variable, _nlws), bygroups(using(this, state='sqstring'), Text, Punctuation)), - (r'(`(?:%%%%|%s|[\w\W])*?`)([%s%s]*)(\))' % (_variable, _nl, _ws), + (r'(`(?:%%%%|%s|[\w\W])*?`)([%s]*)(\))' % (_variable, _nlws), bygroups(using(this, state='bqstring'), Text, Punctuation)), include('for2') ], @@ -529,7 +548,8 @@ def _make_redirect_state(compound, class MSDOSSessionLexer(ShellSessionBaseLexer): """ - Lexer for simplistic MSDOS sessions. + Lexer for MS DOS shell sessions, i.e. command lines, including a + prompt, interspersed with output. .. versionadded:: 2.1 """ @@ -540,7 +560,7 @@ class MSDOSSessionLexer(ShellSessionBaseLexer): mimetypes = [] _innerLexerCls = BatchLexer - _ps1rgx = r'^([^>]*>)(.*\n?)' + _ps1rgx = re.compile(r'^([^>]*>)(.*\n?)') _ps2 = 'More? ' @@ -614,7 +634,8 @@ class TcshLexer(RegexLexer): class TcshSessionLexer(ShellSessionBaseLexer): """ - Lexer for Tcsh sessions. + Lexer for Tcsh sessions, i.e. command lines, including a + prompt, interspersed with output. .. versionadded:: 2.1 """ @@ -625,7 +646,7 @@ class TcshSessionLexer(ShellSessionBaseLexer): mimetypes = [] _innerLexerCls = TcshLexer - _ps1rgx = r'^([^>]+>)(.*\n?)' + _ps1rgx = re.compile(r'^([^>]+>)(.*\n?)') _ps2 = '? ' @@ -745,7 +766,8 @@ class PowerShellLexer(RegexLexer): class PowerShellSessionLexer(ShellSessionBaseLexer): """ - Lexer for simplistic Windows PowerShell sessions. + Lexer for PowerShell sessions, i.e. 
command lines, including a + prompt, interspersed with output. .. versionadded:: 2.1 """ @@ -756,7 +778,7 @@ class PowerShellSessionLexer(ShellSessionBaseLexer): mimetypes = [] _innerLexerCls = PowerShellLexer - _ps1rgx = r'^(PS [^>]+> )(.*\n?)' + _ps1rgx = re.compile(r'^((?:\[[^]]+\]: )?PS[^>]*> ?)(.*\n?)') _ps2 = '>> ' @@ -831,3 +853,62 @@ class FishShellLexer(RegexLexer): include('root'), ], } + +class ExeclineLexer(RegexLexer): + """ + Lexer for Laurent Bercot's execline language + (https://skarnet.org/software/execline). + + .. versionadded:: 2.7 + """ + + name = 'execline' + aliases = ['execline'] + filenames = ['*.exec'] + + tokens = { + 'root': [ + include('basic'), + include('data'), + include('interp') + ], + 'interp': [ + (r'\$\{', String.Interpol, 'curly'), + (r'\$[\w@#]+', Name.Variable), # user variable + (r'\$', Text), + ], + 'basic': [ + (r'\b(background|backtick|cd|define|dollarat|elgetopt|' + r'elgetpositionals|elglob|emptyenv|envfile|exec|execlineb|' + r'exit|export|fdblock|fdclose|fdmove|fdreserve|fdswap|' + r'forbacktickx|foreground|forstdin|forx|getcwd|getpid|heredoc|' + r'homeof|if|ifelse|ifte|ifthenelse|importas|loopwhilex|' + r'multidefine|multisubstitute|pipeline|piperw|posix-cd|' + r'redirfd|runblock|shift|trap|tryexec|umask|unexport|wait|' + r'withstdinas)\b', Name.Builtin), + (r'\A#!.+\n', Comment.Hashbang), + (r'#.*\n', Comment.Single), + (r'[{}]', Operator) + ], + 'data': [ + (r'(?s)"(\\.|[^"\\$])*"', String.Double), + (r'"', String.Double, 'string'), + (r'\s+', Text), + (r'[^\s{}$"\\]+', Text) + ], + 'string': [ + (r'"', String.Double, '#pop'), + (r'(?s)(\\\\|\\.|[^"\\$])+', String.Double), + include('interp'), + ], + 'curly': [ + (r'\}', String.Interpol, '#pop'), + (r'[\w#@]+', Name.Variable), + include('root') + ] + + } + + def analyse_text(text): + if shebang_matches(text, r'execlineb'): + return 1 diff --git a/src/typecode/_vendor/pygments/lexers/sieve.py b/src/typecode/_vendor/pygments/lexers/sieve.py new file mode 100644 index 
0000000..3d05beb --- /dev/null +++ b/src/typecode/_vendor/pygments/lexers/sieve.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.sieve + ~~~~~~~~~~~~~~~~~~~~~ + + Lexer for Sieve file format. + + https://tools.ietf.org/html/rfc5228 + https://tools.ietf.org/html/rfc5173 + https://tools.ietf.org/html/rfc5229 + https://tools.ietf.org/html/rfc5230 + https://tools.ietf.org/html/rfc5232 + https://tools.ietf.org/html/rfc5235 + https://tools.ietf.org/html/rfc5429 + https://tools.ietf.org/html/rfc8580 + + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from typecode._vendor.pygments.lexer import RegexLexer, bygroups +from typecode._vendor.pygments.token import Comment, Name, Literal, String, Text, Punctuation, Keyword + +__all__ = ["SieveLexer"] + + +class SieveLexer(RegexLexer): + """ + Lexer for sieve format. + """ + name = 'Sieve' + filenames = ['*.siv', '*.sieve'] + aliases = ['sieve'] + + tokens = { + 'root': [ + (r'\s+', Text), + (r'[();,{}\[\]]', Punctuation), + # import: + (r'(?i)require', + Keyword.Namespace), + # tags: + (r'(?i)(:)(addresses|all|contains|content|create|copy|comparator|count|days|detail|domain|fcc|flags|from|handle|importance|is|localpart|length|lowerfirst|lower|matches|message|mime|options|over|percent|quotewildcard|raw|regex|specialuse|subject|text|under|upperfirst|upper|value)', + bygroups(Name.Tag, Name.Tag)), + # tokens: + (r'(?i)(address|addflag|allof|anyof|body|discard|elsif|else|envelope|ereject|exists|false|fileinto|if|hasflag|header|keep|notify_method_capability|notify|not|redirect|reject|removeflag|setflag|size|spamtest|stop|string|true|vacation|virustest)', + Name.Builtin), + (r'(?i)set', + Keyword.Declaration), + # number: + (r'([0-9.]+)([kmgKMG])?', + bygroups(Literal.Number, Literal.Number)), + # comment: + (r'#.*$', + Comment.Single), + (r'/\*.*\*/', + Comment.Multiline), + # string: + (r'"[^"]*?"', + String), + # text block: + (r'text:', + 
Name.Tag, 'text'), + ], + 'text': [ + (r'[^.].*?\n', String), + (r'^\.', Punctuation, "#pop"), + ] + } diff --git a/src/typecode/_vendor/pygments/lexers/slash.py b/src/typecode/_vendor/pygments/lexers/slash.py index 1b34428..6bf5f38 100644 --- a/src/typecode/_vendor/pygments/lexers/slash.py +++ b/src/typecode/_vendor/pygments/lexers/slash.py @@ -6,7 +6,7 @@ Lexer for the `Slash `_ programming language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -26,7 +26,7 @@ def move_state(new_state): def right_angle_bracket(lexer, match, ctx): if len(ctx.stack) > 1 and ctx.stack[-2] == "string": ctx.stack.pop() - yield match.start(), String.Interpol, "}" + yield match.start(), String.Interpol, '}' ctx.pos = match.end() pass @@ -178,8 +178,8 @@ class SlashLexer(DelegatingLexer): name = 'Slash' aliases = ['slash'] - filenames = ['*.sl'] + filenames = ['*.sla'] def __init__(self, **options): from typecode._vendor.pygments.lexers.web import HtmlLexer - super(SlashLexer, self).__init__(HtmlLexer, SlashLanguageLexer, **options) + super().__init__(HtmlLexer, SlashLanguageLexer, **options) diff --git a/src/typecode/_vendor/pygments/lexers/smalltalk.py b/src/typecode/_vendor/pygments/lexers/smalltalk.py index e5f8e43..a46f440 100644 --- a/src/typecode/_vendor/pygments/lexers/smalltalk.py +++ b/src/typecode/_vendor/pygments/lexers/smalltalk.py @@ -5,7 +5,7 @@ Lexers for Smalltalk and related languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -139,7 +139,7 @@ class SmalltalkLexer(RegexLexer): class NewspeakLexer(RegexLexer): """ - For `Newspeak ` syntax. + For `Newspeak `_ syntax. .. 
versionadded:: 1.1 """ diff --git a/src/typecode/_vendor/pygments/lexers/smv.py b/src/typecode/_vendor/pygments/lexers/smv.py index c03d822..3232951 100644 --- a/src/typecode/_vendor/pygments/lexers/smv.py +++ b/src/typecode/_vendor/pygments/lexers/smv.py @@ -5,13 +5,13 @@ Lexers for the SMV languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ from typecode._vendor.pygments.lexer import RegexLexer, words -from typecode._vendor.pygments.token import Comment, Generic, Keyword, Name, Number, \ - Operator, Punctuation, Text +from typecode._vendor.pygments.token import Comment, Keyword, Name, Number, Operator, \ + Punctuation, Text __all__ = ['NuSMVLexer'] diff --git a/src/typecode/_vendor/pygments/lexers/snobol.py b/src/typecode/_vendor/pygments/lexers/snobol.py index 81dbee8..de954aa 100644 --- a/src/typecode/_vendor/pygments/lexers/snobol.py +++ b/src/typecode/_vendor/pygments/lexers/snobol.py @@ -5,7 +5,7 @@ Lexers for the SNOBOL language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/solidity.py b/src/typecode/_vendor/pygments/lexers/solidity.py new file mode 100644 index 0000000..783a856 --- /dev/null +++ b/src/typecode/_vendor/pygments/lexers/solidity.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.solidity + ~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for Solidity. + + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +import re + +from typecode._vendor.pygments.lexer import RegexLexer, bygroups, include, words +from typecode._vendor.pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Whitespace + +__all__ = ['SolidityLexer'] + + +class SolidityLexer(RegexLexer): + """ + For Solidity source code. + + .. versionadded:: 2.5 + """ + + name = 'Solidity' + aliases = ['solidity'] + filenames = ['*.sol'] + mimetypes = [] + + flags = re.MULTILINE | re.UNICODE + + datatype = ( + r'\b(address|bool|(?:(?:bytes|hash|int|string|uint)(?:8|16|24|32|40|48|56|64' + r'|72|80|88|96|104|112|120|128|136|144|152|160|168|176|184|192|200|208' + r'|216|224|232|240|248|256)?))\b' + ) + + tokens = { + 'root': [ + include('whitespace'), + include('comments'), + (r'\bpragma\s+solidity\b', Keyword, 'pragma'), + (r'\b(contract)(\s+)([a-zA-Z_]\w*)', + bygroups(Keyword, Whitespace, Name.Entity)), + (datatype + r'(\s+)((?:external|public|internal|private)\s+)?' + + r'([a-zA-Z_]\w*)', + bygroups(Keyword.Type, Whitespace, Keyword, Name.Variable)), + (r'\b(enum|event|function|struct)(\s+)([a-zA-Z_]\w*)', + bygroups(Keyword.Type, Whitespace, Name.Variable)), + (r'\b(msg|block|tx)\.([A-Za-z_][a-zA-Z0-9_]*)\b', Keyword), + (words(( + 'block', 'break', 'constant', 'constructor', 'continue', + 'contract', 'do', 'else', 'external', 'false', 'for', + 'function', 'if', 'import', 'inherited', 'internal', 'is', + 'library', 'mapping', 'memory', 'modifier', 'msg', 'new', + 'payable', 'private', 'public', 'require', 'return', + 'returns', 'struct', 'suicide', 'throw', 'this', 'true', + 'tx', 'var', 'while'), prefix=r'\b', suffix=r'\b'), + Keyword.Type), + (words(('keccak256',), prefix=r'\b', suffix=r'\b'), Name.Builtin), + (datatype, Keyword.Type), + include('constants'), + (r'[a-zA-Z_]\w*', Text), + (r'[!<=>+*/-]', Operator), + (r'[.;:{}(),\[\]]', Punctuation) + ], + 'comments': [ + (r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single), + (r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', 
Comment.Multiline), + (r'/(\\\n)?[*][\w\W]*', Comment.Multiline) + ], + 'constants': [ + (r'("(\\"|.)*?")', String.Double), + (r"('(\\'|.)*?')", String.Single), + (r'\b0[xX][0-9a-fA-F]+\b', Number.Hex), + (r'\b\d+\b', Number.Decimal), + ], + 'pragma': [ + include('whitespace'), + include('comments'), + (r'(\^|>=|<)(\s*)(\d+\.\d+\.\d+)', + bygroups(Operator, Whitespace, Keyword)), + (r';', Punctuation, '#pop') + ], + 'whitespace': [ + (r'\s+', Whitespace), + (r'\n', Whitespace) + ] + } diff --git a/src/typecode/_vendor/pygments/lexers/special.py b/src/typecode/_vendor/pygments/lexers/special.py index 951454a..ddfa03e 100644 --- a/src/typecode/_vendor/pygments/lexers/special.py +++ b/src/typecode/_vendor/pygments/lexers/special.py @@ -5,15 +5,16 @@ Special lexers. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import re +from io import BytesIO from typecode._vendor.pygments.lexer import Lexer from typecode._vendor.pygments.token import Token, Error, Text -from typecode._vendor.pygments.util import get_choice_opt, text_type, BytesIO +from typecode._vendor.pygments.util import get_choice_opt __all__ = ['TextLexer', 'RawTokenLexer'] @@ -35,9 +36,10 @@ def get_tokens_unprocessed(self, text): def analyse_text(text): return TextLexer.priority + _ttype_cache = {} -line_re = re.compile(b'.*?\n') +line_re = re.compile('.*?\n') class RawTokenLexer(Lexer): @@ -63,20 +65,20 @@ def __init__(self, **options): Lexer.__init__(self, **options) def get_tokens(self, text): - if isinstance(text, text_type): - # raw token stream never has any non-ASCII characters - text = text.encode('ascii') - if self.compress == 'gz': - import gzip - gzipfile = gzip.GzipFile('', 'rb', 9, BytesIO(text)) - text = gzipfile.read() - elif self.compress == 'bz2': - import bz2 - text = bz2.decompress(text) - - # do not call Lexer.get_tokens() because we do not want Unicode - # 
decoding to occur, and stripping is not optional. - text = text.strip(b'\n') + b'\n' + if self.compress: + if isinstance(text, str): + text = text.encode('latin1') + if self.compress == 'gz': + import gzip + gzipfile = gzip.GzipFile('', 'rb', 9, BytesIO(text)) + text = gzipfile.read() + elif self.compress == 'bz2': + import bz2 + text = bz2.decompress(text) + text = text.decode('latin1') + + # do not call Lexer.get_tokens() because stripping is not optional. + text = text.strip('\n') + '\n' for i, t, v in self.get_tokens_unprocessed(text): yield t, v @@ -84,9 +86,9 @@ def get_tokens_unprocessed(self, text): length = 0 for match in line_re.finditer(text): try: - ttypestr, val = match.group().split(b'\t', 1) + ttypestr, val = match.group().rstrip().split('\t', 1) except ValueError: - val = match.group().decode('ascii', 'replace') + val = match.group() ttype = Error else: ttype = _ttype_cache.get(ttypestr) @@ -98,6 +100,6 @@ def get_tokens_unprocessed(self, text): raise ValueError('malformed token name') ttype = getattr(ttype, ttype_) _ttype_cache[ttypestr] = ttype - val = val[2:-2].decode('unicode-escape') + val = val[1:-1].encode().decode('unicode-escape') yield length, ttype, val length += len(val) diff --git a/src/typecode/_vendor/pygments/lexers/sql.py b/src/typecode/_vendor/pygments/lexers/sql.py index bde08aa..7ebedfa 100644 --- a/src/typecode/_vendor/pygments/lexers/sql.py +++ b/src/typecode/_vendor/pygments/lexers/sql.py @@ -34,20 +34,26 @@ The ``tests/examplefiles`` contains a few test files with data to be parsed by these lexers. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" import re from typecode._vendor.pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, words -from typecode._vendor.pygments.token import Punctuation, Whitespace, Error, \ - Text, Comment, Operator, Keyword, Name, String, Number, Generic +from typecode._vendor.pygments.token import Punctuation, Whitespace, Text, Comment, Operator, \ + Keyword, Name, String, Number, Generic, Literal from typecode._vendor.pygments.lexers import get_lexer_by_name, ClassNotFound -from typecode._vendor.pygments.util import iteritems from typecode._vendor.pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \ PSEUDO_TYPES, PLPGSQL_KEYWORDS +from typecode._vendor.pygments.lexers._mysql_builtins import \ + MYSQL_CONSTANTS, \ + MYSQL_DATATYPES, \ + MYSQL_FUNCTIONS, \ + MYSQL_KEYWORDS, \ + MYSQL_OPTIMIZER_HINTS + from typecode._vendor.pygments.lexers import _tsql_builtins @@ -75,28 +81,28 @@ def language_callback(lexer, match): The lexer is chosen looking for a nearby LANGUAGE or assumed as plpgsql if inside a DO statement and no LANGUAGE has been found. 
""" - l = None + lx = None m = language_re.match(lexer.text[match.end():match.end()+100]) if m is not None: - l = lexer._get_lexer(m.group(1)) + lx = lexer._get_lexer(m.group(1)) else: m = list(language_re.finditer( lexer.text[max(0, match.start()-100):match.start()])) if m: - l = lexer._get_lexer(m[-1].group(1)) + lx = lexer._get_lexer(m[-1].group(1)) else: m = list(do_re.finditer( lexer.text[max(0, match.start()-25):match.start()])) if m: - l = lexer._get_lexer('plpgsql') + lx = lexer._get_lexer('plpgsql') # 1 = $, 2 = delimiter, 3 = $ yield (match.start(1), String, match.group(1)) yield (match.start(2), String.Delimiter, match.group(2)) yield (match.start(3), String, match.group(3)) # 4 = string contents - if l: - for x in l.get_tokens_unprocessed(match.group(4)): + if lx: + for x in lx.get_tokens_unprocessed(match.group(4)): yield x else: yield (match.start(4), String, match.group(4)) @@ -106,7 +112,7 @@ def language_callback(lexer, match): yield (match.start(7), String, match.group(7)) -class PostgresBase(object): +class PostgresBase: """Base class for Postgres-related lexers. This is implemented as a mixin to avoid the Lexer metaclass kicking in. @@ -118,9 +124,7 @@ class PostgresBase(object): def get_tokens_unprocessed(self, text, *args): # Have a copy of the entire text to be used by `language_callback`. 
self.text = text - for x in super(PostgresBase, self).get_tokens_unprocessed( - text, *args): - yield x + yield from super().get_tokens_unprocessed(text, *args) def _get_lexer(self, lang): if lang.lower() == 'sql': @@ -134,9 +138,9 @@ def _get_lexer(self, lang): if lang.startswith('pl') and lang.endswith('u'): tries.append(lang[2:-1]) - for l in tries: + for lx in tries: try: - return get_lexer_by_name(l, **self.options) + return get_lexer_by_name(lx, **self.options) except ClassNotFound: pass else: @@ -163,8 +167,8 @@ class PostgresLexer(PostgresBase, RegexLexer): (r'--.*\n?', Comment.Single), (r'/\*', Comment.Multiline, 'multiline-comments'), (r'(' + '|'.join(s.replace(" ", r"\s+") - for s in DATATYPES + PSEUDO_TYPES) - + r')\b', Name.Builtin), + for s in DATATYPES + PSEUDO_TYPES) + r')\b', + Name.Builtin), (words(KEYWORDS, suffix=r'\b'), Keyword), (r'[+*/<>=~!@#%^&|`?-]+', Operator), (r'::', Operator), # cast @@ -212,7 +216,7 @@ class PlPgsqlLexer(PostgresBase, RegexLexer): mimetypes = ['text/x-plpgsql'] flags = re.IGNORECASE - tokens = dict((k, l[:]) for (k, l) in iteritems(PostgresLexer.tokens)) + tokens = {k: l[:] for (k, l) in PostgresLexer.tokens.items()} # extend the keywords list for i, pattern in enumerate(tokens['root']): @@ -246,7 +250,7 @@ class PsqlRegexLexer(PostgresBase, RegexLexer): aliases = [] # not public flags = re.IGNORECASE - tokens = dict((k, l[:]) for (k, l) in iteritems(PostgresLexer.tokens)) + tokens = {k: l[:] for (k, l) in PostgresLexer.tokens.items()} tokens['root'].append( (r'\\[^\s]+', Keyword.Pseudo, 'psql-command')) @@ -260,6 +264,7 @@ class PsqlRegexLexer(PostgresBase, RegexLexer): (r"[^\s]+", String.Symbol), ] + re_prompt = re.compile(r'^(\S.*?)??[=\-\(\$\'\"][#>]') re_psql_command = re.compile(r'\s*\\') re_end_command = re.compile(r';\s*(--.*?)?$') @@ -270,7 +275,7 @@ class PsqlRegexLexer(PostgresBase, RegexLexer): r'FATAL|HINT|DETAIL|CONTEXT|LINE [0-9]+):)(.*?\n)') -class lookahead(object): +class lookahead: """Wrap an 
iterator and allow pushing back an item.""" def __init__(self, x): self.iter = iter(x) @@ -319,8 +324,7 @@ def get_tokens_unprocessed(self, data): # Identify a shell prompt in case of psql commandline example if line.startswith('$') and not curcode: lexer = get_lexer_by_name('console', **self.options) - for x in lexer.get_tokens_unprocessed(line): - yield x + yield from lexer.get_tokens_unprocessed(line) break # Identify a psql prompt @@ -340,9 +344,8 @@ def get_tokens_unprocessed(self, data): break # Emit the combined stream of command and prompt(s) - for item in do_insertions(insertions, - sql.get_tokens_unprocessed(curcode)): - yield item + yield from do_insertions(insertions, + sql.get_tokens_unprocessed(curcode)) # Emit the output lines out_token = Generic.Output @@ -384,86 +387,99 @@ class SqlLexer(RegexLexer): (r'--.*\n?', Comment.Single), (r'/\*', Comment.Multiline, 'multiline-comments'), (words(( - 'ABORT', 'ABS', 'ABSOLUTE', 'ACCESS', 'ADA', 'ADD', 'ADMIN', 'AFTER', 'AGGREGATE', - 'ALIAS', 'ALL', 'ALLOCATE', 'ALTER', 'ANALYSE', 'ANALYZE', 'AND', 'ANY', 'ARE', 'AS', - 'ASC', 'ASENSITIVE', 'ASSERTION', 'ASSIGNMENT', 'ASYMMETRIC', 'AT', 'ATOMIC', - 'AUTHORIZATION', 'AVG', 'BACKWARD', 'BEFORE', 'BEGIN', 'BETWEEN', 'BITVAR', - 'BIT_LENGTH', 'BOTH', 'BREADTH', 'BY', 'C', 'CACHE', 'CALL', 'CALLED', 'CARDINALITY', - 'CASCADE', 'CASCADED', 'CASE', 'CAST', 'CATALOG', 'CATALOG_NAME', 'CHAIN', + 'ABORT', 'ABS', 'ABSOLUTE', 'ACCESS', 'ADA', 'ADD', 'ADMIN', 'AFTER', + 'AGGREGATE', 'ALIAS', 'ALL', 'ALLOCATE', 'ALTER', 'ANALYSE', 'ANALYZE', + 'AND', 'ANY', 'ARE', 'AS', 'ASC', 'ASENSITIVE', 'ASSERTION', 'ASSIGNMENT', + 'ASYMMETRIC', 'AT', 'ATOMIC', 'AUTHORIZATION', 'AVG', 'BACKWARD', + 'BEFORE', 'BEGIN', 'BETWEEN', 'BITVAR', 'BIT_LENGTH', 'BOTH', 'BREADTH', + 'BY', 'C', 'CACHE', 'CALL', 'CALLED', 'CARDINALITY', 'CASCADE', + 'CASCADED', 'CASE', 'CAST', 'CATALOG', 'CATALOG_NAME', 'CHAIN', 'CHARACTERISTICS', 'CHARACTER_LENGTH', 'CHARACTER_SET_CATALOG', 'CHARACTER_SET_NAME', 
'CHARACTER_SET_SCHEMA', 'CHAR_LENGTH', 'CHECK', - 'CHECKED', 'CHECKPOINT', 'CLASS', 'CLASS_ORIGIN', 'CLOB', 'CLOSE', 'CLUSTER', - 'COALSECE', 'COBOL', 'COLLATE', 'COLLATION', 'COLLATION_CATALOG', - 'COLLATION_NAME', 'COLLATION_SCHEMA', 'COLUMN', 'COLUMN_NAME', - 'COMMAND_FUNCTION', 'COMMAND_FUNCTION_CODE', 'COMMENT', 'COMMIT', - 'COMMITTED', 'COMPLETION', 'CONDITION_NUMBER', 'CONNECT', 'CONNECTION', - 'CONNECTION_NAME', 'CONSTRAINT', 'CONSTRAINTS', 'CONSTRAINT_CATALOG', - 'CONSTRAINT_NAME', 'CONSTRAINT_SCHEMA', 'CONSTRUCTOR', 'CONTAINS', - 'CONTINUE', 'CONVERSION', 'CONVERT', 'COPY', 'CORRESPONTING', 'COUNT', - 'CREATE', 'CREATEDB', 'CREATEUSER', 'CROSS', 'CUBE', 'CURRENT', 'CURRENT_DATE', - 'CURRENT_PATH', 'CURRENT_ROLE', 'CURRENT_TIME', 'CURRENT_TIMESTAMP', - 'CURRENT_USER', 'CURSOR', 'CURSOR_NAME', 'CYCLE', 'DATA', 'DATABASE', + 'CHECKED', 'CHECKPOINT', 'CLASS', 'CLASS_ORIGIN', 'CLOB', 'CLOSE', + 'CLUSTER', 'COALESCE', 'COBOL', 'COLLATE', 'COLLATION', + 'COLLATION_CATALOG', 'COLLATION_NAME', 'COLLATION_SCHEMA', 'COLUMN', + 'COLUMN_NAME', 'COMMAND_FUNCTION', 'COMMAND_FUNCTION_CODE', 'COMMENT', + 'COMMIT', 'COMMITTED', 'COMPLETION', 'CONDITION_NUMBER', 'CONNECT', + 'CONNECTION', 'CONNECTION_NAME', 'CONSTRAINT', 'CONSTRAINTS', + 'CONSTRAINT_CATALOG', 'CONSTRAINT_NAME', 'CONSTRAINT_SCHEMA', + 'CONSTRUCTOR', 'CONTAINS', 'CONTINUE', 'CONVERSION', 'CONVERT', + 'COPY', 'CORRESPONDING', 'COUNT', 'CREATE', 'CREATEDB', 'CREATEUSER', + 'CROSS', 'CUBE', 'CURRENT', 'CURRENT_DATE', 'CURRENT_PATH', + 'CURRENT_ROLE', 'CURRENT_TIME', 'CURRENT_TIMESTAMP', 'CURRENT_USER', + 'CURSOR', 'CURSOR_NAME', 'CYCLE', 'DATA', 'DATABASE', 'DATETIME_INTERVAL_CODE', 'DATETIME_INTERVAL_PRECISION', 'DAY', - 'DEALLOCATE', 'DECLARE', 'DEFAULT', 'DEFAULTS', 'DEFERRABLE', 'DEFERRED', - 'DEFINED', 'DEFINER', 'DELETE', 'DELIMITER', 'DELIMITERS', 'DEREF', 'DESC', - 'DESCRIBE', 'DESCRIPTOR', 'DESTROY', 'DESTRUCTOR', 'DETERMINISTIC', - 'DIAGNOSTICS', 'DICTIONARY', 'DISCONNECT', 'DISPATCH', 'DISTINCT', 
'DO', - 'DOMAIN', 'DROP', 'DYNAMIC', 'DYNAMIC_FUNCTION', 'DYNAMIC_FUNCTION_CODE', 'EACH', - 'ELSE', 'ELSIF', 'ENCODING', 'ENCRYPTED', 'END', 'END-EXEC', 'EQUALS', 'ESCAPE', 'EVERY', - 'EXCEPTION', 'EXCEPT', 'EXCLUDING', 'EXCLUSIVE', 'EXEC', 'EXECUTE', 'EXISTING', - 'EXISTS', 'EXPLAIN', 'EXTERNAL', 'EXTRACT', 'FALSE', 'FETCH', 'FINAL', 'FIRST', 'FOR', - 'FORCE', 'FOREIGN', 'FORTRAN', 'FORWARD', 'FOUND', 'FREE', 'FREEZE', 'FROM', 'FULL', - 'FUNCTION', 'G', 'GENERAL', 'GENERATED', 'GET', 'GLOBAL', 'GO', 'GOTO', 'GRANT', 'GRANTED', - 'GROUP', 'GROUPING', 'HANDLER', 'HAVING', 'HIERARCHY', 'HOLD', 'HOST', 'IDENTITY', 'IF', - 'IGNORE', 'ILIKE', 'IMMEDIATE', 'IMMUTABLE', 'IMPLEMENTATION', 'IMPLICIT', 'IN', - 'INCLUDING', 'INCREMENT', 'INDEX', 'INDITCATOR', 'INFIX', 'INHERITS', 'INITIALIZE', - 'INITIALLY', 'INNER', 'INOUT', 'INPUT', 'INSENSITIVE', 'INSERT', 'INSTANTIABLE', - 'INSTEAD', 'INTERSECT', 'INTO', 'INVOKER', 'IS', 'ISNULL', 'ISOLATION', 'ITERATE', 'JOIN', - 'KEY', 'KEY_MEMBER', 'KEY_TYPE', 'LANCOMPILER', 'LANGUAGE', 'LARGE', 'LAST', - 'LATERAL', 'LEADING', 'LEFT', 'LENGTH', 'LESS', 'LEVEL', 'LIKE', 'LIMIT', 'LISTEN', 'LOAD', - 'LOCAL', 'LOCALTIME', 'LOCALTIMESTAMP', 'LOCATION', 'LOCATOR', 'LOCK', 'LOWER', - 'MAP', 'MATCH', 'MAX', 'MAXVALUE', 'MESSAGE_LENGTH', 'MESSAGE_OCTET_LENGTH', - 'MESSAGE_TEXT', 'METHOD', 'MIN', 'MINUTE', 'MINVALUE', 'MOD', 'MODE', 'MODIFIES', - 'MODIFY', 'MONTH', 'MORE', 'MOVE', 'MUMPS', 'NAMES', 'NATIONAL', 'NATURAL', 'NCHAR', - 'NCLOB', 'NEW', 'NEXT', 'NO', 'NOCREATEDB', 'NOCREATEUSER', 'NONE', 'NOT', 'NOTHING', - 'NOTIFY', 'NOTNULL', 'NULL', 'NULLABLE', 'NULLIF', 'OBJECT', 'OCTET_LENGTH', 'OF', 'OFF', - 'OFFSET', 'OIDS', 'OLD', 'ON', 'ONLY', 'OPEN', 'OPERATION', 'OPERATOR', 'OPTION', 'OPTIONS', - 'OR', 'ORDER', 'ORDINALITY', 'OUT', 'OUTER', 'OUTPUT', 'OVERLAPS', 'OVERLAY', 'OVERRIDING', - 'OWNER', 'PAD', 'PARAMETER', 'PARAMETERS', 'PARAMETER_MODE', 'PARAMATER_NAME', - 'PARAMATER_ORDINAL_POSITION', 'PARAMETER_SPECIFIC_CATALOG', - 
'PARAMETER_SPECIFIC_NAME', 'PARAMATER_SPECIFIC_SCHEMA', 'PARTIAL', - 'PASCAL', 'PENDANT', 'PLACING', 'PLI', 'POSITION', 'POSTFIX', 'PRECISION', 'PREFIX', - 'PREORDER', 'PREPARE', 'PRESERVE', 'PRIMARY', 'PRIOR', 'PRIVILEGES', 'PROCEDURAL', - 'PROCEDURE', 'PUBLIC', 'READ', 'READS', 'RECHECK', 'RECURSIVE', 'REF', 'REFERENCES', - 'REFERENCING', 'REINDEX', 'RELATIVE', 'RENAME', 'REPEATABLE', 'REPLACE', 'RESET', - 'RESTART', 'RESTRICT', 'RESULT', 'RETURN', 'RETURNED_LENGTH', - 'RETURNED_OCTET_LENGTH', 'RETURNED_SQLSTATE', 'RETURNS', 'REVOKE', 'RIGHT', - 'ROLE', 'ROLLBACK', 'ROLLUP', 'ROUTINE', 'ROUTINE_CATALOG', 'ROUTINE_NAME', - 'ROUTINE_SCHEMA', 'ROW', 'ROWS', 'ROW_COUNT', 'RULE', 'SAVE_POINT', 'SCALE', 'SCHEMA', - 'SCHEMA_NAME', 'SCOPE', 'SCROLL', 'SEARCH', 'SECOND', 'SECURITY', 'SELECT', 'SELF', - 'SENSITIVE', 'SERIALIZABLE', 'SERVER_NAME', 'SESSION', 'SESSION_USER', 'SET', - 'SETOF', 'SETS', 'SHARE', 'SHOW', 'SIMILAR', 'SIMPLE', 'SIZE', 'SOME', 'SOURCE', 'SPACE', - 'SPECIFIC', 'SPECIFICTYPE', 'SPECIFIC_NAME', 'SQL', 'SQLCODE', 'SQLERROR', - 'SQLEXCEPTION', 'SQLSTATE', 'SQLWARNINIG', 'STABLE', 'START', 'STATE', 'STATEMENT', - 'STATIC', 'STATISTICS', 'STDIN', 'STDOUT', 'STORAGE', 'STRICT', 'STRUCTURE', 'STYPE', - 'SUBCLASS_ORIGIN', 'SUBLIST', 'SUBSTRING', 'SUM', 'SYMMETRIC', 'SYSID', 'SYSTEM', - 'SYSTEM_USER', 'TABLE', 'TABLE_NAME', ' TEMP', 'TEMPLATE', 'TEMPORARY', 'TERMINATE', - 'THAN', 'THEN', 'TIMESTAMP', 'TIMEZONE_HOUR', 'TIMEZONE_MINUTE', 'TO', 'TOAST', - 'TRAILING', 'TRANSATION', 'TRANSACTIONS_COMMITTED', - 'TRANSACTIONS_ROLLED_BACK', 'TRANSATION_ACTIVE', 'TRANSFORM', - 'TRANSFORMS', 'TRANSLATE', 'TRANSLATION', 'TREAT', 'TRIGGER', 'TRIGGER_CATALOG', - 'TRIGGER_NAME', 'TRIGGER_SCHEMA', 'TRIM', 'TRUE', 'TRUNCATE', 'TRUSTED', 'TYPE', - 'UNCOMMITTED', 'UNDER', 'UNENCRYPTED', 'UNION', 'UNIQUE', 'UNKNOWN', 'UNLISTEN', - 'UNNAMED', 'UNNEST', 'UNTIL', 'UPDATE', 'UPPER', 'USAGE', 'USER', - 'USER_DEFINED_TYPE_CATALOG', 'USER_DEFINED_TYPE_NAME', - 
'USER_DEFINED_TYPE_SCHEMA', 'USING', 'VACUUM', 'VALID', 'VALIDATOR', 'VALUES', - 'VARIABLE', 'VERBOSE', 'VERSION', 'VIEW', 'VOLATILE', 'WHEN', 'WHENEVER', 'WHERE', - 'WITH', 'WITHOUT', 'WORK', 'WRITE', 'YEAR', 'ZONE'), suffix=r'\b'), + 'DEALLOCATE', 'DECLARE', 'DEFAULT', 'DEFAULTS', 'DEFERRABLE', + 'DEFERRED', 'DEFINED', 'DEFINER', 'DELETE', 'DELIMITER', 'DELIMITERS', + 'DEREF', 'DESC', 'DESCRIBE', 'DESCRIPTOR', 'DESTROY', 'DESTRUCTOR', + 'DETERMINISTIC', 'DIAGNOSTICS', 'DICTIONARY', 'DISCONNECT', 'DISPATCH', + 'DISTINCT', 'DO', 'DOMAIN', 'DROP', 'DYNAMIC', 'DYNAMIC_FUNCTION', + 'DYNAMIC_FUNCTION_CODE', 'EACH', 'ELSE', 'ELSIF', 'ENCODING', + 'ENCRYPTED', 'END', 'END-EXEC', 'EQUALS', 'ESCAPE', 'EVERY', 'EXCEPTION', + 'EXCEPT', 'EXCLUDING', 'EXCLUSIVE', 'EXEC', 'EXECUTE', 'EXISTING', + 'EXISTS', 'EXPLAIN', 'EXTERNAL', 'EXTRACT', 'FALSE', 'FETCH', 'FINAL', + 'FIRST', 'FOR', 'FORCE', 'FOREIGN', 'FORTRAN', 'FORWARD', 'FOUND', 'FREE', + 'FREEZE', 'FROM', 'FULL', 'FUNCTION', 'G', 'GENERAL', 'GENERATED', 'GET', + 'GLOBAL', 'GO', 'GOTO', 'GRANT', 'GRANTED', 'GROUP', 'GROUPING', + 'HANDLER', 'HAVING', 'HIERARCHY', 'HOLD', 'HOST', 'IDENTITY', 'IF', + 'IGNORE', 'ILIKE', 'IMMEDIATE', 'IMMEDIATELY', 'IMMUTABLE', 'IMPLEMENTATION', 'IMPLICIT', + 'IN', 'INCLUDING', 'INCREMENT', 'INDEX', 'INDITCATOR', 'INFIX', + 'INHERITS', 'INITIALIZE', 'INITIALLY', 'INNER', 'INOUT', 'INPUT', + 'INSENSITIVE', 'INSERT', 'INSTANTIABLE', 'INSTEAD', 'INTERSECT', 'INTO', + 'INVOKER', 'IS', 'ISNULL', 'ISOLATION', 'ITERATE', 'JOIN', 'KEY', + 'KEY_MEMBER', 'KEY_TYPE', 'LANCOMPILER', 'LANGUAGE', 'LARGE', 'LAST', + 'LATERAL', 'LEADING', 'LEFT', 'LENGTH', 'LESS', 'LEVEL', 'LIKE', 'LIMIT', + 'LISTEN', 'LOAD', 'LOCAL', 'LOCALTIME', 'LOCALTIMESTAMP', 'LOCATION', + 'LOCATOR', 'LOCK', 'LOWER', 'MAP', 'MATCH', 'MAX', 'MAXVALUE', + 'MESSAGE_LENGTH', 'MESSAGE_OCTET_LENGTH', 'MESSAGE_TEXT', 'METHOD', 'MIN', + 'MINUTE', 'MINVALUE', 'MOD', 'MODE', 'MODIFIES', 'MODIFY', 'MONTH', + 'MORE', 'MOVE', 'MUMPS', 'NAMES', 
'NATIONAL', 'NATURAL', 'NCHAR', 'NCLOB', + 'NEW', 'NEXT', 'NO', 'NOCREATEDB', 'NOCREATEUSER', 'NONE', 'NOT', + 'NOTHING', 'NOTIFY', 'NOTNULL', 'NULL', 'NULLABLE', 'NULLIF', 'OBJECT', + 'OCTET_LENGTH', 'OF', 'OFF', 'OFFSET', 'OIDS', 'OLD', 'ON', 'ONLY', + 'OPEN', 'OPERATION', 'OPERATOR', 'OPTION', 'OPTIONS', 'OR', 'ORDER', + 'ORDINALITY', 'OUT', 'OUTER', 'OUTPUT', 'OVERLAPS', 'OVERLAY', + 'OVERRIDING', 'OWNER', 'PAD', 'PARAMETER', 'PARAMETERS', 'PARAMETER_MODE', + 'PARAMETER_NAME', 'PARAMETER_ORDINAL_POSITION', + 'PARAMETER_SPECIFIC_CATALOG', 'PARAMETER_SPECIFIC_NAME', + 'PARAMETER_SPECIFIC_SCHEMA', 'PARTIAL', 'PASCAL', 'PENDANT', 'PERIOD', 'PLACING', + 'PLI', 'POSITION', 'POSTFIX', 'PRECEEDS', 'PRECISION', 'PREFIX', 'PREORDER', + 'PREPARE', 'PRESERVE', 'PRIMARY', 'PRIOR', 'PRIVILEGES', 'PROCEDURAL', + 'PROCEDURE', 'PUBLIC', 'READ', 'READS', 'RECHECK', 'RECURSIVE', 'REF', + 'REFERENCES', 'REFERENCING', 'REINDEX', 'RELATIVE', 'RENAME', + 'REPEATABLE', 'REPLACE', 'RESET', 'RESTART', 'RESTRICT', 'RESULT', + 'RETURN', 'RETURNED_LENGTH', 'RETURNED_OCTET_LENGTH', 'RETURNED_SQLSTATE', + 'RETURNS', 'REVOKE', 'RIGHT', 'ROLE', 'ROLLBACK', 'ROLLUP', 'ROUTINE', + 'ROUTINE_CATALOG', 'ROUTINE_NAME', 'ROUTINE_SCHEMA', 'ROW', 'ROWS', + 'ROW_COUNT', 'RULE', 'SAVE_POINT', 'SCALE', 'SCHEMA', 'SCHEMA_NAME', + 'SCOPE', 'SCROLL', 'SEARCH', 'SECOND', 'SECURITY', 'SELECT', 'SELF', + 'SENSITIVE', 'SERIALIZABLE', 'SERVER_NAME', 'SESSION', 'SESSION_USER', + 'SET', 'SETOF', 'SETS', 'SHARE', 'SHOW', 'SIMILAR', 'SIMPLE', 'SIZE', + 'SOME', 'SOURCE', 'SPACE', 'SPECIFIC', 'SPECIFICTYPE', 'SPECIFIC_NAME', + 'SQL', 'SQLCODE', 'SQLERROR', 'SQLEXCEPTION', 'SQLSTATE', 'SQLWARNINIG', + 'STABLE', 'START', 'STATE', 'STATEMENT', 'STATIC', 'STATISTICS', 'STDIN', + 'STDOUT', 'STORAGE', 'STRICT', 'STRUCTURE', 'STYPE', 'SUBCLASS_ORIGIN', + 'SUBLIST', 'SUBSTRING', 'SUCCEEDS', 'SUM', 'SYMMETRIC', 'SYSID', 'SYSTEM', + 'SYSTEM_USER', 'TABLE', 'TABLE_NAME', ' TEMP', 'TEMPLATE', 'TEMPORARY', + 'TERMINATE', 'THAN', 
'THEN', 'TIME', 'TIMESTAMP', 'TIMEZONE_HOUR', + 'TIMEZONE_MINUTE', 'TO', 'TOAST', 'TRAILING', 'TRANSACTION', + 'TRANSACTIONS_COMMITTED', 'TRANSACTIONS_ROLLED_BACK', 'TRANSACTION_ACTIVE', + 'TRANSFORM', 'TRANSFORMS', 'TRANSLATE', 'TRANSLATION', 'TREAT', 'TRIGGER', + 'TRIGGER_CATALOG', 'TRIGGER_NAME', 'TRIGGER_SCHEMA', 'TRIM', 'TRUE', + 'TRUNCATE', 'TRUSTED', 'TYPE', 'UNCOMMITTED', 'UNDER', 'UNENCRYPTED', + 'UNION', 'UNIQUE', 'UNKNOWN', 'UNLISTEN', 'UNNAMED', 'UNNEST', 'UNTIL', + 'UPDATE', 'UPPER', 'USAGE', 'USER', 'USER_DEFINED_TYPE_CATALOG', + 'USER_DEFINED_TYPE_NAME', 'USER_DEFINED_TYPE_SCHEMA', 'USING', 'VACUUM', + 'VALID', 'VALIDATOR', 'VALUES', 'VARIABLE', 'VERBOSE', + 'VERSION', 'VERSIONS', 'VERSIONING', 'VIEW', + 'VOLATILE', 'WHEN', 'WHENEVER', 'WHERE', 'WITH', 'WITHOUT', 'WORK', + 'WRITE', 'YEAR', 'ZONE'), suffix=r'\b'), Keyword), (words(( - 'ARRAY', 'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR', 'CHARACTER', 'DATE', - 'DEC', 'DECIMAL', 'FLOAT', 'INT', 'INTEGER', 'INTERVAL', 'NUMBER', 'NUMERIC', 'REAL', - 'SERIAL', 'SMALLINT', 'VARCHAR', 'VARYING', 'INT8', 'SERIAL8', 'TEXT'), suffix=r'\b'), + 'ARRAY', 'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR', + 'CHARACTER', 'DATE', 'DEC', 'DECIMAL', 'FLOAT', 'INT', 'INTEGER', + 'INTERVAL', 'NUMBER', 'NUMERIC', 'REAL', 'SERIAL', 'SMALLINT', + 'VARCHAR', 'VARYING', 'INT8', 'SERIAL8', 'TEXT'), suffix=r'\b'), Name.Builtin), (r'[+*/<>=~!@#%^&|`?-]', Operator), (r'[0-9]+', Number.Integer), @@ -503,7 +519,7 @@ class TransactSqlLexer(RegexLexer): tokens = { 'root': [ (r'\s+', Whitespace), - (r'(?m)--.*?$\n?', Comment.Single), + (r'--.*?$\n?', Comment.Single), (r'/\*', Comment.Multiline, 'multiline-comments'), (words(_tsql_builtins.OPERATORS), Operator), (words(_tsql_builtins.OPERATOR_WORDS, suffix=r'\b'), Operator.Word), @@ -547,14 +563,15 @@ def analyse_text(text): rating = 1.0 else: name_between_backtick_count = len( - name_between_backtick_re.findall((text))) + name_between_backtick_re.findall(text)) 
name_between_bracket_count = len( name_between_bracket_re.findall(text)) # We need to check if there are any names using # backticks or brackets, as otherwise both are 0 # and 0 >= 2 * 0, so we would always assume it's true dialect_name_count = name_between_backtick_count + name_between_bracket_count - if dialect_name_count >= 1 and name_between_bracket_count >= 2 * name_between_backtick_count: + if dialect_name_count >= 1 and \ + name_between_bracket_count >= 2 * name_between_backtick_count: # Found at least twice as many [name] as `name`. rating += 0.5 elif name_between_bracket_count > name_between_backtick_count: @@ -569,8 +586,12 @@ def analyse_text(text): class MySqlLexer(RegexLexer): - """ - Special lexer for MySQL. + """The Oracle MySQL lexer. + + This lexer does not attempt to maintain strict compatibility with + MariaDB syntax or keywords. Although MySQL and MariaDB's common code + history suggests there may be significant overlap between the two, + compatibility between the two is not a target for this lexer. 
""" name = 'MySQL' @@ -581,74 +602,163 @@ class MySqlLexer(RegexLexer): tokens = { 'root': [ (r'\s+', Text), - (r'(#|--\s+).*\n?', Comment.Single), - (r'/\*', Comment.Multiline, 'multiline-comments'), - (r'[0-9]+', Number.Integer), - (r'[0-9]*\.[0-9]+(e[+-][0-9]+)', Number.Float), - (r"'(\\\\|\\'|''|[^'])*'", String.Single), - (r'"(\\\\|\\"|""|[^"])*"', String.Double), - (r"`(\\\\|\\`|``|[^`])*`", String.Symbol), - (r'[+*/<>=~!@#%^&|`?-]', Operator), - (r'\b(tinyint|smallint|mediumint|int|integer|bigint|date|' - r'datetime|time|bit|bool|tinytext|mediumtext|longtext|text|' - r'tinyblob|mediumblob|longblob|blob|float|double|double\s+' - r'precision|real|numeric|dec|decimal|timestamp|year|char|' - r'varchar|varbinary|varcharacter|enum|set)(\b\s*)(\()?', - bygroups(Keyword.Type, Text, Punctuation)), - (r'\b(add|all|alter|analyze|and|as|asc|asensitive|before|between|' - r'bigint|binary|blob|both|by|call|cascade|case|change|char|' - r'character|check|collate|column|condition|constraint|continue|' - r'convert|create|cross|current_date|current_time|' - r'current_timestamp|current_user|cursor|database|databases|' - r'day_hour|day_microsecond|day_minute|day_second|dec|decimal|' - r'declare|default|delayed|delete|desc|describe|deterministic|' - r'distinct|distinctrow|div|double|drop|dual|each|else|elseif|' - r'enclosed|escaped|exists|exit|explain|fetch|flush|float|float4|' - r'float8|for|force|foreign|from|fulltext|grant|group|having|' - r'high_priority|hour_microsecond|hour_minute|hour_second|if|' - r'ignore|in|index|infile|inner|inout|insensitive|insert|int|' - r'int1|int2|int3|int4|int8|integer|interval|into|is|iterate|' - r'join|key|keys|kill|leading|leave|left|like|limit|lines|load|' - r'localtime|localtimestamp|lock|long|loop|low_priority|match|' - r'minute_microsecond|minute_second|mod|modifies|natural|' - r'no_write_to_binlog|not|numeric|on|optimize|option|optionally|' - r'or|order|out|outer|outfile|precision|primary|procedure|purge|' - 
r'raid0|read|reads|real|references|regexp|release|rename|repeat|' - r'replace|require|restrict|return|revoke|right|rlike|schema|' - r'schemas|second_microsecond|select|sensitive|separator|set|' - r'show|smallint|soname|spatial|specific|sql|sql_big_result|' - r'sql_calc_found_rows|sql_small_result|sqlexception|sqlstate|' - r'sqlwarning|ssl|starting|straight_join|table|terminated|then|' - r'to|trailing|trigger|undo|union|unique|unlock|unsigned|update|' - r'usage|use|using|utc_date|utc_time|utc_timestamp|values|' - r'varying|when|where|while|with|write|x509|xor|year_month|' - r'zerofill)\b', Keyword), - # TODO: this list is not complete - (r'\b(auto_increment|engine|charset|tables)\b', Keyword.Pseudo), - (r'(true|false|null)', Name.Constant), - (r'([a-z_]\w*)(\s*)(\()', + + # Comments + (r'(?:#|--\s+).*', Comment.Single), + (r'/\*\+', Comment.Special, 'optimizer-hints'), + (r'/\*', Comment.Multiline, 'multiline-comment'), + + # Hexadecimal literals + (r"x'([0-9a-f]{2})+'", Number.Hex), # MySQL requires paired hex characters in this form. 
+ (r'0x[0-9a-f]+', Number.Hex), + + # Binary literals + (r"b'[01]+'", Number.Bin), + (r'0b[01]+', Number.Bin), + + # Numeric literals + (r'[0-9]+\.[0-9]*(e[+-]?[0-9]+)?', Number.Float), # Mandatory integer, optional fraction and exponent + (r'[0-9]*\.[0-9]+(e[+-]?[0-9]+)?', Number.Float), # Mandatory fraction, optional integer and exponent + (r'[0-9]+e[+-]?[0-9]+', Number.Float), # Exponents with integer significands are still floats + (r'[0-9]+(?=[^0-9a-z$_\u0080-\uffff])', Number.Integer), # Integers that are not in a schema object name + + # Date literals + (r"\{\s*d\s*(?P['\"])\s*\d{2}(\d{2})?.?\d{2}.?\d{2}\s*(?P=quote)\s*\}", + Literal.Date), + + # Time literals + (r"\{\s*t\s*(?P['\"])\s*(?:\d+\s+)?\d{1,2}.?\d{1,2}.?\d{1,2}(\.\d*)?\s*(?P=quote)\s*\}", + Literal.Date), + + # Timestamp literals + ( + r"\{\s*ts\s*(?P['\"])\s*" + r"\d{2}(?:\d{2})?.?\d{2}.?\d{2}" # Date part + r"\s+" # Whitespace between date and time + r"\d{1,2}.?\d{1,2}.?\d{1,2}(\.\d*)?" # Time part + r"\s*(?P=quote)\s*\}", + Literal.Date + ), + + # String literals + (r"'", String.Single, 'single-quoted-string'), + (r'"', String.Double, 'double-quoted-string'), + + # Variables + (r'@@(?:global\.|persist\.|persist_only\.|session\.)?[a-z_]+', Name.Variable), + (r'@[a-z0-9_$.]+', Name.Variable), + (r"@'", Name.Variable, 'single-quoted-variable'), + (r'@"', Name.Variable, 'double-quoted-variable'), + (r"@`", Name.Variable, 'backtick-quoted-variable'), + (r'\?', Name.Variable), # For demonstrating prepared statements + + # Operators + (r'[!%&*+/:<=>^|~-]+', Operator), + + # Exceptions; these words tokenize differently in different contexts. + (r'\b(set)(?!\s*\()', Keyword), + (r'\b(character)(\s+)(set)\b', bygroups(Keyword, Text, Keyword)), + # In all other known cases, "SET" is tokenized by MYSQL_DATATYPES. 
+ + (words(MYSQL_CONSTANTS, prefix=r'\b', suffix=r'\b'), Name.Constant), + (words(MYSQL_DATATYPES, prefix=r'\b', suffix=r'\b'), Keyword.Type), + (words(MYSQL_KEYWORDS, prefix=r'\b', suffix=r'\b'), Keyword), + (words(MYSQL_FUNCTIONS, prefix=r'\b', suffix=r'\b(\s*)(\()'), bygroups(Name.Function, Text, Punctuation)), - (r'[a-z_]\w*', Name), - (r'@[a-z0-9]*[._]*[a-z0-9]*', Name.Variable), - (r'[;:()\[\],.]', Punctuation) + + # Schema object names + # + # Note: Although the first regex supports unquoted all-numeric + # identifiers, this will not be a problem in practice because + # numeric literals have already been handled above. + # + ('[0-9a-z$_\u0080-\uffff]+', Name), + (r'`', Name.Quoted, 'schema-object-name'), + + # Punctuation + (r'[(),.;]', Punctuation), ], - 'multiline-comments': [ - (r'/\*', Comment.Multiline, 'multiline-comments'), + + # Multiline comment substates + # --------------------------- + + 'optimizer-hints': [ + (r'[^*a-z]+', Comment.Special), + (r'\*/', Comment.Special, '#pop'), + (words(MYSQL_OPTIMIZER_HINTS, suffix=r'\b'), Comment.Preproc), + ('[a-z]+', Comment.Special), + (r'\*', Comment.Special), + ], + + 'multiline-comment': [ + (r'[^*]+', Comment.Multiline), (r'\*/', Comment.Multiline, '#pop'), - (r'[^/*]+', Comment.Multiline), - (r'[/*]', Comment.Multiline) - ] + (r'\*', Comment.Multiline), + ], + + # String substates + # ---------------- + + 'single-quoted-string': [ + (r"[^'\\]+", String.Single), + (r"''", String.Escape), + (r"""\\[0'"bnrtZ\\%_]""", String.Escape), + (r"'", String.Single, '#pop'), + ], + + 'double-quoted-string': [ + (r'[^"\\]+', String.Double), + (r'""', String.Escape), + (r"""\\[0'"bnrtZ\\%_]""", String.Escape), + (r'"', String.Double, '#pop'), + ], + + # Variable substates + # ------------------ + + 'single-quoted-variable': [ + (r"[^']+", Name.Variable), + (r"''", Name.Variable), + (r"'", Name.Variable, '#pop'), + ], + + 'double-quoted-variable': [ + (r'[^"]+', Name.Variable), + (r'""', Name.Variable), + (r'"', 
Name.Variable, '#pop'), + ], + + 'backtick-quoted-variable': [ + (r'[^`]+', Name.Variable), + (r'``', Name.Variable), + (r'`', Name.Variable, '#pop'), + ], + + # Schema object name substates + # ---------------------------- + # + # "Name.Quoted" and "Name.Quoted.Escape" are non-standard but + # formatters will style them as "Name" by default but add + # additional styles based on the token name. This gives users + # flexibility to add custom styles as desired. + # + 'schema-object-name': [ + (r'[^`]+', Name.Quoted), + (r'``', Name.Quoted.Escape), + (r'`', Name.Quoted, '#pop'), + ], } def analyse_text(text): rating = 0 name_between_backtick_count = len( - name_between_backtick_re.findall((text))) + name_between_backtick_re.findall(text)) name_between_bracket_count = len( name_between_bracket_re.findall(text)) # Same logic as above in the TSQL analysis dialect_name_count = name_between_backtick_count + name_between_bracket_count - if dialect_name_count >= 1 and name_between_backtick_count >= 2 * name_between_bracket_count: + if dialect_name_count >= 1 and \ + name_between_backtick_count >= 2 * name_between_bracket_count: # Found at least twice as many `name` as [name]. 
rating += 0.5 elif name_between_backtick_count > name_between_bracket_count: @@ -683,9 +793,8 @@ def get_tokens_unprocessed(self, data): curcode += line[8:] else: if curcode: - for item in do_insertions(insertions, - sql.get_tokens_unprocessed(curcode)): - yield item + yield from do_insertions(insertions, + sql.get_tokens_unprocessed(curcode)) curcode = '' insertions = [] if line.startswith('SQL error: '): @@ -693,9 +802,8 @@ def get_tokens_unprocessed(self, data): else: yield (match.start(), Generic.Output, line) if curcode: - for item in do_insertions(insertions, - sql.get_tokens_unprocessed(curcode)): - yield item + yield from do_insertions(insertions, + sql.get_tokens_unprocessed(curcode)) class RqlLexer(RegexLexer): diff --git a/src/typecode/_vendor/pygments/lexers/stata.py b/src/typecode/_vendor/pygments/lexers/stata.py index c5f3b7c..dba8621 100644 --- a/src/typecode/_vendor/pygments/lexers/stata.py +++ b/src/typecode/_vendor/pygments/lexers/stata.py @@ -5,12 +5,12 @@ Lexer for Stata - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" import re -from typecode._vendor.pygments.lexer import RegexLexer, include, words +from typecode._vendor.pygments.lexer import RegexLexer, default, include, words from typecode._vendor.pygments.token import Comment, Keyword, Name, Number, \ String, Text, Operator @@ -27,8 +27,8 @@ class StataLexer(RegexLexer): """ # Syntax based on # - http://fmwww.bc.edu/RePEc/bocode/s/synlightlist.ado - # - http://github.com/isagalaev/highlight.js/blob/master/src/languages/stata.js - # - http://github.com/jpitblado/vim-stata/blob/master/syntax/stata.vim + # - https://github.com/isagalaev/highlight.js/blob/master/src/languages/stata.js + # - https://github.com/jpitblado/vim-stata/blob/master/syntax/stata.vim name = 'Stata' aliases = ['stata', 'do'] @@ -118,27 +118,27 @@ class StataLexer(RegexLexer): # A global is more restricted, so we do follow rules. Note only # locals explicitly enclosed ${} can be nested. 'macros': [ - (r'\$(\{|(?=[\$`]))', Name.Variable.Global, 'macro-global-nested'), + (r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested'), (r'\$', Name.Variable.Global, 'macro-global-name'), (r'`', Name.Variable, 'macro-local'), ], 'macro-local': [ (r'`', Name.Variable, '#push'), (r"'", Name.Variable, '#pop'), - (r'\$(\{|(?=[\$`]))', Name.Variable.Global, 'macro-global-nested'), + (r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested'), (r'\$', Name.Variable.Global, 'macro-global-name'), (r'.', Name.Variable), # fallback ], 'macro-global-nested': [ - (r'\$(\{|(?=[\$`]))', Name.Variable.Global, '#push'), + (r'\$(\{|(?=[$`]))', Name.Variable.Global, '#push'), (r'\}', Name.Variable.Global, '#pop'), (r'\$', Name.Variable.Global, 'macro-global-name'), (r'`', Name.Variable, 'macro-local'), (r'\w', Name.Variable.Global), # fallback - (r'(?!\w)', Name.Variable.Global, '#pop'), + default('#pop'), ], 'macro-global-name': [ - (r'\$(\{|(?=[\$`]))', Name.Variable.Global, 'macro-global-nested', '#pop'), + (r'\$(\{|(?=[$`]))', Name.Variable.Global, 
'macro-global-nested', '#pop'), (r'\$', Name.Variable.Global, 'macro-global-name', '#pop'), (r'`', Name.Variable, 'macro-local', '#pop'), (r'\w{1,32}', Name.Variable.Global, '#pop'), diff --git a/src/typecode/_vendor/pygments/lexers/supercollider.py b/src/typecode/_vendor/pygments/lexers/supercollider.py index 7b1ff2a..bf8d5b3 100644 --- a/src/typecode/_vendor/pygments/lexers/supercollider.py +++ b/src/typecode/_vendor/pygments/lexers/supercollider.py @@ -5,7 +5,7 @@ Lexer for SuperCollider - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -84,7 +84,12 @@ class SuperColliderLexer(RegexLexer): (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), (r'0x[0-9a-fA-F]+', Number.Hex), (r'[0-9]+', Number.Integer), - (r'"(\\\\|\\"|[^"])*"', String.Double), - (r"'(\\\\|\\'|[^'])*'", String.Single), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), ] } + + def analyse_text(text): + """We're searching for a common function and a unique keyword here.""" + if 'SinOsc' in text or 'thisFunctionDef' in text: + return 0.1 diff --git a/src/typecode/_vendor/pygments/lexers/tcl.py b/src/typecode/_vendor/pygments/lexers/tcl.py index 11b8373..eb0a259 100644 --- a/src/typecode/_vendor/pygments/lexers/tcl.py +++ b/src/typecode/_vendor/pygments/lexers/tcl.py @@ -5,7 +5,7 @@ Lexers for Tcl and related languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/templates.py b/src/typecode/_vendor/pygments/lexers/templates.py index 221720e..e86ff32 100644 --- a/src/typecode/_vendor/pygments/lexers/templates.py +++ b/src/typecode/_vendor/pygments/lexers/templates.py @@ -5,7 +5,7 @@ Lexers for various template engines' markup. 
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -179,8 +179,8 @@ class SmartyLexer(RegexLexer): (r'(true|false|null)\b', Keyword.Constant), (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|" r"0[xX][0-9a-fA-F]+[Ll]?", Number), - (r'"(\\\\|\\"|[^"])*"', String.Double), - (r"'(\\\\|\\'|[^'])*'", String.Single), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), (r'[a-zA-Z_]\w*', Name.Attribute) ] } @@ -226,7 +226,7 @@ class VelocityLexer(RegexLexer): 'directiveparams'), (r'(#\{?)(' + identifier + r')(\}|\b)', bygroups(Comment.Preproc, Name.Function, Comment.Preproc)), - (r'\$\{?', Punctuation, 'variable') + (r'\$!?\{?', Punctuation, 'variable') ], 'variable': [ (identifier, Name.Variable), @@ -249,11 +249,11 @@ class VelocityLexer(RegexLexer): (r'\]', Operator, '#pop') ], 'funcparams': [ - (r'\$\{?', Punctuation, 'variable'), + (r'\$!?\{?', Punctuation, 'variable'), (r'\s+', Text), (r'[,:]', Punctuation), - (r'"(\\\\|\\"|[^"])*"', String.Double), - (r"'(\\\\|\\'|[^'])*'", String.Single), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), (r"0[xX][0-9a-fA-F]+[Ll]?", Number), (r"\b[0-9]+\b", Number), (r'(true|false|null)\b', Keyword.Constant), @@ -274,7 +274,7 @@ def analyse_text(text): rv += 0.15 if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text): rv += 0.15 - if re.search(r'\$\{?[a-zA-Z_]\w*(\([^)]*\))?' + if re.search(r'\$!?\{?[a-zA-Z_]\w*(\([^)]*\))?' 
r'(\.\w+(\([^)]*\))?)*\}?', text): rv += 0.01 return rv @@ -293,8 +293,7 @@ class VelocityHtmlLexer(DelegatingLexer): mimetypes = ['text/html+velocity'] def __init__(self, **options): - super(VelocityHtmlLexer, self).__init__(HtmlLexer, VelocityLexer, - **options) + super().__init__(HtmlLexer, VelocityLexer, **options) class VelocityXmlLexer(DelegatingLexer): @@ -310,8 +309,7 @@ class VelocityXmlLexer(DelegatingLexer): mimetypes = ['application/xml+velocity'] def __init__(self, **options): - super(VelocityXmlLexer, self).__init__(XmlLexer, VelocityLexer, - **options) + super().__init__(XmlLexer, VelocityLexer, **options) def analyse_text(text): rv = VelocityLexer.analyse_text(text) - 0.01 @@ -323,7 +321,7 @@ def analyse_text(text): class DjangoLexer(RegexLexer): """ Generic `django `_ - and `jinja `_ template lexer. + and `jinja `_ template lexer. It just highlights django/jinja code between the preprocessor directives, other data is left untouched by the lexer. @@ -340,7 +338,7 @@ class DjangoLexer(RegexLexer): (r'[^{]+', Other), (r'\{\{', Comment.Preproc, 'var'), # jinja/django comments - (r'\{[*#].*?[*#]\}', Comment), + (r'\{#.*?#\}', Comment), # django comments (r'(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)' r'(\{%)(-?\s*)(endcomment)(\s*-?)(%\})', @@ -373,8 +371,8 @@ class DjangoLexer(RegexLexer): (r'(loop|block|super|forloop)\b', Name.Builtin), (r'[a-zA-Z_][\w-]*', Name.Variable), (r'\.\w+', Name.Variable), - (r':?"(\\\\|\\"|[^"])*"', String.Double), - (r":?'(\\\\|\\'|[^'])*'", String.Single), + (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single), (r'([{}()\[\]+\-*/%,:~]|[><=]=?|!=)', Operator), (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|" r"0[xX][0-9a-fA-F]+[Ll]?", Number), @@ -464,8 +462,7 @@ class MyghtyHtmlLexer(DelegatingLexer): mimetypes = ['text/html+myghty'] def __init__(self, **options): - super(MyghtyHtmlLexer, self).__init__(HtmlLexer, MyghtyLexer, - **options) + super().__init__(HtmlLexer, MyghtyLexer, 
**options) class MyghtyXmlLexer(DelegatingLexer): @@ -481,8 +478,7 @@ class MyghtyXmlLexer(DelegatingLexer): mimetypes = ['application/xml+myghty'] def __init__(self, **options): - super(MyghtyXmlLexer, self).__init__(XmlLexer, MyghtyLexer, - **options) + super().__init__(XmlLexer, MyghtyLexer, **options) class MyghtyJavascriptLexer(DelegatingLexer): @@ -500,8 +496,7 @@ class MyghtyJavascriptLexer(DelegatingLexer): 'text/javascript+mygthy'] def __init__(self, **options): - super(MyghtyJavascriptLexer, self).__init__(JavascriptLexer, - MyghtyLexer, **options) + super().__init__(JavascriptLexer, MyghtyLexer, **options) class MyghtyCssLexer(DelegatingLexer): @@ -517,8 +512,7 @@ class MyghtyCssLexer(DelegatingLexer): mimetypes = ['text/css+myghty'] def __init__(self, **options): - super(MyghtyCssLexer, self).__init__(CssLexer, MyghtyLexer, - **options) + super().__init__(CssLexer, MyghtyLexer, **options) class MasonLexer(RegexLexer): @@ -543,9 +537,8 @@ class MasonLexer(RegexLexer): (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)()', bygroups(Name.Tag, Text, Name.Function, Name.Tag, using(this), Name.Tag)), - (r'(?s)(<%\w+)(.*?)(>)(.*?)()', - bygroups(Name.Tag, Name.Function, Name.Tag, - using(PerlLexer), Name.Tag)), + (r'(?s)(<%(\w+)(.*?)(>))(.*?)()', + bygroups(Name.Tag, None, None, None, using(PerlLexer), Name.Tag)), (r'(?s)(<&[^|])(.*?)(,.*?)?(&>)', bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)), (r'(?s)(<&\|)(.*?)(,.*?)?(&>)', @@ -571,7 +564,7 @@ class MasonLexer(RegexLexer): def analyse_text(text): result = 0.0 - if re.search(r'', text) is not None: + if re.search(r'', text) is not None: result = 1.0 elif re.search(r'<&.+&>', text, re.DOTALL) is not None: result = 0.11 @@ -659,8 +652,7 @@ class MakoHtmlLexer(DelegatingLexer): mimetypes = ['text/html+mako'] def __init__(self, **options): - super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer, - **options) + super().__init__(HtmlLexer, MakoLexer, **options) class MakoXmlLexer(DelegatingLexer): @@ 
-676,8 +668,7 @@ class MakoXmlLexer(DelegatingLexer): mimetypes = ['application/xml+mako'] def __init__(self, **options): - super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer, - **options) + super().__init__(XmlLexer, MakoLexer, **options) class MakoJavascriptLexer(DelegatingLexer): @@ -695,8 +686,7 @@ class MakoJavascriptLexer(DelegatingLexer): 'text/javascript+mako'] def __init__(self, **options): - super(MakoJavascriptLexer, self).__init__(JavascriptLexer, - MakoLexer, **options) + super().__init__(JavascriptLexer, MakoLexer, **options) class MakoCssLexer(DelegatingLexer): @@ -712,8 +702,7 @@ class MakoCssLexer(DelegatingLexer): mimetypes = ['text/css+mako'] def __init__(self, **options): - super(MakoCssLexer, self).__init__(CssLexer, MakoLexer, - **options) + super().__init__(CssLexer, MakoLexer, **options) # Genshi and Cheetah lexers courtesy of Matt Good. @@ -786,8 +775,7 @@ class CheetahHtmlLexer(DelegatingLexer): mimetypes = ['text/html+cheetah', 'text/html+spitfire'] def __init__(self, **options): - super(CheetahHtmlLexer, self).__init__(HtmlLexer, CheetahLexer, - **options) + super().__init__(HtmlLexer, CheetahLexer, **options) class CheetahXmlLexer(DelegatingLexer): @@ -801,8 +789,7 @@ class CheetahXmlLexer(DelegatingLexer): mimetypes = ['application/xml+cheetah', 'application/xml+spitfire'] def __init__(self, **options): - super(CheetahXmlLexer, self).__init__(XmlLexer, CheetahLexer, - **options) + super().__init__(XmlLexer, CheetahLexer, **options) class CheetahJavascriptLexer(DelegatingLexer): @@ -822,8 +809,7 @@ class CheetahJavascriptLexer(DelegatingLexer): 'text/javascript+spitfire'] def __init__(self, **options): - super(CheetahJavascriptLexer, self).__init__(JavascriptLexer, - CheetahLexer, **options) + super().__init__(JavascriptLexer, CheetahLexer, **options) class GenshiTextLexer(RegexLexer): @@ -937,8 +923,7 @@ class HtmlGenshiLexer(DelegatingLexer): mimetypes = ['text/html+genshi'] def __init__(self, **options): - super(HtmlGenshiLexer, 
self).__init__(HtmlLexer, GenshiMarkupLexer, - **options) + super().__init__(HtmlLexer, GenshiMarkupLexer, **options) def analyse_text(text): rv = 0.0 @@ -962,8 +947,7 @@ class GenshiLexer(DelegatingLexer): mimetypes = ['application/x-genshi', 'application/x-kid'] def __init__(self, **options): - super(GenshiLexer, self).__init__(XmlLexer, GenshiMarkupLexer, - **options) + super().__init__(XmlLexer, GenshiMarkupLexer, **options) def analyse_text(text): rv = 0.0 @@ -988,9 +972,7 @@ class JavascriptGenshiLexer(DelegatingLexer): 'text/javascript+genshi'] def __init__(self, **options): - super(JavascriptGenshiLexer, self).__init__(JavascriptLexer, - GenshiTextLexer, - **options) + super().__init__(JavascriptLexer, GenshiTextLexer, **options) def analyse_text(text): return GenshiLexer.analyse_text(text) - 0.05 @@ -1007,8 +989,7 @@ class CssGenshiLexer(DelegatingLexer): mimetypes = ['text/css+genshi'] def __init__(self, **options): - super(CssGenshiLexer, self).__init__(CssLexer, GenshiTextLexer, - **options) + super().__init__(CssLexer, GenshiTextLexer, **options) def analyse_text(text): return GenshiLexer.analyse_text(text) - 0.05 @@ -1029,7 +1010,7 @@ class RhtmlLexer(DelegatingLexer): mimetypes = ['text/html+ruby'] def __init__(self, **options): - super(RhtmlLexer, self).__init__(HtmlLexer, ErbLexer, **options) + super().__init__(HtmlLexer, ErbLexer, **options) def analyse_text(text): rv = ErbLexer.analyse_text(text) - 0.01 @@ -1051,7 +1032,7 @@ class XmlErbLexer(DelegatingLexer): mimetypes = ['application/xml+ruby'] def __init__(self, **options): - super(XmlErbLexer, self).__init__(XmlLexer, ErbLexer, **options) + super().__init__(XmlLexer, ErbLexer, **options) def analyse_text(text): rv = ErbLexer.analyse_text(text) - 0.01 @@ -1071,7 +1052,7 @@ class CssErbLexer(DelegatingLexer): mimetypes = ['text/css+ruby'] def __init__(self, **options): - super(CssErbLexer, self).__init__(CssLexer, ErbLexer, **options) + super().__init__(CssLexer, ErbLexer, **options) def 
analyse_text(text): return ErbLexer.analyse_text(text) - 0.05 @@ -1091,8 +1072,7 @@ class JavascriptErbLexer(DelegatingLexer): 'text/javascript+ruby'] def __init__(self, **options): - super(JavascriptErbLexer, self).__init__(JavascriptLexer, ErbLexer, - **options) + super().__init__(JavascriptLexer, ErbLexer, **options) def analyse_text(text): return ErbLexer.analyse_text(text) - 0.05 @@ -1115,7 +1095,7 @@ class HtmlPhpLexer(DelegatingLexer): 'application/x-httpd-php4', 'application/x-httpd-php5'] def __init__(self, **options): - super(HtmlPhpLexer, self).__init__(HtmlLexer, PhpLexer, **options) + super().__init__(HtmlLexer, PhpLexer, **options) def analyse_text(text): rv = PhpLexer.analyse_text(text) - 0.01 @@ -1135,7 +1115,7 @@ class XmlPhpLexer(DelegatingLexer): mimetypes = ['application/xml+php'] def __init__(self, **options): - super(XmlPhpLexer, self).__init__(XmlLexer, PhpLexer, **options) + super().__init__(XmlLexer, PhpLexer, **options) def analyse_text(text): rv = PhpLexer.analyse_text(text) - 0.01 @@ -1155,7 +1135,7 @@ class CssPhpLexer(DelegatingLexer): mimetypes = ['text/css+php'] def __init__(self, **options): - super(CssPhpLexer, self).__init__(CssLexer, PhpLexer, **options) + super().__init__(CssLexer, PhpLexer, **options) def analyse_text(text): return PhpLexer.analyse_text(text) - 0.05 @@ -1175,8 +1155,7 @@ class JavascriptPhpLexer(DelegatingLexer): 'text/javascript+php'] def __init__(self, **options): - super(JavascriptPhpLexer, self).__init__(JavascriptLexer, PhpLexer, - **options) + super().__init__(JavascriptLexer, PhpLexer, **options) def analyse_text(text): return PhpLexer.analyse_text(text) @@ -1196,7 +1175,7 @@ class HtmlSmartyLexer(DelegatingLexer): mimetypes = ['text/html+smarty'] def __init__(self, **options): - super(HtmlSmartyLexer, self).__init__(HtmlLexer, SmartyLexer, **options) + super().__init__(HtmlLexer, SmartyLexer, **options) def analyse_text(text): rv = SmartyLexer.analyse_text(text) - 0.01 @@ -1217,7 +1196,7 @@ class 
XmlSmartyLexer(DelegatingLexer): mimetypes = ['application/xml+smarty'] def __init__(self, **options): - super(XmlSmartyLexer, self).__init__(XmlLexer, SmartyLexer, **options) + super().__init__(XmlLexer, SmartyLexer, **options) def analyse_text(text): rv = SmartyLexer.analyse_text(text) - 0.01 @@ -1238,7 +1217,7 @@ class CssSmartyLexer(DelegatingLexer): mimetypes = ['text/css+smarty'] def __init__(self, **options): - super(CssSmartyLexer, self).__init__(CssLexer, SmartyLexer, **options) + super().__init__(CssLexer, SmartyLexer, **options) def analyse_text(text): return SmartyLexer.analyse_text(text) - 0.05 @@ -1258,8 +1237,7 @@ class JavascriptSmartyLexer(DelegatingLexer): 'text/javascript+smarty'] def __init__(self, **options): - super(JavascriptSmartyLexer, self).__init__(JavascriptLexer, SmartyLexer, - **options) + super().__init__(JavascriptLexer, SmartyLexer, **options) def analyse_text(text): return SmartyLexer.analyse_text(text) - 0.05 @@ -1279,7 +1257,7 @@ class HtmlDjangoLexer(DelegatingLexer): mimetypes = ['text/html+django', 'text/html+jinja'] def __init__(self, **options): - super(HtmlDjangoLexer, self).__init__(HtmlLexer, DjangoLexer, **options) + super().__init__(HtmlLexer, DjangoLexer, **options) def analyse_text(text): rv = DjangoLexer.analyse_text(text) - 0.01 @@ -1300,7 +1278,7 @@ class XmlDjangoLexer(DelegatingLexer): mimetypes = ['application/xml+django', 'application/xml+jinja'] def __init__(self, **options): - super(XmlDjangoLexer, self).__init__(XmlLexer, DjangoLexer, **options) + super().__init__(XmlLexer, DjangoLexer, **options) def analyse_text(text): rv = DjangoLexer.analyse_text(text) - 0.01 @@ -1321,7 +1299,7 @@ class CssDjangoLexer(DelegatingLexer): mimetypes = ['text/css+django', 'text/css+jinja'] def __init__(self, **options): - super(CssDjangoLexer, self).__init__(CssLexer, DjangoLexer, **options) + super().__init__(CssLexer, DjangoLexer, **options) def analyse_text(text): return DjangoLexer.analyse_text(text) - 0.05 @@ -1345,8 
+1323,7 @@ class JavascriptDjangoLexer(DelegatingLexer): 'text/javascript+jinja'] def __init__(self, **options): - super(JavascriptDjangoLexer, self).__init__(JavascriptLexer, DjangoLexer, - **options) + super().__init__(JavascriptLexer, DjangoLexer, **options) def analyse_text(text): return DjangoLexer.analyse_text(text) - 0.05 @@ -1389,7 +1366,7 @@ class JspLexer(DelegatingLexer): mimetypes = ['application/x-jsp'] def __init__(self, **options): - super(JspLexer, self).__init__(XmlLexer, JspRootLexer, **options) + super().__init__(XmlLexer, JspRootLexer, **options) def analyse_text(text): rv = JavaLexer.analyse_text(text) - 0.01 @@ -1428,7 +1405,7 @@ class EvoqueLexer(RegexLexer): # see doc for handling first name arg: /directives/evoque/ # + minor inconsistency: the "name" in e.g. $overlay{name=site_base} # should be using(PythonLexer), not passed out as String - (r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+[^=,%}]+?)?' + (r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+)?' r'(.*?)((?(4)%)\})', bygroups(Punctuation, Name.Builtin, Punctuation, None, String, using(PythonLexer), Punctuation)), @@ -1452,6 +1429,10 @@ class EvoqueLexer(RegexLexer): ], } + def analyse_text(text): + """Evoque templates use $evoque, which is unique.""" + if '$evoque' in text: + return 1 class EvoqueHtmlLexer(DelegatingLexer): """ @@ -1466,8 +1447,10 @@ class EvoqueHtmlLexer(DelegatingLexer): mimetypes = ['text/html+evoque'] def __init__(self, **options): - super(EvoqueHtmlLexer, self).__init__(HtmlLexer, EvoqueLexer, - **options) + super().__init__(HtmlLexer, EvoqueLexer, **options) + + def analyse_text(text): + return EvoqueLexer.analyse_text(text) class EvoqueXmlLexer(DelegatingLexer): @@ -1483,8 +1466,10 @@ class EvoqueXmlLexer(DelegatingLexer): mimetypes = ['application/xml+evoque'] def __init__(self, **options): - super(EvoqueXmlLexer, self).__init__(XmlLexer, EvoqueLexer, - **options) + super().__init__(XmlLexer, EvoqueLexer, **options) + + def analyse_text(text): + return 
EvoqueLexer.analyse_text(text) class ColdfusionLexer(RegexLexer): @@ -1591,8 +1576,7 @@ class ColdfusionHtmlLexer(DelegatingLexer): mimetypes = ['application/x-coldfusion'] def __init__(self, **options): - super(ColdfusionHtmlLexer, self).__init__(HtmlLexer, ColdfusionMarkupLexer, - **options) + super().__init__(HtmlLexer, ColdfusionMarkupLexer, **options) class ColdfusionCFCLexer(DelegatingLexer): @@ -1607,8 +1591,7 @@ class ColdfusionCFCLexer(DelegatingLexer): mimetypes = [] def __init__(self, **options): - super(ColdfusionCFCLexer, self).__init__(ColdfusionHtmlLexer, ColdfusionLexer, - **options) + super().__init__(ColdfusionHtmlLexer, ColdfusionLexer, **options) class SspLexer(DelegatingLexer): @@ -1623,7 +1606,7 @@ class SspLexer(DelegatingLexer): mimetypes = ['application/x-ssp'] def __init__(self, **options): - super(SspLexer, self).__init__(XmlLexer, JspRootLexer, **options) + super().__init__(XmlLexer, JspRootLexer, **options) def analyse_text(text): rv = 0.0 @@ -1670,8 +1653,7 @@ class TeaTemplateLexer(DelegatingLexer): mimetypes = ['text/x-tea'] def __init__(self, **options): - super(TeaTemplateLexer, self).__init__(XmlLexer, - TeaTemplateRootLexer, **options) + super().__init__(XmlLexer, TeaTemplateRootLexer, **options) def analyse_text(text): rv = TeaLangLexer.analyse_text(text) - 0.01 @@ -1701,7 +1683,7 @@ class LassoHtmlLexer(DelegatingLexer): 'application/x-httpd-lasso[89]'] def __init__(self, **options): - super(LassoHtmlLexer, self).__init__(HtmlLexer, LassoLexer, **options) + super().__init__(HtmlLexer, LassoLexer, **options) def analyse_text(text): rv = LassoLexer.analyse_text(text) - 0.01 @@ -1725,7 +1707,7 @@ class LassoXmlLexer(DelegatingLexer): mimetypes = ['application/xml+lasso'] def __init__(self, **options): - super(LassoXmlLexer, self).__init__(XmlLexer, LassoLexer, **options) + super().__init__(XmlLexer, LassoLexer, **options) def analyse_text(text): rv = LassoLexer.analyse_text(text) - 0.01 @@ -1749,11 +1731,11 @@ class 
LassoCssLexer(DelegatingLexer): def __init__(self, **options): options['requiredelimiters'] = True - super(LassoCssLexer, self).__init__(CssLexer, LassoLexer, **options) + super().__init__(CssLexer, LassoLexer, **options) def analyse_text(text): rv = LassoLexer.analyse_text(text) - 0.05 - if re.search(r'\w+:.+?;', text): + if re.search(r'\w+:[^;]+;', text): rv += 0.1 if 'padding:' in text: rv += 0.1 @@ -1777,8 +1759,7 @@ class LassoJavascriptLexer(DelegatingLexer): def __init__(self, **options): options['requiredelimiters'] = True - super(LassoJavascriptLexer, self).__init__(JavascriptLexer, LassoLexer, - **options) + super().__init__(JavascriptLexer, LassoLexer, **options) def analyse_text(text): rv = LassoLexer.analyse_text(text) - 0.05 @@ -1802,27 +1783,27 @@ class HandlebarsLexer(RegexLexer): 'root': [ (r'[^{]+', Other), + # Comment start {{! }} or {{!-- (r'\{\{!.*\}\}', Comment), + # HTML Escaping open {{{expression (r'(\{\{\{)(\s*)', bygroups(Comment.Special, Text), 'tag'), + + # {{blockOpen {{#blockOpen {{/blockClose with optional tilde ~ + (r'(\{\{)([#~/]+)([^\s}]*)', + bygroups(Comment.Preproc, Number.Attribute, Number.Attribute), 'tag'), (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'tag'), ], 'tag': [ (r'\s+', Text), + # HTML Escaping close }}} (r'\}\}\}', Comment.Special, '#pop'), - (r'\}\}', Comment.Preproc, '#pop'), - - # Handlebars - (r'([#/]*)(each|if|unless|else|with|log|in(line)?)', bygroups(Keyword, - Keyword)), - (r'#\*inline', Keyword), - - # General {{#block}} - (r'([#/])([\w-]+)', bygroups(Name.Function, Name.Function)), + # blockClose}}, includes optional tilde ~ + (r'(~?)(\}\})', bygroups(Number, Comment.Preproc), '#pop'), # {{opt=something}} - (r'([\w-]+)(=)', bygroups(Name.Attribute, Operator)), + (r'([^\s}]+)(=)', bygroups(Name.Attribute, Operator)), # Partials {{> ...}} (r'(>)(\s*)(@partial-block)', bygroups(Keyword, Text, Keyword)), @@ -1845,7 +1826,7 @@ class HandlebarsLexer(RegexLexer): include('generic'), ], 'variable': [ - 
(r'[a-zA-Z][\w-]*', Name.Variable), + (r'[()/@a-zA-Z][\w-]*', Name.Variable), (r'\.[\w-]+', Name.Variable), (r'(this\/|\.\/|(\.\.\/)+)[\w-]+', Name.Variable), ], @@ -1853,8 +1834,8 @@ class HandlebarsLexer(RegexLexer): include('variable'), # borrowed from DjangoLexer - (r':?"(\\\\|\\"|[^"])*"', String.Double), - (r":?'(\\\\|\\'|[^'])*'", String.Single), + (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single), (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|" r"0[xX][0-9a-fA-F]+[Ll]?", Number), ] @@ -1875,7 +1856,7 @@ class HandlebarsHtmlLexer(DelegatingLexer): mimetypes = ['text/html+handlebars', 'text/x-handlebars-template'] def __init__(self, **options): - super(HandlebarsHtmlLexer, self).__init__(HtmlLexer, HandlebarsLexer, **options) + super().__init__(HtmlLexer, HandlebarsLexer, **options) class YamlJinjaLexer(DelegatingLexer): @@ -1894,7 +1875,7 @@ class YamlJinjaLexer(DelegatingLexer): mimetypes = ['text/x-yaml+jinja', 'text/x-sls'] def __init__(self, **options): - super(YamlJinjaLexer, self).__init__(YamlLexer, DjangoLexer, **options) + super().__init__(YamlLexer, DjangoLexer, **options) class LiquidLexer(RegexLexer): @@ -2166,8 +2147,8 @@ class TwigLexer(RegexLexer): (_ident_inner, Name.Variable), (r'\.' 
+ _ident_inner, Name.Variable), (r'\.[0-9]+', Number), - (r':?"(\\\\|\\"|[^"])*"', String.Double), - (r":?'(\\\\|\\'|[^'])*'", String.Single), + (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single), (r'([{}()\[\]+\-*/,:~%]|\.\.|\?|:|\*\*|\/\/|!=|[><=]=?)', Operator), (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|" r"0[xX][0-9a-fA-F]+[Ll]?", Number), @@ -2200,7 +2181,7 @@ class TwigHtmlLexer(DelegatingLexer): mimetypes = ['text/html+twig'] def __init__(self, **options): - super(TwigHtmlLexer, self).__init__(HtmlLexer, TwigLexer, **options) + super().__init__(HtmlLexer, TwigLexer, **options) class Angular2Lexer(RegexLexer): @@ -2235,9 +2216,9 @@ class Angular2Lexer(RegexLexer): # *ngIf="..."; #f="ngForm" (r'([*#])([\w:.-]+)(\s*)(=)(\s*)', - bygroups(Punctuation, Name.Attribute, Punctuation, Operator), 'attr'), + bygroups(Punctuation, Name.Attribute, Text, Operator, Text), 'attr'), (r'([*#])([\w:.-]+)(\s*)', - bygroups(Punctuation, Name.Attribute, Punctuation)), + bygroups(Punctuation, Name.Attribute, Text)), ], 'ngExpression': [ @@ -2246,8 +2227,8 @@ class Angular2Lexer(RegexLexer): # Literals (r':?(true|false)', String.Boolean), - (r':?"(\\\\|\\"|[^"])*"', String.Double), - (r":?'(\\\\|\\'|[^'])*'", String.Single), + (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single), (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|" r"0[xX][0-9a-fA-F]+[Ll]?", Number), @@ -2280,4 +2261,4 @@ class Angular2HtmlLexer(DelegatingLexer): filenames = ['*.ng2'] def __init__(self, **options): - super(Angular2HtmlLexer, self).__init__(HtmlLexer, Angular2Lexer, **options) + super().__init__(HtmlLexer, Angular2Lexer, **options) diff --git a/src/typecode/_vendor/pygments/lexers/teraterm.py b/src/typecode/_vendor/pygments/lexers/teraterm.py index d15bfe2..0223fae 100644 --- a/src/typecode/_vendor/pygments/lexers/teraterm.py +++ b/src/typecode/_vendor/pygments/lexers/teraterm.py @@ -5,7 +5,7 @@ Lexer for Tera Term 
macro files. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -40,7 +40,7 @@ class TeraTermLexer(RegexLexer): include('numeric-literals'), include('string-literals'), include('all-whitespace'), - (r'[^\s]', Text), + (r'\S', Text), ], 'comments': [ (r';[^\r\n]*', Comment.Single), @@ -52,71 +52,250 @@ class TeraTermLexer(RegexLexer): (r'[*/]', Comment.Multiline) ], 'labels': [ - (r'(?i)^(\s*)(:[0-9a-z_]+)', bygroups(Text, Name.Label)), + (r'(?i)^(\s*)(:[a-z0-9_]+)', bygroups(Text, Name.Label)), ], 'commands': [ ( r'(?i)\b(' - r'basename|beep|bplusrecv|bplussend|break|bringupbox|' - r'callmenu|changedir|checksum16|checksum16file|' - r'checksum32|checksum32file|checksum8|checksum8file|' - r'clearscreen|clipb2var|closesbox|closett|code2str|' - r'connect|continue|crc16|crc16file|crc32|crc32file|' - r'cygconnect|delpassword|dirname|dirnamebox|disconnect|' - r'dispstr|do|else|elseif|enablekeyb|end|endif|enduntil|' - r'endwhile|exec|execcmnd|exit|expandenv|fileclose|' - r'fileconcat|filecopy|filecreate|filedelete|filelock|' - r'filemarkptr|filenamebox|fileopen|fileread|filereadln|' - r'filerename|filesearch|fileseek|fileseekback|filestat|' - r'filestrseek|filestrseek2|filetruncate|fileunlock|' - r'filewrite|filewriteln|findclose|findfirst|findnext|' - r'flushrecv|foldercreate|folderdelete|foldersearch|for|' - r'getdate|getdir|getenv|getfileattr|gethostname|' - r'getipv4addr|getipv6addr|getmodemstatus|getpassword|' - r'getspecialfolder|gettime|gettitle|getttdir|getver|' - r'if|ifdefined|include|inputbox|int2str|intdim|' - r'ispassword|kmtfinish|kmtget|kmtrecv|kmtsend|listbox|' - r'loadkeymap|logautoclosemode|logclose|loginfo|logopen|' - r'logpause|logrotate|logstart|logwrite|loop|makepath|' - r'messagebox|mpause|next|passwordbox|pause|quickvanrecv|' - r'quickvansend|random|recvln|regexoption|restoresetup|' - 
r'return|rotateleft|rotateright|scprecv|scpsend|send|' - r'sendbreak|sendbroadcast|sendfile|sendkcode|sendln|' - r'sendlnbroadcast|sendlnmulticast|sendmulticast|setbaud|' - r'setdate|setdebug|setdir|setdlgpos|setdtr|setecho|' - r'setenv|setexitcode|setfileattr|setflowctrl|' - r'setmulticastname|setpassword|setrts|setsync|settime|' - r'settitle|show|showtt|sprintf|sprintf2|statusbox|' - r'str2code|str2int|strcompare|strconcat|strcopy|strdim|' - r'strinsert|strjoin|strlen|strmatch|strremove|' - r'strreplace|strscan|strspecial|strsplit|strtrim|' - r'testlink|then|tolower|toupper|unlink|until|uptime|' - r'var2clipb|wait|wait4all|waitevent|waitln|waitn|' - r'waitrecv|waitregex|while|xmodemrecv|xmodemsend|' - r'yesnobox|ymodemrecv|ymodemsend|zmodemrecv|zmodemsend' + r'basename|' + r'beep|' + r'bplusrecv|' + r'bplussend|' + r'break|' + r'bringupbox|' + # 'call' is handled separately. + r'callmenu|' + r'changedir|' + r'checksum16|' + r'checksum16file|' + r'checksum32|' + r'checksum32file|' + r'checksum8|' + r'checksum8file|' + r'clearscreen|' + r'clipb2var|' + r'closesbox|' + r'closett|' + r'code2str|' + r'connect|' + r'continue|' + r'crc16|' + r'crc16file|' + r'crc32|' + r'crc32file|' + r'cygconnect|' + r'delpassword|' + r'dirname|' + r'dirnamebox|' + r'disconnect|' + r'dispstr|' + r'do|' + r'else|' + r'elseif|' + r'enablekeyb|' + r'end|' + r'endif|' + r'enduntil|' + r'endwhile|' + r'exec|' + r'execcmnd|' + r'exit|' + r'expandenv|' + r'fileclose|' + r'fileconcat|' + r'filecopy|' + r'filecreate|' + r'filedelete|' + r'filelock|' + r'filemarkptr|' + r'filenamebox|' + r'fileopen|' + r'fileread|' + r'filereadln|' + r'filerename|' + r'filesearch|' + r'fileseek|' + r'fileseekback|' + r'filestat|' + r'filestrseek|' + r'filestrseek2|' + r'filetruncate|' + r'fileunlock|' + r'filewrite|' + r'filewriteln|' + r'findclose|' + r'findfirst|' + r'findnext|' + r'flushrecv|' + r'foldercreate|' + r'folderdelete|' + r'foldersearch|' + r'for|' + r'getdate|' + r'getdir|' + r'getenv|' + 
r'getfileattr|' + r'gethostname|' + r'getipv4addr|' + r'getipv6addr|' + r'getmodemstatus|' + r'getpassword|' + r'getspecialfolder|' + r'gettime|' + r'gettitle|' + r'getttdir|' + r'getver|' + # 'goto' is handled separately. + r'if|' + r'ifdefined|' + r'include|' + r'inputbox|' + r'int2str|' + r'intdim|' + r'ispassword|' + r'kmtfinish|' + r'kmtget|' + r'kmtrecv|' + r'kmtsend|' + r'listbox|' + r'loadkeymap|' + r'logautoclosemode|' + r'logclose|' + r'loginfo|' + r'logopen|' + r'logpause|' + r'logrotate|' + r'logstart|' + r'logwrite|' + r'loop|' + r'makepath|' + r'messagebox|' + r'mpause|' + r'next|' + r'passwordbox|' + r'pause|' + r'quickvanrecv|' + r'quickvansend|' + r'random|' + r'recvln|' + r'regexoption|' + r'restoresetup|' + r'return|' + r'rotateleft|' + r'rotateright|' + r'scprecv|' + r'scpsend|' + r'send|' + r'sendbreak|' + r'sendbroadcast|' + r'sendfile|' + r'sendkcode|' + r'sendln|' + r'sendlnbroadcast|' + r'sendlnmulticast|' + r'sendmulticast|' + r'setbaud|' + r'setdate|' + r'setdebug|' + r'setdir|' + r'setdlgpos|' + r'setdtr|' + r'setecho|' + r'setenv|' + r'setexitcode|' + r'setfileattr|' + r'setflowctrl|' + r'setmulticastname|' + r'setpassword|' + r'setrts|' + r'setspeed|' + r'setsync|' + r'settime|' + r'settitle|' + r'show|' + r'showtt|' + r'sprintf|' + r'sprintf2|' + r'statusbox|' + r'str2code|' + r'str2int|' + r'strcompare|' + r'strconcat|' + r'strcopy|' + r'strdim|' + r'strinsert|' + r'strjoin|' + r'strlen|' + r'strmatch|' + r'strremove|' + r'strreplace|' + r'strscan|' + r'strspecial|' + r'strsplit|' + r'strtrim|' + r'testlink|' + r'then|' + r'tolower|' + r'toupper|' + r'unlink|' + r'until|' + r'uptime|' + r'var2clipb|' + r'wait|' + r'wait4all|' + r'waitevent|' + r'waitln|' + r'waitn|' + r'waitrecv|' + r'waitregex|' + r'while|' + r'xmodemrecv|' + r'xmodemsend|' + r'yesnobox|' + r'ymodemrecv|' + r'ymodemsend|' + r'zmodemrecv|' + r'zmodemsend' r')\b', Keyword, ), - ( - r'(?i)(call|goto)([ \t]+)([0-9a-z_]+)', - bygroups(Keyword, Text, Name.Label), - ) + 
(r'(?i)(call|goto)([ \t]+)([a-z0-9_]+)', + bygroups(Keyword, Text, Name.Label)), ], 'builtin-variables': [ ( r'(?i)(' - r'groupmatchstr1|groupmatchstr2|groupmatchstr3|' - r'groupmatchstr4|groupmatchstr5|groupmatchstr6|' - r'groupmatchstr7|groupmatchstr8|groupmatchstr9|' - r'param1|param2|param3|param4|param5|param6|' - r'param7|param8|param9|paramcnt|params|' - r'inputstr|matchstr|mtimeout|result|timeout' + r'groupmatchstr1|' + r'groupmatchstr2|' + r'groupmatchstr3|' + r'groupmatchstr4|' + r'groupmatchstr5|' + r'groupmatchstr6|' + r'groupmatchstr7|' + r'groupmatchstr8|' + r'groupmatchstr9|' + r'inputstr|' + r'matchstr|' + r'mtimeout|' + r'param1|' + r'param2|' + r'param3|' + r'param4|' + r'param5|' + r'param6|' + r'param7|' + r'param8|' + r'param9|' + r'paramcnt|' + r'params|' + r'result|' + r'timeout' r')\b', Name.Builtin ), ], 'user-variables': [ - (r'(?i)[A-Z_][A-Z0-9_]*', Name.Variable), + (r'(?i)[a-z_][a-z0-9_]*', Name.Variable), ], 'numeric-literals': [ (r'(-?)([0-9]+)', bygroups(Operator, Number.Integer)), @@ -128,7 +307,7 @@ class TeraTermLexer(RegexLexer): (r'"', String.Double, 'in-double-string'), ], 'in-general-string': [ - (r'[\\][\\nt]', String.Escape), # Only three escapes are supported. + (r'\\[\\nt]', String.Escape), # Only three escapes are supported. (r'.', String), ], 'in-single-string': [ @@ -145,14 +324,12 @@ class TeraTermLexer(RegexLexer): (r'[()]', String.Symbol), ], 'all-whitespace': [ - (r'[\s]+', Text), + (r'\s+', Text), ], } # Turtle and Tera Term macro files share the same file extension # but each has a recognizable and distinct syntax. 
def analyse_text(text): - result = 0.0 if re.search(TeraTermLexer.tokens['commands'][0][0], text): - result += 0.60 - return result + return 0.01 diff --git a/src/typecode/_vendor/pygments/lexers/testing.py b/src/typecode/_vendor/pygments/lexers/testing.py index 5bac4f6..84d7a40 100644 --- a/src/typecode/_vendor/pygments/lexers/testing.py +++ b/src/typecode/_vendor/pygments/lexers/testing.py @@ -5,7 +5,7 @@ Lexers for testing languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -17,7 +17,7 @@ class GherkinLexer(RegexLexer): """ - For `Gherkin ` syntax. + For `Gherkin ` syntax. .. versionadded:: 1.2 """ @@ -26,10 +26,10 @@ class GherkinLexer(RegexLexer): filenames = ['*.feature'] mimetypes = ['text/x-gherkin'] - feature_keywords = u'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$' - feature_element_keywords = u'^(\\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|剧本大纲|剧本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs 
pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$' - examples_keywords = u'^(\\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$' - step_keywords = u'^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però 
|Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\\* )' + feature_keywords = '^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$' + feature_element_keywords = '^(\\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|剧本大纲|剧本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario 
Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$' + examples_keywords = '^(\\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$' + step_keywords = '^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma 
|Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\\* )' tokens = { 'comments': [ diff --git a/src/typecode/_vendor/pygments/lexers/text.py b/src/typecode/_vendor/pygments/lexers/text.py index 2cdf4e6..41f5626 100644 --- a/src/typecode/_vendor/pygments/lexers/text.py +++ b/src/typecode/_vendor/pygments/lexers/text.py @@ -5,7 +5,7 @@ Lexers for non-source code file types. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/textedit.py b/src/typecode/_vendor/pygments/lexers/textedit.py index e1c18c2..6187edd 100644 --- a/src/typecode/_vendor/pygments/lexers/textedit.py +++ b/src/typecode/_vendor/pygments/lexers/textedit.py @@ -5,7 +5,7 @@ Lexers for languages related to text processing. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -69,8 +69,8 @@ class AwkLexer(RegexLexer): (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), (r'0x[0-9a-fA-F]+', Number.Hex), (r'[0-9]+', Number.Integer), - (r'"(\\\\|\\"|[^"])*"', String.Double), - (r"'(\\\\|\\'|[^'])*'", String.Single), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), ] } @@ -102,9 +102,9 @@ class VimLexer(RegexLexer): (r'[ \t]+', Text), # TODO: regexes can have other delims - (r'/(\\\\|\\/|[^\n/])*/', String.Regex), - (r'"(\\\\|\\"|[^\n"])*"', String.Double), - (r"'(''|[^\n'])*'", String.Single), + (r'/[^/\\\n]*(?:\\[\s\S][^/\\\n]*)*/', String.Regex), + (r'"[^"\\\n]*(?:\\[\s\S][^"\\\n]*)*"', String.Double), + (r"'[^\n']*(?:''[^\n']*)*'", String.Single), # Who decided that doublequote was a good comment character?? (r'(?<=\s)"[^\-:.%#=*].*', Comment), diff --git a/src/typecode/_vendor/pygments/lexers/textfmts.py b/src/typecode/_vendor/pygments/lexers/textfmts.py index cc6c4c9..30972c6 100644 --- a/src/typecode/_vendor/pygments/lexers/textfmts.py +++ b/src/typecode/_vendor/pygments/lexers/textfmts.py @@ -5,18 +5,20 @@ Lexers for various text formats. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" import re -from typecode._vendor.pygments.lexer import RegexLexer, bygroups +from typecode._vendor.pygments.lexers import guess_lexer, get_lexer_by_name +from typecode._vendor.pygments.lexer import RegexLexer, bygroups, default, include from typecode._vendor.pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Number, Generic, Literal + Number, Generic, Literal, Punctuation from typecode._vendor.pygments.util import ClassNotFound -__all__ = ['IrcLogsLexer', 'TodotxtLexer', 'HttpLexer', 'GettextLexer'] +__all__ = ['IrcLogsLexer', 'TodotxtLexer', 'HttpLexer', 'GettextLexer', + 'NotmuchLexer', 'KernelLogLexer'] class IrcLogsLexer(RegexLexer): @@ -173,13 +175,13 @@ def content_callback(self, match): tokens = { 'root': [ (r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH)( +)([^ ]+)( +)' - r'(HTTP)(/)(1\.[01])(\r?\n|\Z)', + r'(HTTP)(/)(1\.[01]|2(?:\.0)?|3)(\r?\n|\Z)', bygroups(Name.Function, Text, Name.Namespace, Text, Keyword.Reserved, Operator, Number, Text), 'headers'), - (r'(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)', - bygroups(Keyword.Reserved, Operator, Number, Text, Number, - Text, Name.Exception, Text), + (r'(HTTP)(/)(1\.[01]|2(?:\.0)?|3)( +)(\d{3})(?:( +)([^\r\n]*))?(\r?\n|\Z)', + bygroups(Keyword.Reserved, Operator, Number, Text, Number, Text, + Name.Exception, Text), 'headers'), ], 'headers': [ @@ -295,3 +297,134 @@ class TodotxtLexer(RegexLexer): (r'\s+', IncompleteTaskText), ], } + + +class NotmuchLexer(RegexLexer): + """ + For `Notmuch `_ email text format. + + .. versionadded:: 2.5 + + Additional options accepted: + + `body_lexer` + If given, highlight the contents of the message body with the specified + lexer, else guess it according to the body content (default: ``None``). 
+ """ + + name = 'Notmuch' + aliases = ['notmuch'] + + def _highlight_code(self, match): + code = match.group(1) + + try: + if self.body_lexer: + lexer = get_lexer_by_name(self.body_lexer) + else: + lexer = guess_lexer(code.strip()) + except ClassNotFound: + lexer = get_lexer_by_name('text') + + yield from lexer.get_tokens_unprocessed(code) + + tokens = { + 'root': [ + (r'\fmessage\{\s*', Keyword, ('message', 'message-attr')), + ], + 'message-attr': [ + (r'(\s*id:\s*)(\S+)', bygroups(Name.Attribute, String)), + (r'(\s*(?:depth|match|excluded):\s*)(\d+)', + bygroups(Name.Attribute, Number.Integer)), + (r'(\s*filename:\s*)(.+\n)', + bygroups(Name.Attribute, String)), + default('#pop'), + ], + 'message': [ + (r'\fmessage\}\n', Keyword, '#pop'), + (r'\fheader\{\n', Keyword, 'header'), + (r'\fbody\{\n', Keyword, 'body'), + ], + 'header': [ + (r'\fheader\}\n', Keyword, '#pop'), + (r'((?:Subject|From|To|Cc|Date):\s*)(.*\n)', + bygroups(Name.Attribute, String)), + (r'(.*)(\s*\(.*\))(\s*\(.*\)\n)', + bygroups(Generic.Strong, Literal, Name.Tag)), + ], + 'body': [ + (r'\fpart\{\n', Keyword, 'part'), + (r'\f(part|attachment)\{\s*', Keyword, ('part', 'part-attr')), + (r'\fbody\}\n', Keyword, '#pop'), + ], + 'part-attr': [ + (r'(ID:\s*)(\d+)', bygroups(Name.Attribute, Number.Integer)), + (r'(,\s*)((?:Filename|Content-id):\s*)([^,]+)', + bygroups(Punctuation, Name.Attribute, String)), + (r'(,\s*)(Content-type:\s*)(.+\n)', + bygroups(Punctuation, Name.Attribute, String)), + default('#pop'), + ], + 'part': [ + (r'\f(?:part|attachment)\}\n', Keyword, '#pop'), + (r'\f(?:part|attachment)\{\s*', Keyword, ('#push', 'part-attr')), + (r'^Non-text part: .*\n', Comment), + (r'(?s)(.*?(?=\f(?:part|attachment)\}\n))', _highlight_code), + ], + } + + def analyse_text(text): + return 1.0 if text.startswith('\fmessage{') else 0.0 + + def __init__(self, **options): + self.body_lexer = options.get('body_lexer', None) + RegexLexer.__init__(self, **options) + + +class KernelLogLexer(RegexLexer): + 
""" + For Linux Kernel log ("dmesg") output. + + .. versionadded:: 2.6 + """ + name = 'Kernel log' + aliases = ['kmsg', 'dmesg'] + filenames = ['*.kmsg', '*.dmesg'] + + tokens = { + 'root': [ + (r'^[^:]+:debug : (?=\[)', Text, 'debug'), + (r'^[^:]+:info : (?=\[)', Text, 'info'), + (r'^[^:]+:warn : (?=\[)', Text, 'warn'), + (r'^[^:]+:notice: (?=\[)', Text, 'warn'), + (r'^[^:]+:err : (?=\[)', Text, 'error'), + (r'^[^:]+:crit : (?=\[)', Text, 'error'), + (r'^(?=\[)', Text, 'unknown'), + ], + 'unknown': [ + (r'^(?=.+(warning|notice|audit|deprecated))', Text, 'warn'), + (r'^(?=.+(error|critical|fail|Bug))', Text, 'error'), + default('info'), + ], + 'base': [ + (r'\[[0-9. ]+\] ', Number), + (r'(?<=\] ).+?:', Keyword), + (r'\n', Text, '#pop'), + ], + 'debug': [ + include('base'), + (r'.+\n', Comment, '#pop') + ], + 'info': [ + include('base'), + (r'.+\n', Text, '#pop') + ], + 'warn': [ + include('base'), + (r'.+\n', Generic.Strong, '#pop') + ], + 'error': [ + include('base'), + (r'.+\n', Generic.Error, '#pop') + ] + } diff --git a/src/typecode/_vendor/pygments/lexers/theorem.py b/src/typecode/_vendor/pygments/lexers/theorem.py index 9b5024c..d0eae1e 100644 --- a/src/typecode/_vendor/pygments/lexers/theorem.py +++ b/src/typecode/_vendor/pygments/lexers/theorem.py @@ -5,7 +5,7 @@ Lexers for theorem-proving languages. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -30,6 +30,8 @@ class CoqLexer(RegexLexer): filenames = ['*.v'] mimetypes = ['text/x-coq'] + flags = re.UNICODE + keywords1 = ( # Vernacular commands 'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable', @@ -93,7 +95,7 @@ class CoqLexer(RegexLexer): '<->', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>', r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~', '=>', r'/\\', r'\\/', r'\{\|', r'\|\}', - u'Π', u'λ', + 'Π', 'λ', ) operators = r'[!$%&*+\./:<=>?@^|~-]' prefix_syms = r'[!?~]' @@ -123,14 +125,15 @@ class CoqLexer(RegexLexer): (r'0[bB][01][01_]*', Number.Bin), (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float), - (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'", - String.Char), + (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'", String.Char), + (r"'.'", String.Char), (r"'", Keyword), # a stray quote is another syntax element (r'"', String.Double, 'string'), (r'[~?][a-z][\w\']*:', Name), + (r'\S', Name.Builtin.Pseudo), ], 'comment': [ (r'[^(*)]+', Comment), @@ -154,8 +157,8 @@ class CoqLexer(RegexLexer): } def analyse_text(text): - if text.startswith('(*'): - return True + if 'Qed' in text and 'Proof' in text: + return 1 class IsabelleLexer(RegexLexer): @@ -388,69 +391,87 @@ class LeanLexer(RegexLexer): flags = re.MULTILINE | re.UNICODE - keywords1 = ( - 'import', 'abbreviation', 'opaque_hint', 'tactic_hint', 'definition', - 'renaming', 'inline', 'hiding', 'exposing', 'parameter', 'parameters', - 'conjecture', 'hypothesis', 'lemma', 'corollary', 'variable', 'variables', - 'theorem', 'axiom', 'inductive', 'structure', 'universe', 'alias', - 'help', 'options', 'precedence', 'postfix', 'prefix', 'calc_trans', - 'calc_subst', 'calc_refl', 'infix', 'infixl', 'infixr', 'notation', 'eval', - 'check', 'exit', 'coercion', 'end', 'private', 'using', 'namespace', - 'including', 'instance', 'section', 'context', 'protected', 'expose', - 'export', 'set_option', 'add_rewrite', 'extends', 'open', 
'example', - 'constant', 'constants', 'print', 'opaque', 'reducible', 'irreducible', - ) - - keywords2 = ( - 'forall', 'fun', 'Pi', 'obtain', 'from', 'have', 'show', 'assume', - 'take', 'let', 'if', 'else', 'then', 'by', 'in', 'with', 'begin', - 'proof', 'qed', 'calc', 'match', - ) - - keywords3 = ( - # Sorts - 'Type', 'Prop', - ) - - operators = ( - u'!=', u'#', u'&', u'&&', u'*', u'+', u'-', u'/', u'@', u'!', u'`', - u'-.', u'->', u'.', u'..', u'...', u'::', u':>', u';', u';;', u'<', - u'<-', u'=', u'==', u'>', u'_', u'|', u'||', u'~', u'=>', u'<=', u'>=', - u'/\\', u'\\/', u'∀', u'Π', u'λ', u'↔', u'∧', u'∨', u'≠', u'≤', u'≥', - u'¬', u'⁻¹', u'⬝', u'▸', u'→', u'∃', u'ℕ', u'ℤ', u'≈', u'×', u'⌞', - u'⌟', u'≡', u'⟨', u'⟩', - ) - - punctuation = (u'(', u')', u':', u'{', u'}', u'[', u']', u'⦃', u'⦄', - u':=', u',') - tokens = { 'root': [ (r'\s+', Text), + (r'/--', String.Doc, 'docstring'), (r'/-', Comment, 'comment'), (r'--.*?$', Comment.Single), - (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace), - (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword), - (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type), - (words(operators), Name.Builtin.Pseudo), - (words(punctuation), Operator), - (u"[A-Za-z_\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2100-\u214f]" - u"[A-Za-z_'\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2070-\u2079" - u"\u207f-\u2089\u2090-\u209c\u2100-\u214f0-9]*", Name), + (words(( + 'import', 'renaming', 'hiding', + 'namespace', + 'local', + 'private', 'protected', 'section', + 'include', 'omit', 'section', + 'protected', 'export', + 'open', + 'attribute', + ), prefix=r'\b', suffix=r'\b'), Keyword.Namespace), + (words(( + 'lemma', 'theorem', 'def', 'definition', 'example', + 'axiom', 'axioms', 'constant', 'constants', + 'universe', 'universes', + 'inductive', 'coinductive', 'structure', 'extends', + 'class', 'instance', + 'abbreviation', + + 'noncomputable theory', + + 'noncomputable', 'mutual', 'meta', + + 'attribute', + + 'parameter', 
'parameters', + 'variable', 'variables', + + 'reserve', 'precedence', + 'postfix', 'prefix', 'notation', 'infix', 'infixl', 'infixr', + + 'begin', 'by', 'end', + + 'set_option', + 'run_cmd', + ), prefix=r'\b', suffix=r'\b'), Keyword.Declaration), + (r'@\[[^\]]*\]', Keyword.Declaration), + (words(( + 'forall', 'fun', 'Pi', 'from', 'have', 'show', 'assume', 'suffices', + 'let', 'if', 'else', 'then', 'in', 'with', 'calc', 'match', + 'do' + ), prefix=r'\b', suffix=r'\b'), Keyword), + (words(('sorry', 'admit'), prefix=r'\b', suffix=r'\b'), Generic.Error), + (words(('Sort', 'Prop', 'Type'), prefix=r'\b', suffix=r'\b'), Keyword.Type), + (words(( + '#eval', '#check', '#reduce', '#exit', + '#print', '#help', + ), suffix=r'\b'), Keyword), + (words(( + '(', ')', ':', '{', '}', '[', ']', '⟨', '⟩', '‹', '›', '⦃', '⦄', ':=', ',', + )), Operator), + (r'[A-Za-z_\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2100-\u214f]' + r'[.A-Za-z_\'\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2070-\u2079' + r'\u207f-\u2089\u2090-\u209c\u2100-\u214f0-9]*', Name), + (r'0x[A-Za-z0-9]+', Number.Integer), + (r'0b[01]+', Number.Integer), (r'\d+', Number.Integer), (r'"', String.Double, 'string'), - (r'[~?][a-z][\w\']*:', Name.Variable) + (r"'(?:(\\[\\\"'nt])|(\\x[0-9a-fA-F]{2})|(\\u[0-9a-fA-F]{4})|.)'", String.Char), + (r'[~?][a-z][\w\']*:', Name.Variable), + (r'\S', Name.Builtin.Pseudo), ], 'comment': [ - # Multiline Comments (r'[^/-]', Comment.Multiline), (r'/-', Comment.Multiline, '#push'), (r'-/', Comment.Multiline, '#pop'), (r'[/-]', Comment.Multiline) ], + 'docstring': [ + (r'[^/-]', String.Doc), + (r'-/', String.Doc, '#pop'), + (r'[/-]', String.Doc) + ], 'string': [ (r'[^\\"]+', String.Double), - (r'\\[n"\\]', String.Escape), + (r"(?:(\\[\\\"'nt])|(\\x[0-9a-fA-F]{2})|(\\u[0-9a-fA-F]{4}))", String.Escape), ('"', String.Double, '#pop'), ], } diff --git a/src/typecode/_vendor/pygments/lexers/tnt.py b/src/typecode/_vendor/pygments/lexers/tnt.py new file mode 100644 index 0000000..1ea6ee5 --- /dev/null +++ 
b/src/typecode/_vendor/pygments/lexers/tnt.py @@ -0,0 +1,263 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.tnt + ~~~~~~~~~~~~~~~~~~~ + + Lexer for Typographic Number Theory. + + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from typecode._vendor.pygments.lexer import Lexer +from typecode._vendor.pygments.token import Text, Comment, Operator, Keyword, Name, Number, \ + Punctuation, Error + +__all__ = ['TNTLexer'] + + +class TNTLexer(Lexer): + """ + Lexer for Typographic Number Theory, as described in the book + Gödel, Escher, Bach, by Douglas R. Hofstadter, + or as summarized here: + https://github.com/Kenny2github/language-tnt/blob/master/README.md#summary-of-tnt + + .. versionadded:: 2.7 + """ + + name = 'Typographic Number Theory' + aliases = ['tnt'] + filenames = ['*.tnt'] + + cur = [] + + LOGIC = set('⊃→]&∧^|∨Vv') + OPERATORS = set('+.⋅*') + VARIABLES = set('abcde') + PRIMES = set("'′") + NEGATORS = set('~!') + QUANTIFIERS = set('AE∀∃') + NUMBERS = set('0123456789') + WHITESPACE = set('\t \v\n') + + RULES = re.compile('''(?xi) + joining | separation | double-tilde | fantasy\\ rule + | carry[- ]over(?:\\ of)?(?:\\ line)?\\ ([0-9]+) | detachment + | contrapositive | De\\ Morgan | switcheroo + | specification | generalization | interchange + | existence | symmetry | transitivity + | add\\ S | drop\\ S | induction + | axiom\\ ([1-5]) | premise | push | pop + ''') + LINENOS = re.compile(r'(?:[0-9]+)(?:(?:, ?|,? 
and )(?:[0-9]+))*') + COMMENT = re.compile(r'\[[^\n\]]+\]') + + def __init__(self, *args, **kwargs): + Lexer.__init__(self, *args, **kwargs) + self.cur = [] + + def whitespace(self, start, text, required=False): + """Tokenize whitespace.""" + end = start + try: + while text[end] in self.WHITESPACE: + end += 1 + except IndexError: + end = len(text) + if required: + assert end != start + if end != start: + self.cur.append((start, Text, text[start:end])) + return end + + def variable(self, start, text): + """Tokenize a variable.""" + assert text[start] in self.VARIABLES + end = start+1 + while text[end] in self.PRIMES: + end += 1 + self.cur.append((start, Name.Variable, text[start:end])) + return end + + def term(self, start, text): + """Tokenize a term.""" + if text[start] == 'S': # S...S(...) or S...0 + end = start+1 + while text[end] == 'S': + end += 1 + self.cur.append((start, Number.Integer, text[start:end])) + return self.term(end, text) + if text[start] == '0': # the singleton 0 + self.cur.append((start, Number.Integer, text[start])) + return start+1 + if text[start] in self.VARIABLES: # a''... + return self.variable(start, text) + if text[start] == '(': # (...+...) 
+ self.cur.append((start, Punctuation, text[start])) + start = self.term(start+1, text) + assert text[start] in self.OPERATORS + self.cur.append((start, Operator, text[start])) + start = self.term(start+1, text) + assert text[start] == ')' + self.cur.append((start, Punctuation, text[start])) + return start+1 + raise AssertionError # no matches + + def formula(self, start, text): + """Tokenize a formula.""" + if text[start] in self.NEGATORS: # ~<...> + end = start+1 + while text[end] in self.NEGATORS: + end += 1 + self.cur.append((start, Operator, text[start:end])) + return self.formula(end, text) + if text[start] in self.QUANTIFIERS: # Aa:<...> + self.cur.append((start, Keyword.Declaration, text[start])) + start = self.variable(start+1, text) + assert text[start] == ':' + self.cur.append((start, Punctuation, text[start])) + return self.formula(start+1, text) + if text[start] == '<': # <...&...> + self.cur.append((start, Punctuation, text[start])) + start = self.formula(start+1, text) + assert text[start] in self.LOGIC + self.cur.append((start, Operator, text[start])) + start = self.formula(start+1, text) + assert text[start] == '>' + self.cur.append((start, Punctuation, text[start])) + return start+1 + # ...=... 
+ start = self.term(start, text) + assert text[start] == '=' + self.cur.append((start, Operator, text[start])) + start = self.term(start+1, text) + return start + + def rule(self, start, text): + """Tokenize a rule.""" + match = self.RULES.match(text, start) + assert match is not None + groups = sorted(match.regs[1:]) # exclude whole match + for group in groups: + if group[0] >= 0: # this group matched + self.cur.append((start, Keyword, text[start:group[0]])) + self.cur.append((group[0], Number.Integer, + text[group[0]:group[1]])) + if group[1] != match.end(): + self.cur.append((group[1], Keyword, + text[group[1]:match.end()])) + break + else: + self.cur.append((start, Keyword, text[start:match.end()])) + return match.end() + + def lineno(self, start, text): + """Tokenize a line referral.""" + end = start + while text[end] not in self.NUMBERS: + end += 1 + self.cur.append((start, Punctuation, text[start])) + self.cur.append((start+1, Text, text[start+1:end])) + start = end + match = self.LINENOS.match(text, start) + assert match is not None + assert text[match.end()] == ')' + self.cur.append((match.start(), Number.Integer, match.group(0))) + self.cur.append((match.end(), Punctuation, text[match.end()])) + return match.end() + 1 + + def error_till_line_end(self, start, text): + """Mark everything from ``start`` to the end of the line as Error.""" + end = start + try: + while text[end] != '\n': # there's whitespace in rules + end += 1 + except IndexError: + end = len(text) + if end != start: + self.cur.append((start, Error, text[start:end])) + end = self.whitespace(end, text) + return end + + def get_tokens_unprocessed(self, text): + """Returns a list of TNT tokens.""" + self.cur = [] + start = end = self.whitespace(0, text) + while start <= end < len(text): + try: + # try line number + while text[end] in self.NUMBERS: + end += 1 + if end != start: # actual number present + self.cur.append((start, Number.Integer, text[start:end])) + # whitespace is required after a 
line number + orig = len(self.cur) + try: + start = end = self.whitespace(end, text, True) + except AssertionError: + del self.cur[orig:] + start = end = self.error_till_line_end(end, text) + continue + # at this point it could be a comment + match = self.COMMENT.match(text, start) + if match is not None: + self.cur.append((start, Comment, text[start:match.end()])) + start = end = match.end() + # anything after the closing bracket is invalid + start = end = self.error_till_line_end(start, text) + # do not attempt to process the rest + continue + del match + if text[start] in '[]': # fantasy push or pop + self.cur.append((start, Keyword, text[start])) + start += 1 + end += 1 + else: + # one formula, possibly containing subformulae + orig = len(self.cur) + try: + start = end = self.formula(start, text) + except AssertionError: # not well-formed + del self.cur[orig:] + while text[end] not in self.WHITESPACE: + end += 1 + self.cur.append((start, Error, text[start:end])) + start = end + # skip whitespace after formula + orig = len(self.cur) + try: + start = end = self.whitespace(end, text, True) + except AssertionError: + del self.cur[orig:] + start = end = self.error_till_line_end(start, text) + continue + # rule proving this formula a theorem + orig = len(self.cur) + try: + start = end = self.rule(start, text) + except AssertionError: + del self.cur[orig:] + start = end = self.error_till_line_end(start, text) + continue + # skip whitespace after rule + start = end = self.whitespace(end, text) + # line marker + if text[start] == '(': + orig = len(self.cur) + try: + start = end = self.lineno(start, text) + except AssertionError: + del self.cur[orig:] + start = end = self.error_till_line_end(start, text) + continue + start = end = self.whitespace(start, text) + except IndexError: + try: + del self.cur[orig:] + except NameError: + pass # if orig was never defined, fine + self.error_till_line_end(start, text) + return self.cur diff --git 
a/src/typecode/_vendor/pygments/lexers/trafficscript.py b/src/typecode/_vendor/pygments/lexers/trafficscript.py index c02692b..c6efedf 100644 --- a/src/typecode/_vendor/pygments/lexers/trafficscript.py +++ b/src/typecode/_vendor/pygments/lexers/trafficscript.py @@ -5,7 +5,7 @@ Lexer for RiverBed's TrafficScript (RTS) language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/lexers/typoscript.py b/src/typecode/_vendor/pygments/lexers/typoscript.py index a6c2c4d..1917716 100644 --- a/src/typecode/_vendor/pygments/lexers/typoscript.py +++ b/src/typecode/_vendor/pygments/lexers/typoscript.py @@ -14,7 +14,7 @@ `TypoScriptHtmlDataLexer` Lexer that highlights markers, constants and registers within html tags. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -113,9 +113,6 @@ class TypoScriptLexer(RegexLexer): flags = re.DOTALL | re.MULTILINE - # Slightly higher than TypeScript (which is 0). - priority = 0.0 - tokens = { 'root': [ include('comment'), diff --git a/src/typecode/_vendor/pygments/lexers/unicon.py b/src/typecode/_vendor/pygments/lexers/unicon.py index 5ea64c5..949f532 100644 --- a/src/typecode/_vendor/pygments/lexers/unicon.py +++ b/src/typecode/_vendor/pygments/lexers/unicon.py @@ -5,7 +5,7 @@ Lexers for the Icon and Unicon languages, including ucode VM. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -129,17 +129,15 @@ class UniconLexer(RegexLexer): 'WSync'), prefix=r'\b', suffix=r'\b'), Name.Function), include('numbers'), - (r'<@|<<@|>@|>>@|\.>|\->', Operator), - (r'\*\*|\+\+|\-\-|\.|\=|\~\=|<\=|>\=|\=\=|\~\=\=|<<|<<\=|>>|>>\=', Operator), - (r':\=|:\=:|\->|<\->|\+:\=|\|', Operator), - (r'\=\=\=|\~\=\=\=', Operator), + (r'<@|<<@|>@|>>@|\.>|->|===|~===|\*\*|\+\+|--|\.|~==|~=|<=|>=|==|' + r'=|<<=|<<|>>=|>>|:=:|:=|->|<->|\+:=|\|', Operator), (r'"(?:[^\\"]|\\.)*"', String), (r"'(?:[^\\']|\\.)*'", String.Character), (r'[*<>+=/&!?@~\\-]', Operator), (r'\^', Operator), (r'(\w+)(\s*|[(,])', bygroups(Name, using(this))), - (r"([\[\]])", Punctuation), - (r"(<>|=>|[()|:;,.'`]|[{}]|[%]|[&?])", Punctuation), + (r"[\[\]]", Punctuation), + (r"<>|=>|[()|:;,.'`{}%&?]", Punctuation), (r'\n+', Text), ], 'numbers': [ @@ -272,15 +270,14 @@ class IconLexer(RegexLexer): 'WSync'), prefix=r'\b', suffix=r'\b'), Name.Function), include('numbers'), - (r'\*\*|\+\+|\-\-|\.|\=|\~\=|<\=|>\=|\=\=|\~\=\=|<<|<<\=|>>|>>\=', Operator), - (r':\=|:\=:|<\-|<\->|\+:\=|\||\|\|', Operator), - (r'\=\=\=|\~\=\=\=', Operator), + (r'===|~===|\*\*|\+\+|--|\.|==|~==|<=|>=|=|~=|<<=|<<|>>=|>>|' + r':=:|:=|<->|<-|\+:=|\|\||\|', Operator), (r'"(?:[^\\"]|\\.)*"', String), (r"'(?:[^\\']|\\.)*'", String.Character), (r'[*<>+=/&!?@~\\-]', Operator), (r'(\w+)(\s*|[(,])', bygroups(Name, using(this))), - (r"([\[\]])", Punctuation), - (r"(<>|=>|[()|:;,.'`]|[{}]|[%^]|[&?])", Punctuation), + (r"[\[\]]", Punctuation), + (r"<>|=>|[()|:;,.'`{}%\^&?]", Punctuation), (r'\n+', Text), ], 'numbers': [ @@ -388,3 +385,28 @@ class UcodeLexer(RegexLexer): (r'[\w-]+', Text), ], } + + def analyse_text(text): + """endsuspend and endrepeat are unique to this language, and + \\self, /self doesn't seem to get used anywhere else either.""" + result = 0 + + if 'endsuspend' in text: + result += 0.1 + + if 'endrepeat' in text: + result += 0.1 + + if ':=' in text: + result += 0.01 + + if 'procedure' in text and 'end' in text: + result += 
0.01 + + # This seems quite unique to unicon -- doesn't appear in any other + # example source we have (A quick search reveals that \SELF appears in + # Perl/Raku code) + if r'\self' in text and r'/self' in text: + result += 0.5 + + return result diff --git a/src/typecode/_vendor/pygments/lexers/urbi.py b/src/typecode/_vendor/pygments/lexers/urbi.py index 47e0f0b..fe802ba 100644 --- a/src/typecode/_vendor/pygments/lexers/urbi.py +++ b/src/typecode/_vendor/pygments/lexers/urbi.py @@ -5,7 +5,7 @@ Lexers for UrbiScript language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -117,11 +117,11 @@ def blob_callback(lexer, match, ctx): ], 'string.double': [ (r'((?:\\\\|\\"|[^"])*?)(\\B\((\d+)\)\()', blob_callback), - (r'(\\\\|\\"|[^"])*?"', String.Double, '#pop'), + (r'(\\\\|\\[^\\]|[^"\\])*?"', String.Double, '#pop'), ], 'string.single': [ (r"((?:\\\\|\\'|[^'])*?)(\\B\((\d+)\)\()", blob_callback), - (r"(\\\\|\\'|[^'])*?'", String.Single, '#pop'), + (r"(\\\\|\\[^\\]|[^'\\])*?'", String.Single, '#pop'), ], # from http://pygments.org/docs/lexerdevelopment/#changing-states 'comment': [ @@ -131,3 +131,16 @@ def blob_callback(lexer, match, ctx): (r'[*/]', Comment.Multiline), ] } + + def analyse_text(text): + """This is fairly similar to C and others, but freezeif and + waituntil are unique keywords.""" + result = 0 + + if 'freezeif' in text: + result += 0.05 + + if 'waituntil' in text: + result += 0.05 + + return result diff --git a/src/typecode/_vendor/pygments/lexers/usd.py b/src/typecode/_vendor/pygments/lexers/usd.py new file mode 100644 index 0000000..cc76623 --- /dev/null +++ b/src/typecode/_vendor/pygments/lexers/usd.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.usd + ~~~~~~~~~~~~~~~~~~~ + + The module that parses Pixar's Universal Scene Description file format. 
+ + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from typecode._vendor.pygments.lexer import RegexLexer, bygroups +from typecode._vendor.pygments.lexer import words as words_ +from typecode._vendor.pygments.lexers._usd_builtins import COMMON_ATTRIBUTES, KEYWORDS, \ + OPERATORS, SPECIAL_NAMES, TYPES +from typecode._vendor.pygments.token import Comment, Keyword, Name, Number, Operator, \ + Punctuation, String, Text, Whitespace + +__all__ = ["UsdLexer"] + + +def _keywords(words, type_): + return [(words_(words, prefix=r"\b", suffix=r"\b"), type_)] + + +_TYPE = r"(\w+(?:\[\])?)" +_BASE_ATTRIBUTE = r"(\w+(?:\:\w+)*)(?:(\.)(timeSamples))?" +_WHITESPACE = r"([ \t]+)" + + +class UsdLexer(RegexLexer): + """ + A lexer that parses Pixar's Universal Scene Description file format. + + .. versionadded:: 2.6 + """ + + name = "USD" + aliases = ["usd", "usda"] + filenames = ["*.usd", "*.usda"] + + tokens = { + "root": [ + (r"(custom){_WHITESPACE}(uniform)(\s+){}(\s+){}(\s*)(=)".format( + _TYPE, _BASE_ATTRIBUTE, _WHITESPACE=_WHITESPACE), + bygroups(Keyword.Token, Whitespace, Keyword.Token, Whitespace, + Keyword.Type, Whitespace, Name.Attribute, Text, + Name.Keyword.Tokens, Whitespace, Operator)), + (r"(custom){_WHITESPACE}{}(\s+){}(\s*)(=)".format( + _TYPE, _BASE_ATTRIBUTE, _WHITESPACE=_WHITESPACE), + bygroups(Keyword.Token, Whitespace, Keyword.Type, Whitespace, + Name.Attribute, Text, Name.Keyword.Tokens, Whitespace, + Operator)), + (r"(uniform){_WHITESPACE}{}(\s+){}(\s*)(=)".format( + _TYPE, _BASE_ATTRIBUTE, _WHITESPACE=_WHITESPACE), + bygroups(Keyword.Token, Whitespace, Keyword.Type, Whitespace, + Name.Attribute, Text, Name.Keyword.Tokens, Whitespace, + Operator)), + (r"{}{_WHITESPACE}{}(\s*)(=)".format( + _TYPE, _BASE_ATTRIBUTE, _WHITESPACE=_WHITESPACE), + bygroups(Keyword.Type, Whitespace, Name.Attribute, Text, + Name.Keyword.Tokens, Whitespace, Operator)), + ] + + _keywords(KEYWORDS, Keyword.Tokens) + + 
_keywords(SPECIAL_NAMES, Name.Builtins) + + _keywords(COMMON_ATTRIBUTES, Name.Attribute) + + [(r"\b\w+:[\w:]+\b", Name.Attribute)] + + _keywords(OPERATORS, Operator) + # more attributes + [(type_ + r"\[\]", Keyword.Type) for type_ in TYPES] + + _keywords(TYPES, Keyword.Type) + + [ + (r"[(){}\[\]]", Punctuation), + ("#.*?$", Comment.Single), + (",", Punctuation), + (";", Punctuation), # ";"s are allowed to combine separate metadata lines + ("=", Operator), + (r"[-]*([0-9]*[.])?[0-9]+(?:e[+-]*\d+)?", Number), + (r"'''(?:.|\n)*?'''", String), + (r'"""(?:.|\n)*?"""', String), + (r"'.*?'", String), + (r'".*?"', String), + (r"<(\.\./)*([\w/]+|[\w/]+\.\w+[\w:]*)>", Name.Namespace), + (r"@.*?@", String.Interpol), + (r'\(.*"[.\\n]*".*\)', String.Doc), + (r"\A#usda .+$", Comment.Hashbang), + (r"\s+", Whitespace), + (r"\w+", Text), + (r"[_:.]+", Punctuation), + ], + } diff --git a/src/typecode/_vendor/pygments/lexers/varnish.py b/src/typecode/_vendor/pygments/lexers/varnish.py index 3c85397..a25b19c 100644 --- a/src/typecode/_vendor/pygments/lexers/varnish.py +++ b/src/typecode/_vendor/pygments/lexers/varnish.py @@ -5,7 +5,7 @@ Lexers for Varnish configuration - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -61,7 +61,7 @@ def analyse_text(text): bygroups(Name.Attribute, Operator, Name.Variable.Global, Punctuation)), (r'(\.probe)(\s*=\s*)(\{)', bygroups(Name.Attribute, Operator, Punctuation), 'probe'), - (r'(\.\w+\b)(\s*=\s*)([^;]*)(\s*;)', + (r'(\.\w+\b)(\s*=\s*)([^;\s]*)(\s*;)', bygroups(Name.Attribute, Operator, using(this), Punctuation)), (r'\{', Punctuation, '#push'), (r'\}', Punctuation, '#pop'), diff --git a/src/typecode/_vendor/pygments/lexers/verification.py b/src/typecode/_vendor/pygments/lexers/verification.py index 6f25d69..18f17d7 100644 --- a/src/typecode/_vendor/pygments/lexers/verification.py +++ b/src/typecode/_vendor/pygments/lexers/verification.py @@ -5,13 +5,13 @@ Lexer for Intermediate Verification Languages (IVLs). - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ from typecode._vendor.pygments.lexer import RegexLexer, include, words from typecode._vendor.pygments.token import Comment, Operator, Keyword, Name, Number, \ - Punctuation, Whitespace + Punctuation, Text, Generic __all__ = ['BoogieLexer', 'SilverLexer'] @@ -29,8 +29,9 @@ class BoogieLexer(RegexLexer): tokens = { 'root': [ # Whitespace and Comments - (r'\n', Whitespace), - (r'\s+', Whitespace), + (r'\n', Text), + (r'\s+', Text), + (r'\\\n', Text), # line continuation (r'//[/!](.*?)\n', Comment.Doc), (r'//(.*?)\n', Comment.Single), (r'/\*', Comment.Multiline, 'comment'), @@ -45,6 +46,7 @@ class BoogieLexer(RegexLexer): (words(('bool', 'int', 'ref'), suffix=r'\b'), Keyword.Type), include('numbers'), (r"(>=|<=|:=|!=|==>|&&|\|\||[+/\-=>*<\[\]])", Operator), + (r'\{.*?\}', Generic.Emph), #triggers (r"([{}():;,.])", Punctuation), # Identifier (r'[a-zA-Z_]\w*', Name), @@ -74,8 +76,9 @@ class SilverLexer(RegexLexer): tokens = { 'root': [ # Whitespace and Comments - (r'\n', Whitespace), - (r'\s+', Whitespace), + (r'\n', Text), + (r'\s+', Text), + (r'\\\n', 
Text), # line continuation (r'//[/!](.*?)\n', Comment.Doc), (r'//(.*?)\n', Comment.Single), (r'/\*', Comment.Multiline, 'comment'), @@ -83,18 +86,18 @@ class SilverLexer(RegexLexer): (words(( 'result', 'true', 'false', 'null', 'method', 'function', 'predicate', 'program', 'domain', 'axiom', 'var', 'returns', - 'field', 'define', 'requires', 'ensures', 'invariant', - 'fold', 'unfold', 'inhale', 'exhale', 'new', 'assert', + 'field', 'define', 'fold', 'unfold', 'inhale', 'exhale', 'new', 'assert', 'assume', 'goto', 'while', 'if', 'elseif', 'else', 'fresh', 'constraining', 'Seq', 'Set', 'Multiset', 'union', 'intersection', 'setminus', 'subset', 'unfolding', 'in', 'old', 'forall', 'exists', 'acc', 'wildcard', 'write', 'none', 'epsilon', 'perm', 'unique', 'apply', 'package', 'folding', 'label', 'forperm'), suffix=r'\b'), Keyword), - (words(('Int', 'Perm', 'Bool', 'Ref'), suffix=r'\b'), Keyword.Type), + (words(('requires', 'ensures', 'invariant'), suffix=r'\b'), Name.Decorator), + (words(('Int', 'Perm', 'Bool', 'Ref', 'Rational'), suffix=r'\b'), Keyword.Type), include('numbers'), - (r'[!%&*+=|?:<>/\-\[\]]', Operator), + (r'\{.*?\}', Generic.Emph), #triggers (r'([{}():;,.])', Punctuation), # Identifier (r'[\w$]\w*', Name), diff --git a/src/typecode/_vendor/pygments/lexers/web.py b/src/typecode/_vendor/pygments/lexers/web.py index fc7b202..4ef1181 100644 --- a/src/typecode/_vendor/pygments/lexers/web.py +++ b/src/typecode/_vendor/pygments/lexers/web.py @@ -5,7 +5,7 @@ Just export previously exported lexers. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" diff --git a/src/typecode/_vendor/pygments/lexers/webidl.py b/src/typecode/_vendor/pygments/lexers/webidl.py new file mode 100644 index 0000000..c4ff66a --- /dev/null +++ b/src/typecode/_vendor/pygments/lexers/webidl.py @@ -0,0 +1,299 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.webidl + ~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for Web IDL, including some extensions. + + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from typecode._vendor.pygments.lexer import RegexLexer, default, include, words +from typecode._vendor.pygments.token import Comment, Keyword, Name, Number, Punctuation, \ + String, Text + +__all__ = ['WebIDLLexer'] + +_builtin_types = ( + # primitive types + 'byte', 'octet', 'boolean', + r'(?:unsigned\s+)?(?:short|long(?:\s+long)?)', + r'(?:unrestricted\s+)?(?:float|double)', + # string types + 'DOMString', 'ByteString', 'USVString', + # exception types + 'Error', 'DOMException', + # typed array types + 'Uint8Array', 'Uint16Array', 'Uint32Array', 'Uint8ClampedArray', + 'Float32Array', 'Float64Array', + # buffer source types + 'ArrayBuffer', 'DataView', 'Int8Array', 'Int16Array', 'Int32Array', + # other + 'any', 'void', 'object', 'RegExp', +) +_identifier = r'_?[A-Za-z][a-zA-Z0-9_-]*' +_keyword_suffix = r'(?![\w-])' +_string = r'"[^"]*"' + + +class WebIDLLexer(RegexLexer): + """ + For Web IDL. + + .. 
versionadded:: 2.6 + """ + + name = 'Web IDL' + aliases = ['webidl'] + filenames = ['*.webidl'] + + tokens = { + 'common': [ + (r'\s+', Text), + (r'(?s)/\*.*?\*/', Comment.Multiline), + (r'//.*', Comment.Single), + (r'^#.*', Comment.Preproc), + ], + 'root': [ + include('common'), + (r'\[', Punctuation, 'extended_attributes'), + (r'partial' + _keyword_suffix, Keyword), + (r'typedef' + _keyword_suffix, Keyword, ('typedef', 'type')), + (r'interface' + _keyword_suffix, Keyword, 'interface_rest'), + (r'enum' + _keyword_suffix, Keyword, 'enum_rest'), + (r'callback' + _keyword_suffix, Keyword, 'callback_rest'), + (r'dictionary' + _keyword_suffix, Keyword, 'dictionary_rest'), + (r'namespace' + _keyword_suffix, Keyword, 'namespace_rest'), + (_identifier, Name.Class, 'implements_rest'), + ], + 'extended_attributes': [ + include('common'), + (r',', Punctuation), + (_identifier, Name.Decorator), + (r'=', Punctuation, 'extended_attribute_rest'), + (r'\(', Punctuation, 'argument_list'), + (r'\]', Punctuation, '#pop'), + ], + 'extended_attribute_rest': [ + include('common'), + (_identifier, Name, 'extended_attribute_named_rest'), + (_string, String), + (r'\(', Punctuation, 'identifier_list'), + default('#pop'), + ], + 'extended_attribute_named_rest': [ + include('common'), + (r'\(', Punctuation, 'argument_list'), + default('#pop'), + ], + 'argument_list': [ + include('common'), + (r'\)', Punctuation, '#pop'), + default('argument'), + ], + 'argument': [ + include('common'), + (r'optional' + _keyword_suffix, Keyword), + (r'\[', Punctuation, 'extended_attributes'), + (r',', Punctuation, '#pop'), + (r'\)', Punctuation, '#pop:2'), + default(('argument_rest', 'type')) + ], + 'argument_rest': [ + include('common'), + (_identifier, Name.Variable), + (r'\.\.\.', Punctuation), + (r'=', Punctuation, 'default_value'), + default('#pop'), + ], + 'identifier_list': [ + include('common'), + (_identifier, Name.Class), + (r',', Punctuation), + (r'\)', Punctuation, '#pop'), + ], + 'type': [ + 
include('common'), + (r'(?:' + r'|'.join(_builtin_types) + r')' + _keyword_suffix, + Keyword.Type, 'type_null'), + (words(('sequence', 'Promise', 'FrozenArray'), + suffix=_keyword_suffix), Keyword.Type, 'type_identifier'), + (_identifier, Name.Class, 'type_identifier'), + (r'\(', Punctuation, 'union_type'), + ], + 'union_type': [ + include('common'), + (r'or' + _keyword_suffix, Keyword), + (r'\)', Punctuation, ('#pop', 'type_null')), + default('type'), + ], + 'type_identifier': [ + (r'<', Punctuation, 'type_list'), + default(('#pop', 'type_null')) + ], + 'type_null': [ + (r'\?', Punctuation), + default('#pop:2'), + ], + 'default_value': [ + include('common'), + include('const_value'), + (_string, String, '#pop'), + (r'\[\s*\]', Punctuation, '#pop'), + ], + 'const_value': [ + include('common'), + (words(('true', 'false', '-Infinity', 'Infinity', 'NaN', 'null'), + suffix=_keyword_suffix), Keyword.Constant, '#pop'), + (r'-?(?:(?:[0-9]+\.[0-9]*|[0-9]*\.[0-9]+)(?:[Ee][+-]?[0-9]+)?' + + r'|[0-9]+[Ee][+-]?[0-9]+)', Number.Float, '#pop'), + (r'-?[1-9][0-9]*', Number.Integer, '#pop'), + (r'-?0[Xx][0-9A-Fa-f]+', Number.Hex, '#pop'), + (r'-?0[0-7]*', Number.Oct, '#pop'), + ], + 'typedef': [ + include('common'), + (_identifier, Name.Class), + (r';', Punctuation, '#pop'), + ], + 'namespace_rest': [ + include('common'), + (_identifier, Name.Namespace), + (r'\{', Punctuation, 'namespace_body'), + (r';', Punctuation, '#pop'), + ], + 'namespace_body': [ + include('common'), + (r'\[', Punctuation, 'extended_attributes'), + (r'readonly' + _keyword_suffix, Keyword), + (r'attribute' + _keyword_suffix, + Keyword, ('attribute_rest', 'type')), + (r'const' + _keyword_suffix, Keyword, ('const_rest', 'type')), + (r'\}', Punctuation, '#pop'), + default(('operation_rest', 'type')), + ], + 'interface_rest': [ + include('common'), + (_identifier, Name.Class), + (r':', Punctuation), + (r'\{', Punctuation, 'interface_body'), + (r';', Punctuation, '#pop'), + ], + 'interface_body': [ + 
(words(('iterable', 'maplike', 'setlike'), suffix=_keyword_suffix), + Keyword, 'iterable_maplike_setlike_rest'), + (words(('setter', 'getter', 'creator', 'deleter', 'legacycaller', + 'inherit', 'static', 'stringifier', 'jsonifier'), + suffix=_keyword_suffix), Keyword), + (r'serializer' + _keyword_suffix, Keyword, 'serializer_rest'), + (r';', Punctuation), + include('namespace_body'), + ], + 'attribute_rest': [ + include('common'), + (_identifier, Name.Variable), + (r';', Punctuation, '#pop'), + ], + 'const_rest': [ + include('common'), + (_identifier, Name.Constant), + (r'=', Punctuation, 'const_value'), + (r';', Punctuation, '#pop'), + ], + 'operation_rest': [ + include('common'), + (r';', Punctuation, '#pop'), + default('operation'), + ], + 'operation': [ + include('common'), + (_identifier, Name.Function), + (r'\(', Punctuation, 'argument_list'), + (r';', Punctuation, '#pop:2'), + ], + 'iterable_maplike_setlike_rest': [ + include('common'), + (r'<', Punctuation, 'type_list'), + (r';', Punctuation, '#pop'), + ], + 'type_list': [ + include('common'), + (r',', Punctuation), + (r'>', Punctuation, '#pop'), + default('type'), + ], + 'serializer_rest': [ + include('common'), + (r'=', Punctuation, 'serialization_pattern'), + (r';', Punctuation, '#pop'), + default('operation'), + ], + 'serialization_pattern': [ + include('common'), + (_identifier, Name.Variable, '#pop'), + (r'\{', Punctuation, 'serialization_pattern_map'), + (r'\[', Punctuation, 'serialization_pattern_list'), + ], + 'serialization_pattern_map': [ + include('common'), + (words(('getter', 'inherit', 'attribute'), + suffix=_keyword_suffix), Keyword), + (r',', Punctuation), + (_identifier, Name.Variable), + (r'\}', Punctuation, '#pop:2'), + ], + 'serialization_pattern_list': [ + include('common'), + (words(('getter', 'attribute'), suffix=_keyword_suffix), Keyword), + (r',', Punctuation), + (_identifier, Name.Variable), + (r']', Punctuation, '#pop:2'), + ], + 'enum_rest': [ + include('common'), + 
(_identifier, Name.Class), + (r'\{', Punctuation, 'enum_body'), + (r';', Punctuation, '#pop'), + ], + 'enum_body': [ + include('common'), + (_string, String), + (r',', Punctuation), + (r'\}', Punctuation, '#pop'), + ], + 'callback_rest': [ + include('common'), + (r'interface' + _keyword_suffix, + Keyword, ('#pop', 'interface_rest')), + (_identifier, Name.Class), + (r'=', Punctuation, ('operation', 'type')), + (r';', Punctuation, '#pop'), + ], + 'dictionary_rest': [ + include('common'), + (_identifier, Name.Class), + (r':', Punctuation), + (r'\{', Punctuation, 'dictionary_body'), + (r';', Punctuation, '#pop'), + ], + 'dictionary_body': [ + include('common'), + (r'\[', Punctuation, 'extended_attributes'), + (r'required' + _keyword_suffix, Keyword), + (r'\}', Punctuation, '#pop'), + default(('dictionary_item', 'type')), + ], + 'dictionary_item': [ + include('common'), + (_identifier, Name.Variable), + (r'=', Punctuation, 'default_value'), + (r';', Punctuation, '#pop'), + ], + 'implements_rest': [ + include('common'), + (r'implements' + _keyword_suffix, Keyword), + (_identifier, Name.Class), + (r';', Punctuation, '#pop'), + ], + } diff --git a/src/typecode/_vendor/pygments/lexers/webmisc.py b/src/typecode/_vendor/pygments/lexers/webmisc.py index 46adda1..1271ce5 100644 --- a/src/typecode/_vendor/pygments/lexers/webmisc.py +++ b/src/typecode/_vendor/pygments/lexers/webmisc.py @@ -5,7 +5,7 @@ Lexers for misc. web stuff. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -15,7 +15,6 @@ default, using from typecode._vendor.pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation, Literal -from typecode._vendor.pygments.util import unirange from typecode._vendor.pygments.lexers.css import _indentation, _starts_block from typecode._vendor.pygments.lexers.html import HtmlLexer @@ -74,15 +73,15 @@ class XQueryLexer(ExtendedRegexLexer): # FIX UNICODE LATER # ncnamestartchar = ( - # ur"[A-Z]|_|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|" - # ur"[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|" - # ur"[\u2C00-\u2FEF]|[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|" - # ur"[\u10000-\uEFFFF]" + # r"[A-Z]|_|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|" + # r"[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|" + # r"[\u2C00-\u2FEF]|[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|" + # r"[\u10000-\uEFFFF]" # ) ncnamestartchar = r"(?:[A-Z]|_|[a-z])" # FIX UNICODE LATER - # ncnamechar = ncnamestartchar + (ur"|-|\.|[0-9]|\u00B7|[\u0300-\u036F]|" - # ur"[\u203F-\u2040]") + # ncnamechar = ncnamestartchar + (r"|-|\.|[0-9]|\u00B7|[\u0300-\u036F]|" + # r"[\u203F-\u2040]") ncnamechar = r"(?:" + ncnamestartchar + r"|-|\.|[0-9])" ncname = "(?:%s+%s*)" % (ncnamestartchar, ncnamechar) pitarget_namestartchar = r"(?:[A-KN-WYZ]|_|:|[a-kn-wyz])" @@ -99,14 +98,14 @@ class XQueryLexer(ExtendedRegexLexer): stringsingle = r"(?:'(?:" + entityref + r"|" + charref + r"|''|[^&'])*')" # FIX UNICODE LATER - # elementcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|' - # ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]') + # elementcontentchar = (r'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|' + # r'[\u003d-\u007a]|\u007c|[\u007e-\u007F]') elementcontentchar = r'[A-Za-z]|\s|\d|[!"#$%()*+,\-./:;=?@\[\\\]^_\'`|~]' - # quotattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0021]|[\u0023-\u0025]|' - # ur'[\u0027-\u003b]|[\u003d-\u007a]|\u007c|[\u007e-\u007F]') + # quotattrcontentchar = 
(r'\t|\r|\n|[\u0020-\u0021]|[\u0023-\u0025]|' + # r'[\u0027-\u003b]|[\u003d-\u007a]|\u007c|[\u007e-\u007F]') quotattrcontentchar = r'[A-Za-z]|\s|\d|[!#$%()*+,\-./:;=?@\[\\\]^_\'`|~]' - # aposattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|' - # ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]') + # aposattrcontentchar = (r'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|' + # r'[\u003d-\u007a]|\u007c|[\u007e-\u007F]') aposattrcontentchar = r'[A-Za-z]|\s|\d|[!"#$%()*+,\-./:;=?@\[\\\]^_`|~]' # CHAR elements - fix the above elementcontentchar, quotattrcontentchar, @@ -129,7 +128,8 @@ def operator_root_callback(lexer, match, ctx): def popstate_tag_callback(lexer, match, ctx): yield match.start(), Name.Tag, match.group(1) - ctx.stack.append(lexer.xquery_parse_state.pop()) + if lexer.xquery_parse_state: + ctx.stack.append(lexer.xquery_parse_state.pop()) ctx.pos = match.end() def popstate_xmlcomment_callback(lexer, match, ctx): @@ -158,6 +158,9 @@ def popstate_callback(lexer, match, ctx): # state stack if len(lexer.xquery_parse_state) == 0: ctx.stack.pop() + if not ctx.stack: + # make sure we have at least the root state on invalid inputs + ctx.stack = ['root'] elif len(ctx.stack) > 1: ctx.stack.append(lexer.xquery_parse_state.pop()) else: @@ -515,8 +518,8 @@ def pushstate_operator_callback(lexer, match, ctx): 'xml_comment': [ (r'(-->)', popstate_xmlcomment_callback), (r'[^-]{1,2}', Literal), - (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' + - unirange(0x10000, 0x10ffff), Literal), + (r'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|[\U00010000-\U0010FFFF]', + Literal), ], 'processing_instruction': [ (r'\s+', Text, 'processing_instruction_content'), @@ -525,13 +528,13 @@ def pushstate_operator_callback(lexer, match, ctx): ], 'processing_instruction_content': [ (r'\?>', String.Doc, '#pop'), - (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' + - unirange(0x10000, 0x10ffff), Literal), + (r'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|[\U00010000-\U0010FFFF]', + Literal), ], 
'cdata_section': [ (r']]>', String.Doc, '#pop'), - (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' + - unirange(0x10000, 0x10ffff), Literal), + (r'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|[\U00010000-\U0010FFFF]', + Literal), ], 'start_tag': [ include('whitespace'), @@ -600,8 +603,8 @@ def pushstate_operator_callback(lexer, match, ctx): ], 'pragmacontents': [ (r'#\)', Punctuation, 'operator'), - (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' + - unirange(0x10000, 0x10ffff), Literal), + (r'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|[\U00010000-\U0010FFFF]', + Literal), (r'(\s+)', Text), ], 'occurrenceindicator': [ @@ -855,8 +858,8 @@ class QmlLexer(RegexLexer): (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), (r'0x[0-9a-fA-F]+', Number.Hex), (r'[0-9]+', Number.Integer), - (r'"(\\\\|\\"|[^"])*"', String.Double), - (r"'(\\\\|\\'|[^'])*'", String.Single), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single), ] } diff --git a/src/typecode/_vendor/pygments/lexers/whiley.py b/src/typecode/_vendor/pygments/lexers/whiley.py index 4d00658..98e7f6d 100644 --- a/src/typecode/_vendor/pygments/lexers/whiley.py +++ b/src/typecode/_vendor/pygments/lexers/whiley.py @@ -5,7 +5,7 @@ Lexers for the Whiley language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -97,11 +97,11 @@ class WhileyLexer(RegexLexer): # operators and punctuation (r'[{}()\[\],.;]', Punctuation), - (u'[+\\-*/%&|<>^!~@=:?' + (r'[+\-*/%&|<>^!~@=:?' 
# unicode operators - u'\u2200\u2203\u2205\u2282\u2286\u2283\u2287' - u'\u222A\u2229\u2264\u2265\u2208\u2227\u2228' - u']', Operator), + r'\u2200\u2203\u2205\u2282\u2286\u2283\u2287' + r'\u222A\u2229\u2264\u2265\u2208\u2227\u2228' + r']', Operator), # identifier (r'[a-zA-Z_]\w*', Name), diff --git a/src/typecode/_vendor/pygments/lexers/x10.py b/src/typecode/_vendor/pygments/lexers/x10.py index 7b57e9f..31e149d 100644 --- a/src/typecode/_vendor/pygments/lexers/x10.py +++ b/src/typecode/_vendor/pygments/lexers/x10.py @@ -5,7 +5,7 @@ Lexers for the X10 programming language. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -62,7 +62,7 @@ class X10Lexer(RegexLexer): (r'\b(%s)\b' % '|'.join(types), Keyword.Type), (r'\b(%s)\b' % '|'.join(values), Keyword.Constant), (r'\b(%s)\b' % '|'.join(modifiers), Keyword.Declaration), - (r'"(\\\\|\\"|[^"])*"', String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char), (r'.', Text) ], diff --git a/src/typecode/_vendor/pygments/lexers/xorg.py b/src/typecode/_vendor/pygments/lexers/xorg.py index 486e6ef..e322777 100644 --- a/src/typecode/_vendor/pygments/lexers/xorg.py +++ b/src/typecode/_vendor/pygments/lexers/xorg.py @@ -5,7 +5,7 @@ Lexers for Xorg configs. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -29,7 +29,7 @@ class XorgLexer(RegexLexer): (r'((?:Sub)?Section)(\s+)("\w+")', bygroups(String.Escape, Text, String.Escape)), - (r'(End(|Sub)Section)', String.Escape), + (r'(End(?:Sub)?Section)', String.Escape), (r'(\w+)(\s+)([^\n#]+)', bygroups(Name.Builtin, Text, Name.Constant)), diff --git a/src/typecode/_vendor/pygments/lexers/yang.py b/src/typecode/_vendor/pygments/lexers/yang.py new file mode 100644 index 0000000..9fccf16 --- /dev/null +++ b/src/typecode/_vendor/pygments/lexers/yang.py @@ -0,0 +1,104 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.yang + ~~~~~~~~~~~~~~~~~~~~ + + Lexer for the YANG 1.1 modeling language. See :rfc:`7950`. + + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from typecode._vendor.pygments.lexer import (RegexLexer, bygroups, words) +from typecode._vendor.pygments.token import (Text, Token, Name, String, Comment, + Number) + +__all__ = ['YangLexer'] + +class YangLexer(RegexLexer): + """ + Lexer for `YANG `_, based on RFC7950 + + .. 
versionadded:: 2.7 + """ + name = 'YANG' + aliases = ['yang'] + filenames = ['*.yang'] + mimetypes = ['application/yang'] + + #Keywords from RFC7950 ; oriented at BNF style + TOP_STMTS_KEYWORDS = ("module", "submodule") + MODULE_HEADER_STMT_KEYWORDS = ("belongs-to", "namespace", "prefix", "yang-version") + META_STMT_KEYWORDS = ("contact", "description", "organization", + "reference", "revision") + LINKAGE_STMTS_KEYWORDS = ("import", "include", "revision-date") + BODY_STMT_KEYWORDS = ("action", "argument", "augment", "deviation", + "extension", "feature", "grouping", "identity", + "if-feature", "input", "notification", "output", + "rpc", "typedef") + DATA_DEF_STMT_KEYWORDS = ("anydata", "anyxml", "case", "choice", + "config", "container", "deviate", "leaf", + "leaf-list", "list", "must", "presence", + "refine", "uses", "when") + TYPE_STMT_KEYWORDS = ("base", "bit", "default", "enum", "error-app-tag", + "error-message", "fraction-digits", "length", + "max-elements", "min-elements", "modifier", + "ordered-by", "path", "pattern", "position", + "range", "require-instance", "status", "type", + "units", "value", "yin-element") + LIST_STMT_KEYWORDS = ("key", "mandatory", "unique") + + #RFC7950 other keywords + CONSTANTS_KEYWORDS = ("add", "current", "delete", "deprecated", "false", + "invert-match", "max", "min", "not-supported", + "obsolete", "replace", "true", "unbounded", "user") + + #RFC7950 Built-In Types + TYPES = ("binary", "bits", "boolean", "decimal64", "empty", "enumeration", + "identityref", "instance-identifier", "int16", "int32", "int64", + "int8", "leafref", "string", "uint16", "uint32", "uint64", + "uint8", "union") + + suffix_re_pattern = r'(?=[^\w\-:])' + + tokens = { + 'comments': [ + (r'[^*/]', Comment), + (r'/\*', Comment, '#push'), + (r'\*/', Comment, '#pop'), + (r'[*/]', Comment), + ], + "root": [ + (r'\s+', Text.Whitespace), + (r'[{};]+', Token.Punctuation), + (r'(?`_ source code. 
+ + grammar: https://ziglang.org/documentation/master/#Grammar + """ + name = 'Zig' + aliases = ['zig'] + filenames = ['*.zig'] + mimetypes = ['text/zig'] + + type_keywords = ( + words(('bool', 'f16', 'f32', 'f64', 'f128', 'void', 'noreturn', 'type', + 'anyerror', 'promise', 'i0', 'u0', 'isize', 'usize', 'comptime_int', + 'comptime_float', 'c_short', 'c_ushort', 'c_int', 'c_uint', 'c_long', + 'c_ulong', 'c_longlong', 'c_ulonglong', 'c_longdouble', 'c_void' + 'i8', 'u8', 'i16', 'u16', 'i32', 'u32', 'i64', 'u64', 'i128', + 'u128'), suffix=r'\b'), + Keyword.Type) + + storage_keywords = ( + words(('const', 'var', 'extern', 'packed', 'export', 'pub', 'noalias', + 'inline', 'comptime', 'nakedcc', 'stdcallcc', 'volatile', 'allowzero', + 'align', 'linksection', 'threadlocal'), suffix=r'\b'), + Keyword.Reserved) + + structure_keywords = ( + words(('struct', 'enum', 'union', 'error'), suffix=r'\b'), + Keyword) + + statement_keywords = ( + words(('break', 'return', 'continue', 'asm', 'defer', 'errdefer', + 'unreachable', 'try', 'catch', 'async', 'await', 'suspend', + 'resume', 'cancel'), suffix=r'\b'), + Keyword) + + conditional_keywords = ( + words(('if', 'else', 'switch', 'and', 'or', 'orelse'), suffix=r'\b'), + Keyword) + + repeat_keywords = ( + words(('while', 'for'), suffix=r'\b'), + Keyword) + + other_keywords = ( + words(('fn', 'usingnamespace', 'test'), suffix=r'\b'), + Keyword) + + constant_keywords = ( + words(('true', 'false', 'null', 'undefined'), suffix=r'\b'), + Keyword.Constant) + + tokens = { + 'root': [ + (r'\n', Whitespace), + (r'\s+', Whitespace), + (r'//.*?\n', Comment.Single), + + # Keywords + statement_keywords, + storage_keywords, + structure_keywords, + repeat_keywords, + type_keywords, + constant_keywords, + conditional_keywords, + other_keywords, + + # Floats + (r'0x[0-9a-fA-F]+\.[0-9a-fA-F]+([pP][\-+]?[0-9a-fA-F]+)?', Number.Float), + (r'0x[0-9a-fA-F]+\.?[pP][\-+]?[0-9a-fA-F]+', Number.Float), + (r'[0-9]+\.[0-9]+([eE][-+]?[0-9]+)?', Number.Float), + 
(r'[0-9]+\.?[eE][-+]?[0-9]+', Number.Float), + + # Integers + (r'0b[01]+', Number.Bin), + (r'0o[0-7]+', Number.Oct), + (r'0x[0-9a-fA-F]+', Number.Hex), + (r'[0-9]+', Number.Integer), + + # Identifier + (r'@[a-zA-Z_]\w*', Name.Builtin), + (r'[a-zA-Z_]\w*', Name), + + # Characters + (r'\'\\\'\'', String.Escape), + (r'\'\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])\'', + String.Escape), + (r'\'[^\\\']\'', String), + + # Strings + (r'\\\\[^\n]*', String.Heredoc), + (r'c\\\\[^\n]*', String.Heredoc), + (r'c?"', String, 'string'), + + # Operators, Punctuation + (r'[+%=><|^!?/\-*&~:]', Operator), + (r'[{}()\[\],.;]', Punctuation) + ], + 'string': [ + (r'\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])', + String.Escape), + (r'[^\\"\n]+', String), + (r'"', String, '#pop') + ] + } diff --git a/src/typecode/_vendor/pygments/modeline.py b/src/typecode/_vendor/pygments/modeline.py index 31b2e7f..c667027 100644 --- a/src/typecode/_vendor/pygments/modeline.py +++ b/src/typecode/_vendor/pygments/modeline.py @@ -5,7 +5,7 @@ A simple modeline parser (based on pymodeline). - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/plugin.py b/src/typecode/_vendor/pygments/plugin.py index 3d185ef..213eec7 100644 --- a/src/typecode/_vendor/pygments/plugin.py +++ b/src/typecode/_vendor/pygments/plugin.py @@ -32,7 +32,7 @@ yourfilter = yourfilter:YourFilter - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" LEXER_ENTRY_POINT = 'pygments.lexers' diff --git a/src/typecode/_vendor/pygments/regexopt.py b/src/typecode/_vendor/pygments/regexopt.py index 59d77ee..9fb70b1 100644 --- a/src/typecode/_vendor/pygments/regexopt.py +++ b/src/typecode/_vendor/pygments/regexopt.py @@ -6,7 +6,7 @@ An algorithm that generates optimized regexes for matching long lists of literal strings. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/scanner.py b/src/typecode/_vendor/pygments/scanner.py index bcb19ed..b35727d 100644 --- a/src/typecode/_vendor/pygments/scanner.py +++ b/src/typecode/_vendor/pygments/scanner.py @@ -12,7 +12,7 @@ Have a look at the `DelphiLexer` to get an idea of how to use this scanner. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import re @@ -25,7 +25,7 @@ class EndOfText(RuntimeError): """ -class Scanner(object): +class Scanner: """ Simple scanner diff --git a/src/typecode/_vendor/pygments/sphinxext.py b/src/typecode/_vendor/pygments/sphinxext.py index 6bf82cd..060afeb 100644 --- a/src/typecode/_vendor/pygments/sphinxext.py +++ b/src/typecode/_vendor/pygments/sphinxext.py @@ -6,12 +6,10 @@ Sphinx extension to generate automatic documentation of lexers, formatters and filters. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" -from __future__ import print_function - import sys from docutils import nodes diff --git a/src/typecode/_vendor/pygments/style.py b/src/typecode/_vendor/pygments/style.py index f79ca74..b325fd7 100644 --- a/src/typecode/_vendor/pygments/style.py +++ b/src/typecode/_vendor/pygments/style.py @@ -5,12 +5,11 @@ Basic style object. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ from typecode._vendor.pygments.token import Token, STANDARD_TYPES -from typecode._vendor.pygments.util import add_metaclass # Default mapping of ansixxx to RGB colors. _ansimap = { @@ -169,8 +168,7 @@ def __len__(cls): return len(cls._styles) -@add_metaclass(StyleMeta) -class Style(object): +class Style(metaclass=StyleMeta): #: overall background color (``None`` means transparent) background_color = '#ffffff' @@ -178,5 +176,17 @@ class Style(object): #: highlight background color highlight_color = '#ffffcc' + #: line number font color + line_number_color = '#000000' + + #: line number background color + line_number_background_color = '#f0f0f0' + + #: special line number font color + line_number_special_color = '#000000' + + #: special line number background color + line_number_special_background_color = '#ffffc0' + #: Style definitions for individual token types. styles = {} diff --git a/src/typecode/_vendor/pygments/styles/__init__.py b/src/typecode/_vendor/pygments/styles/__init__.py index 8dbb51e..9e906be 100644 --- a/src/typecode/_vendor/pygments/styles/__init__.py +++ b/src/typecode/_vendor/pygments/styles/__init__.py @@ -5,7 +5,7 @@ Contains built-in styles. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -50,6 +50,7 @@ 'stata': 'stata_light::StataLightStyle', 'stata-light': 'stata_light::StataLightStyle', 'stata-dark': 'stata_dark::StataDarkStyle', + 'inkpot': 'inkpot::InkPotStyle', } @@ -78,9 +79,8 @@ def get_style_by_name(name): def get_all_styles(): - """Return an generator for all styles by name, + """Return a generator for all styles by name, both builtin and plugin.""" - for name in STYLE_MAP: - yield name + yield from STYLE_MAP for name, _ in find_plugin_styles(): yield name diff --git a/src/typecode/_vendor/pygments/styles/abap.py b/src/typecode/_vendor/pygments/styles/abap.py index f48c969..1e57fe6 100644 --- a/src/typecode/_vendor/pygments/styles/abap.py +++ b/src/typecode/_vendor/pygments/styles/abap.py @@ -5,7 +5,7 @@ ABAP workbench like style. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/algol.py b/src/typecode/_vendor/pygments/styles/algol.py index c46614e..f9e42fe 100644 --- a/src/typecode/_vendor/pygments/styles/algol.py +++ b/src/typecode/_vendor/pygments/styles/algol.py @@ -26,7 +26,7 @@ [1] `Revised Report on the Algorithmic Language Algol-60 ` - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/algol_nu.py b/src/typecode/_vendor/pygments/styles/algol_nu.py index e41d1bf..cbc7251 100644 --- a/src/typecode/_vendor/pygments/styles/algol_nu.py +++ b/src/typecode/_vendor/pygments/styles/algol_nu.py @@ -26,7 +26,7 @@ [1] `Revised Report on the Algorithmic Language Algol-60 ` - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" diff --git a/src/typecode/_vendor/pygments/styles/arduino.py b/src/typecode/_vendor/pygments/styles/arduino.py index 3d9cd5b..8f38f60 100644 --- a/src/typecode/_vendor/pygments/styles/arduino.py +++ b/src/typecode/_vendor/pygments/styles/arduino.py @@ -5,7 +5,7 @@ Arduino® Syntax highlighting style. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -15,7 +15,7 @@ class ArduinoStyle(Style): - u""" + """ The Arduino® language style. This style is designed to highlight the Arduino source code, so exepect the best results with it. """ diff --git a/src/typecode/_vendor/pygments/styles/autumn.py b/src/typecode/_vendor/pygments/styles/autumn.py index 94a4675..aa5b239 100644 --- a/src/typecode/_vendor/pygments/styles/autumn.py +++ b/src/typecode/_vendor/pygments/styles/autumn.py @@ -5,7 +5,7 @@ A colorful style, inspired by the terminal highlighting style. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/borland.py b/src/typecode/_vendor/pygments/styles/borland.py index e5d77e4..b786fee 100644 --- a/src/typecode/_vendor/pygments/styles/borland.py +++ b/src/typecode/_vendor/pygments/styles/borland.py @@ -5,7 +5,7 @@ Style similar to the style used in the Borland IDEs. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/bw.py b/src/typecode/_vendor/pygments/styles/bw.py index d932281..eb511ae 100644 --- a/src/typecode/_vendor/pygments/styles/bw.py +++ b/src/typecode/_vendor/pygments/styles/bw.py @@ -5,7 +5,7 @@ Simple black/white only style. 
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/colorful.py b/src/typecode/_vendor/pygments/styles/colorful.py index f5fc145..0cd4430 100644 --- a/src/typecode/_vendor/pygments/styles/colorful.py +++ b/src/typecode/_vendor/pygments/styles/colorful.py @@ -5,7 +5,7 @@ A colorful style, inspired by CodeRay. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/default.py b/src/typecode/_vendor/pygments/styles/default.py index d77fc08..5754491 100644 --- a/src/typecode/_vendor/pygments/styles/default.py +++ b/src/typecode/_vendor/pygments/styles/default.py @@ -5,7 +5,7 @@ The default highlighting style. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/emacs.py b/src/typecode/_vendor/pygments/styles/emacs.py index c4eb3a2..58071e5 100644 --- a/src/typecode/_vendor/pygments/styles/emacs.py +++ b/src/typecode/_vendor/pygments/styles/emacs.py @@ -5,7 +5,7 @@ A highlighting style for Pygments, inspired by Emacs. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/friendly.py b/src/typecode/_vendor/pygments/styles/friendly.py index a0604fb..8e5cbe1 100644 --- a/src/typecode/_vendor/pygments/styles/friendly.py +++ b/src/typecode/_vendor/pygments/styles/friendly.py @@ -5,7 +5,7 @@ A modern style based on the VIM pyte theme. 
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/fruity.py b/src/typecode/_vendor/pygments/styles/fruity.py index 2aa81f8..297452f 100644 --- a/src/typecode/_vendor/pygments/styles/fruity.py +++ b/src/typecode/_vendor/pygments/styles/fruity.py @@ -5,7 +5,7 @@ pygments version of my "fruity" vim theme. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/igor.py b/src/typecode/_vendor/pygments/styles/igor.py index 6cc42dd..7899f09 100644 --- a/src/typecode/_vendor/pygments/styles/igor.py +++ b/src/typecode/_vendor/pygments/styles/igor.py @@ -5,7 +5,7 @@ Igor Pro default style. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/inkpot.py b/src/typecode/_vendor/pygments/styles/inkpot.py new file mode 100644 index 0000000..dfa9015 --- /dev/null +++ b/src/typecode/_vendor/pygments/styles/inkpot.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- +""" + pygments.styles.inkpot + ~~~~~~~~~~~~~~~~~~~~~~ + + A highlighting style for Pygments, inspired by the Inkpot theme for VIM. + + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +from typecode._vendor.pygments.style import Style +from typecode._vendor.pygments.token import Text, Other, Keyword, Name, Comment, String, \ + Error, Number, Operator, Generic, Whitespace, Punctuation + + +class InkPotStyle(Style): + background_color = "#1e1e27" + default_style = "" + styles = { + Text: "#cfbfad", + Other: "#cfbfad", + Whitespace: "#434357", + Comment: "#cd8b00", + Comment.Preproc: "#409090", + Comment.PreprocFile: "bg:#404040 #ffcd8b", + Comment.Special: "#808bed", + + Keyword: "#808bed", + Keyword.Pseudo: "nobold", + Keyword.Type: "#ff8bff", + + Operator: "#666666", + + Punctuation: "#cfbfad", + + Name: "#cfbfad", + Name.Attribute: "#cfbfad", + Name.Builtin.Pseudo: '#ffff00', + Name.Builtin: "#808bed", + Name.Class: "#ff8bff", + Name.Constant: "#409090", + Name.Decorator: "#409090", + Name.Exception: "#ff0000", + Name.Function: "#c080d0", + Name.Label: "#808bed", + Name.Namespace: "#ff0000", + Name.Variable: "#cfbfad", + + String: "bg:#404040 #ffcd8b", + String.Doc: "#808bed", + + Number: "#f0ad6d", + + Generic.Heading: "bold #000080", + Generic.Subheading: "bold #800080", + Generic.Deleted: "#A00000", + Generic.Inserted: "#00A000", + Generic.Error: "#FF0000", + Generic.Emph: "italic", + Generic.Strong: "bold", + Generic.Prompt: "bold #000080", + Generic.Output: "#888", + Generic.Traceback: "#04D", + + Error: "bg:#6e2e2e #ffffff" + } diff --git a/src/typecode/_vendor/pygments/styles/lovelace.py b/src/typecode/_vendor/pygments/styles/lovelace.py index 74b79b4..c45cec1 100644 --- a/src/typecode/_vendor/pygments/styles/lovelace.py +++ b/src/typecode/_vendor/pygments/styles/lovelace.py @@ -9,7 +9,7 @@ A desaturated, somewhat subdued style created for the Lovelace interactive learning environment. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" diff --git a/src/typecode/_vendor/pygments/styles/manni.py b/src/typecode/_vendor/pygments/styles/manni.py index 47dfaac..b63f4b1 100644 --- a/src/typecode/_vendor/pygments/styles/manni.py +++ b/src/typecode/_vendor/pygments/styles/manni.py @@ -8,7 +8,7 @@ This is a port of the style used in the `php port`_ of pygments by Manni. The style is called 'default' there. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/monokai.py b/src/typecode/_vendor/pygments/styles/monokai.py index 118e34b..46c8cdd 100644 --- a/src/typecode/_vendor/pygments/styles/monokai.py +++ b/src/typecode/_vendor/pygments/styles/monokai.py @@ -7,7 +7,7 @@ http://www.monokai.nl/blog/2006/07/15/textmate-color-theme/ - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -92,14 +92,15 @@ class MonokaiStyle(Style): String.Single: "", # class: 's1' String.Symbol: "", # class: 'ss' + Generic: "", # class: 'g' Generic.Deleted: "#f92672", # class: 'gd', Generic.Emph: "italic", # class: 'ge' Generic.Error: "", # class: 'gr' Generic.Heading: "", # class: 'gh' Generic.Inserted: "#a6e22e", # class: 'gi' - Generic.Output: "", # class: 'go' - Generic.Prompt: "", # class: 'gp' + Generic.Output: "#66d9ef", # class: 'go' + Generic.Prompt: "bold #f92672", # class: 'gp' Generic.Strong: "bold", # class: 'gs' Generic.Subheading: "#75715e", # class: 'gu' Generic.Traceback: "", # class: 'gt' diff --git a/src/typecode/_vendor/pygments/styles/murphy.py b/src/typecode/_vendor/pygments/styles/murphy.py index 9a71482..73ec9be 100644 --- a/src/typecode/_vendor/pygments/styles/murphy.py +++ b/src/typecode/_vendor/pygments/styles/murphy.py @@ -5,7 +5,7 @@ Murphy's style from CodeRay. 
- :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/native.py b/src/typecode/_vendor/pygments/styles/native.py index bc64298..afdbb61 100644 --- a/src/typecode/_vendor/pygments/styles/native.py +++ b/src/typecode/_vendor/pygments/styles/native.py @@ -5,7 +5,7 @@ pygments version of my "native" vim theme. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/paraiso_dark.py b/src/typecode/_vendor/pygments/styles/paraiso_dark.py index 5d9e85b..221e603 100644 --- a/src/typecode/_vendor/pygments/styles/paraiso_dark.py +++ b/src/typecode/_vendor/pygments/styles/paraiso_dark.py @@ -9,7 +9,7 @@ Created with Base16 Builder by Chris Kempson (https://github.com/chriskempson/base16-builder). - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/paraiso_light.py b/src/typecode/_vendor/pygments/styles/paraiso_light.py index 980f42a..6ed8759 100644 --- a/src/typecode/_vendor/pygments/styles/paraiso_light.py +++ b/src/typecode/_vendor/pygments/styles/paraiso_light.py @@ -9,7 +9,7 @@ Created with Base16 Builder by Chris Kempson (https://github.com/chriskempson/base16-builder). - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" diff --git a/src/typecode/_vendor/pygments/styles/pastie.py b/src/typecode/_vendor/pygments/styles/pastie.py index 24a405c..96c47e9 100644 --- a/src/typecode/_vendor/pygments/styles/pastie.py +++ b/src/typecode/_vendor/pygments/styles/pastie.py @@ -7,7 +7,7 @@ .. _pastie: http://pastie.caboo.se/ - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/perldoc.py b/src/typecode/_vendor/pygments/styles/perldoc.py index b33f194..dc68bd1 100644 --- a/src/typecode/_vendor/pygments/styles/perldoc.py +++ b/src/typecode/_vendor/pygments/styles/perldoc.py @@ -7,7 +7,7 @@ .. _perldoc: http://perldoc.perl.org/ - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/rainbow_dash.py b/src/typecode/_vendor/pygments/styles/rainbow_dash.py index cac9593..35e746f 100644 --- a/src/typecode/_vendor/pygments/styles/rainbow_dash.py +++ b/src/typecode/_vendor/pygments/styles/rainbow_dash.py @@ -7,7 +7,7 @@ .. _theme: http://sanssecours.github.io/Rainbow-Dash.tmbundle - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/rrt.py b/src/typecode/_vendor/pygments/styles/rrt.py index adcd2c4..e687350 100644 --- a/src/typecode/_vendor/pygments/styles/rrt.py +++ b/src/typecode/_vendor/pygments/styles/rrt.py @@ -5,7 +5,7 @@ pygments "rrt" theme, based on Zap and Emacs defaults. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" diff --git a/src/typecode/_vendor/pygments/styles/sas.py b/src/typecode/_vendor/pygments/styles/sas.py index a4e3d04..e48d079 100644 --- a/src/typecode/_vendor/pygments/styles/sas.py +++ b/src/typecode/_vendor/pygments/styles/sas.py @@ -7,7 +7,7 @@ meant to be a complete style. It's merely meant to mimic SAS' program editor syntax highlighting. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/solarized.py b/src/typecode/_vendor/pygments/styles/solarized.py index fa247c8..8a65237 100644 --- a/src/typecode/_vendor/pygments/styles/solarized.py +++ b/src/typecode/_vendor/pygments/styles/solarized.py @@ -8,7 +8,7 @@ A Pygments style for the Solarized themes (licensed under MIT). See: https://github.com/altercation/solarized - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -118,6 +118,8 @@ class SolarizedDarkStyle(Style): styles = make_style(DARK_COLORS) background_color = DARK_COLORS['base03'] highlight_color = DARK_COLORS['base02'] + line_number_color = DARK_COLORS['base01'] + line_number_background_color = DARK_COLORS['base02'] class SolarizedLightStyle(SolarizedDarkStyle): @@ -128,3 +130,5 @@ class SolarizedLightStyle(SolarizedDarkStyle): styles = make_style(LIGHT_COLORS) background_color = LIGHT_COLORS['base03'] highlight_color = LIGHT_COLORS['base02'] + line_number_color = LIGHT_COLORS['base01'] + line_number_background_color = LIGHT_COLORS['base02'] diff --git a/src/typecode/_vendor/pygments/styles/stata_dark.py b/src/typecode/_vendor/pygments/styles/stata_dark.py index bb5c6a7..01984c1 100644 --- a/src/typecode/_vendor/pygments/styles/stata_dark.py +++ b/src/typecode/_vendor/pygments/styles/stata_dark.py @@ -7,7 +7,7 @@ meant to be a complete style, just for Stata's file formats. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/stata_light.py b/src/typecode/_vendor/pygments/styles/stata_light.py index 87f9787..21bceaa 100644 --- a/src/typecode/_vendor/pygments/styles/stata_light.py +++ b/src/typecode/_vendor/pygments/styles/stata_light.py @@ -6,7 +6,7 @@ Light Style inspired by Stata's do-file editor. Note this is not meant to be a complete style, just for Stata's file formats. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" diff --git a/src/typecode/_vendor/pygments/styles/tango.py b/src/typecode/_vendor/pygments/styles/tango.py index 32c3f4d..a6e7e9e 100644 --- a/src/typecode/_vendor/pygments/styles/tango.py +++ b/src/typecode/_vendor/pygments/styles/tango.py @@ -33,7 +33,7 @@ have been chosen to have the same style. Similarly, keywords (Keyword.*), and Operator.Word (and, or, in) have been assigned the same style. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/trac.py b/src/typecode/_vendor/pygments/styles/trac.py index 4e849b9..07f1b40 100644 --- a/src/typecode/_vendor/pygments/styles/trac.py +++ b/src/typecode/_vendor/pygments/styles/trac.py @@ -5,7 +5,7 @@ Port of the default trac highlighter design. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/vim.py b/src/typecode/_vendor/pygments/styles/vim.py index c52eab7..c3d0c9f 100644 --- a/src/typecode/_vendor/pygments/styles/vim.py +++ b/src/typecode/_vendor/pygments/styles/vim.py @@ -5,7 +5,7 @@ A highlighting style for Pygments, inspired by vim. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/vs.py b/src/typecode/_vendor/pygments/styles/vs.py index 79fc917..58abb0b 100644 --- a/src/typecode/_vendor/pygments/styles/vs.py +++ b/src/typecode/_vendor/pygments/styles/vs.py @@ -5,7 +5,7 @@ Simple style with MS Visual Studio colors. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. 
:license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/styles/xcode.py b/src/typecode/_vendor/pygments/styles/xcode.py index 1ca957f..8c80a0b 100644 --- a/src/typecode/_vendor/pygments/styles/xcode.py +++ b/src/typecode/_vendor/pygments/styles/xcode.py @@ -5,7 +5,7 @@ Style similar to the `Xcode` default theme. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/token.py b/src/typecode/_vendor/pygments/token.py index 5c30eb4..f7e107d 100644 --- a/src/typecode/_vendor/pygments/token.py +++ b/src/typecode/_vendor/pygments/token.py @@ -5,7 +5,7 @@ Basic token types and the standard tokens. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/typecode/_vendor/pygments/unistring.py b/src/typecode/_vendor/pygments/unistring.py index dd011cf..908beca 100644 --- a/src/typecode/_vendor/pygments/unistring.py +++ b/src/typecode/_vendor/pygments/unistring.py @@ -8,132 +8,89 @@ Inspired by chartypes_create.py from the MoinMoin project. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" import sys -Cc = u'\x00-\x1f\x7f-\x9f' +Cc = '\x00-\x1f\x7f-\x9f' -Cf = u'\xad\u0600-\u0605\u061c\u06dd\u070f\u08e2\u180e\u200b-\u200f\u202a-\u202e\u2060-\u2064\u2066-\u206f\ufeff\ufff9-\ufffb' +Cf = '\xad\u0600-\u0605\u061c\u06dd\u070f\u08e2\u180e\u200b-\u200f\u202a-\u202e\u2060-\u2064\u2066-\u206f\ufeff\ufff9-\ufffb\U000110bd\U000110cd\U0001bca0-\U0001bca3\U0001d173-\U0001d17a\U000e0001\U000e0020-\U000e007f' -Cn = u'\u0378-\u0379\u0380-\u0383\u038b\u038d\u03a2\u0530\u0557-\u0558\u058b-\u058c\u0590\u05c8-\u05cf\u05eb-\u05ee\u05f5-\u05ff\u061d\u070e\u074b-\u074c\u07b2-\u07bf\u07fb-\u07fc\u082e-\u082f\u083f\u085c-\u085d\u085f\u086b-\u089f\u08b5\u08be-\u08d2\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09c5-\u09c6\u09c9-\u09ca\u09cf-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09ff-\u0a00\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a50\u0a52-\u0a58\u0a5d\u0a5f-\u0a65\u0a77-\u0a80\u0a84\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0acf\u0ad1-\u0adf\u0ae4-\u0ae5\u0af2-\u0af8\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34\u0b3a-\u0b3b\u0b45-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b64-\u0b65\u0b78-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bcf\u0bd1-\u0bd6\u0bd8-\u0be5\u0bfb-\u0bff\u0c0d\u0c11\u0c29\u0c3a-\u0c3c\u0c45\u0c49\u0c4e-\u0c54\u0c57\u0c5b-\u0c5f\u0c64-\u0c65\u0c70-\u0c77\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbb\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce4-\u0ce5\u0cf0\u0cf3-\u0cff\u0d04\u0d0d\u0d11\u0d45\u0d49\u0d50-\u0d53\u0d64-\u0d65\u0d80-\u0d81\u0d84\u0d97-\u0d99\u0db2\u0dbc\u0dbe-\u0dbf\u0dc7-\u0dc9\u0dcb-\u0dce\u0dd5\u0dd7\u0de0-\u0de5\u0df0-\u0df1\u0df5-\u0e00\u0e3b-\u0e3e\u0e5c-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0ed
b\u0ee0-\u0eff\u0f48\u0f6d-\u0f70\u0f98\u0fbd\u0fcd\u0fdb-\u0fff\u10c6\u10c8-\u10cc\u10ce-\u10cf\u1249\u124e-\u124f\u1257\u1259\u125e-\u125f\u1289\u128e-\u128f\u12b1\u12b6-\u12b7\u12bf\u12c1\u12c6-\u12c7\u12d7\u1311\u1316-\u1317\u135b-\u135c\u137d-\u137f\u139a-\u139f\u13f6-\u13f7\u13fe-\u13ff\u169d-\u169f\u16f9-\u16ff\u170d\u1715-\u171f\u1737-\u173f\u1754-\u175f\u176d\u1771\u1774-\u177f\u17de-\u17df\u17ea-\u17ef\u17fa-\u17ff\u180f\u181a-\u181f\u1879-\u187f\u18ab-\u18af\u18f6-\u18ff\u191f\u192c-\u192f\u193c-\u193f\u1941-\u1943\u196e-\u196f\u1975-\u197f\u19ac-\u19af\u19ca-\u19cf\u19db-\u19dd\u1a1c-\u1a1d\u1a5f\u1a7d-\u1a7e\u1a8a-\u1a8f\u1a9a-\u1a9f\u1aae-\u1aaf\u1abf-\u1aff\u1b4c-\u1b4f\u1b7d-\u1b7f\u1bf4-\u1bfb\u1c38-\u1c3a\u1c4a-\u1c4c\u1c89-\u1c8f\u1cbb-\u1cbc\u1cc8-\u1ccf\u1cfa-\u1cff\u1dfa\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fc5\u1fd4-\u1fd5\u1fdc\u1ff0-\u1ff1\u1ff5\u1fff\u2065\u2072-\u2073\u208f\u209d-\u209f\u20c0-\u20cf\u20f1-\u20ff\u218c-\u218f\u2427-\u243f\u244b-\u245f\u2b74-\u2b75\u2b96-\u2b97\u2bc9\u2bff\u2c2f\u2c5f\u2cf4-\u2cf8\u2d26\u2d28-\u2d2c\u2d2e-\u2d2f\u2d68-\u2d6e\u2d71-\u2d7e\u2d97-\u2d9f\u2da7\u2daf\u2db7\u2dbf\u2dc7\u2dcf\u2dd7\u2ddf\u2e4f-\u2e7f\u2e9a\u2ef4-\u2eff\u2fd6-\u2fef\u2ffc-\u2fff\u3040\u3097-\u3098\u3100-\u3104\u3130\u318f\u31bb-\u31bf\u31e4-\u31ef\u321f\u32ff\u4db6-\u4dbf\u9ff0-\u9fff\ua48d-\ua48f\ua4c7-\ua4cf\ua62c-\ua63f\ua6f8-\ua6ff\ua7ba-\ua7f6\ua82c-\ua82f\ua83a-\ua83f\ua878-\ua87f\ua8c6-\ua8cd\ua8da-\ua8df\ua954-\ua95e\ua97d-\ua97f\ua9ce\ua9da-\ua9dd\ua9ff\uaa37-\uaa3f\uaa4e-\uaa4f\uaa5a-\uaa5b\uaac3-\uaada\uaaf7-\uab00\uab07-\uab08\uab0f-\uab10\uab17-\uab1f\uab27\uab2f\uab66-\uab6f\uabee-\uabef\uabfa-\uabff\ud7a4-\ud7af\ud7c7-\ud7ca\ud7fc-\ud7ff\ufa6e-\ufa6f\ufada-\ufaff\ufb07-\ufb12\ufb18-\ufb1c\ufb37\ufb3d\ufb3f\ufb42\ufb45\ufbc2-\ufbd2\ufd40-\ufd4f\ufd90-\ufd91\ufdc8-\ufdef\ufdfe-\ufdff\ufe1a-\ufe1f\ufe53\ufe67\ufe6c-\ufe6f\ufe75\ufefd-\ufefe\uff00\uffbf-\uffc1\uf
fc8-\uffc9\uffd0-\uffd1\uffd8-\uffd9\uffdd-\uffdf\uffe7\uffef-\ufff8\ufffe-\uffff' +Cn = '\u0378-\u0379\u0380-\u0383\u038b\u038d\u03a2\u0530\u0557-\u0558\u058b-\u058c\u0590\u05c8-\u05cf\u05eb-\u05ee\u05f5-\u05ff\u061d\u070e\u074b-\u074c\u07b2-\u07bf\u07fb-\u07fc\u082e-\u082f\u083f\u085c-\u085d\u085f\u086b-\u089f\u08b5\u08be-\u08d2\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09c5-\u09c6\u09c9-\u09ca\u09cf-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09ff-\u0a00\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a50\u0a52-\u0a58\u0a5d\u0a5f-\u0a65\u0a77-\u0a80\u0a84\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0acf\u0ad1-\u0adf\u0ae4-\u0ae5\u0af2-\u0af8\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34\u0b3a-\u0b3b\u0b45-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b64-\u0b65\u0b78-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bcf\u0bd1-\u0bd6\u0bd8-\u0be5\u0bfb-\u0bff\u0c0d\u0c11\u0c29\u0c3a-\u0c3c\u0c45\u0c49\u0c4e-\u0c54\u0c57\u0c5b-\u0c5f\u0c64-\u0c65\u0c70-\u0c77\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbb\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce4-\u0ce5\u0cf0\u0cf3-\u0cff\u0d04\u0d0d\u0d11\u0d45\u0d49\u0d50-\u0d53\u0d64-\u0d65\u0d80-\u0d81\u0d84\u0d97-\u0d99\u0db2\u0dbc\u0dbe-\u0dbf\u0dc7-\u0dc9\u0dcb-\u0dce\u0dd5\u0dd7\u0de0-\u0de5\u0df0-\u0df1\u0df5-\u0e00\u0e3b-\u0e3e\u0e5c-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0edb\u0ee0-\u0eff\u0f48\u0f6d-\u0f70\u0f98\u0fbd\u0fcd\u0fdb-\u0fff\u10c6\u10c8-\u10cc\u10ce-\u10cf\u1249\u124e-\u124f\u1257\u1259\u125e-\u125f\u1289\u128e-\u128f\u12b1\u12b6-\u12b7\u12bf\u12c1\u12c6-\u12c7\u12d7\u1311\u1316-\u1317\u135b-\u135c\u137d-\u137f\u139a-\u139f\u13f6-\u13f7\u13fe-\u13ff\u169d-\u169f\u16f9-\u16ff\u170d\u1715-\u17
1f\u1737-\u173f\u1754-\u175f\u176d\u1771\u1774-\u177f\u17de-\u17df\u17ea-\u17ef\u17fa-\u17ff\u180f\u181a-\u181f\u1879-\u187f\u18ab-\u18af\u18f6-\u18ff\u191f\u192c-\u192f\u193c-\u193f\u1941-\u1943\u196e-\u196f\u1975-\u197f\u19ac-\u19af\u19ca-\u19cf\u19db-\u19dd\u1a1c-\u1a1d\u1a5f\u1a7d-\u1a7e\u1a8a-\u1a8f\u1a9a-\u1a9f\u1aae-\u1aaf\u1abf-\u1aff\u1b4c-\u1b4f\u1b7d-\u1b7f\u1bf4-\u1bfb\u1c38-\u1c3a\u1c4a-\u1c4c\u1c89-\u1c8f\u1cbb-\u1cbc\u1cc8-\u1ccf\u1cfa-\u1cff\u1dfa\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fc5\u1fd4-\u1fd5\u1fdc\u1ff0-\u1ff1\u1ff5\u1fff\u2065\u2072-\u2073\u208f\u209d-\u209f\u20c0-\u20cf\u20f1-\u20ff\u218c-\u218f\u2427-\u243f\u244b-\u245f\u2b74-\u2b75\u2b96-\u2b97\u2bc9\u2bff\u2c2f\u2c5f\u2cf4-\u2cf8\u2d26\u2d28-\u2d2c\u2d2e-\u2d2f\u2d68-\u2d6e\u2d71-\u2d7e\u2d97-\u2d9f\u2da7\u2daf\u2db7\u2dbf\u2dc7\u2dcf\u2dd7\u2ddf\u2e4f-\u2e7f\u2e9a\u2ef4-\u2eff\u2fd6-\u2fef\u2ffc-\u2fff\u3040\u3097-\u3098\u3100-\u3104\u3130\u318f\u31bb-\u31bf\u31e4-\u31ef\u321f\u32ff\u4db6-\u4dbf\u9ff0-\u9fff\ua48d-\ua48f\ua4c7-\ua4cf\ua62c-\ua63f\ua6f8-\ua6ff\ua7ba-\ua7f6\ua82c-\ua82f\ua83a-\ua83f\ua878-\ua87f\ua8c6-\ua8cd\ua8da-\ua8df\ua954-\ua95e\ua97d-\ua97f\ua9ce\ua9da-\ua9dd\ua9ff\uaa37-\uaa3f\uaa4e-\uaa4f\uaa5a-\uaa5b\uaac3-\uaada\uaaf7-\uab00\uab07-\uab08\uab0f-\uab10\uab17-\uab1f\uab27\uab2f\uab66-\uab6f\uabee-\uabef\uabfa-\uabff\ud7a4-\ud7af\ud7c7-\ud7ca\ud7fc-\ud7ff\ufa6e-\ufa6f\ufada-\ufaff\ufb07-\ufb12\ufb18-\ufb1c\ufb37\ufb3d\ufb3f\ufb42\ufb45\ufbc2-\ufbd2\ufd40-\ufd4f\ufd90-\ufd91\ufdc8-\ufdef\ufdfe-\ufdff\ufe1a-\ufe1f\ufe53\ufe67\ufe6c-\ufe6f\ufe75\ufefd-\ufefe\uff00\uffbf-\uffc1\uffc8-\uffc9\uffd0-\uffd1\uffd8-\uffd9\uffdd-\uffdf\uffe7\uffef-\ufff8\ufffe-\uffff\U0001000c\U00010027\U0001003b\U0001003e\U0001004e-\U0001004f\U0001005e-\U0001007f\U000100fb-\U000100ff\U00010103-\U00010106\U00010134-\U00010136\U0001018f\U0001019c-\U0001019f\U000101a1-\U000101cf\U000101fe-\U0001027f\U0001029d-\U0001029f\U000102d1-\U000
102df\U000102fc-\U000102ff\U00010324-\U0001032c\U0001034b-\U0001034f\U0001037b-\U0001037f\U0001039e\U000103c4-\U000103c7\U000103d6-\U000103ff\U0001049e-\U0001049f\U000104aa-\U000104af\U000104d4-\U000104d7\U000104fc-\U000104ff\U00010528-\U0001052f\U00010564-\U0001056e\U00010570-\U000105ff\U00010737-\U0001073f\U00010756-\U0001075f\U00010768-\U000107ff\U00010806-\U00010807\U00010809\U00010836\U00010839-\U0001083b\U0001083d-\U0001083e\U00010856\U0001089f-\U000108a6\U000108b0-\U000108df\U000108f3\U000108f6-\U000108fa\U0001091c-\U0001091e\U0001093a-\U0001093e\U00010940-\U0001097f\U000109b8-\U000109bb\U000109d0-\U000109d1\U00010a04\U00010a07-\U00010a0b\U00010a14\U00010a18\U00010a36-\U00010a37\U00010a3b-\U00010a3e\U00010a49-\U00010a4f\U00010a59-\U00010a5f\U00010aa0-\U00010abf\U00010ae7-\U00010aea\U00010af7-\U00010aff\U00010b36-\U00010b38\U00010b56-\U00010b57\U00010b73-\U00010b77\U00010b92-\U00010b98\U00010b9d-\U00010ba8\U00010bb0-\U00010bff\U00010c49-\U00010c7f\U00010cb3-\U00010cbf\U00010cf3-\U00010cf9\U00010d28-\U00010d2f\U00010d3a-\U00010e5f\U00010e7f-\U00010eff\U00010f28-\U00010f2f\U00010f5a-\U00010fff\U0001104e-\U00011051\U00011070-\U0001107e\U000110c2-\U000110cc\U000110ce-\U000110cf\U000110e9-\U000110ef\U000110fa-\U000110ff\U00011135\U00011147-\U0001114f\U00011177-\U0001117f\U000111ce-\U000111cf\U000111e0\U000111f5-\U000111ff\U00011212\U0001123f-\U0001127f\U00011287\U00011289\U0001128e\U0001129e\U000112aa-\U000112af\U000112eb-\U000112ef\U000112fa-\U000112ff\U00011304\U0001130d-\U0001130e\U00011311-\U00011312\U00011329\U00011331\U00011334\U0001133a\U00011345-\U00011346\U00011349-\U0001134a\U0001134e-\U0001134f\U00011351-\U00011356\U00011358-\U0001135c\U00011364-\U00011365\U0001136d-\U0001136f\U00011375-\U000113ff\U0001145a\U0001145c\U0001145f-\U0001147f\U000114c8-\U000114cf\U000114da-\U0001157f\U000115b6-\U000115b7\U000115de-\U000115ff\U00011645-\U0001164f\U0001165a-\U0001165f\U0001166d-\U0001167f\U000116b8-\U000116bf\U000116ca-\U000116ff\U0001171b-\U0001171c\U0001172c-
\U0001172f\U00011740-\U000117ff\U0001183c-\U0001189f\U000118f3-\U000118fe\U00011900-\U000119ff\U00011a48-\U00011a4f\U00011a84-\U00011a85\U00011aa3-\U00011abf\U00011af9-\U00011bff\U00011c09\U00011c37\U00011c46-\U00011c4f\U00011c6d-\U00011c6f\U00011c90-\U00011c91\U00011ca8\U00011cb7-\U00011cff\U00011d07\U00011d0a\U00011d37-\U00011d39\U00011d3b\U00011d3e\U00011d48-\U00011d4f\U00011d5a-\U00011d5f\U00011d66\U00011d69\U00011d8f\U00011d92\U00011d99-\U00011d9f\U00011daa-\U00011edf\U00011ef9-\U00011fff\U0001239a-\U000123ff\U0001246f\U00012475-\U0001247f\U00012544-\U00012fff\U0001342f-\U000143ff\U00014647-\U000167ff\U00016a39-\U00016a3f\U00016a5f\U00016a6a-\U00016a6d\U00016a70-\U00016acf\U00016aee-\U00016aef\U00016af6-\U00016aff\U00016b46-\U00016b4f\U00016b5a\U00016b62\U00016b78-\U00016b7c\U00016b90-\U00016e3f\U00016e9b-\U00016eff\U00016f45-\U00016f4f\U00016f7f-\U00016f8e\U00016fa0-\U00016fdf\U00016fe2-\U00016fff\U000187f2-\U000187ff\U00018af3-\U0001afff\U0001b11f-\U0001b16f\U0001b2fc-\U0001bbff\U0001bc6b-\U0001bc6f\U0001bc7d-\U0001bc7f\U0001bc89-\U0001bc8f\U0001bc9a-\U0001bc9b\U0001bca4-\U0001cfff\U0001d0f6-\U0001d0ff\U0001d127-\U0001d128\U0001d1e9-\U0001d1ff\U0001d246-\U0001d2df\U0001d2f4-\U0001d2ff\U0001d357-\U0001d35f\U0001d379-\U0001d3ff\U0001d455\U0001d49d\U0001d4a0-\U0001d4a1\U0001d4a3-\U0001d4a4\U0001d4a7-\U0001d4a8\U0001d4ad\U0001d4ba\U0001d4bc\U0001d4c4\U0001d506\U0001d50b-\U0001d50c\U0001d515\U0001d51d\U0001d53a\U0001d53f\U0001d545\U0001d547-\U0001d549\U0001d551\U0001d6a6-\U0001d6a7\U0001d7cc-\U0001d7cd\U0001da8c-\U0001da9a\U0001daa0\U0001dab0-\U0001dfff\U0001e007\U0001e019-\U0001e01a\U0001e022\U0001e025\U0001e02b-\U0001e7ff\U0001e8c5-\U0001e8c6\U0001e8d7-\U0001e8ff\U0001e94b-\U0001e94f\U0001e95a-\U0001e95d\U0001e960-\U0001ec70\U0001ecb5-\U0001edff\U0001ee04\U0001ee20\U0001ee23\U0001ee25-\U0001ee26\U0001ee28\U0001ee33\U0001ee38\U0001ee3a\U0001ee3c-\U0001ee41\U0001ee43-\U0001ee46\U0001ee48\U0001ee4a\U0001ee4c\U0001ee50\U0001ee53\U0001ee55-\U0001ee56\U0001ee58\U0001e
e5a\U0001ee5c\U0001ee5e\U0001ee60\U0001ee63\U0001ee65-\U0001ee66\U0001ee6b\U0001ee73\U0001ee78\U0001ee7d\U0001ee7f\U0001ee8a\U0001ee9c-\U0001eea0\U0001eea4\U0001eeaa\U0001eebc-\U0001eeef\U0001eef2-\U0001efff\U0001f02c-\U0001f02f\U0001f094-\U0001f09f\U0001f0af-\U0001f0b0\U0001f0c0\U0001f0d0\U0001f0f6-\U0001f0ff\U0001f10d-\U0001f10f\U0001f16c-\U0001f16f\U0001f1ad-\U0001f1e5\U0001f203-\U0001f20f\U0001f23c-\U0001f23f\U0001f249-\U0001f24f\U0001f252-\U0001f25f\U0001f266-\U0001f2ff\U0001f6d5-\U0001f6df\U0001f6ed-\U0001f6ef\U0001f6fa-\U0001f6ff\U0001f774-\U0001f77f\U0001f7d9-\U0001f7ff\U0001f80c-\U0001f80f\U0001f848-\U0001f84f\U0001f85a-\U0001f85f\U0001f888-\U0001f88f\U0001f8ae-\U0001f8ff\U0001f90c-\U0001f90f\U0001f93f\U0001f971-\U0001f972\U0001f977-\U0001f979\U0001f97b\U0001f9a3-\U0001f9af\U0001f9ba-\U0001f9bf\U0001f9c3-\U0001f9cf\U0001fa00-\U0001fa5f\U0001fa6e-\U0001ffff\U0002a6d7-\U0002a6ff\U0002b735-\U0002b73f\U0002b81e-\U0002b81f\U0002cea2-\U0002ceaf\U0002ebe1-\U0002f7ff\U0002fa1e-\U000e0000\U000e0002-\U000e001f\U000e0080-\U000e00ff\U000e01f0-\U000effff\U000ffffe-\U000fffff\U0010fffe-\U0010ffff' -Co = u'\ue000-\uf8ff' +Co = '\ue000-\uf8ff\U000f0000-\U000ffffd\U00100000-\U0010fffd' -try: - Cs = eval(r"u'\ud800-\udbff\\\udc00\udc01-\udfff'") -except UnicodeDecodeError: - Cs = '' # Jython can't handle isolated surrogates +Cs = '\ud800-\udbff\\\udc00\udc01-\udfff' -Ll = 
u'a-z\xb5\xdf-\xf6\xf8-\xff\u0101\u0103\u0105\u0107\u0109\u010b\u010d\u010f\u0111\u0113\u0115\u0117\u0119\u011b\u011d\u011f\u0121\u0123\u0125\u0127\u0129\u012b\u012d\u012f\u0131\u0133\u0135\u0137-\u0138\u013a\u013c\u013e\u0140\u0142\u0144\u0146\u0148-\u0149\u014b\u014d\u014f\u0151\u0153\u0155\u0157\u0159\u015b\u015d\u015f\u0161\u0163\u0165\u0167\u0169\u016b\u016d\u016f\u0171\u0173\u0175\u0177\u017a\u017c\u017e-\u0180\u0183\u0185\u0188\u018c-\u018d\u0192\u0195\u0199-\u019b\u019e\u01a1\u01a3\u01a5\u01a8\u01aa-\u01ab\u01ad\u01b0\u01b4\u01b6\u01b9-\u01ba\u01bd-\u01bf\u01c6\u01c9\u01cc\u01ce\u01d0\u01d2\u01d4\u01d6\u01d8\u01da\u01dc-\u01dd\u01df\u01e1\u01e3\u01e5\u01e7\u01e9\u01eb\u01ed\u01ef-\u01f0\u01f3\u01f5\u01f9\u01fb\u01fd\u01ff\u0201\u0203\u0205\u0207\u0209\u020b\u020d\u020f\u0211\u0213\u0215\u0217\u0219\u021b\u021d\u021f\u0221\u0223\u0225\u0227\u0229\u022b\u022d\u022f\u0231\u0233-\u0239\u023c\u023f-\u0240\u0242\u0247\u0249\u024b\u024d\u024f-\u0293\u0295-\u02af\u0371\u0373\u0377\u037b-\u037d\u0390\u03ac-\u03ce\u03d0-\u03d1\u03d5-\u03d7\u03d9\u03db\u03dd\u03df\u03e1\u03e3\u03e5\u03e7\u03e9\u03eb\u03ed\u03ef-\u03f3\u03f5\u03f8\u03fb-\u03fc\u0430-\u045f\u0461\u0463\u0465\u0467\u0469\u046b\u046d\u046f\u0471\u0473\u0475\u0477\u0479\u047b\u047d\u047f\u0481\u048b\u048d\u048f\u0491\u0493\u0495\u0497\u0499\u049b\u049d\u049f\u04a1\u04a3\u04a5\u04a7\u04a9\u04ab\u04ad\u04af\u04b1\u04b3\u04b5\u04b7\u04b9\u04bb\u04bd\u04bf\u04c2\u04c4\u04c6\u04c8\u04ca\u04cc\u04ce-\u04cf\u04d1\u04d3\u04d5\u04d7\u04d9\u04db\u04dd\u04df\u04e1\u04e3\u04e5\u04e7\u04e9\u04eb\u04ed\u04ef\u04f1\u04f3\u04f5\u04f7\u04f9\u04fb\u04fd\u04ff\u0501\u0503\u0505\u0507\u0509\u050b\u050d\u050f\u0511\u0513\u0515\u0517\u0519\u051b\u051d\u051f\u0521\u0523\u0525\u0527\u0529\u052b\u052d\u052f\u0560-\u0588\u10d0-\u10fa\u10fd-\u10ff\u13f8-\u13fd\u1c80-\u1c88\u1d00-\u1d2b\u1d6b-\u1d77\u1d79-\u1d9a\u1e01\u1e03\u1e05\u1e07\u1e09\u1e0b\u1e0d\u1e0f\u1e11\u1e13\u1e15\u1e17\u1e19\u1e1b\u1e1d\u1e1f\u1e21\u1e23\u1e25\u1e27\u1e2
9\u1e2b\u1e2d\u1e2f\u1e31\u1e33\u1e35\u1e37\u1e39\u1e3b\u1e3d\u1e3f\u1e41\u1e43\u1e45\u1e47\u1e49\u1e4b\u1e4d\u1e4f\u1e51\u1e53\u1e55\u1e57\u1e59\u1e5b\u1e5d\u1e5f\u1e61\u1e63\u1e65\u1e67\u1e69\u1e6b\u1e6d\u1e6f\u1e71\u1e73\u1e75\u1e77\u1e79\u1e7b\u1e7d\u1e7f\u1e81\u1e83\u1e85\u1e87\u1e89\u1e8b\u1e8d\u1e8f\u1e91\u1e93\u1e95-\u1e9d\u1e9f\u1ea1\u1ea3\u1ea5\u1ea7\u1ea9\u1eab\u1ead\u1eaf\u1eb1\u1eb3\u1eb5\u1eb7\u1eb9\u1ebb\u1ebd\u1ebf\u1ec1\u1ec3\u1ec5\u1ec7\u1ec9\u1ecb\u1ecd\u1ecf\u1ed1\u1ed3\u1ed5\u1ed7\u1ed9\u1edb\u1edd\u1edf\u1ee1\u1ee3\u1ee5\u1ee7\u1ee9\u1eeb\u1eed\u1eef\u1ef1\u1ef3\u1ef5\u1ef7\u1ef9\u1efb\u1efd\u1eff-\u1f07\u1f10-\u1f15\u1f20-\u1f27\u1f30-\u1f37\u1f40-\u1f45\u1f50-\u1f57\u1f60-\u1f67\u1f70-\u1f7d\u1f80-\u1f87\u1f90-\u1f97\u1fa0-\u1fa7\u1fb0-\u1fb4\u1fb6-\u1fb7\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fc7\u1fd0-\u1fd3\u1fd6-\u1fd7\u1fe0-\u1fe7\u1ff2-\u1ff4\u1ff6-\u1ff7\u210a\u210e-\u210f\u2113\u212f\u2134\u2139\u213c-\u213d\u2146-\u2149\u214e\u2184\u2c30-\u2c5e\u2c61\u2c65-\u2c66\u2c68\u2c6a\u2c6c\u2c71\u2c73-\u2c74\u2c76-\u2c7b\u2c81\u2c83\u2c85\u2c87\u2c89\u2c8b\u2c8d\u2c8f\u2c91\u2c93\u2c95\u2c97\u2c99\u2c9b\u2c9d\u2c9f\u2ca1\u2ca3\u2ca5\u2ca7\u2ca9\u2cab\u2cad\u2caf\u2cb1\u2cb3\u2cb5\u2cb7\u2cb9\u2cbb\u2cbd\u2cbf\u2cc1\u2cc3\u2cc5\u2cc7\u2cc9\u2ccb\u2ccd\u2ccf\u2cd1\u2cd3\u2cd5\u2cd7\u2cd9\u2cdb\u2cdd\u2cdf\u2ce1\u2ce3-\u2ce4\u2cec\u2cee\u2cf3\u2d00-\u2d25\u2d27\u2d2d\ua641\ua643\ua645\ua647\ua649\ua64b\ua64d\ua64f\ua651\ua653\ua655\ua657\ua659\ua65b\ua65d\ua65f\ua661\ua663\ua665\ua667\ua669\ua66b\ua66d\ua681\ua683\ua685\ua687\ua689\ua68b\ua68d\ua68f\ua691\ua693\ua695\ua697\ua699\ua69b\ua723\ua725\ua727\ua729\ua72b\ua72d\ua72f-\ua731\ua733\ua735\ua737\ua739\ua73b\ua73d\ua73f\ua741\ua743\ua745\ua747\ua749\ua74b\ua74d\ua74f\ua751\ua753\ua755\ua757\ua759\ua75b\ua75d\ua75f\ua761\ua763\ua765\ua767\ua769\ua76b\ua76d\ua76f\ua771-\ua778\ua77a\ua77c\ua77f\ua781\ua783\ua785\ua787\ua78c\ua78e\ua791\ua793-\ua795\ua797\ua799\ua79b\ua79d\ua79f\ua7a1\ua7a3\ua7a5\ua7a7\ua7
a9\ua7af\ua7b5\ua7b7\ua7b9\ua7fa\uab30-\uab5a\uab60-\uab65\uab70-\uabbf\ufb00-\ufb06\ufb13-\ufb17\uff41-\uff5a' +Ll = 'a-z\xb5\xdf-\xf6\xf8-\xff\u0101\u0103\u0105\u0107\u0109\u010b\u010d\u010f\u0111\u0113\u0115\u0117\u0119\u011b\u011d\u011f\u0121\u0123\u0125\u0127\u0129\u012b\u012d\u012f\u0131\u0133\u0135\u0137-\u0138\u013a\u013c\u013e\u0140\u0142\u0144\u0146\u0148-\u0149\u014b\u014d\u014f\u0151\u0153\u0155\u0157\u0159\u015b\u015d\u015f\u0161\u0163\u0165\u0167\u0169\u016b\u016d\u016f\u0171\u0173\u0175\u0177\u017a\u017c\u017e-\u0180\u0183\u0185\u0188\u018c-\u018d\u0192\u0195\u0199-\u019b\u019e\u01a1\u01a3\u01a5\u01a8\u01aa-\u01ab\u01ad\u01b0\u01b4\u01b6\u01b9-\u01ba\u01bd-\u01bf\u01c6\u01c9\u01cc\u01ce\u01d0\u01d2\u01d4\u01d6\u01d8\u01da\u01dc-\u01dd\u01df\u01e1\u01e3\u01e5\u01e7\u01e9\u01eb\u01ed\u01ef-\u01f0\u01f3\u01f5\u01f9\u01fb\u01fd\u01ff\u0201\u0203\u0205\u0207\u0209\u020b\u020d\u020f\u0211\u0213\u0215\u0217\u0219\u021b\u021d\u021f\u0221\u0223\u0225\u0227\u0229\u022b\u022d\u022f\u0231\u0233-\u0239\u023c\u023f-\u0240\u0242\u0247\u0249\u024b\u024d\u024f-\u0293\u0295-\u02af\u0371\u0373\u0377\u037b-\u037d\u0390\u03ac-\u03ce\u03d0-\u03d1\u03d5-\u03d7\u03d9\u03db\u03dd\u03df\u03e1\u03e3\u03e5\u03e7\u03e9\u03eb\u03ed\u03ef-\u03f3\u03f5\u03f8\u03fb-\u03fc\u0430-\u045f\u0461\u0463\u0465\u0467\u0469\u046b\u046d\u046f\u0471\u0473\u0475\u0477\u0479\u047b\u047d\u047f\u0481\u048b\u048d\u048f\u0491\u0493\u0495\u0497\u0499\u049b\u049d\u049f\u04a1\u04a3\u04a5\u04a7\u04a9\u04ab\u04ad\u04af\u04b1\u04b3\u04b5\u04b7\u04b9\u04bb\u04bd\u04bf\u04c2\u04c4\u04c6\u04c8\u04ca\u04cc\u04ce-\u04cf\u04d1\u04d3\u04d5\u04d7\u04d9\u04db\u04dd\u04df\u04e1\u04e3\u04e5\u04e7\u04e9\u04eb\u04ed\u04ef\u04f1\u04f3\u04f5\u04f7\u04f9\u04fb\u04fd\u04ff\u0501\u0503\u0505\u0507\u0509\u050b\u050d\u050f\u0511\u0513\u0515\u0517\u0519\u051b\u051d\u051f\u0521\u0523\u0525\u0527\u0529\u052b\u052d\u052f\u0560-\u0588\u10d0-\u10fa\u10fd-\u10ff\u13f8-\u13fd\u1c80-\u1c88\u1d00-\u1d2b\u1d6b-\u1d77\u1d79-\u1d9a\u1e01\u
1e03\u1e05\u1e07\u1e09\u1e0b\u1e0d\u1e0f\u1e11\u1e13\u1e15\u1e17\u1e19\u1e1b\u1e1d\u1e1f\u1e21\u1e23\u1e25\u1e27\u1e29\u1e2b\u1e2d\u1e2f\u1e31\u1e33\u1e35\u1e37\u1e39\u1e3b\u1e3d\u1e3f\u1e41\u1e43\u1e45\u1e47\u1e49\u1e4b\u1e4d\u1e4f\u1e51\u1e53\u1e55\u1e57\u1e59\u1e5b\u1e5d\u1e5f\u1e61\u1e63\u1e65\u1e67\u1e69\u1e6b\u1e6d\u1e6f\u1e71\u1e73\u1e75\u1e77\u1e79\u1e7b\u1e7d\u1e7f\u1e81\u1e83\u1e85\u1e87\u1e89\u1e8b\u1e8d\u1e8f\u1e91\u1e93\u1e95-\u1e9d\u1e9f\u1ea1\u1ea3\u1ea5\u1ea7\u1ea9\u1eab\u1ead\u1eaf\u1eb1\u1eb3\u1eb5\u1eb7\u1eb9\u1ebb\u1ebd\u1ebf\u1ec1\u1ec3\u1ec5\u1ec7\u1ec9\u1ecb\u1ecd\u1ecf\u1ed1\u1ed3\u1ed5\u1ed7\u1ed9\u1edb\u1edd\u1edf\u1ee1\u1ee3\u1ee5\u1ee7\u1ee9\u1eeb\u1eed\u1eef\u1ef1\u1ef3\u1ef5\u1ef7\u1ef9\u1efb\u1efd\u1eff-\u1f07\u1f10-\u1f15\u1f20-\u1f27\u1f30-\u1f37\u1f40-\u1f45\u1f50-\u1f57\u1f60-\u1f67\u1f70-\u1f7d\u1f80-\u1f87\u1f90-\u1f97\u1fa0-\u1fa7\u1fb0-\u1fb4\u1fb6-\u1fb7\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fc7\u1fd0-\u1fd3\u1fd6-\u1fd7\u1fe0-\u1fe7\u1ff2-\u1ff4\u1ff6-\u1ff7\u210a\u210e-\u210f\u2113\u212f\u2134\u2139\u213c-\u213d\u2146-\u2149\u214e\u2184\u2c30-\u2c5e\u2c61\u2c65-\u2c66\u2c68\u2c6a\u2c6c\u2c71\u2c73-\u2c74\u2c76-\u2c7b\u2c81\u2c83\u2c85\u2c87\u2c89\u2c8b\u2c8d\u2c8f\u2c91\u2c93\u2c95\u2c97\u2c99\u2c9b\u2c9d\u2c9f\u2ca1\u2ca3\u2ca5\u2ca7\u2ca9\u2cab\u2cad\u2caf\u2cb1\u2cb3\u2cb5\u2cb7\u2cb9\u2cbb\u2cbd\u2cbf\u2cc1\u2cc3\u2cc5\u2cc7\u2cc9\u2ccb\u2ccd\u2ccf\u2cd1\u2cd3\u2cd5\u2cd7\u2cd9\u2cdb\u2cdd\u2cdf\u2ce1\u2ce3-\u2ce4\u2cec\u2cee\u2cf3\u2d00-\u2d25\u2d27\u2d2d\ua641\ua643\ua645\ua647\ua649\ua64b\ua64d\ua64f\ua651\ua653\ua655\ua657\ua659\ua65b\ua65d\ua65f\ua661\ua663\ua665\ua667\ua669\ua66b\ua66d\ua681\ua683\ua685\ua687\ua689\ua68b\ua68d\ua68f\ua691\ua693\ua695\ua697\ua699\ua69b\ua723\ua725\ua727\ua729\ua72b\ua72d\ua72f-\ua731\ua733\ua735\ua737\ua739\ua73b\ua73d\ua73f\ua741\ua743\ua745\ua747\ua749\ua74b\ua74d\ua74f\ua751\ua753\ua755\ua757\ua759\ua75b\ua75d\ua75f\ua761\ua763\ua765\ua767\ua769\ua76b\ua76d\ua76f\ua771-\ua778\ua77a\ua77c\u
a77f\ua781\ua783\ua785\ua787\ua78c\ua78e\ua791\ua793-\ua795\ua797\ua799\ua79b\ua79d\ua79f\ua7a1\ua7a3\ua7a5\ua7a7\ua7a9\ua7af\ua7b5\ua7b7\ua7b9\ua7fa\uab30-\uab5a\uab60-\uab65\uab70-\uabbf\ufb00-\ufb06\ufb13-\ufb17\uff41-\uff5a\U00010428-\U0001044f\U000104d8-\U000104fb\U00010cc0-\U00010cf2\U000118c0-\U000118df\U00016e60-\U00016e7f\U0001d41a-\U0001d433\U0001d44e-\U0001d454\U0001d456-\U0001d467\U0001d482-\U0001d49b\U0001d4b6-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d4cf\U0001d4ea-\U0001d503\U0001d51e-\U0001d537\U0001d552-\U0001d56b\U0001d586-\U0001d59f\U0001d5ba-\U0001d5d3\U0001d5ee-\U0001d607\U0001d622-\U0001d63b\U0001d656-\U0001d66f\U0001d68a-\U0001d6a5\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6e1\U0001d6fc-\U0001d714\U0001d716-\U0001d71b\U0001d736-\U0001d74e\U0001d750-\U0001d755\U0001d770-\U0001d788\U0001d78a-\U0001d78f\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7c9\U0001d7cb\U0001e922-\U0001e943' -Lm = u'\u02b0-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0374\u037a\u0559\u0640\u06e5-\u06e6\u07f4-\u07f5\u07fa\u081a\u0824\u0828\u0971\u0e46\u0ec6\u10fc\u17d7\u1843\u1aa7\u1c78-\u1c7d\u1d2c-\u1d6a\u1d78\u1d9b-\u1dbf\u2071\u207f\u2090-\u209c\u2c7c-\u2c7d\u2d6f\u2e2f\u3005\u3031-\u3035\u303b\u309d-\u309e\u30fc-\u30fe\ua015\ua4f8-\ua4fd\ua60c\ua67f\ua69c-\ua69d\ua717-\ua71f\ua770\ua788\ua7f8-\ua7f9\ua9cf\ua9e6\uaa70\uaadd\uaaf3-\uaaf4\uab5c-\uab5f\uff70\uff9e-\uff9f' +Lm = '\u02b0-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0374\u037a\u0559\u0640\u06e5-\u06e6\u07f4-\u07f5\u07fa\u081a\u0824\u0828\u0971\u0e46\u0ec6\u10fc\u17d7\u1843\u1aa7\u1c78-\u1c7d\u1d2c-\u1d6a\u1d78\u1d9b-\u1dbf\u2071\u207f\u2090-\u209c\u2c7c-\u2c7d\u2d6f\u2e2f\u3005\u3031-\u3035\u303b\u309d-\u309e\u30fc-\u30fe\ua015\ua4f8-\ua4fd\ua60c\ua67f\ua69c-\ua69d\ua717-\ua71f\ua770\ua788\ua7f8-\ua7f9\ua9cf\ua9e6\uaa70\uaadd\uaaf3-\uaaf4\uab5c-\uab5f\uff70\uff9e-\uff9f\U00016b40-\U00016b43\U00016f93-\U00016f9f\U00016fe0-\U00016fe1' -Lo = 
u'\xaa\xba\u01bb\u01c0-\u01c3\u0294\u05d0-\u05ea\u05ef-\u05f2\u0620-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u0800-\u0815\u0840-\u0858\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0972-\u0980\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u09fc\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0af9\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60-\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2-\u0eb3\u0ebd\u0ec0-\u0ec4\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u1100-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16f1-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17dc\u1820-\u1842\u1844-\u1878\u
1880-\u1884\u1887-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u2135-\u2138\u2d30-\u2d67\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3006\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua014\ua016-\ua48c\ua4d0-\ua4f7\ua500-\ua60b\ua610-\ua61f\ua62a-\ua62b\ua66e\ua6a0-\ua6e5\ua78f\ua7f7\ua7fb-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd-\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9e0-\ua9e4\ua9e7-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa6f\uaa71-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadc\uaae0-\uaaea\uaaf2\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uabc0-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff66-\uff6f\uff71-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc' +Lo = 
'\xaa\xba\u01bb\u01c0-\u01c3\u0294\u05d0-\u05ea\u05ef-\u05f2\u0620-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u0800-\u0815\u0840-\u0858\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0972-\u0980\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u09fc\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0af9\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60-\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2-\u0eb3\u0ebd\u0ec0-\u0ec4\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u1100-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16f1-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17dc\u1820-\u1842\u1844-\u1878\u1
880-\u1884\u1887-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u2135-\u2138\u2d30-\u2d67\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3006\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua014\ua016-\ua48c\ua4d0-\ua4f7\ua500-\ua60b\ua610-\ua61f\ua62a-\ua62b\ua66e\ua6a0-\ua6e5\ua78f\ua7f7\ua7fb-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd-\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9e0-\ua9e4\ua9e7-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa6f\uaa71-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadc\uaae0-\uaaea\uaaf2\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uabc0-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff66-\uff6f\uff71-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031f\U0001032d-\U00010340\U00010342-\U00010349\U00010350-\U00010375\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U00010450-\U0001049d\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00
010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae4\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010d00-\U00010d23\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f45\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011144\U00011150-\U00011172\U00011176\U00011183-\U000111b2\U000111c1-\U000111c4\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U0001122b\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112de\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133d\U00011350\U0001135d-\U00011361\U00011400-\U00011434\U00011447-\U0001144a\U00011480-\U000114af\U000114c4-\U000114c5\U000114c7\U00011580-\U000115ae\U000115d8-\U000115db\U00011600-\U0001162f\U00011644\U00011680-\U000116aa\U00011700-\U0001171a\U00011800-\U0001182b\U000118ff\U00011a00\U00011a0b-\U00011a32\U00011a3a\U00011a50\U00011a5c-\U00011a83\U00011a86-\U00011a89\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c2e\U00011c40\U00011c72-\U00011c8f\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d30\U00011d46\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d89\U00011d98\U00011ee0-\U00011ef2\U00012000-\U00012399\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016ad0-\U00016aed\U00016b00-\U00016b2f\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016f00-\U00016f44\U00016f50\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc9
0-\U0001bc99\U0001e800-\U0001e8c4\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d' -Lt = u'\u01c5\u01c8\u01cb\u01f2\u1f88-\u1f8f\u1f98-\u1f9f\u1fa8-\u1faf\u1fbc\u1fcc\u1ffc' +Lt = '\u01c5\u01c8\u01cb\u01f2\u1f88-\u1f8f\u1f98-\u1f9f\u1fa8-\u1faf\u1fbc\u1fcc\u1ffc' -Lu = u'A-Z\xc0-\xd6\xd8-\xde\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u037f\u0386\u0388-\u038a\u038c\u038e-\u038f\u0391-\u03a1\u03a3-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03
e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0524\u0526\u0528\u052a\u052c\u052e\u0531-\u0556\u10a0-\u10c5\u10c7\u10cd\u13a0-\u13f5\u1c90-\u1cba\u1cbd-\u1cbf\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59\u1f5b\u1f5d\u1f5f\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c70\u2c72\u2c75\u2c7e-\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\
u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\u2ceb\u2ced\u2cf2\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua660\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua698\ua69a\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\ua78d\ua790\ua792\ua796\ua798\ua79a\ua79c\ua79e\ua7a0\ua7a2\ua7a4\ua7a6\ua7a8\ua7aa-\ua7ae\ua7b0-\ua7b4\ua7b6\ua7b8\uff21-\uff3a' +Lu = 'A-Z\xc0-\xd6\xd8-\xde\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u037f\u0386\u0388-\u038a\u038c\u038e-\u038f\u0391-\u03a1\u03a3-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\
u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0524\u0526\u0528\u052a\u052c\u052e\u0531-\u0556\u10a0-\u10c5\u10c7\u10cd\u13a0-\u13f5\u1c90-\u1cba\u1cbd-\u1cbf\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59\u1f5b\u1f5d\u1f5f\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c70\u2c72\u2c75\u2c7e-\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2c
b8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\u2ceb\u2ced\u2cf2\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua660\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua698\ua69a\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\ua78d\ua790\ua792\ua796\ua798\ua79a\ua79c\ua79e\ua7a0\ua7a2\ua7a4\ua7a6\ua7a8\ua7aa-\ua7ae\ua7b0-\ua7b4\ua7b6\ua7b8\uff21-\uff3a\U00010400-\U00010427\U000104b0-\U000104d3\U00010c80-\U00010cb2\U000118a0-\U000118bf\U00016e40-\U00016e5f\U0001d400-\U0001d419\U0001d434-\U0001d44d\U0001d468-\U0001d481\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b5\U0001d4d0-\U0001d4e9\U0001d504-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d538-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d56c-\U0001d585\U0001d5a0-\U0001d5b9\U0001d5d4-\U0001d5ed\U0001d608-\U0001d621\U0001d63c-\U0001d655\U0001d670-\U0001d689\U0001d6a8-\U0001d6c0\U0001d6e2-\U0001d6fa\U0001d71c-\U0001d734\U0001d756-\U0001d76e\U0001d790-\U0001d7a8\U0001d7ca\U0001e900-\U0001e921' -Mc = 
u'\u0903\u093b\u093e-\u0940\u0949-\u094c\u094e-\u094f\u0982-\u0983\u09be-\u09c0\u09c7-\u09c8\u09cb-\u09cc\u09d7\u0a03\u0a3e-\u0a40\u0a83\u0abe-\u0ac0\u0ac9\u0acb-\u0acc\u0b02-\u0b03\u0b3e\u0b40\u0b47-\u0b48\u0b4b-\u0b4c\u0b57\u0bbe-\u0bbf\u0bc1-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcc\u0bd7\u0c01-\u0c03\u0c41-\u0c44\u0c82-\u0c83\u0cbe\u0cc0-\u0cc4\u0cc7-\u0cc8\u0cca-\u0ccb\u0cd5-\u0cd6\u0d02-\u0d03\u0d3e-\u0d40\u0d46-\u0d48\u0d4a-\u0d4c\u0d57\u0d82-\u0d83\u0dcf-\u0dd1\u0dd8-\u0ddf\u0df2-\u0df3\u0f3e-\u0f3f\u0f7f\u102b-\u102c\u1031\u1038\u103b-\u103c\u1056-\u1057\u1062-\u1064\u1067-\u106d\u1083-\u1084\u1087-\u108c\u108f\u109a-\u109c\u17b6\u17be-\u17c5\u17c7-\u17c8\u1923-\u1926\u1929-\u192b\u1930-\u1931\u1933-\u1938\u1a19-\u1a1a\u1a55\u1a57\u1a61\u1a63-\u1a64\u1a6d-\u1a72\u1b04\u1b35\u1b3b\u1b3d-\u1b41\u1b43-\u1b44\u1b82\u1ba1\u1ba6-\u1ba7\u1baa\u1be7\u1bea-\u1bec\u1bee\u1bf2-\u1bf3\u1c24-\u1c2b\u1c34-\u1c35\u1ce1\u1cf2-\u1cf3\u1cf7\u302e-\u302f\ua823-\ua824\ua827\ua880-\ua881\ua8b4-\ua8c3\ua952-\ua953\ua983\ua9b4-\ua9b5\ua9ba-\ua9bb\ua9bd-\ua9c0\uaa2f-\uaa30\uaa33-\uaa34\uaa4d\uaa7b\uaa7d\uaaeb\uaaee-\uaaef\uaaf5\uabe3-\uabe4\uabe6-\uabe7\uabe9-\uabea\uabec' +Mc = 
'\u0903\u093b\u093e-\u0940\u0949-\u094c\u094e-\u094f\u0982-\u0983\u09be-\u09c0\u09c7-\u09c8\u09cb-\u09cc\u09d7\u0a03\u0a3e-\u0a40\u0a83\u0abe-\u0ac0\u0ac9\u0acb-\u0acc\u0b02-\u0b03\u0b3e\u0b40\u0b47-\u0b48\u0b4b-\u0b4c\u0b57\u0bbe-\u0bbf\u0bc1-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcc\u0bd7\u0c01-\u0c03\u0c41-\u0c44\u0c82-\u0c83\u0cbe\u0cc0-\u0cc4\u0cc7-\u0cc8\u0cca-\u0ccb\u0cd5-\u0cd6\u0d02-\u0d03\u0d3e-\u0d40\u0d46-\u0d48\u0d4a-\u0d4c\u0d57\u0d82-\u0d83\u0dcf-\u0dd1\u0dd8-\u0ddf\u0df2-\u0df3\u0f3e-\u0f3f\u0f7f\u102b-\u102c\u1031\u1038\u103b-\u103c\u1056-\u1057\u1062-\u1064\u1067-\u106d\u1083-\u1084\u1087-\u108c\u108f\u109a-\u109c\u17b6\u17be-\u17c5\u17c7-\u17c8\u1923-\u1926\u1929-\u192b\u1930-\u1931\u1933-\u1938\u1a19-\u1a1a\u1a55\u1a57\u1a61\u1a63-\u1a64\u1a6d-\u1a72\u1b04\u1b35\u1b3b\u1b3d-\u1b41\u1b43-\u1b44\u1b82\u1ba1\u1ba6-\u1ba7\u1baa\u1be7\u1bea-\u1bec\u1bee\u1bf2-\u1bf3\u1c24-\u1c2b\u1c34-\u1c35\u1ce1\u1cf2-\u1cf3\u1cf7\u302e-\u302f\ua823-\ua824\ua827\ua880-\ua881\ua8b4-\ua8c3\ua952-\ua953\ua983\ua9b4-\ua9b5\ua9ba-\ua9bb\ua9bd-\ua9c0\uaa2f-\uaa30\uaa33-\uaa34\uaa4d\uaa7b\uaa7d\uaaeb\uaaee-\uaaef\uaaf5\uabe3-\uabe4\uabe6-\uabe7\uabe9-\uabea\uabec\U00011000\U00011002\U00011082\U000110b0-\U000110b2\U000110b7-\U000110b8\U0001112c\U00011145-\U00011146\U00011182\U000111b3-\U000111b5\U000111bf-\U000111c0\U0001122c-\U0001122e\U00011232-\U00011233\U00011235\U000112e0-\U000112e2\U00011302-\U00011303\U0001133e-\U0001133f\U00011341-\U00011344\U00011347-\U00011348\U0001134b-\U0001134d\U00011357\U00011362-\U00011363\U00011435-\U00011437\U00011440-\U00011441\U00011445\U000114b0-\U000114b2\U000114b9\U000114bb-\U000114be\U000114c1\U000115af-\U000115b1\U000115b8-\U000115bb\U000115be\U00011630-\U00011632\U0001163b-\U0001163c\U0001163e\U000116ac\U000116ae-\U000116af\U000116b6\U00011720-\U00011721\U00011726\U0001182c-\U0001182e\U00011838\U00011a39\U00011a57-\U00011a58\U00011a97\U00011c2f\U00011c3e\U00011ca9\U00011cb1\U00011cb4\U00011d8a-\U00011d8e\U00011d93-\U00011d94\U00011d96\U00011
ef5-\U00011ef6\U00016f51-\U00016f7e\U0001d165-\U0001d166\U0001d16d-\U0001d172' -Me = u'\u0488-\u0489\u1abe\u20dd-\u20e0\u20e2-\u20e4\ua670-\ua672' +Me = '\u0488-\u0489\u1abe\u20dd-\u20e0\u20e2-\u20e4\ua670-\ua672' -Mn = u'\u0300-\u036f\u0483-\u0487\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u0610-\u061a\u064b-\u065f\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7-\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u07fd\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u08d3-\u08e1\u08e3-\u0902\u093a\u093c\u0941-\u0948\u094d\u0951-\u0957\u0962-\u0963\u0981\u09bc\u09c1-\u09c4\u09cd\u09e2-\u09e3\u09fe\u0a01-\u0a02\u0a3c\u0a41-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a70-\u0a71\u0a75\u0a81-\u0a82\u0abc\u0ac1-\u0ac5\u0ac7-\u0ac8\u0acd\u0ae2-\u0ae3\u0afa-\u0aff\u0b01\u0b3c\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b62-\u0b63\u0b82\u0bc0\u0bcd\u0c00\u0c04\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c62-\u0c63\u0c81\u0cbc\u0cbf\u0cc6\u0ccc-\u0ccd\u0ce2-\u0ce3\u0d00-\u0d01\u0d3b-\u0d3c\u0d41-\u0d44\u0d4d\u0d62-\u0d63\u0dca\u0dd2-\u0dd4\u0dd6\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb-\u0ebc\u0ec8-\u0ecd\u0f18-\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86-\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039-\u103a\u103d-\u103e\u1058-\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085-\u1086\u108d\u109d\u135d-\u135f\u1712-\u1714\u1732-\u1734\u1752-\u1753\u1772-\u1773\u17b4-\u17b5\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u1885-\u1886\u18a9\u1920-\u1922\u1927-\u1928\u1932\u1939-\u193b\u1a17-\u1a18\u1a1b\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1ab0-\u1abd\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80-\u1b81\u1ba2-\u1ba5\u1ba8-\u1ba9\u1bab-\u1bad\u1be6\u1be8-\u1be9\u1bed\u1bef-\u1bf1\u1c2c-\u1c33\u1c36-\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1cf4\u1cf8-\u1cf9\u1dc0-\u1df9\u1dfb-\u1dff\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\
u302d\u3099-\u309a\ua66f\ua674-\ua67d\ua69e-\ua69f\ua6f0-\ua6f1\ua802\ua806\ua80b\ua825-\ua826\ua8c4-\ua8c5\ua8e0-\ua8f1\ua8ff\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\ua9e5\uaa29-\uaa2e\uaa31-\uaa32\uaa35-\uaa36\uaa43\uaa4c\uaa7c\uaab0\uaab2-\uaab4\uaab7-\uaab8\uaabe-\uaabf\uaac1\uaaec-\uaaed\uaaf6\uabe5\uabe8\uabed\ufb1e\ufe00-\ufe0f\ufe20-\ufe2f' +Mn = '\u0300-\u036f\u0483-\u0487\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u0610-\u061a\u064b-\u065f\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7-\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u07fd\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u08d3-\u08e1\u08e3-\u0902\u093a\u093c\u0941-\u0948\u094d\u0951-\u0957\u0962-\u0963\u0981\u09bc\u09c1-\u09c4\u09cd\u09e2-\u09e3\u09fe\u0a01-\u0a02\u0a3c\u0a41-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a70-\u0a71\u0a75\u0a81-\u0a82\u0abc\u0ac1-\u0ac5\u0ac7-\u0ac8\u0acd\u0ae2-\u0ae3\u0afa-\u0aff\u0b01\u0b3c\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b62-\u0b63\u0b82\u0bc0\u0bcd\u0c00\u0c04\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c62-\u0c63\u0c81\u0cbc\u0cbf\u0cc6\u0ccc-\u0ccd\u0ce2-\u0ce3\u0d00-\u0d01\u0d3b-\u0d3c\u0d41-\u0d44\u0d4d\u0d62-\u0d63\u0dca\u0dd2-\u0dd4\u0dd6\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb-\u0ebc\u0ec8-\u0ecd\u0f18-\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86-\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039-\u103a\u103d-\u103e\u1058-\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085-\u1086\u108d\u109d\u135d-\u135f\u1712-\u1714\u1732-\u1734\u1752-\u1753\u1772-\u1773\u17b4-\u17b5\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u1885-\u1886\u18a9\u1920-\u1922\u1927-\u1928\u1932\u1939-\u193b\u1a17-\u1a18\u1a1b\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1ab0-\u1abd\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80-\u1b81\u1ba2-\u1ba5\u1ba8-\u1ba9\u1bab-\u1bad\u1be6\u1be8-\u1be9\u1bed\u1bef-\u1bf1\u1c2c-\u1c33\u1c36-\u1c37\u
1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1cf4\u1cf8-\u1cf9\u1dc0-\u1df9\u1dfb-\u1dff\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302d\u3099-\u309a\ua66f\ua674-\ua67d\ua69e-\ua69f\ua6f0-\ua6f1\ua802\ua806\ua80b\ua825-\ua826\ua8c4-\ua8c5\ua8e0-\ua8f1\ua8ff\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\ua9e5\uaa29-\uaa2e\uaa31-\uaa32\uaa35-\uaa36\uaa43\uaa4c\uaa7c\uaab0\uaab2-\uaab4\uaab7-\uaab8\uaabe-\uaabf\uaac1\uaaec-\uaaed\uaaf6\uabe5\uabe8\uabed\ufb1e\ufe00-\ufe0f\ufe20-\ufe2f\U000101fd\U000102e0\U00010376-\U0001037a\U00010a01-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a0f\U00010a38-\U00010a3a\U00010a3f\U00010ae5-\U00010ae6\U00010d24-\U00010d27\U00010f46-\U00010f50\U00011001\U00011038-\U00011046\U0001107f-\U00011081\U000110b3-\U000110b6\U000110b9-\U000110ba\U00011100-\U00011102\U00011127-\U0001112b\U0001112d-\U00011134\U00011173\U00011180-\U00011181\U000111b6-\U000111be\U000111c9-\U000111cc\U0001122f-\U00011231\U00011234\U00011236-\U00011237\U0001123e\U000112df\U000112e3-\U000112ea\U00011300-\U00011301\U0001133b-\U0001133c\U00011340\U00011366-\U0001136c\U00011370-\U00011374\U00011438-\U0001143f\U00011442-\U00011444\U00011446\U0001145e\U000114b3-\U000114b8\U000114ba\U000114bf-\U000114c0\U000114c2-\U000114c3\U000115b2-\U000115b5\U000115bc-\U000115bd\U000115bf-\U000115c0\U000115dc-\U000115dd\U00011633-\U0001163a\U0001163d\U0001163f-\U00011640\U000116ab\U000116ad\U000116b0-\U000116b5\U000116b7\U0001171d-\U0001171f\U00011722-\U00011725\U00011727-\U0001172b\U0001182f-\U00011837\U00011839-\U0001183a\U00011a01-\U00011a0a\U00011a33-\U00011a38\U00011a3b-\U00011a3e\U00011a47\U00011a51-\U00011a56\U00011a59-\U00011a5b\U00011a8a-\U00011a96\U00011a98-\U00011a99\U00011c30-\U00011c36\U00011c38-\U00011c3d\U00011c3f\U00011c92-\U00011ca7\U00011caa-\U00011cb0\U00011cb2-\U00011cb3\U00011cb5-\U00011cb6\U00011d31-\U00011d36\U00011d3a\U00011d3c-\U00011d3d\U00011d3f-\U00011d45\U00011d47\U00011d90-\U00011d91\U00011d95\U00011d97\U00011e
f3-\U00011ef4\U00016af0-\U00016af4\U00016b30-\U00016b36\U00016f8f-\U00016f92\U0001bc9d-\U0001bc9e\U0001d167-\U0001d169\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U0001da00-\U0001da36\U0001da3b-\U0001da6c\U0001da75\U0001da84\U0001da9b-\U0001da9f\U0001daa1-\U0001daaf\U0001e000-\U0001e006\U0001e008-\U0001e018\U0001e01b-\U0001e021\U0001e023-\U0001e024\U0001e026-\U0001e02a\U0001e8d0-\U0001e8d6\U0001e944-\U0001e94a\U000e0100-\U000e01ef' -Nd = u'0-9\u0660-\u0669\u06f0-\u06f9\u07c0-\u07c9\u0966-\u096f\u09e6-\u09ef\u0a66-\u0a6f\u0ae6-\u0aef\u0b66-\u0b6f\u0be6-\u0bef\u0c66-\u0c6f\u0ce6-\u0cef\u0d66-\u0d6f\u0de6-\u0def\u0e50-\u0e59\u0ed0-\u0ed9\u0f20-\u0f29\u1040-\u1049\u1090-\u1099\u17e0-\u17e9\u1810-\u1819\u1946-\u194f\u19d0-\u19d9\u1a80-\u1a89\u1a90-\u1a99\u1b50-\u1b59\u1bb0-\u1bb9\u1c40-\u1c49\u1c50-\u1c59\ua620-\ua629\ua8d0-\ua8d9\ua900-\ua909\ua9d0-\ua9d9\ua9f0-\ua9f9\uaa50-\uaa59\uabf0-\uabf9\uff10-\uff19' +Nd = '0-9\u0660-\u0669\u06f0-\u06f9\u07c0-\u07c9\u0966-\u096f\u09e6-\u09ef\u0a66-\u0a6f\u0ae6-\u0aef\u0b66-\u0b6f\u0be6-\u0bef\u0c66-\u0c6f\u0ce6-\u0cef\u0d66-\u0d6f\u0de6-\u0def\u0e50-\u0e59\u0ed0-\u0ed9\u0f20-\u0f29\u1040-\u1049\u1090-\u1099\u17e0-\u17e9\u1810-\u1819\u1946-\u194f\u19d0-\u19d9\u1a80-\u1a89\u1a90-\u1a99\u1b50-\u1b59\u1bb0-\u1bb9\u1c40-\u1c49\u1c50-\u1c59\ua620-\ua629\ua8d0-\ua8d9\ua900-\ua909\ua9d0-\ua9d9\ua9f0-\ua9f9\uaa50-\uaa59\uabf0-\uabf9\uff10-\uff19\U000104a0-\U000104a9\U00010d30-\U00010d39\U00011066-\U0001106f\U000110f0-\U000110f9\U00011136-\U0001113f\U000111d0-\U000111d9\U000112f0-\U000112f9\U00011450-\U00011459\U000114d0-\U000114d9\U00011650-\U00011659\U000116c0-\U000116c9\U00011730-\U00011739\U000118e0-\U000118e9\U00011c50-\U00011c59\U00011d50-\U00011d59\U00011da0-\U00011da9\U00016a60-\U00016a69\U00016b50-\U00016b59\U0001d7ce-\U0001d7ff\U0001e950-\U0001e959' -Nl = u'\u16ee-\u16f0\u2160-\u2182\u2185-\u2188\u3007\u3021-\u3029\u3038-\u303a\ua6e6-\ua6ef' +Nl = 
'\u16ee-\u16f0\u2160-\u2182\u2185-\u2188\u3007\u3021-\u3029\u3038-\u303a\ua6e6-\ua6ef\U00010140-\U00010174\U00010341\U0001034a\U000103d1-\U000103d5\U00012400-\U0001246e' -No = u'\xb2-\xb3\xb9\xbc-\xbe\u09f4-\u09f9\u0b72-\u0b77\u0bf0-\u0bf2\u0c78-\u0c7e\u0d58-\u0d5e\u0d70-\u0d78\u0f2a-\u0f33\u1369-\u137c\u17f0-\u17f9\u19da\u2070\u2074-\u2079\u2080-\u2089\u2150-\u215f\u2189\u2460-\u249b\u24ea-\u24ff\u2776-\u2793\u2cfd\u3192-\u3195\u3220-\u3229\u3248-\u324f\u3251-\u325f\u3280-\u3289\u32b1-\u32bf\ua830-\ua835' +No = '\xb2-\xb3\xb9\xbc-\xbe\u09f4-\u09f9\u0b72-\u0b77\u0bf0-\u0bf2\u0c78-\u0c7e\u0d58-\u0d5e\u0d70-\u0d78\u0f2a-\u0f33\u1369-\u137c\u17f0-\u17f9\u19da\u2070\u2074-\u2079\u2080-\u2089\u2150-\u215f\u2189\u2460-\u249b\u24ea-\u24ff\u2776-\u2793\u2cfd\u3192-\u3195\u3220-\u3229\u3248-\u324f\u3251-\u325f\u3280-\u3289\u32b1-\u32bf\ua830-\ua835\U00010107-\U00010133\U00010175-\U00010178\U0001018a-\U0001018b\U000102e1-\U000102fb\U00010320-\U00010323\U00010858-\U0001085f\U00010879-\U0001087f\U000108a7-\U000108af\U000108fb-\U000108ff\U00010916-\U0001091b\U000109bc-\U000109bd\U000109c0-\U000109cf\U000109d2-\U000109ff\U00010a40-\U00010a48\U00010a7d-\U00010a7e\U00010a9d-\U00010a9f\U00010aeb-\U00010aef\U00010b58-\U00010b5f\U00010b78-\U00010b7f\U00010ba9-\U00010baf\U00010cfa-\U00010cff\U00010e60-\U00010e7e\U00010f1d-\U00010f26\U00010f51-\U00010f54\U00011052-\U00011065\U000111e1-\U000111f4\U0001173a-\U0001173b\U000118ea-\U000118f2\U00011c5a-\U00011c6c\U00016b5b-\U00016b61\U00016e80-\U00016e96\U0001d2e0-\U0001d2f3\U0001d360-\U0001d378\U0001e8c7-\U0001e8cf\U0001ec71-\U0001ecab\U0001ecad-\U0001ecaf\U0001ecb1-\U0001ecb4\U0001f100-\U0001f10c' -Pc = u'_\u203f-\u2040\u2054\ufe33-\ufe34\ufe4d-\ufe4f\uff3f' +Pc = '_\u203f-\u2040\u2054\ufe33-\ufe34\ufe4d-\ufe4f\uff3f' -Pd = u'\\-\u058a\u05be\u1400\u1806\u2010-\u2015\u2e17\u2e1a\u2e3a-\u2e3b\u2e40\u301c\u3030\u30a0\ufe31-\ufe32\ufe58\ufe63\uff0d' +Pd = 
'\\-\u058a\u05be\u1400\u1806\u2010-\u2015\u2e17\u2e1a\u2e3a-\u2e3b\u2e40\u301c\u3030\u30a0\ufe31-\ufe32\ufe58\ufe63\uff0d' -Pe = u')\\]}\u0f3b\u0f3d\u169c\u2046\u207e\u208e\u2309\u230b\u232a\u2769\u276b\u276d\u276f\u2771\u2773\u2775\u27c6\u27e7\u27e9\u27eb\u27ed\u27ef\u2984\u2986\u2988\u298a\u298c\u298e\u2990\u2992\u2994\u2996\u2998\u29d9\u29db\u29fd\u2e23\u2e25\u2e27\u2e29\u3009\u300b\u300d\u300f\u3011\u3015\u3017\u3019\u301b\u301e-\u301f\ufd3e\ufe18\ufe36\ufe38\ufe3a\ufe3c\ufe3e\ufe40\ufe42\ufe44\ufe48\ufe5a\ufe5c\ufe5e\uff09\uff3d\uff5d\uff60\uff63' +Pe = ')\\]}\u0f3b\u0f3d\u169c\u2046\u207e\u208e\u2309\u230b\u232a\u2769\u276b\u276d\u276f\u2771\u2773\u2775\u27c6\u27e7\u27e9\u27eb\u27ed\u27ef\u2984\u2986\u2988\u298a\u298c\u298e\u2990\u2992\u2994\u2996\u2998\u29d9\u29db\u29fd\u2e23\u2e25\u2e27\u2e29\u3009\u300b\u300d\u300f\u3011\u3015\u3017\u3019\u301b\u301e-\u301f\ufd3e\ufe18\ufe36\ufe38\ufe3a\ufe3c\ufe3e\ufe40\ufe42\ufe44\ufe48\ufe5a\ufe5c\ufe5e\uff09\uff3d\uff5d\uff60\uff63' -Pf = u'\xbb\u2019\u201d\u203a\u2e03\u2e05\u2e0a\u2e0d\u2e1d\u2e21' +Pf = '\xbb\u2019\u201d\u203a\u2e03\u2e05\u2e0a\u2e0d\u2e1d\u2e21' -Pi = u'\xab\u2018\u201b-\u201c\u201f\u2039\u2e02\u2e04\u2e09\u2e0c\u2e1c\u2e20' +Pi = '\xab\u2018\u201b-\u201c\u201f\u2039\u2e02\u2e04\u2e09\u2e0c\u2e1c\u2e20' -Po = 
u"!-#%-'*,.-/:-;?-@\\\\\xa1\xa7\xb6-\xb7\xbf\u037e\u0387\u055a-\u055f\u0589\u05c0\u05c3\u05c6\u05f3-\u05f4\u0609-\u060a\u060c-\u060d\u061b\u061e-\u061f\u066a-\u066d\u06d4\u0700-\u070d\u07f7-\u07f9\u0830-\u083e\u085e\u0964-\u0965\u0970\u09fd\u0a76\u0af0\u0c84\u0df4\u0e4f\u0e5a-\u0e5b\u0f04-\u0f12\u0f14\u0f85\u0fd0-\u0fd4\u0fd9-\u0fda\u104a-\u104f\u10fb\u1360-\u1368\u166d-\u166e\u16eb-\u16ed\u1735-\u1736\u17d4-\u17d6\u17d8-\u17da\u1800-\u1805\u1807-\u180a\u1944-\u1945\u1a1e-\u1a1f\u1aa0-\u1aa6\u1aa8-\u1aad\u1b5a-\u1b60\u1bfc-\u1bff\u1c3b-\u1c3f\u1c7e-\u1c7f\u1cc0-\u1cc7\u1cd3\u2016-\u2017\u2020-\u2027\u2030-\u2038\u203b-\u203e\u2041-\u2043\u2047-\u2051\u2053\u2055-\u205e\u2cf9-\u2cfc\u2cfe-\u2cff\u2d70\u2e00-\u2e01\u2e06-\u2e08\u2e0b\u2e0e-\u2e16\u2e18-\u2e19\u2e1b\u2e1e-\u2e1f\u2e2a-\u2e2e\u2e30-\u2e39\u2e3c-\u2e3f\u2e41\u2e43-\u2e4e\u3001-\u3003\u303d\u30fb\ua4fe-\ua4ff\ua60d-\ua60f\ua673\ua67e\ua6f2-\ua6f7\ua874-\ua877\ua8ce-\ua8cf\ua8f8-\ua8fa\ua8fc\ua92e-\ua92f\ua95f\ua9c1-\ua9cd\ua9de-\ua9df\uaa5c-\uaa5f\uaade-\uaadf\uaaf0-\uaaf1\uabeb\ufe10-\ufe16\ufe19\ufe30\ufe45-\ufe46\ufe49-\ufe4c\ufe50-\ufe52\ufe54-\ufe57\ufe5f-\ufe61\ufe68\ufe6a-\ufe6b\uff01-\uff03\uff05-\uff07\uff0a\uff0c\uff0e-\uff0f\uff1a-\uff1b\uff1f-\uff20\uff3c\uff61\uff64-\uff65" +Po = 
"!-#%-'*,.-/:-;?-@\\\\\xa1\xa7\xb6-\xb7\xbf\u037e\u0387\u055a-\u055f\u0589\u05c0\u05c3\u05c6\u05f3-\u05f4\u0609-\u060a\u060c-\u060d\u061b\u061e-\u061f\u066a-\u066d\u06d4\u0700-\u070d\u07f7-\u07f9\u0830-\u083e\u085e\u0964-\u0965\u0970\u09fd\u0a76\u0af0\u0c84\u0df4\u0e4f\u0e5a-\u0e5b\u0f04-\u0f12\u0f14\u0f85\u0fd0-\u0fd4\u0fd9-\u0fda\u104a-\u104f\u10fb\u1360-\u1368\u166d-\u166e\u16eb-\u16ed\u1735-\u1736\u17d4-\u17d6\u17d8-\u17da\u1800-\u1805\u1807-\u180a\u1944-\u1945\u1a1e-\u1a1f\u1aa0-\u1aa6\u1aa8-\u1aad\u1b5a-\u1b60\u1bfc-\u1bff\u1c3b-\u1c3f\u1c7e-\u1c7f\u1cc0-\u1cc7\u1cd3\u2016-\u2017\u2020-\u2027\u2030-\u2038\u203b-\u203e\u2041-\u2043\u2047-\u2051\u2053\u2055-\u205e\u2cf9-\u2cfc\u2cfe-\u2cff\u2d70\u2e00-\u2e01\u2e06-\u2e08\u2e0b\u2e0e-\u2e16\u2e18-\u2e19\u2e1b\u2e1e-\u2e1f\u2e2a-\u2e2e\u2e30-\u2e39\u2e3c-\u2e3f\u2e41\u2e43-\u2e4e\u3001-\u3003\u303d\u30fb\ua4fe-\ua4ff\ua60d-\ua60f\ua673\ua67e\ua6f2-\ua6f7\ua874-\ua877\ua8ce-\ua8cf\ua8f8-\ua8fa\ua8fc\ua92e-\ua92f\ua95f\ua9c1-\ua9cd\ua9de-\ua9df\uaa5c-\uaa5f\uaade-\uaadf\uaaf0-\uaaf1\uabeb\ufe10-\ufe16\ufe19\ufe30\ufe45-\ufe46\ufe49-\ufe4c\ufe50-\ufe52\ufe54-\ufe57\ufe5f-\ufe61\ufe68\ufe6a-\ufe6b\uff01-\uff03\uff05-\uff07\uff0a\uff0c\uff0e-\uff0f\uff1a-\uff1b\uff1f-\uff20\uff3c\uff61\uff64-\uff65\U00010100-\U00010102\U0001039f\U000103d0\U0001056f\U00010857\U0001091f\U0001093f\U00010a50-\U00010a58\U00010a7f\U00010af0-\U00010af6\U00010b39-\U00010b3f\U00010b99-\U00010b9c\U00010f55-\U00010f59\U00011047-\U0001104d\U000110bb-\U000110bc\U000110be-\U000110c1\U00011140-\U00011143\U00011174-\U00011175\U000111c5-\U000111c8\U000111cd\U000111db\U000111dd-\U000111df\U00011238-\U0001123d\U000112a9\U0001144b-\U0001144f\U0001145b\U0001145d\U000114c6\U000115c1-\U000115d7\U00011641-\U00011643\U00011660-\U0001166c\U0001173c-\U0001173e\U0001183b\U00011a3f-\U00011a46\U00011a9a-\U00011a9c\U00011a9e-\U00011aa2\U00011c41-\U00011c45\U00011c70-\U00011c71\U00011ef7-\U00011ef8\U00012470-\U00012474\U00016a6e-\U00016a6f\U00016af5\U00016b37-\U00016
b3b\U00016b44\U00016e97-\U00016e9a\U0001bc9f\U0001da87-\U0001da8b\U0001e95e-\U0001e95f" -Ps = u'(\\[{\u0f3a\u0f3c\u169b\u201a\u201e\u2045\u207d\u208d\u2308\u230a\u2329\u2768\u276a\u276c\u276e\u2770\u2772\u2774\u27c5\u27e6\u27e8\u27ea\u27ec\u27ee\u2983\u2985\u2987\u2989\u298b\u298d\u298f\u2991\u2993\u2995\u2997\u29d8\u29da\u29fc\u2e22\u2e24\u2e26\u2e28\u2e42\u3008\u300a\u300c\u300e\u3010\u3014\u3016\u3018\u301a\u301d\ufd3f\ufe17\ufe35\ufe37\ufe39\ufe3b\ufe3d\ufe3f\ufe41\ufe43\ufe47\ufe59\ufe5b\ufe5d\uff08\uff3b\uff5b\uff5f\uff62' +Ps = '(\\[{\u0f3a\u0f3c\u169b\u201a\u201e\u2045\u207d\u208d\u2308\u230a\u2329\u2768\u276a\u276c\u276e\u2770\u2772\u2774\u27c5\u27e6\u27e8\u27ea\u27ec\u27ee\u2983\u2985\u2987\u2989\u298b\u298d\u298f\u2991\u2993\u2995\u2997\u29d8\u29da\u29fc\u2e22\u2e24\u2e26\u2e28\u2e42\u3008\u300a\u300c\u300e\u3010\u3014\u3016\u3018\u301a\u301d\ufd3f\ufe17\ufe35\ufe37\ufe39\ufe3b\ufe3d\ufe3f\ufe41\ufe43\ufe47\ufe59\ufe5b\ufe5d\uff08\uff3b\uff5b\uff5f\uff62' -Sc = u'$\xa2-\xa5\u058f\u060b\u07fe-\u07ff\u09f2-\u09f3\u09fb\u0af1\u0bf9\u0e3f\u17db\u20a0-\u20bf\ua838\ufdfc\ufe69\uff04\uffe0-\uffe1\uffe5-\uffe6' +Sc = '$\xa2-\xa5\u058f\u060b\u07fe-\u07ff\u09f2-\u09f3\u09fb\u0af1\u0bf9\u0e3f\u17db\u20a0-\u20bf\ua838\ufdfc\ufe69\uff04\uffe0-\uffe1\uffe5-\uffe6\U0001ecb0' -Sk = u'\\^`\xa8\xaf\xb4\xb8\u02c2-\u02c5\u02d2-\u02df\u02e5-\u02eb\u02ed\u02ef-\u02ff\u0375\u0384-\u0385\u1fbd\u1fbf-\u1fc1\u1fcd-\u1fcf\u1fdd-\u1fdf\u1fed-\u1fef\u1ffd-\u1ffe\u309b-\u309c\ua700-\ua716\ua720-\ua721\ua789-\ua78a\uab5b\ufbb2-\ufbc1\uff3e\uff40\uffe3' +Sk = '\\^`\xa8\xaf\xb4\xb8\u02c2-\u02c5\u02d2-\u02df\u02e5-\u02eb\u02ed\u02ef-\u02ff\u0375\u0384-\u0385\u1fbd\u1fbf-\u1fc1\u1fcd-\u1fcf\u1fdd-\u1fdf\u1fed-\u1fef\u1ffd-\u1ffe\u309b-\u309c\ua700-\ua716\ua720-\ua721\ua789-\ua78a\uab5b\ufbb2-\ufbc1\uff3e\uff40\uffe3\U0001f3fb-\U0001f3ff' -Sm = 
u'+<->|~\xac\xb1\xd7\xf7\u03f6\u0606-\u0608\u2044\u2052\u207a-\u207c\u208a-\u208c\u2118\u2140-\u2144\u214b\u2190-\u2194\u219a-\u219b\u21a0\u21a3\u21a6\u21ae\u21ce-\u21cf\u21d2\u21d4\u21f4-\u22ff\u2320-\u2321\u237c\u239b-\u23b3\u23dc-\u23e1\u25b7\u25c1\u25f8-\u25ff\u266f\u27c0-\u27c4\u27c7-\u27e5\u27f0-\u27ff\u2900-\u2982\u2999-\u29d7\u29dc-\u29fb\u29fe-\u2aff\u2b30-\u2b44\u2b47-\u2b4c\ufb29\ufe62\ufe64-\ufe66\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe9-\uffec' +Sm = '+<->|~\xac\xb1\xd7\xf7\u03f6\u0606-\u0608\u2044\u2052\u207a-\u207c\u208a-\u208c\u2118\u2140-\u2144\u214b\u2190-\u2194\u219a-\u219b\u21a0\u21a3\u21a6\u21ae\u21ce-\u21cf\u21d2\u21d4\u21f4-\u22ff\u2320-\u2321\u237c\u239b-\u23b3\u23dc-\u23e1\u25b7\u25c1\u25f8-\u25ff\u266f\u27c0-\u27c4\u27c7-\u27e5\u27f0-\u27ff\u2900-\u2982\u2999-\u29d7\u29dc-\u29fb\u29fe-\u2aff\u2b30-\u2b44\u2b47-\u2b4c\ufb29\ufe62\ufe64-\ufe66\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe9-\uffec\U0001d6c1\U0001d6db\U0001d6fb\U0001d715\U0001d735\U0001d74f\U0001d76f\U0001d789\U0001d7a9\U0001d7c3\U0001eef0-\U0001eef1' -So = 
u'\xa6\xa9\xae\xb0\u0482\u058d-\u058e\u060e-\u060f\u06de\u06e9\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0d4f\u0d79\u0f01-\u0f03\u0f13\u0f15-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38\u0fbe-\u0fc5\u0fc7-\u0fcc\u0fce-\u0fcf\u0fd5-\u0fd8\u109e-\u109f\u1390-\u1399\u1940\u19de-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2117\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u214a\u214c-\u214d\u214f\u218a-\u218b\u2195-\u2199\u219c-\u219f\u21a1-\u21a2\u21a4-\u21a5\u21a7-\u21ad\u21af-\u21cd\u21d0-\u21d1\u21d3\u21d5-\u21f3\u2300-\u2307\u230c-\u231f\u2322-\u2328\u232b-\u237b\u237d-\u239a\u23b4-\u23db\u23e2-\u2426\u2440-\u244a\u249c-\u24e9\u2500-\u25b6\u25b8-\u25c0\u25c2-\u25f7\u2600-\u266e\u2670-\u2767\u2794-\u27bf\u2800-\u28ff\u2b00-\u2b2f\u2b45-\u2b46\u2b4d-\u2b73\u2b76-\u2b95\u2b98-\u2bc8\u2bca-\u2bfe\u2ce5-\u2cea\u2e80-\u2e99\u2e9b-\u2ef3\u2f00-\u2fd5\u2ff0-\u2ffb\u3004\u3012-\u3013\u3020\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3\u3200-\u321e\u322a-\u3247\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u32fe\u3300-\u33ff\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ua836-\ua837\ua839\uaa77-\uaa79\ufdfd\uffe4\uffe8\uffed-\uffee\ufffc-\ufffd' +So = 
'\xa6\xa9\xae\xb0\u0482\u058d-\u058e\u060e-\u060f\u06de\u06e9\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0d4f\u0d79\u0f01-\u0f03\u0f13\u0f15-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38\u0fbe-\u0fc5\u0fc7-\u0fcc\u0fce-\u0fcf\u0fd5-\u0fd8\u109e-\u109f\u1390-\u1399\u1940\u19de-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2117\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u214a\u214c-\u214d\u214f\u218a-\u218b\u2195-\u2199\u219c-\u219f\u21a1-\u21a2\u21a4-\u21a5\u21a7-\u21ad\u21af-\u21cd\u21d0-\u21d1\u21d3\u21d5-\u21f3\u2300-\u2307\u230c-\u231f\u2322-\u2328\u232b-\u237b\u237d-\u239a\u23b4-\u23db\u23e2-\u2426\u2440-\u244a\u249c-\u24e9\u2500-\u25b6\u25b8-\u25c0\u25c2-\u25f7\u2600-\u266e\u2670-\u2767\u2794-\u27bf\u2800-\u28ff\u2b00-\u2b2f\u2b45-\u2b46\u2b4d-\u2b73\u2b76-\u2b95\u2b98-\u2bc8\u2bca-\u2bfe\u2ce5-\u2cea\u2e80-\u2e99\u2e9b-\u2ef3\u2f00-\u2fd5\u2ff0-\u2ffb\u3004\u3012-\u3013\u3020\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3\u3200-\u321e\u322a-\u3247\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u32fe\u3300-\u33ff\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ua836-\ua837\ua839\uaa77-\uaa79\ufdfd\uffe4\uffe8\uffed-\uffee\ufffc-\ufffd\U00010137-\U0001013f\U00010179-\U00010189\U0001018c-\U0001018e\U00010190-\U0001019b\U000101a0\U000101d0-\U000101fc\U00010877-\U00010878\U00010ac8\U0001173f\U00016b3c-\U00016b3f\U00016b45\U0001bc9c\U0001d000-\U0001d0f5\U0001d100-\U0001d126\U0001d129-\U0001d164\U0001d16a-\U0001d16c\U0001d183-\U0001d184\U0001d18c-\U0001d1a9\U0001d1ae-\U0001d1e8\U0001d200-\U0001d241\U0001d245\U0001d300-\U0001d356\U0001d800-\U0001d9ff\U0001da37-\U0001da3a\U0001da6d-\U0001da74\U0001da76-\U0001da83\U0001da85-\U0001da86\U0001ecac\U0001f000-\U0001f02b\U0001f030-\U0001f093\U0001f0a0-\U0001f0ae\U0001f0b1-\U0001f0bf\U0001f0c1-\U0001f0cf\U0001f0d1-\U0001f0f5\U0001f110-\U0001f16b\U0001f170-\U0001f1ac\U0001f1e6-\U0001f202\U0001f210-\U0001f23b\U0001f240-\U0001f248\U0001f250-\U0001f251\U0001f260-\U0001f265\U000
1f300-\U0001f3fa\U0001f400-\U0001f6d4\U0001f6e0-\U0001f6ec\U0001f6f0-\U0001f6f9\U0001f700-\U0001f773\U0001f780-\U0001f7d8\U0001f800-\U0001f80b\U0001f810-\U0001f847\U0001f850-\U0001f859\U0001f860-\U0001f887\U0001f890-\U0001f8ad\U0001f900-\U0001f90b\U0001f910-\U0001f93e\U0001f940-\U0001f970\U0001f973-\U0001f976\U0001f97a\U0001f97c-\U0001f9a2\U0001f9b0-\U0001f9b9\U0001f9c0-\U0001f9c2\U0001f9d0-\U0001f9ff\U0001fa60-\U0001fa6d' -Zl = u'\u2028' +Zl = '\u2028' -Zp = u'\u2029' +Zp = '\u2029' -Zs = u' \xa0\u1680\u2000-\u200a\u202f\u205f\u3000' +Zs = ' \xa0\u1680\u2000-\u200a\u202f\u205f\u3000' -xid_continue = u'0-9A-Z_a-z\xaa\xb5\xb7\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0300-\u0374\u0376-\u0377\u037b-\u037d\u037f\u0386-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u0483-\u0487\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u05d0-\u05ea\u05ef-\u05f2\u0610-\u061a\u0620-\u0669\u066e-\u06d3\u06d5-\u06dc\u06df-\u06e8\u06ea-\u06fc\u06ff\u0710-\u074a\u074d-\u07b1\u07c0-\u07f5\u07fa\u07fd\u0800-\u082d\u0840-\u085b\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u08d3-\u08e1\u08e3-\u0963\u0966-\u096f\u0971-\u0983\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bc-\u09c4\u09c7-\u09c8\u09cb-\u09ce\u09d7\u09dc-\u09dd\u09df-\u09e3\u09e6-\u09f1\u09fc\u09fe\u0a01-\u0a03\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a3c\u0a3e-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a59-\u0a5c\u0a5e\u0a66-\u0a75\u0a81-\u0a83\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abc-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ad0\u0ae0-\u0ae3\u0ae6-\u0aef\u0af9-\u0aff\u0b01-\u0b03\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3c-\u0b44\u0b47-\u0b48\u0b4b-\u0b4d\u0b56-\u0b57\u0b5c-\u0b5d\u0b5f-\u0b63\u0b66-\u0b6f\u0b71\u0b82-\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3
-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd0\u0bd7\u0be6-\u0bef\u0c00-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c58-\u0c5a\u0c60-\u0c63\u0c66-\u0c6f\u0c80-\u0c83\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbc-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5-\u0cd6\u0cde\u0ce0-\u0ce3\u0ce6-\u0cef\u0cf1-\u0cf2\u0d00-\u0d03\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d44\u0d46-\u0d48\u0d4a-\u0d4e\u0d54-\u0d57\u0d5f-\u0d63\u0d66-\u0d6f\u0d7a-\u0d7f\u0d82-\u0d83\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0de6-\u0def\u0df2-\u0df3\u0e01-\u0e3a\u0e40-\u0e4e\u0e50-\u0e59\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb9\u0ebb-\u0ebd\u0ec0-\u0ec4\u0ec6\u0ec8-\u0ecd\u0ed0-\u0ed9\u0edc-\u0edf\u0f00\u0f18-\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e-\u0f47\u0f49-\u0f6c\u0f71-\u0f84\u0f86-\u0f97\u0f99-\u0fbc\u0fc6\u1000-\u1049\u1050-\u109d\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u135d-\u135f\u1369-\u1371\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1714\u1720-\u1734\u1740-\u1753\u1760-\u176c\u176e-\u1770\u1772-\u1773\u1780-\u17d3\u17d7\u17dc-\u17dd\u17e0-\u17e9\u180b-\u180d\u1810-\u1819\u1820-\u1878\u1880-\u18aa\u18b0-\u18f5\u1900-\u191e\u1920-\u192b\u1930-\u193b\u1946-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u19d0-\u19da\u1a00-\u1a1b\u1a20-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1aa7\u1ab0-\u1abd\u1b00-\u1b4b\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1bf3\u1c00-\u1c37\u1c40-\u1c49\u1c4d-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1cd0-\u1cd2\u1cd4-\u1cf9\u1d00-\u1df9\u1dfb-\u1f15\u1f18-\u
1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u203f-\u2040\u2054\u2071\u207f\u2090-\u209c\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d7f-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u2de0-\u2dff\u3005-\u3007\u3021-\u302f\u3031-\u3035\u3038-\u303c\u3041-\u3096\u3099-\u309a\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua62b\ua640-\ua66f\ua674-\ua67d\ua67f-\ua6f1\ua717-\ua71f\ua722-\ua788\ua78b-\ua7b9\ua7f7-\ua827\ua840-\ua873\ua880-\ua8c5\ua8d0-\ua8d9\ua8e0-\ua8f7\ua8fb\ua8fd-\ua92d\ua930-\ua953\ua960-\ua97c\ua980-\ua9c0\ua9cf-\ua9d9\ua9e0-\ua9fe\uaa00-\uaa36\uaa40-\uaa4d\uaa50-\uaa59\uaa60-\uaa76\uaa7a-\uaac2\uaadb-\uaadd\uaae0-\uaaef\uaaf2-\uaaf6\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabea\uabec-\uabed\uabf0-\uabf9\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe00-\ufe0f\ufe20-\ufe2f\ufe33-\ufe34\ufe4d-\ufe4f\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff10-\uff19\uff21-\uff3a\uff3f\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc' +xid_continue = 
'0-9A-Z_a-z\xaa\xb5\xb7\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0300-\u0374\u0376-\u0377\u037b-\u037d\u037f\u0386-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u0483-\u0487\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u05d0-\u05ea\u05ef-\u05f2\u0610-\u061a\u0620-\u0669\u066e-\u06d3\u06d5-\u06dc\u06df-\u06e8\u06ea-\u06fc\u06ff\u0710-\u074a\u074d-\u07b1\u07c0-\u07f5\u07fa\u07fd\u0800-\u082d\u0840-\u085b\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u08d3-\u08e1\u08e3-\u0963\u0966-\u096f\u0971-\u0983\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bc-\u09c4\u09c7-\u09c8\u09cb-\u09ce\u09d7\u09dc-\u09dd\u09df-\u09e3\u09e6-\u09f1\u09fc\u09fe\u0a01-\u0a03\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a3c\u0a3e-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a59-\u0a5c\u0a5e\u0a66-\u0a75\u0a81-\u0a83\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abc-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ad0\u0ae0-\u0ae3\u0ae6-\u0aef\u0af9-\u0aff\u0b01-\u0b03\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3c-\u0b44\u0b47-\u0b48\u0b4b-\u0b4d\u0b56-\u0b57\u0b5c-\u0b5d\u0b5f-\u0b63\u0b66-\u0b6f\u0b71\u0b82-\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd0\u0bd7\u0be6-\u0bef\u0c00-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c58-\u0c5a\u0c60-\u0c63\u0c66-\u0c6f\u0c80-\u0c83\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbc-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5-\u0cd6\u0cde\u0ce0-\u0ce3\u0ce6-\u0cef\u0cf1-\u0cf2\u0d00-\u0d03\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d44\u0d46-\u0d48\u0d4a-\u0d4e\u0d54-\u0d57\u0d5f-\u0d63\u0d66-\u0d6f\u0d7a-\u0d7f\u0d82-\u0d83\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0
-\u0dc6\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0de6-\u0def\u0df2-\u0df3\u0e01-\u0e3a\u0e40-\u0e4e\u0e50-\u0e59\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb9\u0ebb-\u0ebd\u0ec0-\u0ec4\u0ec6\u0ec8-\u0ecd\u0ed0-\u0ed9\u0edc-\u0edf\u0f00\u0f18-\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e-\u0f47\u0f49-\u0f6c\u0f71-\u0f84\u0f86-\u0f97\u0f99-\u0fbc\u0fc6\u1000-\u1049\u1050-\u109d\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u135d-\u135f\u1369-\u1371\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1714\u1720-\u1734\u1740-\u1753\u1760-\u176c\u176e-\u1770\u1772-\u1773\u1780-\u17d3\u17d7\u17dc-\u17dd\u17e0-\u17e9\u180b-\u180d\u1810-\u1819\u1820-\u1878\u1880-\u18aa\u18b0-\u18f5\u1900-\u191e\u1920-\u192b\u1930-\u193b\u1946-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u19d0-\u19da\u1a00-\u1a1b\u1a20-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1aa7\u1ab0-\u1abd\u1b00-\u1b4b\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1bf3\u1c00-\u1c37\u1c40-\u1c49\u1c4d-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1cd0-\u1cd2\u1cd4-\u1cf9\u1d00-\u1df9\u1dfb-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u203f-\u2040\u2054\u2071\u207f\u2090-\u209c\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d7f-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u2
de0-\u2dff\u3005-\u3007\u3021-\u302f\u3031-\u3035\u3038-\u303c\u3041-\u3096\u3099-\u309a\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua62b\ua640-\ua66f\ua674-\ua67d\ua67f-\ua6f1\ua717-\ua71f\ua722-\ua788\ua78b-\ua7b9\ua7f7-\ua827\ua840-\ua873\ua880-\ua8c5\ua8d0-\ua8d9\ua8e0-\ua8f7\ua8fb\ua8fd-\ua92d\ua930-\ua953\ua960-\ua97c\ua980-\ua9c0\ua9cf-\ua9d9\ua9e0-\ua9fe\uaa00-\uaa36\uaa40-\uaa4d\uaa50-\uaa59\uaa60-\uaa76\uaa7a-\uaac2\uaadb-\uaadd\uaae0-\uaaef\uaaf2-\uaaf6\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabea\uabec-\uabed\uabf0-\uabf9\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe00-\ufe0f\ufe20-\ufe2f\ufe33-\ufe34\ufe4d-\ufe4f\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff10-\uff19\uff21-\uff3a\uff3f\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U000101fd\U00010280-\U0001029c\U000102a0-\U000102d0\U000102e0\U00010300-\U0001031f\U0001032d-\U0001034a\U00010350-\U0001037a\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U000104a0-\U000104a9\U000104b0-\U000104d3\U000104d8-\U000104fb\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U0
00109bf\U00010a00-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a38-\U00010a3a\U00010a3f\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae6\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010c80-\U00010cb2\U00010cc0-\U00010cf2\U00010d00-\U00010d27\U00010d30-\U00010d39\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f50\U00011000-\U00011046\U00011066-\U0001106f\U0001107f-\U000110ba\U000110d0-\U000110e8\U000110f0-\U000110f9\U00011100-\U00011134\U00011136-\U0001113f\U00011144-\U00011146\U00011150-\U00011173\U00011176\U00011180-\U000111c4\U000111c9-\U000111cc\U000111d0-\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U00011237\U0001123e\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112ea\U000112f0-\U000112f9\U00011300-\U00011303\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133b-\U00011344\U00011347-\U00011348\U0001134b-\U0001134d\U00011350\U00011357\U0001135d-\U00011363\U00011366-\U0001136c\U00011370-\U00011374\U00011400-\U0001144a\U00011450-\U00011459\U0001145e\U00011480-\U000114c5\U000114c7\U000114d0-\U000114d9\U00011580-\U000115b5\U000115b8-\U000115c0\U000115d8-\U000115dd\U00011600-\U00011640\U00011644\U00011650-\U00011659\U00011680-\U000116b7\U000116c0-\U000116c9\U00011700-\U0001171a\U0001171d-\U0001172b\U00011730-\U00011739\U00011800-\U0001183a\U000118a0-\U000118e9\U000118ff\U00011a00-\U00011a3e\U00011a47\U00011a50-\U00011a83\U00011a86-\U00011a99\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c36\U00011c38-\U00011c40\U00011c50-\U00011c59\U00011c72-\U00011c8f\U00011c92-\U00011ca7\U00011ca9-\U00011cb6\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d36\U00011d3a\U00011d3c-\U00011d3d\U00011d3f-\U00011d47\U00011d50-\U00011d59\U00011d60-\U000
11d65\U00011d67-\U00011d68\U00011d6a-\U00011d8e\U00011d90-\U00011d91\U00011d93-\U00011d98\U00011da0-\U00011da9\U00011ee0-\U00011ef6\U00012000-\U00012399\U00012400-\U0001246e\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016a60-\U00016a69\U00016ad0-\U00016aed\U00016af0-\U00016af4\U00016b00-\U00016b36\U00016b40-\U00016b43\U00016b50-\U00016b59\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016e40-\U00016e7f\U00016f00-\U00016f44\U00016f50-\U00016f7e\U00016f8f-\U00016f9f\U00016fe0-\U00016fe1\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001bc9d-\U0001bc9e\U0001d165-\U0001d169\U0001d16d-\U0001d172\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001d7ce-\U0001d7ff\U0001da00-\U0001da36\U0001da3b-\U0001da6c\U0001da75\U0001da84\U0001da9b-\U0001da9f\U0001daa1-\U0001daaf\U0001e000-\U0001e006\U0001e008-\U0001e018\U0001e01b-\U0001e021\U0001e023-\U0001e024\U0001e026-\U0001e02a\U0001e800-\U0001e8c4\U0001e8d0-\U0001e8d6\U0001e900-\U0001e94a\U0001e950-\U0001e959\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001
ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d\U000e0100-\U000e01ef' -xid_start = u'A-Z_a-z\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376-\u0377\u037b-\u037d\u037f\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u05d0-\u05ea\u05ef-\u05f2\u0620-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06e5-\u06e6\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4-\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u09fc\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0af9\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60-\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e40-\u0e46\u0e81-\u0e
82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1878\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cee\u2cf2-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a-\ua62b\ua640-\ua66e\ua67f-\ua69d\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua7b9\ua7f7-\ua801\
ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd-\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\ua9e0-\ua9e4\ua9e6-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc' - -if sys.maxunicode > 0xFFFF: - # non-BMP characters, use only on wide Unicode builds - Cf += u'\U000110bd\U000110cd\U0001bca0-\U0001bca3\U0001d173-\U0001d17a\U000e0001\U000e0020-\U000e007f' - - Cn += 
u'\U0001000c\U00010027\U0001003b\U0001003e\U0001004e-\U0001004f\U0001005e-\U0001007f\U000100fb-\U000100ff\U00010103-\U00010106\U00010134-\U00010136\U0001018f\U0001019c-\U0001019f\U000101a1-\U000101cf\U000101fe-\U0001027f\U0001029d-\U0001029f\U000102d1-\U000102df\U000102fc-\U000102ff\U00010324-\U0001032c\U0001034b-\U0001034f\U0001037b-\U0001037f\U0001039e\U000103c4-\U000103c7\U000103d6-\U000103ff\U0001049e-\U0001049f\U000104aa-\U000104af\U000104d4-\U000104d7\U000104fc-\U000104ff\U00010528-\U0001052f\U00010564-\U0001056e\U00010570-\U000105ff\U00010737-\U0001073f\U00010756-\U0001075f\U00010768-\U000107ff\U00010806-\U00010807\U00010809\U00010836\U00010839-\U0001083b\U0001083d-\U0001083e\U00010856\U0001089f-\U000108a6\U000108b0-\U000108df\U000108f3\U000108f6-\U000108fa\U0001091c-\U0001091e\U0001093a-\U0001093e\U00010940-\U0001097f\U000109b8-\U000109bb\U000109d0-\U000109d1\U00010a04\U00010a07-\U00010a0b\U00010a14\U00010a18\U00010a36-\U00010a37\U00010a3b-\U00010a3e\U00010a49-\U00010a4f\U00010a59-\U00010a5f\U00010aa0-\U00010abf\U00010ae7-\U00010aea\U00010af7-\U00010aff\U00010b36-\U00010b38\U00010b56-\U00010b57\U00010b73-\U00010b77\U00010b92-\U00010b98\U00010b9d-\U00010ba8\U00010bb0-\U00010bff\U00010c49-\U00010c7f\U00010cb3-\U00010cbf\U00010cf3-\U00010cf9\U00010d28-\U00010d2f\U00010d3a-\U00010e5f\U00010e7f-\U00010eff\U00010f28-\U00010f2f\U00010f5a-\U00010fff\U0001104e-\U00011051\U00011070-\U0001107e\U000110c2-\U000110cc\U000110ce-\U000110cf\U000110e9-\U000110ef\U000110fa-\U000110ff\U00011135\U00011147-\U0001114f\U00011177-\U0001117f\U000111ce-\U000111cf\U000111e0\U000111f5-\U000111ff\U00011212\U0001123f-\U0001127f\U00011287\U00011289\U0001128e\U0001129e\U000112aa-\U000112af\U000112eb-\U000112ef\U000112fa-\U000112ff\U00011304\U0001130d-\U0001130e\U00011311-\U00011312\U00011329\U00011331\U00011334\U0001133a\U00011345-\U00011346\U00011349-\U0001134a\U0001134e-\U0001134f\U00011351-\U00011356\U00011358-\U0001135c\U00011364-\U00011365\U0001136d-\U0001136f\U00011375-\U000113ff\U000
1145a\U0001145c\U0001145f-\U0001147f\U000114c8-\U000114cf\U000114da-\U0001157f\U000115b6-\U000115b7\U000115de-\U000115ff\U00011645-\U0001164f\U0001165a-\U0001165f\U0001166d-\U0001167f\U000116b8-\U000116bf\U000116ca-\U000116ff\U0001171b-\U0001171c\U0001172c-\U0001172f\U00011740-\U000117ff\U0001183c-\U0001189f\U000118f3-\U000118fe\U00011900-\U000119ff\U00011a48-\U00011a4f\U00011a84-\U00011a85\U00011aa3-\U00011abf\U00011af9-\U00011bff\U00011c09\U00011c37\U00011c46-\U00011c4f\U00011c6d-\U00011c6f\U00011c90-\U00011c91\U00011ca8\U00011cb7-\U00011cff\U00011d07\U00011d0a\U00011d37-\U00011d39\U00011d3b\U00011d3e\U00011d48-\U00011d4f\U00011d5a-\U00011d5f\U00011d66\U00011d69\U00011d8f\U00011d92\U00011d99-\U00011d9f\U00011daa-\U00011edf\U00011ef9-\U00011fff\U0001239a-\U000123ff\U0001246f\U00012475-\U0001247f\U00012544-\U00012fff\U0001342f-\U000143ff\U00014647-\U000167ff\U00016a39-\U00016a3f\U00016a5f\U00016a6a-\U00016a6d\U00016a70-\U00016acf\U00016aee-\U00016aef\U00016af6-\U00016aff\U00016b46-\U00016b4f\U00016b5a\U00016b62\U00016b78-\U00016b7c\U00016b90-\U00016e3f\U00016e9b-\U00016eff\U00016f45-\U00016f4f\U00016f7f-\U00016f8e\U00016fa0-\U00016fdf\U00016fe2-\U00016fff\U000187f2-\U000187ff\U00018af3-\U0001afff\U0001b11f-\U0001b16f\U0001b2fc-\U0001bbff\U0001bc6b-\U0001bc6f\U0001bc7d-\U0001bc7f\U0001bc89-\U0001bc8f\U0001bc9a-\U0001bc9b\U0001bca4-\U0001cfff\U0001d0f6-\U0001d0ff\U0001d127-\U0001d128\U0001d1e9-\U0001d1ff\U0001d246-\U0001d2df\U0001d2f4-\U0001d2ff\U0001d357-\U0001d35f\U0001d379-\U0001d3ff\U0001d455\U0001d49d\U0001d4a0-\U0001d4a1\U0001d4a3-\U0001d4a4\U0001d4a7-\U0001d4a8\U0001d4ad\U0001d4ba\U0001d4bc\U0001d4c4\U0001d506\U0001d50b-\U0001d50c\U0001d515\U0001d51d\U0001d53a\U0001d53f\U0001d545\U0001d547-\U0001d549\U0001d551\U0001d6a6-\U0001d6a7\U0001d7cc-\U0001d7cd\U0001da8c-\U0001da9a\U0001daa0\U0001dab0-\U0001dfff\U0001e007\U0001e019-\U0001e01a\U0001e022\U0001e025\U0001e02b-\U0001e7ff\U0001e8c5-\U0001e8c6\U0001e8d7-\U0001e8ff\U0001e94b-\U0001e94f\U0001e95a-\U0001e95d\U0001
e960-\U0001ec70\U0001ecb5-\U0001edff\U0001ee04\U0001ee20\U0001ee23\U0001ee25-\U0001ee26\U0001ee28\U0001ee33\U0001ee38\U0001ee3a\U0001ee3c-\U0001ee41\U0001ee43-\U0001ee46\U0001ee48\U0001ee4a\U0001ee4c\U0001ee50\U0001ee53\U0001ee55-\U0001ee56\U0001ee58\U0001ee5a\U0001ee5c\U0001ee5e\U0001ee60\U0001ee63\U0001ee65-\U0001ee66\U0001ee6b\U0001ee73\U0001ee78\U0001ee7d\U0001ee7f\U0001ee8a\U0001ee9c-\U0001eea0\U0001eea4\U0001eeaa\U0001eebc-\U0001eeef\U0001eef2-\U0001efff\U0001f02c-\U0001f02f\U0001f094-\U0001f09f\U0001f0af-\U0001f0b0\U0001f0c0\U0001f0d0\U0001f0f6-\U0001f0ff\U0001f10d-\U0001f10f\U0001f16c-\U0001f16f\U0001f1ad-\U0001f1e5\U0001f203-\U0001f20f\U0001f23c-\U0001f23f\U0001f249-\U0001f24f\U0001f252-\U0001f25f\U0001f266-\U0001f2ff\U0001f6d5-\U0001f6df\U0001f6ed-\U0001f6ef\U0001f6fa-\U0001f6ff\U0001f774-\U0001f77f\U0001f7d9-\U0001f7ff\U0001f80c-\U0001f80f\U0001f848-\U0001f84f\U0001f85a-\U0001f85f\U0001f888-\U0001f88f\U0001f8ae-\U0001f8ff\U0001f90c-\U0001f90f\U0001f93f\U0001f971-\U0001f972\U0001f977-\U0001f979\U0001f97b\U0001f9a3-\U0001f9af\U0001f9ba-\U0001f9bf\U0001f9c3-\U0001f9cf\U0001fa00-\U0001fa5f\U0001fa6e-\U0001ffff\U0002a6d7-\U0002a6ff\U0002b735-\U0002b73f\U0002b81e-\U0002b81f\U0002cea2-\U0002ceaf\U0002ebe1-\U0002f7ff\U0002fa1e-\U000e0000\U000e0002-\U000e001f\U000e0080-\U000e00ff\U000e01f0-\U000effff\U000ffffe-\U000fffff\U0010fffe-\U0010ffff' - - Co += u'\U000f0000-\U000ffffd\U00100000-\U0010fffd' - - Ll += 
u'\U00010428-\U0001044f\U000104d8-\U000104fb\U00010cc0-\U00010cf2\U000118c0-\U000118df\U00016e60-\U00016e7f\U0001d41a-\U0001d433\U0001d44e-\U0001d454\U0001d456-\U0001d467\U0001d482-\U0001d49b\U0001d4b6-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d4cf\U0001d4ea-\U0001d503\U0001d51e-\U0001d537\U0001d552-\U0001d56b\U0001d586-\U0001d59f\U0001d5ba-\U0001d5d3\U0001d5ee-\U0001d607\U0001d622-\U0001d63b\U0001d656-\U0001d66f\U0001d68a-\U0001d6a5\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6e1\U0001d6fc-\U0001d714\U0001d716-\U0001d71b\U0001d736-\U0001d74e\U0001d750-\U0001d755\U0001d770-\U0001d788\U0001d78a-\U0001d78f\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7c9\U0001d7cb\U0001e922-\U0001e943' - - Lm += u'\U00016b40-\U00016b43\U00016f93-\U00016f9f\U00016fe0-\U00016fe1' - - Lo += u'\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031f\U0001032d-\U00010340\U00010342-\U00010349\U00010350-\U00010375\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U00010450-\U0001049d\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae4\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010d00-\U00010d23\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f45\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011144\U00011150-\U00011172\U00011176\U00011183-\U000111b2
\U000111c1-\U000111c4\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U0001122b\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112de\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133d\U00011350\U0001135d-\U00011361\U00011400-\U00011434\U00011447-\U0001144a\U00011480-\U000114af\U000114c4-\U000114c5\U000114c7\U00011580-\U000115ae\U000115d8-\U000115db\U00011600-\U0001162f\U00011644\U00011680-\U000116aa\U00011700-\U0001171a\U00011800-\U0001182b\U000118ff\U00011a00\U00011a0b-\U00011a32\U00011a3a\U00011a50\U00011a5c-\U00011a83\U00011a86-\U00011a89\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c2e\U00011c40\U00011c72-\U00011c8f\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d30\U00011d46\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d89\U00011d98\U00011ee0-\U00011ef2\U00012000-\U00012399\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016ad0-\U00016aed\U00016b00-\U00016b2f\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016f00-\U00016f44\U00016f50\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001e800-\U0001e8c4\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1
\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d' - - Lu += u'\U00010400-\U00010427\U000104b0-\U000104d3\U00010c80-\U00010cb2\U000118a0-\U000118bf\U00016e40-\U00016e5f\U0001d400-\U0001d419\U0001d434-\U0001d44d\U0001d468-\U0001d481\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b5\U0001d4d0-\U0001d4e9\U0001d504-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d538-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d56c-\U0001d585\U0001d5a0-\U0001d5b9\U0001d5d4-\U0001d5ed\U0001d608-\U0001d621\U0001d63c-\U0001d655\U0001d670-\U0001d689\U0001d6a8-\U0001d6c0\U0001d6e2-\U0001d6fa\U0001d71c-\U0001d734\U0001d756-\U0001d76e\U0001d790-\U0001d7a8\U0001d7ca\U0001e900-\U0001e921' - - Mc += u'\U00011000\U00011002\U00011082\U000110b0-\U000110b2\U000110b7-\U000110b8\U0001112c\U00011145-\U00011146\U00011182\U000111b3-\U000111b5\U000111bf-\U000111c0\U0001122c-\U0001122e\U00011232-\U00011233\U00011235\U000112e0-\U000112e2\U00011302-\U00011303\U0001133e-\U0001133f\U00011341-\U00011344\U00011347-\U00011348\U0001134b-\U0001134d\U00011357\U00011362-\U00011363\U00011435-\U00011437\U00011440-\U00011441\U00011445\U000114b0-\U000114b2\U000114b9\U000114bb-\U000114be\U000114c1\U000115af-\U000115b1\U000115b8-\U000115bb\U000115be\U00011630-\U00011632\U0001163b-\U0001163c\U0001163e\U000116ac\U000116ae-\U000116af\U000116b6\U00011720-\U00011721\U00011726\U0001182c-\U0001182e\U00011838\U00011a39\U00011a57-\U00011a58\U00011a97\U00011c2f\U00011c3e\U00011ca9\U00011cb1\U00011cb4\U00011d8a-\U00011d8e\U00011d93-\U00011d94\U00011d96\U00011ef5-\U00011ef6\U00016f51-\U00016f7e\U0001d165-\U0001d166\U0001d16d-\U0001d172' - - Mn += 
u'\U000101fd\U000102e0\U00010376-\U0001037a\U00010a01-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a0f\U00010a38-\U00010a3a\U00010a3f\U00010ae5-\U00010ae6\U00010d24-\U00010d27\U00010f46-\U00010f50\U00011001\U00011038-\U00011046\U0001107f-\U00011081\U000110b3-\U000110b6\U000110b9-\U000110ba\U00011100-\U00011102\U00011127-\U0001112b\U0001112d-\U00011134\U00011173\U00011180-\U00011181\U000111b6-\U000111be\U000111c9-\U000111cc\U0001122f-\U00011231\U00011234\U00011236-\U00011237\U0001123e\U000112df\U000112e3-\U000112ea\U00011300-\U00011301\U0001133b-\U0001133c\U00011340\U00011366-\U0001136c\U00011370-\U00011374\U00011438-\U0001143f\U00011442-\U00011444\U00011446\U0001145e\U000114b3-\U000114b8\U000114ba\U000114bf-\U000114c0\U000114c2-\U000114c3\U000115b2-\U000115b5\U000115bc-\U000115bd\U000115bf-\U000115c0\U000115dc-\U000115dd\U00011633-\U0001163a\U0001163d\U0001163f-\U00011640\U000116ab\U000116ad\U000116b0-\U000116b5\U000116b7\U0001171d-\U0001171f\U00011722-\U00011725\U00011727-\U0001172b\U0001182f-\U00011837\U00011839-\U0001183a\U00011a01-\U00011a0a\U00011a33-\U00011a38\U00011a3b-\U00011a3e\U00011a47\U00011a51-\U00011a56\U00011a59-\U00011a5b\U00011a8a-\U00011a96\U00011a98-\U00011a99\U00011c30-\U00011c36\U00011c38-\U00011c3d\U00011c3f\U00011c92-\U00011ca7\U00011caa-\U00011cb0\U00011cb2-\U00011cb3\U00011cb5-\U00011cb6\U00011d31-\U00011d36\U00011d3a\U00011d3c-\U00011d3d\U00011d3f-\U00011d45\U00011d47\U00011d90-\U00011d91\U00011d95\U00011d97\U00011ef3-\U00011ef4\U00016af0-\U00016af4\U00016b30-\U00016b36\U00016f8f-\U00016f92\U0001bc9d-\U0001bc9e\U0001d167-\U0001d169\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U0001da00-\U0001da36\U0001da3b-\U0001da6c\U0001da75\U0001da84\U0001da9b-\U0001da9f\U0001daa1-\U0001daaf\U0001e000-\U0001e006\U0001e008-\U0001e018\U0001e01b-\U0001e021\U0001e023-\U0001e024\U0001e026-\U0001e02a\U0001e8d0-\U0001e8d6\U0001e944-\U0001e94a\U000e0100-\U000e01ef' - - Nd += 
u'\U000104a0-\U000104a9\U00010d30-\U00010d39\U00011066-\U0001106f\U000110f0-\U000110f9\U00011136-\U0001113f\U000111d0-\U000111d9\U000112f0-\U000112f9\U00011450-\U00011459\U000114d0-\U000114d9\U00011650-\U00011659\U000116c0-\U000116c9\U00011730-\U00011739\U000118e0-\U000118e9\U00011c50-\U00011c59\U00011d50-\U00011d59\U00011da0-\U00011da9\U00016a60-\U00016a69\U00016b50-\U00016b59\U0001d7ce-\U0001d7ff\U0001e950-\U0001e959' - - Nl += u'\U00010140-\U00010174\U00010341\U0001034a\U000103d1-\U000103d5\U00012400-\U0001246e' - - No += u'\U00010107-\U00010133\U00010175-\U00010178\U0001018a-\U0001018b\U000102e1-\U000102fb\U00010320-\U00010323\U00010858-\U0001085f\U00010879-\U0001087f\U000108a7-\U000108af\U000108fb-\U000108ff\U00010916-\U0001091b\U000109bc-\U000109bd\U000109c0-\U000109cf\U000109d2-\U000109ff\U00010a40-\U00010a48\U00010a7d-\U00010a7e\U00010a9d-\U00010a9f\U00010aeb-\U00010aef\U00010b58-\U00010b5f\U00010b78-\U00010b7f\U00010ba9-\U00010baf\U00010cfa-\U00010cff\U00010e60-\U00010e7e\U00010f1d-\U00010f26\U00010f51-\U00010f54\U00011052-\U00011065\U000111e1-\U000111f4\U0001173a-\U0001173b\U000118ea-\U000118f2\U00011c5a-\U00011c6c\U00016b5b-\U00016b61\U00016e80-\U00016e96\U0001d2e0-\U0001d2f3\U0001d360-\U0001d378\U0001e8c7-\U0001e8cf\U0001ec71-\U0001ecab\U0001ecad-\U0001ecaf\U0001ecb1-\U0001ecb4\U0001f100-\U0001f10c' - - Po += 
u'\U00010100-\U00010102\U0001039f\U000103d0\U0001056f\U00010857\U0001091f\U0001093f\U00010a50-\U00010a58\U00010a7f\U00010af0-\U00010af6\U00010b39-\U00010b3f\U00010b99-\U00010b9c\U00010f55-\U00010f59\U00011047-\U0001104d\U000110bb-\U000110bc\U000110be-\U000110c1\U00011140-\U00011143\U00011174-\U00011175\U000111c5-\U000111c8\U000111cd\U000111db\U000111dd-\U000111df\U00011238-\U0001123d\U000112a9\U0001144b-\U0001144f\U0001145b\U0001145d\U000114c6\U000115c1-\U000115d7\U00011641-\U00011643\U00011660-\U0001166c\U0001173c-\U0001173e\U0001183b\U00011a3f-\U00011a46\U00011a9a-\U00011a9c\U00011a9e-\U00011aa2\U00011c41-\U00011c45\U00011c70-\U00011c71\U00011ef7-\U00011ef8\U00012470-\U00012474\U00016a6e-\U00016a6f\U00016af5\U00016b37-\U00016b3b\U00016b44\U00016e97-\U00016e9a\U0001bc9f\U0001da87-\U0001da8b\U0001e95e-\U0001e95f' - - Sc += u'\U0001ecb0' - - Sk += u'\U0001f3fb-\U0001f3ff' - - Sm += u'\U0001d6c1\U0001d6db\U0001d6fb\U0001d715\U0001d735\U0001d74f\U0001d76f\U0001d789\U0001d7a9\U0001d7c3\U0001eef0-\U0001eef1' - - So += 
u'\U00010137-\U0001013f\U00010179-\U00010189\U0001018c-\U0001018e\U00010190-\U0001019b\U000101a0\U000101d0-\U000101fc\U00010877-\U00010878\U00010ac8\U0001173f\U00016b3c-\U00016b3f\U00016b45\U0001bc9c\U0001d000-\U0001d0f5\U0001d100-\U0001d126\U0001d129-\U0001d164\U0001d16a-\U0001d16c\U0001d183-\U0001d184\U0001d18c-\U0001d1a9\U0001d1ae-\U0001d1e8\U0001d200-\U0001d241\U0001d245\U0001d300-\U0001d356\U0001d800-\U0001d9ff\U0001da37-\U0001da3a\U0001da6d-\U0001da74\U0001da76-\U0001da83\U0001da85-\U0001da86\U0001ecac\U0001f000-\U0001f02b\U0001f030-\U0001f093\U0001f0a0-\U0001f0ae\U0001f0b1-\U0001f0bf\U0001f0c1-\U0001f0cf\U0001f0d1-\U0001f0f5\U0001f110-\U0001f16b\U0001f170-\U0001f1ac\U0001f1e6-\U0001f202\U0001f210-\U0001f23b\U0001f240-\U0001f248\U0001f250-\U0001f251\U0001f260-\U0001f265\U0001f300-\U0001f3fa\U0001f400-\U0001f6d4\U0001f6e0-\U0001f6ec\U0001f6f0-\U0001f6f9\U0001f700-\U0001f773\U0001f780-\U0001f7d8\U0001f800-\U0001f80b\U0001f810-\U0001f847\U0001f850-\U0001f859\U0001f860-\U0001f887\U0001f890-\U0001f8ad\U0001f900-\U0001f90b\U0001f910-\U0001f93e\U0001f940-\U0001f970\U0001f973-\U0001f976\U0001f97a\U0001f97c-\U0001f9a2\U0001f9b0-\U0001f9b9\U0001f9c0-\U0001f9c2\U0001f9d0-\U0001f9ff\U0001fa60-\U0001fa6d' - - xid_continue += 
u'\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U000101fd\U00010280-\U0001029c\U000102a0-\U000102d0\U000102e0\U00010300-\U0001031f\U0001032d-\U0001034a\U00010350-\U0001037a\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U000104a0-\U000104a9\U000104b0-\U000104d3\U000104d8-\U000104fb\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a38-\U00010a3a\U00010a3f\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae6\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010c80-\U00010cb2\U00010cc0-\U00010cf2\U00010d00-\U00010d27\U00010d30-\U00010d39\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f50\U00011000-\U00011046\U00011066-\U0001106f\U0001107f-\U000110ba\U000110d0-\U000110e8\U000110f0-\U000110f9\U00011100-\U00011134\U00011136-\U0001113f\U00011144-\U00011146\U00011150-\U00011173\U00011176\U00011180-\U000111c4\U000111c9-\U000111cc\U000111d0-\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U00011237\U0001123e\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112ea\U000112f0-\U000112f9\U00011300-\U00011303\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133b-\U00011344\U00011347-\U00011348\U0001134b-\U0001134d\U00011350\U00011357\U0001135
d-\U00011363\U00011366-\U0001136c\U00011370-\U00011374\U00011400-\U0001144a\U00011450-\U00011459\U0001145e\U00011480-\U000114c5\U000114c7\U000114d0-\U000114d9\U00011580-\U000115b5\U000115b8-\U000115c0\U000115d8-\U000115dd\U00011600-\U00011640\U00011644\U00011650-\U00011659\U00011680-\U000116b7\U000116c0-\U000116c9\U00011700-\U0001171a\U0001171d-\U0001172b\U00011730-\U00011739\U00011800-\U0001183a\U000118a0-\U000118e9\U000118ff\U00011a00-\U00011a3e\U00011a47\U00011a50-\U00011a83\U00011a86-\U00011a99\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c36\U00011c38-\U00011c40\U00011c50-\U00011c59\U00011c72-\U00011c8f\U00011c92-\U00011ca7\U00011ca9-\U00011cb6\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d36\U00011d3a\U00011d3c-\U00011d3d\U00011d3f-\U00011d47\U00011d50-\U00011d59\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d8e\U00011d90-\U00011d91\U00011d93-\U00011d98\U00011da0-\U00011da9\U00011ee0-\U00011ef6\U00012000-\U00012399\U00012400-\U0001246e\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016a60-\U00016a69\U00016ad0-\U00016aed\U00016af0-\U00016af4\U00016b00-\U00016b36\U00016b40-\U00016b43\U00016b50-\U00016b59\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016e40-\U00016e7f\U00016f00-\U00016f44\U00016f50-\U00016f7e\U00016f8f-\U00016f9f\U00016fe0-\U00016fe1\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001bc9d-\U0001bc9e\U0001d165-\U0001d169\U0001d16d-\U0001d172\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d5
40-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001d7ce-\U0001d7ff\U0001da00-\U0001da36\U0001da3b-\U0001da6c\U0001da75\U0001da84\U0001da9b-\U0001da9f\U0001daa1-\U0001daaf\U0001e000-\U0001e006\U0001e008-\U0001e018\U0001e01b-\U0001e021\U0001e023-\U0001e024\U0001e026-\U0001e02a\U0001e800-\U0001e8c4\U0001e8d0-\U0001e8d6\U0001e900-\U0001e94a\U0001e950-\U0001e959\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d\U000e0100-\U000e01ef' - - xid_start += 
u'\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031f\U0001032d-\U0001034a\U00010350-\U00010375\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U000104b0-\U000104d3\U000104d8-\U000104fb\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae4\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010c80-\U00010cb2\U00010cc0-\U00010cf2\U00010d00-\U00010d23\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f45\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011144\U00011150-\U00011172\U00011176\U00011183-\U000111b2\U000111c1-\U000111c4\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U0001122b\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112de\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133d\U00011350\U0001135d-\U00011361\U00011400-\U00011434\U00011447-\U0001144a\U00011480-\U000114af\U000114c4-\U000114c5\U000114c7\U00011580-\U000115ae\U000115d8-\U000115db\U00011600-\U0001162f\U00011644\U00011680-\U000116aa\U00011700-\U0001171a\U00011800-\U0001182b\U000118a0-\U000118df\U000118ff\U00011a00\U00011a0b-\U00011a32\U00011a3a\U00011a50\
U00011a5c-\U00011a83\U00011a86-\U00011a89\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c2e\U00011c40\U00011c72-\U00011c8f\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d30\U00011d46\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d89\U00011d98\U00011ee0-\U00011ef2\U00012000-\U00012399\U00012400-\U0001246e\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016ad0-\U00016aed\U00016b00-\U00016b2f\U00016b40-\U00016b43\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016e40-\U00016e7f\U00016f00-\U00016f44\U00016f50\U00016f93-\U00016f9f\U00016fe0-\U00016fe1\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001e800-\U0001e8c4\U0001e900-\U0001e943\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a
6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d' +xid_start = 'A-Z_a-z\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376-\u0377\u037b-\u037d\u037f\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u05d0-\u05ea\u05ef-\u05f2\u0620-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06e5-\u06e6\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4-\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u09fc\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0af9\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60-\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e40-\u0e46\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\
u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1878\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cee\u2cf2-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a-\ua62b\ua640-\ua66e\ua67f-\ua69d\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua7b9\ua7f7-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd-\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\ua9e0-\ua9e4\ua9e6-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea
\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031f\U0001032d-\U0001034a\U00010350-\U00010375\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U000104b0-\U000104d3\U000104d8-\U000104fb\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae4\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010c80-\U00010cb2\U00010cc0-\U00010cf2\U00010d00-\U00010d23\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f45\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011144\U00011150-\U00011172\U00011176\U00011183-\U000111b2\U000111c1-\U000111c4\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U0001122b\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U000112
9f-\U000112a8\U000112b0-\U000112de\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133d\U00011350\U0001135d-\U00011361\U00011400-\U00011434\U00011447-\U0001144a\U00011480-\U000114af\U000114c4-\U000114c5\U000114c7\U00011580-\U000115ae\U000115d8-\U000115db\U00011600-\U0001162f\U00011644\U00011680-\U000116aa\U00011700-\U0001171a\U00011800-\U0001182b\U000118a0-\U000118df\U000118ff\U00011a00\U00011a0b-\U00011a32\U00011a3a\U00011a50\U00011a5c-\U00011a83\U00011a86-\U00011a89\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c2e\U00011c40\U00011c72-\U00011c8f\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d30\U00011d46\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d89\U00011d98\U00011ee0-\U00011ef2\U00012000-\U00012399\U00012400-\U0001246e\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016ad0-\U00016aed\U00016b00-\U00016b2f\U00016b40-\U00016b43\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016e40-\U00016e7f\U00016f00-\U00016f44\U00016f50\U00016f93-\U00016f9f\U00016fe0-\U00016fe1\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001e800-\U0001e8c4\U0001e900-\U0001e943\U0001ee00-\U0001ee03
\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d' cats = ['Cc', 'Cf', 'Cn', 'Co', 'Cs', 'Ll', 'Lm', 'Lo', 'Lt', 'Lu', 'Mc', 'Me', 'Mn', 'Nd', 'Nl', 'No', 'Pc', 'Pd', 'Pe', 'Pf', 'Pi', 'Po', 'Ps', 'Sc', 'Sk', 'Sm', 'So', 'Zl', 'Zp', 'Zs'] # Generated from unidata 11.0.0 def combine(*args): - return u''.join(globals()[cat] for cat in args) + return ''.join(globals()[cat] for cat in args) def allexcept(*args): newcats = cats[:] for arg in args: newcats.remove(arg) - return u''.join(globals()[cat] for cat in newcats) + return ''.join(globals()[cat] for cat in newcats) def _handle_runs(char_list): # pragma: no cover @@ -150,18 +107,13 @@ def _handle_runs(char_list): # pragma: no cover if a == b: yield a else: - yield u'%s-%s' % (a, b) + yield '%s-%s' % (a, b) if __name__ == '__main__': # pragma: no cover import unicodedata - # we need Py3 for the determination of the XID_* properties - if sys.version_info[:2] < (3, 3): - raise RuntimeError('this file must be regenerated with Python 3.3+') - - categories_bmp = {'xid_start': [], 'xid_continue': []} - categories_nonbmp = {'xid_start': [], 'xid_continue': []} + categories = {'xid_start': [], 'xid_continue': []} with open(__file__) as fp: content = fp.read() @@ -175,43 +127,26 @@ def _handle_runs(char_list): # pragma: no cover if ord(c) == 0xdc00: # Hack to avoid combining this combining with the preceeding high # surrogate, 0xdbff, when doing a repr. 
- c = u'\\' + c + c = '\\' + c elif ord(c) in (0x2d, 0x5b, 0x5c, 0x5d, 0x5e): # Escape regex metachars. - c = u'\\' + c - cat_dic = categories_bmp if code < 0x10000 else categories_nonbmp - cat_dic.setdefault(cat, []).append(c) + c = '\\' + c + categories.setdefault(cat, []).append(c) # XID_START and XID_CONTINUE are special categories used for matching # identifiers in Python 3. if c.isidentifier(): - cat_dic['xid_start'].append(c) + categories['xid_start'].append(c) if ('a' + c).isidentifier(): - cat_dic['xid_continue'].append(c) + categories['xid_continue'].append(c) with open(__file__, 'w') as fp: fp.write(header) - for cat in sorted(categories_bmp): - val = u''.join(_handle_runs(categories_bmp[cat])) - if cat == 'Cs': - # Jython can't handle isolated surrogates - fp.write("""\ -try: - Cs = eval(r"u%s") -except UnicodeDecodeError: - Cs = '' # Jython can't handle isolated surrogates\n\n""" % ascii(val)) - else: - fp.write('%s = u%a\n\n' % (cat, val)) - - fp.write('if sys.maxunicode > 0xFFFF:\n') - fp.write(' # non-BMP characters, use only on wide Unicode builds\n') - for cat in sorted(categories_nonbmp): - # no special case for Cs needed, since there are no surrogates - # in the higher planes - val = u''.join(_handle_runs(categories_nonbmp[cat])) - fp.write(' %s += u%a\n\n' % (cat, val)) + for cat in sorted(categories): + val = ''.join(_handle_runs(categories[cat])) + fp.write('%s = %a\n\n' % (cat, val)) - cats = sorted(categories_bmp) + cats = sorted(categories) cats.remove('xid_start') cats.remove('xid_continue') fp.write('cats = %r\n\n' % cats) diff --git a/src/typecode/_vendor/pygments/util.py b/src/typecode/_vendor/pygments/util.py index 9bc7266..e6f9ed7 100644 --- a/src/typecode/_vendor/pygments/util.py +++ b/src/typecode/_vendor/pygments/util.py @@ -5,17 +5,16 @@ Utility functions. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. 
:license: BSD, see LICENSE for details. """ import re -import sys +from io import TextIOWrapper split_path_re = re.compile(r'[/\\ ]') doctype_lookup_re = re.compile(r''' - (<\?.*?\?>)?\s* >> from typecode._vendor.pygments.util import shebang_matches + >>> from pygments.util import shebang_matches >>> shebang_matches('#!/usr/bin/env python', r'python(2\.\d)?') True >>> shebang_matches('#!/usr/bin/python2.4', r'python(2\.\d)?') @@ -173,10 +172,10 @@ def doctype_matches(text, regex): Note that this method only checks the first part of a DOCTYPE. eg: 'html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"' """ - m = doctype_lookup_re.match(text) + m = doctype_lookup_re.search(text) if m is None: return False - doctype = m.group(2) + doctype = m.group(1) return re.compile(regex, re.I).match(doctype.strip()) is not None @@ -196,7 +195,7 @@ def looks_like_xml(text): try: return _looks_like_xml_cache[key] except KeyError: - m = doctype_lookup_re.match(text) + m = doctype_lookup_re.search(text) if m is not None: return True rv = tag_re.search(text[:1000]) is not None @@ -204,56 +203,15 @@ def looks_like_xml(text): return rv -# Python narrow build compatibility - -def _surrogatepair(c): - # Given a unicode character code - # with length greater than 16 bits, - # return the two 16 bit surrogate pair. +def surrogatepair(c): + """Given a unicode character code with length greater than 16 bits, + return the two 16 bit surrogate pair. 
+ """ # From example D28 of: # http://www.unicode.org/book/ch03.pdf return (0xd7c0 + (c >> 10), (0xdc00 + (c & 0x3ff))) -def unirange(a, b): - """Returns a regular expression string to match the given non-BMP range.""" - if b < a: - raise ValueError("Bad character range") - if a < 0x10000 or b < 0x10000: - raise ValueError("unirange is only defined for non-BMP ranges") - - if sys.maxunicode > 0xffff: - # wide build - return u'[%s-%s]' % (unichr(a), unichr(b)) - else: - # narrow build stores surrogates, and the 're' module handles them - # (incorrectly) as characters. Since there is still ordering among - # these characters, expand the range to one that it understands. Some - # background in http://bugs.python.org/issue3665 and - # http://bugs.python.org/issue12749 - # - # Additionally, the lower constants are using unichr rather than - # literals because jython [which uses the wide path] can't load this - # file if they are literals. - ah, al = _surrogatepair(a) - bh, bl = _surrogatepair(b) - if ah == bh: - return u'(?:%s[%s-%s])' % (unichr(ah), unichr(al), unichr(bl)) - else: - buf = [] - buf.append(u'%s[%s-%s]' % - (unichr(ah), unichr(al), - ah == bh and unichr(bl) or unichr(0xdfff))) - if ah - bh > 1: - buf.append(u'[%s-%s][%s-%s]' % - unichr(ah+1), unichr(bh-1), unichr(0xdc00), unichr(0xdfff)) - if ah != bh: - buf.append(u'%s[%s-%s]' % - (unichr(bh), unichr(0xdc00), unichr(bl))) - - return u'(?:' + u'|'.join(buf) + u')' - - def format_lines(var_name, seq, raw=False, indent_level=0): """Formats a sequence of strings for output.""" lines = [] @@ -289,7 +247,7 @@ def duplicates_removed(it, already_seen=()): return lst -class Future(object): +class Future: """Generic class to defer some work. 
Handled specially in RegexLexerMeta, to support regex string construction at @@ -345,44 +303,7 @@ def terminal_encoding(term): return locale.getpreferredencoding() -# Python 2/3 compatibility - -if sys.version_info < (3, 0): - unichr = unichr - xrange = xrange - string_types = (str, unicode) - text_type = unicode - u_prefix = 'u' - iteritems = dict.iteritems - itervalues = dict.itervalues - import StringIO - import cStringIO - # unfortunately, io.StringIO in Python 2 doesn't accept str at all - StringIO = StringIO.StringIO - BytesIO = cStringIO.StringIO -else: - unichr = chr - xrange = range - string_types = (str,) - text_type = str - u_prefix = '' - iteritems = dict.items - itervalues = dict.values - from io import StringIO, BytesIO, TextIOWrapper - - class UnclosingTextIOWrapper(TextIOWrapper): - # Don't close underlying buffer on destruction. - def close(self): - self.flush() - - -def add_metaclass(metaclass): - """Class decorator for creating a class with a metaclass.""" - def wrapper(cls): - orig_vars = cls.__dict__.copy() - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) - for slots_var in orig_vars.get('__slots__', ()): - orig_vars.pop(slots_var) - return metaclass(cls.__name__, cls.__bases__, orig_vars) - return wrapper +class UnclosingTextIOWrapper(TextIOWrapper): + # Don't close underlying buffer on destruction. + def close(self): + self.flush() diff --git a/src/typecode/pygments_lexers.py b/src/typecode/pygments_lexers.py index dabbfd4..28fe86b 100644 --- a/src/typecode/pygments_lexers.py +++ b/src/typecode/pygments_lexers.py @@ -5,8 +5,8 @@ Pygments lexers. - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. - :license: BSD, see pygments_lexers.py.ABOUT for details. + :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
""" import re @@ -18,11 +18,15 @@ from typecode.pygments_lexers_mapping import LEXERS from typecode._vendor.pygments.modeline import get_filetype_from_buffer from typecode._vendor.pygments.plugin import find_plugin_lexers -from typecode._vendor.pygments.util import ClassNotFound, itervalues, guess_decode, text_type +from typecode._vendor.pygments.util import ClassNotFound, guess_decode +COMPAT = { + 'Python3Lexer': 'PythonLexer', + 'Python3TracebackLexer': 'PythonTracebackLexer', +} __all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class', - 'guess_lexer', 'load_lexer_from_file'] + list(LEXERS) + 'guess_lexer', 'load_lexer_from_file'] + list(LEXERS) + list(COMPAT) _lexer_cache = {} _pattern_cache = {} @@ -48,7 +52,7 @@ def get_all_lexers(): """Return a generator of tuples in the form ``(name, aliases, filenames, mimetypes)`` of all know lexers. """ - for item in itervalues(LEXERS): + for item in LEXERS.values(): yield item[1:] for lexer in find_plugin_lexers(): yield lexer.name, lexer.aliases, lexer.filenames, lexer.mimetypes @@ -62,7 +66,7 @@ def find_lexer_class(name): if name in _lexer_cache: return _lexer_cache[name] # lookup builtin lexers - for module_name, lname, aliases, _, _ in itervalues(LEXERS): + for module_name, lname, aliases, _, _ in LEXERS.values(): if name == lname: _load_lexers(module_name) return _lexer_cache[name] @@ -82,7 +86,7 @@ def find_lexer_class_by_name(_alias): if not _alias: raise ClassNotFound('no lexer for alias %r found' % _alias) # lookup builtin lexers - for module_name, name, aliases, _, _ in itervalues(LEXERS): + for module_name, name, aliases, _, _ in LEXERS.values(): if _alias.lower() in aliases: if name not in _lexer_cache: _load_lexers(module_name) @@ -103,7 +107,7 @@ def get_lexer_by_name(_alias, **options): raise ClassNotFound('no lexer for alias %r found' % _alias) # lookup builtin lexers - for module_name, name, aliases, _, _ in itervalues(LEXERS): + for module_name, name, aliases, _, _ in 
LEXERS.values(): if _alias.lower() in aliases: if name not in _lexer_cache: _load_lexers(module_name) @@ -143,8 +147,8 @@ def load_lexer_from_file(filename, lexername="CustomLexer", **options): # And finally instantiate it with the options return lexer_class(**options) except IOError as err: - raise ClassNotFound('cannot read %s' % filename) - except ClassNotFound as err: + raise ClassNotFound('cannot read %s: %s' % (filename, err)) + except ClassNotFound: raise except Exception as err: raise ClassNotFound('error when loading custom lexer: %s' % err) @@ -160,7 +164,7 @@ def find_lexer_class_for_filename(_fn, code=None): """ matches = [] fn = basename(_fn) - for modname, name, _, filenames, _ in itervalues(LEXERS): + for modname, name, _, filenames, _ in LEXERS.values(): for filename in filenames: if _fn_matches(fn, filename): if name not in _lexer_cache: @@ -171,7 +175,7 @@ def find_lexer_class_for_filename(_fn, code=None): if _fn_matches(fn, filename): matches.append((cls, filename)) - if sys.version_info > (3,) and isinstance(code, bytes): + if isinstance(code, bytes): # decode it, since all analyse_text functions expect unicode code = guess_decode(code) @@ -212,7 +216,7 @@ def get_lexer_for_mimetype(_mime, **options): Raises ClassNotFound if not found. 
""" - for modname, name, _, _, mimetypes in itervalues(LEXERS): + for modname, name, _, _, mimetypes in LEXERS.values(): if _mime in mimetypes: if name not in _lexer_cache: _load_lexers(modname) @@ -231,8 +235,7 @@ def _iter_lexerclasses(plugins=True): _load_lexers(module_name) yield _lexer_cache[name] if plugins: - for lexer in find_plugin_lexers(): - yield lexer + yield from find_plugin_lexers() def guess_lexer_for_filename(_fn, _text, **options): @@ -288,7 +291,7 @@ def type_sort(t): def guess_lexer(_text, **options): """Guess a lexer by strong distinctions in the text (eg, shebang).""" - if not isinstance(_text, text_type): + if not isinstance(_text, str): inencoding = options.get('inencoding', options.get('encoding')) if inencoding: _text = _text.decode(inencoding or 'utf8') @@ -326,6 +329,8 @@ def __getattr__(self, name): cls = _lexer_cache[info[1]] setattr(self, name, cls) return cls + if name in COMPAT: + return getattr(self, COMPAT[name]) raise AttributeError(name) diff --git a/src/typecode/pygments_lexers.py.ABOUT b/src/typecode/pygments_lexers.py.ABOUT index 332001d..382abd9 100644 --- a/src/typecode/pygments_lexers.py.ABOUT +++ b/src/typecode/pygments_lexers.py.ABOUT @@ -1,23 +1,25 @@ about_resource: pygments_lexers.py +copyright: Copyright (c) by the Pygments team +notes: this is a Pygments file copied from pygments/lexers/__init__.py + to focus on programming languages detection only. It has been modified to be + usable alone. +notice_file: pygments_lexers.py.NOTICE attribute: yes -checksum_md5: ea723daf498a9805b481619c19ab75f8 -checksum_sha1: b22c0e245d2f7874e09bf817352f25462874dd18 +checksum_md5: 665516d1d1c0099241ab6e4c057e26be +checksum_sha1: e0277b8dd2ebce5121a68bec62173b9e0b057742 contact: georg@python.org copyright: Copyright (c) by the Pygments team description: Pygments is a generic syntax highlighter suitable for use in code hosting, forums, wikis or other applications that need to prettify source code. 
-download_url: https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz +download_url: https://files.pythonhosted.org/packages/ba/6e/7a7c13c21d8a4a7f82ccbfe257a045890d4dbf18c023f985f565f97393e3/Pygments-2.9.0.tar.gz homepage_url: http://pygments.org/ license_expression: bsd-simplified AND (bsd-new AND apache-2.0 AND public-domain) name: pygments -notes: this is a Pygments file copied from pygments/lexers/__init__.py - to focus on programming languages detection only. It has been modified to be - usable alone. -notice_file: pygments_lexers.py.NOTICE -notice_url: https://github.com/pygments/pygments/blob/2.4.2/LICENSE owner: Pocoo Team owner_url: http://www.pocoo.org/ -package_url: pkg:pypi/pygments@2.4.2#pygments/lexers/__init__.py +package_url: pkg:pypi/pygments@2.9.0 +primary_language: Python track_changes: yes -vcs_url: https://github.com/pygments/pygments -version: 2.4.2 +type: pypi +version: 2.9.0 +vcs_url: git+https://github.com/pygments/pygments.git diff --git a/src/typecode/pygments_lexers_mapping.py b/src/typecode/pygments_lexers_mapping.py index c9d70a2..97d3cd8 100644 --- a/src/typecode/pygments_lexers_mapping.py +++ b/src/typecode/pygments_lexers_mapping.py @@ -3,7 +3,7 @@ This file is a list of the subset of lexers we care for as actual programming languages from the larger whole generated list at pygments.lexers._mapping - This is based on Pygments 2.4.2 pygments.lexers._mapping and the modification is + This is based on Pygments 2.9.0 pygments.lexers._mapping and the modification is to comment out certain/most lexers. pygments.lexers._mapping @@ -16,17 +16,15 @@ Do not alter the LEXERS dictionary by hand. :copyright: Copyright 2006-2014, 2016 by the Pygments team, see AUTHORS. - :license: BSD, see prog_lexers.py.ABOUT for details. + :license: BSD, see LICENSE for details. 
""" -from __future__ import print_function - LEXERS = { 'ABAPLexer': ('typecode._vendor.pygments.lexers.business', 'ABAP', ('abap',), ('*.abap', '*.ABAP'), ('text/x-abap',)), - # 'APLLexer': ('typecode._vendor.pygments.lexers.apl', 'APL', ('apl',), ('*.apl',), ()), + #'APLLexer': ('typecode._vendor.pygments.lexers.apl', 'APL', ('apl',), ('*.apl',), ()), #'AbnfLexer': ('typecode._vendor.pygments.lexers.grammar_notation', 'ABNF', ('abnf',), ('*.abnf',), ('text/x-abnf',)), - 'ActionScript3Lexer': ('typecode._vendor.pygments.lexers.actionscript', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')), - 'ActionScriptLexer': ('typecode._vendor.pygments.lexers.actionscript', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')), + #'ActionScript3Lexer': ('typecode._vendor.pygments.lexers.actionscript', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')), + #'ActionScriptLexer': ('typecode._vendor.pygments.lexers.actionscript', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')), #'AdaLexer': ('typecode._vendor.pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)), #'AdlLexer': ('typecode._vendor.pygments.lexers.archetype', 'ADL', ('adl',), ('*.adl', '*.adls', '*.adlf', '*.adlx'), ()), #'AgdaLexer': ('typecode._vendor.pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)), @@ -34,8 +32,8 @@ #'AlloyLexer': ('typecode._vendor.pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)), #'AmbientTalkLexer': ('typecode._vendor.pygments.lexers.ambient', 'AmbientTalk', ('at', 'ambienttalk', 'ambienttalk/2'), ('*.at',), ('text/x-ambienttalk',)), #'AmplLexer': 
('typecode._vendor.pygments.lexers.ampl', 'Ampl', ('ampl',), ('*.run',), ()), - 'Angular2HtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML + Angular2', ('html+ng2',), ('*.ng2',), ()), - 'Angular2Lexer': ('typecode._vendor.pygments.lexers.templates', 'Angular2', ('ng2',), (), ()), + #'Angular2HtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML + Angular2', ('html+ng2',), ('*.ng2',), ()), + #'Angular2Lexer': ('typecode._vendor.pygments.lexers.templates', 'Angular2', ('ng2',), (), ()), #'AntlrActionScriptLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-as', 'antlr-actionscript'), ('*.G', '*.g'), ()), #'AntlrCSharpLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()), #'AntlrCppLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()), @@ -46,8 +44,10 @@ #'AntlrPythonLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()), #'AntlrRubyLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With Ruby Target', ('antlr-ruby', 'antlr-rb'), ('*.G', '*.g'), ()), #'ApacheConfLexer': ('typecode._vendor.pygments.lexers.configs', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)), - 'AppleScriptLexer': ('typecode._vendor.pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()), + #'AppleScriptLexer': ('typecode._vendor.pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()), #'ArduinoLexer': ('typecode._vendor.pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)), + #'ArrowLexer': ('typecode._vendor.pygments.arrow', 'Arrow', ('arrow',), ('*.arw',), ()), + 'AspectJLexer': ('typecode._vendor.pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), 
('text/x-aspectj',)), #'AsymptoteLexer': ('typecode._vendor.pygments.lexers.graphics', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)), #'AugeasLexer': ('typecode._vendor.pygments.lexers.configs', 'Augeas', ('augeas',), ('*.aug',), ()), @@ -58,6 +58,7 @@ #'BBCodeLexer': ('typecode._vendor.pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)), #'BCLexer': ('typecode._vendor.pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()), #'BSTLexer': ('typecode._vendor.pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()), + #'BareLexer': ('typecode._vendor.pygments.lexers.bare', 'BARE', ('bare',), ('*.bare',), ()), #'BaseMakefileLexer': ('typecode._vendor.pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()), 'BashLexer': ('typecode._vendor.pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript', 'text/x-shellscript')), #'BashSessionLexer': ('typecode._vendor.pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')), @@ -71,7 +72,6 @@ #'BooLexer': ('typecode._vendor.pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)), #'BoogieLexer': ('typecode._vendor.pygments.lexers.verification', 'Boogie', ('boogie',), ('*.bpl',), ()), #'BrainfuckLexer': ('typecode._vendor.pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)), - #'BroLexer': ('typecode._vendor.pygments.lexers.dsls', 'Bro', ('bro',), ('*.bro',), ()), #'BugsLexer': ('typecode._vendor.pygments.lexers.modeling', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()), #'CAmkESLexer': ('typecode._vendor.pygments.lexers.esoteric', 'CAmkES', ('camkes', 'idl4'), 
('*.camkes', '*.idl4'), ()), 'CLexer': ('typecode._vendor.pygments.lexers.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')), @@ -99,7 +99,7 @@ #'CleanLexer': ('typecode._vendor.pygments.lexers.clean', 'Clean', ('clean',), ('*.icl', '*.dcl'), ()), 'ClojureLexer': ('typecode._vendor.pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')), 'ClojureScriptLexer': ('typecode._vendor.pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')), - 'CobolFreeformatLexer': ('typecode._vendor.pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()), + #'CobolFreeformatLexer': ('typecode._vendor.pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()), 'CobolLexer': ('typecode._vendor.pygments.lexers.business', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)), 'CoffeeScriptLexer': ('typecode._vendor.pygments.lexers.javascript', 'CoffeeScript', ('coffee-script', 'coffeescript', 'coffee'), ('*.coffee',), ('text/coffeescript',)), #'ColdfusionCFCLexer': ('typecode._vendor.pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()), @@ -133,6 +133,7 @@ 'Dasm16Lexer': ('typecode._vendor.pygments.lexers.asm', 'DASM16', ('dasm16',), ('*.dasm16', '*.dasm'), ('text/x-dasm16',)), #'DebianControlLexer': ('typecode._vendor.pygments.lexers.installers', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()), 'DelphiLexer': ('typecode._vendor.pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)), + #'DevicetreeLexer': ('typecode._vendor.pygments.lexers.devicetree', 'Devicetree', ('devicetree', 'dts'), ('*.dts', '*.dtsi'), ('text/x-c',)), #'DgLexer': ('typecode._vendor.pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)), #'DiffLexer': 
('typecode._vendor.pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')), #'DjangoLexer': ('typecode._vendor.pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')), @@ -149,17 +150,20 @@ #'EbnfLexer': ('typecode._vendor.pygments.lexers.parsers', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)), #'EiffelLexer': ('typecode._vendor.pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)), #'ElixirConsoleLexer': ('typecode._vendor.pygments.lexers.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)), - 'ElixirLexer': ('typecode._vendor.pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)), + 'ElixirLexer': ('typecode._vendor.pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.eex', '*.exs'), ('text/x-elixir',)), 'ElmLexer': ('typecode._vendor.pygments.lexers.elm', 'Elm', ('elm',), ('*.elm',), ('text/x-elm',)), #'EmacsLispLexer': ('typecode._vendor.pygments.lexers.lisp', 'EmacsLisp', ('emacs', 'elisp', 'emacs-lisp'), ('*.el',), ('text/x-elisp', 'application/x-elisp')), + #'EmailLexer': ('typecode._vendor.pygments.lexers.email', 'E-mail', ('email', 'eml'), ('*.eml',), ('message/rfc822',)), 'ErbLexer': ('typecode._vendor.pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)), 'ErlangLexer': ('typecode._vendor.pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)), #'ErlangShellLexer': ('typecode._vendor.pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)), #'EvoqueHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)), #'EvoqueLexer': ('typecode._vendor.pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), 
('application/x-evoque',)), #'EvoqueXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)), + #'ExeclineLexer': ('typecode._vendor.pygments.lexers.shell', 'execline', ('execline',), ('*.exec',), ()), #'EzhilLexer': ('typecode._vendor.pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)), 'FSharpLexer': ('typecode._vendor.pygments.lexers.dotnet', 'F#', ('fsharp', 'f#'), ('*.fs', '*.fsi'), ('text/x-fsharp',)), + #'FStarLexer': ('typecode._vendor.pygments.lexers.ml', 'FStar', ('fstar',), ('*.fst', '*.fsti'), ('text/x-fstar',)), #'FactorLexer': ('typecode._vendor.pygments.lexers.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)), #'FancyLexer': ('typecode._vendor.pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)), #'FantomLexer': ('typecode._vendor.pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)), @@ -174,6 +178,7 @@ #'FoxProLexer': ('typecode._vendor.pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()), #'FreeFemLexer': ('typecode._vendor.pygments.lexers.freefem', 'Freefem', ('freefem',), ('*.edp',), ('text/x-freefem',)), #'GAPLexer': ('typecode._vendor.pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()), + #'GDScriptLexer': ('typecode._vendor.pygments.lexers.gdscript', 'GDScript', ('gdscript', 'gd'), ('*.gd',), ('text/x-gdscript', 'application/x-gdscript')), #'GLShaderLexer': ('typecode._vendor.pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)), 'GasLexer': ('typecode._vendor.pygments.lexers.asm', 'GAS', ('gas', 'asm'), ('*.s', '*.S'), ('text/x-gas',)), #'GenshiLexer': ('typecode._vendor.pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')), @@ -225,22 +230,23 @@ 
#'JavascriptDjangoLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Django/Jinja', ('js+django', 'javascript+django', 'js+jinja', 'javascript+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')), #'JavascriptErbLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Ruby', ('js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')), #'JavascriptGenshiLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')), - 'JavascriptLexer': ('typecode._vendor.pygments.lexers.javascript', 'JavaScript', ('js', 'javascript'), ('*.js', '*.jsm'), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')), + 'JavascriptLexer': ('typecode._vendor.pygments.lexers.javascript', 'JavaScript', ('js', 'javascript'), ('*.js', '*.jsm', '*.mjs'), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')), #'JavascriptPhpLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')), #'JavascriptSmartyLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')), #'JclLexer': ('typecode._vendor.pygments.lexers.scripting', 'JCL', ('jcl',), ('*.jcl',), ('text/x-jcl',)), #'JsgfLexer': ('typecode._vendor.pygments.lexers.grammar_notation', 'JSGF', ('jsgf',), ('*.jsgf',), ('application/jsgf', 'application/x-jsgf', 
'text/jsgf')), - #'JsonBareObjectLexer': ('typecode._vendor.pygments.lexers.data', 'JSONBareObject', ('json-object',), (), ('application/json-object',)), + #'JsonBareObjectLexer': ('typecode._vendor.pygments.lexers.data', 'JSONBareObject', (), (), ()), #'JsonLdLexer': ('typecode._vendor.pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)), - 'JsonLexer': ('typecode._vendor.pygments.lexers.data', 'JSON', ('json',), ('*.json',), ('application/json',)), - #'JspLexer': ('typecode._vendor.pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)), + #'JsonLexer': ('typecode._vendor.pygments.lexers.data', 'JSON', ('json', 'json-object'), ('*.json', 'Pipfile.lock'), ('application/json', 'application/json-object')), + 'JspLexer': ('typecode._vendor.pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)), #'JuliaConsoleLexer': ('typecode._vendor.pygments.lexers.julia', 'Julia console', ('jlcon',), (), ()), #'JuliaLexer': ('typecode._vendor.pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')), - #'JuttleLexer': ('typecode._vendor.pygments.lexers.javascript', 'Juttle', ('juttle', 'juttle'), ('*.juttle',), ('application/juttle', 'application/x-juttle', 'text/x-juttle', 'text/juttle')), + #'JuttleLexer': ('typecode._vendor.pygments.lexers.javascript', 'Juttle', ('juttle',), ('*.juttle',), ('application/juttle', 'application/x-juttle', 'text/x-juttle', 'text/juttle')), #'KalLexer': ('typecode._vendor.pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')), - #'KconfigLexer': ('typecode._vendor.pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)), + #'KconfigLexer': ('typecode._vendor.pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 
'linux-config', 'kernel-config'), ('Kconfig*', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)), + #'KernelLogLexer': ('typecode._vendor.pygments.lexers.textfmts', 'Kernel log', ('kmsg', 'dmesg'), ('*.kmsg', '*.dmesg'), ()), #'KokaLexer': ('typecode._vendor.pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)), - 'KotlinLexer': ('typecode._vendor.pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt',), ('text/x-kotlin',)), + 'KotlinLexer': ('typecode._vendor.pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt', '*.kts'), ('text/x-kotlin',)), #'LSLLexer': ('typecode._vendor.pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)), #'LassoCssLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)), #'LassoHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')), @@ -248,7 +254,7 @@ #'LassoLexer': ('typecode._vendor.pygments.lexers.javascript', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)), #'LassoXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)), #'LeanLexer': ('typecode._vendor.pygments.lexers.theorem', 'Lean', ('lean',), ('*.lean',), ('text/x-lean',)), - 'LessCssLexer': ('typecode._vendor.pygments.lexers.css', 'LessCss', ('less',), ('*.less',), ('text/x-less-css',)), + #'LessCssLexer': ('typecode._vendor.pygments.lexers.css', 'LessCss', ('less',), ('*.less',), ('text/x-less-css',)), #'LighttpdConfLexer': ('typecode._vendor.pygments.lexers.configs', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)), #'LimboLexer': ('typecode._vendor.pygments.lexers.inferno', 'Limbo', ('limbo',), ('*.b',), ('text/limbo',)), #'LiquidLexer': ('typecode._vendor.pygments.lexers.templates', 'liquid', 
('liquid',), ('*.liquid',), ()), @@ -258,9 +264,12 @@ #'LiterateIdrisLexer': ('typecode._vendor.pygments.lexers.haskell', 'Literate Idris', ('lidr', 'literate-idris', 'lidris'), ('*.lidr',), ('text/x-literate-idris',)), #'LiveScriptLexer': ('typecode._vendor.pygments.lexers.javascript', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)), #'LlvmLexer': ('typecode._vendor.pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)), + #'LlvmMirBodyLexer': ('typecode._vendor.pygments.lexers.asm', 'LLVM-MIR Body', ('llvm-mir-body',), (), ()), + #'LlvmMirLexer': ('typecode._vendor.pygments.lexers.asm', 'LLVM-MIR', ('llvm-mir',), ('*.mir',), ()), #'LogosLexer': ('typecode._vendor.pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)), #'LogtalkLexer': ('typecode._vendor.pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)), 'LuaLexer': ('typecode._vendor.pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')), + #'MIMELexer': ('typecode._vendor.pygments.lexers.mime', 'MIME', ('mime',), (), ('multipart/mixed', 'multipart/related', 'multipart/alternative')), #'MOOCodeLexer': ('typecode._vendor.pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)), #'MSDOSSessionLexer': ('typecode._vendor.pygments.lexers.shell', 'MSDOS Session', ('doscon',), (), ()), #'MakefileLexer': ('typecode._vendor.pygments.lexers.make', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)), @@ -270,19 +279,21 @@ #'MakoLexer': ('typecode._vendor.pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)), #'MakoXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)), #'MaqlLexer': ('typecode._vendor.pygments.lexers.business', 
'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')), - #'MarkdownLexer': ('typecode._vendor.pygments.lexers.markup', 'markdown', ('md',), ('*.md',), ('text/x-markdown',)), + #'MarkdownLexer': ('typecode._vendor.pygments.lexers.markup', 'markdown', ('md',), ('*.md', '*.markdown'), ('text/x-markdown',)), #'MaskLexer': ('typecode._vendor.pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)), #'MasonLexer': ('typecode._vendor.pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)), #'MathematicaLexer': ('typecode._vendor.pygments.lexers.algebra', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')), #'MatlabLexer': ('typecode._vendor.pygments.lexers.matlab', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)), #'MatlabSessionLexer': ('typecode._vendor.pygments.lexers.matlab', 'Matlab session', ('matlabsession',), (), ()), #'MiniDLexer': ('typecode._vendor.pygments.lexers.d', 'MiniD', ('minid',), (), ('text/x-minidsrc',)), + #'MiniScriptLexer': ('typecode._vendor.pygments.lexers.scripting', 'MiniScript', ('ms', 'miniscript'), ('*.ms',), ('text/x-minicript', 'application/x-miniscript')), #'ModelicaLexer': ('typecode._vendor.pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)), #'Modula2Lexer': ('typecode._vendor.pygments.lexers.modula2', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)), #'MoinWikiLexer': ('typecode._vendor.pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)), #'MonkeyLexer': ('typecode._vendor.pygments.lexers.basic', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)), #'MonteLexer': ('typecode._vendor.pygments.lexers.monte', 
'Monte', ('monte',), ('*.mt',), ()), #'MoonScriptLexer': ('typecode._vendor.pygments.lexers.scripting', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')), + #'MoselLexer': ('typecode._vendor.pygments.lexers.mosel', 'Mosel', ('mosel',), ('*.mos',), ()), #'MozPreprocCssLexer': ('typecode._vendor.pygments.lexers.markup', 'CSS+mozpreproc', ('css+mozpreproc',), ('*.css.in',), ()), #'MozPreprocHashLexer': ('typecode._vendor.pygments.lexers.markup', 'mozhashpreproc', ('mozhashpreproc',), (), ()), #'MozPreprocJavascriptLexer': ('typecode._vendor.pygments.lexers.markup', 'Javascript+mozpreproc', ('javascript+mozpreproc',), ('*.js.in',), ()), @@ -310,6 +321,7 @@ #'NimrodLexer': ('typecode._vendor.pygments.lexers.nimrod', 'Nimrod', ('nim', 'nimrod'), ('*.nim', '*.nimrod'), ('text/x-nim',)), #'NitLexer': ('typecode._vendor.pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()), #'NixLexer': ('typecode._vendor.pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)), + #'NotmuchLexer': ('typecode._vendor.pygments.lexers.textfmts', 'Notmuch', ('notmuch',), (), ()), #'NuSMVLexer': ('typecode._vendor.pygments.lexers.smv', 'NuSMV', ('nusmv',), ('*.smv',), ()), #'NumPyLexer': ('typecode._vendor.pygments.lexers.python', 'NumPy', ('numpy',), (), ()), #'ObjdumpLexer': ('typecode._vendor.pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)), @@ -326,13 +338,15 @@ #'PanLexer': ('typecode._vendor.pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()), #'ParaSailLexer': ('typecode._vendor.pygments.lexers.parasail', 'ParaSail', ('parasail',), ('*.psi', '*.psl'), ('text/x-parasail',)), #'PawnLexer': ('typecode._vendor.pygments.lexers.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)), - 'Perl6Lexer': ('typecode._vendor.pygments.lexers.perl', 'Perl6', ('perl6', 'pl6'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t'), 
('text/x-perl6', 'application/x-perl6')), - 'PerlLexer': ('typecode._vendor.pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t'), ('text/x-perl', 'application/x-perl')), + #'PegLexer': ('typecode._vendor.pygments.lexers.grammar_notation', 'PEG', ('peg',), ('*.peg',), ('text/x-peg',)), + #'Perl6Lexer': ('typecode._vendor.pygments.lexers.perl', 'Perl6', ('perl6', 'pl6', 'raku'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod', '*.rakutest', '*.rakudoc'), ('text/x-perl6', 'application/x-perl6')), + #'PerlLexer': ('typecode._vendor.pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t', '*.perl'), ('text/x-perl', 'application/x-perl')), 'PhpLexer': ('typecode._vendor.pygments.lexers.php', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)), #'PigLexer': ('typecode._vendor.pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)), #'PikeLexer': ('typecode._vendor.pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)), #'PkgConfigLexer': ('typecode._vendor.pygments.lexers.configs', 'PkgConfig', ('pkgconfig',), ('*.pc',), ()), #'PlPgsqlLexer': ('typecode._vendor.pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)), + #'PointlessLexer': ('typecode._vendor.pygments.lexers.pointless', 'Pointless', ('pointless',), ('*.ptls',), ()), #'PonyLexer': ('typecode._vendor.pygments.lexers.pony', 'Pony', ('pony',), ('*.pony',), ()), #'PostScriptLexer': ('typecode._vendor.pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)), #'PostgresConsoleLexer': ('typecode._vendor.pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)), @@ -342,16 +356,18 @@ #'PowerShellSessionLexer': ('typecode._vendor.pygments.lexers.shell', 'PowerShell Session', ('ps1con',), 
(), ()), #'PraatLexer': ('typecode._vendor.pygments.lexers.praat', 'Praat', ('praat',), ('*.praat', '*.proc', '*.psc'), ()), #'PrologLexer': ('typecode._vendor.pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)), + #'PromQLLexer': ('typecode._vendor.pygments.lexers.promql', 'PromQL', ('promql',), ('*.promql',), ()), #'PropertiesLexer': ('typecode._vendor.pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)), #'ProtoBufLexer': ('typecode._vendor.pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()), + #'PsyshConsoleLexer': ('typecode._vendor.pygments.lexers.php', 'PsySH console session for PHP', ('psysh',), (), ()), #'PugLexer': ('typecode._vendor.pygments.lexers.html', 'Pug', ('pug', 'jade'), ('*.pug', '*.jade'), ('text/x-pug', 'text/x-jade')), #'PuppetLexer': ('typecode._vendor.pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()), #'PyPyLogLexer': ('typecode._vendor.pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)), - 'Python3Lexer': ('typecode._vendor.pygments.lexers.python', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')), - #'Python3TracebackLexer': ('typecode._vendor.pygments.lexers.python', 'Python 3.0 Traceback', ('py3tb',), ('*.py3tb',), ('text/x-python3-traceback',)), + #'Python2Lexer': ('typecode._vendor.pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 'application/x-python2')), + #'Python2TracebackLexer': ('typecode._vendor.pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)), #'PythonConsoleLexer': ('typecode._vendor.pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)), - 'PythonLexer': ('typecode._vendor.pygments.lexers.python', 'Python', ('python', 'py', 'sage'), ('*.py', '*.pyw', 
'*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'), ('text/x-python', 'application/x-python')), - #'PythonTracebackLexer': ('typecode._vendor.pygments.lexers.python', 'Python Traceback', ('pytb',), ('*.pytb',), ('text/x-python-traceback',)), + 'PythonLexer': ('typecode._vendor.pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3'), ('*.py', '*.pyw', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')), + #'PythonTracebackLexer': ('typecode._vendor.pygments.lexers.python', 'Python Traceback', ('pytb', 'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')), #'QBasicLexer': ('typecode._vendor.pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)), #'QVToLexer': ('typecode._vendor.pygments.lexers.qvt', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()), #'QmlLexer': ('typecode._vendor.pygments.lexers.webmisc', 'QML', ('qml', 'qbs'), ('*.qml', '*.qbs'), ('application/x-qml', 'application/x-qt.qbs+qml')), @@ -369,23 +385,25 @@ #'RagelRubyLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel in Ruby Host', ('ragel-ruby', 'ragel-rb'), ('*.rl',), ()), #'RawTokenLexer': ('typecode._vendor.pygments.lexers.special', 'Raw token data', ('raw',), (), ('application/x-pygments-tokens',)), #'RdLexer': ('typecode._vendor.pygments.lexers.r', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)), + #'ReasonLexer': ('typecode._vendor.pygments.lexers.ml', 'ReasonML', ('reason', 'reasonml'), ('*.re', '*.rei'), ('text/x-reasonml',)), #'RebolLexer': ('typecode._vendor.pygments.lexers.rebol', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)), #'RedLexer': ('typecode._vendor.pygments.lexers.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')), #'RedcodeLexer': 
('typecode._vendor.pygments.lexers.esoteric', 'Redcode', ('redcode',), ('*.cw',), ()), #'RegeditLexer': ('typecode._vendor.pygments.lexers.configs', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)), - #'ResourceLexer': ('typecode._vendor.pygments.lexers.resource', 'ResourceBundle', ('resource', 'resourcebundle'), ('*.txt',), ()), + #'ResourceLexer': ('typecode._vendor.pygments.lexers.resource', 'ResourceBundle', ('resource', 'resourcebundle'), (), ()), #'RexxLexer': ('typecode._vendor.pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)), #'RhtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)), + #'RideLexer': ('typecode._vendor.pygments.lexers.ride', 'Ride', ('ride',), ('*.ride',), ('text/x-ride',)), #'RoboconfGraphLexer': ('typecode._vendor.pygments.lexers.roboconf', 'Roboconf Graph', ('roboconf-graph',), ('*.graph',), ()), #'RoboconfInstancesLexer': ('typecode._vendor.pygments.lexers.roboconf', 'Roboconf Instances', ('roboconf-instances',), ('*.instances',), ()), - #'RobotFrameworkLexer': ('typecode._vendor.pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.txt', '*.robot'), ('text/x-robotframework',)), + #'RobotFrameworkLexer': ('typecode._vendor.pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.robot',), ('text/x-robotframework',)), #'RqlLexer': ('typecode._vendor.pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)), #'RslLexer': ('typecode._vendor.pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)), #'RstLexer': ('typecode._vendor.pygments.lexers.markup', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')), #'RtsLexer': ('typecode._vendor.pygments.lexers.trafficscript', 'TrafficScript', ('rts', 'trafficscript'), ('*.rts',), ()), #'RubyConsoleLexer': 
('typecode._vendor.pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)), 'RubyLexer': ('typecode._vendor.pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile'), ('text/x-ruby', 'application/x-ruby')), - 'RustLexer': ('typecode._vendor.pygments.lexers.rust', 'Rust', ('rust', 'rs'), ('*.rs', '*.rs.in'), ('text/rust',)), + 'RustLexer': ('typecode._vendor.pygments.lexers.rust', 'Rust', ('rust', 'rs'), ('*.rs', '*.rs.in'), ('text/rust', 'text/x-rust')), #'SASLexer': ('typecode._vendor.pygments.lexers.sas', 'SAS', ('sas',), ('*.SAS', '*.sas'), ('text/x-sas', 'text/sas', 'application/x-sas')), #'SLexer': ('typecode._vendor.pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')), #'SMLLexer': ('typecode._vendor.pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')), @@ -393,12 +411,16 @@ 'SassLexer': ('typecode._vendor.pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)), 'ScalaLexer': ('typecode._vendor.pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)), #'ScamlLexer': ('typecode._vendor.pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)), + #'ScdocLexer': ('typecode._vendor.pygments.lexers.scdoc', 'scdoc', ('scdoc', 'scd'), ('*.scd', '*.scdoc'), ()), #'SchemeLexer': ('typecode._vendor.pygments.lexers.lisp', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')), #'ScilabLexer': ('typecode._vendor.pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)), 'ScssLexer': ('typecode._vendor.pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)), + #'ShExCLexer': 
('typecode._vendor.pygments.lexers.rdf', 'ShExC', ('shexc', 'shex'), ('*.shex',), ('text/shex',)), #'ShenLexer': ('typecode._vendor.pygments.lexers.lisp', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')), + #'SieveLexer': ('typecode._vendor.pygments.lexers.sieve', 'Sieve', ('sieve',), ('*.siv', '*.sieve'), ()), #'SilverLexer': ('typecode._vendor.pygments.lexers.verification', 'Silver', ('silver',), ('*.sil', '*.vpr'), ()), - #'SlashLexer': ('typecode._vendor.pygments.lexers.slash', 'Slash', ('slash',), ('*.sl',), ()), + #'SingularityLexer': ('typecode._vendor.pygments.lexers.configs', 'Singularity', ('singularity',), ('*.def', 'Singularity'), ()), + #'SlashLexer': ('typecode._vendor.pygments.lexers.slash', 'Slash', ('slash',), ('*.sla',), ()), #'SlimLexer': ('typecode._vendor.pygments.lexers.webmisc', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)), #'SlurmBashLexer': ('typecode._vendor.pygments.lexers.shell', 'Slurm', ('slurm', 'sbatch'), ('*.sl',), ()), #'SmaliLexer': ('typecode._vendor.pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)), @@ -407,6 +429,7 @@ #'SmartyLexer': ('typecode._vendor.pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)), #'SnobolLexer': ('typecode._vendor.pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)), #'SnowballLexer': ('typecode._vendor.pygments.lexers.dsls', 'Snowball', ('snowball',), ('*.sbl',), ()), + #'SolidityLexer': ('typecode._vendor.pygments.lexers.solidity', 'Solidity', ('solidity',), ('*.sol',), ()), #'SourcePawnLexer': ('typecode._vendor.pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)), #'SourcesListLexer': ('typecode._vendor.pygments.lexers.installers', 'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()), #'SparqlLexer': ('typecode._vendor.pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), 
('application/sparql-query',)), @@ -421,7 +444,8 @@ #'SwigLexer': ('typecode._vendor.pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)), #'SystemVerilogLexer': ('typecode._vendor.pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)), #'TAPLexer': ('typecode._vendor.pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()), - #'TOMLLexer': ('typecode._vendor.pygments.lexers.configs', 'TOML', ('toml',), ('*.toml',), ()), + #'TNTLexer': ('typecode._vendor.pygments.lexers.tnt', 'Typographic Number Theory', ('tnt',), ('*.tnt',), ()), + #'TOMLLexer': ('typecode._vendor.pygments.lexers.configs', 'TOML', ('toml',), ('*.toml', 'Pipfile', 'poetry.lock'), ()), #'Tads3Lexer': ('typecode._vendor.pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()), 'TasmLexer': ('typecode._vendor.pygments.lexers.asm', 'TASM', ('tasm',), ('*.asm', '*.ASM', '*.tasm'), ('text/x-tasm',)), 'TclLexer': ('typecode._vendor.pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')), @@ -435,6 +459,7 @@ #'TexLexer': ('typecode._vendor.pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')), #'TextLexer': ('typecode._vendor.pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)), 'ThriftLexer': ('typecode._vendor.pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)), + #'TiddlyWiki5Lexer': ('typecode._vendor.pygments.lexers.markup', 'tiddler', ('tid',), ('*.tid',), ('text/vnd.tiddlywiki',)), #'TodotxtLexer': ('typecode._vendor.pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)), #'TransactSqlLexer': ('typecode._vendor.pygments.lexers.sql', 'Transact-SQL', ('tsql', 't-sql'), ('*.sql',), ('text/x-tsql',)), #'TreetopLexer': ('typecode._vendor.pygments.lexers.parsers', 'Treetop', ('treetop',), 
('*.treetop', '*.tt'), ()), @@ -448,6 +473,7 @@ #'UcodeLexer': ('typecode._vendor.pygments.lexers.unicon', 'ucode', ('ucode',), ('*.u', '*.u1', '*.u2'), ()), #'UniconLexer': ('typecode._vendor.pygments.lexers.unicon', 'Unicon', ('unicon',), ('*.icn',), ('text/unicon',)), #'UrbiscriptLexer': ('typecode._vendor.pygments.lexers.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)), + #'UsdLexer': ('typecode._vendor.pygments.lexers.usd', 'USD', ('usd', 'usda'), ('*.usd', '*.usda'), ()), 'VBScriptLexer': ('typecode._vendor.pygments.lexers.basic', 'VBScript', ('vbscript',), ('*.vbs', '*.VBS'), ()), #'VCLLexer': ('typecode._vendor.pygments.lexers.varnish', 'VCL', ('vcl',), ('*.vcl',), ('text/x-vclsrc',)), #'VCLSnippetLexer': ('typecode._vendor.pygments.lexers.varnish', 'VCLSnippets', ('vclsnippets', 'vclsnippet'), (), ('text/x-vclsnippet',)), @@ -463,6 +489,7 @@ 'VhdlLexer': ('typecode._vendor.pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)), #'VimLexer': ('typecode._vendor.pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)), #'WDiffLexer': ('typecode._vendor.pygments.lexers.diff', 'WDiff', ('wdiff',), ('*.wdiff',), ()), + #'WebIDLLexer': ('typecode._vendor.pygments.lexers.webidl', 'Web IDL', ('webidl',), ('*.webidl',), ()), #'WhileyLexer': ('typecode._vendor.pygments.lexers.whiley', 'Whiley', ('whiley',), ('*.whiley',), ('text/x-whiley',)), #'X10Lexer': ('typecode._vendor.pygments.lexers.x10', 'X10', ('x10', 'xten'), ('*.x10',), ('text/x-x10',)), #'XQueryLexer': ('typecode._vendor.pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')), @@ -477,7 +504,10 @@ #'XtlangLexer': ('typecode._vendor.pygments.lexers.lisp', 'xtlang', ('extempore',), ('*.xtm',), ()), #'YamlJinjaLexer': 
('typecode._vendor.pygments.lexers.templates', 'YAML+Jinja', ('yaml+jinja', 'salt', 'sls'), ('*.sls',), ('text/x-yaml+jinja', 'text/x-sls')), #'YamlLexer': ('typecode._vendor.pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)), + #'YangLexer': ('typecode._vendor.pygments.lexers.yang', 'YANG', ('yang',), ('*.yang',), ('application/yang',)), + #'ZeekLexer': ('typecode._vendor.pygments.lexers.dsls', 'Zeek', ('zeek', 'bro'), ('*.zeek', '*.bro'), ()), #'ZephirLexer': ('typecode._vendor.pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()), + #'ZigLexer': ('typecode._vendor.pygments.lexers.zig', 'Zig', ('zig',), ('*.zig',), ('text/zig',)), } if __name__ == '__main__': # pragma: no cover diff --git a/src/typecode/pygments_lexers_mapping.py.ABOUT b/src/typecode/pygments_lexers_mapping.py.ABOUT index 9e66248..4ad15e9 100644 --- a/src/typecode/pygments_lexers_mapping.py.ABOUT +++ b/src/typecode/pygments_lexers_mapping.py.ABOUT @@ -1,23 +1,25 @@ about_resource: pygments_lexers_mapping.py +copyright: Copyright (c) by the Pygments team +notes: this is a Pygments file copied from pygments/lexers/_mapping.py + to focus on programming languages detection only. It has been modified to be + usable alone. +notice_file: pygments_lexers_mapping.py.NOTICE attribute: yes -checksum_md5: ea723daf498a9805b481619c19ab75f8 -checksum_sha1: b22c0e245d2f7874e09bf817352f25462874dd18 +checksum_md5: 665516d1d1c0099241ab6e4c057e26be +checksum_sha1: e0277b8dd2ebce5121a68bec62173b9e0b057742 contact: georg@python.org copyright: Copyright (c) by the Pygments team description: Pygments is a generic syntax highlighter suitable for use in code hosting, forums, wikis or other applications that need to prettify source code. 
-download_url: https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz +download_url: https://files.pythonhosted.org/packages/ba/6e/7a7c13c21d8a4a7f82ccbfe257a045890d4dbf18c023f985f565f97393e3/Pygments-2.9.0.tar.gz homepage_url: http://pygments.org/ license_expression: bsd-simplified AND (bsd-new AND apache-2.0 AND public-domain) name: pygments -notes: this is a Pygments file copied from pygments/lexers/_mapping.py - to focus on programming languages detection only. It has been modified to be - usable alone. -notice_file: pygments_lexers.py.NOTICE -notice_url: https://github.com/pygments/pygments/blob/2.4.2/LICENSE owner: Pocoo Team owner_url: http://www.pocoo.org/ -package_url: pkg:pypi/pygments@2.4.2#pygments/lexers/_mapping.py +package_url: pkg:pypi/pygments@2.9.0 +primary_language: Python track_changes: yes -vcs_url: https://github.com/pygments/pygments -version: 2.4.2 +type: pypi +version: 2.9.0 +vcs_url: git+https://github.com/pygments/pygments.git From 50e1abb585c3098b2a4f0f9e67ccfe5138c34d11 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 May 2021 10:56:37 +0200 Subject: [PATCH 20/35] Make new libmagic loading work Its was skipping plugin loading. Also add tracing. Signed-off-by: Philippe Ombredanne --- src/typecode/magic2.py | 44 ++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 42 insertions(+), 2 deletions(-) diff --git a/src/typecode/magic2.py b/src/typecode/magic2.py index 3011782..bd8b5ad 100644 --- a/src/typecode/magic2.py +++ b/src/typecode/magic2.py @@ -47,13 +47,31 @@ from commoncode import command from commoncode.system import on_windows -TRACE = False - """ magic2 is minimal and specialized wrapper around a vendored libmagic file identification library. This is NOT thread-safe. It is based on python-magic by Adam Hup and adapted to the specific needs of ScanCode. 
""" + +# Tracing flag +TRACE = True + + +def logger_debug(*args): + pass + + +if TRACE: + import logging + import sys + + logger = logging.getLogger(__name__) + logging.basicConfig(stream=sys.stdout) + logger.setLevel(logging.DEBUG) + + def logger_debug(*args): + return logger.debug(' '.join(isinstance(a, str) and a or repr(a) for a in args)) + # # Cached detectors # @@ -92,11 +110,24 @@ def load_lib(): # try the environment first dll_loc = os.environ.get(TYPECODE_LIBMAGIC_PATH_ENVVAR) + if TRACE and dll_loc: + logger_debug('load_lib:', 'got environ magic location:', dll_loc) + + # try a plugin-provided path second + if not dll_loc: + dll_loc = get_location(TYPECODE_LIBMAGIC_DLL) + + if TRACE and dll_loc: + logger_debug('load_lib:', 'got plugin magic location:', dll_loc) + # try the PATH if not dll_loc: dll = 'libmagic.dll' if on_windows else 'libmagic.so' dll_loc = command.find_in_path(dll) + if TRACE and dll_loc: + logger_debug('load_lib:', 'got path magic location:', dll_loc) + if not dll_loc or not os.path.isfile(dll_loc): raise Exception( 'CRITICAL: libmagic DLL and its magic database are not installed. ' @@ -123,15 +154,24 @@ def get_magicdb_location(_cache=[]): # try the environment first magicdb_loc = os.environ.get(TYPECODE_LIBMAGIC_DB_PATH_ENVVAR) + if TRACE and magicdb_loc: + logger_debug('get_magicdb_location:', 'got environ magicdb location:', magicdb_loc) + # try a plugin-provided path second if not magicdb_loc: magicdb_loc = get_location(TYPECODE_LIBMAGIC_DB) + if TRACE and magicdb_loc: + logger_debug('get_magicdb_location:', 'got plugin magicdb location:', magicdb_loc) + # try the PATH if not magicdb_loc: db = 'magic.mgc' magicdb_loc = command.find_in_path(db) + if TRACE and magicdb_loc: + logger_debug('get_magicdb_location:', 'got path magicdb location:', magicdb_loc) + if not magicdb_loc or not os.path.isfile(magicdb_loc): raise Exception( 'CRITICAL: Libmagic magic database is not installed. 
' From 0f5029a0dc70497974c6fac3dfb37ac79f2e23bf Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 May 2021 10:58:28 +0200 Subject: [PATCH 21/35] Detect file type content only if no extension Only do content-based detection for programing language if there is no file extension as this avoids a large number of detection issues. Signed-off-by: Philippe Ombredanne --- src/typecode/contenttype.py | 40 +++++++++++++++++++------------------ 1 file changed, 21 insertions(+), 19 deletions(-) diff --git a/src/typecode/contenttype.py b/src/typecode/contenttype.py index fd64aa8..7737636 100644 --- a/src/typecode/contenttype.py +++ b/src/typecode/contenttype.py @@ -107,7 +107,6 @@ def logger_debug(*args): 'Makefile.inc', ) - # Global registry of Type objects, keyed by location # FIXME: can this be a memory hog for very large scans? _registry = {} @@ -395,8 +394,8 @@ def is_compact_js(self): """ if self._is_compact_js is None: # FIXME: when moving to Python 3 - extensions = (u'.min.js', u'.typeface.json',) - json_ext = u'.json' + extensions = ('.min.js', '.typeface.json',) + json_ext = '.json' self._is_compact_js = ( self.is_js_map @@ -433,7 +432,7 @@ def is_archive(self): return self._is_archive self._is_archive = False - docx_type_end = u'2007+' + docx_type_end = '2007+' ft = self.filetype_file.lower() @@ -464,9 +463,9 @@ def is_office_doc(self): loc = self.location.lower() # FIXME: add open office extensions and other extensions for other docs msoffice_exts = ( - u'.doc', u'.docx', - u'.xlsx', u'.xlsx', - u'.ppt', u'.pptx', + '.doc', '.docx', + '.xlsx', '.xlsx', + '.ppt', '.pptx', ) if loc.endswith(msoffice_exts): @@ -485,8 +484,8 @@ def is_package(self): # FIXME: this should beased on proper package recognition, not this simplistic check ft = self.filetype_file.lower() loc = self.location.lower() - package_archive_extensions = u'.jar', u'.war', u'.ear', u'.zip', '.whl', '.egg' - gem_extension = u'.gem' + package_archive_extensions = '.jar', '.war', '.ear', 
'.zip', '.whl', '.egg' + gem_extension = '.gem' # FIXME: this is grossly under specified and is missing many packages if ('debian binary package' in ft @@ -505,7 +504,7 @@ def is_compressed(self): """ ft = self.filetype_file.lower() - docx_ext = u'x' + docx_ext = 'x' if (not self.is_text and ( @@ -552,7 +551,7 @@ def is_media(self): if any(m in mt for m in mimes) or any(t in ft for t in types): return True - tga_ext = u'.tga' + tga_ext = '.tga' if ft == 'data' and mt == 'application/octet-stream' and self.location.lower().endswith(tga_ext): # there is a regression in libmagic 5.38 https://bugs.astron.com/view.php?id=161 @@ -615,7 +614,7 @@ def contains_text(self): Return True if a file possibly contains some text. """ if self._contains_text is None: - svg_ext = u'.svg' + svg_ext = '.svg' if not self.is_file: self._contains_text = False @@ -800,7 +799,7 @@ class TypeDefinition(object): TypeDefinition( name='MySQL ARCHIVE Storage Engine data files', filetypes=('mysql table definition file',), - extensions=(u'.arm', u'.arz', u'.arn',), + extensions=('.arm', '.arz', '.arn',), ), ]) @@ -864,12 +863,15 @@ def get_pygments_lexer(location): try: return get_lexer_for_filename(location.lower()) except LexerClassNotFound: - try: - # if Pygments does not guess we should not carry forward - content = get_text_file_start(location) - return guess_lexer(content) - except LexerClassNotFound: - return + # only try content-based detection if we do not have an extension + ext = fileutils.file_extension(location) + if not ext: + try: + # if Pygments does not guess we should not carry forward + content = get_text_file_start(location) + return guess_lexer(content) + except LexerClassNotFound: + return def get_text_file_start(location, length=4096): From b480cf31a60022fe11066236cb24832170290adf Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 May 2021 10:59:12 +0200 Subject: [PATCH 22/35] Adapt tests to latest Pygments Aslo ensure we use assertion in the correct order. 
Signed-off-by: Philippe Ombredanne --- tests/data/filetest/code/groff/example.ms.yml | 4 +- .../code/java/CommonViewerSiteFactory.jad.yml | 4 +- tests/filetype_test_utils.py | 26 +++++-- tests/test_contenttype.py | 70 +++++++++---------- tests/test_entropy.py | 2 +- 5 files changed, 59 insertions(+), 47 deletions(-) diff --git a/tests/data/filetest/code/groff/example.ms.yml b/tests/data/filetest/code/groff/example.ms.yml index 250f6d2..3df2bcf 100644 --- a/tests/data/filetest/code/groff/example.ms.yml +++ b/tests/data/filetest/code/groff/example.ms.yml @@ -1,8 +1,8 @@ filetype_file: troff or preprocessor input, ASCII text mimetype_file: text/troff mimetype_python: text/troff -filetype_pygment: GAS -programming_language: GAS +filetype_pygment: +programming_language: is_file: yes is_regular: yes size: 93 diff --git a/tests/data/filetest/code/java/CommonViewerSiteFactory.jad.yml b/tests/data/filetest/code/java/CommonViewerSiteFactory.jad.yml index 918b6c4..41f92e8 100644 --- a/tests/data/filetest/code/java/CommonViewerSiteFactory.jad.yml +++ b/tests/data/filetest/code/java/CommonViewerSiteFactory.jad.yml @@ -1,8 +1,8 @@ filetype_file: Java source, ASCII text mimetype_file: text/x-java mimetype_python: text/vnd.sun.j2me.app-descriptor -filetype_pygment: Python -programming_language: Python +filetype_pygment: +programming_language: is_file: yes is_regular: yes size: 1447 diff --git a/tests/filetype_test_utils.py b/tests/filetype_test_utils.py index 351d2db..e83fda6 100644 --- a/tests/filetype_test_utils.py +++ b/tests/filetype_test_utils.py @@ -118,7 +118,7 @@ def __attrs_post_init__(self, *args, **kwargs): setattr(self, key, value) except: import traceback - msg = 'file://' + self.data_file + '\n' + repr(self) + '\n' + traceback.format_exc() + msg = f'file://{self.data_file}\n{repr(self)}\n' + traceback.format_exc() raise Exception(msg) if isinstance(self.size, str): self.size = int(self.size) @@ -187,7 +187,7 @@ def check_types_equal(expected, result): # on windows we 
really have weird things return False else: - return result_value == expected_value + return expected_value == result_value # we have either number, date, None or boolean value and # we want both values to be both trueish or falsish @@ -198,9 +198,15 @@ def check_types_equal(expected, result): return True -def make_filetype_test_functions(test, index, test_data_dir=test_env.test_data_dir, regen=False): +def make_filetype_test_functions( + test, + index, + test_data_dir=test_env.test_data_dir, + regen=False, +): """ - Build and return a test function closing on tests arguments and the function name. + Build and return a test function closing on tests arguments and the function + name. """ def closure_test_function(*args, **kwargs): @@ -218,7 +224,7 @@ def closure_test_function(*args, **kwargs): if not passing: expected['data file'] = 'file://' + data_file expected['test_file'] = 'file://' + test_file - assert dict(expected) == dict(results) + assert dict(results) == dict(expected) data_file = test.data_file test_file = test.test_file @@ -242,7 +248,12 @@ def closure_test_function(*args, **kwargs): return closure_test_function, test_name -def build_tests(filetype_tests, clazz, test_data_dir=test_env.test_data_dir, regen=False): +def build_tests( + filetype_tests, + clazz, + test_data_dir=test_env.test_data_dir, + regen=False, +): """ Dynamically build test methods from a sequence of FileTypeTest and attach these method to the clazz test class. 
@@ -253,6 +264,7 @@ def build_tests(filetype_tests, clazz, test_data_dir=test_env.test_data_dir, reg actual_regen = False else: actual_regen = regen - method, name = make_filetype_test_functions(test, i, test_data_dir, actual_regen) + method, name = make_filetype_test_functions( + test, i, test_data_dir, actual_regen) # attach that method to our test class setattr(clazz, name, method) diff --git a/tests/test_contenttype.py b/tests/test_contenttype.py index 7e2ae35..d371c7e 100644 --- a/tests/test_contenttype.py +++ b/tests/test_contenttype.py @@ -68,10 +68,10 @@ def test_filetype_file_on_unicode_file_name(self): expected = 'PNG image data, 16 x 12, 8-bit/color RGBA, interlaced' - assert expected == get_filetype_file(test_file) + assert get_filetype_file(test_file) == expected expected = 'image/png' - assert expected == get_mimetype_file(test_file) + assert get_mimetype_file(test_file) == expected @skipIf(not on_linux, 'Windows and macOS have some issues with some non-unicode paths') def test_filetype_file_on_unicode_file_name2(self): @@ -87,13 +87,13 @@ def test_filetype_file_on_unicode_file_name2(self): if on_windows: # FIXME: this is a very short png file though expected = 'Non-ISO extended-ASCII text' - assert expected == get_filetype_file(test_file) + assert get_filetype_file(test_file) == expected expected = 'image/png' if on_windows: # FIXME: this is a very short png file though expected = 'text/plain' - assert expected == get_mimetype_file(test_file) + assert get_mimetype_file(test_file) == expected @skipIf(on_windows, 'Windows does not have (well supported) links.') def test_symbolink_links(self): @@ -102,22 +102,22 @@ def test_symbolink_links(self): test_file1 = os.path.join(test_dir, 'prunedirs/targets/simlink_to_dir') assert is_link(test_file1) assert not is_broken_link(test_file1) - assert '../sources/subdir' == get_link_target(test_file1) + assert get_link_target(test_file1) == '../sources/subdir' test_file2 = os.path.join(test_dir, 
'prunedirs/targets/simlink_to_file') assert is_link(test_file2) assert not is_broken_link(test_file2) - assert '../sources/a.txt' == get_link_target(test_file2) + assert get_link_target(test_file2) == '../sources/a.txt' test_file3 = os.path.join(test_dir, 'prunedirs/targets/simlink_to_missing_file') assert is_link(test_file3) assert is_broken_link(test_file3) - assert '../sources/temp.txt' == get_link_target(test_file3) + assert get_link_target(test_file3) == '../sources/temp.txt' test_file4 = os.path.join(test_dir, 'prunedirs/targets/simlink_to_missing_dir') assert is_link(test_file4) assert is_broken_link(test_file4) - assert '../sources/tempdir' == get_link_target(test_file4) + assert get_link_target(test_file4) == '../sources/tempdir' @skipIf(not on_windows, 'Hangs for now, for lack of proper sudo access on some test servers.') @skipIf(on_windows, 'Windows does not have fifos.') @@ -129,7 +129,7 @@ def test_contenttype_fifo(self): self.fail('Unable to create fifo') assert os.path.exists(myfifo) assert is_special(myfifo) - assert 'FIFO pipe' == get_filetype(myfifo) + assert get_filetype(myfifo) == 'FIFO pipe' def test_debian_package(self): test_file = self.get_test_loc('contenttype/package/libjama-dev_1.2.4-2_all.deb') @@ -144,7 +144,7 @@ def test_debian_package(self): assert is_archive(test_file) assert is_compressed(test_file) assert not contains_text(test_file) - assert '' == get_filetype_pygment(test_file) + assert get_filetype_pygment(test_file) == '' def test_package_json(self): test_file = self.get_test_loc('contenttype/package/package.json') @@ -157,7 +157,7 @@ def test_package_json(self): assert get_filetype(test_file) in expected assert is_text(test_file) assert not is_binary(test_file) - assert '' == get_filetype_pygment(test_file) + assert get_filetype_pygment(test_file) == '' assert not is_source(test_file) def test_certificate(self): @@ -171,7 +171,7 @@ def test_certificate(self): 'data', ) assert get_filetype(test_file).startswith(expected) - 
assert '' == get_filetype_pygment(test_file) + assert get_filetype_pygment(test_file) == '' def test_code_c_1(self): test_file = self.get_test_loc('contenttype/code/c/c_code.c') @@ -182,7 +182,7 @@ def test_code_c_1(self): 'c source, ascii text', ) assert get_filetype(test_file) in expected - assert 'C' == get_filetype_pygment(test_file) + assert get_filetype_pygment(test_file) == 'C' assert is_source(test_file) assert is_text(test_file) @@ -196,14 +196,14 @@ def test_code_c_7(self): ) assert get_filetype(test_file) in expected assert is_source(test_file) - assert 'C' == get_filetype_pygment(test_file) + assert get_filetype_pygment(test_file) == 'C' def test_code_python_2(self): test_file = self.get_test_loc('contenttype/code/python/extract.py') assert is_source(test_file) assert is_text(test_file) - assert 'Python' == get_filetype_pygment(test_file) - assert 'python script, ascii text executable' == get_filetype(test_file) + assert get_filetype_pygment(test_file) == 'Python' + assert get_filetype(test_file) == 'python script, ascii text executable' expected = ( 'text/x-python', # new in libmagic 5.39 @@ -216,8 +216,8 @@ def test_compiled_elf_so(self): test_file = self.get_test_loc(u'contenttype/compiled/linux/libssl.so.0.9.7') assert not is_special(test_file) assert not is_text(test_file) - assert '' == get_filetype_pygment(test_file) - assert 'application/x-sharedlib' == get_mimetype_file(test_file) + assert get_filetype_pygment(test_file) == '' + assert get_mimetype_file(test_file) == 'application/x-sharedlib' expected = ( # correct with libmagic 5.38 and 5.39 'ELF 32-bit LSB shared object, Intel 80386, version 1 (SYSV), statically linked, stripped', @@ -226,7 +226,7 @@ def test_compiled_elf_so(self): ) assert get_filetype_file(test_file) in expected assert get_filetype(test_file) in [t.lower() for t in expected] - assert '' == get_filetype_pygment(test_file) + assert get_filetype_pygment(test_file) == '' def test_compiled_elf_so_2(self): test_file = 
self.get_test_loc('contenttype/compiled/linux/libnetsnmpagent.so.5') @@ -238,28 +238,28 @@ def test_compiled_elf_so_2(self): 'elf 32-bit lsb shared object, intel 80386, version 1 (sysv), dynamically linked, with debug_info, not stripped', ) assert get_filetype(test_file) in expected - assert '' == get_filetype_pygment(test_file) + assert get_filetype_pygment(test_file) == '' @pytest.mark.xfail( on_mac or on_windows, reason='Somehow we get really weird results on macOS with libmagic 5.38 and mac, win32 on libmagic 5.39: ' '[64-bit architecture=6893422] [64-bit architecture=6649701] [architecture=1075809] [architecture=3959150] [architecture=768]') def test_compiled_java_classfile_1(self): test_file = self.get_test_loc('contenttype/compiled/java/CommonViewerSiteFactory.class') - assert 'compiled java class data, version 46.0 (java 1.2)' == get_filetype(test_file) - assert '' == get_filetype_pygment(test_file) + assert get_filetype(test_file) == 'compiled java class data, version 46.0 (java 1.2)' + assert get_filetype_pygment(test_file) == '' @pytest.mark.xfail(on_mac or on_windows, reason='Somehow we get really weird results on macOS with libmagic 5.38 and mac, win32 on libmagic 5.39: ' '[64-bit architecture=6893422] [64-bit architecture=6649701] [architecture=1075809] [architecture=3959150] [architecture=768]') def test_compiled_java_classfile_2(self): test_file = self.get_test_loc('contenttype/compiled/java/old.class') assert is_binary(test_file) - assert 'compiled java class data, version 46.0 (java 1.2)' == get_filetype(test_file) - assert '' == get_filetype_pygment(test_file) + assert get_filetype(test_file) == 'compiled java class data, version 46.0 (java 1.2)' + assert get_filetype_pygment(test_file) == '' def test_compiled_python_1(self): test_dir = self.extract_test_zip('contenttype/compiled/python/compiled.zip') test_file = os.path.join(test_dir, 'command.pyc') - assert 'python 2.5 byte-compiled' == get_filetype(test_file) + assert get_filetype(test_file) 
== 'python 2.5 byte-compiled' assert not is_source(test_file) assert not is_text(test_file) expected_mime = ( @@ -268,7 +268,7 @@ def test_compiled_python_1(self): 'text/x-bytecode.python', ) assert get_mimetype_file(test_file) in expected_mime - assert '' == get_filetype_pygment(test_file) + assert get_filetype_pygment(test_file) == '' test_file2 = os.path.join(test_dir, 'contenttype.pyc') assert is_binary(test_file2) @@ -279,11 +279,11 @@ def test_compiled_python_1(self): assert get_pygments_lexer(test_file3) is None test_file4 = os.path.join(test_dir, 'extract.pyc') - assert 'python 2.5 byte-compiled' == get_filetype(test_file4) + assert get_filetype(test_file4) == 'python 2.5 byte-compiled' assert not is_source(test_file4) assert not is_text(test_file4) assert get_mimetype_file(test_file4) in expected_mime - assert '' == get_filetype_pygment(test_file4) + assert get_filetype_pygment(test_file4) == '' # @pytest.mark.xfail(on_windows or on_mac, reason='Somehow we have incorrect results on win63 with libmagic 5.38: ' # 'application/octet-stream instead of EPS') @@ -305,7 +305,7 @@ def test_doc_postscript_eps(self): get_filetype_file='DOS EPS Binary File Postscript starts at byte 32 length 466 TIFF starts at byte 498 length 11890', get_mimetype_file='image/x-eps', ) - assert expected == results + assert results == expected def test_media_image_img(self): test_file = self.get_test_loc('contenttype/media/Image1.img') @@ -351,7 +351,7 @@ def test_archive_squashfs_crashing(self): assert is_archive(test_file) assert is_compressed(test_file) assert not contains_text(test_file) - assert '' == get_filetype_pygment(test_file) + assert get_filetype_pygment(test_file) == '' @skipIf(on_windows, 'fails because of libmagic bug on windows.') def test_archive_squashfs_gz(self): @@ -360,7 +360,7 @@ def test_archive_squashfs_gz(self): assert is_archive(test_file) assert is_compressed(test_file) assert not contains_text(test_file) - assert '' == get_filetype_pygment(test_file) + 
assert get_filetype_pygment(test_file) == '' @skipIf(on_windows, 'fails because of libmagic bug on windows.') def test_archive_squashfs_lzo(self): @@ -369,7 +369,7 @@ def test_archive_squashfs_lzo(self): assert is_archive(test_file) assert is_compressed(test_file) assert not contains_text(test_file) - assert '' == get_filetype_pygment(test_file) + assert get_filetype_pygment(test_file) == '' @skipIf(on_windows, 'fails because of libmagic bug on windows.') def test_archive_squashfs_xz(self): @@ -378,16 +378,16 @@ def test_archive_squashfs_xz(self): assert is_archive(test_file) assert is_compressed(test_file) assert not contains_text(test_file) - assert '' == get_filetype_pygment(test_file) + assert get_filetype_pygment(test_file) == '' def test_directory(self): test_file = self.get_test_loc('contenttype') assert not is_binary(test_file) assert not is_compressed(test_file) assert not contains_text(test_file) - assert '' == get_filetype_pygment(test_file) + assert get_filetype_pygment(test_file) == '' def test_size(self): test_dir = self.get_test_loc('contenttype/size') result = size(test_dir) - assert 18 == result + assert result == 18 diff --git a/tests/test_entropy.py b/tests/test_entropy.py index 866a00a..8c8e481 100644 --- a/tests/test_entropy.py +++ b/tests/test_entropy.py @@ -28,7 +28,7 @@ def check_entropy(data, expected, func=shannon_entropy): entro = round(func(data), 2) expected = round(expected, 2) - assert expected == entro + assert entro == expected class TestEntropy(unittest.TestCase): From e5b373f474e4ce5e2d37b54c683014b7b66079e4 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 May 2021 10:59:32 +0200 Subject: [PATCH 23/35] Use pip extras with the correct syntax Signed-off-by: Philippe Ombredanne --- configure | 4 ++-- configure.bat | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/configure b/configure index 25ab0ce..98d0cf7 100755 --- a/configure +++ b/configure @@ -26,8 +26,8 @@ CLI_ARGS=$1 
################################ # Requirement arguments passed to pip and used by default or with --dev. -REQUIREMENTS="--editable ." -DEV_REQUIREMENTS="--editable .[testing]" +REQUIREMENTS="--editable .[full]" +DEV_REQUIREMENTS="--editable .[full,testing]" # where we create a virtualenv VIRTUALENV_DIR=tmp diff --git a/configure.bat b/configure.bat index ed81235..2d0a878 100644 --- a/configure.bat +++ b/configure.bat @@ -25,7 +25,7 @@ @rem # Requirement arguments passed to pip and used by default or with --dev. set "REQUIREMENTS=--editable .[full]" -set "DEV_REQUIREMENTS=--editable .[full, testing]" +set "DEV_REQUIREMENTS=--editable .[full,testing]" @rem # where we create a virtualenv set "VIRTUALENV_DIR=tmp" From 5bbd8cb51122aecc8798bbde98dd43f63db1fe57 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 May 2021 10:59:46 +0200 Subject: [PATCH 24/35] Update CHANGELOG Signed-off-by: Philippe Ombredanne --- CHANGELOG.rst | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 91cd8ad..9a0606d 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,13 +4,17 @@ Release notes vNext ----- -Version 21.5.29 +Version 21.5.30 --------------- -- Update vendored pygments to 2.7.4 +- Update vendored pygments to 2.9.0 - Update commoncode to latest version - Use new libmagic configuration based on a plugin, and environment variable or the system path. +- Only do content-based detection for programing language if there is no extension. +- Remove Actionscript3 from programing language detection. This is not common + enough and is the source of too many false positives. 
+ Version 21.2.24 From ca73e37583126544939d8f0fac07cbd35f28ecc5 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 May 2021 11:28:45 +0200 Subject: [PATCH 25/35] Add CI links and format ReST Signed-off-by: Philippe Ombredanne --- README.rst | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/README.rst b/README.rst index ee3cc9a..672612d 100644 --- a/README.rst +++ b/README.rst @@ -12,6 +12,11 @@ macOS) and Pygments. It started as library in scancode-toolkit. Visit https://aboutcode.org and https://github.com/nexB/ for support and download. +We run CI tests on: + + - Travis https://travis-ci.org/github/nexB/typecode + - Azure pipelines https://dev.azure.com/nexB/typecode/_build + To install this package with its full capability (where the binaries for libmagic are installed), use the `full` option:: @@ -32,13 +37,17 @@ available in one of these ways: They do so by providing a plugin entry point as a ``scancode_location_provider`` for ``typecode_libmagic`` which points to a callable that must return a mapping with these two keys: + - 'typecode.libmagic.dll': the absolute path to a libmagic DLL - 'typecode.libmagic.db': the absolute path to a libmagic 'magic.mgc' database + See for example: + - https://github.com/nexB/scancode-plugins/blob/4da5fe8a5ab1c87b9b4af9e54d7ad60e289747f5/builtins/typecode_libmagic-linux/setup.py#L42 - https://github.com/nexB/scancode-plugins/blob/4da5fe8a5ab1c87b9b4af9e54d7ad60e289747f5/builtins/typecode_libmagic-linux/src/typecode_libmagic/__init__.py#L32 - **environment variables**: + - TYPECODE_LIBMAGIC_PATH: the absolute path to a libmagic DLL - TYPECODE_LIBMAGIC_DB_PATH: the absolute path to a libmagic 'magic.mgc' database From 001c831959d8b079818236dd3ae7cc76d9648ef0 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Wed, 12 May 2021 09:30:49 +0200 Subject: [PATCH 26/35] Implement new library loading approach We now load the libmagic native library and its database from paths found in: 1. 
environment variables, 2. OR a location provider plugin, 3. OR the system PATH, 4. OR we fail with an informative error message. Based on original code contributed by @priv-kweihmann to scancode-plugins in https://github.com/nexB/scancode-plugins/pull/9 and moved here and adapted for use in the core code rather than in a plugin. Contributed-by: Konrad Weihmann Signed-off-by: Philippe Ombredanne --- src/typecode/magic2.py | 125 +++++++++++++++++++++++++++++++++++------ 1 file changed, 107 insertions(+), 18 deletions(-) diff --git a/src/typecode/magic2.py b/src/typecode/magic2.py index d23aef7..bd8b5ad 100644 --- a/src/typecode/magic2.py +++ b/src/typecode/magic2.py @@ -45,17 +45,33 @@ import os from commoncode import command -from plugincode.location_provider import get_location - -from os import fsencode - -TRACE = False +from commoncode.system import on_windows """ magic2 is minimal and specialized wrapper around a vendored libmagic file identification library. This is NOT thread-safe. It is based on python-magic by Adam Hup and adapted to the specific needs of ScanCode. """ + +# Tracing flag +TRACE = True + + +def logger_debug(*args): + pass + + +if TRACE: + import logging + import sys + + logger = logging.getLogger(__name__) + logging.basicConfig(stream=sys.stdout) + logger.setLevel(logging.DEBUG) + + def logger_debug(*args): + return logger.debug(' '.join(isinstance(a, str) and a or repr(a) for a in args)) + # # Cached detectors # @@ -74,24 +90,97 @@ DETECT_ENC = MAGIC_NONE | MAGIC_MIME | MAGIC_MIME_ENCODING # keys for plugin-provided locations -TYPECODE_LIBMAGIC_LIBDIR = 'typecode.libmagic.libdir' TYPECODE_LIBMAGIC_DLL = 'typecode.libmagic.dll' -TYPECODE_LIBMAGIC_DATABASE = 'typecode.libmagic.db' +TYPECODE_LIBMAGIC_DB = 'typecode.libmagic.db' + +TYPECODE_LIBMAGIC_PATH_ENVVAR = 'TYPECODE_LIBMAGIC_PATH' +TYPECODE_LIBMAGIC_DB_PATH_ENVVAR = 'TYPECODE_LIBMAGIC_DB_PATH' def load_lib(): """ - Return the loaded libmagic shared library object from plugin-provided path. 
+ Return the libmagic shared library object loaded from either: + - an environment variable ``TYPECODE_LIBMAGIC_PATH`` + - a plugin-provided path, + - the system PATH. + Raise an Exception if no libmagic can be found. """ - dll = get_location(TYPECODE_LIBMAGIC_DLL) - libdir = get_location(TYPECODE_LIBMAGIC_LIBDIR) - if not (dll and libdir) or not os.path.isfile(dll) or not os.path.isdir(libdir): + from plugincode.location_provider import get_location + + # try the environment first + dll_loc = os.environ.get(TYPECODE_LIBMAGIC_PATH_ENVVAR) + + if TRACE and dll_loc: + logger_debug('load_lib:', 'got environ magic location:', dll_loc) + + # try a plugin-provided path second + if not dll_loc: + dll_loc = get_location(TYPECODE_LIBMAGIC_DLL) + + if TRACE and dll_loc: + logger_debug('load_lib:', 'got plugin magic location:', dll_loc) + + # try the PATH + if not dll_loc: + dll = 'libmagic.dll' if on_windows else 'libmagic.so' + dll_loc = command.find_in_path(dll) + + if TRACE and dll_loc: + logger_debug('load_lib:', 'got path magic location:', dll_loc) + + if not dll_loc or not os.path.isfile(dll_loc): + raise Exception( + 'CRITICAL: libmagic DLL and its magic database are not installed. ' + 'Unable to continue: you need to install a valid typecode-libmagic ' + 'plugin with a valid and proper libmagic and magic DB available. ' + f'OR set the {TYPECODE_LIBMAGIC_PATH_ENVVAR} environment variable.' + ) + return command.load_shared_library(dll_loc) + + +def get_magicdb_location(_cache=[]): + """ + Return the location of the magicdb loaded from either: + - an environment variable ``TYPECODE_LIBMAGIC_DB_PATH``, + - a plugin-provided path, + - the system PATH. + Raise an Exception if no magicdb command can be found. 
+ """ + if _cache: + return _cache[0] + + from plugincode.location_provider import get_location + + # try the environment first + magicdb_loc = os.environ.get(TYPECODE_LIBMAGIC_DB_PATH_ENVVAR) + + if TRACE and magicdb_loc: + logger_debug('get_magicdb_location:', 'got environ magicdb location:', magicdb_loc) + + # try a plugin-provided path second + if not magicdb_loc: + magicdb_loc = get_location(TYPECODE_LIBMAGIC_DB) + + if TRACE and magicdb_loc: + logger_debug('get_magicdb_location:', 'got plugin magicdb location:', magicdb_loc) + + # try the PATH + if not magicdb_loc: + db = 'magic.mgc' + magicdb_loc = command.find_in_path(db) + + if TRACE and magicdb_loc: + logger_debug('get_magicdb_location:', 'got path magicdb location:', magicdb_loc) + + if not magicdb_loc or not os.path.isfile(magicdb_loc): raise Exception( - 'CRITICAL: libmagic DLL and is magic database are not installed. ' + 'CRITICAL: Libmagic magic database is not installed. ' 'Unable to continue: you need to install a valid typecode-libmagic ' - 'plugin with a valid and proper libmagic and magic DB available.' + 'plugin with a valid magic database available. ' + 'OR set the TYPECODE_LIBMAGIC_DB_PATH environment variable.' 
) - return command.load_shared_library(dll, libdir) + _cache.append(magicdb_loc) + return magicdb_loc if TRACE: @@ -164,11 +253,11 @@ def __init__(self, flags, magic_db_location=None): self.flags = flags self.cookie = _magic_open(self.flags) if not magic_db_location: - magic_db_location = get_location(TYPECODE_LIBMAGIC_DATABASE) + magic_db_location = get_magicdb_location() - # Note: this location must always be bytes on Python2 and 3, all OSes + # Note: this location must always be FS-encoded bytes on all OSes if isinstance(magic_db_location, str): - magic_db_location = fsencode(magic_db_location) + magic_db_location = os.fsencode(magic_db_location) _magic_load(self.cookie, magic_db_location) @@ -190,7 +279,7 @@ def get(self, location): # location string may therefore be mangled and the file not accessible # anymore by libmagic in some cases. try: - uloc = fsencode(location) + uloc = os.fsencode(location) return _magic_file(self.cookie, uloc) except: # if all fails, read the start of the file instead From 9963b4401acdd0fe7ccffc9098856087d17947d3 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 May 2021 19:41:10 +0200 Subject: [PATCH 27/35] Rename test file Signed-off-by: Philippe Ombredanne --- tests/{test_libmagic_load.py => test_magic2.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/{test_libmagic_load.py => test_magic2.py} (100%) diff --git a/tests/test_libmagic_load.py b/tests/test_magic2.py similarity index 100% rename from tests/test_libmagic_load.py rename to tests/test_magic2.py From b7f7337eca18375fe99b1b868fa1cb7538e0dd3e Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 May 2021 19:56:05 +0200 Subject: [PATCH 28/35] Remove unused import Signed-off-by: Philippe Ombredanne --- tests/test_magic2.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/test_magic2.py b/tests/test_magic2.py index 84b6995..5509d08 100644 --- a/tests/test_magic2.py +++ b/tests/test_magic2.py @@ -17,9 +17,6 @@ # See the 
License for the specific language governing permissions and # limitations under the License. # - -import os - from typecode.magic2 import libmagic_version From 68a2b7d1e19045323646cddd81bfd5081c72de04 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 May 2021 19:57:30 +0200 Subject: [PATCH 29/35] Format code and streamline failover support #17 Signed-off-by: Philippe Ombredanne --- src/typecode/magic2.py | 139 +++++++++++++++++++++++++++-------------- 1 file changed, 92 insertions(+), 47 deletions(-) diff --git a/src/typecode/magic2.py b/src/typecode/magic2.py index 2c65073..e9ae4a1 100644 --- a/src/typecode/magic2.py +++ b/src/typecode/magic2.py @@ -42,10 +42,12 @@ # SOFTWARE. import ctypes +import glob import os from commoncode import command from commoncode.system import on_windows +import warnings """ magic2 is minimal and specialized wrapper around a vendored libmagic file @@ -97,42 +99,64 @@ def logger_debug(*args): TYPECODE_LIBMAGIC_DB_PATH_ENVVAR = 'TYPECODE_LIBMAGIC_DB_PATH' +class NoMagicLibError(Exception): + """ + Raised when no libmagic library is found. + """ + + def load_lib_failover(): - # loader from python-magic - libmagic = None - # Let's try to find magic or magic1 - dll = (ctypes.util.find_library('magic') + """ + Return a loaded libmagic from well known system installation locations. + This is a function originally from python-magic. 
+ """ + libmagic = None + # Let's try to find magic or magic1 + dll = ( + ctypes.util.find_library('magic') or ctypes.util.find_library('magic1') or ctypes.util.find_library('cygmagic-1') or ctypes.util.find_library('libmagic-1') - or ctypes.util.find_library('msys-magic-1') # for MSYS2 - ) - # necessary because find_library returns None if it doesn't find the library - if dll: - libmagic = ctypes.CDLL(dll) - - if not libmagic or not libmagic._name: - windows_dlls = ['magic1.dll', 'cygmagic-1.dll', 'libmagic-1.dll', 'msys-magic-1.dll'] - platform_to_lib = {'darwin': ['/opt/local/lib/libmagic.dylib', - '/usr/local/lib/libmagic.dylib'] + - # Assumes there will only be one version installed - glob.glob('/usr/local/Cellar/libmagic/*/lib/libmagic.dylib'), # flake8:noqa - 'win32': windows_dlls, - 'cygwin': windows_dlls, - 'linux': ['libmagic.so.1'], - # fallback for some Linuxes (e.g. Alpine) where library search does not work # flake8:noqa - } - platform = 'linux' if sys.platform.startswith('linux') else sys.platform - for dll in platform_to_lib.get(platform, []): - try: - libmagic = ctypes.CDLL(dll) - break - except OSError: - pass - - if not libmagic or not libmagic._name: - return None - return libmagic + # for MSYS2 + or ctypes.util.find_library('msys-magic-1') + ) + # necessary because find_library returns None if it doesn't find the library + if dll: + libmagic = ctypes.CDLL(dll) + + if not (libmagic and libmagic._name): + windows_dlls = [ + 'magic1.dll', + 'cygmagic-1.dll', + 'libmagic-1.dll', + 'msys-magic-1.dll', + ] + platform_to_lib = { + 'darwin': ( + [ + '/opt/local/lib/libmagic.dylib', + '/usr/local/lib/libmagic.dylib', + ] + + # Assumes there will only be one version installed when using brew + glob.glob('/usr/local/Cellar/libmagic/*/lib/libmagic.dylib') + ), + 'win32': windows_dlls, + 'cygwin': windows_dlls, + 'linux': ['libmagic.so.1'], + } + # fallback for some Linuxes (e.g. 
Alpine) where library search does not + # work # flake8:noqa + platform = 'linux' if sys.platform.startswith('linux') else sys.platform + for dll in platform_to_lib.get(platform, []): + try: + libmagic = ctypes.CDLL(dll) + break + except OSError: + pass + + if libmagic and libmagic._name: + return libmagic + def load_lib(): """ @@ -140,7 +164,7 @@ def load_lib(): - an environment variable ``TYPECODE_LIBMAGIC_PATH`` - a plugin-provided path, - the system PATH. - Raise an Exception if no libmagic can be found. + Raise an NoMagicLibError if no libmagic can be found. """ from plugincode.location_provider import get_location @@ -161,23 +185,35 @@ def load_lib(): if not dll_loc: failover_lib = load_lib_failover() if failover_lib: + warnings.warn( + 'System libmagic found in typical location is used. ' + 'Install instead a typecode-libmagic plugin for best support.' + ) return failover_lib - # try the PATH if not dll_loc: dll = 'libmagic.dll' if on_windows else 'libmagic.so' dll_loc = command.find_in_path(dll) + if dll_loc: + warnings.warn( + 'libmagic found in the PATH. ' + 'Install instead a typecode-libmagic plugin for best support.' + ) + if TRACE and dll_loc: logger_debug('load_lib:', 'got path magic location:', dll_loc) if not dll_loc or not os.path.isfile(dll_loc): - raise Exception( + raise NoMagicLibError( 'CRITICAL: libmagic DLL and its magic database are not installed. ' 'Unable to continue: you need to install a valid typecode-libmagic ' - 'plugin with a valid and proper libmagic and magic DB available. ' - f'OR set the {TYPECODE_LIBMAGIC_PATH_ENVVAR} environment variable.' 
+ 'plugin with a valid and proper libmagic and magic DB available.\n' + f'OR set the {TYPECODE_LIBMAGIC_PATH_ENVVAR} and ' + f'{TYPECODE_LIBMAGIC_DB_PATH_ENVVAR} environment variables.\n' + f'OR install libmagic in typical common locations.\n' + f'OR have a libmagic in the system PATH.\n' ) return command.load_shared_library(dll_loc) @@ -188,7 +224,7 @@ def get_magicdb_location(_cache=[]): - an environment variable ``TYPECODE_LIBMAGIC_DB_PATH``, - a plugin-provided path, - the system PATH. - Raise an Exception if no magicdb command can be found. + Trigger a warning if no magicdb file is found. """ if _cache: return _cache[0] @@ -213,16 +249,25 @@ def get_magicdb_location(_cache=[]): db = 'magic.mgc' magicdb_loc = command.find_in_path(db) + if magicdb_loc: + warnings.warn( + 'magicdb found in the PATH. ' + 'Install instead a typecode-libmagic plugin for best support.\n' + f'OR set the {TYPECODE_LIBMAGIC_DB_PATH_ENVVAR} environment variable.' + ) + if TRACE and magicdb_loc: logger_debug('get_magicdb_location:', 'got path magicdb location:', magicdb_loc) - if not magicdb_loc or not os.path.isfile(magicdb_loc): - raise Exception( - 'CRITICAL: Libmagic magic database is not installed. ' - 'Unable to continue: you need to install a valid typecode-libmagic ' - 'plugin with a valid magic database available. ' - 'OR set the TYPECODE_LIBMAGIC_DB_PATH environment variable.' - ) + if not magicdb_loc: + warnings.warn( + 'Libmagic magic database not found. ' + 'A default will be used if possible. ' + 'Install instead a typecode-libmagic plugin for best support.\n' + f'OR set the {TYPECODE_LIBMAGIC_DB_PATH_ENVVAR} environment variable.' 
+ ) + return + _cache.append(magicdb_loc) return magicdb_loc @@ -297,11 +342,11 @@ def __init__(self, flags, magic_db_location=None): self.flags = flags self.cookie = _magic_open(self.flags) if not magic_db_location: - # if no plugin, None is returned, and libmagic will load the default db + # Caveat emptor: this may be empty in which case a default will be tried magic_db_location = get_magicdb_location() # Note: this location must always be FS-encoded bytes on all OSes - if isinstance(magic_db_location, str): + if magic_db_location and not isinstance(magic_db_location, bytes): magic_db_location = os.fsencode(magic_db_location) _magic_load(self.cookie, magic_db_location) From 237569df289fb4a9b1085b4ab422b9f89b563cb1 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Sun, 30 May 2021 21:56:32 +0200 Subject: [PATCH 30/35] Format code and streamline license headers Signed-off-by: Philippe Ombredanne --- NOTICE | 21 +- src/typecode/contenttype.py | 21 +- src/typecode/entropy.py | 21 +- src/typecode/extractible.py | 22 +- src/typecode/magic2.py | 25 +- src/typecode/mimetypes.py | 7 + src/typecode/pygments_lexers.py | 5 + src/typecode/pygments_lexers_mapping.py | 854 ++++++++++++------------ tests/filetype_test_utils.py | 21 +- tests/test_contenttype.py | 21 +- tests/test_entropy.py | 21 +- tests/test_extractible.py | 21 +- tests/test_magic2.py | 24 +- tests/test_types.py | 22 +- 14 files changed, 499 insertions(+), 607 deletions(-) diff --git a/NOTICE b/NOTICE index 65936b2..fc8995f 100644 --- a/NOTICE +++ b/NOTICE @@ -1,19 +1,8 @@ # -# Copyright (c) nexB Inc. and others. -# SPDX-License-Identifier: Apache-2.0 -# -# Visit https://aboutcode.org and https://github.com/nexB/ for support and download. +# Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/nexB/typecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. # diff --git a/src/typecode/contenttype.py b/src/typecode/contenttype.py index 7737636..414df3b 100644 --- a/src/typecode/contenttype.py +++ b/src/typecode/contenttype.py @@ -1,21 +1,10 @@ # -# Copyright (c) nexB Inc. and others. -# SPDX-License-Identifier: Apache-2.0 -# -# Visit https://aboutcode.org and https://github.com/nexB/ for support and download. +# Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/nexB/typecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. 
# import contextlib diff --git a/src/typecode/entropy.py b/src/typecode/entropy.py index 3836779..ba51a3e 100644 --- a/src/typecode/entropy.py +++ b/src/typecode/entropy.py @@ -1,21 +1,10 @@ # -# Copyright (c) nexB Inc. and others. -# SPDX-License-Identifier: Apache-2.0 -# -# Visit https://aboutcode.org and https://github.com/nexB/ for support and download. +# Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/nexB/typecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. # from collections import Counter diff --git a/src/typecode/extractible.py b/src/typecode/extractible.py index 096ff52..5a87d88 100644 --- a/src/typecode/extractible.py +++ b/src/typecode/extractible.py @@ -1,21 +1,10 @@ # -# Copyright (c) nexB Inc. and others. -# SPDX-License-Identifier: Apache-2.0 -# -# Visit https://aboutcode.org and https://github.com/nexB/ for support and download. +# Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/nexB/typecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. # from functools import partial @@ -25,7 +14,6 @@ import tarfile import zipfile - """ Utilities to detect is a file is extractible. We prefer using extractcode support if available, otherwise we use the standard library archive diff --git a/src/typecode/magic2.py b/src/typecode/magic2.py index e9ae4a1..40f6677 100644 --- a/src/typecode/magic2.py +++ b/src/typecode/magic2.py @@ -1,26 +1,13 @@ # -# Copyright (c) nexB Inc. and others. -# SPDX-License-Identifier: Apache-2.0 AND MIT -# -# Visit https://aboutcode.org and https://github.com/nexB/ for support and download. +# Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# +# SPDX-License-Identifier: Apache-2.0 AND MIT +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/nexB/typecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. # # This code was in part derived from the python-magic library: -# The MIT License (MIT) -# + # Copyright (c) 2001-2014 Adam Hupp # # Permission is hereby granted, free of charge, to any person obtaining a copy diff --git a/src/typecode/mimetypes.py b/src/typecode/mimetypes.py index eaad6e8..5f4853f 100644 --- a/src/typecode/mimetypes.py +++ b/src/typecode/mimetypes.py @@ -1,3 +1,10 @@ +# +# Copyright (c) Copyright (c) Python Software Foundation. All rights reserved. +# SPDX-License-Identifier: Python-2.0 +# See https://github.com/nexB/typecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. +# + """ Guess the MIME type of a file extension. download_url: https://github.com/python/cpython/blob/v3.9.1/Lib/mimetypes.py diff --git a/src/typecode/pygments_lexers.py b/src/typecode/pygments_lexers.py index 28fe86b..d225d62 100644 --- a/src/typecode/pygments_lexers.py +++ b/src/typecode/pygments_lexers.py @@ -1,4 +1,8 @@ # -*- coding: utf-8 -*- + +# SPDX-License-Identifier: BSD-2-Clause +# Copyright (c) Pygments authors + """ pygments.lexers ~~~~~~~~~~~~~~~ @@ -283,6 +287,7 @@ def type_sort(t): # - priority # - last resort: class name return (t[0], primary[t[1]], t[1].priority, t[1].__name__) + result.sort(key=type_sort) return result[-1][1](**options) diff --git a/src/typecode/pygments_lexers_mapping.py b/src/typecode/pygments_lexers_mapping.py index 97d3cd8..1875ccd 100644 --- a/src/typecode/pygments_lexers_mapping.py +++ b/src/typecode/pygments_lexers_mapping.py @@ -1,4 +1,8 @@ # -*- coding: utf-8 -*- + +# SPDX-License-Identifier: BSD-2-Clause +# Copyright (c) Pygments authors + """ This file is a list of the subset of lexers we care for as actual programming 
languages from the larger whole generated list at pygments.lexers._mapping @@ -21,493 +25,493 @@ LEXERS = { 'ABAPLexer': ('typecode._vendor.pygments.lexers.business', 'ABAP', ('abap',), ('*.abap', '*.ABAP'), ('text/x-abap',)), - #'APLLexer': ('typecode._vendor.pygments.lexers.apl', 'APL', ('apl',), ('*.apl',), ()), - #'AbnfLexer': ('typecode._vendor.pygments.lexers.grammar_notation', 'ABNF', ('abnf',), ('*.abnf',), ('text/x-abnf',)), - #'ActionScript3Lexer': ('typecode._vendor.pygments.lexers.actionscript', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')), - #'ActionScriptLexer': ('typecode._vendor.pygments.lexers.actionscript', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')), - #'AdaLexer': ('typecode._vendor.pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)), - #'AdlLexer': ('typecode._vendor.pygments.lexers.archetype', 'ADL', ('adl',), ('*.adl', '*.adls', '*.adlf', '*.adlx'), ()), - #'AgdaLexer': ('typecode._vendor.pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)), - #'AheuiLexer': ('typecode._vendor.pygments.lexers.esoteric', 'Aheui', ('aheui',), ('*.aheui',), ()), - #'AlloyLexer': ('typecode._vendor.pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)), - #'AmbientTalkLexer': ('typecode._vendor.pygments.lexers.ambient', 'AmbientTalk', ('at', 'ambienttalk', 'ambienttalk/2'), ('*.at',), ('text/x-ambienttalk',)), - #'AmplLexer': ('typecode._vendor.pygments.lexers.ampl', 'Ampl', ('ampl',), ('*.run',), ()), - #'Angular2HtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML + Angular2', ('html+ng2',), ('*.ng2',), ()), - #'Angular2Lexer': ('typecode._vendor.pygments.lexers.templates', 'Angular2', ('ng2',), (), ()), - #'AntlrActionScriptLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR 
With ActionScript Target', ('antlr-as', 'antlr-actionscript'), ('*.G', '*.g'), ()), - #'AntlrCSharpLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()), - #'AntlrCppLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()), - #'AntlrJavaLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With Java Target', ('antlr-java',), ('*.G', '*.g'), ()), + # 'APLLexer': ('typecode._vendor.pygments.lexers.apl', 'APL', ('apl',), ('*.apl',), ()), + # 'AbnfLexer': ('typecode._vendor.pygments.lexers.grammar_notation', 'ABNF', ('abnf',), ('*.abnf',), ('text/x-abnf',)), + # 'ActionScript3Lexer': ('typecode._vendor.pygments.lexers.actionscript', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')), + # 'ActionScriptLexer': ('typecode._vendor.pygments.lexers.actionscript', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')), + # 'AdaLexer': ('typecode._vendor.pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)), + # 'AdlLexer': ('typecode._vendor.pygments.lexers.archetype', 'ADL', ('adl',), ('*.adl', '*.adls', '*.adlf', '*.adlx'), ()), + # 'AgdaLexer': ('typecode._vendor.pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)), + # 'AheuiLexer': ('typecode._vendor.pygments.lexers.esoteric', 'Aheui', ('aheui',), ('*.aheui',), ()), + # 'AlloyLexer': ('typecode._vendor.pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)), + # 'AmbientTalkLexer': ('typecode._vendor.pygments.lexers.ambient', 'AmbientTalk', ('at', 'ambienttalk', 'ambienttalk/2'), ('*.at',), ('text/x-ambienttalk',)), + # 'AmplLexer': ('typecode._vendor.pygments.lexers.ampl', 'Ampl', ('ampl',), ('*.run',), ()), + # 'Angular2HtmlLexer': 
('typecode._vendor.pygments.lexers.templates', 'HTML + Angular2', ('html+ng2',), ('*.ng2',), ()), + # 'Angular2Lexer': ('typecode._vendor.pygments.lexers.templates', 'Angular2', ('ng2',), (), ()), + # 'AntlrActionScriptLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-as', 'antlr-actionscript'), ('*.G', '*.g'), ()), + # 'AntlrCSharpLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()), + # 'AntlrCppLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()), + # 'AntlrJavaLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With Java Target', ('antlr-java',), ('*.G', '*.g'), ()), 'AntlrLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR', ('antlr',), (), ()), - #'AntlrObjectiveCLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With ObjectiveC Target', ('antlr-objc',), ('*.G', '*.g'), ()), - #'AntlrPerlLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With Perl Target', ('antlr-perl',), ('*.G', '*.g'), ()), - #'AntlrPythonLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()), - #'AntlrRubyLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With Ruby Target', ('antlr-ruby', 'antlr-rb'), ('*.G', '*.g'), ()), - #'ApacheConfLexer': ('typecode._vendor.pygments.lexers.configs', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)), - #'AppleScriptLexer': ('typecode._vendor.pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()), - #'ArduinoLexer': ('typecode._vendor.pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)), - #'ArrowLexer': ('typecode._vendor.pygments.arrow', 'Arrow', ('arrow',), ('*.arw',), ()), + # 'AntlrObjectiveCLexer': 
('typecode._vendor.pygments.lexers.parsers', 'ANTLR With ObjectiveC Target', ('antlr-objc',), ('*.G', '*.g'), ()), + # 'AntlrPerlLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With Perl Target', ('antlr-perl',), ('*.G', '*.g'), ()), + # 'AntlrPythonLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()), + # 'AntlrRubyLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With Ruby Target', ('antlr-ruby', 'antlr-rb'), ('*.G', '*.g'), ()), + # 'ApacheConfLexer': ('typecode._vendor.pygments.lexers.configs', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)), + # 'AppleScriptLexer': ('typecode._vendor.pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()), + # 'ArduinoLexer': ('typecode._vendor.pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)), + # 'ArrowLexer': ('typecode._vendor.pygments.arrow', 'Arrow', ('arrow',), ('*.arw',), ()), 'AspectJLexer': ('typecode._vendor.pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)), - #'AsymptoteLexer': ('typecode._vendor.pygments.lexers.graphics', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)), - #'AugeasLexer': ('typecode._vendor.pygments.lexers.configs', 'Augeas', ('augeas',), ('*.aug',), ()), - #'AutoItLexer': ('typecode._vendor.pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)), - #'AutohotkeyLexer': ('typecode._vendor.pygments.lexers.automation', 'autohotkey', ('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)), - #'AwkLexer': ('typecode._vendor.pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)), - #'BBCBasicLexer': ('typecode._vendor.pygments.lexers.basic', 'BBC Basic', ('bbcbasic',), ('*.bbc',), ()), - #'BBCodeLexer': ('typecode._vendor.pygments.lexers.markup', 
'BBCode', ('bbcode',), (), ('text/x-bbcode',)), - #'BCLexer': ('typecode._vendor.pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()), - #'BSTLexer': ('typecode._vendor.pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()), - #'BareLexer': ('typecode._vendor.pygments.lexers.bare', 'BARE', ('bare',), ('*.bare',), ()), - #'BaseMakefileLexer': ('typecode._vendor.pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()), + # 'AsymptoteLexer': ('typecode._vendor.pygments.lexers.graphics', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)), + # 'AugeasLexer': ('typecode._vendor.pygments.lexers.configs', 'Augeas', ('augeas',), ('*.aug',), ()), + # 'AutoItLexer': ('typecode._vendor.pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)), + # 'AutohotkeyLexer': ('typecode._vendor.pygments.lexers.automation', 'autohotkey', ('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)), + # 'AwkLexer': ('typecode._vendor.pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)), + # 'BBCBasicLexer': ('typecode._vendor.pygments.lexers.basic', 'BBC Basic', ('bbcbasic',), ('*.bbc',), ()), + # 'BBCodeLexer': ('typecode._vendor.pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)), + # 'BCLexer': ('typecode._vendor.pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()), + # 'BSTLexer': ('typecode._vendor.pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()), + # 'BareLexer': ('typecode._vendor.pygments.lexers.bare', 'BARE', ('bare',), ('*.bare',), ()), + # 'BaseMakefileLexer': ('typecode._vendor.pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()), 'BashLexer': ('typecode._vendor.pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', 'PKGBUILD'), 
('application/x-sh', 'application/x-shellscript', 'text/x-shellscript')), - #'BashSessionLexer': ('typecode._vendor.pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')), + # 'BashSessionLexer': ('typecode._vendor.pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')), 'BatchLexer': ('typecode._vendor.pygments.lexers.shell', 'Batchfile', ('bat', 'batch', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)), - #'BefungeLexer': ('typecode._vendor.pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)), - #'BibTeXLexer': ('typecode._vendor.pygments.lexers.bibtex', 'BibTeX', ('bib', 'bibtex'), ('*.bib',), ('text/x-bibtex',)), - #'BlitzBasicLexer': ('typecode._vendor.pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)), - #'BlitzMaxLexer': ('typecode._vendor.pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)), - #'BnfLexer': ('typecode._vendor.pygments.lexers.grammar_notation', 'BNF', ('bnf',), ('*.bnf',), ('text/x-bnf',)), - #'BoaLexer': ('typecode._vendor.pygments.lexers.boa', 'Boa', ('boa',), ('*.boa',), ()), - #'BooLexer': ('typecode._vendor.pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)), - #'BoogieLexer': ('typecode._vendor.pygments.lexers.verification', 'Boogie', ('boogie',), ('*.bpl',), ()), - #'BrainfuckLexer': ('typecode._vendor.pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)), - #'BugsLexer': ('typecode._vendor.pygments.lexers.modeling', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()), - #'CAmkESLexer': ('typecode._vendor.pygments.lexers.esoteric', 'CAmkES', ('camkes', 'idl4'), ('*.camkes', 
'*.idl4'), ()), + # 'BefungeLexer': ('typecode._vendor.pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)), + # 'BibTeXLexer': ('typecode._vendor.pygments.lexers.bibtex', 'BibTeX', ('bib', 'bibtex'), ('*.bib',), ('text/x-bibtex',)), + # 'BlitzBasicLexer': ('typecode._vendor.pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)), + # 'BlitzMaxLexer': ('typecode._vendor.pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)), + # 'BnfLexer': ('typecode._vendor.pygments.lexers.grammar_notation', 'BNF', ('bnf',), ('*.bnf',), ('text/x-bnf',)), + # 'BoaLexer': ('typecode._vendor.pygments.lexers.boa', 'Boa', ('boa',), ('*.boa',), ()), + # 'BooLexer': ('typecode._vendor.pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)), + # 'BoogieLexer': ('typecode._vendor.pygments.lexers.verification', 'Boogie', ('boogie',), ('*.bpl',), ()), + # 'BrainfuckLexer': ('typecode._vendor.pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)), + # 'BugsLexer': ('typecode._vendor.pygments.lexers.modeling', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()), + # 'CAmkESLexer': ('typecode._vendor.pygments.lexers.esoteric', 'CAmkES', ('camkes', 'idl4'), ('*.camkes', '*.idl4'), ()), 'CLexer': ('typecode._vendor.pygments.lexers.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')), 'CMakeLexer': ('typecode._vendor.pygments.lexers.make', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)), - #'CObjdumpLexer': ('typecode._vendor.pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)), - #'CPSALexer': ('typecode._vendor.pygments.lexers.lisp', 'CPSA', ('cpsa',), ('*.cpsa',), ()), + # 'CObjdumpLexer': ('typecode._vendor.pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)), + # 
'CPSALexer': ('typecode._vendor.pygments.lexers.lisp', 'CPSA', ('cpsa',), ('*.cpsa',), ()), 'CSharpAspxLexer': ('typecode._vendor.pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()), 'CSharpLexer': ('typecode._vendor.pygments.lexers.dotnet', 'C#', ('csharp', 'c#'), ('*.cs',), ('text/x-csharp',)), - #'Ca65Lexer': ('typecode._vendor.pygments.lexers.asm', 'ca65 assembler', ('ca65',), ('*.s',), ()), - #'CadlLexer': ('typecode._vendor.pygments.lexers.archetype', 'cADL', ('cadl',), ('*.cadl',), ()), - #'CapDLLexer': ('typecode._vendor.pygments.lexers.esoteric', 'CapDL', ('capdl',), ('*.cdl',), ()), - #'CapnProtoLexer': ('typecode._vendor.pygments.lexers.capnproto', "Cap'n Proto", ('capnp',), ('*.capnp',), ()), - #'CbmBasicV2Lexer': ('typecode._vendor.pygments.lexers.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()), - #'CeylonLexer': ('typecode._vendor.pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)), - #'Cfengine3Lexer': ('typecode._vendor.pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()), - #'ChaiscriptLexer': ('typecode._vendor.pygments.lexers.scripting', 'ChaiScript', ('chai', 'chaiscript'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')), - #'ChapelLexer': ('typecode._vendor.pygments.lexers.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()), - #'CharmciLexer': ('typecode._vendor.pygments.lexers.c_like', 'Charmci', ('charmci',), ('*.ci',), ()), - #'CheetahHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')), - #'CheetahJavascriptLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 
'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')), - #'CheetahLexer': ('typecode._vendor.pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')), - #'CheetahXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')), - #'CirruLexer': ('typecode._vendor.pygments.lexers.webmisc', 'Cirru', ('cirru',), ('*.cirru',), ('text/x-cirru',)), - #'ClayLexer': ('typecode._vendor.pygments.lexers.c_like', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)), - #'CleanLexer': ('typecode._vendor.pygments.lexers.clean', 'Clean', ('clean',), ('*.icl', '*.dcl'), ()), + # 'Ca65Lexer': ('typecode._vendor.pygments.lexers.asm', 'ca65 assembler', ('ca65',), ('*.s',), ()), + # 'CadlLexer': ('typecode._vendor.pygments.lexers.archetype', 'cADL', ('cadl',), ('*.cadl',), ()), + # 'CapDLLexer': ('typecode._vendor.pygments.lexers.esoteric', 'CapDL', ('capdl',), ('*.cdl',), ()), + # 'CapnProtoLexer': ('typecode._vendor.pygments.lexers.capnproto', "Cap'n Proto", ('capnp',), ('*.capnp',), ()), + # 'CbmBasicV2Lexer': ('typecode._vendor.pygments.lexers.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()), + # 'CeylonLexer': ('typecode._vendor.pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)), + # 'Cfengine3Lexer': ('typecode._vendor.pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()), + # 'ChaiscriptLexer': ('typecode._vendor.pygments.lexers.scripting', 'ChaiScript', ('chai', 'chaiscript'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')), + # 'ChapelLexer': ('typecode._vendor.pygments.lexers.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()), + # 'CharmciLexer': ('typecode._vendor.pygments.lexers.c_like', 'Charmci', ('charmci',), ('*.ci',), ()), + # 'CheetahHtmlLexer': 
('typecode._vendor.pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')), + # 'CheetahJavascriptLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')), + # 'CheetahLexer': ('typecode._vendor.pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')), + # 'CheetahXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')), + # 'CirruLexer': ('typecode._vendor.pygments.lexers.webmisc', 'Cirru', ('cirru',), ('*.cirru',), ('text/x-cirru',)), + # 'ClayLexer': ('typecode._vendor.pygments.lexers.c_like', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)), + # 'CleanLexer': ('typecode._vendor.pygments.lexers.clean', 'Clean', ('clean',), ('*.icl', '*.dcl'), ()), 'ClojureLexer': ('typecode._vendor.pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')), 'ClojureScriptLexer': ('typecode._vendor.pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')), - #'CobolFreeformatLexer': ('typecode._vendor.pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()), + # 'CobolFreeformatLexer': ('typecode._vendor.pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()), 'CobolLexer': ('typecode._vendor.pygments.lexers.business', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)), 'CoffeeScriptLexer': 
('typecode._vendor.pygments.lexers.javascript', 'CoffeeScript', ('coffee-script', 'coffeescript', 'coffee'), ('*.coffee',), ('text/coffeescript',)), - #'ColdfusionCFCLexer': ('typecode._vendor.pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()), - #'ColdfusionHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)), - #'ColdfusionLexer': ('typecode._vendor.pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()), + # 'ColdfusionCFCLexer': ('typecode._vendor.pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()), + # 'ColdfusionHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)), + # 'ColdfusionLexer': ('typecode._vendor.pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()), 'CommonLispLexer': ('typecode._vendor.pygments.lexers.lisp', 'Common Lisp', ('common-lisp', 'cl', 'lisp'), ('*.cl', '*.lisp'), ('text/x-common-lisp',)), - #'ComponentPascalLexer': ('typecode._vendor.pygments.lexers.oberon', 'Component Pascal', ('componentpascal', 'cp'), ('*.cp', '*.cps'), ('text/x-component-pascal',)), - #'CoqLexer': ('typecode._vendor.pygments.lexers.theorem', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)), + # 'ComponentPascalLexer': ('typecode._vendor.pygments.lexers.oberon', 'Component Pascal', ('componentpascal', 'cp'), ('*.cp', '*.cps'), ('text/x-component-pascal',)), + # 'CoqLexer': ('typecode._vendor.pygments.lexers.theorem', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)), 'CppLexer': ('typecode._vendor.pygments.lexers.c_cpp', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP'), ('text/x-c++hdr', 'text/x-c++src')), - #'CppObjdumpLexer': ('typecode._vendor.pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), 
('text/x-cpp-objdump',)), - #'CrmshLexer': ('typecode._vendor.pygments.lexers.dsls', 'Crmsh', ('crmsh', 'pcmk'), ('*.crmsh', '*.pcmk'), ()), - #'CrocLexer': ('typecode._vendor.pygments.lexers.d', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)), - #'CryptolLexer': ('typecode._vendor.pygments.lexers.haskell', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)), - #'CrystalLexer': ('typecode._vendor.pygments.lexers.crystal', 'Crystal', ('cr', 'crystal'), ('*.cr',), ('text/x-crystal',)), - #'CsoundDocumentLexer': ('typecode._vendor.pygments.lexers.csound', 'Csound Document', ('csound-document', 'csound-csd'), ('*.csd',), ()), - #'CsoundOrchestraLexer': ('typecode._vendor.pygments.lexers.csound', 'Csound Orchestra', ('csound', 'csound-orc'), ('*.orc', '*.udo'), ()), - #'CsoundScoreLexer': ('typecode._vendor.pygments.lexers.csound', 'Csound Score', ('csound-score', 'csound-sco'), ('*.sco',), ()), - #'CssDjangoLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')), - #'CssErbLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Ruby', ('css+erb', 'css+ruby'), (), ('text/css+ruby',)), - #'CssGenshiLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)), + # 'CppObjdumpLexer': ('typecode._vendor.pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)), + # 'CrmshLexer': ('typecode._vendor.pygments.lexers.dsls', 'Crmsh', ('crmsh', 'pcmk'), ('*.crmsh', '*.pcmk'), ()), + # 'CrocLexer': ('typecode._vendor.pygments.lexers.d', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)), + # 'CryptolLexer': ('typecode._vendor.pygments.lexers.haskell', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)), + # 'CrystalLexer': ('typecode._vendor.pygments.lexers.crystal', 'Crystal', 
('cr', 'crystal'), ('*.cr',), ('text/x-crystal',)), + # 'CsoundDocumentLexer': ('typecode._vendor.pygments.lexers.csound', 'Csound Document', ('csound-document', 'csound-csd'), ('*.csd',), ()), + # 'CsoundOrchestraLexer': ('typecode._vendor.pygments.lexers.csound', 'Csound Orchestra', ('csound', 'csound-orc'), ('*.orc', '*.udo'), ()), + # 'CsoundScoreLexer': ('typecode._vendor.pygments.lexers.csound', 'Csound Score', ('csound-score', 'csound-sco'), ('*.sco',), ()), + # 'CssDjangoLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')), + # 'CssErbLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Ruby', ('css+erb', 'css+ruby'), (), ('text/css+ruby',)), + # 'CssGenshiLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)), 'CssLexer': ('typecode._vendor.pygments.lexers.css', 'CSS', ('css',), ('*.css',), ('text/css',)), - #'CssPhpLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)), - #'CssSmartyLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)), - #'CudaLexer': ('typecode._vendor.pygments.lexers.c_like', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)), - #'CypherLexer': ('typecode._vendor.pygments.lexers.graph', 'Cypher', ('cypher',), ('*.cyp', '*.cypher'), ()), + # 'CssPhpLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)), + # 'CssSmartyLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)), + # 'CudaLexer': ('typecode._vendor.pygments.lexers.c_like', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)), + # 'CypherLexer': ('typecode._vendor.pygments.lexers.graph', 'Cypher', ('cypher',), ('*.cyp', '*.cypher'), ()), 'CythonLexer': 
('typecode._vendor.pygments.lexers.python', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')), - #'DLexer': ('typecode._vendor.pygments.lexers.d', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)), - #'DObjdumpLexer': ('typecode._vendor.pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)), - #'DarcsPatchLexer': ('typecode._vendor.pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()), + # 'DLexer': ('typecode._vendor.pygments.lexers.d', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)), + # 'DObjdumpLexer': ('typecode._vendor.pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)), + # 'DarcsPatchLexer': ('typecode._vendor.pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()), 'DartLexer': ('typecode._vendor.pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)), 'Dasm16Lexer': ('typecode._vendor.pygments.lexers.asm', 'DASM16', ('dasm16',), ('*.dasm16', '*.dasm'), ('text/x-dasm16',)), - #'DebianControlLexer': ('typecode._vendor.pygments.lexers.installers', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()), + # 'DebianControlLexer': ('typecode._vendor.pygments.lexers.installers', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()), 'DelphiLexer': ('typecode._vendor.pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)), - #'DevicetreeLexer': ('typecode._vendor.pygments.lexers.devicetree', 'Devicetree', ('devicetree', 'dts'), ('*.dts', '*.dtsi'), ('text/x-c',)), - #'DgLexer': ('typecode._vendor.pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)), - #'DiffLexer': ('typecode._vendor.pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')), - #'DjangoLexer': 
('typecode._vendor.pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')), - #'DockerLexer': ('typecode._vendor.pygments.lexers.configs', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)), - #'DtdLexer': ('typecode._vendor.pygments.lexers.html', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)), - #'DuelLexer': ('typecode._vendor.pygments.lexers.webmisc', 'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')), - #'DylanConsoleLexer': ('typecode._vendor.pygments.lexers.dylan', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)), - #'DylanLexer': ('typecode._vendor.pygments.lexers.dylan', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)), - #'DylanLidLexer': ('typecode._vendor.pygments.lexers.dylan', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)), - #'ECLLexer': ('typecode._vendor.pygments.lexers.ecl', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)), - #'ECLexer': ('typecode._vendor.pygments.lexers.c_like', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')), - #'EarlGreyLexer': ('typecode._vendor.pygments.lexers.javascript', 'Earl Grey', ('earl-grey', 'earlgrey', 'eg'), ('*.eg',), ('text/x-earl-grey',)), - #'EasytrieveLexer': ('typecode._vendor.pygments.lexers.scripting', 'Easytrieve', ('easytrieve',), ('*.ezt', '*.mac'), ('text/x-easytrieve',)), - #'EbnfLexer': ('typecode._vendor.pygments.lexers.parsers', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)), - #'EiffelLexer': ('typecode._vendor.pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)), - #'ElixirConsoleLexer': ('typecode._vendor.pygments.lexers.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)), + # 'DevicetreeLexer': ('typecode._vendor.pygments.lexers.devicetree', 'Devicetree', 
('devicetree', 'dts'), ('*.dts', '*.dtsi'), ('text/x-c',)), + # 'DgLexer': ('typecode._vendor.pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)), + # 'DiffLexer': ('typecode._vendor.pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')), + # 'DjangoLexer': ('typecode._vendor.pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')), + # 'DockerLexer': ('typecode._vendor.pygments.lexers.configs', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)), + # 'DtdLexer': ('typecode._vendor.pygments.lexers.html', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)), + # 'DuelLexer': ('typecode._vendor.pygments.lexers.webmisc', 'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')), + # 'DylanConsoleLexer': ('typecode._vendor.pygments.lexers.dylan', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)), + # 'DylanLexer': ('typecode._vendor.pygments.lexers.dylan', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)), + # 'DylanLidLexer': ('typecode._vendor.pygments.lexers.dylan', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)), + # 'ECLLexer': ('typecode._vendor.pygments.lexers.ecl', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)), + # 'ECLexer': ('typecode._vendor.pygments.lexers.c_like', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')), + # 'EarlGreyLexer': ('typecode._vendor.pygments.lexers.javascript', 'Earl Grey', ('earl-grey', 'earlgrey', 'eg'), ('*.eg',), ('text/x-earl-grey',)), + # 'EasytrieveLexer': ('typecode._vendor.pygments.lexers.scripting', 'Easytrieve', ('easytrieve',), ('*.ezt', '*.mac'), ('text/x-easytrieve',)), + # 'EbnfLexer': ('typecode._vendor.pygments.lexers.parsers', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)), + # 
'EiffelLexer': ('typecode._vendor.pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)), + # 'ElixirConsoleLexer': ('typecode._vendor.pygments.lexers.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)), 'ElixirLexer': ('typecode._vendor.pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.eex', '*.exs'), ('text/x-elixir',)), 'ElmLexer': ('typecode._vendor.pygments.lexers.elm', 'Elm', ('elm',), ('*.elm',), ('text/x-elm',)), - #'EmacsLispLexer': ('typecode._vendor.pygments.lexers.lisp', 'EmacsLisp', ('emacs', 'elisp', 'emacs-lisp'), ('*.el',), ('text/x-elisp', 'application/x-elisp')), - #'EmailLexer': ('typecode._vendor.pygments.lexers.email', 'E-mail', ('email', 'eml'), ('*.eml',), ('message/rfc822',)), + # 'EmacsLispLexer': ('typecode._vendor.pygments.lexers.lisp', 'EmacsLisp', ('emacs', 'elisp', 'emacs-lisp'), ('*.el',), ('text/x-elisp', 'application/x-elisp')), + # 'EmailLexer': ('typecode._vendor.pygments.lexers.email', 'E-mail', ('email', 'eml'), ('*.eml',), ('message/rfc822',)), 'ErbLexer': ('typecode._vendor.pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)), 'ErlangLexer': ('typecode._vendor.pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)), - #'ErlangShellLexer': ('typecode._vendor.pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)), - #'EvoqueHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)), - #'EvoqueLexer': ('typecode._vendor.pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)), - #'EvoqueXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)), - #'ExeclineLexer': ('typecode._vendor.pygments.lexers.shell', 'execline', ('execline',), ('*.exec',), ()), - 
#'EzhilLexer': ('typecode._vendor.pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)), + # 'ErlangShellLexer': ('typecode._vendor.pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)), + # 'EvoqueHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)), + # 'EvoqueLexer': ('typecode._vendor.pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)), + # 'EvoqueXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)), + # 'ExeclineLexer': ('typecode._vendor.pygments.lexers.shell', 'execline', ('execline',), ('*.exec',), ()), + # 'EzhilLexer': ('typecode._vendor.pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)), 'FSharpLexer': ('typecode._vendor.pygments.lexers.dotnet', 'F#', ('fsharp', 'f#'), ('*.fs', '*.fsi'), ('text/x-fsharp',)), - #'FStarLexer': ('typecode._vendor.pygments.lexers.ml', 'FStar', ('fstar',), ('*.fst', '*.fsti'), ('text/x-fstar',)), - #'FactorLexer': ('typecode._vendor.pygments.lexers.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)), - #'FancyLexer': ('typecode._vendor.pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)), - #'FantomLexer': ('typecode._vendor.pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)), - #'FelixLexer': ('typecode._vendor.pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)), - #'FennelLexer': ('typecode._vendor.pygments.lexers.lisp', 'Fennel', ('fennel', 'fnl'), ('*.fnl',), ()), - #'FishShellLexer': ('typecode._vendor.pygments.lexers.shell', 'Fish', ('fish', 'fishshell'), ('*.fish', '*.load'), ('application/x-fish',)), - #'FlatlineLexer': ('typecode._vendor.pygments.lexers.dsls', 'Flatline', ('flatline',), (), 
('text/x-flatline',)), - #'FloScriptLexer': ('typecode._vendor.pygments.lexers.floscript', 'FloScript', ('floscript', 'flo'), ('*.flo',), ()), + # 'FStarLexer': ('typecode._vendor.pygments.lexers.ml', 'FStar', ('fstar',), ('*.fst', '*.fsti'), ('text/x-fstar',)), + # 'FactorLexer': ('typecode._vendor.pygments.lexers.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)), + # 'FancyLexer': ('typecode._vendor.pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)), + # 'FantomLexer': ('typecode._vendor.pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)), + # 'FelixLexer': ('typecode._vendor.pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)), + # 'FennelLexer': ('typecode._vendor.pygments.lexers.lisp', 'Fennel', ('fennel', 'fnl'), ('*.fnl',), ()), + # 'FishShellLexer': ('typecode._vendor.pygments.lexers.shell', 'Fish', ('fish', 'fishshell'), ('*.fish', '*.load'), ('application/x-fish',)), + # 'FlatlineLexer': ('typecode._vendor.pygments.lexers.dsls', 'Flatline', ('flatline',), (), ('text/x-flatline',)), + # 'FloScriptLexer': ('typecode._vendor.pygments.lexers.floscript', 'FloScript', ('floscript', 'flo'), ('*.flo',), ()), 'ForthLexer': ('typecode._vendor.pygments.lexers.forth', 'Forth', ('forth',), ('*.frt', '*.fs'), ('application/x-forth',)), 'FortranFixedLexer': ('typecode._vendor.pygments.lexers.fortran', 'FortranFixed', ('fortranfixed',), ('*.f', '*.F'), ()), 'FortranLexer': ('typecode._vendor.pygments.lexers.fortran', 'Fortran', ('fortran',), ('*.f03', '*.f90', '*.F03', '*.F90'), ('text/x-fortran',)), - #'FoxProLexer': ('typecode._vendor.pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()), - #'FreeFemLexer': ('typecode._vendor.pygments.lexers.freefem', 'Freefem', ('freefem',), ('*.edp',), ('text/x-freefem',)), - #'GAPLexer': ('typecode._vendor.pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', 
'*.gd', '*.gi', '*.gap'), ()), - #'GDScriptLexer': ('typecode._vendor.pygments.lexers.gdscript', 'GDScript', ('gdscript', 'gd'), ('*.gd',), ('text/x-gdscript', 'application/x-gdscript')), - #'GLShaderLexer': ('typecode._vendor.pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)), + # 'FoxProLexer': ('typecode._vendor.pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()), + # 'FreeFemLexer': ('typecode._vendor.pygments.lexers.freefem', 'Freefem', ('freefem',), ('*.edp',), ('text/x-freefem',)), + # 'GAPLexer': ('typecode._vendor.pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()), + # 'GDScriptLexer': ('typecode._vendor.pygments.lexers.gdscript', 'GDScript', ('gdscript', 'gd'), ('*.gd',), ('text/x-gdscript', 'application/x-gdscript')), + # 'GLShaderLexer': ('typecode._vendor.pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)), 'GasLexer': ('typecode._vendor.pygments.lexers.asm', 'GAS', ('gas', 'asm'), ('*.s', '*.S'), ('text/x-gas',)), - #'GenshiLexer': ('typecode._vendor.pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')), - #'GenshiTextLexer': ('typecode._vendor.pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')), - #'GettextLexer': ('typecode._vendor.pygments.lexers.textfmts', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')), - #'GherkinLexer': ('typecode._vendor.pygments.lexers.testing', 'Gherkin', ('cucumber', 'gherkin'), ('*.feature',), ('text/x-gherkin',)), - #'GnuplotLexer': ('typecode._vendor.pygments.lexers.graphics', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)), + # 'GenshiLexer': ('typecode._vendor.pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 
'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')), + # 'GenshiTextLexer': ('typecode._vendor.pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')), + # 'GettextLexer': ('typecode._vendor.pygments.lexers.textfmts', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')), + # 'GherkinLexer': ('typecode._vendor.pygments.lexers.testing', 'Gherkin', ('cucumber', 'gherkin'), ('*.feature',), ('text/x-gherkin',)), + # 'GnuplotLexer': ('typecode._vendor.pygments.lexers.graphics', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)), 'GoLexer': ('typecode._vendor.pygments.lexers.go', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)), - #'GoloLexer': ('typecode._vendor.pygments.lexers.jvm', 'Golo', ('golo',), ('*.golo',), ()), - #'GoodDataCLLexer': ('typecode._vendor.pygments.lexers.business', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)), - #'GosuLexer': ('typecode._vendor.pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)), - #'GosuTemplateLexer': ('typecode._vendor.pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)), - #'GroffLexer': ('typecode._vendor.pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')), + # 'GoloLexer': ('typecode._vendor.pygments.lexers.jvm', 'Golo', ('golo',), ('*.golo',), ()), + # 'GoodDataCLLexer': ('typecode._vendor.pygments.lexers.business', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)), + # 'GosuLexer': ('typecode._vendor.pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)), + # 'GosuTemplateLexer': ('typecode._vendor.pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)), + # 'GroffLexer': 
('typecode._vendor.pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')), 'GroovyLexer': ('typecode._vendor.pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy', '*.gradle'), ('text/x-groovy',)), - #'HLSLShaderLexer': ('typecode._vendor.pygments.lexers.graphics', 'HLSL', ('hlsl',), ('*.hlsl', '*.hlsli'), ('text/x-hlsl',)), - #'HamlLexer': ('typecode._vendor.pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)), - #'HandlebarsHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')), - #'HandlebarsLexer': ('typecode._vendor.pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()), + # 'HLSLShaderLexer': ('typecode._vendor.pygments.lexers.graphics', 'HLSL', ('hlsl',), ('*.hlsl', '*.hlsli'), ('text/x-hlsl',)), + # 'HamlLexer': ('typecode._vendor.pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)), + # 'HandlebarsHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')), + # 'HandlebarsLexer': ('typecode._vendor.pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()), 'HaskellLexer': ('typecode._vendor.pygments.lexers.haskell', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)), 'HaxeLexer': ('typecode._vendor.pygments.lexers.haxe', 'Haxe', ('hx', 'haxe', 'hxsl'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')), - #'HexdumpLexer': ('typecode._vendor.pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()), - #'HsailLexer': ('typecode._vendor.pygments.lexers.asm', 'HSAIL', ('hsail', 'hsa'), ('*.hsail',), ('text/x-hsail',)), - #'HspecLexer': ('typecode._vendor.pygments.lexers.haskell', 'Hspec', ('hspec',), (), ()), - #'HtmlDjangoLexer': 
('typecode._vendor.pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), (), ('text/html+django', 'text/html+jinja')), - #'HtmlGenshiLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)), + # 'HexdumpLexer': ('typecode._vendor.pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()), + # 'HsailLexer': ('typecode._vendor.pygments.lexers.asm', 'HSAIL', ('hsail', 'hsa'), ('*.hsail',), ('text/x-hsail',)), + # 'HspecLexer': ('typecode._vendor.pygments.lexers.haskell', 'Hspec', ('hspec',), (), ()), + # 'HtmlDjangoLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), (), ('text/html+django', 'text/html+jinja')), + # 'HtmlGenshiLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)), 'HtmlLexer': ('typecode._vendor.pygments.lexers.html', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')), - #'HtmlPhpLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')), - #'HtmlSmartyLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)), - #'HttpLexer': ('typecode._vendor.pygments.lexers.textfmts', 'HTTP', ('http',), (), ()), - #'HxmlLexer': ('typecode._vendor.pygments.lexers.haxe', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()), - #'HyLexer': ('typecode._vendor.pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')), - #'HybrisLexer': ('typecode._vendor.pygments.lexers.scripting', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')), - #'IDLLexer': ('typecode._vendor.pygments.lexers.idl', 'IDL', 
('idl',), ('*.pro',), ('text/idl',)), - #'IconLexer': ('typecode._vendor.pygments.lexers.unicon', 'Icon', ('icon',), ('*.icon', '*.ICON'), ()), - #'IdrisLexer': ('typecode._vendor.pygments.lexers.haskell', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)), - #'IgorLexer': ('typecode._vendor.pygments.lexers.igor', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)), - #'Inform6Lexer': ('typecode._vendor.pygments.lexers.int_fiction', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()), - #'Inform6TemplateLexer': ('typecode._vendor.pygments.lexers.int_fiction', 'Inform 6 template', ('i6t',), ('*.i6t',), ()), - #'Inform7Lexer': ('typecode._vendor.pygments.lexers.int_fiction', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()), - #'IniLexer': ('typecode._vendor.pygments.lexers.configs', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg', '*.inf'), ('text/x-ini', 'text/inf')), - #'IoLexer': ('typecode._vendor.pygments.lexers.iolang', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)), - #'IokeLexer': ('typecode._vendor.pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)), - #'IrcLogsLexer': ('typecode._vendor.pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)), - #'IsabelleLexer': ('typecode._vendor.pygments.lexers.theorem', 'Isabelle', ('isabelle',), ('*.thy',), ('text/x-isabelle',)), - #'JLexer': ('typecode._vendor.pygments.lexers.j', 'J', ('j',), ('*.ijs',), ('text/x-j',)), - #'JagsLexer': ('typecode._vendor.pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()), - #'JasminLexer': ('typecode._vendor.pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()), + # 'HtmlPhpLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')), + # 'HtmlSmartyLexer': ('typecode._vendor.pygments.lexers.templates', 
'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)), + # 'HttpLexer': ('typecode._vendor.pygments.lexers.textfmts', 'HTTP', ('http',), (), ()), + # 'HxmlLexer': ('typecode._vendor.pygments.lexers.haxe', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()), + # 'HyLexer': ('typecode._vendor.pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')), + # 'HybrisLexer': ('typecode._vendor.pygments.lexers.scripting', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')), + # 'IDLLexer': ('typecode._vendor.pygments.lexers.idl', 'IDL', ('idl',), ('*.pro',), ('text/idl',)), + # 'IconLexer': ('typecode._vendor.pygments.lexers.unicon', 'Icon', ('icon',), ('*.icon', '*.ICON'), ()), + # 'IdrisLexer': ('typecode._vendor.pygments.lexers.haskell', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)), + # 'IgorLexer': ('typecode._vendor.pygments.lexers.igor', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)), + # 'Inform6Lexer': ('typecode._vendor.pygments.lexers.int_fiction', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()), + # 'Inform6TemplateLexer': ('typecode._vendor.pygments.lexers.int_fiction', 'Inform 6 template', ('i6t',), ('*.i6t',), ()), + # 'Inform7Lexer': ('typecode._vendor.pygments.lexers.int_fiction', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()), + # 'IniLexer': ('typecode._vendor.pygments.lexers.configs', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg', '*.inf'), ('text/x-ini', 'text/inf')), + # 'IoLexer': ('typecode._vendor.pygments.lexers.iolang', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)), + # 'IokeLexer': ('typecode._vendor.pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)), + # 'IrcLogsLexer': ('typecode._vendor.pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)), + # 'IsabelleLexer': ('typecode._vendor.pygments.lexers.theorem', 'Isabelle', ('isabelle',), ('*.thy',), ('text/x-isabelle',)), + # 'JLexer': 
('typecode._vendor.pygments.lexers.j', 'J', ('j',), ('*.ijs',), ('text/x-j',)), + # 'JagsLexer': ('typecode._vendor.pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()), + # 'JasminLexer': ('typecode._vendor.pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()), 'JavaLexer': ('typecode._vendor.pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)), - #'JavascriptDjangoLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Django/Jinja', ('js+django', 'javascript+django', 'js+jinja', 'javascript+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')), - #'JavascriptErbLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Ruby', ('js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')), - #'JavascriptGenshiLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')), + # 'JavascriptDjangoLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Django/Jinja', ('js+django', 'javascript+django', 'js+jinja', 'javascript+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')), + # 'JavascriptErbLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Ruby', ('js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')), + # 'JavascriptGenshiLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 
'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')), 'JavascriptLexer': ('typecode._vendor.pygments.lexers.javascript', 'JavaScript', ('js', 'javascript'), ('*.js', '*.jsm', '*.mjs'), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')), - #'JavascriptPhpLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')), - #'JavascriptSmartyLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')), - #'JclLexer': ('typecode._vendor.pygments.lexers.scripting', 'JCL', ('jcl',), ('*.jcl',), ('text/x-jcl',)), - #'JsgfLexer': ('typecode._vendor.pygments.lexers.grammar_notation', 'JSGF', ('jsgf',), ('*.jsgf',), ('application/jsgf', 'application/x-jsgf', 'text/jsgf')), - #'JsonBareObjectLexer': ('typecode._vendor.pygments.lexers.data', 'JSONBareObject', (), (), ()), - #'JsonLdLexer': ('typecode._vendor.pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)), - #'JsonLexer': ('typecode._vendor.pygments.lexers.data', 'JSON', ('json', 'json-object'), ('*.json', 'Pipfile.lock'), ('application/json', 'application/json-object')), + # 'JavascriptPhpLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')), + # 'JavascriptSmartyLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')), + # 'JclLexer': ('typecode._vendor.pygments.lexers.scripting', 'JCL', ('jcl',), 
('*.jcl',), ('text/x-jcl',)), + # 'JsgfLexer': ('typecode._vendor.pygments.lexers.grammar_notation', 'JSGF', ('jsgf',), ('*.jsgf',), ('application/jsgf', 'application/x-jsgf', 'text/jsgf')), + # 'JsonBareObjectLexer': ('typecode._vendor.pygments.lexers.data', 'JSONBareObject', (), (), ()), + # 'JsonLdLexer': ('typecode._vendor.pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)), + # 'JsonLexer': ('typecode._vendor.pygments.lexers.data', 'JSON', ('json', 'json-object'), ('*.json', 'Pipfile.lock'), ('application/json', 'application/json-object')), 'JspLexer': ('typecode._vendor.pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)), - #'JuliaConsoleLexer': ('typecode._vendor.pygments.lexers.julia', 'Julia console', ('jlcon',), (), ()), - #'JuliaLexer': ('typecode._vendor.pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')), - #'JuttleLexer': ('typecode._vendor.pygments.lexers.javascript', 'Juttle', ('juttle',), ('*.juttle',), ('application/juttle', 'application/x-juttle', 'text/x-juttle', 'text/juttle')), - #'KalLexer': ('typecode._vendor.pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')), - #'KconfigLexer': ('typecode._vendor.pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig*', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)), - #'KernelLogLexer': ('typecode._vendor.pygments.lexers.textfmts', 'Kernel log', ('kmsg', 'dmesg'), ('*.kmsg', '*.dmesg'), ()), - #'KokaLexer': ('typecode._vendor.pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)), + # 'JuliaConsoleLexer': ('typecode._vendor.pygments.lexers.julia', 'Julia console', ('jlcon',), (), ()), + # 'JuliaLexer': ('typecode._vendor.pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')), + # 
'JuttleLexer': ('typecode._vendor.pygments.lexers.javascript', 'Juttle', ('juttle',), ('*.juttle',), ('application/juttle', 'application/x-juttle', 'text/x-juttle', 'text/juttle')), + # 'KalLexer': ('typecode._vendor.pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')), + # 'KconfigLexer': ('typecode._vendor.pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig*', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)), + # 'KernelLogLexer': ('typecode._vendor.pygments.lexers.textfmts', 'Kernel log', ('kmsg', 'dmesg'), ('*.kmsg', '*.dmesg'), ()), + # 'KokaLexer': ('typecode._vendor.pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)), 'KotlinLexer': ('typecode._vendor.pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt', '*.kts'), ('text/x-kotlin',)), - #'LSLLexer': ('typecode._vendor.pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)), - #'LassoCssLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)), - #'LassoHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')), - #'LassoJavascriptLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Lasso', ('js+lasso', 'javascript+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')), - #'LassoLexer': ('typecode._vendor.pygments.lexers.javascript', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)), - #'LassoXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)), - #'LeanLexer': ('typecode._vendor.pygments.lexers.theorem', 'Lean', ('lean',), ('*.lean',), ('text/x-lean',)), - #'LessCssLexer': ('typecode._vendor.pygments.lexers.css', 
'LessCss', ('less',), ('*.less',), ('text/x-less-css',)), - #'LighttpdConfLexer': ('typecode._vendor.pygments.lexers.configs', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)), - #'LimboLexer': ('typecode._vendor.pygments.lexers.inferno', 'Limbo', ('limbo',), ('*.b',), ('text/limbo',)), - #'LiquidLexer': ('typecode._vendor.pygments.lexers.templates', 'liquid', ('liquid',), ('*.liquid',), ()), - #'LiterateAgdaLexer': ('typecode._vendor.pygments.lexers.haskell', 'Literate Agda', ('lagda', 'literate-agda'), ('*.lagda',), ('text/x-literate-agda',)), - #'LiterateCryptolLexer': ('typecode._vendor.pygments.lexers.haskell', 'Literate Cryptol', ('lcry', 'literate-cryptol', 'lcryptol'), ('*.lcry',), ('text/x-literate-cryptol',)), - #'LiterateHaskellLexer': ('typecode._vendor.pygments.lexers.haskell', 'Literate Haskell', ('lhs', 'literate-haskell', 'lhaskell'), ('*.lhs',), ('text/x-literate-haskell',)), - #'LiterateIdrisLexer': ('typecode._vendor.pygments.lexers.haskell', 'Literate Idris', ('lidr', 'literate-idris', 'lidris'), ('*.lidr',), ('text/x-literate-idris',)), - #'LiveScriptLexer': ('typecode._vendor.pygments.lexers.javascript', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)), - #'LlvmLexer': ('typecode._vendor.pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)), - #'LlvmMirBodyLexer': ('typecode._vendor.pygments.lexers.asm', 'LLVM-MIR Body', ('llvm-mir-body',), (), ()), - #'LlvmMirLexer': ('typecode._vendor.pygments.lexers.asm', 'LLVM-MIR', ('llvm-mir',), ('*.mir',), ()), - #'LogosLexer': ('typecode._vendor.pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)), - #'LogtalkLexer': ('typecode._vendor.pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)), + # 'LSLLexer': ('typecode._vendor.pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)), + # 'LassoCssLexer': 
('typecode._vendor.pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)), + # 'LassoHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')), + # 'LassoJavascriptLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Lasso', ('js+lasso', 'javascript+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')), + # 'LassoLexer': ('typecode._vendor.pygments.lexers.javascript', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)), + # 'LassoXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)), + # 'LeanLexer': ('typecode._vendor.pygments.lexers.theorem', 'Lean', ('lean',), ('*.lean',), ('text/x-lean',)), + # 'LessCssLexer': ('typecode._vendor.pygments.lexers.css', 'LessCss', ('less',), ('*.less',), ('text/x-less-css',)), + # 'LighttpdConfLexer': ('typecode._vendor.pygments.lexers.configs', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)), + # 'LimboLexer': ('typecode._vendor.pygments.lexers.inferno', 'Limbo', ('limbo',), ('*.b',), ('text/limbo',)), + # 'LiquidLexer': ('typecode._vendor.pygments.lexers.templates', 'liquid', ('liquid',), ('*.liquid',), ()), + # 'LiterateAgdaLexer': ('typecode._vendor.pygments.lexers.haskell', 'Literate Agda', ('lagda', 'literate-agda'), ('*.lagda',), ('text/x-literate-agda',)), + # 'LiterateCryptolLexer': ('typecode._vendor.pygments.lexers.haskell', 'Literate Cryptol', ('lcry', 'literate-cryptol', 'lcryptol'), ('*.lcry',), ('text/x-literate-cryptol',)), + # 'LiterateHaskellLexer': ('typecode._vendor.pygments.lexers.haskell', 'Literate Haskell', ('lhs', 'literate-haskell', 'lhaskell'), ('*.lhs',), ('text/x-literate-haskell',)), + # 'LiterateIdrisLexer': ('typecode._vendor.pygments.lexers.haskell', 'Literate 
Idris', ('lidr', 'literate-idris', 'lidris'), ('*.lidr',), ('text/x-literate-idris',)), + # 'LiveScriptLexer': ('typecode._vendor.pygments.lexers.javascript', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)), + # 'LlvmLexer': ('typecode._vendor.pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)), + # 'LlvmMirBodyLexer': ('typecode._vendor.pygments.lexers.asm', 'LLVM-MIR Body', ('llvm-mir-body',), (), ()), + # 'LlvmMirLexer': ('typecode._vendor.pygments.lexers.asm', 'LLVM-MIR', ('llvm-mir',), ('*.mir',), ()), + # 'LogosLexer': ('typecode._vendor.pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)), + # 'LogtalkLexer': ('typecode._vendor.pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)), 'LuaLexer': ('typecode._vendor.pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')), - #'MIMELexer': ('typecode._vendor.pygments.lexers.mime', 'MIME', ('mime',), (), ('multipart/mixed', 'multipart/related', 'multipart/alternative')), - #'MOOCodeLexer': ('typecode._vendor.pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)), - #'MSDOSSessionLexer': ('typecode._vendor.pygments.lexers.shell', 'MSDOS Session', ('doscon',), (), ()), - #'MakefileLexer': ('typecode._vendor.pygments.lexers.make', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)), - #'MakoCssLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)), - #'MakoHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)), - #'MakoJavascriptLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Mako', ('js+mako', 'javascript+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 
'text/javascript+mako')), - #'MakoLexer': ('typecode._vendor.pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)), - #'MakoXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)), - #'MaqlLexer': ('typecode._vendor.pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')), - #'MarkdownLexer': ('typecode._vendor.pygments.lexers.markup', 'markdown', ('md',), ('*.md', '*.markdown'), ('text/x-markdown',)), - #'MaskLexer': ('typecode._vendor.pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)), - #'MasonLexer': ('typecode._vendor.pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)), - #'MathematicaLexer': ('typecode._vendor.pygments.lexers.algebra', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')), - #'MatlabLexer': ('typecode._vendor.pygments.lexers.matlab', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)), - #'MatlabSessionLexer': ('typecode._vendor.pygments.lexers.matlab', 'Matlab session', ('matlabsession',), (), ()), - #'MiniDLexer': ('typecode._vendor.pygments.lexers.d', 'MiniD', ('minid',), (), ('text/x-minidsrc',)), - #'MiniScriptLexer': ('typecode._vendor.pygments.lexers.scripting', 'MiniScript', ('ms', 'miniscript'), ('*.ms',), ('text/x-minicript', 'application/x-miniscript')), - #'ModelicaLexer': ('typecode._vendor.pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)), - #'Modula2Lexer': ('typecode._vendor.pygments.lexers.modula2', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)), - #'MoinWikiLexer': ('typecode._vendor.pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', 
('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)), - #'MonkeyLexer': ('typecode._vendor.pygments.lexers.basic', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)), - #'MonteLexer': ('typecode._vendor.pygments.lexers.monte', 'Monte', ('monte',), ('*.mt',), ()), - #'MoonScriptLexer': ('typecode._vendor.pygments.lexers.scripting', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')), - #'MoselLexer': ('typecode._vendor.pygments.lexers.mosel', 'Mosel', ('mosel',), ('*.mos',), ()), - #'MozPreprocCssLexer': ('typecode._vendor.pygments.lexers.markup', 'CSS+mozpreproc', ('css+mozpreproc',), ('*.css.in',), ()), - #'MozPreprocHashLexer': ('typecode._vendor.pygments.lexers.markup', 'mozhashpreproc', ('mozhashpreproc',), (), ()), - #'MozPreprocJavascriptLexer': ('typecode._vendor.pygments.lexers.markup', 'Javascript+mozpreproc', ('javascript+mozpreproc',), ('*.js.in',), ()), - #'MozPreprocPercentLexer': ('typecode._vendor.pygments.lexers.markup', 'mozpercentpreproc', ('mozpercentpreproc',), (), ()), - #'MozPreprocXulLexer': ('typecode._vendor.pygments.lexers.markup', 'XUL+mozpreproc', ('xul+mozpreproc',), ('*.xul.in',), ()), - #'MqlLexer': ('typecode._vendor.pygments.lexers.c_like', 'MQL', ('mql', 'mq4', 'mq5', 'mql4', 'mql5'), ('*.mq4', '*.mq5', '*.mqh'), ('text/x-mql',)), - #'MscgenLexer': ('typecode._vendor.pygments.lexers.dsls', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()), - #'MuPADLexer': ('typecode._vendor.pygments.lexers.algebra', 'MuPAD', ('mupad',), ('*.mu',), ()), - #'MxmlLexer': ('typecode._vendor.pygments.lexers.actionscript', 'MXML', ('mxml',), ('*.mxml',), ()), - #'MySqlLexer': ('typecode._vendor.pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)), - #'MyghtyCssLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)), - #'MyghtyHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), 
('text/html+myghty',)), - #'MyghtyJavascriptLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Myghty', ('js+myghty', 'javascript+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')), - #'MyghtyLexer': ('typecode._vendor.pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)), - #'MyghtyXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)), - #'NCLLexer': ('typecode._vendor.pygments.lexers.ncl', 'NCL', ('ncl',), ('*.ncl',), ('text/ncl',)), - #'NSISLexer': ('typecode._vendor.pygments.lexers.installers', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)), + # 'MIMELexer': ('typecode._vendor.pygments.lexers.mime', 'MIME', ('mime',), (), ('multipart/mixed', 'multipart/related', 'multipart/alternative')), + # 'MOOCodeLexer': ('typecode._vendor.pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)), + # 'MSDOSSessionLexer': ('typecode._vendor.pygments.lexers.shell', 'MSDOS Session', ('doscon',), (), ()), + # 'MakefileLexer': ('typecode._vendor.pygments.lexers.make', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)), + # 'MakoCssLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)), + # 'MakoHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)), + # 'MakoJavascriptLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Mako', ('js+mako', 'javascript+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')), + # 'MakoLexer': ('typecode._vendor.pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)), + # 'MakoXmlLexer': 
('typecode._vendor.pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)), + # 'MaqlLexer': ('typecode._vendor.pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')), + # 'MarkdownLexer': ('typecode._vendor.pygments.lexers.markup', 'markdown', ('md',), ('*.md', '*.markdown'), ('text/x-markdown',)), + # 'MaskLexer': ('typecode._vendor.pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)), + # 'MasonLexer': ('typecode._vendor.pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)), + # 'MathematicaLexer': ('typecode._vendor.pygments.lexers.algebra', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')), + # 'MatlabLexer': ('typecode._vendor.pygments.lexers.matlab', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)), + # 'MatlabSessionLexer': ('typecode._vendor.pygments.lexers.matlab', 'Matlab session', ('matlabsession',), (), ()), + # 'MiniDLexer': ('typecode._vendor.pygments.lexers.d', 'MiniD', ('minid',), (), ('text/x-minidsrc',)), + # 'MiniScriptLexer': ('typecode._vendor.pygments.lexers.scripting', 'MiniScript', ('ms', 'miniscript'), ('*.ms',), ('text/x-minicript', 'application/x-miniscript')), + # 'ModelicaLexer': ('typecode._vendor.pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)), + # 'Modula2Lexer': ('typecode._vendor.pygments.lexers.modula2', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)), + # 'MoinWikiLexer': ('typecode._vendor.pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)), + # 'MonkeyLexer': ('typecode._vendor.pygments.lexers.basic', 'Monkey', ('monkey',), ('*.monkey',), 
('text/x-monkey',)), + # 'MonteLexer': ('typecode._vendor.pygments.lexers.monte', 'Monte', ('monte',), ('*.mt',), ()), + # 'MoonScriptLexer': ('typecode._vendor.pygments.lexers.scripting', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')), + # 'MoselLexer': ('typecode._vendor.pygments.lexers.mosel', 'Mosel', ('mosel',), ('*.mos',), ()), + # 'MozPreprocCssLexer': ('typecode._vendor.pygments.lexers.markup', 'CSS+mozpreproc', ('css+mozpreproc',), ('*.css.in',), ()), + # 'MozPreprocHashLexer': ('typecode._vendor.pygments.lexers.markup', 'mozhashpreproc', ('mozhashpreproc',), (), ()), + # 'MozPreprocJavascriptLexer': ('typecode._vendor.pygments.lexers.markup', 'Javascript+mozpreproc', ('javascript+mozpreproc',), ('*.js.in',), ()), + # 'MozPreprocPercentLexer': ('typecode._vendor.pygments.lexers.markup', 'mozpercentpreproc', ('mozpercentpreproc',), (), ()), + # 'MozPreprocXulLexer': ('typecode._vendor.pygments.lexers.markup', 'XUL+mozpreproc', ('xul+mozpreproc',), ('*.xul.in',), ()), + # 'MqlLexer': ('typecode._vendor.pygments.lexers.c_like', 'MQL', ('mql', 'mq4', 'mq5', 'mql4', 'mql5'), ('*.mq4', '*.mq5', '*.mqh'), ('text/x-mql',)), + # 'MscgenLexer': ('typecode._vendor.pygments.lexers.dsls', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()), + # 'MuPADLexer': ('typecode._vendor.pygments.lexers.algebra', 'MuPAD', ('mupad',), ('*.mu',), ()), + # 'MxmlLexer': ('typecode._vendor.pygments.lexers.actionscript', 'MXML', ('mxml',), ('*.mxml',), ()), + # 'MySqlLexer': ('typecode._vendor.pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)), + # 'MyghtyCssLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)), + # 'MyghtyHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), ('text/html+myghty',)), + # 'MyghtyJavascriptLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Myghty', ('js+myghty', 
'javascript+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')), + # 'MyghtyLexer': ('typecode._vendor.pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)), + # 'MyghtyXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)), + # 'NCLLexer': ('typecode._vendor.pygments.lexers.ncl', 'NCL', ('ncl',), ('*.ncl',), ('text/ncl',)), + # 'NSISLexer': ('typecode._vendor.pygments.lexers.installers', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)), 'NasmLexer': ('typecode._vendor.pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM'), ('text/x-nasm',)), - #'NasmObjdumpLexer': ('typecode._vendor.pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)), - #'NemerleLexer': ('typecode._vendor.pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)), - #'NesCLexer': ('typecode._vendor.pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)), - #'NewLispLexer': ('typecode._vendor.pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl', '*.kif'), ('text/x-newlisp', 'application/x-newlisp')), - #'NewspeakLexer': ('typecode._vendor.pygments.lexers.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)), - #'NginxConfLexer': ('typecode._vendor.pygments.lexers.configs', 'Nginx configuration file', ('nginx',), ('nginx.conf',), ('text/x-nginx-conf',)), - #'NimrodLexer': ('typecode._vendor.pygments.lexers.nimrod', 'Nimrod', ('nim', 'nimrod'), ('*.nim', '*.nimrod'), ('text/x-nim',)), - #'NitLexer': ('typecode._vendor.pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()), - #'NixLexer': ('typecode._vendor.pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)), - #'NotmuchLexer': ('typecode._vendor.pygments.lexers.textfmts', 'Notmuch', ('notmuch',), (), ()), - 
#'NuSMVLexer': ('typecode._vendor.pygments.lexers.smv', 'NuSMV', ('nusmv',), ('*.smv',), ()), - #'NumPyLexer': ('typecode._vendor.pygments.lexers.python', 'NumPy', ('numpy',), (), ()), - #'ObjdumpLexer': ('typecode._vendor.pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)), + # 'NasmObjdumpLexer': ('typecode._vendor.pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)), + # 'NemerleLexer': ('typecode._vendor.pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)), + # 'NesCLexer': ('typecode._vendor.pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)), + # 'NewLispLexer': ('typecode._vendor.pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl', '*.kif'), ('text/x-newlisp', 'application/x-newlisp')), + # 'NewspeakLexer': ('typecode._vendor.pygments.lexers.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)), + # 'NginxConfLexer': ('typecode._vendor.pygments.lexers.configs', 'Nginx configuration file', ('nginx',), ('nginx.conf',), ('text/x-nginx-conf',)), + # 'NimrodLexer': ('typecode._vendor.pygments.lexers.nimrod', 'Nimrod', ('nim', 'nimrod'), ('*.nim', '*.nimrod'), ('text/x-nim',)), + # 'NitLexer': ('typecode._vendor.pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()), + # 'NixLexer': ('typecode._vendor.pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)), + # 'NotmuchLexer': ('typecode._vendor.pygments.lexers.textfmts', 'Notmuch', ('notmuch',), (), ()), + # 'NuSMVLexer': ('typecode._vendor.pygments.lexers.smv', 'NuSMV', ('nusmv',), ('*.smv',), ()), + # 'NumPyLexer': ('typecode._vendor.pygments.lexers.python', 'NumPy', ('numpy',), (), ()), + # 'ObjdumpLexer': ('typecode._vendor.pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)), 'ObjectiveCLexer': ('typecode._vendor.pygments.lexers.objective', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 
'objc'), ('*.m', '*.h'), ('text/x-objective-c',)), 'ObjectiveCppLexer': ('typecode._vendor.pygments.lexers.objective', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)), - #'ObjectiveJLexer': ('typecode._vendor.pygments.lexers.javascript', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)), + # 'ObjectiveJLexer': ('typecode._vendor.pygments.lexers.javascript', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)), 'OcamlLexer': ('typecode._vendor.pygments.lexers.ml', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)), - #'OctaveLexer': ('typecode._vendor.pygments.lexers.matlab', 'Octave', ('octave',), ('*.m',), ('text/octave',)), - #'OdinLexer': ('typecode._vendor.pygments.lexers.archetype', 'ODIN', ('odin',), ('*.odin',), ('text/odin',)), - #'OocLexer': ('typecode._vendor.pygments.lexers.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)), - #'OpaLexer': ('typecode._vendor.pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)), - #'OpenEdgeLexer': ('typecode._vendor.pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')), - #'PacmanConfLexer': ('typecode._vendor.pygments.lexers.configs', 'PacmanConf', ('pacmanconf',), ('pacman.conf',), ()), - #'PanLexer': ('typecode._vendor.pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()), - #'ParaSailLexer': ('typecode._vendor.pygments.lexers.parasail', 'ParaSail', ('parasail',), ('*.psi', '*.psl'), ('text/x-parasail',)), - #'PawnLexer': ('typecode._vendor.pygments.lexers.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)), - #'PegLexer': ('typecode._vendor.pygments.lexers.grammar_notation', 'PEG', ('peg',), ('*.peg',), ('text/x-peg',)), - #'Perl6Lexer': ('typecode._vendor.pygments.lexers.perl', 'Perl6', ('perl6', 'pl6', 
'raku'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod', '*.rakutest', '*.rakudoc'), ('text/x-perl6', 'application/x-perl6')), - #'PerlLexer': ('typecode._vendor.pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t', '*.perl'), ('text/x-perl', 'application/x-perl')), + # 'OctaveLexer': ('typecode._vendor.pygments.lexers.matlab', 'Octave', ('octave',), ('*.m',), ('text/octave',)), + # 'OdinLexer': ('typecode._vendor.pygments.lexers.archetype', 'ODIN', ('odin',), ('*.odin',), ('text/odin',)), + # 'OocLexer': ('typecode._vendor.pygments.lexers.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)), + # 'OpaLexer': ('typecode._vendor.pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)), + # 'OpenEdgeLexer': ('typecode._vendor.pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')), + # 'PacmanConfLexer': ('typecode._vendor.pygments.lexers.configs', 'PacmanConf', ('pacmanconf',), ('pacman.conf',), ()), + # 'PanLexer': ('typecode._vendor.pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()), + # 'ParaSailLexer': ('typecode._vendor.pygments.lexers.parasail', 'ParaSail', ('parasail',), ('*.psi', '*.psl'), ('text/x-parasail',)), + # 'PawnLexer': ('typecode._vendor.pygments.lexers.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)), + # 'PegLexer': ('typecode._vendor.pygments.lexers.grammar_notation', 'PEG', ('peg',), ('*.peg',), ('text/x-peg',)), + # 'Perl6Lexer': ('typecode._vendor.pygments.lexers.perl', 'Perl6', ('perl6', 'pl6', 'raku'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod', '*.rakutest', '*.rakudoc'), ('text/x-perl6', 'application/x-perl6')), + # 'PerlLexer': ('typecode._vendor.pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t', '*.perl'), ('text/x-perl', 
'application/x-perl')), 'PhpLexer': ('typecode._vendor.pygments.lexers.php', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)), - #'PigLexer': ('typecode._vendor.pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)), - #'PikeLexer': ('typecode._vendor.pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)), - #'PkgConfigLexer': ('typecode._vendor.pygments.lexers.configs', 'PkgConfig', ('pkgconfig',), ('*.pc',), ()), - #'PlPgsqlLexer': ('typecode._vendor.pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)), - #'PointlessLexer': ('typecode._vendor.pygments.lexers.pointless', 'Pointless', ('pointless',), ('*.ptls',), ()), - #'PonyLexer': ('typecode._vendor.pygments.lexers.pony', 'Pony', ('pony',), ('*.pony',), ()), - #'PostScriptLexer': ('typecode._vendor.pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)), - #'PostgresConsoleLexer': ('typecode._vendor.pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)), - #'PostgresLexer': ('typecode._vendor.pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)), - #'PovrayLexer': ('typecode._vendor.pygments.lexers.graphics', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)), + # 'PigLexer': ('typecode._vendor.pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)), + # 'PikeLexer': ('typecode._vendor.pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)), + # 'PkgConfigLexer': ('typecode._vendor.pygments.lexers.configs', 'PkgConfig', ('pkgconfig',), ('*.pc',), ()), + # 'PlPgsqlLexer': ('typecode._vendor.pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)), + # 'PointlessLexer': ('typecode._vendor.pygments.lexers.pointless', 'Pointless', ('pointless',), ('*.ptls',), 
()), + # 'PonyLexer': ('typecode._vendor.pygments.lexers.pony', 'Pony', ('pony',), ('*.pony',), ()), + # 'PostScriptLexer': ('typecode._vendor.pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)), + # 'PostgresConsoleLexer': ('typecode._vendor.pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)), + # 'PostgresLexer': ('typecode._vendor.pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)), + # 'PovrayLexer': ('typecode._vendor.pygments.lexers.graphics', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)), 'PowerShellLexer': ('typecode._vendor.pygments.lexers.shell', 'PowerShell', ('powershell', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)), - #'PowerShellSessionLexer': ('typecode._vendor.pygments.lexers.shell', 'PowerShell Session', ('ps1con',), (), ()), - #'PraatLexer': ('typecode._vendor.pygments.lexers.praat', 'Praat', ('praat',), ('*.praat', '*.proc', '*.psc'), ()), - #'PrologLexer': ('typecode._vendor.pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)), - #'PromQLLexer': ('typecode._vendor.pygments.lexers.promql', 'PromQL', ('promql',), ('*.promql',), ()), - #'PropertiesLexer': ('typecode._vendor.pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)), - #'ProtoBufLexer': ('typecode._vendor.pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()), - #'PsyshConsoleLexer': ('typecode._vendor.pygments.lexers.php', 'PsySH console session for PHP', ('psysh',), (), ()), - #'PugLexer': ('typecode._vendor.pygments.lexers.html', 'Pug', ('pug', 'jade'), ('*.pug', '*.jade'), ('text/x-pug', 'text/x-jade')), - #'PuppetLexer': ('typecode._vendor.pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()), - 
#'PyPyLogLexer': ('typecode._vendor.pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)), - #'Python2Lexer': ('typecode._vendor.pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 'application/x-python2')), - #'Python2TracebackLexer': ('typecode._vendor.pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)), - #'PythonConsoleLexer': ('typecode._vendor.pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)), + # 'PowerShellSessionLexer': ('typecode._vendor.pygments.lexers.shell', 'PowerShell Session', ('ps1con',), (), ()), + # 'PraatLexer': ('typecode._vendor.pygments.lexers.praat', 'Praat', ('praat',), ('*.praat', '*.proc', '*.psc'), ()), + # 'PrologLexer': ('typecode._vendor.pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)), + # 'PromQLLexer': ('typecode._vendor.pygments.lexers.promql', 'PromQL', ('promql',), ('*.promql',), ()), + # 'PropertiesLexer': ('typecode._vendor.pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)), + # 'ProtoBufLexer': ('typecode._vendor.pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()), + # 'PsyshConsoleLexer': ('typecode._vendor.pygments.lexers.php', 'PsySH console session for PHP', ('psysh',), (), ()), + # 'PugLexer': ('typecode._vendor.pygments.lexers.html', 'Pug', ('pug', 'jade'), ('*.pug', '*.jade'), ('text/x-pug', 'text/x-jade')), + # 'PuppetLexer': ('typecode._vendor.pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()), + # 'PyPyLogLexer': ('typecode._vendor.pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)), + # 'Python2Lexer': ('typecode._vendor.pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 
'application/x-python2')), + # 'Python2TracebackLexer': ('typecode._vendor.pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)), + # 'PythonConsoleLexer': ('typecode._vendor.pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)), 'PythonLexer': ('typecode._vendor.pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3'), ('*.py', '*.pyw', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')), - #'PythonTracebackLexer': ('typecode._vendor.pygments.lexers.python', 'Python Traceback', ('pytb', 'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')), - #'QBasicLexer': ('typecode._vendor.pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)), - #'QVToLexer': ('typecode._vendor.pygments.lexers.qvt', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()), - #'QmlLexer': ('typecode._vendor.pygments.lexers.webmisc', 'QML', ('qml', 'qbs'), ('*.qml', '*.qbs'), ('application/x-qml', 'application/x-qt.qbs+qml')), - #'RConsoleLexer': ('typecode._vendor.pygments.lexers.r', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()), - #'RNCCompactLexer': ('typecode._vendor.pygments.lexers.rnc', 'Relax-NG Compact', ('rnc', 'rng-compact'), ('*.rnc',), ()), - #'RPMSpecLexer': ('typecode._vendor.pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)), - #'RacketLexer': ('typecode._vendor.pygments.lexers.lisp', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')), - #'RagelCLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()), - #'RagelCppLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel in CPP Host', ('ragel-cpp',), ('*.rl',), ()), - 
#'RagelDLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel in D Host', ('ragel-d',), ('*.rl',), ()), - #'RagelEmbeddedLexer': ('typecode._vendor.pygments.lexers.parsers', 'Embedded Ragel', ('ragel-em',), ('*.rl',), ()), - #'RagelJavaLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel in Java Host', ('ragel-java',), ('*.rl',), ()), - #'RagelLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel', ('ragel',), (), ()), - #'RagelObjectiveCLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel in Objective C Host', ('ragel-objc',), ('*.rl',), ()), - #'RagelRubyLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel in Ruby Host', ('ragel-ruby', 'ragel-rb'), ('*.rl',), ()), - #'RawTokenLexer': ('typecode._vendor.pygments.lexers.special', 'Raw token data', ('raw',), (), ('application/x-pygments-tokens',)), - #'RdLexer': ('typecode._vendor.pygments.lexers.r', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)), - #'ReasonLexer': ('typecode._vendor.pygments.lexers.ml', 'ReasonML', ('reason', 'reasonml'), ('*.re', '*.rei'), ('text/x-reasonml',)), - #'RebolLexer': ('typecode._vendor.pygments.lexers.rebol', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)), - #'RedLexer': ('typecode._vendor.pygments.lexers.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')), - #'RedcodeLexer': ('typecode._vendor.pygments.lexers.esoteric', 'Redcode', ('redcode',), ('*.cw',), ()), - #'RegeditLexer': ('typecode._vendor.pygments.lexers.configs', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)), - #'ResourceLexer': ('typecode._vendor.pygments.lexers.resource', 'ResourceBundle', ('resource', 'resourcebundle'), (), ()), - #'RexxLexer': ('typecode._vendor.pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)), - #'RhtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), 
('text/html+ruby',)), - #'RideLexer': ('typecode._vendor.pygments.lexers.ride', 'Ride', ('ride',), ('*.ride',), ('text/x-ride',)), - #'RoboconfGraphLexer': ('typecode._vendor.pygments.lexers.roboconf', 'Roboconf Graph', ('roboconf-graph',), ('*.graph',), ()), - #'RoboconfInstancesLexer': ('typecode._vendor.pygments.lexers.roboconf', 'Roboconf Instances', ('roboconf-instances',), ('*.instances',), ()), - #'RobotFrameworkLexer': ('typecode._vendor.pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.robot',), ('text/x-robotframework',)), - #'RqlLexer': ('typecode._vendor.pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)), - #'RslLexer': ('typecode._vendor.pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)), - #'RstLexer': ('typecode._vendor.pygments.lexers.markup', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')), - #'RtsLexer': ('typecode._vendor.pygments.lexers.trafficscript', 'TrafficScript', ('rts', 'trafficscript'), ('*.rts',), ()), - #'RubyConsoleLexer': ('typecode._vendor.pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)), + # 'PythonTracebackLexer': ('typecode._vendor.pygments.lexers.python', 'Python Traceback', ('pytb', 'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')), + # 'QBasicLexer': ('typecode._vendor.pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)), + # 'QVToLexer': ('typecode._vendor.pygments.lexers.qvt', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()), + # 'QmlLexer': ('typecode._vendor.pygments.lexers.webmisc', 'QML', ('qml', 'qbs'), ('*.qml', '*.qbs'), ('application/x-qml', 'application/x-qt.qbs+qml')), + # 'RConsoleLexer': ('typecode._vendor.pygments.lexers.r', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()), + # 'RNCCompactLexer': ('typecode._vendor.pygments.lexers.rnc', 'Relax-NG Compact', 
('rnc', 'rng-compact'), ('*.rnc',), ()), + # 'RPMSpecLexer': ('typecode._vendor.pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)), + # 'RacketLexer': ('typecode._vendor.pygments.lexers.lisp', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')), + # 'RagelCLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()), + # 'RagelCppLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel in CPP Host', ('ragel-cpp',), ('*.rl',), ()), + # 'RagelDLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel in D Host', ('ragel-d',), ('*.rl',), ()), + # 'RagelEmbeddedLexer': ('typecode._vendor.pygments.lexers.parsers', 'Embedded Ragel', ('ragel-em',), ('*.rl',), ()), + # 'RagelJavaLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel in Java Host', ('ragel-java',), ('*.rl',), ()), + # 'RagelLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel', ('ragel',), (), ()), + # 'RagelObjectiveCLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel in Objective C Host', ('ragel-objc',), ('*.rl',), ()), + # 'RagelRubyLexer': ('typecode._vendor.pygments.lexers.parsers', 'Ragel in Ruby Host', ('ragel-ruby', 'ragel-rb'), ('*.rl',), ()), + # 'RawTokenLexer': ('typecode._vendor.pygments.lexers.special', 'Raw token data', ('raw',), (), ('application/x-pygments-tokens',)), + # 'RdLexer': ('typecode._vendor.pygments.lexers.r', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)), + # 'ReasonLexer': ('typecode._vendor.pygments.lexers.ml', 'ReasonML', ('reason', 'reasonml'), ('*.re', '*.rei'), ('text/x-reasonml',)), + # 'RebolLexer': ('typecode._vendor.pygments.lexers.rebol', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)), + # 'RedLexer': ('typecode._vendor.pygments.lexers.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')), + # 'RedcodeLexer': ('typecode._vendor.pygments.lexers.esoteric', 
'Redcode', ('redcode',), ('*.cw',), ()), + # 'RegeditLexer': ('typecode._vendor.pygments.lexers.configs', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)), + # 'ResourceLexer': ('typecode._vendor.pygments.lexers.resource', 'ResourceBundle', ('resource', 'resourcebundle'), (), ()), + # 'RexxLexer': ('typecode._vendor.pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)), + # 'RhtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)), + # 'RideLexer': ('typecode._vendor.pygments.lexers.ride', 'Ride', ('ride',), ('*.ride',), ('text/x-ride',)), + # 'RoboconfGraphLexer': ('typecode._vendor.pygments.lexers.roboconf', 'Roboconf Graph', ('roboconf-graph',), ('*.graph',), ()), + # 'RoboconfInstancesLexer': ('typecode._vendor.pygments.lexers.roboconf', 'Roboconf Instances', ('roboconf-instances',), ('*.instances',), ()), + # 'RobotFrameworkLexer': ('typecode._vendor.pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.robot',), ('text/x-robotframework',)), + # 'RqlLexer': ('typecode._vendor.pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)), + # 'RslLexer': ('typecode._vendor.pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)), + # 'RstLexer': ('typecode._vendor.pygments.lexers.markup', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')), + # 'RtsLexer': ('typecode._vendor.pygments.lexers.trafficscript', 'TrafficScript', ('rts', 'trafficscript'), ('*.rts',), ()), + # 'RubyConsoleLexer': ('typecode._vendor.pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)), 'RubyLexer': ('typecode._vendor.pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile'), ('text/x-ruby', 
'application/x-ruby')), 'RustLexer': ('typecode._vendor.pygments.lexers.rust', 'Rust', ('rust', 'rs'), ('*.rs', '*.rs.in'), ('text/rust', 'text/x-rust')), - #'SASLexer': ('typecode._vendor.pygments.lexers.sas', 'SAS', ('sas',), ('*.SAS', '*.sas'), ('text/x-sas', 'text/sas', 'application/x-sas')), - #'SLexer': ('typecode._vendor.pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')), - #'SMLLexer': ('typecode._vendor.pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')), - #'SarlLexer': ('typecode._vendor.pygments.lexers.jvm', 'SARL', ('sarl',), ('*.sarl',), ('text/x-sarl',)), + # 'SASLexer': ('typecode._vendor.pygments.lexers.sas', 'SAS', ('sas',), ('*.SAS', '*.sas'), ('text/x-sas', 'text/sas', 'application/x-sas')), + # 'SLexer': ('typecode._vendor.pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')), + # 'SMLLexer': ('typecode._vendor.pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')), + # 'SarlLexer': ('typecode._vendor.pygments.lexers.jvm', 'SARL', ('sarl',), ('*.sarl',), ('text/x-sarl',)), 'SassLexer': ('typecode._vendor.pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)), 'ScalaLexer': ('typecode._vendor.pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)), - #'ScamlLexer': ('typecode._vendor.pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)), - #'ScdocLexer': ('typecode._vendor.pygments.lexers.scdoc', 'scdoc', ('scdoc', 'scd'), ('*.scd', '*.scdoc'), ()), - #'SchemeLexer': ('typecode._vendor.pygments.lexers.lisp', 'Scheme', ('scheme', 
'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')), - #'ScilabLexer': ('typecode._vendor.pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)), + # 'ScamlLexer': ('typecode._vendor.pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)), + # 'ScdocLexer': ('typecode._vendor.pygments.lexers.scdoc', 'scdoc', ('scdoc', 'scd'), ('*.scd', '*.scdoc'), ()), + # 'SchemeLexer': ('typecode._vendor.pygments.lexers.lisp', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')), + # 'ScilabLexer': ('typecode._vendor.pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)), 'ScssLexer': ('typecode._vendor.pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)), - #'ShExCLexer': ('typecode._vendor.pygments.lexers.rdf', 'ShExC', ('shexc', 'shex'), ('*.shex',), ('text/shex',)), - #'ShenLexer': ('typecode._vendor.pygments.lexers.lisp', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')), - #'SieveLexer': ('typecode._vendor.pygments.lexers.sieve', 'Sieve', ('sieve',), ('*.siv', '*.sieve'), ()), - #'SilverLexer': ('typecode._vendor.pygments.lexers.verification', 'Silver', ('silver',), ('*.sil', '*.vpr'), ()), - #'SingularityLexer': ('typecode._vendor.pygments.lexers.configs', 'Singularity', ('singularity',), ('*.def', 'Singularity'), ()), - #'SlashLexer': ('typecode._vendor.pygments.lexers.slash', 'Slash', ('slash',), ('*.sla',), ()), - #'SlimLexer': ('typecode._vendor.pygments.lexers.webmisc', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)), - #'SlurmBashLexer': ('typecode._vendor.pygments.lexers.shell', 'Slurm', ('slurm', 'sbatch'), ('*.sl',), ()), - #'SmaliLexer': ('typecode._vendor.pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)), - #'SmalltalkLexer': ('typecode._vendor.pygments.lexers.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), 
('text/x-smalltalk',)), - #'SmartGameFormatLexer': ('typecode._vendor.pygments.lexers.sgf', 'SmartGameFormat', ('sgf',), ('*.sgf',), ()), - #'SmartyLexer': ('typecode._vendor.pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)), - #'SnobolLexer': ('typecode._vendor.pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)), - #'SnowballLexer': ('typecode._vendor.pygments.lexers.dsls', 'Snowball', ('snowball',), ('*.sbl',), ()), - #'SolidityLexer': ('typecode._vendor.pygments.lexers.solidity', 'Solidity', ('solidity',), ('*.sol',), ()), - #'SourcePawnLexer': ('typecode._vendor.pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)), - #'SourcesListLexer': ('typecode._vendor.pygments.lexers.installers', 'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()), - #'SparqlLexer': ('typecode._vendor.pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)), - #'SqlLexer': ('typecode._vendor.pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)), - #'SqliteConsoleLexer': ('typecode._vendor.pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)), - #'SquidConfLexer': ('typecode._vendor.pygments.lexers.configs', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)), - #'SspLexer': ('typecode._vendor.pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)), - #'StanLexer': ('typecode._vendor.pygments.lexers.modeling', 'Stan', ('stan',), ('*.stan',), ()), - #'StataLexer': ('typecode._vendor.pygments.lexers.stata', 'Stata', ('stata', 'do'), ('*.do', '*.ado'), ('text/x-stata', 'text/stata', 'application/x-stata')), - #'SuperColliderLexer': ('typecode._vendor.pygments.lexers.supercollider', 'SuperCollider', ('sc', 'supercollider'), ('*.sc', '*.scd'), ('application/supercollider', 
'text/supercollider')), + # 'ShExCLexer': ('typecode._vendor.pygments.lexers.rdf', 'ShExC', ('shexc', 'shex'), ('*.shex',), ('text/shex',)), + # 'ShenLexer': ('typecode._vendor.pygments.lexers.lisp', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')), + # 'SieveLexer': ('typecode._vendor.pygments.lexers.sieve', 'Sieve', ('sieve',), ('*.siv', '*.sieve'), ()), + # 'SilverLexer': ('typecode._vendor.pygments.lexers.verification', 'Silver', ('silver',), ('*.sil', '*.vpr'), ()), + # 'SingularityLexer': ('typecode._vendor.pygments.lexers.configs', 'Singularity', ('singularity',), ('*.def', 'Singularity'), ()), + # 'SlashLexer': ('typecode._vendor.pygments.lexers.slash', 'Slash', ('slash',), ('*.sla',), ()), + # 'SlimLexer': ('typecode._vendor.pygments.lexers.webmisc', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)), + # 'SlurmBashLexer': ('typecode._vendor.pygments.lexers.shell', 'Slurm', ('slurm', 'sbatch'), ('*.sl',), ()), + # 'SmaliLexer': ('typecode._vendor.pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)), + # 'SmalltalkLexer': ('typecode._vendor.pygments.lexers.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)), + # 'SmartGameFormatLexer': ('typecode._vendor.pygments.lexers.sgf', 'SmartGameFormat', ('sgf',), ('*.sgf',), ()), + # 'SmartyLexer': ('typecode._vendor.pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)), + # 'SnobolLexer': ('typecode._vendor.pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)), + # 'SnowballLexer': ('typecode._vendor.pygments.lexers.dsls', 'Snowball', ('snowball',), ('*.sbl',), ()), + # 'SolidityLexer': ('typecode._vendor.pygments.lexers.solidity', 'Solidity', ('solidity',), ('*.sol',), ()), + # 'SourcePawnLexer': ('typecode._vendor.pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)), + # 'SourcesListLexer': ('typecode._vendor.pygments.lexers.installers', 
'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()), + # 'SparqlLexer': ('typecode._vendor.pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)), + # 'SqlLexer': ('typecode._vendor.pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)), + # 'SqliteConsoleLexer': ('typecode._vendor.pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)), + # 'SquidConfLexer': ('typecode._vendor.pygments.lexers.configs', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)), + # 'SspLexer': ('typecode._vendor.pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)), + # 'StanLexer': ('typecode._vendor.pygments.lexers.modeling', 'Stan', ('stan',), ('*.stan',), ()), + # 'StataLexer': ('typecode._vendor.pygments.lexers.stata', 'Stata', ('stata', 'do'), ('*.do', '*.ado'), ('text/x-stata', 'text/stata', 'application/x-stata')), + # 'SuperColliderLexer': ('typecode._vendor.pygments.lexers.supercollider', 'SuperCollider', ('sc', 'supercollider'), ('*.sc', '*.scd'), ('application/supercollider', 'text/supercollider')), 'SwiftLexer': ('typecode._vendor.pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)), - #'SwigLexer': ('typecode._vendor.pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)), - #'SystemVerilogLexer': ('typecode._vendor.pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)), - #'TAPLexer': ('typecode._vendor.pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()), - #'TNTLexer': ('typecode._vendor.pygments.lexers.tnt', 'Typographic Number Theory', ('tnt',), ('*.tnt',), ()), - #'TOMLLexer': ('typecode._vendor.pygments.lexers.configs', 'TOML', ('toml',), ('*.toml', 'Pipfile', 'poetry.lock'), ()), - #'Tads3Lexer': 
('typecode._vendor.pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()), + # 'SwigLexer': ('typecode._vendor.pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)), + # 'SystemVerilogLexer': ('typecode._vendor.pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)), + # 'TAPLexer': ('typecode._vendor.pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()), + # 'TNTLexer': ('typecode._vendor.pygments.lexers.tnt', 'Typographic Number Theory', ('tnt',), ('*.tnt',), ()), + # 'TOMLLexer': ('typecode._vendor.pygments.lexers.configs', 'TOML', ('toml',), ('*.toml', 'Pipfile', 'poetry.lock'), ()), + # 'Tads3Lexer': ('typecode._vendor.pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()), 'TasmLexer': ('typecode._vendor.pygments.lexers.asm', 'TASM', ('tasm',), ('*.asm', '*.ASM', '*.tasm'), ('text/x-tasm',)), 'TclLexer': ('typecode._vendor.pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')), 'TcshLexer': ('typecode._vendor.pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)), - #'TcshSessionLexer': ('typecode._vendor.pygments.lexers.shell', 'Tcsh Session', ('tcshcon',), (), ()), - #'TeaTemplateLexer': ('typecode._vendor.pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)), - #'TeraTermLexer': ('typecode._vendor.pygments.lexers.teraterm', 'Tera Term macro', ('ttl', 'teraterm', 'teratermmacro'), ('*.ttl',), ('text/x-teratermmacro',)), - #'TermcapLexer': ('typecode._vendor.pygments.lexers.configs', 'Termcap', ('termcap',), ('termcap', 'termcap.src'), ()), - #'TerminfoLexer': ('typecode._vendor.pygments.lexers.configs', 'Terminfo', ('terminfo',), ('terminfo', 'terminfo.src'), ()), - #'TerraformLexer': ('typecode._vendor.pygments.lexers.configs', 'Terraform', ('terraform', 'tf'), ('*.tf',), ('application/x-tf', 'application/x-terraform')), - 
#'TexLexer': ('typecode._vendor.pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')), - #'TextLexer': ('typecode._vendor.pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)), + # 'TcshSessionLexer': ('typecode._vendor.pygments.lexers.shell', 'Tcsh Session', ('tcshcon',), (), ()), + # 'TeaTemplateLexer': ('typecode._vendor.pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)), + # 'TeraTermLexer': ('typecode._vendor.pygments.lexers.teraterm', 'Tera Term macro', ('ttl', 'teraterm', 'teratermmacro'), ('*.ttl',), ('text/x-teratermmacro',)), + # 'TermcapLexer': ('typecode._vendor.pygments.lexers.configs', 'Termcap', ('termcap',), ('termcap', 'termcap.src'), ()), + # 'TerminfoLexer': ('typecode._vendor.pygments.lexers.configs', 'Terminfo', ('terminfo',), ('terminfo', 'terminfo.src'), ()), + # 'TerraformLexer': ('typecode._vendor.pygments.lexers.configs', 'Terraform', ('terraform', 'tf'), ('*.tf',), ('application/x-tf', 'application/x-terraform')), + # 'TexLexer': ('typecode._vendor.pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')), + # 'TextLexer': ('typecode._vendor.pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)), 'ThriftLexer': ('typecode._vendor.pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)), - #'TiddlyWiki5Lexer': ('typecode._vendor.pygments.lexers.markup', 'tiddler', ('tid',), ('*.tid',), ('text/vnd.tiddlywiki',)), - #'TodotxtLexer': ('typecode._vendor.pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)), - #'TransactSqlLexer': ('typecode._vendor.pygments.lexers.sql', 'Transact-SQL', ('tsql', 't-sql'), ('*.sql',), ('text/x-tsql',)), - #'TreetopLexer': ('typecode._vendor.pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()), - #'TurtleLexer': 
('typecode._vendor.pygments.lexers.rdf', 'Turtle', ('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')), - #'TwigHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)), - #'TwigLexer': ('typecode._vendor.pygments.lexers.templates', 'Twig', ('twig',), (), ('application/x-twig',)), + # 'TiddlyWiki5Lexer': ('typecode._vendor.pygments.lexers.markup', 'tiddler', ('tid',), ('*.tid',), ('text/vnd.tiddlywiki',)), + # 'TodotxtLexer': ('typecode._vendor.pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)), + # 'TransactSqlLexer': ('typecode._vendor.pygments.lexers.sql', 'Transact-SQL', ('tsql', 't-sql'), ('*.sql',), ('text/x-tsql',)), + # 'TreetopLexer': ('typecode._vendor.pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()), + # 'TurtleLexer': ('typecode._vendor.pygments.lexers.rdf', 'Turtle', ('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')), + # 'TwigHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)), + # 'TwigLexer': ('typecode._vendor.pygments.lexers.templates', 'Twig', ('twig',), (), ('application/x-twig',)), 'TypeScriptLexer': ('typecode._vendor.pygments.lexers.javascript', 'TypeScript', ('ts', 'typescript'), ('*.ts', '*.tsx'), ('text/x-typescript',)), - #'TypoScriptCssDataLexer': ('typecode._vendor.pygments.lexers.typoscript', 'TypoScriptCssData', ('typoscriptcssdata',), (), ()), - #'TypoScriptHtmlDataLexer': ('typecode._vendor.pygments.lexers.typoscript', 'TypoScriptHtmlData', ('typoscripthtmldata',), (), ()), - #'TypoScriptLexer': ('typecode._vendor.pygments.lexers.typoscript', 'TypoScript', ('typoscript',), ('*.typoscript',), ('text/x-typoscript',)), - #'UcodeLexer': ('typecode._vendor.pygments.lexers.unicon', 'ucode', ('ucode',), ('*.u', '*.u1', '*.u2'), ()), - #'UniconLexer': ('typecode._vendor.pygments.lexers.unicon', 'Unicon', 
('unicon',), ('*.icn',), ('text/unicon',)), - #'UrbiscriptLexer': ('typecode._vendor.pygments.lexers.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)), - #'UsdLexer': ('typecode._vendor.pygments.lexers.usd', 'USD', ('usd', 'usda'), ('*.usd', '*.usda'), ()), + # 'TypoScriptCssDataLexer': ('typecode._vendor.pygments.lexers.typoscript', 'TypoScriptCssData', ('typoscriptcssdata',), (), ()), + # 'TypoScriptHtmlDataLexer': ('typecode._vendor.pygments.lexers.typoscript', 'TypoScriptHtmlData', ('typoscripthtmldata',), (), ()), + # 'TypoScriptLexer': ('typecode._vendor.pygments.lexers.typoscript', 'TypoScript', ('typoscript',), ('*.typoscript',), ('text/x-typoscript',)), + # 'UcodeLexer': ('typecode._vendor.pygments.lexers.unicon', 'ucode', ('ucode',), ('*.u', '*.u1', '*.u2'), ()), + # 'UniconLexer': ('typecode._vendor.pygments.lexers.unicon', 'Unicon', ('unicon',), ('*.icn',), ('text/unicon',)), + # 'UrbiscriptLexer': ('typecode._vendor.pygments.lexers.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)), + # 'UsdLexer': ('typecode._vendor.pygments.lexers.usd', 'USD', ('usd', 'usda'), ('*.usd', '*.usda'), ()), 'VBScriptLexer': ('typecode._vendor.pygments.lexers.basic', 'VBScript', ('vbscript',), ('*.vbs', '*.VBS'), ()), - #'VCLLexer': ('typecode._vendor.pygments.lexers.varnish', 'VCL', ('vcl',), ('*.vcl',), ('text/x-vclsrc',)), - #'VCLSnippetLexer': ('typecode._vendor.pygments.lexers.varnish', 'VCLSnippets', ('vclsnippets', 'vclsnippet'), (), ('text/x-vclsnippet',)), - #'VCTreeStatusLexer': ('typecode._vendor.pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()), - #'VGLLexer': ('typecode._vendor.pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()), - #'ValaLexer': ('typecode._vendor.pygments.lexers.c_like', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)), + # 'VCLLexer': ('typecode._vendor.pygments.lexers.varnish', 'VCL', ('vcl',), ('*.vcl',), ('text/x-vclsrc',)), + # 
'VCLSnippetLexer': ('typecode._vendor.pygments.lexers.varnish', 'VCLSnippets', ('vclsnippets', 'vclsnippet'), (), ('text/x-vclsnippet',)), + # 'VCTreeStatusLexer': ('typecode._vendor.pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()), + # 'VGLLexer': ('typecode._vendor.pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()), + # 'ValaLexer': ('typecode._vendor.pygments.lexers.c_like', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)), 'VbNetAspxLexer': ('typecode._vendor.pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()), 'VbNetLexer': ('typecode._vendor.pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')), - #'VelocityHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)), - #'VelocityLexer': ('typecode._vendor.pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()), - #'VelocityXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)), + # 'VelocityHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)), + # 'VelocityLexer': ('typecode._vendor.pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()), + # 'VelocityXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)), 'VerilogLexer': ('typecode._vendor.pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)), 'VhdlLexer': ('typecode._vendor.pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)), - #'VimLexer': ('typecode._vendor.pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)), - 
#'WDiffLexer': ('typecode._vendor.pygments.lexers.diff', 'WDiff', ('wdiff',), ('*.wdiff',), ()), - #'WebIDLLexer': ('typecode._vendor.pygments.lexers.webidl', 'Web IDL', ('webidl',), ('*.webidl',), ()), - #'WhileyLexer': ('typecode._vendor.pygments.lexers.whiley', 'Whiley', ('whiley',), ('*.whiley',), ('text/x-whiley',)), - #'X10Lexer': ('typecode._vendor.pygments.lexers.x10', 'X10', ('x10', 'xten'), ('*.x10',), ('text/x-x10',)), - #'XQueryLexer': ('typecode._vendor.pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')), - #'XmlDjangoLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')), - #'XmlErbLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Ruby', ('xml+erb', 'xml+ruby'), (), ('application/xml+ruby',)), - #'XmlLexer': ('typecode._vendor.pygments.lexers.html', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')), - #'XmlPhpLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)), - #'XmlSmartyLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)), - #'XorgLexer': ('typecode._vendor.pygments.lexers.xorg', 'Xorg', ('xorg.conf',), ('xorg.conf',), ()), - #'XsltLexer': ('typecode._vendor.pygments.lexers.html', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')), - #'XtendLexer': ('typecode._vendor.pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)), - #'XtlangLexer': ('typecode._vendor.pygments.lexers.lisp', 'xtlang', ('extempore',), ('*.xtm',), ()), - #'YamlJinjaLexer': ('typecode._vendor.pygments.lexers.templates', 'YAML+Jinja', 
('yaml+jinja', 'salt', 'sls'), ('*.sls',), ('text/x-yaml+jinja', 'text/x-sls')), - #'YamlLexer': ('typecode._vendor.pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)), - #'YangLexer': ('typecode._vendor.pygments.lexers.yang', 'YANG', ('yang',), ('*.yang',), ('application/yang',)), - #'ZeekLexer': ('typecode._vendor.pygments.lexers.dsls', 'Zeek', ('zeek', 'bro'), ('*.zeek', '*.bro'), ()), - #'ZephirLexer': ('typecode._vendor.pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()), - #'ZigLexer': ('typecode._vendor.pygments.lexers.zig', 'Zig', ('zig',), ('*.zig',), ('text/zig',)), + # 'VimLexer': ('typecode._vendor.pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)), + # 'WDiffLexer': ('typecode._vendor.pygments.lexers.diff', 'WDiff', ('wdiff',), ('*.wdiff',), ()), + # 'WebIDLLexer': ('typecode._vendor.pygments.lexers.webidl', 'Web IDL', ('webidl',), ('*.webidl',), ()), + # 'WhileyLexer': ('typecode._vendor.pygments.lexers.whiley', 'Whiley', ('whiley',), ('*.whiley',), ('text/x-whiley',)), + # 'X10Lexer': ('typecode._vendor.pygments.lexers.x10', 'X10', ('x10', 'xten'), ('*.x10',), ('text/x-x10',)), + # 'XQueryLexer': ('typecode._vendor.pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')), + # 'XmlDjangoLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')), + # 'XmlErbLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Ruby', ('xml+erb', 'xml+ruby'), (), ('application/xml+ruby',)), + # 'XmlLexer': ('typecode._vendor.pygments.lexers.html', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 
'application/atom+xml')), + # 'XmlPhpLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)), + # 'XmlSmartyLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)), + # 'XorgLexer': ('typecode._vendor.pygments.lexers.xorg', 'Xorg', ('xorg.conf',), ('xorg.conf',), ()), + # 'XsltLexer': ('typecode._vendor.pygments.lexers.html', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')), + # 'XtendLexer': ('typecode._vendor.pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)), + # 'XtlangLexer': ('typecode._vendor.pygments.lexers.lisp', 'xtlang', ('extempore',), ('*.xtm',), ()), + # 'YamlJinjaLexer': ('typecode._vendor.pygments.lexers.templates', 'YAML+Jinja', ('yaml+jinja', 'salt', 'sls'), ('*.sls',), ('text/x-yaml+jinja', 'text/x-sls')), + # 'YamlLexer': ('typecode._vendor.pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)), + # 'YangLexer': ('typecode._vendor.pygments.lexers.yang', 'YANG', ('yang',), ('*.yang',), ('application/yang',)), + # 'ZeekLexer': ('typecode._vendor.pygments.lexers.dsls', 'Zeek', ('zeek', 'bro'), ('*.zeek', '*.bro'), ()), + # 'ZephirLexer': ('typecode._vendor.pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()), + # 'ZigLexer': ('typecode._vendor.pygments.lexers.zig', 'Zig', ('zig',), ('*.zig',), ('text/zig',)), } if __name__ == '__main__': # pragma: no cover diff --git a/tests/filetype_test_utils.py b/tests/filetype_test_utils.py index e83fda6..10d3778 100644 --- a/tests/filetype_test_utils.py +++ b/tests/filetype_test_utils.py @@ -1,21 +1,10 @@ # -# Copyright (c) nexB Inc. and others. -# SPDX-License-Identifier: Apache-2.0 -# -# Visit https://aboutcode.org and https://github.com/nexB/ for support and download. +# Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/nexB/typecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. # from collections import OrderedDict diff --git a/tests/test_contenttype.py b/tests/test_contenttype.py index d371c7e..252e85a 100644 --- a/tests/test_contenttype.py +++ b/tests/test_contenttype.py @@ -1,21 +1,10 @@ # -# Copyright (c) nexB Inc. and others. -# SPDX-License-Identifier: Apache-2.0 -# -# Visit https://aboutcode.org and https://github.com/nexB/ for support and download. +# Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. 
+# See https://github.com/nexB/typecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. # import os diff --git a/tests/test_entropy.py b/tests/test_entropy.py index 8c8e481..01fac18 100644 --- a/tests/test_entropy.py +++ b/tests/test_entropy.py @@ -1,21 +1,10 @@ # -# Copyright (c) nexB Inc. and others. -# SPDX-License-Identifier: Apache-2.0 -# -# Visit https://aboutcode.org and https://github.com/nexB/ for support and download. +# Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/nexB/typecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. # from functools import partial diff --git a/tests/test_extractible.py b/tests/test_extractible.py index 139d4db..2797f39 100644 --- a/tests/test_extractible.py +++ b/tests/test_extractible.py @@ -1,21 +1,10 @@ # -# Copyright (c) nexB Inc. and others. -# SPDX-License-Identifier: Apache-2.0 -# -# Visit https://aboutcode.org and https://github.com/nexB/ for support and download. +# Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/nexB/typecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. # import os diff --git a/tests/test_magic2.py b/tests/test_magic2.py index 5509d08..d696670 100644 --- a/tests/test_magic2.py +++ b/tests/test_magic2.py @@ -1,24 +1,14 @@ # -# Copyright (c) nexB Inc. and others. -# SPDX-License-Identifier: Apache-2.0 -# -# Visit https://aboutcode.org and https://github.com/nexB/ for support and download. +# Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/nexB/typecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# + from typecode.magic2 import libmagic_version def test_load_lib(): - assert libmagic_version > 0 \ No newline at end of file + assert libmagic_version > 0 diff --git a/tests/test_types.py b/tests/test_types.py index 17f1f58..6274b7e 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -1,21 +1,10 @@ # -# Copyright (c) nexB Inc. and others. -# SPDX-License-Identifier: Apache-2.0 -# -# Visit https://aboutcode.org and https://github.com/nexB/ for support and download. +# Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/nexB/typecode for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. 
# import os @@ -25,7 +14,6 @@ from filetype_test_utils import build_tests from filetype_test_utils import load_filetype_tests - test_env = FileDrivenTesting() test_env.test_data_dir = os.path.join(os.path.dirname(__file__), 'data') From 3aeb2ec68d313b75430539d9e4d2e57c53ef6998 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Mon, 31 May 2021 11:24:39 +0200 Subject: [PATCH 31/35] Update format Signed-off-by: Philippe Ombredanne --- setup.cfg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.cfg b/setup.cfg index f791084..f192f22 100644 --- a/setup.cfg +++ b/setup.cfg @@ -20,7 +20,7 @@ classifiers = Programming Language :: Python :: 3 :: Only Topic :: Software Development Topic :: Utilities -keywords = +keywords = utilities [options] @@ -43,4 +43,4 @@ testing = docs= Sphinx>=3.3.1 sphinx-rtd-theme>=0.5.0 - doc8>=0.8.1 \ No newline at end of file + doc8>=0.8.1 From 2c412e8222d4d615384a24e2ddc472b0c9703916 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Mon, 31 May 2021 11:24:57 +0200 Subject: [PATCH 32/35] Add Python 3.9 to Travis Signed-off-by: Philippe Ombredanne --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 1b52eb2..1a90a38 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,6 +13,7 @@ python: - "3.6" - "3.7" - "3.8" + - "3.9" # Scripts to run at install stage install: ./configure --dev From 69eec23792d59dbdc3a3acb1711884560cf27073 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Mon, 31 May 2021 11:27:35 +0200 Subject: [PATCH 33/35] Format and remove spurious spaces From https://github.com/nexB/typecode/pull/20 Reported-by: Pierre Tardy Signed-off-by: Philippe Ombredanne --- configure.bat | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/configure.bat b/configure.bat index 8c497ba..80d0a43 100644 --- a/configure.bat +++ b/configure.bat @@ -9,7 +9,7 @@ @rem ################################ -@rem # A configuration script to set things up: 
+@rem # A configuration script to set things up: @rem # create a virtualenv and install or update thirdparty packages. @rem # Source this script for initial configuration @rem # Use configure --help for details @@ -48,7 +48,7 @@ set "CFG_BIN_DIR=%CFG_ROOT_DIR%\%VIRTUALENV_DIR%\Scripts" @rem ################################ -@rem # Set the quiet flag to empty if not defined +@rem # Set the quiet flag to empty if not defined if not defined CFG_QUIET ( set "CFG_QUIET= " ) @@ -65,8 +65,8 @@ if "%1" EQU "--dev" ( set "CFG_REQUIREMENTS=%DEV_REQUIREMENTS%" set CFG_DEV_MODE=1 ) -if "%1" EQU "--python" ( - echo "The --python is now DEPRECATED. Use the PYTHON_EXECUTABLE environment +if "%1" EQU "--python"( + echo "The --python option is now DEPRECATED. Use the PYTHON_EXECUTABLE environment" echo "variable instead. Run configure --help for details." exit /b 0 ) @@ -76,7 +76,7 @@ if "%1" EQU "--python" ( @rem # Use environment variables or a file if available. @rem # Otherwise the latest Python by default. 
if not defined PYTHON_EXECUTABLE ( - @rem # check for a file named PYTHON_EXECUTABLE + @rem # check for a file named PYTHON_EXECUTABLE if exist ""%CFG_ROOT_DIR%\PYTHON_EXECUTABLE"" ( set /p PYTHON_EXECUTABLE=<""%CFG_ROOT_DIR%\PYTHON_EXECUTABLE"" ) else ( From 0e09ad9eb77ca0b580d71baa428955a0a56d19f1 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Mon, 31 May 2021 19:17:43 +0200 Subject: [PATCH 34/35] Bump to more modern version of setuptools_scm And remove v prefix from fallback version Signed-off-by: Philippe Ombredanne --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 8eebe91..852f0fc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,11 +1,11 @@ [build-system] -requires = ["setuptools >= 50", "wheel", "setuptools_scm[toml] >= 4"] +requires = ["setuptools >= 50", "wheel", "setuptools_scm[toml] >= 6"] build-backend = "setuptools.build_meta" [tool.setuptools_scm] # this is used populated when creating a git archive # and when there is .git dir and/or there is no git installed -fallback_version = "v9999.$Format:%h-%cs$" +fallback_version = "9999.$Format:%h-%cs$" [tool.pytest.ini_options] norecursedirs = [ From e339a70e1a46b613fa73b9d0a9273fe7640acb8d Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Mon, 31 May 2021 19:18:09 +0200 Subject: [PATCH 35/35] Add space for correct syntax Signed-off-by: Philippe Ombredanne --- configure.bat | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configure.bat b/configure.bat index 80d0a43..c12f937 100644 --- a/configure.bat +++ b/configure.bat @@ -65,7 +65,7 @@ if "%1" EQU "--dev" ( set "CFG_REQUIREMENTS=%DEV_REQUIREMENTS%" set CFG_DEV_MODE=1 ) -if "%1" EQU "--python"( +if "%1" EQU "--python" ( echo "The --python option is now DEPRECATED. Use the PYTHON_EXECUTABLE environment" echo "variable instead. Run configure --help for details." exit /b 0

' : '\U0001d4ab', + '\\' : '\U0001d4ac', + '\\' : '\U0000211b', + '\\' : '\U0001d4ae', + '\\' : '\U0001d4af', + '\\' : '\U0001d4b0', + '\\' : '\U0001d4b1', + '\\' : '\U0001d4b2', + '\\' : '\U0001d4b3', + '\\' : '\U0001d4b4', + '\\' : '\U0001d4b5', + '\\' : '\U0001d5ba', + '\\' : '\U0001d5bb', + '\\' : '\U0001d5bc', + '\\' : '\U0001d5bd', + '\\' : '\U0001d5be', + '\\' : '\U0001d5bf', + '\\' : '\U0001d5c0', + '\\' : '\U0001d5c1', + '\\' : '\U0001d5c2', + '\\' : '\U0001d5c3', + '\\' : '\U0001d5c4', + '\\' : '\U0001d5c5', + '\\' : '\U0001d5c6', + '\\' : '\U0001d5c7', + '\\' : '\U0001d5c8', + '\\