fix build under newer KConfig by bumping cmake min version
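For context, bumping the CMake minimum version normally just means raising cmake_minimum_required at the top of CMakeLists.txt; a minimal sketch, assuming a hypothetical version number (the actual CMakeLists.txt change is not part of the vendored-file diff shown below):

    # Hypothetical CMakeLists.txt excerpt -- the version value is illustrative, not taken from this commit.
    # Newer KConfig releases expect a more recent CMake, so the declared minimum is raised.
    cmake_minimum_required(VERSION 3.16)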
3rdparty/lexilla540/lexilla/scripts/HeaderOrder.txt (vendored, new file, 106 lines)
@@ -0,0 +1,106 @@
// Define the standard order in which to include header files
// All platform headers should be included before Scintilla headers
// and each of these groups are then divided into directory groups.

// Base of the repository relative to this file

//base:..

// File patterns to check:

//source:include/*.h
//source:src/*.cxx
//source:lexlib/*.cxx
//source:lexers/*.cxx
//source:access/*.cxx
//source:test/*.cxx
//source:test/unit/*.cxx

// C standard library
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include <stdarg.h>
#include <assert.h>
#include <ctype.h>

// C++ wrappers of C standard library
#include <cstdlib>
#include <cstdint>
#include <cassert>
#include <cstring>
#include <cctype>
#include <cstdio>
#include <cstdarg>

// C++ standard library
#include <utility>
#include <string>
#include <string_view>
#include <vector>
#include <map>
#include <set>
#include <optional>
#include <initializer_list>
#include <algorithm>
#include <iterator>
#include <functional>
#include <memory>
#include <regex>
#include <iostream>
#include <sstream>
#include <fstream>
#include <iomanip>
#include <filesystem>

// POSIX
#include <dlfcn.h>

// Windows header needed for loading DLL
#include <windows.h>

// Scintilla/Lexilla headers

// Non-platform-specific headers

// Scintilla include

#include "Sci_Position.h"
#include "ILexer.h"
#include "Scintilla.h"

// Lexilla include

#include "SciLexer.h"
#include "Lexilla.h"

// access

#include "LexillaAccess.h"

// lexlib
#include "StringCopy.h"
#include "PropSetSimple.h"
#include "InList.h"
#include "WordList.h"
#include "LexAccessor.h"
#include "Accessor.h"
#include "StyleContext.h"
#include "CharacterSet.h"
#include "CharacterCategory.h"
#include "LexerModule.h"
#include "CatalogueModules.h"
#include "OptionSet.h"
#include "SparseState.h"
#include "SubStyles.h"
#include "DefaultLexer.h"
#include "LexerBase.h"
#include "LexerSimple.h"

// test

#include "TestDocument.h"

// Catch testing framework
#include "catch.hpp"
3rdparty/lexilla540/lexilla/scripts/LexFacer.py (vendored, new file, 29 lines)
@@ -0,0 +1,29 @@
#!/usr/bin/env python3
# LexFacer.py - regenerate the SciLexer.h files from the Scintilla.iface interface
# definition file.
# Implemented 2000 by Neil Hodgson neilh@scintilla.org
# Requires Python 3.6 or later

import os, pathlib, sys

sys.path.append(os.path.join("..", "..", "scintilla", "scripts"))

import Face
import FileGenerator

def printLexHFile(f):
    out = []
    for name in f.order:
        v = f.features[name]
        if v["FeatureType"] in ["val"]:
            if "SCE_" in name or "SCLEX_" in name:
                out.append("#define " + name + " " + v["Value"])
    return out

def RegenerateAll(root, _showMaxID):
    f = Face.Face()
    f.ReadFromFile(root / "include/LexicalStyles.iface")
    FileGenerator.Regenerate(root / "include/SciLexer.h", "/* ", printLexHFile(f))

if __name__ == "__main__":
    RegenerateAll(pathlib.Path(__file__).resolve().parent.parent, True)
3rdparty/lexilla540/lexilla/scripts/LexillaData.py (vendored, new file, 338 lines)
@@ -0,0 +1,338 @@
#!/usr/bin/env python3
# LexillaData.py - implemented 2013 by Neil Hodgson neilh@scintilla.org
# Released to the public domain.
# Requires FileGenerator from Scintilla so scintilla must be a peer directory of lexilla.

"""
Common code used by Lexilla and SciTE for source file regeneration.
"""

# The LexillaData object exposes information about Lexilla as properties:
# Version properties
#     version
#     versionDotted
#     versionCommad
#
# Date last modified
#     dateModified
#     yearModified
#     mdyModified
#     dmyModified
#     myModified
#
# Information about lexers and properties defined in lexers
#     lexFiles
#         sorted list of lexer file stems like LexAbaqus
#     lexerModules
#         sorted list of module names like lmAbaqus
#     lexerProperties
#         sorted list of lexer properties like lexer.bash.command.substitution
#     propertyDocuments
#         dictionary of property documentation { name: document string }
#         like lexer.bash.special.parameter: Set shell (default is Bash) special parameters.
#     sclexFromName
#         dictionary of SCLEX_* IDs { name: SCLEX_ID } like ave: SCLEX_AVE
#     fileFromSclex
#         dictionary of file names { SCLEX_ID: file name } like SCLEX_AU3: LexAU3.cxx
#     lexersXcode
#         dictionary of project file UUIDs { file name: [build UUID, file UUID] }
#         like LexTCL: [28BA733B24E34D9700272C2D,28BA72C924E34D9100272C2D]
#     credits
#         list of names of contributors like Atsuo Ishimoto

# This file can be run to see the data it provides.
# Requires Python 3.6 or later

import datetime, pathlib, sys, textwrap

neutralEncoding = "iso-8859-1" # Each byte value is valid in iso-8859-1

def ReadFileAsList(path):
    """Read all the lines in the file and return as a list of strings without line ends.
    """
    with path.open(encoding="utf-8") as f:
        return [line.rstrip('\n') for line in f]

def FindModules(lexFile):
    """ Return a list of modules found within a lexer implementation file. """
    modules = []
    partLine = ""
    with lexFile.open(encoding=neutralEncoding) as f:
        lineNum = 0
        for line in f.readlines():
            lineNum += 1
            line = line.rstrip()
            if partLine or line.startswith("extern const LexerModule"):
                if ")" in line:
                    line = partLine + line
                    original = line
                    line = line.replace("(", " ")
                    line = line.replace(")", " ")
                    line = line.replace(",", " ")
                    parts = line.split()[2:]
                    lexerName = parts[4]
                    if not (lexerName.startswith('"') and lexerName.endswith('"')):
                        print(f"{lexFile}:{lineNum}: Bad LexerModule statement:\n{original}")
                        sys.exit(1)
                    lexerName = lexerName.strip('"')
                    modules.append([parts[1], parts[2], lexerName])
                    partLine = ""
                else:
                    partLine = partLine + line
    return modules

def FindSectionInList(lines, markers):
    """Find a section defined by an initial start marker, an optional secondary
    marker and an end marker.
    The section is between the secondary/initial start and the end.
    Report as a slice object so the section can be extracted or replaced.
    Raises an exception if the markers can't be found.
    Currently only used for Xcode project files.
    """
    start = -1
    end = -1
    state = 0
    for i, line in enumerate(lines):
        if markers[0] in line:
            if markers[1]:
                state = 1
            else:
                start = i+1
                state = 2
        elif state == 1:
            if markers[1] in line:
                start = i+1
                state = 2
        elif state == 2:
            if markers[2] in line:
                end = i
                state = 3
    # Check that section was found
    if start == -1:
        raise ValueError("Could not find start marker(s) |" + markers[0] + "|" + markers[1] + "|")
    if end == -1:
        raise ValueError("Could not find end marker " + markers[2])
    return slice(start, end)

def FindLexersInXcode(xCodeProject):
    """ Return a dictionary { file name: [build UUID, file UUID] } of lexers in Xcode project. """
    lines = ReadFileAsList(xCodeProject)

    # PBXBuildFile section is a list of all buildable files in the project so extract the file
    # basename and its build and file IDs
    uidsOfBuild = {}
    markersPBXBuildFile = ["Begin PBXBuildFile section", "", "End PBXBuildFile section"]
    for buildLine in lines[FindSectionInList(lines, markersPBXBuildFile)]:
        # Occurs for each file in the build. Find the UIDs used for the file.
        #\t\t[0-9A-F]+ /* [a-zA-Z]+.cxx in sources */ = {isa = PBXBuildFile; fileRef = [0-9A-F]+ /* [a-zA-Z]+ */; };
        pieces = buildLine.split()
        uid1 = pieces[0]
        filename = pieces[2].split(".")[0]
        uid2 = pieces[12]
        uidsOfBuild[filename] = [uid1, uid2]

    # PBXGroup section contains the folders (Lexilla, Lexers, LexLib, ...) so is used to find the lexers
    lexers = {}
    markersLexers = ["/* Lexers */ =", "children", ");"]
    for lexerLine in lines[FindSectionInList(lines, markersLexers)]:
        #\t\t\t\t[0-9A-F]+ /* [a-zA-Z]+.cxx */,
        uid, _, rest = lexerLine.partition("/* ")
        uid = uid.strip()
        lexer, _, _ = rest.partition(".")
        lexers[lexer] = uidsOfBuild[lexer]

    return lexers

# Properties that start with lexer. or fold. are automatically found but there are some
# older properties that don't follow this pattern so must be explicitly listed.
knownIrregularProperties = [
    "fold",
    "styling.within.preprocessor",
    "tab.timmy.whinge.level",
    "asp.default.language",
    "html.tags.case.sensitive",
    "ps.level",
    "ps.tokenize",
    "sql.backslash.escapes",
    "nsis.uservars",
    "nsis.ignorecase"
]

def FindProperties(lexFile):
    """ Return a set of property names in a lexer implementation file. """
    properties = set()
    with open(lexFile, encoding=neutralEncoding) as f:
        for s in f.readlines():
            if ("GetProperty" in s or "DefineProperty" in s) and "\"" in s:
                s = s.strip()
                if not s.startswith("//"): # Drop comments
                    propertyName = s.split("\"")[1]
                    if propertyName.lower() == propertyName:
                        # Only allow lower case property names
                        if propertyName in knownIrregularProperties or \
                                propertyName.startswith("fold.") or \
                                propertyName.startswith("lexer."):
                            properties.add(propertyName)
    return properties

def FindPropertyDocumentation(lexFile):
    """ Return a dictionary { name: document string } of property documentation in a lexer. """
    documents = {}
    with lexFile.open(encoding=neutralEncoding) as f:
        name = ""
        for line in f.readlines():
            line = line.strip()
            if "// property " in line:
                propertyName = line.split()[2]
                if propertyName.lower() == propertyName:
                    # Only allow lower case property names
                    name = propertyName
                    documents[name] = ""
            elif "DefineProperty" in line and "\"" in line:
                propertyName = line.split("\"")[1]
                if propertyName.lower() == propertyName:
                    # Only allow lower case property names
                    name = propertyName
                    documents[name] = ""
            elif name:
                if line.startswith("//"):
                    if documents[name]:
                        documents[name] += " "
                    documents[name] += line[2:].strip()
                elif line.startswith("\""):
                    line = line[1:].strip()
                    if line.endswith(";"):
                        line = line[:-1].strip()
                    if line.endswith(")"):
                        line = line[:-1].strip()
                    if line.endswith("\""):
                        line = line[:-1]
                    # Fix escaped double quotes
                    line = line.replace("\\\"", "\"")
                    documents[name] += line
                else:
                    name = ""
    for name in list(documents.keys()):
        if documents[name] == "":
            del documents[name]
    return documents

def FindCredits(historyFile):
    """ Return a list of contributors in a history file. """
    creditList = []
    stage = 0
    with historyFile.open(encoding="utf-8") as f:
        for line in f.readlines():
            line = line.strip()
            if stage == 0 and line == "<table>":
                stage = 1
            elif stage == 1 and line == "</table>":
                stage = 2
            if stage == 1 and line.startswith("<td>"):
                credit = line[4:-5]
                if "<a" in line:
                    title, dummy, rest = credit.partition("<a href=")
                    urlplus, _bracket, end = rest.partition(">")
                    name = end.split("<")[0]
                    url = urlplus[1:-1]
                    credit = title.strip()
                    if credit:
                        credit += " "
                    credit += name + " " + url
                creditList.append(credit)
    return creditList

def ciKey(a):
    """ Return a string lowered to be used when sorting. """
    return str(a).lower()

def SortListInsensitive(l):
    """ Sort a list of strings case insensitively. """
    l.sort(key=ciKey)

class LexillaData:
    """ Expose information about Lexilla as properties. """

    def __init__(self, scintillaRoot):
        # Discover version information
        self.version = (scintillaRoot / "version.txt").read_text().strip()
        self.versionDotted = self.version[0:-2] + '.' + self.version[-2] + '.' + \
            self.version[-1]
        self.versionCommad = self.versionDotted.replace(".", ", ") + ', 0'

        with (scintillaRoot / "doc" / "Lexilla.html").open() as f:
            self.dateModified = [d for d in f.readlines() if "Date.Modified" in d]\
                [0].split('\"')[3]
            # 20130602
            # Lexilla.html
        dtModified = datetime.datetime.strptime(self.dateModified, "%Y%m%d")
        self.yearModified = self.dateModified[0:4]
        monthModified = dtModified.strftime("%B")
        dayModified = f"{dtModified.day}"
        self.mdyModified = monthModified + " " + dayModified + " " + self.yearModified
        # May 22 2013
        # Lexilla.html, SciTE.html
        self.dmyModified = dayModified + " " + monthModified + " " + self.yearModified
        # 22 May 2013
        # LexillaHistory.html -- only first should change
        self.myModified = monthModified + " " + self.yearModified

        # Find all the lexer source code files
        lexFilePaths = list((scintillaRoot / "lexers").glob("Lex*.cxx"))
        SortListInsensitive(lexFilePaths)
        self.lexFiles = [f.stem for f in lexFilePaths]
        self.lexerModules = []
        lexerProperties = set()
        self.propertyDocuments = {}
        self.sclexFromName = {}
        self.fileFromSclex = {}
        for lexFile in lexFilePaths:
            modules = FindModules(lexFile)
            for module in modules:
                self.sclexFromName[module[2]] = module[1]
                self.fileFromSclex[module[1]] = lexFile
                self.lexerModules.append(module[0])
            for prop in FindProperties(lexFile):
                lexerProperties.add(prop)
            documents = FindPropertyDocumentation(lexFile)
            for prop, doc in documents.items():
                if prop not in self.propertyDocuments:
                    self.propertyDocuments[prop] = doc
        SortListInsensitive(self.lexerModules)
        self.lexerProperties = list(lexerProperties)
        SortListInsensitive(self.lexerProperties)

        self.lexersXcode = FindLexersInXcode(scintillaRoot /
            "src/Lexilla/Lexilla.xcodeproj/project.pbxproj")
        self.credits = FindCredits(scintillaRoot / "doc" / "LexillaHistory.html")

def printWrapped(text):
    """ Print string wrapped with subsequent lines indented. """
    print(textwrap.fill(text, subsequent_indent=" "))

if __name__=="__main__":
    sci = LexillaData(pathlib.Path(__file__).resolve().parent.parent)
    print(f"Version {sci.version} {sci.versionDotted} {sci.versionCommad}")
    print(f"Date last modified {sci.dateModified} {sci.yearModified} {sci.mdyModified}"
        f" {sci.dmyModified} {sci.myModified}")
    printWrapped(str(len(sci.lexFiles)) + " lexer files: " + ", ".join(sci.lexFiles))
    printWrapped(str(len(sci.lexerModules)) + " lexer modules: " + ", ".join(sci.lexerModules))
    #~ printWrapped(str(len(sci.lexersXcode)) + " Xcode lexer references: " + ", ".join(
    #~ [lex+":"+uids[0]+","+uids[1] for lex, uids in sci.lexersXcode.items()]))
    print("Lexer name to ID:")
    lexNames = sorted(sci.sclexFromName.keys())
    for lexName in lexNames:
        sclex = sci.sclexFromName[lexName]
        fileName = sci.fileFromSclex[sclex].name
        print(" " + lexName + " -> " + sclex + " in " + fileName)
    printWrapped("Lexer properties: " + ", ".join(sci.lexerProperties))
    print("Lexer property documentation:")
    documentProperties = list(sci.propertyDocuments.keys())
    SortListInsensitive(documentProperties)
    for k in documentProperties:
        print(" " + k)
        print(textwrap.fill(sci.propertyDocuments[k], initial_indent=" ",
            subsequent_indent=" "))
    print("Credits:")
    for c in sci.credits:
        sys.stdout.buffer.write(b" " + c.encode("utf-8") + b"\n")
3rdparty/lexilla540/lexilla/scripts/LexillaGen.py (vendored, new file, 165 lines)
@@ -0,0 +1,165 @@
#!/usr/bin/env python3
# LexillaGen.py - implemented 2019 by Neil Hodgson neilh@scintilla.org
# Released to the public domain.

"""
Regenerate the Lexilla source files that list all the lexers.
"""

# Should be run whenever a new lexer is added or removed.
# Requires Python 3.6 or later
# Files are regenerated in place with templates stored in comments.
# The format of generation comments is documented in FileGenerator.py.

import os, pathlib, sys, uuid

thisPath = pathlib.Path(__file__).resolve()

sys.path.append(str(thisPath.parent.parent.parent / "scintilla" / "scripts"))

from FileGenerator import Regenerate, UpdateLineInFile, \
    ReplaceREInFile, UpdateLineInPlistFile, UpdateFileFromLines
import LexillaData
import LexFacer

sys.path.append(str(thisPath.parent.parent / "src"))
import DepGen

# RegenerateXcodeProject and associated functions are copied from scintilla/scripts/LexGen.py

def uid24():
    """ Last 24 digits of UUID, used for item IDs in Xcode. """
    return str(uuid.uuid4()).replace("-", "").upper()[-24:]

def ciLexerKey(a):
    """ Return 3rd element of string lowered to be used when sorting. """
    return a.split()[2].lower()


"""
11F35FDB12AEFAF100F0236D /* LexA68k.cxx in Sources */ = {isa = PBXBuildFile; fileRef = 11F35FDA12AEFAF100F0236D /* LexA68k.cxx */; };
11F35FDA12AEFAF100F0236D /* LexA68k.cxx */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = LexA68k.cxx; path = ../../lexers/LexA68k.cxx; sourceTree = SOURCE_ROOT; };
11F35FDA12AEFAF100F0236D /* LexA68k.cxx */,
11F35FDB12AEFAF100F0236D /* LexA68k.cxx in Sources */,
"""
def RegenerateXcodeProject(path, lexers, lexerReferences):
    """ Regenerate project to include any new lexers. """
    # Build 4 blocks for insertion:
    # Each markers contains a unique section start, an optional wait string, and a section end

    markersPBXBuildFile = ["Begin PBXBuildFile section", "", "End PBXBuildFile section"]
    sectionPBXBuildFile = []

    markersPBXFileReference = ["Begin PBXFileReference section", "", "End PBXFileReference section"]
    sectionPBXFileReference = []

    markersLexers = ["/* Lexers */ =", "children", ");"]
    sectionLexers = []

    markersPBXSourcesBuildPhase = ["Begin PBXSourcesBuildPhase section", "files", ");"]
    sectionPBXSourcesBuildPhase = []

    for lexer in lexers:
        if lexer not in lexerReferences:
            uid1 = uid24()
            uid2 = uid24()
            print("Lexer", lexer, "is not in Xcode project. Use IDs", uid1, uid2)
            lexerReferences[lexer] = [uid1, uid2]
            linePBXBuildFile = f"\t\t{uid1} /* {lexer}.cxx in Sources */ = {{isa = PBXBuildFile; fileRef = {uid2} /* {lexer}.cxx */; }};"
            linePBXFileReference = f"\t\t{uid2} /* {lexer}.cxx */ = {{isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = {lexer}.cxx; path = ../../lexers/{lexer}.cxx; sourceTree = SOURCE_ROOT; }};"
            lineLexers = f"\t\t\t\t{uid2} /* {lexer}.cxx */,"
            linePBXSourcesBuildPhase = f"\t\t\t\t{uid1} /* {lexer}.cxx in Sources */,"
            sectionPBXBuildFile.append(linePBXBuildFile)
            sectionPBXFileReference.append(linePBXFileReference)
            sectionLexers.append(lineLexers)
            sectionPBXSourcesBuildPhase.append(linePBXSourcesBuildPhase)

    lines = LexillaData.ReadFileAsList(path)

    sli = LexillaData.FindSectionInList(lines, markersPBXBuildFile)
    lines[sli.stop:sli.stop] = sectionPBXBuildFile

    sli = LexillaData.FindSectionInList(lines, markersPBXFileReference)
    lines[sli.stop:sli.stop] = sectionPBXFileReference

    sli = LexillaData.FindSectionInList(lines, markersLexers)
    # This section is shown in the project outline so sort it to make it easier to navigate.
    allLexers = sorted(lines[sli.start:sli.stop] + sectionLexers, key=ciLexerKey)
    lines[sli] = allLexers

    sli = LexillaData.FindSectionInList(lines, markersPBXSourcesBuildPhase)
    lines[sli.stop:sli.stop] = sectionPBXSourcesBuildPhase

    UpdateFileFromLines(path, lines, os.linesep)

def RegenerateAll(rootDirectory):
    """ Regenerate all the files. """

    root = pathlib.Path(rootDirectory)

    lexillaBase = root.resolve()

    lex = LexillaData.LexillaData(lexillaBase)

    lexillaDir = lexillaBase
    srcDir = lexillaDir / "src"
    docDir = lexillaDir / "doc"

    Regenerate(srcDir / "Lexilla.cxx", "//", lex.lexerModules)
    Regenerate(srcDir / "lexilla.mak", "#", lex.lexFiles)

    # Discover version information
    version = (lexillaDir / "version.txt").read_text().strip()
    versionDotted = version[0:-2] + '.' + version[-2] + '.' + version[-1]
    versionCommad = versionDotted.replace(".", ", ") + ', 0'

    rcPath = srcDir / "LexillaVersion.rc"
    UpdateLineInFile(rcPath, "#define VERSION_LEXILLA",
        "#define VERSION_LEXILLA \"" + versionDotted + "\"")
    UpdateLineInFile(rcPath, "#define VERSION_WORDS",
        "#define VERSION_WORDS " + versionCommad)
    UpdateLineInFile(docDir / "LexillaDownload.html", " Release",
        " Release " + versionDotted)
    ReplaceREInFile(docDir / "LexillaDownload.html",
        r"/www.scintilla.org/([a-zA-Z]+)\d{3,5}",
        r"/www.scintilla.org/\g<1>" + version,
        0)

    pathMain = lexillaDir / "doc" / "Lexilla.html"
    UpdateLineInFile(pathMain,
        ' <font color="#FFCC99" size="3">Release version',
        ' <font color="#FFCC99" size="3">Release version ' + \
        versionDotted + '<br />')
    UpdateLineInFile(pathMain,
        ' Site last modified',
        ' Site last modified ' + lex.mdyModified + '</font>')
    UpdateLineInFile(pathMain,
        ' <meta name="Date.Modified"',
        ' <meta name="Date.Modified" content="' + lex.dateModified + '" />')
    UpdateLineInFile(lexillaDir / "doc" / "LexillaHistory.html",
        ' Released ',
        ' Released ' + lex.dmyModified + '.')

    lexillaXcode = lexillaDir / "src" / "Lexilla"
    lexillaXcodeProject = lexillaXcode / "Lexilla.xcodeproj" / "project.pbxproj"

    lexerReferences = LexillaData.FindLexersInXcode(lexillaXcodeProject)

    UpdateLineInPlistFile(lexillaXcode / "Info.plist",
        "CFBundleShortVersionString", versionDotted)

    ReplaceREInFile(lexillaXcodeProject, "CURRENT_PROJECT_VERSION = [0-9.]+;",
        f'CURRENT_PROJECT_VERSION = {versionDotted};',
        0)

    RegenerateXcodeProject(lexillaXcodeProject, lex.lexFiles, lexerReferences)

    LexFacer.RegenerateAll(root, False)

    currentDirectory = pathlib.Path.cwd()
    os.chdir(srcDir)
    DepGen.Generate()
    os.chdir(currentDirectory)

if __name__=="__main__":
    RegenerateAll(pathlib.Path(__file__).resolve().parent.parent)
3rdparty/lexilla540/lexilla/scripts/LexillaLogo.py (vendored, new file, 75 lines)
@@ -0,0 +1,75 @@
# LexillaLogo.py
# Requires Python 3.6.
# Requires Pillow https://python-pillow.org/, tested with 7.2.0 on Windows 10

import random
from PIL import Image, ImageDraw, ImageFont

colours = [
    (136,0,21,255),
    (237,28,36,255),
    (255,127,39,255),
    (255,201,14,255),
    (185,122,87,255),
    (255,174,201,255),
    (181,230,29,255),
    (34,177,76,255),
    (153,217,234,255),
    (0,162,232,255),
    (112,146,190,255),
    (63,72,204,255),
    (200,191,231,255),
]

width = 1280
height = 150

def drawLines(dr):
    for y in range(0,height, 2):
        x = 0
        while x < width:
            #lexeme = random.randint(2, 20)
            lexeme = int(random.expovariate(0.3))
            colour = random.choice(colours)
            strokeRectangle = (x, y, x+lexeme, y)
            dr.rectangle(strokeRectangle, fill=colour)
            x += lexeme + 3

def drawGuide(dr):
    for y in range(0,height, 2):
        x = 0
        while x < width:
            lexeme = int(random.expovariate(0.3))
            colour = (0x30, 0x30, 0x30)
            strokeRectangle = (x, y, x+lexeme, y)
            dr.rectangle(strokeRectangle, fill=colour)
            x += lexeme + 3

def drawLogo():
    # Ensure same image each time
    random.seed(1)

    # Georgia bold italic
    font = ImageFont.truetype(font="georgiaz.ttf", size=190)

    imageMask = Image.new("L", (width, height), color=(0xff))
    drMask = ImageDraw.Draw(imageMask)
    drMask.text((30, -29), "Lexilla", font=font, fill=(0))

    imageBack = Image.new("RGB", (width, height), color=(0,0,0))
    drBack = ImageDraw.Draw(imageBack)
    drawGuide(drBack)

    imageLines = Image.new("RGB", (width, height), color=(0,0,0))
    dr = ImageDraw.Draw(imageLines)
    drawLines(dr)

    imageOut = Image.composite(imageBack, imageLines, imageMask)

    imageOut.save("../doc/LexillaLogo.png", "png")

    imageDoubled = imageOut.resize((width*2, height * 2), Image.NEAREST)

    imageDoubled.save("../doc/LexillaLogo2x.png", "png")

drawLogo()
3rdparty/lexilla540/lexilla/scripts/PromoteNew.bat (vendored, new file, 23 lines)
@@ -0,0 +1,23 @@
@echo off
rem Promote new result files.
rem Find all the *.new files under test\examples and copy them to their expected name without ".new".
rem Run after RunTest.bat if ".new" result files are correct.
pushd ..\test\examples
for /R %%f in (*.new) do (call :moveFile %%f)
popd
goto :eof

:moveFile
set pathWithNew=%1
set directory=%~dp1
set fileWithNew=%~nx1
set fileNoNew=%~n1
set pathNoNew=%pathWithNew:~0,-4%

if exist %pathNoNew% (
    echo Move %fileWithNew% to %fileNoNew% in %directory%
) else (
    echo New %fileWithNew% to %fileNoNew% in %directory%
)
move %pathWithNew% %pathNoNew%
goto :eof
3rdparty/lexilla540/lexilla/scripts/RunTest.bat (vendored, new file, 7 lines)
@@ -0,0 +1,7 @@
rem Test lexers
rem build lexilla.dll and TestLexers.exe then run TestLexers.exe
cd ../src
make --jobs=%NUMBER_OF_PROCESSORS% DEBUG=1
cd ../test
make DEBUG=1
make test
3rdparty/lexilla540/lexilla/scripts/RunTest.sh (vendored, new file, 12 lines)
@@ -0,0 +1,12 @@
# Test lexers
# build lexilla.so and TestLexers then run TestLexers
JOBS="--jobs=$(getconf _NPROCESSORS_ONLN)"
(
    cd ../src
    make "$JOBS" DEBUG=1
)
(
    cd ../test
    make DEBUG=1
    make test
)