diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000000000000000000000000000000000000..a3b7406700f4c49c8f9829c7e3ced5cecbf954da
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,7 @@
+[**]
+insert_final_newline = true
+
+[**.{asn1,asn,xsd,xml}]
+indent_style = space
+indent_size = 4
+trim_trailing_whitespace = true
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 3e3760376b450b5a9b9e6c9c8ff79d2305903cc0..58d77c67cce7380ed734c531d7302ce8827eceef 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,6 @@
+# Dockerfiles
+dockerfile_*
+
 # Editors
 .vscode/
 .idea/
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 7c98eee47f69c446b7964cd60796dc33eea69267..b4e3df33f5089456fc4d64b9dcd03d40ab38864c 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,37 +1,17 @@
-image: "mcanterb/forge-cicd:latest"
-
-before_script:
-  - python3 --version
-
 stages:
-  - Syntax
-  - CompileAndLint
-  - Merge
+  - check

-checkXSD:
-  stage: Syntax
+process_asn:
+  image: "mcanterb/asn1test:latest"
+  stage: check
+  interruptible: true
   script:
-    - python3 testing/check_xsd.py
+    - python3 testing/asn_process.py

-parseASN1:
-  stage: Syntax
+process_xsd:
+  image: "mcanterb/xsdtest:latest"
+  stage: check
+  interruptible: true
   script:
-    - python3 testing/parse_asn1.py
-
-compileASN1:
-  stage: CompileAndLint
-  script:
-    - python3 testing/compile_asn1.py
-
-lintASN1:
-  stage: CompileAndLint
-  script:
-    - python3 testing/lint_asn1.py
-  allow_failure: true
-
-MergeTest:
-  stage: Merge
-  script:
-    - python3 testing/merge_test.py
-
+    - python3 testing/xsd_process.py

diff --git a/testing/asn_compile_targets.json b/testing/asn_compile_targets.json
new file mode 100644
index 0000000000000000000000000000000000000000..1beceb2f126c0a2461e0ef7746feec1af1d5daa5
--- /dev/null
+++ b/testing/asn_compile_targets.json
@@ -0,0 +1,7 @@
+[
+    ["./33128/r15/TS33128Payloads.asn"],
+    ["./33128/r16/TS33128Payloads.asn"],
+    ["./33128/r16/TS33128IdentityAssociation.asn"],
+    ["./33128/r17/TS33128Payloads.asn"],
+    ["./33128/r17/TS33128IdentityAssociation.asn"]
+]
\ No newline at end of file
diff --git a/testing/asn_ignore.txt b/testing/asn_ignore.txt
new file mode 100644
index 0000000000000000000000000000000000000000..70aa0c5b7faba31cfa3e78e61bd66806e1df3474
--- /dev/null
+++ b/testing/asn_ignore.txt
@@ -0,0 +1 @@
+33108
\ No newline at end of file
diff --git a/testing/asn_ignore_lint.txt b/testing/asn_ignore_lint.txt
new file mode 100644
index 0000000000000000000000000000000000000000..03e7f54e7937f1cf45bf259cef700a7624bf6912
--- /dev/null
+++ b/testing/asn_ignore_lint.txt
@@ -0,0 +1 @@
+dependencies
\ No newline at end of file
diff --git a/testing/asn_lint_exceptions.json b/testing/asn_lint_exceptions.json
new file mode 100644
index 0000000000000000000000000000000000000000..bbdf361567816ff194efd8e530a40264400da069
--- /dev/null
+++ b/testing/asn_lint_exceptions.json
@@ -0,0 +1,22 @@
+{
+    "33128/r15/TS33128Payloads.asn" : [
+        "Enumerations for UDMServingSystemMethod start at 0, not 1",
+        "Field 'aNNodeID' in GlobalRANNodeID is an anonymous CHOICE"
+    ],
+    "33128/r16/TS33128Payloads.asn" : [
+        "Enumerations for EstablishmentStatus start at 0, not 1",
+        "Enumerations for RequestIndication start at 0, not 1",
+        "Enumerations for UDMServingSystemMethod start at 0, not 1",
+        "Enumerations for MMSDirection start at 0, not 1",
+        "Enumerations for MMSReplyCharging start at 0, not 1",
+        "Enumerations for MMStatusExtension start at 0, not 1"
+    ],
+    "33128/r17/TS33128Payloads.asn" : [
+        "Enumerations for EstablishmentStatus start at 0, not 1",
+        "Enumerations for RequestIndication start at 0, not 1",
+        "Enumerations for UDMServingSystemMethod start at 0, not 1",
+        "Enumerations for MMSDirection start at 0, not 1",
+        "Enumerations for MMSReplyCharging start at 0, not 1",
+        "Enumerations for MMStatusExtension start at 0, not 1"
+    ]
+}
\ No newline at end of file
diff --git a/testing/asn_process.py b/testing/asn_process.py
new file mode 100644
index 0000000000000000000000000000000000000000..49deb16100ca83550c74d973fdd8c1b92de58c06
--- /dev/null
+++ b/testing/asn_process.py
@@ -0,0 +1,183 @@
+import logging
+import json
+from pathlib import Path
+from subprocess import run
+
+from pycrate_asn1c.asnproc import *
+
+import lint_asn1
+
+
+def syntaxCheckASN (fileList):
+    """
+    Performs ASN.1 syntax checking on a list of filenames (or pathlib Paths)
+
+    :param fileList: List of filenames (str or pathlib Path)
+    :returns: Dict with result, return code and message for each filename
+
+    Calls the open-source asn1c compiler with the "syntax only" option.
+    As a result, asn1c must be available to run.
+    """
+    results = {}
+    for file in fileList:
+        try:
+            p = run(['asn1c', '-E', str(file)], capture_output=True)
+            if (p.returncode != 0):
+                results[str(file)] = {
+                    'ok' : False,
+                    'code' : p.returncode,
+                    'message' : p.stderr.decode().splitlines()[0]
+                }
+            else:
+                results[str(file)] = {
+                    'ok' : True
+                }
+        except Exception as ex:
+            results[str(file)] = {
+                'ok' : False,
+                'code' : -1,
+                'message' : f"{ex!r}"
+            }
+    return results
+
+
+
+def compileAllTargets (compileTargets):
+    """
+    Attempts to compile a set of compile targets using the pycrate ASN.1 tools
+
+    :param compileTargets: List of compile targets, each of which is a list of filenames
+    :returns: A dict of outcome against the first filename of each compile target. Return code and message are included for failures.
+
+    For each compile target (list of filenames) the first filename is assumed
+    to be the "primary" file. This doesn't have any relevance to the compilation,
+    but will be used as the identifier when reporting any compile errors.
+    The compilation is performed by the pycrate ASN.1 compile functions; errors
+    are caught as exceptions and rendered into a list.
+
+    Unfortunately, the pycrate compiler doesn't report line numbers.
+    The asn1c compiler does, but doesn't properly handle identifiers with the
+    same name in different modules; as this occurs multiple times in TS 33.108,
+    we can't use it.
+    """
+    results = {}
+    for target in compileTargets:
+        firstTarget = target[0]
+        logging.debug(f"Compiling {firstTarget}")
+        try:
+            fileTexts = []
+            fileNames = []
+            GLOBAL.clear()
+            for filename in target:
+                with open(filename) as f:
+                    fileTexts.append(f.read())
+                fileNames.append(str(filename))
+                logging.debug (f" Loading {filename}")
+            compile_text(fileTexts, filenames = fileNames)
+            results[str(firstTarget)] = {
+                'ok' : True,
+            }
+        except Exception as ex:
+            results[str(firstTarget)] = {
+                'ok' : False,
+                'code' : -1,
+                'message' : f"{ex!r}"
+            }
+            continue
+    return results
+
+
+
+def processResults (results, stageName):
+    """
+    Counts the number of errors and writes out the output per filename
+
+    :param results: Dict of per-file results, keyed by filename
+    :param stageName: Name to decorate the output with
+    :returns: The number of files which had errors
+    """
+    print("")
+    errorCount = sum([1 for r in results.values() if not r['ok']])
+    logging.info(f"{errorCount} {stageName} errors encountered")
+
+    print(f"{'-':-<60}")
+    print(f"{stageName} results:")
+    print(f"{'-':-<60}")
+    for filename, result in results.items():
+        print(f" {filename:.<55}{'..OK' if result['ok'] else 'FAIL'}")
+        if not result['ok']:
+            if isinstance(result['message'], list):
+                for thing in result['message']:
+                    print(f"   {thing['message']}")
+            else:
+                print(f"   {result['message']}")
+
+    print(f"{'-':-<60}")
+    print(f"{stageName} errors: {errorCount}")
+    print(f"{'-':-<60}")
+
+    return errorCount
+
+
+if __name__ == '__main__':
+    logging.info('Searching for ASN.1 files')
+    fileList = list(Path(".").rglob("*.asn1")) + list(Path(".").rglob("*.asn"))
+    logging.info(f'{len(fileList)} ASN.1 files found')
+    for file in fileList:
+        logging.debug(f' {file}')
+
+    ignoreList = Path('testing/asn_ignore.txt').read_text().splitlines()
+    ignoredFiles = []
+    for ignore in ignoreList:
+        logging.debug(f'Ignoring pattern {ignore}')
+        for file in fileList:
+            if ignore in str(file):
+                ignoredFiles.append(file)
+                logging.debug(f" Ignoring {str(file)} as contains {ignore}")
+    ignoredFiles = list(set(ignoredFiles))
+    logging.info(f'{len(ignoredFiles)} files ignored')
+    for file in ignoredFiles:
+        logging.debug(f' {file}')
+
+    fileList = [file for file in fileList if file not in ignoredFiles]
+    logging.info(f'{len(fileList)} files to process')
+    for file in fileList:
+        logging.debug(f' {file}')
+
+    if len(fileList) == 0:
+        logging.warning ("No files specified")
+        exit(0)
+
+    logging.info("Parsing ASN1 files")
+    parseResults = syntaxCheckASN(fileList)
+    if processResults(parseResults, "Parsing") > 0:
+        exit(-1)
+
+    logging.info ("Getting compile targets")
+    compileTargets = json.loads(Path('testing/asn_compile_targets.json').read_text())
+    logging.info (f"{len(compileTargets)} compile targets found")
+
+    compileResults = compileAllTargets(compileTargets)
+    if processResults(compileResults, "Compiling") > 0:
+        exit(-1)
+
+    logging.info ("Linting files")
+    ignoreLintingList = Path('testing/asn_ignore_lint.txt').read_text().splitlines()
+    ignoredFiles = []
+    for ignore in ignoreLintingList:
+        logging.debug(f'Ignoring pattern {ignore} for linting')
+        for file in fileList:
+            if ignore in str(file):
+                ignoredFiles.append(file)
+                logging.debug(f" Ignoring {str(file)} for linting as contains {ignore}")
+    ignoredFiles = list(set(ignoredFiles))
+    logging.info(f'{len(ignoredFiles)} files ignored for linting')
+    for file in ignoredFiles:
+        logging.debug(f' {file}')
+    fileList = [file for file in fileList if file not in ignoredFiles]
+    lintExceptions = json.loads(Path('testing/asn_lint_exceptions.json').read_text())
+    lintResults = lint_asn1.lintASN1Files(fileList, lintExceptions)
+    if processResults(lintResults, "Linting") > 0:
+        exit(-1)
+
+    exit(0)
diff --git a/testing/compile_asn1.py b/testing/compile_asn1.py
deleted file mode 100644
index 35fd5954bd92e0b923980a8debab01595acf5da4..0000000000000000000000000000000000000000
--- a/testing/compile_asn1.py
+++ /dev/null
@@ -1,47 +0,0 @@
-import logging
-
-import asn1tools
-from pathlib import Path
-
-from pprint import pprint
-
-ignoreReleases = {'33108' : [f'r{i}' for i in range(5, 17)],
-                  '33128' : [] }
-
-def prepareFile(f):
-    with open(f) as fh:
-        s = fh.read()
-        s = s.replace("RELATIVE-OID", "OBJECT IDENTIFIER") # sigh
-    return s
-
-if __name__ == '__main__':
-    fileList = list(Path(".").rglob("*.asn1")) + list(Path(".").rglob("*.asn"))
-
-    ignoredFiles = [file for file in fileList if file.parts[1] in ignoreReleases[file.parts[0]]]
-    logging.info(f"Ignoring {len(ignoredFiles)} files")
-    logging.debug(ignoredFiles)
-
-    fileList = [file for file in fileList if file not in ignoredFiles]
-
-    if len(fileList) == 0:
-        logging.warning ("No files specified")
-        exit(0)
-
-    print ("ASN.1 Compilation checks:")
-    print ("-----------------------------")
-    logging.info("Parsing files...")
-    errorCount = 0
-    for f in fileList:
-        try:
-            s = prepareFile(str(f))
-            asn1tools.compile_string(s) # this won't work for modules with IMPORTs
-        except asn1tools.ParseError as ex:
-            logging.info (f" {f}: Failed - {ex!r}")
-            print (f" {f}: Failed - {ex!r}")
-            errorCount += 1
-            continue
-        print (f" {f}: OK")
-    print ("-----------------------------")
-    print (f"Compile errors: {errorCount}")
-    print ("-----------------------------")
-    exit(errorCount)
diff --git a/testing/dockerfile b/testing/dockerfile
deleted file mode 100644
index d71a5bfbb31a252d405d0c71cba867f4fbb23587..0000000000000000000000000000000000000000
--- a/testing/dockerfile
+++ /dev/null
@@ -1,7 +0,0 @@
-# docker build -t mcanterb/forge-cicd
-# docker push mcanterb/forge-cicd
-
-FROM python:3.8
-RUN apt update && apt-get install -y git
-RUN git config --global user.name "forgeRobot" && git config --global user.email "forgeRobot@example.com"
-RUN pip3 install -q asn1tools lxml xmlschema requests gitpython
\ No newline at end of file
diff --git a/testing/lint_asn1.py b/testing/lint_asn1.py
index 704d37c68732dc73d712346aaa23f49ca07b6b3a..822c3e4bcdc3ec338143c02fee713f12a2b0429b 100644
--- a/testing/lint_asn1.py
+++ b/testing/lint_asn1.py
@@ -8,8 +8,6 @@ import string
 from pprint import pprint
 import functools
 
-import lintingexceptions
-
 moduleLevelTests = []
 typeLevelTests = []
 
@@ -98,7 +96,7 @@ def D41 (module, context):
                  testDescription = "AUTOMATIC TAGS not used")
 def D42(module, context):
     errors = []
-    if (module['tags'] == 'AUTOMATIC'):
+    if ('tags' in module) and (module['tags'] == 'AUTOMATIC'):
         appendFailure(errors, context, {"message" : "AUTOMATIC TAGS directive used"})
     return errors
 
@@ -167,9 +165,9 @@ def checkD45 (t, context):
     return errors
 
 
-def lintASN1File (asnFile):
-    print (f"File: {asnFile}")
+def lintASN1File (asnFile, exceptions):
     errors = []
+    suppressed = []
     context = {'file' : asnFile}
     try:
         logging.info ("Checking file {0}...".format(asnFile))
@@ -191,10 +189,17 @@ def lintASN1File (asnFile):
     except ParseError as ex:
         appendFailure(errors, context, { "message" : "ParseError: {0}".format(ex)})
         logging.error("ParseError: {0}".format(ex))
-    return errors
+    if len(exceptions) > 0:
+        suppressed = [error for error in errors if error['message'] in exceptions]
+        errors = [error for error in errors if error['message'] not in exceptions]
+    return {
+        'ok' : len(errors) == 0,
+        'message' : errors,
+        'suppressed' : suppressed
+    }
 
 
-def lintASN1Files (fileList):
+def lintASN1Files (fileList, exceptions):
     if len(fileList) == 0:
         logging.warning ("No files specified")
         return {}
@@ -202,7 +207,9 @@ def lintASN1Files (fileList):
     errorMap = {}
     logging.info("Checking files...")
     for f in fileList:
-        errorMap[str(f)] = lintASN1File(str(f))
+        unixf = str(f).replace('\\', '/')
+        errorMap[str(f)] = lintASN1File(str(f), exceptions[unixf] if unixf in exceptions else [])
+
     return errorMap
 
 
diff --git a/testing/lintingexceptions.py b/testing/lintingexceptions.py
deleted file mode 100644
index 423b5d3926b9427e35d876e0eceee585a4042569..0000000000000000000000000000000000000000
--- a/testing/lintingexceptions.py
+++ /dev/null
@@ -1,7 +0,0 @@
-exceptedStrings = ["D.4.4: Enumerations for UDMServingSystemMethod start at 0, not 1",
-"D.4.5: Field 'aNNodeID' in GlobalRANNodeID is an anonymous CHOICE",
-"D.4.4: Enumerations for EstablishmentStatus start at 0, not 1",
-"D.4.4: Enumerations for MMSDirection start at 0, not 1",
-"D.4.4: Enumerations for MMSReplyCharging start at 0, not 1",
-"D.4.4: Enumerations for MMStatusExtension start at 0, not 1",
-"D.4.4: Enumerations for RequestIndication start at 0, not 1"]
diff --git a/testing/parse_asn1.py b/testing/parse_asn1.py
deleted file mode 100644
index 1602bfb96a6a4b61fa8dfd2e42745aa792456594..0000000000000000000000000000000000000000
--- a/testing/parse_asn1.py
+++ /dev/null
@@ -1,40 +0,0 @@
-import logging
-
-from asn1tools import parse_files, ParseError
-from pathlib import Path
-
-from pprint import pprint
-
-ignoreReleases = {'33108' : [f'r{i}' for i in range(5, 16)],
-                  '33128' : [] }
-
-if __name__ == '__main__':
-    fileList = list(Path(".").rglob("*.asn1")) + list(Path(".").rglob("*.asn"))
-
-    ignoredFiles = [file for file in fileList if file.parts[1] in ignoreReleases[file.parts[0]]]
-    logging.info(f"Ignoring {len(ignoredFiles)} files")
-    logging.debug(ignoredFiles)
-
-    fileList = [file for file in fileList if file not in ignoredFiles]
-
-    if len(fileList) == 0:
-        logging.warning ("No files specified")
-        exit(0)
-
-    print ("ASN.1 Parser checks:")
-    print ("-----------------------------")
-    logging.info("Parsing files...")
-    errorCount = 0
-    for f in fileList:
-        try:
-            parse_files(str(f))
-        except ParseError as ex:
-            logging.info (f" {f}: Failed - {ex!r}")
-            print (f" {f}: Failed - {ex!r}")
-            errorCount += 1
-            continue
-        print (f" {f}: OK")
-    print ("-----------------------------")
-    print (f"Parse errors: {errorCount}")
-    print ("-----------------------------")
-    exit(errorCount)
diff --git a/testing/check_xsd.py b/testing/xsd_process.py
similarity index 91%
rename from testing/check_xsd.py
rename to testing/xsd_process.py
index bbe8a72afa4824f409a90941af00fe244addc42f..a7dcd617d7e9dd8550d7cd49c32de571cb5d716b 100644
--- a/testing/check_xsd.py
+++ b/testing/xsd_process.py
@@ -1,12 +1,7 @@
 import logging
-logging.basicConfig(level=logging.INFO)
-import glob
-import sys
 from pathlib import Path
-from pprint import pprint
-from lxml import etree
-from xml.etree.ElementTree import ParseError
+from xmlschema.etree import etree_tostring
 from xmlschema import XMLSchema, XMLSchemaParseError
 
 
 
@@ -22,7 +17,7 @@ def BuildSchemaDictonary (fileList):
             xs = XMLSchema(schemaFile, validation='skip')
             schemaLocations.append((xs.default_namespace, str(Path(schemaFile).resolve())))
             logging.info(" [ {0} -> {1} ]".format(xs.default_namespace, schemaFile))
-        except ParseError as ex:
+        except XMLSchemaParseError as ex:
             logging.warning (" [ {0} failed to parse: {1} ]".format(schemaFile, ex))
     return schemaLocations
 
@@ -47,13 +42,14 @@ def ValidateXSDFiles (fileList):
     logging.info("Schema validation:")
     for schemaFile in fileList:
         try:
-            schema = XMLSchema(schemaFile, locations = schemaLocations)
+            schema = XMLSchema(schemaFile, locations = schemaLocations, validation="lax")
             logging.info(schemaFile + ": OK")
-            errors[schemaFile] = []
+            errors[schemaFile] = [f"{etree_tostring(e.elem, e.namespaces, ' ', 20)} - {e.message}" for e in schema.all_errors]
         except XMLSchemaParseError as ex:
             logging.warning(schemaFile + ": Failed validation ({0})".format(ex.message))
             if (ex.schema_url) and (ex.schema_url != ex.origin_url):
                 logging.warning(" Error comes from {0}, suppressing".format(ex.schema_url))
+                errors[schemaFile] = []
             else:
                 errors[schemaFile] = [ex]
     return errors
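
The checks wired into .gitlab-ci.yml above can also be reproduced outside the pipeline by composing the helpers added in testing/asn_process.py. The following is a minimal, illustrative sketch only (not part of this change); it assumes asn1c and the pycrate/xmlschema Python packages are installed and that it is run from the repository root:

    # local_asn_check.py -- illustrative sketch, not included in this patch
    import sys
    from pathlib import Path

    sys.path.insert(0, "testing")            # make asn_process/lint_asn1 importable
    from asn_process import syntaxCheckASN, processResults

    # Mirror the process_asn CI job: syntax-check every ASN.1 file found in the
    # repository, then print the per-file results and exit non-zero on failure.
    files = list(Path(".").rglob("*.asn1")) + list(Path(".").rglob("*.asn"))
    results = syntaxCheckASN(files)          # requires asn1c on the PATH
    sys.exit(1 if processResults(results, "Parsing") > 0 else 0)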