Commit 0e16f822 authored by canterburym's avatar canterburym
Browse files

Merge branch 'feature/linter' into 'rel16'


See merge request 3GPP/SA3LI!12

(cherry picked from commit 5e3a533d)

a5e10368 Update check_asn1
c6489eb4 Correct glob pattern
236a6f12 Fixes path error
ba83afab Adds linter
6482e2db Updated XSD checking
35fb1716 Remove log config
3dbbca04 Includes linter in CI/CD pipeline
parent 41731e11
Pipeline #1785 failed
# Reconstructed new side of the CI diff: the merge adds a linter job
# (allow_failure so drafting-rule warnings don't block the pipeline).
checkASN1:
  stage: Check Schemas
  script:
    - python3 testing/

checkLint:
  stage: Check Schemas
  script:
    - python3 testing/
  allow_failure: true

checkXSD:
  stage: Check XSD
  script:
import logging
import sys
from glob import glob
from pathlib import Path
from pprint import pprint

from asn1tools import parse_files, compile_dict, ParseError, CompileError
def parseASN1File (asnFile):
    """Parse a single ASN.1 file.

    Returns a list containing the ParseError on failure, or an empty
    list when the file parses cleanly.
    """
    try:
        # The try body was lost in the scrape; parsing the single file is
        # the only operation the except clause can be guarding.
        parse_files(asnFile)
    except ParseError as ex:
        return [ex]
    return []
def parseASN1Files (fileList):
if len(fileList) == 0:
logging.warning ("No files specified")
return {}
errors = {}"Parsing files...")
for f in fileList:
ex = parseASN1File(f)
if ex: (f" {f}: Failed - {ex!r}")
else: (f" {f}: OK")
errors[f] = ex
return errors
schemaFileGlob = glob("*.asn1")
for schemaFile in schemaFileGlob: def compileASN1Files (fileList):"Compiling files...")
errors = []
try: try:
print("Checking file: {0}".format(schemaFile), end="") d = parse_files(fileList)
parse_files(schemaFile) for modulename, module in d.items():
print(" OK") # Weird fix because the compiler doesn't like RELATIVE-OID as a type
# Not sure if the on-the-wire encoding would be affected or not
# but for most checking purposes this doesn't matter
module['types']["RELATIVE-OID"] = {'type' : 'OBJECT IDENTIFIER'}
c = compile_dict(d)
except CompileError as ex: (f"Compiler error: {ex}")
except ParseError as ex: except ParseError as ex:
sys.exit("ASN1 parser error: " + str(ex)) (f"Parse error: {ex}")
errors.append(ex) ("Compiled OK")
return errors
def validateASN1Files (fileList):
    """Run both the parse and compile checks over fileList.

    Returns (parseErrors, compileErrors): a per-file dict and a flat list.
    """
    parseErrors = parseASN1Files(fileList)
    # The compile step is attempted even when parse errors exist: the
    # compiler re-parses, so its own error reporting still fires.
    compileErrors = compileASN1Files(fileList)
    return parseErrors, compileErrors
def validateAllASN1FilesInPath (path):
    """Validate every *.asn1 file directly under path."""
    globPattern = str(Path(path)) + '/*.asn1' ("Searching: " + globPattern)
    # NOTE(review): recursive=True has no effect without '**' in the
    # pattern, so only the top level of `path` is searched — confirm intent.
    schemaGlob = glob(globPattern, recursive=True)
    return validateASN1Files(schemaGlob)
if __name__ == '__main__':
    parseErrors, compileErrors = validateAllASN1FilesInPath("./")
    parseErrorCount = 0
    print ("ASN.1 Parser checks:")
    print ("-----------------------------")
    for filename, errors in parseErrors.items():
        if len(errors) > 0:
            parseErrorCount += len(errors)
            # The scrape rendered this placeholder as "(unknown)"; the
            # per-file report clearly needs the filename here.
            print (f"{filename}: {len(errors)} errors")
            for error in errors:
                print ("  " + str(error))
        else:
            print (f"{filename}: OK")
    print ("-----------------------------")
    print ("ASN.1 Compilation:")
    print ("-----------------------------")
    if len(compileErrors) > 0:
        for error in compileErrors:
            print ("  " + str(error))
    else:
        print ("Compilation OK")
    print ("-----------------------------")
    print (f"{parseErrorCount} parse errors, {len(compileErrors)} compile errors")
    # The OS truncates exit status to 8 bits; clamp so 256+ errors cannot
    # wrap around to 0 and look like success in CI.
    sys.exit(min(parseErrorCount + len(compileErrors), 255))
import logging
import glob
import sys
from pathlib import Path
from pprint import pprint

from lxml import etree
from xml.etree.ElementTree import ParseError
from xmlschema import XMLSchema, XMLSchemaParseError
def BuildSchemaDictonary (fileList):
if len(fileList) == 0:"No schema files provided")
return []"Schema locations:")
schemaLocations = []
for schemaFile in fileList:
xs = XMLSchema(schemaFile, validation='skip')
schemaLocations.append((xs.default_namespace, str(Path(schemaFile).resolve())))" [ {0} -> {1} ]".format(xs.default_namespace, schemaFile))
except ParseError as ex:
logging.warning (" [ {0} failed to parse: {1} ]".format(schemaFile, ex))
return schemaLocations
def BuildSchema (coreFile, fileList = None):
    """Construct an XMLSchema for coreFile.

    When supporting schema files are supplied, their namespace locations
    are pre-resolved so cross-schema imports can be satisfied.
    """
    supportingFiles = fileList if fileList else []
    locationHints = BuildSchemaDictonary(supportingFiles) if len(supportingFiles) > 0 else []
    return XMLSchema(str(Path(coreFile)), locations=locationHints)
if sys.version_info <= (3, 5):
sys.exit('ERROR: You need at least Python 3.5 to run this tool')
try: def ValidateXSDFiles (fileList):
from lxml import etree if len(fileList) == 0:
except ImportError:"No schema files provided")
sys.exit('ERROR: You need to install the Python lxml library') return {}
schemaLocations = BuildSchemaDictonary(fileList)
errors = {}
try:"Schema validation:")
import xmlschema for schemaFile in fileList:
except ImportError: try:
sys.exit('ERROR: You need to install the xml schema library') schema = XMLSchema(schemaFile, locations = schemaLocations) + ": OK")
errors[schemaFile] = []
except XMLSchemaParseError as ex:
logging.warning(schemaFile + ": Failed validation ({0})".format(ex.message))
if (ex.schema_url) and (ex.schema_url != ex.origin_url):
logging.warning(" Error comes from {0}, suppressing".format(ex.schema_url))
errors[schemaFile] = [ex]
return errors
def ValidateAllXSDFilesInPath (path):
    """Validate every *.xsd file directly under path."""
    globPattern = str(Path(path)) + '/*.xsd' ("Searching: " + globPattern)
    # NOTE(review): recursive=True is inert without '**' in the pattern.
    schemaGlob = glob.glob(globPattern, recursive=True)
    return ValidateXSDFiles(schemaGlob)
def ValidateInstanceDocuments (coreFile, supportingSchemas, instanceDocs):
if (instanceDocs is None) or len(instanceDocs) == 0:
logging.warning ("No instance documents provided")
return []
schema = BuildSchema(coreFile, supportingSchemas)
errors = []
for instanceDoc in instanceDocs:
schema.validate(instanceDoc) ("{0} passed validation".format(instanceDoc))
except Exception as ex:
logging.error ("{0} failed validation: {1}".format(instanceDoc, ex))
return errors
if __name__ == '__main__':
    results = ValidateAllXSDFilesInPath("./")
    print ("XSD validation checks:")
    print ("-----------------------------")
    errorCount = 0
    for fileName, errors in results.items():
        if len(errors) > 0:
            errorCount += len(errors)
            print (f"  {fileName}: {len(errors)} errors")
            for error in errors:
                # Schema-parse errors carry a short message; fall back to
                # str() for anything else.
                if isinstance(error, XMLSchemaParseError):
                    print (error.msg)
                else:
                    print (f"  {str(error)}")
        else:
            print (f"  {fileName}: OK")
    print ("-----------------------------")
    print (f"{errorCount} errors detected")
import functools
import logging
import string
from glob import glob
from pathlib import Path
from pprint import pprint

from asn1tools import parse_files, compile_dict, ParseError, CompileError
# Registries of decorated linting tests, keyed by the scope they run at.
moduleLevelTests = []
typeLevelTests = []
fileLevelTests = []

def lintingTest (testName, testKind, testDescription):
    """Decorator factory registering a linting test.

    testKind selects the registry ("file", "module" or "type"). The
    wrapper stamps every failure dict the test returns with the test's
    name, kind and description.
    """
    def decorate (func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            logging.debug (f"  Running test {testName}")
            errors = func(*args, **kwargs)
            for error in errors:
                error['testName'] = testName
                error['testKind'] = testKind
                error['testDescription'] = testDescription
            return errors
        # Registration calls were lost in the scrape; each kind clearly
        # appends the wrapper to the matching registry above.
        if (testKind == "type"):
            typeLevelTests.append(wrapper)
        if (testKind == "module"):
            moduleLevelTests.append(wrapper)
        if (testKind == "file"):
            fileLevelTests.append(wrapper)
        return wrapper
    return decorate
def formatFailure(f):
    """Render a failure record as 'testName: message'."""
    return "{0}: {1}".format(f['testName'], f['message'])
def appendFailure(failures, context, newFailure):
combinedFailure = {**context, **newFailure} (f"Test Failure: {combinedFailure}")
# File level tests
@lintingTest(testName = "D.4.9",
             testKind = "file",
             testDescription = "Fields, tags, types and flags are space aligned")
def D49 (fileLines, context):
    """Flag any line containing a tab character.

    Renamed from D41 (the original reused that name for three different
    tests); registration happens via the decorator, so nothing refers to
    the function name. Line numbers are reported 1-based, matching D.4.11.
    """
    errors = []
    for lineNumber, line in enumerate(fileLines):
        if '\t' in line:
            appendFailure(errors, context, { "line" : lineNumber + 1,
                                             "message" : f"Line {lineNumber + 1} contains tab characters"})
    return errors
@lintingTest(testName = "D.4.11",
             testKind = "file",
             testDescription = "Braces are given their own line")
def D41 (fileLines, context):
    # Flags lines that contain a brace plus other content. A brace is
    # "on its own line" when the stripped line, commas removed, is just
    # '{' or '}'. Three idiomatic ASN.1 constructs are exempted below.
    errors = []
    for lineNumber, line in enumerate(fileLines):
        if ('{' in line and line.strip().replace(",","") != '{') or ('}' in line and line.strip().replace(",","") != '}'):
            # OID value assignments and inline OID/RELATIVE-OID types
            # legitimately keep braces on one line.
            if "itu-t(0)" in line: continue
            if "OBJECT IDENTIFIER" in line: continue
            if "RELATIVE-OID" in line: continue
            # Line numbers reported 1-based for human readers.
            appendFailure(errors, context, { "line" : lineNumber + 1,
                                             "message" : f"Line {lineNumber + 1} contains a brace but also other characters ('{line}')"})
    return errors
# Module level tests
@lintingTest(testName = "D.4.1",
             testKind = "module",
             testDescription = "EXTENSIBILITY IMPLIED directive set")
def D41 (module, context):
    # Fails when the parsed module dict either lacks the
    # 'extensibility-implied' key or has it explicitly set to False.
    errors = []
    if (not ('extensibility-implied' in module.keys()) or (module['extensibility-implied'] == False)):
        appendFailure(errors, context, {"message" : "EXTENSIBILITY IMPLIED directive not set"})
    return errors
@lintingTest(testName = "D.4.2",
             testKind = "module",
             testDescription = "AUTOMATIC TAGS not used")
def D42(module, context):
    """Record a failure when the module declares AUTOMATIC TAGS."""
    failures = []
    usesAutomaticTags = module['tags'] == 'AUTOMATIC'
    if usesAutomaticTags:
        appendFailure(failures, context, {"message" : "AUTOMATIC TAGS directive used"})
    return failures
# Type level tests
@lintingTest(testName = "D.3.4",
             testKind = "type",
             testDescription = "Field names only contain characters A-Z, a-z, 0-9")
def D34(t, context):
    # Types without members (e.g. primitives) have nothing to check.
    if not 'members' in t.keys():
        logging.debug (f"  D34 ignoring {context['module']} '{context['type']}' as it has no members")
        return []
    errors = []
    for m in t['members']:
        logging.debug (f"  D34 checking member {m}")
        # Collect the distinct characters outside [A-Za-z0-9].
        badLetters = list(set([letter for letter in m['name'] if not ((letter in string.ascii_letters) or (letter in string.digits)) ]))
        if len(badLetters) > 0:
            appendFailure (errors, context, { "field" : m['name'],
                                              "message" : f"Field '{m['name']}' contains disallowed characters {badLetters!r}"})
    return errors
@lintingTest(testName = "D.4.3",
             testKind = "type",
             testDescription = "Tag numbers start at one")
def D43 (t, context):
    """Check the first member of a SEQUENCE/CHOICE carries tag [1].

    NOTE(review): the original description said "start at zero" while both
    the check and the failure message use 1; aligned the description with
    the code — confirm against the drafting rules.
    """
    errors = []
    if (t['type'] == 'SEQUENCE') or (t['type'] == 'CHOICE'):
        members = t.get('members') or []
        # Guard against empty member lists and untagged/extension-marker
        # first members, which would otherwise raise.
        if members and members[0] and members[0].get('tag'):
            firstTag = members[0]['tag']['number']
            if firstTag != 1:
                appendFailure (errors, context, {"message" : f"Tag numbers for {context['type']} start at {firstTag}, not 1"})
    return errors
@lintingTest(testName = "D.4.4",
             testKind = "type",
             testDescription = "Enumerations start at one")
def D44 (t, context):
    """Check the first enumeration value is 1.

    NOTE(review): the original description said "start at zero" while both
    the check and the failure message use 1; aligned the description with
    the code — confirm against the drafting rules.
    """
    errors = []
    if t['type'] == 'ENUMERATED':
        values = t.get('values') or []
        # Guard against empty value lists and extension markers.
        if values and values[0] and values[0][1] != 1:
            appendFailure(errors, context, { "message" : f"Enumerations for {context['type']} start at {values[0][1]}, not 1"})
    return errors
@lintingTest(testName = "D.4.5",
             testKind = "type",
             testDescription = "No anonymous types")
def checkD45 (t, context):
    # Types without members (e.g. primitives) cannot contain anonymous types.
    if not 'members' in t:
        logging.debug (f"  D45: No members in type {context['type']}, ignoring")
        return []
    errors = []
    for m in t['members']:
        # A member whose type is a structured kind (rather than a named
        # reference) is an inline anonymous definition.
        if m['type'] in ['ENUMERATED','SEQUENCE','CHOICE', 'SET']:
            appendFailure(errors, context, { "field" : m['name'],
                                             "message" : f"Field '{m['name']}' in {context['type']} is an anonymous {m['type']}"})
    return errors
def lintASN1File (asnFile):
errors = []
context = {'file' : asnFile}
try: ("Checking file {0}...".format(asnFile))
with open(asnFile) as f:
s =
for test in fileLevelTests:
errors += test(s, context)
d = parse_files(asnFile)
for moduleName, module in d.items(): (" Checking module {0}".format(moduleName))
for test in moduleLevelTests:
context['module'] = moduleName
errors += test(module, context)
for typeName, typeDef in module['types'].items():
context['type'] = typeName
context['module'] = moduleName
for test in typeLevelTests:
errors += test(typeDef, context)
except ParseError as ex:
logging.error("ParseError: {0}".format(ex))
return ["ParseError: {0}".format(ex)]
return errors
def lintASN1Files (fileList):
    """Lint each file in fileList; returns {filename: [failures]}.

    The empty-input case now returns {} instead of [] so callers can
    always iterate .items() on the result.
    """
    if len(fileList) == 0:
        logging.warning ("No files specified")
        return {}
    errorMap = {} ("Checking files...")
    for f in fileList:
        errorMap[f] = lintASN1File(f)
    return errorMap
def lintAllASN1FilesInPath (path):
    """Lint every *.asn1 file directly under path."""
    globPattern = str(Path(path)) + '/*.asn1' ("Searching: " + globPattern)
    # NOTE(review): recursive=True is inert without '**' in the pattern.
    schemaGlob = glob(globPattern, recursive=True)
    return lintASN1Files(schemaGlob)
if __name__ == '__main__':
    # Lint every ASN.1 file in the working directory and print a
    # per-file report followed by the total non-compliance count.
    result = lintAllASN1FilesInPath("./")
    totalErrors = 0
    print ("Drafting rule checks:")
    print ("-----------------------------")
    for filename, results in result.items():
        print ("{0}: {1}".format(filename, "OK" if len(results) == 0 else "{0} errors detected".format(len(results))))
        for error in results:
            print("  " + formatFailure(error))
        totalErrors += len(results)
    print ("-----------------------------")
    print ("{0} non-compliances detected".format(totalErrors))
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment