An attempt at getting image data back
@@ -0,0 +1,43 @@
# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

group("mojom") {
  data = [
    "__init__.py",
    "error.py",
    "fileutil.py",
    "generate/__init__.py",
    "generate/check.py",
    "generate/generator.py",
    "generate/module.py",
    "generate/pack.py",
    "generate/template_expander.py",
    "generate/translate.py",
    "parse/__init__.py",
    "parse/ast.py",
    "parse/conditional_features.py",
    "parse/lexer.py",
    "parse/parser.py",

    # Third-party module dependencies
    "//third_party/jinja2/",
    "//third_party/ply/",
  ]
}

group("tests") {
  data = [
    "fileutil_unittest.py",
    "generate/generator_unittest.py",
    "generate/module_unittest.py",
    "generate/pack_unittest.py",
    "generate/translate_unittest.py",
    "parse/ast_unittest.py",
    "parse/conditional_features_unittest.py",
    "parse/lexer_unittest.py",
    "parse/parser_unittest.py",
  ]

  public_deps = [ ":mojom" ]
}
@@ -0,0 +1,28 @@
# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.


class Error(Exception):
  """Base class for Mojo IDL bindings parser/generator errors."""

  def __init__(self, filename, message, lineno=None, addenda=None, **kwargs):
    """|filename| is the (primary) file which caused the error, |message| is the
    error message, |lineno| is the 1-based line number (or |None| if not
    applicable/available), and |addenda| is a list of additional lines to append
    to the final error message."""
    Exception.__init__(self, **kwargs)
    self.filename = filename
    self.message = message
    self.lineno = lineno
    self.addenda = addenda

  def __str__(self):
    if self.lineno:
      s = "%s:%d: Error: %s" % (self.filename, self.lineno, self.message)
    else:
      s = "%s: Error: %s" % (self.filename, self.message)
    return "\n".join([s] + self.addenda) if self.addenda else s

  def __repr__(self):
    return str(self)
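A usage sketch (illustrative only; the file name, message, line number and addenda below are hypothetical, not from this change) showing the formatting produced by Error.__str__():

# Hypothetical example of how a parser/checker might raise and report an Error.
err = Error("foo.mojom", "unexpected token", lineno=12,
            addenda=["  while parsing struct Foo"])
print(err)
# foo.mojom:12: Error: unexpected token
#   while parsing struct Foo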
@@ -0,0 +1,44 @@
# Copyright 2015 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import errno
import os.path
import sys


def _GetDirAbove(dirname):
  """Returns the directory "above" this file containing |dirname| (which must
  also be "above" this file)."""
  path = os.path.abspath(__file__)
  while True:
    path, tail = os.path.split(path)
    if not tail:
      return None
    if tail == dirname:
      return path


def EnsureDirectoryExists(path, always_try_to_create=False):
  """A wrapper for os.makedirs that does not error if the directory already
  exists. A different process could be racing to create this directory."""

  if not os.path.exists(path) or always_try_to_create:
    try:
      os.makedirs(path)
    except OSError as e:
      # There may have been a race to create this directory.
      if e.errno != errno.EEXIST:
        raise


def AddLocalRepoThirdPartyDirToModulePath():
  """Helper function to find the top-level directory of this script's repository
  assuming the script falls somewhere within a 'mojo' directory, and insert the
  top-level 'third_party' directory early in the module search path. Used to
  ensure that third-party dependencies provided within the repository itself
  (e.g. Chromium sources include snapshots of jinja2 and ply) are preferred over
  locally installed system library packages."""
  toplevel_dir = _GetDirAbove('mojo')
  if toplevel_dir:
    sys.path.insert(1, os.path.join(toplevel_dir, 'third_party'))
@@ -0,0 +1,37 @@
# Copyright 2015 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os.path
import shutil
import tempfile
import unittest

from mojom import fileutil

class FileUtilTest(unittest.TestCase):
  def testEnsureDirectoryExists(self):
    """Test that EnsureDirectoryExists functions correctly."""

    temp_dir = tempfile.mkdtemp()
    try:
      self.assertTrue(os.path.exists(temp_dir))

      # Directory does not exist, yet.
      full = os.path.join(temp_dir, "foo", "bar")
      self.assertFalse(os.path.exists(full))

      # Create the directory.
      fileutil.EnsureDirectoryExists(full)
      self.assertTrue(os.path.exists(full))

      # Trying to create it again does not cause an error.
      fileutil.EnsureDirectoryExists(full)
      self.assertTrue(os.path.exists(full))

      # Bypass check for directory existence to tickle error handling that
      # occurs in response to a race.
      fileutil.EnsureDirectoryExists(full, always_try_to_create=True)
      self.assertTrue(os.path.exists(full))
    finally:
      shutil.rmtree(temp_dir)
@@ -0,0 +1,26 @@
# Copyright 2022 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Code shared by the various pre-generation mojom checkers."""


class CheckException(Exception):
  def __init__(self, module, message):
    self.module = module
    self.message = message
    super().__init__(self.message)

  def __str__(self):
    return "Failed mojo pre-generation check for {}:\n{}".format(
        self.module.path, self.message)


class Check:
  def __init__(self, module):
    self.module = module

  def CheckModule(self):
    """ Subclass should return True if its Checks pass, and throw an
    exception otherwise. CheckModule will be called immediately before
    mojom.generate.Generator.GenerateFiles()"""
    raise NotImplementedError("Subclasses must override/implement this method")
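An illustrative sketch of the subclassing contract described above; the checker name and the rule it enforces are hypothetical, but the CheckModule/CheckException usage follows the classes defined in this file:

class NoEmptyModuleCheck(Check):
  # Hypothetical checker: rejects modules that define no interfaces.
  def CheckModule(self):
    if not self.module.interfaces:
      raise CheckException(self.module, "module defines no interfaces")
    return True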
@@ -0,0 +1,328 @@
# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Code shared by the various language-specific code generators."""

from __future__ import print_function

from functools import partial
import os.path
import re

from mojom import fileutil
from mojom.generate import module as mojom
from mojom.generate import pack


def ExpectedArraySize(kind):
  if mojom.IsArrayKind(kind):
    return kind.length
  return None


def SplitCamelCase(identifier):
  """Splits a camel-cased |identifier| and returns a list of lower-cased
  strings.
  """
  # Add underscores after uppercase letters when appropriate. An uppercase
  # letter is considered the end of a word if it is followed by an upper and a
  # lower. E.g. URLLoaderFactory -> URL_LoaderFactory
  identifier = re.sub('([A-Z][0-9]*)(?=[A-Z][0-9]*[a-z])', r'\1_', identifier)
  # Add underscores after lowercase letters when appropriate. A lowercase letter
  # is considered the end of a word if it is followed by an upper.
  # E.g. URLLoaderFactory -> URLLoader_Factory
  identifier = re.sub('([a-z][0-9]*)(?=[A-Z])', r'\1_', identifier)
  return [x.lower() for x in identifier.split('_')]


def ToCamel(identifier, lower_initial=False, digits_split=False, delimiter='_'):
  """Splits |identifier| using |delimiter|, makes the first character of each
  word uppercased (but makes the first character of the first word lowercased
  if |lower_initial| is set to True), and joins the words. Please note that for
  each word, all the characters except the first one are untouched.
  """
  result = ''
  capitalize_next = True
  for i in range(len(identifier)):
    if identifier[i] == delimiter:
      capitalize_next = True
    elif digits_split and identifier[i].isdigit():
      capitalize_next = True
      result += identifier[i]
    elif capitalize_next:
      capitalize_next = False
      result += identifier[i].upper()
    else:
      result += identifier[i]

  if lower_initial and result:
    result = result[0].lower() + result[1:]

  return result


def _ToSnakeCase(identifier, upper=False):
  """Splits camel-cased |identifier| into lower case words, removes the first
  word if it's "k" and joins them using "_" e.g. for "URLLoaderFactory", returns
  "URL_LOADER_FACTORY" if upper, otherwise "url_loader_factory".
  """
  words = SplitCamelCase(identifier)
  if words[0] == 'k' and len(words) > 1:
    words = words[1:]

  # Variables cannot start with a digit
  if (words[0][0].isdigit()):
    words[0] = '_' + words[0]


  if upper:
    words = map(lambda x: x.upper(), words)

  return '_'.join(words)


def ToUpperSnakeCase(identifier):
  """Splits camel-cased |identifier| into lower case words, removes the first
  word if it's "k" and joins them using "_" e.g. for "URLLoaderFactory", returns
  "URL_LOADER_FACTORY".
  """
  return _ToSnakeCase(identifier, upper=True)


def ToLowerSnakeCase(identifier):
  """Splits camel-cased |identifier| into lower case words, removes the first
  word if it's "k" and joins them using "_" e.g. for "URLLoaderFactory", returns
  "url_loader_factory".
  """
  return _ToSnakeCase(identifier, upper=False)


class Stylizer:
  """Stylizers specify naming rules to map mojom names to names in generated
  code. For example, if you would like method_name in mojom to be mapped to
  MethodName in the generated code, you need to define a subclass of Stylizer
  and override StylizeMethod to do the conversion."""

  def StylizeConstant(self, mojom_name):
    return mojom_name

  def StylizeField(self, mojom_name):
    return mojom_name

  def StylizeStruct(self, mojom_name):
    return mojom_name

  def StylizeUnion(self, mojom_name):
    return mojom_name

  def StylizeParameter(self, mojom_name):
    return mojom_name

  def StylizeMethod(self, mojom_name):
    return mojom_name

  def StylizeInterface(self, mojom_name):
    return mojom_name

  def StylizeEnumField(self, mojom_name):
    return mojom_name

  def StylizeEnum(self, mojom_name):
    return mojom_name

  def StylizeFeature(self, mojom_name):
    return mojom_name

  def StylizeModule(self, mojom_namespace):
    return mojom_namespace
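As the docstring above suggests, a language backend supplies its naming rules by overriding individual Stylize* hooks. A minimal sketch (the class is hypothetical; ToUpperSnakeCase and ToCamel are the helpers defined earlier in this file):

class ExampleStylizer(Stylizer):
  # Hypothetical: constants as UPPER_SNAKE_CASE, methods as lowerCamelCase.
  def StylizeConstant(self, mojom_name):
    return ToUpperSnakeCase(mojom_name)

  def StylizeMethod(self, mojom_name):
    return ToCamel(mojom_name, lower_initial=True)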
def WriteFile(contents, full_path):
  # If |contents| is same with the file content, we skip updating.
  if not isinstance(contents, bytes):
    data = contents.encode('utf8')
  else:
    data = contents

  if os.path.isfile(full_path):
    with open(full_path, 'rb') as destination_file:
      if destination_file.read() == data:
        return

  # Make sure the containing directory exists.
  full_dir = os.path.dirname(full_path)
  fileutil.EnsureDirectoryExists(full_dir)

  # Dump the data to disk.
  with open(full_path, 'wb') as f:
    f.write(data)


def AddComputedData(module):
  """Adds computed data to the given module. The data is computed once and
  used repeatedly in the generation process."""

  def _AddStructComputedData(exported, struct):
    struct.packed = pack.PackedStruct(struct)
    struct.bytes = pack.GetByteLayout(struct.packed)
    struct.versions = pack.GetVersionInfo(struct.packed)
    struct.exported = exported

  def _AddInterfaceComputedData(interface):
    interface.version = 0
    for method in interface.methods:
      # this field is never scrambled
      method.sequential_ordinal = method.ordinal

      if method.min_version is not None:
        interface.version = max(interface.version, method.min_version)

      method.param_struct = _GetStructFromMethod(method)
      if interface.stable:
        method.param_struct.attributes[mojom.ATTRIBUTE_STABLE] = True
        if method.explicit_ordinal is None:
          raise Exception(
              'Stable interfaces must declare explicit method ordinals. The '
              'method %s on stable interface %s does not declare an explicit '
              'ordinal.' % (method.mojom_name, interface.qualified_name))
      interface.version = max(interface.version,
                              method.param_struct.versions[-1].version)

      if method.response_parameters is not None:
        method.response_param_struct = _GetResponseStructFromMethod(method)
        if interface.stable:
          method.response_param_struct.attributes[mojom.ATTRIBUTE_STABLE] = True
          interface.version = max(
              interface.version,
              method.response_param_struct.versions[-1].version)
      else:
        method.response_param_struct = None

  def _GetStructFromMethod(method):
    """Converts a method's parameters into the fields of a struct."""
    params_class = "%s_%s_Params" % (method.interface.mojom_name,
                                     method.mojom_name)
    struct = mojom.Struct(params_class,
                          module=method.interface.module,
                          attributes={})
    for param in method.parameters:
      struct.AddField(
          param.mojom_name,
          param.kind,
          param.ordinal,
          attributes=param.attributes)
    _AddStructComputedData(False, struct)
    return struct

  def _GetResponseStructFromMethod(method):
    """Converts a method's response_parameters into the fields of a struct."""
    params_class = "%s_%s_ResponseParams" % (method.interface.mojom_name,
                                             method.mojom_name)
    struct = mojom.Struct(params_class,
                          module=method.interface.module,
                          attributes={})
    for param in method.response_parameters:
      struct.AddField(
          param.mojom_name,
          param.kind,
          param.ordinal,
          attributes=param.attributes)
    _AddStructComputedData(False, struct)
    return struct

  for struct in module.structs:
    _AddStructComputedData(True, struct)
  for interface in module.interfaces:
    _AddInterfaceComputedData(interface)


class Generator:
  # Pass |output_dir| to emit files to disk. Omit |output_dir| to echo all
  # files to stdout.
  def __init__(self,
               module,
               output_dir=None,
               typemap=None,
               variant=None,
               bytecode_path=None,
               for_blink=False,
               js_generate_struct_deserializers=False,
               export_attribute=None,
               export_header=None,
               generate_non_variant_code=False,
               support_lazy_serialization=False,
               disallow_native_types=False,
               disallow_interfaces=False,
               generate_message_ids=False,
               generate_fuzzing=False,
               enable_kythe_annotations=False,
               extra_cpp_template_paths=None,
               generate_extra_cpp_only=False):
    self.module = module
    self.output_dir = output_dir
    self.typemap = typemap or {}
    self.variant = variant
    self.bytecode_path = bytecode_path
    self.for_blink = for_blink
    self.js_generate_struct_deserializers = js_generate_struct_deserializers
    self.export_attribute = export_attribute
    self.export_header = export_header
    self.generate_non_variant_code = generate_non_variant_code
    self.support_lazy_serialization = support_lazy_serialization
    self.disallow_native_types = disallow_native_types
    self.disallow_interfaces = disallow_interfaces
    self.generate_message_ids = generate_message_ids
    self.generate_fuzzing = generate_fuzzing
    self.enable_kythe_annotations = enable_kythe_annotations
    self.extra_cpp_template_paths = extra_cpp_template_paths
    self.generate_extra_cpp_only = generate_extra_cpp_only

  def Write(self, contents, filename):
    if self.output_dir is None:
      print(contents)
      return
    full_path = os.path.join(self.output_dir, filename)
    WriteFile(contents, full_path)

  def OptimizeEmpty(self, contents):
    # Look for .cc files that contain no actual code. There are many of these
    # and they collectively take a while to compile.
    lines = contents.splitlines()

    for line in lines:
      if line.startswith('#') or line.startswith('//'):
        continue
      if re.match(r'namespace .* {', line) or re.match(r'}.*//.*namespace',
                                                       line):
        continue
      if line.strip():
        # There is some actual code - return the unmodified contents.
        return contents

    # If we reach here then we have a .cc file with no actual code. The
    # includes are therefore unneeded and can be removed.
    new_lines = [line for line in lines if not line.startswith('#include')]
    if len(new_lines) < len(lines):
      new_lines.append('')
      new_lines.append('// Includes removed due to no code being generated.')
    return '\n'.join(new_lines)

  def WriteWithComment(self, contents, filename):
    generator_name = "mojom_bindings_generator.py"
    comment = r"// %s is auto generated by %s, do not edit" % (filename,
                                                               generator_name)
    contents = comment + '\n' + '\n' + contents
    if filename.endswith('.cc'):
      contents = self.OptimizeEmpty(contents)
    self.Write(contents, filename)

  def GenerateFiles(self, args):
    raise NotImplementedError("Subclasses must override/implement this method")

  def GetJinjaParameters(self):
    """Returns default constructor parameters for the jinja environment."""
    return {}

  def GetGlobals(self):
    """Returns global mappings for the template generation."""
    return {}
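A minimal sketch of how a backend might subclass Generator (the class, the output file name, and the use of Module.path are illustrative assumptions; real backends additionally drive Jinja templates, which this sketch omits):

class ListingGenerator(Generator):
  """Hypothetical generator that emits a plain listing of interface names."""

  def GenerateFiles(self, args):
    contents = "\n".join(i.mojom_name for i in self.module.interfaces) + "\n"
    # Write() prints to stdout when no output_dir was given.
    self.Write(contents, "%s.interfaces.txt" % self.module.path)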
@@ -0,0 +1,71 @@
# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import importlib.util
import os.path
import sys
import unittest

def _GetDirAbove(dirname):
  """Returns the directory "above" this file containing |dirname| (which must
  also be "above" this file)."""
  path = os.path.abspath(__file__)
  while True:
    path, tail = os.path.split(path)
    assert tail
    if tail == dirname:
      return path


try:
  importlib.util.find_spec("mojom")
except ImportError:
  sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
from mojom.generate import generator

class StringManipulationTest(unittest.TestCase):
  """generator contains some string utilities, this tests only those."""

  def testSplitCamelCase(self):
    self.assertEquals(["camel", "case"], generator.SplitCamelCase("CamelCase"))
    self.assertEquals(["url", "loader", "factory"],
                      generator.SplitCamelCase('URLLoaderFactory'))
    self.assertEquals(["get99", "entries"],
                      generator.SplitCamelCase('Get99Entries'))
    self.assertEquals(["get99entries"],
                      generator.SplitCamelCase('Get99entries'))

  def testToCamel(self):
    self.assertEquals("CamelCase", generator.ToCamel("camel_case"))
    self.assertEquals("CAMELCASE", generator.ToCamel("CAMEL_CASE"))
    self.assertEquals("camelCase",
                      generator.ToCamel("camel_case", lower_initial=True))
    self.assertEquals("CamelCase", generator.ToCamel(
        "camel case", delimiter=' '))
    self.assertEquals("CaMelCaSe", generator.ToCamel("caMel_caSe"))
    self.assertEquals("L2Tp", generator.ToCamel("l2tp", digits_split=True))
    self.assertEquals("l2tp", generator.ToCamel("l2tp", lower_initial=True))

  def testToSnakeCase(self):
    self.assertEquals("snake_case", generator.ToLowerSnakeCase("SnakeCase"))
    self.assertEquals("snake_case", generator.ToLowerSnakeCase("snakeCase"))
    self.assertEquals("snake_case", generator.ToLowerSnakeCase("SnakeCASE"))
    self.assertEquals("snake_d3d11_case",
                      generator.ToLowerSnakeCase("SnakeD3D11Case"))
    self.assertEquals("snake_d3d11_case",
                      generator.ToLowerSnakeCase("SnakeD3d11Case"))
    self.assertEquals("snake_d3d11_case",
                      generator.ToLowerSnakeCase("snakeD3d11Case"))
    self.assertEquals("SNAKE_CASE", generator.ToUpperSnakeCase("SnakeCase"))
    self.assertEquals("SNAKE_CASE", generator.ToUpperSnakeCase("snakeCase"))
    self.assertEquals("SNAKE_CASE", generator.ToUpperSnakeCase("SnakeCASE"))
    self.assertEquals("SNAKE_D3D11_CASE",
                      generator.ToUpperSnakeCase("SnakeD3D11Case"))
    self.assertEquals("SNAKE_D3D11_CASE",
                      generator.ToUpperSnakeCase("SnakeD3d11Case"))
    self.assertEquals("SNAKE_D3D11_CASE",
                      generator.ToUpperSnakeCase("snakeD3d11Case"))

if __name__ == "__main__":
  unittest.main()
File diff suppressed because it is too large
@@ -0,0 +1,31 @@
# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import sys
import unittest

from mojom.generate import module as mojom


class ModuleTest(unittest.TestCase):
  def testNonInterfaceAsInterfaceRequest(self):
    """Tests that a non-interface cannot be used for interface requests."""
    module = mojom.Module('test_module', 'test_namespace')
    struct = mojom.Struct('TestStruct', module=module)
    with self.assertRaises(Exception) as e:
      mojom.InterfaceRequest(struct)
    self.assertEquals(
        e.exception.__str__(),
        'Interface request requires \'x:TestStruct\' to be an interface.')

  def testNonInterfaceAsAssociatedInterface(self):
    """Tests that a non-interface type cannot be used for associated interfaces.
    """
    module = mojom.Module('test_module', 'test_namespace')
    struct = mojom.Struct('TestStruct', module=module)
    with self.assertRaises(Exception) as e:
      mojom.AssociatedInterface(struct)
    self.assertEquals(
        e.exception.__str__(),
        'Associated interface requires \'x:TestStruct\' to be an interface.')
@@ -0,0 +1,367 @@
# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import copy
from mojom.generate import module as mojom

# This module provides a mechanism for determining the packed order and offsets
# of a mojom.Struct.
#
# ps = pack.PackedStruct(struct)
# ps.packed_fields will access a list of PackedField objects, each of which
# will have an offset, a size and a bit (for mojom.BOOLs).

# Size of struct header in bytes: num_bytes [4B] + version [4B].
HEADER_SIZE = 8


class PackedField:
  kind_to_size = {
      mojom.BOOL: 1,
      mojom.INT8: 1,
      mojom.UINT8: 1,
      mojom.INT16: 2,
      mojom.UINT16: 2,
      mojom.INT32: 4,
      mojom.UINT32: 4,
      mojom.FLOAT: 4,
      mojom.HANDLE: 4,
      mojom.MSGPIPE: 4,
      mojom.SHAREDBUFFER: 4,
      mojom.PLATFORMHANDLE: 4,
      mojom.DCPIPE: 4,
      mojom.DPPIPE: 4,
      mojom.NULLABLE_HANDLE: 4,
      mojom.NULLABLE_MSGPIPE: 4,
      mojom.NULLABLE_SHAREDBUFFER: 4,
      mojom.NULLABLE_PLATFORMHANDLE: 4,
      mojom.NULLABLE_DCPIPE: 4,
      mojom.NULLABLE_DPPIPE: 4,
      mojom.INT64: 8,
      mojom.UINT64: 8,
      mojom.DOUBLE: 8,
      mojom.STRING: 8,
      mojom.NULLABLE_STRING: 8
  }

  @classmethod
  def GetSizeForKind(cls, kind):
    if isinstance(kind, (mojom.Array, mojom.Map, mojom.Struct, mojom.Interface,
                         mojom.AssociatedInterface, mojom.PendingRemote,
                         mojom.PendingAssociatedRemote)):
      return 8
    if isinstance(kind, mojom.Union):
      return 16
    if isinstance(kind, (mojom.InterfaceRequest, mojom.PendingReceiver)):
      kind = mojom.MSGPIPE
    if isinstance(
        kind,
        (mojom.AssociatedInterfaceRequest, mojom.PendingAssociatedReceiver)):
      return 4
    if isinstance(kind, mojom.Enum):
      # TODO(mpcomplete): what about big enums?
      return cls.kind_to_size[mojom.INT32]
    if not kind in cls.kind_to_size:
      raise Exception("Undefined type: %s. Did you forget to import the file "
                      "containing the definition?" % kind.spec)
    return cls.kind_to_size[kind]

  @classmethod
  def GetAlignmentForKind(cls, kind):
    if isinstance(kind, (mojom.Interface, mojom.AssociatedInterface,
                         mojom.PendingRemote, mojom.PendingAssociatedRemote)):
      return 4
    if isinstance(kind, mojom.Union):
      return 8
    return cls.GetSizeForKind(kind)

  def __init__(self,
               field,
               index,
               ordinal,
               original_field=None,
               sub_ordinal=None,
               linked_value_packed_field=None):
    """
    Args:
      field: the original field.
      index: the position of the original field in the struct.
      ordinal: the ordinal of the field for serialization.
      original_field: See below.
      sub_ordinal: See below.
      linked_value_packed_field: See below.

    original_field, sub_ordinal, and linked_value_packed_field are used to
    support nullable ValueKind fields. For legacy reasons, nullable ValueKind
    fields actually generate two PackedFields. This allows:

    - backwards compatibility prior to Mojo support for nullable ValueKinds.
    - correct packing of fields for the aforementioned backwards compatibility.

    When translating Fields to PackedFields, the original field is turned into
    two PackedFields: the first PackedField always has type mojom.BOOL, while
    the second PackedField has the non-nullable version of the field's kind.

    When constructing these PackedFields, original_field references the field
    as defined in the mojom; the name as defined in the mojom will be used for
    all layers above the wire/data layer.

    sub_ordinal is used to sort the two PackedFields correctly with respect to
    each other: the first mojom.BOOL field always has sub_ordinal 0, while the
    second field always has sub_ordinal 1.

    Finally, linked_value_packed_field is used by the serialization and
    deserialization helpers, which generally just iterate over a PackedStruct's
    PackedField's in ordinal order. This allows the helpers to easily reference
    any related PackedFields rather than having to lookup related PackedFields
    by index while iterating.
    """
    self.field = field
    self.index = index
    self.ordinal = ordinal
    self.original_field = original_field
    self.sub_ordinal = sub_ordinal
    self.linked_value_packed_field = linked_value_packed_field
    self.size = self.GetSizeForKind(self.field.kind)
    self.alignment = self.GetAlignmentForKind(self.field.kind)
    self.offset = None
    self.bit = None
    self.min_version = None


def GetPad(offset, alignment):
  """Returns the pad necessary to reserve space so that |offset + pad| equals to
  some multiple of |alignment|."""
  return (alignment - (offset % alignment)) % alignment
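A short worked illustration of the padding arithmetic above (values chosen for illustration only):

# GetPad(1, 4) == (4 - (1 % 4)) % 4 == 3: a 4-byte field after a 1-byte field
# starts at offset 4. An already aligned offset needs no padding.
assert GetPad(1, 4) == 3
assert GetPad(8, 8) == 0
assert GetPad(6, 2) == 0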

def GetFieldOffset(field, last_field):
  """Returns a 2-tuple of the field offset and bit (for BOOLs)."""
  if (field.field.kind == mojom.BOOL and last_field.field.kind == mojom.BOOL
      and last_field.bit < 7):
    return (last_field.offset, last_field.bit + 1)

  offset = last_field.offset + last_field.size
  pad = GetPad(offset, field.alignment)
  return (offset + pad, 0)


def GetPayloadSizeUpToField(field):
  """Returns the payload size (not including struct header) if |field| is the
  last field.
  """
  if not field:
    return 0
  offset = field.offset + field.size
  pad = GetPad(offset, 8)
  return offset + pad


def IsNullableValueKindPackedField(field):
  """Returns true if `field` is derived from a nullable ValueKind field.

  Nullable ValueKind fields often require special handling in the bindings due
  to the way the implementation is constrained for wire compatibility.
  """
  assert isinstance(field, PackedField)
  return field.sub_ordinal is not None


def IsPrimaryNullableValueKindPackedField(field):
  """Returns true if `field` is derived from a nullable ValueKind mojom field
  and is the "primary" field.

  The primary field is a bool PackedField that controls if the field should be
  considered as present or not; it will have a reference to the PackedField that
  holds the actual value representation if considered present.

  Bindings code that translates between the wire protocol and the higher layers
  can use this to simplify mapping multiple PackedFields to the single field
  that is logically exposed to bindings consumers.
  """
  assert isinstance(field, PackedField)
  return field.linked_value_packed_field is not None


class PackedStruct:
  def __init__(self, struct):
    self.struct = struct
    # |packed_fields| contains all the fields, in increasing offset order.
    self.packed_fields = []
    # |packed_fields_in_ordinal_order| refers to the same fields as
    # |packed_fields|, but in ordinal order.
    self.packed_fields_in_ordinal_order = []

    # No fields.
    if (len(struct.fields) == 0):
      return

    # Start by sorting by ordinal.
    src_fields = self.packed_fields_in_ordinal_order
    ordinal = 0
    for index, field in enumerate(struct.fields):
      if field.ordinal is not None:
        ordinal = field.ordinal
      # Nullable value types are a bit weird: they generate two PackedFields
      # despite being a single ValueKind. This is for wire compatibility to
      # ease the transition from legacy mojom syntax where nullable value types
      # were not supported.
      if isinstance(field.kind, mojom.ValueKind) and field.kind.is_nullable:
        # The suffixes intentionally use Unicode codepoints which are considered
        # valid C++/Java/JavaScript identifiers, yet are unlikely to be used in
        # actual user code.
        has_value_field = copy.copy(field)
        has_value_field.name = f'{field.mojom_name}_$flag'
        has_value_field.kind = mojom.BOOL

        value_field = copy.copy(field)
        value_field.name = f'{field.mojom_name}_$value'
        value_field.kind = field.kind.MakeUnnullableKind()

        value_packed_field = PackedField(value_field,
                                         index,
                                         ordinal,
                                         original_field=field,
                                         sub_ordinal=1,
                                         linked_value_packed_field=None)
        has_value_packed_field = PackedField(
            has_value_field,
            index,
            ordinal,
            original_field=field,
            sub_ordinal=0,
            linked_value_packed_field=value_packed_field)
        src_fields.append(has_value_packed_field)
        src_fields.append(value_packed_field)
      else:
        src_fields.append(PackedField(field, index, ordinal))
      ordinal += 1
    src_fields.sort(key=lambda field: (field.ordinal, field.sub_ordinal))

    # Set |min_version| for each field.
    next_min_version = 0
    for packed_field in src_fields:
      if packed_field.field.min_version is None:
        assert next_min_version == 0
      else:
        assert packed_field.field.min_version >= next_min_version
        next_min_version = packed_field.field.min_version
      packed_field.min_version = next_min_version

      if (packed_field.min_version != 0
          and mojom.IsReferenceKind(packed_field.field.kind)
          and not packed_field.field.kind.is_nullable):
        raise Exception(
            "Non-nullable reference fields are only allowed in version 0 of a "
            "struct. %s.%s is defined with [MinVersion=%d]." %
            (self.struct.name, packed_field.field.name,
             packed_field.min_version))

    src_field = src_fields[0]
    src_field.offset = 0
    src_field.bit = 0
    dst_fields = self.packed_fields
    dst_fields.append(src_field)

    # Then find first slot that each field will fit.
    for src_field in src_fields[1:]:
      last_field = dst_fields[0]
      for i in range(1, len(dst_fields)):
        next_field = dst_fields[i]
        offset, bit = GetFieldOffset(src_field, last_field)
        if offset + src_field.size <= next_field.offset:
          # Found hole.
          src_field.offset = offset
          src_field.bit = bit
          dst_fields.insert(i, src_field)
          break
        last_field = next_field
      if src_field.offset is None:
        # Add to end
        src_field.offset, src_field.bit = GetFieldOffset(src_field, last_field)
        dst_fields.append(src_field)


class ByteInfo:
  def __init__(self):
    self.is_padding = False
    self.packed_fields = []


def GetByteLayout(packed_struct):
  total_payload_size = GetPayloadSizeUpToField(
      packed_struct.packed_fields[-1] if packed_struct.packed_fields else None)
  byte_info = [ByteInfo() for i in range(total_payload_size)]

  limit_of_previous_field = 0
  for packed_field in packed_struct.packed_fields:
    for i in range(limit_of_previous_field, packed_field.offset):
      byte_info[i].is_padding = True
    byte_info[packed_field.offset].packed_fields.append(packed_field)
    limit_of_previous_field = packed_field.offset + packed_field.size

  for i in range(limit_of_previous_field, len(byte_info)):
    byte_info[i].is_padding = True

  for byte in byte_info:
    # A given byte cannot both be padding and have a fields packed into it.
    assert not (byte.is_padding and byte.packed_fields)

  return byte_info


class VersionInfo:
  def __init__(self, version, num_fields, num_packed_fields, num_bytes):
    self.version = version
    self.num_fields = num_fields
    self.num_packed_fields = num_packed_fields
    self.num_bytes = num_bytes


def GetVersionInfo(packed_struct):
  """Get version information for a struct.

  Args:
    packed_struct: A PackedStruct instance.

  Returns:
    A non-empty list of VersionInfo instances, sorted by version in increasing
    order.
    Note: The version numbers may not be consecutive.
  """
  versions = []
  last_version = 0
  last_num_fields = 0
  last_num_packed_fields = 0
  last_payload_size = 0

  for packed_field in packed_struct.packed_fields_in_ordinal_order:
    if packed_field.min_version != last_version:
      versions.append(
          VersionInfo(last_version, last_num_fields, last_num_packed_fields,
                      last_payload_size + HEADER_SIZE))
      last_version = packed_field.min_version

    # Nullable numeric fields (e.g. `int32?`) expand to two packed fields, so to
    # avoid double-counting, only increment if the field is:
    # - not used for representing a nullable value kind field, or
    # - the primary field representing the nullable value kind field.
    last_num_fields += 1 if (
        not IsNullableValueKindPackedField(packed_field)
        or IsPrimaryNullableValueKindPackedField(packed_field)) else 0

    last_num_packed_fields += 1

    # The fields are iterated in ordinal order here. However, the size of a
    # version is determined by the last field of that version in pack order,
    # instead of ordinal order. Therefore, we need to calculate the max value.
    last_payload_size = max(GetPayloadSizeUpToField(packed_field),
                            last_payload_size)

  assert len(
      versions) == 0 or last_num_packed_fields != versions[-1].num_packed_fields
  versions.append(
      VersionInfo(last_version, last_num_fields, last_num_packed_fields,
                  last_payload_size + HEADER_SIZE))
  return versions
@@ -0,0 +1,253 @@
# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import sys
import unittest

from mojom.generate import module as mojom
from mojom.generate import pack


class PackTest(unittest.TestCase):
  def testOrdinalOrder(self):
    struct = mojom.Struct('test')
    struct.AddField('testfield1', mojom.INT32, 2)
    struct.AddField('testfield2', mojom.INT32, 1)
    ps = pack.PackedStruct(struct)

    self.assertEqual(2, len(ps.packed_fields))
    self.assertEqual('testfield2', ps.packed_fields[0].field.mojom_name)
    self.assertEqual('testfield1', ps.packed_fields[1].field.mojom_name)

  def testZeroFields(self):
    struct = mojom.Struct('test')
    ps = pack.PackedStruct(struct)
    self.assertEqual(0, len(ps.packed_fields))

  def testOneField(self):
    struct = mojom.Struct('test')
    struct.AddField('testfield1', mojom.INT8)
    ps = pack.PackedStruct(struct)
    self.assertEqual(1, len(ps.packed_fields))

  def _CheckPackSequence(self, kinds, fields, offsets):
    """Checks the pack order and offsets of a sequence of mojom.Kinds.

    Args:
      kinds: A sequence of mojom.Kinds that specify the fields that are to be
        created.
      fields: The expected order of the resulting fields, with the integer "1"
        first.
      offsets: The expected order of offsets, with the integer "0" first.
    """
    struct = mojom.Struct('test')
    index = 1
    for kind in kinds:
      struct.AddField('%d' % index, kind)
      index += 1
    ps = pack.PackedStruct(struct)
    num_fields = len(ps.packed_fields)
    self.assertEqual(len(kinds), num_fields)
    for i in range(num_fields):
      self.assertEqual('%d' % fields[i], ps.packed_fields[i].field.mojom_name)
      self.assertEqual(offsets[i], ps.packed_fields[i].offset)

  def testPaddingPackedInOrder(self):
    return self._CheckPackSequence((mojom.INT8, mojom.UINT8, mojom.INT32),
                                   (1, 2, 3), (0, 1, 4))

  def testPaddingPackedOutOfOrder(self):
    return self._CheckPackSequence((mojom.INT8, mojom.INT32, mojom.UINT8),
                                   (1, 3, 2), (0, 1, 4))

  def testPaddingPackedOverflow(self):
    kinds = (mojom.INT8, mojom.INT32, mojom.INT16, mojom.INT8, mojom.INT8)
    # 2 bytes should be packed together first, followed by short, then by int.
    fields = (1, 4, 3, 2, 5)
    offsets = (0, 1, 2, 4, 8)
    return self._CheckPackSequence(kinds, fields, offsets)

  def testNullableTypes(self):
    kinds = (mojom.STRING.MakeNullableKind(), mojom.HANDLE.MakeNullableKind(),
             mojom.Struct('test_struct').MakeNullableKind(),
             mojom.DCPIPE.MakeNullableKind(), mojom.Array().MakeNullableKind(),
             mojom.DPPIPE.MakeNullableKind(),
             mojom.Array(length=5).MakeNullableKind(),
             mojom.MSGPIPE.MakeNullableKind(),
             mojom.Interface('test_interface').MakeNullableKind(),
             mojom.SHAREDBUFFER.MakeNullableKind(),
             mojom.InterfaceRequest().MakeNullableKind())
    fields = (1, 2, 4, 3, 5, 6, 8, 7, 9, 10, 11)
    offsets = (0, 8, 12, 16, 24, 32, 36, 40, 48, 56, 60)
    return self._CheckPackSequence(kinds, fields, offsets)

  def testAllTypes(self):
    return self._CheckPackSequence(
        (mojom.BOOL, mojom.INT8, mojom.STRING, mojom.UINT8, mojom.INT16,
         mojom.DOUBLE, mojom.UINT16, mojom.INT32, mojom.UINT32, mojom.INT64,
         mojom.FLOAT, mojom.STRING, mojom.HANDLE, mojom.UINT64,
         mojom.Struct('test'), mojom.Array(), mojom.STRING.MakeNullableKind()),
        (1, 2, 4, 5, 7, 3, 6, 8, 9, 10, 11, 13, 12, 14, 15, 16, 17, 18),
        (0, 1, 2, 4, 6, 8, 16, 24, 28, 32, 40, 44, 48, 56, 64, 72, 80, 88))

  def testPaddingPackedOutOfOrderByOrdinal(self):
    struct = mojom.Struct('test')
    struct.AddField('testfield1', mojom.INT8)
    struct.AddField('testfield3', mojom.UINT8, 3)
    struct.AddField('testfield2', mojom.INT32, 2)
    ps = pack.PackedStruct(struct)
    self.assertEqual(3, len(ps.packed_fields))

    # Second byte should be packed in behind first, altering order.
    self.assertEqual('testfield1', ps.packed_fields[0].field.mojom_name)
    self.assertEqual('testfield3', ps.packed_fields[1].field.mojom_name)
    self.assertEqual('testfield2', ps.packed_fields[2].field.mojom_name)

    # Second byte should be packed with first.
    self.assertEqual(0, ps.packed_fields[0].offset)
    self.assertEqual(1, ps.packed_fields[1].offset)
    self.assertEqual(4, ps.packed_fields[2].offset)

  def testBools(self):
    struct = mojom.Struct('test')
    struct.AddField('bit0', mojom.BOOL)
    struct.AddField('bit1', mojom.BOOL)
    struct.AddField('int', mojom.INT32)
    struct.AddField('bit2', mojom.BOOL)
    struct.AddField('bit3', mojom.BOOL)
    struct.AddField('bit4', mojom.BOOL)
    struct.AddField('bit5', mojom.BOOL)
    struct.AddField('bit6', mojom.BOOL)
    struct.AddField('bit7', mojom.BOOL)
    struct.AddField('bit8', mojom.BOOL)
    ps = pack.PackedStruct(struct)
    self.assertEqual(10, len(ps.packed_fields))

    # First 8 bits packed together.
    for i in range(8):
      pf = ps.packed_fields[i]
      self.assertEqual(0, pf.offset)
      self.assertEqual("bit%d" % i, pf.field.mojom_name)
      self.assertEqual(i, pf.bit)

    # Ninth bit goes into second byte.
    self.assertEqual("bit8", ps.packed_fields[8].field.mojom_name)
    self.assertEqual(1, ps.packed_fields[8].offset)
    self.assertEqual(0, ps.packed_fields[8].bit)

    # int comes last.
    self.assertEqual("int", ps.packed_fields[9].field.mojom_name)
    self.assertEqual(4, ps.packed_fields[9].offset)

  def testMinVersion(self):
    """Tests that |min_version| is properly set for packed fields."""
    struct = mojom.Struct('test')
    struct.AddField('field_2', mojom.BOOL, 2)
    struct.AddField('field_0', mojom.INT32, 0)
    struct.AddField('field_1', mojom.INT64, 1)
    ps = pack.PackedStruct(struct)

    self.assertEqual('field_0', ps.packed_fields[0].field.mojom_name)
    self.assertEqual('field_2', ps.packed_fields[1].field.mojom_name)
    self.assertEqual('field_1', ps.packed_fields[2].field.mojom_name)

    self.assertEqual(0, ps.packed_fields[0].min_version)
    self.assertEqual(0, ps.packed_fields[1].min_version)
    self.assertEqual(0, ps.packed_fields[2].min_version)

    struct.fields[0].attributes = {'MinVersion': 1}
    ps = pack.PackedStruct(struct)

    self.assertEqual(0, ps.packed_fields[0].min_version)
    self.assertEqual(1, ps.packed_fields[1].min_version)
    self.assertEqual(0, ps.packed_fields[2].min_version)

  def testGetVersionInfoEmptyStruct(self):
    """Tests that pack.GetVersionInfo() never returns an empty list, even for
    empty structs.
    """
    struct = mojom.Struct('test')
    ps = pack.PackedStruct(struct)

    versions = pack.GetVersionInfo(ps)
    self.assertEqual(1, len(versions))
    self.assertEqual(0, versions[0].version)
    self.assertEqual(0, versions[0].num_fields)
    self.assertEqual(8, versions[0].num_bytes)

  def testGetVersionInfoComplexOrder(self):
    """Tests pack.GetVersionInfo() using a struct whose definition order,
    ordinal order and pack order for fields are all different.
    """
    struct = mojom.Struct('test')
    struct.AddField(
        'field_3', mojom.BOOL, ordinal=3, attributes={'MinVersion': 3})
    struct.AddField('field_0', mojom.INT32, ordinal=0)
    struct.AddField(
        'field_1', mojom.INT64, ordinal=1, attributes={'MinVersion': 2})
    struct.AddField(
        'field_2', mojom.INT64, ordinal=2, attributes={'MinVersion': 3})
    ps = pack.PackedStruct(struct)

    versions = pack.GetVersionInfo(ps)
    self.assertEqual(3, len(versions))

    self.assertEqual(0, versions[0].version)
    self.assertEqual(1, versions[0].num_fields)
    self.assertEqual(16, versions[0].num_bytes)

    self.assertEqual(2, versions[1].version)
    self.assertEqual(2, versions[1].num_fields)
    self.assertEqual(24, versions[1].num_bytes)

    self.assertEqual(3, versions[2].version)
    self.assertEqual(4, versions[2].num_fields)
    self.assertEqual(32, versions[2].num_bytes)

  def testGetVersionInfoPackedStruct(self):
    """Tests that pack.GetVersionInfo() correctly sets version, num_fields,
    and num_packed_fields for a packed struct.
    """
    struct = mojom.Struct('test')
    struct.AddField('field_0', mojom.BOOL, ordinal=0)
    struct.AddField('field_1',
                    mojom.NULLABLE_BOOL,
                    ordinal=1,
                    attributes={'MinVersion': 1})
    struct.AddField('field_2',
                    mojom.NULLABLE_BOOL,
                    ordinal=2,
                    attributes={'MinVersion': 2})
    ps = pack.PackedStruct(struct)
    versions = pack.GetVersionInfo(ps)

    self.assertEqual(3, len(versions))
    self.assertEqual(0, versions[0].version)
    self.assertEqual(1, versions[1].version)
    self.assertEqual(2, versions[2].version)
    self.assertEqual(1, versions[0].num_fields)
    self.assertEqual(2, versions[1].num_fields)
    self.assertEqual(3, versions[2].num_fields)
    self.assertEqual(1, versions[0].num_packed_fields)
    self.assertEqual(3, versions[1].num_packed_fields)
    self.assertEqual(5, versions[2].num_packed_fields)

  def testInterfaceAlignment(self):
    """Tests that interfaces are aligned on 4-byte boundaries, although the size
    of an interface is 8 bytes.
    """
    kinds = (mojom.INT32, mojom.Interface('test_interface'))
    fields = (1, 2)
    offsets = (0, 4)
    self._CheckPackSequence(kinds, fields, offsets)

  def testAssociatedInterfaceAlignment(self):
    """Tests that associated interfaces are aligned on 4-byte boundaries,
    although the size of an associated interface is 8 bytes.
    """
    kinds = (mojom.INT32,
             mojom.AssociatedInterface(mojom.Interface('test_interface')))
    fields = (1, 2)
    offsets = (0, 4)
    self._CheckPackSequence(kinds, fields, offsets)
@@ -0,0 +1,82 @@
# Copyright 2013 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Based on third_party/WebKit/Source/build/scripts/template_expander.py.

import os.path
import sys

from mojom import fileutil

fileutil.AddLocalRepoThirdPartyDirToModulePath()
import jinja2


def ApplyTemplate(mojo_generator, path_to_template, params, **kwargs):
  loader = jinja2.ModuleLoader(
      os.path.join(mojo_generator.bytecode_path,
                   "%s.zip" % mojo_generator.GetTemplatePrefix()))
  final_kwargs = dict(mojo_generator.GetJinjaParameters())
  final_kwargs.update(kwargs)

  jinja_env = jinja2.Environment(
      loader=loader, keep_trailing_newline=True, **final_kwargs)
  jinja_env.globals.update(mojo_generator.GetGlobals())
  jinja_env.filters.update(mojo_generator.GetFilters())
  template = jinja_env.get_template(path_to_template)
  return template.render(params)


def UseJinja(path_to_template, **kwargs):
  def RealDecorator(generator):
    def GeneratorInternal(*args, **kwargs2):
      parameters = generator(*args, **kwargs2)
      return ApplyTemplate(args[0], path_to_template, parameters, **kwargs)

    GeneratorInternal.__name__ = generator.__name__
    return GeneratorInternal

  return RealDecorator
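An illustrative sketch of how a generator method typically uses the decorator above: the method returns the Jinja parameters, and UseJinja renders the named template with them. The backend class, template prefix, template file name, and GetFilters stub here are assumptions for the sketch, not part of this change:

from mojom.generate import generator

class ExampleGenerator(generator.Generator):
  def GetTemplatePrefix(self):
    return "example_templates"

  def GetFilters(self):
    return {}

  @UseJinja("interface.tmpl")  # hypothetical template in the bytecode zip
  def _GenerateInterface(self):
    return {"module": self.module}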

def ApplyImportedTemplate(mojo_generator, path_to_template, filename, params,
                          **kwargs):
  loader = jinja2.FileSystemLoader(searchpath=path_to_template)
  final_kwargs = dict(mojo_generator.GetJinjaParameters())
  final_kwargs.update(kwargs)

  jinja_env = jinja2.Environment(
      loader=loader, keep_trailing_newline=True, **final_kwargs)
  jinja_env.globals.update(mojo_generator.GetGlobals())
  jinja_env.filters.update(mojo_generator.GetFilters())
  template = jinja_env.get_template(filename)
  return template.render(params)


def UseJinjaForImportedTemplate(func):
  def wrapper(*args, **kwargs):
    parameters = func(*args, **kwargs)
    path_to_template = args[1]
    filename = args[2]
    return ApplyImportedTemplate(args[0], path_to_template, filename,
                                 parameters)

  wrapper.__name__ = func.__name__
  return wrapper


def PrecompileTemplates(generator_modules, output_dir):
  for module in generator_modules.values():
    generator = module.Generator(None)
    jinja_env = jinja2.Environment(
        loader=jinja2.FileSystemLoader([
            os.path.join(
                os.path.dirname(module.__file__), generator.GetTemplatePrefix())
        ]))
    jinja_env.filters.update(generator.GetFilters())
    jinja_env.compile_templates(os.path.join(
        output_dir, "%s.zip" % generator.GetTemplatePrefix()),
                                extensions=["tmpl"],
                                zip="stored",
                                ignore_errors=False)
File diff suppressed because it is too large
@@ -0,0 +1,141 @@
# Copyright 2014 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import unittest

from mojom.generate import module as mojom
from mojom.generate import translate
from mojom.parse import ast

class TranslateTest(unittest.TestCase):
  """Tests |parser.Parse()|."""

  def testSimpleArray(self):
    """Tests a simple int32[]."""
    # pylint: disable=W0212
    self.assertEquals(translate._MapKind("int32[]"), "a:i32")

  def testAssociativeArray(self):
    """Tests a simple uint8{string}."""
    # pylint: disable=W0212
    self.assertEquals(translate._MapKind("uint8{string}"), "m[s][u8]")

  def testLeftToRightAssociativeArray(self):
    """Makes sure that parsing is done from right to left on the internal kinds
    in the presence of an associative array."""
    # pylint: disable=W0212
    self.assertEquals(translate._MapKind("uint8[]{string}"), "m[s][a:u8]")

  def testTranslateSimpleUnions(self):
    """Makes sure that a simple union is translated correctly."""
    tree = ast.Mojom(None, ast.ImportList(), [
        ast.Union(
            "SomeUnion", None,
            ast.UnionBody([
                ast.UnionField("a", None, None, "int32"),
                ast.UnionField("b", None, None, "string")
            ]))
    ])

    translation = translate.OrderedModule(tree, "mojom_tree", [])
    self.assertEqual(1, len(translation.unions))

    union = translation.unions[0]
    self.assertTrue(isinstance(union, mojom.Union))
    self.assertEqual("SomeUnion", union.mojom_name)
    self.assertEqual(2, len(union.fields))
    self.assertEqual("a", union.fields[0].mojom_name)
    self.assertEqual(mojom.INT32.spec, union.fields[0].kind.spec)
    self.assertEqual("b", union.fields[1].mojom_name)
    self.assertEqual(mojom.STRING.spec, union.fields[1].kind.spec)

  def testMapKindRaisesWithDuplicate(self):
    """Verifies _MapTreeForType() raises when passed two values with the same
    name."""
    methods = [
        ast.Method('dup', None, None, ast.ParameterList(), None),
        ast.Method('dup', None, None, ast.ParameterList(), None)
    ]
    with self.assertRaises(Exception):
      translate._ElemsOfType(methods, ast.Method, 'scope')

  def testAssociatedKinds(self):
    """Tests type spec translation of associated interfaces and requests."""
    # pylint: disable=W0212
    self.assertEquals(
        translate._MapKind("asso<SomeInterface>?"), "?asso:x:SomeInterface")
    self.assertEquals(translate._MapKind("rca<SomeInterface>?"),
                      "?rca:x:SomeInterface")

  def testSelfRecursiveUnions(self):
    """Verifies _UnionField() raises when a union is self-recursive."""
    tree = ast.Mojom(None, ast.ImportList(), [
        ast.Union("SomeUnion", None,
                  ast.UnionBody([ast.UnionField("a", None, None, "SomeUnion")]))
    ])
    with self.assertRaises(Exception):
      translate.OrderedModule(tree, "mojom_tree", [])

    tree = ast.Mojom(None, ast.ImportList(), [
        ast.Union(
            "SomeUnion", None,
            ast.UnionBody([ast.UnionField("a", None, None, "SomeUnion?")]))
    ])
    with self.assertRaises(Exception):
      translate.OrderedModule(tree, "mojom_tree", [])

  def testDuplicateAttributesException(self):
    tree = ast.Mojom(None, ast.ImportList(), [
        ast.Union(
            "FakeUnion",
            ast.AttributeList([
                ast.Attribute("key1", "value"),
                ast.Attribute("key1", "value")
            ]),
            ast.UnionBody([
                ast.UnionField("a", None, None, "int32"),
                ast.UnionField("b", None, None, "string")
            ]))
    ])
    with self.assertRaises(Exception):
      translate.OrderedModule(tree, "mojom_tree", [])

  def testEnumWithReservedValues(self):
    """Verifies that assigning reserved values to enumerators fails."""
    # -128 is reserved for the empty representation in WTF::HashTraits.
    tree = ast.Mojom(None, ast.ImportList(), [
        ast.Enum(
            "MyEnum", None,
            ast.EnumValueList([
                ast.EnumValue('kReserved', None, '-128'),
            ]))
    ])
    with self.assertRaises(Exception) as context:
      translate.OrderedModule(tree, "mojom_tree", [])
    self.assertIn("reserved for WTF::HashTrait", str(context.exception))

    # -127 is reserved for the deleted representation in WTF::HashTraits.
    tree = ast.Mojom(None, ast.ImportList(), [
        ast.Enum(
            "MyEnum", None,
            ast.EnumValueList([
                ast.EnumValue('kReserved', None, '-127'),
            ]))
    ])
    with self.assertRaises(Exception) as context:
      translate.OrderedModule(tree, "mojom_tree", [])
    self.assertIn("reserved for WTF::HashTrait", str(context.exception))

    # Implicitly assigning a reserved value should also fail.
    tree = ast.Mojom(None, ast.ImportList(), [
        ast.Enum(
            "MyEnum", None,
            ast.EnumValueList([
                ast.EnumValue('kNotReserved', None, '-129'),
                ast.EnumValue('kImplicitlyReserved', None, None),
            ]))
    ])
    with self.assertRaises(Exception) as context:
      translate.OrderedModule(tree, "mojom_tree", [])
    self.assertIn("reserved for WTF::HashTrait", str(context.exception))
@@ -0,0 +1,462 @@
|
||||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Node classes for the AST for a Mojo IDL file."""
|
||||
|
||||
# Note: For convenience of testing, you probably want to define __eq__() methods
|
||||
# for all node types; it's okay to be slightly lax (e.g., not compare filename
|
||||
# and lineno). You may also define __repr__() to help with analyzing test
|
||||
# failures, especially for more complex types.
|
||||
|
||||
import os.path
|
||||
|
||||
|
||||
# Instance of 'NodeListBase' has no '_list_item_type' member (no-member)
|
||||
# pylint: disable=no-member
|
||||
|
||||
|
||||
class NodeBase:
|
||||
"""Base class for nodes in the AST."""
|
||||
|
||||
def __init__(self, filename=None, lineno=None):
|
||||
self.filename = filename
|
||||
self.lineno = lineno
|
||||
|
||||
def __eq__(self, other):
|
||||
# We want strict comparison of the two object's types. Disable pylint's
|
||||
# insistence upon recommending isinstance().
|
||||
# pylint: disable=unidiomatic-typecheck
|
||||
return type(self) == type(other)
|
||||
|
||||
# Make != the inverse of ==. (Subclasses shouldn't have to override this.)
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
|
||||
# TODO(vtl): Some of this is complicated enough that it should be tested.
|
||||
class NodeListBase(NodeBase):
|
||||
"""Represents a list of other nodes, all having the same type. (This is meant
|
||||
to be subclassed, with subclasses defining _list_item_type to be the class (or
|
||||
classes, in a tuple) of the members of the list.)"""
|
||||
|
||||
def __init__(self, item_or_items=None, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self.items = []
|
||||
if item_or_items is None:
|
||||
pass
|
||||
elif isinstance(item_or_items, list):
|
||||
for item in item_or_items:
|
||||
assert isinstance(item, self._list_item_type)
|
||||
self.Append(item)
|
||||
else:
|
||||
assert isinstance(item_or_items, self._list_item_type)
|
||||
self.Append(item_or_items)
|
||||
|
||||
# Support iteration. For everything else, users should just access |items|
|
||||
# directly. (We intentionally do NOT supply |__len__()| or |__nonzero__()|, so
|
||||
# |bool(NodeListBase())| is true.)
|
||||
def __iter__(self):
|
||||
return self.items.__iter__()
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.items == other.items
|
||||
|
||||
# Implement this so that on failure, we get slightly more sensible output.
|
||||
def __repr__(self):
|
||||
return self.__class__.__name__ + "([" + \
|
||||
", ".join([repr(elem) for elem in self.items]) + "])"
|
||||
|
||||
def Insert(self, item):
|
||||
"""Inserts item at the front of the list."""
|
||||
|
||||
assert isinstance(item, self._list_item_type)
|
||||
self.items.insert(0, item)
|
||||
self._UpdateFilenameAndLineno()
|
||||
|
||||
def Append(self, item):
|
||||
"""Appends item to the end of the list."""
|
||||
|
||||
assert isinstance(item, self._list_item_type)
|
||||
self.items.append(item)
|
||||
self._UpdateFilenameAndLineno()
|
||||
|
||||
def _UpdateFilenameAndLineno(self):
|
||||
if self.items:
|
||||
self.filename = self.items[0].filename
|
||||
self.lineno = self.items[0].lineno
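# Illustrative sketch (not part of the original file): typical use of a
# NodeListBase subclass. EnumValueList/EnumValue are defined later in this
# module; filename/lineno are taken from the first item after each mutation.
#
#   values = EnumValueList()
#   values.Append(EnumValue('kFirst', None, None))
#   values.Insert(EnumValue('kZeroth', None, None))
#   assert [v.mojom_name for v in values] == ['kZeroth', 'kFirst']
#   assert bool(EnumValueList())  # No __len__/__bool__, so always truthy.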
|
||||
|
||||
class Definition(NodeBase):
|
||||
"""Represents a definition of anything that has a global name (e.g., enums,
|
||||
enum values, consts, structs, struct fields, interfaces). (This does not
|
||||
include parameter definitions.) This class is meant to be subclassed."""
|
||||
|
||||
def __init__(self, mojom_name, **kwargs):
|
||||
assert isinstance(mojom_name, str)
|
||||
NodeBase.__init__(self, **kwargs)
|
||||
self.mojom_name = mojom_name
|
||||
|
||||
|
||||
################################################################################
|
||||
|
||||
|
||||
class Attribute(NodeBase):
|
||||
"""Represents an attribute."""
|
||||
|
||||
def __init__(self, key, value, **kwargs):
|
||||
assert isinstance(key, str)
|
||||
super().__init__(**kwargs)
|
||||
self.key = key
|
||||
self.value = value
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.key == other.key and \
|
||||
self.value == other.value
|
||||
|
||||
|
||||
class AttributeList(NodeListBase):
|
||||
"""Represents a list attributes."""
|
||||
|
||||
_list_item_type = Attribute
|
||||
|
||||
|
||||
class Const(Definition):
|
||||
"""Represents a const definition."""
|
||||
|
||||
def __init__(self, mojom_name, attribute_list, typename, value, **kwargs):
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
# The typename is currently passed through as a string.
|
||||
assert isinstance(typename, str)
|
||||
# The value is either a literal (currently passed through as a string) or a
|
||||
# "wrapped identifier".
|
||||
assert isinstance(value, (tuple, str))
|
||||
super().__init__(mojom_name, **kwargs)
|
||||
self.attribute_list = attribute_list
|
||||
self.typename = typename
|
||||
self.value = value
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.attribute_list == other.attribute_list and \
|
||||
self.typename == other.typename and \
|
||||
self.value == other.value
|
||||
|
||||
|
||||
class Enum(Definition):
|
||||
"""Represents an enum definition."""
|
||||
|
||||
def __init__(self, mojom_name, attribute_list, enum_value_list, **kwargs):
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
assert enum_value_list is None or isinstance(enum_value_list, EnumValueList)
|
||||
super().__init__(mojom_name, **kwargs)
|
||||
self.attribute_list = attribute_list
|
||||
self.enum_value_list = enum_value_list
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.attribute_list == other.attribute_list and \
|
||||
self.enum_value_list == other.enum_value_list
|
||||
|
||||
|
||||
class EnumValue(Definition):
|
||||
"""Represents a definition of an enum value."""
|
||||
|
||||
def __init__(self, mojom_name, attribute_list, value, **kwargs):
|
||||
# The optional value is either an int (which is currently a string) or a
|
||||
# "wrapped identifier".
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
assert value is None or isinstance(value, (tuple, str))
|
||||
super().__init__(mojom_name, **kwargs)
|
||||
self.attribute_list = attribute_list
|
||||
self.value = value
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.attribute_list == other.attribute_list and \
|
||||
self.value == other.value
|
||||
|
||||
|
||||
class EnumValueList(NodeListBase):
|
||||
"""Represents a list of enum value definitions (i.e., the "body" of an enum
|
||||
definition)."""
|
||||
|
||||
_list_item_type = EnumValue
|
||||
|
||||
|
||||
class Feature(Definition):
|
||||
"""Represents a runtime feature definition."""
|
||||
def __init__(self, mojom_name, attribute_list, body, **kwargs):
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
assert isinstance(body, FeatureBody) or body is None
|
||||
super().__init__(mojom_name, **kwargs)
|
||||
self.attribute_list = attribute_list
|
||||
self.body = body
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.attribute_list == other.attribute_list and \
|
||||
self.body == other.body
|
||||
|
||||
def __repr__(self):
|
||||
return "Feature(mojom_name = %s, attribute_list = %s, body = %s)" % (
|
||||
self.mojom_name, self.attribute_list, self.body)
|
||||
|
||||
|
||||
# This needs to be declared after |Const|.
|
||||
class FeatureBody(NodeListBase):
|
||||
"""Represents the body of (i.e., list of definitions inside) a feature."""
|
||||
|
||||
# Features are compile time helpers so all fields are initializers/consts
|
||||
# for the underlying platform feature type.
|
||||
_list_item_type = Const
|
||||
|
||||
|
||||
class Import(NodeBase):
|
||||
"""Represents an import statement."""
|
||||
|
||||
def __init__(self, attribute_list, import_filename, **kwargs):
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
assert isinstance(import_filename, str)
|
||||
super().__init__(**kwargs)
|
||||
self.attribute_list = attribute_list
|
||||
# TODO(crbug.com/953884): Use pathlib once we're migrated fully to Python 3.
|
||||
self.import_filename = os.path.normpath(import_filename).replace('\\', '/')
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.attribute_list == other.attribute_list and \
|
||||
self.import_filename == other.import_filename
|
||||
|
||||
|
||||
class ImportList(NodeListBase):
|
||||
"""Represents a list (i.e., sequence) of import statements."""
|
||||
|
||||
_list_item_type = Import
|
||||
|
||||
|
||||
class Interface(Definition):
|
||||
"""Represents an interface definition."""
|
||||
|
||||
def __init__(self, mojom_name, attribute_list, body, **kwargs):
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
assert isinstance(body, InterfaceBody)
|
||||
super().__init__(mojom_name, **kwargs)
|
||||
self.attribute_list = attribute_list
|
||||
self.body = body
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.attribute_list == other.attribute_list and \
|
||||
self.body == other.body
|
||||
|
||||
|
||||
class Method(Definition):
|
||||
"""Represents a method definition."""
|
||||
|
||||
def __init__(self, mojom_name, attribute_list, ordinal, parameter_list,
|
||||
response_parameter_list, **kwargs):
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
assert ordinal is None or isinstance(ordinal, Ordinal)
|
||||
assert isinstance(parameter_list, ParameterList)
|
||||
assert response_parameter_list is None or \
|
||||
isinstance(response_parameter_list, ParameterList)
|
||||
super().__init__(mojom_name, **kwargs)
|
||||
self.attribute_list = attribute_list
|
||||
self.ordinal = ordinal
|
||||
self.parameter_list = parameter_list
|
||||
self.response_parameter_list = response_parameter_list
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.attribute_list == other.attribute_list and \
|
||||
self.ordinal == other.ordinal and \
|
||||
self.parameter_list == other.parameter_list and \
|
||||
self.response_parameter_list == other.response_parameter_list
|
||||
|
||||
|
||||
# This needs to be declared after |Method|.
|
||||
class InterfaceBody(NodeListBase):
|
||||
"""Represents the body of (i.e., list of definitions inside) an interface."""
|
||||
|
||||
_list_item_type = (Const, Enum, Method)
|
||||
|
||||
|
||||
class Module(NodeBase):
|
||||
"""Represents a module statement."""
|
||||
|
||||
def __init__(self, mojom_namespace, attribute_list, **kwargs):
|
||||
# |mojom_namespace| is either None or a "wrapped identifier".
|
||||
assert mojom_namespace is None or isinstance(mojom_namespace, tuple)
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
super().__init__(**kwargs)
|
||||
self.mojom_namespace = mojom_namespace
|
||||
self.attribute_list = attribute_list
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.mojom_namespace == other.mojom_namespace and \
|
||||
self.attribute_list == other.attribute_list
|
||||
|
||||
|
||||
class Mojom(NodeBase):
|
||||
"""Represents an entire .mojom file. (This is the root node.)"""
|
||||
|
||||
def __init__(self, module, import_list, definition_list, **kwargs):
|
||||
assert module is None or isinstance(module, Module)
|
||||
assert isinstance(import_list, ImportList)
|
||||
assert isinstance(definition_list, list)
|
||||
super().__init__(**kwargs)
|
||||
self.module = module
|
||||
self.import_list = import_list
|
||||
self.definition_list = definition_list
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.module == other.module and \
|
||||
self.import_list == other.import_list and \
|
||||
self.definition_list == other.definition_list
|
||||
|
||||
def __repr__(self):
|
||||
return "%s(%r, %r, %r)" % (self.__class__.__name__, self.module,
|
||||
self.import_list, self.definition_list)
|
||||
|
||||
|
||||
class Ordinal(NodeBase):
|
||||
"""Represents an ordinal value labeling, e.g., a struct field."""
|
||||
|
||||
def __init__(self, value, **kwargs):
|
||||
assert isinstance(value, int)
|
||||
super().__init__(**kwargs)
|
||||
self.value = value
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.value == other.value
|
||||
|
||||
|
||||
class Parameter(NodeBase):
|
||||
"""Represents a method request or response parameter."""
|
||||
|
||||
def __init__(self, mojom_name, attribute_list, ordinal, typename, **kwargs):
|
||||
assert isinstance(mojom_name, str)
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
assert ordinal is None or isinstance(ordinal, Ordinal)
|
||||
assert isinstance(typename, str)
|
||||
super().__init__(**kwargs)
|
||||
self.mojom_name = mojom_name
|
||||
self.attribute_list = attribute_list
|
||||
self.ordinal = ordinal
|
||||
self.typename = typename
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.mojom_name == other.mojom_name and \
|
||||
self.attribute_list == other.attribute_list and \
|
||||
self.ordinal == other.ordinal and \
|
||||
self.typename == other.typename
|
||||
|
||||
|
||||
class ParameterList(NodeListBase):
|
||||
"""Represents a list of (method request or response) parameters."""
|
||||
|
||||
_list_item_type = Parameter
|
||||
|
||||
|
||||
class Struct(Definition):
|
||||
"""Represents a struct definition."""
|
||||
|
||||
def __init__(self, mojom_name, attribute_list, body, **kwargs):
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
assert isinstance(body, StructBody) or body is None
|
||||
super().__init__(mojom_name, **kwargs)
|
||||
self.attribute_list = attribute_list
|
||||
self.body = body
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.attribute_list == other.attribute_list and \
|
||||
self.body == other.body
|
||||
|
||||
def __repr__(self):
|
||||
return "Struct(mojom_name = %s, attribute_list = %s, body = %s)" % (
|
||||
self.mojom_name, self.attribute_list, self.body)
|
||||
|
||||
|
||||
class StructField(Definition):
|
||||
"""Represents a struct field definition."""
|
||||
|
||||
def __init__(self, mojom_name, attribute_list, ordinal, typename,
|
||||
default_value, **kwargs):
|
||||
assert isinstance(mojom_name, str)
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
assert ordinal is None or isinstance(ordinal, Ordinal)
|
||||
assert isinstance(typename, str)
|
||||
# The optional default value is currently either a value as a string or a
|
||||
# "wrapped identifier".
|
||||
assert default_value is None or isinstance(default_value, (str, tuple))
|
||||
super().__init__(mojom_name, **kwargs)
|
||||
self.attribute_list = attribute_list
|
||||
self.ordinal = ordinal
|
||||
self.typename = typename
|
||||
self.default_value = default_value
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.attribute_list == other.attribute_list and \
|
||||
self.ordinal == other.ordinal and \
|
||||
self.typename == other.typename and \
|
||||
self.default_value == other.default_value
|
||||
|
||||
def __repr__(self):
|
||||
return ("StructField(mojom_name = %s, attribute_list = %s, ordinal = %s, "
|
||||
"typename = %s, default_value = %s") % (
|
||||
self.mojom_name, self.attribute_list, self.ordinal,
|
||||
self.typename, self.default_value)
|
||||
|
||||
|
||||
# This needs to be declared after |StructField|.
|
||||
class StructBody(NodeListBase):
|
||||
"""Represents the body of (i.e., list of definitions inside) a struct."""
|
||||
|
||||
_list_item_type = (Const, Enum, StructField)
|
||||
|
||||
|
||||
class Union(Definition):
|
||||
"""Represents a union definition."""
|
||||
|
||||
def __init__(self, mojom_name, attribute_list, body, **kwargs):
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
assert isinstance(body, UnionBody)
|
||||
super().__init__(mojom_name, **kwargs)
|
||||
self.attribute_list = attribute_list
|
||||
self.body = body
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.attribute_list == other.attribute_list and \
|
||||
self.body == other.body
|
||||
|
||||
|
||||
class UnionField(Definition):
"""Represents a union field definition."""
|
||||
def __init__(self, mojom_name, attribute_list, ordinal, typename, **kwargs):
|
||||
assert isinstance(mojom_name, str)
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
assert ordinal is None or isinstance(ordinal, Ordinal)
|
||||
assert isinstance(typename, str)
|
||||
super().__init__(mojom_name, **kwargs)
|
||||
self.attribute_list = attribute_list
|
||||
self.ordinal = ordinal
|
||||
self.typename = typename
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.attribute_list == other.attribute_list and \
|
||||
self.ordinal == other.ordinal and \
|
||||
self.typename == other.typename
|
||||
|
||||
|
||||
class UnionBody(NodeListBase):
"""Represents the body of (i.e., list of definitions inside) a union."""
|
||||
|
||||
_list_item_type = UnionField
|
||||
@@ -0,0 +1,115 @@
|
||||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import unittest
|
||||
|
||||
from mojom.parse import ast
|
||||
|
||||
class _TestNode(ast.NodeBase):
|
||||
"""Node type for tests."""
|
||||
|
||||
def __init__(self, value, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self.value = value
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and self.value == other.value
|
||||
|
||||
class _TestNodeList(ast.NodeListBase):
|
||||
"""Node list type for tests."""
|
||||
|
||||
_list_item_type = _TestNode
|
||||
|
||||
class ASTTest(unittest.TestCase):
|
||||
"""Tests various AST classes."""
|
||||
|
||||
def testNodeBase(self):
|
||||
# Test |__eq__()|; this is only used for testing, where we want to do
|
||||
# comparison by value and ignore filenames/line numbers (for convenience).
|
||||
node1 = ast.NodeBase(filename="hello.mojom", lineno=123)
|
||||
node2 = ast.NodeBase()
|
||||
self.assertEquals(node1, node2)
|
||||
self.assertEquals(node2, node1)
|
||||
|
||||
# Check that |__ne__()| just defers to |__eq__()| properly.
|
||||
self.assertFalse(node1 != node2)
|
||||
self.assertFalse(node2 != node1)
|
||||
|
||||
# Check that |filename| and |lineno| are set properly (and are None by
|
||||
# default).
|
||||
self.assertEquals(node1.filename, "hello.mojom")
|
||||
self.assertEquals(node1.lineno, 123)
|
||||
self.assertIsNone(node2.filename)
|
||||
self.assertIsNone(node2.lineno)
|
||||
|
||||
# |NodeBase|'s |__eq__()| should compare types (and a subclass's |__eq__()|
|
||||
# should first defer to its superclass's).
|
||||
node3 = _TestNode(123)
|
||||
self.assertNotEqual(node1, node3)
|
||||
self.assertNotEqual(node3, node1)
|
||||
# Also test |__eq__()| directly.
|
||||
self.assertFalse(node1 == node3)
|
||||
self.assertFalse(node3 == node1)
|
||||
|
||||
node4 = _TestNode(123, filename="world.mojom", lineno=123)
|
||||
self.assertEquals(node4, node3)
|
||||
node5 = _TestNode(456)
|
||||
self.assertNotEquals(node5, node4)
|
||||
|
||||
def testNodeListBase(self):
|
||||
node1 = _TestNode(1, filename="foo.mojom", lineno=1)
|
||||
# Equal to, but not the same as, |node1|:
|
||||
node1b = _TestNode(1, filename="foo.mojom", lineno=1)
|
||||
node2 = _TestNode(2, filename="foo.mojom", lineno=2)
|
||||
|
||||
nodelist1 = _TestNodeList() # Contains: (empty).
|
||||
self.assertEquals(nodelist1, nodelist1)
|
||||
self.assertEquals(nodelist1.items, [])
|
||||
self.assertIsNone(nodelist1.filename)
|
||||
self.assertIsNone(nodelist1.lineno)
|
||||
|
||||
nodelist2 = _TestNodeList(node1) # Contains: 1.
|
||||
self.assertEquals(nodelist2, nodelist2)
|
||||
self.assertEquals(nodelist2.items, [node1])
|
||||
self.assertNotEqual(nodelist2, nodelist1)
|
||||
self.assertEquals(nodelist2.filename, "foo.mojom")
|
||||
self.assertEquals(nodelist2.lineno, 1)
|
||||
|
||||
nodelist3 = _TestNodeList([node2]) # Contains: 2.
|
||||
self.assertEquals(nodelist3.items, [node2])
|
||||
self.assertNotEqual(nodelist3, nodelist1)
|
||||
self.assertNotEqual(nodelist3, nodelist2)
|
||||
self.assertEquals(nodelist3.filename, "foo.mojom")
|
||||
self.assertEquals(nodelist3.lineno, 2)
|
||||
|
||||
nodelist1.Append(node1b) # Contains: 1.
|
||||
self.assertEquals(nodelist1.items, [node1])
|
||||
self.assertEquals(nodelist1, nodelist2)
|
||||
self.assertNotEqual(nodelist1, nodelist3)
|
||||
self.assertEquals(nodelist1.filename, "foo.mojom")
|
||||
self.assertEquals(nodelist1.lineno, 1)
|
||||
|
||||
nodelist1.Append(node2) # Contains: 1, 2.
|
||||
self.assertEquals(nodelist1.items, [node1, node2])
|
||||
self.assertNotEqual(nodelist1, nodelist2)
|
||||
self.assertNotEqual(nodelist1, nodelist3)
|
||||
self.assertEquals(nodelist1.lineno, 1)
|
||||
|
||||
nodelist2.Append(node2) # Contains: 1, 2.
|
||||
self.assertEquals(nodelist2.items, [node1, node2])
|
||||
self.assertEquals(nodelist2, nodelist1)
|
||||
self.assertNotEqual(nodelist2, nodelist3)
|
||||
self.assertEquals(nodelist2.lineno, 1)
|
||||
|
||||
nodelist3.Insert(node1) # Contains: 1, 2.
|
||||
self.assertEquals(nodelist3.items, [node1, node2])
|
||||
self.assertEquals(nodelist3, nodelist1)
|
||||
self.assertEquals(nodelist3, nodelist2)
|
||||
self.assertEquals(nodelist3.lineno, 1)
|
||||
|
||||
# Test iteration:
|
||||
i = 1
|
||||
for item in nodelist1:
|
||||
self.assertEquals(item.value, i)
|
||||
i += 1
|
||||
@@ -0,0 +1,83 @@
|
||||
# Copyright 2018 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Helpers for processing conditionally enabled features in a mojom."""
|
||||
|
||||
from mojom.error import Error
|
||||
from mojom.parse import ast
|
||||
|
||||
|
||||
class EnableIfError(Error):
"""Class for errors from EnableIf/EnableIfNot conditional feature processing."""
|
||||
|
||||
def __init__(self, filename, message, lineno=None):
|
||||
Error.__init__(self, filename, message, lineno=lineno, addenda=None)
|
||||
|
||||
|
||||
def _IsEnabled(definition, enabled_features):
|
||||
"""Returns true if a definition is enabled.
|
||||
|
||||
A definition is enabled if it has no EnableIf/EnableIfNot attribute.
|
||||
It is retained if it has an EnableIf attribute and the attribute is in
|
||||
enabled_features. It is retained if it has an EnableIfNot attribute and the
|
||||
attribute is not in enabled features.
|
||||
"""
|
||||
if not hasattr(definition, "attribute_list"):
|
||||
return True
|
||||
if not definition.attribute_list:
|
||||
return True
|
||||
|
||||
already_defined = False
|
||||
for a in definition.attribute_list:
|
||||
if a.key == 'EnableIf' or a.key == 'EnableIfNot':
|
||||
if already_defined:
|
||||
raise EnableIfError(
|
||||
definition.filename,
|
||||
"EnableIf/EnableIfNot attribute may only be set once per field.",
|
||||
definition.lineno)
|
||||
already_defined = True
|
||||
|
||||
for attribute in definition.attribute_list:
|
||||
if attribute.key == 'EnableIf' and attribute.value not in enabled_features:
|
||||
return False
|
||||
if attribute.key == 'EnableIfNot' and attribute.value in enabled_features:
|
||||
return False
|
||||
return True
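# Illustrative sketch (not part of the original file): how _IsEnabled()
# treats EnableIf/EnableIfNot, using the ast classes from this package.
#
#   enabled = {'red', 'blue'}
#   s = ast.Struct('S', ast.AttributeList(ast.Attribute('EnableIf', 'red')),
#                  ast.StructBody())
#   assert _IsEnabled(s, enabled)       # 'red' is enabled, so keep it.
#   s = ast.Struct('S', ast.AttributeList(ast.Attribute('EnableIfNot', 'red')),
#                  ast.StructBody())
#   assert not _IsEnabled(s, enabled)   # 'red' is enabled, so drop it.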
|
||||
|
||||
def _FilterDisabledFromNodeList(node_list, enabled_features):
|
||||
if not node_list:
|
||||
return
|
||||
assert isinstance(node_list, ast.NodeListBase)
|
||||
node_list.items = [
|
||||
item for item in node_list.items if _IsEnabled(item, enabled_features)
|
||||
]
|
||||
for item in node_list.items:
|
||||
_FilterDefinition(item, enabled_features)
|
||||
|
||||
|
||||
def _FilterDefinition(definition, enabled_features):
|
||||
"""Filters definitions with a body."""
|
||||
if isinstance(definition, ast.Enum):
|
||||
_FilterDisabledFromNodeList(definition.enum_value_list, enabled_features)
|
||||
elif isinstance(definition, ast.Method):
|
||||
_FilterDisabledFromNodeList(definition.parameter_list, enabled_features)
|
||||
_FilterDisabledFromNodeList(definition.response_parameter_list,
|
||||
enabled_features)
|
||||
elif isinstance(definition,
|
||||
(ast.Interface, ast.Struct, ast.Union, ast.Feature)):
|
||||
_FilterDisabledFromNodeList(definition.body, enabled_features)
|
||||
|
||||
|
||||
def RemoveDisabledDefinitions(mojom, enabled_features):
|
||||
"""Removes conditionally disabled definitions from a Mojom node."""
|
||||
mojom.import_list = ast.ImportList([
|
||||
imported_file for imported_file in mojom.import_list
|
||||
if _IsEnabled(imported_file, enabled_features)
|
||||
])
|
||||
mojom.definition_list = [
|
||||
definition for definition in mojom.definition_list
|
||||
if _IsEnabled(definition, enabled_features)
|
||||
]
|
||||
for definition in mojom.definition_list:
|
||||
_FilterDefinition(definition, enabled_features)
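# Illustrative usage sketch (not part of the original file); it mirrors the
# parseAndAssertEqual() helper in the unit tests below. parser.Parse() is the
# entry point defined in mojom.parse.parser.
#
#   from mojom.parse import parser
#   tree = parser.Parse('[EnableIf=red] const int32 kX = 1;', 'a.mojom')
#   RemoveDisabledDefinitions(tree, {'red'})    # kX is kept.
#
#   tree = parser.Parse('[EnableIf=red] const int32 kX = 1;', 'a.mojom')
#   RemoveDisabledDefinitions(tree, {'blue'})   # kX is filtered out.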
@@ -0,0 +1,376 @@
|
||||
# Copyright 2018 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import importlib.util
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
def _GetDirAbove(dirname):
|
||||
"""Returns the directory "above" this file containing |dirname| (which must
|
||||
also be "above" this file)."""
|
||||
path = os.path.abspath(__file__)
|
||||
while True:
|
||||
path, tail = os.path.split(path)
|
||||
assert tail
|
||||
if tail == dirname:
|
||||
return path
|
||||
|
||||
try:
|
||||
importlib.util.find_spec("mojom")
|
||||
except ImportError:
|
||||
sys.path.append(os.path.join(_GetDirAbove('pylib'), 'pylib'))
|
||||
import mojom.parse.ast as ast
|
||||
import mojom.parse.conditional_features as conditional_features
|
||||
import mojom.parse.parser as parser
|
||||
|
||||
ENABLED_FEATURES = frozenset({'red', 'green', 'blue'})
|
||||
|
||||
class ConditionalFeaturesTest(unittest.TestCase):
|
||||
"""Tests |mojom.parse.conditional_features|."""
|
||||
|
||||
def parseAndAssertEqual(self, source, expected_source):
|
||||
definition = parser.Parse(source, "my_file.mojom")
|
||||
conditional_features.RemoveDisabledDefinitions(definition, ENABLED_FEATURES)
|
||||
expected = parser.Parse(expected_source, "my_file.mojom")
|
||||
self.assertEquals(definition, expected)
|
||||
|
||||
def testFilterConst(self):
|
||||
"""Test that Consts are correctly filtered."""
|
||||
const_source = """
|
||||
[EnableIf=blue]
|
||||
const int kMyConst1 = 1;
|
||||
[EnableIf=orange]
|
||||
const double kMyConst2 = 2;
|
||||
const int kMyConst3 = 3;
|
||||
"""
|
||||
expected_source = """
|
||||
[EnableIf=blue]
|
||||
const int kMyConst1 = 1;
|
||||
const int kMyConst3 = 3;
|
||||
"""
|
||||
self.parseAndAssertEqual(const_source, expected_source)
|
||||
|
||||
def testFilterIfNotConst(self):
|
||||
"""Test that Consts are correctly filtered."""
|
||||
const_source = """
|
||||
[EnableIfNot=blue]
|
||||
const int kMyConst1 = 1;
|
||||
[EnableIfNot=orange]
|
||||
const double kMyConst2 = 2;
|
||||
[EnableIf=blue]
|
||||
const int kMyConst3 = 3;
|
||||
[EnableIfNot=blue]
|
||||
const int kMyConst4 = 4;
|
||||
[EnableIfNot=purple]
|
||||
const int kMyConst5 = 5;
|
||||
"""
|
||||
expected_source = """
|
||||
[EnableIfNot=orange]
|
||||
const double kMyConst2 = 2;
|
||||
[EnableIf=blue]
|
||||
const int kMyConst3 = 3;
|
||||
[EnableIfNot=purple]
|
||||
const int kMyConst5 = 5;
|
||||
"""
|
||||
self.parseAndAssertEqual(const_source, expected_source)
|
||||
|
||||
def testFilterIfNotMultipleConst(self):
|
||||
"""Test that Consts are correctly filtered."""
|
||||
const_source = """
|
||||
[EnableIfNot=blue]
|
||||
const int kMyConst1 = 1;
|
||||
[EnableIfNot=orange]
|
||||
const double kMyConst2 = 2;
|
||||
[EnableIfNot=orange]
|
||||
const int kMyConst3 = 3;
|
||||
"""
|
||||
expected_source = """
|
||||
[EnableIfNot=orange]
|
||||
const double kMyConst2 = 2;
|
||||
[EnableIfNot=orange]
|
||||
const int kMyConst3 = 3;
|
||||
"""
|
||||
self.parseAndAssertEqual(const_source, expected_source)
|
||||
|
||||
def testFilterEnum(self):
|
||||
"""Test that EnumValues are correctly filtered from an Enum."""
|
||||
enum_source = """
|
||||
enum MyEnum {
|
||||
[EnableIf=purple]
|
||||
VALUE1,
|
||||
[EnableIf=blue]
|
||||
VALUE2,
|
||||
VALUE3,
|
||||
};
|
||||
"""
|
||||
expected_source = """
|
||||
enum MyEnum {
|
||||
[EnableIf=blue]
|
||||
VALUE2,
|
||||
VALUE3
|
||||
};
|
||||
"""
|
||||
self.parseAndAssertEqual(enum_source, expected_source)
|
||||
|
||||
def testFilterImport(self):
|
||||
"""Test that imports are correctly filtered from a Mojom."""
|
||||
import_source = """
|
||||
[EnableIf=blue]
|
||||
import "foo.mojom";
|
||||
import "bar.mojom";
|
||||
[EnableIf=purple]
|
||||
import "baz.mojom";
|
||||
"""
|
||||
expected_source = """
|
||||
[EnableIf=blue]
|
||||
import "foo.mojom";
|
||||
import "bar.mojom";
|
||||
"""
|
||||
self.parseAndAssertEqual(import_source, expected_source)
|
||||
|
||||
def testFilterIfNotImport(self):
|
||||
"""Test that imports are correctly filtered from a Mojom."""
|
||||
import_source = """
|
||||
[EnableIf=blue]
|
||||
import "foo.mojom";
|
||||
[EnableIfNot=purple]
|
||||
import "bar.mojom";
|
||||
[EnableIfNot=green]
|
||||
import "baz.mojom";
|
||||
"""
|
||||
expected_source = """
|
||||
[EnableIf=blue]
|
||||
import "foo.mojom";
|
||||
[EnableIfNot=purple]
|
||||
import "bar.mojom";
|
||||
"""
|
||||
self.parseAndAssertEqual(import_source, expected_source)
|
||||
|
||||
def testFilterInterface(self):
|
||||
"""Test that definitions are correctly filtered from an Interface."""
|
||||
interface_source = """
|
||||
interface MyInterface {
|
||||
[EnableIf=blue]
|
||||
enum MyEnum {
|
||||
[EnableIf=purple]
|
||||
VALUE1,
|
||||
VALUE2,
|
||||
};
|
||||
[EnableIf=blue]
|
||||
const int32 kMyConst = 123;
|
||||
[EnableIf=purple]
|
||||
MyMethod();
|
||||
};
|
||||
"""
|
||||
expected_source = """
|
||||
interface MyInterface {
|
||||
[EnableIf=blue]
|
||||
enum MyEnum {
|
||||
VALUE2,
|
||||
};
|
||||
[EnableIf=blue]
|
||||
const int32 kMyConst = 123;
|
||||
};
|
||||
"""
|
||||
self.parseAndAssertEqual(interface_source, expected_source)
|
||||
|
||||
def testFilterMethod(self):
|
||||
"""Test that Parameters are correctly filtered from a Method."""
|
||||
method_source = """
|
||||
interface MyInterface {
|
||||
[EnableIf=blue]
|
||||
MyMethod([EnableIf=purple] int32 a) => ([EnableIf=red] int32 b);
|
||||
};
|
||||
"""
|
||||
expected_source = """
|
||||
interface MyInterface {
|
||||
[EnableIf=blue]
|
||||
MyMethod() => ([EnableIf=red] int32 b);
|
||||
};
|
||||
"""
|
||||
self.parseAndAssertEqual(method_source, expected_source)
|
||||
|
||||
def testFilterStruct(self):
|
||||
"""Test that definitions are correctly filtered from a Struct."""
|
||||
struct_source = """
|
||||
struct MyStruct {
|
||||
[EnableIf=blue]
|
||||
enum MyEnum {
|
||||
VALUE1,
|
||||
[EnableIf=purple]
|
||||
VALUE2,
|
||||
};
|
||||
[EnableIf=yellow]
|
||||
const double kMyConst = 1.23;
|
||||
[EnableIf=green]
|
||||
int32 a;
|
||||
double b;
|
||||
[EnableIf=purple]
|
||||
int32 c;
|
||||
[EnableIf=blue]
|
||||
double d;
|
||||
int32 e;
|
||||
[EnableIf=orange]
|
||||
double f;
|
||||
};
|
||||
"""
|
||||
expected_source = """
|
||||
struct MyStruct {
|
||||
[EnableIf=blue]
|
||||
enum MyEnum {
|
||||
VALUE1,
|
||||
};
|
||||
[EnableIf=green]
|
||||
int32 a;
|
||||
double b;
|
||||
[EnableIf=blue]
|
||||
double d;
|
||||
int32 e;
|
||||
};
|
||||
"""
|
||||
self.parseAndAssertEqual(struct_source, expected_source)
|
||||
|
||||
def testFilterIfNotStruct(self):
|
||||
"""Test that definitions are correctly filtered from a Struct."""
|
||||
struct_source = """
|
||||
struct MyStruct {
|
||||
[EnableIf=blue]
|
||||
enum MyEnum {
|
||||
VALUE1,
|
||||
[EnableIfNot=red]
|
||||
VALUE2,
|
||||
};
|
||||
[EnableIfNot=yellow]
|
||||
const double kMyConst = 1.23;
|
||||
[EnableIf=green]
|
||||
int32 a;
|
||||
double b;
|
||||
[EnableIfNot=purple]
|
||||
int32 c;
|
||||
[EnableIf=blue]
|
||||
double d;
|
||||
int32 e;
|
||||
[EnableIfNot=red]
|
||||
double f;
|
||||
};
|
||||
"""
|
||||
expected_source = """
|
||||
struct MyStruct {
|
||||
[EnableIf=blue]
|
||||
enum MyEnum {
|
||||
VALUE1,
|
||||
};
|
||||
[EnableIfNot=yellow]
|
||||
const double kMyConst = 1.23;
|
||||
[EnableIf=green]
|
||||
int32 a;
|
||||
double b;
|
||||
[EnableIfNot=purple]
|
||||
int32 c;
|
||||
[EnableIf=blue]
|
||||
double d;
|
||||
int32 e;
|
||||
};
|
||||
"""
|
||||
self.parseAndAssertEqual(struct_source, expected_source)
|
||||
|
||||
def testFilterUnion(self):
|
||||
"""Test that UnionFields are correctly filtered from a Union."""
|
||||
union_source = """
|
||||
union MyUnion {
|
||||
[EnableIf=yellow]
|
||||
int32 a;
|
||||
[EnableIf=red]
|
||||
bool b;
|
||||
};
|
||||
"""
|
||||
expected_source = """
|
||||
union MyUnion {
|
||||
[EnableIf=red]
|
||||
bool b;
|
||||
};
|
||||
"""
|
||||
self.parseAndAssertEqual(union_source, expected_source)
|
||||
|
||||
def testSameNameFields(self):
|
||||
mojom_source = """
|
||||
enum Foo {
|
||||
[EnableIf=red]
|
||||
VALUE1 = 5,
|
||||
[EnableIf=yellow]
|
||||
VALUE1 = 6,
|
||||
};
|
||||
[EnableIf=red]
|
||||
const double kMyConst = 1.23;
|
||||
[EnableIf=yellow]
|
||||
const double kMyConst = 4.56;
|
||||
"""
|
||||
expected_source = """
|
||||
enum Foo {
|
||||
[EnableIf=red]
|
||||
VALUE1 = 5,
|
||||
};
|
||||
[EnableIf=red]
|
||||
const double kMyConst = 1.23;
|
||||
"""
|
||||
self.parseAndAssertEqual(mojom_source, expected_source)
|
||||
|
||||
def testFeaturesWithEnableIf(self):
|
||||
mojom_source = """
|
||||
feature Foo {
|
||||
const string name = "FooFeature";
|
||||
[EnableIf=red]
|
||||
const bool default_state = false;
|
||||
[EnableIf=yellow]
|
||||
const bool default_state = true;
|
||||
};
|
||||
"""
|
||||
expected_source = """
|
||||
feature Foo {
|
||||
const string name = "FooFeature";
|
||||
[EnableIf=red]
|
||||
const bool default_state = false;
|
||||
};
|
||||
"""
|
||||
self.parseAndAssertEqual(mojom_source, expected_source)
|
||||
|
||||
def testMultipleEnableIfs(self):
|
||||
source = """
|
||||
enum Foo {
|
||||
[EnableIf=red,EnableIf=yellow]
|
||||
kBarValue = 5,
|
||||
};
|
||||
"""
|
||||
definition = parser.Parse(source, "my_file.mojom")
|
||||
self.assertRaises(conditional_features.EnableIfError,
|
||||
conditional_features.RemoveDisabledDefinitions,
|
||||
definition, ENABLED_FEATURES)
|
||||
|
||||
def testMixedEnableIfAndEnableIfNot(self):
|
||||
source = """
|
||||
enum Foo {
|
||||
[EnableIf=red,EnableIfNot=yellow]
|
||||
kBarValue = 5,
|
||||
};
|
||||
"""
|
||||
definition = parser.Parse(source, "my_file.mojom")
|
||||
self.assertRaises(conditional_features.EnableIfError,
|
||||
conditional_features.RemoveDisabledDefinitions,
|
||||
definition, ENABLED_FEATURES)
|
||||
|
||||
def testMultipleEnableIfNots(self):
|
||||
source = """
|
||||
enum Foo {
|
||||
[EnableIfNot=red,EnableIfNot=yellow]
|
||||
kBarValue = 5,
|
||||
};
|
||||
"""
|
||||
definition = parser.Parse(source, "my_file.mojom")
|
||||
self.assertRaises(conditional_features.EnableIfError,
|
||||
conditional_features.RemoveDisabledDefinitions,
|
||||
definition, ENABLED_FEATURES)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
@@ -0,0 +1,249 @@
|
||||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
from mojom import fileutil
|
||||
from mojom.error import Error
|
||||
|
||||
fileutil.AddLocalRepoThirdPartyDirToModulePath()
|
||||
from ply.lex import TOKEN
|
||||
|
||||
|
||||
class LexError(Error):
|
||||
"""Class for errors from the lexer."""
|
||||
|
||||
def __init__(self, filename, message, lineno):
|
||||
Error.__init__(self, filename, message, lineno=lineno)
|
||||
|
||||
|
||||
# We have methods which look like they could be functions:
|
||||
# pylint: disable=R0201
|
||||
class Lexer:
|
||||
def __init__(self, filename):
|
||||
self.filename = filename
|
||||
|
||||
######################-- PRIVATE --######################
|
||||
|
||||
##
|
||||
## Internal auxiliary methods
|
||||
##
|
||||
def _error(self, msg, token):
|
||||
raise LexError(self.filename, msg, token.lineno)
|
||||
|
||||
##
|
||||
## Reserved keywords
|
||||
##
|
||||
keywords = (
|
||||
'HANDLE',
|
||||
'IMPORT',
|
||||
'MODULE',
|
||||
'STRUCT',
|
||||
'UNION',
|
||||
'INTERFACE',
|
||||
'ENUM',
|
||||
'CONST',
|
||||
'TRUE',
|
||||
'FALSE',
|
||||
'DEFAULT',
|
||||
'ARRAY',
|
||||
'MAP',
|
||||
'ASSOCIATED',
|
||||
'PENDING_REMOTE',
|
||||
'PENDING_RECEIVER',
|
||||
'PENDING_ASSOCIATED_REMOTE',
|
||||
'PENDING_ASSOCIATED_RECEIVER',
|
||||
'FEATURE',
|
||||
)
|
||||
|
||||
keyword_map = {}
|
||||
for keyword in keywords:
|
||||
keyword_map[keyword.lower()] = keyword
|
||||
|
||||
##
|
||||
## All the tokens recognized by the lexer
|
||||
##
|
||||
tokens = keywords + (
|
||||
# Identifiers
|
||||
'NAME',
|
||||
|
||||
# Constants
|
||||
'ORDINAL',
|
||||
'INT_CONST_DEC',
|
||||
'INT_CONST_HEX',
|
||||
'FLOAT_CONST',
|
||||
|
||||
# String literals
|
||||
'STRING_LITERAL',
|
||||
|
||||
# Operators
|
||||
'MINUS',
|
||||
'PLUS',
|
||||
'QSTN',
|
||||
|
||||
# Assignment
|
||||
'EQUALS',
|
||||
|
||||
# Request / response
|
||||
'RESPONSE',
|
||||
|
||||
# Delimiters
|
||||
'LPAREN',
|
||||
'RPAREN', # ( )
|
||||
'LBRACKET',
|
||||
'RBRACKET', # [ ]
|
||||
'LBRACE',
|
||||
'RBRACE', # { }
|
||||
'LANGLE',
|
||||
'RANGLE', # < >
|
||||
'SEMI', # ;
|
||||
'COMMA',
|
||||
'DOT' # , .
|
||||
)
|
||||
|
||||
##
|
||||
## Regexes for use in tokens
|
||||
##
|
||||
|
||||
# valid C identifiers (K&R2: A.2.3)
|
||||
identifier = r'[a-zA-Z_][0-9a-zA-Z_]*'
|
||||
|
||||
hex_prefix = '0[xX]'
|
||||
hex_digits = '[0-9a-fA-F]+'
|
||||
|
||||
# integer constants (K&R2: A.2.5.1)
|
||||
decimal_constant = '0|([1-9][0-9]*)'
|
||||
hex_constant = hex_prefix + hex_digits
|
||||
# Don't allow octal constants (even invalid octal).
|
||||
octal_constant_disallowed = '0[0-9]+'
|
||||
|
||||
# character constants (K&R2: A.2.5.2)
|
||||
# Note: a-zA-Z and '.-~^_!=&;,' are allowed as escape chars to support #line
|
||||
# directives with Windows paths as filenames (..\..\dir\file)
|
||||
# For the same reason, decimal_escape allows all digit sequences. We want to
|
||||
# parse all correct code, even if it means to sometimes parse incorrect
|
||||
# code.
|
||||
#
|
||||
simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])"""
|
||||
decimal_escape = r"""(\d+)"""
|
||||
hex_escape = r"""(x[0-9a-fA-F]+)"""
|
||||
bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-7])"""
|
||||
|
||||
escape_sequence = \
|
||||
r"""(\\("""+simple_escape+'|'+decimal_escape+'|'+hex_escape+'))'
|
||||
|
||||
# string literals (K&R2: A.2.6)
|
||||
string_char = r"""([^"\\\n]|""" + escape_sequence + ')'
|
||||
string_literal = '"' + string_char + '*"'
|
||||
bad_string_literal = '"' + string_char + '*' + bad_escape + string_char + '*"'
|
||||
|
||||
# floating constants (K&R2: A.2.5.3)
|
||||
exponent_part = r"""([eE][-+]?[0-9]+)"""
|
||||
fractional_constant = r"""([0-9]*\.[0-9]+)|([0-9]+\.)"""
|
||||
floating_constant = \
|
||||
'(((('+fractional_constant+')'+ \
|
||||
exponent_part+'?)|([0-9]+'+exponent_part+')))'
|
||||
|
||||
# Ordinals
|
||||
ordinal = r'@[0-9]+'
|
||||
missing_ordinal_value = r'@'
|
||||
# Don't allow ordinal values in octal (even invalid octal, like 09) or
|
||||
# hexadecimal.
|
||||
octal_or_hex_ordinal_disallowed = (
|
||||
r'@((0[0-9]+)|(' + hex_prefix + hex_digits + '))')
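# Illustrative examples (not part of the original file) of how the ordinal
# patterns above classify input:
#   "@7"    -> ORDINAL
#   "@"     -> LexError: "Missing ordinal value"
#   "@09"   -> LexError: "Octal and hexadecimal ordinal values not allowed"
#   "@0x1f" -> LexError: "Octal and hexadecimal ordinal values not allowed"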
|
||||
##
|
||||
## Rules for the normal state
|
||||
##
|
||||
t_ignore = ' \t\r'
|
||||
|
||||
# Newlines
|
||||
def t_NEWLINE(self, t):
|
||||
r'\n+'
|
||||
t.lexer.lineno += len(t.value)
|
||||
|
||||
# Operators
|
||||
t_MINUS = r'-'
|
||||
t_PLUS = r'\+'
|
||||
t_QSTN = r'\?'
|
||||
|
||||
# =
|
||||
t_EQUALS = r'='
|
||||
|
||||
# =>
|
||||
t_RESPONSE = r'=>'
|
||||
|
||||
# Delimiters
|
||||
t_LPAREN = r'\('
|
||||
t_RPAREN = r'\)'
|
||||
t_LBRACKET = r'\['
|
||||
t_RBRACKET = r'\]'
|
||||
t_LBRACE = r'\{'
|
||||
t_RBRACE = r'\}'
|
||||
t_LANGLE = r'<'
|
||||
t_RANGLE = r'>'
|
||||
t_COMMA = r','
|
||||
t_DOT = r'\.'
|
||||
t_SEMI = r';'
|
||||
|
||||
t_STRING_LITERAL = string_literal
|
||||
|
||||
# The following floating and integer constants are defined as
|
||||
# functions to impose a strict order (otherwise, decimal
|
||||
# is placed before the others because its regex is longer,
|
||||
# and this is bad)
|
||||
#
|
||||
@TOKEN(floating_constant)
|
||||
def t_FLOAT_CONST(self, t):
|
||||
return t
|
||||
|
||||
@TOKEN(hex_constant)
|
||||
def t_INT_CONST_HEX(self, t):
|
||||
return t
|
||||
|
||||
@TOKEN(octal_constant_disallowed)
|
||||
def t_OCTAL_CONSTANT_DISALLOWED(self, t):
|
||||
msg = "Octal values not allowed"
|
||||
self._error(msg, t)
|
||||
|
||||
@TOKEN(decimal_constant)
|
||||
def t_INT_CONST_DEC(self, t):
|
||||
return t
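# Illustrative example (not part of the original file) of why the ordering
# above matters: "123.456" must lex as a single FLOAT_CONST token rather than
# INT_CONST_DEC "123" followed by DOT and "456".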
|
||||
# unmatched string literals are caught by the preprocessor
|
||||
|
||||
@TOKEN(bad_string_literal)
|
||||
def t_BAD_STRING_LITERAL(self, t):
|
||||
msg = "String contains invalid escape code"
|
||||
self._error(msg, t)
|
||||
|
||||
# Handle ordinal-related tokens in the right order:
|
||||
@TOKEN(octal_or_hex_ordinal_disallowed)
|
||||
def t_OCTAL_OR_HEX_ORDINAL_DISALLOWED(self, t):
|
||||
msg = "Octal and hexadecimal ordinal values not allowed"
|
||||
self._error(msg, t)
|
||||
|
||||
@TOKEN(ordinal)
|
||||
def t_ORDINAL(self, t):
|
||||
return t
|
||||
|
||||
@TOKEN(missing_ordinal_value)
|
||||
def t_BAD_ORDINAL(self, t):
|
||||
msg = "Missing ordinal value"
|
||||
self._error(msg, t)
|
||||
|
||||
@TOKEN(identifier)
|
||||
def t_NAME(self, t):
|
||||
t.type = self.keyword_map.get(t.value, "NAME")
|
||||
return t
|
||||
|
||||
# Ignore C and C++ style comments
|
||||
def t_COMMENT(self, t):
|
||||
r'(/\*(.|\n)*?\*/)|(//.*(\n[ \t]*//.*)*)'
|
||||
t.lexer.lineno += t.value.count("\n")
|
||||
|
||||
def t_error(self, t):
|
||||
msg = "Illegal character %s" % repr(t.value[0])
|
||||
self._error(msg, t)
|
||||
@@ -0,0 +1,194 @@
|
||||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import importlib.util
|
||||
import os.path
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
def _GetDirAbove(dirname):
|
||||
"""Returns the directory "above" this file containing |dirname| (which must
|
||||
also be "above" this file)."""
|
||||
path = os.path.abspath(__file__)
|
||||
while True:
|
||||
path, tail = os.path.split(path)
|
||||
assert tail
|
||||
if tail == dirname:
|
||||
return path
|
||||
|
||||
sys.path.insert(1, os.path.join(_GetDirAbove("mojo"), "third_party"))
|
||||
from ply import lex
|
||||
|
||||
try:
|
||||
importlib.util.find_spec("mojom")
|
||||
except ImportError:
|
||||
sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
|
||||
import mojom.parse.lexer
|
||||
|
||||
# This (monkey-patching LexToken to make comparison value-based) is evil, but
|
||||
# we'll do it anyway. (I'm pretty sure ply's lexer never cares about comparing
|
||||
# for object identity.)
|
||||
def _LexTokenEq(self, other):
|
||||
return self.type == other.type and self.value == other.value and \
|
||||
self.lineno == other.lineno and self.lexpos == other.lexpos
|
||||
|
||||
|
||||
setattr(lex.LexToken, '__eq__', _LexTokenEq)
|
||||
|
||||
|
||||
def _MakeLexToken(token_type, value, lineno=1, lexpos=0):
|
||||
"""Makes a LexToken with the given parameters. (Note that lineno is 1-based,
|
||||
but lexpos is 0-based.)"""
|
||||
rv = lex.LexToken()
|
||||
rv.type, rv.value, rv.lineno, rv.lexpos = token_type, value, lineno, lexpos
|
||||
return rv
|
||||
|
||||
|
||||
def _MakeLexTokenForKeyword(keyword, **kwargs):
|
||||
"""Makes a LexToken for the given keyword."""
|
||||
return _MakeLexToken(keyword.upper(), keyword.lower(), **kwargs)
|
||||
|
||||
|
||||
class LexerTest(unittest.TestCase):
|
||||
"""Tests |mojom.parse.lexer.Lexer|."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
unittest.TestCase.__init__(self, *args, **kwargs)
|
||||
# Clone all lexer instances from this one, since making a lexer is slow.
|
||||
self._zygote_lexer = lex.lex(mojom.parse.lexer.Lexer("my_file.mojom"))
|
||||
|
||||
def testValidKeywords(self):
|
||||
"""Tests valid keywords."""
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("handle"), _MakeLexTokenForKeyword("handle"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("import"), _MakeLexTokenForKeyword("import"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("module"), _MakeLexTokenForKeyword("module"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("struct"), _MakeLexTokenForKeyword("struct"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("union"), _MakeLexTokenForKeyword("union"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("interface"),
|
||||
_MakeLexTokenForKeyword("interface"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("enum"), _MakeLexTokenForKeyword("enum"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("const"), _MakeLexTokenForKeyword("const"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("true"), _MakeLexTokenForKeyword("true"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("false"), _MakeLexTokenForKeyword("false"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("default"),
|
||||
_MakeLexTokenForKeyword("default"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("array"), _MakeLexTokenForKeyword("array"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("map"), _MakeLexTokenForKeyword("map"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("associated"),
|
||||
_MakeLexTokenForKeyword("associated"))
|
||||
|
||||
def testValidIdentifiers(self):
|
||||
"""Tests identifiers."""
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("abcd"), _MakeLexToken("NAME", "abcd"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("AbC_d012_"),
|
||||
_MakeLexToken("NAME", "AbC_d012_"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("_0123"), _MakeLexToken("NAME", "_0123"))
|
||||
|
||||
def testInvalidIdentifiers(self):
|
||||
with self.assertRaisesRegexp(
|
||||
mojom.parse.lexer.LexError,
|
||||
r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
|
||||
self._TokensForInput("$abc")
|
||||
with self.assertRaisesRegexp(
|
||||
mojom.parse.lexer.LexError,
|
||||
r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
|
||||
self._TokensForInput("a$bc")
|
||||
|
||||
def testDecimalIntegerConstants(self):
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("0"), _MakeLexToken("INT_CONST_DEC", "0"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("1"), _MakeLexToken("INT_CONST_DEC", "1"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("123"), _MakeLexToken("INT_CONST_DEC", "123"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("10"), _MakeLexToken("INT_CONST_DEC", "10"))
|
||||
|
||||
def testValidTokens(self):
|
||||
"""Tests valid tokens (which aren't tested elsewhere)."""
|
||||
# Keywords tested in |testValidKeywords|.
|
||||
# NAME tested in |testValidIdentifiers|.
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("@123"), _MakeLexToken("ORDINAL", "@123"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("456"), _MakeLexToken("INT_CONST_DEC", "456"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("0x01aB2eF3"),
|
||||
_MakeLexToken("INT_CONST_HEX", "0x01aB2eF3"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("123.456"),
|
||||
_MakeLexToken("FLOAT_CONST", "123.456"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("\"hello\""),
|
||||
_MakeLexToken("STRING_LITERAL", "\"hello\""))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("+"), _MakeLexToken("PLUS", "+"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("-"), _MakeLexToken("MINUS", "-"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("?"), _MakeLexToken("QSTN", "?"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("="), _MakeLexToken("EQUALS", "="))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("=>"), _MakeLexToken("RESPONSE", "=>"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("("), _MakeLexToken("LPAREN", "("))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput(")"), _MakeLexToken("RPAREN", ")"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("["), _MakeLexToken("LBRACKET", "["))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("]"), _MakeLexToken("RBRACKET", "]"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("{"), _MakeLexToken("LBRACE", "{"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("}"), _MakeLexToken("RBRACE", "}"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("<"), _MakeLexToken("LANGLE", "<"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput(">"), _MakeLexToken("RANGLE", ">"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput(";"), _MakeLexToken("SEMI", ";"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput(","), _MakeLexToken("COMMA", ","))
|
||||
self.assertEquals(self._SingleTokenForInput("."), _MakeLexToken("DOT", "."))
|
||||
|
||||
def _TokensForInput(self, input_string):
|
||||
"""Gets a list of tokens for the given input string."""
|
||||
lexer = self._zygote_lexer.clone()
|
||||
lexer.input(input_string)
|
||||
rv = []
|
||||
while True:
|
||||
tok = lexer.token()
|
||||
if not tok:
|
||||
return rv
|
||||
rv.append(tok)
|
||||
|
||||
def _SingleTokenForInput(self, input_string):
|
||||
"""Gets the single token for the given input string. (Raises an exception if
|
||||
the input string does not result in exactly one token.)"""
|
||||
toks = self._TokensForInput(input_string)
|
||||
assert len(toks) == 1
|
||||
return toks[0]
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
@@ -0,0 +1,510 @@
|
||||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Generates a syntax tree from a Mojo IDL file."""
|
||||
|
||||
# Breaking parser stanzas is unhelpful so allow longer lines.
|
||||
# pylint: disable=line-too-long
|
||||
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
from mojom import fileutil
|
||||
from mojom.error import Error
|
||||
from mojom.parse import ast
|
||||
from mojom.parse.lexer import Lexer
|
||||
|
||||
fileutil.AddLocalRepoThirdPartyDirToModulePath()
|
||||
from ply import lex
|
||||
from ply import yacc
|
||||
|
||||
_MAX_ORDINAL_VALUE = 0xffffffff
|
||||
_MAX_ARRAY_SIZE = 0xffffffff
|
||||
|
||||
|
||||
class ParseError(Error):
|
||||
"""Class for errors from the parser."""
|
||||
|
||||
def __init__(self, filename, message, lineno=None, snippet=None):
|
||||
Error.__init__(
|
||||
self,
|
||||
filename,
|
||||
message,
|
||||
lineno=lineno,
|
||||
addenda=([snippet] if snippet else None))
|
||||
|
||||
|
||||
# We have methods which look like they could be functions:
|
||||
# pylint: disable=R0201
|
||||
class Parser:
|
||||
def __init__(self, lexer, source, filename):
|
||||
self.tokens = lexer.tokens
|
||||
self.source = source
|
||||
self.filename = filename
|
||||
|
||||
# Names of functions
|
||||
#
|
||||
# In general, we name functions after the left-hand-side of the rule(s) that
|
||||
# they handle. E.g., |p_foo_bar| for a rule |foo_bar : ...|.
|
||||
#
|
||||
# There may be multiple functions handling rules for the same left-hand-side;
|
||||
# then we name the functions |p_foo_bar_N| (for left-hand-side |foo_bar|),
|
||||
# where N is a number (numbered starting from 1). Note that using multiple
|
||||
# functions is actually more efficient than having single functions handle
|
||||
# multiple rules (and, e.g., distinguishing them by examining |len(p)|).
|
||||
#
|
||||
# It's also possible to have a function handling multiple rules with different
|
||||
# left-hand-sides. We do not do this.
|
||||
#
|
||||
# See http://www.dabeaz.com/ply/ply.html#ply_nn25 for more details.
|
||||
|
||||
# TODO(vtl): Get rid of the braces in the module "statement". (Consider
|
||||
# renaming "module" -> "package".) Then we'll be able to have a single rule
|
||||
# for root (by making module "optional").
|
||||
def p_root_1(self, p):
|
||||
"""root : """
|
||||
p[0] = ast.Mojom(None, ast.ImportList(), [])
|
||||
|
||||
def p_root_2(self, p):
|
||||
"""root : root module"""
|
||||
if p[1].module is not None:
|
||||
raise ParseError(
|
||||
self.filename,
|
||||
"Multiple \"module\" statements not allowed:",
|
||||
p[2].lineno,
|
||||
snippet=self._GetSnippet(p[2].lineno))
|
||||
if p[1].import_list.items or p[1].definition_list:
|
||||
raise ParseError(
|
||||
self.filename,
|
||||
"\"module\" statements must precede imports and definitions:",
|
||||
p[2].lineno,
|
||||
snippet=self._GetSnippet(p[2].lineno))
|
||||
p[0] = p[1]
|
||||
p[0].module = p[2]
|
||||
|
||||
def p_root_3(self, p):
|
||||
"""root : root import"""
|
||||
if p[1].definition_list:
|
||||
raise ParseError(
|
||||
self.filename,
|
||||
"\"import\" statements must precede definitions:",
|
||||
p[2].lineno,
|
||||
snippet=self._GetSnippet(p[2].lineno))
|
||||
p[0] = p[1]
|
||||
p[0].import_list.Append(p[2])
|
||||
|
||||
def p_root_4(self, p):
|
||||
"""root : root definition"""
|
||||
p[0] = p[1]
|
||||
p[0].definition_list.append(p[2])
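# Illustrative examples (not part of the original file) of inputs rejected by
# the root rules above:
#
#   import "a.mojom";
#   module foo;        # ParseError: "module" statements must precede
#                      # imports and definitions.
#
#   struct S {};
#   import "a.mojom";  # ParseError: "import" statements must precede
#                      # definitions.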
|
||||
def p_import(self, p):
|
||||
"""import : attribute_section IMPORT STRING_LITERAL SEMI"""
|
||||
# 'eval' the literal to strip the quotes.
|
||||
# TODO(vtl): This eval is dubious. We should unquote/unescape ourselves.
|
||||
p[0] = ast.Import(
|
||||
p[1], eval(p[3]), filename=self.filename, lineno=p.lineno(2))
|
||||
|
||||
def p_module(self, p):
|
||||
"""module : attribute_section MODULE identifier_wrapped SEMI"""
|
||||
p[0] = ast.Module(p[3], p[1], filename=self.filename, lineno=p.lineno(2))
|
||||
|
||||
def p_definition(self, p):
|
||||
"""definition : struct
|
||||
| union
|
||||
| interface
|
||||
| enum
|
||||
| const
|
||||
| feature"""
|
||||
p[0] = p[1]
|
||||
|
||||
def p_attribute_section_1(self, p):
|
||||
"""attribute_section : """
|
||||
p[0] = None
|
||||
|
||||
def p_attribute_section_2(self, p):
|
||||
"""attribute_section : LBRACKET attribute_list RBRACKET"""
|
||||
p[0] = p[2]
|
||||
|
||||
def p_attribute_list_1(self, p):
|
||||
"""attribute_list : """
|
||||
p[0] = ast.AttributeList()
|
||||
|
||||
def p_attribute_list_2(self, p):
|
||||
"""attribute_list : nonempty_attribute_list"""
|
||||
p[0] = p[1]
|
||||
|
||||
def p_nonempty_attribute_list_1(self, p):
|
||||
"""nonempty_attribute_list : attribute"""
|
||||
p[0] = ast.AttributeList(p[1])
|
||||
|
||||
def p_nonempty_attribute_list_2(self, p):
|
||||
"""nonempty_attribute_list : nonempty_attribute_list COMMA attribute"""
|
||||
p[0] = p[1]
|
||||
p[0].Append(p[3])
|
||||
|
||||
def p_attribute_1(self, p):
|
||||
"""attribute : name_wrapped EQUALS identifier_wrapped"""
|
||||
p[0] = ast.Attribute(p[1],
|
||||
p[3][1],
|
||||
filename=self.filename,
|
||||
lineno=p.lineno(1))
|
||||
|
||||
def p_attribute_2(self, p):
|
||||
"""attribute : name_wrapped EQUALS evaled_literal
|
||||
| name_wrapped EQUALS name_wrapped"""
|
||||
p[0] = ast.Attribute(p[1], p[3], filename=self.filename, lineno=p.lineno(1))
|
||||
|
||||
def p_attribute_3(self, p):
|
||||
"""attribute : name_wrapped"""
|
||||
p[0] = ast.Attribute(p[1], True, filename=self.filename, lineno=p.lineno(1))
|
||||
|
||||
  def p_evaled_literal(self, p):
    """evaled_literal : literal"""
    # 'eval' the literal to strip the quotes. Handle keywords "true" and "false"
    # specially since they cannot directly be evaluated to python boolean
    # values.
    if p[1] == "true":
      p[0] = True
    elif p[1] == "false":
      p[0] = False
    else:
      p[0] = eval(p[1])

  def p_struct_1(self, p):
    """struct : attribute_section STRUCT name_wrapped LBRACE struct_body RBRACE SEMI"""
    p[0] = ast.Struct(p[3], p[1], p[5])

  def p_struct_2(self, p):
    """struct : attribute_section STRUCT name_wrapped SEMI"""
    p[0] = ast.Struct(p[3], p[1], None)

  def p_struct_body_1(self, p):
    """struct_body : """
    p[0] = ast.StructBody()

  def p_struct_body_2(self, p):
    """struct_body : struct_body const
                   | struct_body enum
                   | struct_body struct_field"""
    p[0] = p[1]
    p[0].Append(p[2])

  def p_struct_field(self, p):
    """struct_field : attribute_section typename name_wrapped ordinal default SEMI"""
    p[0] = ast.StructField(p[3], p[1], p[4], p[2], p[5])

  def p_feature(self, p):
    """feature : attribute_section FEATURE NAME LBRACE feature_body RBRACE SEMI"""
    p[0] = ast.Feature(p[3], p[1], p[5])

  def p_feature_body_1(self, p):
    """feature_body : """
    p[0] = ast.FeatureBody()

  def p_feature_body_2(self, p):
    """feature_body : feature_body const"""
    p[0] = p[1]
    p[0].Append(p[2])

  def p_union(self, p):
    """union : attribute_section UNION name_wrapped LBRACE union_body RBRACE SEMI"""
    p[0] = ast.Union(p[3], p[1], p[5])

  def p_union_body_1(self, p):
    """union_body : """
    p[0] = ast.UnionBody()

  def p_union_body_2(self, p):
    """union_body : union_body union_field"""
    p[0] = p[1]
    p[1].Append(p[2])

  def p_union_field(self, p):
    """union_field : attribute_section typename name_wrapped ordinal SEMI"""
    p[0] = ast.UnionField(p[3], p[1], p[4], p[2])

  def p_default_1(self, p):
    """default : """
    p[0] = None

  def p_default_2(self, p):
    """default : EQUALS constant"""
    p[0] = p[2]

  def p_interface(self, p):
    """interface : attribute_section INTERFACE name_wrapped LBRACE interface_body RBRACE SEMI"""
    p[0] = ast.Interface(p[3], p[1], p[5])

  def p_interface_body_1(self, p):
    """interface_body : """
    p[0] = ast.InterfaceBody()

  def p_interface_body_2(self, p):
    """interface_body : interface_body const
                      | interface_body enum
                      | interface_body method"""
    p[0] = p[1]
    p[0].Append(p[2])

  def p_response_1(self, p):
    """response : """
    p[0] = None

  def p_response_2(self, p):
    """response : RESPONSE LPAREN parameter_list RPAREN"""
    p[0] = p[3]

  def p_method(self, p):
    """method : attribute_section name_wrapped ordinal LPAREN parameter_list RPAREN response SEMI"""
    p[0] = ast.Method(p[2], p[1], p[3], p[5], p[7])

  def p_parameter_list_1(self, p):
    """parameter_list : """
    p[0] = ast.ParameterList()

  def p_parameter_list_2(self, p):
    """parameter_list : nonempty_parameter_list"""
    p[0] = p[1]

  def p_nonempty_parameter_list_1(self, p):
    """nonempty_parameter_list : parameter"""
    p[0] = ast.ParameterList(p[1])

  def p_nonempty_parameter_list_2(self, p):
    """nonempty_parameter_list : nonempty_parameter_list COMMA parameter"""
    p[0] = p[1]
    p[0].Append(p[3])

  def p_parameter(self, p):
    """parameter : attribute_section typename name_wrapped ordinal"""
    p[0] = ast.Parameter(
        p[3], p[1], p[4], p[2], filename=self.filename, lineno=p.lineno(3))

  def p_typename(self, p):
    """typename : nonnullable_typename QSTN
                | nonnullable_typename"""
    if len(p) == 2:
      p[0] = p[1]
    else:
      p[0] = p[1] + "?"

  def p_nonnullable_typename(self, p):
    """nonnullable_typename : basictypename
                            | array
                            | fixed_array
                            | associative_array"""
    p[0] = p[1]

  def p_basictypename(self, p):
    """basictypename : remotetype
                     | receivertype
                     | associatedremotetype
                     | associatedreceivertype
                     | identifier
                     | ASSOCIATED identifier
                     | handletype"""
    if len(p) == 2:
      p[0] = p[1]
    else:
      p[0] = "asso<" + p[2] + ">"

  def p_remotetype(self, p):
    """remotetype : PENDING_REMOTE LANGLE identifier RANGLE"""
    p[0] = "rmt<%s>" % p[3]

  def p_receivertype(self, p):
    """receivertype : PENDING_RECEIVER LANGLE identifier RANGLE"""
    p[0] = "rcv<%s>" % p[3]

  def p_associatedremotetype(self, p):
    """associatedremotetype : PENDING_ASSOCIATED_REMOTE LANGLE identifier RANGLE"""
    p[0] = "rma<%s>" % p[3]

  def p_associatedreceivertype(self, p):
    """associatedreceivertype : PENDING_ASSOCIATED_RECEIVER LANGLE identifier RANGLE"""
    p[0] = "rca<%s>" % p[3]

  def p_handletype(self, p):
    """handletype : HANDLE
                  | HANDLE LANGLE name_wrapped RANGLE"""
    if len(p) == 2:
      p[0] = p[1]
    else:
      if p[3] not in ('data_pipe_consumer', 'data_pipe_producer',
                      'message_pipe', 'shared_buffer', 'platform'):
        # Note: We don't enable tracking of line numbers for everything, so we
        # can't use |p.lineno(3)|.
        raise ParseError(
            self.filename,
            "Invalid handle type %r:" % p[3],
            lineno=p.lineno(1),
            snippet=self._GetSnippet(p.lineno(1)))
      p[0] = "handle<" + p[3] + ">"

  def p_array(self, p):
    """array : ARRAY LANGLE typename RANGLE"""
    p[0] = p[3] + "[]"

  def p_fixed_array(self, p):
    """fixed_array : ARRAY LANGLE typename COMMA INT_CONST_DEC RANGLE"""
    value = int(p[5])
    if value == 0 or value > _MAX_ARRAY_SIZE:
      raise ParseError(
          self.filename,
          "Fixed array size %d invalid:" % value,
          lineno=p.lineno(5),
          snippet=self._GetSnippet(p.lineno(5)))
    p[0] = p[3] + "[" + p[5] + "]"

  def p_associative_array(self, p):
    """associative_array : MAP LANGLE identifier COMMA typename RANGLE"""
    p[0] = p[5] + "{" + p[3] + "}"

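  # Descriptive note on the rules above: types are encoded as compact strings,
  # e.g. "T[]" for arrays, "T[N]" for fixed arrays, "V{K}" for maps,
  # "handle<kind>" for typed handles, "rmt<I>"/"rcv<I>" for pending
  # remotes/receivers, "rma<I>"/"rca<I>" for their associated variants,
  # "asso<I>" for ASSOCIATED identifiers, and a trailing "?" for nullability.
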
  def p_ordinal_1(self, p):
    """ordinal : """
    p[0] = None

  def p_ordinal_2(self, p):
    """ordinal : ORDINAL"""
    value = int(p[1][1:])
    if value > _MAX_ORDINAL_VALUE:
      raise ParseError(
          self.filename,
          "Ordinal value %d too large:" % value,
          lineno=p.lineno(1),
          snippet=self._GetSnippet(p.lineno(1)))
    p[0] = ast.Ordinal(value, filename=self.filename, lineno=p.lineno(1))

  def p_enum_1(self, p):
    """enum : attribute_section ENUM name_wrapped LBRACE enum_value_list RBRACE SEMI
            | attribute_section ENUM name_wrapped LBRACE \
              nonempty_enum_value_list COMMA RBRACE SEMI"""
    p[0] = ast.Enum(
        p[3], p[1], p[5], filename=self.filename, lineno=p.lineno(2))

  def p_enum_2(self, p):
    """enum : attribute_section ENUM name_wrapped SEMI"""
    p[0] = ast.Enum(
        p[3], p[1], None, filename=self.filename, lineno=p.lineno(2))

  def p_enum_value_list_1(self, p):
    """enum_value_list : """
    p[0] = ast.EnumValueList()

  def p_enum_value_list_2(self, p):
    """enum_value_list : nonempty_enum_value_list"""
    p[0] = p[1]

  def p_nonempty_enum_value_list_1(self, p):
    """nonempty_enum_value_list : enum_value"""
    p[0] = ast.EnumValueList(p[1])

  def p_nonempty_enum_value_list_2(self, p):
    """nonempty_enum_value_list : nonempty_enum_value_list COMMA enum_value"""
    p[0] = p[1]
    p[0].Append(p[3])

  def p_enum_value(self, p):
    """enum_value : attribute_section name_wrapped
                  | attribute_section name_wrapped EQUALS int
                  | attribute_section name_wrapped EQUALS identifier_wrapped"""
    p[0] = ast.EnumValue(
        p[2],
        p[1],
        p[4] if len(p) == 5 else None,
        filename=self.filename,
        lineno=p.lineno(2))

  def p_const(self, p):
    """const : attribute_section CONST typename name_wrapped EQUALS constant SEMI"""
    p[0] = ast.Const(p[4], p[1], p[3], p[6])

  def p_constant(self, p):
    """constant : literal
                | identifier_wrapped"""
    p[0] = p[1]

  def p_identifier_wrapped(self, p):
    """identifier_wrapped : identifier"""
    p[0] = ('IDENTIFIER', p[1])

  # TODO(vtl): Make this produce a "wrapped" identifier (probably as an
  # |ast.Identifier|, to be added) and get rid of identifier_wrapped.
  def p_identifier(self, p):
    """identifier : name_wrapped
                  | name_wrapped DOT identifier"""
    p[0] = ''.join(p[1:])

  # Allow 'feature' to be a name literal not just a keyword.
  def p_name_wrapped(self, p):
    """name_wrapped : NAME
                    | FEATURE"""
    p[0] = p[1]

  def p_literal(self, p):
    """literal : int
               | float
               | TRUE
               | FALSE
               | DEFAULT
               | STRING_LITERAL"""
    p[0] = p[1]

  def p_int(self, p):
    """int : int_const
           | PLUS int_const
           | MINUS int_const"""
    p[0] = ''.join(p[1:])

  def p_int_const(self, p):
    """int_const : INT_CONST_DEC
                 | INT_CONST_HEX"""
    p[0] = p[1]

  def p_float(self, p):
    """float : FLOAT_CONST
             | PLUS FLOAT_CONST
             | MINUS FLOAT_CONST"""
    p[0] = ''.join(p[1:])

  def p_error(self, e):
    if e is None:
      # Unexpected EOF.
      # TODO(vtl): Can we figure out what's missing?
      raise ParseError(self.filename, "Unexpected end of file")

    if e.value == 'feature':
      raise ParseError(self.filename,
                       "`feature` is reserved for a future mojom keyword",
                       lineno=e.lineno,
                       snippet=self._GetSnippet(e.lineno))

    raise ParseError(
        self.filename,
        "Unexpected %r:" % e.value,
        lineno=e.lineno,
        snippet=self._GetSnippet(e.lineno))

  def _GetSnippet(self, lineno):
    return self.source.split('\n')[lineno - 1]


def Parse(source, filename):
  """Parse source file to AST.

  Args:
    source: The source text as a str (Python 2 or 3) or unicode (Python 2).
    filename: The filename that |source| originates from.

  Returns:
    The AST as a mojom.parse.ast.Mojom object.
  """
  lexer = Lexer(filename)
  parser = Parser(lexer, source, filename)

  lex.lex(object=lexer)
  yacc.yacc(module=parser, debug=0, write_tables=0)

  tree = yacc.parse(source)
  return tree
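A minimal usage sketch for the Parse() entry point above, assuming the package
is importable as mojom.parse; the sample .mojom text, the SAMPLE name, and the
Echo interface are illustrative only:

  from mojom.parse import parser

  SAMPLE = """
  module sample.mojom;

  interface Echo {
    Ping(string message) => (string response);
  };
  """

  try:
    # Returns a mojom.parse.ast.Mojom object, per the docstring above.
    tree = parser.Parse(SAMPLE, "sample.mojom")
    print(tree)
  except parser.ParseError as e:
    # ParseError stringifies with the filename, line number and a snippet.
    print(e)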