author     Juergen Bocklage-Ryannel <juergen.bocklage-ryannel@pelagicore.com>  2017-06-27 19:30:16 +0200
committer  Juergen Bocklage-Ryannel <juergen.bocklage-ryannel@pelagicore.com>  2017-06-27 19:30:16 +0200
commit     e033e0a8c6a90acad9069132c09641d52888aa99 (patch)
tree       037cda7c2a13f3f979c56d0f7ce33ec3eaef48ea
parent     2a723b2085c671f973c827893c8fd0bb43acbe24 (diff)
parent     b7ea51d51ee54b3c69fb3d08dfdc023a9e77400a (diff)
Merge branch 'release/1.2' (1.2)
-rw-r--r--  INSTALL.md | 2
-rwxr-xr-x  cli.py | 5
-rw-r--r--  docs/builtin.rst | 8
-rw-r--r--  docs/conf.py | 20
-rw-r--r--  docs/extending.rst | 24
-rw-r--r--  docs/grammar.rst | 31
-rw-r--r--  docs/index.rst | 1
-rw-r--r--  docs/json.rst | 128
-rw-r--r--  examples/interfaces/echo.qface | 27
-rw-r--r--  qface/__about__.py | 3
-rwxr-xr-x  qface/builtin/qtcpp/qtcpp.py | 1
-rw-r--r--  qface/builtin/qtcpp/templates/CMakeLists.txt | 42
-rw-r--r--  qface/builtin/qtcpp/templates/abstractinterface.h | 3
-rw-r--r--  qface/builtin/qtcpp/templates/interface.cpp | 20
-rw-r--r--  qface/builtin/qtcpp/templates/interface.h | 6
-rw-r--r--  qface/builtin/qtcpp/templates/module.cpp | 12
-rw-r--r--  qface/builtin/qtcpp/templates/struct.cpp | 18
-rwxr-xr-x  qface/builtin/qtqml/qtqml.py | 6
-rw-r--r--  qface/builtin/qtqml/templates/AbstractInterface.qml | 30
-rw-r--r--  qface/builtin/qtqml/templates/Interface.qml | 14
-rw-r--r--  qface/builtin/qtqml/templates/Module.qml | 24
-rw-r--r--  qface/builtin/qtqml/templates/copyright.h | 23
-rw-r--r--  qface/builtin/qtqml/templates/module.js | 10
-rw-r--r--  qface/builtin/qtqml/templates/public_qmldir | 2
-rw-r--r--  qface/builtin/schema/log.yaml | 18
-rwxr-xr-x  qface/builtin/schema/schema.py | 55
-rw-r--r--  qface/builtin/schema/templates/module.json | 1
-rw-r--r--  qface/filters.py | 19
-rw-r--r--  qface/generator.py | 18
-rw-r--r--  qface/helper/doc.py | 4
-rw-r--r--  qface/helper/qtcpp.py | 62
-rw-r--r--  qface/helper/qtqml.py | 4
-rw-r--r--  qface/idl/domain.py | 113
-rw-r--r--  qface/idl/listener.py | 11
-rw-r--r--  qface/idl/parser/T.g4 | 15
-rw-r--r--  qface/idl/parser/T.tokens | 66
-rw-r--r--  qface/idl/parser/TLexer.py | 250
-rw-r--r--  qface/idl/parser/TLexer.tokens | 66
-rw-r--r--  qface/idl/parser/TListener.py | 18
-rw-r--r--  qface/idl/parser/TParser.py | 987
-rw-r--r--  qface/idl/parser/TVisitor.py | 10
-rw-r--r--  qface/idl/profile.py | 36
-rw-r--r--  qface/watch.py | 2
-rw-r--r--  requirements.txt | 3
-rw-r--r--  setup.py | 1
-rw-r--r--  tests/in/com.pelagicore.ivi.tuner.qface | 17
-rw-r--r--  tests/test_generator.py | 8
-rw-r--r--  tests/test_json.py | 54
-rw-r--r--  tests/test_parser.py | 28
-rw-r--r--  tests/test_qtcpp_helper.py | 12
50 files changed, 1643 insertions, 695 deletions
diff --git a/INSTALL.md b/INSTALL.md
index 4863afc..5a48911 100644
--- a/INSTALL.md
+++ b/INSTALL.md
@@ -22,7 +22,7 @@ Installs qface as an "editable" package. Means updates on the local git repo are
To install the python dependencies use
cd qface
- pip3 install -r requirements
+ pip3 install -r requirements.txt
pip3 install -e .
For updating the grammar you also need antlr4 (see http://www.antlr.org).
diff --git a/cli.py b/cli.py
index c04cbac..09863a2 100755
--- a/cli.py
+++ b/cli.py
@@ -167,12 +167,13 @@ def uninstall():
@cli.command()
def upload():
+ Path('build').rmtree_p()
dist = Path('dist')
- if dist.exists():
- dist.rmdir_p()
+ dist.rmtree_p()
dist.makedirs_p()
sh('python3 setup.py bdist_wheel')
sh('twine upload dist/*')
+ Path('build').rmtree_p()
@cli.command()
diff --git a/docs/builtin.rst b/docs/builtin.rst
index 94bb0b6..b39746c 100644
--- a/docs/builtin.rst
+++ b/docs/builtin.rst
@@ -1,6 +1,14 @@
Builtin Generators
==================
+QFace contains several built-in code generators. Their purpose is mainly to showcase how to write a code generator
+with QFace. They are nevertheless working, complete examples of general-purpose generators.
+
+* The QtCPP generator generates a Qt C++ plugin with a QML API ready to be used in your project.
+* The QtQml generator generates a QML-only API which is ready to be used.
+
+From the QML user interface perspective, both provide the same API and are interchangeable.
+
.. toctree::
:maxdepth: 1
diff --git a/docs/conf.py b/docs/conf.py
index e7aa601..53e111b 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -20,6 +20,16 @@
import os
import sys
+__title__ = ''
+__version__ = ''
+__summary__ = ''
+__uri__ = ''
+__author__ = ''
+__author_email__ = ''
+__copyright__ = ''
+exec(open('../qface/__about__.py').read())
+
+
here = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(here, '..'))
@@ -60,18 +70,18 @@ source_suffix = '.rst'
master_doc = 'index'
# General information about the project.
-project = 'QFace'
-copyright = '2016, JRyannel'
-author = 'JRyannel'
+project = __title__
+copyright = __copyright__
+author = __author__
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
-version = '1.0.0'
+version = __version__
# The full version, including alpha/beta/rc tags.
-release = '1.0.0a5'
+release = __version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/docs/extending.rst b/docs/extending.rst
index 74aa761..df751c2 100644
--- a/docs/extending.rst
+++ b/docs/extending.rst
@@ -1,10 +1,12 @@
-===============
+***************
Extending QFace
-===============
+***************
-QFace is easy to use and easy to extend. Your generator is just a small python script using the qface library.
+QFace is easy to use and easy to extend. Your generator is just a small python
+script using the qface library.
-The script iterates over the domain model and writes files using a template language.
+The script iterates over the domain model and writes files using a template
+language.
See template engine documentation:
@@ -20,15 +22,20 @@ See template engine documentation:
# parse the interface files
system = FileSystem.parse(input)
# setup the generator
- generator = Generator(searchpath='templates')
+ generator = Generator(search_path='templates')
# create a context object
ctx = {'output': output, 'system': system}
# apply the context on the template and write the output to file
generator.write('{{output}}/modules.csv', 'modules.csv', ctx)
-This script reads the input directory returns a system object from the domain model. This is used as the root object for the code generation inside the template.
+This script reads the input directory and returns a system object from the domain
+model. This is used as the root object for the code generation inside the
+template. The context object is applied to the file path as well as to the named
+template document. The output of the template is then written to the given file
+path.
-Below is a simple template which geenrates a CSV document of all interfaces, structs and enums.
+Below is a simple template which generates a CSV document of all interfaces,
+structs and enums.
.. code-block:: jinja
@@ -44,4 +51,5 @@ Below is a simple template which geenrates a CSV document of all interfaces, str
{% endfor -%}
{% endfor %}
-The template iterates over the domain objects and generates text which is written into a file. The file name is also adjustable using the same template language.
+The template code iterates over the domain objects and generates text using a
+mixture of output blocks ``{{ }}`` and control blocks ``{% %}``.
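
As a minimal end-to-end sketch of the generator pattern described in the extending docs above (the 'interfaces' and 'templates' directories and the 'modules.csv' template are hypothetical):

    from qface.generator import FileSystem, Generator

    # parse all .qface files found in the input directory into a domain model
    system = FileSystem.parse('interfaces')          # hypothetical input directory
    # templates are looked up relative to the given search path
    generator = Generator(search_path='templates')   # hypothetical template directory
    ctx = {'output': 'out', 'system': system}
    # the context is applied to the target path and to the named template document
    generator.write('{{output}}/modules.csv', 'modules.csv', ctx)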
diff --git a/docs/grammar.rst b/docs/grammar.rst
index e76912d..ac6bf02 100644
--- a/docs/grammar.rst
+++ b/docs/grammar.rst
@@ -135,3 +135,34 @@ Below is an example of a QFace file.
common.TimeStamp modified;
}
+
+Annotations
+===========
+
+Annotations allow the writer to add metadata to an interface document. They use the `@` notation followed by valid one-line YAML content.
+
+.. code-block:: js
+
+ @singleton: true
+ @config: { port: 1234 }
+ interface Echo {
+ }
+
+More information on annotations can be found in the annotations chapter.
+
+Comments
+========
+
+Comments follow the JavaDoc convention: an `@` sign prefixes the keyword, followed by the required parameters.
+
+.. code-block:: java
+
+ /**
+ * @brief The last echo message
+ */
+
+Currently only the brief, description, see and deprecated doc tags are supported.
+
+The QtCPP built-in generator generates valid Qt documentation from these comments.
+
+
diff --git a/docs/index.rst b/docs/index.rst
index c83de6d..6e7ae75 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -13,6 +13,7 @@ QFace is a flexible Qt API generator. It uses a common IDL format (called QFace
grammar
annotations
yaml
+ json
domain
extending
api
diff --git a/docs/json.rst b/docs/json.rst
new file mode 100644
index 0000000..ca7d9e5
--- /dev/null
+++ b/docs/json.rst
@@ -0,0 +1,128 @@
+****************
+JSON Meta Export
+****************
+
+QFace allows you to easily export the domain model as a JSON document. This makes
+the domain information available for use with other tooling.
+
+Inside your generator you first need to register the filter:
+
+.. code-block:: python
+
+ from qface.filters import jsonify
+
+
+ generator = Generator(search_path=search_path)
+ generator.register_filter('jsonify', jsonify)
+
+Then inside the template you can transform any symbol into a JSON string using the ``jsonify`` filter.
+
+.. code-block:: jinja
+
+ {{module|jsonify}}
+
+Depending on your needs you might want to create a JSON document from the whole system, from each interface, or just
+from a single enumeration. The portion of the domain model exported to JSON depends on your custom code generator and on which domain element you apply the ``jsonify`` filter to.
+
+JSON Format
+===========
+
+Taking the example QFace document
+
+.. code-block:: thrift
+
+ module org.example 1.0;
+
+ interface Echo {
+ readonly string currentMessage;
+ void echo(Message message);
+ }
+
+ struct Message {
+ string text;
+ }
+
+ enum Status {
+ Null,
+ Loading,
+ Ready,
+ Error
+ }
+
+
+The following JSON output is generated
+
+.. code-block:: json
+
+ {
+ "name": "org.example",
+ "version": "1.0",
+ "interfaces": [
+ {
+ "name": "Echo",
+ "properties": [
+ {
+ "name": "currentMessage",
+ "type": {
+ "name": "string",
+ "primitive": true
+ },
+ "readonly": true
+ }
+ ],
+ "operations": [
+ {
+ "name": "echo",
+ "parameters": [
+ {
+ "name": "message",
+ "type": {
+ "name": "Message",
+ "complex": true
+ }
+ }
+ ]
+ }
+ ],
+ "signals": []
+ }
+ ],
+ "structs": [
+ {
+ "name": "Message",
+ "fields": [
+ {
+ "name": "text",
+ "type": {
+ "name": "string",
+ "primitive": true
+ }
+ }
+ ]
+ }
+ ],
+ "enums": [
+ {
+ "name": "Status",
+ "enum": true,
+ "members": [
+ {
+ "name": "Null",
+ "value": 0
+ },
+ {
+ "name": "Loading",
+ "value": 1
+ },
+ {
+ "name": "Ready",
+ "value": 2
+ },
+ {
+ "name": "Error",
+ "value": 3
+ }
+ ]
+ }
+ ]
+ }
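
As a minimal sketch of consuming such an export with other tooling (the file name 'org.example.json' is hypothetical):

    import json

    # load a module document produced by the ``jsonify`` filter
    with open('org.example.json') as f:
        module = json.load(f)

    # walk the exported structure, e.g. list every property per interface
    for interface in module.get('interfaces', []):
        names = [prop['name'] for prop in interface.get('properties', [])]
        print(interface['name'], names)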
diff --git a/examples/interfaces/echo.qface b/examples/interfaces/echo.qface
index ed58685..95ed42f 100644
--- a/examples/interfaces/echo.qface
+++ b/examples/interfaces/echo.qface
@@ -1,19 +1,38 @@
module org.example 1.0;
/**
- * Provide a simple echo service.
+ * @brief Provide a simple echo service.
*/
interface Echo {
/**
- * The last echo message.
+ * @brief The last echo message.
*/
- string currentMessage;
+ readonly string currentMessage;
/**
- * Returns the passed in message
+ * @brief Returns the passed in message
*/
void echo(Message message);
}
+/**
+ * @brief A message structure to send and receive
+ */
struct Message {
+ /**
+ * @brief the text to send
+ */
string text;
}
+
+/**
+ * @brief Status
+ */
+enum Status {
+ /**
+ * @brief Nothing loaded yet
+ */
+ Null,
+ Loading,
+ Ready,
+ Error
+}
diff --git a/qface/__about__.py b/qface/__about__.py
index f3a9be8..de7d0cf 100644
--- a/qface/__about__.py
+++ b/qface/__about__.py
@@ -9,6 +9,7 @@ except NameError:
__title__ = "qface"
__summary__ = "A generator framework based on a common modern IDL"
__uri__ = "https://pelagicore.github.io/qface/"
-__version__ = "1.1"
+__version__ = "1.2"
__author__ = "JRyannel"
__author_email__ = "qface-generator@googlegroups.com"
+__copyright__ = "2017 Pelagicore"
diff --git a/qface/builtin/qtcpp/qtcpp.py b/qface/builtin/qtcpp/qtcpp.py
index 283b8f6..3a4af2e 100755
--- a/qface/builtin/qtcpp/qtcpp.py
+++ b/qface/builtin/qtcpp/qtcpp.py
@@ -34,6 +34,7 @@ def run(src, dst):
ctx.update({'module': module})
dst = generator.apply('{{dst}}/{{module|lower|replace(".", "-")}}', ctx)
generator.destination = dst
+ generator.write('CMakeLists.txt', 'CMakeLists.txt', ctx)
generator.write('qmldir', 'qmldir', ctx, preserve=True)
generator.write('plugin.cpp', 'plugin.cpp', ctx, preserve=True)
generator.write('plugin.h', 'plugin.h', ctx, preserve=True)
diff --git a/qface/builtin/qtcpp/templates/CMakeLists.txt b/qface/builtin/qtcpp/templates/CMakeLists.txt
new file mode 100644
index 0000000..bb7fdb9
--- /dev/null
+++ b/qface/builtin/qtcpp/templates/CMakeLists.txt
@@ -0,0 +1,42 @@
+{% set module_name = module.name.lower().replace(".", "_") %}
+{% set module_path = module.name_parts|join('/') %}
+
+project({{module_name}})
+
+cmake_minimum_required(VERSION 3.1)
+
+set(CMAKE_CXX_STANDARD 11)
+set(CMAKE_AUTOMOC ON)
+
+set(INSTALL_PATH "${CMAKE_BINARY_DIR}/imports" CACHE STRING "Path where the plugins are deployed")
+
+find_package(Qt5Core REQUIRED)
+find_package(Qt5Qml REQUIRED)
+
+set (SOURCES
+ generated/qml{{module.module_name|lower}}module.cpp
+{% for interface in module.interfaces %}
+ generated/qmlabstract{{interface|lower}}.cpp
+{% endfor %}
+{% for struct in module.structs %}
+ generated/qml{{struct|lower}}.cpp
+ generated/qml{{struct|lower}}model.cpp
+{% endfor %}
+ generated/qmlvariantmodel.cpp
+{% for interface in module.interfaces %}
+ qml{{interface|lower}}.cpp
+{% endfor %}
+ plugin.cpp
+)
+
+add_library({{module_name}}_plugin SHARED ${SOURCES})
+
+set(OUTPUT_PATH ${INSTALL_PATH}/{{module_path}})
+
+set_target_properties({{module_name}}_plugin PROPERTIES
+ LIBRARY_OUTPUT_DIRECTORY ${OUTPUT_PATH}
+ RUNTIME_OUTPUT_DIRECTORY ${OUTPUT_PATH}
+)
+
+target_link_libraries({{module_name}}_plugin PUBLIC Qt5::Core Qt5::Qml)
+configure_file(${CMAKE_SOURCE_DIR}/qmldir ${OUTPUT_PATH}/qmldir COPYONLY)
diff --git a/qface/builtin/qtcpp/templates/abstractinterface.h b/qface/builtin/qtcpp/templates/abstractinterface.h
index 5335632..17be77a 100644
--- a/qface/builtin/qtcpp/templates/abstractinterface.h
+++ b/qface/builtin/qtcpp/templates/abstractinterface.h
@@ -15,8 +15,7 @@ class {{class}} : public QObject
{
Q_OBJECT
{% for property in interface.properties %}
- Q_PROPERTY({{property|returnType}} {{property}} READ {{property}} {% if not property.is_readonly %}
-WRITE set{{property|upperfirst}} {% endif %}NOTIFY {{property}}Changed)
+ Q_PROPERTY({{property|returnType}} {{property}} READ {{property}} {% if not property.readonly %}WRITE set{{property|upperfirst}} {% endif %}{% if not property.const %}NOTIFY {{property}}Changed{% endif %})
{% endfor %}
public:
diff --git a/qface/builtin/qtcpp/templates/interface.cpp b/qface/builtin/qtcpp/templates/interface.cpp
index cc2264a..7aab4ae 100644
--- a/qface/builtin/qtcpp/templates/interface.cpp
+++ b/qface/builtin/qtcpp/templates/interface.cpp
@@ -1,20 +1,36 @@
{# Copyright (c) Pelagicore AB 2016 #}
{% set class = 'Qml{0}'.format(interface) %}
/*
- * This is a preserved file and can be edited.
- * All changes will not be override.
+ * This is a preserved file.
+ * Changes will not be overridden by the generator.
+ * To reset the file you need to delete it first.
*/
#include "{{class|lower}}.h"
#include <QtQml>
+
+/*!
+ \inqmlmodule {{module}} 1.0
+ */
+
QObject* {{class|lower}}_singletontype_provider(QQmlEngine*, QJSEngine*)
{
return new {{class}}();
}
+/*!
+ \qmltype {{interface}}
+ \inqmlmodule {{module}}
+{% with doc = interface.comment|parse_doc %}
+ \brief {{doc.brief}}
+
+ {{doc.description}}
+{% endwith %}
+*/
+
{{interface.comment}}
{{class}}::{{class}}(QObject *parent)
: QmlAbstract{{interface}}(parent)
diff --git a/qface/builtin/qtcpp/templates/interface.h b/qface/builtin/qtcpp/templates/interface.h
index 6ada967..c1722f2 100644
--- a/qface/builtin/qtcpp/templates/interface.h
+++ b/qface/builtin/qtcpp/templates/interface.h
@@ -1,11 +1,11 @@
{# Copyright (c) Pelagicore AB 2016 #}
{% set class = 'Qml{0}'.format(interface) %}
/*
- * This is a preserved file and can be edited.
- * All changes will not be override.
+ * This is a preserved file.
+ * Changes will not be overridden by the generator.
+ * To reset the file you need to delete it first.
*/
-
#pragma once
#include <QtCore>
diff --git a/qface/builtin/qtcpp/templates/module.cpp b/qface/builtin/qtcpp/templates/module.cpp
index 1289471..af2d2f1 100644
--- a/qface/builtin/qtcpp/templates/module.cpp
+++ b/qface/builtin/qtcpp/templates/module.cpp
@@ -68,3 +68,15 @@ void {{class}}::registerQmlTypes(const QString& uri, int majorVersion, int minor
{% endfor %}
qmlRegisterSingletonType<{{class}}>(uri.toLatin1(), majorVersion, minorVersion, "{{module.module_name}}Module", {{class|lower}}_singletontype_provider);
}
+
+
+{% for enum in module.enums %}
+/**
+ * \qmlproperty enumeration {{module.module_name}}Module::{{enum}}
+ * \list
+ {% for member in enum.members %}
+ * \li {{member}}
+ {% endfor %}
+ * \endlist
+ */
+{% endfor %} \ No newline at end of file
diff --git a/qface/builtin/qtcpp/templates/struct.cpp b/qface/builtin/qtcpp/templates/struct.cpp
index 9d3da67..86f358c 100644
--- a/qface/builtin/qtcpp/templates/struct.cpp
+++ b/qface/builtin/qtcpp/templates/struct.cpp
@@ -9,9 +9,6 @@
#include "{{class|lower}}.h"
-
-// Shared Data
-
class {{class}}Data : public QSharedData
{
public:
@@ -44,6 +41,11 @@ public:
{% with doc = struct.comment|parse_doc %}
\brief {{doc.brief}}
+ \note This is a non-creatable data object
+
+ Use the module factory method \l {{module.module_name}}Module::create{{struct}} to create
+ an instance.
+
{{doc.description}}
{% endwith %}
*/
@@ -63,6 +65,16 @@ public:
}
{% for field in struct.fields %}
+/*!
+ \qmlproperty {{field.type}} {{struct}}::{{field}} (field)
+{% with doc = field.comment|parse_doc %}
+ \brief {{doc.brief}}
+
+ \note A non-notifiable property
+
+ {{doc.description}}
+{% endwith %}
+*/
void {{class}}::set{{field|upperfirst}}({{field|parameterType}})
{
d->{{field}} = {{field}};
diff --git a/qface/builtin/qtqml/qtqml.py b/qface/builtin/qtqml/qtqml.py
index 1d00d74..57e3996 100755
--- a/qface/builtin/qtqml/qtqml.py
+++ b/qface/builtin/qtqml/qtqml.py
@@ -22,7 +22,11 @@ log = logging.getLogger(__file__)
def run(src, dst):
log.debug('run {0} {1}'.format(src, dst))
system = FileSystem.parse(src)
- generator = Generator(search_path=here / 'templates')
+ search_path = [
+ Path('_templates').abspath(),
+ Path(here / 'templates').abspath()
+ ]
+ generator = Generator(search_path=search_path)
generator.register_filter('defaultValue', Filters.defaultValue)
generator.register_filter('propertyType', Filters.propertyType)
ctx = {'dst': dst}
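
The list form of the search path lets a project-local '_templates' directory shadow the generator's built-in templates, since Jinja's FileSystemLoader tries the paths in order. A minimal sketch of the same idea outside the built-in generator (paths are hypothetical):

    from path import Path
    from qface.generator import Generator

    here = Path(__file__).dirname()
    search_path = [
        Path('_templates').abspath(),       # project overrides are found first
        Path(here / 'templates').abspath()  # built-in templates are the fallback
    ]
    generator = Generator(search_path=search_path)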
diff --git a/qface/builtin/qtqml/templates/AbstractInterface.qml b/qface/builtin/qtqml/templates/AbstractInterface.qml
index b28d6b7..cac0847 100644
--- a/qface/builtin/qtqml/templates/AbstractInterface.qml
+++ b/qface/builtin/qtqml/templates/AbstractInterface.qml
@@ -1,3 +1,11 @@
+{% include 'copyright.h' %}
+
+
+/*
+ * This is an auto-generated file.
+ * Do not edit! All changes made to it will be lost.
+ */
+
import QtQml 2.2
import QtQml.Models 2.2
@@ -6,12 +14,26 @@ import "."
{{interface.comment}}
QtObject {
id: root
+
{% for property in interface.properties %}
- {{property.comment}}
- {%+ if property.is_readonly %}readonly {% endif %}property {{property|propertyType}} {{property}} : {{property|defaultValue}}
+{% if property.readonly %}
+{% if property.comment %}
+ {{ property.comment }}
+{% endif %}
+ readonly property {{property|propertyType}} {{property}} : _{{property}}
+ property {{property|propertyType}} _{{property}} : {{property|defaultValue}}
+{% else %}
+{% if property.comment %}
+ {{ property.comment }}
+{% endif %}
+ property {{property|propertyType}} {{property}} : {{property|defaultValue }}
+{% endif %}
{% endfor %}
+
{% for operation in interface.operations %}
- {{operation.comment}}
+{% if operation.comment %}
+ {{ operation.comment }}
+{% endif %}
property var {{operation}} : function({{operation.parameters|join(', ')}}) {}
{% endfor %}
@@ -23,5 +45,5 @@ QtObject {
{% endfor -%}
)
{% endfor %}
-
}
+
diff --git a/qface/builtin/qtqml/templates/Interface.qml b/qface/builtin/qtqml/templates/Interface.qml
index 2dc9d5f..f4ebf70 100644
--- a/qface/builtin/qtqml/templates/Interface.qml
+++ b/qface/builtin/qtqml/templates/Interface.qml
@@ -1,3 +1,16 @@
+{% include 'copyright.h' %}
+
+
+/*
+ * This is a preserved file.
+ * Changes will not be overridden by the generator.
+ * To reset the file you need to delete it first.
+ */
+
+{% if 'singleton' in interface.tags %}
+pragma Singleton
+{% endif %}
+
import QtQml 2.2
import "private"
@@ -5,3 +18,4 @@ import "private"
Abstract{{interface}} {
id: root
}
+
diff --git a/qface/builtin/qtqml/templates/Module.qml b/qface/builtin/qtqml/templates/Module.qml
deleted file mode 100644
index 7ededf2..0000000
--- a/qface/builtin/qtqml/templates/Module.qml
+++ /dev/null
@@ -1,24 +0,0 @@
-pragma Singleton
-
-import QtQml 2.2
-
-/**
- * {{module.comment}}
- */
-QtObject {
- id: root
-
- {% for enum in module.enums %}
- // Enum: {{enum}}
- {% for member in enum.members %}
- readonly property int {{member}}: {{member.value}}
- {% endfor %}
-
- {% endfor %}
-
- {% for struct in module.structs %}
- function create{{struct}}() {
- return {};
- }
- {% endfor %}
-}
diff --git a/qface/builtin/qtqml/templates/copyright.h b/qface/builtin/qtqml/templates/copyright.h
new file mode 100644
index 0000000..a484f2e
--- /dev/null
+++ b/qface/builtin/qtqml/templates/copyright.h
@@ -0,0 +1,23 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2017 Pelagicore AG
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
diff --git a/qface/builtin/qtqml/templates/module.js b/qface/builtin/qtqml/templates/module.js
index 551f5c3..c1a92a1 100644
--- a/qface/builtin/qtqml/templates/module.js
+++ b/qface/builtin/qtqml/templates/module.js
@@ -1,3 +1,11 @@
+{% include 'copyright.h' %}
+
+
+/*
+ * This is an auto-generated file.
+ * Do not edit! All changes made to it will be lost.
+ */
+
.pragma library
{% for enum in module.enums %}
@@ -9,7 +17,7 @@ var {{member}} = {{member.value}};
{% for struct in module.structs %}
function create{{struct}}() {
- return {
+ return {
{% for field in struct.fields %}
{{field}} : {{field | defaultValue}},
{% endfor %}
diff --git a/qface/builtin/qtqml/templates/public_qmldir b/qface/builtin/qtqml/templates/public_qmldir
index fc465a7..2fcfe50 100644
--- a/qface/builtin/qtqml/templates/public_qmldir
+++ b/qface/builtin/qtqml/templates/public_qmldir
@@ -1,4 +1,4 @@
{{module_name}}Module 1.0 private/{{module_name}}Module.js
{% for interface in module.interfaces %}
-{{interface}} 1.0 {{interface}}.qml
+{% if 'singleton' in interface.tags %}singleton {% endif %}{{interface}} 1.0 {{interface}}.qml
{% endfor %}
diff --git a/qface/builtin/schema/log.yaml b/qface/builtin/schema/log.yaml
new file mode 100644
index 0000000..21b5bba
--- /dev/null
+++ b/qface/builtin/schema/log.yaml
@@ -0,0 +1,18 @@
+version: 1
+formatters:
+ simple:
+ format: '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+handlers:
+ console:
+ class: logging.StreamHandler
+ level: INFO
+ formatter: simple
+ stream: ext://sys.stdout
+loggers:
+ qface.generator:
+ level: WARN
+ handlers: [console]
+ propagate: no
+root:
+ level: DEBUG
+ handlers: [console]
diff --git a/qface/builtin/schema/schema.py b/qface/builtin/schema/schema.py
new file mode 100755
index 0000000..5735844
--- /dev/null
+++ b/qface/builtin/schema/schema.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python3
+# Copyright (c) Pelagicore AB 2016
+
+import click
+import logging
+import logging.config
+import yaml
+from path import Path
+
+from qface.generator import FileSystem, Generator
+from qface.watch import monitor
+from qface.filters import jsonify
+
+
+here = Path(__file__).dirname()
+
+logging.config.dictConfig(yaml.load(open(here / 'log.yaml')))
+
+log = logging.getLogger(__file__)
+
+
+def run(src, dst):
+ log.debug('run {0} {1}'.format(src, dst))
+ system = FileSystem.parse(src)
+ search_path = [
+ Path('_templates').abspath(),
+ Path(here / 'templates').abspath()
+ ]
+ generator = Generator(search_path=search_path)
+ generator.register_filter('jsonify', jsonify)
+ ctx = {'dst': dst}
+ for module in system.modules:
+ ctx.update({
+ 'module': module,
+ })
+ generator.destination = generator.apply("{{dst}}", ctx)
+ generator.write('{{module}}.json', 'module.json', ctx)
+
+
+@click.command()
+@click.option('--reload/--no-reload', default=False)
+@click.argument('src', nargs=-1, type=click.Path(exists=True))
+@click.argument('dst', nargs=1, type=click.Path(exists=True))
+def app(src, dst, reload):
+ """Takes several files or directories as src and generates the code
+ in the given dst directory."""
+ if reload:
+ script = Path(__file__).abspath()
+ monitor(script, src, dst)
+ else:
+ run(src, dst)
+
+
+if __name__ == '__main__':
+ app()
diff --git a/qface/builtin/schema/templates/module.json b/qface/builtin/schema/templates/module.json
new file mode 100644
index 0000000..288ea15
--- /dev/null
+++ b/qface/builtin/schema/templates/module.json
@@ -0,0 +1 @@
+{{module|jsonify}} \ No newline at end of file
diff --git a/qface/filters.py b/qface/filters.py
new file mode 100644
index 0000000..9276d4c
--- /dev/null
+++ b/qface/filters.py
@@ -0,0 +1,19 @@
+import json
+import hashlib
+
+def jsonify(obj):
+ try:
+ # all symbols have a toJson method, try it
+ return json.dumps(obj.toJson(), indent=' ')
+ except AttributeError:
+ pass
+ return json.dumps(obj, indent=' ')
+
+def upper_first(s):
+ s = str(s)
+ return s[0].upper() + s[1:]
+
+def hash(s, hash_type='sha1'):
+ h = hashlib.new(hash_type)
+ h.update(str(s).encode('utf-8'))
+ return h.hexdigest()
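
A minimal sketch of wiring these filters into a generator so templates can use them (the 'templates' search path is hypothetical):

    from qface.generator import Generator
    from qface.filters import jsonify, upper_first, hash

    generator = Generator(search_path='templates')
    generator.register_filter('jsonify', jsonify)           # {{ module|jsonify }}
    generator.register_filter('upper_first', upper_first)   # {{ property|upper_first }}
    generator.register_filter('hash', hash)                 # {{ interface|hash }}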
diff --git a/qface/generator.py b/qface/generator.py
index 6b43e88..22fbf55 100644
--- a/qface/generator.py
+++ b/qface/generator.py
@@ -36,17 +36,21 @@ def upper_first_filter(s):
return s[0].upper() + s[1:]
+def lower_first_filter(s):
+ s = str(s)
+ return s[0].lower() + s[1:]
+
+
class Generator(object):
"""Manages the templates and applies your context data"""
def __init__(self, search_path: str):
- if search_path:
- search_path = Path(search_path).expand()
- self.env = Environment(
- loader=FileSystemLoader(search_path),
- trim_blocks=True,
- lstrip_blocks=True
- )
+ self.env = Environment(
+ loader=FileSystemLoader(search_path),
+ trim_blocks=True,
+ lstrip_blocks=True
+ )
self.env.filters['upperfirst'] = upper_first_filter
+ self.env.filters['lowerfirst'] = lower_first_filter
self._destination = Path()
@property
diff --git a/qface/helper/doc.py b/qface/helper/doc.py
index c999227..0935e67 100644
--- a/qface/helper/doc.py
+++ b/qface/helper/doc.py
@@ -1,6 +1,5 @@
import re
-
translate = None
"""
The translare function used for transalting inline tags. The
@@ -37,6 +36,8 @@ class DocObject:
setattr(self, name, str(value))
elif attr_type is list:
getattr(self, name).append(value)
+ else:
+ print('documentation tag @{0} not supported'.format(name))
@staticmethod
def _translate(name, value):
@@ -78,4 +79,3 @@ def parse_doc(s):
elif tag: # append to previous matched tag
doc.add_tag(tag, line)
return doc
-
diff --git a/qface/helper/qtcpp.py b/qface/helper/qtcpp.py
index 5b1c379..c42f554 100644
--- a/qface/helper/qtcpp.py
+++ b/qface/helper/qtcpp.py
@@ -1,7 +1,8 @@
"""
Provides helper functionality specificially for Qt C++/QML code generators
"""
-
+import qface.idl.domain as domain
+from jinja2 import environmentfilter
def upper_first(s):
s = str(s)
@@ -23,13 +24,13 @@ class Filters(object):
t = symbol.type # type: qface.domain.TypeSymbol
if t.is_primitive:
if t.is_int:
- return '0'
+ return 'int(0)'
if t.is_bool:
- return 'false'
+ return 'bool(false)'
if t.is_string:
return 'QString()'
if t.is_real:
- return '0.0'
+ return 'qreal(0.0)'
if t.is_variant:
return 'QVariant()'
elif t.is_void:
@@ -105,3 +106,56 @@ class Filters(object):
return '{0}{1}'.format(prefix, symbol.type)
return 'XXX'
+ @staticmethod
+ def open_ns(symbol):
+ '''generates an open namespace statement from a symbol: namespace x { y { z {'''
+ blocks = ['{0} {{'.format(x) for x in symbol.module.name_parts]
+ return 'namespace {0}'.format(str.join(' ', blocks))
+
+ @staticmethod
+ def close_ns(symbol):
+ '''generates a closing namespace statement from a symbol'''
+ return ' '.join(['}' for x in symbol.module.name_parts])
+
+ @staticmethod
+ def using_ns(symbol):
+ '''generates a using namespace x::y::z statement from a symbol'''
+ id = '::'.join(symbol.module.name_parts)
+ return 'using namespace {0}'.format(id)
+
+ @staticmethod
+ def signalName(s):
+ if isinstance(s, domain.Property):
+ return '{0}Changed'.format(s)
+ return s
+
+ @staticmethod
+ @environmentfilter
+ def parameters(env, s, filter=None, spaces=True):
+ if not filter:
+ filter = Filters.parameterType
+ else:
+ filter = env.filters[filter]
+ args = []
+ indent = ', '
+ if not spaces:
+ indent = ','
+ if isinstance(s, domain.Operation):
+ args = s.parameters
+ elif isinstance(s, domain.Signal):
+ args = s.parameters
+ elif isinstance(s, domain.Property):
+ args = [s]
+ return indent.join([filter(a) for a in args])
+
+ @staticmethod
+ def signature(s):
+ if isinstance(s, domain.Operation):
+ args = s.parameters
+ elif isinstance(s, domain.Signal):
+ args = s.parameters
+ elif isinstance(s, domain.Property):
+ args = [s.type] # for <property>Changed(<type>)
+ else:
+ args = []
+ return ','.join([Filters.returnType(a) for a in args])
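
A minimal sketch of registering the new namespace helpers with a generator (the 'templates' search path is hypothetical). Per the implementation above, for a module 'org.example' open_ns yields 'namespace org { example {', close_ns yields '} }' and using_ns yields 'using namespace org::example':

    from qface.generator import Generator
    from qface.helper.qtcpp import Filters

    generator = Generator(search_path='templates')
    generator.register_filter('open_ns', Filters.open_ns)
    generator.register_filter('close_ns', Filters.close_ns)
    generator.register_filter('using_ns', Filters.using_ns)
    generator.register_filter('signalName', Filters.signalName)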
diff --git a/qface/helper/qtqml.py b/qface/helper/qtqml.py
index 819000a..9e9c976 100644
--- a/qface/helper/qtqml.py
+++ b/qface/helper/qtqml.py
@@ -19,10 +19,14 @@ class Filters(object):
if t.is_primitive:
if t.name == 'int':
return '0'
+ elif t.name == 'real':
+ return "0.0"
elif t.name == 'bool':
return 'false'
elif t.name == 'string':
return "''"
+ elif t.name == 'var':
+ return "undefined"
elif t.is_enum:
value = next(iter(t.reference.members))
return '{0}Module.{1}'.format(module, value)
diff --git a/qface/idl/domain.py b/qface/idl/domain.py
index c774c16..3be53ff 100644
--- a/qface/idl/domain.py
+++ b/qface/idl/domain.py
@@ -78,6 +78,11 @@ class System(object):
type_name = parts[1]
return (module_name, type_name, fragment_name)
+ def toJson(self):
+ o = {}
+ o['modules'] = [o.toJson() for o in self.modules]
+ return o
+
class NamedElement(object):
def __init__(self, name, module: 'Module'):
@@ -103,6 +108,12 @@ class NamedElement(object):
else:
return '{0}.{1}'.format(self.module.name, self.name)
+ def toJson(self):
+ o = {}
+ if self.name:
+ o['name'] = self.name
+ return o
+
class Symbol(NamedElement):
"""A symbol represents a base class for names elements"""
@@ -144,6 +155,11 @@ class Symbol(NamedElement):
def contents(self):
return self._contentMap.values()
+ def toJson(self):
+ o = super().toJson()
+ if self.type.is_valid:
+ o['type'] = self.type.toJson()
+ return o
class TypeSymbol(NamedElement):
@@ -165,7 +181,10 @@ class TypeSymbol(NamedElement):
@property
def is_valid(self):
'''checks if type is a valid type'''
- return self.is_primitive or self.is_complex
+ return (self.is_primitive and self.name) \
+ or (self.is_complex and self.name) \
+ or (self.is_list and self.nested) \
+ or (self.is_model and self.nested) \
@property
def is_bool(self):
@@ -198,6 +217,11 @@ class TypeSymbol(NamedElement):
return self.is_complex and isinstance(self.reference, Struct)
@property
+ def is_interface(self):
+ '''checks if type is interface'''
+ return self.is_complex and isinstance(self.reference, Interface)
+
+ @property
def is_variant(self):
'''checks if type is primitive and string'''
return self.is_primitive and self.name == 'var'
@@ -220,6 +244,22 @@ class TypeSymbol(NamedElement):
def type(self):
return self
+ def toJson(self):
+ o = super().toJson()
+ if self.is_void:
+ o['void'] = self.is_void
+ if self.is_primitive:
+ o['primitive'] = self.is_primitive
+ if self.is_complex:
+ o['complex'] = self.is_complex
+ if self.is_list:
+ o['list'] = self.is_list
+ if self.is_model:
+ o['model'] = self.is_model
+ if self.nested:
+ o['nested'] = self.nested.toJson()
+ return o
+
class Module(Symbol):
"""Module is a namespace for types, e.g. interfaces, enums, structs"""
@@ -270,6 +310,14 @@ class Module(Symbol):
return self.name.split('.')
@property
+ def majorVersion(self):
+ return self.version.split('.')[0]
+
+ @property
+ def minorVersion(self):
+ return self.version.split('.')[1]
+
+ @property
def module_name(self):
return self.name.split('.')[-1].capitalize()
@@ -283,6 +331,14 @@ class Module(Symbol):
return symbol
return self.system.lookup(name)
+ def toJson(self):
+ o = super().toJson()
+ o['version'] = self.version
+ o['interfaces'] = [s.toJson() for s in self.interfaces]
+ o['structs'] = [s.toJson() for s in self.structs]
+ o['enums'] = [s.toJson() for s in self.enums]
+ return o
+
class Interface(Symbol):
"""A interface is an object with operations, properties and signals"""
@@ -294,6 +350,7 @@ class Interface(Symbol):
self._operationMap = OrderedDict() # type: dict[str, Operation]
self._signalMap = OrderedDict() # type: dict[str, Signal]
self._contentMap = ChainMap(self._propertyMap, self._operationMap, self._signalMap)
+ self._extends = None
@property
def properties(self):
@@ -310,6 +367,17 @@ class Interface(Symbol):
'''returns ordered list of signals'''
return self._signalMap.values()
+ @property
+ def extends(self):
+ return self.module.lookup(self._extends)
+
+ def toJson(self):
+ o = super().toJson()
+ o['properties'] = [s.toJson() for s in self.properties]
+ o['operations'] = [s.toJson() for s in self.operations]
+ o['signals'] = [s.toJson() for s in self.signals]
+ return o
+
class Operation(Symbol):
"""An operation inside a interface"""
@@ -325,6 +393,11 @@ class Operation(Symbol):
'''returns ordered list of parameters'''
return self._parameterMap.values()
+ def toJson(self):
+ o = super().toJson()
+ o['parameters'] = [s.toJson() for s in self.parameters]
+ return o
+
class Signal(Symbol):
"""A signal inside an interface"""
@@ -340,6 +413,11 @@ class Signal(Symbol):
'''returns ordered list of parameters'''
return self._parameterMap.values()
+ def toJson(self):
+ o = super().toJson()
+ o['parameters'] = [s.toJson() for s in self.parameters]
+ return o
+
class Parameter(Symbol):
"""An operation parameter"""
@@ -357,7 +435,16 @@ class Property(Symbol):
log.debug('Property()')
self.interface = interface
self.interface._propertyMap[name] = self
- self.is_readonly = False
+ self.readonly = False
+ self.const = False
+
+ def toJson(self):
+ o = super().toJson()
+ if self.readonly:
+ o['readonly'] = True
+ if self.const:
+ o['const'] = True
+ return o
class Struct(Symbol):
@@ -366,13 +453,18 @@ class Struct(Symbol):
super().__init__(name, module)
log.debug('Struct()')
self.module._structMap[name] = self
- self._fieldMap = self._contentMap = OrderedDict() # type: dict[str, Field]
+ self._fieldMap = self._contentMap = OrderedDict()
@property
def fields(self):
'''returns ordered list of members'''
return self._fieldMap.values()
+ def toJson(self):
+ o = super().toJson()
+ o['fields'] = [s.toJson() for s in self.fields]
+ return o
+
class Field(Symbol):
"""A member in a struct"""
@@ -398,6 +490,15 @@ class Enum(Symbol):
'''returns ordered list of members'''
return self._memberMap.values()
+ def toJson(self):
+ o = super().toJson()
+ if self.is_enum:
+ o['enum'] = self.is_enum
+ if self.is_flag:
+ o['flag'] = self.is_flag
+ o['members'] = [s.toJson() for s in self.members]
+ return o
+
class EnumMember(Symbol):
"""A enum value"""
@@ -407,3 +508,9 @@ class EnumMember(Symbol):
self.enum = enum
self.enum._memberMap[name] = self
self.value = 0
+
+ def toJson(self):
+ o = super().toJson()
+ o['value'] = self.value
+ return o
+
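
With the toJson() hooks added above, the whole parsed system can be dumped in one go; a minimal sketch (the 'interfaces' directory is hypothetical):

    import json
    from qface.generator import FileSystem

    system = FileSystem.parse('interfaces')
    # System.toJson() recursively serializes modules, interfaces, structs and enums
    print(json.dumps(system.toJson(), indent='  '))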
diff --git a/qface/idl/listener.py b/qface/idl/listener.py
index fc5781a..8bc6d36 100644
--- a/qface/idl/listener.py
+++ b/qface/idl/listener.py
@@ -1,6 +1,5 @@
# Copyright (c) Pelagicore AB 2016
import logging
-from _operator import concat
from .parser.TListener import TListener
from .parser.TParser import TParser
@@ -110,6 +109,8 @@ class DomainListener(TListener):
name = ctx.name.text
self.interface = Interface(name, self.module)
self.parse_annotations(ctx, self.interface)
+ if ctx.extends:
+ self.interface._extends = ctx.extends.text
contextMap[ctx] = self.interface
def exitInterfaceSymbol(self, ctx: TParser.InterfaceSymbolContext):
@@ -151,6 +152,9 @@ class DomainListener(TListener):
assert self.interface
name = ctx.name.text
self.operation = Operation(name, self.interface)
+ modifier = ctx.operationModifierSymbol()
+ if modifier:
+ self.operation.const = bool(modifier.is_const)
self.parse_annotations(ctx, self.operation)
self.parse_type(ctx, self.operation.type)
contextMap[ctx] = self.operation
@@ -181,7 +185,10 @@ class DomainListener(TListener):
assert self.interface
name = ctx.name.text
self.property = Property(name, self.interface)
- self.property.is_readonly = bool(ctx.isReadOnly)
+ modifier = ctx.propertyModifierSymbol()
+ if modifier:
+ self.property.readonly = bool(modifier.is_readonly)
+ self.property.const = bool(modifier.is_const)
self.parse_annotations(ctx, self.property)
self.parse_type(ctx, self.property.type)
contextMap[ctx] = self.property
diff --git a/qface/idl/parser/T.g4 b/qface/idl/parser/T.g4
index 4ec68c8..9cfe4e7 100644
--- a/qface/idl/parser/T.g4
+++ b/qface/idl/parser/T.g4
@@ -29,7 +29,7 @@ definitionSymbol
;
interfaceSymbol
- : comment=DOCCOMMENT? tagSymbol* 'interface' name=IDENTIFIER '{' interfaceMemberSymbol* '}' ';'?
+ : comment=DOCCOMMENT? tagSymbol* 'interface' name=IDENTIFIER ('extends' extends=IDENTIFIER)? '{' interfaceMemberSymbol* '}' ';'?
;
interfaceMemberSymbol
@@ -39,7 +39,11 @@ interfaceMemberSymbol
;
operationSymbol
- : comment=DOCCOMMENT? tagSymbol* (typeSymbol | 'void') name=IDENTIFIER '(' operationParameterSymbol* ')' ';'?
+ : comment=DOCCOMMENT? tagSymbol* (typeSymbol | 'void') name=IDENTIFIER '(' operationParameterSymbol* ')' operationModifierSymbol? ';'?
+ ;
+
+operationModifierSymbol
+ : is_const='const'
;
signalSymbol
@@ -48,7 +52,12 @@ signalSymbol
propertySymbol
- : comment=DOCCOMMENT? tagSymbol* isReadOnly='readonly'? typeSymbol name=IDENTIFIER ';'?
+ : comment=DOCCOMMENT? tagSymbol* propertyModifierSymbol? typeSymbol name=IDENTIFIER ';'?
+ ;
+
+propertyModifierSymbol
+ : is_readonly='readonly'
+ | is_const='const'
;
operationParameterSymbol
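
The new 'extends', 'const' and property modifier rules surface on the domain model via the listener changes above; a minimal sketch exercising them (assuming a hypothetical 'example.qface' file that uses these keywords):

    from qface.generator import FileSystem

    system = FileSystem.parse('example.qface')
    for module in system.modules:
        for interface in module.interfaces:
            if interface._extends:  # internal attribute set by the listener
                print(interface.name, 'extends', interface.extends.name)
            for prop in interface.properties:
                print('  ', prop.name, 'readonly:', prop.readonly, 'const:', prop.const)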
diff --git a/qface/idl/parser/T.tokens b/qface/idl/parser/T.tokens
index 86bf74a..5f94603 100644
--- a/qface/idl/parser/T.tokens
+++ b/qface/idl/parser/T.tokens
@@ -23,38 +23,42 @@ T__21=22
T__22=23
T__23=24
T__24=25
-TAGLINE=26
-INTCONSTANT=27
-HEXCONSTANT=28
-TAGIDENTIFIER=29
-IDENTIFIER=30
-VERSION=31
-DOCCOMMENT=32
-WHITESPACE=33
-COMMENT=34
-MULTICOMM=35
+T__25=26
+T__26=27
+TAGLINE=28
+INTCONSTANT=29
+HEXCONSTANT=30
+TAGIDENTIFIER=31
+IDENTIFIER=32
+VERSION=33
+DOCCOMMENT=34
+WHITESPACE=35
+COMMENT=36
+MULTICOMM=37
'import'=1
';'=2
'module'=3
'interface'=4
-'{'=5
-'}'=6
-'void'=7
-'('=8
-')'=9
-'signal'=10
-'readonly'=11
-','=12
-'='=13
-'bool'=14
-'int'=15
-'real'=16
-'string'=17
-'var'=18
-'list'=19
-'<'=20
-'>'=21
-'model'=22
-'struct'=23
-'enum'=24
-'flag'=25
+'extends'=5
+'{'=6
+'}'=7
+'void'=8
+'('=9
+')'=10
+'const'=11
+'signal'=12
+'readonly'=13
+','=14
+'='=15
+'bool'=16
+'int'=17
+'real'=18
+'string'=19
+'var'=20
+'list'=21
+'<'=22
+'>'=23
+'model'=24
+'struct'=25
+'enum'=26
+'flag'=27
diff --git a/qface/idl/parser/TLexer.py b/qface/idl/parser/TLexer.py
index 59f1fcb..c6aa4db 100644
--- a/qface/idl/parser/TLexer.py
+++ b/qface/idl/parser/TLexer.py
@@ -5,37 +5,38 @@ from io import StringIO
def serializedATN():
with StringIO() as buf:
- buf.write("\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2%")
- buf.write("\u0114\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7")
+ buf.write("\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2\'")
+ buf.write("\u0126\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7")
buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r")
buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23")
buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30")
buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36")
- buf.write("\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\3\2\3")
- buf.write("\2\3\2\3\2\3\2\3\2\3\2\3\3\3\3\3\4\3\4\3\4\3\4\3\4\3\4")
- buf.write("\3\4\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\6\3\6\3")
- buf.write("\7\3\7\3\b\3\b\3\b\3\b\3\b\3\t\3\t\3\n\3\n\3\13\3\13\3")
- buf.write("\13\3\13\3\13\3\13\3\13\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3")
- buf.write("\f\3\f\3\r\3\r\3\16\3\16\3\17\3\17\3\17\3\17\3\17\3\20")
- buf.write("\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\21\3\22\3\22\3\22")
- buf.write("\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\24\3\24\3\24")
- buf.write("\3\24\3\24\3\25\3\25\3\26\3\26\3\27\3\27\3\27\3\27\3\27")
- buf.write("\3\27\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\31\3\31\3\31")
- buf.write("\3\31\3\31\3\32\3\32\3\32\3\32\3\32\3\33\3\33\7\33\u00c0")
- buf.write("\n\33\f\33\16\33\u00c3\13\33\3\34\5\34\u00c6\n\34\3\34")
- buf.write("\6\34\u00c9\n\34\r\34\16\34\u00ca\3\35\3\35\3\35\3\35")
- buf.write("\6\35\u00d1\n\35\r\35\16\35\u00d2\3\36\3\36\3\36\7\36")
- buf.write("\u00d8\n\36\f\36\16\36\u00db\13\36\3\37\3\37\7\37\u00df")
- buf.write("\n\37\f\37\16\37\u00e2\13\37\3 \3 \3 \3 \3!\3!\3!\3!\3")
- buf.write("!\7!\u00ed\n!\f!\16!\u00f0\13!\3!\3!\3!\3\"\6\"\u00f6")
- buf.write("\n\"\r\"\16\"\u00f7\3\"\3\"\3#\3#\3#\3#\7#\u0100\n#\f")
- buf.write("#\16#\u0103\13#\3#\3#\3$\3$\3$\3$\7$\u010b\n$\f$\16$\u010e")
- buf.write("\13$\3$\3$\3$\3$\3$\4\u00ee\u010c\2%\3\3\5\4\7\5\t\6\13")
- buf.write("\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37")
- buf.write("\21!\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34")
- buf.write("\67\359\36;\37= ?!A\"C#E$G%\3\2\t\4\2\f\f\17\17\4\2--")
- buf.write("//\5\2\62;CHch\5\2C\\aac|\7\2\60\60\62;C\\aac|\3\2\62")
- buf.write(";\5\2\13\f\17\17\"\"\u011d\2\3\3\2\2\2\2\5\3\2\2\2\2\7")
+ buf.write("\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%")
+ buf.write("\4&\t&\3\2\3\2\3\2\3\2\3\2\3\2\3\2\3\3\3\3\3\4\3\4\3\4")
+ buf.write("\3\4\3\4\3\4\3\4\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3")
+ buf.write("\5\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\7\3\7\3\b\3\b\3\t")
+ buf.write("\3\t\3\t\3\t\3\t\3\n\3\n\3\13\3\13\3\f\3\f\3\f\3\f\3\f")
+ buf.write("\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3\16\3")
+ buf.write("\16\3\16\3\16\3\16\3\16\3\17\3\17\3\20\3\20\3\21\3\21")
+ buf.write("\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3\23")
+ buf.write("\3\23\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25")
+ buf.write("\3\25\3\26\3\26\3\26\3\26\3\26\3\27\3\27\3\30\3\30\3\31")
+ buf.write("\3\31\3\31\3\31\3\31\3\31\3\32\3\32\3\32\3\32\3\32\3\32")
+ buf.write("\3\32\3\33\3\33\3\33\3\33\3\33\3\34\3\34\3\34\3\34\3\34")
+ buf.write("\3\35\3\35\7\35\u00d2\n\35\f\35\16\35\u00d5\13\35\3\36")
+ buf.write("\5\36\u00d8\n\36\3\36\6\36\u00db\n\36\r\36\16\36\u00dc")
+ buf.write("\3\37\3\37\3\37\3\37\6\37\u00e3\n\37\r\37\16\37\u00e4")
+ buf.write("\3 \3 \3 \7 \u00ea\n \f \16 \u00ed\13 \3!\3!\7!\u00f1")
+ buf.write("\n!\f!\16!\u00f4\13!\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\7")
+ buf.write("#\u00ff\n#\f#\16#\u0102\13#\3#\3#\3#\3$\6$\u0108\n$\r")
+ buf.write("$\16$\u0109\3$\3$\3%\3%\3%\3%\7%\u0112\n%\f%\16%\u0115")
+ buf.write("\13%\3%\3%\3&\3&\3&\3&\7&\u011d\n&\f&\16&\u0120\13&\3")
+ buf.write("&\3&\3&\3&\3&\4\u0100\u011e\2\'\3\3\5\4\7\5\t\6\13\7\r")
+ buf.write("\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!")
+ buf.write("\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67")
+ buf.write("\359\36;\37= ?!A\"C#E$G%I&K\'\3\2\t\4\2\f\f\17\17\4\2")
+ buf.write("--//\5\2\62;CHch\5\2C\\aac|\7\2\60\60\62;C\\aac|\3\2\62")
+ buf.write(";\5\2\13\f\17\17\"\"\u012f\2\3\3\2\2\2\2\5\3\2\2\2\2\7")
buf.write("\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2")
buf.write("\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2")
buf.write("\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2")
@@ -43,82 +44,88 @@ def serializedATN():
buf.write("\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63")
buf.write("\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3\2\2")
buf.write("\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2")
- buf.write("\2\2\2G\3\2\2\2\3I\3\2\2\2\5P\3\2\2\2\7R\3\2\2\2\tY\3")
- buf.write("\2\2\2\13c\3\2\2\2\re\3\2\2\2\17g\3\2\2\2\21l\3\2\2\2")
- buf.write("\23n\3\2\2\2\25p\3\2\2\2\27w\3\2\2\2\31\u0080\3\2\2\2")
- buf.write("\33\u0082\3\2\2\2\35\u0084\3\2\2\2\37\u0089\3\2\2\2!\u008d")
- buf.write("\3\2\2\2#\u0092\3\2\2\2%\u0099\3\2\2\2\'\u009d\3\2\2\2")
- buf.write(")\u00a2\3\2\2\2+\u00a4\3\2\2\2-\u00a6\3\2\2\2/\u00ac\3")
- buf.write("\2\2\2\61\u00b3\3\2\2\2\63\u00b8\3\2\2\2\65\u00bd\3\2")
- buf.write("\2\2\67\u00c5\3\2\2\29\u00cc\3\2\2\2;\u00d4\3\2\2\2=\u00dc")
- buf.write("\3\2\2\2?\u00e3\3\2\2\2A\u00e7\3\2\2\2C\u00f5\3\2\2\2")
- buf.write("E\u00fb\3\2\2\2G\u0106\3\2\2\2IJ\7k\2\2JK\7o\2\2KL\7r")
- buf.write("\2\2LM\7q\2\2MN\7t\2\2NO\7v\2\2O\4\3\2\2\2PQ\7=\2\2Q\6")
- buf.write("\3\2\2\2RS\7o\2\2ST\7q\2\2TU\7f\2\2UV\7w\2\2VW\7n\2\2")
- buf.write("WX\7g\2\2X\b\3\2\2\2YZ\7k\2\2Z[\7p\2\2[\\\7v\2\2\\]\7")
- buf.write("g\2\2]^\7t\2\2^_\7h\2\2_`\7c\2\2`a\7e\2\2ab\7g\2\2b\n")
- buf.write("\3\2\2\2cd\7}\2\2d\f\3\2\2\2ef\7\177\2\2f\16\3\2\2\2g")
- buf.write("h\7x\2\2hi\7q\2\2ij\7k\2\2jk\7f\2\2k\20\3\2\2\2lm\7*\2")
- buf.write("\2m\22\3\2\2\2no\7+\2\2o\24\3\2\2\2pq\7u\2\2qr\7k\2\2")
- buf.write("rs\7i\2\2st\7p\2\2tu\7c\2\2uv\7n\2\2v\26\3\2\2\2wx\7t")
- buf.write("\2\2xy\7g\2\2yz\7c\2\2z{\7f\2\2{|\7q\2\2|}\7p\2\2}~\7")
- buf.write("n\2\2~\177\7{\2\2\177\30\3\2\2\2\u0080\u0081\7.\2\2\u0081")
- buf.write("\32\3\2\2\2\u0082\u0083\7?\2\2\u0083\34\3\2\2\2\u0084")
- buf.write("\u0085\7d\2\2\u0085\u0086\7q\2\2\u0086\u0087\7q\2\2\u0087")
- buf.write("\u0088\7n\2\2\u0088\36\3\2\2\2\u0089\u008a\7k\2\2\u008a")
- buf.write("\u008b\7p\2\2\u008b\u008c\7v\2\2\u008c \3\2\2\2\u008d")
- buf.write("\u008e\7t\2\2\u008e\u008f\7g\2\2\u008f\u0090\7c\2\2\u0090")
- buf.write("\u0091\7n\2\2\u0091\"\3\2\2\2\u0092\u0093\7u\2\2\u0093")
- buf.write("\u0094\7v\2\2\u0094\u0095\7t\2\2\u0095\u0096\7k\2\2\u0096")
- buf.write("\u0097\7p\2\2\u0097\u0098\7i\2\2\u0098$\3\2\2\2\u0099")
- buf.write("\u009a\7x\2\2\u009a\u009b\7c\2\2\u009b\u009c\7t\2\2\u009c")
- buf.write("&\3\2\2\2\u009d\u009e\7n\2\2\u009e\u009f\7k\2\2\u009f")
- buf.write("\u00a0\7u\2\2\u00a0\u00a1\7v\2\2\u00a1(\3\2\2\2\u00a2")
- buf.write("\u00a3\7>\2\2\u00a3*\3\2\2\2\u00a4\u00a5\7@\2\2\u00a5")
- buf.write(",\3\2\2\2\u00a6\u00a7\7o\2\2\u00a7\u00a8\7q\2\2\u00a8")
- buf.write("\u00a9\7f\2\2\u00a9\u00aa\7g\2\2\u00aa\u00ab\7n\2\2\u00ab")
- buf.write(".\3\2\2\2\u00ac\u00ad\7u\2\2\u00ad\u00ae\7v\2\2\u00ae")
- buf.write("\u00af\7t\2\2\u00af\u00b0\7w\2\2\u00b0\u00b1\7e\2\2\u00b1")
- buf.write("\u00b2\7v\2\2\u00b2\60\3\2\2\2\u00b3\u00b4\7g\2\2\u00b4")
- buf.write("\u00b5\7p\2\2\u00b5\u00b6\7w\2\2\u00b6\u00b7\7o\2\2\u00b7")
- buf.write("\62\3\2\2\2\u00b8\u00b9\7h\2\2\u00b9\u00ba\7n\2\2\u00ba")
- buf.write("\u00bb\7c\2\2\u00bb\u00bc\7i\2\2\u00bc\64\3\2\2\2\u00bd")
- buf.write("\u00c1\7B\2\2\u00be\u00c0\n\2\2\2\u00bf\u00be\3\2\2\2")
- buf.write("\u00c0\u00c3\3\2\2\2\u00c1\u00bf\3\2\2\2\u00c1\u00c2\3")
- buf.write("\2\2\2\u00c2\66\3\2\2\2\u00c3\u00c1\3\2\2\2\u00c4\u00c6")
- buf.write("\t\3\2\2\u00c5\u00c4\3\2\2\2\u00c5\u00c6\3\2\2\2\u00c6")
- buf.write("\u00c8\3\2\2\2\u00c7\u00c9\4\62;\2\u00c8\u00c7\3\2\2\2")
- buf.write("\u00c9\u00ca\3\2\2\2\u00ca\u00c8\3\2\2\2\u00ca\u00cb\3")
- buf.write("\2\2\2\u00cb8\3\2\2\2\u00cc\u00cd\7\62\2\2\u00cd\u00ce")
- buf.write("\7z\2\2\u00ce\u00d0\3\2\2\2\u00cf\u00d1\t\4\2\2\u00d0")
- buf.write("\u00cf\3\2\2\2\u00d1\u00d2\3\2\2\2\u00d2\u00d0\3\2\2\2")
- buf.write("\u00d2\u00d3\3\2\2\2\u00d3:\3\2\2\2\u00d4\u00d5\7B\2\2")
- buf.write("\u00d5\u00d9\t\5\2\2\u00d6\u00d8\t\6\2\2\u00d7\u00d6\3")
- buf.write("\2\2\2\u00d8\u00db\3\2\2\2\u00d9\u00d7\3\2\2\2\u00d9\u00da")
- buf.write("\3\2\2\2\u00da<\3\2\2\2\u00db\u00d9\3\2\2\2\u00dc\u00e0")
- buf.write("\t\5\2\2\u00dd\u00df\t\6\2\2\u00de\u00dd\3\2\2\2\u00df")
- buf.write("\u00e2\3\2\2\2\u00e0\u00de\3\2\2\2\u00e0\u00e1\3\2\2\2")
- buf.write("\u00e1>\3\2\2\2\u00e2\u00e0\3\2\2\2\u00e3\u00e4\t\7\2")
- buf.write("\2\u00e4\u00e5\7\60\2\2\u00e5\u00e6\t\7\2\2\u00e6@\3\2")
- buf.write("\2\2\u00e7\u00e8\7\61\2\2\u00e8\u00e9\7,\2\2\u00e9\u00ea")
- buf.write("\7,\2\2\u00ea\u00ee\3\2\2\2\u00eb\u00ed\13\2\2\2\u00ec")
- buf.write("\u00eb\3\2\2\2\u00ed\u00f0\3\2\2\2\u00ee\u00ef\3\2\2\2")
- buf.write("\u00ee\u00ec\3\2\2\2\u00ef\u00f1\3\2\2\2\u00f0\u00ee\3")
- buf.write("\2\2\2\u00f1\u00f2\7,\2\2\u00f2\u00f3\7\61\2\2\u00f3B")
- buf.write("\3\2\2\2\u00f4\u00f6\t\b\2\2\u00f5\u00f4\3\2\2\2\u00f6")
- buf.write("\u00f7\3\2\2\2\u00f7\u00f5\3\2\2\2\u00f7\u00f8\3\2\2\2")
- buf.write("\u00f8\u00f9\3\2\2\2\u00f9\u00fa\b\"\2\2\u00faD\3\2\2")
- buf.write("\2\u00fb\u00fc\7\61\2\2\u00fc\u00fd\7\61\2\2\u00fd\u0101")
- buf.write("\3\2\2\2\u00fe\u0100\n\2\2\2\u00ff\u00fe\3\2\2\2\u0100")
- buf.write("\u0103\3\2\2\2\u0101\u00ff\3\2\2\2\u0101\u0102\3\2\2\2")
- buf.write("\u0102\u0104\3\2\2\2\u0103\u0101\3\2\2\2\u0104\u0105\b")
- buf.write("#\2\2\u0105F\3\2\2\2\u0106\u0107\7\61\2\2\u0107\u0108")
- buf.write("\7,\2\2\u0108\u010c\3\2\2\2\u0109\u010b\13\2\2\2\u010a")
- buf.write("\u0109\3\2\2\2\u010b\u010e\3\2\2\2\u010c\u010d\3\2\2\2")
- buf.write("\u010c\u010a\3\2\2\2\u010d\u010f\3\2\2\2\u010e\u010c\3")
- buf.write("\2\2\2\u010f\u0110\7,\2\2\u0110\u0111\7\61\2\2\u0111\u0112")
- buf.write("\3\2\2\2\u0112\u0113\b$\2\2\u0113H\3\2\2\2\r\2\u00c1\u00c5")
- buf.write("\u00ca\u00d2\u00d9\u00e0\u00ee\u00f7\u0101\u010c\3\b\2")
+ buf.write("\2\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\3M\3\2\2\2\5T\3")
+ buf.write("\2\2\2\7V\3\2\2\2\t]\3\2\2\2\13g\3\2\2\2\ro\3\2\2\2\17")
+ buf.write("q\3\2\2\2\21s\3\2\2\2\23x\3\2\2\2\25z\3\2\2\2\27|\3\2")
+ buf.write("\2\2\31\u0082\3\2\2\2\33\u0089\3\2\2\2\35\u0092\3\2\2")
+ buf.write("\2\37\u0094\3\2\2\2!\u0096\3\2\2\2#\u009b\3\2\2\2%\u009f")
+ buf.write("\3\2\2\2\'\u00a4\3\2\2\2)\u00ab\3\2\2\2+\u00af\3\2\2\2")
+ buf.write("-\u00b4\3\2\2\2/\u00b6\3\2\2\2\61\u00b8\3\2\2\2\63\u00be")
+ buf.write("\3\2\2\2\65\u00c5\3\2\2\2\67\u00ca\3\2\2\29\u00cf\3\2")
+ buf.write("\2\2;\u00d7\3\2\2\2=\u00de\3\2\2\2?\u00e6\3\2\2\2A\u00ee")
+ buf.write("\3\2\2\2C\u00f5\3\2\2\2E\u00f9\3\2\2\2G\u0107\3\2\2\2")
+ buf.write("I\u010d\3\2\2\2K\u0118\3\2\2\2MN\7k\2\2NO\7o\2\2OP\7r")
+ buf.write("\2\2PQ\7q\2\2QR\7t\2\2RS\7v\2\2S\4\3\2\2\2TU\7=\2\2U\6")
+ buf.write("\3\2\2\2VW\7o\2\2WX\7q\2\2XY\7f\2\2YZ\7w\2\2Z[\7n\2\2")
+ buf.write("[\\\7g\2\2\\\b\3\2\2\2]^\7k\2\2^_\7p\2\2_`\7v\2\2`a\7")
+ buf.write("g\2\2ab\7t\2\2bc\7h\2\2cd\7c\2\2de\7e\2\2ef\7g\2\2f\n")
+ buf.write("\3\2\2\2gh\7g\2\2hi\7z\2\2ij\7v\2\2jk\7g\2\2kl\7p\2\2")
+ buf.write("lm\7f\2\2mn\7u\2\2n\f\3\2\2\2op\7}\2\2p\16\3\2\2\2qr\7")
+ buf.write("\177\2\2r\20\3\2\2\2st\7x\2\2tu\7q\2\2uv\7k\2\2vw\7f\2")
+ buf.write("\2w\22\3\2\2\2xy\7*\2\2y\24\3\2\2\2z{\7+\2\2{\26\3\2\2")
+ buf.write("\2|}\7e\2\2}~\7q\2\2~\177\7p\2\2\177\u0080\7u\2\2\u0080")
+ buf.write("\u0081\7v\2\2\u0081\30\3\2\2\2\u0082\u0083\7u\2\2\u0083")
+ buf.write("\u0084\7k\2\2\u0084\u0085\7i\2\2\u0085\u0086\7p\2\2\u0086")
+ buf.write("\u0087\7c\2\2\u0087\u0088\7n\2\2\u0088\32\3\2\2\2\u0089")
+ buf.write("\u008a\7t\2\2\u008a\u008b\7g\2\2\u008b\u008c\7c\2\2\u008c")
+ buf.write("\u008d\7f\2\2\u008d\u008e\7q\2\2\u008e\u008f\7p\2\2\u008f")
+ buf.write("\u0090\7n\2\2\u0090\u0091\7{\2\2\u0091\34\3\2\2\2\u0092")
+ buf.write("\u0093\7.\2\2\u0093\36\3\2\2\2\u0094\u0095\7?\2\2\u0095")
+ buf.write(" \3\2\2\2\u0096\u0097\7d\2\2\u0097\u0098\7q\2\2\u0098")
+ buf.write("\u0099\7q\2\2\u0099\u009a\7n\2\2\u009a\"\3\2\2\2\u009b")
+ buf.write("\u009c\7k\2\2\u009c\u009d\7p\2\2\u009d\u009e\7v\2\2\u009e")
+ buf.write("$\3\2\2\2\u009f\u00a0\7t\2\2\u00a0\u00a1\7g\2\2\u00a1")
+ buf.write("\u00a2\7c\2\2\u00a2\u00a3\7n\2\2\u00a3&\3\2\2\2\u00a4")
+ buf.write("\u00a5\7u\2\2\u00a5\u00a6\7v\2\2\u00a6\u00a7\7t\2\2\u00a7")
+ buf.write("\u00a8\7k\2\2\u00a8\u00a9\7p\2\2\u00a9\u00aa\7i\2\2\u00aa")
+ buf.write("(\3\2\2\2\u00ab\u00ac\7x\2\2\u00ac\u00ad\7c\2\2\u00ad")
+ buf.write("\u00ae\7t\2\2\u00ae*\3\2\2\2\u00af\u00b0\7n\2\2\u00b0")
+ buf.write("\u00b1\7k\2\2\u00b1\u00b2\7u\2\2\u00b2\u00b3\7v\2\2\u00b3")
+ buf.write(",\3\2\2\2\u00b4\u00b5\7>\2\2\u00b5.\3\2\2\2\u00b6\u00b7")
+ buf.write("\7@\2\2\u00b7\60\3\2\2\2\u00b8\u00b9\7o\2\2\u00b9\u00ba")
+ buf.write("\7q\2\2\u00ba\u00bb\7f\2\2\u00bb\u00bc\7g\2\2\u00bc\u00bd")
+ buf.write("\7n\2\2\u00bd\62\3\2\2\2\u00be\u00bf\7u\2\2\u00bf\u00c0")
+ buf.write("\7v\2\2\u00c0\u00c1\7t\2\2\u00c1\u00c2\7w\2\2\u00c2\u00c3")
+ buf.write("\7e\2\2\u00c3\u00c4\7v\2\2\u00c4\64\3\2\2\2\u00c5\u00c6")
+ buf.write("\7g\2\2\u00c6\u00c7\7p\2\2\u00c7\u00c8\7w\2\2\u00c8\u00c9")
+ buf.write("\7o\2\2\u00c9\66\3\2\2\2\u00ca\u00cb\7h\2\2\u00cb\u00cc")
+ buf.write("\7n\2\2\u00cc\u00cd\7c\2\2\u00cd\u00ce\7i\2\2\u00ce8\3")
+ buf.write("\2\2\2\u00cf\u00d3\7B\2\2\u00d0\u00d2\n\2\2\2\u00d1\u00d0")
+ buf.write("\3\2\2\2\u00d2\u00d5\3\2\2\2\u00d3\u00d1\3\2\2\2\u00d3")
+ buf.write("\u00d4\3\2\2\2\u00d4:\3\2\2\2\u00d5\u00d3\3\2\2\2\u00d6")
+ buf.write("\u00d8\t\3\2\2\u00d7\u00d6\3\2\2\2\u00d7\u00d8\3\2\2\2")
+ buf.write("\u00d8\u00da\3\2\2\2\u00d9\u00db\4\62;\2\u00da\u00d9\3")
+ buf.write("\2\2\2\u00db\u00dc\3\2\2\2\u00dc\u00da\3\2\2\2\u00dc\u00dd")
+ buf.write("\3\2\2\2\u00dd<\3\2\2\2\u00de\u00df\7\62\2\2\u00df\u00e0")
+ buf.write("\7z\2\2\u00e0\u00e2\3\2\2\2\u00e1\u00e3\t\4\2\2\u00e2")
+ buf.write("\u00e1\3\2\2\2\u00e3\u00e4\3\2\2\2\u00e4\u00e2\3\2\2\2")
+ buf.write("\u00e4\u00e5\3\2\2\2\u00e5>\3\2\2\2\u00e6\u00e7\7B\2\2")
+ buf.write("\u00e7\u00eb\t\5\2\2\u00e8\u00ea\t\6\2\2\u00e9\u00e8\3")
+ buf.write("\2\2\2\u00ea\u00ed\3\2\2\2\u00eb\u00e9\3\2\2\2\u00eb\u00ec")
+ buf.write("\3\2\2\2\u00ec@\3\2\2\2\u00ed\u00eb\3\2\2\2\u00ee\u00f2")
+ buf.write("\t\5\2\2\u00ef\u00f1\t\6\2\2\u00f0\u00ef\3\2\2\2\u00f1")
+ buf.write("\u00f4\3\2\2\2\u00f2\u00f0\3\2\2\2\u00f2\u00f3\3\2\2\2")
+ buf.write("\u00f3B\3\2\2\2\u00f4\u00f2\3\2\2\2\u00f5\u00f6\t\7\2")
+ buf.write("\2\u00f6\u00f7\7\60\2\2\u00f7\u00f8\t\7\2\2\u00f8D\3\2")
+ buf.write("\2\2\u00f9\u00fa\7\61\2\2\u00fa\u00fb\7,\2\2\u00fb\u00fc")
+ buf.write("\7,\2\2\u00fc\u0100\3\2\2\2\u00fd\u00ff\13\2\2\2\u00fe")
+ buf.write("\u00fd\3\2\2\2\u00ff\u0102\3\2\2\2\u0100\u0101\3\2\2\2")
+ buf.write("\u0100\u00fe\3\2\2\2\u0101\u0103\3\2\2\2\u0102\u0100\3")
+ buf.write("\2\2\2\u0103\u0104\7,\2\2\u0104\u0105\7\61\2\2\u0105F")
+ buf.write("\3\2\2\2\u0106\u0108\t\b\2\2\u0107\u0106\3\2\2\2\u0108")
+ buf.write("\u0109\3\2\2\2\u0109\u0107\3\2\2\2\u0109\u010a\3\2\2\2")
+ buf.write("\u010a\u010b\3\2\2\2\u010b\u010c\b$\2\2\u010cH\3\2\2\2")
+ buf.write("\u010d\u010e\7\61\2\2\u010e\u010f\7\61\2\2\u010f\u0113")
+ buf.write("\3\2\2\2\u0110\u0112\n\2\2\2\u0111\u0110\3\2\2\2\u0112")
+ buf.write("\u0115\3\2\2\2\u0113\u0111\3\2\2\2\u0113\u0114\3\2\2\2")
+ buf.write("\u0114\u0116\3\2\2\2\u0115\u0113\3\2\2\2\u0116\u0117\b")
+ buf.write("%\2\2\u0117J\3\2\2\2\u0118\u0119\7\61\2\2\u0119\u011a")
+ buf.write("\7,\2\2\u011a\u011e\3\2\2\2\u011b\u011d\13\2\2\2\u011c")
+ buf.write("\u011b\3\2\2\2\u011d\u0120\3\2\2\2\u011e\u011f\3\2\2\2")
+ buf.write("\u011e\u011c\3\2\2\2\u011f\u0121\3\2\2\2\u0120\u011e\3")
+ buf.write("\2\2\2\u0121\u0122\7,\2\2\u0122\u0123\7\61\2\2\u0123\u0124")
+ buf.write("\3\2\2\2\u0124\u0125\b&\2\2\u0125L\3\2\2\2\r\2\u00d3\u00d7")
+ buf.write("\u00dc\u00e4\u00eb\u00f2\u0100\u0109\u0113\u011e\3\b\2")
buf.write("\2")
return buf.getvalue()
@@ -155,24 +162,26 @@ class TLexer(Lexer):
T__22 = 23
T__23 = 24
T__24 = 25
- TAGLINE = 26
- INTCONSTANT = 27
- HEXCONSTANT = 28
- TAGIDENTIFIER = 29
- IDENTIFIER = 30
- VERSION = 31
- DOCCOMMENT = 32
- WHITESPACE = 33
- COMMENT = 34
- MULTICOMM = 35
+ T__25 = 26
+ T__26 = 27
+ TAGLINE = 28
+ INTCONSTANT = 29
+ HEXCONSTANT = 30
+ TAGIDENTIFIER = 31
+ IDENTIFIER = 32
+ VERSION = 33
+ DOCCOMMENT = 34
+ WHITESPACE = 35
+ COMMENT = 36
+ MULTICOMM = 37
modeNames = [ "DEFAULT_MODE" ]
literalNames = [ "<INVALID>",
- "'import'", "';'", "'module'", "'interface'", "'{'", "'}'",
- "'void'", "'('", "')'", "'signal'", "'readonly'", "','", "'='",
- "'bool'", "'int'", "'real'", "'string'", "'var'", "'list'",
- "'<'", "'>'", "'model'", "'struct'", "'enum'", "'flag'" ]
+ "'import'", "';'", "'module'", "'interface'", "'extends'", "'{'",
+ "'}'", "'void'", "'('", "')'", "'const'", "'signal'", "'readonly'",
+ "','", "'='", "'bool'", "'int'", "'real'", "'string'", "'var'",
+ "'list'", "'<'", "'>'", "'model'", "'struct'", "'enum'", "'flag'" ]
symbolicNames = [ "<INVALID>",
"TAGLINE", "INTCONSTANT", "HEXCONSTANT", "TAGIDENTIFIER", "IDENTIFIER",
@@ -181,9 +190,10 @@ class TLexer(Lexer):
ruleNames = [ "T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6",
"T__7", "T__8", "T__9", "T__10", "T__11", "T__12", "T__13",
"T__14", "T__15", "T__16", "T__17", "T__18", "T__19",
- "T__20", "T__21", "T__22", "T__23", "T__24", "TAGLINE",
- "INTCONSTANT", "HEXCONSTANT", "TAGIDENTIFIER", "IDENTIFIER",
- "VERSION", "DOCCOMMENT", "WHITESPACE", "COMMENT", "MULTICOMM" ]
+ "T__20", "T__21", "T__22", "T__23", "T__24", "T__25",
+ "T__26", "TAGLINE", "INTCONSTANT", "HEXCONSTANT", "TAGIDENTIFIER",
+ "IDENTIFIER", "VERSION", "DOCCOMMENT", "WHITESPACE", "COMMENT",
+ "MULTICOMM" ]
grammarFileName = "T.g4"
diff --git a/qface/idl/parser/TLexer.tokens b/qface/idl/parser/TLexer.tokens
index 86bf74a..5f94603 100644
--- a/qface/idl/parser/TLexer.tokens
+++ b/qface/idl/parser/TLexer.tokens
@@ -23,38 +23,42 @@ T__21=22
T__22=23
T__23=24
T__24=25
-TAGLINE=26
-INTCONSTANT=27
-HEXCONSTANT=28
-TAGIDENTIFIER=29
-IDENTIFIER=30
-VERSION=31
-DOCCOMMENT=32
-WHITESPACE=33
-COMMENT=34
-MULTICOMM=35
+T__25=26
+T__26=27
+TAGLINE=28
+INTCONSTANT=29
+HEXCONSTANT=30
+TAGIDENTIFIER=31
+IDENTIFIER=32
+VERSION=33
+DOCCOMMENT=34
+WHITESPACE=35
+COMMENT=36
+MULTICOMM=37
'import'=1
';'=2
'module'=3
'interface'=4
-'{'=5
-'}'=6
-'void'=7
-'('=8
-')'=9
-'signal'=10
-'readonly'=11
-','=12
-'='=13
-'bool'=14
-'int'=15
-'real'=16
-'string'=17
-'var'=18
-'list'=19
-'<'=20
-'>'=21
-'model'=22
-'struct'=23
-'enum'=24
-'flag'=25
+'extends'=5
+'{'=6
+'}'=7
+'void'=8
+'('=9
+')'=10
+'const'=11
+'signal'=12
+'readonly'=13
+','=14
+'='=15
+'bool'=16
+'int'=17
+'real'=18
+'string'=19
+'var'=20
+'list'=21
+'<'=22
+'>'=23
+'model'=24
+'struct'=25
+'enum'=26
+'flag'=27
diff --git a/qface/idl/parser/TListener.py b/qface/idl/parser/TListener.py
index 07d41ad..e908b46 100644
--- a/qface/idl/parser/TListener.py
+++ b/qface/idl/parser/TListener.py
@@ -80,6 +80,15 @@ class TListener(ParseTreeListener):
pass
+ # Enter a parse tree produced by TParser#operationModifierSymbol.
+ def enterOperationModifierSymbol(self, ctx:TParser.OperationModifierSymbolContext):
+ pass
+
+ # Exit a parse tree produced by TParser#operationModifierSymbol.
+ def exitOperationModifierSymbol(self, ctx:TParser.OperationModifierSymbolContext):
+ pass
+
+
# Enter a parse tree produced by TParser#signalSymbol.
def enterSignalSymbol(self, ctx:TParser.SignalSymbolContext):
pass
@@ -98,6 +107,15 @@ class TListener(ParseTreeListener):
pass
+ # Enter a parse tree produced by TParser#propertyModifierSymbol.
+ def enterPropertyModifierSymbol(self, ctx:TParser.PropertyModifierSymbolContext):
+ pass
+
+ # Exit a parse tree produced by TParser#propertyModifierSymbol.
+ def exitPropertyModifierSymbol(self, ctx:TParser.PropertyModifierSymbolContext):
+ pass
+
+
# Enter a parse tree produced by TParser#operationParameterSymbol.
def enterOperationParameterSymbol(self, ctx:TParser.OperationParameterSymbolContext):
pass
diff --git a/qface/idl/parser/TParser.py b/qface/idl/parser/TParser.py
index 15eadd6..835f86c 100644
--- a/qface/idl/parser/TParser.py
+++ b/qface/idl/parser/TParser.py
@@ -5,147 +5,156 @@ from io import StringIO
def serializedATN():
with StringIO() as buf:
- buf.write("\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3%")
- buf.write("\u0131\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
+ buf.write("\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3\'")
+ buf.write("\u0142\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23")
buf.write("\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31")
- buf.write("\t\31\3\2\3\2\7\2\65\n\2\f\2\16\28\13\2\3\3\3\3\7\3<\n")
- buf.write("\3\f\3\16\3?\13\3\3\4\3\4\3\4\3\4\5\4E\n\4\3\5\5\5H\n")
- buf.write("\5\3\5\7\5K\n\5\f\5\16\5N\13\5\3\5\3\5\3\5\3\5\5\5T\n")
- buf.write("\5\3\6\3\6\3\6\5\6Y\n\6\3\7\5\7\\\n\7\3\7\7\7_\n\7\f\7")
- buf.write("\16\7b\13\7\3\7\3\7\3\7\3\7\7\7h\n\7\f\7\16\7k\13\7\3")
- buf.write("\7\3\7\5\7o\n\7\3\b\3\b\3\b\5\bt\n\b\3\t\5\tw\n\t\3\t")
- buf.write("\7\tz\n\t\f\t\16\t}\13\t\3\t\3\t\5\t\u0081\n\t\3\t\3\t")
- buf.write("\3\t\7\t\u0086\n\t\f\t\16\t\u0089\13\t\3\t\3\t\5\t\u008d")
- buf.write("\n\t\3\n\5\n\u0090\n\n\3\n\7\n\u0093\n\n\f\n\16\n\u0096")
- buf.write("\13\n\3\n\3\n\3\n\3\n\7\n\u009c\n\n\f\n\16\n\u009f\13")
- buf.write("\n\3\n\3\n\5\n\u00a3\n\n\3\13\5\13\u00a6\n\13\3\13\7\13")
- buf.write("\u00a9\n\13\f\13\16\13\u00ac\13\13\3\13\5\13\u00af\n\13")
- buf.write("\3\13\3\13\3\13\5\13\u00b4\n\13\3\f\3\f\3\f\5\f\u00b9")
- buf.write("\n\f\3\r\3\r\3\16\3\16\3\16\5\16\u00c0\n\16\3\16\5\16")
- buf.write("\u00c3\n\16\3\17\3\17\3\17\3\17\5\17\u00c9\n\17\3\20\3")
- buf.write("\20\3\21\3\21\3\21\3\21\3\21\5\21\u00d2\n\21\3\22\3\22")
- buf.write("\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\24\5\24\u00df")
- buf.write("\n\24\3\24\7\24\u00e2\n\24\f\24\16\24\u00e5\13\24\3\24")
- buf.write("\3\24\3\24\3\24\7\24\u00eb\n\24\f\24\16\24\u00ee\13\24")
- buf.write("\3\24\3\24\5\24\u00f2\n\24\3\25\5\25\u00f5\n\25\3\25\7")
- buf.write("\25\u00f8\n\25\f\25\16\25\u00fb\13\25\3\25\3\25\3\25\5")
- buf.write("\25\u0100\n\25\3\26\5\26\u0103\n\26\3\26\7\26\u0106\n")
- buf.write("\26\f\26\16\26\u0109\13\26\3\26\3\26\3\26\3\26\7\26\u010f")
- buf.write("\n\26\f\26\16\26\u0112\13\26\3\26\3\26\5\26\u0116\n\26")
- buf.write("\3\27\3\27\5\27\u011a\n\27\3\30\5\30\u011d\n\30\3\30\7")
- buf.write("\30\u0120\n\30\f\30\16\30\u0123\13\30\3\30\3\30\3\30\5")
- buf.write("\30\u0128\n\30\3\30\5\30\u012b\n\30\3\31\3\31\5\31\u012f")
- buf.write("\n\31\3\31\2\2\32\2\4\6\b\n\f\16\20\22\24\26\30\32\34")
- buf.write("\36 \"$&(*,.\60\2\2\u014e\2\62\3\2\2\2\49\3\2\2\2\6@\3")
- buf.write("\2\2\2\bG\3\2\2\2\nX\3\2\2\2\f[\3\2\2\2\16s\3\2\2\2\20")
- buf.write("v\3\2\2\2\22\u008f\3\2\2\2\24\u00a5\3\2\2\2\26\u00b5\3")
- buf.write("\2\2\2\30\u00ba\3\2\2\2\32\u00bc\3\2\2\2\34\u00c8\3\2")
- buf.write("\2\2\36\u00ca\3\2\2\2 \u00d1\3\2\2\2\"\u00d3\3\2\2\2$")
- buf.write("\u00d8\3\2\2\2&\u00de\3\2\2\2(\u00f4\3\2\2\2*\u0102\3")
- buf.write("\2\2\2,\u0119\3\2\2\2.\u011c\3\2\2\2\60\u012e\3\2\2\2")
- buf.write("\62\66\5\4\3\2\63\65\5\n\6\2\64\63\3\2\2\2\658\3\2\2\2")
- buf.write("\66\64\3\2\2\2\66\67\3\2\2\2\67\3\3\2\2\28\66\3\2\2\2")
- buf.write("9=\5\b\5\2:<\5\6\4\2;:\3\2\2\2<?\3\2\2\2=;\3\2\2\2=>\3")
- buf.write("\2\2\2>\5\3\2\2\2?=\3\2\2\2@A\7\3\2\2AB\7 \2\2BD\7!\2")
- buf.write("\2CE\7\4\2\2DC\3\2\2\2DE\3\2\2\2E\7\3\2\2\2FH\7\"\2\2")
- buf.write("GF\3\2\2\2GH\3\2\2\2HL\3\2\2\2IK\5\30\r\2JI\3\2\2\2KN")
- buf.write("\3\2\2\2LJ\3\2\2\2LM\3\2\2\2MO\3\2\2\2NL\3\2\2\2OP\7\5")
- buf.write("\2\2PQ\7 \2\2QS\7!\2\2RT\7\4\2\2SR\3\2\2\2ST\3\2\2\2T")
- buf.write("\t\3\2\2\2UY\5\f\7\2VY\5&\24\2WY\5*\26\2XU\3\2\2\2XV\3")
- buf.write("\2\2\2XW\3\2\2\2Y\13\3\2\2\2Z\\\7\"\2\2[Z\3\2\2\2[\\\3")
- buf.write("\2\2\2\\`\3\2\2\2]_\5\30\r\2^]\3\2\2\2_b\3\2\2\2`^\3\2")
- buf.write("\2\2`a\3\2\2\2ac\3\2\2\2b`\3\2\2\2cd\7\6\2\2de\7 \2\2")
- buf.write("ei\7\7\2\2fh\5\16\b\2gf\3\2\2\2hk\3\2\2\2ig\3\2\2\2ij")
- buf.write("\3\2\2\2jl\3\2\2\2ki\3\2\2\2ln\7\b\2\2mo\7\4\2\2nm\3\2")
- buf.write("\2\2no\3\2\2\2o\r\3\2\2\2pt\5\20\t\2qt\5\24\13\2rt\5\22")
- buf.write("\n\2sp\3\2\2\2sq\3\2\2\2sr\3\2\2\2t\17\3\2\2\2uw\7\"\2")
- buf.write("\2vu\3\2\2\2vw\3\2\2\2w{\3\2\2\2xz\5\30\r\2yx\3\2\2\2")
- buf.write("z}\3\2\2\2{y\3\2\2\2{|\3\2\2\2|\u0080\3\2\2\2}{\3\2\2")
- buf.write("\2~\u0081\5\34\17\2\177\u0081\7\t\2\2\u0080~\3\2\2\2\u0080")
- buf.write("\177\3\2\2\2\u0081\u0082\3\2\2\2\u0082\u0083\7 \2\2\u0083")
- buf.write("\u0087\7\n\2\2\u0084\u0086\5\26\f\2\u0085\u0084\3\2\2")
- buf.write("\2\u0086\u0089\3\2\2\2\u0087\u0085\3\2\2\2\u0087\u0088")
- buf.write("\3\2\2\2\u0088\u008a\3\2\2\2\u0089\u0087\3\2\2\2\u008a")
- buf.write("\u008c\7\13\2\2\u008b\u008d\7\4\2\2\u008c\u008b\3\2\2")
- buf.write("\2\u008c\u008d\3\2\2\2\u008d\21\3\2\2\2\u008e\u0090\7")
- buf.write("\"\2\2\u008f\u008e\3\2\2\2\u008f\u0090\3\2\2\2\u0090\u0094")
- buf.write("\3\2\2\2\u0091\u0093\5\30\r\2\u0092\u0091\3\2\2\2\u0093")
- buf.write("\u0096\3\2\2\2\u0094\u0092\3\2\2\2\u0094\u0095\3\2\2\2")
- buf.write("\u0095\u0097\3\2\2\2\u0096\u0094\3\2\2\2\u0097\u0098\7")
- buf.write("\f\2\2\u0098\u0099\7 \2\2\u0099\u009d\7\n\2\2\u009a\u009c")
- buf.write("\5\26\f\2\u009b\u009a\3\2\2\2\u009c\u009f\3\2\2\2\u009d")
- buf.write("\u009b\3\2\2\2\u009d\u009e\3\2\2\2\u009e\u00a0\3\2\2\2")
- buf.write("\u009f\u009d\3\2\2\2\u00a0\u00a2\7\13\2\2\u00a1\u00a3")
- buf.write("\7\4\2\2\u00a2\u00a1\3\2\2\2\u00a2\u00a3\3\2\2\2\u00a3")
- buf.write("\23\3\2\2\2\u00a4\u00a6\7\"\2\2\u00a5\u00a4\3\2\2\2\u00a5")
- buf.write("\u00a6\3\2\2\2\u00a6\u00aa\3\2\2\2\u00a7\u00a9\5\30\r")
- buf.write("\2\u00a8\u00a7\3\2\2\2\u00a9\u00ac\3\2\2\2\u00aa\u00a8")
- buf.write("\3\2\2\2\u00aa\u00ab\3\2\2\2\u00ab\u00ae\3\2\2\2\u00ac")
- buf.write("\u00aa\3\2\2\2\u00ad\u00af\7\r\2\2\u00ae\u00ad\3\2\2\2")
- buf.write("\u00ae\u00af\3\2\2\2\u00af\u00b0\3\2\2\2\u00b0\u00b1\5")
- buf.write("\34\17\2\u00b1\u00b3\7 \2\2\u00b2\u00b4\7\4\2\2\u00b3")
- buf.write("\u00b2\3\2\2\2\u00b3\u00b4\3\2\2\2\u00b4\25\3\2\2\2\u00b5")
- buf.write("\u00b6\5\34\17\2\u00b6\u00b8\7 \2\2\u00b7\u00b9\7\16\2")
- buf.write("\2\u00b8\u00b7\3\2\2\2\u00b8\u00b9\3\2\2\2\u00b9\27\3")
- buf.write("\2\2\2\u00ba\u00bb\7\34\2\2\u00bb\31\3\2\2\2\u00bc\u00bf")
- buf.write("\7 \2\2\u00bd\u00be\7\17\2\2\u00be\u00c0\7 \2\2\u00bf")
- buf.write("\u00bd\3\2\2\2\u00bf\u00c0\3\2\2\2\u00c0\u00c2\3\2\2\2")
- buf.write("\u00c1\u00c3\7\16\2\2\u00c2\u00c1\3\2\2\2\u00c2\u00c3")
- buf.write("\3\2\2\2\u00c3\33\3\2\2\2\u00c4\u00c9\5 \21\2\u00c5\u00c9")
- buf.write("\5\36\20\2\u00c6\u00c9\5\"\22\2\u00c7\u00c9\5$\23\2\u00c8")
- buf.write("\u00c4\3\2\2\2\u00c8\u00c5\3\2\2\2\u00c8\u00c6\3\2\2\2")
- buf.write("\u00c8\u00c7\3\2\2\2\u00c9\35\3\2\2\2\u00ca\u00cb\7 \2")
- buf.write("\2\u00cb\37\3\2\2\2\u00cc\u00d2\7\20\2\2\u00cd\u00d2\7")
- buf.write("\21\2\2\u00ce\u00d2\7\22\2\2\u00cf\u00d2\7\23\2\2\u00d0")
- buf.write("\u00d2\7\24\2\2\u00d1\u00cc\3\2\2\2\u00d1\u00cd\3\2\2")
- buf.write("\2\u00d1\u00ce\3\2\2\2\u00d1\u00cf\3\2\2\2\u00d1\u00d0")
- buf.write("\3\2\2\2\u00d2!\3\2\2\2\u00d3\u00d4\7\25\2\2\u00d4\u00d5")
- buf.write("\7\26\2\2\u00d5\u00d6\5\34\17\2\u00d6\u00d7\7\27\2\2\u00d7")
- buf.write("#\3\2\2\2\u00d8\u00d9\7\30\2\2\u00d9\u00da\7\26\2\2\u00da")
- buf.write("\u00db\5\34\17\2\u00db\u00dc\7\27\2\2\u00dc%\3\2\2\2\u00dd")
- buf.write("\u00df\7\"\2\2\u00de\u00dd\3\2\2\2\u00de\u00df\3\2\2\2")
- buf.write("\u00df\u00e3\3\2\2\2\u00e0\u00e2\5\30\r\2\u00e1\u00e0")
- buf.write("\3\2\2\2\u00e2\u00e5\3\2\2\2\u00e3\u00e1\3\2\2\2\u00e3")
- buf.write("\u00e4\3\2\2\2\u00e4\u00e6\3\2\2\2\u00e5\u00e3\3\2\2\2")
- buf.write("\u00e6\u00e7\7\31\2\2\u00e7\u00e8\7 \2\2\u00e8\u00ec\7")
- buf.write("\7\2\2\u00e9\u00eb\5(\25\2\u00ea\u00e9\3\2\2\2\u00eb\u00ee")
- buf.write("\3\2\2\2\u00ec\u00ea\3\2\2\2\u00ec\u00ed\3\2\2\2\u00ed")
- buf.write("\u00ef\3\2\2\2\u00ee\u00ec\3\2\2\2\u00ef\u00f1\7\b\2\2")
- buf.write("\u00f0\u00f2\7\4\2\2\u00f1\u00f0\3\2\2\2\u00f1\u00f2\3")
- buf.write("\2\2\2\u00f2\'\3\2\2\2\u00f3\u00f5\7\"\2\2\u00f4\u00f3")
- buf.write("\3\2\2\2\u00f4\u00f5\3\2\2\2\u00f5\u00f9\3\2\2\2\u00f6")
- buf.write("\u00f8\5\30\r\2\u00f7\u00f6\3\2\2\2\u00f8\u00fb\3\2\2")
- buf.write("\2\u00f9\u00f7\3\2\2\2\u00f9\u00fa\3\2\2\2\u00fa\u00fc")
- buf.write("\3\2\2\2\u00fb\u00f9\3\2\2\2\u00fc\u00fd\5\34\17\2\u00fd")
- buf.write("\u00ff\7 \2\2\u00fe\u0100\7\4\2\2\u00ff\u00fe\3\2\2\2")
- buf.write("\u00ff\u0100\3\2\2\2\u0100)\3\2\2\2\u0101\u0103\7\"\2")
- buf.write("\2\u0102\u0101\3\2\2\2\u0102\u0103\3\2\2\2\u0103\u0107")
- buf.write("\3\2\2\2\u0104\u0106\5\30\r\2\u0105\u0104\3\2\2\2\u0106")
- buf.write("\u0109\3\2\2\2\u0107\u0105\3\2\2\2\u0107\u0108\3\2\2\2")
- buf.write("\u0108\u010a\3\2\2\2\u0109\u0107\3\2\2\2\u010a\u010b\5")
- buf.write(",\27\2\u010b\u010c\7 \2\2\u010c\u0110\7\7\2\2\u010d\u010f")
- buf.write("\5.\30\2\u010e\u010d\3\2\2\2\u010f\u0112\3\2\2\2\u0110")
- buf.write("\u010e\3\2\2\2\u0110\u0111\3\2\2\2\u0111\u0113\3\2\2\2")
- buf.write("\u0112\u0110\3\2\2\2\u0113\u0115\7\b\2\2\u0114\u0116\7")
- buf.write("\4\2\2\u0115\u0114\3\2\2\2\u0115\u0116\3\2\2\2\u0116+")
- buf.write("\3\2\2\2\u0117\u011a\7\32\2\2\u0118\u011a\7\33\2\2\u0119")
- buf.write("\u0117\3\2\2\2\u0119\u0118\3\2\2\2\u011a-\3\2\2\2\u011b")
- buf.write("\u011d\7\"\2\2\u011c\u011b\3\2\2\2\u011c\u011d\3\2\2\2")
- buf.write("\u011d\u0121\3\2\2\2\u011e\u0120\5\30\r\2\u011f\u011e")
- buf.write("\3\2\2\2\u0120\u0123\3\2\2\2\u0121\u011f\3\2\2\2\u0121")
- buf.write("\u0122\3\2\2\2\u0122\u0124\3\2\2\2\u0123\u0121\3\2\2\2")
- buf.write("\u0124\u0127\7 \2\2\u0125\u0126\7\17\2\2\u0126\u0128\5")
- buf.write("\60\31\2\u0127\u0125\3\2\2\2\u0127\u0128\3\2\2\2\u0128")
- buf.write("\u012a\3\2\2\2\u0129\u012b\7\16\2\2\u012a\u0129\3\2\2")
- buf.write("\2\u012a\u012b\3\2\2\2\u012b/\3\2\2\2\u012c\u012f\7\35")
- buf.write("\2\2\u012d\u012f\7\36\2\2\u012e\u012c\3\2\2\2\u012e\u012d")
- buf.write("\3\2\2\2\u012f\61\3\2\2\2\61\66=DGLSX[`insv{\u0080\u0087")
- buf.write("\u008c\u008f\u0094\u009d\u00a2\u00a5\u00aa\u00ae\u00b3")
- buf.write("\u00b8\u00bf\u00c2\u00c8\u00d1\u00de\u00e3\u00ec\u00f1")
- buf.write("\u00f4\u00f9\u00ff\u0102\u0107\u0110\u0115\u0119\u011c")
- buf.write("\u0121\u0127\u012a\u012e")
+ buf.write("\t\31\4\32\t\32\4\33\t\33\3\2\3\2\7\29\n\2\f\2\16\2<\13")
+ buf.write("\2\3\3\3\3\7\3@\n\3\f\3\16\3C\13\3\3\4\3\4\3\4\3\4\5\4")
+ buf.write("I\n\4\3\5\5\5L\n\5\3\5\7\5O\n\5\f\5\16\5R\13\5\3\5\3\5")
+ buf.write("\3\5\3\5\5\5X\n\5\3\6\3\6\3\6\5\6]\n\6\3\7\5\7`\n\7\3")
+ buf.write("\7\7\7c\n\7\f\7\16\7f\13\7\3\7\3\7\3\7\3\7\5\7l\n\7\3")
+ buf.write("\7\3\7\7\7p\n\7\f\7\16\7s\13\7\3\7\3\7\5\7w\n\7\3\b\3")
+ buf.write("\b\3\b\5\b|\n\b\3\t\5\t\177\n\t\3\t\7\t\u0082\n\t\f\t")
+ buf.write("\16\t\u0085\13\t\3\t\3\t\5\t\u0089\n\t\3\t\3\t\3\t\7\t")
+ buf.write("\u008e\n\t\f\t\16\t\u0091\13\t\3\t\3\t\5\t\u0095\n\t\3")
+ buf.write("\t\5\t\u0098\n\t\3\n\3\n\3\13\5\13\u009d\n\13\3\13\7\13")
+ buf.write("\u00a0\n\13\f\13\16\13\u00a3\13\13\3\13\3\13\3\13\3\13")
+ buf.write("\7\13\u00a9\n\13\f\13\16\13\u00ac\13\13\3\13\3\13\5\13")
+ buf.write("\u00b0\n\13\3\f\5\f\u00b3\n\f\3\f\7\f\u00b6\n\f\f\f\16")
+ buf.write("\f\u00b9\13\f\3\f\5\f\u00bc\n\f\3\f\3\f\3\f\5\f\u00c1")
+ buf.write("\n\f\3\r\3\r\5\r\u00c5\n\r\3\16\3\16\3\16\5\16\u00ca\n")
+ buf.write("\16\3\17\3\17\3\20\3\20\3\20\5\20\u00d1\n\20\3\20\5\20")
+ buf.write("\u00d4\n\20\3\21\3\21\3\21\3\21\5\21\u00da\n\21\3\22\3")
+ buf.write("\22\3\23\3\23\3\23\3\23\3\23\5\23\u00e3\n\23\3\24\3\24")
+ buf.write("\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\25\3\26\5\26\u00f0")
+ buf.write("\n\26\3\26\7\26\u00f3\n\26\f\26\16\26\u00f6\13\26\3\26")
+ buf.write("\3\26\3\26\3\26\7\26\u00fc\n\26\f\26\16\26\u00ff\13\26")
+ buf.write("\3\26\3\26\5\26\u0103\n\26\3\27\5\27\u0106\n\27\3\27\7")
+ buf.write("\27\u0109\n\27\f\27\16\27\u010c\13\27\3\27\3\27\3\27\5")
+ buf.write("\27\u0111\n\27\3\30\5\30\u0114\n\30\3\30\7\30\u0117\n")
+ buf.write("\30\f\30\16\30\u011a\13\30\3\30\3\30\3\30\3\30\7\30\u0120")
+ buf.write("\n\30\f\30\16\30\u0123\13\30\3\30\3\30\5\30\u0127\n\30")
+ buf.write("\3\31\3\31\5\31\u012b\n\31\3\32\5\32\u012e\n\32\3\32\7")
+ buf.write("\32\u0131\n\32\f\32\16\32\u0134\13\32\3\32\3\32\3\32\5")
+ buf.write("\32\u0139\n\32\3\32\5\32\u013c\n\32\3\33\3\33\5\33\u0140")
+ buf.write("\n\33\3\33\2\2\34\2\4\6\b\n\f\16\20\22\24\26\30\32\34")
+ buf.write("\36 \"$&(*,.\60\62\64\2\2\u0160\2\66\3\2\2\2\4=\3\2\2")
+ buf.write("\2\6D\3\2\2\2\bK\3\2\2\2\n\\\3\2\2\2\f_\3\2\2\2\16{\3")
+ buf.write("\2\2\2\20~\3\2\2\2\22\u0099\3\2\2\2\24\u009c\3\2\2\2\26")
+ buf.write("\u00b2\3\2\2\2\30\u00c4\3\2\2\2\32\u00c6\3\2\2\2\34\u00cb")
+ buf.write("\3\2\2\2\36\u00cd\3\2\2\2 \u00d9\3\2\2\2\"\u00db\3\2\2")
+ buf.write("\2$\u00e2\3\2\2\2&\u00e4\3\2\2\2(\u00e9\3\2\2\2*\u00ef")
+ buf.write("\3\2\2\2,\u0105\3\2\2\2.\u0113\3\2\2\2\60\u012a\3\2\2")
+ buf.write("\2\62\u012d\3\2\2\2\64\u013f\3\2\2\2\66:\5\4\3\2\679\5")
+ buf.write("\n\6\28\67\3\2\2\29<\3\2\2\2:8\3\2\2\2:;\3\2\2\2;\3\3")
+ buf.write("\2\2\2<:\3\2\2\2=A\5\b\5\2>@\5\6\4\2?>\3\2\2\2@C\3\2\2")
+ buf.write("\2A?\3\2\2\2AB\3\2\2\2B\5\3\2\2\2CA\3\2\2\2DE\7\3\2\2")
+ buf.write("EF\7\"\2\2FH\7#\2\2GI\7\4\2\2HG\3\2\2\2HI\3\2\2\2I\7\3")
+ buf.write("\2\2\2JL\7$\2\2KJ\3\2\2\2KL\3\2\2\2LP\3\2\2\2MO\5\34\17")
+ buf.write("\2NM\3\2\2\2OR\3\2\2\2PN\3\2\2\2PQ\3\2\2\2QS\3\2\2\2R")
+ buf.write("P\3\2\2\2ST\7\5\2\2TU\7\"\2\2UW\7#\2\2VX\7\4\2\2WV\3\2")
+ buf.write("\2\2WX\3\2\2\2X\t\3\2\2\2Y]\5\f\7\2Z]\5*\26\2[]\5.\30")
+ buf.write("\2\\Y\3\2\2\2\\Z\3\2\2\2\\[\3\2\2\2]\13\3\2\2\2^`\7$\2")
+ buf.write("\2_^\3\2\2\2_`\3\2\2\2`d\3\2\2\2ac\5\34\17\2ba\3\2\2\2")
+ buf.write("cf\3\2\2\2db\3\2\2\2de\3\2\2\2eg\3\2\2\2fd\3\2\2\2gh\7")
+ buf.write("\6\2\2hk\7\"\2\2ij\7\7\2\2jl\7\"\2\2ki\3\2\2\2kl\3\2\2")
+ buf.write("\2lm\3\2\2\2mq\7\b\2\2np\5\16\b\2on\3\2\2\2ps\3\2\2\2")
+ buf.write("qo\3\2\2\2qr\3\2\2\2rt\3\2\2\2sq\3\2\2\2tv\7\t\2\2uw\7")
+ buf.write("\4\2\2vu\3\2\2\2vw\3\2\2\2w\r\3\2\2\2x|\5\20\t\2y|\5\26")
+ buf.write("\f\2z|\5\24\13\2{x\3\2\2\2{y\3\2\2\2{z\3\2\2\2|\17\3\2")
+ buf.write("\2\2}\177\7$\2\2~}\3\2\2\2~\177\3\2\2\2\177\u0083\3\2")
+ buf.write("\2\2\u0080\u0082\5\34\17\2\u0081\u0080\3\2\2\2\u0082\u0085")
+ buf.write("\3\2\2\2\u0083\u0081\3\2\2\2\u0083\u0084\3\2\2\2\u0084")
+ buf.write("\u0088\3\2\2\2\u0085\u0083\3\2\2\2\u0086\u0089\5 \21\2")
+ buf.write("\u0087\u0089\7\n\2\2\u0088\u0086\3\2\2\2\u0088\u0087\3")
+ buf.write("\2\2\2\u0089\u008a\3\2\2\2\u008a\u008b\7\"\2\2\u008b\u008f")
+ buf.write("\7\13\2\2\u008c\u008e\5\32\16\2\u008d\u008c\3\2\2\2\u008e")
+ buf.write("\u0091\3\2\2\2\u008f\u008d\3\2\2\2\u008f\u0090\3\2\2\2")
+ buf.write("\u0090\u0092\3\2\2\2\u0091\u008f\3\2\2\2\u0092\u0094\7")
+ buf.write("\f\2\2\u0093\u0095\5\22\n\2\u0094\u0093\3\2\2\2\u0094")
+ buf.write("\u0095\3\2\2\2\u0095\u0097\3\2\2\2\u0096\u0098\7\4\2\2")
+ buf.write("\u0097\u0096\3\2\2\2\u0097\u0098\3\2\2\2\u0098\21\3\2")
+ buf.write("\2\2\u0099\u009a\7\r\2\2\u009a\23\3\2\2\2\u009b\u009d")
+ buf.write("\7$\2\2\u009c\u009b\3\2\2\2\u009c\u009d\3\2\2\2\u009d")
+ buf.write("\u00a1\3\2\2\2\u009e\u00a0\5\34\17\2\u009f\u009e\3\2\2")
+ buf.write("\2\u00a0\u00a3\3\2\2\2\u00a1\u009f\3\2\2\2\u00a1\u00a2")
+ buf.write("\3\2\2\2\u00a2\u00a4\3\2\2\2\u00a3\u00a1\3\2\2\2\u00a4")
+ buf.write("\u00a5\7\16\2\2\u00a5\u00a6\7\"\2\2\u00a6\u00aa\7\13\2")
+ buf.write("\2\u00a7\u00a9\5\32\16\2\u00a8\u00a7\3\2\2\2\u00a9\u00ac")
+ buf.write("\3\2\2\2\u00aa\u00a8\3\2\2\2\u00aa\u00ab\3\2\2\2\u00ab")
+ buf.write("\u00ad\3\2\2\2\u00ac\u00aa\3\2\2\2\u00ad\u00af\7\f\2\2")
+ buf.write("\u00ae\u00b0\7\4\2\2\u00af\u00ae\3\2\2\2\u00af\u00b0\3")
+ buf.write("\2\2\2\u00b0\25\3\2\2\2\u00b1\u00b3\7$\2\2\u00b2\u00b1")
+ buf.write("\3\2\2\2\u00b2\u00b3\3\2\2\2\u00b3\u00b7\3\2\2\2\u00b4")
+ buf.write("\u00b6\5\34\17\2\u00b5\u00b4\3\2\2\2\u00b6\u00b9\3\2\2")
+ buf.write("\2\u00b7\u00b5\3\2\2\2\u00b7\u00b8\3\2\2\2\u00b8\u00bb")
+ buf.write("\3\2\2\2\u00b9\u00b7\3\2\2\2\u00ba\u00bc\5\30\r\2\u00bb")
+ buf.write("\u00ba\3\2\2\2\u00bb\u00bc\3\2\2\2\u00bc\u00bd\3\2\2\2")
+ buf.write("\u00bd\u00be\5 \21\2\u00be\u00c0\7\"\2\2\u00bf\u00c1\7")
+ buf.write("\4\2\2\u00c0\u00bf\3\2\2\2\u00c0\u00c1\3\2\2\2\u00c1\27")
+ buf.write("\3\2\2\2\u00c2\u00c5\7\17\2\2\u00c3\u00c5\7\r\2\2\u00c4")
+ buf.write("\u00c2\3\2\2\2\u00c4\u00c3\3\2\2\2\u00c5\31\3\2\2\2\u00c6")
+ buf.write("\u00c7\5 \21\2\u00c7\u00c9\7\"\2\2\u00c8\u00ca\7\20\2")
+ buf.write("\2\u00c9\u00c8\3\2\2\2\u00c9\u00ca\3\2\2\2\u00ca\33\3")
+ buf.write("\2\2\2\u00cb\u00cc\7\36\2\2\u00cc\35\3\2\2\2\u00cd\u00d0")
+ buf.write("\7\"\2\2\u00ce\u00cf\7\21\2\2\u00cf\u00d1\7\"\2\2\u00d0")
+ buf.write("\u00ce\3\2\2\2\u00d0\u00d1\3\2\2\2\u00d1\u00d3\3\2\2\2")
+ buf.write("\u00d2\u00d4\7\20\2\2\u00d3\u00d2\3\2\2\2\u00d3\u00d4")
+ buf.write("\3\2\2\2\u00d4\37\3\2\2\2\u00d5\u00da\5$\23\2\u00d6\u00da")
+ buf.write("\5\"\22\2\u00d7\u00da\5&\24\2\u00d8\u00da\5(\25\2\u00d9")
+ buf.write("\u00d5\3\2\2\2\u00d9\u00d6\3\2\2\2\u00d9\u00d7\3\2\2\2")
+ buf.write("\u00d9\u00d8\3\2\2\2\u00da!\3\2\2\2\u00db\u00dc\7\"\2")
+ buf.write("\2\u00dc#\3\2\2\2\u00dd\u00e3\7\22\2\2\u00de\u00e3\7\23")
+ buf.write("\2\2\u00df\u00e3\7\24\2\2\u00e0\u00e3\7\25\2\2\u00e1\u00e3")
+ buf.write("\7\26\2\2\u00e2\u00dd\3\2\2\2\u00e2\u00de\3\2\2\2\u00e2")
+ buf.write("\u00df\3\2\2\2\u00e2\u00e0\3\2\2\2\u00e2\u00e1\3\2\2\2")
+ buf.write("\u00e3%\3\2\2\2\u00e4\u00e5\7\27\2\2\u00e5\u00e6\7\30")
+ buf.write("\2\2\u00e6\u00e7\5 \21\2\u00e7\u00e8\7\31\2\2\u00e8\'")
+ buf.write("\3\2\2\2\u00e9\u00ea\7\32\2\2\u00ea\u00eb\7\30\2\2\u00eb")
+ buf.write("\u00ec\5 \21\2\u00ec\u00ed\7\31\2\2\u00ed)\3\2\2\2\u00ee")
+ buf.write("\u00f0\7$\2\2\u00ef\u00ee\3\2\2\2\u00ef\u00f0\3\2\2\2")
+ buf.write("\u00f0\u00f4\3\2\2\2\u00f1\u00f3\5\34\17\2\u00f2\u00f1")
+ buf.write("\3\2\2\2\u00f3\u00f6\3\2\2\2\u00f4\u00f2\3\2\2\2\u00f4")
+ buf.write("\u00f5\3\2\2\2\u00f5\u00f7\3\2\2\2\u00f6\u00f4\3\2\2\2")
+ buf.write("\u00f7\u00f8\7\33\2\2\u00f8\u00f9\7\"\2\2\u00f9\u00fd")
+ buf.write("\7\b\2\2\u00fa\u00fc\5,\27\2\u00fb\u00fa\3\2\2\2\u00fc")
+ buf.write("\u00ff\3\2\2\2\u00fd\u00fb\3\2\2\2\u00fd\u00fe\3\2\2\2")
+ buf.write("\u00fe\u0100\3\2\2\2\u00ff\u00fd\3\2\2\2\u0100\u0102\7")
+ buf.write("\t\2\2\u0101\u0103\7\4\2\2\u0102\u0101\3\2\2\2\u0102\u0103")
+ buf.write("\3\2\2\2\u0103+\3\2\2\2\u0104\u0106\7$\2\2\u0105\u0104")
+ buf.write("\3\2\2\2\u0105\u0106\3\2\2\2\u0106\u010a\3\2\2\2\u0107")
+ buf.write("\u0109\5\34\17\2\u0108\u0107\3\2\2\2\u0109\u010c\3\2\2")
+ buf.write("\2\u010a\u0108\3\2\2\2\u010a\u010b\3\2\2\2\u010b\u010d")
+ buf.write("\3\2\2\2\u010c\u010a\3\2\2\2\u010d\u010e\5 \21\2\u010e")
+ buf.write("\u0110\7\"\2\2\u010f\u0111\7\4\2\2\u0110\u010f\3\2\2\2")
+ buf.write("\u0110\u0111\3\2\2\2\u0111-\3\2\2\2\u0112\u0114\7$\2\2")
+ buf.write("\u0113\u0112\3\2\2\2\u0113\u0114\3\2\2\2\u0114\u0118\3")
+ buf.write("\2\2\2\u0115\u0117\5\34\17\2\u0116\u0115\3\2\2\2\u0117")
+ buf.write("\u011a\3\2\2\2\u0118\u0116\3\2\2\2\u0118\u0119\3\2\2\2")
+ buf.write("\u0119\u011b\3\2\2\2\u011a\u0118\3\2\2\2\u011b\u011c\5")
+ buf.write("\60\31\2\u011c\u011d\7\"\2\2\u011d\u0121\7\b\2\2\u011e")
+ buf.write("\u0120\5\62\32\2\u011f\u011e\3\2\2\2\u0120\u0123\3\2\2")
+ buf.write("\2\u0121\u011f\3\2\2\2\u0121\u0122\3\2\2\2\u0122\u0124")
+ buf.write("\3\2\2\2\u0123\u0121\3\2\2\2\u0124\u0126\7\t\2\2\u0125")
+ buf.write("\u0127\7\4\2\2\u0126\u0125\3\2\2\2\u0126\u0127\3\2\2\2")
+ buf.write("\u0127/\3\2\2\2\u0128\u012b\7\34\2\2\u0129\u012b\7\35")
+ buf.write("\2\2\u012a\u0128\3\2\2\2\u012a\u0129\3\2\2\2\u012b\61")
+ buf.write("\3\2\2\2\u012c\u012e\7$\2\2\u012d\u012c\3\2\2\2\u012d")
+ buf.write("\u012e\3\2\2\2\u012e\u0132\3\2\2\2\u012f\u0131\5\34\17")
+ buf.write("\2\u0130\u012f\3\2\2\2\u0131\u0134\3\2\2\2\u0132\u0130")
+ buf.write("\3\2\2\2\u0132\u0133\3\2\2\2\u0133\u0135\3\2\2\2\u0134")
+ buf.write("\u0132\3\2\2\2\u0135\u0138\7\"\2\2\u0136\u0137\7\21\2")
+ buf.write("\2\u0137\u0139\5\64\33\2\u0138\u0136\3\2\2\2\u0138\u0139")
+ buf.write("\3\2\2\2\u0139\u013b\3\2\2\2\u013a\u013c\7\20\2\2\u013b")
+ buf.write("\u013a\3\2\2\2\u013b\u013c\3\2\2\2\u013c\63\3\2\2\2\u013d")
+ buf.write("\u0140\7\37\2\2\u013e\u0140\7 \2\2\u013f\u013d\3\2\2\2")
+ buf.write("\u013f\u013e\3\2\2\2\u0140\65\3\2\2\2\64:AHKPW\\_dkqv")
+ buf.write("{~\u0083\u0088\u008f\u0094\u0097\u009c\u00a1\u00aa\u00af")
+ buf.write("\u00b2\u00b7\u00bb\u00c0\u00c4\u00c9\u00d0\u00d3\u00d9")
+ buf.write("\u00e2\u00ef\u00f4\u00fd\u0102\u0105\u010a\u0110\u0113")
+ buf.write("\u0118\u0121\u0126\u012a\u012d\u0132\u0138\u013b\u013f")
return buf.getvalue()
@@ -160,10 +169,10 @@ class TParser ( Parser ):
sharedContextCache = PredictionContextCache()
literalNames = [ "<INVALID>", "'import'", "';'", "'module'", "'interface'",
- "'{'", "'}'", "'void'", "'('", "')'", "'signal'", "'readonly'",
- "','", "'='", "'bool'", "'int'", "'real'", "'string'",
- "'var'", "'list'", "'<'", "'>'", "'model'", "'struct'",
- "'enum'", "'flag'" ]
+ "'extends'", "'{'", "'}'", "'void'", "'('", "')'",
+ "'const'", "'signal'", "'readonly'", "','", "'='",
+ "'bool'", "'int'", "'real'", "'string'", "'var'", "'list'",
+ "'<'", "'>'", "'model'", "'struct'", "'enum'", "'flag'" ]
symbolicNames = [ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
@@ -171,9 +180,10 @@ class TParser ( Parser ):
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
- "<INVALID>", "<INVALID>", "TAGLINE", "INTCONSTANT",
- "HEXCONSTANT", "TAGIDENTIFIER", "IDENTIFIER", "VERSION",
- "DOCCOMMENT", "WHITESPACE", "COMMENT", "MULTICOMM" ]
+ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
+ "TAGLINE", "INTCONSTANT", "HEXCONSTANT", "TAGIDENTIFIER",
+ "IDENTIFIER", "VERSION", "DOCCOMMENT", "WHITESPACE",
+ "COMMENT", "MULTICOMM" ]
RULE_documentSymbol = 0
RULE_headerSymbol = 1
@@ -183,30 +193,32 @@ class TParser ( Parser ):
RULE_interfaceSymbol = 5
RULE_interfaceMemberSymbol = 6
RULE_operationSymbol = 7
- RULE_signalSymbol = 8
- RULE_propertySymbol = 9
- RULE_operationParameterSymbol = 10
- RULE_tagSymbol = 11
- RULE_tagAttributeSymbol = 12
- RULE_typeSymbol = 13
- RULE_complexTypeSymbol = 14
- RULE_primitiveTypeSymbol = 15
- RULE_listTypeSymbol = 16
- RULE_modelTypeSymbol = 17
- RULE_structSymbol = 18
- RULE_structFieldSymbol = 19
- RULE_enumSymbol = 20
- RULE_enumTypeSymbol = 21
- RULE_enumMemberSymbol = 22
- RULE_intSymbol = 23
+ RULE_operationModifierSymbol = 8
+ RULE_signalSymbol = 9
+ RULE_propertySymbol = 10
+ RULE_propertyModifierSymbol = 11
+ RULE_operationParameterSymbol = 12
+ RULE_tagSymbol = 13
+ RULE_tagAttributeSymbol = 14
+ RULE_typeSymbol = 15
+ RULE_complexTypeSymbol = 16
+ RULE_primitiveTypeSymbol = 17
+ RULE_listTypeSymbol = 18
+ RULE_modelTypeSymbol = 19
+ RULE_structSymbol = 20
+ RULE_structFieldSymbol = 21
+ RULE_enumSymbol = 22
+ RULE_enumTypeSymbol = 23
+ RULE_enumMemberSymbol = 24
+ RULE_intSymbol = 25
ruleNames = [ "documentSymbol", "headerSymbol", "importSymbol", "moduleSymbol",
"definitionSymbol", "interfaceSymbol", "interfaceMemberSymbol",
- "operationSymbol", "signalSymbol", "propertySymbol",
- "operationParameterSymbol", "tagSymbol", "tagAttributeSymbol",
- "typeSymbol", "complexTypeSymbol", "primitiveTypeSymbol",
- "listTypeSymbol", "modelTypeSymbol", "structSymbol",
- "structFieldSymbol", "enumSymbol", "enumTypeSymbol",
+ "operationSymbol", "operationModifierSymbol", "signalSymbol",
+ "propertySymbol", "propertyModifierSymbol", "operationParameterSymbol",
+ "tagSymbol", "tagAttributeSymbol", "typeSymbol", "complexTypeSymbol",
+ "primitiveTypeSymbol", "listTypeSymbol", "modelTypeSymbol",
+ "structSymbol", "structFieldSymbol", "enumSymbol", "enumTypeSymbol",
"enumMemberSymbol", "intSymbol" ]
EOF = Token.EOF
@@ -235,16 +247,18 @@ class TParser ( Parser ):
T__22=23
T__23=24
T__24=25
- TAGLINE=26
- INTCONSTANT=27
- HEXCONSTANT=28
- TAGIDENTIFIER=29
- IDENTIFIER=30
- VERSION=31
- DOCCOMMENT=32
- WHITESPACE=33
- COMMENT=34
- MULTICOMM=35
+ T__25=26
+ T__26=27
+ TAGLINE=28
+ INTCONSTANT=29
+ HEXCONSTANT=30
+ TAGIDENTIFIER=31
+ IDENTIFIER=32
+ VERSION=33
+ DOCCOMMENT=34
+ WHITESPACE=35
+ COMMENT=36
+ MULTICOMM=37
def __init__(self, input:TokenStream):
super().__init__(input)
@@ -298,15 +312,15 @@ class TParser ( Parser ):
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
- self.state = 48
- self.headerSymbol()
self.state = 52
+ self.headerSymbol()
+ self.state = 56
self._errHandler.sync(self)
_la = self._input.LA(1)
- while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << TParser.T__3) | (1 << TParser.T__22) | (1 << TParser.T__23) | (1 << TParser.T__24) | (1 << TParser.TAGLINE) | (1 << TParser.DOCCOMMENT))) != 0):
- self.state = 49
+ while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << TParser.T__3) | (1 << TParser.T__24) | (1 << TParser.T__25) | (1 << TParser.T__26) | (1 << TParser.TAGLINE) | (1 << TParser.DOCCOMMENT))) != 0):
+ self.state = 53
self.definitionSymbol()
- self.state = 54
+ self.state = 58
self._errHandler.sync(self)
_la = self._input.LA(1)
@@ -362,15 +376,15 @@ class TParser ( Parser ):
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
- self.state = 55
- self.moduleSymbol()
self.state = 59
+ self.moduleSymbol()
+ self.state = 63
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==TParser.T__0:
- self.state = 56
+ self.state = 60
self.importSymbol()
- self.state = 61
+ self.state = 65
self._errHandler.sync(self)
_la = self._input.LA(1)
@@ -423,17 +437,17 @@ class TParser ( Parser ):
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
- self.state = 62
+ self.state = 66
self.match(TParser.T__0)
- self.state = 63
+ self.state = 67
localctx.name = self.match(TParser.IDENTIFIER)
- self.state = 64
+ self.state = 68
localctx.version = self.match(TParser.VERSION)
- self.state = 66
+ self.state = 70
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==TParser.T__1:
- self.state = 65
+ self.state = 69
self.match(TParser.T__1)
@@ -497,35 +511,35 @@ class TParser ( Parser ):
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
- self.state = 69
+ self.state = 73
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==TParser.DOCCOMMENT:
- self.state = 68
+ self.state = 72
localctx.comment = self.match(TParser.DOCCOMMENT)
- self.state = 74
+ self.state = 78
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==TParser.TAGLINE:
- self.state = 71
+ self.state = 75
self.tagSymbol()
- self.state = 76
+ self.state = 80
self._errHandler.sync(self)
_la = self._input.LA(1)
- self.state = 77
+ self.state = 81
self.match(TParser.T__2)
- self.state = 78
+ self.state = 82
localctx.name = self.match(TParser.IDENTIFIER)
- self.state = 79
+ self.state = 83
localctx.version = self.match(TParser.VERSION)
- self.state = 81
+ self.state = 85
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==TParser.T__1:
- self.state = 80
+ self.state = 84
self.match(TParser.T__1)
@@ -580,24 +594,24 @@ class TParser ( Parser ):
localctx = TParser.DefinitionSymbolContext(self, self._ctx, self.state)
self.enterRule(localctx, 8, self.RULE_definitionSymbol)
try:
- self.state = 86
+ self.state = 90
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,6,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
- self.state = 83
+ self.state = 87
self.interfaceSymbol()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
- self.state = 84
+ self.state = 88
self.structSymbol()
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
- self.state = 85
+ self.state = 89
self.enumSymbol()
pass
@@ -617,9 +631,13 @@ class TParser ( Parser ):
self.parser = parser
self.comment = None # Token
self.name = None # Token
+ self.extends = None # Token
- def IDENTIFIER(self):
- return self.getToken(TParser.IDENTIFIER, 0)
+ def IDENTIFIER(self, i:int=None):
+ if i is None:
+ return self.getTokens(TParser.IDENTIFIER)
+ else:
+ return self.getToken(TParser.IDENTIFIER, i)
def tagSymbol(self, i:int=None):
if i is None:
@@ -665,47 +683,57 @@ class TParser ( Parser ):
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
- self.state = 89
+ self.state = 93
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==TParser.DOCCOMMENT:
- self.state = 88
+ self.state = 92
localctx.comment = self.match(TParser.DOCCOMMENT)
- self.state = 94
+ self.state = 98
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==TParser.TAGLINE:
- self.state = 91
+ self.state = 95
self.tagSymbol()
- self.state = 96
+ self.state = 100
self._errHandler.sync(self)
_la = self._input.LA(1)
- self.state = 97
+ self.state = 101
self.match(TParser.T__3)
- self.state = 98
+ self.state = 102
localctx.name = self.match(TParser.IDENTIFIER)
- self.state = 99
- self.match(TParser.T__4)
- self.state = 103
+ self.state = 105
self._errHandler.sync(self)
_la = self._input.LA(1)
- while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << TParser.T__6) | (1 << TParser.T__9) | (1 << TParser.T__10) | (1 << TParser.T__13) | (1 << TParser.T__14) | (1 << TParser.T__15) | (1 << TParser.T__16) | (1 << TParser.T__17) | (1 << TParser.T__18) | (1 << TParser.T__21) | (1 << TParser.TAGLINE) | (1 << TParser.IDENTIFIER) | (1 << TParser.DOCCOMMENT))) != 0):
- self.state = 100
+ if _la==TParser.T__4:
+ self.state = 103
+ self.match(TParser.T__4)
+ self.state = 104
+ localctx.extends = self.match(TParser.IDENTIFIER)
+
+
+ self.state = 107
+ self.match(TParser.T__5)
+ self.state = 111
+ self._errHandler.sync(self)
+ _la = self._input.LA(1)
+ while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << TParser.T__7) | (1 << TParser.T__10) | (1 << TParser.T__11) | (1 << TParser.T__12) | (1 << TParser.T__15) | (1 << TParser.T__16) | (1 << TParser.T__17) | (1 << TParser.T__18) | (1 << TParser.T__19) | (1 << TParser.T__20) | (1 << TParser.T__23) | (1 << TParser.TAGLINE) | (1 << TParser.IDENTIFIER) | (1 << TParser.DOCCOMMENT))) != 0):
+ self.state = 108
self.interfaceMemberSymbol()
- self.state = 105
+ self.state = 113
self._errHandler.sync(self)
_la = self._input.LA(1)
- self.state = 106
- self.match(TParser.T__5)
- self.state = 108
+ self.state = 114
+ self.match(TParser.T__6)
+ self.state = 116
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==TParser.T__1:
- self.state = 107
+ self.state = 115
self.match(TParser.T__1)
@@ -760,24 +788,24 @@ class TParser ( Parser ):
localctx = TParser.InterfaceMemberSymbolContext(self, self._ctx, self.state)
self.enterRule(localctx, 12, self.RULE_interfaceMemberSymbol)
try:
- self.state = 113
+ self.state = 121
self._errHandler.sync(self)
- la_ = self._interp.adaptivePredict(self._input,11,self._ctx)
+ la_ = self._interp.adaptivePredict(self._input,12,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
- self.state = 110
+ self.state = 118
self.operationSymbol()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
- self.state = 111
+ self.state = 119
self.propertySymbol()
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
- self.state = 112
+ self.state = 120
self.signalSymbol()
pass
@@ -819,6 +847,10 @@ class TParser ( Parser ):
return self.getTypedRuleContext(TParser.OperationParameterSymbolContext,i)
+ def operationModifierSymbol(self):
+ return self.getTypedRuleContext(TParser.OperationModifierSymbolContext,0)
+
+
def DOCCOMMENT(self):
return self.getToken(TParser.DOCCOMMENT, 0)
@@ -849,59 +881,67 @@ class TParser ( Parser ):
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
- self.state = 116
+ self.state = 124
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==TParser.DOCCOMMENT:
- self.state = 115
+ self.state = 123
localctx.comment = self.match(TParser.DOCCOMMENT)
- self.state = 121
+ self.state = 129
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==TParser.TAGLINE:
- self.state = 118
+ self.state = 126
self.tagSymbol()
- self.state = 123
+ self.state = 131
self._errHandler.sync(self)
_la = self._input.LA(1)
- self.state = 126
+ self.state = 134
self._errHandler.sync(self)
token = self._input.LA(1)
- if token in [TParser.T__13, TParser.T__14, TParser.T__15, TParser.T__16, TParser.T__17, TParser.T__18, TParser.T__21, TParser.IDENTIFIER]:
- self.state = 124
+ if token in [TParser.T__15, TParser.T__16, TParser.T__17, TParser.T__18, TParser.T__19, TParser.T__20, TParser.T__23, TParser.IDENTIFIER]:
+ self.state = 132
self.typeSymbol()
pass
- elif token in [TParser.T__6]:
- self.state = 125
- self.match(TParser.T__6)
+ elif token in [TParser.T__7]:
+ self.state = 133
+ self.match(TParser.T__7)
pass
else:
raise NoViableAltException(self)
- self.state = 128
+ self.state = 136
localctx.name = self.match(TParser.IDENTIFIER)
- self.state = 129
- self.match(TParser.T__7)
- self.state = 133
+ self.state = 137
+ self.match(TParser.T__8)
+ self.state = 141
self._errHandler.sync(self)
_la = self._input.LA(1)
- while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << TParser.T__13) | (1 << TParser.T__14) | (1 << TParser.T__15) | (1 << TParser.T__16) | (1 << TParser.T__17) | (1 << TParser.T__18) | (1 << TParser.T__21) | (1 << TParser.IDENTIFIER))) != 0):
- self.state = 130
+ while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << TParser.T__15) | (1 << TParser.T__16) | (1 << TParser.T__17) | (1 << TParser.T__18) | (1 << TParser.T__19) | (1 << TParser.T__20) | (1 << TParser.T__23) | (1 << TParser.IDENTIFIER))) != 0):
+ self.state = 138
self.operationParameterSymbol()
- self.state = 135
+ self.state = 143
self._errHandler.sync(self)
_la = self._input.LA(1)
- self.state = 136
- self.match(TParser.T__8)
- self.state = 138
+ self.state = 144
+ self.match(TParser.T__9)
+ self.state = 146
+ self._errHandler.sync(self)
+ la_ = self._interp.adaptivePredict(self._input,17,self._ctx)
+ if la_ == 1:
+ self.state = 145
+ self.operationModifierSymbol()
+
+
+ self.state = 149
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==TParser.T__1:
- self.state = 137
+ self.state = 148
self.match(TParser.T__1)
@@ -913,6 +953,50 @@ class TParser ( Parser ):
self.exitRule()
return localctx
+ class OperationModifierSymbolContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+ self.is_const = None # Token
+
+
+ def getRuleIndex(self):
+ return TParser.RULE_operationModifierSymbol
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterOperationModifierSymbol" ):
+ listener.enterOperationModifierSymbol(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitOperationModifierSymbol" ):
+ listener.exitOperationModifierSymbol(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitOperationModifierSymbol" ):
+ return visitor.visitOperationModifierSymbol(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def operationModifierSymbol(self):
+
+ localctx = TParser.OperationModifierSymbolContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 16, self.RULE_operationModifierSymbol)
+ try:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 151
+ localctx.is_const = self.match(TParser.T__10)
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
class SignalSymbolContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
@@ -964,51 +1048,51 @@ class TParser ( Parser ):
def signalSymbol(self):
localctx = TParser.SignalSymbolContext(self, self._ctx, self.state)
- self.enterRule(localctx, 16, self.RULE_signalSymbol)
+ self.enterRule(localctx, 18, self.RULE_signalSymbol)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
- self.state = 141
+ self.state = 154
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==TParser.DOCCOMMENT:
- self.state = 140
+ self.state = 153
localctx.comment = self.match(TParser.DOCCOMMENT)
- self.state = 146
+ self.state = 159
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==TParser.TAGLINE:
- self.state = 143
+ self.state = 156
self.tagSymbol()
- self.state = 148
+ self.state = 161
self._errHandler.sync(self)
_la = self._input.LA(1)
- self.state = 149
- self.match(TParser.T__9)
- self.state = 150
+ self.state = 162
+ self.match(TParser.T__11)
+ self.state = 163
localctx.name = self.match(TParser.IDENTIFIER)
- self.state = 151
- self.match(TParser.T__7)
- self.state = 155
+ self.state = 164
+ self.match(TParser.T__8)
+ self.state = 168
self._errHandler.sync(self)
_la = self._input.LA(1)
- while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << TParser.T__13) | (1 << TParser.T__14) | (1 << TParser.T__15) | (1 << TParser.T__16) | (1 << TParser.T__17) | (1 << TParser.T__18) | (1 << TParser.T__21) | (1 << TParser.IDENTIFIER))) != 0):
- self.state = 152
+ while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << TParser.T__15) | (1 << TParser.T__16) | (1 << TParser.T__17) | (1 << TParser.T__18) | (1 << TParser.T__19) | (1 << TParser.T__20) | (1 << TParser.T__23) | (1 << TParser.IDENTIFIER))) != 0):
+ self.state = 165
self.operationParameterSymbol()
- self.state = 157
+ self.state = 170
self._errHandler.sync(self)
_la = self._input.LA(1)
- self.state = 158
- self.match(TParser.T__8)
- self.state = 160
+ self.state = 171
+ self.match(TParser.T__9)
+ self.state = 173
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==TParser.T__1:
- self.state = 159
+ self.state = 172
self.match(TParser.T__1)
@@ -1026,7 +1110,6 @@ class TParser ( Parser ):
super().__init__(parent, invokingState)
self.parser = parser
self.comment = None # Token
- self.isReadOnly = None # Token
self.name = None # Token
def typeSymbol(self):
@@ -1043,6 +1126,10 @@ class TParser ( Parser ):
return self.getTypedRuleContext(TParser.TagSymbolContext,i)
+ def propertyModifierSymbol(self):
+ return self.getTypedRuleContext(TParser.PropertyModifierSymbolContext,0)
+
+
def DOCCOMMENT(self):
return self.getToken(TParser.DOCCOMMENT, 0)
@@ -1069,45 +1156,45 @@ class TParser ( Parser ):
def propertySymbol(self):
localctx = TParser.PropertySymbolContext(self, self._ctx, self.state)
- self.enterRule(localctx, 18, self.RULE_propertySymbol)
+ self.enterRule(localctx, 20, self.RULE_propertySymbol)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
- self.state = 163
+ self.state = 176
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==TParser.DOCCOMMENT:
- self.state = 162
+ self.state = 175
localctx.comment = self.match(TParser.DOCCOMMENT)
- self.state = 168
+ self.state = 181
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==TParser.TAGLINE:
- self.state = 165
+ self.state = 178
self.tagSymbol()
- self.state = 170
+ self.state = 183
self._errHandler.sync(self)
_la = self._input.LA(1)
- self.state = 172
+ self.state = 185
self._errHandler.sync(self)
_la = self._input.LA(1)
- if _la==TParser.T__10:
- self.state = 171
- localctx.isReadOnly = self.match(TParser.T__10)
+ if _la==TParser.T__10 or _la==TParser.T__12:
+ self.state = 184
+ self.propertyModifierSymbol()
- self.state = 174
+ self.state = 187
self.typeSymbol()
- self.state = 175
+ self.state = 188
localctx.name = self.match(TParser.IDENTIFIER)
- self.state = 177
+ self.state = 190
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==TParser.T__1:
- self.state = 176
+ self.state = 189
self.match(TParser.T__1)
@@ -1119,6 +1206,64 @@ class TParser ( Parser ):
self.exitRule()
return localctx
+ class PropertyModifierSymbolContext(ParserRuleContext):
+
+ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
+ super().__init__(parent, invokingState)
+ self.parser = parser
+ self.is_readonly = None # Token
+ self.is_const = None # Token
+
+
+ def getRuleIndex(self):
+ return TParser.RULE_propertyModifierSymbol
+
+ def enterRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "enterPropertyModifierSymbol" ):
+ listener.enterPropertyModifierSymbol(self)
+
+ def exitRule(self, listener:ParseTreeListener):
+ if hasattr( listener, "exitPropertyModifierSymbol" ):
+ listener.exitPropertyModifierSymbol(self)
+
+ def accept(self, visitor:ParseTreeVisitor):
+ if hasattr( visitor, "visitPropertyModifierSymbol" ):
+ return visitor.visitPropertyModifierSymbol(self)
+ else:
+ return visitor.visitChildren(self)
+
+
+
+
+ def propertyModifierSymbol(self):
+
+ localctx = TParser.PropertyModifierSymbolContext(self, self._ctx, self.state)
+ self.enterRule(localctx, 22, self.RULE_propertyModifierSymbol)
+ try:
+ self.state = 194
+ self._errHandler.sync(self)
+ token = self._input.LA(1)
+ if token in [TParser.T__12]:
+ self.enterOuterAlt(localctx, 1)
+ self.state = 192
+ localctx.is_readonly = self.match(TParser.T__12)
+ pass
+ elif token in [TParser.T__10]:
+ self.enterOuterAlt(localctx, 2)
+ self.state = 193
+ localctx.is_const = self.match(TParser.T__10)
+ pass
+ else:
+ raise NoViableAltException(self)
+
+ except RecognitionException as re:
+ localctx.exception = re
+ self._errHandler.reportError(self, re)
+ self._errHandler.recover(self, re)
+ finally:
+ self.exitRule()
+ return localctx
+
class OperationParameterSymbolContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
@@ -1156,20 +1301,20 @@ class TParser ( Parser ):
def operationParameterSymbol(self):
localctx = TParser.OperationParameterSymbolContext(self, self._ctx, self.state)
- self.enterRule(localctx, 20, self.RULE_operationParameterSymbol)
+ self.enterRule(localctx, 24, self.RULE_operationParameterSymbol)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
- self.state = 179
+ self.state = 196
self.typeSymbol()
- self.state = 180
+ self.state = 197
localctx.name = self.match(TParser.IDENTIFIER)
- self.state = 182
+ self.state = 199
self._errHandler.sync(self)
_la = self._input.LA(1)
- if _la==TParser.T__11:
- self.state = 181
- self.match(TParser.T__11)
+ if _la==TParser.T__13:
+ self.state = 198
+ self.match(TParser.T__13)
except RecognitionException as re:
@@ -1213,10 +1358,10 @@ class TParser ( Parser ):
def tagSymbol(self):
localctx = TParser.TagSymbolContext(self, self._ctx, self.state)
- self.enterRule(localctx, 22, self.RULE_tagSymbol)
+ self.enterRule(localctx, 26, self.RULE_tagSymbol)
try:
self.enterOuterAlt(localctx, 1)
- self.state = 184
+ self.state = 201
localctx.line = self.match(TParser.TAGLINE)
except RecognitionException as re:
localctx.exception = re
@@ -1263,28 +1408,28 @@ class TParser ( Parser ):
def tagAttributeSymbol(self):
localctx = TParser.TagAttributeSymbolContext(self, self._ctx, self.state)
- self.enterRule(localctx, 24, self.RULE_tagAttributeSymbol)
+ self.enterRule(localctx, 28, self.RULE_tagAttributeSymbol)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
- self.state = 186
+ self.state = 203
localctx.name = self.match(TParser.IDENTIFIER)
- self.state = 189
+ self.state = 206
self._errHandler.sync(self)
_la = self._input.LA(1)
- if _la==TParser.T__12:
- self.state = 187
- self.match(TParser.T__12)
- self.state = 188
+ if _la==TParser.T__14:
+ self.state = 204
+ self.match(TParser.T__14)
+ self.state = 205
localctx.value = self.match(TParser.IDENTIFIER)
- self.state = 192
+ self.state = 209
self._errHandler.sync(self)
_la = self._input.LA(1)
- if _la==TParser.T__11:
- self.state = 191
- self.match(TParser.T__11)
+ if _la==TParser.T__13:
+ self.state = 208
+ self.match(TParser.T__13)
except RecognitionException as re:
@@ -1340,29 +1485,29 @@ class TParser ( Parser ):
def typeSymbol(self):
localctx = TParser.TypeSymbolContext(self, self._ctx, self.state)
- self.enterRule(localctx, 26, self.RULE_typeSymbol)
+ self.enterRule(localctx, 30, self.RULE_typeSymbol)
try:
- self.state = 198
+ self.state = 215
self._errHandler.sync(self)
token = self._input.LA(1)
- if token in [TParser.T__13, TParser.T__14, TParser.T__15, TParser.T__16, TParser.T__17]:
+ if token in [TParser.T__15, TParser.T__16, TParser.T__17, TParser.T__18, TParser.T__19]:
self.enterOuterAlt(localctx, 1)
- self.state = 194
+ self.state = 211
self.primitiveTypeSymbol()
pass
elif token in [TParser.IDENTIFIER]:
self.enterOuterAlt(localctx, 2)
- self.state = 195
+ self.state = 212
self.complexTypeSymbol()
pass
- elif token in [TParser.T__18]:
+ elif token in [TParser.T__20]:
self.enterOuterAlt(localctx, 3)
- self.state = 196
+ self.state = 213
self.listTypeSymbol()
pass
- elif token in [TParser.T__21]:
+ elif token in [TParser.T__23]:
self.enterOuterAlt(localctx, 4)
- self.state = 197
+ self.state = 214
self.modelTypeSymbol()
pass
else:
@@ -1409,10 +1554,10 @@ class TParser ( Parser ):
def complexTypeSymbol(self):
localctx = TParser.ComplexTypeSymbolContext(self, self._ctx, self.state)
- self.enterRule(localctx, 28, self.RULE_complexTypeSymbol)
+ self.enterRule(localctx, 32, self.RULE_complexTypeSymbol)
try:
self.enterOuterAlt(localctx, 1)
- self.state = 200
+ self.state = 217
localctx.name = self.match(TParser.IDENTIFIER)
except RecognitionException as re:
localctx.exception = re
@@ -1453,35 +1598,35 @@ class TParser ( Parser ):
def primitiveTypeSymbol(self):
localctx = TParser.PrimitiveTypeSymbolContext(self, self._ctx, self.state)
- self.enterRule(localctx, 30, self.RULE_primitiveTypeSymbol)
+ self.enterRule(localctx, 34, self.RULE_primitiveTypeSymbol)
try:
- self.state = 207
+ self.state = 224
self._errHandler.sync(self)
token = self._input.LA(1)
- if token in [TParser.T__13]:
+ if token in [TParser.T__15]:
self.enterOuterAlt(localctx, 1)
- self.state = 202
- localctx.name = self.match(TParser.T__13)
+ self.state = 219
+ localctx.name = self.match(TParser.T__15)
pass
- elif token in [TParser.T__14]:
+ elif token in [TParser.T__16]:
self.enterOuterAlt(localctx, 2)
- self.state = 203
- localctx.name = self.match(TParser.T__14)
+ self.state = 220
+ localctx.name = self.match(TParser.T__16)
pass
- elif token in [TParser.T__15]:
+ elif token in [TParser.T__17]:
self.enterOuterAlt(localctx, 3)
- self.state = 204
- localctx.name = self.match(TParser.T__15)
+ self.state = 221
+ localctx.name = self.match(TParser.T__17)
pass
- elif token in [TParser.T__16]:
+ elif token in [TParser.T__18]:
self.enterOuterAlt(localctx, 4)
- self.state = 205
- localctx.name = self.match(TParser.T__16)
+ self.state = 222
+ localctx.name = self.match(TParser.T__18)
pass
- elif token in [TParser.T__17]:
+ elif token in [TParser.T__19]:
self.enterOuterAlt(localctx, 5)
- self.state = 206
- localctx.name = self.match(TParser.T__17)
+ self.state = 223
+ localctx.name = self.match(TParser.T__19)
pass
else:
raise NoViableAltException(self)
@@ -1528,17 +1673,17 @@ class TParser ( Parser ):
def listTypeSymbol(self):
localctx = TParser.ListTypeSymbolContext(self, self._ctx, self.state)
- self.enterRule(localctx, 32, self.RULE_listTypeSymbol)
+ self.enterRule(localctx, 36, self.RULE_listTypeSymbol)
try:
self.enterOuterAlt(localctx, 1)
- self.state = 209
- self.match(TParser.T__18)
- self.state = 210
- self.match(TParser.T__19)
- self.state = 211
- localctx.valueType = self.typeSymbol()
- self.state = 212
+ self.state = 226
self.match(TParser.T__20)
+ self.state = 227
+ self.match(TParser.T__21)
+ self.state = 228
+ localctx.valueType = self.typeSymbol()
+ self.state = 229
+ self.match(TParser.T__22)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
@@ -1581,17 +1726,17 @@ class TParser ( Parser ):
def modelTypeSymbol(self):
localctx = TParser.ModelTypeSymbolContext(self, self._ctx, self.state)
- self.enterRule(localctx, 34, self.RULE_modelTypeSymbol)
+ self.enterRule(localctx, 38, self.RULE_modelTypeSymbol)
try:
self.enterOuterAlt(localctx, 1)
- self.state = 214
+ self.state = 231
+ self.match(TParser.T__23)
+ self.state = 232
self.match(TParser.T__21)
- self.state = 215
- self.match(TParser.T__19)
- self.state = 216
+ self.state = 233
localctx.valueType = self.typeSymbol()
- self.state = 217
- self.match(TParser.T__20)
+ self.state = 234
+ self.match(TParser.T__22)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
@@ -1651,51 +1796,51 @@ class TParser ( Parser ):
def structSymbol(self):
localctx = TParser.StructSymbolContext(self, self._ctx, self.state)
- self.enterRule(localctx, 36, self.RULE_structSymbol)
+ self.enterRule(localctx, 40, self.RULE_structSymbol)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
- self.state = 220
+ self.state = 237
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==TParser.DOCCOMMENT:
- self.state = 219
+ self.state = 236
localctx.comment = self.match(TParser.DOCCOMMENT)
- self.state = 225
+ self.state = 242
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==TParser.TAGLINE:
- self.state = 222
+ self.state = 239
self.tagSymbol()
- self.state = 227
+ self.state = 244
self._errHandler.sync(self)
_la = self._input.LA(1)
- self.state = 228
- self.match(TParser.T__22)
- self.state = 229
+ self.state = 245
+ self.match(TParser.T__24)
+ self.state = 246
localctx.name = self.match(TParser.IDENTIFIER)
- self.state = 230
- self.match(TParser.T__4)
- self.state = 234
+ self.state = 247
+ self.match(TParser.T__5)
+ self.state = 251
self._errHandler.sync(self)
_la = self._input.LA(1)
- while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << TParser.T__13) | (1 << TParser.T__14) | (1 << TParser.T__15) | (1 << TParser.T__16) | (1 << TParser.T__17) | (1 << TParser.T__18) | (1 << TParser.T__21) | (1 << TParser.TAGLINE) | (1 << TParser.IDENTIFIER) | (1 << TParser.DOCCOMMENT))) != 0):
- self.state = 231
+ while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << TParser.T__15) | (1 << TParser.T__16) | (1 << TParser.T__17) | (1 << TParser.T__18) | (1 << TParser.T__19) | (1 << TParser.T__20) | (1 << TParser.T__23) | (1 << TParser.TAGLINE) | (1 << TParser.IDENTIFIER) | (1 << TParser.DOCCOMMENT))) != 0):
+ self.state = 248
self.structFieldSymbol()
- self.state = 236
+ self.state = 253
self._errHandler.sync(self)
_la = self._input.LA(1)
- self.state = 237
- self.match(TParser.T__5)
- self.state = 239
+ self.state = 254
+ self.match(TParser.T__6)
+ self.state = 256
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==TParser.T__1:
- self.state = 238
+ self.state = 255
self.match(TParser.T__1)
@@ -1755,37 +1900,37 @@ class TParser ( Parser ):
def structFieldSymbol(self):
localctx = TParser.StructFieldSymbolContext(self, self._ctx, self.state)
- self.enterRule(localctx, 38, self.RULE_structFieldSymbol)
+ self.enterRule(localctx, 42, self.RULE_structFieldSymbol)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
- self.state = 242
+ self.state = 259
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==TParser.DOCCOMMENT:
- self.state = 241
+ self.state = 258
localctx.comment = self.match(TParser.DOCCOMMENT)
- self.state = 247
+ self.state = 264
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==TParser.TAGLINE:
- self.state = 244
+ self.state = 261
self.tagSymbol()
- self.state = 249
+ self.state = 266
self._errHandler.sync(self)
_la = self._input.LA(1)
- self.state = 250
+ self.state = 267
self.typeSymbol()
- self.state = 251
+ self.state = 268
localctx.name = self.match(TParser.IDENTIFIER)
- self.state = 253
+ self.state = 270
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==TParser.T__1:
- self.state = 252
+ self.state = 269
self.match(TParser.T__1)
@@ -1852,51 +1997,51 @@ class TParser ( Parser ):
def enumSymbol(self):
localctx = TParser.EnumSymbolContext(self, self._ctx, self.state)
- self.enterRule(localctx, 40, self.RULE_enumSymbol)
+ self.enterRule(localctx, 44, self.RULE_enumSymbol)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
- self.state = 256
+ self.state = 273
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==TParser.DOCCOMMENT:
- self.state = 255
+ self.state = 272
localctx.comment = self.match(TParser.DOCCOMMENT)
- self.state = 261
+ self.state = 278
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==TParser.TAGLINE:
- self.state = 258
+ self.state = 275
self.tagSymbol()
- self.state = 263
+ self.state = 280
self._errHandler.sync(self)
_la = self._input.LA(1)
- self.state = 264
+ self.state = 281
self.enumTypeSymbol()
- self.state = 265
+ self.state = 282
localctx.name = self.match(TParser.IDENTIFIER)
- self.state = 266
- self.match(TParser.T__4)
- self.state = 270
+ self.state = 283
+ self.match(TParser.T__5)
+ self.state = 287
self._errHandler.sync(self)
_la = self._input.LA(1)
while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << TParser.TAGLINE) | (1 << TParser.IDENTIFIER) | (1 << TParser.DOCCOMMENT))) != 0):
- self.state = 267
+ self.state = 284
self.enumMemberSymbol()
- self.state = 272
+ self.state = 289
self._errHandler.sync(self)
_la = self._input.LA(1)
- self.state = 273
- self.match(TParser.T__5)
- self.state = 275
+ self.state = 290
+ self.match(TParser.T__6)
+ self.state = 292
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==TParser.T__1:
- self.state = 274
+ self.state = 291
self.match(TParser.T__1)
@@ -1940,20 +2085,20 @@ class TParser ( Parser ):
def enumTypeSymbol(self):
localctx = TParser.EnumTypeSymbolContext(self, self._ctx, self.state)
- self.enterRule(localctx, 42, self.RULE_enumTypeSymbol)
+ self.enterRule(localctx, 46, self.RULE_enumTypeSymbol)
try:
- self.state = 279
+ self.state = 296
self._errHandler.sync(self)
token = self._input.LA(1)
- if token in [TParser.T__23]:
+ if token in [TParser.T__25]:
self.enterOuterAlt(localctx, 1)
- self.state = 277
- localctx.isEnum = self.match(TParser.T__23)
+ self.state = 294
+ localctx.isEnum = self.match(TParser.T__25)
pass
- elif token in [TParser.T__24]:
+ elif token in [TParser.T__26]:
self.enterOuterAlt(localctx, 2)
- self.state = 278
- localctx.isFlag = self.match(TParser.T__24)
+ self.state = 295
+ localctx.isFlag = self.match(TParser.T__26)
pass
else:
raise NoViableAltException(self)
@@ -2014,46 +2159,46 @@ class TParser ( Parser ):
def enumMemberSymbol(self):
localctx = TParser.EnumMemberSymbolContext(self, self._ctx, self.state)
- self.enterRule(localctx, 44, self.RULE_enumMemberSymbol)
+ self.enterRule(localctx, 48, self.RULE_enumMemberSymbol)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
- self.state = 282
+ self.state = 299
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==TParser.DOCCOMMENT:
- self.state = 281
+ self.state = 298
localctx.comment = self.match(TParser.DOCCOMMENT)
- self.state = 287
+ self.state = 304
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==TParser.TAGLINE:
- self.state = 284
+ self.state = 301
self.tagSymbol()
- self.state = 289
+ self.state = 306
self._errHandler.sync(self)
_la = self._input.LA(1)
- self.state = 290
+ self.state = 307
localctx.name = self.match(TParser.IDENTIFIER)
- self.state = 293
+ self.state = 310
self._errHandler.sync(self)
_la = self._input.LA(1)
- if _la==TParser.T__12:
- self.state = 291
- self.match(TParser.T__12)
- self.state = 292
+ if _la==TParser.T__14:
+ self.state = 308
+ self.match(TParser.T__14)
+ self.state = 309
self.intSymbol()
- self.state = 296
+ self.state = 313
self._errHandler.sync(self)
_la = self._input.LA(1)
- if _la==TParser.T__11:
- self.state = 295
- self.match(TParser.T__11)
+ if _la==TParser.T__13:
+ self.state = 312
+ self.match(TParser.T__13)
except RecognitionException as re:
@@ -2100,19 +2245,19 @@ class TParser ( Parser ):
def intSymbol(self):
localctx = TParser.IntSymbolContext(self, self._ctx, self.state)
- self.enterRule(localctx, 46, self.RULE_intSymbol)
+ self.enterRule(localctx, 50, self.RULE_intSymbol)
try:
- self.state = 300
+ self.state = 317
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [TParser.INTCONSTANT]:
self.enterOuterAlt(localctx, 1)
- self.state = 298
+ self.state = 315
localctx.value = self.match(TParser.INTCONSTANT)
pass
elif token in [TParser.HEXCONSTANT]:
self.enterOuterAlt(localctx, 2)
- self.state = 299
+ self.state = 316
localctx.value = self.match(TParser.HEXCONSTANT)
pass
else:
diff --git a/qface/idl/parser/TVisitor.py b/qface/idl/parser/TVisitor.py
index 537146c..9836d57 100644
--- a/qface/idl/parser/TVisitor.py
+++ b/qface/idl/parser/TVisitor.py
@@ -49,6 +49,11 @@ class TVisitor(ParseTreeVisitor):
return self.visitChildren(ctx)
+ # Visit a parse tree produced by TParser#operationModifierSymbol.
+ def visitOperationModifierSymbol(self, ctx:TParser.OperationModifierSymbolContext):
+ return self.visitChildren(ctx)
+
+
# Visit a parse tree produced by TParser#signalSymbol.
def visitSignalSymbol(self, ctx:TParser.SignalSymbolContext):
return self.visitChildren(ctx)
@@ -59,6 +64,11 @@ class TVisitor(ParseTreeVisitor):
return self.visitChildren(ctx)
+ # Visit a parse tree produced by TParser#propertyModifierSymbol.
+ def visitPropertyModifierSymbol(self, ctx:TParser.PropertyModifierSymbolContext):
+ return self.visitChildren(ctx)
+
+
# Visit a parse tree produced by TParser#operationParameterSymbol.
def visitOperationParameterSymbol(self, ctx:TParser.OperationParameterSymbolContext):
return self.visitChildren(ctx)
diff --git a/qface/idl/profile.py b/qface/idl/profile.py
new file mode 100644
index 0000000..4b376c9
--- /dev/null
+++ b/qface/idl/profile.py
@@ -0,0 +1,36 @@
+# Copyright (c) Pelagicore AB 2016
+
+from enum import Enum
+
+
+class EFeature(Enum):
+ CONST_PROPERTY = 'const_property'
+ EXTEND_INTERFACE = 'extend_interface'
+
+
+class EProfile(Enum):
+ BASIC = 'basic'
+ ADVANCED = 'advanced'
+ ALL = 'advanced'
+
+
+class Profile:
+ def __init__(self, features=set()):
+ self.features = features
+
+ @staticmethod
+ def get_profile(cls, name):
+ if name is EProfile.BASIC:
+ return Profile(features=[
+ ])
+ if name is EProfile.ADVANCED:
+ return Profile(features=[
+ EFeature.CONST_PROPERTY,
+ EFeature.EXTEND_INTERFACE
+ ])
+ if name is EProfile.ALL:
+ return Profile(features=[
+ ])
+ return []
+
+
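For orientation, a minimal usage sketch of the feature-profile API added above (not part of the patch itself). Note that, as committed, get_profile() is declared a @staticmethod yet still takes cls as its first parameter, so the class is passed explicitly in this sketch; the feature names come straight from EFeature.

from qface.idl.profile import Profile, EProfile, EFeature

# get_profile() is a @staticmethod that still expects `cls`, so the class is
# passed explicitly here (an artefact of the code as committed above).
profile = Profile.get_profile(Profile, EProfile.ADVANCED)

if EFeature.CONST_PROPERTY in profile.features:
    print('const properties are enabled for this profile')
if EFeature.EXTEND_INTERFACE in profile.features:
    print('interface extension is enabled for this profile')
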
diff --git a/qface/watch.py b/qface/watch.py
index 9129bfa..df1293e 100644
--- a/qface/watch.py
+++ b/qface/watch.py
@@ -16,6 +16,8 @@ class RunScriptChangeHandler(FileSystemEventHandler):
self.is_running = False
def on_modified(self, event):
+ if event.is_directory:
+ return
self.run()
def run(self):
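The new guard matters because watchdog can deliver modification events for directories as well as files, so a single save may fire the handler more than once. A small self-contained sketch of the same pattern, using a hypothetical stand-in handler rather than the RunScriptChangeHandler from the patch:

import time
from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer

class RerunHandler(FileSystemEventHandler):
    """Hypothetical stand-in for RunScriptChangeHandler."""
    def on_modified(self, event):
        if event.is_directory:  # same guard as added above
            return
        print('file changed:', event.src_path)

observer = Observer()
observer.schedule(RerunHandler(), '.', recursive=True)
observer.start()
try:
    time.sleep(10)  # watch for a short while
finally:
    observer.stop()
    observer.join()
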
diff --git a/requirements.txt b/requirements.txt
index e34954c..3cc9f80 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,5 @@
-antlr4-python3-runtime>=4.6
+antlr4-python3-runtime==4.6
+typing
jinja2
click
path.py
diff --git a/setup.py b/setup.py
index c116e0c..3a2f843 100644
--- a/setup.py
+++ b/setup.py
@@ -51,6 +51,7 @@ setup(
'path.py',
'pyyaml',
'antlr4-python3-runtime>=4.6',
+ 'typing',
'click',
'watchdog',
'six',
diff --git a/tests/in/com.pelagicore.ivi.tuner.qface b/tests/in/com.pelagicore.ivi.tuner.qface
index c50a4c8..aae8e0c 100644
--- a/tests/in/com.pelagicore.ivi.tuner.qface
+++ b/tests/in/com.pelagicore.ivi.tuner.qface
@@ -1,15 +1,25 @@
module com.pelagicore.ivi.tuner 1.0;
+
+interface BaseTuner {
+ property int baseValue;
+}
+
+
/** Service Tuner */
@service: true
@interface: true
@config: {private: true, b: B, c: C}
@data: [1,2,3]
-interface Tuner {
+interface Tuner extends BaseTuner {
/** property currentStation */
readonly Station currentStation;
+ /** the default station, which never changes */
+ const Station defaultStation;
/** operation nextStation */
void nextStation();
+ /** operation numStations */
+ int numStations() const;
/** operation previousStation */
void previousStation();
/** operation updateCurrentStation */
@@ -22,6 +32,11 @@ interface Tuner {
signal scanFinished();
signal broadcastMessage(string message);
+
+ TunerExtension extension;
+}
+
+interface TunerExtension {
}
/** enum State */
diff --git a/tests/test_generator.py b/tests/test_generator.py
index 21c483c..c99c561 100644
--- a/tests/test_generator.py
+++ b/tests/test_generator.py
@@ -29,14 +29,10 @@ def test_gen_module():
def test_gen_interface():
system = loadSystem()
gen = Generator(search_path='tests/templates')
- template = """
- {%- for interface in module.interfaces -%}
- {{interface}}
- {%- endfor -%}
- """
+ template = """{{module.interfaces|join(',')}}"""
module = system.lookup('com.pelagicore.ivi.tuner')
text = gen.apply(template, {"module": module})
- assert text == 'Tuner'
+ assert text == 'BaseTuner,Tuner,TunerExtension'
def test_parse_document():
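The rewritten test leans on Jinja's built-in join filter instead of an explicit loop. Outside the test harness the same pattern looks like this (a sketch; the paths are the ones used by the tests):

from qface.generator import FileSystem, Generator

system = FileSystem.parse_document('tests/in/com.pelagicore.ivi.tuner.qface')
module = system.lookup('com.pelagicore.ivi.tuner')
gen = Generator(search_path='tests/templates')
# Renders 'BaseTuner,Tuner,TunerExtension' with the fixture updated as above.
print(gen.apply("{{module.interfaces|join(',')}}", {"module": module}))
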
diff --git a/tests/test_json.py b/tests/test_json.py
new file mode 100644
index 0000000..785680a
--- /dev/null
+++ b/tests/test_json.py
@@ -0,0 +1,54 @@
+from qface.generator import FileSystem
+import logging
+from path import Path
+import json
+
+# logging.config.fileConfig('logging.ini')
+logging.basicConfig()
+
+log = logging.getLogger(__name__)
+
+inputPath = Path('tests/in')
+
+
+def loadEcho():
+ path = inputPath / 'org.example.echo.qface'
+ return FileSystem.parse_document(path)
+
+
+def load_tuner():
+ path = inputPath / 'com.pelagicore.ivi.tuner.qface'
+ return FileSystem.parse_document(path)
+
+
+def test_echo_json():
+ system = loadEcho()
+ data = system.toJson()
+ text = json.dumps(data)
+ data = json.loads(text)
+ assert len(data['modules']) == 1
+ module = data['modules'][0]
+ assert module['name'] == 'org.example.echo'
+ assert module['version'] == '1.0'
+ assert len(module['interfaces']) == 1
+ interface = module['interfaces'][0]
+ assert interface['name'] == 'Echo'
+ assert len(interface['operations']) == 1
+ # string echo(string msg);
+ operation = interface['operations'][0]
+ assert operation['parameters'][0]['name'] == 'msg'
+ assert operation['parameters'][0]['type']['name'] == 'string'
+
+
+def test_tuner_json():
+ system = load_tuner()
+ data = system.toJson()
+ text = json.dumps(data)
+ data = json.loads(text)
+ module = data['modules'][0]
+ assert len(module['interfaces']) == 3
+ interface = module['interfaces'][0]
+ assert interface['name'] == 'BaseTuner'
+ interface = module['interfaces'][1]
+ assert interface['name'] == 'Tuner'
+
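The new tests exercise System.toJson() by round-tripping it through json.dumps/json.loads. Dumping a parsed system to a file uses the same calls (a sketch; the output file name is chosen for illustration):

import json
from qface.generator import FileSystem

system = FileSystem.parse_document('tests/in/com.pelagicore.ivi.tuner.qface')
with open('tuner.json', 'w') as f:
    json.dump(system.toJson(), f, indent=2)
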
diff --git a/tests/test_parser.py b/tests/test_parser.py
index d94a96f..128e691 100644
--- a/tests/test_parser.py
+++ b/tests/test_parser.py
@@ -52,9 +52,14 @@ def test_property():
assert property.type.name == 'Station'
assert property.module == module
assert property.type.qualified_name == 'com.pelagicore.ivi.tuner.Station'
- assert property.is_readonly
+ assert property.readonly
+ assert not property.const
assert property.comment == '/** property currentStation */'
+ property = interface._propertyMap['defaultStation']
+ assert not property.readonly
+ assert property.const
+
def test_operation():
system = load_tuner()
@@ -63,6 +68,9 @@ def test_operation():
assert operation
operation = interface._contentMap['previousStation']
assert operation
+ operation = interface._contentMap['numStations']
+ assert operation
+ assert operation.const
def test_signals():
@@ -148,3 +156,21 @@ def test_model():
assert property.type.is_model is True
assert property.type.nested.is_complex
assert property.type.nested.name == 'Station'
+
+
+def test_extension():
+ system = load_tuner()
+ interface = system.lookup('com.pelagicore.ivi.tuner.Tuner')
+ extends = system.lookup('com.pelagicore.ivi.tuner.BaseTuner')
+ # import pdb; pdb.set_trace()
+ assert extends is interface.extends
+
+
+def test_interface_property():
+ system = load_tuner()
+ tuner = system.lookup('com.pelagicore.ivi.tuner.Tuner')
+ extension = system.lookup('com.pelagicore.ivi.tuner.TunerExtension')
+ prop = tuner._propertyMap['extension']
+ assert prop.type.is_interface
+ assert prop.type.reference is extension
+
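The property API now exposes readonly (instead of is_readonly) plus a new const flag, matching the const Station defaultStation added to the tuner fixture. A quick inspection sketch using the same internal map the tests read from:

from qface.generator import FileSystem

system = FileSystem.parse_document('tests/in/com.pelagicore.ivi.tuner.qface')
tuner = system.lookup('com.pelagicore.ivi.tuner.Tuner')
for prop in tuner._propertyMap.values():
    flags = [name for name, on in (('readonly', prop.readonly), ('const', prop.const)) if on]
    print(prop.name, ','.join(flags) or '-')
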
diff --git a/tests/test_qtcpp_helper.py b/tests/test_qtcpp_helper.py
index 8408d2e..535905a 100644
--- a/tests/test_qtcpp_helper.py
+++ b/tests/test_qtcpp_helper.py
@@ -200,3 +200,15 @@ def test_parameter_type():
assert answer == 'QmlMessageModel *{0}'.format(prop.name)
+def test_namespace():
+ system = parse_document()
+ module = system.lookup('org.example')
+ assert module
+ ns = qtcpp.Filters.open_ns(module)
+ assert ns == 'namespace org { example {'
+
+ ns = qtcpp.Filters.close_ns(module)
+ assert ns == '} }'
+
+ ns = qtcpp.Filters.using_ns(module)
+ assert ns == 'using namespace org::example'