Tools: Introduce a Python-based API bindings template generator.
This extends the existing XML-based template generator to allow an
additional kind of Python-based input.
The Python code is read as source, parsed into an AST, converted into a typed model
equivalent to the existing XML model, and then processed by the existing
code templates into compatible code.
This provides a few benefits: readability is much improved, and, more
importantly, it associates the APIs with Python's typing
information, which will make it possible to provide accurate type hints
without additional downstream processing in the future.
Right now this is just a proof-of-concept but if the approach is
well received, then a more complete implementation can be done with
further conversion of existing binding files.
Here is an example of how it looks, though I still think the metadata
is too verbose and can be made to look nicer with some further work.
```python
from ..Base.Metadata import export
from ..Base.Persistence import PersistencePy
from typing import Any, Optional, List
@export(
Father="PersistencePy",
Name="DocumentPy",
Twin="Document",
TwinPointer="Document",
Include="Gui/Document.h",
Namespace="Gui",
FatherInclude="Base/PersistencePy.h",
FatherNamespace="Base"
)
class DocumentPy(PersistencePy):
"""
This is a Document class.
Author: Werner Mayer (wmayer@users.sourceforge.net)
Licence: LGPL
"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
"""
Constructor for DocumentPy.
"""
super(DocumentPy, self).__init__(*args, **kwargs)
pass
def show(self, objName: str) -> None:
"""
show(objName) -> None
Show an object.
Parameters:
objName (str): Name of the `Gui.ViewProvider` to show.
"""
pass
```
This commit is contained in:
@@ -125,6 +125,7 @@ ENDMACRO(fc_target_copy_resource_flat)
|
||||
|
||||
# It would be a bit cleaner to generate these files in ${CMAKE_CURRENT_BINARY_DIR}
|
||||
|
||||
# To be removed once all instances are migrated to generate_from_py
|
||||
macro(generate_from_xml BASE_NAME)
|
||||
set(TOOL_PATH "${CMAKE_SOURCE_DIR}/src/Tools/bindings/generate.py")
|
||||
file(TO_NATIVE_PATH "${TOOL_PATH}" TOOL_NATIVE_PATH)
|
||||
@@ -154,6 +155,35 @@ macro(generate_from_xml BASE_NAME)
|
||||
)
|
||||
endmacro(generate_from_xml)
|
||||
|
||||
# Generate C++ binding sources out of a Python interface description
# <BASE_NAME>.pyi, using src/Tools/bindings/generate.py.
#
# Arguments:
#   BASE_NAME - binding base name; may also include a relative path component.
macro(generate_from_py BASE_NAME)
    set(TOOL_PATH "${CMAKE_SOURCE_DIR}/src/Tools/bindings/generate.py")
    file(TO_NATIVE_PATH "${TOOL_PATH}" TOOL_NATIVE_PATH)
    file(TO_NATIVE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/${BASE_NAME}.pyi" SOURCE_NATIVE_PATH)

    set(SOURCE_CPP_PATH "${CMAKE_CURRENT_BINARY_DIR}/${BASE_NAME}.cpp")

    # BASE_NAME may include also a path name
    get_filename_component(OUTPUT_PATH "${SOURCE_CPP_PATH}" PATH)
    file(TO_NATIVE_PATH "${OUTPUT_PATH}" OUTPUT_NATIVE_PATH)
    if(NOT EXISTS "${SOURCE_CPP_PATH}")
        # Run the generator once at configure time so the source files
        # exist before the first build.
        message(STATUS "${SOURCE_CPP_PATH}")
        execute_process(
            COMMAND "${PYTHON_EXECUTABLE}" "${TOOL_NATIVE_PATH}"
                    --outputPath "${OUTPUT_NATIVE_PATH}" "${SOURCE_NATIVE_PATH}"
            WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
            COMMAND_ERROR_IS_FATAL ANY
        )
    endif()
    add_custom_command(
        OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/${BASE_NAME}_.h" "${CMAKE_CURRENT_BINARY_DIR}/${BASE_NAME}_.cpp"
        COMMAND "${PYTHON_EXECUTABLE}" "${TOOL_NATIVE_PATH}" --outputPath "${OUTPUT_NATIVE_PATH}" "${BASE_NAME}.pyi"
        MAIN_DEPENDENCY "${BASE_NAME}.pyi"
        DEPENDS
            "${CMAKE_SOURCE_DIR}/src/Tools/bindings/templates/templateClassPyExport.py"
            "${TOOL_PATH}"
        WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
        COMMENT "Building ${BASE_NAME}.h/.cpp out of ${BASE_NAME}.pyi"
        VERBATIM
    )
endmacro()
|
||||
|
||||
macro(generate_embed_from_py BASE_NAME OUTPUT_FILE)
|
||||
set(TOOL_PATH "${CMAKE_SOURCE_DIR}/src/Tools/PythonToCPP.py")
|
||||
file(TO_NATIVE_PATH "${TOOL_PATH}" TOOL_NATIVE_PATH)
|
||||
|
||||
29
src/Base/Metadata.pyi
Normal file
29
src/Base/Metadata.pyi
Normal file
@@ -0,0 +1,29 @@
|
||||
"""
|
||||
This file keeps auxiliary metadata to be used by the Python API stubs.
|
||||
"""
|
||||
|
||||
def export(**kwargs):
    """
    A decorator to attach metadata to a class.

    The keyword arguments (e.g. Name, Twin, TwinPointer, Include,
    Namespace, Father, ...) override the defaults the binding generator
    derives from the class definition itself; any key that is omitted
    falls back to the derived value.
    """
    ...
|
||||
|
||||
# Marker decorator: the generator sets Methode.Const for methods carrying it.
def constmethod(): ...

# Marker decorator: the generator sets Methode.NoArgs for methods carrying it.
def no_args(): ...
|
||||
def forward_declarations(source_code):
    """
    A decorator to attach forward declarations to a class.

    The string literal argument is stored as the model's
    ForwardDeclarations and handed on to the code templates.
    """
    ...
|
||||
|
||||
def class_declarations(source_code):
    """
    A decorator to attach class declarations to a class.

    The string literal argument is stored as the model's
    ClassDeclarations and handed on to the code templates.
    """
    ...
|
||||
|
||||
def sequence_protocol(**kwargs):
    """
    A decorator to attach sequence protocol metadata to a class.

    The keyword arguments are used to construct the model's
    SequenceProtocol entry (sq_length, sq_item, mp_subscript, ...).
    """
    ...
|
||||
@@ -2,15 +2,19 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# (c) 2006 Jürgen Riegel GPL
|
||||
|
||||
import os, sys, getopt
|
||||
import os
|
||||
import sys
|
||||
import getopt
|
||||
import model.generateModel_Module
|
||||
import model.generateModel_Python
|
||||
import templates.templateModule
|
||||
import templates.templateClassPyExport
|
||||
|
||||
Usage = """generate - generates a FreeCAD Module out of an XML model
|
||||
|
||||
Usage = """generate - generates a FreeCAD Module out of an XML or Python model
|
||||
|
||||
Usage:
|
||||
generate [Optionen] Model.xml Model2.xml Model3.xml ...
|
||||
generate [Optionen] Model.xml/py Model2.xml/py Model3.xml/py ...
|
||||
|
||||
Options:
|
||||
-h, --help print this help
|
||||
@@ -24,16 +28,23 @@ Author:
|
||||
Licence: GPL
|
||||
|
||||
Version:
|
||||
0.2
|
||||
0.3
|
||||
"""
|
||||
|
||||
|
||||
# Globals
|
||||
|
||||
|
||||
def generate_model(filename):
    """
    Load the typed model from a binding description file.

    .xml files go through the legacy XML parser, .pyi files through the
    new Python-based parser.

    Raises:
        ValueError: if the file has neither a .xml nor a .pyi extension.
    """
    if filename.endswith(".xml"):
        return model.generateModel_Module.parse(filename)
    if filename.endswith(".pyi"):
        return model.generateModel_Python.parse(filename)
    raise ValueError(f"invalid file extension: {filename!r} (expected .xml or .pyi)")
|
||||
|
||||
|
||||
def generate(filename, outputPath):
|
||||
# load model
|
||||
GenerateModelInst = model.generateModel_Module.parse(filename)
|
||||
GenerateModelInst = generate_model(filename)
|
||||
|
||||
if len(GenerateModelInst.Module) != 0:
|
||||
Module = templates.templateModule.TemplateModule()
|
||||
@@ -46,7 +57,10 @@ def generate(filename, outputPath):
|
||||
Export.outputDir = outputPath + "/"
|
||||
Export.inputDir = os.path.dirname(filename) + "/"
|
||||
Export.export = GenerateModelInst.PythonExport[0]
|
||||
Export.is_python = filename.endswith(".py")
|
||||
Export.Generate()
|
||||
if Export.is_python:
|
||||
Export.Compare()
|
||||
print("Done generating: " + GenerateModelInst.PythonExport[0].Name)
|
||||
|
||||
|
||||
|
||||
585
src/Tools/bindings/model/generateModel_Python.py
Normal file
585
src/Tools/bindings/model/generateModel_Python.py
Normal file
@@ -0,0 +1,585 @@
|
||||
"""Parses Python binding interface files into a typed AST model."""
|
||||
|
||||
import ast
import os
import re
from typing import List, Optional

from model.typedModel import (
    GenerateModel,
    PythonExport,
    Methode,
    Attribute,
    Documentation,
    Author,
    Parameter,
    ParameterType,
    SequenceProtocol,
)
|
||||
|
||||
|
||||
def _extract_decorator_kwargs(decorator: ast.expr) -> dict:
|
||||
"""
|
||||
Extract keyword arguments from a decorator call like `@export(Father="...", Name="...")`.
|
||||
Returns them in a dict.
|
||||
"""
|
||||
if not isinstance(decorator, ast.Call):
|
||||
return {}
|
||||
result = {}
|
||||
for kw in decorator.keywords:
|
||||
match kw.value:
|
||||
case ast.Constant(value=val):
|
||||
result[kw.arg] = val
|
||||
case _:
|
||||
pass
|
||||
return result
|
||||
|
||||
|
||||
def _parse_docstring_for_documentation(docstring: str) -> Documentation:
    """
    Build a Documentation object from a free-form docstring.

    Recognizes the line prefixes "DeveloperDocu:", "UserDocu:", "Author:"
    (optionally with an email in parentheses) and "Licence:". Every other
    line becomes part of the user documentation, unless an explicit
    "UserDocu:" line was given, in which case that value wins.
    """
    if not docstring:
        return Documentation()

    dev_docu = None
    explicit_user_docu = None
    name = None
    email = None
    licence = None
    plain_lines = []

    for raw in docstring.strip().split("\n"):
        stripped = raw.strip()
        if stripped.startswith("DeveloperDocu:"):
            dev_docu = stripped.split("DeveloperDocu:", 1)[1].strip()
        elif stripped.startswith("UserDocu:"):
            explicit_user_docu = stripped.split("UserDocu:", 1)[1].strip()
        elif stripped.startswith("Author:"):
            # "Author: John Doe (john@example.com)" -- the email part is optional.
            author_part = stripped.split("Author:", 1)[1].strip()
            found = re.search(r"(.*?)\s*\((.*?)\)", author_part)
            if found:
                name = found.group(1).strip()
                email = found.group(2).strip()
            else:
                name = author_part
        elif stripped.startswith("Licence:"):
            licence = stripped.split("Licence:", 1)[1].strip()
        else:
            # Keep the raw (unstripped) line so indentation survives.
            plain_lines.append(raw)

    user_docu = explicit_user_docu
    if user_docu is None:
        user_docu = "\n".join(plain_lines)

    author = None
    if name or email or licence:
        author = Author(
            content=docstring,
            Name=name or "",
            EMail=email or "",
            Licence=licence or "LGPL",
        )

    return Documentation(
        Author=author,
        DeveloperDocu=dev_docu,
        UserDocu=user_docu,
    )
|
||||
|
||||
|
||||
def _get_type_str(node):
|
||||
"""Recursively convert an AST node for a type annotation to its string representation."""
|
||||
match node:
|
||||
case ast.Name(id=name):
|
||||
# Handle qualified names (e.g., typing.List)
|
||||
return name
|
||||
case ast.Attribute(value=val, attr=attr):
|
||||
# For annotations like List[str] (or Final[List[str]]), build the string recursively.
|
||||
return f"{_get_type_str(val)}.{attr}"
|
||||
case ast.Subscript(value=val, slice=slice_node):
|
||||
value_str = _get_type_str(val)
|
||||
slice_str = _get_type_str(slice_node)
|
||||
return f"{value_str}[{slice_str}]"
|
||||
case ast.Tuple(elts=elts):
|
||||
# For multiple types (e.g., Tuple[int, str])
|
||||
return ", ".join(_get_type_str(elt) for elt in elts)
|
||||
case _:
|
||||
# Fallback for unsupported node types
|
||||
return "object"
|
||||
|
||||
|
||||
def _python_type_to_parameter_type(py_type: str) -> ParameterType:
    """
    Map a Python annotation string onto the ParameterType enum.

    Matching is case-insensitive. Scalars are matched exactly, container
    annotations by prefix (so "List[int]" still maps to LIST). Anything
    unrecognized falls back to OBJECT.
    """
    lowered = py_type.lower()

    exact = {
        "int": ParameterType.LONG,
        "builtins.int": ParameterType.LONG,
        "float": ParameterType.FLOAT,
        "builtins.float": ParameterType.FLOAT,
        "str": ParameterType.STRING,
        "builtins.str": ParameterType.STRING,
        "bool": ParameterType.BOOLEAN,
        "builtins.bool": ParameterType.BOOLEAN,
    }
    if lowered in exact:
        return exact[lowered]

    # Order matters only for readability here; the prefixes are disjoint.
    by_prefix = (
        (("list", "typing.list"), ParameterType.LIST),
        (("dict", "typing.dict"), ParameterType.DICT),
        (("callable", "typing.callable"), ParameterType.CALLABLE),
        (("sequence", "typing.sequence"), ParameterType.SEQUENCE),
        (("tuple", "typing.tuple"), ParameterType.TUPLE),
    )
    for prefixes, mapped in by_prefix:
        if lowered.startswith(prefixes):
            return mapped

    return ParameterType.OBJECT
|
||||
|
||||
|
||||
def _parse_class_attributes(class_node: ast.ClassDef, source_code: str) -> List[Attribute]:
    """
    Parse top-level attributes (e.g. `TypeId: str = ""`) from the class AST node.
    We'll create an `Attribute` for each. For the `Documentation` of each attribute,
    we might store minimal or none if there's no docstring.

    Only annotated assignments (ast.AnnAssign) are considered; a string
    literal immediately following an attribute is treated as its docstring.
    A `Final[...]` annotation marks the attribute read-only.

    NOTE(review): the `source_code` parameter is currently unused here.
    NOTE(review): `default_doc` is a single shared Documentation instance
    reused for every undocumented attribute -- mutating one would affect
    all of them; confirm that consumers never mutate it.
    """
    attributes = []
    default_doc = Documentation(DeveloperDocu="", UserDocu="", Author=None)

    for idx, stmt in enumerate(class_node.body):
        if isinstance(stmt, ast.AnnAssign):
            # e.g.: `TypeId: Final[str] = ""`
            name = stmt.target.id if isinstance(stmt.target, ast.Name) else "unknown"
            # Evaluate the type annotation and detect Final for read-only attributes
            if isinstance(stmt.annotation, ast.Name):
                # e.g. `str`
                type_name = stmt.annotation.id
                readonly = False
            elif isinstance(stmt.annotation, ast.Subscript):
                # Check if this is a Final type hint, e.g. Final[int] or typing.Final[int]
                is_final = (
                    isinstance(stmt.annotation.value, ast.Name)
                    and stmt.annotation.value.id == "Final"
                ) or (
                    isinstance(stmt.annotation.value, ast.Attribute)
                    and stmt.annotation.value.attr == "Final"
                )
                if is_final:
                    readonly = True
                    # Extract the inner type from the Final[...] annotation
                    type_name = _get_type_str(stmt.annotation.slice)
                else:
                    type_name = _get_type_str(stmt.annotation)
                    readonly = False
            else:
                # Anything else (constants, calls, ...) maps to the generic type.
                type_name = "object"
                readonly = False

            param_type = _python_type_to_parameter_type(type_name)

            # Look for a docstring immediately following the attribute definition.
            attr_doc = default_doc
            if idx + 1 < len(class_node.body):
                next_stmt = class_node.body[idx + 1]
                if (
                    isinstance(next_stmt, ast.Expr)
                    and isinstance(next_stmt.value, ast.Constant)
                    and isinstance(next_stmt.value.value, str)
                ):
                    docstring = next_stmt.value.value

                    # Parse the docstring to build a Documentation object.
                    attr_doc = _parse_docstring_for_documentation(docstring)

            param = Parameter(Name=name, Type=param_type)
            attr = Attribute(Documentation=attr_doc, Parameter=param, Name=name, ReadOnly=readonly)
            attributes.append(attr)

    return attributes
|
||||
|
||||
|
||||
def _parse_methods(class_node: ast.ClassDef) -> List[Methode]:
    """
    Parse methods from the class AST node, extracting:
    - Method name
    - Parameters (from the function signature / annotations)
    - Docstring

    Methods decorated with @overload are skipped entirely. The decorators
    constmethod / staticmethod / classmethod / no_args set the matching
    flags on the resulting Methode.

    NOTE(review): only ast.FunctionDef is handled -- `async def` methods
    would be silently ignored; confirm that is intended.
    """
    methods = []

    for stmt in class_node.body:
        if not isinstance(stmt, ast.FunctionDef):
            continue

        # Skip methods decorated with @overload
        skip_method = False
        for deco in stmt.decorator_list:
            match deco:
                case ast.Name(id="overload"):
                    skip_method = True
                    break
                case ast.Attribute(attr="overload"):
                    skip_method = True
                    break
                case _:
                    pass
        if skip_method:
            continue

        # Extract method name
        method_name = stmt.name

        # Extract docstring
        method_docstring = ast.get_docstring(stmt) or ""
        doc_obj = _parse_docstring_for_documentation(method_docstring)
        has_keyword_args = False
        method_params = []

        # Helper for extracting an annotation string.
        # NOTE(review): this closure captures nothing from the loop and
        # could be hoisted to module level.
        def get_annotation_str(annotation):
            match annotation:
                case ast.Name(id=name):
                    return name
                case ast.Attribute(value=ast.Name(id=name), attr=attr):
                    return f"{name}.{attr}"
                case ast.Subscript(value=ast.Name(id=name), slice=_):
                    # Subscripted containers keep only the container name.
                    return name
                case ast.Subscript(
                    value=ast.Attribute(value=ast.Name(id=name), attr=attr), slice=_
                ):
                    return f"{name}.{attr}"
                case _:
                    return "object"

        # Process positional parameters (skipping self/cls)
        for arg in stmt.args.args:
            param_name = arg.arg
            if param_name in ("self", "cls"):
                continue
            annotation_str = "object"
            if arg.annotation:
                annotation_str = get_annotation_str(arg.annotation)
            param_type = _python_type_to_parameter_type(annotation_str)
            method_params.append(Parameter(Name=param_name, Type=param_type))

        # Process keyword-only parameters
        for kwarg in stmt.args.kwonlyargs:
            has_keyword_args = True
            param_name = kwarg.arg
            annotation_str = "object"
            if kwarg.annotation:
                annotation_str = get_annotation_str(kwarg.annotation)
            param_type = _python_type_to_parameter_type(annotation_str)
            method_params.append(Parameter(Name=param_name, Type=param_type))

        if stmt.args.kwarg:
            has_keyword_args = True

        # A *args vararg disables the keyword-call flavour of the binding.
        keyword_flag = has_keyword_args and not stmt.args.vararg

        # Check for various decorators using any(...)
        const_method_flag = any(
            isinstance(deco, ast.Name) and deco.id == "constmethod" for deco in stmt.decorator_list
        )
        static_method_flag = any(
            isinstance(deco, ast.Name) and deco.id == "staticmethod" for deco in stmt.decorator_list
        )
        class_method_flag = any(
            isinstance(deco, ast.Name) and deco.id == "classmethod" for deco in stmt.decorator_list
        )
        no_args = any(
            isinstance(deco, ast.Name) and deco.id == "no_args" for deco in stmt.decorator_list
        )

        methode = Methode(
            Name=method_name,
            Documentation=doc_obj,
            Parameter=method_params,
            Const=const_method_flag,
            Static=static_method_flag,
            Class=class_method_flag,
            Keyword=keyword_flag,
            NoArgs=no_args,
        )

        methods.append(methode)

    return methods
|
||||
|
||||
|
||||
def _get_module_from_path(path: str) -> str:
|
||||
"""
|
||||
Returns the name of the FreeCAD module from the path.
|
||||
Examples:
|
||||
.../src/Base/Persistence.py -> "Base"
|
||||
.../src/Mod/CAM/Path/__init__.py -> "CAM"
|
||||
"""
|
||||
# 1. Split the path by the OS separator.
|
||||
import os
|
||||
|
||||
parts = path.split(os.sep)
|
||||
|
||||
# 2. Attempt to find "src" in the path components.
|
||||
try:
|
||||
idx_src = parts.index("src")
|
||||
except ValueError:
|
||||
# If "src" is not found, we cannot determine the module name.
|
||||
return None
|
||||
|
||||
# 3. Check if there is a path component immediately after "src".
|
||||
# If there isn't, we have nothing to return.
|
||||
if idx_src + 1 >= len(parts):
|
||||
return None
|
||||
|
||||
next_part = parts[idx_src + 1]
|
||||
|
||||
# 4. If the next component is "Mod", then the module name is the
|
||||
# component AFTER "Mod" (e.g. "CAM" in "Mod/CAM").
|
||||
if next_part == "Mod":
|
||||
if idx_src + 2 < len(parts):
|
||||
return parts[idx_src + 2]
|
||||
else:
|
||||
# "Mod" is the last component
|
||||
return None
|
||||
else:
|
||||
# 5. Otherwise, if it's not "Mod", we treat that next component
|
||||
# itself as the module name (e.g. "Base").
|
||||
return next_part
|
||||
|
||||
|
||||
def _extract_module_name(import_path: str, default_module: str) -> str:
|
||||
"""
|
||||
Given an import_path like "Base.Foo", return "Base".
|
||||
If import_path has no dot (e.g., "Foo"), return default_module.
|
||||
|
||||
Examples:
|
||||
extract_module_name("Base.Foo", default_module="Fallback") -> "Base"
|
||||
extract_module_name("Foo", default_module="Fallback") -> "Fallback"
|
||||
"""
|
||||
if "." in import_path:
|
||||
# Take everything before the first dot
|
||||
return import_path.split(".", 1)[0]
|
||||
else:
|
||||
# No dot, return the fallback module name
|
||||
return default_module
|
||||
|
||||
|
||||
def _get_module_path(module_name: str) -> str:
|
||||
if module_name in ["Base", "App", "Gui"]:
|
||||
return module_name
|
||||
return "Mod/" + module_name
|
||||
|
||||
|
||||
def _parse_imports(tree) -> dict:
|
||||
"""
|
||||
Parses the given source_code for import statements and constructs
|
||||
a mapping from imported name -> module path.
|
||||
|
||||
For example, code like:
|
||||
|
||||
from Metadata import export, forward_declarations, constmethod
|
||||
from PyObjectBase import PyObjectBase
|
||||
from Base.Foo import Foo
|
||||
from typing import List, Final
|
||||
|
||||
yields a mapping of:
|
||||
{
|
||||
"export": "Metadata",
|
||||
"forward_declarations": "Metadata",
|
||||
"constmethod": "Metadata",
|
||||
"PyObjectBase": "PyObjectBase",
|
||||
"Foo": "Base.Foo",
|
||||
"List": "typing",
|
||||
"Final": "typing"
|
||||
}
|
||||
"""
|
||||
name_to_module_map = {}
|
||||
|
||||
for node in tree.body:
|
||||
match node:
|
||||
# Handle 'import X' or 'import X as Y'
|
||||
case ast.Import(names=names):
|
||||
# e.g. import foo, import foo as bar
|
||||
for alias in names:
|
||||
imported_name = alias.asname if alias.asname else alias.name
|
||||
name_to_module_map[imported_name] = alias.name
|
||||
# Handle 'from X import Y, Z as W'
|
||||
case ast.ImportFrom(module=module, names=names):
|
||||
module_name = module if module is not None else ""
|
||||
for alias in names:
|
||||
imported_name = alias.asname if alias.asname else alias.name
|
||||
name_to_module_map[imported_name] = module_name
|
||||
case _:
|
||||
pass
|
||||
|
||||
return name_to_module_map
|
||||
|
||||
|
||||
def _get_native_class_name(klass: str) -> str:
    # Identity for now: the C++ twin class is taken to share the .pyi
    # class name. Kept as a function so the mapping has a single place
    # to change later.
    return klass
|
||||
|
||||
|
||||
def _get_native_python_class_name(klass: str) -> str:
|
||||
if klass == "PyObjectBase":
|
||||
return klass
|
||||
return klass + "Py"
|
||||
|
||||
|
||||
def _extract_base_class_name(base: ast.expr) -> str:
|
||||
"""
|
||||
Extract the base class name from an AST node using ast.unparse.
|
||||
For generic bases (e.g. GenericParent[T]), it removes the generic part.
|
||||
For qualified names (e.g. some_module.ParentClass), it returns only the last part.
|
||||
"""
|
||||
base_str = ast.unparse(base)
|
||||
# Remove generic parameters if present.
|
||||
if "[" in base_str:
|
||||
base_str = base_str.split("[", 1)[0]
|
||||
# For qualified names, take only the class name.
|
||||
if "." in base_str:
|
||||
base_str = base_str.split(".")[-1]
|
||||
return base_str
|
||||
|
||||
|
||||
def _parse_class(class_node, source_code: str, path: str, imports_mapping: dict) -> PythonExport:
    """
    Convert one class definition into a PythonExport model entry.

    Decorators recognized on the class: @export (metadata overrides),
    @forward_declarations / @class_declarations (verbatim C++ text) and
    @sequence_protocol. Metadata not supplied via @export is derived from
    the class name, its first base class and the file's location.

    NOTE(review): only the first base class is considered; additional
    bases are ignored.
    """
    base_class_name = None
    for base in class_node.bases:
        base_class_name = _extract_base_class_name(base)
        break  # Only consider the first base class.

    # A binding class must inherit from something (at least PyObjectBase).
    assert base_class_name is not None

    is_exported = False
    export_decorator_kwargs = {}
    forward_declarations_text = ""
    class_declarations_text = ""
    sequence_protocol_kwargs = None

    for decorator in class_node.decorator_list:
        match decorator:
            case ast.Name(id="export"):
                # Bare @export: mark exported, keep all defaults.
                export_decorator_kwargs = {}
                is_exported = True
            case ast.Call(func=ast.Name(id="export"), keywords=_, args=_):
                export_decorator_kwargs = _extract_decorator_kwargs(decorator)
                is_exported = True
            case ast.Call(func=ast.Name(id="forward_declarations"), args=args):
                # Only a literal string argument is honoured.
                if args:
                    match args[0]:
                        case ast.Constant(value=val):
                            forward_declarations_text = val
            case ast.Call(func=ast.Name(id="class_declarations"), args=args):
                if args:
                    match args[0]:
                        case ast.Constant(value=val):
                            class_declarations_text = val
            case ast.Call(func=ast.Name(id="sequence_protocol"), keywords=_, args=_):
                sequence_protocol_kwargs = _extract_decorator_kwargs(decorator)
            case _:
                pass

    # Parse imports to compute module metadata.
    # NOTE(review): a base class that was never imported raises KeyError
    # here -- consider a clearer error message for that case.
    module_name = _get_module_from_path(path)
    imported_from_module = imports_mapping[base_class_name]
    parent_module_name = _extract_module_name(imported_from_module, module_name)

    class_docstring = ast.get_docstring(class_node) or ""
    doc_obj = _parse_docstring_for_documentation(class_docstring)
    class_attributes = _parse_class_attributes(class_node, source_code)
    class_methods = _parse_methods(class_node)

    # Derived defaults; each can be overridden by the @export kwargs below.
    native_class_name = _get_native_class_name(class_node.name)
    native_python_class_name = _get_native_python_class_name(class_node.name)
    include = _get_module_path(module_name) + "/" + native_class_name + ".h"

    father_native_python_class_name = _get_native_python_class_name(base_class_name)
    father_include = (
        _get_module_path(parent_module_name) + "/" + father_native_python_class_name + ".h"
    )

    py_export = PythonExport(
        Documentation=doc_obj,
        Name=export_decorator_kwargs.get("Name", "") or native_python_class_name,
        PythonName=export_decorator_kwargs.get("PythonName", "") or None,
        Include=export_decorator_kwargs.get("Include", "") or include,
        Father=export_decorator_kwargs.get("Father", "") or father_native_python_class_name,
        Twin=export_decorator_kwargs.get("Twin", "") or native_class_name,
        TwinPointer=export_decorator_kwargs.get("TwinPointer", "") or native_class_name,
        Namespace=export_decorator_kwargs.get("Namespace", "") or module_name,
        FatherInclude=export_decorator_kwargs.get("FatherInclude", "") or father_include,
        FatherNamespace=export_decorator_kwargs.get("FatherNamespace", "") or parent_module_name,
        Constructor=export_decorator_kwargs.get("Constructor", False),
        NumberProtocol=export_decorator_kwargs.get("NumberProtocol", False),
        RichCompare=export_decorator_kwargs.get("RichCompare", False),
        Delete=export_decorator_kwargs.get("Delete", False),
        Reference=export_decorator_kwargs.get("Reference", None),
        Initialization=export_decorator_kwargs.get("Initialization", False),
        DisableNotify=export_decorator_kwargs.get("DisableNotify", False),
        DescriptorGetter=export_decorator_kwargs.get("DescriptorGetter", False),
        DescriptorSetter=export_decorator_kwargs.get("DescriptorSetter", False),
        ForwardDeclarations=forward_declarations_text,
        ClassDeclarations=class_declarations_text,
        IsExplicitlyExported=is_exported,
    )

    # Attach sequence protocol metadata if provided.
    # NOTE(review): a kwargs/field mismatch is swallowed silently here
    # (Sequence is set to None and `e` is unused) -- consider at least
    # logging the failure.
    if sequence_protocol_kwargs is not None:
        try:
            seq_protocol = SequenceProtocol(**sequence_protocol_kwargs)
            py_export.Sequence = seq_protocol
        except Exception as e:
            py_export.Sequence = None

    py_export.Attribute.extend(class_attributes)
    py_export.Methode.extend(class_methods)

    return py_export
|
||||
|
||||
|
||||
def parse_python_code(path: str) -> GenerateModel:
    """
    Parse the Python source file at `path` and build a GenerateModel with
    one PythonExport entry per top-level class definition.

    Raises:
        Exception: when more than one class lacks an explicit @export
            decorator, since the generator could not tell them apart.
    """
    with open(path, "r") as handle:
        source_code = handle.read()

    tree = ast.parse(source_code)
    imports_mapping = _parse_imports(tree)

    result = GenerateModel()
    for node in tree.body:
        if isinstance(node, ast.ClassDef):
            result.PythonExport.append(
                _parse_class(node, source_code, path, imports_mapping)
            )

    # More than one implicitly-exported class is ambiguous.
    implicit = [
        item
        for item in result.PythonExport
        if not getattr(item, "IsExplicitlyExported", False)
    ]
    if len(implicit) > 1:
        raise Exception("Multiple non explicitly-exported classes were found, please use @export.")

    return result
|
||||
|
||||
|
||||
def parse(path):
    """Public entry point used by generate.py: build the model for a .pyi file."""
    return parse_python_code(path)
|
||||
|
||||
|
||||
def main():
    """Command-line helper: parse the file named by the first argument and dump the model."""
    import sys

    cli_args = sys.argv[1:]
    parsed = parse(cli_args[0])
    parsed.dump()
|
||||
|
||||
|
||||
# Allow running the parser standalone for debugging: parse the file given
# on the command line and dump the resulting model (see main()).
if __name__ == "__main__":
    main()
|
||||
@@ -52,7 +52,7 @@ def nohandle(string):
|
||||
class copier:
|
||||
"Smart-copier (YAPTU) class"
|
||||
|
||||
def copyblock(self, i=0, last=None):
|
||||
def copyblock(self, cur_line=0, last=None):
|
||||
"Main copy method: process lines [i,last) of block"
|
||||
|
||||
def repl(match, self=self):
|
||||
@@ -67,13 +67,13 @@ class copier:
|
||||
block = self.locals["_bl"]
|
||||
if last is None:
|
||||
last = len(block)
|
||||
while i < last:
|
||||
line = block[i]
|
||||
while cur_line < last:
|
||||
line = block[cur_line]
|
||||
match = self.restat.match(line)
|
||||
if match: # a statement starts "here" (at line block[i])
|
||||
# i is the last line to _not_ process
|
||||
stat = match.string[match.end(0) :].strip()
|
||||
j = i + 1 # look for 'finish' from here onwards
|
||||
j = cur_line + 1 # look for 'finish' from here onwards
|
||||
nest = 1 # count nesting levels of statements
|
||||
while j < last:
|
||||
line = block[j]
|
||||
@@ -88,20 +88,20 @@ class copier:
|
||||
match = self.recont.match(line)
|
||||
if match: # found a contin.-statement
|
||||
nestat = match.string[match.end(0) :].strip()
|
||||
stat = "%s _cb(%s,%s)\n%s" % (stat, i + 1, j, nestat)
|
||||
i = j # again, i is the last line to _not_ process
|
||||
stat = "%s _cb(%s,%s)\n%s" % (stat, cur_line + 1, j, nestat)
|
||||
cur_line = j # again, i is the last line to _not_ process
|
||||
j = j + 1
|
||||
stat = self.preproc(stat, "exec")
|
||||
stat = "%s _cb(%s,%s)" % (stat, i + 1, j)
|
||||
# for debugging, uncomment...: print("-> Executing: {"+stat+"}")
|
||||
stat = "%s _cb(%s,%s)" % (stat, cur_line + 1, j)
|
||||
# for debugging, uncomment...: print(f"-> Executing on line {cur_line}: {stat}")
|
||||
exec(stat, self.globals, self.locals)
|
||||
i = j + 1
|
||||
cur_line = j + 1
|
||||
else: # normal line, just copy with substitution
|
||||
try:
|
||||
self.ouf.write(self.regex.sub(repl, line).encode("utf8"))
|
||||
except TypeError:
|
||||
self.ouf.write(self.regex.sub(repl, line))
|
||||
i = i + 1
|
||||
cur_line = cur_line + 1
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
|
||||
324
src/Tools/bindings/model/typedModel.py
Normal file
324
src/Tools/bindings/model/typedModel.py
Normal file
@@ -0,0 +1,324 @@
|
||||
from __future__ import annotations
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum
|
||||
from typing import List, Optional
|
||||
|
||||
|
||||
#
|
||||
# Enums
|
||||
#
|
||||
|
||||
|
||||
class ParameterType(str, Enum):
    """
    Type tags for parameters and attributes; the string values mirror the
    Type enumeration of the XML schema this model corresponds to.
    """

    BOOLEAN = "Boolean"
    INT = "Int"
    LONG = "Long"
    STRING = "String"
    OBJECT = "Object"
    FLOAT = "Float"
    COMPLEX = "Complex"
    CHAR = "Char"
    TUPLE = "Tuple"
    LIST = "List"
    DICT = "Dict"
    MODULE = "Module"
    CALLABLE = "Callable"
    SEQUENCE = "Sequence"

    def __str__(self):
        # Render the schema value directly, so the enum can be
        # interpolated into templates without calling .value.
        return self.value
|
||||
|
||||
|
||||
#
|
||||
# Supporting Classes
|
||||
#
|
||||
|
||||
|
||||
@dataclass
class Author:
    """Represents the <Author> element inside <Documentation>."""

    # The text content of <Author> is effectively a string;
    # we capture it in `content` to hold the text node if needed.
    content: Optional[str] = None

    # Attributes (defaults apply when the source gives no author info).
    Name: str = "FreeCAD Project"
    EMail: str = "example@freecad.org"
    Licence: str = "LGPL"
|
||||
|
||||
|
||||
@dataclass
class Documentation:
    """
    Corresponds to the <Documentation> element.
    Can contain an <Author>, <DeveloperDocu>, and <UserDocu>.
    All children are optional; a bare Documentation() is a valid
    "no documentation" placeholder.
    """

    Author: Optional[Author] = None
    DeveloperDocu: Optional[str] = None
    UserDocu: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
class Property:
    """
    Corresponds to <Property> in the schema.
    It has required attributes Name, Type and optional StartValue,
    plus optional child <Documentation>.
    """

    # Child
    Documentation: Optional[Documentation] = None

    # Attributes (required by the schema but defaulted here so instances
    # can be built incrementally).
    Name: str = ""
    Type: str = ""
    StartValue: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
class ViewProvider:
    """
    Corresponds to <ViewProvider>, which can contain 0..∞ <Property> children.
    """

    # default_factory avoids the shared-mutable-default pitfall.
    Property: List[Property] = field(default_factory=list)
|
||||
|
||||
|
||||
@dataclass
class Parameter:
    """
    Corresponds to <Parameter> in the schema.
    It has a required 'Name' (str) and a required 'Type' (enumeration).
    """

    Name: str
    Type: ParameterType
|
||||
|
||||
|
||||
#
|
||||
# Elements under <PythonExport>
|
||||
#
|
||||
|
||||
|
||||
@dataclass
class Methode:
    """
    Corresponds to <Methode> inside <PythonExport>.
    Contains an optional <Documentation> and 0..∞ <Parameter>.

    (The spelling "Methode" is inherited from the XML schema.)
    """

    Documentation: Optional[Documentation] = None
    Parameter: List[Parameter] = field(default_factory=list)

    # Attributes — flags controlling how the C++ method stub is emitted.
    Name: str = ""
    Const: Optional[bool] = None
    Keyword: bool = False
    NoArgs: bool = False
    Class: bool = False
    Static: bool = False
|
||||
|
||||
|
||||
@dataclass
class Attribute:
    """
    Corresponds to <Attribute> inside <PythonExport>.
    It has a required <Documentation>, a required <Parameter>,
    and attributes Name, ReadOnly.
    """

    # Required children (no defaults — must be supplied on construction).
    Documentation: Documentation
    Parameter: Parameter

    # Attributes
    Name: str
    ReadOnly: bool
|
||||
|
||||
|
||||
@dataclass
class SequenceProtocol:
    """
    Corresponds to the <Sequence> element inside <PythonExport>.
    All attributes are required booleans.

    Field names follow the CPython sequence/mapping protocol slot names
    (sq_* / mp_*) that the generated type object will fill in.
    """

    sq_length: bool
    sq_concat: bool
    sq_repeat: bool
    sq_item: bool
    mp_subscript: bool
    sq_ass_item: bool
    mp_ass_subscript: bool
    sq_contains: bool
    sq_inplace_concat: bool
    sq_inplace_repeat: bool
|
||||
|
||||
|
||||
@dataclass
class PythonExport:
    """
    Corresponds to <PythonExport> inside <GenerateModel>.
    It contains:
      - optional <Documentation>
      - 0..∞ <Methode>
      - 0..∞ <Attribute>
      - optional <Sequence>
      - optional <CustomAttributes>
      - one <ClassDeclarations> (type=string)
      - one <ForwardDeclarations> (type=string)
    Plus many attributes with required/optional flags.
    """

    # Child elements
    Documentation: Optional[Documentation] = None
    Methode: List[Methode] = field(default_factory=list)
    Attribute: List[Attribute] = field(default_factory=list)
    Sequence: Optional[SequenceProtocol] = None
    CustomAttributes: Optional[str] = ""  # To match the original XML model
    ClassDeclarations: str = ""
    ForwardDeclarations: str = ""
    NoArgs: bool = False

    # Attributes — identity and C++ twin wiring
    Name: str = ""
    PythonName: Optional[str] = None
    Include: str = ""
    Father: str = ""
    Twin: str = ""
    Namespace: str = ""
    FatherInclude: str = ""
    FatherNamespace: str = ""
    # Attributes — feature flags for the generated type object
    Constructor: bool = False
    NumberProtocol: bool = False
    RichCompare: bool = False
    TwinPointer: str = ""
    Delete: bool = False
    Reference: Optional[bool] = None
    Initialization: bool = False
    DisableNotify: bool = False
    DescriptorGetter: bool = False
    DescriptorSetter: bool = False
    IsExplicitlyExported: bool = False
|
||||
|
||||
|
||||
#
|
||||
# Module-Related Classes
|
||||
#
|
||||
|
||||
|
||||
@dataclass
class Dependencies:
    """
    Corresponds to the <Dependencies> element inside <Module>.
    It contains 0..∞ local <Module> elements which are not typed in the XSD.
    We'll treat these as strings or possibly minimal structures.
    """

    Module: List[str] = field(default_factory=list)
|
||||
|
||||
|
||||
@dataclass
class Feature:
    """
    Corresponds to <Feature> in <Module>'s <Content>.
    Has optional <Documentation>, 0..∞ <Property>, optional <ViewProvider>,
    and a required attribute 'Name'.
    """

    Documentation: Optional[Documentation] = None
    Property: List[Property] = field(default_factory=list)
    ViewProvider: Optional[ViewProvider] = None

    # Attributes
    Name: str = ""
|
||||
|
||||
|
||||
@dataclass
class DocObject:
    """
    Corresponds to <DocObject> in <Module>'s <Content>.
    Has optional <Documentation>, 0..∞ <Property>, and a required 'Name' attribute.
    """

    Documentation: Optional[Documentation] = None
    Property: List[Property] = field(default_factory=list)

    # Attributes
    Name: str = ""
|
||||
|
||||
|
||||
@dataclass
class ModuleContent:
    """
    Corresponds to the <Content> element in <Module>.
    Contains:
      - 0..∞ <Property>
      - 0..∞ <Feature>
      - 0..∞ <DocObject>
      - 0..∞ <GuiCommand>
      - 0..∞ <PreferencesPage>
    """

    Property: List[Property] = field(default_factory=list)
    Feature: List[Feature] = field(default_factory=list)
    DocObject: List[DocObject] = field(default_factory=list)
    # GuiCommand / PreferencesPage are untyped in the XSD; kept as strings.
    GuiCommand: List[str] = field(default_factory=list)
    PreferencesPage: List[str] = field(default_factory=list)
|
||||
|
||||
|
||||
@dataclass
class Module:
    """
    Corresponds to the top-level <Module> element.
    Has optional <Documentation>, optional <Dependencies>,
    a required <Content>, and a required attribute Name.
    """

    Documentation: Optional[Documentation] = None
    Dependencies: Optional[Dependencies] = None
    Content: ModuleContent = field(default_factory=ModuleContent)

    # Attributes
    Name: str = ""
|
||||
|
||||
|
||||
#
|
||||
# Root Element
|
||||
#
|
||||
|
||||
|
||||
@dataclass
class GenerateModel:
    """
    Corresponds to the root element <GenerateModel>.
    Contains 0..∞ <Module> and 0..∞ <PythonExport>.
    """

    Module: List[Module] = field(default_factory=list)
    PythonExport: List[PythonExport] = field(default_factory=list)

    def dump(self):
        """Debug helper: print a human-readable summary of the model."""
        print("Parsed GenerateModel object:")

        if not self.PythonExport:
            return

        # Only the first export is summarised, matching the common
        # one-export-per-file layout.
        export = self.PythonExport[0]
        print("PythonExport Name:", export.Name)

        docu = export.Documentation
        if docu and docu.Author:
            print("Author Name:", docu.Author.Name)
            print("Author Email:", docu.Author.EMail)
            print("Author Licence:", docu.Author.Licence)
            print("DeveloperDocu:", docu.DeveloperDocu)
            print("UserDocu:", docu.UserDocu)

        print("Class Attributes:")
        for attribute in export.Attribute:
            print(f" - {attribute.Name} (type={attribute.Parameter.Type}, readOnly={attribute.ReadOnly})")

        print("Methods:")
        for method in export.Methode:
            print(f" - {method.Name}")
            # Each method might have parameters
            for parameter in method.Parameter:
                print(f" * param: {parameter.Name}, type={parameter.Type}")
|
||||
@@ -9,7 +9,39 @@ import model.generateModel_Module
|
||||
import model.generateTools
|
||||
|
||||
|
||||
def compareFiles(file1, file2):
    """Compare two text files and raise if they are not equal.

    Parameters:
        file1: path of the first (reference) file.
        file2: path of the second file.

    Raises:
        FileNotFoundError: if either file does not exist; the message
            names the specific missing path.
        ValueError: if the contents differ; the message carries a
            unified diff of the two files.
    """

    # Check if files exist. Bug fix: report the file that is actually
    # missing instead of always printing both paths.
    for file in (file1, file2):
        if not os.path.exists(file):
            raise FileNotFoundError(f"File not found: {file}")

    # Read file contents
    with open(file1, "r", encoding="utf-8") as f1, open(file2, "r", encoding="utf-8") as f2:
        lines1 = f1.readlines()
        lines2 = f2.readlines()

    # Compare and raise with the differences, if any.
    import difflib

    diff = list(difflib.unified_diff(lines1, lines2, fromfile=file1, tofile=file2, lineterm=""))

    if diff:
        error = "Files are not equal.\n\n"
        error += "Diff:\n\n"
        error += "".join(diff)
        raise ValueError(error)
|
||||
|
||||
|
||||
class TemplateClassPyExport(template.ModelTemplate):
|
||||
# TODO: This is temporary, once all XML files are migrated, this can be removed.
|
||||
def getPath(self, path):
|
||||
if self.is_python:
|
||||
root, ext = os.path.splitext(path)
|
||||
return f"{root}_{ext}"
|
||||
return path
|
||||
|
||||
def Generate(self):
|
||||
# self.ParentNamespace = "Base"
|
||||
# self.Namespace = "Base"
|
||||
@@ -19,6 +51,8 @@ class TemplateClassPyExport(template.ModelTemplate):
|
||||
outputDir = self.outputDir
|
||||
|
||||
def escapeString(s, indent=4):
|
||||
if not s:
|
||||
return None
|
||||
"""Escapes a string for use as literal in C++ code"""
|
||||
s = s.strip() # This allows UserDocu-tags on their own lines without adding whitespace
|
||||
s = s.replace("\\", "\\\\")
|
||||
@@ -34,7 +68,7 @@ class TemplateClassPyExport(template.ModelTemplate):
|
||||
os.makedirs(subpath)
|
||||
|
||||
# Imp.cpp must not exist, neither in outputDir nor in inputDir
|
||||
outputImp = outputDir + exportName + "Imp.cpp"
|
||||
outputImp = self.getPath(outputDir + exportName + "Imp.cpp")
|
||||
if not os.path.exists(outputImp):
|
||||
if not os.path.exists(inputDir + exportName + "Imp.cpp"):
|
||||
file = open(outputImp, "wb")
|
||||
@@ -42,17 +76,39 @@ class TemplateClassPyExport(template.ModelTemplate):
|
||||
model.generateTools.replace(self.TemplateImplement, locals(), file)
|
||||
file.close()
|
||||
|
||||
outputCpp = outputDir + exportName + ".cpp"
|
||||
outputCpp = self.getPath(outputDir + exportName + ".cpp")
|
||||
with open(outputCpp, "wb") as file:
|
||||
print("TemplateClassPyExport", "TemplateModule", file.name)
|
||||
model.generateTools.replace(self.TemplateModule, locals(), file)
|
||||
|
||||
outputHeader = outputDir + exportName + ".h"
|
||||
outputHeader = self.getPath(outputDir + exportName + ".h")
|
||||
with open(outputHeader, "wb") as file:
|
||||
print("TemplateClassPyExport", "TemplateHeader", file.name)
|
||||
model.generateTools.replace(self.TemplateHeader, locals(), file)
|
||||
# file.write( model.generateTools.replace(self.Template,locals()))
|
||||
|
||||
def Compare(self):
|
||||
"""
|
||||
Compares the Python generated files to the previously generated XML files.
|
||||
This exists temporarily while the XML files are migrated to Python to guarantee consistency.
|
||||
"""
|
||||
exportName = self.export.Name
|
||||
inputDir = self.inputDir
|
||||
outputDir = self.outputDir
|
||||
|
||||
if not os.path.exists(inputDir + exportName + "Imp.cpp"):
|
||||
outputImpXml = outputDir + exportName + "Imp.cpp"
|
||||
outputImpPy = self.getPath(outputDir + exportName + "Imp.cpp")
|
||||
compareFiles(outputImpXml, outputImpPy)
|
||||
|
||||
outputHeaderXml = outputDir + exportName + ".h"
|
||||
outputHeaderPy = self.getPath(outputDir + exportName + ".h")
|
||||
compareFiles(outputHeaderXml, outputHeaderPy)
|
||||
|
||||
outputCppXml = outputDir + exportName + ".cpp"
|
||||
outputCppPy = self.getPath(outputDir + exportName + ".cpp")
|
||||
compareFiles(outputCppXml, outputCppPy)
|
||||
|
||||
TemplateHeader = """
|
||||
// This file is generated by src/Tools/generateTemplates/templateClassPyExport.py out of the XML file
|
||||
// Every change you make here gets lost in the next full rebuild!
|
||||
|
||||
Reference in New Issue
Block a user