Merge pull request #19516 from tritao/tools-python-bindings-gen

Tools: Introduce a new Python-based C++ bindings generator
This commit is contained in:
Chris Hennes
2025-02-24 16:39:24 +00:00
committed by GitHub
10 changed files with 1519 additions and 24 deletions

View File

@@ -113,6 +113,7 @@ ENDMACRO(fc_target_copy_resource_flat)
# It would be a bit cleaner to generate these files in ${CMAKE_CURRENT_BINARY_DIR}
# To be removed once all instances are migrated to generate_from_py
macro(generate_from_xml BASE_NAME)
set(TOOL_PATH "${CMAKE_SOURCE_DIR}/src/Tools/bindings/generate.py")
file(TO_NATIVE_PATH "${TOOL_PATH}" TOOL_NATIVE_PATH)
@@ -142,7 +143,36 @@ macro(generate_from_xml BASE_NAME)
)
endmacro(generate_from_xml)
macro(generate_from_py BASE_NAME OUTPUT_FILE)
# Generates Python binding sources in the current binary dir from a Python
# stub file ${BASE_NAME}.pyi using src/Tools/bindings/generate.py.
# BASE_NAME may include a relative path component.
macro(generate_from_py BASE_NAME)
    set(TOOL_PATH "${CMAKE_SOURCE_DIR}/src/Tools/bindings/generate.py")
    file(TO_NATIVE_PATH "${TOOL_PATH}" TOOL_NATIVE_PATH)
    file(TO_NATIVE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/${BASE_NAME}.pyi" SOURCE_NATIVE_PATH)
    set(SOURCE_CPP_PATH "${CMAKE_CURRENT_BINARY_DIR}/${BASE_NAME}.cpp")
    # BASE_NAME may include also a path name
    get_filename_component(OUTPUT_PATH "${SOURCE_CPP_PATH}" PATH)
    file(TO_NATIVE_PATH "${OUTPUT_PATH}" OUTPUT_NATIVE_PATH)
    # NOTE(review): the existence probe checks ${BASE_NAME}.cpp while the custom
    # command declares ${BASE_NAME}_.h/.cpp as OUTPUT — confirm the generator
    # emits both spellings, otherwise this probe re-runs every configure.
    if(NOT EXISTS "${SOURCE_CPP_PATH}")
        # assures the source files are generated at least once
        message(STATUS "${SOURCE_CPP_PATH}")
        execute_process(
            COMMAND "${PYTHON_EXECUTABLE}" "${TOOL_NATIVE_PATH}" --outputPath "${OUTPUT_NATIVE_PATH}" "${SOURCE_NATIVE_PATH}"
            WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
            COMMAND_ERROR_IS_FATAL ANY
        )
    endif()
    add_custom_command(
        OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/${BASE_NAME}_.h" "${CMAKE_CURRENT_BINARY_DIR}/${BASE_NAME}_.cpp"
        COMMAND "${PYTHON_EXECUTABLE}" "${TOOL_NATIVE_PATH}" --outputPath "${OUTPUT_NATIVE_PATH}" "${BASE_NAME}.pyi"
        MAIN_DEPENDENCY "${BASE_NAME}.pyi"
        DEPENDS
            "${CMAKE_SOURCE_DIR}/src/Tools/bindings/templates/templateClassPyExport.py"
            "${TOOL_PATH}"
        WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
        COMMENT "Building ${BASE_NAME}_.h/.cpp out of ${BASE_NAME}.pyi"
        VERBATIM
    )
endmacro()
macro(generate_embed_from_py BASE_NAME OUTPUT_FILE)
set(TOOL_PATH "${CMAKE_SOURCE_DIR}/src/Tools/PythonToCPP.py")
file(TO_NATIVE_PATH "${TOOL_PATH}" TOOL_NATIVE_PATH)
file(TO_NATIVE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/${BASE_NAME}.py" SOURCE_NATIVE_PATH)
@@ -153,7 +183,7 @@ macro(generate_from_py BASE_NAME OUTPUT_FILE)
DEPENDS "${TOOL_PATH}"
WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}"
COMMENT "Building files out of ${BASE_NAME}.py")
endmacro(generate_from_py)
endmacro(generate_embed_from_py)
macro(generate_from_any INPUT_FILE OUTPUT_FILE VARIABLE)
set(TOOL_PATH "${CMAKE_SOURCE_DIR}/src/Tools/PythonToCPP.py")

View File

@@ -101,8 +101,8 @@ generate_from_xml(PropertyContainerPy)
generate_from_xml(MaterialPy)
generate_from_xml(MeasureManagerPy)
generate_from_py(FreeCADInit InitScript.h)
generate_from_py(FreeCADTest TestScript.h)
generate_embed_from_py(FreeCADInit InitScript.h)
generate_embed_from_py(FreeCADTest TestScript.h)
SET(FreeCADApp_XML_SRCS
ExtensionPy.xml

29
src/Base/Metadata.pyi Normal file
View File

@@ -0,0 +1,29 @@
"""
This file keeps auxiliary metadata to be used by the Python API stubs.
"""
def export(**kwargs):
    """
    A decorator to attach metadata to a class.

    Keyword arguments (e.g. Name, Father, Twin, Include) override the
    values the bindings generator would otherwise derive from the stub
    class itself.
    """
    ...
# Marker decorators: their mere presence on a stub method is detected by the
# bindings generator; they have no runtime behavior.
def constmethod(): ...  # sets the Const flag on the generated method model
def no_args(): ...  # sets the NoArgs flag on the generated method model
def forward_declarations(source_code):
    """
    A decorator to attach forward declarations to a class.

    ``source_code`` is captured verbatim as the ForwardDeclarations text of
    the exported class.
    """
    ...
def class_declarations(source_code):
    """
    A decorator to attach class declarations to a class.

    ``source_code`` is captured verbatim as the ClassDeclarations text of
    the exported class.
    """
    ...
def sequence_protocol(**kwargs):
    """
    A decorator to attach sequence protocol metadata to a class.

    Keyword arguments map onto the boolean slots of the generator's
    SequenceProtocol model (sq_length, sq_item, mp_subscript, ...).
    """
    ...

View File

@@ -266,7 +266,7 @@ generate_from_xml(AxisOriginPy)
generate_from_xml(CommandPy)
generate_from_xml(Navigation/NavigationStylePy)
generate_from_py(FreeCADGuiInit GuiInitScript.h)
generate_embed_from_py(FreeCADGuiInit GuiInitScript.h)
# The XML files
SET(FreeCADGui_XML_SRCS

View File

@@ -2,15 +2,19 @@
# -*- coding: utf-8 -*-
# (c) 2006 Jürgen Riegel GPL
import os, sys, getopt
import os
import sys
import getopt
import model.generateModel_Module
import model.generateModel_Python
import templates.templateModule
import templates.templateClassPyExport
Usage = """generate - generates a FreeCAD Module out of an XML model
Usage = """generate - generates a FreeCAD Module out of an XML or Python model
Usage:
generate [Optionen] Model.xml Model2.xml Model3.xml ...
generate [Optionen] Model.xml/py Model2.xml/py Model3.xml/py ...
Options:
-h, --help print this help
@@ -24,16 +28,23 @@ Author:
Licence: GPL
Version:
0.2
0.3
"""
# Globals
def generate_model(filename):
    """Load a model description from an XML (.xml) or Python stub (.pyi) file."""
    parsers = {
        ".xml": model.generateModel_Module.parse,
        ".pyi": model.generateModel_Python.parse,
    }
    for suffix, parser in parsers.items():
        if filename.endswith(suffix):
            return parser(filename)
    raise ValueError("invalid file extension")
def generate(filename, outputPath):
# load model
GenerateModelInst = model.generateModel_Module.parse(filename)
GenerateModelInst = generate_model(filename)
if len(GenerateModelInst.Module) != 0:
Module = templates.templateModule.TemplateModule()
@@ -46,7 +57,10 @@ def generate(filename, outputPath):
Export.outputDir = outputPath + "/"
Export.inputDir = os.path.dirname(filename) + "/"
Export.export = GenerateModelInst.PythonExport[0]
Export.is_python = filename.endswith(".py")
Export.Generate()
if Export.is_python:
Export.Compare()
print("Done generating: " + GenerateModelInst.PythonExport[0].Name)

View File

@@ -0,0 +1,585 @@
"""Parses Python binding interface files into a typed AST model."""
import ast, re
from typing import List
from model.typedModel import (
GenerateModel,
PythonExport,
Methode,
Attribute,
Documentation,
Author,
Parameter,
ParameterType,
SequenceProtocol,
)
def _extract_decorator_kwargs(decorator: ast.expr) -> dict:
"""
Extract keyword arguments from a decorator call like `@export(Father="...", Name="...")`.
Returns them in a dict.
"""
if not isinstance(decorator, ast.Call):
return {}
result = {}
for kw in decorator.keywords:
match kw.value:
case ast.Constant(value=val):
result[kw.arg] = val
case _:
pass
return result
def _parse_docstring_for_documentation(docstring: str) -> Documentation:
    """
    Given a docstring, parse out DeveloperDocu, UserDocu, Author, Licence, etc.
    This is a simple heuristic-based parser. Adjust as needed for your format.

    Lines starting with a recognized "Key:" prefix are consumed as metadata;
    all remaining lines become the UserDocu text unless an explicit
    "UserDocu:" line was present.
    """
    dev_docu = None
    user_docu = None
    author_name = None
    author_email = None
    author_licence = None
    # An empty/None docstring yields an all-default Documentation object.
    if not docstring:
        return Documentation()
    lines = docstring.strip().split("\n")
    user_docu_lines = []
    for raw_line in lines:
        line = raw_line.strip()
        if line.startswith("DeveloperDocu:"):
            dev_docu = line.split("DeveloperDocu:", 1)[1].strip()
        elif line.startswith("UserDocu:"):
            user_docu = line.split("UserDocu:", 1)[1].strip()
        elif line.startswith("Author:"):
            # e.g. "Author: John Doe (john@example.com)"
            # naive approach:
            author_part = line.split("Author:", 1)[1].strip()
            # attempt to find email in parentheses
            match = re.search(r"(.*?)\s*\((.*?)\)", author_part)
            if match:
                author_name = match.group(1).strip()
                author_email = match.group(2).strip()
            else:
                author_name = author_part
        elif line.startswith("Licence:"):
            author_licence = line.split("Licence:", 1)[1].strip()
        else:
            # Unrecognized lines (kept un-stripped) feed the UserDocu fallback.
            user_docu_lines.append(raw_line)
    if user_docu is None:
        user_docu = "\n".join(user_docu_lines)
    author_obj = None
    # Only build an Author when at least one author-related field was found;
    # missing fields fall back to empty strings / the "LGPL" default.
    if author_name or author_email or author_licence:
        author_obj = Author(
            content=docstring,
            Name=author_name or "",
            EMail=author_email or "",
            Licence=author_licence or "LGPL",
        )
    return Documentation(
        Author=author_obj,
        DeveloperDocu=dev_docu,
        UserDocu=user_docu,
    )
def _get_type_str(node):
"""Recursively convert an AST node for a type annotation to its string representation."""
match node:
case ast.Name(id=name):
# Handle qualified names (e.g., typing.List)
return name
case ast.Attribute(value=val, attr=attr):
# For annotations like List[str] (or Final[List[str]]), build the string recursively.
return f"{_get_type_str(val)}.{attr}"
case ast.Subscript(value=val, slice=slice_node):
value_str = _get_type_str(val)
slice_str = _get_type_str(slice_node)
return f"{value_str}[{slice_str}]"
case ast.Tuple(elts=elts):
# For multiple types (e.g., Tuple[int, str])
return ", ".join(_get_type_str(elt) for elt in elts)
case _:
# Fallback for unsupported node types
return "object"
def _python_type_to_parameter_type(py_type: str) -> ParameterType:
    """
    Map a Python type annotation (given as a string) onto the ParameterType
    enum. Matching is case-insensitive; exact scalar names are tried first,
    then container prefixes. Anything unrecognized becomes OBJECT.
    """
    normalized = py_type.lower()
    exact_matches = {
        "int": ParameterType.LONG,
        "builtins.int": ParameterType.LONG,
        "float": ParameterType.FLOAT,
        "builtins.float": ParameterType.FLOAT,
        "str": ParameterType.STRING,
        "builtins.str": ParameterType.STRING,
        "bool": ParameterType.BOOLEAN,
        "builtins.bool": ParameterType.BOOLEAN,
    }
    if normalized in exact_matches:
        return exact_matches[normalized]
    prefix_matches = (
        (("list", "typing.list"), ParameterType.LIST),
        (("dict", "typing.dict"), ParameterType.DICT),
        (("callable", "typing.callable"), ParameterType.CALLABLE),
        (("sequence", "typing.sequence"), ParameterType.SEQUENCE),
        (("tuple", "typing.tuple"), ParameterType.TUPLE),
    )
    for prefixes, param_type in prefix_matches:
        if normalized.startswith(prefixes):
            return param_type
    return ParameterType.OBJECT
def _parse_class_attributes(class_node: ast.ClassDef, source_code: str) -> List[Attribute]:
    """
    Parse top-level attributes (e.g. `TypeId: str = ""`) from the class AST node.
    We'll create an `Attribute` for each. For the `Documentation` of each attribute,
    we might store minimal or none if there's no docstring.

    A `Final[...]` annotation (plain or dotted, e.g. typing.Final) marks the
    attribute as read-only. A string literal statement immediately following
    the assignment is treated as that attribute's documentation.

    NOTE(review): ``source_code`` is currently unused in this function.
    """
    attributes = []
    default_doc = Documentation(DeveloperDocu="", UserDocu="", Author=None)
    for idx, stmt in enumerate(class_node.body):
        # Only annotated assignments are considered attribute declarations.
        if isinstance(stmt, ast.AnnAssign):
            # e.g.: `TypeId: Final[str] = ""`
            name = stmt.target.id if isinstance(stmt.target, ast.Name) else "unknown"
            # Evaluate the type annotation and detect Final for read-only attributes
            if isinstance(stmt.annotation, ast.Name):
                # e.g. `str`
                type_name = stmt.annotation.id
                readonly = False
            elif isinstance(stmt.annotation, ast.Subscript):
                # Check if this is a Final type hint, e.g. Final[int] or typing.Final[int]
                is_final = (
                    isinstance(stmt.annotation.value, ast.Name)
                    and stmt.annotation.value.id == "Final"
                ) or (
                    isinstance(stmt.annotation.value, ast.Attribute)
                    and stmt.annotation.value.attr == "Final"
                )
                if is_final:
                    readonly = True
                    # Extract the inner type from the Final[...] annotation
                    type_name = _get_type_str(stmt.annotation.slice)
                else:
                    type_name = _get_type_str(stmt.annotation)
                    readonly = False
            else:
                # Any other annotation shape degrades to a generic object.
                type_name = "object"
                readonly = False
            param_type = _python_type_to_parameter_type(type_name)
            # Look for a docstring immediately following the attribute definition.
            attr_doc = default_doc
            if idx + 1 < len(class_node.body):
                next_stmt = class_node.body[idx + 1]
                if (
                    isinstance(next_stmt, ast.Expr)
                    and isinstance(next_stmt.value, ast.Constant)
                    and isinstance(next_stmt.value.value, str)
                ):
                    docstring = next_stmt.value.value
                    # Parse the docstring to build a Documentation object.
                    attr_doc = _parse_docstring_for_documentation(docstring)
            param = Parameter(Name=name, Type=param_type)
            attr = Attribute(Documentation=attr_doc, Parameter=param, Name=name, ReadOnly=readonly)
            attributes.append(attr)
    return attributes
def _parse_methods(class_node: ast.ClassDef) -> List[Methode]:
    """
    Parse methods from the class AST node, extracting:
      - Method name
      - Parameters (from the function signature / annotations)
      - Docstring

    Methods decorated with @overload are skipped entirely; decorator markers
    (constmethod, staticmethod, classmethod, no_args) become boolean flags on
    the resulting Methode model.
    """
    methods = []
    for stmt in class_node.body:
        # Note: async defs (ast.AsyncFunctionDef) are not considered here.
        if not isinstance(stmt, ast.FunctionDef):
            continue
        # Skip methods decorated with @overload
        skip_method = False
        for deco in stmt.decorator_list:
            match deco:
                case ast.Name(id="overload"):
                    skip_method = True
                    break
                case ast.Attribute(attr="overload"):
                    skip_method = True
                    break
                case _:
                    pass
        if skip_method:
            continue
        # Extract method name
        method_name = stmt.name
        # Extract docstring
        method_docstring = ast.get_docstring(stmt) or ""
        doc_obj = _parse_docstring_for_documentation(method_docstring)
        has_keyword_args = False
        method_params = []

        # Helper for extracting an annotation string (only shallow shapes:
        # Name, dotted Name, and the outer name of a subscripted type).
        def get_annotation_str(annotation):
            match annotation:
                case ast.Name(id=name):
                    return name
                case ast.Attribute(value=ast.Name(id=name), attr=attr):
                    return f"{name}.{attr}"
                case ast.Subscript(value=ast.Name(id=name), slice=_):
                    return name
                case ast.Subscript(
                    value=ast.Attribute(value=ast.Name(id=name), attr=attr), slice=_
                ):
                    return f"{name}.{attr}"
                case _:
                    return "object"

        # Process positional parameters (skipping self/cls)
        for arg in stmt.args.args:
            param_name = arg.arg
            if param_name in ("self", "cls"):
                continue
            annotation_str = "object"
            if arg.annotation:
                annotation_str = get_annotation_str(arg.annotation)
            param_type = _python_type_to_parameter_type(annotation_str)
            method_params.append(Parameter(Name=param_name, Type=param_type))
        # Process keyword-only parameters
        for kwarg in stmt.args.kwonlyargs:
            has_keyword_args = True
            param_name = kwarg.arg
            annotation_str = "object"
            if kwarg.annotation:
                annotation_str = get_annotation_str(kwarg.annotation)
            param_type = _python_type_to_parameter_type(annotation_str)
            method_params.append(Parameter(Name=param_name, Type=param_type))
        # A bare **kwargs also marks the method as keyword-capable.
        if stmt.args.kwarg:
            has_keyword_args = True
        # *args disables the keyword flag even when keyword arguments exist.
        keyword_flag = has_keyword_args and not stmt.args.vararg
        # Check for various decorators using any(...)
        const_method_flag = any(
            isinstance(deco, ast.Name) and deco.id == "constmethod" for deco in stmt.decorator_list
        )
        static_method_flag = any(
            isinstance(deco, ast.Name) and deco.id == "staticmethod" for deco in stmt.decorator_list
        )
        class_method_flag = any(
            isinstance(deco, ast.Name) and deco.id == "classmethod" for deco in stmt.decorator_list
        )
        no_args = any(
            isinstance(deco, ast.Name) and deco.id == "no_args" for deco in stmt.decorator_list
        )
        methode = Methode(
            Name=method_name,
            Documentation=doc_obj,
            Parameter=method_params,
            Const=const_method_flag,
            Static=static_method_flag,
            Class=class_method_flag,
            Keyword=keyword_flag,
            NoArgs=no_args,
        )
        methods.append(methode)
    return methods
def _get_module_from_path(path: str) -> str:
"""
Returns the name of the FreeCAD module from the path.
Examples:
.../src/Base/Persistence.py -> "Base"
.../src/Mod/CAM/Path/__init__.py -> "CAM"
"""
# 1. Split the path by the OS separator.
import os
parts = path.split(os.sep)
# 2. Attempt to find "src" in the path components.
try:
idx_src = parts.index("src")
except ValueError:
# If "src" is not found, we cannot determine the module name.
return None
# 3. Check if there is a path component immediately after "src".
# If there isn't, we have nothing to return.
if idx_src + 1 >= len(parts):
return None
next_part = parts[idx_src + 1]
# 4. If the next component is "Mod", then the module name is the
# component AFTER "Mod" (e.g. "CAM" in "Mod/CAM").
if next_part == "Mod":
if idx_src + 2 < len(parts):
return parts[idx_src + 2]
else:
# "Mod" is the last component
return None
else:
# 5. Otherwise, if it's not "Mod", we treat that next component
# itself as the module name (e.g. "Base").
return next_part
def _extract_module_name(import_path: str, default_module: str) -> str:
"""
Given an import_path like "Base.Foo", return "Base".
If import_path has no dot (e.g., "Foo"), return default_module.
Examples:
extract_module_name("Base.Foo", default_module="Fallback") -> "Base"
extract_module_name("Foo", default_module="Fallback") -> "Fallback"
"""
if "." in import_path:
# Take everything before the first dot
return import_path.split(".", 1)[0]
else:
# No dot, return the fallback module name
return default_module
def _get_module_path(module_name: str) -> str:
if module_name in ["Base", "App", "Gui"]:
return module_name
return "Mod/" + module_name
def _parse_imports(tree) -> dict:
"""
Parses the given source_code for import statements and constructs
a mapping from imported name -> module path.
For example, code like:
from Metadata import export, forward_declarations, constmethod
from PyObjectBase import PyObjectBase
from Base.Foo import Foo
from typing import List, Final
yields a mapping of:
{
"export": "Metadata",
"forward_declarations": "Metadata",
"constmethod": "Metadata",
"PyObjectBase": "PyObjectBase",
"Foo": "Base.Foo",
"List": "typing",
"Final": "typing"
}
"""
name_to_module_map = {}
for node in tree.body:
match node:
# Handle 'import X' or 'import X as Y'
case ast.Import(names=names):
# e.g. import foo, import foo as bar
for alias in names:
imported_name = alias.asname if alias.asname else alias.name
name_to_module_map[imported_name] = alias.name
# Handle 'from X import Y, Z as W'
case ast.ImportFrom(module=module, names=names):
module_name = module if module is not None else ""
for alias in names:
imported_name = alias.asname if alias.asname else alias.name
name_to_module_map[imported_name] = module_name
case _:
pass
return name_to_module_map
def _get_native_class_name(klass: str) -> str:
return klass
def _get_native_python_class_name(klass: str) -> str:
if klass == "PyObjectBase":
return klass
return klass + "Py"
def _extract_base_class_name(base: ast.expr) -> str:
"""
Extract the base class name from an AST node using ast.unparse.
For generic bases (e.g. GenericParent[T]), it removes the generic part.
For qualified names (e.g. some_module.ParentClass), it returns only the last part.
"""
base_str = ast.unparse(base)
# Remove generic parameters if present.
if "[" in base_str:
base_str = base_str.split("[", 1)[0]
# For qualified names, take only the class name.
if "." in base_str:
base_str = base_str.split(".")[-1]
return base_str
def _parse_class(class_node, source_code: str, path: str, imports_mapping: dict) -> PythonExport:
    """
    Build a PythonExport model from a stub class definition.

    Decorators on the class (@export, @forward_declarations,
    @class_declarations, @sequence_protocol) contribute metadata; any value
    not supplied via @export kwargs is derived from the class name, its base
    class and the file's location.
    """
    base_class_name = None
    for base in class_node.bases:
        base_class_name = _extract_base_class_name(base)
        break  # Only consider the first base class.
    # Every binding stub must inherit from some wrapper base class.
    assert base_class_name is not None
    is_exported = False
    export_decorator_kwargs = {}
    forward_declarations_text = ""
    class_declarations_text = ""
    sequence_protocol_kwargs = None
    for decorator in class_node.decorator_list:
        match decorator:
            case ast.Name(id="export"):
                # Bare @export: marks the class exported with all defaults.
                export_decorator_kwargs = {}
                is_exported = True
            case ast.Call(func=ast.Name(id="export"), keywords=_, args=_):
                export_decorator_kwargs = _extract_decorator_kwargs(decorator)
                is_exported = True
            case ast.Call(func=ast.Name(id="forward_declarations"), args=args):
                # Only a constant first argument is honored.
                if args:
                    match args[0]:
                        case ast.Constant(value=val):
                            forward_declarations_text = val
            case ast.Call(func=ast.Name(id="class_declarations"), args=args):
                if args:
                    match args[0]:
                        case ast.Constant(value=val):
                            class_declarations_text = val
            case ast.Call(func=ast.Name(id="sequence_protocol"), keywords=_, args=_):
                sequence_protocol_kwargs = _extract_decorator_kwargs(decorator)
            case _:
                pass
    # Parse imports to compute module metadata
    module_name = _get_module_from_path(path)
    # NOTE(review): raises KeyError when the base class was never imported in
    # the stub file — confirm that is the intended failure mode.
    imported_from_module = imports_mapping[base_class_name]
    parent_module_name = _extract_module_name(imported_from_module, module_name)
    class_docstring = ast.get_docstring(class_node) or ""
    doc_obj = _parse_docstring_for_documentation(class_docstring)
    class_attributes = _parse_class_attributes(class_node, source_code)
    class_methods = _parse_methods(class_node)
    native_class_name = _get_native_class_name(class_node.name)
    native_python_class_name = _get_native_python_class_name(class_node.name)
    include = _get_module_path(module_name) + "/" + native_class_name + ".h"
    father_native_python_class_name = _get_native_python_class_name(base_class_name)
    father_include = (
        _get_module_path(parent_module_name) + "/" + father_native_python_class_name + ".h"
    )
    # @export kwargs win; empty/missing values fall back to derived defaults.
    py_export = PythonExport(
        Documentation=doc_obj,
        Name=export_decorator_kwargs.get("Name", "") or native_python_class_name,
        PythonName=export_decorator_kwargs.get("PythonName", "") or None,
        Include=export_decorator_kwargs.get("Include", "") or include,
        Father=export_decorator_kwargs.get("Father", "") or father_native_python_class_name,
        Twin=export_decorator_kwargs.get("Twin", "") or native_class_name,
        TwinPointer=export_decorator_kwargs.get("TwinPointer", "") or native_class_name,
        Namespace=export_decorator_kwargs.get("Namespace", "") or module_name,
        FatherInclude=export_decorator_kwargs.get("FatherInclude", "") or father_include,
        FatherNamespace=export_decorator_kwargs.get("FatherNamespace", "") or parent_module_name,
        Constructor=export_decorator_kwargs.get("Constructor", False),
        NumberProtocol=export_decorator_kwargs.get("NumberProtocol", False),
        RichCompare=export_decorator_kwargs.get("RichCompare", False),
        Delete=export_decorator_kwargs.get("Delete", False),
        Reference=export_decorator_kwargs.get("Reference", None),
        Initialization=export_decorator_kwargs.get("Initialization", False),
        DisableNotify=export_decorator_kwargs.get("DisableNotify", False),
        DescriptorGetter=export_decorator_kwargs.get("DescriptorGetter", False),
        DescriptorSetter=export_decorator_kwargs.get("DescriptorSetter", False),
        ForwardDeclarations=forward_declarations_text,
        ClassDeclarations=class_declarations_text,
        IsExplicitlyExported=is_exported,
    )
    # Attach sequence protocol metadata if provided.
    if sequence_protocol_kwargs is not None:
        try:
            seq_protocol = SequenceProtocol(**sequence_protocol_kwargs)
            py_export.Sequence = seq_protocol
        except Exception as e:
            # Malformed kwargs silently drop the sequence protocol.
            py_export.Sequence = None
    py_export.Attribute.extend(class_attributes)
    py_export.Methode.extend(class_methods)
    return py_export
def parse_python_code(path: str) -> GenerateModel:
    """
    Parse the Python stub file at ``path`` and build a GenerateModel with one
    PythonExport entry per top-level class definition.

    Raises when more than one class relies on implicit export (i.e. lacks an
    @export decorator).
    """
    with open(path, "r") as handle:
        source_code = handle.read()
    tree = ast.parse(source_code)
    imports_mapping = _parse_imports(tree)
    model = GenerateModel()
    for node in tree.body:
        if isinstance(node, ast.ClassDef):
            model.PythonExport.append(_parse_class(node, source_code, path, imports_mapping))
    # At most one class per file may be implicitly exported.
    implicit_exports = [
        exp for exp in model.PythonExport if not getattr(exp, "IsExplicitlyExported", False)
    ]
    if len(implicit_exports) > 1:
        raise Exception("Multiple non explicitly-exported classes were found, please use @export.")
    return model
def parse(path):
    """Public entry point, mirroring the interface of generateModel_Module.parse."""
    return parse_python_code(path)
def main():
    """Command-line entry point: parse the stub file named in argv and dump it."""
    import sys

    cli_args = sys.argv[1:]
    parse(cli_args[0]).dump()

View File

@@ -52,7 +52,7 @@ def nohandle(string):
class copier:
"Smart-copier (YAPTU) class"
def copyblock(self, i=0, last=None):
def copyblock(self, cur_line=0, last=None):
"Main copy method: process lines [i,last) of block"
def repl(match, self=self):
@@ -67,13 +67,13 @@ class copier:
block = self.locals["_bl"]
if last is None:
last = len(block)
while i < last:
line = block[i]
while cur_line < last:
line = block[cur_line]
match = self.restat.match(line)
if match: # a statement starts "here" (at line block[i])
# i is the last line to _not_ process
stat = match.string[match.end(0) :].strip()
j = i + 1 # look for 'finish' from here onwards
j = cur_line + 1 # look for 'finish' from here onwards
nest = 1 # count nesting levels of statements
while j < last:
line = block[j]
@@ -88,20 +88,20 @@ class copier:
match = self.recont.match(line)
if match: # found a contin.-statement
nestat = match.string[match.end(0) :].strip()
stat = "%s _cb(%s,%s)\n%s" % (stat, i + 1, j, nestat)
i = j # again, i is the last line to _not_ process
stat = "%s _cb(%s,%s)\n%s" % (stat, cur_line + 1, j, nestat)
cur_line = j # again, i is the last line to _not_ process
j = j + 1
stat = self.preproc(stat, "exec")
stat = "%s _cb(%s,%s)" % (stat, i + 1, j)
# for debugging, uncomment...: print("-> Executing: {"+stat+"}")
stat = "%s _cb(%s,%s)" % (stat, cur_line + 1, j)
# for debugging, uncomment...: print(f"-> Executing on line {cur_line}: {stat}")
exec(stat, self.globals, self.locals)
i = j + 1
cur_line = j + 1
else: # normal line, just copy with substitution
try:
self.ouf.write(self.regex.sub(repl, line).encode("utf8"))
except TypeError:
self.ouf.write(self.regex.sub(repl, line))
i = i + 1
cur_line = cur_line + 1
def __init__(
self,

View File

@@ -0,0 +1,324 @@
from __future__ import annotations
from dataclasses import dataclass, field
from enum import Enum
from typing import List, Optional
#
# Enums
#
class ParameterType(str, Enum):
    """Enumerates the parameter/attribute types understood by the generator."""

    BOOLEAN = "Boolean"
    INT = "Int"
    LONG = "Long"
    STRING = "String"
    OBJECT = "Object"
    FLOAT = "Float"
    COMPLEX = "Complex"
    CHAR = "Char"
    TUPLE = "Tuple"
    LIST = "List"
    DICT = "Dict"
    MODULE = "Module"
    CALLABLE = "Callable"
    SEQUENCE = "Sequence"

    def __str__(self):
        # Render as the bare value (e.g. "Long"), not "ParameterType.LONG".
        return self.value
#
# Supporting Classes
#
@dataclass
class Author:
    """Represents the <Author> element inside <Documentation>."""

    # The text content of <Author> is effectively a string;
    # we capture it in `content` to hold the text node if needed.
    content: Optional[str] = None
    # Attributes — defaults used when a docstring names no explicit author.
    Name: str = "FreeCAD Project"
    EMail: str = "example@freecad.org"
    Licence: str = "LGPL"
@dataclass
class Documentation:
    """
    Corresponds to the <Documentation> element.
    Can contain an <Author>, <DeveloperDocu>, and <UserDocu>.
    """

    Author: Optional[Author] = None
    DeveloperDocu: Optional[str] = None  # documentation aimed at developers
    UserDocu: Optional[str] = None  # documentation shown to end users
@dataclass
class Property:
    """
    Corresponds to <Property> in the schema.
    It has required attributes Name, Type and optional StartValue,
    plus optional child <Documentation>.
    """

    # Child
    Documentation: Optional[Documentation] = None
    # Attributes
    Name: str = ""
    Type: str = ""
    StartValue: Optional[str] = None  # optional initial value, as declared in the model
@dataclass
class ViewProvider:
    """
    Corresponds to <ViewProvider>, which can contain 0..∞ <Property> children.
    """

    Property: List[Property] = field(default_factory=list)
@dataclass
class Parameter:
    """
    Corresponds to <Parameter> in the schema.
    It has a required 'Name' (str) and a required 'Type' (enumeration).
    """

    Name: str
    Type: ParameterType  # see the ParameterType enum above
#
# Elements under <PythonExport>
#
@dataclass
class Methode:
    """
    Corresponds to <Methode> inside <PythonExport>.
    Contains an optional <Documentation> and 0..∞ <Parameter>.
    """

    Documentation: Optional[Documentation] = None
    Parameter: List[Parameter] = field(default_factory=list)
    # Attributes — the boolean flags mirror decorator markers on the stub.
    Name: str = ""
    Const: Optional[bool] = None  # set by @constmethod
    Keyword: bool = False  # method accepts keyword arguments
    NoArgs: bool = False  # set by @no_args
    Class: bool = False  # set by @classmethod
    Static: bool = False  # set by @staticmethod
@dataclass
class Attribute:
    """
    Corresponds to <Attribute> inside <PythonExport>.
    It has a required <Documentation>, a required <Parameter>,
    and attributes Name, ReadOnly.
    """

    Documentation: Documentation
    Parameter: Parameter
    # Attributes
    Name: str
    ReadOnly: bool  # True for Final[...]-annotated stub attributes
@dataclass
class SequenceProtocol:
    """
    Corresponds to the <Sequence> element inside <PythonExport>.
    All attributes are required booleans; the field names mirror the
    CPython sequence/mapping protocol slot names.
    """

    sq_length: bool
    sq_concat: bool
    sq_repeat: bool
    sq_item: bool
    mp_subscript: bool
    sq_ass_item: bool
    mp_ass_subscript: bool
    sq_contains: bool
    sq_inplace_concat: bool
    sq_inplace_repeat: bool
@dataclass
class PythonExport:
    """
    Corresponds to <PythonExport> inside <GenerateModel>.
    It contains:
      - optional <Documentation>
      - 0..∞ <Methode>
      - 0..∞ <Attribute>
      - optional <Sequence>
      - optional <CustomAttributes>
      - one <ClassDeclarations> (type=string)
      - one <ForwardDeclarations> (type=string)
    Plus many attributes with required/optional flags.
    """

    Documentation: Optional[Documentation] = None
    Methode: List[Methode] = field(default_factory=list)
    Attribute: List[Attribute] = field(default_factory=list)
    Sequence: Optional[SequenceProtocol] = None
    CustomAttributes: Optional[str] = ""  # To match the original XML model
    ClassDeclarations: str = ""  # verbatim text attached via @class_declarations
    ForwardDeclarations: str = ""  # verbatim text attached via @forward_declarations
    NoArgs: bool = False

    # Attributes
    Name: str = ""  # name of the generated Python wrapper class
    PythonName: Optional[str] = None
    Include: str = ""  # header of the wrapped (twin) class
    Father: str = ""  # wrapper class name of the base class
    Twin: str = ""
    Namespace: str = ""
    FatherInclude: str = ""
    FatherNamespace: str = ""
    Constructor: bool = False
    NumberProtocol: bool = False
    RichCompare: bool = False
    TwinPointer: str = ""
    Delete: bool = False
    Reference: Optional[bool] = None
    Initialization: bool = False
    DisableNotify: bool = False
    DescriptorGetter: bool = False
    DescriptorSetter: bool = False
    IsExplicitlyExported: bool = False  # True when the stub class carried @export
#
# Module-Related Classes
#
@dataclass
class Dependencies:
    """
    Corresponds to the <Dependencies> element inside <Module>.
    It contains 0..∞ local <Module> elements which are not typed in the XSD.
    We'll treat these as strings or possibly minimal structures.
    """

    Module: List[str] = field(default_factory=list)
@dataclass
class Feature:
    """
    Corresponds to <Feature> in <Module>'s <Content>.
    Has optional <Documentation>, 0..∞ <Property>, optional <ViewProvider>,
    and a required attribute 'Name'.
    """

    Documentation: Optional[Documentation] = None
    Property: List[Property] = field(default_factory=list)
    ViewProvider: Optional[ViewProvider] = None
    # Attributes
    Name: str = ""
@dataclass
class DocObject:
    """
    Corresponds to <DocObject> in <Module>'s <Content>.
    Has optional <Documentation>, 0..∞ <Property>, and a required 'Name' attribute.
    """

    Documentation: Optional[Documentation] = None
    Property: List[Property] = field(default_factory=list)
    # Attributes
    Name: str = ""
@dataclass
class ModuleContent:
    """
    Corresponds to the <Content> element in <Module>.
    Contains:
      - 0..∞ <Property>
      - 0..∞ <Feature>
      - 0..∞ <DocObject>
      - 0..∞ <GuiCommand>
      - 0..∞ <PreferencesPage>
    """

    Property: List[Property] = field(default_factory=list)
    Feature: List[Feature] = field(default_factory=list)
    DocObject: List[DocObject] = field(default_factory=list)
    GuiCommand: List[str] = field(default_factory=list)
    PreferencesPage: List[str] = field(default_factory=list)
@dataclass
class Module:
    """
    Corresponds to the top-level <Module> element.
    Has optional <Documentation>, optional <Dependencies>,
    a required <Content>, and a required attribute Name.
    """

    Documentation: Optional[Documentation] = None
    Dependencies: Optional[Dependencies] = None
    Content: ModuleContent = field(default_factory=ModuleContent)
    # Attributes
    Name: str = ""
#
# Root Element
#
@dataclass
class GenerateModel:
    """
    Corresponds to the root element <GenerateModel>.
    Contains 0..∞ <Module> and 0..∞ <PythonExport>.
    """

    Module: List[Module] = field(default_factory=list)
    PythonExport: List[PythonExport] = field(default_factory=list)

    def dump(self):
        # Print or process the resulting GenerateModel object
        # (debug helper: only the first PythonExport entry is reported).
        print("Parsed GenerateModel object:")
        if self.PythonExport:
            py_exp = self.PythonExport[0]
            print("PythonExport Name:", py_exp.Name)
            if py_exp.Documentation and py_exp.Documentation.Author:
                print("Author Name:", py_exp.Documentation.Author.Name)
                print("Author Email:", py_exp.Documentation.Author.EMail)
                print("Author Licence:", py_exp.Documentation.Author.Licence)
            # NOTE(review): assumes Documentation is always set — confirm the
            # parser never leaves it None.
            print("DeveloperDocu:", py_exp.Documentation.DeveloperDocu)
            print("UserDocu:", py_exp.Documentation.UserDocu)
            print("Class Attributes:")
            for attr in py_exp.Attribute:
                print(f" - {attr.Name} (type={attr.Parameter.Type}, readOnly={attr.ReadOnly})")
            print("Methods:")
            for meth in py_exp.Methode:
                print(f" - {meth.Name}")
                # Each method might have parameters
                for param in meth.Parameter:
                    print(f" * param: {param.Name}, type={param.Type}")

View File

@@ -0,0 +1,457 @@
# FreeCAD C++ to Python Binding System Manual
Welcome to the new Python-based binding system for exposing FreeCAD C++ APIs to Python. This system replaces the previous XML-based approach with a more direct and flexible Python interface, allowing C++ developers to define Python bindings using native Python syntax, type annotations, and decorators.
* * *
## Table of Contents
* [Overview](#overview)
* [Key Features](#key-features)
* [Core Components](#core-components)
* [Defining Bindings in Python](#defining-bindings-in-python)
* [Metadata Decorators](#metadata-decorators)
* [Class Definitions](#class-definitions)
* [Method Overloading](#method-overloading)
* [Attributes and Read-Only Properties](#attributes-and-read-only-properties)
* [Example](#example)
* [Getting Started](#getting-started)
* [Advanced Topics](#advanced-topics)
* [Conclusion](#conclusion)
* * *
## Overview
The new Python-based binding system allows you to create bindings between FreeCAD's C++ APIs and Python directly within Python source files. By leveraging Python's native features—such as type annotations, decorators, and overloads—you can produce well-documented, type-safe Python interfaces that closely mirror your C++ classes.
This system is designed to be fully compatible and backwards-compatible with the previous XML-based system but offers the following advantages:
* **Direct Python Syntax:** Write bindings directly in Python without an intermediary XML representation.
* **Enhanced Readability:** Preserve detailed documentation and formatting in docstrings.
* **Type Safety:** Use Python type hints to ensure that the Python interface accurately reflects the C++ API.
* **Decorator-Based Metadata:** Attach metadata to classes and methods to control binding behavior.
* **Method Overloads:** Define multiple method signatures using `@overload` for improved clarity and support of type hinting for Python overloads.
* **Comprehensive Documentation:** Maintain detailed developer and user documentation directly in the Python stubs.
* * *
## Core Components
The binding system is built around a few core components:
* **Metadata Decorators:**
A set of decorators (e.g., `@export`, `@constmethod`, `@sequence_protocol`) to annotate classes and methods with necessary metadata for the binding process. These decorators help bridge the gap between the C++ definitions and the Python interface.
* **C++ Python Stub Generation:**
The system generates C++ Python stubs that act as a direct mapping to the corresponding C++ classes. These stubs include method signatures, attributes, and detailed docstrings and uses the same code
as the previous XML-based system.
* **Type Annotations and Overloads:**
Utilize Python's type hints and the `@overload` decorator from the `typing` module to accurately represent C++ method signatures, including support for overloaded methods.
* * *
## Defining Bindings in Python
### Metadata Decorators
The core decorator, `@export`, is used to attach binding-related metadata to a class. This metadata includes information such as the C++ class name, header files, namespaces, and more.
**Example:**
```python
from Metadata import export, constmethod
from PyObjectBase import PyObjectBase
@export(
Father="PyObjectBase",
Name="PrecisionPy",
Twin="Precision",
TwinPointer="Precision",
Include="Base/Precision.h",
Namespace="Base",
FatherInclude="Base/PyObjectBase.h",
FatherNamespace="Base",
)
class PrecisionPy(PyObjectBase):
"""
Base.Precision class.
This class provides precision values for various numerical operations
in the FreeCAD environment.
"""
...
```
### Class Definitions
Classes are defined in a way that closely mirrors the C++ counterparts. The Python classes use decorators to attach metadata and include docstrings that retain original formatting.
### Method Overloading
For methods that require multiple signatures (overloads), use the `@overload` decorator. A final implementation that handles variable arguments (`*args`, `**kwargs`) is provided as a placeholder.
**Example:**
```python
from typing import overload
class QuantityPy(PyObjectBase):
@overload
def toStr(self) -> str: ...
@overload
def toStr(self, decimals: int) -> str: ...
def toStr(self, decimals: int = ...) -> str:
"""
toStr([decimals])
Returns a string representation of the quantity, rounded to the specified number of decimals.
"""
...
```
The `@overload` variants are not actually used by the generator, but solely for the purpose of
providing Python type hinting to be used by type checkers like mypy.
### Attributes and Read-Only Properties
Attributes defined as read-only are annotated with `Final` from Python's `typing` module to indicate immutability.
**Example:**
```python
from typing import Final, Tuple
class UnitPy(PyObjectBase):
# holds the unit type as a string, e.g. 'Area'.
Type: Final[str] = ...
# Returns the signature.
Signature: Final[Tuple] = ...
```
* * *
## Metadata Decorators Reference
This section details each metadata decorator and helper function used to attach auxiliary binding information to your Python classes and methods.
* * *
### 1. `metadata`
#### **Purpose**
The `metadata` decorator attaches a set of key-value pairs as metadata to a class. This metadata informs the binding generator about various aspects of the corresponding C++ API, such as its name, header file, namespace, inheritance, and twin (or native) types.
#### **Usage**
```python
from Metadata import export
@export(
Father="PyObjectBase",
Name="PrecisionPy",
Twin="Precision",
TwinPointer="Precision",
Include="Base/Precision.h",
Namespace="Base",
FatherInclude="Base/PyObjectBase.h",
FatherNamespace="Base",
)
class PrecisionPy(PyObjectBase):
"""
Base.Precision class.
This class provides precision values for various numerical operations
in the FreeCAD environment.
"""
# Class implementation goes here...
```
#### **Parameters**
* **Arbitrary Keyword Arguments (`**kwargs`):**
These may include:
* `Father`: The name of the parent class in Python.
* `Name`: The name of the generated Python binding class.
* `Twin`: The name of the corresponding C++ class.
* `TwinPointer`: The pointer type of the twin C++ class.
* `Include`: The header file where the C++ class is declared.
* `Namespace`: The C++ namespace of the class.
* `FatherInclude`: The header file for the parent class.
* `FatherNamespace`: The C++ namespace for the parent class.
_(Additional keys can be added as required by the binding generator.)_
* * *
### 2. `constmethod`
#### **Purpose**
The `constmethod` decorator marks a method as a constant method. In C++ bindings, this means that the method does not modify the state of the object and should be treated as `const`. This can affect the generated C++ method signature and enforce read-only behavior in Python where applicable.
#### **Usage**
```python
from Metadata import constmethod
class ExamplePy(PyObjectBase):
@constmethod()
def getValue(self) -> int:
"""
Returns an integer value without modifying the object.
"""
# Actual implementation goes here...
```
* * *
### 3. `no_args`
#### **Purpose**
The `no_args` decorator is used to indicate that a method should be called without any arguments. This is
to signal that fact to the generator so it knows to generate the correct C++ API signature.
#### **Usage**
```python
from Metadata import no_args
class ExamplePy(PyObjectBase):
@no_args()
def reset(self) -> None:
"""
Resets the state of the object.
"""
# Implementation goes here...
```
#### **Parameters**
* **None:**
This decorator acts as a marker. It does not modify the method behavior at runtime but provides metadata to the binding generator.
#### **Behavior**
When the binding generator encounters the `no_args` decorator, it ensures that the generated Python stub does not expect any parameters beyond the implicit `self`, matching the no-argument signature of the underlying C++ method.
* * *
### 4. `forward_declarations`
#### **Purpose**
The `forward_declarations` decorator allows you to attach a snippet of source code containing forward declarations to a class. Forward declarations are useful when the binding process requires awareness of other classes or types before their full definitions are encountered.
#### **Usage**
```python
from Metadata import forward_declarations
@forward_declarations("""
class OtherType;
struct HelperStruct;
""")
class ExamplePy(PyObjectBase):
"""
Example class that depends on OtherType and HelperStruct.
"""
# Class implementation goes here...
```
#### **Parameters**
* **`source_code` (str):**
A string containing the forward declarations in C++ syntax.
#### **Behavior**
This decorator attaches the provided forward declarations to the class (typically in a `__forward_declarations__` attribute). During stub generation or C++ header generation, these declarations are inserted at the appropriate location.
* * *
### 5. `class_declarations`
#### **Purpose**
The `class_declarations` decorator is similar to `forward_declarations` but is used for attaching additional class declarations. This may include extra helper classes, enums, or typedefs that are needed for the proper functioning of the bindings.
#### **Usage**
```python
from Metadata import class_declarations
@class_declarations("""
enum Status {
SUCCESS,
FAILURE
};
typedef std::vector<int> IntVector;
""")
class ExamplePy(PyObjectBase):
"""
Example class with extra class declarations.
"""
# Class implementation goes here...
```
#### **Parameters**
* **`source_code` (str):**
A string containing extra class or type declarations that supplement the binding.
#### **Behavior**
The decorator stores the provided declarations in an attribute (e.g., `__class_declarations__`) so that the binding generator can include these in the final generated files.
* * *
### 6. `sequence_protocol`
#### **Purpose**
The `sequence_protocol` decorator is used to declare that a class implements Python's sequence protocol. This includes support for operations like indexing, slicing, iteration, and length retrieval. By attaching protocol metadata, you can control how the binding system exposes these behaviors.
#### **Usage**
```python
from Metadata import sequence_protocol
@sequence_protocol(
sq_length=True,
sq_concat=False,
sq_repeat=False,
sq_item=True,
mp_subscript=True,
sq_ass_item=True,
mp_ass_subscript=False,
sq_contains=False,
sq_inplace_concat=False,
sq_inplace_repeat=False,
)
class ContainerPy(PyObjectBase):
"""
A container class that implements Python's sequence protocol.
"""
...
```
#### **Parameters**
* **Arbitrary Keyword Arguments (`**kwargs`):**
* `sq_length` (bool): Whether the class supports length retrieval via `len()`.
* `sq_concat` (bool): Whether sequences can be concatenated with the `+` operator.
* `sq_repeat` (bool): Whether sequences can be repeated with the `*` operator.
* `sq_item` (bool): Whether the class supports element access via integer indexing.
* `mp_subscript` (bool): Whether generalized subscript access (`obj[key]`, including slices) is supported.
* `sq_ass_item` (bool): Whether element assignment or deletion via an integer index is supported.
* `mp_ass_subscript` (bool): Whether assignment or deletion via a general subscript (`obj[key] = value`) is supported.
* `sq_contains` (bool): Whether membership testing with the `in` operator is supported.
* `sq_inplace_concat` (bool): Whether in-place concatenation (`+=`) is supported.
* `sq_inplace_repeat` (bool): Whether in-place repetition (`*=`) is supported.
#### **Behavior**
The decorator attaches a `__sequence_protocol__` attribute to the class with the provided dictionary. This metadata is later used to generate the appropriate sequence operations in the Python API stubs.
#### Sequence Protocol Callbacks
1. **`sq_length``static Py_ssize_t sequence_length(PyObject *)`**
* **Purpose:**
Implements the “length” function for the object.
* **Usage in Python:**
When you call `len(obj)`, this function is invoked to determine how many items are in the sequence.
* **C API Mapping:**
This function fills the `sq_length` slot in the `PySequenceMethods` structure.
2. **`sq_concat``static PyObject* sequence_concat(PyObject *, PyObject *)`**
* **Purpose:**
Implements the concatenation operation for sequences.
* **Usage in Python:**
This is called when two sequence objects are added together using the `+` operator (e.g., `a + b`).
* **C API Mapping:**
This function is assigned to the `sq_concat` slot in `PySequenceMethods`.
3. **`sq_repeat``static PyObject * sequence_repeat(PyObject *, Py_ssize_t)`**
* **Purpose:**
Implements the repetition operation for sequences.
* **Usage in Python:**
It is invoked when a sequence is multiplied by an integer using the `*` operator (e.g., `a * n`), creating a new sequence with repeated elements.
* **C API Mapping:**
This function is installed in the `sq_repeat` slot of `PySequenceMethods`.
4. **`sq_item``static PyObject * sequence_item(PyObject *, Py_ssize_t)`**
* **Purpose:**
Implements element access via integer indexing.
* **Usage in Python:**
When you access an element using `obj[index]`, this function is called to retrieve the item.
* **C API Mapping:**
It fills the `sq_item` slot in `PySequenceMethods`.
5. **`sq_ass_item``static int sequence_ass_item(PyObject *, Py_ssize_t, PyObject *)`**
* **Purpose:**
Implements assignment (or deletion) of an element via an integer index.
* **Usage in Python:**
This function is used when an item is assigned (e.g., `obj[index] = value`) or deleted (`del obj[index]`).
* **C API Mapping:**
It is set into the `sq_ass_item` slot of the `PySequenceMethods` structure.
6. **`sq_contains``static int sequence_contains(PyObject *, PyObject *)`**
* **Purpose:**
Implements the membership test operation (the `in` operator).
* **Usage in Python:**
When evaluating `value in obj`, this function is used to determine if `value` is present in the sequence.
* **C API Mapping:**
This function populates the `sq_contains` slot in `PySequenceMethods`.
7. **`sq_inplace_concat``static PyObject* sequence_inplace_concat(PyObject *, PyObject *)`**
* **Purpose:**
Implements in-place concatenation of sequences.
* **Usage in Python:**
This is invoked when using the `+=` operator on sequences, modifying the sequence in place.
* **C API Mapping:**
It goes into the `sq_inplace_concat` slot of `PySequenceMethods`.
8. **`sq_inplace_repeat``static PyObject * sequence_inplace_repeat(PyObject *, Py_ssize_t)`**
* **Purpose:**
Implements in-place repetition of sequences.
* **Usage in Python:**
This function handles in-place multiplication (using `*=`) to repeat the sequence.
* **C API Mapping:**
It fills the `sq_inplace_repeat` slot in `PySequenceMethods`.
* * *
#### Mapping Protocol Callback
Although the code is primarily about the sequence protocol, it also provides functions for handling more general subscript operations via the mapping protocol:
1. **`mp_subscript``static PyObject * mapping_subscript(PyObject *, PyObject *)`**
* **Purpose:**
Provides generalized subscript access.
* **Usage in Python:**
This function is used when the object is accessed with a subscript that is not necessarily an integer (for example, a slice or another key) using the `obj[key]` syntax.
* **C API Mapping:**
It fills the `mp_subscript` slot in the `PyMappingMethods` structure.
2. **`mp_ass_subscript``static int mapping_ass_subscript(PyObject *, PyObject *, PyObject *)`**
* **Purpose:**
Implements assignment (or deletion) via subscripting through the mapping protocol.
* **Usage in Python:**
When performing operations like `obj[key] = value` or `del obj[key]`, this function is called.
* **C API Mapping:**
This function is assigned to the `mp_ass_subscript` slot in the `PyMappingMethods` structure.
* * *

View File

@@ -9,7 +9,39 @@ import model.generateModel_Module
import model.generateTools
def compareFiles(file1, file2):
    """Compares two files and raises if they are not equal.

    :param file1: path of the first (reference) file.
    :param file2: path of the second file.
    :raises FileNotFoundError: if either file does not exist; the message
        names the file that is actually missing.
    :raises ValueError: if the contents differ; the message embeds a
        unified diff of the two files.
    """
    # Check if files exist
    for file in (file1, file2):
        if not os.path.exists(file):
            # Bug fix: report the specific missing file, not both paths.
            raise FileNotFoundError(f"File not found: {file}")

    # Read file contents
    with open(file1, "r", encoding="utf-8") as f1, open(file2, "r", encoding="utf-8") as f2:
        lines1 = f1.readlines()
        lines2 = f2.readlines()

    # Compare and print differences
    import difflib

    diff = list(difflib.unified_diff(lines1, lines2, fromfile=file1, tofile=file2, lineterm=""))
    if diff:
        error = "Files are not equal.\n\n"
        error += "Diff:\n\n"
        error += "".join(diff)
        raise ValueError(error)
class TemplateClassPyExport(template.ModelTemplate):
# TODO: This is temporary, once all XML files are migrated, this can be removed.
def getPath(self, path):
    """Return the output path to use for this export.

    For Python-defined bindings, a trailing underscore is inserted before
    the file extension (e.g. ``FooPy.cpp`` -> ``FooPy_.cpp``) so the output
    can coexist with the XML-generated files during the migration.
    """
    if not self.is_python:
        return path
    stem, suffix = os.path.splitext(path)
    return "{}_{}".format(stem, suffix)
def Generate(self):
# self.ParentNamespace = "Base"
# self.Namespace = "Base"
@@ -19,6 +51,8 @@ class TemplateClassPyExport(template.ModelTemplate):
outputDir = self.outputDir
def escapeString(s, indent=4):
if not s:
return None
"""Escapes a string for use as literal in C++ code"""
s = s.strip() # This allows UserDocu-tags on their own lines without adding whitespace
s = s.replace("\\", "\\\\")
@@ -34,7 +68,7 @@ class TemplateClassPyExport(template.ModelTemplate):
os.makedirs(subpath)
# Imp.cpp must not exist, neither in outputDir nor in inputDir
outputImp = outputDir + exportName + "Imp.cpp"
outputImp = self.getPath(outputDir + exportName + "Imp.cpp")
if not os.path.exists(outputImp):
if not os.path.exists(inputDir + exportName + "Imp.cpp"):
file = open(outputImp, "wb")
@@ -42,17 +76,39 @@ class TemplateClassPyExport(template.ModelTemplate):
model.generateTools.replace(self.TemplateImplement, locals(), file)
file.close()
outputCpp = outputDir + exportName + ".cpp"
outputCpp = self.getPath(outputDir + exportName + ".cpp")
with open(outputCpp, "wb") as file:
print("TemplateClassPyExport", "TemplateModule", file.name)
model.generateTools.replace(self.TemplateModule, locals(), file)
outputHeader = outputDir + exportName + ".h"
outputHeader = self.getPath(outputDir + exportName + ".h")
with open(outputHeader, "wb") as file:
print("TemplateClassPyExport", "TemplateHeader", file.name)
model.generateTools.replace(self.TemplateHeader, locals(), file)
# file.write( model.generateTools.replace(self.Template,locals()))
def Compare(self):
    """
    Compares the Python generated files to the previously generated XML files.
    This exists temporarily while the XML files are migrated to Python to guarantee consistency.
    """
    exportName = self.export.Name
    inputDir = self.inputDir
    outputDir = self.outputDir
    # Imp.cpp is only generated when no hand-written Imp.cpp exists in the
    # input dir (mirrors the corresponding check in Generate), so only
    # compare the generated pair in that case.
    if not os.path.exists(inputDir + exportName + "Imp.cpp"):
        outputImpXml = outputDir + exportName + "Imp.cpp"
        outputImpPy = self.getPath(outputDir + exportName + "Imp.cpp")
        compareFiles(outputImpXml, outputImpPy)
    # Header: XML output vs. Python output (getPath adds the "_" suffix).
    outputHeaderXml = outputDir + exportName + ".h"
    outputHeaderPy = self.getPath(outputDir + exportName + ".h")
    compareFiles(outputHeaderXml, outputHeaderPy)
    # Implementation: XML output vs. Python output.
    outputCppXml = outputDir + exportName + ".cpp"
    outputCppPy = self.getPath(outputDir + exportName + ".cpp")
    compareFiles(outputCppXml, outputCppPy)
TemplateHeader = """
// This file is generated by src/Tools/generateTemplates/templateClassPyExport.py out of the XML file
// Every change you make here gets lost in the next full rebuild!