refactor: acis parse
This commit is contained in:
@@ -0,0 +1,3 @@
|
||||
# Copyright (c) 2022-2024, Manfred Moitzi
|
||||
# License: MIT License
|
||||
# Users should always import from ezdxf.acis.api!
|
||||
@@ -0,0 +1,224 @@
|
||||
# Copyright (c) 2022-2024, Manfred Moitzi
|
||||
# License: MIT License
|
||||
from __future__ import annotations
|
||||
from typing import TypeVar, Generic, TYPE_CHECKING, Optional
|
||||
from abc import ABC, abstractmethod
|
||||
from .const import NULL_PTR_NAME, MIN_EXPORT_VERSION
|
||||
from .hdr import AcisHeader
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .entities import AcisEntity
|
||||
from ezdxf.math import Vec3
|
||||
|
||||
|
||||
T = TypeVar("T", bound="AbstractEntity")
|
||||
|
||||
|
||||
class AbstractEntity(ABC):
    """Common query interface shared by the raw SAT and SAB entity types."""

    # entity type name, e.g. "body", "lump"; "null-ptr" marks the null record
    name: str
    # record id; -1 means "not assigned yet"
    id: int = -1

    def __str__(self):
        return "{}".format(self.name)

    @property
    def is_null_ptr(self) -> bool:
        """Returns ``True`` if this entity is the ``NULL_PTR`` entity."""
        return NULL_PTR_NAME == self.name
|
||||
|
||||
|
||||
class DataLoader(ABC):
    """Data loading interface to create high level AcisEntity data from low
    level AbstractEntity representation.

    """

    # ACIS version of the source data; controls version-dependent fields
    version: int = MIN_EXPORT_VERSION

    @abstractmethod
    def has_data(self) -> bool:
        """Return ``True`` if unread values remain in the current record."""
        pass

    @abstractmethod
    def read_int(self, skip_sat: Optional[int] = None) -> int:
        """There are sometimes additional int values in SAB files which are
        not present in SAT files, maybe reference counters e.g. vertex, coedge.
        """
        pass

    @abstractmethod
    def read_double(self) -> float:
        """Read and return a double precision floating point value."""
        pass

    @abstractmethod
    def read_interval(self) -> float:
        """Read and return an interval value as float."""
        pass

    @abstractmethod
    def read_vec3(self) -> tuple[float, float, float]:
        """Read and return a 3D vector as tuple of three floats."""
        pass

    @abstractmethod
    def read_bool(self, true: str, false: str) -> bool:
        """Read a boolean; `true`/`false` are the SAT specifier strings
        (e.g. "forward"/"reversed") that encode the two states.
        """
        pass

    @abstractmethod
    def read_str(self) -> str:
        """Read and return a string value."""
        pass

    @abstractmethod
    def read_ptr(self) -> AbstractEntity:
        """Read an entity pointer and return the referenced raw entity."""
        pass

    @abstractmethod
    def read_transform(self) -> list[float]:
        """Read and return the transformation data as a flat list of floats;
        the 4th matrix column (0, 0, 0, 1) is not included (see Transform).
        """
        pass
|
||||
|
||||
|
||||
class DataExporter(ABC):
    """Data export interface to write high level AcisEntity data as low level
    SAT or SAB records.

    """

    # target ACIS version; controls version-dependent fields
    version: int = MIN_EXPORT_VERSION

    @abstractmethod
    def write_int(self, value: int, skip_sat=False) -> None:
        """There are sometimes additional int values in SAB files which are
        not present in SAT files, maybe reference counters e.g. vertex, coedge.
        """
        pass

    @abstractmethod
    def write_double(self, value: float) -> None:
        """Write a double precision floating point value."""
        pass

    @abstractmethod
    def write_interval(self, value: float) -> None:
        """Write an interval value."""
        pass

    @abstractmethod
    def write_loc_vec3(self, value: Vec3) -> None:
        """Write a location vector (3 doubles)."""
        pass

    @abstractmethod
    def write_dir_vec3(self, value: Vec3) -> None:
        """Write a direction vector (3 doubles)."""
        pass

    @abstractmethod
    def write_bool(self, value: bool, true: str, false: str) -> None:
        """Write a boolean; `true`/`false` are the SAT specifier strings
        for the two states (e.g. "forward"/"reversed").
        """
        pass

    @abstractmethod
    def write_str(self, value: str) -> None:
        """Write a string value."""
        pass

    @abstractmethod
    def write_literal_str(self, value: str) -> None:
        """Write a literal string record (used e.g. by the transform entity)."""
        pass

    @abstractmethod
    def write_ptr(self, entity: AcisEntity) -> None:
        """Write a pointer to the given entity."""
        pass

    @abstractmethod
    def write_transform(self, data: list[str]) -> None:
        """Write transformation data, already formatted as strings."""
        pass
|
||||
|
||||
|
||||
class AbstractBuilder(Generic[T]):
    """Shared record management for the SAT and SAB builders."""

    header: AcisHeader
    bodies: list[T]
    entities: list[T]

    def reorder_records(self) -> None:
        """Reorder records so the asmheader record comes first, followed by
        all body records, then everything else."""
        if not self.entities:
            return
        front: list[T] = []
        tail: list[T] = []
        for record in self.entities:
            name = record.name
            if name == "asmheader":
                # the asmheader has to be the very first record
                front.insert(0, record)
            elif name == "body":
                front.append(record)
            else:
                tail.append(record)
        self.entities = front + tail

    def reset_ids(self, start: int = 0) -> None:
        """Assign consecutive ids to all records, beginning at `start`."""
        next_id = start
        for record in self.entities:
            record.id = next_id
            next_id += 1

    def clear_ids(self) -> None:
        """Reset all record ids to the "unassigned" marker -1."""
        for record in self.entities:
            record.id = -1
|
||||
|
||||
|
||||
class EntityExporter(Generic[T]):
    """Two-phase exporter: first create a raw record for every reachable
    entity, then export the data of each entity into its record.
    """

    def __init__(self, header: AcisHeader):
        self.header = header
        self.version = header.version
        # maps id(entity) -> raw record; insertion order defines record order
        self._exported_entities: dict[int, T] = {}
        if self.header.has_asm_header:
            # the asm-header is exported first, before any body
            self.export(self.header.asm_header())

    def export_records(self) -> list[T]:
        """Return all created records in export order."""
        return list(self._exported_entities.values())

    @abstractmethod
    def make_record(self, entity: AcisEntity) -> T:
        """Create an empty raw record for `entity` (SAT or SAB specific)."""
        pass

    @abstractmethod
    def make_data_exporter(self, record: T) -> DataExporter:
        """Create a DataExporter writing into `record`."""
        pass

    def get_record(self, entity: AcisEntity) -> T:
        """Return the record created for `entity`; KeyError if not exported."""
        assert not entity.is_none
        return self._exported_entities[id(entity)]

    def export(self, entity: AcisEntity):
        """Export `entity` and all entities reachable from it.

        Raises:
            TypeError: `entity` is the NONE_REF sentinel

        """
        if entity.is_none:
            raise TypeError("invalid NONE_REF entity given")
        self._make_all_records(entity)
        self._export_data(entity)

    def _has_record(self, entity: AcisEntity) -> bool:
        # identity based lookup: each entity instance gets exactly one record
        return id(entity) in self._exported_entities

    def _add_record(self, entity: AcisEntity, record: T) -> None:
        assert not entity.is_none
        self._exported_entities[id(entity)] = record

    def _make_all_records(self, entity: AcisEntity):
        # Phase 1: breadth-first walk creating a record for every reachable
        # entity; BFS order determines the record numbering in the output.
        def add(e: AcisEntity) -> bool:
            if not e.is_none and not self._has_record(e):
                self._add_record(e, self.make_record(e))
                return True
            return False

        entities = [entity]
        while entities:
            next_entity = entities.pop(0)
            add(next_entity)
            for sub_entity in next_entity.entities():
                if add(sub_entity):
                    entities.append(sub_entity)

    def _export_data(self, entity: AcisEntity):
        # Phase 2: write each entity's data into its record; all pointers can
        # be resolved because phase 1 created every record already.
        def _export_record(e: AcisEntity):
            if id(e) not in done:
                done.add(id(e))
                record = self.get_record(e)
                if not e.attributes.is_none:
                    record.attributes = self.get_record(e.attributes)  # type: ignore
                e.export(self.make_data_exporter(record))
                return True
            return False

        entities = [entity]
        done: set[int] = set()
        while entities:
            next_entity = entities.pop(0)
            _export_record(next_entity)
            for sub_entity in next_entity.entities():
                if _export_record(sub_entity):
                    entities.append(sub_entity)
|
||||
@@ -0,0 +1,31 @@
|
||||
# Copyright (c) 2022-2024, Manfred Moitzi
|
||||
# License: MIT License
|
||||
# Public API module (interface)
|
||||
"""
|
||||
The main goals of this ACIS support library are:
|
||||
|
||||
1. load and parse simple and known ACIS data structures
|
||||
2. create and export simple and known ACIS data structures
|
||||
|
||||
It is NOT a goal to edit and export arbitrary existing ACIS structures.
|
||||
|
||||
Don't even try it!
|
||||
|
||||
These modules do not implement an ACIS kernel!!!
|
||||
So tasks beyond stitching some flat polygonal faces to a polyhedron or creating
|
||||
simple curves is not possible.
|
||||
|
||||
To all beginners: GO AWAY!
|
||||
|
||||
"""
|
||||
from .const import (
|
||||
AcisException,
|
||||
ParsingError,
|
||||
InvalidLinkStructure,
|
||||
ExportError,
|
||||
)
|
||||
from .mesh import mesh_from_body, body_from_mesh, vertices_from_body
|
||||
from .entities import load, export_sat, export_sab, Body
|
||||
from .dbg import AcisDebugger, dump_sab_as_text
|
||||
from .dxf import export_dxf, load_dxf
|
||||
from .cache import AcisCache
|
||||
@@ -0,0 +1,59 @@
|
||||
# Copyright (c) 2024, Manfred Moitzi
|
||||
# License: MIT License
|
||||
from __future__ import annotations
|
||||
from typing import TYPE_CHECKING, Sequence
|
||||
from .entities import Body, load
|
||||
from .type_hints import EncodedData
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ezdxf.entities import DXFEntity
|
||||
|
||||
__all__ = ["AcisCache"]
|
||||
NO_BODIES: Sequence[Body] = tuple()
|
||||
|
||||
|
||||
class AcisCache:
    """This cache manages ACIS bodies created from SAT or SAB data stored in DXF
    entities.

    Each entry is a list of ACIS bodies and is indexed by a hash calculated from the
    source content of the SAT or SAB data.

    """

    def __init__(self) -> None:
        # maps content hash -> loaded bodies; a value may be an empty tuple
        self._entries: dict[int, Sequence[Body]] = {}
        self.hits: int = 0  # lookup found a cached entry
        self.misses: int = 0  # lookup had to load the bodies

    @staticmethod
    def hash_data(data: EncodedData) -> int:
        """Return a hash for SAT (str or list of str) or SAB (bytes-like) content."""
        if isinstance(data, list):
            return hash(tuple(data))
        elif isinstance(data, bytearray):
            return hash(bytes(data))
        return hash(data)

    def __len__(self) -> int:
        """Return the count of cached entries."""
        return len(self._entries)

    def get_bodies(self, data: EncodedData) -> Sequence[Body]:
        """Return the ACIS bodies for `data`, loading and caching them on
        first use; returns an empty sequence for empty input.
        """
        if not data:
            return NO_BODIES

        hash_value = AcisCache.hash_data(data)
        # Bugfix: do not use NO_BODIES as the "missing" sentinel via an
        # identity check.  A cached empty result is stored as tuple(), which
        # CPython interns as the same object as NO_BODIES = tuple(), so every
        # cached empty entry was counted as a miss and reloaded on each call.
        try:
            bodies = self._entries[hash_value]
        except KeyError:
            self.misses += 1
            bodies = tuple(load(data))
            self._entries[hash_value] = bodies
            return bodies
        self.hits += 1
        return bodies

    def from_dxf_entity(self, entity: DXFEntity) -> Sequence[Body]:
        """Return the bodies stored in a DXF ACIS entity; returns an empty
        sequence for any other DXF entity type.
        """
        from ezdxf.entities import Body as DxfBody

        if not isinstance(entity, DxfBody):
            return NO_BODIES
        return self.get_bodies(entity.acis_data)
|
||||
@@ -0,0 +1,183 @@
|
||||
# Copyright (c) 2022-2024, Manfred Moitzi
|
||||
# License: MIT License
|
||||
import enum
|
||||
from ezdxf.version import __version__
|
||||
|
||||
# SAT Export Requirements for Autodesk Products
|
||||
# ---------------------------------------------
|
||||
# Script to create test files:
|
||||
# examples/acistools/create_3dsolid_cube.py
|
||||
|
||||
# DXF R2000, R2004, R2007, R2010: OK, tested with TrueView 2022
|
||||
# ACIS version 700
|
||||
# ACIS version string: "ACIS 32.0 NT"
|
||||
# record count: 0, not required
|
||||
# body count: 1, required
|
||||
# ASM header: no
|
||||
# end-marker: "End-of-ACIS-data"
|
||||
|
||||
# DXF R2004, R2007, R2010: OK, tested with TrueView 2022
|
||||
# ACIS version 20800
|
||||
# ACIS version string: "ACIS 208.00 NT"
|
||||
# record count: 0, not required
|
||||
# body count: n + 1 (asm-header), required
|
||||
# ASM header: "208.0.4.7009"
|
||||
# end-marker: "End-of-ACIS-data"
|
||||
|
||||
# SAB Export Requirements for Autodesk Products
|
||||
# ---------------------------------------------
|
||||
# DXF R2013, R2018: OK, tested with TrueView 2022
|
||||
# ACIS version 21800
|
||||
# ACIS version string: "ACIS 208.00 NT"
|
||||
# record count: 0, not required
|
||||
# body count: n + 1 (asm-header), required
|
||||
# ASM header: "208.0.4.7009"
|
||||
# end-marker: "End-of-ASM-data"
|
||||
|
||||
ACIS_VERSION = {
|
||||
400: "ACIS 4.00 NT", # DXF R2000, no asm header - only R2000
|
||||
700: "ACIS 32.0 NT", # DXF R2000-R2010, no asm header
|
||||
20800: "ACIS 208.00 NT", # DXF R2013 with asm-header, asm-end-marker
|
||||
21800: "ACIS 218.00 NT", # DXF R2013 with asm-header, asm-end-marker
|
||||
22300: "ACIS 223.00 NT", # DXF R2018 with asm-header, asm-end-marker
|
||||
}
|
||||
ASM_VERSION = {
|
||||
20800: "208.0.4.7009", # DXF R2004, R2007, R2010
|
||||
21800: "208.0.4.7009", # DXF R2013, default version for R2013 and R2018
|
||||
22300: "222.0.0.1700", # DXF R2018
|
||||
}
|
||||
EZDXF_BUILDER_ID = f"ezdxf v{__version__} ACIS Builder"
|
||||
MIN_EXPORT_VERSION = 700
|
||||
|
||||
# ACIS version 700 is the default version for DXF R2000, R2004, R2007 and R2010 (SAT)
|
||||
# ACIS version 21800 is the default version for DXF R2013 and R2018 (SAB)
|
||||
DEFAULT_SAT_VERSION = 700
|
||||
DEFAULT_SAB_VERSION = 21800
|
||||
|
||||
DATE_FMT = "%a %b %d %H:%M:%S %Y"
|
||||
END_OF_ACIS_DATA_SAT = "End-of-ACIS-data"
|
||||
END_OF_ACIS_DATA_SAB = b"\x0e\x03End\x0e\x02of\x0e\x04ACIS\x0d\x04data"
|
||||
END_OF_ASM_DATA_SAT = "End-of-ASM-data"
|
||||
END_OF_ASM_DATA_SAB = b"\x0e\x03End\x0e\x02of\x0e\x03ASM\x0d\x04data"
|
||||
BEGIN_OF_ACIS_HISTORY_DATA = "Begin-of-ACIS-History-data"
|
||||
END_OF_ACIS_HISTORY_DATA = "End-of-ACIS-History-data"
|
||||
DATA_END_MARKERS = (
|
||||
END_OF_ACIS_DATA_SAT,
|
||||
BEGIN_OF_ACIS_HISTORY_DATA,
|
||||
END_OF_ASM_DATA_SAT,
|
||||
)
|
||||
NULL_PTR_NAME = "null-ptr"
|
||||
NONE_ENTITY_NAME = "none-entity"
|
||||
NOR_TOL = 1e-10
|
||||
RES_TOL = 9.9999999999999995e-7
|
||||
|
||||
BOOL_SPECIFIER = {
|
||||
"forward": True,
|
||||
"forward_v": True,
|
||||
"reversed": False,
|
||||
"reversed_v": False,
|
||||
"single": True,
|
||||
"double": False,
|
||||
}
|
||||
|
||||
ACIS_SIGNATURE = b"ACIS BinaryFile" # DXF R2013/R2018
|
||||
ASM_SIGNATURE = b"ASM BinaryFile4" # DXF R2018
|
||||
SIGNATURES = [ACIS_SIGNATURE, ASM_SIGNATURE]
|
||||
|
||||
|
||||
def is_valid_export_version(version: int):
    """Return ``True`` if `version` is a known ACIS version at or above the
    minimum supported export version."""
    return version in ACIS_VERSION and version >= MIN_EXPORT_VERSION
|
||||
|
||||
|
||||
class Tags(enum.IntEnum):
    """SAB value tags: each value in a SAB record is prefixed by one of these
    type tags."""

    NO_TYPE = 0x00
    BYTE = 0x01  # not used in files!
    CHAR = 0x02  # not used in files!
    SHORT = 0x03  # not used in files!
    INT = 0x04  # 32-bit signed integer
    FLOAT = 0x05  # not used in files!
    DOUBLE = 0x06  # 64-bit double precision floating point value
    STR = 0x07  # count is the following 8-bit uchar
    STR2 = 0x08  # not used in files!
    STR3 = 0x09  # not used in files!

    # bool value for reversed, double, I - depends on context
    # NOTE(review): the specifier comments on BOOL_TRUE/BOOL_FALSE look
    # swapped relative to BOOL_SPECIFIER (forward=True, reversed=False) —
    # confirm against the SAB decoder before relying on them.
    BOOL_TRUE = 0x0A

    # bool value forward, single, forward_v - depends on context
    BOOL_FALSE = 0x0B
    POINTER = 0x0C
    ENTITY_TYPE = 0x0D
    ENTITY_TYPE_EX = 0x0E
    SUBTYPE_START = 0x0F
    SUBTYPE_END = 0x10
    RECORD_END = 0x11
    LITERAL_STR = 0x12  # count is a 32-bit uint, see transform entity
    LOCATION_VEC = 0x13  # vector (3 doubles)
    DIRECTION_VEC = 0x14  # vector (3 doubles)

    # Enumeration are stored as strings in SAT and ints in SAB.
    # It's not possible to translate SAT enums (strings) to SAB enums (int) and
    # vice versa without knowing the implementation details. Each enumeration
    # is specific to the class where it is used.
    ENUM = 0x15
    # 0x16: ???
    UNKNOWN_0x17 = 0x17  # double
|
||||
|
||||
|
||||
# entity type structure:
|
||||
# 0x0D 0x04 (char count of) "body" = SAT "body"
|
||||
# 0x0E 0x05 "plane" 0x0D 0x07 "surface" = SAT "plane-surface"
|
||||
# 0x0E 0x06 "ref_vt" 0x0E 0x03 "eye" 0x0D 0x06 "attrib" = SAT "ref_vt-eye-attrib"
|
||||
|
||||
|
||||
class Flags(enum.IntFlag):
    """ACIS header bit flags."""

    # presumably: data contains an ACIS history section — confirm against
    # BEGIN_OF_ACIS_HISTORY_DATA usage in the parsers
    HAS_HISTORY = 1
|
||||
|
||||
|
||||
class AcisException(Exception):
    """Base exception of the ACIS support library."""

    pass
|
||||
|
||||
|
||||
class InvalidLinkStructure(AcisException):
    """Raised when the entity link structure is corrupt."""

    pass
|
||||
|
||||
|
||||
class ParsingError(AcisException):
    """Raised when parsing of SAT or SAB data fails."""

    pass
|
||||
|
||||
|
||||
class ExportError(AcisException):
    """Raised when ACIS structures cannot be exported, e.g. unsupported
    entities or an invalid export version."""

    pass
|
||||
|
||||
|
||||
class EndOfAcisData(AcisException):
    """Signals the end of the ACIS data stream while parsing."""

    pass
|
||||
|
||||
|
||||
class Features:
    """ACIS version numbers in which each format feature was introduced;
    used for version-dependent loading/exporting, e.g.
    ``loader.version >= Features.PATTERN``.
    """

    LAW_SPL = 400
    CONE_SCALING = 400
    LOFT_LAW = 400
    REF_MIN_UV_GRID = 400
    VBLEND_AUTO = 400
    BL_ENV_SF = 400
    ELLIPSE_OFFSET = 500
    TOL_MODELING = 500
    APPROX_SUMMARY = 500
    TAPER_SCALING = 500
    LAZY_B_SPLINE = 500
    DM_MULTI_SURF = 500
    GA_COPY_ACTION = 600
    DM_MULTI_SURF_COLOR = 600
    RECAL_SKIN_ERROR = 520
    TAPER_U_RULED = 600
    DM_60 = 600
    LOFT_PCURVE = 600
    EELIST_OWNER = 600
    ANNO_HOOKED = 700
    PATTERN = 700
    ENTITY_TAGS = 700
    AT = 700
    NET_LAW = 700
    STRINGLESS_HISTORY = 700
|
||||
@@ -0,0 +1,193 @@
|
||||
# Copyright (c) 2022-2024, Manfred Moitzi
|
||||
# License: MIT License
|
||||
from __future__ import annotations
|
||||
from typing import Iterator, Callable, Any
|
||||
from .entities import (
|
||||
AcisEntity,
|
||||
NONE_REF,
|
||||
Face,
|
||||
Coedge,
|
||||
Loop,
|
||||
Vertex,
|
||||
)
|
||||
from . import sab
|
||||
|
||||
|
||||
class AcisDebugger:
    """Debugging and inspection helper for a loaded ACIS entity graph.

    Walks all entities reachable from `root`, assigns ids to entities which
    have none yet and provides queries over the resulting id->entity table.
    """

    def __init__(self, root: AcisEntity = NONE_REF, start_id: int = 1):
        # -1 so the first _get_id() call returns start_id
        self._next_id = start_id - 1
        self._root: AcisEntity = root
        self.entities: dict[int, AcisEntity] = dict()
        if not root.is_none:
            self._store_entities(root)

    @property
    def root(self) -> AcisEntity:
        """The root entity of the inspected graph."""
        return self._root

    def _get_id(self) -> int:
        # returns the next free entity id
        self._next_id += 1
        return self._next_id

    def _store_entities(self, entity: AcisEntity) -> None:
        """Recursively assign ids and register all reachable entities which
        do not have an id yet (id == -1)."""
        if not entity.is_none and entity.id == -1:
            entity.id = self._get_id()
            self.entities[entity.id] = entity
            for e in vars(entity).values():
                if isinstance(e, AcisEntity) and e.id == -1:
                    self._store_entities(e)

    def set_entities(self, entity: AcisEntity) -> None:
        """Replace the inspected graph by the graph reachable from `entity`."""
        self.entities.clear()
        self._root = entity
        self._store_entities(entity)

    def walk(self, root: AcisEntity = NONE_REF) -> Iterator[AcisEntity]:
        """Yield all entities reachable from `root` (default: stored root)
        in depth-first order; each entity is yielded only once (by id)."""

        def _walk(entity: AcisEntity):
            if entity.is_none:
                return
            yield entity
            done.add(entity.id)
            for e in vars(entity).values():
                if isinstance(e, AcisEntity) and e.id not in done:
                    yield from _walk(e)

        if root.is_none:
            root = self._root
        done: set[int] = set()
        yield from _walk(root)

    def filter(
        self, func: Callable[[AcisEntity], bool], entity: AcisEntity = NONE_REF
    ) -> Iterator[Any]:
        """Yield all entities of the walk for which `func` returns True."""
        if entity.is_none:
            entity = self._root
        yield from filter(func, self.walk(entity))

    def filter_type(
        self, name: str, entity: AcisEntity = NONE_REF
    ) -> Iterator[Any]:
        """Yield all entities of the walk whose type equals `name`."""
        if entity.is_none:
            entity = self._root
        yield from filter(lambda x: x.type == name, self.walk(entity))

    @staticmethod
    def entity_attributes(entity: AcisEntity, indent: int = 0) -> Iterator[str]:
        """Yield one "name: value" line per attribute, skipping the id."""
        indent_str = " " * indent
        for name, data in vars(entity).items():
            if name == "id":
                continue
            yield f"{indent_str}{name}: {data}"

    def face_link_structure(self, face: Face, indent: int = 0) -> Iterator[str]:
        """Yield one diagnostic line per face in the face list, reporting
        partner faces and any broken (one-directional) partner links."""
        indent_str = " " * indent

        while not face.is_none:
            partner_faces = list(self.partner_faces(face))
            error = ""
            linked_partner_faces = []
            unlinked_partner_faces = []
            for pface_id in partner_faces:
                pface = self.entities.get(pface_id)
                if pface is None:
                    error += f" face {pface_id} does not exist;"
                if isinstance(pface, Face):
                    # a valid link is bidirectional: the partner face has to
                    # reference this face as a partner too
                    reverse_faces = self.partner_faces(pface)
                    if face.id in reverse_faces:
                        linked_partner_faces.append(pface_id)
                    else:
                        unlinked_partner_faces.append(pface_id)
                else:
                    error += f" entity {pface_id} is not a face;"
            if unlinked_partner_faces:
                error = f"unlinked partner faces: {unlinked_partner_faces} {error}"
            yield f"{indent_str}{str(face)} >> {partner_faces} {error}"
            face = face.next_face

    @staticmethod
    def partner_faces(face: Face) -> Iterator[int]:
        """Yield the ids of all faces sharing a coedge with `face`."""
        coedges: list[Coedge] = []
        loop = face.loop
        while not loop.is_none:
            coedges.extend(co for co in loop.coedges())
            loop = loop.next_loop
        for coedge in coedges:
            for partner_coedge in coedge.partner_coedges():
                yield partner_coedge.loop.face.id

    @staticmethod
    def coedge_structure(face: Face, ident: int = 4) -> list[str]:
        """Return one line per coedge of `face` (and per partner coedge)
        showing the referenced edge and sense flag."""
        lines: list[str] = []
        coedges: list[Coedge] = []
        loop = face.loop

        while not loop.is_none:
            coedges.extend(co for co in loop.coedges())
            loop = loop.next_loop
        for coedge in coedges:
            edge1 = coedge.edge
            sense1 = coedge.sense
            lines.append(f"Coedge={coedge.id} edge={edge1.id} sense={sense1}")
            for partner_coedge in coedge.partner_coedges():
                edge2 = partner_coedge.edge
                sense2 = partner_coedge.sense
                lines.append(
                    f"  Partner Coedge={partner_coedge.id} edge={edge2.id} sense={sense2}"
                )
        ident_str = " " * ident
        return [ident_str + line for line in lines]

    @staticmethod
    def loop_vertices(loop: Loop, indent: int = 0) -> str:
        """Return a single line showing the vertex id pairs of all loop edges."""
        indent_str = " " * indent
        return f"{indent_str}{loop} >> {list(AcisDebugger.loop_edges(loop))}"

    @staticmethod
    def loop_edges(loop: Loop) -> Iterator[list[int]]:
        """Yield [start_id, end_id] vertex pairs for each edge of the loop,
        oriented by the coedge sense flag."""
        coedge = loop.coedge
        first = coedge
        while not coedge.is_none:
            edge = coedge.edge
            sv = edge.start_vertex
            ev = edge.end_vertex
            if coedge.sense:
                # reversed sense: traverse the edge end -> start
                yield [ev.id, sv.id]
            else:
                yield [sv.id, ev.id]
            coedge = coedge.next_coedge
            if coedge is first:
                # coedges form a closed ring; stop after one full cycle
                break

    def vertex_to_edge_relation(self) -> Iterator[str]:
        """Yield one diagnostic line per vertex showing its parent edge."""
        for vertex in (
            e for e in self.entities.values() if isinstance(e, Vertex)
        ):
            edge = vertex.edge
            sv = edge.start_vertex
            ev = edge.end_vertex
            yield f"{vertex}: parent edge is {edge.id}; {sv.id} => {ev.id}; {edge.curve}"

    def is_manifold(self) -> bool:
        """Return False if any coedge has more than one partner coedge."""
        # NOTE(review): assumes partner_coedges() returns a sized sequence
        # (len() is applied here but it is iterated elsewhere) — confirm
        for coedge in self.filter_type("coedge"):
            if len(coedge.partner_coedges()) > 1:
                return False
        return True
|
||||
|
||||
|
||||
def dump_sab_as_text(data: bytes) -> Iterator[str]:
    """Yield the content of the SAB `data` as human readable text lines:
    the header dump first, then one "TAG = value" line per value of each
    record; a parsing error ends the output with the error message.
    """

    def entity_data(e):
        # e iterates (tag, value) pairs of one SAB record
        for tag, value in e:
            name = sab.Tags(tag).name
            yield f"{name} = {value}"

    decoder = sab.Decoder(data)
    header = decoder.read_header()
    yield from header.dumps()
    index = 0
    try:
        for record in decoder.read_records():
            yield f"--------------------- record: {index}"
            yield from entity_data(record)
            index += 1
    except sab.ParsingError as e:
        # best effort: dump everything up to the corrupt record
        yield str(e)
|
||||
@@ -0,0 +1,82 @@
|
||||
# Copyright (c) 2022-2024, Manfred Moitzi
|
||||
# License: MIT License
|
||||
from __future__ import annotations
|
||||
from typing import cast, Sequence
|
||||
from ezdxf.entities import DXFEntity, Body as DXFBody
|
||||
from ezdxf.lldxf import const
|
||||
from .entities import Body, export_sat, export_sab, load
|
||||
|
||||
|
||||
def export_dxf(entity: DXFEntity, bodies: Sequence[Body]):
    """Store the :term:`ACIS` bodies in the given DXF entity. This is the
    recommended way to set ACIS data of DXF entities.

    The DXF entity has to be an ACIS based entity and inherit from
    :class:`ezdxf.entities.Body`. The entity has to be bound to a valid DXF
    document and the DXF version of the document has to be DXF R2000 or newer.

    Raises:
        DXFTypeError: invalid DXF entity type
        DXFValueError: invalid DXF document
        DXFVersionError: invalid DXF version

    """
    if not isinstance(entity, DXFBody):
        raise const.DXFTypeError(f"invalid DXF entity {entity.dxftype()}")
    dxf_body = cast(DXFBody, entity)
    document = entity.doc
    if document is None:
        raise const.DXFValueError("a valid DXF document is required")
    dxfversion = document.dxfversion
    if dxfversion < const.DXF2000:
        raise const.DXFVersionError(f"invalid DXF version {dxfversion}")

    # R2000-R2010 store ACIS data as text (SAT), R2013+ as binary (SAB)
    if dxfversion < const.DXF2013:
        dxf_body.sat = export_sat(bodies)
    else:
        dxf_body.sab = export_sab(bodies)
|
||||
|
||||
|
||||
def load_dxf(entity: DXFEntity) -> list[Body]:
    """Load the :term:`ACIS` bodies from the given DXF entity. This is the
    recommended way to load ACIS data.

    The DXF entity has to be an ACIS based entity and inherit from
    :class:`ezdxf.entities.Body`. The entity has to be bound to a valid DXF
    document and the DXF version of the document has to be DXF R2000 or newer.

    Raises:
        DXFTypeError: invalid DXF entity type
        DXFValueError: invalid DXF document
        DXFVersionError: invalid DXF version

    .. warning::

        Only a limited count of :term:`ACIS` entities is supported, all
        unsupported entities are loaded as ``NONE_ENTITY`` and their data is
        lost. Exporting such ``NONE_ENTITIES`` will raise an :class:`ExportError`
        exception.

        To emphasize that again: **It is not possible to load and re-export
        arbitrary ACIS data!**

    """

    if not isinstance(entity, DXFBody):
        raise const.DXFTypeError(f"invalid DXF entity {entity.dxftype()}")
    dxf_body = cast(DXFBody, entity)
    document = entity.doc
    if document is None:
        raise const.DXFValueError("a valid DXF document is required")
    dxfversion = document.dxfversion
    if dxfversion < const.DXF2000:
        raise const.DXFVersionError(f"invalid DXF version {dxfversion}")

    # pick the stored representation: binary SAB (R2013+) or text SAT
    if dxf_body.has_binary_data:
        acis_data = dxf_body.sab
    else:
        acis_data = dxf_body.sat
    if acis_data:
        return load(acis_data)
    return []
|
||||
@@ -0,0 +1,802 @@
|
||||
# Copyright (c) 2022-2024, Manfred Moitzi
|
||||
# License: MIT License
|
||||
from __future__ import annotations
|
||||
from typing import Callable, Type, Any, Sequence, Iterator
|
||||
import abc
|
||||
|
||||
from . import sab, sat, const, hdr
|
||||
from .const import Features
|
||||
from .abstract import DataLoader, AbstractEntity, DataExporter
|
||||
from .type_hints import EncodedData
|
||||
from ezdxf.math import Matrix44, Vec3, NULLVEC
|
||||
|
||||
Factory = Callable[[AbstractEntity], "AcisEntity"]
|
||||
|
||||
ENTITY_TYPES: dict[str, Type[AcisEntity]] = {}
|
||||
INF = float("inf")
|
||||
|
||||
|
||||
def load(data: EncodedData) -> list[Body]:
    """Returns a list of :class:`Body` entities from :term:`SAT` or :term:`SAB`
    data. Accepts :term:`SAT` data as a single string or a sequence of strings
    and :term:`SAB` data as bytes or bytearray.

    """
    # bytes-like input is binary SAB data, everything else is text SAT data
    loader = SabLoader if isinstance(data, (bytes, bytearray)) else SatLoader
    return loader.load(data)
|
||||
|
||||
|
||||
def export_sat(
    bodies: Sequence[Body], version: int = const.DEFAULT_SAT_VERSION
) -> list[str]:
    """Export one or more :class:`Body` entities as text based :term:`SAT` data.

    ACIS version 700 is sufficient for DXF versions R2000, R2004, R2007 and
    R2010, later DXF versions require :term:`SAB` data.

    Raises:
        ExportError: ACIS structures contain unsupported entities
        InvalidLinkStructure: corrupt link structure

    """
    if version < const.MIN_EXPORT_VERSION:
        raise const.ExportError(f"invalid ACIS version: {version}")
    writer = sat.SatExporter(_setup_export_header(version))
    # SAT files use the "End-of-ACIS-data" marker, not the ASM marker
    writer.header.asm_end_marker = False
    for body in bodies:
        writer.export(body)
    return writer.dump_sat()
|
||||
|
||||
|
||||
def export_sab(
    bodies: Sequence[Body], version: int = const.DEFAULT_SAB_VERSION
) -> bytes:
    """Export one or more :class:`Body` entities as binary encoded :term:`SAB`
    data.

    ACIS version 21800 is sufficient for DXF versions R2013 and R2018, earlier
    DXF versions require :term:`SAT` data.

    Raises:
        ExportError: ACIS structures contain unsupported entities
        InvalidLinkStructure: corrupt link structure

    """
    if version < const.MIN_EXPORT_VERSION:
        raise const.ExportError(f"invalid ACIS version: {version}")
    writer = sab.SabExporter(_setup_export_header(version))
    # SAB files require the ASM end marker
    writer.header.asm_end_marker = True
    for body in bodies:
        writer.export(body)
    return writer.dump_sab()
|
||||
|
||||
|
||||
def _setup_export_header(version: int) -> hdr.AcisHeader:
    """Create an :class:`AcisHeader` for the given ACIS `version`.

    Raises:
        ExportError: `version` is not a valid export version

    """
    if not const.is_valid_export_version(version):
        raise const.ExportError(f"invalid export version: {version}")
    header = hdr.AcisHeader()
    header.set_version(version)
    return header
|
||||
|
||||
|
||||
def register(cls):
    """Class decorator: register an entity class by its `type` string in the
    global ENTITY_TYPES factory table."""
    ENTITY_TYPES[cls.type] = cls
    return cls
|
||||
|
||||
|
||||
class NoneEntity:
    """Sentinel base class; the module level instance NONE_REF represents a
    non-existing entity."""

    type: str = const.NONE_ENTITY_NAME

    @property
    def is_none(self) -> bool:
        # True only for the sentinel itself; subclasses override `type`
        return self.type == const.NONE_ENTITY_NAME
|
||||
|
||||
|
||||
NONE_REF: Any = NoneEntity()
|
||||
|
||||
|
||||
class AcisEntity(NoneEntity):
    """Base ACIS entity which also represents unsupported entities.

    Unsupported entities are entities whose internal structure are not fully
    known or user defined entity types.

    The content of these unsupported entities is not loaded and lost by
    exporting such entities, therefore exporting unsupported entities raises
    an :class:`ExportError` exception.

    """

    # entity type name; overridden by every registered subclass
    type: str = "unsupported-entity"
    # record id; -1 means "not assigned"
    id: int = -1
    # linked attribute entity; NONE_REF if the entity has no attributes
    attributes: AcisEntity = NONE_REF

    def __str__(self) -> str:
        return f"{self.type}({self.id})"

    def load(self, loader: DataLoader, entity_factory: Factory) -> None:
        """Load the ACIS entity content from `loader`."""
        self.restore_common(loader, entity_factory)
        self.restore_data(loader)

    def restore_common(self, loader: DataLoader, entity_factory: Factory) -> None:
        """Load the common part of an ACIS entity."""
        pass

    def restore_data(self, loader: DataLoader) -> None:
        """Load the data part of an ACIS entity."""
        pass

    def export(self, exporter: DataExporter) -> None:
        """Write the ACIS entity content to `exporter`."""
        self.write_common(exporter)
        self.write_data(exporter)

    def write_common(self, exporter: DataExporter) -> None:
        """Write the common part of the ACIS entity.

        It is not possible to export :class:`Body` entities including
        unsupported entities, doing so would cause data loss or worse data
        corruption!

        """
        # base implementation raises: only subclasses with a fully known
        # structure can be exported
        raise const.ExportError(f"unsupported entity type: {self.type}")

    def write_data(self, exporter: DataExporter) -> None:
        """Write the data part of the ACIS entity."""
        pass

    def entities(self) -> Iterator[AcisEntity]:
        """Yield all attributes of this entity of type AcisEntity."""
        for e in vars(self).values():
            if isinstance(e, AcisEntity):
                yield e
|
||||
|
||||
|
||||
def restore_entity(
    expected_type: str, loader: DataLoader, entity_factory: Factory
) -> Any:
    """Read an entity pointer from `loader` and build the referenced entity.

    Returns NONE_REF for a null pointer; raises ParsingError if the
    referenced entity does not match `expected_type`.
    """
    raw_entity = loader.read_ptr()
    if raw_entity.is_null_ptr:
        return NONE_REF
    if not raw_entity.name.endswith(expected_type):
        raise const.ParsingError(
            f"expected entity type '{expected_type}', got '{raw_entity.name}'"
        )
    return entity_factory(raw_entity)
|
||||
|
||||
|
||||
@register
class Transform(AcisEntity):
    type: str = "transform"
    # class-level default; restore_data() replaces it with a new instance,
    # so the shared default is never mutated in place
    matrix = Matrix44()

    def restore_data(self, loader: DataLoader) -> None:
        """Build a Matrix44 from the 12 floats of the transform record."""
        data = loader.read_transform()
        # insert values of the 4th matrix column (0, 0, 0, 1)
        data.insert(3, 0.0)
        data.insert(7, 0.0)
        data.insert(11, 0.0)
        data.append(1.0)
        self.matrix = Matrix44(data)

    def write_common(self, exporter: DataExporter) -> None:
        """Write the matrix as transform record: 12 numbers followed by the
        scale factor and the rotate/reflect/shear specifier strings."""

        def write_double(value: float):
            data.append(f"{value:g}")

        data: list[str] = []
        for row in self.matrix.rows():
            # only the first 3 values of each row; the 4th column is implicit
            write_double(row[0])
            write_double(row[1])
            write_double(row[2])
        test_vector = Vec3(1, 0, 0)
        result = self.matrix.transform_direction(test_vector)
        # A uniform scaling in x- y- and z-axis is assumed:
        write_double(round(result.magnitude, 6))  # scale factor
        # the matrix rotates if the transformed x-axis is no longer the x-axis
        is_rotated = not result.normalize().isclose(test_vector)
        data.append("rotate" if is_rotated else "no_rotate")
        # reflection and shear are not detected; always exported as absent
        data.append("no_reflect")
        data.append("no_shear")
        exporter.write_transform(data)
|
||||
|
||||
|
||||
@register
class AsmHeader(AcisEntity):
    """ACIS 'asmheader' record holding the ASM version string."""

    type: str = "asmheader"

    def __init__(self, version: str = ""):
        # e.g. "208.0.4.7009" for ACIS version 20800
        self.version = version

    def restore_common(self, loader: DataLoader, entity_factory: Factory) -> None:
        """Read the version string from the record."""
        self.version = loader.read_str()

    def write_common(self, exporter: DataExporter) -> None:
        """Write the version string to the record."""
        exporter.write_str(self.version)
|
||||
|
||||
|
||||
class SupportsPattern(AcisEntity):
    """Base class for entities that carry an optional 'pattern' pointer
    (present in ACIS versions >= Features.PATTERN).
    """

    pattern: Pattern = NONE_REF

    def restore_common(self, loader: DataLoader, entity_factory: Factory) -> None:
        # older files have no pattern pointer at all
        if loader.version >= Features.PATTERN:
            self.pattern = restore_entity("pattern", loader, entity_factory)

    def write_common(self, exporter: DataExporter) -> None:
        exporter.write_ptr(self.pattern)
|
||||
|
||||
|
||||
@register
class Body(SupportsPattern):
    """ACIS 'body' record: the root of a model, linking lumps, wires and an
    optional transformation.
    """

    type: str = "body"
    pattern: Pattern = NONE_REF
    lump: Lump = NONE_REF  # head of the singly linked lump list
    wire: Wire = NONE_REF  # wires are not implemented
    transform: Transform = NONE_REF  # applies to all content of this body

    def restore_common(self, loader: DataLoader, entity_factory: Factory) -> None:
        # pointer order must match the serialized record: lump, wire, transform
        super().restore_common(loader, entity_factory)
        self.lump = restore_entity("lump", loader, entity_factory)
        self.wire = restore_entity("wire", loader, entity_factory)
        self.transform = restore_entity("transform", loader, entity_factory)

    def write_common(self, exporter: DataExporter) -> None:
        # same order as restore_common() reads
        super().write_common(exporter)
        exporter.write_ptr(self.lump)
        exporter.write_ptr(self.wire)
        exporter.write_ptr(self.transform)

    def append_lump(self, lump: Lump) -> None:
        """Append a :class:`Lump` entity as last lump."""
        lump.body = self  # set back-link to the owning body
        if self.lump.is_none:
            self.lump = lump
        else:
            # walk the linked list to its tail
            current_lump = self.lump
            while not current_lump.next_lump.is_none:
                current_lump = current_lump.next_lump
            current_lump.next_lump = lump

    def lumps(self) -> list[Lump]:
        """Returns all linked :class:`Lump` entities as a list."""
        lumps = []
        current_lump = self.lump
        while not current_lump.is_none:
            lumps.append(current_lump)
            current_lump = current_lump.next_lump
        return lumps
|
||||
|
||||
|
||||
@register
class Wire(SupportsPattern):  # not implemented
    """ACIS 'wire' record placeholder (loaded but not interpreted)."""

    type: str = "wire"
|
||||
|
||||
|
||||
@register
class Pattern(AcisEntity):  # not implemented
    """ACIS 'pattern' record placeholder (loaded but not interpreted)."""

    type: str = "pattern"
|
||||
|
||||
|
||||
@register
class Lump(SupportsPattern):
    """ACIS 'lump' record: a connected part of a body, owning a linked list
    of shells.
    """

    type: str = "lump"
    next_lump: Lump = NONE_REF  # next lump of the same body
    shell: Shell = NONE_REF  # head of the singly linked shell list
    body: Body = NONE_REF  # parent/owner

    def restore_common(self, loader: DataLoader, entity_factory: Factory) -> None:
        # pointer order must match the serialized record
        super().restore_common(loader, entity_factory)
        self.next_lump = restore_entity("lump", loader, entity_factory)
        self.shell = restore_entity("shell", loader, entity_factory)
        self.body = restore_entity("body", loader, entity_factory)

    def write_common(self, exporter: DataExporter) -> None:
        # same order as restore_common() reads
        super().write_common(exporter)
        exporter.write_ptr(self.next_lump)
        exporter.write_ptr(self.shell)
        exporter.write_ptr(self.body)

    def append_shell(self, shell: Shell) -> None:
        """Append a :class:`Shell` entity as last shell."""
        shell.lump = self  # set back-link to the owning lump
        if self.shell.is_none:
            self.shell = shell
        else:
            # walk the linked list to its tail
            current_shell = self.shell
            while not current_shell.next_shell.is_none:
                current_shell = current_shell.next_shell
            current_shell.next_shell = shell

    def shells(self) -> list[Shell]:
        """Returns all linked :class:`Shell` entities as a list."""
        shells = []
        current_shell = self.shell
        while not current_shell.is_none:
            shells.append(current_shell)
            current_shell = current_shell.next_shell
        return shells
|
||||
|
||||
|
||||
@register
class Shell(SupportsPattern):
    """ACIS 'shell' record: a connected set of faces/wires owned by a lump."""

    type: str = "shell"
    next_shell: Shell = NONE_REF  # next shell of the same lump
    subshell: Subshell = NONE_REF  # not implemented
    face: Face = NONE_REF  # head of the singly linked face list
    wire: Wire = NONE_REF  # not implemented
    lump: Lump = NONE_REF  # parent/owner

    def restore_common(self, loader: DataLoader, entity_factory: Factory) -> None:
        # pointer order must match the serialized record
        super().restore_common(loader, entity_factory)
        # Bugfix: the raw record is named "shell", so the expected type has to
        # be "shell" -- "shell".endswith("next_shell") is False and the
        # previous argument "next_shell" raised a ParsingError for every
        # non-null next-shell pointer.  This matches the analogous
        # next-pointer restores in Lump ("lump") and Face ("face").
        self.next_shell = restore_entity("shell", loader, entity_factory)
        self.subshell = restore_entity("subshell", loader, entity_factory)
        self.face = restore_entity("face", loader, entity_factory)
        self.wire = restore_entity("wire", loader, entity_factory)
        self.lump = restore_entity("lump", loader, entity_factory)

    def write_common(self, exporter: DataExporter) -> None:
        # same order as restore_common() reads
        super().write_common(exporter)
        exporter.write_ptr(self.next_shell)
        exporter.write_ptr(self.subshell)
        exporter.write_ptr(self.face)
        exporter.write_ptr(self.wire)
        exporter.write_ptr(self.lump)

    def append_face(self, face: Face) -> None:
        """Append a :class:`Face` entity as last face."""
        face.shell = self  # set back-link to the owning shell
        if self.face.is_none:
            self.face = face
        else:
            # walk the linked list to its tail
            current_face = self.face
            while not current_face.next_face.is_none:
                current_face = current_face.next_face
            current_face.next_face = face

    def faces(self) -> list[Face]:
        """Returns all linked :class:`Face` entities as a list."""
        faces = []
        current_face = self.face
        while not current_face.is_none:
            faces.append(current_face)
            current_face = current_face.next_face
        return faces
|
||||
|
||||
|
||||
@register
class Subshell(SupportsPattern):  # not implemented
    """ACIS 'subshell' record placeholder (loaded but not interpreted)."""

    type: str = "subshell"
|
||||
|
||||
|
||||
@register
class Face(SupportsPattern):
    """ACIS 'face' record: a bounded portion of a surface, delimited by a
    linked list of loops.
    """

    type: str = "face"
    next_face: "Face" = NONE_REF  # next face of the same shell
    loop: Loop = NONE_REF  # head of the singly linked loop list
    shell: Shell = NONE_REF  # parent/owner
    subshell: Subshell = NONE_REF  # not implemented
    surface: Surface = NONE_REF  # underlying geometry
    # sense: face normal with respect to the surface
    sense = False  # True = reversed; False = forward
    double_sided = False  # True = double (hollow body); False = single (solid body)
    containment = False  # if double_sided: True = in, False = out

    def restore_common(self, loader: DataLoader, entity_factory: Factory) -> None:
        # pointer/flag order must match the serialized record
        super().restore_common(loader, entity_factory)
        self.next_face = restore_entity("face", loader, entity_factory)
        self.loop = restore_entity("loop", loader, entity_factory)
        self.shell = restore_entity("shell", loader, entity_factory)
        self.subshell = restore_entity("subshell", loader, entity_factory)
        self.surface = restore_entity("surface", loader, entity_factory)
        self.sense = loader.read_bool("reversed", "forward")
        self.double_sided = loader.read_bool("double", "single")
        # containment is only present for double sided faces
        if self.double_sided:
            self.containment = loader.read_bool("in", "out")

    def write_common(self, exporter: DataExporter) -> None:
        # same order as restore_common() reads
        super().write_common(exporter)
        exporter.write_ptr(self.next_face)
        exporter.write_ptr(self.loop)
        exporter.write_ptr(self.shell)
        exporter.write_ptr(self.subshell)
        exporter.write_ptr(self.surface)
        exporter.write_bool(self.sense, "reversed", "forward")
        exporter.write_bool(self.double_sided, "double", "single")
        if self.double_sided:
            exporter.write_bool(self.containment, "in", "out")

    def append_loop(self, loop: Loop) -> None:
        """Append a :class:`Loop` entity as last loop."""
        loop.face = self  # set back-link to the owning face
        if self.loop.is_none:
            self.loop = loop
        else:  # order of coedges is important! (right-hand rule)
            current_loop = self.loop
            while not current_loop.next_loop.is_none:
                current_loop = current_loop.next_loop
            current_loop.next_loop = loop

    def loops(self) -> list[Loop]:
        """Returns all linked :class:`Loop` entities as a list."""
        loops = []
        current_loop = self.loop
        while not current_loop.is_none:
            loops.append(current_loop)
            current_loop = current_loop.next_loop
        return loops
|
||||
|
||||
|
||||
@register
class Surface(SupportsPattern):
    """Abstract base for parametric surface records; stores the u/v parameter
    bounds (INF means unbounded).
    """

    type: str = "surface"
    u_bounds = INF, INF
    v_bounds = INF, INF

    def restore_data(self, loader: DataLoader) -> None:
        # two intervals per parameter direction: u first, then v
        self.u_bounds = loader.read_interval(), loader.read_interval()
        self.v_bounds = loader.read_interval(), loader.read_interval()

    def write_data(self, exporter: DataExporter):
        # same order as restore_data() reads
        exporter.write_interval(self.u_bounds[0])
        exporter.write_interval(self.u_bounds[1])
        exporter.write_interval(self.v_bounds[0])
        exporter.write_interval(self.v_bounds[1])

    @abc.abstractmethod
    def evaluate(self, u: float, v: float) -> Vec3:
        """Returns the spatial location at the parametric surface for the given
        parameters `u` and `v`.
        """
        pass
|
||||
|
||||
|
||||
@register
class Plane(Surface):
    """ACIS 'plane-surface' record: an unbounded plane defined by origin,
    normal and u-direction; v-direction is derived, not serialized.
    """

    type: str = "plane-surface"
    origin = Vec3(0, 0, 0)
    normal = Vec3(0, 0, 1)  # pointing outside
    u_dir = Vec3(1, 0, 0)  # unit vector!
    v_dir = Vec3(0, 1, 0)  # unit vector!
    # reverse_v:
    # True: "reverse_v" - the normal vector does not follow the right-hand rule
    # False: "forward_v" - the normal vector follows right-hand rule
    reverse_v = False

    def restore_common(self, loader: DataLoader, entity_factory: Factory) -> None:
        super().restore_common(loader, entity_factory)
        self.origin = Vec3(loader.read_vec3())
        self.normal = Vec3(loader.read_vec3())
        self.u_dir = Vec3(loader.read_vec3())
        self.reverse_v = loader.read_bool("reverse_v", "forward_v")
        # v_dir is not stored in the record, derive it
        self.update_v_dir()

    def write_common(self, exporter: DataExporter) -> None:
        super().write_common(exporter)
        exporter.write_loc_vec3(self.origin)
        exporter.write_dir_vec3(self.normal)
        exporter.write_dir_vec3(self.u_dir)
        exporter.write_bool(self.reverse_v, "reverse_v", "forward_v")
        # v_dir is not exported

    def update_v_dir(self):
        """Recompute v_dir from normal x u_dir, honoring the reverse_v flag."""
        v_dir = self.normal.cross(self.u_dir)
        if self.reverse_v:
            v_dir = -v_dir
        self.v_dir = v_dir

    def evaluate(self, u: float, v: float) -> Vec3:
        """Return the spatial location origin + u*u_dir + v*v_dir."""
        return self.origin + (self.u_dir * u) + (self.v_dir * v)
|
||||
|
||||
|
||||
@register
class Loop(SupportsPattern):
    """ACIS 'loop' record: a closed chain of coedges bounding a face."""

    type: str = "loop"
    next_loop: Loop = NONE_REF  # next loop of the same face
    coedge: Coedge = NONE_REF  # entry point into the (circular) coedge list
    face: Face = NONE_REF  # parent/owner

    def restore_common(self, loader: DataLoader, entity_factory: Factory) -> None:
        # pointer order must match the serialized record
        super().restore_common(loader, entity_factory)
        self.next_loop = restore_entity("loop", loader, entity_factory)
        self.coedge = restore_entity("coedge", loader, entity_factory)
        self.face = restore_entity("face", loader, entity_factory)

    def write_common(self, exporter: DataExporter) -> None:
        # same order as restore_common() reads
        super().write_common(exporter)
        exporter.write_ptr(self.next_loop)
        exporter.write_ptr(self.coedge)
        exporter.write_ptr(self.face)

    def set_coedges(self, coedges: list[Coedge], close=True) -> None:
        """Set all coedges of a loop at once.

        If `close` is ``True`` the coedges are linked into a circular list
        (first.prev = last, last.next = first), otherwise the chain is open
        and terminated with NONE_REF at both ends.
        """
        assert len(coedges) > 0
        self.coedge = coedges[0]
        next_coedges = coedges[1:]
        prev_coedges = coedges[:-1]
        if close:
            next_coedges.append(coedges[0])
            prev_coedges.insert(0, coedges[-1])
        else:
            next_coedges.append(NONE_REF)
            prev_coedges.insert(0, NONE_REF)

        for coedge, next, prev in zip(coedges, next_coedges, prev_coedges):
            coedge.loop = self
            coedge.prev_coedge = prev
            coedge.next_coedge = next

    def coedges(self) -> list[Coedge]:
        """Returns all linked :class:`Coedge` entities as a list."""
        coedges = []

        current_coedge = self.coedge
        while not current_coedge.is_none:  # open loop if none
            coedges.append(current_coedge)
            current_coedge = current_coedge.next_coedge
            if current_coedge is self.coedge:  # circular linked list!
                break  # closed loop
        return coedges
|
||||
|
||||
|
||||
@register
class Coedge(SupportsPattern):
    """ACIS 'coedge' record: the usage of an edge by one face's loop."""

    type: str = "coedge"
    next_coedge: Coedge = NONE_REF  # next coedge in the loop
    prev_coedge: Coedge = NONE_REF  # previous coedge in the loop
    # The partner_coedge points to the coedge of an adjacent face, in a
    # manifold body each coedge has zero (open) or one (closed) partner edge.
    # ACIS supports also non-manifold bodies, so there can be more than one
    # partner coedges which are organized in a circular linked list.
    partner_coedge: Coedge = NONE_REF
    edge: Edge = NONE_REF  # underlying edge geometry
    # sense: True = reversed; False = forward;
    # coedge has the same direction as the underlying edge
    sense: bool = True
    loop: Loop = NONE_REF  # parent/owner
    unknown: int = 0  # only in SAB file!?
    pcurve: PCurve = NONE_REF  # not implemented

    def restore_common(self, loader: DataLoader, entity_factory: Factory) -> None:
        # pointer/flag order must match the serialized record
        super().restore_common(loader, entity_factory)
        self.next_coedge = restore_entity("coedge", loader, entity_factory)
        self.prev_coedge = restore_entity("coedge", loader, entity_factory)
        self.partner_coedge = restore_entity("coedge", loader, entity_factory)
        self.edge = restore_entity("edge", loader, entity_factory)
        self.sense = loader.read_bool("reversed", "forward")
        self.loop = restore_entity("loop", loader, entity_factory)
        # skip_sat=0: value only present in SAB files
        self.unknown = loader.read_int(skip_sat=0)
        self.pcurve = restore_entity("pcurve", loader, entity_factory)

    def write_common(self, exporter: DataExporter) -> None:
        # same order as restore_common() reads
        super().write_common(exporter)
        exporter.write_ptr(self.next_coedge)
        exporter.write_ptr(self.prev_coedge)
        exporter.write_ptr(self.partner_coedge)
        exporter.write_ptr(self.edge)
        exporter.write_bool(self.sense, "reversed", "forward")
        exporter.write_ptr(self.loop)
        # TODO: write_int() ?
        exporter.write_int(0, skip_sat=True)
        exporter.write_ptr(self.pcurve)

    def add_partner_coedge(self, coedge: Coedge) -> None:
        """Insert `coedge` into the circular partner list of this coedge.

        The new coedge must not already have a partner.
        """
        assert coedge.partner_coedge.is_none
        partner_coedge = self.partner_coedge
        if partner_coedge.is_none:
            # self becomes the second member of the new circular list
            partner_coedge = self
        # insert new coedge as first partner coedge:
        self.partner_coedge = coedge
        coedge.partner_coedge = partner_coedge
        self.order_partner_coedges()

    def order_partner_coedges(self) -> None:
        # todo: the referenced faces of non-manifold coedges have to be ordered
        # by the right-hand rule around this edge.
        pass

    def partner_coedges(self) -> list[Coedge]:
        """Returns all partner coedges of this coedge without `self`."""
        coedges: list[Coedge] = []
        partner_coedge = self.partner_coedge
        if partner_coedge.is_none:
            return coedges
        while True:
            coedges.append(partner_coedge)
            partner_coedge = partner_coedge.partner_coedge
            # stop at an open chain end or when the circle closes at self
            if partner_coedge.is_none or partner_coedge is self:
                break
        return coedges
|
||||
|
||||
|
||||
@register
class Edge(SupportsPattern):
    """ACIS 'edge' record: a bounded portion of a curve between two vertices."""

    type: str = "edge"

    # The parent edge of the start_vertex doesn't have to be this edge!
    start_vertex: Vertex = NONE_REF
    start_param: float = 0.0  # curve parameter at the start vertex

    # The parent edge of the end_vertex doesn't have to be this edge!
    end_vertex: Vertex = NONE_REF
    end_param: float = 0.0  # curve parameter at the end vertex
    coedge: Coedge = NONE_REF  # one of the coedges referencing this edge
    curve: Curve = NONE_REF  # underlying curve geometry
    # sense: True = reversed; False = forward;
    # forward: edge has the same direction as the underlying curve
    sense: bool = False
    convexity: str = "unknown"

    def restore_common(self, loader: DataLoader, entity_factory: Factory) -> None:
        # params and convexity only exist in files >= Features.TOL_MODELING
        super().restore_common(loader, entity_factory)
        self.start_vertex = restore_entity("vertex", loader, entity_factory)
        if loader.version >= Features.TOL_MODELING:
            self.start_param = loader.read_double()
        self.end_vertex = restore_entity("vertex", loader, entity_factory)
        if loader.version >= Features.TOL_MODELING:
            self.end_param = loader.read_double()
        self.coedge = restore_entity("coedge", loader, entity_factory)
        self.curve = restore_entity("curve", loader, entity_factory)
        self.sense = loader.read_bool("reversed", "forward")
        if loader.version >= Features.TOL_MODELING:
            self.convexity = loader.read_str()

    def write_common(self, exporter: DataExporter) -> None:
        # write support >= version 700 only
        super().write_common(exporter)
        exporter.write_ptr(self.start_vertex)
        exporter.write_double(self.start_param)
        exporter.write_ptr(self.end_vertex)
        exporter.write_double(self.end_param)
        exporter.write_ptr(self.coedge)
        exporter.write_ptr(self.curve)
        exporter.write_bool(self.sense, "reversed", "forward")
        exporter.write_str(self.convexity)
|
||||
|
||||
|
||||
@register
class PCurve(SupportsPattern):  # not implemented
    """ACIS 'pcurve' record placeholder (loaded but not interpreted)."""

    type: str = "pcurve"
|
||||
|
||||
|
||||
@register
class Vertex(SupportsPattern):
    """ACIS 'vertex' record: references a point location and a parent edge."""

    type: str = "vertex"
    edge: Edge = NONE_REF  # one edge using this vertex
    ref_count: int = 0  # only in SAB files
    point: Point = NONE_REF  # spatial location

    def restore_common(self, loader: DataLoader, entity_factory: Factory) -> None:
        super().restore_common(loader, entity_factory)
        self.edge = restore_entity("edge", loader, entity_factory)
        # skip_sat=0: value only present in SAB files
        self.ref_count = loader.read_int(skip_sat=0)
        self.point = restore_entity("point", loader, entity_factory)

    def write_common(self, exporter: DataExporter) -> None:
        # same order as restore_common() reads
        super().write_common(exporter)
        exporter.write_ptr(self.edge)
        exporter.write_int(self.ref_count, skip_sat=True)
        exporter.write_ptr(self.point)
|
||||
|
||||
|
||||
@register
class Curve(SupportsPattern):
    """Abstract base for parametric curve records; stores the parameter
    bounds (INF means unbounded).
    """

    type: str = "curve"
    bounds = INF, INF

    def restore_data(self, loader: DataLoader) -> None:
        self.bounds = loader.read_interval(), loader.read_interval()

    def write_data(self, exporter: DataExporter) -> None:
        # same order as restore_data() reads
        exporter.write_interval(self.bounds[0])
        exporter.write_interval(self.bounds[1])

    @abc.abstractmethod
    def evaluate(self, param: float) -> Vec3:
        """Returns the spatial location at the parametric curve for the given
        parameter.
        """
        pass
|
||||
|
||||
|
||||
@register
class StraightCurve(Curve):
    """ACIS 'straight-curve' record: an infinite line through `origin` with
    unit `direction`; the parameter is the distance along the line.
    """

    type: str = "straight-curve"
    origin = Vec3(0, 0, 0)
    direction = Vec3(1, 0, 0)

    def restore_data(self, loader: DataLoader) -> None:
        # origin and direction precede the bounds restored by the base class
        self.origin = Vec3(loader.read_vec3())
        self.direction = Vec3(loader.read_vec3())
        super().restore_data(loader)

    def write_data(self, exporter: DataExporter) -> None:
        # same order as restore_data() reads
        exporter.write_loc_vec3(self.origin)
        exporter.write_dir_vec3(self.direction)
        super().write_data(exporter)

    def evaluate(self, param: float) -> Vec3:
        """Return the location origin + param * direction."""
        return self.origin + (self.direction * param)
|
||||
|
||||
|
||||
@register
class Point(SupportsPattern):
    """ACIS 'point' record: a single spatial location."""

    type: str = "point"
    location: Vec3 = NULLVEC

    def restore_data(self, loader: DataLoader) -> None:
        self.location = Vec3(loader.read_vec3())

    def write_data(self, exporter: DataExporter) -> None:
        exporter.write_loc_vec3(self.location)
|
||||
|
||||
|
||||
class FileLoader(abc.ABC):
    """Abstract base for SAT/SAB loaders: turns low level records into linked
    high level :class:`AcisEntity` instances.
    """

    # raw records, filled by the concrete subclass constructor
    records: Sequence[sat.SatEntity | sab.SabEntity]

    def __init__(self, version: int):
        # maps id(raw record) -> high level entity, so shared references
        # resolve to the same Python object
        self.entities: dict[int, AcisEntity] = {}
        self.version: int = version

    def entity_factory(self, raw_entity: AbstractEntity) -> AcisEntity:
        """Return the high level entity for `raw_entity`, creating it on
        first use (identity-cached by the raw record's id()).
        """
        uid = id(raw_entity)
        try:
            return self.entities[uid]
        except KeyError:  # create a new entity
            # unknown record types fall back to the generic AcisEntity
            entity = ENTITY_TYPES.get(raw_entity.name, AcisEntity)()
            self.entities[uid] = entity
            return entity

    def bodies(self) -> list[Body]:
        """Return all loaded :class:`Body` entities."""
        # noinspection PyTypeChecker
        return [e for e in self.entities.values() if isinstance(e, Body)]

    def load_entities(self):
        """Create all high level entities and restore their content from the
        raw records.
        """
        entity_factory = self.entity_factory

        for raw_entity in self.records:
            entity = entity_factory(raw_entity)
            entity.id = raw_entity.id
            attributes = raw_entity.attributes
            if not attributes.is_null_ptr:
                entity.attributes = entity_factory(attributes)
            data_loader = self.make_data_loader(raw_entity.data)
            entity.load(data_loader, entity_factory)

    @abc.abstractmethod
    def make_data_loader(self, data: list[Any]) -> DataLoader:
        # concrete subclasses supply the format specific data loader
        pass
|
||||
|
||||
|
||||
class SabLoader(FileLoader):
    """Loads ACIS entities from binary SAB data."""

    def __init__(self, data: bytes | bytearray):
        builder = sab.parse_sab(data)
        super().__init__(builder.header.version)
        self.records = builder.entities

    def make_data_loader(self, data: list[Any]) -> DataLoader:
        return sab.SabDataLoader(data, self.version)

    @classmethod
    def load(cls, data: bytes | bytearray) -> list[Body]:
        """Parse SAB `data` and return all :class:`Body` entities."""
        loader = cls(data)
        loader.load_entities()
        return loader.bodies()
|
||||
|
||||
|
||||
class SatLoader(FileLoader):
    """Loads ACIS entities from text based SAT data."""

    def __init__(self, data: str | Sequence[str]):
        builder = sat.parse_sat(data)
        super().__init__(builder.header.version)
        self.records = builder.entities

    def make_data_loader(self, data: list[Any]) -> DataLoader:
        return sat.SatDataLoader(data, self.version)

    @classmethod
    def load(cls, data: str | Sequence[str]) -> list[Body]:
        """Parse SAT `data` and return all :class:`Body` entities."""
        loader = cls(data)
        loader.load_entities()
        return loader.bodies()
|
||||
@@ -0,0 +1,110 @@
|
||||
# Copyright (c) 2022-2024, Manfred Moitzi
|
||||
# License: MIT License
|
||||
from __future__ import annotations
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
import struct
|
||||
from . import const
|
||||
|
||||
# ACIS versions exported by BricsCAD:
|
||||
# R2000/AC1015: 400, "ACIS 4.00 NT", text length has no prefix "@"
|
||||
# R2004/AC1018: 20800 @ "ACIS 208.00 NT", text length has "@" prefix ??? weird
|
||||
# R2007/AC1021: 700 @ "ACIS 32.0 NT", text length has "@" prefix
|
||||
# R2010/AC1024: 700 @ "ACIS 32.0 NT", text length has "@" prefix
|
||||
|
||||
# A test showed that R2000 files that contains ACIS v700/32.0 or v20800/208.0
|
||||
# data can be opened by Autodesk TrueView, BricsCAD and Allplan, so exporting
|
||||
# only v700/32.0 for all DXF versions should be OK!
|
||||
# test script: exploration/acis/transplant_acis_data.py
|
||||
|
||||
|
||||
def encode_str(s: str) -> bytes:
    """Encode *s* as a SAB string record: STR tag byte, length byte and the
    UTF-8 payload (characters that cannot be encoded are dropped).
    """
    payload = s.encode("utf8", errors="ignore")
    header = struct.pack("<BB", const.Tags.STR, len(payload))
    return header + payload
|
||||
|
||||
|
||||
def encode_double(value: float) -> bytes:
    """Encode *value* as a SAB double record: DOUBLE tag byte followed by a
    little-endian 64-bit float.
    """
    tag = struct.pack("<B", const.Tags.DOUBLE)
    return tag + struct.pack("<d", value)
|
||||
|
||||
|
||||
@dataclass
class AcisHeader:
    """Represents an ACIS file header."""

    version: int = const.MIN_EXPORT_VERSION  # numeric ACIS version, e.g. 700
    n_records: int = 0  # can be 0
    n_entities: int = 0
    flags: int = 0
    product_id: str = const.EZDXF_BUILDER_ID
    acis_version: str = const.ACIS_VERSION[const.MIN_EXPORT_VERSION]
    creation_date: datetime = field(default_factory=datetime.now)
    units_in_mm: float = 1.0
    asm_version: str = ""  # empty = no ASM header record
    asm_end_marker: bool = False  # depends on DXF version: R2013, RT2018

    @property
    def has_asm_header(self) -> bool:
        # an ASM header record is only written if an ASM version is set
        return self.asm_version != ""

    def dumps(self) -> list[str]:
        """Returns the SAT file header as list of strings."""
        return [
            f"{self.version} {self.n_records} {self.n_entities} {self.flags} ",
            self._header_str(),
            f"{self.units_in_mm:g} 9.9999999999999995e-007 1e-010 ",
        ]

    def dumpb(self) -> bytes:
        """Returns the SAB file header as bytes."""
        buffer: list[bytes] = []
        # newer files use the ASM signature instead of the ACIS signature
        if self.version > 21800:
            buffer.append(const.ASM_SIGNATURE)
        else:
            buffer.append(const.ACIS_SIGNATURE)
        data = struct.pack(
            "<iiii", self.version, self.n_records, self.n_entities, self.flags
        )
        buffer.append(data)
        buffer.append(encode_str(self.product_id))
        buffer.append(encode_str(self.acis_version))
        buffer.append(encode_str(self.creation_date.ctime()))
        buffer.append(encode_double(self.units_in_mm))
        buffer.append(encode_double(const.RES_TOL))
        buffer.append(encode_double(const.NOR_TOL))
        return b"".join(buffer)

    def _header_str(self) -> str:
        # versions > 400 prefix string lengths with "@", older versions don't
        p_len = len(self.product_id)
        a_len = len(self.acis_version)
        date = self.creation_date.ctime()
        if self.version > 400:
            return f"@{p_len} {self.product_id} @{a_len} {self.acis_version} @{len(date)} {date} "
        else:
            return f"{p_len} {self.product_id} {a_len} {self.acis_version} {len(date)} {date} "

    def set_version(self, version: int) -> None:
        """Sets the ACIS version as an integer value and updates the version
        string accordingly.

        Raises:
            ValueError: `version` is not a known ACIS version number
        """
        try:
            self.acis_version = const.ACIS_VERSION[version]
            self.version = version
        except KeyError:
            raise ValueError(f"invalid ACIS version number {version}")
        # not every version has an ASM version string
        self.asm_version = const.ASM_VERSION.get(version, "")

    def asm_header(self):
        """Return an :class:`AsmHeader` entity for the current ASM version."""
        from .entities import AsmHeader

        return AsmHeader(self.asm_version)

    def sat_end_marker(self) -> str:
        """Return the SAT end-of-data marker (ASM or ACIS flavor)."""
        if self.asm_end_marker:
            return const.END_OF_ASM_DATA_SAT + " "
        else:
            return const.END_OF_ACIS_DATA_SAT + " "

    def sab_end_marker(self) -> bytes:
        """Return the SAB end-of-data marker (ASM or ACIS flavor)."""
        if self.asm_end_marker:
            return const.END_OF_ASM_DATA_SAB
        else:
            return const.END_OF_ACIS_DATA_SAB
|
||||
@@ -0,0 +1,415 @@
|
||||
# Copyright (c) 2022-2024, Manfred Moitzi
|
||||
# License: MIT License
|
||||
from __future__ import annotations
|
||||
from typing import Iterator, Sequence, Optional, Iterable
|
||||
from ezdxf.render import MeshVertexMerger, MeshTransformer, MeshBuilder
|
||||
from ezdxf.math import Matrix44, Vec3, NULLVEC, BoundingBox
|
||||
from . import entities
|
||||
from .entities import Body, Lump, NONE_REF, Face, Shell
|
||||
|
||||
|
||||
def mesh_from_body(body: Body, merge_lumps=True) -> list[MeshTransformer]:
    """Returns a list of :class:`~ezdxf.render.MeshTransformer` instances from
    the given ACIS :class:`Body` entity.
    The list contains multiple meshes if `merge_lumps` is ``False`` or just a
    single mesh if `merge_lumps` is ``True``.

    The ACIS format stores the faces in counter-clockwise orientation where the
    face-normal points outwards (away) from the solid body (material).

    .. note::

        This function returns meshes build up only from flat polygonal
        :class:`Face` entities, for a tessellation of more complex ACIS
        entities (spline surfaces, tori, cones, ...) is an ACIS kernel
        required which `ezdxf` does not provide.

    Args:
        body: ACIS entity of type :class:`Body`
        merge_lumps: returns all :class:`Lump` entities
            from a body as a single mesh if ``True`` otherwise each :class:`Lump`
            entity is a separated mesh

    Raises:
        TypeError: given `body` entity has invalid type

    """
    if not isinstance(body, Body):
        raise TypeError(f"expected a body entity, got: {type(body)}")

    meshes: list[MeshTransformer] = []
    # MeshVertexMerger de-duplicates coincident vertices
    builder = MeshVertexMerger()
    for faces in flat_polygon_faces_from_body(body):
        for face in faces:
            builder.add_face(face)
        if not merge_lumps:
            # one finished mesh per lump, start a fresh builder
            meshes.append(MeshTransformer.from_builder(builder))
            builder = MeshVertexMerger()
    if merge_lumps:
        # single mesh accumulating all lumps
        meshes.append(MeshTransformer.from_builder(builder))
    return meshes
|
||||
|
||||
|
||||
def flat_polygon_faces_from_body(
    body: Body,
) -> Iterator[list[Sequence[Vec3]]]:
    """Yields all flat polygon faces from all lumps in the given
    :class:`Body` entity.
    Yields a separated list of faces for each linked :class:`Lump` entity.

    Args:
        body: ACIS entity of type :class:`Body`

    Raises:
        TypeError: given `body` entity has invalid type

    """

    if not isinstance(body, Body):
        raise TypeError(f"expected a body entity, got: {type(body)}")
    lump = body.lump
    transform = body.transform

    # the body transformation (if any) is applied to every lump's vertices
    m: Optional[Matrix44] = None
    if not transform.is_none:
        m = transform.matrix
    while not lump.is_none:
        yield list(flat_polygon_faces_from_lump(lump, m))
        lump = lump.next_lump
|
||||
|
||||
|
||||
def flat_polygon_faces_from_lump(
    lump: Lump, m: Matrix44 | None = None
) -> Iterator[Sequence[Vec3]]:
    """Yields all flat polygon faces from the given :class:`Lump` entity as
    sequence of :class:`~ezdxf.math.Vec3` instances. Applies the transformation
    :class:`~ezdxf.math.Matrix44` `m` to all vertices if not ``None``.

    Args:
        lump: :class:`Lump` entity
        m: optional transformation matrix

    Raises:
        TypeError: `lump` has invalid ACIS type

    """
    if not isinstance(lump, Lump):
        raise TypeError(f"expected a lump entity, got: {type(lump)}")

    shell = lump.shell
    if shell.is_none:
        return  # not a shell
    vertices: list[Vec3] = []
    face = shell.face
    while not face.is_none:
        first_coedge = NONE_REF
        vertices.clear()
        # only planar surfaces can produce flat polygon faces
        if face.surface.type == "plane-surface":
            try:
                first_coedge = face.loop.coedge
            except AttributeError:  # loop is a none-entity
                pass
        coedge = first_coedge
        while not coedge.is_none:  # invalid coedge or face is not closed
            # the edge entity contains the vertices and the curve type
            edge = coedge.edge
            try:
                # only straight lines as face edges supported:
                if edge.curve.type != "straight-curve":
                    break
                # add the first edge vertex to the face vertices
                if coedge.sense:  # reversed sense of the underlying edge
                    vertices.append(edge.end_vertex.point.location)
                else:  # same sense as the underlying edge
                    vertices.append(edge.start_vertex.point.location)
            except AttributeError:
                # one of the involved entities is a none-entity or an
                # incompatible entity type -> ignore this face!
                break
            coedge = coedge.next_coedge
            if coedge is first_coedge:  # a valid closed face
                if m is not None:
                    yield tuple(m.transform_vertices(vertices))
                else:
                    yield tuple(vertices)
                break
        face = face.next_face
|
||||
|
||||
|
||||
def body_from_mesh(mesh: MeshBuilder, precision: int = 6) -> Body:
    """Returns a :term:`ACIS` :class:`~ezdxf.acis.entities.Body` entity from a
    :class:`~ezdxf.render.MeshBuilder` instance.

    This entity can be assigned to a :class:`~ezdxf.entities.Solid3d` DXF entity
    as :term:`SAT` or :term:`SAB` data according to the version your DXF
    document uses (SAT for DXF R2000 to R2010 and SAB for DXF R2013 and later).

    If the `mesh` contains multiple separated meshes, each mesh will be a
    separated :class:`~ezdxf.acis.entities.Lump` node.
    If each mesh should get its own :class:`~ezdxf.acis.entities.Body` entity,
    separate the meshes beforehand by the method
    :meth:`~ezdxf.render.MeshBuilder.separate_meshes`.

    A closed mesh creates a solid body and an open mesh creates an open (hollow)
    shell. The detection if the mesh is open or closed is based on the edges
    of the mesh: if **all** edges of mesh have two adjacent faces the mesh is
    closed.

    The current implementation applies automatically a vertex optimization,
    which merges coincident vertices into a single vertex.

    Args:
        mesh: :class:`~ezdxf.render.MeshBuilder` instance
        precision: floating point precision (decimal places) for the vertex
            optimization

    """
    mesh = mesh.optimize_vertices(precision)
    body = Body()
    bbox = BoundingBox(mesh.vertices)
    if not bbox.center.is_null:
        # Store the mesh centered at the origin and restore its final location
        # by an ACIS transform entity attached to the body.
        mesh.translate(-bbox.center)
        transform = entities.Transform()
        transform.matrix = Matrix44.translate(*bbox.center)
        body.transform = transform

    # fix: the loop variable previously shadowed the `mesh` argument
    for sub_mesh in mesh.separate_meshes():
        lump = lump_from_mesh(sub_mesh)
        body.append_lump(lump)
    return body
|
||||
|
||||
|
||||
def lump_from_mesh(mesh: MeshBuilder) -> Lump:
    """Returns a :class:`~ezdxf.acis.entities.Lump` entity from a
    :class:`~ezdxf.render.MeshBuilder` instance. The `mesh` has to be a single
    body or shell!
    """
    builder = PolyhedronFaceBuilder(mesh)
    shell = Shell()
    for acis_face in builder.acis_faces():
        shell.append_face(acis_face)
    lump = Lump()
    lump.append_shell(shell)
    return lump
|
||||
|
||||
|
||||
class PolyhedronFaceBuilder:
    """Builds ACIS faces (plane surface, loop, coedges, edges, vertices) for
    the flat polygon faces of a :class:`MeshBuilder` instance.
    """

    def __init__(self, mesh: MeshBuilder):
        # Work on a copy so the caller's mesh stays untouched.
        mesh_copy = mesh.copy()
        mesh_copy.normalize_faces()  # open faces without duplicates!
        self.vertices: list[Vec3] = mesh_copy.vertices
        self.faces: list[Sequence[int]] = mesh_copy.faces
        self.normals = list(mesh_copy.face_normals())
        self.acis_vertices: list[entities.Vertex] = []

        # double_sided:
        # If every edge belongs to two faces the body is for sure a closed
        # surface. But the "is_edge_balance_broken" property can not detect
        # non-manifold meshes!
        # - True: the body is an open shell, each side of the face is outside
        #   (environment side)
        # - False: the body is a closed solid body, one side points outwards of
        #   the body (environment side) and one side points inwards (material
        #   side)
        self.double_sided = mesh_copy.diagnose().is_edge_balance_broken

        # coedges and edges ledger, where index1 <= index2
        self.partner_coedges: dict[tuple[int, int], entities.Coedge] = dict()
        self.edges: dict[tuple[int, int], entities.Edge] = dict()

    def reset(self):
        """Resets the builder state: fresh ACIS vertices, empty ledgers."""
        self.acis_vertices = list(make_vertices(self.vertices))
        self.partner_coedges.clear()
        self.edges.clear()

    def acis_faces(self) -> list[Face]:
        """Returns one ACIS face per mesh face; degenerate faces (null normal
        or collapsed first edge) are skipped.
        """
        self.reset()
        faces: list[Face] = []
        for face, face_normal in zip(self.faces, self.normals):
            if face_normal.is_null:
                continue
            acis_face = Face()
            plane = self.make_plane(face)
            if plane is None:
                continue
            plane.normal = face_normal
            loop = self.make_loop(face)
            if loop is None:
                continue
            acis_face.append_loop(loop)
            acis_face.surface = plane
            acis_face.sense = False  # face normal is plane normal
            acis_face.double_sided = self.double_sided
            faces.append(acis_face)
        # The link structure of all entities is only completed at the end of
        # the building process. Do not yield faces from the body of the loop!
        return faces

    def make_plane(self, face: Sequence[int]) -> Optional[entities.Plane]:
        """Returns the plane surface for `face` or ``None`` if the first two
        vertices coincide.
        """
        assert len(face) > 1, "face requires least 2 vertices"
        plane = entities.Plane()
        # normal is always calculated by the right-hand rule:
        plane.reverse_v = False
        plane.origin = self.vertices[face[0]]
        try:
            plane.u_dir = (self.vertices[face[1]] - plane.origin).normalize()
        except ZeroDivisionError:
            return None  # vertices are too close together
        return plane

    def make_loop(self, face: Sequence[int]) -> Optional[entities.Loop]:
        """Returns a closed loop of coedges for `face`.

        NOTE(review): this implementation never returns ``None``; the Optional
        return type mirrors :meth:`make_plane` — confirm before relying on it.
        """
        coedges: list[entities.Coedge] = []
        # face2 is face rotated by one position, so zip() pairs consecutive
        # vertex indices; append the start index to close an open face.
        face2 = list(face[1:])
        if face[0] != face[-1]:
            face2.append(face[0])

        for i1, i2 in zip(face, face2):
            coedge = self.make_coedge(i1, i2)
            coedge.edge, coedge.sense = self.make_edge(i1, i2, coedge)
            coedges.append(coedge)
        loop = entities.Loop()
        loop.set_coedges(coedges, close=True)
        return loop

    def make_coedge(self, index1: int, index2: int) -> entities.Coedge:
        """Creates a new coedge and registers/links it with the partner coedge
        of the same undirected edge (key is the sorted index pair).
        """
        if index1 > index2:
            key = index2, index1
        else:
            key = index1, index2
        coedge = entities.Coedge()
        try:
            partner_coedge = self.partner_coedges[key]
        except KeyError:
            self.partner_coedges[key] = coedge
        else:
            partner_coedge.add_partner_coedge(coedge)
        return coedge

    def make_edge(
        self, index1: int, index2: int, parent: entities.Coedge
    ) -> tuple[entities.Edge, bool]:
        """Returns the unified edge for the vertex pair and the sense flag:
        ``True`` if (index1, index2) is reversed against the stored edge.
        Edges are cached in the ledger so both adjacent faces share one edge.
        """

        def make_vertex(index: int):
            vertex = self.acis_vertices[index]
            vertex.ref_count += 1
            # assign first edge which references the vertex as parent edge (?):
            if vertex.edge.is_none:
                vertex.edge = edge
            return vertex

        sense = False
        ex1 = index1  # vertex index of unified edges
        ex2 = index2  # vertex index of unified edges
        if ex1 > ex2:
            sense = True
            ex1, ex2 = ex2, ex1
        try:
            return self.edges[ex1, ex2], sense
        except KeyError:
            pass
        # The edge has always the same direction as the underlying
        # straight curve:
        edge = entities.Edge()
        edge.coedge = parent  # first coedge which references this edge
        edge.sense = False
        edge.start_vertex = make_vertex(ex1)
        edge.start_param = 0.0
        edge.end_vertex = make_vertex(ex2)
        edge.end_param = self.vertices[ex1].distance(self.vertices[ex2])
        edge.curve = self.make_ray(ex1, ex2)
        self.edges[ex1, ex2] = edge
        return edge, sense

    def make_ray(self, index1: int, index2: int) -> entities.StraightCurve:
        """Returns the straight curve from vertex index1 towards index2."""
        v1 = self.vertices[index1]
        v2 = self.vertices[index2]
        ray = entities.StraightCurve()
        ray.origin = v1
        try:
            ray.direction = (v2 - v1).normalize()
        except ZeroDivisionError:  # avoided by normalize_faces()
            ray.direction = NULLVEC
        return ray
|
||||
|
||||
|
||||
def make_vertices(vertices: Iterable[Vec3]) -> Iterator[entities.Vertex]:
    """Yields an ACIS vertex entity (with attached point) per location."""
    for location in vertices:
        acis_point = entities.Point()
        acis_point.location = location
        acis_vertex = entities.Vertex()
        acis_vertex.point = acis_point
        yield acis_vertex
|
||||
|
||||
|
||||
def vertices_from_body(body: Body) -> list[Vec3]:
    """Returns all stored vertices in the given :class:`Body` entity.
    The result is not optimized, meaning the vertices are in no particular order and
    there are duplicates.

    This function can be useful to determining the approximate bounding box of an
    :term:`ACIS` entity. The result is exact for polyhedra with flat faces with
    straight edges, but not for bodies with curved edges and faces.

    Args:
        body: ACIS entity of type :class:`Body`

    Raises:
        TypeError: given `body` entity has invalid type

    """
    if not isinstance(body, Body):
        raise TypeError(f"expected a body entity, got: {type(body)}")

    # apply the body transformation, if one is attached
    transform = body.transform
    m: Optional[Matrix44] = transform.matrix if not transform.is_none else None

    result: list[Vec3] = []
    lump = body.lump
    while not lump.is_none:
        result += vertices_from_lump(lump, m)
        lump = lump.next_lump
    return result
|
||||
|
||||
|
||||
def vertices_from_lump(lump: Lump, m: Matrix44 | None = None) -> list[Vec3]:
    """Returns all stored vertices from a given :class:`Lump` entity.
    Applies the transformation :class:`~ezdxf.math.Matrix44` `m` to all vertices if not
    ``None``.

    Args:
        lump: :class:`Lump` entity
        m: optional transformation matrix

    Raises:
        TypeError: `lump` has invalid ACIS type

    """
    if not isinstance(lump, Lump):
        raise TypeError(f"expected a lump entity, got: {type(lump)}")

    vertices: list[Vec3] = []
    shell = lump.shell
    if shell.is_none:
        return vertices  # not a shell

    # walk the linked list of faces, collecting both end points of every edge
    face = shell.face
    while not face.is_none:
        first_coedge = NONE_REF
        try:
            first_coedge = face.loop.coedge
        except AttributeError:  # loop is a none-entity
            pass
        coedge = first_coedge
        while not coedge.is_none:  # invalid coedge or face is not closed
            # the edge entity contains the vertices and the curve type
            edge = coedge.edge
            try:
                vertices.append(edge.start_vertex.point.location)
                vertices.append(edge.end_vertex.point.location)
            except AttributeError:
                # one of the involved entities is a none-entity or an
                # incompatible entity type -> ignore this face!
                break
            coedge = coedge.next_coedge
            if coedge is first_coedge:  # a valid closed face
                break
        face = face.next_face
    if m is not None:
        return list(m.transform_vertices(vertices))
    return vertices
|
||||
@@ -0,0 +1,527 @@
|
||||
# Copyright (c) 2022-2024, Manfred Moitzi
|
||||
# License: MIT License
|
||||
from __future__ import annotations
|
||||
from typing import (
|
||||
NamedTuple,
|
||||
Any,
|
||||
Sequence,
|
||||
Iterator,
|
||||
Union,
|
||||
Iterable,
|
||||
cast,
|
||||
TYPE_CHECKING,
|
||||
List,
|
||||
Tuple,
|
||||
Optional,
|
||||
)
|
||||
from typing_extensions import TypeAlias
|
||||
import math
|
||||
import struct
|
||||
from datetime import datetime
|
||||
|
||||
from ezdxf.math import Vec3
|
||||
from . import const
|
||||
from .const import ParsingError, Tags, InvalidLinkStructure
|
||||
from .hdr import AcisHeader
|
||||
from .abstract import (
|
||||
AbstractEntity,
|
||||
DataLoader,
|
||||
AbstractBuilder,
|
||||
DataExporter,
|
||||
EntityExporter,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .entities import AcisEntity
|
||||
|
||||
|
||||
class Token(NamedTuple):
    """Named tuple to store tagged value tokens of the SAB format."""

    tag: int
    value: Any

    def __str__(self):
        # hex tag (2 digits) followed by the stringified value
        return "(0x{:02x}, {})".format(self.tag, str(self.value))
|
||||
|
||||
|
||||
SabRecord: TypeAlias = List[Token]
|
||||
|
||||
|
||||
class Decoder:
    """Decodes raw SAB data: header, tagged values and records."""

    def __init__(self, data: bytes):
        self.data = data
        # read cursor into self.data:
        self.index: int = 0

    @property
    def has_data(self) -> bool:
        """``True`` if unread bytes are left."""
        return self.index < len(self.data)

    def read_header(self) -> AcisHeader:
        """Parses the SAB file header.

        Raises:
            ParsingError: data does not start with a known SAB signature
        """
        header = AcisHeader()
        for signature in const.SIGNATURES:
            if self.data.startswith(signature):
                self.index = len(signature)
                break
        else:
            raise ParsingError("not a SAB file")
        header.version = self.read_int()
        header.n_records = self.read_int()
        header.n_entities = self.read_int()
        header.flags = self.read_int()
        header.product_id = self.read_str_tag()
        header.acis_version = self.read_str_tag()
        date = self.read_str_tag()
        header.creation_date = datetime.strptime(date, const.DATE_FMT)
        header.units_in_mm = self.read_double_tag()
        # tolerances are ignored
        _ = self.read_double_tag()  # res_tol
        _ = self.read_double_tag()  # nor_tol
        return header

    def forward(self, count: int):
        """Advances the cursor by `count` bytes and returns the old position."""
        pos = self.index
        self.index += count
        return pos

    def read_byte(self) -> int:
        pos = self.forward(1)
        return self.data[pos]

    def read_bytes(self, count: int) -> bytes:
        pos = self.forward(count)
        return self.data[pos : pos + count]

    def read_int(self) -> int:
        # little-endian signed 32-bit integer
        pos = self.forward(4)
        values = struct.unpack_from("<i", self.data, pos)[0]
        return values

    def read_float(self) -> float:
        # little-endian 64-bit double
        pos = self.forward(8)
        return struct.unpack_from("<d", self.data, pos)[0]

    def read_floats(self, count: int) -> Sequence[float]:
        pos = self.forward(8 * count)
        return struct.unpack_from(f"<{count}d", self.data, pos)

    def read_str(self, length) -> str:
        text = self.read_bytes(length)
        return text.decode()

    def read_str_tag(self) -> str:
        """Reads a length-prefixed string preceded by the STR tag byte."""
        tag = self.read_byte()
        if tag != Tags.STR:
            raise ParsingError("string tag (7) not found")
        return self.read_str(self.read_byte())

    def read_double_tag(self) -> float:
        """Reads a double preceded by the DOUBLE tag byte."""
        tag = self.read_byte()
        if tag != Tags.DOUBLE:
            raise ParsingError("double tag (6) not found")
        return self.read_float()

    def read_record(self) -> SabRecord:
        """Reads all tokens of one record up to the RECORD_END tag.

        Raises:
            ParsingError: premature end of data or unknown tag
        """

        def entity_name():
            # dashed entity names are split over ENTITY_TYPE_EX tokens
            return "-".join(entity_type)

        values: SabRecord = []
        entity_type: list[str] = []
        subtype_level: int = 0
        while True:
            if not self.has_data:
                # a data-end marker record may lack the RECORD_END tag
                if values:
                    token = values[0]
                    if token.value in const.DATA_END_MARKERS:
                        return values
                raise ParsingError("pre-mature end of data")
            tag = self.read_byte()
            if tag == Tags.INT:
                values.append(Token(tag, self.read_int()))
            elif tag == Tags.DOUBLE:
                values.append(Token(tag, self.read_float()))
            elif tag == Tags.STR:
                values.append(Token(tag, self.read_str(self.read_byte())))
            elif tag == Tags.POINTER:
                # raw record index; resolved later by resolve_pointers()
                values.append(Token(tag, self.read_int()))
            elif tag == Tags.BOOL_TRUE:
                values.append(Token(tag, True))
            elif tag == Tags.BOOL_FALSE:
                values.append(Token(tag, False))
            elif tag == Tags.LITERAL_STR:
                # literal strings use a 4-byte length prefix
                values.append(Token(tag, self.read_str(self.read_int())))
            elif tag == Tags.ENTITY_TYPE_EX:
                entity_type.append(self.read_str(self.read_byte()))
            elif tag == Tags.ENTITY_TYPE:
                entity_type.append(self.read_str(self.read_byte()))
                values.append(Token(tag, entity_name()))
                entity_type.clear()
            elif tag == Tags.LOCATION_VEC:
                values.append(Token(tag, self.read_floats(3)))
            elif tag == Tags.DIRECTION_VEC:
                values.append(Token(tag, self.read_floats(3)))
            elif tag == Tags.ENUM:
                values.append(Token(tag, self.read_int()))
            elif tag == Tags.UNKNOWN_0x17:
                values.append(Token(tag, self.read_float()))
            elif tag == Tags.SUBTYPE_START:
                subtype_level += 1
                values.append(Token(tag, subtype_level))
            elif tag == Tags.SUBTYPE_END:
                values.append(Token(tag, subtype_level))
                subtype_level -= 1
            elif tag == Tags.RECORD_END:
                return values
            else:
                raise ParsingError(
                    f"unknown SAB tag: 0x{tag:x} ({tag}) in entity '{values[0].value}'"
                )

    def read_records(self) -> Iterator[SabRecord]:
        """Yields records until the data is exhausted."""
        while True:
            try:
                if self.has_data:
                    yield self.read_record()
                else:
                    return
            except IndexError:
                # truncated data: stop iteration gracefully
                return
|
||||
|
||||
|
||||
class SabEntity(AbstractEntity):
    """Low level representation of an ACIS entity (node)."""

    def __init__(
        self,
        name: str,
        attr_ptr: int = -1,
        id: int = -1,
        data: Optional[SabRecord] = None,
    ):
        self.name = name
        self.attr_ptr = attr_ptr
        self.id = id
        if data is None:
            data = []
        self.data: SabRecord = data
        self.attributes: "SabEntity" = None  # type: ignore

    def __str__(self):
        return "{}({})".format(self.name, self.id)
|
||||
|
||||
|
||||
# Singleton sentinel for unset/absent entity references in SAB data:
NULL_PTR = SabEntity(const.NULL_PTR_NAME, -1, -1, tuple())  # type: ignore
|
||||
|
||||
|
||||
class SabDataLoader(DataLoader):
    """Loads high level AcisEntity data from a low level SAB token record."""

    def __init__(self, data: SabRecord, version: int):
        self.version = version
        self.data = data
        self.index = 0  # read cursor into self.data

    def has_data(self) -> bool:
        """Returns ``True`` if unread tokens are left.

        Fix: was ``self.index <= len(self.data)``, which still reported data
        for an exhausted record and raised IndexError on the next read; the
        sibling ``Decoder.has_data`` uses the strict comparison as well.
        """
        return self.index < len(self.data)

    def read_int(self, skip_sat: Optional[int] = None) -> int:
        """There are sometimes additional int values in SAB files which are
        not present in SAT files, maybe reference counters e.g. vertex, coedge.
        """
        token = self.data[self.index]
        if token.tag == Tags.INT:
            self.index += 1
            return cast(int, token.value)
        raise ParsingError(f"expected int token, got {token}")

    def read_double(self) -> float:
        token = self.data[self.index]
        if token.tag == Tags.DOUBLE:
            self.index += 1
            return cast(float, token.value)
        raise ParsingError(f"expected double token, got {token}")

    def read_interval(self) -> float:
        """Reads a finite interval value or returns ``math.inf``."""
        finite = self.read_bool("F", "I")
        if finite:
            return self.read_double()
        return math.inf

    def read_vec3(self) -> tuple[float, float, float]:
        token = self.data[self.index]
        if token.tag in (Tags.LOCATION_VEC, Tags.DIRECTION_VEC):
            self.index += 1
            return cast(Tuple[float, float, float], token.value)
        raise ParsingError(f"expected vector token, got {token}")

    def read_bool(self, true: str, false: str) -> bool:
        # `true`/`false` marker strings exist only in the SAT format; SAB
        # stores dedicated boolean tags.
        token = self.data[self.index]
        if token.tag == Tags.BOOL_TRUE:
            self.index += 1
            return True
        elif token.tag == Tags.BOOL_FALSE:
            self.index += 1
            return False
        raise ParsingError(f"expected bool token, got {token}")

    def read_str(self) -> str:
        token = self.data[self.index]
        if token.tag in (Tags.STR, Tags.LITERAL_STR):
            self.index += 1
            return cast(str, token.value)
        raise ParsingError(f"expected str token, got {token}")

    def read_ptr(self) -> AbstractEntity:
        token = self.data[self.index]
        if token.tag == Tags.POINTER:
            self.index += 1
            return cast(AbstractEntity, token.value)
        raise ParsingError(f"expected pointer token, got {token}")

    def read_transform(self) -> list[float]:
        # Transform matrix is stored as literal string like in the SAT format!
        # 4th column is not stored
        # Read only the matrix values which contain all information needed,
        # the additional data are only hints for the kernel how to process
        # the data (rotation, reflection, scaling, shearing).
        values = self.read_str().split(" ")
        return [float(v) for v in values[:12]]
|
||||
|
||||
|
||||
class SabBuilder(AbstractBuilder):
    """Low level data structure to manage ACIS SAB data files."""

    def __init__(self) -> None:
        self.header = AcisHeader()
        self.bodies: list[SabEntity] = []
        self.entities: list[SabEntity] = []

    def dump_sab(self) -> bytes:
        """Returns the SAB representation of the ACIS file as bytes."""
        self.reorder_records()
        n_entities = len(self.bodies)
        if self.header.has_asm_header:
            n_entities += 1
        self.header.n_entities = n_entities
        self.header.n_records = 0  # is always 0
        self.header.flags = 12  # important for 21800 - meaning unknown
        encoder = Encoder()
        for record in build_sab_records(self.entities):
            encoder.write_record(record)
        chunks: list[bytes] = [self.header.dumpb()]
        chunks.extend(encoder.buffer)
        chunks.append(self.header.sab_end_marker())
        return b"".join(chunks)

    def set_entities(self, entities: list[SabEntity]) -> None:
        """Reset entities and bodies list. (internal API)"""
        self.entities = entities
        self.bodies = [e for e in entities if e.name == "body"]
|
||||
|
||||
|
||||
class SabExporter(EntityExporter[SabEntity]):
    """Exports high level AcisEntity objects as SAB records."""

    def make_record(self, entity: AcisEntity) -> SabEntity:
        """Returns a new low level record for `entity`."""
        sab_entity = SabEntity(entity.type, id=entity.id)
        sab_entity.attributes = NULL_PTR
        return sab_entity

    def make_data_exporter(self, record: SabEntity) -> DataExporter:
        """Returns a data exporter writing into the data of `record`."""
        return SabDataExporter(self, record.data)

    def dump_sab(self) -> bytes:
        """Exports all records and returns the binary SAB representation."""
        builder = SabBuilder()
        builder.header = self.header
        builder.set_entities(self.export_records())
        return builder.dump_sab()
|
||||
|
||||
|
||||
def build_entities(
    records: Iterable[SabRecord], version: int
) -> Iterator[SabEntity]:
    """Yields a SabEntity for each raw record; stops at a data-end marker."""
    has_ids = version >= 700
    for record in records:
        assert record[0].tag == Tags.ENTITY_TYPE, "invalid entity-name tag"
        name = record[0].value  # 1. entity-name
        if name in const.DATA_END_MARKERS:
            yield SabEntity(name)
            return
        assert record[1].tag == Tags.POINTER, "invalid attribute pointer tag"
        attr = record[1].value  # 2. attribute record pointer
        if has_ids:
            assert record[2].tag == Tags.INT, "invalid id tag"
            # 3. int id precedes the entity data
            yield SabEntity(name, attr, record[2].value, record[3:])
        else:
            yield SabEntity(name, attr, -1, record[2:])
|
||||
|
||||
|
||||
def resolve_pointers(entities: list[SabEntity]) -> list[SabEntity]:
    """Replaces raw int pointer values by SabEntity references in place."""

    def deref(num: int) -> SabEntity:
        return NULL_PTR if num == -1 else entities[num]

    for e in entities:
        e.attributes = deref(e.attr_ptr)
        e.attr_ptr = -1
        for i, token in enumerate(e.data):
            if token.tag == Tags.POINTER:
                e.data[i] = Token(token.tag, deref(token.value))
    return entities
|
||||
|
||||
|
||||
def parse_sab(data: Union[bytes, bytearray]) -> SabBuilder:
    """Returns the :class:`SabBuilder` for the ACIS :term:`SAB` file content
    given as ``bytes`` or ``bytearray``.

    Raises:
        TypeError: `data` is not ``bytes`` or ``bytearray``
        ParsingError: invalid or unsupported ACIS data structure

    """
    if not isinstance(data, (bytes, bytearray)):
        raise TypeError("expected bytes, bytearray")
    builder = SabBuilder()
    decoder = Decoder(data)
    builder.header = decoder.read_header()
    entities = list(
        build_entities(decoder.read_records(), builder.header.version)
    )
    # replace raw pointer indices by entity references:
    builder.set_entities(resolve_pointers(entities))
    return builder
|
||||
|
||||
|
||||
class SabDataExporter(DataExporter):
    """Writes AcisEntity data as tagged tokens into a SAB record."""

    def __init__(self, exporter: SabExporter, data: list[Token]):
        self.version = exporter.version
        self.exporter = exporter
        self.data = data

    def _push(self, tag: "Tags", value: Any) -> None:
        # every writer funnels through this single append helper
        self.data.append(Token(tag, value))

    def write_int(self, value: int, skip_sat=False) -> None:
        """There are sometimes additional int values in SAB files which are
        not present in SAT files, maybe reference counters e.g. vertex, coedge.
        """
        self._push(Tags.INT, value)

    def write_double(self, value: float) -> None:
        self._push(Tags.DOUBLE, value)

    def write_interval(self, value: float) -> None:
        if math.isinf(value):
            self._push(Tags.BOOL_FALSE, False)  # infinite "I"
        else:
            self._push(Tags.BOOL_TRUE, True)  # finite "F"
            self.write_double(value)

    def write_loc_vec3(self, value: Vec3) -> None:
        self._push(Tags.LOCATION_VEC, value)

    def write_dir_vec3(self, value: Vec3) -> None:
        self._push(Tags.DIRECTION_VEC, value)

    def write_bool(self, value: bool, true: str, false: str) -> None:
        if value:
            self._push(Tags.BOOL_TRUE, True)
        else:
            self._push(Tags.BOOL_FALSE, False)

    def write_str(self, value: str) -> None:
        self._push(Tags.STR, value)

    def write_literal_str(self, value: str) -> None:
        self._push(Tags.LITERAL_STR, value)

    def write_ptr(self, entity: AcisEntity) -> None:
        if entity.is_none:
            record = NULL_PTR
        else:
            record = self.exporter.get_record(entity)
        self._push(Tags.POINTER, record)

    def write_transform(self, data: list[str]) -> None:
        # The last space is important!
        self.write_literal_str(" ".join(data) + " ")
|
||||
|
||||
|
||||
def encode_entity_type(name: str) -> list[Token]:
    """Encodes a (possibly dashed) entity type name as SAB tokens."""
    if name == const.NULL_PTR_NAME:
        raise InvalidLinkStructure(
            f"invalid record type: {const.NULL_PTR_NAME}"
        )
    *prefixes, last = name.split("-")
    tokens = [Token(Tags.ENTITY_TYPE_EX, prefix) for prefix in prefixes]
    tokens.append(Token(Tags.ENTITY_TYPE, last))
    return tokens
|
||||
|
||||
|
||||
def encode_entity_ptr(entity: SabEntity, entities: list[SabEntity]) -> Token:
    """Encodes a pointer token referencing `entity` by its index in `entities`.

    The null-pointer sentinel is encoded as index -1.

    Raises:
        InvalidLinkStructure: `entity` is not stored in `entities`

    """
    if entity.is_null_ptr:
        return Token(Tags.POINTER, -1)
    try:
        return Token(Tags.POINTER, entities.index(entity))
    except ValueError as err:
        # fix: chain the cause for easier debugging of broken link structures
        raise InvalidLinkStructure(
            f"entity {str(entity)} not in record storage"
        ) from err
|
||||
|
||||
|
||||
def build_sab_records(entities: list[SabEntity]) -> Iterator[SabRecord]:
    """Yields one complete token record per entity, with pointers and
    nested entity types encoded.
    """
    for entity in entities:
        tokens: SabRecord = list(encode_entity_type(entity.name))
        # 1. attribute record pointer
        tokens.append(encode_entity_ptr(entity.attributes, entities))
        # 2. int id
        tokens.append(Token(Tags.INT, entity.id))
        for token in entity.data:
            if token.tag == Tags.POINTER:
                tokens.append(encode_entity_ptr(token.value, entities))
            elif token.tag == Tags.ENTITY_TYPE:
                tokens.extend(encode_entity_type(token.value))
            else:
                tokens.append(token)
        yield tokens
|
||||
|
||||
|
||||
# Pre-encoded single-byte tag markers, appended as-is when dumping SAB data:
END_OF_RECORD = bytes([Tags.RECORD_END.value])
TRUE_RECORD = bytes([Tags.BOOL_TRUE.value])
FALSE_RECORD = bytes([Tags.BOOL_FALSE.value])
SUBTYPE_START_RECORD = bytes([Tags.SUBTYPE_START.value])
SUBTYPE_END_RECORD = bytes([Tags.SUBTYPE_END.value])
# text encoding for strings embedded in SAB data:
SAB_ENCODING = "utf8"
|
||||
|
||||
|
||||
class Encoder:
    """Encodes SAB token records into their binary representation."""

    def __init__(self) -> None:
        # accumulated binary chunks, joined by the caller:
        self.buffer: list[bytes] = []

    def write_record(self, record: SabRecord) -> None:
        """Encodes all tokens of `record` followed by the end-of-record tag."""
        for token in record:
            self.write_token(token)
        self.buffer.append(END_OF_RECORD)

    def write_token(self, token: Token) -> None:
        """Appends the binary representation of a single token.

        Raises:
            ValueError: token has an unsupported tag
        """
        tag = token.tag
        if tag in (Tags.INT, Tags.POINTER, Tags.ENUM):
            assert isinstance(token.value, int)
            # tag byte + little-endian signed 32-bit int
            self.buffer.append(struct.pack("<Bi", tag, token.value))
        elif tag == Tags.DOUBLE:
            assert isinstance(token.value, float)
            # tag byte + little-endian 64-bit double
            self.buffer.append(struct.pack("<Bd", tag, token.value))
        elif tag == Tags.STR:
            assert isinstance(token.value, str)
            # tag byte + 1-byte length prefix + encoded string
            data = token.value.encode(encoding=SAB_ENCODING)
            self.buffer.append(struct.pack("<BB", tag, len(data)) + data)
        elif tag == Tags.LITERAL_STR:
            assert isinstance(token.value, str)
            # tag byte + 4-byte length prefix + encoded string
            data = token.value.encode(encoding=SAB_ENCODING)
            self.buffer.append(struct.pack("<Bi", tag, len(data)) + data)
        elif tag in (Tags.ENTITY_TYPE, Tags.ENTITY_TYPE_EX):
            assert isinstance(token.value, str)
            data = token.value.encode(encoding=SAB_ENCODING)
            self.buffer.append(struct.pack("<BB", tag, len(data)) + data)
        elif tag in (Tags.LOCATION_VEC, Tags.DIRECTION_VEC):
            v = token.value
            assert isinstance(v, Vec3)
            # tag byte + three little-endian doubles
            self.buffer.append(struct.pack("<B3d", tag, v.x, v.y, v.z))
        elif tag == Tags.BOOL_TRUE:
            self.buffer.append(TRUE_RECORD)
        elif tag == Tags.BOOL_FALSE:
            self.buffer.append(FALSE_RECORD)
        elif tag == Tags.SUBTYPE_START:
            self.buffer.append(SUBTYPE_START_RECORD)
        elif tag == Tags.SUBTYPE_END:
            self.buffer.append(SUBTYPE_END_RECORD)
        else:
            raise ValueError(f"invalid tag in token: {token}")
|
||||
@@ -0,0 +1,431 @@
|
||||
# Copyright (c) 2022-2024, Manfred Moitzi
|
||||
# License: MIT License
|
||||
from __future__ import annotations
|
||||
from typing import (
|
||||
Any,
|
||||
Sequence,
|
||||
Iterator,
|
||||
Union,
|
||||
List,
|
||||
TYPE_CHECKING,
|
||||
Optional,
|
||||
)
|
||||
from typing_extensions import TypeAlias
|
||||
import math
|
||||
from datetime import datetime
|
||||
|
||||
from . import const
|
||||
from .const import ParsingError, InvalidLinkStructure
|
||||
from .hdr import AcisHeader
|
||||
from .abstract import (
|
||||
AbstractEntity,
|
||||
AbstractBuilder,
|
||||
DataLoader,
|
||||
DataExporter,
|
||||
EntityExporter,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .entities import AcisEntity
|
||||
from ezdxf.math import Vec3
|
||||
|
||||
SatRecord: TypeAlias = List[str]
|
||||
|
||||
|
||||
class SatEntity(AbstractEntity):
    """Low level representation of an ACIS entity (node)."""

    def __init__(
        self,
        name: str,
        attr_ptr: str = "$-1",
        id: int = -1,
        data: Optional[list[Any]] = None,
    ):
        self.name = name
        self.attr_ptr = attr_ptr
        self.id = id
        if data is None:
            data = []
        self.data: list[Any] = data
        self.attributes: "SatEntity" = None  # type: ignore

    def __str__(self):
        return "{}({})".format(self.name, self.id)
|
||||
|
||||
|
||||
# Singleton sentinel for unset/absent entity references in SAT data:
NULL_PTR = SatEntity("null-ptr", "$-1", -1, tuple())  # type: ignore
|
||||
|
||||
|
||||
def new_entity(
    name: str,
    attributes=NULL_PTR,
    id=-1,
    data: Optional[list[Any]] = None,
) -> SatEntity:
    """Factory to create new ACIS entities.

    Args:
        name: entity type
        attributes: reference to the entity attributes or :attr:`NULL_PTR`.
        id: unique entity id as integer or -1
        data: generic data container as list

    """
    entity = SatEntity(name, "$-1", id, data)
    entity.attributes = attributes
    return entity
|
||||
|
||||
|
||||
def is_ptr(s: str) -> bool:
    """Returns ``True`` if the string `s` represents an entity pointer."""
    # pointers are written as "$<index>"
    return s.startswith("$")
|
||||
|
||||
|
||||
def resolve_str_pointers(entities: list[SatEntity]) -> list[SatEntity]:
    """Replaces "$n" pointer strings by SatEntity references in place."""

    def deref(s: str) -> SatEntity:
        num = int(s[1:])
        return NULL_PTR if num == -1 else entities[num]

    for entity in entities:
        entity.attributes = deref(entity.attr_ptr)
        entity.attr_ptr = "$-1"
        entity.data = [
            deref(field) if is_ptr(field) else field for field in entity.data
        ]
    return entities
|
||||
|
||||
|
||||
class SatDataLoader(DataLoader):
    """Loads high level AcisEntity data from a low level SAT record."""

    def __init__(self, data: list[Any], version: int):
        self.version = version
        self.data = data
        self.index = 0  # read cursor into self.data

    def has_data(self) -> bool:
        """Returns ``True`` if unread entries are left.

        Fix: was ``self.index <= len(self.data)``, which still reported data
        for an exhausted record and raised IndexError on the next read.
        """
        return self.index < len(self.data)

    def read_int(self, skip_sat: Optional[int] = None) -> int:
        """Reads an int entry; `skip_sat` is returned unchanged because such
        values exist only in SAB files, not in SAT files.
        """
        if skip_sat is not None:
            return skip_sat

        entry = self.data[self.index]
        try:
            value = int(entry)
        except ValueError as err:
            # fix: chain the cause for easier debugging
            raise ParsingError(f"expected integer, got {entry}") from err
        self.index += 1
        return value

    def read_double(self) -> float:
        entry = self.data[self.index]
        try:
            value = float(entry)
        except ValueError as err:
            raise ParsingError(f"expected double, got {entry}") from err
        self.index += 1
        return value

    def read_interval(self) -> float:
        """Reads a finite interval value or returns ``math.inf``."""
        finite = self.read_bool("F", "I")
        if finite:
            return self.read_double()
        return math.inf

    def read_vec3(self) -> tuple[float, float, float]:
        x = self.read_double()
        y = self.read_double()
        z = self.read_double()
        return x, y, z

    def read_bool(self, true: str, false: str) -> bool:
        value = self.data[self.index]
        if value == true:
            self.index += 1
            return True
        elif value == false:
            self.index += 1
            return False
        raise ParsingError(
            f"expected bool string '{true}' or '{false}', got {value}"
        )

    def read_str(self) -> str:
        # Strings occupy two entries: a marker ("@<len>") and the content.
        # NOTE(review): for versions < Features.AT any entry is accepted as
        # the marker — confirm against older SAT samples.
        value = self.data[self.index]
        if self.version < const.Features.AT or value.startswith("@"):
            self.index += 2
            return self.data[self.index - 1]
        raise ParsingError(f"expected string, got {value}")

    def read_ptr(self) -> AbstractEntity:
        entity = self.data[self.index]
        if isinstance(entity, AbstractEntity):
            self.index += 1
            return entity
        raise ParsingError(f"expected pointer, got {type(entity)}")

    def read_transform(self) -> list[float]:
        # 4th column is not stored
        # Read only the matrix values which contain all information needed,
        # the additional data are only hints for the kernel how to process
        # the data (rotation, reflection, scaling, shearing).
        return [self.read_double() for _ in range(12)]
|
||||
|
||||
|
||||
class SatBuilder(AbstractBuilder):
    """Low level data structure to manage ACIS SAT data files."""

    def __init__(self) -> None:
        self.header = AcisHeader()
        self.bodies: list[SatEntity] = []
        self.entities: list[SatEntity] = []
        self._export_mapping: dict[int, SatEntity] = {}

    def dump_sat(self) -> list[str]:
        """Returns the text representation of the ACIS file as list of strings
        without line endings.

        Raise:
            InvalidLinkStructure: referenced ACIS entity is not stored in
                the :attr:`entities` storage

        """
        self.reorder_records()
        # the optional ASM header counts as one additional entity
        self.header.n_entities = (
            len(self.bodies) + int(self.header.has_asm_header)
        )
        if self.header.version == 700:
            # record count is ignored by old ACIS versions
            self.header.n_records = 0
        else:
            self.header.n_records = len(self.entities)
        lines = self.header.dumps()
        lines.extend(build_str_records(self.entities, self.header.version))
        lines.append(self.header.sat_end_marker())
        return lines

    def set_entities(self, entities: list[SatEntity]) -> None:
        """Reset entities and bodies list. (internal API)"""
        self.entities = entities
        self.bodies = [e for e in entities if e.name == "body"]
|
||||
|
||||
|
||||
class SatExporter(EntityExporter[SatEntity]):
    """Exports high level AcisEntity objects as SAT records."""

    def make_record(self, entity: AcisEntity) -> SatEntity:
        """Create an empty SAT record for `entity`."""
        sat_entity = SatEntity(entity.type, id=entity.id)
        sat_entity.attributes = NULL_PTR
        return sat_entity

    def make_data_exporter(self, record: SatEntity) -> DataExporter:
        """Return a data exporter writing into the record's data list."""
        return SatDataExporter(self, record.data)

    def dump_sat(self) -> list[str]:
        """Return the exported records as SAT file content, one string per
        line without line endings.
        """
        sat_builder = SatBuilder()
        sat_builder.header = self.header
        sat_builder.set_entities(self.export_records())
        return sat_builder.dump_sat()
|
||||
|
||||
|
||||
def build_str_records(entities: list[SatEntity], version: int) -> Iterator[str]:
    """Yield one "#"-terminated SAT record string per entity."""

    def pointer_token(e: SatEntity) -> str:
        # pointers are encoded as "$<index>" into the record storage
        if e is NULL_PTR:
            return "$-1"
        try:
            return f"${entities.index(e)}"
        except ValueError:
            raise InvalidLinkStructure(f"entity {str(e)} not in record storage")

    for entity in entities:
        parts = [entity.name, pointer_token(entity.attributes)]
        if version >= 700:
            # newer versions store an explicit entity id
            parts.append(str(entity.id))
        parts.extend(
            pointer_token(item) if isinstance(item, SatEntity) else str(item)
            for item in entity.data
        )
        parts.append("#")
        yield " ".join(parts)
|
||||
|
||||
|
||||
def parse_header_str(s: str) -> Iterator[str]:
|
||||
num = ""
|
||||
collect = 0
|
||||
token = ""
|
||||
for c in s.rstrip():
|
||||
if collect > 0:
|
||||
token += c
|
||||
collect -= 1
|
||||
if collect == 0:
|
||||
yield token
|
||||
token = ""
|
||||
elif c == "@":
|
||||
continue
|
||||
elif c in "0123456789":
|
||||
num += c
|
||||
elif c == " " and num:
|
||||
collect = int(num)
|
||||
num = ""
|
||||
|
||||
|
||||
def parse_header(data: Sequence[str]) -> tuple[AcisHeader, Sequence[str]]:
    """Parse the three SAT header lines and return the header and the
    remaining record lines.
    """
    header = AcisHeader()
    # line 1: "<version> <n_records> <n_entities> <flags>"
    fields = data[0].split()
    header.version = int(fields[0])
    try:
        header.n_records = int(fields[1])
        header.n_entities = int(fields[2])
        header.flags = int(fields[3])
    except (IndexError, ValueError):
        pass  # optional counters may be missing or malformed

    # line 2: "@"-prefixed product id, ACIS version and creation date
    fields = list(parse_header_str(data[1]))
    try:
        header.product_id = fields[0]
        header.acis_version = fields[1]
    except IndexError:
        pass

    if len(fields) > 2:
        try:  # e.g. "Sat Jan 1 10:00:00 2022"
            header.creation_date = datetime.strptime(fields[2], const.DATE_FMT)
        except ValueError:
            pass

    # line 3: units scaling factor
    fields = data[2].split()
    try:
        header.units_in_mm = float(fields[0])
    except (IndexError, ValueError):
        pass
    return header, data[3:]
|
||||
|
||||
|
||||
def _filter_records(data: Sequence[str]) -> Iterator[str]:
    """Yield record lines up to (exclusive) the end-of-data or the
    begin-of-history marker.
    """
    stop_markers = (
        const.END_OF_ACIS_DATA_SAT,
        const.BEGIN_OF_ACIS_HISTORY_DATA,
    )
    for line in data:
        if line.startswith(stop_markers):
            return
        yield line
|
||||
|
||||
|
||||
def merge_record_strings(data: Sequence[str]) -> Iterator[str]:
    """Join wrapped record lines and yield one string per "#"-terminated
    record, whitespace-stripped and without the terminator.
    """
    for raw_record in " ".join(_filter_records(data)).split("#"):
        stripped = raw_record.strip()
        if stripped:
            yield stripped
|
||||
|
||||
|
||||
def parse_records(data: Sequence[str]) -> list[SatRecord]:
    """Split the merged SAT record strings into token lists and validate
    the optional leading sequence numbers.

    Raises:
        ParsingError: sequence numbers are not continuous
    """
    records: list[SatRecord] = []
    for seq_num, line in enumerate(merge_record_strings(data)):
        record: SatRecord = line.split()
        head = record[0].strip()
        if head.startswith("-"):
            # a leading "-<num>" token is an explicit sequence number
            if -int(head) != seq_num:
                raise ParsingError(
                    "non-continuous sequence numbers not supported"
                )
            record.pop(0)
        records.append(record)
    return records
|
||||
|
||||
|
||||
def build_entities(
    records: Sequence[SatRecord], version: int
) -> list[SatEntity]:
    """Create :class:`SatEntity` objects from raw token records."""
    result: list[SatEntity] = []
    for record in records:
        name, attr_ptr = record[0], record[1]
        if version >= 700:
            # records of version >= 700 carry an explicit entity id
            entity = SatEntity(name, attr_ptr, int(record[2]), record[3:])
        else:
            entity = SatEntity(name, attr_ptr, -1, record[2:])
        result.append(entity)
    return result
|
||||
|
||||
|
||||
def parse_sat(s: Union[str, Sequence[str]]) -> SatBuilder:
    """Returns the :class:`SatBuilder` for the ACIS :term:`SAT` file content
    given as string or list of strings.

    Raises:
        ParsingError: invalid or unsupported ACIS data structure

    """
    content: Sequence[str] = s.splitlines() if isinstance(s, str) else s
    if not isinstance(content, Sequence):
        raise TypeError("expected as string or a sequence of strings")
    builder = SatBuilder()
    header, record_lines = parse_header(content)
    builder.header = header
    raw_records = parse_records(record_lines)
    entities = build_entities(raw_records, header.version)
    builder.set_entities(resolve_str_pointers(entities))
    return builder
|
||||
|
||||
|
||||
class SatDataExporter(DataExporter):
    """Writes the generic data fields of a single SAT record."""

    def __init__(self, exporter: SatExporter, data: list[Any]):
        self.version = exporter.version
        self.exporter = exporter
        self.data = data

    def write_int(self, value: int, skip_sat=False) -> None:
        """There are sometimes additional int values in SAB files which are
        not present in SAT files, maybe reference counters e.g. vertex, coedge.
        """
        if skip_sat:
            return  # value exists only in SAB files
        self.data.append(str(value))

    def write_double(self, value: float) -> None:
        """Append a float in compact "%g" notation."""
        self.data.append(f"{value:g}")

    def write_interval(self, value: float) -> None:
        """Append an interval: "I" for infinite, "F" + value for finite."""
        if math.isinf(value):
            self.data.append("I")  # infinite
            return
        self.data.append("F")  # finite
        self.write_double(value)

    def write_loc_vec3(self, value: Vec3) -> None:
        """Append a location vector as three doubles."""
        for component in (value.x, value.y, value.z):
            self.write_double(component)

    def write_dir_vec3(self, value: Vec3) -> None:
        """Append a direction vector as three doubles."""
        for component in (value.x, value.y, value.z):
            self.write_double(component)

    def write_bool(self, value: bool, true: str, false: str) -> None:
        """Append one of the two marker strings for `value`."""
        self.data.append(true if value else false)

    def write_str(self, value: str) -> None:
        """Append a string with its "@<length>" prefix token."""
        self.data.append(f"@{len(value)}")
        self.data.append(str(value))

    def write_literal_str(self, value: str) -> None:
        self.write_str(value)  # just for SAB files important

    def write_ptr(self, entity: AcisEntity) -> None:
        """Append the record reference for `entity`, NULL_PTR for none."""
        if entity.is_none:
            self.data.append(NULL_PTR)
        else:
            self.data.append(self.exporter.get_record(entity))

    def write_transform(self, data: list[str]) -> None:
        # 4th column is not stored
        self.data.extend(data)
|
||||
@@ -0,0 +1,7 @@
|
||||
# Copyright (c) 2024, Manfred Moitzi
|
||||
# License: MIT License
|
||||
from __future__ import annotations
|
||||
from typing import Sequence, Union
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
EncodedData: TypeAlias = Union[str, Sequence[str], bytes, bytearray]
|
||||
Reference in New Issue
Block a user