import sys

sys.path.append('/media/digitalstorm/edf2449b-a566-40a2-8d0f-393bbe8ea1c0/maxwell_td_hybrid_mesher/HybridMesher')  # Add the HybridMesher directory to sys.path for module imports

print('/home/qjlim2/TIME_DOMAIN_MAXWELL/HybridMesher/')

import Geometry as geo            # Custom mesh/geometry handling module (pybind11 C++ bindings)
import numpy as np
import pandas as pd
from numpy import linalg as la
from scipy.spatial import cKDTree  # Used by auto_merge_duplicate_nodes
from Geometry import Vector       # pybind11 C++ binding


#######################################################
#######################################################
##                                                   ##
##                 PART TO MODIFY                    ##
##                                                   ##
#######################################################
#######################################################

directory = "./"      # Path to the model files
fileName = "S_ABC"    # Name of the .g and .json files

unit = 1.0            # Unit of the input (in meters) -> 1.0 for meters or 2.54e-2 for inches
mesh_size = 0.02      # Mesh size in the unit used
unit2trunc = 4        # Number of significant digits in the regular mesh

PEC = 1               # PEC in the .bc file
ABC_PW = 2            # PW + ABC in the .bc file
ABC = 3               # ABC in the .bc file
Interface_PW = 4      # Interface + PW in the .bc file
NCBC = 1000           # NCBC in the .bc file

number_of_layers = 5  # Number of PML layers to generate
sigma = 0.01


########################################
##        AUXILIARY FUNCTIONS         ##
########################################

def floor(number, sig_figs):
    # Truncate toward zero at the given number of decimal places.
    return np.trunc(number * (10 ** sig_figs)) / (10 ** sig_figs)


# Handles scalars or arrays and truncates each entry to the desired number of significant figures.
def truncate_to_significant_figures(number, sig_figs):
    isNumber = False
    if not isinstance(number, np.ndarray):
        number = np.array([number])
        isNumber = True

    maskLower = number < 0
    maskGreater = number > 0

    order_of_magnitude = np.zeros_like(number)
    order_of_magnitude[maskGreater] = np.floor(np.log10(number[maskGreater]))
    order_of_magnitude[maskLower] = np.floor(np.log10(-number[maskLower]))

    if isNumber:
        return floor(number, -(order_of_magnitude - sig_figs + 1))[0]

    return floor(number, -(order_of_magnitude - sig_figs + 1))

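# Illustrative sketch (not called anywhere in this script): expected behaviour of the
# truncation helpers above. The values in the comments are the intended outputs.
def _example_truncation():
    print(floor(3.14159, 2))                             # 3.14   (truncated, not rounded)
    print(truncate_to_significant_figures(0.023456, 2))  # 0.023  (2 significant figures)
    print(truncate_to_significant_figures(np.array([123.456, -0.25]), 3))
    # -> [123.0, -0.25]  (each entry truncated to 3 significant figures, sign preserved)
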
def build_face_bc_array(mesh, BC, PEC, ABC, NCBC):

    number_mesh_elems = mesh.info.num_elems
    number_mesh_faces = mesh.info.num_face_sets

    faceBC = np.zeros((number_mesh_elems, 4), dtype=np.int64)

    print(number_mesh_faces, " face sets found in the mesh.")

    # Loop over each face set in the mesh (each face set groups faces sharing the same BC)
    # and assign the corresponding face BC code.
    for i in range(number_mesh_faces):
        bc_val = 0
        bc_type = mesh.get_face_set_bc(i)
        if bc_type == BC.PEC:
            bc_val = PEC
        elif bc_type == BC.ABC:
            bc_val = ABC
        elif bc_type == BC.NCB:
            bc_val = NCBC

        # Each face in the set is identified by a global face ID.
        # faceID // 4 gives the element index.
        # faceID % 4 gives the local face number (0–3).
        # This maps the BC code to the correct element face in faceBC.
        for face_id in mesh.face_set(i):
            elem_idx = face_id // 4
            local_face = face_id % 4
            faceBC[elem_idx][local_face] = bc_val

    return faceBC

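# Small worked example of the face-ID decoding used above (assumed 4-faces-per-element numbering):
# global face ID 10 -> element index 10 // 4 = 2, local face 10 % 4 = 2,
# so a PEC face set containing ID 10 sets faceBC[2][2] = PEC.
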
def build_element_and_bc_arrays(mesh, faceBC, verticesMap, offset=0):

    # Pre-allocate space for all tetrahedral elements and face BCs.
    # Each tetrahedron has 4 vertices and 4 faces.
    number_mesh_elems = mesh.info.num_elems
    number_mesh_faces = mesh.info.num_face_sets
    number_mesh_sets = mesh.info.num_elem_sets

    # Element and BC arrays for this mesh:
    # elementArray[i] = [v0, v1, v2, v3] stores the vertex indices (global IDs) of the
    # i-th tetrahedral element.
    # bcArray[i] = [bc0, bc1, bc2, bc3] stores boundary condition codes for each face (0–3) of element i.
    elementArray = np.empty((number_mesh_elems, 4), dtype=np.int64)
    bcArray = np.zeros((number_mesh_elems, 4), dtype=np.int64)
    nElements = 0

    for i in range(number_mesh_sets):

        # mesh.elem_set(i) returns a (4, num_elems) array: vertex indices of each tetrahedron in set i
        elemSet = np.array(mesh.elem_set(i))
        num_elems = elemSet.shape[1]

        # verticesMap[...] remaps vertex indices to global deduplicated indices
        # + offset shifts local indices (e.g., when merging meshes)
        row_range = range(nElements, nElements + num_elems)
        elementArray[row_range, :] = verticesMap[elemSet + offset].T

        # faceBC[...] holds boundary condition codes per face (e.g., PEC, ABC, etc.)
        # The face permutation [3, 1, 2, 0] is used to reorder face BCs consistently
        bcArray[row_range, :] = faceBC[row_range][:, [3, 1, 2, 0]]

        nElements += num_elems

    # Sort vertex indices within each tetrahedron to normalize ordering (for hashing, comparison, deduplication).
    # Also permute boundary conditions accordingly so they stay matched to the correct face.
    sorted_indices = np.argsort(elementArray, axis=1)
    elementArray = np.take_along_axis(elementArray, sorted_indices, axis=1)
    bcArray = np.take_along_axis(bcArray, sorted_indices, axis=1)

    return elementArray, bcArray

def snap_to_grid(reference_node, grid_size):
    # Snap each coordinate to the nearest multiple of grid_size (symmetric about zero).
    reference_node = np.asarray(reference_node)
    abs_node = np.abs(reference_node)
    shifted = abs_node + (grid_size / 2.0)
    floored = np.floor(shifted / grid_size)
    snapped_unsigned = floored * grid_size
    snapped = snapped_unsigned * np.sign(reference_node)
    return snapped

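# Illustrative sketch (not called anywhere): snapping a point to a 0.02 grid with snap_to_grid above.
# The commented value is the intended output (up to floating-point formatting).
def _example_snap_to_grid():
    print(snap_to_grid([0.031, -0.009, 0.047], 0.02))  # ≈ [0.04, -0.0, 0.04]
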
def snap_vertices_relative_to_center(regVertices, referenceNode, regularElementSize, unit2trunc):
    """
    Snap regVertices to a grid aligned with a coarsely-snapped referenceNode.

    Parameters:
    - regVertices: np.ndarray, shape (3, N)
        The vertices to be snapped to a regular grid (one column per vertex).
    - referenceNode: np.ndarray, shape (3,)
        The point around which snapping is aligned.
    - regularElementSize: float
        The fine grid spacing.
    - unit2trunc: float
        The coarser unit used to truncate the offset from referenceNode.

    Returns:
    - snapped_vertices: np.ndarray, shape (N, 3)
        The grid-aligned version of regVertices.
    - offset_trunc: np.ndarray, shape (3,)
        The truncated offset used for the alignment.
    """
    regVertices = np.asarray(regVertices)
    referenceNode = np.asarray(referenceNode)

    # Step 1: Snap referenceNode to the regular grid
    snapped = snap_to_grid(referenceNode, regularElementSize)

    # Step 2: Offset from the actual reference to the snapped reference
    offset = referenceNode - snapped

    print("Offset from referenceNode to snapped grid:", offset)

    # Step 3: Truncate that offset to a coarser grid
    offset_trunc = np.floor(offset / unit2trunc) * unit2trunc

    # Step 4: Center vertices relative to this truncated offset, then snap
    centered_vertices = regVertices - offset_trunc[:, None]  # shape (3, N)

    # Apply grid snapping around the center
    snapped_vertices = (
        np.floor((np.abs(centered_vertices) + regularElementSize / 2) / regularElementSize)
        * regularElementSize
        * np.sign(centered_vertices)
        + offset_trunc[:, None]
    ).T

    return snapped_vertices, offset_trunc


def snap_offset(vertices, regularElementSize, offset_trunc):

    # Center vertices relative to the truncated offset, then snap
    centered_vertices = vertices - offset_trunc[:, None]  # shape (3, N)

    # Apply grid snapping around the center
    snapped_vertices = (
        np.floor((np.abs(centered_vertices) + regularElementSize / 2) / regularElementSize)
        * regularElementSize
        * np.sign(centered_vertices)
        + offset_trunc[:, None]
    ).T

    return snapped_vertices

def generate_stacked_pml_layers(base_mesh, number_of_layers, base_size, normal, corner, length, width):
    """
    Generate multiple stacked PML layers from plain numpy inputs
    (converted to geo.Vector internally before calling the C++ layer generator).

    Parameters:
        base_mesh        : Initial VolumeMesh (starting layer)
        number_of_layers : Number of stacked layers to generate
        base_size        : Grid cube size
        normal           : (3,) numpy array of floats
        corner           : (3,) numpy array of floats
        length           : Length along in-plane axis 1
        width            : Width along in-plane axis 2

    Returns:
        layers      : List of VolumeMesh layers
        layer_verts : List of numpy vertex arrays, one per layer (as returned by layer.vertices)
    """
    layers = []
    layer_verts = []

    axis = int(np.argmax(np.abs(normal)))
    direction = np.sign(normal[axis])

    current_mesh = base_mesh

    for n in range(number_of_layers):
        adjusted_corner = corner.copy()
        adjusted_corner[axis] += direction * base_size * n
        print(f"adjusted_corner: {adjusted_corner}")

        # Convert to geo.Vector
        normal_vec = Vector(*normal)
        corner_vec = Vector(*adjusted_corner)

        # Call the C++ layer generator (each layer is generated from base_mesh)
        layer = base_mesh.makeGridPMLLayer(base_size, normal_vec, corner_vec, length, width)

        verts = np.array(layer.vertices)
        print(f"  Vertices: min = {verts.min(axis=1)}, max = {verts.max(axis=1)}")

        layers.append(layer)
        layer_verts.append(verts)
        current_mesh = layer

    return layers, layer_verts

def generate_pml_layer(mesh, base_size, reg_min, reg_max):
|
|
"""
|
|
Generate 6-directional PML layers using bounding box of regular region.
|
|
|
|
Parameters:
|
|
mesh : Initial VolumeMesh
|
|
base_size : Cube size for grid elements
|
|
reg_min : Tuple (x, y, z), lower corner of regular mesh
|
|
reg_max : Tuple (x, y, z), upper corner of regular mesh
|
|
|
|
Returns:
|
|
layers: list of VolumeMesh objects (one per layer)
|
|
layer_verts: list of (N_i, 3) numpy arrays of vertices (one per layer)
|
|
"""
|
|
max_x, max_y, max_z = reg_max
|
|
min_x, min_y, min_z = reg_min
|
|
|
|
Length_X = max_x - min_x
|
|
Length_Y = max_y - min_y
|
|
Length_Z = max_z - min_z
|
|
|
|
number_of_layers = 10
|
|
layers = []
|
|
layer_verts = []
|
|
|
|
directions = [
|
|
("+X", [1.0, 0.0, 0.0], [max_x, min_y, min_z], Length_Y, Length_Z),
|
|
("-X", [-1.0, 0.0, 0.0], [min_x, min_y, min_z], Length_Y, Length_Z),
|
|
("+Y", [0.0, 1.0, 0.0], [min_x, max_y, min_z], Length_X, Length_Z),
|
|
("-Y", [0.0, -1.0, 0.0], [min_x, min_y, min_z], Length_X, Length_Z),
|
|
("+Z", [0.0, 0.0, 1.0], [min_x, min_y, max_z], Length_X, Length_Y),
|
|
("-Z", [0.0, 0.0, -1.0], [min_x, min_y, min_z], Length_X, Length_Y),
|
|
]
|
|
|
|
for label, normal, corner, length, width in directions:
|
|
print(f"\n########## GENERATING PML LAYER {label} ##########")
|
|
print(f"Normal: {normal}, Corner: {corner}, Length: {length}, Width: {width}")
|
|
|
|
sub_layers, sub_verts = generate_stacked_pml_layers(
|
|
mesh, number_of_layers, base_size,
|
|
np.array(normal, dtype=float),
|
|
np.array(corner, dtype=float),
|
|
length, width
|
|
)
|
|
|
|
layers.extend(sub_layers)
|
|
layer_verts.extend(sub_verts)
|
|
|
|
return layers, layer_verts
|
|
|
|
|
|
|
|
def generate_cuboid_pml_layer(mesh, base_size, reg_min, reg_max, layer_thickness=10):
    """
    Generate a full PML box (6-directional shell) with a hollow core surrounding the regular region.

    Parameters:
        mesh           : Initial VolumeMesh object (with method makeGridPMLBox)
        base_size      : Cube side length
        reg_min        : Tuple (x, y, z), lower corner of the regular region
        reg_max        : Tuple (x, y, z), upper corner of the regular region
        layer_thickness: Number of grid cells for PML thickness in each direction

    Returns:
        pml_mesh : VolumeMesh of the PML box
        verts    : numpy array of its vertices (as returned by pml_mesh.vertices)
    """
    reg_min = np.array(reg_min)
    reg_max = np.array(reg_max)
    reg_dims = reg_max - reg_min

    # Compute number of grid cells along each axis (regular region plus PML on both sides)
    num_cells = np.ceil((reg_dims + 2 * layer_thickness * base_size) / base_size).astype(int)
    total_min = reg_min - layer_thickness * base_size
    total_max = reg_max + layer_thickness * base_size

    # Compute indices of the regular (hollow) region in the grid
    inner_start = np.array([layer_thickness, layer_thickness, layer_thickness])
    inner_end = inner_start + np.floor(reg_dims / base_size).astype(int)

    # Create the PML box using the C++ function via pybind11
    pml_mesh = mesh.makeGridPMLBox(
        size=base_size,
        numCells=tuple(num_cells),
        innerStart=tuple(inner_start),
        innerEnd=tuple(inner_end),
        corner=tuple(total_min)
    )

    # Extract the vertices of the generated box
    verts = np.array(pml_mesh.vertices)

    return pml_mesh, verts

def export_bc_surfaces(mesh, name, BC):
|
|
"""
|
|
Extracts and saves STL files for selected boundary conditions.
|
|
|
|
Parameters:
|
|
- mesh: VolumeMesh object (e.g., regular or irregular)
|
|
- name: output name prefix
|
|
- BC: BoundaryCondition enum from geo.BoundaryCondition
|
|
"""
|
|
|
|
print(BC)
|
|
|
|
bc_map = {
|
|
BC.PEC: "pec",
|
|
BC.NCB: "ncbc",
|
|
BC.ABC: "abc",
|
|
BC.PML_Excitation: "pml_excitation",
|
|
BC.PML_ABC: "pml_abc"
|
|
}
|
|
|
|
for bc_type, label in bc_map.items():
|
|
face_ids = mesh.face_sets_with_bc(bc_type)
|
|
if face_ids:
|
|
surfs = mesh.extract_surfaces(face_ids)
|
|
surfs.save_stl(f"{name}_{label}_surfaces.stl")
|
|
print(f"Saved: {name}_{label}_surfaces.stl")
|
|
else:
|
|
print(f"[Warning] No faces found for BC.{label.upper()}")
|
|
|
|
|
|
def print_boundary_conditions():
|
|
print("Available Boundary Conditions:")
|
|
for name in dir(geo.BoundaryCondition):
|
|
if not name.startswith("__"):
|
|
value = getattr(geo.BoundaryCondition, name)
|
|
print(f"BC.{name} = {value}")
|
|
|
|
|
|
|
|
def build_face_bc_array_custom(mesh, BC, selected_BCs, output_BC_Number):
|
|
"""
|
|
Builds an array of boundary condition codes for each element face with custom output values.
|
|
|
|
Parameters:
|
|
- mesh: the VolumeMesh object.
|
|
- BC: the BoundaryCondition enum class.
|
|
- selected_BCs: list of BC enum types to assign, e.g., [BC.PEC, BC.ABC]
|
|
- output_BC_Number: list of integer codes corresponding to selected_BCs, e.g., [1, 2]
|
|
|
|
Returns:
|
|
- faceBC: (num_elems, 4) NumPy array of integer BC codes.
|
|
"""
|
|
assert len(selected_BCs) == len(output_BC_Number), "Length mismatch between BC list and output values"
|
|
|
|
number_mesh_elems = mesh.info.num_elems
|
|
number_mesh_faces = mesh.info.num_face_sets
|
|
faceBC = np.zeros((number_mesh_elems, 4), dtype=np.int64)
|
|
|
|
print(number_mesh_faces, "face sets found in the mesh.")
|
|
|
|
# Build dictionary for fast lookup
|
|
bc_to_value = dict(zip(selected_BCs, output_BC_Number))
|
|
|
|
for i in range(number_mesh_faces):
|
|
bc_type = mesh.get_face_set_bc(i)
|
|
|
|
if bc_type not in bc_to_value:
|
|
continue # skip unselected BCs
|
|
|
|
bc_val = bc_to_value[bc_type]
|
|
|
|
for face_id in mesh.face_set(i):
|
|
elem_idx = face_id // 4
|
|
local_face = face_id % 4
|
|
faceBC[elem_idx][local_face] = bc_val
|
|
|
|
return faceBC
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def auto_merge_duplicate_nodes(totalNodes, tetra_data, tol=1e-8):
    """
    Merges nodes that are within `tol` distance and updates tetra connectivity.

    Parameters:
    - totalNodes: (N, 3) array of node coordinates
    - tetra_data: list of tetrahedra, each as [n0, n1, n2, n3]
    - tol: tolerance to treat nodes as duplicates

    Returns:
    - new_totalNodes: deduplicated node coordinates
    - new_tetra_data: tetra connectivity updated to deduplicated nodes
    """
    tree = cKDTree(totalNodes)
    close_pairs = tree.query_pairs(r=tol)

    # Step 1: Build a mapping from old index -> representative (canonical) index
    # using a union-find structure with path compression.
    parent = np.arange(len(totalNodes))  # Initially, each node is its own parent

    def find(i):
        while parent[i] != i:
            parent[i] = parent[parent[i]]
            i = parent[i]
        return i

    def union(i, j):
        pi, pj = find(i), find(j)
        if pi != pj:
            parent[max(pi, pj)] = min(pi, pj)

    for i, j in close_pairs:
        union(i, j)

    canonical = np.array([find(i) for i in range(len(totalNodes))])

    # Step 2: Get unique canonical coordinates and build the remapping
    _, new_indices, inverse = np.unique(totalNodes[canonical], axis=0, return_index=True, return_inverse=True)
    new_totalNodes = totalNodes[canonical][new_indices]

    # Step 3: Update tetra connectivity using the canonical + inverse mapping
    new_tetra_data = []
    for tet in tetra_data:
        updated = [inverse[canonical[v]] for v in tet]
        new_tetra_data.append(updated)

    return new_totalNodes, new_tetra_data

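# Illustrative sketch (not called anywhere): merging two nearly coincident nodes with
# auto_merge_duplicate_nodes above. Expected outcome: 3 unique nodes remain and the
# tetra is reindexed onto them (the exact indices depend on the sorted node order).
def _example_auto_merge():
    nodes = np.array([
        [0.0, 0.0, 0.0],
        [1.0, 0.0, 0.0],
        [1.0 + 1e-10, 0.0, 0.0],   # duplicate of node 1 within tol=1e-8
        [0.0, 1.0, 0.0],
    ])
    tets = [[0, 1, 2, 3]]
    merged_nodes, merged_tets = auto_merge_duplicate_nodes(nodes, tets, tol=1e-8)
    print(merged_nodes.shape[0])   # expected: 3
    print(merged_tets)             # one tetra referencing only 3 distinct node indices
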
def parse_node_file(lines):
    """
    Parses the `.node` file.
    Line 0 is the unit and line 1 the node count; each node then spans two lines,
    a per-node header line followed by a line with the x y z coordinates.
    """
    num_nodes = int(lines[1].strip())
    totalNodes = []

    for i in range(num_nodes):
        coords_line = lines[3 + i * 2].strip()  # Skip unit, node count, and header lines
        coords = list(map(float, coords_line.split()))
        totalNodes.append(coords)

    return np.array(totalNodes)


def parse_tetra_file(lines):
    """
    Parses the `.tetra` file:
    - First line: number of tets
    - Even lines: [flag node0 node1 node2 node3]
    - Odd lines: face BCs (ignored)
    Returns:
    - tetra_data: list of [n0, n1, n2, n3]
    """
    num_tets = int(lines[0].strip())
    tetra_data = []

    for i in range(num_tets):
        parts = lines[1 + 2 * i].strip().split()
        if len(parts) != 5:
            continue
        _, n0, n1, n2, n3 = map(int, parts)
        tetra_data.append([n0, n1, n2, n3])

    return tetra_data

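# Illustrative sketch (not called anywhere): synthetic in-memory file contents laid out
# the way the parsers above expect them. The commented values are the intended outputs.
def _example_parsers():
    node_lines = [
        "1.0",              # unit
        "2",                # number of nodes
        "3 0 0",            # per-node header
        "0.00 0.00 0.00",   # node 0 coordinates
        "3 0 0",
        "0.02 0.00 0.00",   # node 1 coordinates
    ]
    print(parse_node_file(node_lines))    # -> [[0. 0. 0.], [0.02 0. 0.]]

    tetra_lines = [
        "1",                # number of tets
        "0 0 1 2 3",        # flag + 4 node indices
        "0 0 0 0",          # face BC line (ignored by the parser)
    ]
    print(parse_tetra_file(tetra_lines))  # -> [[0, 1, 2, 3]]
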
|
|
|
|
|
def group_tets_by_shape_material_bc(
|
|
elementArray, bcArray, totalNodes,
|
|
materialArray,
|
|
excluded_bc_values=[],
|
|
tolerance=1e-10):
|
|
"""
|
|
Groups tetrahedra by shape, material, and BC pattern.
|
|
Tets with any face BC in excluded_bc_values are excluded (group ID = 0).
|
|
|
|
Parameters:
|
|
- elementArray: (N, 4) vertex indices per tet
|
|
- bcArray: (N, 4) per-face BC codes
|
|
- totalNodes: (M, 3) node coordinates
|
|
- materialArray: (N,) material ID per tet
|
|
- excluded_bc_values: list of BCs to exclude
|
|
- tolerance: float, edge length quantization threshold
|
|
|
|
Returns:
|
|
- group_indices: (N,) array of group IDs (0 = ungrouped)
|
|
- uniqueGroups: array of unique group IDs
|
|
"""
|
|
num_elems = elementArray.shape[0]
|
|
|
|
# Step 1: Get node positions for each tet
|
|
nodesArray = totalNodes[elementArray] # shape: (N, 4, 3)
|
|
|
|
# Step 2: Create exclusion mask
|
|
exclude_mask = np.isin(bcArray, excluded_bc_values).any(axis=1)
|
|
valid_mask = ~exclude_mask
|
|
valid_indices = np.where(valid_mask)[0]
|
|
|
|
# Step 3: Extract shape info via edge vectors
|
|
valid_nodes = nodesArray[valid_indices]
|
|
edgesArray = np.empty((valid_nodes.shape[0], 3, 3))
|
|
edgesArray[:, 0, :] = valid_nodes[:, 0, :] - valid_nodes[:, 1, :]
|
|
edgesArray[:, 1, :] = valid_nodes[:, 0, :] - valid_nodes[:, 2, :]
|
|
edgesArray[:, 2, :] = valid_nodes[:, 0, :] - valid_nodes[:, 3, :]
|
|
rounded_edges = np.round(edgesArray / tolerance) * tolerance
|
|
shape_key = rounded_edges.reshape(valid_nodes.shape[0], -1)
|
|
|
|
# Step 4: Combine with material and BC
|
|
valid_materials = materialArray[valid_indices].reshape(-1, 1)
|
|
valid_bcs = bcArray[valid_indices]
|
|
full_key = np.hstack([shape_key, valid_materials, valid_bcs])
|
|
|
|
# Step 5: Group by unique key
|
|
_, group_ids = np.unique(full_key, axis=0, return_inverse=True)
|
|
group_ids = group_ids + 1 # reserve group ID 0 for excluded
|
|
|
|
# Step 6: Final group array
|
|
group_indices = np.zeros(num_elems, dtype=np.int64)
|
|
group_indices[valid_indices] = group_ids
|
|
|
|
# Step 7: Recompress
|
|
uniqueGroups, compressed = np.unique(group_indices, return_inverse=True)
|
|
group_indices = compressed
|
|
|
|
# === Diagnostics ===
|
|
print(f"🔢 Total tets: {num_elems}")
|
|
print(f"🚫 Excluded by BC list: {exclude_mask.sum()}")
|
|
print(f"✅ Grouped tets: {len(valid_indices)}")
|
|
print(f"🔁 Unique groups (excluding group 0): {len(uniqueGroups) - 1}")
|
|
|
|
return group_indices, uniqueGroups
|
|
|
|
|
|
|
|
def group_tets_by_box_and_conductivity(
|
|
elementArray, bcArray, totalNodes,
|
|
materialArray,
|
|
region_corner, region_dims, pml_bounds,
|
|
excluded_bc_values=[],
|
|
sigma_base=0.01, m=3, tolerance=1e-10,
|
|
min_group_size=1,
|
|
):
|
|
num_elems = elementArray.shape[0]
|
|
nodesArray = totalNodes[elementArray]
|
|
centroids = nodesArray.mean(axis=1)
|
|
|
|
core_lower = np.asarray(region_corner)
|
|
core_upper = core_lower + np.asarray(region_dims)
|
|
|
|
inside_mask = np.all((centroids >= core_lower) & (centroids <= core_upper), axis=1)
|
|
outside_mask = ~inside_mask
|
|
|
|
group_indices = np.zeros(num_elems, dtype=np.int64)
|
|
|
|
inside_gid_set = set()
|
|
outside_gid_set = set()
|
|
|
|
# ========================
|
|
# Inside grouping
|
|
# ========================
|
|
inside_indices = np.where(inside_mask)[0]
|
|
inside_nodes = nodesArray[inside_indices]
|
|
inside_bcs = bcArray[inside_indices]
|
|
inside_materials = materialArray[inside_indices]
|
|
|
|
exclude_mask = np.isin(inside_bcs, excluded_bc_values).any(axis=1)
|
|
valid_inside_mask = ~exclude_mask
|
|
valid_inside_indices = inside_indices[valid_inside_mask]
|
|
|
|
valid_nodes = nodesArray[valid_inside_indices]
|
|
edges = np.empty((valid_nodes.shape[0], 3, 3))
|
|
edges[:, 0, :] = valid_nodes[:, 0, :] - valid_nodes[:, 1, :]
|
|
edges[:, 1, :] = valid_nodes[:, 0, :] - valid_nodes[:, 2, :]
|
|
edges[:, 2, :] = valid_nodes[:, 0, :] - valid_nodes[:, 3, :]
|
|
shape_key = np.round(edges / tolerance) * tolerance
|
|
shape_key = shape_key.reshape(valid_nodes.shape[0], -1)
|
|
|
|
valid_materials = materialArray[valid_inside_indices].reshape(-1, 1)
|
|
valid_bcs = bcArray[valid_inside_indices]
|
|
key_inside = np.hstack([shape_key, valid_materials, valid_bcs])
|
|
|
|
_, group_ids_inside = np.unique(key_inside, axis=0, return_inverse=True)
|
|
group_ids_inside = group_ids_inside + 1
|
|
group_indices[valid_inside_indices] = group_ids_inside
|
|
next_group_id = group_ids_inside.max() + 1
|
|
|
|
inside_gid_set.update(group_ids_inside)
|
|
|
|
# ========================
|
|
# Outside grouping
|
|
# ========================
|
|
outside_indices = np.where(outside_mask)[0]
|
|
if outside_indices.size > 0:
|
|
outside_nodes = nodesArray[outside_indices]
|
|
outside_materials = materialArray[outside_indices]
|
|
outside_bcs = bcArray[outside_indices]
|
|
|
|
edges_out = np.empty((outside_nodes.shape[0], 3, 3))
|
|
edges_out[:, 0, :] = outside_nodes[:, 0, :] - outside_nodes[:, 1, :]
|
|
edges_out[:, 1, :] = outside_nodes[:, 0, :] - outside_nodes[:, 2, :]
|
|
edges_out[:, 2, :] = outside_nodes[:, 0, :] - outside_nodes[:, 3, :]
|
|
shape_key_out = np.round(edges_out / tolerance) * tolerance
|
|
shape_key_out = shape_key_out.reshape(outside_nodes.shape[0], -1)
|
|
|
|
rc = centroids[outside_indices]
|
|
dx = np.maximum.reduce([region_corner[0] - rc[:, 0], np.zeros_like(rc[:, 0]), rc[:, 0] - (region_corner[0] + region_dims[0])])
|
|
dy = np.maximum.reduce([region_corner[1] - rc[:, 1], np.zeros_like(rc[:, 1]), rc[:, 1] - (region_corner[1] + region_dims[1])])
|
|
dz = np.maximum.reduce([region_corner[2] - rc[:, 2], np.zeros_like(rc[:, 2]), rc[:, 2] - (region_corner[2] + region_dims[2])])
|
|
|
|
s1 = sigma_base * (dx / pml_bounds[0])**m
|
|
s2 = sigma_base * (dy / pml_bounds[1])**m
|
|
s3 = sigma_base * (dz / pml_bounds[2])**m
|
|
sigmas = np.stack([np.round(s1, 6), np.round(s2, 6), np.round(s3, 6)], axis=1)
|
|
|
|
materials_out = outside_materials.reshape(-1, 1)
|
|
key_outside = np.hstack([shape_key_out, materials_out, outside_bcs, sigmas])
|
|
|
|
_, group_ids_outside = np.unique(key_outside, axis=0, return_inverse=True)
|
|
group_ids_outside = group_ids_outside + next_group_id
|
|
group_indices[outside_indices] = group_ids_outside
|
|
|
|
outside_gid_set.update(group_ids_outside)
|
|
|
|
# ========================
|
|
# Filter small groups
|
|
# ========================
|
|
final = group_indices.copy()
|
|
surviving_inside_gid_set = set()
|
|
surviving_outside_gid_set = set()
|
|
|
|
for gid in np.unique(group_indices):
|
|
if gid == 0:
|
|
continue
|
|
count = np.sum(group_indices == gid)
|
|
if count < min_group_size:
|
|
final[group_indices == gid] = 0
|
|
else:
|
|
if gid in inside_gid_set:
|
|
surviving_inside_gid_set.add(gid)
|
|
elif gid in outside_gid_set:
|
|
surviving_outside_gid_set.add(gid)
|
|
|
|
# ========================
|
|
# Remap surviving group IDs to be contiguous
|
|
# ========================
|
|
uniqueGroups = np.unique(final)
|
|
remap = {old_gid: new_gid for new_gid, old_gid in enumerate(uniqueGroups)}
|
|
group_indices_remapped = np.array([remap[gid] for gid in final], dtype=np.int64)
|
|
|
|
# Compute remapped surviving GIDs
|
|
remapped_inside_ids = sorted([remap[gid] for gid in surviving_inside_gid_set if gid in remap])
|
|
remapped_outside_ids = sorted([remap[gid] for gid in surviving_outside_gid_set if gid in remap])
|
|
|
|
# ========================
|
|
# Diagnostics
|
|
# ========================
|
|
def print_gid_range(name, gid_list):
|
|
if gid_list:
|
|
print(f"🔢 {name} group IDs: min={min(gid_list)}, max={max(gid_list)}")
|
|
else:
|
|
print(f"🔢 {name} group IDs: No valid groups")
|
|
|
|
print(f"🔢 Total tets: {num_elems}")
|
|
print(f"📦 Inside box: {len(inside_indices)} (grouped by shape+material+BC)")
|
|
print(f"🌐 Outside box: {len(outside_indices)} (grouped by +conductivity)")
|
|
print(f"🚫 Groups filtered out (<{min_group_size} tets)")
|
|
print(f"🔁 Unique groups (excluding group 0): {len(uniqueGroups) - (1 if 0 in uniqueGroups else 0)}")
|
|
print_gid_range("Inside", remapped_inside_ids)
|
|
print_gid_range("Outside", remapped_outside_ids)
|
|
|
|
return group_indices_remapped
|
|
|
|
def group_tets_by_box_and_pml(
|
|
elementArray, bcArray, totalNodes,
|
|
materialArray,
|
|
region_corner, region_dims,
|
|
excluded_bc_values=[],
|
|
tolerance=1e-10,
|
|
min_group_size=1,
|
|
):
|
|
num_elems = elementArray.shape[0]
|
|
nodesArray = totalNodes[elementArray]
|
|
centroids = nodesArray.mean(axis=1)
|
|
|
|
core_lower = np.asarray(region_corner)
|
|
core_upper = core_lower + np.asarray(region_dims)
|
|
|
|
inside_mask = np.all((centroids >= core_lower) & (centroids <= core_upper), axis=1)
|
|
outside_mask = ~inside_mask
|
|
|
|
group_indices = np.zeros(num_elems, dtype=np.int64)
|
|
|
|
inside_gid_set = set()
|
|
|
|
# ========================
|
|
# Inside grouping only
|
|
# ========================
|
|
inside_indices = np.where(inside_mask)[0]
|
|
inside_nodes = nodesArray[inside_indices]
|
|
inside_bcs = bcArray[inside_indices]
|
|
inside_materials = materialArray[inside_indices]
|
|
|
|
exclude_mask = np.isin(inside_bcs, excluded_bc_values).any(axis=1)
|
|
valid_inside_mask = ~exclude_mask
|
|
valid_inside_indices = inside_indices[valid_inside_mask]
|
|
|
|
valid_nodes = nodesArray[valid_inside_indices]
|
|
edges = np.empty((valid_nodes.shape[0], 3, 3))
|
|
edges[:, 0, :] = valid_nodes[:, 0, :] - valid_nodes[:, 1, :]
|
|
edges[:, 1, :] = valid_nodes[:, 0, :] - valid_nodes[:, 2, :]
|
|
edges[:, 2, :] = valid_nodes[:, 0, :] - valid_nodes[:, 3, :]
|
|
shape_key = np.round(edges / tolerance) * tolerance
|
|
shape_key = shape_key.reshape(valid_nodes.shape[0], -1)
|
|
|
|
valid_materials = materialArray[valid_inside_indices].reshape(-1, 1)
|
|
valid_bcs = bcArray[valid_inside_indices]
|
|
key_inside = np.hstack([shape_key, valid_materials, valid_bcs])
|
|
|
|
_, group_ids_inside = np.unique(key_inside, axis=0, return_inverse=True)
|
|
group_ids_inside = group_ids_inside + 1
|
|
group_indices[valid_inside_indices] = group_ids_inside
|
|
|
|
inside_gid_set.update(group_ids_inside)
|
|
|
|
# ========================
|
|
# Filter small groups
|
|
# ========================
|
|
final = group_indices.copy()
|
|
surviving_inside_gid_set = set()
|
|
|
|
for gid in np.unique(group_indices):
|
|
if gid == 0:
|
|
continue
|
|
count = np.sum(group_indices == gid)
|
|
if count < min_group_size:
|
|
final[group_indices == gid] = 0
|
|
else:
|
|
surviving_inside_gid_set.add(gid)
|
|
|
|
# ========================
|
|
# Remap surviving group IDs to be contiguous
|
|
# ========================
|
|
uniqueGroups = np.unique(final)
|
|
remap = {old_gid: new_gid for new_gid, old_gid in enumerate(uniqueGroups)}
|
|
group_indices_remapped = np.array([remap[gid] for gid in final], dtype=np.int64)
|
|
|
|
# Compute remapped surviving GIDs
|
|
remapped_inside_ids = sorted([remap[gid] for gid in surviving_inside_gid_set if gid in remap])
|
|
|
|
# ========================
|
|
# Diagnostics
|
|
# ========================
|
|
def print_gid_range(name, gid_list):
|
|
if gid_list:
|
|
print(f"🔢 {name} group IDs: min={min(gid_list)}, max={max(gid_list)}")
|
|
else:
|
|
print(f"🔢 {name} group IDs: No valid groups")
|
|
|
|
print(f"🔢 Total tets: {num_elems}")
|
|
print(f"📦 Inside box: {len(inside_indices)} (grouped by shape+material+BC)")
|
|
print(f"🌐 Outside box: {len(np.where(outside_mask)[0])} (set to group 0)")
|
|
print(f"🚫 Groups filtered out (<{min_group_size} tets)")
|
|
print(f"🔁 Unique groups (excluding group 0): {len(uniqueGroups) - (1 if 0 in uniqueGroups else 0)}")
|
|
print_gid_range("Inside", remapped_inside_ids)
|
|
|
|
return group_indices_remapped
|
|
|
|
########################################
|
|
## MAIN FUNCTION ##
|
|
########################################
|
|
|
|
|
|
def main():
|
|
|
|
# Setup full file path and mesh size
|
|
name = f"{directory}/{fileName}"
|
|
regularElementSize = truncate_to_significant_figures(mesh_size, 4)
|
|
|
|
# Load and configure volume mesh
|
|
mesh = geo.VolumeMesh.from_exodus(f"{name}.g")
|
|
config = mesh.apply_config(f"{name}.json")
|
|
BC = geo.BoundaryCondition
|
|
print_boundary_conditions()
|
|
|
|
# Check PEC boundaries
|
|
pec_faces = mesh.face_sets_with_bc(BC.PEC)
|
|
if len(pec_faces) == 0:
|
|
print("Error: No PEC faces found.")
|
|
print("PEC face set size:", len(pec_faces))
|
|
|
|
|
|
###########################################
|
|
## Generate Regular Airbox Mesh ##
|
|
###########################################
|
|
|
|
regular = mesh.make_Airbox_pml_grid_mesh(regularElementSize, pec_faces)
|
|
regVertices = np.array(regular.vertices)
|
|
|
|
#print("Regular Vertices = ", regular.vertices)
|
|
|
|
|
|
print("------------------------------------------------")
|
|
print("Shape of regular vertices = ", regVertices.shape)
|
|
print("------------------------------------------------")
|
|
|
|
# vertices are stored as (3, N); reduce over axis=1 to get per-coordinate bounds
reg_min = np.min(regVertices, axis=1)
reg_max = np.max(regVertices, axis=1)
|
|
|
|
print("########################################")
|
|
print(" Regular Grid Bounding Box")
|
|
print("########################################")
|
|
print(f"X range: {reg_min[0]:.6f} to {reg_max[0]:.6f}")
|
|
print(f"Y range: {reg_min[1]:.6f} to {reg_max[1]:.6f}")
|
|
print(f"Z range: {reg_min[2]:.6f} to {reg_max[2]:.6f}")
|
|
print(f"Box Size (dx, dy, dz): {reg_max - reg_min}")
|
|
print("########################################")
|
|
|
|
|
|
# First vertex is used as the reference node
|
|
referenceNode = regular.vertices[:, 0]
|
|
print("First Node from regular tets = ", referenceNode)
|
|
|
|
regularVertices = regular.vertices.shape[0]
|
|
|
|
'''
|
|
# Find offset to snap vertices to a grid
|
|
regVertices, offset_trunc = snap_vertices_relative_to_center(regVertices, referenceNode, regularElementSize, unit2trunc)
|
|
reg_min = np.min(regVertices, axis=0)
|
|
reg_max = np.max(regVertices, axis=0)
|
|
print("Min Regular Corner after snapping = ", reg_min)
|
|
print("Max Regular Corner after snapping = ", reg_max)
|
|
print("Snapped regVertices to grid")
|
|
'''
|
|
|
|
export_bc_surfaces(regular, "Regular", BC)
|
|
|
|
|
|
########################################
|
|
## Generate Irregular Mesh ##
|
|
########################################
|
|
|
|
irregular = mesh.make_hybrid_mesh_Airbox(
|
|
regularElementSize,
|
|
pec_faces,
|
|
max_volume=(regularElementSize ** 3) * (np.sqrt(2) / 12)
|
|
)
|
|
|
|
print("------------------------------------------------")
|
|
#print("Irregular Vertices = ", irregular.vertices)
|
|
print("Irregular Vertices Shape = ", irregular.vertices.shape)
|
|
print("------------------------------------------------")
|
|
|
|
export_bc_surfaces(irregular, "Irregular", BC)
|
|
|
|
|
|
########################################
|
|
## Generate Box-Shaped PML ##
|
|
########################################
|
|
|
|
# Get bounding box of the regular mesh
|
|
reg_min = np.min(regVertices, axis=1)
|
|
reg_max = np.max(regVertices, axis=1)
|
|
|
|
print("Min Corner = ", reg_min)
|
|
print("Max Corner = ", reg_max)
|
|
|
|
# ---- Inner PML (existing) ----
|
|
sc_layer, sc_vert = generate_cuboid_pml_layer(
|
|
regular,
|
|
regularElementSize,
|
|
reg_min, # inner (core) min
|
|
reg_max, # inner (core) max
|
|
layer_thickness=1
|
|
)
|
|
export_bc_surfaces(sc_layer, "sc_layer", BC)
|
|
|
|
# Collect min/max of inner PML (pml_vert is (N,3))
|
|
pml1_min = np.min(sc_vert, axis=1)
|
|
pml1_max = np.max(sc_vert, axis=1)
|
|
|
|
all_sc_vertices = np.vstack(sc_vert) if len(sc_vert) > 0 else np.empty((0, 3))
|
|
|
|
# ---- Outer PML (new) ----
|
|
# thickness of the outer shell (cells). Feel free to change:
|
|
outer_number_of_layers = number_of_layers
|
|
|
|
pml_layer, pml_vert = generate_cuboid_pml_layer(
|
|
regular,
|
|
regularElementSize,
|
|
pml1_min, # inner hole of outer PML is exactly the outer bounds of the inner PML
|
|
pml1_max,
|
|
layer_thickness=outer_number_of_layers
|
|
)
|
|
export_bc_surfaces(pml_layer, "pml_layer", BC)
|
|
all_pml_vertices = np.vstack(pml_vert) if len(pml_vert) > 0 else np.empty((0, 3))
|
|
pml_min = np.min(all_pml_vertices, axis=0)
|
|
pml_max = np.max(all_pml_vertices, axis=0)
|
|
|
|
export_bc_surfaces(pml_layer, "pml", BC)
|
|
|
|
|
|
|
|
|
|
|
|
# -------------------------------
|
|
# Deduplicate and export node list
|
|
# The unique set of nodes
|
|
# The indices of the first occurrences
|
|
# A mapping array from original to deduplicated nodes
|
|
# The count of how many times each unique row occurred (but unused here)
|
|
# -------------------------------
|
|
|
|
print("Shape of all_pml_vertices:", (pml_vert).T.shape)
|
|
print("Shape of all_sc_vertices:", (sc_vert).T.shape)
|
|
print("Shape of regular vertices:", np.array(regular.vertices).T.shape)
|
|
print("Shape of irregular vertices:", np.array(irregular.vertices).T.shape)
|
|
|
|
# Stack all vertex coordinates for deduplication
|
|
totalNodes = np.vstack((
|
|
np.array(irregular.vertices).T,
|
|
np.array(regular.vertices).T,
|
|
np.array(sc_vert).T,
|
|
np.array(pml_vert).T
|
|
))
|
|
|
|
print("🔢 Number of nodes before rounding/deduplication:", totalNodes.shape[0])
|
|
print("TotalNodes = ",totalNodes.shape)
|
|
|
|
|
|
# 2. Round coordinates to a precision (e.g., 1e-8)
|
|
precision = 8
|
|
totalNodes = np.round(totalNodes, decimals=precision)
|
|
|
|
# Deduplicate and get mapping to global IDs
|
|
totalNodes, indices, verticesMap, _ = np.unique(totalNodes, axis=0, return_index=True, return_inverse=True, return_counts=True)
|
|
|
|
print("✅ Number of unique nodes after rounding:", totalNodes.shape[0])
|
|
|
|
|
|
# Save to .node file
|
|
np.savetxt(f'{name}.node', totalNodes, newline="\n3 0 0\n" , fmt='%f', header=f"{unit}\n{totalNodes.shape[0]}", comments='')
|
|
|
|
print("Total deduplicated nodes:", totalNodes.shape[0])
|
|
|
|
|
|
|
|
########################################
|
|
## IRREGULAR ELEMENTS ##
|
|
########################################
|
|
|
|
offset = 0
|
|
|
|
# Build face BC array for irregular mesh
|
|
selected_BC = [BC.PEC]
|
|
output_BC_number = [1]
|
|
irregularFacesBC = build_face_bc_array_custom(irregular, BC, selected_BC, output_BC_number)
|
|
|
|
# Build element and face BC arrays for irregular mesh
|
|
irregularElementArray, irregularBcArray = build_element_and_bc_arrays(
|
|
irregular, irregularFacesBC, verticesMap, offset=offset)
|
|
|
|
print("Unique face BC values in irregularFacesBC:")
|
|
print(np.unique(irregularFacesBC))
|
|
|
|
offset += irregular.vertices.shape[1]
|
|
|
|
########################################
|
|
## REGULAR ELEMENTS ##
|
|
########################################
|
|
### This section processes regular mesh elements by assigning boundary conditions (BCs),
|
|
### building element connectivity arrays, and canonically ordering nodes for consistency.
|
|
|
|
selected_BC = [BC.PML_Excitation]
|
|
output_BC_number = [2]
|
|
regularFacesBC = build_face_bc_array_custom(regular, BC, selected_BC, output_BC_number)
|
|
elementArray, bcArray = build_element_and_bc_arrays(regular, regularFacesBC, verticesMap,offset=offset)
|
|
|
|
offset += regular.vertices.shape[1]
|
|
|
|
print("Unique face BC values in regularFacesBC:")
|
|
print(np.unique(regularFacesBC))
|
|
|
|
print("Shape of elementArray:", elementArray.shape)
|
|
print("Max vertex index used:", np.max(elementArray))
|
|
|
|
|
|
surfs = irregular.extract_surfaces(irregular.face_sets_with_bc(BC.PEC))
|
|
surfs.save_stl(f"{name}_regular_pec_surfaces.stl")
|
|
surfs = regular.extract_surfaces(regular.face_sets_with_bc(BC.NCB))
|
|
surfs.save_stl(f"{name}_regular_ncbc_surfaces.stl")
|
|
surfs = regular.extract_surfaces(regular.face_sets_with_bc(BC.ABC))
|
|
surfs.save_stl(f"{name}_regular_abc_surfaces.stl")
|
|
|
|
|
|
|
|
###################################
|
|
## SC ELEMENTS ##
|
|
###################################
|
|
|
|
# Accumulators for all PML elements and BCs
|
|
selected_BC = [BC.PML_Excitation,BC.PML_ABC]
|
|
output_BC_number = [2,0]
|
|
scFacesBC = build_face_bc_array_custom(sc_layer, BC, selected_BC, output_BC_number)
|
|
scElementArrays, scBcArrays = build_element_and_bc_arrays(sc_layer, scFacesBC, verticesMap, offset = offset)
|
|
offset += sc_layer.vertices.shape[1]
|
|
|
|
print("Unique face BC values in scFacesBC:")
|
|
print(np.unique(scFacesBC))
|
|
|
|
print("SC Shape of elementArray:", scElementArrays.shape)
|
|
print("SC Max vertex index used:", np.max(scElementArrays))
|
|
|
|
|
|
|
|
|
|
|
|
####################################
|
|
## PML ELEMENTS ##
|
|
####################################
|
|
|
|
# Accumulators for all PML elements and BCs
|
|
selected_BC = [BC.PML_Excitation,BC.PML_ABC]
|
|
output_BC_number = [0,3]
|
|
pmlFacesBC = build_face_bc_array_custom(pml_layer, BC, selected_BC, output_BC_number)
|
|
pmlElementArrays, pmlBcArrays = build_element_and_bc_arrays(pml_layer, pmlFacesBC, verticesMap, offset = offset)
|
|
offset += pml_layer.vertices.shape[1]
|
|
|
|
print("Unique face BC values in pmlFacesBC:")
|
|
print(np.unique(pmlFacesBC))
|
|
print("PML Shape of elementArray:", pmlElementArrays.shape)
|
|
print("PML Max vertex index used:", np.max(pmlElementArrays))
|
|
|
|
|
|
########################################
|
|
## BUILD FULL ARRAYS ##
|
|
########################################
|
|
|
|
# Concatenate tetrahedral elements in order: irregular, regular, SC, PML
|
|
full_element_array = np.vstack([irregularElementArray, elementArray, scElementArrays, pmlElementArrays])
|
|
|
|
# Total element counts
|
|
Nirreg = irregularElementArray.shape[0]
|
|
Nreg = elementArray.shape[0]
|
|
Nsc = scElementArrays.shape[0]
|
|
Npml = pmlElementArrays.shape[0]
|
|
|
|
Ntotal = Nsc + Npml + Nreg + Nirreg
|
|
|
|
assert full_element_array.shape[0] == Ntotal
|
|
|
|
########################################
|
|
## MATERIAL ASSIGNMENT ##
|
|
########################################
|
|
|
|
material_flags = np.zeros(Ntotal, dtype=np.int64)
|
|
|
|
# SC: material 1
|
|
material_flags[Nirreg+Nreg:Nirreg+Nreg+Nsc] = 1
|
|
|
|
# PML: material 2
|
|
material_flags[Nirreg+Nreg+Nsc::] = 2
|
|
|
|
|
|
########################################
|
|
## BUILD BC ARRAY ##
|
|
########################################
|
|
|
|
# Ensure BC arrays match the stacked order
|
|
bc_full_array = np.vstack([
|
|
irregularBcArray, # shape: (Nirreg, 4)
|
|
bcArray, # regular, shape: (Nreg, 4)
|
|
scBcArrays,        # SC, shape: (Nsc, 4)
pmlBcArrays        # PML, shape: (Npml, 4)
|
|
])
|
|
|
|
assert bc_full_array.shape == (Ntotal, 4)
|
|
|
|
print("Unique BC codes in full BC array:",np.unique(bc_full_array))
|
|
|
|
|
|
#################################
|
|
## GROUPING REGULARS ##
|
|
#################################
|
|
|
|
# Concatenate tetrahedral elements in order: regular, SC, PML
|
|
combined_element_array = np.vstack([elementArray, scElementArrays, pmlElementArrays])
|
|
combined_bc_array = np.vstack([
|
|
bcArray, # regular, shape: (Nreg, 4)
|
|
scBcArrays,        # SC face BCs matching the scElementArrays row ordering
|
|
pmlBcArrays # shape: (Npml, 4)
|
|
])
|
|
|
|
temp_material_flags = np.zeros(Nreg+Nsc+Npml, dtype=np.int64)
|
|
temp_material_flags[Nreg:Nreg+Nsc] = 1
|
|
temp_material_flags[Nreg+Nsc:Nreg+Nsc+Npml] = 2
|
|
|
|
|
|
print("Unique bc IDS = ", np.unique(combined_bc_array))
|
|
|
|
|
|
print("==================================================")
|
|
print("Grouping regular elements by geometry...")
|
|
print("==================================================")
|
|
|
|
# Bounding box of regular airbox
|
|
minX = np.min(regVertices[0, :])
|
|
maxX = np.max(regVertices[0, :])
|
|
minY = np.min(regVertices[1, :])
|
|
maxY = np.max(regVertices[1, :])
|
|
minZ = np.min(regVertices[2, :])
|
|
maxZ = np.max(regVertices[2, :])
|
|
reg_corner = np.array([minX, minY, minZ])
|
|
reg_dims = np.array([maxX - minX, maxY - minY, maxZ - minZ])
|
|
print("Bounding box for regular region grouping:")
|
|
print(reg_corner, reg_corner + reg_dims)
|
|
|
|
Excitation_minZ = minZ
|
|
Excitation_maxZ = maxZ
|
|
Excitation_minX = minX
|
|
Excitation_maxX = maxX
|
|
Excitation_minY = minY
|
|
Excitation_maxY = maxY
|
|
|
|
# Bounding box of SC layer
|
|
minX = np.min(all_sc_vertices[0, :])
|
|
maxX = np.max(all_sc_vertices[0, :])
|
|
minY = np.min(all_sc_vertices[1, :])
|
|
maxY = np.max(all_sc_vertices[1, :])
|
|
minZ = np.min(all_sc_vertices[2, :])
|
|
maxZ = np.max(all_sc_vertices[2, :])
|
|
reg_corner = np.array([minX, minY, minZ])
|
|
reg_dims = np.array([maxX - minX, maxY - minY, maxZ - minZ])
|
|
|
|
# Bounding box of PML layer
|
|
PMLminX = np.min(all_pml_vertices[0, :])
|
|
|
|
thickness = abs(PMLminX - minX)
|
|
pml_bounds=(thickness,thickness,thickness)
|
|
|
|
reg_indices = group_tets_by_box_and_pml( elementArray=combined_element_array, bcArray=combined_bc_array, totalNodes=totalNodes,
|
|
materialArray=temp_material_flags, region_corner=reg_corner , region_dims=reg_dims ,
|
|
excluded_bc_values=[2,3], min_group_size=100)
|
|
|
|
|
|
|
|
print("=====================================")
|
|
vals, counts = np.unique(reg_indices, return_counts=True)
|
|
print("Counts (including 0):")
|
|
for v, c in zip(vals, counts):
|
|
print(f"{int(v)}: {int(c)}")
|
|
print("=====================================")
|
|
|
|
|
|
#################################
|
|
## CONSTRUCT FINAL INDEX ##
|
|
#################################
|
|
|
|
print("==================================================")
|
|
print("Constructing overall global regular indices ...")
|
|
print("==================================================")
|
|
|
|
# Combine all tets and group indices
|
|
full_element_array = np.vstack([irregularElementArray, elementArray, scElementArrays, pmlElementArrays])
|
|
final_indices = np.hstack([
|
|
np.zeros(irregularElementArray.shape[0], dtype=np.int64), # Irregular
|
|
reg_indices # Regular
|
|
])
|
|
|
|
|
|
|
|
print("=====================================")
|
|
vals, counts = np.unique(final_indices, return_counts=True)
|
|
print("Final Counts (including 0):")
|
|
for v, c in zip(vals, counts):
|
|
print(f"{int(v)}: {int(c)}")
|
|
print("=====================================")
|
|
|
|
|
|
print("final_indices shape:", final_indices.shape)
|
|
print("Max group ID = ", np.sort(np.unique(final_indices)))
|
|
|
|
|
|
assert final_indices.shape[0] == full_element_array.shape[0], \
|
|
"Mismatch between final_indices and total element count!"
|
|
|
|
# Save .regular group file (now matches .tetra)
|
|
total_group_count = np.max(final_indices) + 1
|
|
np.savetxt(f"{name}.regular", final_indices, fmt='%d', header=str(total_group_count), comments='')
|
|
|
|
print(f"Saved {final_indices.shape[0]} group indices to {name}.regular")
|
|
|
|
|
|
|
|
########################################
|
|
## WRITE TO .tetra FILE ##
|
|
########################################
|
|
|
|
c_with_extra = np.ones((2 * Ntotal, 5), dtype=np.int64) * (-1)
|
|
|
|
# Even rows: material + connectivity
|
|
c_with_extra[::2] = np.hstack((
|
|
material_flags.reshape(-1, 1),
|
|
full_element_array
|
|
))
|
|
|
|
# Odd rows: face BCs
|
|
c_with_extra[1::2, 1:] = bc_full_array
|
|
|
|
# Format as DataFrame for easier output
|
|
df = pd.DataFrame(c_with_extra, dtype=np.int64).where(lambda x: x != -1, np.nan)
|
|
|
|
# Insert element count header
|
|
header_row = pd.Series([Ntotal] + [np.nan] * 4, index=df.columns)
|
|
df = pd.concat([header_row.to_frame().T, df], ignore_index=True)
|
|
|
|
# Save as .tetra file
|
|
df = df.astype("Int64")
|
|
df.to_csv(f"{name}.tetra", sep=' ', index=False, header=False)
|
|
|
|
print(f"✅ Saved {Ntotal} tetrahedra to {name}.tetra")
|
|
|
|
|
|
|
|
#################################
|
|
## WRITE FILES ##
|
|
#################################
|
|
|
|
print("Writing freespace.m...")
|
|
|
|
with open("freespace.m", "w") as f:
|
|
# First 6 rows: identity tensors
|
|
f.write("standard\n")
|
|
f.write("1.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00\n")
|
|
f.write("0.000000e+00 0.000000e+00 1.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00\n")
|
|
f.write("0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 1.000000e+00 0.000000e+00\n")
|
|
f.write("1.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00\n")
|
|
f.write("0.000000e+00 0.000000e+00 1.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00\n")
|
|
f.write("0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 1.000000e+00 0.000000e+00\n")
|
|
|
|
# 3 rows of zero vectors (dispersion parameters)
|
|
for _ in range(3):
|
|
f.write("0.000000e+00 0.000000e+00 0.000000e+00\n")
|
|
|
|
print("Writing scaterring.m...")
|
|
|
|
with open("scattering.m", "w") as f:
|
|
# First 6 rows: identity tensors
|
|
f.write("standard\n")
|
|
f.write("1.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00\n")
|
|
f.write("0.000000e+00 0.000000e+00 1.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00\n")
|
|
f.write("0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 1.000000e+00 0.000000e+00\n")
|
|
f.write("1.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00\n")
|
|
f.write("0.000000e+00 0.000000e+00 1.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00\n")
|
|
f.write("0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 1.000000e+00 0.000000e+00\n")
|
|
|
|
# 3 rows of zero vectors (dispersion parameters)
|
|
for _ in range(3):
|
|
f.write("0.000000e+00 0.000000e+00 0.000000e+00\n")
|
|
|
|
|
|
print("Writing .bc file...")
|
|
|
|
with open(f"{name}.bc", "w") as f:
|
|
f.write("4\n")
|
|
f.write("0 none\n")
|
|
f.write("1 pec\n")
|
|
f.write(f"2 pml 1 pmlExci 1.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 {Excitation_minZ} \n")
|
|
f.write("3 abc 1\n")
|
|
|
|
with open(f"{name}.prop", "w") as f:
|
|
f.write("./\n")
|
|
f.write("freespace\n")
|
|
f.write("scattering\n")
|
|
f.write("pml\n")
|
|
|
|
sigma_base = 0.001
|
|
print("Writing pml.m...")
|
|
with open("pml.m", "w") as f:
|
|
f.write("standard\n")
|
|
f.write("1.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00\n")
|
|
f.write("0.000000e+00 0.000000e+00 1.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00\n")
|
|
f.write("0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 1.000000e+00 0.000000e+00\n")
|
|
f.write("1.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00\n")
|
|
f.write("0.000000e+00 0.000000e+00 1.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00\n")
|
|
f.write("0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 1.000000e+00 0.000000e+00\n")
|
|
f.write(f"{sigma_base:.6e} 0.000000e+00 0.000000e+00\n")
|
|
f.write(f"0.000000e+00 {sigma_base:.6e} 0.000000e+00\n")
|
|
f.write(f"0.000000e+00 0.000000e+00 {sigma_base:.6e}\n")
|
|
f.write(f"3 {thickness:.3f} -1.0 -1.0 -1.0\n")
|
|
# Use expanded inner cavity bounds (aligned to grid)
|
|
f.write(f"{pml1_min[0]:.3f} {pml1_max[0]:.3f} "
|
|
f"{pml1_min[1]:.3f} {pml1_max[1]:.3f} "
|
|
f"{pml1_min[2]:.3f} {pml1_max[2]:.3f}\n")
|
|
|
|
if __name__ == '__main__':
|
|
main()
|
|
|