Commit 805fc0dc authored by Malte Nyhuis

Cleanup: remove unnecessary and overly complex code, and with it some unidentified bugs

parent 05a096dd
2 merge requests: !63 Cleanup clean, !62 Cleanup 0.2.0
Pipeline #18112 failed
......@@ -291,6 +291,6 @@ def naca(number, n, finite_te=False, half_cosine_spacing=True):
y = radius * np.sin(theta)
"""
points = np.stack([X, Y, np.zeros(len(X))]).T
poly = pv.PolyData(points)
poly = poly.clean(tolerance=1e-6)
return poly.points[::, 0], poly.points[::, 1]
uniquepts = np.unique(points, axis=0)
return uniquepts[::, 0], uniquepts[::, 1]
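# (Editor's sketch, hedged, not part of the diff) The new deduplication via np.unique(points, axis=0)
# drops exact duplicate rows only and returns the survivors in lexicographic order, whereas the removed
# PolyData.clean(tolerance=1e-6) also merged near-coincident points. The sample points are made up.
import numpy as np

pts = np.array([[0.0, 0.0, 0.0],
                [1.0, 0.0, 0.0],
                [0.0, 0.0, 0.0],          # exact duplicate of the first row -> removed
                [1.0 + 1e-9, 0.0, 0.0]])  # near-duplicate -> kept by np.unique
print(np.unique(pts, axis=0).shape)       # (3, 3)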
......@@ -15,14 +15,11 @@ class Blade2D:
def __init__(self, points: np.ndarray or pv.PolyData):
self.origpoints_pv = None
self.sortedpoints_pv = None
self.sortedpointsrolled_pv = None
self.periodicsplinepoints = None
self.skeletonline_pv = None
self.ps_pv = None
self.ss_pv = None
self.hullalpha = None
self.ile_orig = None
self.ite_orig = None
self.ile = None
self.ite = None
self.beta_le = None
......@@ -38,10 +35,11 @@ class Blade2D:
print("working with np.ndarray pointcloud. No dataarrays cab be used for postprocessing")
def compute_polygon(self, alpha=None):
unique_points = np.unique(self.origpoints_pv.points, axis=0)
if alpha:
xs, ys = calc_concavehull(self.origpoints_pv.points[:, 0], self.origpoints_pv.points[:, 1], alpha)
xs, ys = calc_concavehull(unique_points[:, 0], unique_points[:, 1], alpha)
else:
xs, ys, alpha = auto_concaveHull(self.origpoints_pv.points[:, 0], self.origpoints_pv.points[:, 1])
xs, ys, alpha = auto_concaveHull(unique_points[:, 0], unique_points[:, 1])
line = polyline_from_points(np.stack((xs, ys, np.zeros(len(ys)))).T)
orientation = is_counterclockwise(xs, ys)
......@@ -61,21 +59,10 @@ class Blade2D:
self.sortedpoints_pv = self.sortedpoints_pv.merge(self.origpoints_pv.extract_points([i]))
def compute_lete(self):
self.ile_orig, self.ite_orig = extract_vk_hk(self.sortedpoints_pv)
def compute_rolledblade(self):
ids = np.roll(np.arange(self.sortedpoints_pv.number_of_points), -self.ile_orig)
newSortedPoly = pv.PolyData()
for i in ids:
newSortedPoly = newSortedPoly.merge(self.sortedpoints_pv.extract_points(i))
self.ite = (self.ite_orig - self.ile_orig) % newSortedPoly.number_of_points
self.ile = 0
self.sortedpointsrolled_pv = newSortedPoly
self.ile, self.ite = extract_vk_hk(self.sortedpoints_pv)
def extract_sides(self):
self.ps_pv, self.ss_pv = extractSidePolys(self.ite,self.ile, self.sortedpointsrolled_pv)
self.ps_pv, self.ss_pv = extractSidePolys(self.ite,self.ile, self.sortedpoints_pv)
def compute_skeleton(self):
self.skeletonline_pv = midline_from_sides(self.ps_pv, self.ss_pv)
......@@ -83,7 +70,7 @@ class Blade2D:
def compute_stats(self):
vk_tangent = self.skeletonline_pv.points[0] - self.skeletonline_pv.points[1]
hk_tangent = self.skeletonline_pv.points[-2] - self.skeletonline_pv.points[-1]
chord = -self.sortedpointsrolled_pv.points[self.ile] + self.sortedpointsrolled_pv.points[self.ite]
chord = -self.sortedpoints_pv.points[self.ile] + self.sortedpoints_pv.points[self.ite]
self.beta_le = vecAngle(vk_tangent, -np.array([1, 0, 0])) / np.pi * 180
self.beta_te = vecAngle(hk_tangent, np.array([1, 0, 0])) / np.pi * 180
self.camber_phi = vecAngle(chord, np.array([1, 0, 0])) / np.pi * 180
......@@ -92,7 +79,6 @@ class Blade2D:
def compute_all_frompoints(self, alpha=None):
self.compute_polygon(alpha)
self.compute_lete()
self.compute_rolledblade()
self.extract_sides()
self.compute_skeleton()
self.compute_stats()
......@@ -103,8 +89,8 @@ class Blade2D:
p.add_mesh(self.ps_pv, color="red", label="pressure side", line_width=1)
p.add_mesh(self.ss_pv, color="blue", label="suction side", line_width=1)
p.add_mesh(self.skeletonline_pv, color="black", label="skeleton line", line_width=1)
p.add_mesh(pv.PolyData(self.sortedpointsrolled_pv.points[self.ile]), color="green", label="ile", point_size=16)
p.add_mesh(pv.PolyData(self.sortedpointsrolled_pv.points[self.ite]), color="yellow", label="ite", point_size=16)
p.add_mesh(pv.PolyData(self.sortedpoints_pv.points[self.ile]), color="green", label="ile", point_size=16)
p.add_mesh(pv.PolyData(self.sortedpoints_pv.points[self.ite]), color="yellow", label="ite", point_size=16)
p.add_text(
f"beta_le: {self.beta_le} \nbeta_te: {self.beta_te}\nphi_camber: {self.camber_phi}\nlength_camber: {self.camber_length}",
font_size=int(sfactor))
......
import cv2
import numpy as np
import pyvista as pv
import shapely
from scipy.interpolate import splev
from scipy.interpolate import splprep
from scipy.spatial import Voronoi
from scipy.spatial.distance import cdist
from shapely.geometry import Polygon, LineString
from skimage.morphology import skeletonize
from ntrfc.geometry.line import lines_from_points
from ntrfc.turbo.pointcloud_methods import extractSidePolys, midline_from_sides
from ntrfc.math.vectorcalc import findNearest, vecDir
from ntrfc.geometry.alphashape import auto_concaveHull
def detect_inliers_tukey(data):
"""
Detect inliers using Tukey's method.
Parameters:
- data: 1D array-like data
Returns:
- Array of boolean values indicating whether each data point is an inlier
"""
Q1 = np.percentile(data, 25)
Q3 = np.percentile(data, 75)
IQR = Q3 - Q1
lower_bound = Q1 - 1.5 * IQR
upper_bound = Q3 + 1.5 * IQR
return (data >= lower_bound) & (data <= upper_bound)
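# Hedged usage sketch (made-up sample): detect_inliers_tukey flags values inside the classic
# Tukey fences [Q1 - 1.5*IQR, Q3 + 1.5*IQR], so only the obvious outlier is rejected.
sample = np.array([1.0, 1.1, 0.9, 1.05, 0.95, 10.0])
mask = detect_inliers_tukey(sample)
print(sample[mask])  # [1.   1.1  0.9  1.05 0.95] -- the 10.0 is dropped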
def detect_inliers_mad(data):
"""
Detect inliers using Median Absolute Deviation (MAD) method.
Parameters:
- data: 1D array-like data
Returns:
- Array of boolean values indicating whether each data point is an inlier
"""
median = np.median(data)
mad = np.median(np.abs(data - median))
threshold = 3.5 * mad # Adjust multiplier as needed
lower_bound = median - threshold
upper_bound = median + threshold
return (data >= lower_bound) & (data <= upper_bound)
def detect_inliers_zscore(data, threshold=2):
"""
Detect inliers using the Z-Score method.
Parameters:
- data: 1D array-like data
- threshold: Z-Score threshold for identifying outliers (default=2)
Returns:
- Array of boolean values indicating whether each data point is an inlier
"""
z_scores = np.abs((data - np.mean(data)) / np.std(data))
return z_scores < threshold
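# Hedged sketch: on the same made-up sample the MAD and Z-Score variants agree with the Tukey
# test above -- each returns a boolean inlier mask, and only the 10.0 is rejected.
sample = np.array([1.0, 1.1, 0.9, 1.05, 0.95, 10.0])
print(detect_inliers_mad(sample))     # [ True  True  True  True  True False]
print(detect_inliers_zscore(sample))  # [ True  True  True  True  True False]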
def clean_sites(voronoi_sites, skeletonize_sites):
# Compute pairwise distances
distances = cdist(skeletonize_sites, voronoi_sites)
# Find the minimal distance from each skeletonization site to the Voronoi sites
min_distances = np.min(distances, axis=1)
inliers_tukey = detect_inliers_tukey(min_distances)
inliers_mad = detect_inliers_mad(min_distances)
inliers_zscore = detect_inliers_zscore(min_distances)
inlier_indices = np.where(inliers_tukey * inliers_zscore * inliers_mad)[0]
valid_midline = skeletonize_sites[inlier_indices]
return valid_midline
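# Hedged usage sketch with made-up 2D sites: clean_sites keeps the skeletonization sites whose
# nearest Voronoi site lies at an ordinary distance, i.e. an inlier under all three tests above.
voro = np.stack([np.linspace(0.0, 1.0, 50), np.zeros(50)]).T   # Voronoi sites on the x-axis
skel = voro + np.array([0.0, 1e-3])                            # skeleton sites hugging that line
skel = np.vstack([skel, [[0.5, 0.3]]])                         # plus one far-off spur point
print(len(clean_sites(voro, skel)))                            # 50 -- the spur is rejected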
def periodic_weights(length, ind_1, ind_2):
"""
Generate a periodic signal of given length with maxima at ind_1 and ind_2
and minima in between, with all values between 0 and 1.
Parameters:
- length: Total length of the signal.
- ind_1: Index of the first maximum (must be less than ind_2).
- ind_2: Index of the second maximum (must be greater than ind_1).
Returns:
- A numpy array containing the periodic signal.
"""
# Create an array of zeros
signal = np.zeros(length)
# 1. Rise from 0 to maximum at ind_1
slope_rise_1 = 1 / ind_1
for i in range(ind_1):
signal[i] = slope_rise_1 * i # Linear rise from 0 to 1
# 2. Fall from maximum at ind_1 to minimum (0) at midpoint between ind_1 and ind_2
mid_point = (ind_1 + ind_2) // 2
slope_fall = -1 / (mid_point - ind_1)
for i in range(ind_1, mid_point):
signal[i] = 1 + slope_fall * (i - ind_1) # Linear fall from 1 to 0
# 3. Rise again from minimum (0) to maximum at ind_2
slope_rise_2 = 1 / (ind_2 - mid_point)
for i in range(mid_point, ind_2):
signal[i] = slope_rise_2 * (i - mid_point) # Linear rise from 0 to 1
# 4. Fall to 0 by the end (to ensure periodicity)
slope_fall_2 = -1 / (length - ind_2)
for i in range(ind_2, length):
signal[i] = 1 + slope_fall_2 * (i - ind_2) # Linear fall from 1 to 0
return signal
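# Hedged sketch (made-up indices): the weights ramp linearly up to exactly 1.0 at ind_1 and ind_2
# and back down to 0 at the midpoint between them and towards both ends of the array.
w = periodic_weights(100, 20, 70)
print(w.max(), np.argmax(w))  # 1.0 20 -- the first peak sits at ind_1, the second at ind_2
print(w[45])                  # 0.0 -- minimum at the midpoint between the two peaks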
import numpy as np
from scipy.interpolate import CubicSpline
def curvature_spline(points):
# Split the points into x and y coordinates
points = np.array(points)
x = points[:, 0]
y = points[:, 1]
# Spline interpolation with scipy
cs_x = CubicSpline(np.arange(len(x)), x)
cs_y = CubicSpline(np.arange(len(y)), y)
# Compute the first and second derivatives of the splines
t = np.linspace(0, len(x) - 1, len(points))  # parameter values for the spline (one per input point)
dx = cs_x(t, 1)
dy = cs_y(t, 1)
ddx = cs_x(t, 2)
ddy = cs_y(t, 2)
# Compute the curvature at each point
curvature = np.abs(dx * ddy - dy * ddx) / (dx ** 2 + dy ** 2) ** (3 / 2)
return curvature
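# Hedged sanity check: for points sampled on a circle of radius r the curvature returned by
# curvature_spline should be close to 1/r (median taken to ignore the open spline ends).
theta = np.linspace(0.0, 2.0 * np.pi, 400)
r = 2.0
circle = np.stack([r * np.cos(theta), r * np.sin(theta)]).T
print(np.median(curvature_spline(circle)), 1.0 / r)  # both roughly 0.5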
......@@ -178,7 +37,7 @@ def compute_midline_error(ind_1_start, ind_2_start, sortedPoly, shapelypoly):
lengths_positive = np.array([line.length for line in intersections_positive])
lengths_negative = np.array([line.length for line in intersections_negative])
thicknesses = np.min(np.stack([lengths_positive, lengths_negative]).T, axis=1)
thicknesses = (lengths_positive+lengths_negative)/2
reconstructed_pts_positive = centers + thicknesses[:, np.newaxis] * normals
reconstructed_pts_negative = centers - thicknesses[:, np.newaxis] * normals
reconstructed_pts = np.concatenate([reconstructed_pts_positive, reconstructed_pts_negative])
......@@ -186,16 +45,18 @@ def compute_midline_error(ind_1_start, ind_2_start, sortedPoly, shapelypoly):
rec_xx,rec_yy ,_= auto_concaveHull(reconstructed_pts[:,0],reconstructed_pts[:,1])
reconstructed_shapepoly = Polygon(np.stack((rec_xx,rec_yy)).T)
#compute difference of lengths
differences = np.abs(lengths_positive - lengths_negative)
#error_thickness = np.mean(differences/thicknesses)
error_thickness = np.mean(differences/thicknesses)
error_area = reconstructed_shapepoly.symmetric_difference(shapelypoly).area/shapelypoly.area
error_perimeter = abs(reconstructed_shapepoly.length-shapelypoly.length)/shapelypoly.length
error_hausdorf = reconstructed_shapepoly.hausdorff_distance(shapelypoly)
return error_area * error_perimeter * error_hausdorf
return error_area + error_perimeter + error_hausdorf +error_thickness
def extract_vk_hk(sortedPoly: pv.PolyData) -> (int, int):
voronoires = 64000
voronoires = 32000
points_orig = sortedPoly.points
points_2d_closed_refined_voronoi = pointcloud_to_profile(points_orig, voronoires)
......@@ -210,7 +71,7 @@ def extract_vk_hk(sortedPoly: pv.PolyData) -> (int, int):
indices_front_all = np.where(sortedPoly.points[:, 0] < min_x + delta_x * (test_range))[0]
indices_back_all = np.where(sortedPoly.points[:, 0] > max_x - delta_x * (test_range))[0]
stepsize = len(indices_back_all)*len(indices_front_all)//300
stepsize = len(indices_back_all)*len(indices_front_all)//400
indices_front_coarse = indices_front_all[::stepsize]
indices_back_coarse = indices_back_all[::stepsize]
......@@ -229,7 +90,7 @@ def extract_vk_hk(sortedPoly: pv.PolyData) -> (int, int):
error_data=np.array(error_data)
# get best 10% of combinations
percentile_value = np.percentile(error_data[:, 2], 10)
percentile_value = np.percentile(error_data[:, 2], 3)
lowest10 = error_data[np.where(error_data[:, 2] < percentile_value)]
front_unique = np.unique(lowest10[:, 0])
......@@ -265,108 +126,12 @@ def extract_vk_hk(sortedPoly: pv.PolyData) -> (int, int):
error_data_fine = np.array(error_data_fine)
best_combination_fine = specific_combinations_fine[np.argmin(error_data_fine, axis=0)[2]]
le_ind_best, te_ind_best = int(best_combination_fine[0]), int(best_combination_fine[1])
return le_ind_best, te_ind_best
def skeletonline_completion(diag_dist, points, points_2d_closed_refined, sites_raw_clean):
shapelypoly = Polygon(points_2d_closed_refined).convex_hull
shapelymidline = LineString(sites_raw_clean)
# Extend the shapely midline to the boundary of the polygon
# Get the coordinates of the start and end points
start_coords = np.array(shapelymidline.coords[0])
end_coords = np.array(shapelymidline.coords[-1])
# Compute the direction vector
direction_start = diag_dist * vecDir(-(shapelymidline.coords[1] - start_coords))
direction_end = diag_dist * vecDir(-(shapelymidline.coords[-2] - end_coords))
# Extend the line by diag_dist beyond both ends
extended_start = LineString([start_coords, start_coords + direction_start])
extended_end = LineString([end_coords, end_coords + direction_end])
# Compute the intersection with the polygon
intersection_start = extended_start.intersection(shapelypoly)
intersection_end = extended_end.intersection(shapelypoly)
intersection_point_start = np.array(intersection_start.coords)[1]
intersection_point_end = np.array(intersection_end.coords)[1]
# find closest point index of points and intersections
le_ind = findNearest(points[:, :2], intersection_point_start)
te_ind = findNearest(points[:, :2], intersection_point_end)
skeleton_points = np.concatenate([np.array([points[le_ind][:2]]), sites_raw_clean, np.array([points[te_ind][:2]])])
zeros_column = np.zeros((skeleton_points.shape[0], 1))
skeletonline_complete = pv.PolyData(np.hstack((skeleton_points, zeros_column)))
return le_ind, te_ind, skeletonline_complete
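# Hedged sketch of the core shapely step used above, with made-up coordinates: extend the last
# midline segment well beyond the hull and read the boundary hit off the clipped intersection
# (reuses the module-level shapely and numpy imports).
hull = Polygon([(0.0, 0.0), (4.0, 0.0), (4.0, 2.0), (0.0, 2.0)])
end, prev = np.array([3.0, 1.0]), np.array([2.0, 1.0])
direction = (end - prev) / np.linalg.norm(end - prev)
probe = LineString([end, end + 10.0 * direction])  # 10.0 plays the role of diag_dist
print(list(probe.intersection(hull).coords)[1])    # (4.0, 1.0) -- the point on the hull boundary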
def voronoi_skeleton_sites(points_2d_closed_refined_voronoi):
vor = Voronoi(points_2d_closed_refined_voronoi)
polygon = shapely.geometry.Polygon(points_2d_closed_refined_voronoi).convex_hull
is_inside = [shapely.geometry.Point(i).within(polygon) for i in vor.vertices]
voronoi_sites_inside = vor.vertices[is_inside]
sort_indices = np.argsort(voronoi_sites_inside[:, 0])
sites_inside_sorted = voronoi_sites_inside[sort_indices]
return sites_inside_sorted
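# Hedged sketch (made-up outline): for a long thin rectangle the interior Voronoi vertices line up
# along the horizontal midline, and the function returns them sorted by x.
xs = np.linspace(0.0, 10.0, 200)
outline = np.vstack([np.stack([xs, np.ones_like(xs)]).T,          # top edge, y = 1
                     np.stack([xs, np.zeros_like(xs)]).T[::-1]])  # bottom edge, y = 0
sites = voronoi_skeleton_sites(outline)
print(sites[:, 1].min(), sites[:, 1].max())  # both close to 0.5
print(np.all(np.diff(sites[:, 0]) >= 0))     # True -- sorted by x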
def skeletonize_skeleton_sites(points_2d_closed_refined_skeletonize):
res = len(points_2d_closed_refined_skeletonize)
dx = np.min(points_2d_closed_refined_skeletonize[:, 0])
dy = np.min(points_2d_closed_refined_skeletonize[:, 1])
maxx = np.max(points_2d_closed_refined_skeletonize[:, 0])
maxy = np.max(points_2d_closed_refined_skeletonize[:, 1])
scale = max(maxx - dx, maxy - dy)
factor = res
px = (points_2d_closed_refined_skeletonize[:, 0] - dx) / scale * factor
py = (points_2d_closed_refined_skeletonize[:, 1] - dy) / scale * factor
polygon = np.stack((px, py)).T
image_size = (res, res) # Image size
midline, img = compute_midline(polygon, image_size)
# Find midline points
xx_idx, yy_idx = np.where(midline > 0)
midline_points = np.column_stack((xx_idx, yy_idx))
mxx = (midline_points[:, 0]) / factor * scale + dy
myy = (midline_points[:, 1]) / factor * scale + dx
midline_skeletonize = np.stack([myy, mxx]).T
return midline_skeletonize
def polygon_to_binary_image(polygon, image_size):
# Create a blank image
img = np.zeros(image_size, dtype=np.uint8)
# Create an array of polygon vertices
vertices = np.array([polygon], dtype=np.int64)
# Fill the polygon on the image
cv2.fillPoly(img, vertices, color=255)
return img
def compute_midline(polygon, image_size):
# Convert polygon to binary image
binary_img = polygon_to_binary_image(polygon, image_size)
# crop all-zero rows of the binary_img
binary_img = binary_img[~np.all(binary_img == 0, axis=1)]
# Apply skeletonization
skeleton = skeletonize(binary_img, method='lee')
return skeleton, binary_img
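# Hedged sketch of the skeletonization step used by compute_midline, on a synthetic binary strip:
# the filled 20-pixel-tall band thins down to a roughly one-pixel-wide line near its centre.
strip = np.zeros((60, 200), dtype=np.uint8)
strip[20:40, 10:190] = 1                                # foreground strip
thin = skeletonize(strip, method='lee')
print(np.count_nonzero(strip), np.count_nonzero(thin))  # 3600 foreground pixels vs. a thin skeleton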
def pointcloud_to_profile(points, res):
......
......@@ -8,7 +8,7 @@ def test_symmetric_airfoil_nostagger():
points = pv.PolyData(np.stack([xs, ys, np.zeros(len(xs))]).T)
blade = Blade2D(points)
blade.compute_all_frompoints()
bladepts = blade.sortedpointsrolled_pv.points
bladepts = blade.sortedpoints_pv.points
xs, ys = bladepts[:, 0], bladepts[:, 1]
ite = blade.ite
......@@ -31,7 +31,7 @@ def test_symmetric_airfoil_stagger():
blade = Blade2D(points)
blade.compute_all_frompoints()
bladepts = blade.sortedpointsrolled_pv.points
bladepts = blade.sortedpoints_pv.points
xs, ys = bladepts[:, 0], bladepts[:, 1]
ite = blade.ite
......