thin wall: medial axis:
* corrections from review
* more functions, for clearer code
* the frontier is now simplified together with the anchor, to avoid weird edge cases
* new post-process: remove "curve edges" before merging
* new post-process: cube corner: the small bit that goes from the Voronoi corner to the cube corner is now a "pulling string" that pulls the Voronoi corner a little, to make a nicer cube
* _variable_width: reduce the threshold for creating a new extrusion to half the threshold used to create segments (otherwise it doesn't create enough of them)
This commit is contained in: parent 75e79189a2, commit 619370950c
@@ -136,6 +136,8 @@ add_library(libslic3r STATIC
     ${LIBDIR}/libslic3r/libslic3r.h
     ${LIBDIR}/libslic3r/Line.cpp
     ${LIBDIR}/libslic3r/Line.hpp
+    ${LIBDIR}/libslic3r/MedialAxis.cpp
+    ${LIBDIR}/libslic3r/MedialAxis.hpp
     ${LIBDIR}/libslic3r/Model.cpp
     ${LIBDIR}/libslic3r/Model.hpp
     ${LIBDIR}/libslic3r/ModelArrange.hpp
@@ -506,7 +508,7 @@ if (WIN32)
     endif ()

     # SLIC3R_MSVC_PDB
-    if (MSVC AND SLIC3R_MSVC_PDB AND ${CMAKE_BUILD_TYPE} STREQUAL "Release")
+    if (MSVC AND SLIC3R_MSVC_PDB AND "${CMAKE_BUILD_TYPE}" STREQUAL "Release")
         set_target_properties(XS PROPERTIES
             COMPILE_FLAGS "/Zi"
             LINK_FLAGS "/DEBUG /OPT:REF /OPT:ICF"
@@ -1,5 +1,6 @@
 #include "BoundingBox.hpp"
 #include "ExPolygon.hpp"
+#include "MedialAxis.hpp"
 #include "Geometry.hpp"
 #include "Polygon.hpp"
 #include "Line.hpp"
@@ -168,13 +169,15 @@ ExPolygon::overlaps(const ExPolygon &other) const
     return ! other.contour.points.empty() && this->contains_b(other.contour.points.front());
 }

-void ExPolygon::simplify_p(double tolerance, Polygons* polygons) const
+void
+ExPolygon::simplify_p(double tolerance, Polygons* polygons) const
 {
     Polygons pp = this->simplify_p(tolerance);
     polygons->insert(polygons->end(), pp.begin(), pp.end());
 }

-Polygons ExPolygon::simplify_p(double tolerance) const
+Polygons
+ExPolygon::simplify_p(double tolerance) const
 {
     Polygons pp;
     pp.reserve(this->holes.size() + 1);
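A minimal sketch, not part of the diff, of how the two simplify overloads above are typically driven; it assumes a populated ExPolygon and libslic3r's scaled coordinates (scale_ converts mm to scaled units):

    #include "ExPolygon.hpp"
    #include "libslic3r.h"

    // Sketch only: collapse detail finer than ~0.05 mm (scaled) from an ExPolygon.
    void simplify_example(const Slic3r::ExPolygon &ep)
    {
        Slic3r::Polygons as_polygons;
        ep.simplify_p(scale_(0.05), &as_polygons);                      // contour + holes, then simplify_polygons()
        Slic3r::ExPolygons as_expolygons = ep.simplify(scale_(0.05));   // same result, re-unioned into ExPolygons
        (void)as_expolygons;
    }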
@@ -196,663 +199,50 @@ Polygons ExPolygon::simplify_p(double tolerance) const
     return simplify_polygons(pp);
 }

-ExPolygons ExPolygon::simplify(double tolerance) const
+ExPolygons
+ExPolygon::simplify(double tolerance) const
 {
     return union_ex(this->simplify_p(tolerance));
 }

-void ExPolygon::simplify(double tolerance, ExPolygons* expolygons) const
+void
+ExPolygon::simplify(double tolerance, ExPolygons* expolygons) const
 {
     append(*expolygons, this->simplify(tolerance));
 }

/// remove point that are at SCALED_EPSILON * 2 distance.
|
||||
void remove_point_too_near(ThickPolyline* to_reduce) {
|
||||
const int32_t smallest = SCALED_EPSILON * 2;
|
||||
uint32_t id = 1;
|
||||
while (id < to_reduce->points.size() - 2) {
|
||||
uint32_t newdist = min(to_reduce->points[id].distance_to(to_reduce->points[id - 1])
|
||||
, to_reduce->points[id].distance_to(to_reduce->points[id + 1]));
|
||||
if (newdist < smallest) {
|
||||
to_reduce->points.erase(to_reduce->points.begin() + id);
|
||||
to_reduce->width.erase(to_reduce->width.begin() + id);
|
||||
} else {
|
||||
++id;
|
||||
}
|
||||
//simplier than simplify
|
||||
void
|
||||
ExPolygon::remove_point_too_near(const coord_t tolerance) {
|
||||
size_t id = 1;
|
||||
while (id < this->contour.points.size() - 1) {
|
||||
size_t newdist = min(this->contour.points[id].distance_to(this->contour.points[id - 1])
|
||||
, this->contour.points[id].distance_to(this->contour.points[id + 1]));
|
||||
if (newdist < tolerance) {
|
||||
this->contour.points.erase(this->contour.points.begin() + id);
|
||||
newdist = this->contour.points[id].distance_to(this->contour.points[id - 1]);
|
||||
}
|
||||
}
|
||||
|
||||
/// add points from pattern to to_modify at the same % of the length
|
||||
/// so not add if an other point is present at the correct position
|
||||
void add_point_same_percent(ThickPolyline* pattern, ThickPolyline* to_modify) {
|
||||
const double to_modify_length = to_modify->length();
|
||||
const double percent_epsilon = SCALED_EPSILON / to_modify_length;
|
||||
const double pattern_length = pattern->length();
|
||||
|
||||
double percent_length = 0;
|
||||
for (uint32_t idx_point = 1; idx_point < pattern->points.size() - 1; ++idx_point) {
|
||||
percent_length += pattern->points[idx_point-1].distance_to(pattern->points[idx_point]) / pattern_length;
|
||||
//find position
|
||||
uint32_t idx_other = 1;
|
||||
double percent_length_other_before = 0;
|
||||
double percent_length_other = 0;
|
||||
while (idx_other < to_modify->points.size()) {
|
||||
percent_length_other_before = percent_length_other;
|
||||
percent_length_other += to_modify->points[idx_other-1].distance_to(to_modify->points[idx_other])
|
||||
/ to_modify_length;
|
||||
if (percent_length_other > percent_length - percent_epsilon) {
|
||||
//if higher (we have gone over it)
|
||||
break;
|
||||
}
|
||||
++idx_other;
|
||||
}
|
||||
if (percent_length_other > percent_length + percent_epsilon) {
|
||||
//insert a new point before the position
|
||||
double percent_dist = (percent_length - percent_length_other_before) / (percent_length_other - percent_length_other_before);
|
||||
coordf_t new_width = to_modify->width[idx_other - 1] * (1 - percent_dist);
|
||||
new_width += to_modify->width[idx_other] * (percent_dist);
|
||||
Point new_point;
|
||||
new_point.x = (coord_t)((double)(to_modify->points[idx_other - 1].x) * (1 - percent_dist));
|
||||
new_point.x += (coord_t)((double)(to_modify->points[idx_other].x) * (percent_dist));
|
||||
new_point.y = (coord_t)((double)(to_modify->points[idx_other - 1].y) * (1 - percent_dist));
|
||||
new_point.y += (coord_t)((double)(to_modify->points[idx_other].y) * (percent_dist));
|
||||
to_modify->width.insert(to_modify->width.begin() + idx_other, new_width);
|
||||
to_modify->points.insert(to_modify->points.begin() + idx_other, new_point);
|
||||
//go to next one
|
||||
//if you removed a point, it check if the next one isn't too near from the previous one.
|
||||
// if not, it byepass it.
|
||||
if (newdist > tolerance) {
|
||||
++id;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// find the nearest angle in the contour (or 2 nearest if it's difficult to choose)
|
||||
/// return 1 for an angle of 90° and 0 for an angle of 0° or 180°
|
||||
double get_coeff_from_angle_countour(Point &point, const ExPolygon &contour) {
|
||||
double nearestDist = point.distance_to(contour.contour.points.front());
|
||||
Point nearest = contour.contour.points.front();
|
||||
uint32_t id_nearest = 0;
|
||||
double nearDist = nearestDist;
|
||||
Point near = nearest;
|
||||
uint32_t id_near=0;
|
||||
for (uint32_t id_point = 1; id_point < contour.contour.points.size(); ++id_point) {
|
||||
if (nearestDist > point.distance_to(contour.contour.points[id_point])) {
|
||||
nearestDist = point.distance_to(contour.contour.points[id_point]);
|
||||
near = nearest;
|
||||
nearest = contour.contour.points[id_point];
|
||||
id_near = id_nearest;
|
||||
id_nearest = id_point;
|
||||
}
|
||||
if (this->contour.points.front().distance_to(this->contour.points.back()) < tolerance) {
|
||||
this->contour.points.erase(this->contour.points.end() -1);
|
||||
}
|
||||
double angle = 0;
|
||||
Point point_before = id_nearest == 0 ? contour.contour.points.back() : contour.contour.points[id_nearest - 1];
|
||||
Point point_after = id_nearest == contour.contour.points.size()-1 ? contour.contour.points.front() : contour.contour.points[id_nearest + 1];
|
||||
//compute angle
|
||||
angle = min(nearest.ccw_angle(point_before, point_after), nearest.ccw_angle(point_after, point_before));
|
||||
//compute the diff from 90°
|
||||
angle = abs(angle - PI / 2);
|
||||
if (near != nearest && max(nearestDist, nearDist) + SCALED_EPSILON < nearest.distance_to(near)) {
|
||||
//not only nearest
|
||||
Point point_before = id_near == 0 ? contour.contour.points.back() : contour.contour.points[id_near - 1];
|
||||
Point point_after = id_near == contour.contour.points.size() - 1 ? contour.contour.points.front() : contour.contour.points[id_near + 1];
|
||||
double angle2 = min(nearest.ccw_angle(point_before, point_after), nearest.ccw_angle(point_after, point_before));
|
||||
angle2 = abs(angle - PI / 2);
|
||||
angle = (angle + angle2) / 2;
|
||||
}
|
||||
|
||||
return 1-(angle/(PI/2));
|
||||
}
|
||||
|
||||
void
|
||||
ExPolygon::medial_axis(const ExPolygon &bounds, double max_width, double min_width, ThickPolylines* polylines, double height) const
|
||||
{
|
||||
// init helper object
|
||||
Slic3r::Geometry::MedialAxis ma(max_width, min_width, this);
|
||||
ma.lines = this->lines();
|
||||
|
||||
// compute the Voronoi diagram and extract medial axis polylines
|
||||
ThickPolylines pp;
|
||||
ma.build(&pp);
|
||||
|
||||
|
||||
//{
|
||||
// stringstream stri;
|
||||
// stri << "medial_axis" << id << ".svg";
|
||||
// SVG svg(stri.str());
|
||||
// svg.draw(bounds);
|
||||
// svg.draw(*this);
|
||||
// svg.draw(pp);
|
||||
// svg.Close();
|
||||
//}
|
||||
|
||||
/* Find the maximum width returned; we're going to use this for validating and
|
||||
filtering the output segments. */
|
||||
double max_w = 0;
|
||||
for (ThickPolylines::const_iterator it = pp.begin(); it != pp.end(); ++it)
|
||||
max_w = fmaxf(max_w, *std::max_element(it->width.begin(), it->width.end()));
|
||||
|
||||
concatThickPolylines(pp);
|
||||
//reoder pp by length (ascending) It's really important to do that to avoid building the line from the width insteand of the length
|
||||
std::sort(pp.begin(), pp.end(), [](const ThickPolyline & a, const ThickPolyline & b) { return a.length() < b.length(); });
|
||||
|
||||
// Aligned fusion: Fusion the bits at the end of lines by "increasing thickness"
|
||||
// For that, we have to find other lines,
|
||||
// and with a next point no more distant than the max width.
|
||||
// Then, we can merge the bit from the first point to the second by following the mean.
|
||||
//
|
||||
int id_f = 0;
|
||||
bool changes = true;
|
||||
|
||||
|
||||
while (changes) {
|
||||
changes = false;
|
||||
for (size_t i = 0; i < pp.size(); ++i) {
|
||||
ThickPolyline& polyline = pp[i];
|
||||
|
||||
//simple check to see if i can be fusionned
|
||||
if (!polyline.endpoints.first && !polyline.endpoints.second) continue;
|
||||
|
||||
|
||||
ThickPolyline* best_candidate = nullptr;
|
||||
float best_dot = -1;
|
||||
int best_idx = 0;
|
||||
double dot_poly_branch = 0;
|
||||
double dot_candidate_branch = 0;
|
||||
|
||||
// find another polyline starting here
|
||||
for (size_t j = i + 1; j < pp.size(); ++j) {
|
||||
ThickPolyline& other = pp[j];
|
||||
if (polyline.last_point().coincides_with(other.last_point())) {
|
||||
polyline.reverse();
|
||||
other.reverse();
|
||||
} else if (polyline.first_point().coincides_with(other.last_point())) {
|
||||
other.reverse();
|
||||
} else if (polyline.first_point().coincides_with(other.first_point())) {
|
||||
} else if (polyline.last_point().coincides_with(other.first_point())) {
|
||||
polyline.reverse();
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
//std::cout << " try : " << i << ":" << j << " : " <<
|
||||
// (polyline.points.size() < 2 && other.points.size() < 2) <<
|
||||
// (!polyline.endpoints.second || !other.endpoints.second) <<
|
||||
// ((polyline.points.back().distance_to(other.points.back())
|
||||
// + (polyline.width.back() + other.width.back()) / 4)
|
||||
// > max_width*1.05) <<
|
||||
// (abs(polyline.length() - other.length()) > max_width / 2) << "\n";
|
||||
|
||||
//// mergeable tests
|
||||
if (polyline.points.size() < 2 && other.points.size() < 2) continue;
|
||||
if (!polyline.endpoints.second || !other.endpoints.second) continue;
|
||||
// test if the new width will not be too big if a fusion occur
|
||||
//note that this isn't the real calcul. It's just to avoid merging lines too far apart.
|
||||
if (
|
||||
((polyline.points.back().distance_to(other.points.back())
|
||||
+ (polyline.width.back() + other.width.back()) / 4)
|
||||
> max_width*1.05))
|
||||
continue;
|
||||
// test if the lines are not too different in length.
|
||||
if (abs(polyline.length() - other.length()) > max_width / 2) continue;
|
||||
|
||||
|
||||
//test if we don't merge with something too different and without any relevance.
|
||||
double coeffSizePolyI = 1;
|
||||
if (polyline.width.back() == 0) {
|
||||
coeffSizePolyI = 0.1 + 0.9*get_coeff_from_angle_countour(polyline.points.back(), *this);
|
||||
}
|
||||
double coeffSizeOtherJ = 1;
|
||||
if (other.width.back() == 0) {
|
||||
coeffSizeOtherJ = 0.1+0.9*get_coeff_from_angle_countour(other.points.back(), *this);
|
||||
}
|
||||
if (abs(polyline.length()*coeffSizePolyI - other.length()*coeffSizeOtherJ) > max_width / 2) continue;
|
||||
|
||||
//compute angle to see if it's better than previous ones (straighter = better).
|
||||
Pointf v_poly(polyline.lines().front().vector().x, polyline.lines().front().vector().y);
|
||||
v_poly.scale(1 / std::sqrt(v_poly.x*v_poly.x + v_poly.y*v_poly.y));
|
||||
Pointf v_other(other.lines().front().vector().x, other.lines().front().vector().y);
|
||||
v_other.scale(1 / std::sqrt(v_other.x*v_other.x + v_other.y*v_other.y));
|
||||
float other_dot = v_poly.x*v_other.x + v_poly.y*v_other.y;
|
||||
|
||||
// Get the branch/line in wich we may merge, if possible
|
||||
// with that, we can decide what is important, and how we can merge that.
|
||||
// angle_poly - angle_candi =90° => one is useless
|
||||
// both angle are equal => both are useful with same strength
|
||||
// ex: Y => | both are useful to crete a nice line
|
||||
// ex2: TTTTT => ----- these 90° useless lines should be discarded
|
||||
bool find_main_branch = false;
|
||||
int biggest_main_branch_id = 0;
|
||||
int biggest_main_branch_length = 0;
|
||||
for (size_t k = 0; k < pp.size(); ++k) {
|
||||
//std::cout << "try to find main : " << k << " ? " << i << " " << j << " ";
|
||||
if (k == i | k == j) continue;
|
||||
ThickPolyline& main = pp[k];
|
||||
if (polyline.first_point().coincides_with(main.last_point())) {
|
||||
main.reverse();
|
||||
if (!main.endpoints.second)
|
||||
find_main_branch = true;
|
||||
else if (biggest_main_branch_length < main.length()) {
|
||||
biggest_main_branch_id = k;
|
||||
biggest_main_branch_length = main.length();
|
||||
}
|
||||
} else if (polyline.first_point().coincides_with(main.first_point())) {
|
||||
if (!main.endpoints.second)
|
||||
find_main_branch = true;
|
||||
else if (biggest_main_branch_length < main.length()) {
|
||||
biggest_main_branch_id = k;
|
||||
biggest_main_branch_length = main.length();
|
||||
}
|
||||
}
|
||||
if (find_main_branch) {
|
||||
//use this variable to store the good index and break to compute it
|
||||
biggest_main_branch_id = k;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!find_main_branch && biggest_main_branch_length == 0) {
|
||||
// nothing -> it's impossible!
|
||||
dot_poly_branch = 0.707;
|
||||
dot_candidate_branch = 0.707;
|
||||
//std::cout << "no main branch... impossible!!\n";
|
||||
} else if (!find_main_branch &&
|
||||
(pp[biggest_main_branch_id].length() < polyline.length() || pp[biggest_main_branch_id].length() < other.length()) ){
|
||||
//the main branch should have no endpoint or be bigger!
|
||||
//here, it have an endpoint, and is not the biggest -> bad!
|
||||
continue;
|
||||
} else {
|
||||
//compute the dot (biggest_main_branch_id)
|
||||
Pointf v_poly(polyline.lines().front().vector().x, polyline.lines().front().vector().y);
|
||||
v_poly.scale(1 / std::sqrt(v_poly.x*v_poly.x + v_poly.y*v_poly.y));
|
||||
Pointf v_candid(other.lines().front().vector().x, other.lines().front().vector().y);
|
||||
v_candid.scale(1 / std::sqrt(v_candid.x*v_candid.x + v_candid.y*v_candid.y));
|
||||
Pointf v_branch(-pp[biggest_main_branch_id].lines().front().vector().x, -pp[biggest_main_branch_id].lines().front().vector().y);
|
||||
v_branch.scale(1 / std::sqrt(v_branch.x*v_branch.x + v_branch.y*v_branch.y));
|
||||
dot_poly_branch = v_poly.x*v_branch.x + v_poly.y*v_branch.y;
|
||||
dot_candidate_branch = v_candid.x*v_branch.x + v_candid.y*v_branch.y;
|
||||
if (dot_poly_branch < 0) dot_poly_branch = 0;
|
||||
if (dot_candidate_branch < 0) dot_candidate_branch = 0;
|
||||
}
|
||||
//test if it's useful to merge or not
|
||||
//ie, don't merge 'T' but ok for 'Y', merge only lines of not disproportionate different length (ratio max: 4)
|
||||
if (dot_poly_branch < 0.1 || dot_candidate_branch < 0.1 ||
|
||||
(polyline.length()>other.length() ? polyline.length() / other.length() : other.length() / polyline.length()) > 4) {
|
||||
continue;
|
||||
}
|
||||
if (other_dot > best_dot) {
|
||||
best_candidate = &other;
|
||||
best_idx = j;
|
||||
best_dot = other_dot;
|
||||
}
|
||||
}
|
||||
if (best_candidate != nullptr) {
|
||||
// delete very near points
|
||||
remove_point_too_near(&polyline);
|
||||
remove_point_too_near(best_candidate);
|
||||
|
||||
// add point at the same pos than the other line to have a nicer fusion
|
||||
add_point_same_percent(&polyline, best_candidate);
|
||||
add_point_same_percent(best_candidate, &polyline);
|
||||
|
||||
//get the angle of the nearest points of the contour to see : _| (good) \_ (average) __(bad)
|
||||
//sqrt because the result are nicer this way: don't over-penalize /_ angles
|
||||
//TODO: try if we can achieve a better result if we use a different algo if the angle is <90°
|
||||
const double coeff_angle_poly = (get_coeff_from_angle_countour(polyline.points.back(), *this));
|
||||
const double coeff_angle_candi = (get_coeff_from_angle_countour(best_candidate->points.back(), *this));
|
||||
|
||||
//this will encourage to follow the curve, a little, because it's shorter near the center
|
||||
//without that, it tends to go to the outter rim.
|
||||
double weight_poly = 2 - polyline.length() / max(polyline.length(), best_candidate->length());
|
||||
double weight_candi = 2 - best_candidate->length() / max(polyline.length(), best_candidate->length());
|
||||
weight_poly *= coeff_angle_poly;
|
||||
weight_candi *= coeff_angle_candi;
|
||||
const double coeff_poly = (dot_poly_branch * weight_poly) / (dot_poly_branch * weight_poly + dot_candidate_branch * weight_candi);
|
||||
const double coeff_candi = 1.0 - coeff_poly;
|
||||
//iterate the points
|
||||
// as voronoi should create symetric thing, we can iterate synchonously
|
||||
unsigned int idx_point = 1;
|
||||
while (idx_point < min(polyline.points.size(), best_candidate->points.size())) {
|
||||
//fusion
|
||||
polyline.points[idx_point].x = polyline.points[idx_point].x * coeff_poly + best_candidate->points[idx_point].x * coeff_candi;
|
||||
polyline.points[idx_point].y = polyline.points[idx_point].y * coeff_poly + best_candidate->points[idx_point].y * coeff_candi;
|
||||
|
||||
// The width decrease with distance from the centerline.
|
||||
// This formula is what works the best, even if it's not perfect (created empirically). 0->3% error on a gap fill on some tests.
|
||||
//If someone find an other formula based on the properties of the voronoi algorithm used here, and it works better, please use it.
|
||||
//or maybe just use the distance to nearest edge in bounds...
|
||||
double value_from_current_width = 0.5*polyline.width[idx_point] * dot_poly_branch / max(dot_poly_branch, dot_candidate_branch);
|
||||
value_from_current_width += 0.5*best_candidate->width[idx_point] * dot_candidate_branch / max(dot_poly_branch, dot_candidate_branch);
|
||||
double value_from_dist = 2 * polyline.points[idx_point].distance_to(best_candidate->points[idx_point]);
|
||||
value_from_dist *= sqrt(min(dot_poly_branch, dot_candidate_branch) / max(dot_poly_branch, dot_candidate_branch));
|
||||
polyline.width[idx_point] = value_from_current_width + value_from_dist;
|
||||
//failsafe
|
||||
if (polyline.width[idx_point] > max_width) polyline.width[idx_point] = max_width;
|
||||
|
||||
++idx_point;
|
||||
}
|
||||
if (idx_point < best_candidate->points.size()) {
|
||||
if (idx_point + 1 < best_candidate->points.size()) {
|
||||
//create a new polyline
|
||||
pp.emplace_back();
|
||||
pp.back().endpoints.first = true;
|
||||
pp.back().endpoints.second = best_candidate->endpoints.second;
|
||||
for (int idx_point_new_line = idx_point; idx_point_new_line < best_candidate->points.size(); ++idx_point_new_line) {
|
||||
pp.back().points.push_back(best_candidate->points[idx_point_new_line]);
|
||||
pp.back().width.push_back(best_candidate->width[idx_point_new_line]);
|
||||
}
|
||||
} else {
|
||||
//Add last point
|
||||
polyline.points.push_back(best_candidate->points[idx_point]);
|
||||
polyline.width.push_back(best_candidate->width[idx_point]);
|
||||
//select if an end opccur
|
||||
polyline.endpoints.second &= best_candidate->endpoints.second;
|
||||
}
|
||||
|
||||
} else {
|
||||
//select if an end opccur
|
||||
polyline.endpoints.second &= best_candidate->endpoints.second;
|
||||
}
|
||||
|
||||
//remove points that are the same or too close each other, ie simplify
|
||||
for (unsigned int idx_point = 1; idx_point < polyline.points.size(); ++idx_point) {
|
||||
if (polyline.points[idx_point - 1].distance_to(polyline.points[idx_point]) < SCALED_EPSILON) {
|
||||
if (idx_point < polyline.points.size() -1) {
|
||||
polyline.points.erase(polyline.points.begin() + idx_point);
|
||||
polyline.width.erase(polyline.width.begin() + idx_point);
|
||||
} else {
|
||||
polyline.points.erase(polyline.points.begin() + idx_point - 1);
|
||||
polyline.width.erase(polyline.width.begin() + idx_point - 1);
|
||||
}
|
||||
--idx_point;
|
||||
}
|
||||
}
|
||||
//remove points that are outside of the geometry
|
||||
for (unsigned int idx_point = 0; idx_point < polyline.points.size(); ++idx_point) {
|
||||
if (!bounds.contains_b(polyline.points[idx_point])) {
|
||||
polyline.points.erase(polyline.points.begin() + idx_point);
|
||||
polyline.width.erase(polyline.width.begin() + idx_point);
|
||||
--idx_point;
|
||||
}
|
||||
}
|
||||
if (polyline.points.size() < 2) {
|
||||
//remove self
|
||||
pp.erase(pp.begin() + i);
|
||||
--i;
|
||||
--best_idx;
|
||||
}
|
||||
|
||||
pp.erase(pp.begin() + best_idx);
|
||||
changes = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (changes) {
|
||||
concatThickPolylines(pp);
|
||||
///reorder, in case of change
|
||||
std::sort(pp.begin(), pp.end(), [](const ThickPolyline & a, const ThickPolyline & b) { return a.length() < b.length(); });
|
||||
}
|
||||
}
|
||||
|
||||
// remove too small extrusion at start & end of polylines
|
||||
changes = false;
|
||||
for (size_t i = 0; i < pp.size(); ++i) {
|
||||
ThickPolyline& polyline = pp[i];
|
||||
// remove bits with too small extrusion
|
||||
while (polyline.points.size() > 1 && polyline.width.front() < min_width && polyline.endpoints.first) {
|
||||
//try to split if possible
|
||||
if (polyline.width[1] > min_width) {
|
||||
double percent_can_keep = (min_width - polyline.width[0]) / (polyline.width[1] - polyline.width[0]);
|
||||
if (polyline.points.front().distance_to(polyline.points[1]) * percent_can_keep > max_width / 2
|
||||
&& polyline.points.front().distance_to(polyline.points[1])* (1 - percent_can_keep) > max_width / 2) {
|
||||
//Can split => move the first point and assign a new weight.
|
||||
//the update of endpoints wil be performed in concatThickPolylines
|
||||
polyline.points.front().x = polyline.points.front().x +
|
||||
(coord_t)((polyline.points[1].x - polyline.points.front().x) * percent_can_keep);
|
||||
polyline.points.front().y = polyline.points.front().y +
|
||||
(coord_t)((polyline.points[1].y - polyline.points.front().y) * percent_can_keep);
|
||||
polyline.width.front() = min_width;
|
||||
changes = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
polyline.points.erase(polyline.points.begin());
|
||||
polyline.width.erase(polyline.width.begin());
|
||||
changes = true;
|
||||
}
|
||||
while (polyline.points.size() > 1 && polyline.width.back() < min_width && polyline.endpoints.second) {
|
||||
//try to split if possible
|
||||
if (polyline.width[polyline.points.size()-2] > min_width) {
|
||||
double percent_can_keep = (min_width - polyline.width.back()) / (polyline.width[polyline.points.size() - 2] - polyline.width.back());
|
||||
if (polyline.points.back().distance_to(polyline.points[polyline.points.size() - 2]) * percent_can_keep > max_width / 2
|
||||
&& polyline.points.back().distance_to(polyline.points[polyline.points.size() - 2]) * (1-percent_can_keep) > max_width / 2) {
|
||||
//Can split => move the first point and assign a new weight.
|
||||
//the update of endpoints wil be performed in concatThickPolylines
|
||||
polyline.points.back().x = polyline.points.back().x +
|
||||
(coord_t)((polyline.points[polyline.points.size() - 2].x - polyline.points.back().x) * percent_can_keep);
|
||||
polyline.points.back().y = polyline.points.back().y +
|
||||
(coord_t)((polyline.points[polyline.points.size() - 2].y - polyline.points.back().y) * percent_can_keep);
|
||||
polyline.width.back() = min_width;
|
||||
changes = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
polyline.points.erase(polyline.points.end()-1);
|
||||
polyline.width.erase(polyline.width.end() - 1);
|
||||
changes = true;
|
||||
}
|
||||
if (polyline.points.size() < 2) {
|
||||
//remove self if too small
|
||||
pp.erase(pp.begin() + i);
|
||||
--i;
|
||||
}
|
||||
}
|
||||
if (changes) concatThickPolylines(pp);
|
||||
|
||||
// Loop through all returned polylines in order to extend their endpoints to the
|
||||
// expolygon boundaries
|
||||
for (size_t i = 0; i < pp.size(); ++i) {
|
||||
ThickPolyline& polyline = pp[i];
|
||||
|
||||
// extend initial and final segments of each polyline if they're actual endpoints
|
||||
// We assign new endpoints to temporary variables because in case of a single-line
|
||||
// polyline, after we extend the start point it will be caught by the intersection()
|
||||
// call, so we keep the inner point until we perform the second intersection() as well
|
||||
Point new_front = polyline.points.front();
|
||||
Point new_back = polyline.points.back();
|
||||
if (polyline.endpoints.first && !bounds.has_boundary_point(new_front)) {
|
||||
Line line(polyline.points[1], polyline.points.front());
|
||||
|
||||
// prevent the line from touching on the other side, otherwise intersection() might return that solution
|
||||
if (polyline.points.size() == 2) line.a = line.midpoint();
|
||||
|
||||
line.extend_end(max_width);
|
||||
(void)bounds.contour.first_intersection(line, &new_front);
|
||||
}
|
||||
if (polyline.endpoints.second && !bounds.has_boundary_point(new_back)) {
|
||||
Line line(
|
||||
*(polyline.points.end() - 2),
|
||||
polyline.points.back()
|
||||
);
|
||||
|
||||
// prevent the line from touching on the other side, otherwise intersection() might return that solution
|
||||
if (polyline.points.size() == 2) line.a = line.midpoint();
|
||||
line.extend_end(max_width);
|
||||
|
||||
(void)bounds.contour.first_intersection(line, &new_back);
|
||||
}
|
||||
polyline.points.front() = new_front;
|
||||
polyline.points.back() = new_back;
|
||||
|
||||
}
|
||||
|
||||
|
||||
// concatenate, but even where multiple thickpolyline join, to create nice long strait polylines
|
||||
/* If we removed any short polylines we now try to connect consecutive polylines
|
||||
in order to allow loop detection. Note that this algorithm is greedier than
|
||||
MedialAxis::process_edge_neighbors() as it will connect random pairs of
|
||||
polylines even when more than two start from the same point. This has no
|
||||
drawbacks since we optimize later using nearest-neighbor which would do the
|
||||
same, but should we use a more sophisticated optimization algorithm we should
|
||||
not connect polylines when more than two meet.
|
||||
Optimisation of the old algorithm : now we select the most "strait line" choice
|
||||
when we merge with an other line at a point with more than two meet.
|
||||
*/
|
||||
changes = false;
|
||||
for (size_t i = 0; i < pp.size(); ++i) {
|
||||
ThickPolyline& polyline = pp[i];
|
||||
if (polyline.endpoints.first && polyline.endpoints.second) continue; // optimization
|
||||
|
||||
ThickPolyline* best_candidate = nullptr;
|
||||
float best_dot = -1;
|
||||
int best_idx = 0;
|
||||
|
||||
// find another polyline starting here
|
||||
for (size_t j = i + 1; j < pp.size(); ++j) {
|
||||
ThickPolyline& other = pp[j];
|
||||
if (polyline.last_point().coincides_with(other.last_point())) {
|
||||
other.reverse();
|
||||
} else if (polyline.first_point().coincides_with(other.last_point())) {
|
||||
polyline.reverse();
|
||||
other.reverse();
|
||||
} else if (polyline.first_point().coincides_with(other.first_point())) {
|
||||
polyline.reverse();
|
||||
} else if (!polyline.last_point().coincides_with(other.first_point())) {
|
||||
continue;
|
||||
}
|
||||
|
||||
Pointf v_poly(polyline.lines().back().vector().x, polyline.lines().back().vector().y);
|
||||
v_poly.scale(1 / std::sqrt(v_poly.x*v_poly.x + v_poly.y*v_poly.y));
|
||||
Pointf v_other(other.lines().front().vector().x, other.lines().front().vector().y);
|
||||
v_other.scale(1 / std::sqrt(v_other.x*v_other.x + v_other.y*v_other.y));
|
||||
float other_dot = v_poly.x*v_other.x + v_poly.y*v_other.y;
|
||||
if (other_dot > best_dot) {
|
||||
best_candidate = &other;
|
||||
best_idx = j;
|
||||
best_dot = other_dot;
|
||||
}
|
||||
}
|
||||
if (best_candidate != nullptr) {
|
||||
|
||||
polyline.points.insert(polyline.points.end(), best_candidate->points.begin() + 1, best_candidate->points.end());
|
||||
polyline.width.insert(polyline.width.end(), best_candidate->width.begin() + 1, best_candidate->width.end());
|
||||
polyline.endpoints.second = best_candidate->endpoints.second;
|
||||
assert(polyline.width.size() == polyline.points.size());
|
||||
changes = true;
|
||||
pp.erase(pp.begin() + best_idx);
|
||||
}
|
||||
}
|
||||
if (changes) concatThickPolylines(pp);
|
||||
|
||||
//remove too thin polylines points (inside a polyline : split it)
|
||||
for (size_t i = 0; i < pp.size(); ++i) {
|
||||
ThickPolyline& polyline = pp[i];
|
||||
|
||||
// remove bits with too small extrusion
|
||||
size_t idx_point = 0;
|
||||
while (idx_point<polyline.points.size()) {
|
||||
if (polyline.width[idx_point] < min_width) {
|
||||
if (idx_point == 0) {
|
||||
//too thin at start
|
||||
polyline.points.erase(polyline.points.begin());
|
||||
polyline.width.erase(polyline.width.begin());
|
||||
idx_point = 0;
|
||||
} else if (idx_point == 1) {
|
||||
//too thin at start
|
||||
polyline.points.erase(polyline.points.begin());
|
||||
polyline.width.erase(polyline.width.begin());
|
||||
polyline.points.erase(polyline.points.begin());
|
||||
polyline.width.erase(polyline.width.begin());
|
||||
idx_point = 0;
|
||||
} else if (idx_point == polyline.points.size() - 2) {
|
||||
//too thin at (near) end
|
||||
polyline.points.erase(polyline.points.end() - 1);
|
||||
polyline.width.erase(polyline.width.end() - 1);
|
||||
polyline.points.erase(polyline.points.end() - 1);
|
||||
polyline.width.erase(polyline.width.end() - 1);
|
||||
} else if (idx_point == polyline.points.size() - 1) {
|
||||
//too thin at end
|
||||
polyline.points.erase(polyline.points.end() - 1);
|
||||
polyline.width.erase(polyline.width.end() - 1);
|
||||
} else {
|
||||
//too thin in middle : split
|
||||
pp.emplace_back();
|
||||
ThickPolyline &newone = pp.back();
|
||||
newone.points.insert(newone.points.begin(), polyline.points.begin() + idx_point + 1, polyline.points.end());
|
||||
newone.width.insert(newone.width.begin(), polyline.width.begin() + idx_point + 1, polyline.width.end());
|
||||
polyline.points.erase(polyline.points.begin() + idx_point, polyline.points.end());
|
||||
polyline.width.erase(polyline.width.begin() + idx_point, polyline.width.end());
|
||||
}
|
||||
} else idx_point++;
|
||||
|
||||
if (polyline.points.size() < 2) {
|
||||
//remove self if too small
|
||||
pp.erase(pp.begin() + i);
|
||||
--i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//remove too short polyline
|
||||
changes = true;
|
||||
while (changes) {
|
||||
changes = false;
|
||||
|
||||
double shortest_size = max_w * 2;
|
||||
int32_t shortest_idx = -1;
|
||||
for (size_t i = 0; i < pp.size(); ++i) {
|
||||
ThickPolyline& polyline = pp[i];
|
||||
// Remove the shortest polylines : polyline that are shorter than wider
|
||||
// (we can't do this check before endpoints extension and clipping because we don't
|
||||
// know how long will the endpoints be extended since it depends on polygon thickness
|
||||
// which is variable - extension will be <= max_width/2 on each side)
|
||||
if ((polyline.endpoints.first || polyline.endpoints.second)
|
||||
&& polyline.length() < max_width / 2) {
|
||||
if (shortest_size > polyline.length()) {
|
||||
shortest_size = polyline.length();
|
||||
shortest_idx = i;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
if (shortest_idx >= 0 && shortest_idx < pp.size()) {
|
||||
pp.erase(pp.begin() + shortest_idx);
|
||||
changes = true;
|
||||
}
|
||||
if (changes) concatThickPolylines(pp);
|
||||
}
|
||||
|
||||
//TODO: reduce the flow at the intersection ( + ) points ?
|
||||
|
||||
//ensure the volume extruded is correct for what we have been asked
|
||||
// => don't over-extrude
|
||||
double surface = 0;
|
||||
double volume = 0;
|
||||
for (ThickPolyline& polyline : pp) {
|
||||
for (ThickLine l : polyline.thicklines()) {
|
||||
surface += l.length() * (l.a_width + l.b_width) / 2;
|
||||
double width_mean = (l.a_width + l.b_width) / 2;
|
||||
volume += height * (width_mean - height * (1. - 0.25 * PI)) * l.length();
|
||||
}
|
||||
}
|
||||
|
||||
// compute bounds volume
|
||||
double boundsVolume = 0;
|
||||
boundsVolume += height*bounds.area();
|
||||
// add external "perimeter gap"
|
||||
double perimeterRoundGap = bounds.contour.length() * height * (1 - 0.25*PI) * 0.5;
|
||||
// add holes "perimeter gaps"
|
||||
double holesGaps = 0;
|
||||
for (auto hole = bounds.holes.begin(); hole != bounds.holes.end(); ++hole) {
|
||||
holesGaps += hole->length() * height * (1 - 0.25*PI) * 0.5;
|
||||
}
|
||||
boundsVolume += perimeterRoundGap + holesGaps;
|
||||
|
||||
if (boundsVolume < volume) {
|
||||
//reduce width
|
||||
double reduce_by = boundsVolume / volume;
|
||||
for (ThickPolyline& polyline : pp) {
|
||||
for (ThickLine l : polyline.thicklines()) {
|
||||
l.a_width *= reduce_by;
|
||||
l.b_width *= reduce_by;
|
||||
}
|
||||
}
|
||||
}
|
||||
polylines->insert(polylines->end(), pp.begin(), pp.end());
|
||||
ExPolygon::medial_axis(const ExPolygon &bounds, double max_width, double min_width, ThickPolylines* polylines, double height) const {
    ExPolygon simplifiedBounds = bounds;
    simplifiedBounds.remove_point_too_near(SCALED_RESOLUTION);
    ExPolygon simplifiedPolygon = *this;
    simplifiedPolygon.remove_point_too_near(SCALED_RESOLUTION);
    Slic3r::MedialAxis ma(simplifiedPolygon, simplifiedBounds, max_width, min_width, height);
    ma.build(polylines);
}

void
@@ -53,6 +53,7 @@ public:
     Polygons simplify_p(double tolerance) const;
     ExPolygons simplify(double tolerance) const;
     void simplify(double tolerance, ExPolygons* expolygons) const;
+    void remove_point_too_near(const coord_t tolerance);
     void medial_axis(const ExPolygon &bounds, double max_width, double min_width, ThickPolylines* polylines, double height) const;
     void medial_axis(double max_width, double min_width, Polylines* polylines) const;
     void get_trapezoids(Polygons* polygons) const;
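A rough sketch, not from the commit, of how the new remove_point_too_near() + medial_axis() pair is meant to be driven by a caller such as PerimeterGenerator::process(); the helper function and its parameters below are illustrative placeholders:

    #include "ExPolygon.hpp"
    #include "Polyline.hpp"

    // Sketch only: get variable-width centerlines for a thin region.
    Slic3r::ThickPolylines thin_wall_centerlines(const Slic3r::ExPolygon &thin_region,
                                                 const Slic3r::ExPolygon &bound,
                                                 double max_width, double min_width,
                                                 double layer_height)
    {
        Slic3r::ThickPolylines out;
        // medial_axis() first de-noises both polygons with remove_point_too_near(SCALED_RESOLUTION),
        // then delegates to the new Slic3r::MedialAxis class (see MedialAxis.cpp/.hpp added above).
        thin_region.medial_axis(bound, max_width, min_width, &out, layer_height);
        return out;
    }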
@ -850,277 +850,5 @@ private:
|
||||
const Lines &lines;
|
||||
};
|
||||
|
||||
void
|
||||
MedialAxis::build(ThickPolylines* polylines)
|
||||
{
|
||||
construct_voronoi(this->lines.begin(), this->lines.end(), &this->vd);
|
||||
|
||||
/*
|
||||
// DEBUG: dump all Voronoi edges
|
||||
{
|
||||
for (VD::const_edge_iterator edge = this->vd.edges().begin(); edge != this->vd.edges().end(); ++edge) {
|
||||
if (edge->is_infinite()) continue;
|
||||
|
||||
ThickPolyline polyline;
|
||||
polyline.points.push_back(Point( edge->vertex0()->x(), edge->vertex0()->y() ));
|
||||
polyline.points.push_back(Point( edge->vertex1()->x(), edge->vertex1()->y() ));
|
||||
polylines->push_back(polyline);
|
||||
}
|
||||
return;
|
||||
}
|
||||
*/
|
||||
|
||||
typedef const VD::vertex_type vert_t;
|
||||
typedef const VD::edge_type edge_t;
|
||||
|
||||
// collect valid edges (i.e. prune those not belonging to MAT)
|
||||
// note: this keeps twins, so it inserts twice the number of the valid edges
|
||||
this->valid_edges.clear();
|
||||
{
|
||||
std::set<const VD::edge_type*> seen_edges;
|
||||
for (VD::const_edge_iterator edge = this->vd.edges().begin(); edge != this->vd.edges().end(); ++edge) {
|
||||
// if we only process segments representing closed loops, none if the
|
||||
// infinite edges (if any) would be part of our MAT anyway
|
||||
if (edge->is_secondary() || edge->is_infinite()) continue;
|
||||
|
||||
// don't re-validate twins
|
||||
if (seen_edges.find(&*edge) != seen_edges.end()) continue; // TODO: is this needed?
|
||||
seen_edges.insert(&*edge);
|
||||
seen_edges.insert(edge->twin());
|
||||
|
||||
if (!this->validate_edge(&*edge)) continue;
|
||||
this->valid_edges.insert(&*edge);
|
||||
this->valid_edges.insert(edge->twin());
|
||||
}
|
||||
}
|
||||
this->edges = this->valid_edges;
|
||||
|
||||
// iterate through the valid edges to build polylines
|
||||
while (!this->edges.empty()) {
|
||||
const edge_t* edge = *this->edges.begin();
|
||||
|
||||
// start a polyline
|
||||
ThickPolyline polyline;
|
||||
polyline.points.push_back(Point( edge->vertex0()->x(), edge->vertex0()->y() ));
|
||||
polyline.points.push_back(Point( edge->vertex1()->x(), edge->vertex1()->y() ));
|
||||
polyline.width.push_back(this->thickness[edge].first);
|
||||
polyline.width.push_back(this->thickness[edge].second);
|
||||
|
||||
// remove this edge and its twin from the available edges
|
||||
(void)this->edges.erase(edge);
|
||||
(void)this->edges.erase(edge->twin());
|
||||
|
||||
// get next points
|
||||
this->process_edge_neighbors(edge, &polyline);
|
||||
|
||||
// get previous points
|
||||
{
|
||||
ThickPolyline rpolyline;
|
||||
this->process_edge_neighbors(edge->twin(), &rpolyline);
|
||||
polyline.points.insert(polyline.points.begin(), rpolyline.points.rbegin(), rpolyline.points.rend());
|
||||
polyline.width.insert(polyline.width.begin(), rpolyline.width.rbegin(), rpolyline.width.rend());
|
||||
polyline.endpoints.first = rpolyline.endpoints.second;
|
||||
}
|
||||
|
||||
assert(polyline.width.size() == polyline.points.size());
|
||||
|
||||
// prevent loop endpoints from being extended
|
||||
if (polyline.first_point().coincides_with(polyline.last_point())) {
|
||||
polyline.endpoints.first = false;
|
||||
polyline.endpoints.second = false;
|
||||
}
|
||||
|
||||
// append polyline to result
|
||||
polylines->push_back(polyline);
|
||||
}
|
||||
|
||||
#ifdef SLIC3R_DEBUG
|
||||
{
|
||||
static int iRun = 0;
|
||||
dump_voronoi_to_svg(this->lines, this->vd, polylines, debug_out_path("MedialAxis-%d.svg", iRun ++).c_str());
|
||||
printf("Thick lines: ");
|
||||
for (ThickPolylines::const_iterator it = polylines->begin(); it != polylines->end(); ++ it) {
|
||||
ThickLines lines = it->thicklines();
|
||||
for (ThickLines::const_iterator it2 = lines.begin(); it2 != lines.end(); ++ it2) {
|
||||
printf("%f,%f ", it2->a_width, it2->b_width);
|
||||
}
|
||||
}
|
||||
printf("\n");
|
||||
}
|
||||
#endif /* SLIC3R_DEBUG */
|
||||
}
|
||||
|
||||
void
|
||||
MedialAxis::build(Polylines* polylines)
|
||||
{
|
||||
ThickPolylines tp;
|
||||
this->build(&tp);
|
||||
polylines->insert(polylines->end(), tp.begin(), tp.end());
|
||||
}
|
||||
|
||||
void
|
||||
MedialAxis::process_edge_neighbors(const VD::edge_type* edge, ThickPolyline* polyline)
|
||||
{
|
||||
while (true) {
|
||||
// Since rot_next() works on the edge starting point but we want
|
||||
// to find neighbors on the ending point, we just swap edge with
|
||||
// its twin.
|
||||
const VD::edge_type* twin = edge->twin();
|
||||
|
||||
// count neighbors for this edge
|
||||
std::vector<const VD::edge_type*> neighbors;
|
||||
for (const VD::edge_type* neighbor = twin->rot_next(); neighbor != twin;
|
||||
neighbor = neighbor->rot_next()) {
|
||||
if (this->valid_edges.count(neighbor) > 0) neighbors.push_back(neighbor);
|
||||
}
|
||||
|
||||
// if we have a single neighbor then we can continue recursively
|
||||
if (neighbors.size() == 1) {
|
||||
const VD::edge_type* neighbor = neighbors.front();
|
||||
|
||||
// break if this is a closed loop
|
||||
if (this->edges.count(neighbor) == 0) return;
|
||||
|
||||
Point new_point(neighbor->vertex1()->x(), neighbor->vertex1()->y());
|
||||
polyline->points.push_back(new_point);
|
||||
polyline->width.push_back(this->thickness[neighbor].second);
|
||||
|
||||
(void)this->edges.erase(neighbor);
|
||||
(void)this->edges.erase(neighbor->twin());
|
||||
edge = neighbor;
|
||||
} else if (neighbors.size() == 0) {
|
||||
polyline->endpoints.second = true;
|
||||
return;
|
||||
} else {
|
||||
// T-shaped or star-shaped joint
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bool
|
||||
MedialAxis::validate_edge(const VD::edge_type* edge)
|
||||
{
|
||||
// prevent overflows and detect almost-infinite edges
|
||||
if (std::abs(edge->vertex0()->x()) > double(CLIPPER_MAX_COORD_UNSCALED) ||
|
||||
std::abs(edge->vertex0()->y()) > double(CLIPPER_MAX_COORD_UNSCALED) ||
|
||||
std::abs(edge->vertex1()->x()) > double(CLIPPER_MAX_COORD_UNSCALED) ||
|
||||
std::abs(edge->vertex1()->y()) > double(CLIPPER_MAX_COORD_UNSCALED))
|
||||
return false;
|
||||
|
||||
// construct the line representing this edge of the Voronoi diagram
|
||||
const Line line(
|
||||
Point( edge->vertex0()->x(), edge->vertex0()->y() ),
|
||||
Point( edge->vertex1()->x(), edge->vertex1()->y() )
|
||||
);
|
||||
|
||||
// discard edge if it lies outside the supplied shape
|
||||
// this could maybe be optimized (checking inclusion of the endpoints
|
||||
// might give false positives as they might belong to the contour itself)
|
||||
if (this->expolygon != NULL) {
|
||||
if (line.a.coincides_with(line.b)) {
|
||||
// in this case, contains(line) returns a false positive
|
||||
if (!this->expolygon->contains(line.a)) return false;
|
||||
} else {
|
||||
if (!this->expolygon->contains(line)) return false;
|
||||
}
|
||||
}
|
||||
|
||||
// retrieve the original line segments which generated the edge we're checking
|
||||
const VD::cell_type* cell_l = edge->cell();
|
||||
const VD::cell_type* cell_r = edge->twin()->cell();
|
||||
const Line &segment_l = this->retrieve_segment(cell_l);
|
||||
const Line &segment_r = this->retrieve_segment(cell_r);
|
||||
|
||||
/*
|
||||
SVG svg("edge.svg");
|
||||
svg.draw(*this->expolygon);
|
||||
svg.draw(line);
|
||||
svg.draw(segment_l, "red");
|
||||
svg.draw(segment_r, "blue");
|
||||
svg.Close();
|
||||
*/
|
||||
|
||||
/* Calculate thickness of the cross-section at both the endpoints of this edge.
|
||||
Our Voronoi edge is part of a CCW sequence going around its Voronoi cell
|
||||
located on the left side. (segment_l).
|
||||
This edge's twin goes around segment_r. Thus, segment_r is
|
||||
oriented in the same direction as our main edge, and segment_l is oriented
|
||||
in the same direction as our twin edge.
|
||||
We used to only consider the (half-)distances to segment_r, and that works
|
||||
whenever segment_l and segment_r are almost specular and facing. However,
|
||||
at curves they are staggered and they only face for a very little length
|
||||
(our very short edge represents such visibility).
|
||||
Both w0 and w1 can be calculated either towards cell_l or cell_r with equal
|
||||
results by Voronoi definition.
|
||||
When cell_l or cell_r don't refer to the segment but only to an endpoint, we
|
||||
calculate the distance to that endpoint instead. */
|
||||
|
||||
coordf_t w0 = cell_r->contains_segment()
|
||||
? line.a.distance_to(segment_r)*2
|
||||
: line.a.distance_to(this->retrieve_endpoint(cell_r))*2;
|
||||
|
||||
coordf_t w1 = cell_l->contains_segment()
|
||||
? line.b.distance_to(segment_l)*2
|
||||
: line.b.distance_to(this->retrieve_endpoint(cell_l))*2;
|
||||
|
||||
//don't remove the line that goes to the intersection of the contour
|
||||
// we use them to create nicer thin wall lines
|
||||
//if (cell_l->contains_segment() && cell_r->contains_segment()) {
|
||||
// // calculate the relative angle between the two boundary segments
|
||||
// double angle = fabs(segment_r.orientation() - segment_l.orientation());
|
||||
// if (angle > PI) angle = 2*PI - angle;
|
||||
// assert(angle >= 0 && angle <= PI);
|
||||
//
|
||||
// // fabs(angle) ranges from 0 (collinear, same direction) to PI (collinear, opposite direction)
|
||||
// // we're interested only in segments close to the second case (facing segments)
|
||||
// // so we allow some tolerance.
|
||||
// // this filter ensures that we're dealing with a narrow/oriented area (longer than thick)
|
||||
// // we don't run it on edges not generated by two segments (thus generated by one segment
|
||||
// // and the endpoint of another segment), since their orientation would not be meaningful
|
||||
// if (PI - angle > PI/8) {
|
||||
// // angle is not narrow enough
|
||||
//
|
||||
// // only apply this filter to segments that are not too short otherwise their
|
||||
// // angle could possibly be not meaningful
|
||||
// if (w0 < SCALED_EPSILON || w1 < SCALED_EPSILON || line.length() >= this->min_width)
|
||||
// return false;
|
||||
// }
|
||||
//} else {
|
||||
// if (w0 < SCALED_EPSILON || w1 < SCALED_EPSILON)
|
||||
// return false;
|
||||
//}
|
||||
|
||||
// don't do that before we try to fusion them
|
||||
//if (w0 < this->min_width && w1 < this->min_width)
|
||||
// return false;
|
||||
//
|
||||
|
||||
//shouldn't occur if perimeter_generator is well made
|
||||
if (w0 > this->max_width && w1 > this->max_width)
|
||||
return false;
|
||||
|
||||
this->thickness[edge] = std::make_pair(w0, w1);
|
||||
this->thickness[edge->twin()] = std::make_pair(w1, w0);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
const Line&
|
||||
MedialAxis::retrieve_segment(const VD::cell_type* cell) const
|
||||
{
|
||||
return this->lines[cell->source_index()];
|
||||
}
|
||||
|
||||
const Point&
|
||||
MedialAxis::retrieve_endpoint(const VD::cell_type* cell) const
|
||||
{
|
||||
const Line& line = this->retrieve_segment(cell);
|
||||
if (cell->source_category() == SOURCE_CATEGORY_SEGMENT_START_POINT) {
|
||||
return line.a;
|
||||
} else {
|
||||
return line.b;
|
||||
}
|
||||
}
|
||||
|
||||
} }
|
||||
|
@@ -3,13 +3,11 @@

#include "libslic3r.h"
#include "BoundingBox.hpp"
#include "MedialAxis.hpp"
#include "ExPolygon.hpp"
#include "Polygon.hpp"
#include "Polyline.hpp"

#include "boost/polygon/voronoi.hpp"
using boost::polygon::voronoi_builder;
using boost::polygon::voronoi_diagram;

namespace Slic3r { namespace Geometry {
@@ -129,34 +127,6 @@ bool arrange(
     // output
     Pointfs &positions);

-class MedialAxis {
-    public:
-    Lines lines;
-    const ExPolygon* expolygon;
-    double max_width;
-    double min_width;
-    MedialAxis(double _max_width, double _min_width, const ExPolygon* _expolygon = NULL)
-        : expolygon(_expolygon), max_width(_max_width), min_width(_min_width) {};
-    void build(ThickPolylines* polylines);
-    void build(Polylines* polylines);
-
-    private:
-    class VD : public voronoi_diagram<double> {
-    public:
-        typedef double coord_type;
-        typedef boost::polygon::point_data<coordinate_type> point_type;
-        typedef boost::polygon::segment_data<coordinate_type> segment_type;
-        typedef boost::polygon::rectangle_data<coordinate_type> rect_type;
-    };
-    VD vd;
-    std::set<const VD::edge_type*> edges, valid_edges;
-    std::map<const VD::edge_type*, std::pair<coordf_t,coordf_t> > thickness;
-    void process_edge_neighbors(const VD::edge_type* edge, ThickPolyline* polyline);
-    bool validate_edge(const VD::edge_type* edge);
-    const Line& retrieve_segment(const VD::cell_type* cell) const;
-    const Point& retrieve_endpoint(const VD::cell_type* cell) const;
-};
-
 } }

 #endif
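The class removed above is the pre-refactor Geometry::MedialAxis. For reference, a sketch of how it was driven, mirroring the old ExPolygon::medial_axis() body removed earlier in this diff:

    #include "Geometry.hpp"    // pre-refactor header that declared Geometry::MedialAxis

    // Sketch of the old call sequence: seed the helper with the boundary segments,
    // then extract the pruned Voronoi skeleton as thick polylines.
    Slic3r::ThickPolylines old_medial_axis(const Slic3r::ExPolygon &shape,
                                           double max_width, double min_width)
    {
        Slic3r::Geometry::MedialAxis ma(max_width, min_width, &shape);
        ma.lines = shape.lines();          // public member, as in the removed class
        Slic3r::ThickPolylines skeleton;
        ma.build(&skeleton);               // Voronoi diagram -> validated edges -> polylines
        return skeleton;
    }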
@@ -241,7 +241,7 @@ void PerimeterGenerator::process()
     // (actually, something larger than that still may exist due to mitering or other causes)
     coord_t min_width = (coord_t)scale_(this->ext_perimeter_flow.nozzle_diameter / 3);

-    ExPolygons no_thin_zone = offset_ex(next_onion, (float)(ext_perimeter_width / 2));
+    ExPolygons no_thin_zone = offset_ex(next_onion, (float)(ext_perimeter_width / 2), jtSquare);
     // medial axis requires non-overlapping geometry
     ExPolygons thin_zones = diff_ex(last, no_thin_zone, true);
     //don't use offset2_ex, because we don't want to merge the zones that have been separated.
@@ -253,7 +253,8 @@ void PerimeterGenerator::process()
     // compute a bit of overlap to anchor thin walls inside the print.
     for (ExPolygon &ex : expp) {
         //growing back the polygon
-        //a vary little bit of overlap can be created here with other thin polygon, but it's more useful than worisome.
+        //a very little bit of overlap can be created here with other thin polygons, but it's more useful than worisome.
+        ex.remove_point_too_near(SCALED_RESOLUTION);
         ExPolygons ex_bigger = offset_ex(ex, (float)(min_width / 2));
         if (ex_bigger.size() != 1) continue; // impossible error, growing a single polygon can't create multiple or 0.
         ExPolygons anchor = intersection_ex(offset_ex(ex, (float)(min_width / 2) +
@@ -264,7 +265,7 @@ void PerimeterGenerator::process()
     //be sure it's not too small to extrude reliably
     if (ex_bigger[0].area() > min_width*(ext_perimeter_width + ext_perimeter_spacing2)) {
         // the maximum thickness of our thin wall area is equal to the minimum thickness of a single loop
-        ex_bigger[0].medial_axis(bound, ext_perimeter_width + ext_perimeter_spacing2, min_width,
+        ex_bigger[0].medial_axis(bound, ext_perimeter_width + ext_perimeter_spacing2, min_width,
             &thin_walls, this->layer_height);
     }
     break;
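A rough sketch, not part of the commit, of the thin-zone detection pattern the hunks above rely on; the helper function and its parameters are illustrative, and it uses the same ClipperUtils calls as the diff (the commit additionally switches the offset to square joins via jtSquare):

    #include "ClipperUtils.hpp"

    // Sketch only: whatever part of `last` survives subtracting the grown next perimeter
    // ring is too narrow to host another full loop, so it becomes a thin-wall candidate
    // (later grown back by min_width/2 and anchored into the print, as above).
    Slic3r::ExPolygons detect_thin_zones(const Slic3r::ExPolygons &last,
                                         const Slic3r::ExPolygons &next_onion,
                                         Slic3r::coord_t ext_perimeter_width)
    {
        Slic3r::ExPolygons no_thin_zone = offset_ex(next_onion, (float)(ext_perimeter_width / 2));
        // medial axis requires non-overlapping geometry, hence diff_ex with the safety offset
        return diff_ex(last, no_thin_zone, true);
    }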
@@ -602,8 +603,10 @@ ExtrusionEntityCollection PerimeterGenerator::_variable_width(const ThickPolylin
     // of segments, and any pruning shall be performed before we apply this tolerance
     const double tolerance = scale_(0.05);

+    int id_line = 0;
     ExtrusionEntityCollection coll;
     for (const ThickPolyline &p : polylines) {
+        id_line++;
         ExtrusionPaths paths;
         ExtrusionPath path(role);
         ThickLines lines = p.thicklines();
@@ -665,7 +668,7 @@ ExtrusionEntityCollection PerimeterGenerator::_variable_width(const ThickPolylin
                 path.height = flow.height;
             } else {
                 thickness_delta = fabs(scale_(flow.width) - w);
-                if (thickness_delta <= tolerance) {
+                if (thickness_delta <= tolerance/2) {
                     // the width difference between this line and the current flow width is
                     // within the accepted tolerance
                     path.polyline.append(line.b);
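An illustration of what the halved threshold in the last hunk changes; the numbers below are made up, only the comparison against tolerance/2 comes from the commit:

    #include "libslic3r.h"

    // With tolerance = scale_(0.05) (0.05 mm), a line whose width differs from the
    // current flow width by 0.03 mm used to be appended to the running path
    // (0.03 <= 0.05); after this commit it starts a new extrusion path instead
    // (0.03 > 0.025), so width changes spawn new extrusions about twice as readily.
    bool extend_current_path_example()
    {
        const double tolerance       = scale_(0.05);   // segment-creation tolerance
        const double thickness_delta = scale_(0.03);   // hypothetical width difference
        return thickness_delta <= tolerance / 2;       // false under the new rule
    }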
@@ -301,10 +301,10 @@ void concatThickPolylines(ThickPolylines& pp) {
     for (size_t i = 0; i < pp.size(); ++i) {
         ThickPolyline *polyline = &pp[i];

-        int32_t id_candidate_first_point = -1;
-        int32_t id_candidate_last_point = -1;
-        int32_t nbCandidate_first_point = 0;
-        int32_t nbCandidate_last_point = 0;
+        size_t id_candidate_first_point = -1;
+        size_t id_candidate_last_point = -1;
+        size_t nbCandidate_first_point = 0;
+        size_t nbCandidate_last_point = 0;
         // find another polyline starting here
         for (size_t j = 0; j < pp.size(); ++j) {
            if (j == i) continue;