ENH: port of new infill algorithms from Prusa

1. Initial port of the new "ensure vertical thickness" algorithm from Prusa.
2. Initial port of the new internal bridge algorithm from Prusa.
3. Re-add interface shells.

Based on Prusa commits 11c0e567a68979e96085b3763a76464cb793ea12 and f8e7d1b01c114b4d45f9e221c6b5bb935065d650. Thanks, Prusa.
Original authors: PavelMikus <pavel.mikus.mail@seznam.cz>, Vojtech Bubnik <bubnikv@gmail.com>
jira:NEW
Signed-off-by: xun.zhang <xun.zhang@bambulab.com>
Change-Id: I0d36be065a29cf87315918d720f726975a43ef9f
parent 734a70b493, commit 3d75210ad1
@ -23,23 +23,11 @@ public:
|
||||
BoundingBoxBase(const PointClass &p1, const PointClass &p2, const PointClass &p3) :
|
||||
min(p1), max(p1), defined(false) { merge(p2); merge(p3); }
|
||||
|
||||
template<class It, class = IteratorOnly<It> >
|
||||
BoundingBoxBase(It from, It to) : min(PointClass::Zero()), max(PointClass::Zero())
|
||||
|
||||
template<class It, class = IteratorOnly<It>>
|
||||
BoundingBoxBase(It from, It to)
|
||||
{
|
||||
if (from == to) {
|
||||
this->defined = false;
|
||||
// throw Slic3r::InvalidArgument("Empty point set supplied to BoundingBoxBase constructor");
|
||||
} else {
|
||||
auto it = from;
|
||||
this->min = it->template cast<typename PointClass::Scalar>();
|
||||
this->max = this->min;
|
||||
for (++ it; it != to; ++ it) {
|
||||
auto vec = it->template cast<typename PointClass::Scalar>();
|
||||
this->min = this->min.cwiseMin(vec);
|
||||
this->max = this->max.cwiseMax(vec);
|
||||
}
|
||||
this->defined = (this->min(0) < this->max(0)) && (this->min(1) < this->max(1));
|
||||
}
|
||||
construct(*this, from, to);
|
||||
}
|
||||
|
||||
BoundingBoxBase(const std::vector<PointClass> &points)
|
||||
@ -88,6 +76,30 @@ public:
|
||||
os << "[" << bbox.max(0) - bbox.min(0) << " x " << bbox.max(1) - bbox.min(1) << "] from (" << bbox.min(0) << ", " << bbox.min(1) << ")";
|
||||
return os;
|
||||
}
|
||||
|
||||
private:
|
||||
// to access construct()
|
||||
friend BoundingBox get_extents<false>(const Points &pts);
|
||||
friend BoundingBox get_extents<true>(const Points &pts);
|
||||
|
||||
// if IncludeBoundary, then a bounding box is defined even for a single point.
|
||||
// otherwise a bounding box is only defined if it has a positive area.
|
||||
// The output bounding box is expected to be set to "undefined" initially.
|
||||
template<bool IncludeBoundary = false, class BoundingBoxType, class It, class = IteratorOnly<It>>
|
||||
static void construct(BoundingBoxType &out, It from, It to)
|
||||
{
|
||||
if (from != to) {
|
||||
auto it = from;
|
||||
out.min = it->template cast<typename PointClass::Scalar>();
|
||||
out.max = out.min;
|
||||
for (++ it; it != to; ++ it) {
|
||||
auto vec = it->template cast<typename PointClass::Scalar>();
|
||||
out.min = out.min.cwiseMin(vec);
|
||||
out.max = out.max.cwiseMax(vec);
|
||||
}
|
||||
out.defined = IncludeBoundary || (out.min.x() < out.max.x() && out.min.y() < out.max.y());
|
||||
}
|
||||
}
|
||||
};
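A minimal usage sketch of the new IncludeBoundary behaviour (illustrative only; it relies on the get_extents<bool>(const Points &) declarations added to Point.hpp further below): a single point yields a defined, zero-area bounding box only with the boundary-inclusive variant.

    Points pts { Point(10, 10) };
    assert(  get_extents<true>(pts).defined);   // degenerate box at (10, 10) counts as defined
    assert(! get_extents<false>(pts).defined);  // the default variant requires a positive area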
|
||||
|
||||
template <class PointClass>
|
||||
|
@ -71,6 +71,52 @@ private:
|
||||
ExPolygons _anchor_regions;
|
||||
};
|
||||
|
||||
// Return the ideal bridge direction and the unsupported bridge endpoints' distance.
|
||||
inline std::tuple<Vec2d, double> detect_bridging_direction(const Lines &floating_edges, const Polygons &overhang_area)
|
||||
{
|
||||
if (floating_edges.empty()) {
|
||||
// consider this area anchored from all sides, pick bridging direction that will likely yield shortest bridges
|
||||
auto [pc1, pc2] = compute_principal_components(overhang_area);
|
||||
if (pc2 == Vec2f::Zero()) { // overhang may be smaller than resolution. In this case, any direction is ok
|
||||
return {Vec2d{1.0,0.0}, 0.0};
|
||||
} else {
|
||||
return {pc2.normalized().cast<double>(), 0.0};
|
||||
}
|
||||
}
|
||||
|
||||
// Overhang is not fully surrounded by anchors, in that case, find such direction that will minimize the number of bridge ends/180turns in the air
|
||||
std::unordered_map<double, Vec2d> directions{};
|
||||
for (const Line &l : floating_edges) {
|
||||
Vec2d normal = l.normal().cast<double>().normalized();
|
||||
double quantized_angle = std::ceil(std::atan2(normal.y(),normal.x()) * 1000.0);
|
||||
directions.emplace(quantized_angle, normal);
|
||||
}
|
||||
std::vector<std::pair<Vec2d, double>> direction_costs{};
|
||||
// it is actually the cost of a perpendicular bridge direction - we find the minimal cost and then return the perpendicular dir
|
||||
for (const auto& d : directions) {
|
||||
direction_costs.emplace_back(d.second, 0.0);
|
||||
}
|
||||
|
||||
for (const Line &l : floating_edges) {
|
||||
Vec2d line = (l.b - l.a).cast<double>();
|
||||
for (auto &dir_cost : direction_costs) {
|
||||
// the dot product already contains the length of the line. dir_cost.first is normalized.
|
||||
dir_cost.second += std::abs(line.dot(dir_cost.first));
|
||||
}
|
||||
}
|
||||
|
||||
Vec2d result_dir = Vec2d::Ones();
|
||||
double min_cost = std::numeric_limits<double>::max();
|
||||
for (const auto &cost : direction_costs) {
|
||||
if (cost.second < min_cost) {
|
||||
// now flip the orientation back and return the direction of the bridge extrusions
|
||||
result_dir = Vec2d{cost.first.y(), -cost.first.x()};
|
||||
min_cost = cost.second;
|
||||
}
|
||||
}
|
||||
|
||||
return {result_dir, min_cost};
|
||||
};
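A worked example of the cost heuristic above (illustrative only, using made-up coordinates): for a single floating edge Line{{0, 0}, {1000, 0}} the only candidate direction is its normal (0, ±1); the accumulated cost |(1000, 0) · (0, ±1)| = 0 is minimal, so the perpendicular (±1, 0) is returned. Bridges therefore run parallel to the unsupported edge and end on the anchored sides, and the reported unsupported endpoint distance is 0.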
|
||||
|
||||
// Return the ideal bridge direction and the unsupported bridge endpoints' distance.
|
||||
inline std::tuple<Vec2d, double> detect_bridging_direction(const Polygons &to_cover, const Polygons &anchors_area)
|
||||
|
@ -403,6 +403,9 @@ set(lisbslic3r_sources
|
||||
Arachne/WallToolPaths.cpp
|
||||
Shape/TextShape.hpp
|
||||
Shape/TextShape.cpp
|
||||
RegionExpansion.hpp
|
||||
RegionExpansion.cpp
|
||||
ClipperZUtils.hpp
|
||||
)
|
||||
|
||||
if (APPLE)
|
||||
|
@ -280,7 +280,7 @@ static ClipperLib::Paths raw_offset(PathsProvider &&paths, float offset, Clipper
|
||||
co.ArcTolerance = miterLimit;
|
||||
else
|
||||
co.MiterLimit = miterLimit;
|
||||
co.ShortestEdgeLength = double(std::abs(offset * CLIPPER_OFFSET_SHORTEST_EDGE_FACTOR));
|
||||
co.ShortestEdgeLength = double(std::abs(offset * ClipperOffsetShortestEdgeFactor));
|
||||
for (const ClipperLib::Path &path : paths) {
|
||||
co.Clear();
|
||||
// Execute reorients the contours so that the outer most contour has a positive area. Thus the output
|
||||
@ -431,7 +431,7 @@ static int offset_expolygon_inner(const Slic3r::ExPolygon &expoly, const float d
|
||||
co.ArcTolerance = miterLimit;
|
||||
else
|
||||
co.MiterLimit = miterLimit;
|
||||
co.ShortestEdgeLength = double(std::abs(delta * CLIPPER_OFFSET_SHORTEST_EDGE_FACTOR));
|
||||
co.ShortestEdgeLength = double(std::abs(delta * ClipperOffsetShortestEdgeFactor));
|
||||
co.AddPath(expoly.contour.points, joinType, ClipperLib::etClosedPolygon);
|
||||
co.Execute(contours, delta);
|
||||
}
|
||||
@ -452,7 +452,7 @@ static int offset_expolygon_inner(const Slic3r::ExPolygon &expoly, const float d
|
||||
co.ArcTolerance = miterLimit;
|
||||
else
|
||||
co.MiterLimit = miterLimit;
|
||||
co.ShortestEdgeLength = double(std::abs(delta * CLIPPER_OFFSET_SHORTEST_EDGE_FACTOR));
|
||||
co.ShortestEdgeLength = double(std::abs(delta * ClipperOffsetShortestEdgeFactor));
|
||||
co.AddPath(hole.points, joinType, ClipperLib::etClosedPolygon);
|
||||
ClipperLib::Paths out2;
|
||||
// Execute reorients the contours so that the outer most contour has a positive area. Thus the output
|
||||
@ -751,6 +751,8 @@ Slic3r::ExPolygons diff_ex(const Slic3r::Surfaces &subject, const Slic3r::Surfac
|
||||
{ return _clipper_ex(ClipperLib::ctDifference, ClipperUtils::SurfacesProvider(subject), ClipperUtils::SurfacesProvider(clip), do_safety_offset); }
|
||||
Slic3r::ExPolygons diff_ex(const Slic3r::SurfacesPtr &subject, const Slic3r::Polygons &clip, ApplySafetyOffset do_safety_offset)
|
||||
{ return _clipper_ex(ClipperLib::ctDifference, ClipperUtils::SurfacesPtrProvider(subject), ClipperUtils::PolygonsProvider(clip), do_safety_offset); }
|
||||
Slic3r::ExPolygons diff_ex(const Slic3r::SurfacesPtr& subject, const Slic3r::ExPolygons& clip, ApplySafetyOffset do_safety_offset)
|
||||
{ return _clipper_ex(ClipperLib::ctDifference, ClipperUtils::SurfacesPtrProvider(subject), ClipperUtils::ExPolygonsProvider(clip), do_safety_offset);}
|
||||
// BBS
|
||||
inline Slic3r::ExPolygons diff_ex(const Slic3r::Polygon& subject, const Slic3r::Polygons& clip, ApplySafetyOffset do_safety_offset)
|
||||
{
|
||||
@ -1125,7 +1127,7 @@ ClipperLib::Path mittered_offset_path_scaled(const Points &contour, const std::v
|
||||
};
|
||||
|
||||
// Minimum edge length, squared.
|
||||
double lmin = *std::max_element(deltas.begin(), deltas.end()) * CLIPPER_OFFSET_SHORTEST_EDGE_FACTOR;
|
||||
double lmin = *std::max_element(deltas.begin(), deltas.end()) * ClipperOffsetShortestEdgeFactor;
|
||||
double l2min = lmin * lmin;
|
||||
// Minimum angle to consider two edges to be parallel.
|
||||
// Vojtech's estimate.
|
||||
|
@ -39,6 +39,9 @@ static constexpr const Slic3r::ClipperLib::JoinType DefaultLineJoinType = Sl
|
||||
// Miter limit is ignored for jtSquare.
|
||||
static constexpr const double DefaultLineMiterLimit = 0.;
|
||||
|
||||
// Decimation factor applied on input contour when doing offset, multiplied by the offset distance.
|
||||
static constexpr const double ClipperOffsetShortestEdgeFactor = 0.005;
|
||||
|
||||
enum class ApplySafetyOffset {
|
||||
No,
|
||||
Yes
|
||||
@ -381,7 +384,8 @@ inline Slic3r::Polygons shrink(const Slic3r::Polygons &polygons, const float d
|
||||
{ assert(delta > 0); return offset(polygons, -delta, joinType, miterLimit); }
|
||||
inline Slic3r::ExPolygons shrink_ex(const Slic3r::Polygons &polygons, const float delta, ClipperLib::JoinType joinType = DefaultJoinType, double miterLimit = DefaultMiterLimit)
|
||||
{ assert(delta > 0); return offset_ex(polygons, -delta, joinType, miterLimit); }
|
||||
|
||||
inline Slic3r::ExPolygons shrink_ex(const Slic3r::ExPolygons &polygons, const float delta, ClipperLib::JoinType joinType = DefaultJoinType, double miterLimit = DefaultMiterLimit)
|
||||
{ assert(delta > 0); return offset_ex(polygons, -delta, joinType, miterLimit); }
|
||||
// Wherever applicable, please use the opening() / closing() variants instead, they convey their purpose better.
|
||||
// Input polygons for negative offset shall be "normalized": There must be no overlap / intersections between the input polygons.
|
||||
Slic3r::Polygons offset2(const Slic3r::ExPolygons &expolygons, const float delta1, const float delta2, ClipperLib::JoinType joinType = DefaultJoinType, double miterLimit = DefaultMiterLimit);
|
||||
@ -448,6 +452,7 @@ Slic3r::ExPolygons diff_ex(const Slic3r::Surfaces &subject, const Slic3r::ExPoly
|
||||
Slic3r::ExPolygons diff_ex(const Slic3r::ExPolygons &subject, const Slic3r::Surfaces &clip, ApplySafetyOffset do_safety_offset = ApplySafetyOffset::No);
|
||||
Slic3r::ExPolygons diff_ex(const Slic3r::Surfaces &subject, const Slic3r::Surfaces &clip, ApplySafetyOffset do_safety_offset = ApplySafetyOffset::No);
|
||||
Slic3r::ExPolygons diff_ex(const Slic3r::SurfacesPtr &subject, const Slic3r::Polygons &clip, ApplySafetyOffset do_safety_offset = ApplySafetyOffset::No);
|
||||
Slic3r::ExPolygons diff_ex(const Slic3r::SurfacesPtr &subject, const Slic3r::ExPolygons &clip, ApplySafetyOffset do_safety_offset = ApplySafetyOffset::No);
|
||||
Slic3r::Polylines diff_pl(const Slic3r::Polyline& subject, const Slic3r::Polygons& clip);
|
||||
Slic3r::Polylines diff_pl(const Slic3r::Polylines &subject, const Slic3r::Polygons &clip);
|
||||
Slic3r::Polylines diff_pl(const Slic3r::Polyline &subject, const Slic3r::ExPolygon &clip);
|
||||
|
src/libslic3r/ClipperZUtils.hpp (new file, 147 lines)
@ -0,0 +1,147 @@
|
||||
///|/ Copyright (c) Prusa Research 2022 - 2023 Vojtěch Bubník @bubnikv
|
||||
///|/
|
||||
///|/ PrusaSlicer is released under the terms of the AGPLv3 or higher
|
||||
///|/
|
||||
#ifndef slic3r_ClipperZUtils_hpp_
|
||||
#define slic3r_ClipperZUtils_hpp_
|
||||
|
||||
#include <numeric>
|
||||
#include <vector>
|
||||
|
||||
#include <clipper/clipper_z.hpp>
|
||||
#include <libslic3r/Point.hpp>
|
||||
|
||||
namespace Slic3r {
|
||||
|
||||
namespace ClipperZUtils {
|
||||
|
||||
using ZPoint = ClipperLib_Z::IntPoint;
|
||||
using ZPoints = ClipperLib_Z::Path;
|
||||
using ZPath = ClipperLib_Z::Path;
|
||||
using ZPaths = ClipperLib_Z::Paths;
|
||||
|
||||
inline bool zpoint_lower(const ZPoint &l, const ZPoint &r)
|
||||
{
|
||||
return l.x() < r.x() || (l.x() == r.x() && (l.y() < r.y() || (l.y() == r.y() && l.z() < r.z())));
|
||||
}
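Illustrative ordering (made-up values): zpoint_lower() is a plain lexicographic comparison on (x, y, z), so (1, 5, 9) sorts before (2, 0, 0) and before (1, 6, 0).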
|
||||
|
||||
// Convert a single path to path with a given Z coordinate.
|
||||
// If Open, then duplicate the first point at the end.
|
||||
template<bool Open = false>
|
||||
inline ZPath to_zpath(const Points &path, coord_t z)
|
||||
{
|
||||
ZPath out;
|
||||
if (! path.empty()) {
|
||||
out.reserve(path.size() + (Open ? 1 : 0));
|
||||
for (const Point &p : path)
|
||||
out.emplace_back(p.x(), p.y(), z);
|
||||
if (Open)
|
||||
out.emplace_back(out.front());
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
// Convert multiple paths to paths with a given Z coordinate.
|
||||
// If Open, then duplicate the first point of each path at its end.
|
||||
template<bool Open = false>
|
||||
inline ZPaths to_zpaths(const VecOfPoints &paths, coord_t z)
|
||||
{
|
||||
ZPaths out;
|
||||
out.reserve(paths.size());
|
||||
for (const Points &path : paths)
|
||||
out.emplace_back(to_zpath<Open>(path, z));
|
||||
return out;
|
||||
}
|
||||
|
||||
// Convert multiple expolygons into z-paths with Z specified by the index of the source expolygon,
// offset by base_idx.
|
||||
// If Open, then duplicate the first point of each path at its end.
|
||||
template<bool Open = false>
|
||||
inline ZPaths expolygons_to_zpaths(const ExPolygons &src, coord_t &base_idx)
|
||||
{
|
||||
ZPaths out;
|
||||
out.reserve(std::accumulate(src.begin(), src.end(), size_t(0),
|
||||
[](const size_t acc, const ExPolygon &expoly) { return acc + expoly.num_contours(); }));
|
||||
for (const ExPolygon &expoly : src) {
|
||||
out.emplace_back(to_zpath<Open>(expoly.contour.points, base_idx));
|
||||
for (const Polygon &hole : expoly.holes)
|
||||
out.emplace_back(to_zpath<Open>(hole.points, base_idx));
|
||||
++ base_idx;
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
// Convert a single Z-path back to a path, dropping the Z coordinate.
// If Open, then duplicate the first point at the end.
|
||||
template<bool Open = false>
|
||||
inline Points from_zpath(const ZPoints &path)
|
||||
{
|
||||
Points out;
|
||||
if (! path.empty()) {
|
||||
out.reserve(path.size() + (Open ? 1 : 0));
|
||||
for (const ZPoint &p : path)
|
||||
out.emplace_back(p.x(), p.y());
|
||||
if (Open)
|
||||
out.emplace_back(out.front());
|
||||
}
|
||||
return out;
|
||||
}
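A small round-trip sketch for these helpers (illustrative only; the contour values are made up): the Z value acts as a tag identifying the source contour, and from_zpath() simply drops it again.

    Points contour { { 0, 0 }, { 100, 0 }, { 100, 100 } };
    ZPath  tagged  = to_zpath(contour, /*z=*/7);  // every vertex carries z == 7
    Points back    = from_zpath(tagged);          // back == contour, the tag is discarded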
|
||||
|
||||
// Convert multiple Z-paths back to paths, dropping the Z coordinate.
// If Open, then duplicate the first point of each path at its end.
|
||||
template<bool Open = false>
|
||||
inline void from_zpaths(const ZPaths &paths, VecOfPoints &out)
|
||||
{
|
||||
out.reserve(out.size() + paths.size());
|
||||
for (const ZPoints &path : paths)
|
||||
out.emplace_back(from_zpath<Open>(path));
|
||||
}
|
||||
template<bool Open = false>
|
||||
inline VecOfPoints from_zpaths(const ZPaths &paths)
|
||||
{
|
||||
VecOfPoints out;
|
||||
from_zpaths(paths, out);
|
||||
return out;
|
||||
}
|
||||
|
||||
class ClipperZIntersectionVisitor {
|
||||
public:
|
||||
using Intersection = std::pair<coord_t, coord_t>;
|
||||
using Intersections = std::vector<Intersection>;
|
||||
ClipperZIntersectionVisitor(Intersections &intersections) : m_intersections(intersections) {}
|
||||
void reset() { m_intersections.clear(); }
|
||||
void operator()(const ZPoint &e1bot, const ZPoint &e1top, const ZPoint &e2bot, const ZPoint &e2top, ZPoint &pt) {
|
||||
coord_t srcs[4]{ e1bot.z(), e1top.z(), e2bot.z(), e2top.z() };
|
||||
coord_t *begin = srcs;
|
||||
coord_t *end = srcs + 4;
|
||||
//FIXME bubble sort manually?
|
||||
std::sort(begin, end);
|
||||
end = std::unique(begin, end);
|
||||
if (begin + 1 == end) {
|
||||
// Self intersection may happen on source contour. Just copy the Z value.
|
||||
pt.z() = *begin;
|
||||
} else {
|
||||
assert(begin + 2 == end);
|
||||
if (begin + 2 <= end) {
|
||||
// store a -1 based negative index into the "intersections" vector here.
|
||||
m_intersections.emplace_back(srcs[0], srcs[1]);
|
||||
pt.z() = -coord_t(m_intersections.size());
|
||||
}
|
||||
}
|
||||
}
|
||||
ClipperLib_Z::ZFillCallback clipper_callback() {
|
||||
return [this](const ZPoint &e1bot, const ZPoint &e1top,
|
||||
const ZPoint &e2bot, const ZPoint &e2top, ZPoint &pt)
|
||||
{ return (*this)(e1bot, e1top, e2bot, e2top, pt); };
|
||||
}
|
||||
|
||||
const std::vector<std::pair<coord_t, coord_t>>& intersections() const { return m_intersections; }
|
||||
|
||||
private:
|
||||
std::vector<std::pair<coord_t, coord_t>> &m_intersections;
|
||||
};
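A decoding sketch (illustrative only; it mirrors how wave_seeds() in RegionExpansion.cpp consumes this visitor): after clipping, a vertex whose Z is negative refers back into the recorded intersection list, while a positive Z still carries the original source-contour tag.

    if (pt.z() < 0) {
        const auto &is = intersections[- pt.z() - 1]; // pair of source tags that crossed at this vertex
        // is.first / is.second identify the two source contours meeting here
    }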
|
||||
|
||||
} // namespace ClipperZUtils
|
||||
} // namespace Slic3r
|
||||
|
||||
#endif // slic3r_ClipperZUtils_hpp_
|
@ -513,6 +513,100 @@ void Layer::make_fills(FillAdaptive::Octree* adaptive_fill_octree, FillAdaptive:
|
||||
#endif
|
||||
}
|
||||
|
||||
Polylines Layer::generate_sparse_infill_polylines_for_anchoring(FillAdaptive::Octree* adaptive_fill_octree, FillAdaptive::Octree* support_fill_octree, FillLightning::Generator* lightning_generator) const
|
||||
{
|
||||
std::vector<SurfaceFill> surface_fills = group_fills(*this);
|
||||
const Slic3r::BoundingBox bbox = this->object()->bounding_box();
|
||||
const auto resolution = this->object()->print()->config().resolution.value;
|
||||
|
||||
Polylines sparse_infill_polylines{};
|
||||
|
||||
for (SurfaceFill& surface_fill : surface_fills) {
|
||||
if (surface_fill.surface.surface_type != stInternal) {
|
||||
continue;
|
||||
}
|
||||
|
||||
switch (surface_fill.params.pattern) {
|
||||
case ipCount: continue; break;
|
||||
case ipSupportBase: continue; break;
|
||||
//case ipEnsuring: continue; break;
|
||||
case ipLightning:
|
||||
case ipAdaptiveCubic:
|
||||
case ipSupportCubic:
|
||||
case ipRectilinear:
|
||||
case ipMonotonic:
|
||||
case ipAlignedRectilinear:
|
||||
case ipGrid:
|
||||
case ipTriangles:
|
||||
case ipStars:
|
||||
case ipCubic:
|
||||
case ipLine:
|
||||
case ipConcentric:
|
||||
case ipHoneycomb:
|
||||
case ip3DHoneycomb:
|
||||
case ipGyroid:
|
||||
case ipHilbertCurve:
|
||||
case ipArchimedeanChords:
|
||||
case ipOctagramSpiral: break;
|
||||
}
|
||||
|
||||
// Create the filler object.
|
||||
std::unique_ptr<Fill> f = std::unique_ptr<Fill>(Fill::new_from_type(surface_fill.params.pattern));
|
||||
f->set_bounding_box(bbox);
|
||||
f->layer_id = this->id() - this->object()->get_layer(0)->id(); // We need to subtract raft layers.
|
||||
f->z = this->print_z;
|
||||
f->angle = surface_fill.params.angle;
|
||||
f->adapt_fill_octree = (surface_fill.params.pattern == ipSupportCubic) ? support_fill_octree : adaptive_fill_octree;
|
||||
|
||||
|
||||
if (surface_fill.params.pattern == ipLightning)
|
||||
dynamic_cast<FillLightning::Filler*>(f.get())->generator = lightning_generator;
|
||||
|
||||
// calculate flow spacing for infill pattern generation
|
||||
double link_max_length = 0.;
|
||||
if (!surface_fill.params.bridge) {
|
||||
#if 0
|
||||
link_max_length = layerm.region()->config().get_abs_value(surface.is_external() ? "external_fill_link_max_length" : "fill_link_max_length", flow.spacing());
|
||||
// printf("flow spacing: %f, is_external: %d, link_max_length: %lf\n", flow.spacing(), int(surface.is_external()), link_max_length);
|
||||
#else
|
||||
if (surface_fill.params.density > 80.) // 80%
|
||||
link_max_length = 3. * f->spacing;
|
||||
#endif
|
||||
}
|
||||
|
||||
// Maximum length of the perimeter segment linking two infill lines.
|
||||
f->link_max_length = (coord_t)scale_(link_max_length);
|
||||
// Used by the concentric infill pattern to clip the loops to create extrusion paths.
|
||||
f->loop_clipping = coord_t(scale_(surface_fill.params.flow.nozzle_diameter()) * LOOP_CLIPPING_LENGTH_OVER_NOZZLE_DIAMETER);
|
||||
|
||||
LayerRegion& layerm = *m_regions[surface_fill.region_id];
|
||||
|
||||
// apply half spacing using this flow's own spacing and generate infill
|
||||
FillParams params;
|
||||
params.density = float(0.01 * surface_fill.params.density);
|
||||
params.dont_adjust = false; // surface_fill.params.dont_adjust;
|
||||
params.anchor_length = surface_fill.params.anchor_length;
|
||||
params.anchor_length_max = surface_fill.params.anchor_length_max;
|
||||
params.resolution = resolution;
|
||||
params.use_arachne = false;
|
||||
params.layer_height = layerm.layer()->height;
|
||||
|
||||
for (ExPolygon& expoly : surface_fill.expolygons) {
|
||||
// Spacing is modified by the filler to indicate adjustments. Reset it for each expolygon.
|
||||
f->spacing = surface_fill.params.spacing;
|
||||
surface_fill.surface.expolygon = std::move(expoly);
|
||||
try {
|
||||
Polylines polylines = f->fill_surface(&surface_fill.surface, params);
|
||||
sparse_infill_polylines.insert(sparse_infill_polylines.end(), polylines.begin(), polylines.end());
|
||||
}
|
||||
catch (InfillFailedException&) {}
|
||||
}
|
||||
}
|
||||
|
||||
return sparse_infill_polylines;
|
||||
}
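A hedged sketch of how this new method might be consumed when preparing lightning infill (an assumption; the actual call site is outside this excerpt): the per-layer sparse infill polylines serve as anchor candidates for the lightning generator.

    std::vector<Polylines> sparse_infill_anchors;
    for (const Layer *layer : object.layers())
        sparse_infill_anchors.emplace_back(
            layer->generate_sparse_infill_polylines_for_anchoring(adaptive_octree, support_octree, lightning_generator));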
|
||||
|
||||
|
||||
// Create ironing extrusions over top surfaces.
|
||||
void Layer::make_ironing()
|
||||
{
|
||||
|
@ -181,6 +181,10 @@ public:
|
||||
// Phony version of make_fills() without parameters for Perl integration only.
|
||||
void make_fills() { this->make_fills(nullptr, nullptr); }
|
||||
void make_fills(FillAdaptive::Octree* adaptive_fill_octree, FillAdaptive::Octree* support_fill_octree, FillLightning::Generator* lightning_generator = nullptr);
|
||||
Polylines generate_sparse_infill_polylines_for_anchoring(FillAdaptive::Octree *adaptive_fill_octree,
|
||||
FillAdaptive::Octree *support_fill_octree,
|
||||
FillLightning::Generator* lightning_generator) const;
|
||||
|
||||
void make_ironing();
|
||||
|
||||
void export_region_slices_to_svg(const char *path) const;
|
||||
|
@ -7,6 +7,7 @@
|
||||
#include "Surface.hpp"
|
||||
#include "BoundingBox.hpp"
|
||||
#include "SVG.hpp"
|
||||
#include "RegionExpansion.hpp"
|
||||
|
||||
#include <string>
|
||||
#include <map>
|
||||
@ -112,6 +113,352 @@ void LayerRegion::make_perimeters(const SurfaceCollection &slices, SurfaceCollec
|
||||
g.process_classic();
|
||||
}
|
||||
|
||||
|
||||
#if 1
|
||||
|
||||
// Extract surfaces of given type from surfaces, extract fill (layer) thickness of one of the surfaces.
|
||||
static ExPolygons fill_surfaces_extract_expolygons(Surfaces &surfaces, std::initializer_list<SurfaceType> surface_types, double &thickness)
|
||||
{
|
||||
size_t cnt = 0;
|
||||
for (const Surface &surface : surfaces)
|
||||
if (std::find(surface_types.begin(), surface_types.end(), surface.surface_type) != surface_types.end()) {
|
||||
++cnt;
|
||||
thickness = surface.thickness;
|
||||
}
|
||||
if (cnt == 0)
|
||||
return {};
|
||||
|
||||
ExPolygons out;
|
||||
out.reserve(cnt);
|
||||
for (Surface &surface : surfaces)
|
||||
if (std::find(surface_types.begin(), surface_types.end(), surface.surface_type) != surface_types.end())
|
||||
out.emplace_back(std::move(surface.expolygon));
|
||||
return out;
|
||||
}
|
||||
|
||||
// Extract bridging surfaces from "surfaces", expand them into "shells" using expansion_params,
|
||||
// detect bridges.
|
||||
// Trim "shells" by the expanded bridges.
|
||||
Surfaces expand_bridges_detect_orientations(
|
||||
Surfaces &surfaces,
|
||||
ExPolygons &shells,
|
||||
const Algorithm::RegionExpansionParameters &expansion_params_into_solid_infill,
|
||||
ExPolygons &sparse,
|
||||
const Algorithm::RegionExpansionParameters &expansion_params_into_sparse_infill,
|
||||
const float closing_radius)
|
||||
{
|
||||
using namespace Slic3r::Algorithm;
|
||||
|
||||
double thickness;
|
||||
ExPolygons bridges_ex = fill_surfaces_extract_expolygons(surfaces, {stBottomBridge}, thickness);
|
||||
if (bridges_ex.empty())
|
||||
return {};
|
||||
|
||||
// Calculate bridge anchors and their expansions in their respective shell region.
|
||||
WaveSeeds bridge_anchors = wave_seeds(bridges_ex, shells, expansion_params_into_solid_infill.tiny_expansion, true);
|
||||
std::vector<RegionExpansionEx> bridge_expansions = propagate_waves_ex(bridge_anchors, shells, expansion_params_into_solid_infill);
|
||||
bool expanded_into_shells = ! bridge_expansions.empty();
|
||||
bool expanded_into_sparse = false;
|
||||
{
|
||||
WaveSeeds bridge_anchors_sparse = wave_seeds(bridges_ex, sparse, expansion_params_into_sparse_infill.tiny_expansion, true);
|
||||
std::vector<RegionExpansionEx> bridge_expansions_sparse = propagate_waves_ex(bridge_anchors_sparse, sparse, expansion_params_into_sparse_infill);
|
||||
if (! bridge_expansions_sparse.empty()) {
|
||||
expanded_into_sparse = true;
|
||||
for (WaveSeed &seed : bridge_anchors_sparse)
|
||||
seed.boundary += uint32_t(shells.size());
|
||||
for (RegionExpansionEx &expansion : bridge_expansions_sparse)
|
||||
expansion.boundary_id += uint32_t(shells.size());
|
||||
append(bridge_anchors, std::move(bridge_anchors_sparse));
|
||||
append(bridge_expansions, std::move(bridge_expansions_sparse));
|
||||
}
|
||||
}
|
||||
|
||||
// Cache for detecting bridge orientation and merging regions with overlapping expansions.
|
||||
struct Bridge {
|
||||
ExPolygon expolygon;
|
||||
uint32_t group_id;
|
||||
std::vector<RegionExpansionEx>::const_iterator bridge_expansion_begin;
|
||||
double angle = -1;
|
||||
};
|
||||
std::vector<Bridge> bridges;
|
||||
{
|
||||
bridges.reserve(bridges_ex.size());
|
||||
uint32_t group_id = 0;
|
||||
for (ExPolygon &ex : bridges_ex)
|
||||
bridges.push_back({ std::move(ex), group_id ++, bridge_expansions.end() });
|
||||
bridges_ex.clear();
|
||||
}
|
||||
|
||||
// Group the bridge surfaces by overlaps.
|
||||
auto group_id = [&bridges](uint32_t src_id) {
|
||||
uint32_t group_id = bridges[src_id].group_id;
|
||||
while (group_id != src_id) {
|
||||
src_id = group_id;
|
||||
group_id = bridges[src_id].group_id;
|
||||
}
|
||||
bridges[src_id].group_id = group_id;
|
||||
return group_id;
|
||||
};
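// Illustrative trace (made-up ids): with group ids {0, 0, 1}, group_id(2) follows 2 -> 1 -> 0
// and returns 0, so bridges 0, 1 and 2 all report the same group head once their
// expansions have been found to overlap.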
|
||||
|
||||
{
|
||||
// Cache of bboxes per expansion boundary.
|
||||
std::vector<BoundingBox> bboxes;
|
||||
// Detect overlaps of bridge anchors inside their respective shell regions.
|
||||
// bridge_expansions are sorted by boundary id and source id.
|
||||
for (auto it = bridge_expansions.begin(); it != bridge_expansions.end();) {
|
||||
// For each boundary region:
|
||||
auto it_begin = it;
|
||||
auto it_end = std::next(it_begin);
|
||||
for (; it_end != bridge_expansions.end() && it_end->boundary_id == it_begin->boundary_id; ++ it_end) ;
|
||||
bboxes.clear();
|
||||
bboxes.reserve(it_end - it_begin);
|
||||
for (auto it2 = it_begin; it2 != it_end; ++ it2)
|
||||
bboxes.emplace_back(get_extents(it2->expolygon.contour));
|
||||
// For each bridge anchor of the current source:
|
||||
for (; it != it_end; ++ it) {
|
||||
// A group id for this bridge.
|
||||
for (auto it2 = std::next(it); it2 != it_end; ++ it2)
|
||||
if (it->src_id != it2->src_id &&
|
||||
bboxes[it - it_begin].overlap(bboxes[it2 - it_begin]) &&
|
||||
// One may ignore holes, they are irrelevant for intersection test.
|
||||
! intersection(it->expolygon.contour, it2->expolygon.contour).empty()) {
|
||||
// The two bridge regions intersect. Give them the same (lower) group id.
|
||||
uint32_t id = group_id(it->src_id);
|
||||
uint32_t id2 = group_id(it2->src_id);
|
||||
if (id < id2)
|
||||
bridges[id2].group_id = id;
|
||||
else
|
||||
bridges[id].group_id = id2;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Detect bridge directions.
|
||||
{
|
||||
std::sort(bridge_anchors.begin(), bridge_anchors.end(), Algorithm::lower_by_src_and_boundary);
|
||||
auto it_bridge_anchor = bridge_anchors.begin();
|
||||
Lines lines;
|
||||
Polygons anchor_areas;
|
||||
for (uint32_t bridge_id = 0; bridge_id < uint32_t(bridges.size()); ++ bridge_id) {
|
||||
Bridge &bridge = bridges[bridge_id];
|
||||
// lines.clear();
|
||||
anchor_areas.clear();
|
||||
int32_t last_anchor_id = -1;
|
||||
for (; it_bridge_anchor != bridge_anchors.end() && it_bridge_anchor->src == bridge_id; ++ it_bridge_anchor) {
|
||||
if (last_anchor_id != int(it_bridge_anchor->boundary)) {
|
||||
last_anchor_id = int(it_bridge_anchor->boundary);
|
||||
append(anchor_areas, to_polygons(last_anchor_id < int32_t(shells.size()) ? shells[last_anchor_id] : sparse[last_anchor_id - int32_t(shells.size())]));
|
||||
}
|
||||
// if (Points &polyline = it_bridge_anchor->path; polyline.size() >= 2) {
|
||||
// reserve_more_power_of_2(lines, polyline.size() - 1);
|
||||
// for (size_t i = 1; i < polyline.size(); ++ i)
|
||||
// lines.push_back({ polyline[i - 1], polyline[1] });
|
||||
// }
|
||||
}
|
||||
lines = to_lines(diff_pl(to_polylines(bridge.expolygon), expand(anchor_areas, float(SCALED_EPSILON))));
|
||||
auto [bridging_dir, unsupported_dist] = detect_bridging_direction(lines, to_polygons(bridge.expolygon));
|
||||
bridge.angle = M_PI + std::atan2(bridging_dir.y(), bridging_dir.x());
|
||||
#if 0
|
||||
coordf_t stroke_width = scale_(0.06);
|
||||
BoundingBox bbox = get_extents(anchor_areas);
|
||||
bbox.merge(get_extents(bridge.expolygon));
|
||||
bbox.offset(scale_(1.));
|
||||
::Slic3r::SVG
|
||||
svg(debug_out_path(("bridge" + std::to_string(bridge.angle) + "_" /* + std::to_string(this->layer()->bottom_z())*/).c_str()),
|
||||
bbox);
|
||||
svg.draw(bridge.expolygon, "cyan");
|
||||
svg.draw(lines, "green", stroke_width);
|
||||
svg.draw(anchor_areas, "red");
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
// Merge the groups with the same group id, produce surfaces by merging source overhangs with their newly expanded anchors.
|
||||
Surfaces out;
|
||||
{
|
||||
Polygons acc;
|
||||
Surface templ{ stBottomBridge, {} };
|
||||
std::sort(bridge_expansions.begin(), bridge_expansions.end(), [](auto &l, auto &r) {
|
||||
return l.src_id < r.src_id || (l.src_id == r.src_id && l.boundary_id < r.boundary_id);
|
||||
});
|
||||
for (auto it = bridge_expansions.begin(); it != bridge_expansions.end(); ) {
|
||||
bridges[it->src_id].bridge_expansion_begin = it;
|
||||
uint32_t src_id = it->src_id;
|
||||
for (++ it; it != bridge_expansions.end() && it->src_id == src_id; ++ it) ;
|
||||
}
|
||||
for (uint32_t bridge_id = 0; bridge_id < uint32_t(bridges.size()); ++ bridge_id)
|
||||
if (group_id(bridge_id) == bridge_id) {
|
||||
// Head of the group.
|
||||
acc.clear();
|
||||
for (uint32_t bridge_id2 = bridge_id; bridge_id2 < uint32_t(bridges.size()); ++ bridge_id2)
|
||||
if (group_id(bridge_id2) == bridge_id) {
|
||||
append(acc, to_polygons(std::move(bridges[bridge_id2].expolygon)));
|
||||
auto it_bridge_expansion = bridges[bridge_id2].bridge_expansion_begin;
|
||||
assert(it_bridge_expansion == bridge_expansions.end() || it_bridge_expansion->src_id == bridge_id2);
|
||||
for (; it_bridge_expansion != bridge_expansions.end() && it_bridge_expansion->src_id == bridge_id2; ++ it_bridge_expansion)
|
||||
append(acc, to_polygons(std::move(it_bridge_expansion->expolygon)));
|
||||
}
|
||||
//FIXME try to be smart and pick the best bridging angle for all?
|
||||
templ.bridge_angle = bridges[bridge_id].angle;
|
||||
//NOTE: The current regularization of the shells can create small unassigned regions in the object (e.g. Benchy).
// Without the following closing operation, those regions would stay unfilled and cause small holes in the expanded surface.
// Look for the narrow_ensure_vertical_wall_thickness_region_radius filter.
|
||||
ExPolygons final = closing_ex(acc, closing_radius);
|
||||
// without safety offset, artifacts are generated (GH #2494)
|
||||
// union_safety_offset_ex(acc)
|
||||
for (ExPolygon &ex : final)
|
||||
out.emplace_back(templ, std::move(ex));
|
||||
}
|
||||
}
|
||||
|
||||
// Clip by the expanded bridges.
|
||||
if (expanded_into_shells)
|
||||
shells = diff_ex(shells, out);
|
||||
if (expanded_into_sparse)
|
||||
sparse = diff_ex(sparse, out);
|
||||
return out;
|
||||
}
|
||||
|
||||
// Extract bridging surfaces from "surfaces", expand them into "shells" using expansion_params.
|
||||
// Trim "shells" by the expanded bridges.
|
||||
static Surfaces expand_merge_surfaces(
|
||||
Surfaces &surfaces,
|
||||
SurfaceType surface_type,
|
||||
ExPolygons &shells,
|
||||
const Algorithm::RegionExpansionParameters &expansion_params_into_solid_infill,
|
||||
ExPolygons &sparse,
|
||||
const Algorithm::RegionExpansionParameters &expansion_params_into_sparse_infill,
|
||||
const float closing_radius,
|
||||
const double bridge_angle = -1.)
|
||||
{
|
||||
using namespace Slic3r::Algorithm;
|
||||
|
||||
double thickness;
|
||||
ExPolygons src = fill_surfaces_extract_expolygons(surfaces, {surface_type}, thickness);
|
||||
if (src.empty())
|
||||
return {};
|
||||
|
||||
std::vector<RegionExpansion> expansions = propagate_waves(src, shells, expansion_params_into_solid_infill);
|
||||
bool expanded_into_shells = !expansions.empty();
|
||||
bool expanded_into_sparse = false;
|
||||
{
|
||||
std::vector<RegionExpansion> expansions2 = propagate_waves(src, sparse, expansion_params_into_sparse_infill);
|
||||
if (! expansions2.empty()) {
|
||||
expanded_into_sparse = true;
|
||||
for (RegionExpansion &expansion : expansions2)
|
||||
expansion.boundary_id += uint32_t(shells.size());
|
||||
append(expansions, std::move(expansions2));
|
||||
}
|
||||
}
|
||||
|
||||
std::vector<ExPolygon> expanded = merge_expansions_into_expolygons(std::move(src), std::move(expansions));
|
||||
//NOTE: The current regularization of the shells can create small unassigned regions in the object (e.g. Benchy).
// Without the following closing operation, those regions would stay unfilled and cause small holes in the expanded surface.
// Look for the narrow_ensure_vertical_wall_thickness_region_radius filter.
|
||||
expanded = closing_ex(expanded, closing_radius);
|
||||
// Trim the shells by the expanded expolygons.
|
||||
if (expanded_into_shells)
|
||||
shells = diff_ex(shells, expanded);
|
||||
if (expanded_into_sparse)
|
||||
sparse = diff_ex(sparse, expanded);
|
||||
|
||||
Surface templ{ surface_type, {} };
|
||||
templ.bridge_angle = bridge_angle;
|
||||
Surfaces out;
|
||||
out.reserve(expanded.size());
|
||||
for (auto &expoly : expanded)
|
||||
out.emplace_back(templ, std::move(expoly));
|
||||
return out;
|
||||
}
|
||||
|
||||
void LayerRegion::process_external_surfaces(const Layer *lower_layer, const Polygons *lower_layer_covered)
|
||||
{
|
||||
using namespace Slic3r::Algorithm;
|
||||
|
||||
#ifdef SLIC3R_DEBUG_SLICE_PROCESSING
|
||||
export_region_fill_surfaces_to_svg_debug("4_process_external_surfaces-initial");
|
||||
#endif /* SLIC3R_DEBUG_SLICE_PROCESSING */
|
||||
|
||||
// Width of the perimeters.
|
||||
float shell_width = 0;
|
||||
float expansion_min = 0;
|
||||
if (int num_perimeters = this->region().config().wall_loops; num_perimeters > 0) {
|
||||
Flow external_perimeter_flow = this->flow(frExternalPerimeter);
|
||||
Flow perimeter_flow = this->flow(frPerimeter);
|
||||
shell_width = 0.5f * external_perimeter_flow.scaled_width() + external_perimeter_flow.scaled_spacing();
|
||||
shell_width += perimeter_flow.scaled_spacing() * (num_perimeters - 1);
|
||||
expansion_min = perimeter_flow.scaled_spacing();
|
||||
} else {
|
||||
// TODO: Maybe there is better solution when printing with zero perimeters, but this works reasonably well, given the situation
|
||||
shell_width = float(SCALED_EPSILON);
|
||||
expansion_min = float(SCALED_EPSILON);
|
||||
}
|
||||
|
||||
// Scaled expansions of the respective external surfaces.
|
||||
float expansion_top = shell_width * sqrt(2.);
|
||||
float expansion_bottom = expansion_top;
|
||||
float expansion_bottom_bridge = expansion_top;
|
||||
// Expand by waves of expansion_step size (expansion_step is scaled), but with no more steps than max_nr_expansion_steps.
|
||||
static constexpr const float expansion_step = scaled<float>(0.1);
|
||||
// Don't take more than max_nr_steps for small expansion_step.
|
||||
static constexpr const size_t max_nr_expansion_steps = 5;
|
||||
// Radius (with added epsilon) to absorb empty regions emerging from regularization of ensuring, cf. const float narrow_ensure_vertical_wall_thickness_region_radius = 0.5f * 0.65f * min_perimeter_infill_spacing;
|
||||
const float closing_radius = 0.55f * 0.65f * 1.05f * this->flow(frSolidInfill).scaled_spacing();
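// Worked example (illustrative): 0.55 * 0.65 * 1.05 is roughly 0.375, so for a solid infill
// spacing of 0.45 mm the closing radius comes out to roughly 0.17 mm (in scaled units).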
|
||||
|
||||
// Expand the top / bottom / bridge surfaces into the shell thickness solid infills.
|
||||
double layer_thickness;
|
||||
ExPolygons shells = union_ex(fill_surfaces_extract_expolygons(fill_surfaces.surfaces, { stInternalSolid }, layer_thickness));
|
||||
ExPolygons sparse = union_ex(fill_surfaces_extract_expolygons(fill_surfaces.surfaces, { stInternal }, layer_thickness));
|
||||
|
||||
SurfaceCollection bridges;
|
||||
const auto expansion_params_into_sparse_infill = RegionExpansionParameters::build(expansion_min, expansion_step, max_nr_expansion_steps);
|
||||
{
|
||||
BOOST_LOG_TRIVIAL(trace) << "Processing external surface, detecting bridges. layer " << this->layer()->print_z;
|
||||
const double custom_angle = this->region().config().bridge_angle.value;
|
||||
const auto expansion_params_into_solid_infill = RegionExpansionParameters::build(expansion_bottom_bridge, expansion_step, max_nr_expansion_steps);
|
||||
bridges.surfaces = custom_angle > 0 ?
|
||||
expand_merge_surfaces(fill_surfaces.surfaces, stBottomBridge, shells, expansion_params_into_solid_infill, sparse, expansion_params_into_sparse_infill, closing_radius, Geometry::deg2rad(custom_angle)) :
|
||||
expand_bridges_detect_orientations(fill_surfaces.surfaces, shells, expansion_params_into_solid_infill, sparse, expansion_params_into_sparse_infill, closing_radius);
|
||||
BOOST_LOG_TRIVIAL(trace) << "Processing external surface, detecting bridges - done";
|
||||
#if 0
|
||||
{
|
||||
static int iRun = 0;
|
||||
bridges.export_to_svg(debug_out_path("bridges-after-grouping-%d.svg", iRun++), true);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
Surfaces bottoms = expand_merge_surfaces(fill_surfaces.surfaces, stBottom, shells,
|
||||
RegionExpansionParameters::build(expansion_bottom, expansion_step, max_nr_expansion_steps),
|
||||
sparse, expansion_params_into_sparse_infill, closing_radius);
|
||||
Surfaces tops = expand_merge_surfaces(fill_surfaces.surfaces, stTop, shells,
|
||||
RegionExpansionParameters::build(expansion_top, expansion_step, max_nr_expansion_steps),
|
||||
sparse, expansion_params_into_sparse_infill, closing_radius);
|
||||
|
||||
// m_fill_surfaces.remove_types({ stBottomBridge, stBottom, stTop, stInternal, stInternalSolid });
|
||||
fill_surfaces.clear();
|
||||
reserve_more(fill_surfaces.surfaces, shells.size() + sparse.size() + bridges.size() + bottoms.size() + tops.size());
|
||||
{
|
||||
Surface solid_templ(stInternalSolid, {});
|
||||
solid_templ.thickness = layer_thickness;
|
||||
fill_surfaces.append(std::move(shells), solid_templ);
|
||||
}
|
||||
{
|
||||
Surface sparse_templ(stInternal, {});
|
||||
sparse_templ.thickness = layer_thickness;
|
||||
fill_surfaces.append(std::move(sparse), sparse_templ);
|
||||
}
|
||||
fill_surfaces.append(std::move(bridges.surfaces));
|
||||
fill_surfaces.append(std::move(bottoms));
|
||||
fill_surfaces.append(std::move(tops));
|
||||
|
||||
#ifdef SLIC3R_DEBUG_SLICE_PROCESSING
|
||||
export_region_fill_surfaces_to_svg_debug("4_process_external_surfaces-final");
|
||||
#endif /* SLIC3R_DEBUG_SLICE_PROCESSING */
|
||||
}
|
||||
#else
|
||||
|
||||
//#define EXTERNAL_SURFACES_OFFSET_PARAMETERS ClipperLib::jtMiter, 3.
|
||||
//#define EXTERNAL_SURFACES_OFFSET_PARAMETERS ClipperLib::jtMiter, 1.5
|
||||
#define EXTERNAL_SURFACES_OFFSET_PARAMETERS ClipperLib::jtSquare, 0.
|
||||
@ -404,6 +751,7 @@ void LayerRegion::process_external_surfaces(const Layer *lower_layer, const Poly
|
||||
export_region_fill_surfaces_to_svg_debug("3_process_external_surfaces-final");
|
||||
#endif /* SLIC3R_DEBUG_SLICE_PROCESSING */
|
||||
}
|
||||
#endif
|
||||
|
||||
void LayerRegion::prepare_fill_surfaces()
|
||||
{
|
||||
|
@ -188,18 +188,28 @@ bool has_duplicate_points(std::vector<Point> &&pts)
|
||||
return false;
|
||||
}
|
||||
|
||||
template<bool IncludeBoundary>
|
||||
BoundingBox get_extents(const Points &pts)
|
||||
{
|
||||
return BoundingBox(pts);
|
||||
BoundingBox out;
|
||||
BoundingBox::construct<IncludeBoundary>(out, pts.begin(), pts.end());
|
||||
return out;
|
||||
}
|
||||
template BoundingBox get_extents<false>(const Points &pts);
|
||||
template BoundingBox get_extents<true>(const Points &pts);
|
||||
|
||||
BoundingBox get_extents(const std::vector<Points> &pts)
|
||||
// if IncludeBoundary, then a bounding box is defined even for a single point.
|
||||
// otherwise a bounding box is only defined if it has a positive area.
|
||||
template<bool IncludeBoundary>
|
||||
BoundingBox get_extents(const VecOfPoints &pts)
|
||||
{
|
||||
BoundingBox bbox;
|
||||
for (const Points &p : pts)
|
||||
bbox.merge(get_extents(p));
|
||||
bbox.merge(get_extents<IncludeBoundary>(p));
|
||||
return bbox;
|
||||
}
|
||||
template BoundingBox get_extents<false>(const VecOfPoints &pts);
|
||||
template BoundingBox get_extents<true>(const VecOfPoints &pts);
|
||||
|
||||
BoundingBoxf get_extents(const std::vector<Vec2d> &pts)
|
||||
{
|
||||
|
@ -55,6 +55,7 @@ using Points3 = std::vector<Vec3crd>;
|
||||
using Pointfs = std::vector<Vec2d>;
|
||||
using Vec2ds = std::vector<Vec2d>;
|
||||
using Pointf3s = std::vector<Vec3d>;
|
||||
using VecOfPoints = std::vector<Points>;
|
||||
|
||||
using Matrix2f = Eigen::Matrix<float, 2, 2, Eigen::DontAlign>;
|
||||
using Matrix2d = Eigen::Matrix<double, 2, 2, Eigen::DontAlign>;
|
||||
@ -268,8 +269,20 @@ inline Point lerp(const Point &a, const Point &b, double t)
|
||||
return ((1. - t) * a.cast<double>() + t * b.cast<double>()).cast<coord_t>();
|
||||
}
|
||||
|
||||
// if IncludeBoundary, then a bounding box is defined even for a single point.
|
||||
// otherwise a bounding box is only defined if it has a positive area.
|
||||
template<bool IncludeBoundary = false>
|
||||
BoundingBox get_extents(const Points &pts);
|
||||
BoundingBox get_extents(const std::vector<Points> &pts);
|
||||
extern template BoundingBox get_extents<false>(const Points &pts);
|
||||
extern template BoundingBox get_extents<true>(const Points &pts);
|
||||
|
||||
// if IncludeBoundary, then a bounding box is defined even for a single point.
|
||||
// otherwise a bounding box is only defined if it has a positive area.
|
||||
template<bool IncludeBoundary = false>
|
||||
BoundingBox get_extents(const VecOfPoints &pts);
|
||||
extern template BoundingBox get_extents<false>(const VecOfPoints &pts);
|
||||
extern template BoundingBox get_extents<true>(const VecOfPoints &pts);
|
||||
|
||||
BoundingBoxf get_extents(const std::vector<Vec2d> &pts);
|
||||
|
||||
// Test for duplicate points in a vector of points.
|
||||
|
@ -221,6 +221,25 @@ inline void polylines_append(Polylines &dst, Polylines &&src)
|
||||
}
|
||||
}
|
||||
|
||||
// Merge polylines at their respective end points.
|
||||
// dst_first: the merge point is at dst.begin() or dst.end()?
|
||||
// src_first: the merge point is at src.begin() or src.end()?
|
||||
// The orientation of the resulting polyline is unknown, the output polyline may start
|
||||
// either with src piece or dst piece.
|
||||
template<typename PointsType>
|
||||
inline void polylines_merge(PointsType &dst, bool dst_first, PointsType &&src, bool src_first)
|
||||
{
|
||||
if (dst_first) {
|
||||
if (src_first)
|
||||
std::reverse(dst.begin(), dst.end());
|
||||
else
|
||||
std::swap(dst, src);
|
||||
} else if (! src_first)
|
||||
std::reverse(src.begin(), src.end());
|
||||
// Merge src into dst.
|
||||
append(dst, std::move(src));
|
||||
}
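A small sketch of the four-way merge logic (illustrative only, with made-up points): joining two open paths that share their last point.

    Points dst { { 0, 0 }, { 10, 0 } };   // ends at (10, 0)
    Points src { { 20, 0 }, { 10, 0 } };  // also ends at (10, 0)
    polylines_merge(dst, /*dst_first=*/false, std::move(src), /*src_first=*/false);
    // src is reversed and appended: dst now runs (0,0), (10,0), (10,0), (20,0),
    // a single polyline from (0,0) to (20,0); the shared vertex is kept twice by the plain append.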
|
||||
|
||||
const Point& leftmost_point(const Polylines &polylines);
|
||||
|
||||
bool remove_degenerate(Polylines &polylines);
|
||||
|
@ -2,6 +2,8 @@
|
||||
#define slic3r_Print_hpp_
|
||||
|
||||
#include "PrintBase.hpp"
|
||||
#include "Fill/FillAdaptive.hpp"
|
||||
#include "Fill/FillLightning.hpp"
|
||||
|
||||
#include "BoundingBox.hpp"
|
||||
#include "ExtrusionEntityCollection.hpp"
|
||||
@ -491,7 +493,8 @@ private:
|
||||
void discover_horizontal_shells();
|
||||
void combine_infill();
|
||||
void _generate_support_material();
|
||||
std::pair<FillAdaptive::OctreePtr, FillAdaptive::OctreePtr> prepare_adaptive_infill_data();
|
||||
std::pair<FillAdaptive::OctreePtr, FillAdaptive::OctreePtr> prepare_adaptive_infill_data(
|
||||
const std::vector<std::pair<const Surface*, float>>& surfaces_w_bottom_z) const;
|
||||
FillLightning::GeneratorPtr prepare_lightning_infill_data();
|
||||
|
||||
// BBS
|
||||
@ -522,6 +525,10 @@ private:
|
||||
// this is set to true when LayerRegion->slices is split in top/internal/bottom
|
||||
// so that next call to make_perimeters() performs a union() before computing loops
|
||||
bool m_typed_slices = false;
|
||||
|
||||
std::pair<FillAdaptive::OctreePtr, FillAdaptive::OctreePtr> m_adaptive_fill_octrees;
|
||||
FillLightning::GeneratorPtr m_lightning_generator;
|
||||
|
||||
std::vector < VolumeSlices > firstLayerObjSliceByVolume;
|
||||
std::vector<groupedVolumeSlices> firstLayerObjSliceByGroups;
|
||||
|
||||
|
@ -2100,9 +2100,9 @@ void PrintConfigDef::init_fff_params()
|
||||
def = this->add("interface_shells", coBool);
|
||||
//def->label = L("Interface shells");
|
||||
def->label = "Interface shells";
|
||||
//def->tooltip = L("Force the generation of solid shells between adjacent materials/volumes. "
|
||||
// "Useful for multi-extruder prints with translucent materials or manual soluble "
|
||||
// "support material");
|
||||
def->tooltip = L("Force the generation of solid shells between adjacent materials/volumes. "
|
||||
"Useful for multi-extruder prints with translucent materials or manual soluble "
|
||||
"support material");
|
||||
def->category = L("Quality");
|
||||
def->mode = comDevelop;
|
||||
def->set_default_value(new ConfigOptionBool(false));
|
||||
|
(File diff suppressed because it is too large.)
src/libslic3r/RegionExpansion.cpp (new file, 548 lines)
@ -0,0 +1,548 @@
|
||||
///|/ Copyright (c) Prusa Research 2022 - 2023 Vojtěch Bubník @bubnikv
|
||||
///|/
|
||||
///|/ PrusaSlicer is released under the terms of the AGPLv3 or higher
|
||||
///|/
|
||||
#include "RegionExpansion.hpp"
|
||||
|
||||
#include <libslic3r/AABBTreeIndirect.hpp>
|
||||
#include <libslic3r/ClipperZUtils.hpp>
|
||||
#include <libslic3r/ClipperUtils.hpp>
|
||||
#include <libslic3r/Utils.hpp>
|
||||
|
||||
#include <numeric>
|
||||
|
||||
namespace Slic3r {
|
||||
namespace Algorithm {
|
||||
|
||||
// Calculating radius discretization according to ClipperLib offsetter code, see void ClipperOffset::DoOffset(double delta)
|
||||
inline double clipper_round_offset_error(double offset, double arc_tolerance)
|
||||
{
|
||||
static constexpr const double def_arc_tolerance = 0.25;
|
||||
const double y =
|
||||
arc_tolerance <= 0 ?
|
||||
def_arc_tolerance :
|
||||
arc_tolerance > offset * def_arc_tolerance ?
|
||||
offset * def_arc_tolerance :
|
||||
arc_tolerance;
|
||||
double steps = std::min(M_PI / std::acos(1. - y / offset), offset * M_PI);
|
||||
return offset * (1. - cos(M_PI / steps));
|
||||
}
|
||||
|
||||
RegionExpansionParameters RegionExpansionParameters::build(
|
||||
// Scaled expansion value
|
||||
float full_expansion,
|
||||
// Expand by waves of expansion_step size (expansion_step is scaled).
|
||||
float expansion_step,
|
||||
// Don't take more than max_nr_steps for small expansion_step.
|
||||
size_t max_nr_expansion_steps)
|
||||
{
|
||||
assert(full_expansion > 0);
|
||||
assert(expansion_step > 0);
|
||||
assert(max_nr_expansion_steps > 0);
|
||||
|
||||
RegionExpansionParameters out;
|
||||
// Initial expansion of src to make the source regions intersect with boundary regions just a bit.
|
||||
// The expansion should not be too tiny, but also small enough, so the following expansion will
|
||||
// compensate for tiny_expansion and bring the wave back to the boundary without producing
|
||||
// ugly cusps where it touches the boundary.
|
||||
out.tiny_expansion = std::min(0.25f * full_expansion, scaled<float>(0.05f));
|
||||
size_t nsteps = size_t(ceil((full_expansion - out.tiny_expansion) / expansion_step));
|
||||
if (max_nr_expansion_steps > 0)
|
||||
nsteps = std::min(nsteps, max_nr_expansion_steps);
|
||||
assert(nsteps > 0);
|
||||
out.initial_step = (full_expansion - out.tiny_expansion) / nsteps;
|
||||
if (nsteps > 1 && 0.25 * out.initial_step < out.tiny_expansion) {
|
||||
// Decrease the step size by lowering number of steps.
|
||||
nsteps = std::max<size_t>(1, (floor((full_expansion - out.tiny_expansion) / (4. * out.tiny_expansion))));
|
||||
out.initial_step = (full_expansion - out.tiny_expansion) / nsteps;
|
||||
}
|
||||
if (0.25 * out.initial_step < out.tiny_expansion || nsteps == 1) {
|
||||
out.tiny_expansion = 0.2f * full_expansion;
|
||||
out.initial_step = 0.8f * full_expansion;
|
||||
}
|
||||
out.other_step = out.initial_step;
|
||||
out.num_other_steps = nsteps - 1;
|
||||
|
||||
// Accuracy of the offsetter for wave propagation.
|
||||
out.arc_tolerance = scaled<double>(0.1);
|
||||
out.shortest_edge_length = out.initial_step * ClipperOffsetShortestEdgeFactor;
|
||||
|
||||
// Maximum inflation of seed contours over the boundary. Used to trim boundary to speed up
|
||||
// clipping during wave propagation. Needs to be in sync with the offsetter accuracy.
|
||||
// Clipper positive round offset should rather offset less than more.
|
||||
// Still a little bit of additional offset was added.
|
||||
out.max_inflation = (out.tiny_expansion + nsteps * out.initial_step) * 1.1;
|
||||
// (clipper_round_offset_error(out.tiny_expansion, co.ArcTolerance) + nsteps * clipper_round_offset_error(out.initial_step, co.ArcTolerance) * 1.5; // Account for uncertainty
|
||||
|
||||
return out;
|
||||
}
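A worked example of the step computation above (illustrative only, values quoted in mm before scaling): for full_expansion = 1.0, expansion_step = 0.1 and max_nr_expansion_steps = 5, tiny_expansion = min(0.25, 0.05) = 0.05; nsteps = ceil(0.95 / 0.1) = 10 is clamped to 5, giving initial_step = 0.19; because 0.25 * 0.19 < 0.05 the step count is lowered to floor(0.95 / 0.2) = 4, so initial_step becomes 0.2375, which passes the check. The final parameters are tiny_expansion = 0.05, initial_step = other_step = 0.2375 and num_other_steps = 3, which together cover 0.05 + 0.2375 + 3 * 0.2375 = 1.0.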
|
||||
|
||||
// similar to expolygons_to_zpaths(), but each contour is expanded before converted to zpath.
|
||||
// The expanded contours are then opened (the first point is repeated at the end).
|
||||
static ClipperLib_Z::Paths expolygons_to_zpaths_expanded_opened(
|
||||
const ExPolygons &src, const float expansion, coord_t &base_idx)
|
||||
{
|
||||
ClipperLib_Z::Paths out;
|
||||
out.reserve(2 * std::accumulate(src.begin(), src.end(), size_t(0),
|
||||
[](const size_t acc, const ExPolygon &expoly) { return acc + expoly.num_contours(); }));
|
||||
ClipperLib::ClipperOffset offsetter;
|
||||
offsetter.ShortestEdgeLength = expansion * ClipperOffsetShortestEdgeFactor;
|
||||
ClipperLib::Paths expansion_cache;
|
||||
for (const ExPolygon &expoly : src) {
|
||||
for (size_t icontour = 0; icontour < expoly.num_contours(); ++ icontour) {
|
||||
// Execute reorients the contours so that the outer most contour has a positive area. Thus the output
|
||||
// contours will be CCW oriented even though the input paths are CW oriented.
|
||||
// Offset is applied after contour reorientation, thus the signum of the offset value is reversed.
|
||||
offsetter.Clear();
|
||||
offsetter.AddPath(expoly.contour_or_hole(icontour).points, ClipperLib::jtSquare, ClipperLib::etClosedPolygon);
|
||||
expansion_cache.clear();
|
||||
offsetter.Execute(expansion_cache, icontour == 0 ? expansion : -expansion);
|
||||
append(out, ClipperZUtils::to_zpaths<true>(expansion_cache, base_idx));
|
||||
}
|
||||
++ base_idx;
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
// Paths were created by splitting closed polygons into open paths and then by clipping them.
|
||||
// Thus some pieces of the clipped polygons may now become split at the ends of the source polygons.
|
||||
// Those ends are sorted lexicographically in "splits".
|
||||
// Reconnect those split pieces.
|
||||
static inline void merge_splits(ClipperLib_Z::Paths &paths, std::vector<std::pair<ClipperLib_Z::IntPoint, int>> &splits)
|
||||
{
|
||||
for (auto it_path = paths.begin(); it_path != paths.end(); ) {
|
||||
ClipperLib_Z::Path &path = *it_path;
|
||||
assert(path.size() >= 2);
|
||||
bool merged = false;
|
||||
if (path.size() >= 2) {
|
||||
const ClipperLib_Z::IntPoint &front = path.front();
|
||||
const ClipperLib_Z::IntPoint &back = path.back();
|
||||
// The path before clipping was supposed to cross the clipping boundary or be fully out of it.
|
||||
// Thus the clipped contour is supposed to become open, with one exception: The anchor expands into a closed hole.
|
||||
if (front.x() != back.x() || front.y() != back.y()) {
|
||||
// Look up the ends in "splits", possibly join the contours.
|
||||
// "splits" maps into the other piece connected to the same end point.
|
||||
auto find_end = [&splits](const ClipperLib_Z::IntPoint &pt) -> std::pair<ClipperLib_Z::IntPoint, int>* {
|
||||
auto it = std::lower_bound(splits.begin(), splits.end(), pt,
|
||||
[](const auto &l, const auto &r){ return ClipperZUtils::zpoint_lower(l.first, r); });
|
||||
return it != splits.end() && it->first == pt ? &(*it) : nullptr;
|
||||
};
|
||||
auto *end = find_end(front);
|
||||
bool end_front = true;
|
||||
if (! end) {
|
||||
end_front = false;
|
||||
end = find_end(back);
|
||||
}
|
||||
if (end) {
|
||||
// This segment ends at a split point of the source closed contour before clipping.
|
||||
if (end->second == -1) {
|
||||
// Open end was found, not matched yet.
|
||||
end->second = int(it_path - paths.begin());
|
||||
} else {
|
||||
// Open end was found and matched with end->second
|
||||
ClipperLib_Z::Path &other_path = paths[end->second];
|
||||
polylines_merge(other_path, other_path.front() == end->first, std::move(path), end_front);
|
||||
if (std::next(it_path) == paths.end()) {
|
||||
paths.pop_back();
|
||||
break;
|
||||
}
|
||||
path = std::move(paths.back());
|
||||
paths.pop_back();
|
||||
merged = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (! merged)
|
||||
++ it_path;
|
||||
}
|
||||
}
|
||||
|
||||
using AABBTreeBBoxes = AABBTreeIndirect::Tree<2, coord_t>;
|
||||
|
||||
static AABBTreeBBoxes build_aabb_tree_over_expolygons(const ExPolygons &expolygons)
|
||||
{
|
||||
// Calculate bounding boxes of internal slices.
|
||||
std::vector<AABBTreeIndirect::BoundingBoxWrapper> bboxes;
|
||||
bboxes.reserve(expolygons.size());
|
||||
for (size_t i = 0; i < expolygons.size(); ++ i)
|
||||
bboxes.emplace_back(i, get_extents(expolygons[i].contour));
|
||||
// Build AABB tree over bounding boxes of boundary expolygons.
|
||||
AABBTreeBBoxes out;
|
||||
out.build_modify_input(bboxes);
|
||||
return out;
|
||||
}
|
||||
|
||||
static int sample_in_expolygons(
|
||||
// AABB tree over boundary expolygons
|
||||
const AABBTreeBBoxes &aabb_tree,
|
||||
const ExPolygons &expolygons,
|
||||
const Point &sample)
|
||||
{
|
||||
int out = -1;
|
||||
AABBTreeIndirect::traverse(aabb_tree,
|
||||
[&sample](const AABBTreeBBoxes::Node &node) {
|
||||
return node.bbox.contains(sample);
|
||||
},
|
||||
[&expolygons, &sample, &out](const AABBTreeBBoxes::Node &node) {
|
||||
assert(node.is_leaf());
|
||||
assert(node.is_valid());
|
||||
if (expolygons[node.idx].contains(sample)) {
|
||||
out = int(node.idx);
|
||||
// Stop traversal.
|
||||
return false;
|
||||
}
|
||||
// Continue traversal.
|
||||
return true;
|
||||
});
|
||||
return out;
|
||||
}

std::vector<WaveSeed> wave_seeds(
    // Source regions that are supposed to touch the boundary.
    const ExPolygons &src,
    // Boundaries of source regions touching the "boundary" regions will be expanded into the "boundary" region.
    const ExPolygons &boundary,
    // Initial expansion of src to make the source regions intersect with boundary regions just a bit.
    float             tiny_expansion,
    // Sort output by boundary ID and source ID.
    bool              sorted)
{
    assert(tiny_expansion > 0);

    if (src.empty() || boundary.empty())
        return {};

    using Intersection  = ClipperZUtils::ClipperZIntersectionVisitor::Intersection;
    using Intersections = ClipperZUtils::ClipperZIntersectionVisitor::Intersections;

    ClipperLib_Z::Paths segments;
    Intersections       intersections;

    coord_t             idx_boundary_begin = 1;
    coord_t             idx_boundary_end   = idx_boundary_begin;
    coord_t             idx_src_end;

    {
        ClipperLib_Z::Clipper zclipper;
        ClipperZUtils::ClipperZIntersectionVisitor visitor(intersections);
        zclipper.ZFillFunction(visitor.clipper_callback());
        // as closed contours
        zclipper.AddPaths(ClipperZUtils::expolygons_to_zpaths(boundary, idx_boundary_end), ClipperLib_Z::ptClip, true);
        // as open contours
        std::vector<std::pair<ClipperLib_Z::IntPoint, int>> zsrc_splits;
        {
            idx_src_end = idx_boundary_end;
            ClipperLib_Z::Paths zsrc = expolygons_to_zpaths_expanded_opened(src, tiny_expansion, idx_src_end);
            zclipper.AddPaths(zsrc, ClipperLib_Z::ptSubject, false);
            zsrc_splits.reserve(zsrc.size());
            for (const ClipperLib_Z::Path &path : zsrc) {
                assert(path.size() >= 2);
                assert(path.front() == path.back());
                zsrc_splits.emplace_back(path.front(), -1);
            }
            std::sort(zsrc_splits.begin(), zsrc_splits.end(), [](const auto &l, const auto &r){ return ClipperZUtils::zpoint_lower(l.first, r.first); });
        }
        ClipperLib_Z::PolyTree polytree;
        zclipper.Execute(ClipperLib_Z::ctIntersection, polytree, ClipperLib_Z::pftNonZero, ClipperLib_Z::pftNonZero);
        ClipperLib_Z::PolyTreeToPaths(std::move(polytree), segments);
        merge_splits(segments, zsrc_splits);
    }

    // AABBTree over bounding boxes of boundaries.
    // Only built if necessary, that is if any of the seed contours is closed, thus there is no intersection point
    // with the boundary and all Z coordinates of the closed contour point to the source contour.
    AABBTreeBBoxes aabb_tree;

    // Sort paths into their respective islands.
    // Each src x boundary will be processed (wave expanded) independently.
    // Multiple pieces of a single src may intersect the same boundary.
    WaveSeeds out;
    out.reserve(segments.size());
    int iseed = 0;
    for (const ClipperLib_Z::Path &path : segments) {
        assert(path.size() >= 2);
        const ClipperLib_Z::IntPoint &front = path.front();
        const ClipperLib_Z::IntPoint &back  = path.back();
        // Both ends of a seed segment are supposed to be inside a single boundary expolygon.
        // Thus as long as the seed contour is not closed, it should be open at a boundary point.
        assert((front == back && front.z() >= idx_boundary_end && front.z() < idx_src_end) ||
            //(front.z() < 0 && back.z() < 0));
            // Hope that at least one end of an open polyline is clipped by the boundary, thus an intersection point is created.
            (front.z() < 0 || back.z() < 0));
        const Intersection *intersection = nullptr;
        auto intersection_point_valid = [idx_boundary_end, idx_src_end](const Intersection &is) {
            return is.first >= 1 && is.first < idx_boundary_end &&
                   is.second >= idx_boundary_end && is.second < idx_src_end;
        };
        if (front.z() < 0) {
            const Intersection &is = intersections[- front.z() - 1];
            assert(intersection_point_valid(is));
            if (intersection_point_valid(is))
                intersection = &is;
        }
        if (! intersection && back.z() < 0) {
            const Intersection &is = intersections[- back.z() - 1];
            assert(intersection_point_valid(is));
            if (intersection_point_valid(is))
                intersection = &is;
        }
        if (intersection) {
            // The path intersects the boundary contour at least at one side.
            out.push_back({ uint32_t(intersection->second - idx_boundary_end), uint32_t(intersection->first - 1), ClipperZUtils::from_zpath(path) });
        } else {
            // This should be a closed contour.
            assert(front == back && front.z() >= idx_boundary_end && front.z() < idx_src_end);
            // Find a source boundary expolygon of one sample of this closed path.
            if (aabb_tree.empty())
                aabb_tree = build_aabb_tree_over_expolygons(boundary);
            int boundary_id = sample_in_expolygons(aabb_tree, boundary, Point(front.x(), front.y()));
            // Boundary that contains the sample point was found.
            assert(boundary_id >= 0);
            if (boundary_id >= 0)
                out.push_back({ uint32_t(front.z() - idx_boundary_end), uint32_t(boundary_id), ClipperZUtils::from_zpath(path) });
        }
        ++ iseed;
    }

    if (sorted)
        // Sort the seeds by their intersection boundary and source contour.
        std::sort(out.begin(), out.end(), lower_by_boundary_and_src);
    return out;
}
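The Z bookkeeping above tags every ClipperLib_Z point with where it came from, and the valid ranges are exactly those checked by the assertions in wave_seeds(). The helper below is a hypothetical illustration of that convention only; classify_z and ZTag are not part of the ported code.

// Illustrative only: how wave_seeds() interprets a Z tag.
enum class ZTag { Boundary, Source, Intersection };

static ZTag classify_z(coord_t z, coord_t idx_boundary_end, coord_t idx_src_end)
{
    if (z < 0)
        return ZTag::Intersection;   // (-z - 1) indexes the `intersections` vector
    if (z >= 1 && z < idx_boundary_end)
        return ZTag::Boundary;       // 1-based index of a boundary contour
    assert(z >= idx_boundary_end && z < idx_src_end);
    return ZTag::Source;             // source contour id, offset by idx_boundary_end
}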

static ClipperLib::Paths wavefront_initial(ClipperLib::ClipperOffset &co, const ClipperLib::Paths &polylines, float offset)
{
    ClipperLib::Paths out;
    out.reserve(polylines.size());
    ClipperLib::Paths out_this;
    for (const ClipperLib::Path &path : polylines) {
        assert(path.size() >= 2);
        co.Clear();
        co.AddPath(path, jtRound, path.front() == path.back() ? ClipperLib::etClosedLine : ClipperLib::etOpenRound);
        co.Execute(out_this, offset);
        append(out, std::move(out_this));
    }
    return out;
}

// Input polygons may consist of multiple expolygons, even nested expolygons.
// After inflation some polygons may thus overlap, however the overlap is being resolved during the successive
// clipping operation, thus it is not being done here.
static ClipperLib::Paths wavefront_step(ClipperLib::ClipperOffset &co, const ClipperLib::Paths &polygons, float offset)
{
    ClipperLib::Paths out;
    out.reserve(polygons.size());
    ClipperLib::Paths out_this;
    for (const ClipperLib::Path &polygon : polygons) {
        co.Clear();
        // Execute reorients the contours so that the outer most contour has a positive area. Thus the output
        // contours will be CCW oriented even though the input paths are CW oriented.
        // Offset is applied after contour reorientation, thus the signum of the offset value is reversed.
        co.AddPath(polygon, jtRound, ClipperLib::etClosedPolygon);
        bool ccw = ClipperLib::Orientation(polygon);
        co.Execute(out_this, ccw ? offset : - offset);
        if (! ccw) {
            // Reverse the resulting contours.
            for (ClipperLib::Path &path : out_this)
                std::reverse(path.begin(), path.end());
        }
        append(out, std::move(out_this));
    }
    return out;
}

static ClipperLib::Paths wavefront_clip(const ClipperLib::Paths &wavefront, const Polygons &clipping)
{
    ClipperLib::Clipper clipper;
    clipper.AddPaths(wavefront, ClipperLib::ptSubject, true);
    clipper.AddPaths(ClipperUtils::PolygonsProvider(clipping), ClipperLib::ptClip, true);
    ClipperLib::Paths out;
    clipper.Execute(ClipperLib::ctIntersection, out, ClipperLib::pftPositive, ClipperLib::pftPositive);
    return out;
}

static Polygons propagate_wave_from_boundary(
    ClipperLib::ClipperOffset   &co,
    // Seed of the wave: Open polylines very close to the boundary.
    const ClipperLib::Paths     &seed,
    // Boundary inside which the waveform will propagate.
    const ExPolygon             &boundary,
    // How much to inflate the seed lines to produce the first wave area.
    const float                  initial_step,
    // How much to inflate the first wave area and the successive wave areas in each step.
    const float                  other_step,
    // Number of inflate steps after the initial step.
    const size_t                 num_other_steps,
    // Maximum inflation of seed contours over the boundary. Used to trim boundary to speed up
    // clipping during wave propagation.
    const float                  max_inflation)
{
    assert(! seed.empty() && seed.front().size() >= 2);
    Polygons clipping = ClipperUtils::clip_clipper_polygons_with_subject_bbox(boundary, get_extents<true>(seed).inflated(max_inflation));
    ClipperLib::Paths polygons = wavefront_clip(wavefront_initial(co, seed, initial_step), clipping);
    // Now offset the remaining wave steps.
    for (size_t ioffset = 0; ioffset < num_other_steps; ++ ioffset)
        polygons = wavefront_clip(wavefront_step(co, polygons, other_step), clipping);
    return to_polygons(polygons);
}

// Resulting regions are sorted by boundary id and source id.
std::vector<RegionExpansion> propagate_waves(const WaveSeeds &seeds, const ExPolygons &boundary, const RegionExpansionParameters &params)
{
    std::vector<RegionExpansion> out;
    ClipperLib::Paths paths;
    ClipperLib::ClipperOffset co;
    co.ArcTolerance = params.arc_tolerance;
    co.ShortestEdgeLength = params.shortest_edge_length;
    for (auto it_seed = seeds.begin(); it_seed != seeds.end();) {
        auto it = it_seed;
        paths.clear();
        for (; it != seeds.end() && it->boundary == it_seed->boundary && it->src == it_seed->src; ++ it)
            paths.emplace_back(it->path);
        // Propagate the wavefront while clipping it with the trimmed boundary.
        // Collect the expanded polygons, merge them with the source polygons.
        RegionExpansion re;
        for (Polygon &polygon : propagate_wave_from_boundary(co, paths, boundary[it_seed->boundary], params.initial_step, params.other_step, params.num_other_steps, params.max_inflation))
            out.push_back({ std::move(polygon), it_seed->src, it_seed->boundary });
        it_seed = it;
    }

    return out;
}

std::vector<RegionExpansion> propagate_waves(const ExPolygons &src, const ExPolygons &boundary, const RegionExpansionParameters &params)
{
    return propagate_waves(wave_seeds(src, boundary, params.tiny_expansion, true), boundary, params);
}

std::vector<RegionExpansion> propagate_waves(const ExPolygons &src, const ExPolygons &boundary,
    // Scaled expansion value
    float expansion,
    // Expand by waves of expansion_step size (expansion_step is scaled).
    float expansion_step,
    // Don't take more than max_nr_steps for small expansion_step.
    size_t max_nr_steps)
{
    return propagate_waves(src, boundary, RegionExpansionParameters::build(expansion, expansion_step, max_nr_steps));
}

// Returns regions per source ExPolygon expanded into boundary.
std::vector<RegionExpansionEx> propagate_waves_ex(const WaveSeeds &seeds, const ExPolygons &boundary, const RegionExpansionParameters &params)
{
    std::vector<RegionExpansion> expanded = propagate_waves(seeds, boundary, params);
    assert(std::is_sorted(seeds.begin(), seeds.end(), [](const auto &l, const auto &r){ return l.boundary < r.boundary || (l.boundary == r.boundary && l.src < r.src); }));
    Polygons acc;
    std::vector<RegionExpansionEx> out;
    for (auto it = expanded.begin(); it != expanded.end(); ) {
        auto it2 = it;
        acc.clear();
        for (; it2 != expanded.end() && it2->boundary_id == it->boundary_id && it2->src_id == it->src_id; ++ it2)
            acc.emplace_back(std::move(it2->polygon));
        size_t size = it2 - it;
        if (size == 1)
            out.push_back({ ExPolygon{std::move(acc.front())}, it->src_id, it->boundary_id });
        else {
            ExPolygons expolys = union_ex(acc);
            reserve_more_power_of_2(out, expolys.size());
            for (ExPolygon &ex : expolys)
                out.push_back({ std::move(ex), it->src_id, it->boundary_id });
        }
        it = it2;
    }
    return out;
}

// Returns regions per source ExPolygon expanded into boundary.
std::vector<RegionExpansionEx> propagate_waves_ex(
    // Source regions that are supposed to touch the boundary.
    // Boundaries of source regions touching the "boundary" regions will be expanded into the "boundary" region.
    const ExPolygons &src,
    const ExPolygons &boundary,
    // Scaled expansion value
    float full_expansion,
    // Expand by waves of expansion_step size (expansion_step is scaled).
    float expansion_step,
    // Don't take more than max_nr_steps for small expansion_step.
    size_t max_nr_expansion_steps)
{
    auto params = RegionExpansionParameters::build(full_expansion, expansion_step, max_nr_expansion_steps);
    return propagate_waves_ex(wave_seeds(src, boundary, params.tiny_expansion, true), boundary, params);
}

std::vector<Polygons> expand_expolygons(const ExPolygons &src, const ExPolygons &boundary,
    // Scaled expansion value
    float expansion,
    // Expand by waves of expansion_step size (expansion_step is scaled).
    float expansion_step,
    // Don't take more than max_nr_steps for small expansion_step.
    size_t max_nr_steps)
{
    std::vector<Polygons> out(src.size(), Polygons{});
    for (RegionExpansion &r : propagate_waves(src, boundary, expansion, expansion_step, max_nr_steps))
        out[r.src_id].emplace_back(std::move(r.polygon));
    return out;
}

std::vector<ExPolygon> merge_expansions_into_expolygons(ExPolygons &&src, std::vector<RegionExpansion> &&expanded)
{
    // expanded regions will be merged into source regions, thus they will be re-sorted by source id.
    std::sort(expanded.begin(), expanded.end(), [](const auto &l, const auto &r) { return l.src_id < r.src_id; });
    uint32_t last = 0;
    Polygons acc;
    ExPolygons out;
    out.reserve(src.size());
    for (auto it = expanded.begin(); it != expanded.end();) {
        for (; last < it->src_id; ++ last)
            out.emplace_back(std::move(src[last]));
        acc.clear();
        assert(it->src_id == last);
        for (; it != expanded.end() && it->src_id == last; ++ it)
            acc.emplace_back(std::move(it->polygon));
        //FIXME offset & merging could be more efficient, for example one does not need to copy the source expolygon
        ExPolygon &src_ex = src[last ++];
        assert(! src_ex.contour.empty());
#if 0
        {
            static int iRun = 0;
            BoundingBox bbox = get_extents(acc);
            bbox.merge(get_extents(src_ex));
            SVG svg(debug_out_path("expand_merge_expolygons-failed-union=%d.svg", iRun ++).c_str(), bbox);
            svg.draw(acc);
            svg.draw_outline(acc, "black", scale_(0.05));
            svg.draw(src_ex, "red");
            svg.Close();
        }
#endif
        Point sample = src_ex.contour.front();
        append(acc, to_polygons(std::move(src_ex)));
        ExPolygons merged = union_safety_offset_ex(acc);
        // Expanding one expolygon by waves should not change connectivity of the source expolygon:
        // Single expolygon should be produced possibly with increased number of holes.
        if (merged.size() > 1) {
            // assert(merged.size() == 1);
            // There is something wrong with the initial waves. Most likely the bridge was not valid at all
            // or the boundary region was very close to some bridge edge, but not really touching.
            // Pick only a single merged expolygon, which contains one sample point of the source expolygon.
            auto aabb_tree = build_aabb_tree_over_expolygons(merged);
            int id = sample_in_expolygons(aabb_tree, merged, sample);
            assert(id != -1);
            if (id != -1)
                out.emplace_back(std::move(merged[id]));
        } else if (merged.size() == 1)
            out.emplace_back(std::move(merged.front()));
    }
    for (; last < uint32_t(src.size()); ++ last)
        out.emplace_back(std::move(src[last]));
    return out;
}

std::vector<ExPolygon> expand_merge_expolygons(ExPolygons &&src, const ExPolygons &boundary, const RegionExpansionParameters &params)
{
    // expanded regions are sorted by boundary id and source id
    std::vector<RegionExpansion> expanded = propagate_waves(src, boundary, params);
    return merge_expansions_into_expolygons(std::move(src), std::move(expanded));
}

} // Algorithm
} // Slic3r
122
src/libslic3r/RegionExpansion.hpp
Normal file
@ -0,0 +1,122 @@
///|/ Copyright (c) Prusa Research 2022 - 2023 Vojtěch Bubník @bubnikv
///|/
///|/ PrusaSlicer is released under the terms of the AGPLv3 or higher
///|/
#ifndef SRC_LIBSLIC3R_ALGORITHM_REGION_EXPANSION_HPP_
#define SRC_LIBSLIC3R_ALGORITHM_REGION_EXPANSION_HPP_

#include <cstdint>
#include <libslic3r/Point.hpp>
#include <libslic3r/Polygon.hpp>
#include <libslic3r/ExPolygon.hpp>

namespace Slic3r {
namespace Algorithm {

struct RegionExpansionParameters
{
    // Initial expansion of src to make the source regions intersect with boundary regions just a bit.
    float tiny_expansion;
    // How much to inflate the seed lines to produce the first wave area.
    float initial_step;
    // How much to inflate the first wave area and the successive wave areas in each step.
    float other_step;
    // Number of inflate steps after the initial step.
    size_t num_other_steps;
    // Maximum inflation of seed contours over the boundary. Used to trim boundary to speed up
    // clipping during wave propagation.
    float max_inflation;

    // Accuracy of the offsetter for wave propagation.
    double arc_tolerance;
    double shortest_edge_length;

    static RegionExpansionParameters build(
        // Scaled expansion value
        float full_expansion,
        // Expand by waves of expansion_step size (expansion_step is scaled).
        float expansion_step,
        // Don't take more than max_nr_steps for small expansion_step.
        size_t max_nr_expansion_steps);
};

struct WaveSeed {
    uint32_t src;
    uint32_t boundary;
    Points   path;
};
using WaveSeeds = std::vector<WaveSeed>;

inline bool lower_by_boundary_and_src(const WaveSeed &l, const WaveSeed &r)
{
    return l.boundary < r.boundary || (l.boundary == r.boundary && l.src < r.src);
}

inline bool lower_by_src_and_boundary(const WaveSeed &l, const WaveSeed &r)
{
    return l.src < r.src || (l.src == r.src && l.boundary < r.boundary);
}

// Expand src slightly outwards to intersect boundaries, trim the offsetted src polylines by the boundaries.
// Return the trimmed paths annotated with their origin (source of the path, index of the boundary region).
WaveSeeds wave_seeds(
    // Source regions that are supposed to touch the boundary.
    const ExPolygons &src,
    // Boundaries of source regions touching the "boundary" regions will be expanded into the "boundary" region.
    const ExPolygons &boundary,
    // Initial expansion of src to make the source regions intersect with boundary regions just a bit.
    float             tiny_expansion,
    bool              sorted);

struct RegionExpansion
{
    Polygon  polygon;
    uint32_t src_id;
    uint32_t boundary_id;
};

std::vector<RegionExpansion> propagate_waves(const WaveSeeds &seeds, const ExPolygons &boundary, const RegionExpansionParameters &params);
std::vector<RegionExpansion> propagate_waves(const ExPolygons &src, const ExPolygons &boundary, const RegionExpansionParameters &params);

std::vector<RegionExpansion> propagate_waves(const ExPolygons &src, const ExPolygons &boundary,
    // Scaled expansion value
    float expansion,
    // Expand by waves of expansion_step size (expansion_step is scaled).
    float expansion_step,
    // Don't take more than max_nr_steps for small expansion_step.
    size_t max_nr_steps);

struct RegionExpansionEx
{
    ExPolygon expolygon;
    uint32_t  src_id;
    uint32_t  boundary_id;
};

std::vector<RegionExpansionEx> propagate_waves_ex(const WaveSeeds &seeds, const ExPolygons &boundary, const RegionExpansionParameters &params);

std::vector<RegionExpansionEx> propagate_waves_ex(const ExPolygons &src, const ExPolygons &boundary,
    // Scaled expansion value
    float expansion,
    // Expand by waves of expansion_step size (expansion_step is scaled).
    float expansion_step,
    // Don't take more than max_nr_steps for small expansion_step.
    size_t max_nr_steps);

std::vector<Polygons> expand_expolygons(const ExPolygons &src, const ExPolygons &boundary,
    // Scaled expansion value
    float expansion,
    // Expand by waves of expansion_step size (expansion_step is scaled).
    float expansion_step,
    // Don't take more than max_nr_steps for small expansion_step.
    size_t max_nr_steps);

// Merge src with expansions, return the merged expolygons.
std::vector<ExPolygon> merge_expansions_into_expolygons(ExPolygons &&src, std::vector<RegionExpansion> &&expanded);

std::vector<ExPolygon> expand_merge_expolygons(ExPolygons &&src, const ExPolygons &boundary, const RegionExpansionParameters &params);

} // Algorithm
} // Slic3r

#endif /* SRC_LIBSLIC3R_ALGORITHM_REGION_EXPANSION_HPP_ */
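A hedged usage sketch of the API declared above (illustrative, not part of this commit): roughly how a caller could grow bridge regions into their surrounding shell regions and merge the result back into the bridges. The 2 mm / 0.1 mm values, the step cap and the function name are assumptions for the example only.

// Illustrative only: expand `bridges` into `shells` by ~2 mm in 0.1 mm waves,
// then merge the expansions back into the bridge expolygons.
Slic3r::ExPolygons grow_bridges_into_shells(Slic3r::ExPolygons bridges, const Slic3r::ExPolygons &shells)
{
    using namespace Slic3r::Algorithm;
    auto params = RegionExpansionParameters::build(
        float(scale_(2.)),    // full expansion (scaled units)
        float(scale_(0.1)),   // per-wave step (scaled units)
        20);                  // cap on the number of wave steps
    return expand_merge_expolygons(std::move(bridges), shells, params);
}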
@ -51,17 +51,12 @@ SurfacesPtr SurfaceCollection::filter_by_type(const SurfaceType type)
    return ss;
}

SurfacesPtr SurfaceCollection::filter_by_types(const SurfaceType *types, int ntypes)
SurfacesPtr SurfaceCollection::filter_by_types(std::initializer_list<SurfaceType> types)
{
    SurfacesPtr ss;
    for (Surfaces::iterator surface = this->surfaces.begin(); surface != this->surfaces.end(); ++surface) {
        for (int i = 0; i < ntypes; ++ i) {
            if (surface->surface_type == types[i]) {
                ss.push_back(&*surface);
                break;
            }
        }
    }
    for (Surface& surface : this->surfaces)
        if (std::find(types.begin(), types.end(), surface.surface_type) != types.end())
            ss.push_back(&surface);
    return ss;
}
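With the initializer_list overloads the call sites get noticeably shorter; a hedged before/after example (assuming a SurfaceCollection named `slices` and the usual Slic3r SurfaceType enumerators, shown for illustration only):

// Before: build a temporary array and pass its length.
//   const SurfaceType surface_types[] = { stBottom, stBottomBridge };
//   SurfacesPtr bottoms = slices.filter_by_types(surface_types, 2);
// After: list the types inline at the call site.
SurfacesPtr bottoms = slices.filter_by_types({ stBottom, stBottomBridge });
slices.keep_types({ stInternal, stInternalSolid });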
@ -86,23 +81,15 @@ void SurfaceCollection::keep_type(const SurfaceType type)
    surfaces.erase(surfaces.begin() + j, surfaces.end());
}

void SurfaceCollection::keep_types(const SurfaceType *types, int ntypes)
void SurfaceCollection::keep_types(std::initializer_list<SurfaceType> types)
{
    size_t j = 0;
    for (size_t i = 0; i < surfaces.size(); ++ i) {
        bool keep = false;
        for (int k = 0; k < ntypes; ++ k) {
            if (surfaces[i].surface_type == types[k]) {
                keep = true;
                break;
            }
        }
        if (keep) {
    for (size_t i = 0; i < surfaces.size(); ++ i)
        if (std::find(types.begin(), types.end(), surfaces[i].surface_type) != types.end()) {
            if (j < i)
                std::swap(surfaces[i], surfaces[j]);
            ++ j;
        }
    }
    if (j < surfaces.size())
        surfaces.erase(surfaces.begin() + j, surfaces.end());
}
@ -121,23 +108,15 @@ void SurfaceCollection::remove_type(const SurfaceType type)
    surfaces.erase(surfaces.begin() + j, surfaces.end());
}

void SurfaceCollection::remove_types(const SurfaceType *types, int ntypes)
void SurfaceCollection::remove_types(std::initializer_list<SurfaceType> types)
{
    size_t j = 0;
    for (size_t i = 0; i < surfaces.size(); ++ i) {
        bool remove = false;
        for (int k = 0; k < ntypes; ++ k) {
            if (surfaces[i].surface_type == types[k]) {
                remove = true;
                break;
            }
        }
        if (! remove) {
    for (size_t i = 0; i < surfaces.size(); ++ i)
        if (std::find(types.begin(), types.end(), surfaces[i].surface_type) == types.end()) {
            if (j < i)
                std::swap(surfaces[i], surfaces[j]);
            ++ j;
        }
    }
    if (j < surfaces.size())
        surfaces.erase(surfaces.begin() + j, surfaces.end());
}
@ -27,11 +27,11 @@ public:
        return false;
    }
    SurfacesPtr filter_by_type(const SurfaceType type);
    SurfacesPtr filter_by_types(const SurfaceType *types, int ntypes);
    SurfacesPtr filter_by_types(std::initializer_list<SurfaceType> types);
    void keep_type(const SurfaceType type);
    void keep_types(const SurfaceType *types, int ntypes);
    void keep_types(std::initializer_list<SurfaceType> types);
    void remove_type(const SurfaceType type);
    void remove_types(const SurfaceType *types, int ntypes);
    void remove_types(std::initializer_list<SurfaceType> type);
    void filter_by_type(SurfaceType type, Polygons* polygons);
    void set_type(SurfaceType type) {
        for (Surface &surface : this->surfaces)
@ -291,6 +291,16 @@ template<class T> size_t next_highest_power_of_2(T v,
    return next_highest_power_of_2(uint32_t(v));
}

template<class VectorType> void reserve_more(VectorType &vector, size_t n)
{
    vector.reserve(vector.size() + n);
}

template<class VectorType> void reserve_more_power_of_2(VectorType &vector, size_t n)
{
    vector.reserve(next_highest_power_of_2(vector.size() + n));
}

template<typename INDEX_TYPE>
inline INDEX_TYPE prev_idx_modulo(INDEX_TYPE idx, const INDEX_TYPE count)
{
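The two helpers added above are what the region-expansion code calls before appending in a loop (for instance reserve_more_power_of_2() in propagate_waves_ex()); rounding the reservation up to a power of two keeps the number of reallocations logarithmic instead of one per reserve call. A small hedged sketch of the intent, with made-up vector and batch sizes:

// Illustrative only: reserve in power-of-two jumps ahead of repeated appends.
std::vector<int> values;
for (size_t batch = 0; batch < 100; ++ batch) {
    reserve_more_power_of_2(values, 10);   // capacity grows to the next power of two >= size + 10
    for (int i = 0; i < 10; ++ i)
        values.push_back(i);
}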
@ -1980,6 +1980,7 @@ void TabPrint::build()
    optgroup->append_single_option_line("detect_thin_wall");

    optgroup = page->new_optgroup(L("Top/bottom shells"), L"param_shell");
    optgroup->append_single_option_line("interface_shells");
    optgroup->append_single_option_line("top_surface_pattern", "fill-patterns#Infill of the top surface and bottom surface");
    optgroup->append_single_option_line("top_shell_layers");
    optgroup->append_single_option_line("top_shell_thickness");
@ -2002,7 +2003,7 @@ void TabPrint::build()
    optgroup->append_single_option_line("minimum_sparse_infill_area","parameter/strength-advance-settings");
    optgroup->append_single_option_line("infill_combination","parameter/strength-advance-settings");
    optgroup->append_single_option_line("detect_narrow_internal_solid_infill","parameter/strength-advance-settings");
    optgroup->append_single_option_line("ensure_vertical_shell_thickness","parameter/strength-advance-settings");
    //optgroup->append_single_option_line("ensure_vertical_shell_thickness","parameter/strength-advance-settings");
    optgroup->append_single_option_line("internal_bridge_support_thickness","parameter/strength-advance-settings");

    page = add_options_page(L("Speed"), "empty");