mirror of
https://git.mirrors.martin98.com/https://github.com/prusa3d/PrusaSlicer.git
core implemented, now fixing the issues
parent f4e44f9750
commit f8e7d1b01c
@@ -16,6 +16,7 @@
 #include "FillLightning.hpp"
 #include "FillConcentric.hpp"
 #include "FillEnsuring.hpp"
+#include "Polygon.hpp"
 
 namespace Slic3r {
 
@@ -649,7 +650,10 @@ Polylines Layer::generate_sparse_infill_polylines_for_anchoring(FillAdaptive::Oc
 
    for (SurfaceFill &surface_fill : surface_fills) {
        switch (surface_fill.params.pattern) {
-        case ipLightning: continue; break;
+        case ipLightning: {
+            auto polylines = to_polylines(shrink_ex(surface_fill.expolygons, 5.0 * surface_fill.params.flow.scaled_spacing()));
+            sparse_infill_polylines.insert(sparse_infill_polylines.end(), polylines.begin(), polylines.end());
+        }; break;
        case ipCount: continue; break;
        case ipSupportBase: continue; break;
        case ipEnsuring: continue; break;
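The ipLightning branch above no longer just skips the pattern: it shrinks each sparse-infill region inward by five line spacings and hands the shrunken contours back as polylines that the later bridge anchoring can use. A minimal standalone sketch of that shrink-then-trace idea, in plain C++ with an axis-aligned rectangle standing in for the region (Slic3r's real shrink_ex()/to_polylines() operate on arbitrary ExPolygons via ClipperLib, so this is only an illustration of the geometry step):

#include <cstdint>
#include <iostream>
#include <vector>

struct Pt { int64_t x, y; };
using Polyline = std::vector<Pt>;

// Shrink an axis-aligned rectangle by "inset" on every side and return its outline
// as a closed polyline (empty if the rectangle collapses to nothing).
Polyline shrink_rect_to_polyline(Pt min, Pt max, int64_t inset)
{
    min.x += inset; min.y += inset;
    max.x -= inset; max.y -= inset;
    if (min.x >= max.x || min.y >= max.y)
        return {}; // region vanished, nothing left to anchor to
    return { {min.x, min.y}, {max.x, min.y}, {max.x, max.y}, {min.x, max.y}, {min.x, min.y} };
}

int main()
{
    const int64_t spacing = 400000; // scaled line spacing, roughly 0.4 mm in Slic3r's fixed-point units
    Polyline anchor = shrink_rect_to_polyline({0, 0}, {10000000, 6000000}, 5 * spacing);
    for (const Pt &p : anchor)
        std::cout << p.x << ", " << p.y << "\n";
}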
@@ -16,6 +16,7 @@
 #include "Layer.hpp"
 #include "MutablePolygon.hpp"
 #include "PrintBase.hpp"
+#include "PrintConfig.hpp"
 #include "SupportMaterial.hpp"
 #include "TreeSupport.hpp"
 #include "Surface.hpp"
@@ -1648,347 +1649,63 @@ void PrintObject::bridge_over_infill()
    }
 
    tbb::parallel_for(tbb::blocked_range<size_t>(0, layers_to_generate_infill.size()), [po = static_cast<const PrintObject *>(this),
+                                                                                        &layers_to_generate_infill,
                                                                                         &infill_lines](tbb::blocked_range<size_t> r) {
-        for (size_t lidx = r.begin(); lidx < r.end(); lidx++) {
+        for (size_t job_idx = r.begin(); job_idx < r.end(); job_idx++) {
+            size_t lidx = layers_to_generate_infill[job_idx];
            infill_lines.at(
                lidx) = po->get_layer(lidx)->generate_sparse_infill_polylines_for_anchoring(po->adaptive_fill_octrees.first.get(),
                                                                                            po->adaptive_fill_octrees.second.get());
        }
    });
 
-    std::vector<std::pair<size_t, size_t>> jobs;
+    // cluster layers by depth needed for thick bridges. Each cluster is to be processed by single thread sequentially, so that bridges cannot appear one on another
+    std::vector<std::vector<size_t>> clustered_layers_for_threads;
    for (auto pair : surfaces_by_layer) {
-        if (jobs.empty() || jobs.back().second < pair.first) {
-            jobs.emplace_back(pair.first, pair.first + 1);
+        if (clustered_layers_for_threads.empty() || this->get_layer(clustered_layers_for_threads.back().back())->print_z >
+                                                        this->get_layer(pair.first)->print_z -
+                                                            this->get_layer(pair.first)->regions()[0]->flow(frSolidInfill, true).height() -
+                                                            EPSILON) {
+            clustered_layers_for_threads.push_back({pair.first});
        } else {
-            jobs.back().second = pair.first + 1;
+            clustered_layers_for_threads.back().push_back(pair.first);
        }
    }
 
-    auto gahter_lower_layers_sparse_infill = [](const PrintObject *po, int lidx, float target_flow_height) {
+    // LAMBDA to gather areas with sparse infill deep enough that we can fit thick bridges there.
+    auto gather_areas_w_depth =
+        [](const PrintObject *po, int lidx, float target_flow_height) {
        // Gather lower layers sparse infill areas, to depth defined by used bridge flow
        Polygons lower_layers_sparse_infill{};
-        Polygons special_infill{};
        Polygons not_sparse_infill{};
        double bottom_z = po->get_layer(lidx)->print_z - target_flow_height - EPSILON;
        for (int i = int(lidx) - 1; i >= 0; --i) {
            // Stop iterating if layer is lower than bottom_z.
-            if (po->get_layer(i)->print_z < bottom_z)
+            const Layer *layer = po->get_layer(i);
+            if (layer->print_z < bottom_z)
                break;
-            for (const auto &link : current_links) {
-                const LayerSlice &slice_below = po->get_layer(i)->lslices_ex[link.slice_idx];
-                next_links.insert(next_links.end(), slice_below.overlaps_below.begin(), slice_below.overlaps_below.end());
-                std::unordered_set<const LayerRegion *> regions_under_to_check;
-                for (const LayerIsland &island : slice_below.islands) {
-                    regions_under_to_check.insert(po->get_layer(i)->regions()[island.perimeters.region()]);
-                    if (!island.fill_expolygons_composite()) {
-                        regions_under_to_check.insert(po->get_layer(i)->regions()[island.fill_region_id]);
-                    } else {
-                        for (const auto &r : po->get_layer(i)->regions()) {
-                            regions_under_to_check.insert(r);
-                        }
-                        break;
-                    }
-                }
-
-                for (const LayerRegion *region : regions_under_to_check) {
+            for (const LayerRegion *region : layer->regions()) {
                bool has_low_density = region->region().config().fill_density.value < 100;
-                bool has_special_infill = region_has_special_infill(region);
                for (const Surface &surface : region->fill_surfaces()) {
-                    if (surface.surface_type == stInternal && has_low_density && !has_special_infill) {
+                    if (surface.surface_type == stInternal && has_low_density) {
                        Polygons p = to_polygons(surface.expolygon);
                        lower_layers_sparse_infill.insert(lower_layers_sparse_infill.end(), p.begin(), p.end());
-                    } else if (surface.surface_type == stInternal && has_low_density && has_special_infill) {
-                        Polygons p = to_polygons(surface.expolygon);
-                        special_infill.insert(special_infill.end(), p.begin(), p.end());
                    } else {
                        Polygons p = to_polygons(surface.expolygon);
                        not_sparse_infill.insert(not_sparse_infill.end(), p.begin(), p.end());
                    }
                }
            }
-            }
-            current_links = next_links;
-            next_links.clear();
+            lower_layers_sparse_infill = union_(lower_layers_sparse_infill);
        }
 
-        lower_layers_sparse_infill = intersection(lower_layers_sparse_infill,
-                                                  layer->lslices[int(candidates.first - layer->lslices_ex.data())]);
-        lower_layers_sparse_infill = diff(lower_layers_sparse_infill, not_sparse_infill);
-        special_infill = intersection(special_infill, layer->lslices[int(candidates.first - layer->lslices_ex.data())]);
-        special_infill = diff(special_infill, not_sparse_infill);
-
-        lower_layers_sparse_infill.insert(lower_layers_sparse_infill.end(), special_infill.begin(), special_infill.end());
+        return diff(lower_layers_sparse_infill, not_sparse_infill);
 
-        if (shrink(lower_layers_sparse_infill, 3.0 * scale_(max_bridge_flow_height[candidates.first])).empty()) {
-            continue;
-        }
    };
 
-    tbb::parallel_for(tbb::blocked_range<size_t>(0, jobs.size()), [po = this, &jobs, &surfaces_by_layer](tbb::blocked_range<size_t> r) {
-        for (size_t job_idx = r.begin(); job_idx < r.end(); job_idx++) {
-            for (size_t lidx = jobs[job_idx].first; lidx < jobs[job_idx].second; lidx++) {
+    // LAMBDA do determine optimal bridging angle
+    auto determine_bridging_angle = [](const Polygons &bridged_area, const Lines &anchors, InfillPattern dominant_pattern) {
+        AABBTreeLines::LinesDistancer<Line> lines_tree(anchors);
-                const Layer *layer = po->get_layer(lidx);
-
-                // Presort the candidate polygons. This will help choose the same angle for neighbournig surfaces, that would otherwise
-                // compete over anchoring sparse infill lines, leaving one area unachored
-                std::sort(surfaces_by_layer[lidx].begin(), surfaces_by_layer[lidx].end(), [](const Surface* left, const Surface* right){
-                    auto a = get_extents(left->expolygon);
-                    auto b = get_extents(right->expolygon);
-
-                    if (a.min.x() == b.min.x()) {
-                        return a.min.y() < b.min.y();
-                    };
-                    return a.min.x() < b.min.x();
-                });
-            }
-        }
-    });
-
-    std::unordered_map<const LayerSlice *, std::vector<ModifiedSurface>> bridging_surfaces;
-
-    tbb::parallel_for(tbb::blocked_range<size_t>(0, this->layers().size()), [po = this,
-                                                                             &bridging_surfaces](tbb::blocked_range<size_t> r) {
-        for (size_t lidx = r.begin(); lidx < r.end(); lidx++) {
-            const Layer *layer = po->get_layer(lidx);
-
-            // gather also sparse infill surfaces on this layer, to which we can expand the bridges for anchoring
-            // gather potential internal bridging surfaces for the current layer
-            // pair of LayerSlice idx and surfaces. The LayerSlice idx simplifies the processing, since we cannot expand beyond it
-            std::unordered_map<const LayerSlice *, SurfacesPtr> bridging_surface_candidates;
-            std::unordered_map<const LayerSlice *, SurfacesPtr> expansion_space;
-            std::unordered_map<const LayerSlice *, float> max_bridge_flow_height;
-            std::unordered_map<const Surface *, const LayerRegion *> surface_to_region;
-            for (const LayerSlice &slice : layer->lslices_ex) {
-                AABBTreeLines::LinesDistancer<Line> slice_island_tree{to_lines(layer->lslices[int(&slice - layer->lslices_ex.data())])};
-                std::unordered_set<const LayerRegion *> regions_to_check;
-
-                // If there is composite island we have to check all regions on the layer. otherwise, only some regions are needed to be checked
-                for (const LayerIsland &island : slice.islands) {
-                    regions_to_check.insert(layer->regions()[island.perimeters.region()]);
-                    if (!island.fill_expolygons_composite()) {
-                        regions_to_check.insert(layer->regions()[island.fill_region_id]);
-                    } else {
-                        for (const auto& r : layer->regions()) {
-                            regions_to_check.insert(r);
-                        }
-                        break;
-                    }
-                }
-
-                for ( const LayerRegion *region : regions_to_check) {
-                    SurfacesPtr region_internal_solids = region->fill_surfaces().filter_by_type(stInternalSolid);
-
-                    // filter out surfaces not from this island... TODO sotre this info in the Z-Graph, so that this filtering is not needed
-                    // NOTE: we are keeping even very small internal ensuring overhangs here. The aim is to later differentiate between expanding wall ensuring regions
-                    // where briding them would be conterproductive, and small ensuring islands that expand into large ones, where bridging is quite necessary
-                    region_internal_solids.erase(std::remove_if(region_internal_solids.begin(), region_internal_solids.end(),
-                                                                [slice_island_tree](const Surface *s) {
-                                                                    if (slice_island_tree.outside(s->expolygon.contour.first_point()) > 0) {
-                                                                        return true;
-                                                                    }
-                                                                    return false;
-                                                                }),
-                                                 region_internal_solids.end());
-                    if (!region_internal_solids.empty()) {
-                        max_bridge_flow_height[&slice] = std::max(max_bridge_flow_height[&slice],
-                                                                  region->bridging_flow(frSolidInfill, true).height());
-                    }
-                    for (const Surface *s : region_internal_solids) {
-                        surface_to_region[s] = region;
-                    }
-                    bridging_surface_candidates[&slice].insert(bridging_surface_candidates[&slice].end(), region_internal_solids.begin(),
-                                                               region_internal_solids.end());
-                    auto region_sparse_infill = region->fill_surfaces().filter_by_type(stInternal);
-                    expansion_space[&slice].insert(expansion_space[&slice].end(), region_sparse_infill.begin(), region_sparse_infill.end());
-                }
-            }
-
-            // if there are none briding candidates, exit now, before making infill for the previous layer
-            if (std::all_of(bridging_surface_candidates.begin(), bridging_surface_candidates.end(),
-                            [](const std::pair<const LayerSlice *, SurfacesPtr> &candidates) { return candidates.second.empty(); })) {
-                continue;
-            }
-
-            // generate sparse infill polylines from lower layers to get anchorable polylines
-            Polylines lower_layer_polylines = po->get_layer(lidx)->lower_layer
-                                                  ? po->get_layer(lidx)->lower_layer->generate_sparse_infill_polylines_for_anchoring()
-                                                  : Polylines();
-
-            for (std::pair<const LayerSlice *, SurfacesPtr> candidates : bridging_surface_candidates) {
-                if (candidates.second.empty()) {
-                    continue;
-                };
-
-                auto region_has_special_infill = [](const LayerRegion *layer_region) {
-                    switch (layer_region->region().config().fill_pattern.value) {
-                    case ipAdaptiveCubic: return true;
-                    case ipSupportCubic: return true;
-                    case ipLightning: return true;
-                    default: return false;
-                    }
-                };
-
-                // Gather lower layers sparse infill areas, to depth defined by used bridge flow
-                Polygons lower_layers_sparse_infill{};
-                Polygons special_infill{};
-                Polygons not_sparse_infill{};
-                {
-                    double bottom_z = layer->print_z - max_bridge_flow_height[candidates.first] - EPSILON;
-                    std::vector<LayerSlice::Link> current_links{};
-                    current_links.insert(current_links.end(), candidates.first->overlaps_below.begin(),
-                                         candidates.first->overlaps_below.end());
-                    std::vector<LayerSlice::Link> next_links{};
-                    for (int i = int(lidx) - 1; i >= 0; --i) {
-                        // Stop iterating if layer is lower than bottom_z.
-                        if (po->get_layer(i)->print_z < bottom_z)
-                            break;
-                        for (const auto &link : current_links) {
-                            const LayerSlice &slice_below = po->get_layer(i)->lslices_ex[link.slice_idx];
-                            next_links.insert(next_links.end(), slice_below.overlaps_below.begin(), slice_below.overlaps_below.end());
-                            std::unordered_set<const LayerRegion *> regions_under_to_check;
-                            for (const LayerIsland &island : slice_below.islands) {
-                                regions_under_to_check.insert(po->get_layer(i)->regions()[island.perimeters.region()]);
-                                if (!island.fill_expolygons_composite()) {
-                                    regions_under_to_check.insert(po->get_layer(i)->regions()[island.fill_region_id]);
-                                } else {
-                                    for (const auto &r : po->get_layer(i)->regions()) {
-                                        regions_under_to_check.insert(r);
-                                    }
-                                    break;
-                                }
-                            }
-
-                            for (const LayerRegion *region : regions_under_to_check) {
-                                bool has_low_density = region->region().config().fill_density.value < 100;
-                                bool has_special_infill = region_has_special_infill(region);
-                                for (const Surface &surface : region->fill_surfaces()) {
-                                    if (surface.surface_type == stInternal && has_low_density && !has_special_infill) {
-                                        Polygons p = to_polygons(surface.expolygon);
-                                        lower_layers_sparse_infill.insert(lower_layers_sparse_infill.end(), p.begin(), p.end());
-                                    } else if (surface.surface_type == stInternal && has_low_density && has_special_infill) {
-                                        Polygons p = to_polygons(surface.expolygon);
-                                        special_infill.insert(special_infill.end(), p.begin(), p.end());
-                                    } else {
-                                        Polygons p = to_polygons(surface.expolygon);
-                                        not_sparse_infill.insert(not_sparse_infill.end(), p.begin(), p.end());
-                                    }
-                                }
-                            }
-                        }
-                        current_links = next_links;
-                        next_links.clear();
-                    }
-
-                    lower_layers_sparse_infill = intersection(lower_layers_sparse_infill,
-                                                              layer->lslices[int(candidates.first - layer->lslices_ex.data())]);
-                    lower_layers_sparse_infill = diff(lower_layers_sparse_infill, not_sparse_infill);
-                    special_infill = intersection(special_infill, layer->lslices[int(candidates.first - layer->lslices_ex.data())]);
-                    special_infill = diff(special_infill, not_sparse_infill);
-
-                    lower_layers_sparse_infill.insert(lower_layers_sparse_infill.end(), special_infill.begin(), special_infill.end());
-
-                    if (shrink(lower_layers_sparse_infill, 3.0 * scale_(max_bridge_flow_height[candidates.first])).empty()) {
-                        continue;
-                    }
-                }
-
-                if (expansion_space[candidates.first].empty() && special_infill.empty()) {
-                    // there is no expansion space to which can anchors expand on this island, add back original polygons and skip the island
-                    for (const Surface *candidate : candidates.second) {
-                        bridging_surfaces[candidates.first].emplace_back(candidate, to_polygons(candidate->expolygon),
-                                                                         surface_to_region[candidate], 0);
-                    }
-                    continue;
-                }
-
-                Polygons expand_area;
-                for (const Surface *sparse_infill : expansion_space[candidates.first]) {
-                    assert(sparse_infill->surface_type == stInternal);
-                    Polygons a = to_polygons(sparse_infill->expolygon);
-                    expand_area.insert(expand_area.end(), a.begin(), a.end());
-                }
-
-                // Presort the candidate polygons. This will help choose the same angle for neighbournig surfaces, that would otherwise
-                // compete over anchoring sparse infill lines, leaving one area unachored
-                std::sort(candidates.second.begin(), candidates.second.end(), [](const Surface* left, const Surface* right){
-                    auto a = get_extents(left->expolygon);
-                    auto b = get_extents(right->expolygon);
-
-                    if (a.min.x() == b.min.x()) {
-                        return a.min.y() < b.min.y();
-                    };
-                    return a.min.x() < b.min.x();
-                });
-
-                std::unordered_map<const LayerRegion *, std::pair<Polygons, Polygons>> infill_and_deep_infill_polygons_per_region;
-                for (const auto &surface_region : surface_to_region) {
-                    const LayerRegion *r = surface_region.second;
-                    if (infill_and_deep_infill_polygons_per_region.find(r) == infill_and_deep_infill_polygons_per_region.end()) {
-                        const Flow &flow = r->bridging_flow(frSolidInfill, true);
-                        Polygons infill_region = to_polygons(r->fill_expolygons());
-                        Polygons deep_infill_area = closing(infill_region, scale_(0.01), scale_(0.01) + 4.0 * flow.scaled_spacing());
-                        Polygons solid_supported_area = expand(not_sparse_infill, 4.0 * flow.scaled_spacing());
-                        infill_and_deep_infill_polygons_per_region[r] = {closing(infill_region, float(scale_(0.1))),
-                                                                         intersection(lower_layers_sparse_infill,
-                                                                                      diff(deep_infill_area, solid_supported_area))};
-                    }
-                }
-
-                // Lower layers sparse infill sections gathered
-                // now we can intersected them with bridging surface candidates to get actual areas that need and can accumulate
-                // bridging. These areas we then expand (within the surrounding sparse infill only!)
-                // to touch the infill polylines on previous layer.
-                for (const Surface *candidate : candidates.second) {
-                    const Flow &flow = surface_to_region[candidate]->bridging_flow(frSolidInfill, true);
-                    assert(candidate->surface_type == stInternalSolid);
-
-                    Polygons bridged_area = intersection(expand(to_polygons(candidate->expolygon), flow.scaled_spacing()),
-                                                         infill_and_deep_infill_polygons_per_region[surface_to_region[candidate]].first);
-                    // cut off parts which are not over sparse infill - material overflow
-                    Polygons worth_bridging = intersection(bridged_area,
-                                                           infill_and_deep_infill_polygons_per_region[surface_to_region[candidate]].second);
-                    if (worth_bridging.empty()) {
-                        continue;
-                    }
-                    bridged_area = intersection(bridged_area, expand(worth_bridging, 5.0 * flow.scaled_spacing()));
-
-                    Polygons max_area = expand_area;
-                    max_area.insert(max_area.end(), bridged_area.begin(), bridged_area.end());
-                    max_area = closing(max_area, flow.scaled_spacing());
-
-                    Polylines anchors = intersection_pl(lower_layer_polylines, max_area);
-                    if (!special_infill.empty()) {
-                        auto part_over_special_infill = intersection(special_infill, bridged_area);
-                        auto artificial_boundary = to_polylines(expand(part_over_special_infill, 0.5 * flow.scaled_width()));
-                        anchors.insert(anchors.end(), artificial_boundary.begin(), artificial_boundary.end());
-
-#ifdef DEBUG_BRIDGE_OVER_INFILL
-                        debug_draw(std::to_string(lidx) + "special", to_lines(part_over_special_infill), to_lines(artificial_boundary),
-                                   to_lines(anchors), to_lines(expand_area));
-#endif
-                    }
-                    anchors = diff_pl(anchors, bridged_area);
-
-                    Lines anchors_and_walls = to_lines(anchors);
-                    Lines tmp = to_lines(max_area);
-                    anchors_and_walls.insert(anchors_and_walls.end(), tmp.begin(), tmp.end());
-
-#ifdef DEBUG_BRIDGE_OVER_INFILL
-                    debug_draw(std::to_string(lidx) + "candidate", to_lines(candidate->expolygon), to_lines(bridged_area),
-                               to_lines(max_area), (anchors_and_walls));
-#endif
-
-                    double bridging_angle = 0;
-                    Polygons tmp_expanded_area = expand(bridged_area, 3.0 * flow.scaled_spacing());
-                    for (const ModifiedSurface& s : bridging_surfaces[candidates.first]) {
-                        if (!intersection(s.new_polys, tmp_expanded_area).empty()) {
-                            bridging_angle = s.bridge_angle;
-                            break;
-                        }
-                    }
-                    if (bridging_angle == 0) {
-                        AABBTreeLines::LinesDistancer<Line> lines_tree{anchors.empty() ? anchors_and_walls : to_lines(anchors)};
 
        std::map<double, int> counted_directions;
        for (const Polygon &p : bridged_area) {
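The replacement code above swaps the old per-slice bookkeeping for reusable lambdas (gather_areas_w_depth here, determine_bridging_angle and construct_anchored_polygon further below) plus a clustering of layers: layers that sit within one thick-bridge depth of each other are grouped so that a single thread processes each vertical run in order and a bridge expanded on one layer is already visible when the layer right above it is handled. A minimal standalone sketch of that clustering step (plain C++; the threshold PrusaSlicer actually uses, the solid-infill flow height plus EPSILON, is kept as a plain parameter here):

#include <cstddef>
#include <iostream>
#include <vector>

// Group layer indices bottom-up: stay in the current cluster while this layer's print_z is
// within "depth" of the cluster's previous member, otherwise start a new cluster.
std::vector<std::vector<size_t>> cluster_layers(const std::vector<double> &print_z, double depth)
{
    std::vector<std::vector<size_t>> clusters;
    for (size_t i = 0; i < print_z.size(); ++i) {
        bool start_new = clusters.empty() ||
                         print_z[i] - print_z[clusters.back().back()] > depth;
        if (start_new)
            clusters.push_back({i});
        else
            clusters.back().push_back(i);
    }
    return clusters;
}

int main()
{
    // layer heights 0.2 mm apart, with one large gap where no bridging candidates exist
    std::vector<double> zs = {0.2, 0.4, 0.6, 2.6, 2.8};
    for (const auto &c : cluster_layers(zs, 0.45)) {
        for (size_t idx : c) std::cout << idx << ' ';
        std::cout << '\n';                       // prints "0 1 2" and "3 4"
    }
}

Parallelising over these clusters instead of over individual layers is what lets the later code subtract already-bridged areas of lower layers without any locking.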
@@ -2046,17 +1763,22 @@ void PrintObject::bridge_over_infill()
                best_dir = {dir_acc / score_acc, score_acc};
            }
        }
-        bridging_angle = best_dir.first;
+        double bridging_angle = best_dir.first;
        if (bridging_angle == 0) {
            bridging_angle = 0.001;
        }
-        switch (surface_to_region[candidate]->region().config().fill_pattern.value) {
+        switch (dominant_pattern) {
        case ipHilbertCurve: bridging_angle += 0.25 * PI; break;
        case ipOctagramSpiral: bridging_angle += (1.0 / 16.0) * PI; break;
        default: break;
        }
-                    }
 
+        return bridging_angle;
+    };
+
+    // LAMBDA that will fill given polygons with lines, exapand the lines to the nearest anchor, and reconstruct polygons from the newly
+    // generated lines
+    auto construct_anchored_polygon = [](Polygons bridged_area, Lines anchors, const Flow &bridging_flow, double bridging_angle) {
        auto lines_rotate = [](Lines &lines, double cos_angle, double sin_angle) {
            for (Line &l : lines) {
                double ax = double(l.a.x());
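The determine_bridging_angle lambda introduced here returns the dominant direction of the available anchor lines and then nudges it slightly for Hilbert-curve and Octagram-spiral infill. A minimal standalone sketch of the direction-voting idea (plain C++; a simple length-weighted histogram stands in for the dir_acc/score_acc accumulation visible in the diff):

#include <array>
#include <cmath>
#include <iostream>
#include <vector>

struct Segment { double ax, ay, bx, by; };

// Fold every segment direction into [0, PI), weight it by segment length, and return the
// centre of the heaviest histogram bucket as the bridging direction.
double dominant_direction(const std::vector<Segment> &anchors)
{
    const double kPi = 3.14159265358979323846;
    constexpr int buckets = 36; // 5 degree resolution
    std::array<double, buckets> weight{};
    for (const Segment &s : anchors) {
        double dx = s.bx - s.ax, dy = s.by - s.ay;
        double len = std::hypot(dx, dy);
        double angle = std::fmod(std::atan2(dy, dx) + kPi, kPi); // direction without orientation
        weight[int(angle / kPi * buckets) % buckets] += len;
    }
    int best = 0;
    for (int i = 1; i < buckets; ++i)
        if (weight[i] > weight[best]) best = i;
    return (best + 0.5) * kPi / buckets;
}

int main()
{
    std::vector<Segment> anchors = {{0, 0, 10, 0.5}, {0, 2, 12, 2.4}, {3, 0, 3.2, 8}};
    std::cout << "bridging angle ~ " << dominant_direction(anchors) << " rad\n";
}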
@@ -2079,26 +1801,25 @@ void PrintObject::bridge_over_infill()
        double aligning_angle = -bridging_angle + PI * 0.5;
        {
            polygons_rotate(bridged_area, aligning_angle);
-            lines_rotate(anchors_and_walls, cos(aligning_angle), sin(aligning_angle));
+            lines_rotate(anchors, cos(aligning_angle), sin(aligning_angle));
            BoundingBox bb_x = get_extents(bridged_area);
-            BoundingBox bb_y = get_extents(anchors_and_walls);
+            BoundingBox bb_y = get_extents(anchors);
 
-            const size_t n_vlines = (bb_x.max.x() - bb_x.min.x() + flow.scaled_spacing() - 1) / flow.scaled_spacing();
+            const size_t n_vlines = (bb_x.max.x() - bb_x.min.x() + bridging_flow.scaled_spacing() - 1) / bridging_flow.scaled_spacing();
            std::vector<Line> vertical_lines(n_vlines);
            for (size_t i = 0; i < n_vlines; i++) {
-                coord_t x = bb_x.min.x() + i * flow.scaled_spacing();
-                coord_t y_min = bb_y.min.y() - flow.scaled_spacing();
-                coord_t y_max = bb_y.max.y() + flow.scaled_spacing();
+                coord_t x = bb_x.min.x() + i * bridging_flow.scaled_spacing();
+                coord_t y_min = bb_y.min.y() - bridging_flow.scaled_spacing();
+                coord_t y_max = bb_y.max.y() + bridging_flow.scaled_spacing();
                vertical_lines[i].a = Point{x, y_min};
                vertical_lines[i].b = Point{x, y_max};
            }
 
-            auto anchors_and_walls_tree = AABBTreeLines::LinesDistancer<Line>{std::move(anchors_and_walls)};
+            auto anchors_and_walls_tree = AABBTreeLines::LinesDistancer<Line>{std::move(anchors)};
            auto bridged_area_tree = AABBTreeLines::LinesDistancer<Line>{to_lines(bridged_area)};
 
 #ifdef DEBUG_BRIDGE_OVER_INFILL
-            debug_draw(std::to_string(lidx) + "sliced", to_lines(bridged_area), anchors_and_walls,
-                       vertical_lines, {});
+            debug_draw(std::to_string(lidx) + "sliced", to_lines(bridged_area), anchors_and_walls, vertical_lines, {});
 #endif
 
            std::vector<std::vector<Line>> polygon_sections(n_vlines);
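construct_anchored_polygon starts by rotating everything so the chosen bridging direction points along Y, then lays vertical scan lines across the bridged area's bounding box, one per flow spacing, overshooting the anchor bounding box so every anchor can be crossed. A minimal standalone sketch of that scan-line setup (plain C++; integer points stand in for Slic3r's coord_t geometry, and the count formula mirrors the round-up division in the diff):

#include <cstdint>
#include <iostream>
#include <vector>

struct Pt   { int64_t x, y; };
struct Line { Pt a, b; };

std::vector<Line> vertical_scan_lines(Pt area_min, Pt area_max, Pt anchors_min, Pt anchors_max, int64_t spacing)
{
    const size_t n = size_t((area_max.x - area_min.x + spacing - 1) / spacing); // round up
    std::vector<Line> lines(n);
    for (size_t i = 0; i < n; ++i) {
        int64_t x = area_min.x + int64_t(i) * spacing;
        lines[i].a = {x, anchors_min.y - spacing}; // overshoot so anchors are always hit
        lines[i].b = {x, anchors_max.y + spacing};
    }
    return lines;
}

int main()
{
    auto lines = vertical_scan_lines({0, 0}, {2000, 1000}, {-100, -100}, {2100, 1100}, 450);
    std::cout << lines.size() << " scan lines\n"; // 5 lines for a 2000-wide area at 450 spacing
}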
@@ -2114,24 +1835,22 @@ void PrintObject::bridge_over_infill()
                auto anchors_intersections = anchors_and_walls_tree.intersections_with_line<true>(vertical_lines[i]);
 
                for (Line &section : polygon_sections[i]) {
-                    auto maybe_below_anchor = std::upper_bound(anchors_intersections.rbegin(), anchors_intersections.rend(),
-                                                               section.a,
+                    auto maybe_below_anchor = std::upper_bound(anchors_intersections.rbegin(), anchors_intersections.rend(), section.a,
                                                               [](const Point &a, const std::pair<Point, size_t> &b) {
                                                                   return a.y() > b.first.y();
                                                               });
                    if (maybe_below_anchor != anchors_intersections.rend()) {
                        section.a = maybe_below_anchor->first;
-                        section.a.y() -= flow.scaled_width() * (0.5 + 1.0);
+                        section.a.y() -= bridging_flow.scaled_width() * (0.5 + 1.0);
                    }
 
-                    auto maybe_upper_anchor = std::upper_bound(anchors_intersections.begin(), anchors_intersections.end(),
-                                                               section.b,
+                    auto maybe_upper_anchor = std::upper_bound(anchors_intersections.begin(), anchors_intersections.end(), section.b,
                                                               [](const Point &a, const std::pair<Point, size_t> &b) {
                                                                   return a.y() < b.first.y();
                                                               });
                    if (maybe_upper_anchor != anchors_intersections.end()) {
                        section.b = maybe_upper_anchor->first;
-                        section.b.y() += flow.scaled_width() * (0.5 + 1.0);
+                        section.b.y() += bridging_flow.scaled_width() * (0.5 + 1.0);
                    }
                }
 
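Each scan-line section is then stretched to the nearest anchor intersection below its low end and above its high end, located with std::upper_bound over the per-line sorted intersections, plus a 1.5 x flow-width margin. A minimal standalone sketch of that snapping step (plain C++; doubles stand in for the Point/Line intersection records):

#include <algorithm>
#include <iostream>
#include <vector>

// Stretch a section [low, high] down to the nearest anchor below low and up to the nearest
// anchor above high. anchors_y must be sorted ascending.
void snap_section_to_anchors(double &low, double &high,
                             const std::vector<double> &anchors_y,
                             double margin)
{
    // nearest anchor strictly below "low": search the reversed (descending) range
    auto below = std::upper_bound(anchors_y.rbegin(), anchors_y.rend(), low,
                                  [](double v, double a) { return v > a; });
    if (below != anchors_y.rend())
        low = *below - margin;

    // nearest anchor strictly above "high"
    auto above = std::upper_bound(anchors_y.begin(), anchors_y.end(), high);
    if (above != anchors_y.end())
        high = *above + margin;
}

int main()
{
    std::vector<double> anchors = {-4.0, -1.0, 7.5, 12.0};
    double low = 0.0, high = 6.0;
    snap_section_to_anchors(low, high, anchors, 0.6);
    std::cout << low << " .. " << high << "\n"; // -1.6 .. 8.1
}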
@@ -2161,10 +1880,8 @@ void PrintObject::bridge_over_infill()
            for (const auto &polygon_slice : polygon_sections) {
                std::unordered_set<const Line *> used_segments;
                for (TracedPoly &traced_poly : current_traced_polys) {
-                    auto maybe_first_overlap = std::upper_bound(polygon_slice.begin(), polygon_slice.end(),
-                                                                traced_poly.lows.back(), [](const Point &low, const Line &seg) {
-                                                                    return seg.b.y() > low.y();
-                                                                });
+                    auto maybe_first_overlap = std::upper_bound(polygon_slice.begin(), polygon_slice.end(), traced_poly.lows.back(),
+                                                                [](const Point &low, const Line &seg) { return seg.b.y() > low.y(); });
 
                    if (maybe_first_overlap != polygon_slice.end() && // segment exists
                        segments_overlap(traced_poly.lows.back().y(), traced_poly.highs.back().y(), maybe_first_overlap->a.y(),
@@ -2173,28 +1890,28 @@ void PrintObject::bridge_over_infill()
                        // Overlapping segment. In that case, add it
                        // to the traced polygon and add segment to used segments
                        if ((traced_poly.lows.back() - maybe_first_overlap->a).cast<double>().squaredNorm() <
-                            36.0 * double(flow.scaled_spacing()) * flow.scaled_spacing()) {
+                            36.0 * double(bridging_flow.scaled_spacing()) * bridging_flow.scaled_spacing()) {
                            traced_poly.lows.push_back(maybe_first_overlap->a);
                        } else {
-                            traced_poly.lows.push_back(traced_poly.lows.back() + Point{flow.scaled_spacing() / 2, 0});
-                            traced_poly.lows.push_back(maybe_first_overlap->a - Point{flow.scaled_spacing() / 2, 0});
+                            traced_poly.lows.push_back(traced_poly.lows.back() + Point{bridging_flow.scaled_spacing() / 2, 0});
+                            traced_poly.lows.push_back(maybe_first_overlap->a - Point{bridging_flow.scaled_spacing() / 2, 0});
                            traced_poly.lows.push_back(maybe_first_overlap->a);
                        }
 
                        if ((traced_poly.highs.back() - maybe_first_overlap->b).cast<double>().squaredNorm() <
-                            36.0 * double(flow.scaled_spacing()) * flow.scaled_spacing()) {
+                            36.0 * double(bridging_flow.scaled_spacing()) * bridging_flow.scaled_spacing()) {
                            traced_poly.highs.push_back(maybe_first_overlap->b);
                        } else {
-                            traced_poly.highs.push_back(traced_poly.highs.back() + Point{flow.scaled_spacing() / 2, 0});
-                            traced_poly.highs.push_back(maybe_first_overlap->b - Point{flow.scaled_spacing() / 2, 0});
+                            traced_poly.highs.push_back(traced_poly.highs.back() + Point{bridging_flow.scaled_spacing() / 2, 0});
+                            traced_poly.highs.push_back(maybe_first_overlap->b - Point{bridging_flow.scaled_spacing() / 2, 0});
                            traced_poly.highs.push_back(maybe_first_overlap->b);
                        }
                        used_segments.insert(&(*maybe_first_overlap));
                    } else {
                        // Zero or multiple overlapping segments. Resolving this is nontrivial,
                        // so we just close this polygon and maybe open several new. This will hopefully happen much less often
-                        traced_poly.lows.push_back(traced_poly.lows.back() + Point{flow.scaled_spacing() / 2, 0});
-                        traced_poly.highs.push_back(traced_poly.highs.back() + Point{flow.scaled_spacing() / 2, 0});
+                        traced_poly.lows.push_back(traced_poly.lows.back() + Point{bridging_flow.scaled_spacing() / 2, 0});
+                        traced_poly.highs.push_back(traced_poly.highs.back() + Point{bridging_flow.scaled_spacing() / 2, 0});
                        Polygon &new_poly = expanded_bridged_area.emplace_back(std::move(traced_poly.lows));
                        new_poly.points.insert(new_poly.points.end(), traced_poly.highs.rbegin(), traced_poly.highs.rend());
                        traced_poly.lows.clear();
@@ -2209,9 +1926,9 @@ void PrintObject::bridge_over_infill()
                for (const auto &segment : polygon_slice) {
                    if (used_segments.find(&segment) == used_segments.end()) {
                        TracedPoly &new_tp = current_traced_polys.emplace_back();
-                        new_tp.lows.push_back(segment.a - Point{flow.scaled_spacing() / 2, 0});
+                        new_tp.lows.push_back(segment.a - Point{bridging_flow.scaled_spacing() / 2, 0});
                        new_tp.lows.push_back(segment.a);
-                        new_tp.highs.push_back(segment.b - Point{flow.scaled_spacing() / 2, 0});
+                        new_tp.highs.push_back(segment.b - Point{bridging_flow.scaled_spacing() / 2, 0});
                        new_tp.highs.push_back(segment.b);
                    }
                }
@@ -2228,23 +1945,115 @@ void PrintObject::bridge_over_infill()
                for (const auto &s : polygon_sections) {
                    l.insert(l.end(), s.begin(), s.end());
                }
-                debug_draw(std::to_string(lidx) + "reconstructed", l, anchors_and_walls_tree.get_lines(),
-                           to_lines(expanded_bridged_area), bridged_area_tree.get_lines());
+                debug_draw(std::to_string(lidx) + "reconstructed", l, anchors_and_walls_tree.get_lines(), to_lines(expanded_bridged_area),
+                           bridged_area_tree.get_lines());
 #endif
        }
 
        polygons_rotate(expanded_bridged_area, -aligning_angle);
-        expanded_bridged_area = intersection(expanded_bridged_area, max_area);
-        expanded_bridged_area = opening(expanded_bridged_area, flow.scaled_spacing());
-        expand_area = diff(expand_area, expanded_bridged_area);
+        return expanded_bridged_area;
+    };
 
-        bridging_surfaces[candidates.first].emplace_back(candidate, expanded_bridged_area, surface_to_region[candidate],
-                                                         bridging_angle);
-#ifdef DEBUG_BRIDGE_OVER_INFILL
-        debug_draw(std::to_string(lidx) + "cadidate_added", to_lines(expanded_bridged_area), to_lines(bridged_area),
-                   to_lines(max_area), to_lines(expand_area));
-#endif
+    tbb::parallel_for(tbb::blocked_range<size_t>(0, clustered_layers_for_threads.size()), [po = this, &surfaces_by_layer,
+                                                                                           &clustered_layers_for_threads,
+                                                                                           &gather_areas_w_depth,
+                                                                                           &infill_lines,
+                                                                                           &determine_bridging_angle,
+                                                                                           &construct_anchored_polygon]
+                                                                                          (tbb::blocked_range<size_t> r) {
+        for (size_t cluster_idx = r.begin(); cluster_idx < r.end(); cluster_idx++) {
+            for (size_t job_idx = 0; job_idx < clustered_layers_for_threads[cluster_idx].size(); job_idx++) {
+                size_t       lidx  = clustered_layers_for_threads[cluster_idx][job_idx];
+                const Layer *layer = po->get_layer(lidx);
+                // this thread has exclusive access to all surfaces in layers enumerated in
+                // clustered_layers_for_threads[cluster_idx]
+
+                // Presort the candidate polygons. This will help choose the same angle for neighbournig surfaces, that
+                // would otherwise compete over anchoring sparse infill lines, leaving one area unachored
+                std::sort(surfaces_by_layer[lidx].begin(), surfaces_by_layer[lidx].end(),
+                          [](const CandidateSurface &left, const CandidateSurface &right) {
+                              auto a = get_extents(left.new_polys);
+                              auto b = get_extents(right.new_polys);
+
+                              if (a.min.x() == b.min.x()) {
+                                  return a.min.y() < b.min.y();
+                              };
+                              return a.min.x() < b.min.x();
+                          });
+
+                // Gather deep infill areas, where thick bridges fit
+                coordf_t thick_bridges_depth = surfaces_by_layer[lidx].front().region->flow(frSolidInfill, true).height();
+                Polygons deep_infill_area = gather_areas_w_depth(po, lidx, thick_bridges_depth);
+
+                // Now also remove area that has been already filled on lower layers by bridging expansion - For this
+                // reason we did the clustering of layers per thread.
+                double bottom_z = po->get_layer(lidx)->print_z - thick_bridges_depth - EPSILON;
+                if (job_idx > 0) {
+                    for (int lower_job_idx = job_idx; lower_job_idx >= 0; lower_job_idx--) {
+                        size_t       lower_layer_idx = clustered_layers_for_threads[cluster_idx][lower_job_idx];
+                        const Layer *lower_layer     = po->get_layer(lower_layer_idx);
+                        if (lower_layer->print_z >= bottom_z) {
+                            for (const auto &c : surfaces_by_layer[lower_layer_idx]) {
+                                deep_infill_area = diff(deep_infill_area, c.new_polys);
                            }
+                        } else {
+                            break;
+                        }
+                    }
+                }
+
+                // Now gather expansion polygons - internal infill on current layer, from which we can cut off anchors
+                Polygons expansion_area;
+                for (const LayerRegion *region : layer->regions()) {
+                    auto polys = to_polygons(region->fill_surfaces().filter_by_type(stInternal));
+                    expansion_area.insert(expansion_area.end(), polys.begin(), polys.end());
+                }
+                expansion_area = closing(expansion_area, SCALED_EPSILON);
+                expansion_area = intersection(expansion_area, deep_infill_area);
+                Lines anchors = to_lines(intersection_pl(infill_lines[lidx - 1], expansion_area));
+
+                std::vector<CandidateSurface> expanded_surfaces;
+                expanded_surfaces.reserve(surfaces_by_layer[lidx].size());
+                for (const CandidateSurface &candidate : surfaces_by_layer[lidx]) {
+                    const Flow &flow = candidate.region->bridging_flow(frSolidInfill, true);
+                    Polygons area_to_be_bridged = intersection(candidate.new_polys, deep_infill_area);
+
+                    if (area_to_be_bridged.empty())
+                        continue;
+
+                    Polygons boundary_area = union_(expansion_area, expand(area_to_be_bridged, flow.scaled_spacing()));
+                    Lines boundary_lines = to_lines(boundary_area);
+                    if (boundary_lines.empty())
+                        continue;
+
+                    double bridging_angle = 0;
+                    Polygons tmp_expanded_area = expand(area_to_be_bridged, 3.0 * flow.scaled_spacing());
+                    for (const CandidateSurface &s : expanded_surfaces) {
+                        if (!intersection(s.new_polys, tmp_expanded_area).empty()) {
+                            bridging_angle = s.bridge_angle;
+                            break;
+                        }
+                    }
+                    if (bridging_angle == 0) {
+                        if (!anchors.empty()) {
+                            bridging_angle = determine_bridging_angle(area_to_be_bridged, anchors,
+                                                                      candidate.region->region().config().fill_pattern.value);
+                        } else {
+                            // use expansion boundaries as anchors. However the current area must be removed from such filter.
+                            // Also, use Infill pattern that is neutral for angle determination, since there are no infill lines.
+                            bridging_angle = determine_bridging_angle(area_to_be_bridged, boundary_lines, InfillPattern::ipLine);
+                        }
+                    }
+
+                    boundary_lines.insert(boundary_lines.end(), anchors.begin(), anchors.end());
+                    Polygons bridged_area = construct_anchored_polygon(area_to_be_bridged, boundary_lines, flow, bridging_angle);
+                    bridged_area = intersection(bridged_area, boundary_area);
+                    bridged_area = opening(bridged_area, flow.scaled_spacing());
+                    expansion_area = diff(expansion_area, bridged_area);
+
+                    expanded_surfaces.push_back(CandidateSurface(candidate.original_surface, bridged_area, candidate.region, bridging_angle));
+                }
+                surfaces_by_layer[lidx].swap(expanded_surfaces);
+                expanded_surfaces.clear();
            }
        }
    });
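Inside the new per-cluster loop, candidate surfaces are first presorted by the minimum corner of their bounding box so neighbouring surfaces are visited in a stable spatial order and tend to inherit the same bridging angle instead of competing for the same anchor lines. A minimal standalone sketch of that comparator (plain C++; a small BBox struct stands in for the result of get_extents()):

#include <algorithm>
#include <iostream>
#include <vector>

struct BBox { long min_x, min_y; };
struct Candidate { const char *name; BBox bb; };

int main()
{
    std::vector<Candidate> candidates = {
        {"right", {900, 100}}, {"left-high", {100, 700}}, {"left-low", {100, 50}}};

    // order by bounding-box min corner: x first, then y as the tie-break, like the diff above
    std::sort(candidates.begin(), candidates.end(), [](const Candidate &l, const Candidate &r) {
        if (l.bb.min_x == r.bb.min_x)
            return l.bb.min_y < r.bb.min_y;
        return l.bb.min_x < r.bb.min_x;
    });

    for (const Candidate &c : candidates)
        std::cout << c.name << '\n'; // left-low, left-high, right
}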
@@ -2252,65 +2061,50 @@ void PrintObject::bridge_over_infill()
    BOOST_LOG_TRIVIAL(info) << "Bridge over infill - Directions and expanded surfaces computed" << log_memory_info();
 
    tbb::parallel_for(tbb::blocked_range<size_t>(0, this->layers().size()), [po = this,
-                                                                             &bridging_surfaces](tbb::blocked_range<size_t> r) {
+                                                                             &surfaces_by_layer](tbb::blocked_range<size_t> r) {
        for (size_t lidx = r.begin(); lidx < r.end(); lidx++) {
+            if (surfaces_by_layer.find(lidx) == surfaces_by_layer.end())
+                continue;
            Layer *layer = po->get_layer(lidx);
-            std::unordered_map<const LayerRegion*, Surfaces> new_surfaces;
-
-            for (const LayerSlice &slice : layer->lslices_ex) {
-                if (const auto &modified_surfaces = bridging_surfaces.find(&slice);
-                    modified_surfaces != bridging_surfaces.end()) {
-                    std::unordered_set<LayerRegion *> regions_to_check;
-                    for (const LayerIsland &island : slice.islands) {
-                        regions_to_check.insert(layer->regions()[island.perimeters.region()]);
-                        if (!island.fill_expolygons_composite()) {
-                            regions_to_check.insert(layer->regions()[island.fill_region_id]);
-                        } else {
-                            for (LayerRegion *r : layer->regions()) {
-                                regions_to_check.insert(r);
-                            }
-                            break;
-                        }
-                    }
-
            Polygons cut_from_infill{};
-            for (const auto &surface : modified_surfaces->second) {
+            for (const auto &surface : surfaces_by_layer.at(lidx)) {
                cut_from_infill.insert(cut_from_infill.end(), surface.new_polys.begin(), surface.new_polys.end());
            }
 
-            for (const LayerRegion *region : regions_to_check) {
-                for (const ModifiedSurface &s : modified_surfaces->second) {
-                    for (const Surface &surface : region->m_fill_surfaces.surfaces) {
-                        if (s.original_surface == &surface) {
+            for (LayerRegion *region : layer->regions()) {
+                Surfaces new_surfaces;
+                for (const CandidateSurface &cs : surfaces_by_layer.at(lidx)) {
+                    for (Surface &surface : region->m_fill_surfaces.surfaces) {
+                        if (cs.original_surface == &surface) {
                            Surface tmp(surface, {});
-                            for (const ExPolygon &expoly : diff_ex(surface.expolygon, s.new_polys)) {
+                            for (const ExPolygon &expoly : diff_ex(surface.expolygon, cs.new_polys)) {
                                if (expoly.area() > region->flow(frSolidInfill).scaled_width() * scale_(4.0)) {
-                                    new_surfaces[region].emplace_back(tmp, expoly);
+                                    new_surfaces.emplace_back(tmp, expoly);
                                }
                            }
                            tmp.surface_type = stInternalBridge;
-                            tmp.bridge_angle = s.bridge_angle;
-                            for (const ExPolygon &expoly : union_ex(s.new_polys)) {
-                                new_surfaces[region].emplace_back(tmp, expoly);
+                            tmp.bridge_angle = cs.bridge_angle;
+                            for (const ExPolygon &expoly : union_ex(cs.new_polys)) {
+                                new_surfaces.emplace_back(tmp, expoly);
                            }
+                            surface.clear();
                        } else if (surface.surface_type == stInternal) {
                            Surface tmp(surface, {});
                            for (const ExPolygon &expoly : diff_ex(surface.expolygon, cut_from_infill)) {
-                                new_surfaces[region].emplace_back(tmp, expoly);
+                                new_surfaces.emplace_back(tmp, expoly);
                            }
-                        } else {
-                            new_surfaces[region].push_back(surface);
+                            surface.clear();
                        }
                    }
                }
-            }
-            }
-            }
-
-            for (LayerRegion *region : layer->regions()) {
-                if (new_surfaces.find(region) != new_surfaces.end()) {
-                    region->m_fill_surfaces = new_surfaces[region];
-                }
+                region->m_fill_surfaces.surfaces.insert(region->m_fill_surfaces.surfaces.end(), new_surfaces.begin(),
+                                                        new_surfaces.end());
+                region->m_fill_surfaces.surfaces.erase(std::remove_if(region->m_fill_surfaces.surfaces.begin(),
+                                                                      region->m_fill_surfaces.surfaces.end(),
+                                                                      [](const Surface &s) { return s.empty(); }),
+                                                       region->m_fill_surfaces.surfaces.end());
            }
        }
    });
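The final hunk rewrites each region's fill surfaces in place: surfaces consumed by bridging are cleared while their bridged and leftover pieces are appended as new entries, and a single erase(remove_if(...)) pass afterwards drops every emptied entry. A minimal standalone sketch of that clear-then-prune pattern (plain C++ with a toy Surface type standing in for Slic3r's):

#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

struct Surface {
    std::string type;
    int         area = 0;           // stand-in for the surface's polygon
    bool empty() const { return area == 0; }
    void clear() { area = 0; }
};

int main()
{
    std::vector<Surface> surfaces = {{"stInternalSolid", 40}, {"stInternal", 25}, {"stTop", 10}};

    // pretend the first surface was fully converted into a bridge elsewhere
    surfaces[0].clear();
    surfaces.push_back({"stInternalBridge", 40});

    // erase-remove: drop every surface that ended up empty
    surfaces.erase(std::remove_if(surfaces.begin(), surfaces.end(),
                                  [](const Surface &s) { return s.empty(); }),
                   surfaces.end());

    for (const Surface &s : surfaces)
        std::cout << s.type << ' ' << s.area << '\n';
}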