mirror of
https://git.mirrors.martin98.com/https://github.com/google/draco
synced 2025-04-19 12:19:53 +08:00
Split syntax table sections into files.
Also add .gitignore
This commit is contained in:
parent
c0dcf7c2bc
commit
c2fb47ac5d
1
.gitignore
vendored
Normal file
1
.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
||||
docs/_site
|
1
.ruby-version
Normal file
1
.ruby-version
Normal file
@ -0,0 +1 @@
|
||||
2.3.0
|
@ -30,5 +30,6 @@ gems:
|
||||
exclude:
|
||||
- Gemfile
|
||||
- Gemfile.lock
|
||||
- docs/_site
|
||||
sass:
|
||||
style: compressed
|
||||
|
@ -1,2 +1,2 @@
|
||||
|
||||
<p><em>Last modified: 2017-07-09 18:30:06 -0700</em></p>
|
||||
<p><em>Last modified: 2017-07-10 14:30:06 -0700</em></p>
|
||||
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -32,7 +32,7 @@
|
||||
[author]<br />
|
||||
[author]</em></p>
|
||||
|
||||
<p><em>Last modified: 2017-07-09 18:30:06 -0700</em></p>
|
||||
<p><em>Last modified: 2017-07-10 14:30:06 -0700</em></p>
|
||||
|
||||
<h2 class="no_toc nocount" id="abstract">Abstract</h2>
|
||||
|
||||
|
20
docs/spec/attributes.decoder.md
Normal file
20
docs/spec/attributes.decoder.md
Normal file
@ -0,0 +1,20 @@
|
||||
|
||||
## Attributes Decoder
|
||||
|
||||
### DecodeAttributesDecoderData()
|
||||
|
||||
~~~~~
|
||||
DecodeAttributesDecoderData(buffer) {
|
||||
num_attributes I32
|
||||
point_attribute_ids_.resize(num_attributes);
|
||||
for (i = 0; i < num_attributes; ++i) {
|
||||
att_type UI8
|
||||
data_type UI8
|
||||
components_count UI8
|
||||
normalized UI8
|
||||
custom_id UI16
|
||||
Initialize GeometryAttribute ga
|
||||
att_id = pc->AddAttribute(new PointAttribute(ga));
|
||||
point_attribute_ids_[i] = att_id;
|
||||
}
|
||||
~~~~~
|
61
docs/spec/core.functions.md
Normal file
61
docs/spec/core.functions.md
Normal file
@ -0,0 +1,61 @@
|
||||
|
||||
## Core Functions
|
||||
|
||||
### DecodeVarint<IT>
|
||||
|
||||
~~~~~
|
||||
DecodeVarint<IT>() {
|
||||
If (std::is_unsigned<IT>::value) {
|
||||
in UI8
|
||||
If (in & (1 << 7)) {
|
||||
out = DecodeVarint<IT>()
|
||||
out = (out << 7) | (in & ((1 << 7) - 1))
|
||||
} else {
|
||||
typename std::make_unsigned<IT>::type UIT;
|
||||
out = DecodeVarint<UIT>()
|
||||
out = ConvertSymbolToSignedInt(out)
|
||||
}
|
||||
return out;
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### ConvertSymbolToSignedInt()
|
||||
|
||||
~~~~~
|
||||
ConvertSymbolToSignedInt() {
|
||||
abs_val = val >> 1
|
||||
If (val & 1 == 0) {
|
||||
return abs_val
|
||||
} else {
|
||||
signed_val = -abs_val - 1
|
||||
}
|
||||
return signed_val
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
Sequential Decoder
|
||||
|
||||
### decode_connectivity()
|
||||
|
||||
~~~~~
|
||||
decode_connectivity() {
|
||||
num_faces I32
|
||||
num_points I32
|
||||
connectivity_method                                                          UI8
|
||||
If (connectivity_method == 0) {
|
||||
// TODO
|
||||
} else {
|
||||
loop num_faces {
|
||||
If (num_points < 256) {
|
||||
face[] UI8
|
||||
} else if (num_points < (1 << 16)) {
|
||||
face[] UI16
|
||||
} else {
|
||||
face[] UI32
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
~~~~~
|
197
docs/spec/corner.table.md
Normal file
197
docs/spec/corner.table.md
Normal file
@ -0,0 +1,197 @@
|
||||
|
||||
## Corner Table
|
||||
|
||||
### Opposite()
|
||||
|
||||
~~~~~
|
||||
Opposite(corner) {
|
||||
return opposite_corners_[corner];
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### Next()
|
||||
|
||||
~~~~~
|
||||
Next(corner) {
|
||||
return LocalIndex(++corner) ? corner : corner - 3;
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### Previous()
|
||||
|
||||
~~~~~
|
||||
Previous(corner) {
|
||||
return LocalIndex(corner) ? corner - 1 : corner + 2;
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### Vertex()
|
||||
|
||||
~~~~~
|
||||
Vertex(corner) {
|
||||
faces_[Face(corner)][LocalIndex(corner)];
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### Face()
|
||||
|
||||
~~~~~
|
||||
Face(corner) {
|
||||
return corner / 3;
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### LocalIndex()
|
||||
|
||||
~~~~~
|
||||
LocalIndex(corner) {
|
||||
return corner % 3;
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### num_vertices()
|
||||
|
||||
~~~~~
|
||||
num_vertices() {
|
||||
return vertex_corners_.size();
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### num_corners()
|
||||
|
||||
~~~~~
|
||||
num_corners() {
|
||||
return faces_.size() * 3;
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### num_faces()
|
||||
|
||||
~~~~~
|
||||
num_faces() {
|
||||
return faces_.size();
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### bool IsOnBoundary()
|
||||
|
||||
~~~~~
|
||||
bool IsOnBoundary(vert) {
|
||||
corner = LeftMostCorner(vert);
|
||||
if (SwingLeft(corner) < 0)
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
|
||||
### SwingRight()
|
||||
|
||||
~~~~~
|
||||
SwingRight(corner) {
|
||||
return Previous(Opposite(Previous(corner)));
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### SwingLeft()
|
||||
|
||||
~~~~~
|
||||
SwingLeft(corner) {
|
||||
return Next(Opposite(Next(corner)));
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### GetLeftCorner()
|
||||
|
||||
~~~~~
|
||||
GetLeftCorner(corner_id) {
|
||||
if (corner_id < 0)
|
||||
return kInvalidCornerIndex;
|
||||
return Opposite(Previous(corner_id));
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### GetRightCorner()
|
||||
|
||||
~~~~~
|
||||
GetRightCorner(corner_id) {
|
||||
if (corner_id < 0)
|
||||
return kInvalidCornerIndex;
|
||||
return Opposite(Next(corner_id));
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### SetOppositeCorner()
|
||||
|
||||
~~~~~
|
||||
SetOppositeCorner(corner_id, opp_corner_id) {
|
||||
opposite_corners_[corner_id] = opp_corner_id;
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
|
||||
### MapCornerToVertex()
|
||||
|
||||
~~~~~
|
||||
MapCornerToVertex(corner_id, vert_id) {
|
||||
face = Face(corner_id);
|
||||
faces_[face][LocalIndex(corner_id)] = vert_id;
|
||||
if (vert_id >= 0) {
|
||||
vertex_corners_[vert_id] = corner_id;
|
||||
}
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### UpdateVertexToCornerMap()
|
||||
|
||||
~~~~~
|
||||
UpdateVertexToCornerMap(vert) {
|
||||
first_c = vertex_corners_[vert];
|
||||
if (first_c < 0)
|
||||
return;
|
||||
act_c = SwingLeft(first_c);
|
||||
c = first_c;
|
||||
while (act_c >= 0 && act_c != first_c) {
|
||||
c = act_c;
|
||||
act_c = SwingLeft(act_c);
|
||||
}
|
||||
if (act_c != first_c) {
|
||||
vertex_corners_[vert] = c;
|
||||
}
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
|
||||
### LeftMostCorner()
|
||||
|
||||
~~~~~
|
||||
LeftMostCorner(v) {
|
||||
return vertex_corners_[v];
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### MakeVertexIsolated()
|
||||
|
||||
~~~~~
|
||||
MakeVertexIsolated(vert) {
|
||||
vertex_corners_[vert] = kInvalidCornerIndex;
|
||||
}
|
||||
~~~~~
|
40
docs/spec/cornertable.traversal.processor.md
Normal file
40
docs/spec/cornertable.traversal.processor.md
Normal file
@ -0,0 +1,40 @@
|
||||
|
||||
## CornerTable Traversal Processor
|
||||
|
||||
|
||||
### IsFaceVisited()
|
||||
|
||||
~~~~~
|
||||
IsFaceVisited(corner_id) {
|
||||
if (corner_id < 0)
|
||||
return true
|
||||
return is_face_visited_[corner_id / 3];
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### MarkFaceVisited()
|
||||
|
||||
~~~~~
|
||||
MarkFaceVisited(face_id) {
|
||||
is_face_visited_[face_id] = true;
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### IsVertexVisited()
|
||||
|
||||
~~~~~
|
||||
IsVertexVisited(vert_id) {
|
||||
return is_vertex_visited_[vert_id];
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### MarkVertexVisited()
|
||||
|
||||
~~~~~
|
||||
MarkVertexVisited(vert_id) {
|
||||
is_vertex_visited_[vert_id] = true;
|
||||
}
|
||||
~~~~~
|
File diff suppressed because it is too large
Load Diff
67
docs/spec/edgebreaker.hole.and.topology.md
Normal file
67
docs/spec/edgebreaker.hole.and.topology.md
Normal file
@ -0,0 +1,67 @@
|
||||
|
||||
## EdgeBreaker Hole and Topology Split Events
|
||||
|
||||
### DecodeHoleAndTopologySplitEvents()
|
||||
|
||||
FIXME: Escaping angle brackets
|
||||
|
||||
<div class="syntax">
|
||||
DecodeHoleAndTopologySplitEvents() { <b>Type</b>
|
||||
<b>num_topology_splits</b>                                                  UI32
|
||||
source_symbol_id = 0
|
||||
for (i = 0; i < num_topology_splits; ++i) {
|
||||
DecodeVarint\<UI32\>(&delta)
|
||||
split_data[i].source_symbol_id = delta + source_symbol_id
|
||||
DecodeVarint\<UI32\>(&delta)
|
||||
split_data[i].split_symbol_id = source_symbol_id - delta
|
||||
}
|
||||
for (i = 0; i < num_topology_splits; ++i) {
|
||||
<b>split_data[i].split_edge</b> bits1
|
||||
<b>split_data[i].source_edge</b> bits1
|
||||
}
|
||||
<b>num_hole_events</b> UI32
|
||||
symbol_id = 0
|
||||
for (i = 0; i < num_hole_events; ++i) {
|
||||
DecodeVarint\<UI32\>(&delta)
|
||||
hole_data[i].symbol_id = delta + symbol_id
|
||||
}
|
||||
return bytes_decoded;
|
||||
}
|
||||
|
||||
</div>
|
||||
|
||||
### CreateAttributesDecoder
|
||||
|
||||
FIXME: Escaping angle brackets
|
||||
|
||||
<div class="syntax">
|
||||
CreateAttributesDecoder() { <b>Type</b>
|
||||
<b>att_data_id</b> I8
|
||||
<b>decoder_type</b> UI8
|
||||
if (att_data_id >= 0) {
|
||||
attribute_data_[att_data_id].decoder_id = att_decoder_id;
|
||||
}
|
||||
<b>traversal_method_encoded</b> UI8
|
||||
if (decoder_type == MESH_VERTEX_ATTRIBUTE) {
|
||||
if (att_data_id < 0) {
|
||||
encoding_data = &pos_encoding_data_;
|
||||
} else {
|
||||
encoding_data = &attribute_data_[att_data_id].encoding_data;
|
||||
attribute_data_[att_data_id].is_connectivity_used = false;
|
||||
}
|
||||
if (traversal_method == MESH_TRAVERSAL_DEPTH_FIRST) {
|
||||
typedef EdgeBreakerTraverser\<AttProcessor, AttObserver\> AttTraverser;
|
||||
sequencer = CreateVertexTraversalSequencer\<AttTraverser\>(encoding_data);
|
||||
} else if (traversal_method == MESH_TRAVERSAL_PREDICTION_DEGREE) {
|
||||
typedef PredictionDegreeTraverser\<AttProcessor, AttObserver\> AttTraverser;
|
||||
sequencer = CreateVertexTraversalSequencer\<AttTraverser\>(encoding_data);
|
||||
}
|
||||
} else {
|
||||
// TODO
|
||||
}
|
||||
att_controller(new SequentialAttributeDecodersController(std::move(sequencer)))
|
||||
decoder_->SetAttributesDecoder(att_decoder_id, std::move(att_controller));
|
||||
}
|
||||
|
||||
</div>
|
||||
|
44
docs/spec/edgebreaker.traversal.decoder.md
Normal file
44
docs/spec/edgebreaker.traversal.decoder.md
Normal file
@ -0,0 +1,44 @@
|
||||
|
||||
## Edgebreaker Traversal Decoder
|
||||
|
||||
### EdgebreakerTraversal_Start()
|
||||
|
||||
<div class="syntax">
|
||||
EdgebreakerTraversal_Start() { <b>Type</b>
|
||||
<b>size</b> UI64
|
||||
<b>symbol_buffer_</b> size * UI8
|
||||
<b>size</b> UI64
|
||||
<b>start_face_buffer_</b> size * UI8
|
||||
if (num_attribute_data_ > 0) {
|
||||
attribute_connectivity_decoders_ = std::unique_ptr<BinaryDecoder[]>(
|
||||
new BinaryDecoder[num_attribute_data_]);
|
||||
for (i = 0; i < num_attribute_data_; ++i) {
|
||||
attribute_connectivity_decoders_[i].StartDecoding()
|
||||
// RansBitDecoder_StartDecoding
|
||||
}
|
||||
}
|
||||
|
||||
</div>
|
||||
|
||||
|
||||
### Traversal_DecodeSymbol()
|
||||
|
||||
~~~~~
|
||||
Traversal_DecodeSymbol() {
|
||||
symbol_buffer_.DecodeLeastSignificantBits32(1, &symbol); bits1
|
||||
if (symbol != TOPOLOGY_C) {
|
||||
symbol_buffer_.DecodeLeastSignificantBits32(2, &symbol_suffix); bits2
|
||||
symbol |= (symbol_suffix << 1);
|
||||
}
|
||||
return symbol
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### DecodeAttributeSeam()
|
||||
|
||||
~~~~~
|
||||
DecodeAttributeSeam(int attribute) {
|
||||
return attribute_connectivity_decoders_[attribute].DecodeNextBit();
|
||||
}
|
||||
~~~~~
|
88
docs/spec/edgebreaker.traversal.valence.decoder.md
Normal file
88
docs/spec/edgebreaker.traversal.valence.decoder.md
Normal file
@ -0,0 +1,88 @@
|
||||
|
||||
## EdgeBreaker Traversal Valence Decoder
|
||||
|
||||
### EdgeBreakerTraversalValence_Start()
|
||||
|
||||
~~~~~
|
||||
EdgeBreakerTraversalValence_Start(num_vertices, num_attribute_data) {
|
||||
out_buffer = EdgebreakerTraversal_Start()
|
||||
num_split_symbols I32
|
||||
mode == 0 I8
|
||||
num_vertices_ += num_split_symbols
|
||||
vertex_valences_ init to 0
|
||||
vertex_valences_.resize(num_vertices_, 0);
|
||||
min_valence_ = 2;
|
||||
max_valence_ = 7;
|
||||
num_unique_valences = 6 (max_valence_ - min_valence_ + 1)
|
||||
for (i = 0; i < num_unique_valences; ++i) {
|
||||
DecodeVarint<UI32>(&num_symbols, out_buffer)
|
||||
If (num_symbols > 0) {
|
||||
DecodeSymbols(num_symbols, out_buffer, &context_symbols_[i])
|
||||
}
|
||||
context_counters_[i] = num_symbols
|
||||
}
|
||||
return out_buffer;
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
|
||||
### TraversalValence_DecodeSymbol()
|
||||
|
||||
~~~~~
|
||||
TraversalValence_DecodeSymbol() {
|
||||
if (active_context_ != -1) {
|
||||
symbol_id = context_symbols_[active_context_]
|
||||
[--context_counters_[active_context_]]
|
||||
last_symbol_ = edge_breaker_symbol_to_topology_id[symbol_id]
|
||||
} else {
|
||||
last_symbol_ = Traversal_DecodeSymbol()
|
||||
}
|
||||
return last_symbol_
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
|
||||
### TraversalValence_NewActiveCornerReached()
|
||||
|
||||
~~~~~
|
||||
TraversalValence_NewActiveCornerReached(corner) {
|
||||
switch (last_symbol_) {
|
||||
case TOPOLOGY_C:
|
||||
case TOPOLOGY_S:
|
||||
vertex_valences_[ct(next)] += 1;
|
||||
vertex_valences_[ct(prev)] += 1;
|
||||
break;
|
||||
case TOPOLOGY_R:
|
||||
vertex_valences_[corner] += 1;
|
||||
vertex_valences_[ct(next)] += 1;
|
||||
vertex_valences_[ct(prev)] += 2;
|
||||
break;
|
||||
case TOPOLOGY_L:
|
||||
vertex_valences_[corner] += 1;
|
||||
vertex_valences_[ct(next)] += 2;
|
||||
vertex_valences_[ct(prev)] += 1;
|
||||
break;
|
||||
case TOPOLOGY_E:
|
||||
vertex_valences_[corner] += 2;
|
||||
vertex_valences_[ct(next)] += 2;
|
||||
vertex_valences_[ct(prev)] += 2;
|
||||
break;
|
||||
}
|
||||
valence = vertex_valences_[ct(next)]
|
||||
valence = max(valence, min_valence_)
|
||||
valence = min(valence, max_valence_)
|
||||
active_context_ = (valence - min_valence_);
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
|
||||
### TraversalValence_MergeVertices()
|
||||
|
||||
~~~~~
|
||||
TraversalValence_MergeVertices(dest, source) {
|
||||
vertex_valences_[dest] += vertex_valences_[source];
|
||||
}
|
||||
~~~~~
|
70
docs/spec/edgebreaker.traverser.md
Normal file
70
docs/spec/edgebreaker.traverser.md
Normal file
@ -0,0 +1,70 @@
|
||||
|
||||
## EdgeBreaker Traverser
|
||||
|
||||
### TraverseFromCorner()
|
||||
|
||||
~~~~~
|
||||
TraverseFromCorner(corner_id) {
|
||||
if (processor_.IsFaceVisited(corner_id))
|
||||
return
|
||||
corner_traversal_stack_.clear();
|
||||
corner_traversal_stack_.push_back(corner_id);
|
||||
next_vert = corner_table_->Vertex(corner_table_->Next(corner_id));
|
||||
prev_vert = corner_table_->Vertex(corner_table_->Previous(corner_id));
|
||||
if (!processor_.IsVertexVisited(next_vert)) {
|
||||
processor_.MarkVertexVisited(next_vert);
|
||||
traversal_observer_.OnNewVertexVisited(next_vert,
|
||||
corner_table_->Next(corner_id));
|
||||
}
|
||||
if (!processor_.IsVertexVisited(prev_vert)) {
|
||||
processor_.MarkVertexVisited(prev_vert);
|
||||
traversal_observer_.OnNewVertexVisited(prev_vert,
|
||||
corner_table_->Previous(corner_id));
|
||||
}
|
||||
while (!corner_traversal_stack_.empty()) {
|
||||
corner_id = corner_traversal_stack_.back();
|
||||
face_id = corner_id / 3;
|
||||
if (processor_.IsFaceVisited(face_id)) {
|
||||
corner_traversal_stack_.pop_back();
|
||||
continue
|
||||
}
|
||||
while(true) {
|
||||
face_id = corner_id / 3;
|
||||
processor_.MarkFaceVisited(face_id);
|
||||
traversal_observer_.OnNewFaceVisited(face_id);
|
||||
vert_id = corner_table_->Vertex(corner_id);
|
||||
on_boundary = corner_table_->IsOnBoundary(vert_id);
|
||||
if (!processor_.IsVertexVisited(vert_id)) {
|
||||
processor_.MarkVertexVisited(vert_id);
|
||||
traversal_observer_.OnNewVertexVisited(vert_id, corner_id);
|
||||
if (!on_boundary) {
|
||||
corner_id = corner_table_->GetRightCorner(corner_id);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
// The current vertex has been already visited or it was on a boundary.
|
||||
right_corner_id = corner_table_->GetRightCorner(corner_id);
|
||||
left_corner_id = corner_table_->GetLeftCorner(corner_id);
|
||||
right_face_id((right_corner_id < 0 ? -1 : right_corner_id / 3));
|
||||
left_face_id((left_corner_id < 0 ? -1 : left_corner_id / 3));
|
||||
if (processor_.IsFaceVisited(right_face_id)) {
|
||||
if (processor_.IsFaceVisited(left_face_id)) {
|
||||
corner_traversal_stack_.pop_back();
|
||||
break; // Break from while(true) loop
|
||||
} else {
|
||||
corner_id = left_corner_id;
|
||||
}
|
||||
} else {
|
||||
if (processor_.IsFaceVisited(left_face_id)) {
|
||||
corner_id = right_corner_id;
|
||||
} else {
|
||||
// Split the traversal.
|
||||
corner_traversal_stack_.back() = left_corner_id;
|
||||
corner_traversal_stack_.push_back(right_corner_id);
|
||||
break; // Break from while(true) loop
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
~~~~~
|
@ -13,18 +13,33 @@ version_date: Released 2017-xx-xx
|
||||
{% include_relative 00.00.05.toc.md %}
|
||||
|
||||
{% include_relative 01.00.00.scope.md %}
|
||||
|
||||
{% include_relative 02.00.00.terms.md %}
|
||||
|
||||
{% include_relative 03.00.00.symbols.md %}
|
||||
|
||||
{% include_relative 04.00.00.conventions.md %}
|
||||
|
||||
{% include_relative draco.decoder.md %}
|
||||
|
||||
{% include_relative mesh.decoder.md %}
|
||||
|
||||
{% include_relative edgebreaker.decoder.md %}
|
||||
{% include_relative edgebreaker.hole.and.topology.md %}
|
||||
{% include_relative edgebreaker.traversal.decoder.md %}
|
||||
{% include_relative edgebreaker.traversal.valence.decoder.md %}
|
||||
{% include_relative attributes.decoder.md %}
|
||||
{% include_relative sequential.attributes.decoders.controller.md %}
|
||||
{% include_relative sequential.attribute.decoder.md %}
|
||||
{% include_relative sequential.integer.attribute.decoder.md %}
|
||||
{% include_relative sequential.quantization.attribute.decoder.md %}
|
||||
{% include_relative prediction.scheme.transform.md %}
|
||||
{% include_relative prediction.scheme.wrap.transform.md %}
|
||||
{% include_relative mesh.prediction.scheme.parallelogram.md %}
|
||||
{% include_relative cornertable.traversal.processor.md %}
|
||||
{% include_relative mesh.attribute.indices.encoding.observer.md %}
|
||||
{% include_relative edgebreaker.traverser.md %}
|
||||
{% include_relative mesh.traversal.sequencer.md %}
|
||||
{% include_relative corner.table.md %}
|
||||
{% include_relative mesh.attribute.corner.table.md %}
|
||||
{% include_relative symbol.decoding.md %}
|
||||
{% include_relative rans.decoding.md %}
|
||||
{% include_relative rans.bit.decoder.md %}
|
||||
{% include_relative core.functions.md %}
|
||||
|
||||
|
||||
{% comment %}
|
||||
|
74
docs/spec/mesh.attribute.corner.table.md
Normal file
74
docs/spec/mesh.attribute.corner.table.md
Normal file
@ -0,0 +1,74 @@
|
||||
|
||||
## Mesh Attribute Corner Table
|
||||
|
||||
### bool IsCornerOnSeam()
|
||||
|
||||
~~~~~
|
||||
bool IsCornerOnSeam(corner) {
|
||||
return is_vertex_on_seam_[corner_table_->Vertex(corner)];
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### AddSeamEdge()
|
||||
|
||||
~~~~~
|
||||
AddSeamEdge(c) {
|
||||
MarkSeam(c)
|
||||
opp_corner = corner_table_->Opposite(c);
|
||||
if (opp_corner >= 0) {
|
||||
no_interior_seams_ = false;
|
||||
MarkSeam(opp_corner)
|
||||
}
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### MarkSeam()
|
||||
|
||||
~~~~~
|
||||
MarkSeam(c) {
|
||||
is_edge_on_seam_[c] = true;
|
||||
is_vertex_on_seam_[corner_table_->Vertex(corner_table_->Next(c))] = true;
|
||||
is_vertex_on_seam_[corner_table_->Vertex(corner_table_->Previous(c))
|
||||
] = true;
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### RecomputeVertices()
|
||||
|
||||
~~~~~
|
||||
RecomputeVertices() {
|
||||
// in code RecomputeVerticesInternal<false>(nullptr, nullptr)
|
||||
num_new_vertices = 0;
|
||||
for (v = 0; v < corner_table_->num_vertices(); ++v) {
|
||||
c = corner_table_->LeftMostCorner(v);
|
||||
if (c < 0)
|
||||
continue;
|
||||
first_vert_id(num_new_vertices++);
|
||||
vertex_to_attribute_entry_id_map_.push_back(first_vert_id);
|
||||
first_c = c;
|
||||
if (is_vertex_on_seam_[v]) {
|
||||
act_c = SwingLeft(first_c);
|
||||
while (act_c >= 0) {
|
||||
first_c = act_c;
|
||||
act_c = SwingLeft(act_c);
|
||||
}
|
||||
}
|
||||
corner_to_vertex_map_[first_c] = first_vert_id;
|
||||
vertex_to_left_most_corner_map_.push_back(first_c);
|
||||
act_c = corner_table_->SwingRight(first_c);
|
||||
while (act_c >= 0 && act_c != first_c) {
|
||||
if (is_edge_on_seam_[corner_table_->Next(act_c)]) {
|
||||
// in code IsCornerOppositeToSeamEdge()
|
||||
first_vert_id = AttributeValueIndex(num_new_vertices++);
|
||||
vertex_to_attribute_entry_id_map_.push_back(first_vert_id);
|
||||
vertex_to_left_most_corner_map_.push_back(act_c);
|
||||
}
|
||||
corner_to_vertex_map_[act_c] = first_vert_id;
|
||||
act_c = corner_table_->SwingRight(act_c);
|
||||
}
|
||||
}
|
||||
}
|
||||
~~~~~
|
17
docs/spec/mesh.attribute.indices.encoding.observer.md
Normal file
17
docs/spec/mesh.attribute.indices.encoding.observer.md
Normal file
@ -0,0 +1,17 @@
|
||||
|
||||
## Mesh Attribute Indices Encoding Observer
|
||||
|
||||
### OnNewVertexVisited()
|
||||
|
||||
~~~~~
|
||||
OnNewVertexVisited(vertex, corner) {
|
||||
point_id = mesh_->face(corner / 3)[corner % 3];
|
||||
sequencer_->AddPointId(point_id);
|
||||
// Keep track of visited corners.
|
||||
encoding_data_->encoded_attribute_value_index_to_corner_map.push_back(corner);
|
||||
encoding_data_
|
||||
->vertex_to_encoded_attribute_value_index_map[vertex] =
|
||||
encoding_data_->num_values;
|
||||
encoding_data_->num_values++;
|
||||
}
|
||||
~~~~~
|
58
docs/spec/mesh.prediction.scheme.parallelogram.md
Normal file
58
docs/spec/mesh.prediction.scheme.parallelogram.md
Normal file
@ -0,0 +1,58 @@
|
||||
|
||||
## Mesh Prediction Scheme Parallelogram
|
||||
|
||||
### Decode()
|
||||
|
||||
~~~~~
|
||||
Decode(...) {
|
||||
this->transform().InitializeDecoding(num_components);
|
||||
// restore the first value
|
||||
this->transform().ComputeOriginalValue(pred_vals.get(),
|
||||
in_corr, out_data, 0);
|
||||
// PredictionSchemeWrapTransform_ComputeOriginalValue()
|
||||
corner_map_size = this->mesh_data().data_to_corner_map()->size();
|
||||
for (p = 1; p < corner_map_size; ++p) {
|
||||
corner_id = this->mesh_data().data_to_corner_map()->at(p);
|
||||
dst_offset = p * num_components;
|
||||
b = ComputeParallelogramPrediction(p, corner_id, table,
|
||||
*vertex_to_data_map, out_data,
|
||||
num_components, pred_vals.get())
|
||||
if (!b) {
|
||||
src_offset = (p - 1) * num_components;
|
||||
this->transform().ComputeOriginalValue(out_data + src_offset, in_corr,
|
||||
out_data + dst_offset, dst_offset);
|
||||
// PredictionSchemeWrapTransform_ComputeOriginalValue()
|
||||
} else {
|
||||
this->transform().ComputeOriginalValue(pred_vals.get(), in_corr,
|
||||
out_data + dst_offset, dst_offset);
|
||||
// PredictionSchemeWrapTransform_ComputeOriginalValue()
|
||||
}
|
||||
}
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
MeshPredictionSchemeParallelogramShared
|
||||
|
||||
### ComputeParallelogramPrediction()
|
||||
|
||||
~~~~~
|
||||
ComputeParallelogramPrediction(...) {
|
||||
oci = table->Opposite(ci);
|
||||
vert_opp = vertex_to_data_map[table->Vertex(ci)];
|
||||
vert_next = vertex_to_data_map[table->Vertex(table->Next(ci))];
|
||||
vert_prev = vertex_to_data_map[table->Vertex(table->Previous(ci))];
|
||||
if (vert_opp < data_entry_id && vert_next < data_entry_id &&
|
||||
vert_prev < data_entry_id) {
|
||||
v_opp_off = vert_opp * num_components;
|
||||
v_next_off = vert_next * num_components;
|
||||
v_prev_off = vert_prev * num_components;
|
||||
for (c = 0; c < num_components; ++c) {
|
||||
out_prediction[c] = (in_data[v_next_off + c] + in_data[v_prev_off + c]) -
|
||||
in_data[v_opp_off + c];
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
~~~~~
|
62
docs/spec/mesh.traversal.sequencer.md
Normal file
62
docs/spec/mesh.traversal.sequencer.md
Normal file
@ -0,0 +1,62 @@
|
||||
|
||||
## Mesh Traversal Sequencer
|
||||
|
||||
### GenerateSequenceInternal()
|
||||
|
||||
~~~~~
|
||||
GenerateSequenceInternal() {
|
||||
traverser_.OnTraversalStart();
|
||||
If (corner_order_) {
|
||||
// TODO
|
||||
} else {
|
||||
int32_t num_faces = traverser_.corner_table()->num_faces();
|
||||
for (i = 0; i < num_faces; ++i) {
|
||||
ProcessCorner(3 * i)
|
||||
}
|
||||
}
|
||||
traverser_.OnTraversalEnd();
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### ProcessCorner()
|
||||
|
||||
~~~~~
|
||||
ProcessCorner(corner_id) {
|
||||
traverser_.TraverseFromCorner(corner_id);
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### UpdatePointToAttributeIndexMapping()
|
||||
|
||||
~~~~~
|
||||
UpdatePointToAttributeIndexMapping(PointAttribute *attribute) {
|
||||
corner_table = traverser_.corner_table();
|
||||
attribute->SetExplicitMapping(mesh_->num_points());
|
||||
num_faces = mesh_->num_faces();
|
||||
num_points = mesh_->num_points();
|
||||
for (f = 0; f < num_faces; ++f) {
|
||||
face = mesh_->face(f);
|
||||
for (p = 0; p < 3; ++p) {
|
||||
point_id = face[p];
|
||||
vert_id = corner_table->Vertex(3 * f + p);
|
||||
att_entry_id(
|
||||
encoding_data_
|
||||
->vertex_to_encoded_attribute_value_index_map[vert_id]);
|
||||
attribute->SetPointMapEntry(point_id, att_entry_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
PointsSequencer
|
||||
|
||||
### AddPointId()
|
||||
|
||||
~~~~~
|
||||
AddPointId(point_id) {
|
||||
out_point_ids_->push_back(point_id);
|
||||
}
|
||||
~~~~~
|
14
docs/spec/prediction.scheme.transform.md
Normal file
14
docs/spec/prediction.scheme.transform.md
Normal file
@ -0,0 +1,14 @@
|
||||
|
||||
## Prediction Scheme Transform
|
||||
|
||||
### ComputeOriginalValue()
|
||||
|
||||
~~~~~
|
||||
ComputeOriginalValue(const DataTypeT *predicted_vals,
|
||||
const CorrTypeT *corr_vals,
|
||||
DataTypeT *out_original_vals, int val_id) {
|
||||
for (i = 0; i < num_components_; ++i) {
|
||||
out_original_vals[i] = predicted_vals[i] + corr_vals[val_id + i];
|
||||
}
|
||||
}
|
||||
~~~~~
|
43
docs/spec/prediction.scheme.wrap.transform.md
Normal file
43
docs/spec/prediction.scheme.wrap.transform.md
Normal file
@ -0,0 +1,43 @@
|
||||
|
||||
## Prediction Scheme Wrap Transform
|
||||
|
||||
### DecodeTransformData()
|
||||
|
||||
~~~~~
|
||||
DecodeTransformData(buffer) {
|
||||
min_value_ DT
|
||||
max_value_ DT
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### ComputeOriginalValue()
|
||||
|
||||
~~~~~
|
||||
ComputeOriginalValue(const DataTypeT *predicted_vals,
|
||||
const CorrTypeT *corr_vals,
|
||||
DataTypeT *out_original_vals, int val_id) {
|
||||
clamped_vals = ClampPredictedValue(predicted_vals);
|
||||
ComputeOriginalValue(clamped_vals, corr_vals, out_original_vals, val_id)
|
||||
// PredictionSchemeTransform_ComputeOriginalValue()
|
||||
for (i = 0; i < this->num_components(); ++i) {
|
||||
if (out_original_vals[i] > max_value_) {
|
||||
out_original_vals[i] -= max_dif_;
|
||||
} else if (out_original_vals[i] < min_value_) {
|
||||
out_original_vals[i] += max_dif_;
|
||||
}
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### ClampPredictedValue()
|
||||
|
||||
~~~~~
|
||||
ClampPredictedValue(const DataTypeT *predicted_val) {
|
||||
for (i = 0; i < this->num_components(); ++i) {
|
||||
clamped_value_[i] = min(predicted_val[i], max_value_)
|
||||
clamped_value_[i] = max(predicted_val[i], min_value_)
|
||||
}
|
||||
return &clamped_value_[0];
|
||||
}
|
||||
~~~~~
|
23
docs/spec/rans.bit.decoder.md
Normal file
23
docs/spec/rans.bit.decoder.md
Normal file
@ -0,0 +1,23 @@
|
||||
|
||||
## Rans Bit Decoder
|
||||
|
||||
### RansBitDecoder_StartDecoding()
|
||||
|
||||
~~~~~
|
||||
RansBitDecoder_StartDecoding(DecoderBuffer *source_buffer) {
|
||||
prob_zero_ UI8
|
||||
size UI32
|
||||
buffer_ size * UI8
|
||||
ans_read_init(&ans_decoder_, buffer_, size)
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### DecodeNextBit()
|
||||
|
||||
~~~~~
|
||||
DecodeNextBit() {
|
||||
uint8_t bit = rabs_desc_read(&ans_decoder_, prob_zero_);
|
||||
return bit > 0;
|
||||
}
|
||||
~~~~~
|
120
docs/spec/rans.decoding.md
Normal file
120
docs/spec/rans.decoding.md
Normal file
@ -0,0 +1,120 @@
|
||||
|
||||
## Rans Decoding
|
||||
|
||||
### ans_read_init()
|
||||
|
||||
~~~~~
|
||||
ans_read_init(struct AnsDecoder *const ans, const uint8_t *const buf,
|
||||
int offset) {
|
||||
x = buf[offset - 1] >> 6
|
||||
If (x == 0) {
|
||||
ans->buf_offset = offset - 1;
|
||||
ans->state = buf[offset - 1] & 0x3F;
|
||||
} else if (x == 1) {
|
||||
ans->buf_offset = offset - 2;
|
||||
ans->state = mem_get_le16(buf + offset - 2) & 0x3FFF;
|
||||
} else if (x == 2) {
|
||||
ans->buf_offset = offset - 3;
|
||||
ans->state = mem_get_le24(buf + offset - 3) & 0x3FFFFF;
|
||||
} else if (x == 3) {
|
||||
// x == 3 implies this byte is a superframe marker
|
||||
return 1;
|
||||
}
|
||||
ans->state += l_base;
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### int rabs_desc_read()
|
||||
|
||||
~~~~~
|
||||
int rabs_desc_read(struct AnsDecoder *ans, AnsP8 p0) {
|
||||
AnsP8 p = ans_p8_precision - p0;
|
||||
if (ans->state < l_base) {
|
||||
ans->state = ans->state * io_base + ans->buf[--ans->buf_offset];
|
||||
}
|
||||
x = ans->state;
|
||||
quot = x / ans_p8_precision;
|
||||
rem = x % ans_p8_precision;
|
||||
xn = quot * p;
|
||||
val = rem < p;
|
||||
if (val) {
|
||||
ans->state = xn + rem;
|
||||
} else {
|
||||
ans->state = x - xn - p;
|
||||
}
|
||||
return val;
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
|
||||
### rans_read_init()
|
||||
|
||||
~~~~~
|
||||
rans_read_init(UI8 *buf, int offset) {
|
||||
ans_.buf = buf;
|
||||
x = buf[offset - 1] >> 6
|
||||
If (x == 0) {
|
||||
ans_.buf_offset = offset - 1;
|
||||
ans_.state = buf[offset - 1] & 0x3F;
|
||||
} else if (x == 1) {
|
||||
ans_.buf_offset = offset - 2;
|
||||
ans_.state = mem_get_le16(buf + offset - 2) & 0x3FFF;
|
||||
} else if (x == 2) {
|
||||
ans_.buf_offset = offset - 3;
|
||||
ans_.state = mem_get_le24(buf + offset - 3) & 0x3FFFFF;
|
||||
} else if (x == 3) {
|
||||
ans_.buf_offset = offset - 4;
|
||||
ans_.state = mem_get_le32(buf + offset - 4) & 0x3FFFFFFF;
|
||||
}
|
||||
ans_.state += l_rans_base;
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### rans_build_look_up_table()
|
||||
|
||||
~~~~~
|
||||
rans_build_look_up_table() {
|
||||
cum_prob = 0
|
||||
act_prob = 0
|
||||
for (i = 0; i < num_symbols; ++i) {
|
||||
probability_table_[i].prob = token_probs[i];
|
||||
probability_table_[i].cum_prob = cum_prob;
|
||||
cum_prob += token_probs[i];
|
||||
for (j = act_prob; j < cum_prob; ++j) {
|
||||
Lut_table_[j] = i
|
||||
}
|
||||
act_prob = cum_prob
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
|
||||
### rans_read()
|
||||
|
||||
~~~~~
|
||||
rans_read() {
|
||||
while (ans_.state < l_rans_base) {
|
||||
ans_.state = ans_.state * io_base + ans_.buf[--ans_.buf_offset];
|
||||
}
|
||||
quo = ans_.state / rans_precision;
|
||||
rem = ans_.state % rans_precision;
|
||||
sym = fetch_sym()
|
||||
ans_.state = quo * sym.prob + rem - sym.cum_prob;
|
||||
return sym.val;
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### fetch_sym()
|
||||
|
||||
~~~~~
|
||||
fetch_sym() {
|
||||
symbol = lut_table[rem]
|
||||
out->val = symbol
|
||||
out->prob = probability_table_[symbol].prob;
|
||||
out->cum_prob = probability_table_[symbol].cum_prob;
|
||||
}
|
||||
~~~~~
|
26
docs/spec/sequential.attribute.decoder.md
Normal file
26
docs/spec/sequential.attribute.decoder.md
Normal file
@ -0,0 +1,26 @@
|
||||
|
||||
## Sequential Attribute Decoder
|
||||
|
||||
~~~~~
|
||||
Initialize(...) {
|
||||
// Init some members
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### DecodeValues()
|
||||
|
||||
~~~~~
|
||||
DecodeValues(const std::vector<PointIndex> &point_ids) {
|
||||
num_values = point_ids.size();
|
||||
entry_size = attribute_->byte_stride();
|
||||
std::unique_ptr<uint8_t[]> value_data_ptr(new uint8_t[entry_size]);
|
||||
out_byte_pos = 0;
|
||||
for (i = 0; i < num_values; ++i) {
|
||||
value_data UI8 * entry_size
|
||||
attribute_->buffer()->Write(out_byte_pos, value_data, entry_size);
|
||||
out_byte_pos += entry_size;
|
||||
}
|
||||
}
|
||||
~~~~~
|
||||
|
52
docs/spec/sequential.attributes.decoders.controller.md
Normal file
52
docs/spec/sequential.attributes.decoders.controller.md
Normal file
@ -0,0 +1,52 @@
|
||||
|
||||
## Sequential Attributes Decoders Controller
|
||||
|
||||
### DecodeAttributesDecoderData()
|
||||
|
||||
~~~~~
|
||||
DecodeAttributesDecoderData(buffer) {
|
||||
AttributesDecoder_DecodeAttributesDecoderData(buffer)
|
||||
sequential_decoders_.resize(num_attributes());
|
||||
for (i = 0; i < num_attributes(); ++i) {
|
||||
decoder_type UI8
|
||||
sequential_decoders_[i] = CreateSequentialDecoder(decoder_type);
|
||||
sequential_decoders_[i]->Initialize(decoder(), GetAttributeId(i))
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### DecodeAttributes()
|
||||
|
||||
~~~~~
|
||||
DecodeAttributes(buffer) {
|
||||
sequencer_->GenerateSequence(&point_ids_)
|
||||
for (i = 0; i < num_attributes(); ++i) {
|
||||
pa = decoder()->point_cloud()->attribute(GetAttributeId(i));
|
||||
sequencer_->UpdatePointToAttributeIndexMapping(pa)
|
||||
}
|
||||
for (i = 0; i < num_attributes(); ++i) {
|
||||
sequential_decoders_[i]->Decode(point_ids_, buffer)
|
||||
//SequentialAttributeDecoder_Decode()
|
||||
}
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
|
||||
### CreateSequentialDecoder()
|
||||
|
||||
~~~~~
|
||||
CreateSequentialDecoder(type) {
|
||||
switch (type) {
|
||||
case SEQUENTIAL_ATTRIBUTE_ENCODER_GENERIC:
|
||||
return new SequentialAttributeDecoder()
|
||||
case SEQUENTIAL_ATTRIBUTE_ENCODER_INTEGER:
|
||||
return new SequentialIntegerAttributeDecoder()
|
||||
case SEQUENTIAL_ATTRIBUTE_ENCODER_QUANTIZATION:
|
||||
return new SequentialQuantizationAttributeDecoder()
|
||||
case SEQUENTIAL_ATTRIBUTE_ENCODER_NORMALS:
|
||||
return new SequentialNormalAttributeDecoder()
|
||||
}
|
||||
}
|
||||
~~~~~
|
||||
|
52
docs/spec/sequential.integer.attribute.decoder.md
Normal file
52
docs/spec/sequential.integer.attribute.decoder.md
Normal file
@ -0,0 +1,52 @@
|
||||
|
||||
## Sequential Integer Attribute Decoder
|
||||
|
||||
~~~~~
|
||||
Initialize(...) {
|
||||
SequentialAttributeDecoder_Initialize()
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### DecodeValues()
|
||||
|
||||
~~~~~
|
||||
DecodeValues(point_ids) {
|
||||
prediction_scheme_method I8
|
||||
if (prediction_scheme_method != PREDICTION_NONE) {
|
||||
prediction_transform_type I8
|
||||
prediction_scheme_ = CreateIntPredictionScheme(...)
|
||||
}
|
||||
if (prediction_scheme_) {
|
||||
}
|
||||
DecodeIntegerValues(point_ids)
|
||||
//SequentialQuantizationAttributeDecoder_DecodeIntegerValues()
|
||||
//StoreValues()
|
||||
DequantizeValues(num_values)
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### DecodeIntegerValues()
|
||||
|
||||
~~~~~
|
||||
DecodeIntegerValues(point_ids) {
|
||||
compressed UI8
|
||||
if (compressed) {
|
||||
DecodeSymbols(..., values_.data())
|
||||
} else {
|
||||
// TODO
|
||||
}
|
||||
if (!prediction_scheme_->AreCorrectionsPositive()) {
|
||||
ConvertSymbolsToSignedInts(...)
|
||||
}
|
||||
if (prediction_scheme_) {
|
||||
prediction_scheme_->DecodePredictionData(buffer)
|
||||
// DecodeTransformData(buffer)
|
||||
if (!values_.empty()) {
|
||||
prediction_scheme_->Decode(values_.data(), &values_[0],
|
||||
values_.size(), num_components, point_ids.data())
|
||||
// MeshPredictionSchemeParallelogram_Decode()
|
||||
}
|
||||
~~~~~
|
||||
|
46
docs/spec/sequential.quantization.attribute.decoder.md
Normal file
46
docs/spec/sequential.quantization.attribute.decoder.md
Normal file
@ -0,0 +1,46 @@
|
||||
|
||||
## Sequential Quantization Attribute Decoder
|
||||
|
||||
~~~~~
|
||||
Initialize(...) {
|
||||
SequentialIntegerAttributeDecoder_Initialize()
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### DecodeIntegerValues()
|
||||
|
||||
~~~~~
|
||||
DecodeIntegerValues(point_ids) {
|
||||
// DecodeQuantizedDataInfo()
|
||||
num_components = attribute()->components_count();
|
||||
for (i = 0; i < num_components; ++i) {
|
||||
min_value_[i] F32
|
||||
}
|
||||
max_value_dif_ F32
|
||||
quantization_bits_ UI8
|
||||
SequentialIntegerAttributeDecoder::DecodeIntegerValues()
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### DequantizeValues()
|
||||
|
||||
~~~~~
|
||||
DequantizeValues(num_values) {
|
||||
max_quantized_value = (1 << (quantization_bits_)) - 1;
|
||||
num_components = attribute()->components_count();
|
||||
entry_size = sizeof(float) * num_components;
|
||||
quant_val_id = 0;
|
||||
out_byte_pos = 0;
|
||||
for (i = 0; i < num_values; ++i) {
|
||||
for (c = 0; c < num_components; ++c) {
|
||||
value = dequantizer.DequantizeFloat(values()->at(quant_val_id++));
|
||||
value = value + min_value_[c];
|
||||
att_val[c] = value;
|
||||
}
|
||||
attribute()->buffer()->Write(out_byte_pos, att_val.get(), entry_size);
|
||||
out_byte_pos += entry_size;
|
||||
}
|
||||
}
|
||||
~~~~~
|
105
docs/spec/symbol.decoding.md
Normal file
105
docs/spec/symbol.decoding.md
Normal file
@ -0,0 +1,105 @@
|
||||
|
||||
## Symbol Decoding
|
||||
|
||||
### DecodeSymbols()
|
||||
|
||||
~~~~~
|
||||
DecodeSymbols(num_symbols, out_buffer, out_values) {
|
||||
scheme UI8
|
||||
if (scheme == 0) {
|
||||
DecodeTaggedSymbols<>(num_symbols, src_buffer, out_values)
|
||||
} else if (scheme == 1) {
|
||||
DecodeRawSymbols<>(num_symbols, src_buffer, out_values)
|
||||
}
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### DecodeTaggedSymbols()
|
||||
|
||||
~~~~~
|
||||
DecodeTaggedSymbols() {
|
||||
FIXME
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
|
||||
### DecodeRawSymbols()
|
||||
|
||||
~~~~~
|
||||
DecodeRawSymbols() {
|
||||
max_bit_length UI8
|
||||
DecodeRawSymbolsInternal(max_bit_length, out_values)
|
||||
return symbols
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
|
||||
### DecodeRawSymbolsInternal()
|
||||
|
||||
~~~~~
|
||||
DecodeRawSymbolsInternal(max_bit_length, out_values) {
|
||||
decoder = CreateRansSymbolDecoder(max_bit_length)
|
||||
decoder.StartDecoding()
|
||||
// RansSymbolDecoder_StartDecoding
|
||||
for (i = 0; i < num_values; ++i) {
|
||||
out_values[i] = decoder.DecodeSymbol()
|
||||
// RansSymbolDecoder_DecodeSymbol
|
||||
}
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### CreateRansSymbolDecoder()
|
||||
|
||||
~~~~~
|
||||
CreateRansSymbolDecoder(max_bit_length) {
|
||||
rans_precision_bits = (3 * max_bit_length) / 2;
|
||||
rans_precision_bits = min(rans_precision_bits, 20)
|
||||
rans_precision_bits = max(rans_precision_bits, 12)
|
||||
rans_precision = 1 << rans_precision_bits;
|
||||
l_rans_base = rans_precision * 4;
|
||||
num_symbols_ UI32
|
||||
for (i = 0; i < num_symbols_; ++i) {
|
||||
prob_data UI8
|
||||
if ((prob_data & 3) == 3) {
|
||||
offset = prob_data >> 2
|
||||
for (j = 0; j < offset + 1; ++j) {
|
||||
probability_table_[i + j] = 0;
|
||||
}
|
||||
i += offset;
|
||||
} else {
|
||||
token = prob_data & 3
prob = prob_data >> 2
|
||||
for (j = 0; j < token; ++j) {
|
||||
eb UI8
|
||||
prob = prob | (eb << (8 * (j + 1) - 2))
|
||||
}
|
||||
probability_table_[i] = prob;
|
||||
}
|
||||
}
|
||||
rans_build_look_up_table()
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
### RansSymbolDecoder_StartDecoding()
|
||||
|
||||
~~~~~
|
||||
RansSymbolDecoder_StartDecoding() {
|
||||
bytes_encoded UI64
|
||||
buffer bytes_encoded * UI8
|
||||
rans_read_init(buffer, bytes_encoded)
|
||||
}
|
||||
~~~~~
|
||||
|
||||
|
||||
|
||||
### RansSymbolDecoder_DecodeSymbol()
|
||||
|
||||
~~~~~
|
||||
RansSymbolDecoder_DecodeSymbol() {
|
||||
ans_.rans_read()
|
||||
}
|
||||
~~~~~
|
Loading…
x
Reference in New Issue
Block a user