Commit ef16eb42 for libheif
commit ef16eb42d429bf0474e86e866d8c31a2d2ab9962
Author: Dirk Farin <dirk.farin@gmail.com>
Date: Sun Jan 11 16:34:18 2026 +0100
handle case where SAI data is split in several chunks (#1661)
diff --git a/libheif/sequences/seq_boxes.h b/libheif/sequences/seq_boxes.h
index 7c29ac4c..3d606aa2 100644
--- a/libheif/sequences/seq_boxes.h
+++ b/libheif/sequences/seq_boxes.h
@@ -843,6 +843,8 @@ public:
uint8_t get_sample_size(uint32_t idx);
+ bool have_samples_constant_size() const { return m_default_sample_info_size != 0; }
+
uint32_t get_num_samples() const { return m_num_samples; }
std::string dump(Indent&) const override;
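
The new have_samples_constant_size() helper exposes whether the 'saiz' box declares one default size for all sample auxiliary information entries. Per ISO/IEC 14496-12, a non-zero default_sample_info_size applies to every sample and no per-sample size table is stored; a value of zero means a per-sample size array follows. A minimal standalone sketch of that rule, using a hypothetical SaizModel type rather than the actual Box_saiz API:

#include <cstdint>
#include <vector>

// Standalone model (hypothetical SaizModel type, not the Box_saiz API) of the
// 'saiz' size rule: a non-zero default_sample_info_size applies to every
// sample; a value of 0 means per-sample sizes are stored instead.
struct SaizModel {
  uint8_t default_sample_info_size = 0;
  std::vector<uint8_t> per_sample_sizes;  // only filled when the default is 0

  bool have_samples_constant_size() const { return default_sample_info_size != 0; }

  uint8_t sample_size(uint32_t idx) const {
    return have_samples_constant_size() ? default_sample_info_size
                                        : per_sample_sizes[idx];
  }
};
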
diff --git a/libheif/sequences/track.cc b/libheif/sequences/track.cc
index 35759678..caa8cec1 100644
--- a/libheif/sequences/track.cc
+++ b/libheif/sequences/track.cc
@@ -115,22 +115,36 @@ void SampleAuxInfoHelper::write_all(const std::shared_ptr<Box>& parent, const st
SampleAuxInfoReader::SampleAuxInfoReader(std::shared_ptr<Box_saiz> saiz,
- std::shared_ptr<Box_saio> saio)
+ std::shared_ptr<Box_saio> saio,
+ const std::vector<std::shared_ptr<Chunk>>& chunks)
{
m_saiz = saiz;
m_saio = saio;
- m_contiguous = (saio->get_num_chunks() == 1);
- if (m_contiguous) {
- uint64_t offset = saio->get_chunk_offset(0);
- auto nSamples = saiz->get_num_samples();
+ bool oneChunk = (saio->get_num_chunks() == 1);
+
+ uint32_t current_chunk = 0;
+ uint64_t offset = saio->get_chunk_offset(0);
+ uint32_t nSamples = saiz->get_num_samples();
+
+ m_contiguous_and_constant_size = (oneChunk && m_saiz->have_samples_constant_size());
+
+ if (m_contiguous_and_constant_size) {
+ m_singleChunk_offset = offset;
+ }
+ else {
+ m_sample_offsets.resize(nSamples);
for (uint32_t i = 0; i < nSamples; i++) {
- m_contiguous_offsets.push_back(offset);
+ if (!oneChunk && i > chunks[current_chunk]->last_sample_number()) {
+ current_chunk++;
+ assert(current_chunk < chunks.size());
+ offset = saio->get_chunk_offset(current_chunk);
+ }
+
+ m_sample_offsets[i] = offset;
offset += saiz->get_sample_size(i);
}
-
- // TODO: we could add a special case for contiguous data with constant size
}
}
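
The rewritten constructor keeps only a single base offset (m_singleChunk_offset) when there is one chunk with a constant entry size. In every other case it builds a per-sample offset table: the running offset is re-seeded at each chunk's base offset from 'saio' and otherwise accumulates the per-sample sizes from 'saiz'. A standalone sketch of that table construction, with hypothetical parameter names standing in for the saio chunk offsets, the chunks' inclusive last sample numbers, and the per-sample SAI sizes:

#include <cassert>
#include <cstdint>
#include <vector>

// Standalone sketch (hypothetical names) of the offset-table construction:
// 'chunk_offsets' are the 'saio' chunk base offsets, 'last_sample_in_chunk'
// holds each chunk's inclusive last sample number, 'sizes' the per-sample
// SAI entry sizes.
std::vector<uint64_t> build_sai_offsets(const std::vector<uint64_t>& chunk_offsets,
                                        const std::vector<uint32_t>& last_sample_in_chunk,
                                        const std::vector<uint8_t>& sizes)
{
  assert(!chunk_offsets.empty());

  std::vector<uint64_t> offsets(sizes.size());

  uint32_t current_chunk = 0;
  uint64_t offset = chunk_offsets[0];

  for (uint32_t i = 0; i < sizes.size(); i++) {
    // Sample i lies past the current chunk: continue at the next chunk's base offset.
    if (i > last_sample_in_chunk[current_chunk]) {
      current_chunk++;
      assert(current_chunk < chunk_offsets.size());
      offset = chunk_offsets[current_chunk];
    }

    offsets[i] = offset;
    offset += sizes[i];  // entries within one chunk are stored back to back
  }

  return offsets;
}
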
@@ -144,18 +158,20 @@ heif_sample_aux_info_type SampleAuxInfoReader::get_type() const
}
-Result<std::vector<uint8_t> > SampleAuxInfoReader::get_sample_info(const HeifFile* file, uint32_t idx)
+Result<std::vector<uint8_t> > SampleAuxInfoReader::get_sample_info(const HeifFile* file, uint32_t sample_idx)
{
uint64_t offset;
- if (m_contiguous) {
- offset = m_contiguous_offsets[idx];
+ uint8_t size;
+
+ if (m_contiguous_and_constant_size) {
+ size = m_saiz->get_sample_size(0);
+ offset = m_singleChunk_offset + sample_idx * size;
}
else {
- offset = m_saio->get_chunk_offset(idx);
+ offset = m_sample_offsets[sample_idx];
+ size = m_saiz->get_sample_size(sample_idx);
}
- uint8_t size = m_saiz->get_sample_size(idx);
-
std::vector<uint8_t> data;
Error err = file->append_data_from_file_range(data, offset, size);
if (err) {
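
get_sample_info() now distinguishes the two layouts: for a single chunk with constant-size entries the offset is computed arithmetically from the base offset, otherwise both offset and size come from the per-sample tables. A small sketch of that lookup, with hypothetical names for the reader state:

#include <cstdint>
#include <vector>

// Sketch (hypothetical names) of the two lookup paths: a single chunk with
// constant-size entries needs only arithmetic, otherwise the precomputed
// per-sample offset table and the per-sample size are used.
struct SaiLocation {
  uint64_t offset;
  uint8_t size;
};

SaiLocation locate_sai(bool contiguous_and_constant_size,
                       uint64_t single_chunk_offset,
                       uint8_t constant_size,
                       const std::vector<uint64_t>& sample_offsets,
                       const std::vector<uint8_t>& sample_sizes,
                       uint32_t sample_idx)
{
  if (contiguous_and_constant_size) {
    return {single_chunk_offset + sample_idx * uint64_t{constant_size}, constant_size};
  }
  return {sample_offsets[sample_idx], sample_sizes[sample_idx]};
}
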
@@ -419,11 +435,11 @@ Error Track::load(const std::shared_ptr<Box_trak>& trak_box)
}
if (aux_info_type == fourcc("suid")) {
- m_aux_reader_content_ids = std::make_unique<SampleAuxInfoReader>(saiz, saio);
+ m_aux_reader_content_ids = std::make_unique<SampleAuxInfoReader>(saiz, saio, m_chunks);
}
if (aux_info_type == fourcc("stai")) {
- m_aux_reader_tai_timestamps = std::make_unique<SampleAuxInfoReader>(saiz, saio);
+ m_aux_reader_tai_timestamps = std::make_unique<SampleAuxInfoReader>(saiz, saio, m_chunks);
}
}
else {
@@ -977,17 +993,21 @@ Error Track::init_sample_timing_table()
uint64_t current_decoding_time = 0;
uint32_t current_chunk = 0;
+ uint32_t current_sample_in_chunk_idx = 0;
for (uint32_t i = 0; i < m_num_samples; i++) {
SampleTiming timing;
timing.sampleIdx = i;
+ timing.sampleInChunkIdx = current_sample_in_chunk_idx;
timing.media_decoding_time = current_decoding_time;
timing.sample_duration_media_time = m_stts->get_sample_duration(i);
current_decoding_time += timing.sample_duration_media_time;
+ current_sample_in_chunk_idx++;
while (current_chunk < m_chunks.size() &&
i > m_chunks[current_chunk]->last_sample_number()) {
current_chunk++;
+ current_sample_in_chunk_idx=0;
if (current_chunk > m_chunks.size()) {
timing.chunkIdx = 0; // TODO: error
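
The timing table now also records each sample's index within its chunk: the counter increments per sample and restarts at zero when the loop crosses a chunk boundary. A standalone sketch of that bookkeeping, assuming each chunk is described by its inclusive last sample number (hypothetical names, not the Track internals):

#include <cstdint>
#include <vector>

// Standalone sketch (hypothetical names) of the per-chunk sample counter:
// 'last_sample_in_chunk' gives each chunk's inclusive last sample number,
// and the counter restarts at 0 whenever the loop enters the next chunk.
std::vector<uint32_t> sample_in_chunk_indices(uint32_t num_samples,
                                              const std::vector<uint32_t>& last_sample_in_chunk)
{
  std::vector<uint32_t> in_chunk(num_samples);

  uint32_t current_chunk = 0;
  uint32_t sample_in_chunk = 0;

  for (uint32_t i = 0; i < num_samples; i++) {
    // advance to the chunk that contains sample i, resetting the counter
    while (current_chunk < last_sample_in_chunk.size() &&
           i > last_sample_in_chunk[current_chunk]) {
      current_chunk++;
      sample_in_chunk = 0;
    }

    in_chunk[i] = sample_in_chunk;
    sample_in_chunk++;
  }

  return in_chunk;
}
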
diff --git a/libheif/sequences/track.h b/libheif/sequences/track.h
index 84007017..31fd3f5a 100644
--- a/libheif/sequences/track.h
+++ b/libheif/sequences/track.h
@@ -67,18 +67,24 @@ class SampleAuxInfoReader
{
public:
SampleAuxInfoReader(std::shared_ptr<Box_saiz>,
- std::shared_ptr<Box_saio>);
+ std::shared_ptr<Box_saio>,
+ const std::vector<std::shared_ptr<Chunk>>& chunks);
heif_sample_aux_info_type get_type() const;
- Result<std::vector<uint8_t>> get_sample_info(const HeifFile* file, uint32_t idx);
+ Result<std::vector<uint8_t>> get_sample_info(const HeifFile* file, uint32_t sample_idx);
private:
std::shared_ptr<Box_saiz> m_saiz;
std::shared_ptr<Box_saio> m_saio;
- bool m_contiguous;
- std::vector<uint64_t> m_contiguous_offsets;
+ // If there is only one chunk and the SAI data sizes are constant, we do not need an offset table.
+ // We just store the base offset and can directly calculate the sample offset from that.
+ bool m_contiguous_and_constant_size=false;
+ uint64_t m_singleChunk_offset=0;
+
+ // For chunked data or non-constant sample sizes, we use a table with the offsets for all SAI samples.
+ std::vector<uint64_t> m_sample_offsets;
};
@@ -202,6 +208,7 @@ protected:
struct SampleTiming {
uint32_t sampleIdx = 0;
+ uint32_t sampleInChunkIdx = 0;
uint32_t chunkIdx = 0;
uint64_t presentation_time = 0; // TODO
uint64_t media_composition_time = 0; // TODO
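
The two member layouts in SampleAuxInfoReader trade memory for generality: the offset table costs one uint64_t per sample, while the single-chunk constant-size path keeps only one base offset and derives everything arithmetically. A small self-check with hypothetical constants illustrating why the table can be skipped in that case:

#include <cassert>
#include <cstdint>

// Self-check: for a single chunk with constant-size SAI entries, the arithmetic
// fast path (base + i * size) yields exactly the offsets an explicitly
// accumulated table would contain.
int main()
{
  const uint64_t base = 4096;      // assumed chunk offset from 'saio'
  const uint8_t entry_size = 8;    // assumed constant entry size from 'saiz'
  const uint32_t num_samples = 100;

  uint64_t accumulated = base;
  for (uint32_t i = 0; i < num_samples; i++) {
    assert(accumulated == base + i * uint64_t{entry_size});
    accumulated += entry_size;
  }
  return 0;
}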