2015-09-06 19:57:33 +02:00
|
|
|
#include "./mp4container.h"
|
|
|
|
#include "./mp4ids.h"
|
2015-09-06 15:42:18 +02:00
|
|
|
|
2018-03-07 01:17:50 +01:00
|
|
|
#include "../backuphelper.h"
|
2015-09-06 19:57:33 +02:00
|
|
|
#include "../exceptions.h"
|
|
|
|
#include "../mediafileinfo.h"
|
2015-04-22 19:22:01 +02:00
|
|
|
|
2017-01-27 18:59:22 +01:00
|
|
|
#include <c++utilities/conversion/stringbuilder.h>
|
2015-04-22 19:22:01 +02:00
|
|
|
#include <c++utilities/io/binaryreader.h>
|
|
|
|
#include <c++utilities/io/binarywriter.h>
|
2018-03-07 01:17:50 +01:00
|
|
|
#include <c++utilities/io/copy.h>
|
2015-04-22 19:22:01 +02:00
|
|
|
|
2015-12-21 00:04:56 +01:00
|
|
|
#include <unistd.h>
|
|
|
|
|
2017-02-05 21:02:40 +01:00
|
|
|
#include <memory>
|
2018-03-07 01:17:50 +01:00
|
|
|
#include <numeric>
|
|
|
|
#include <tuple>
|
2015-04-22 19:22:01 +02:00
|
|
|
|
|
|
|
using namespace std;
|
2019-06-10 22:49:11 +02:00
|
|
|
using namespace CppUtilities;
|
2015-04-22 19:22:01 +02:00
|
|
|
|
2018-03-06 23:09:15 +01:00
|
|
|
namespace TagParser {
|
2015-04-22 19:22:01 +02:00
|
|
|
|
|
|
|
/*!
|
2018-06-03 20:38:32 +02:00
|
|
|
* \class TagParser::Mp4Container
|
2015-04-22 19:22:01 +02:00
|
|
|
* \brief Implementation of GenericContainer<MediaFileInfo, Mp4Tag, Mp4Track, Mp4Atom>.
|
|
|
|
*/
|
|
|
|
|
|
|
|
/*!
|
|
|
|
* \brief Constructs a new container for the specified \a fileInfo at the specified \a startOffset.
|
|
|
|
*/
|
2019-03-13 19:06:42 +01:00
|
|
|
Mp4Container::Mp4Container(MediaFileInfo &fileInfo, std::uint64_t startOffset)
    : GenericContainer<MediaFileInfo, Mp4Tag, Mp4Track, Mp4Atom>(fileInfo, startOffset)
    , m_fragmented(false) // set to true when an mvex/mehd atom is found while parsing tracks
{
}
|
2015-04-22 19:22:01 +02:00
|
|
|
|
|
|
|
/*!
 * \brief Destroys the container.
 *
 * Defaulted out-of-line so the destructor is still emitted in this translation unit
 * (where Mp4Tag/Mp4Track are complete types), matching the previous empty definition.
 */
Mp4Container::~Mp4Container() = default;
|
2015-04-22 19:22:01 +02:00
|
|
|
|
2015-10-06 22:39:18 +02:00
|
|
|
void Mp4Container::reset()
{
    // discard all previously parsed atoms, tags and tracks via the base class,
    // then clear the MP4-specific state
    GenericContainer<MediaFileInfo, Mp4Tag, Mp4Track, Mp4Atom>::reset();
    m_fragmented = false;
}
|
|
|
|
|
2018-03-05 17:49:29 +01:00
|
|
|
/*!
 * \brief Determines whether the tag (nested in moov/udta) precedes or follows the media data.
 * \returns ElementPosition::Keep when either atom is missing so no ordering can be derived.
 */
ElementPosition Mp4Container::determineTagPosition(Diagnostics &diag) const
{
    if (!m_firstElement) {
        return ElementPosition::Keep;
    }
    const auto *const mediaData = m_firstElement->siblingById(Mp4AtomIds::MediaData, diag);
    const auto *const userData = m_firstElement->subelementByPath(diag, Mp4AtomIds::Movie, Mp4AtomIds::UserData);
    if (!mediaData || !userData) {
        return ElementPosition::Keep;
    }
    return userData->startOffset() < mediaData->startOffset() ? ElementPosition::BeforeData : ElementPosition::AfterData;
}
|
|
|
|
|
2018-03-05 17:49:29 +01:00
|
|
|
/*!
 * \brief Determines whether the index (the moov atom) precedes or follows the media data.
 * \returns ElementPosition::Keep when either atom is missing so no ordering can be derived.
 */
ElementPosition Mp4Container::determineIndexPosition(Diagnostics &diag) const
{
    if (!m_firstElement) {
        return ElementPosition::Keep;
    }
    const auto *const mediaData = m_firstElement->siblingById(Mp4AtomIds::MediaData, diag);
    const auto *const movie = m_firstElement->siblingById(Mp4AtomIds::Movie, diag);
    if (!mediaData || !movie) {
        return ElementPosition::Keep;
    }
    return movie->startOffset() < mediaData->startOffset() ? ElementPosition::BeforeData : ElementPosition::AfterData;
}
|
|
|
|
|
2018-03-05 17:49:29 +01:00
|
|
|
/*!
 * \brief Parses the header of the MP4 container.
 *
 * Parses the first atom and reads the document type (major brand) and version
 * (minor version) from the ftyp atom. When no ftyp atom is present or it is
 * truncated, the doctype/version are reset to their defaults.
 */
void Mp4Container::internalParseHeader(Diagnostics &diag)
{
    const string context("parsing header of MP4 container");
    m_firstElement = make_unique<Mp4Atom>(*this, startOffset());
    m_firstElement->parse(diag);
    auto *const ftypAtom = m_firstElement->siblingByIdIncludingThis(Mp4AtomIds::FileType, diag);
    if (!ftypAtom) {
        m_doctype.clear();
        m_version = 0;
        return;
    }
    // the ftyp atom must hold at least the major brand (4 byte) and the minor version (4 byte);
    // otherwise the reads below would silently consume bytes of the following atom
    if (ftypAtom->dataSize() < 8) {
        diag.emplace_back(DiagLevel::Warning, "ftyp atom is truncated.", context);
        m_doctype.clear();
        m_version = 0;
        return;
    }
    stream().seekg(static_cast<iostream::off_type>(ftypAtom->dataOffset()));
    m_doctype = reader().readString(4);
    m_version = reader().readUInt32BE();
}
|
|
|
|
|
2018-03-05 17:49:29 +01:00
|
|
|
void Mp4Container::internalParseTags(Diagnostics &diag)
{
    const string context("parsing tags of MP4 container");
    // tags live in the meta atom(s) nested within moov/udta; nothing to do if udta is absent
    auto *const udtaAtom = firstElement()->subelementByPath(diag, Mp4AtomIds::Movie, Mp4AtomIds::UserData);
    if (!udtaAtom) {
        return;
    }
    auto *metaAtom = udtaAtom->childById(Mp4AtomIds::Meta, diag);
    bool surplusMetaAtoms = false;
    // iterate over the meta atoms until one yields a usable tag; further meta atoms are
    // only detected (to warn about them below), not parsed
    while (metaAtom) {
        metaAtom->parse(diag);
        m_tags.emplace_back(make_unique<Mp4Tag>());
        try {
            m_tags.back()->parse(*metaAtom, diag);
        } catch (const NoDataFoundException &) {
            // the meta atom contained no tag data -> discard the just-added tag and
            // keep looking at the next meta atom
            m_tags.pop_back();
        }
        // advance to the next meta sibling; if one exists it is surplus regardless of
        // whether the current one could be parsed
        if ((metaAtom = metaAtom->siblingById(Mp4AtomIds::Meta, diag))) {
            surplusMetaAtoms = true;
        }
        // stop as soon as one tag has been parsed successfully
        if (!m_tags.empty()) {
            break;
        }
    }
    if (surplusMetaAtoms) {
        diag.emplace_back(DiagLevel::Warning, "udta atom contains multiple meta atoms. Surplus meta atoms will be ignored.", context);
    }
}
|
|
|
|
|
2018-03-05 17:49:29 +01:00
|
|
|
/*!
 * \brief Parses the tracks of the MP4 container.
 *
 * Reads the overall information (creation/modification time, time scale, duration) from
 * the mvhd atom, detects fragmented files via the mvex/mehd atoms and instantiates an
 * Mp4Track for each trak atom found within the moov atom. When the overall duration or
 * timestamps could not be determined, they are derived from the parsed tracks.
 */
void Mp4Container::internalParseTracks(Diagnostics &diag)
{
    static const string context("parsing tracks of MP4 container");
    try {
        // get moov atom which holds track information
        if (Mp4Atom *moovAtom = firstElement()->siblingByIdIncludingThis(Mp4AtomIds::Movie, diag)) {
            // get mvhd atom which holds overall track information
            if (Mp4Atom *mvhdAtom = moovAtom->childById(Mp4AtomIds::MovieHeader, diag)) {
                if (mvhdAtom->dataSize() > 0) {
                    stream().seekg(static_cast<iostream::off_type>(mvhdAtom->dataOffset()));
                    std::uint8_t version = reader().readByte();
                    // version 1 stores 64-bit timestamps and duration (needs at least 32 byte of payload);
                    // other versions store 32-bit fields (needs at least 20 byte of payload); the version-1
                    // case must not fall back to the 20-byte minimum as that would read past the atom
                    if ((version == 1 && mvhdAtom->dataSize() >= 32) || (version != 1 && mvhdAtom->dataSize() >= 20)) {
                        stream().seekg(3, ios_base::cur); // skip flags
                        // timestamps are seconds since 1904-01-01 (as per ISO base media file format)
                        switch (version) {
                        case 0:
                            m_creationTime = DateTime::fromDate(1904, 1, 1) + TimeSpan::fromSeconds(reader().readUInt32BE());
                            m_modificationTime = DateTime::fromDate(1904, 1, 1) + TimeSpan::fromSeconds(reader().readUInt32BE());
                            m_timeScale = reader().readUInt32BE();
                            m_duration = TimeSpan::fromSeconds(static_cast<double>(reader().readUInt32BE()) / static_cast<double>(m_timeScale));
                            break;
                        case 1:
                            m_creationTime = DateTime::fromDate(1904, 1, 1) + TimeSpan::fromSeconds(reader().readUInt64BE());
                            m_modificationTime = DateTime::fromDate(1904, 1, 1) + TimeSpan::fromSeconds(reader().readUInt64BE());
                            m_timeScale = reader().readUInt32BE();
                            m_duration = TimeSpan::fromSeconds(static_cast<double>(reader().readUInt64BE()) / static_cast<double>(m_timeScale));
                            break;
                        default:;
                        }
                    } else {
                        diag.emplace_back(DiagLevel::Critical, "mvhd atom is truncated.", context);
                    }
                } else {
                    diag.emplace_back(DiagLevel::Critical, "mvhd atom is empty.", context);
                }
            } else {
                diag.emplace_back(DiagLevel::Critical, "mvhd atom does not exist.", context);
            }

            // get mehd atom (inside mvex) which holds the overall duration for fragmented files
            if (Mp4Atom *mehdAtom = moovAtom->subelementByPath(diag, Mp4AtomIds::MovieExtends, Mp4AtomIds::MovieExtendsHeader)) {
                // the presence of mvex indicates a fragmented file
                m_fragmented = true;
                if (mehdAtom->dataSize() > 0) {
                    stream().seekg(static_cast<iostream::off_type>(mehdAtom->dataOffset()));
                    unsigned int durationSize = reader().readByte() == 1u ? 8u : 4u; // duration size depends on atom version
                    if (mehdAtom->dataSize() >= 4 + durationSize) {
                        stream().seekg(3, ios_base::cur); // skip flags
                        switch (durationSize) {
                        case 4u:
                            m_duration = TimeSpan::fromSeconds(static_cast<double>(reader().readUInt32BE()) / static_cast<double>(m_timeScale));
                            break;
                        case 8u:
                            m_duration = TimeSpan::fromSeconds(static_cast<double>(reader().readUInt64BE()) / static_cast<double>(m_timeScale));
                            break;
                        default:;
                        }
                    } else {
                        diag.emplace_back(DiagLevel::Warning, "mehd atom is truncated.", context);
                    }
                }
            }

            // get first trak atom; each one holds the information for one track
            Mp4Atom *trakAtom = moovAtom->childById(Mp4AtomIds::Track, diag);
            int trackNum = 1;
            while (trakAtom) {
                try {
                    trakAtom->parse(diag);
                } catch (const Failure &) {
                    diag.emplace_back(DiagLevel::Warning, "Unable to parse child atom of moov.", context);
                }
                // parse the trak atom using the Mp4Track class; the track is kept even if
                // parsing its header fails so the track count stays consistent with the file
                m_tracks.emplace_back(make_unique<Mp4Track>(*trakAtom));
                try { // try to parse header
                    m_tracks.back()->parseHeader(diag);
                } catch (const Failure &) {
                    diag.emplace_back(DiagLevel::Critical, argsToString("Unable to parse track ", trackNum, '.'), context);
                }
                trakAtom = trakAtom->siblingById(Mp4AtomIds::Track, diag); // get next trak atom
                ++trackNum;
            }

            // get overall duration, creation time and modification time if not determined yet
            if (m_duration.isNull() || m_modificationTime.isNull() || m_creationTime.isNull()) {
                for (const auto &track : tracks()) {
                    if (track->duration() > m_duration) {
                        m_duration = track->duration();
                    }
                    if (track->modificationTime() > m_modificationTime) {
                        m_modificationTime = track->modificationTime();
                    }
                    // NOTE(review): when m_creationTime is still at its default no track creation
                    // time can compare smaller, so this branch likely never assigns - confirm intended
                    if (track->creationTime() < m_creationTime) {
                        m_creationTime = track->creationTime();
                    }
                }
            }
        }
    } catch (const Failure &) {
        diag.emplace_back(DiagLevel::Warning, "Unable to parse moov atom.", context);
    }
}
|
|
|
|
|
2018-03-05 17:49:29 +01:00
|
|
|
void Mp4Container::internalMakeFile(Diagnostics &diag, AbortableProgressFeedback &progress)
|
2015-04-22 19:22:01 +02:00
|
|
|
{
|
|
|
|
static const string context("making MP4 container");
|
2018-03-07 01:11:42 +01:00
|
|
|
progress.updateStep("Calculating atom sizes and padding ...");
|
2015-12-21 00:04:56 +01:00
|
|
|
|
|
|
|
// basic validation of original file
|
2018-03-07 01:17:50 +01:00
|
|
|
if (!isHeaderParsed()) {
|
2018-03-05 17:49:29 +01:00
|
|
|
diag.emplace_back(DiagLevel::Critical, "The header has not been parsed yet.", context);
|
2015-12-21 00:04:56 +01:00
|
|
|
throw InvalidDataException();
|
|
|
|
}
|
|
|
|
|
|
|
|
// define variables needed to parse atoms of original file
|
2018-03-07 01:17:50 +01:00
|
|
|
if (!firstElement()) {
|
2018-03-05 17:49:29 +01:00
|
|
|
diag.emplace_back(DiagLevel::Critical, "No MP4 atoms could be found.", context);
|
2015-04-22 19:22:01 +02:00
|
|
|
throw InvalidDataException();
|
|
|
|
}
|
2015-12-21 00:04:56 +01:00
|
|
|
|
|
|
|
// define variables needed to manage file layout
|
|
|
|
// -> whether media data is written chunk by chunk (need to write chunk by chunk if tracks have been altered)
|
|
|
|
const bool writeChunkByChunk = m_tracksAltered;
|
|
|
|
// -> whether rewrite is required (always required when forced to rewrite or when tracks have been altered)
|
|
|
|
bool rewriteRequired = fileInfo().isForcingRewrite() || writeChunkByChunk;
|
2016-11-18 16:51:10 +01:00
|
|
|
// -> use the preferred tag position/index position (force one wins, if both are force tag pos wins; might be changed later if none is forced)
|
2018-03-07 01:17:50 +01:00
|
|
|
ElementPosition initialNewTagPos
|
|
|
|
= fileInfo().forceTagPosition() || !fileInfo().forceIndexPosition() ? fileInfo().tagPosition() : fileInfo().indexPosition();
|
2016-11-19 21:25:18 +01:00
|
|
|
ElementPosition newTagPos = initialNewTagPos;
|
2015-12-21 00:04:56 +01:00
|
|
|
// -> current tag position (determined later)
|
|
|
|
ElementPosition currentTagPos;
|
2015-12-27 18:05:50 +01:00
|
|
|
// -> holds new padding (before actual data)
|
2019-03-13 19:06:42 +01:00
|
|
|
std::uint64_t newPadding;
|
2015-12-27 18:05:50 +01:00
|
|
|
// -> holds new padding (after actual data)
|
2019-03-13 19:06:42 +01:00
|
|
|
std::uint64_t newPaddingEnd;
|
2015-12-21 00:04:56 +01:00
|
|
|
// -> holds current offset
|
2019-03-13 19:06:42 +01:00
|
|
|
std::uint64_t currentOffset;
|
2015-12-21 00:04:56 +01:00
|
|
|
// -> holds track information, used when writing chunk-by-chunk
|
2019-03-13 19:06:42 +01:00
|
|
|
vector<tuple<istream *, vector<std::uint64_t>, vector<std::uint64_t>>> trackInfos;
|
2015-12-21 00:04:56 +01:00
|
|
|
// -> holds offsets of media data atoms in original file, used when simply copying mdat
|
2019-03-13 19:06:42 +01:00
|
|
|
vector<std::int64_t> origMediaDataOffsets;
|
2015-12-21 00:04:56 +01:00
|
|
|
// -> holds offsets of media data atoms in new file, used when simply copying mdat
|
2019-03-13 19:06:42 +01:00
|
|
|
vector<std::int64_t> newMediaDataOffsets;
|
2015-12-21 00:04:56 +01:00
|
|
|
// -> new size of movie atom and user data atom
|
2019-03-13 19:06:42 +01:00
|
|
|
std::uint64_t movieAtomSize, userDataAtomSize;
|
2015-12-21 00:04:56 +01:00
|
|
|
// -> track count of original file
|
|
|
|
const auto trackCount = this->trackCount();
|
|
|
|
|
|
|
|
// find relevant atoms in original file
|
2018-03-07 01:17:50 +01:00
|
|
|
Mp4Atom *fileTypeAtom, *progressiveDownloadInfoAtom, *movieAtom, *firstMediaDataAtom, *firstMovieFragmentAtom /*, *userDataAtom*/;
|
2015-12-27 18:05:50 +01:00
|
|
|
Mp4Atom *level0Atom, *level1Atom, *level2Atom, *lastAtomToBeWritten;
|
2015-04-22 19:22:01 +02:00
|
|
|
try {
|
2015-12-21 00:04:56 +01:00
|
|
|
// file type atom (mandatory)
|
2018-03-07 01:17:50 +01:00
|
|
|
if ((fileTypeAtom = firstElement()->siblingByIdIncludingThis(Mp4AtomIds::FileType, diag))) {
|
2015-12-21 00:04:56 +01:00
|
|
|
// buffer atom
|
|
|
|
fileTypeAtom->makeBuffer();
|
|
|
|
} else {
|
|
|
|
// throw error if missing
|
2018-03-05 17:49:29 +01:00
|
|
|
diag.emplace_back(DiagLevel::Critical, "Mandatory \"ftyp\"-atom not found.", context);
|
2015-12-21 00:04:56 +01:00
|
|
|
throw InvalidDataException();
|
|
|
|
}
|
|
|
|
|
|
|
|
// progressive download information atom (not mandatory)
|
2018-03-07 01:17:50 +01:00
|
|
|
if ((progressiveDownloadInfoAtom = firstElement()->siblingByIdIncludingThis(Mp4AtomIds::ProgressiveDownloadInformation, diag))) {
|
2015-12-21 00:04:56 +01:00
|
|
|
// buffer atom
|
|
|
|
progressiveDownloadInfoAtom->makeBuffer();
|
|
|
|
}
|
|
|
|
|
2015-12-27 18:05:50 +01:00
|
|
|
// movie atom (mandatory)
|
2018-03-07 01:17:50 +01:00
|
|
|
if (!(movieAtom = firstElement()->siblingByIdIncludingThis(Mp4AtomIds::Movie, diag))) {
|
2015-12-21 00:04:56 +01:00
|
|
|
// throw error if missing
|
2018-03-05 17:49:29 +01:00
|
|
|
diag.emplace_back(DiagLevel::Critical, "Mandatory \"moov\"-atom not in the source file found.", context);
|
2015-12-21 00:04:56 +01:00
|
|
|
throw InvalidDataException();
|
|
|
|
}
|
2015-12-27 18:05:50 +01:00
|
|
|
|
|
|
|
// movie fragment atom (indicates dash file)
|
2018-03-07 01:17:50 +01:00
|
|
|
if ((firstMovieFragmentAtom = firstElement()->siblingById(Mp4AtomIds::MovieFragment, diag))) {
|
2015-12-27 18:05:50 +01:00
|
|
|
// there is at least one movie fragment atom -> consider file being dash
|
|
|
|
// -> can not write chunk-by-chunk (currently)
|
2018-03-07 01:17:50 +01:00
|
|
|
if (writeChunkByChunk) {
|
2018-03-05 17:49:29 +01:00
|
|
|
diag.emplace_back(DiagLevel::Critical, "Writing chunk-by-chunk is not implemented for DASH files.", context);
|
2015-12-27 18:05:50 +01:00
|
|
|
throw NotImplementedException();
|
|
|
|
}
|
|
|
|
// -> tags must be placed at the beginning
|
|
|
|
newTagPos = ElementPosition::BeforeData;
|
|
|
|
}
|
|
|
|
|
|
|
|
// media data atom (mandatory?)
|
|
|
|
// -> consider not only mdat as media data atom; consider everything not handled otherwise as media data
|
2018-03-07 01:17:50 +01:00
|
|
|
for (firstMediaDataAtom = nullptr, level0Atom = firstElement(); level0Atom; level0Atom = level0Atom->nextSibling()) {
|
2018-03-05 17:49:29 +01:00
|
|
|
level0Atom->parse(diag);
|
2018-03-07 01:17:50 +01:00
|
|
|
switch (level0Atom->id()) {
|
|
|
|
case Mp4AtomIds::FileType:
|
|
|
|
case Mp4AtomIds::ProgressiveDownloadInformation:
|
|
|
|
case Mp4AtomIds::Movie:
|
|
|
|
case Mp4AtomIds::Free:
|
|
|
|
case Mp4AtomIds::Skip:
|
2015-12-21 00:04:56 +01:00
|
|
|
continue;
|
2015-12-27 18:05:50 +01:00
|
|
|
default:
|
|
|
|
firstMediaDataAtom = level0Atom;
|
2015-04-22 19:22:01 +02:00
|
|
|
}
|
2015-12-21 00:04:56 +01:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2015-12-27 18:05:50 +01:00
|
|
|
// determine current tag position
|
|
|
|
// -> since tags are nested in the movie atom its position is relevant here
|
2018-03-07 01:17:50 +01:00
|
|
|
if (firstMediaDataAtom) {
|
|
|
|
currentTagPos = firstMediaDataAtom->startOffset() < movieAtom->startOffset() ? ElementPosition::AfterData : ElementPosition::BeforeData;
|
|
|
|
if (newTagPos == ElementPosition::Keep) {
|
2015-12-21 00:04:56 +01:00
|
|
|
newTagPos = currentTagPos;
|
2015-04-22 19:22:01 +02:00
|
|
|
}
|
2015-12-21 00:04:56 +01:00
|
|
|
} else {
|
|
|
|
currentTagPos = ElementPosition::Keep;
|
2015-04-22 19:22:01 +02:00
|
|
|
}
|
2015-12-21 00:04:56 +01:00
|
|
|
|
2016-11-19 23:14:38 +01:00
|
|
|
// ensure index and tags are always placed at the beginning when dealing with DASH files
|
2018-03-07 01:17:50 +01:00
|
|
|
if (firstMovieFragmentAtom) {
|
|
|
|
if (initialNewTagPos == ElementPosition::AfterData) {
|
|
|
|
diag.emplace_back(
|
|
|
|
DiagLevel::Warning, "Sorry, but putting index/tags at the end is not possible when dealing with DASH files.", context);
|
2016-11-19 23:14:38 +01:00
|
|
|
}
|
|
|
|
initialNewTagPos = newTagPos = ElementPosition::BeforeData;
|
|
|
|
}
|
|
|
|
|
2017-06-10 21:46:25 +02:00
|
|
|
// user data atom (currently not used)
|
|
|
|
//userDataAtom = movieAtom->childById(Mp4AtomIds::UserData);
|
2015-12-21 00:04:56 +01:00
|
|
|
|
2015-12-27 18:05:50 +01:00
|
|
|
} catch (const NotImplementedException &) {
|
|
|
|
throw;
|
|
|
|
|
2015-12-21 00:04:56 +01:00
|
|
|
} catch (const Failure &) {
|
|
|
|
// can't ignore parsing errors here
|
2018-03-05 17:49:29 +01:00
|
|
|
diag.emplace_back(DiagLevel::Critical, "Unable to parse the overall atom structure of the source file.", context);
|
2015-12-21 00:04:56 +01:00
|
|
|
throw InvalidDataException();
|
|
|
|
}
|
|
|
|
|
2018-03-05 17:49:29 +01:00
|
|
|
progress.stopIfAborted();
|
2015-12-21 00:04:56 +01:00
|
|
|
|
|
|
|
// calculate sizes
|
|
|
|
// -> size of tags
|
|
|
|
vector<Mp4TagMaker> tagMaker;
|
2019-03-13 19:06:42 +01:00
|
|
|
std::uint64_t tagsSize = 0;
|
2015-12-21 00:04:56 +01:00
|
|
|
tagMaker.reserve(m_tags.size());
|
2018-03-07 01:17:50 +01:00
|
|
|
for (auto &tag : m_tags) {
|
2015-12-21 00:04:56 +01:00
|
|
|
try {
|
2018-03-05 17:49:29 +01:00
|
|
|
tagMaker.emplace_back(tag->prepareMaking(diag));
|
2015-12-21 00:04:56 +01:00
|
|
|
tagsSize += tagMaker.back().requiredSize();
|
2018-03-07 01:17:50 +01:00
|
|
|
} catch (const Failure &) {
|
2015-04-22 19:22:01 +02:00
|
|
|
}
|
2015-12-21 00:04:56 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
// -> size of movie atom (contains track and tag information)
|
|
|
|
movieAtomSize = userDataAtomSize = 0;
|
|
|
|
try {
|
|
|
|
// add size of children
|
2018-03-07 01:17:50 +01:00
|
|
|
for (level0Atom = movieAtom; level0Atom; level0Atom = level0Atom->siblingById(Mp4AtomIds::Movie, diag)) {
|
|
|
|
for (level1Atom = level0Atom->firstChild(); level1Atom; level1Atom = level1Atom->nextSibling()) {
|
2018-03-05 17:49:29 +01:00
|
|
|
level1Atom->parse(diag);
|
2018-03-07 01:17:50 +01:00
|
|
|
switch (level1Atom->id()) {
|
2015-12-21 00:04:56 +01:00
|
|
|
case Mp4AtomIds::UserData:
|
2015-11-07 15:23:36 +01:00
|
|
|
try {
|
2018-03-07 01:17:50 +01:00
|
|
|
for (level2Atom = level1Atom->firstChild(); level2Atom; level2Atom = level2Atom->nextSibling()) {
|
2018-03-05 17:49:29 +01:00
|
|
|
level2Atom->parse(diag);
|
2018-03-07 01:17:50 +01:00
|
|
|
switch (level2Atom->id()) {
|
2015-12-21 00:04:56 +01:00
|
|
|
case Mp4AtomIds::Meta:
|
|
|
|
// ignore meta data here; it is added separately
|
|
|
|
break;
|
|
|
|
default:
|
2019-12-30 22:54:11 +01:00
|
|
|
// add size of unknown children of the user data atom
|
2015-12-21 00:04:56 +01:00
|
|
|
userDataAtomSize += level2Atom->totalSize();
|
|
|
|
level2Atom->makeBuffer();
|
2015-04-22 19:22:01 +02:00
|
|
|
}
|
|
|
|
}
|
2018-03-07 01:17:50 +01:00
|
|
|
} catch (const Failure &) {
|
2015-12-21 00:04:56 +01:00
|
|
|
// invalid children might be ignored as not mandatory
|
2018-03-07 01:17:50 +01:00
|
|
|
diag.emplace_back(
|
|
|
|
DiagLevel::Critical, "Unable to parse the children of \"udta\"-atom of the source file; ignoring them.", context);
|
2015-04-22 19:22:01 +02:00
|
|
|
}
|
2015-12-21 00:04:56 +01:00
|
|
|
break;
|
|
|
|
case Mp4AtomIds::Track:
|
2017-06-17 00:31:35 +02:00
|
|
|
// ignore track atoms here; they are added separately
|
2015-12-21 00:04:56 +01:00
|
|
|
break;
|
|
|
|
default:
|
2019-12-30 22:54:11 +01:00
|
|
|
// add size of unknown children of the movie atom
|
2015-12-21 00:04:56 +01:00
|
|
|
movieAtomSize += level1Atom->totalSize();
|
|
|
|
level1Atom->makeBuffer();
|
2015-04-22 19:22:01 +02:00
|
|
|
}
|
2015-12-21 00:04:56 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// add size of meta data
|
2018-03-07 01:17:50 +01:00
|
|
|
if (userDataAtomSize += tagsSize) {
|
2015-12-21 00:04:56 +01:00
|
|
|
Mp4Atom::addHeaderSize(userDataAtomSize);
|
|
|
|
movieAtomSize += userDataAtomSize;
|
|
|
|
}
|
|
|
|
|
2017-06-17 00:31:35 +02:00
|
|
|
// add size of track atoms
|
2018-03-07 01:17:50 +01:00
|
|
|
for (const auto &track : tracks()) {
|
2018-03-05 17:49:29 +01:00
|
|
|
movieAtomSize += track->requiredSize(diag);
|
2015-12-21 00:04:56 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
// add header size
|
|
|
|
Mp4Atom::addHeaderSize(movieAtomSize);
|
2018-03-07 01:17:50 +01:00
|
|
|
} catch (const Failure &) {
|
2015-12-21 00:04:56 +01:00
|
|
|
// can't ignore parsing errors here
|
2018-03-05 17:49:29 +01:00
|
|
|
diag.emplace_back(DiagLevel::Critical, "Unable to parse the children of \"moov\"-atom of the source file.", context);
|
2015-12-21 00:04:56 +01:00
|
|
|
throw InvalidDataException();
|
|
|
|
}
|
|
|
|
|
2018-03-05 17:49:29 +01:00
|
|
|
progress.stopIfAborted();
|
2015-12-21 00:04:56 +01:00
|
|
|
|
2015-12-27 18:05:50 +01:00
|
|
|
// check whether there are atoms to be voided after movie next sibling (only relevant when not rewriting)
|
2018-03-07 01:17:50 +01:00
|
|
|
if (!rewriteRequired) {
|
2015-12-27 18:05:50 +01:00
|
|
|
newPaddingEnd = 0;
|
2019-03-13 19:06:42 +01:00
|
|
|
std::uint64_t currentSum = 0;
|
2018-03-07 01:17:50 +01:00
|
|
|
for (Mp4Atom *level0Atom = firstMediaDataAtom; level0Atom; level0Atom = level0Atom->nextSibling()) {
|
2018-03-05 17:49:29 +01:00
|
|
|
level0Atom->parse(diag);
|
2018-03-07 01:17:50 +01:00
|
|
|
switch (level0Atom->id()) {
|
|
|
|
case Mp4AtomIds::FileType:
|
|
|
|
case Mp4AtomIds::ProgressiveDownloadInformation:
|
|
|
|
case Mp4AtomIds::Movie:
|
|
|
|
case Mp4AtomIds::Free:
|
|
|
|
case Mp4AtomIds::Skip:
|
2015-12-27 18:05:50 +01:00
|
|
|
// must void these if they occur "between" the media data
|
|
|
|
currentSum += level0Atom->totalSize();
|
|
|
|
break;
|
|
|
|
default:
|
|
|
|
newPaddingEnd += currentSum;
|
|
|
|
currentSum = 0;
|
|
|
|
lastAtomToBeWritten = level0Atom;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-12-21 00:04:56 +01:00
|
|
|
// calculate padding if no rewrite is required; otherwise use the preferred padding
|
|
|
|
calculatePadding:
|
2018-03-07 01:17:50 +01:00
|
|
|
if (rewriteRequired) {
|
2015-12-21 00:04:56 +01:00
|
|
|
newPadding = (fileInfo().preferredPadding() && fileInfo().preferredPadding() < 8 ? 8 : fileInfo().preferredPadding());
|
|
|
|
} else {
|
|
|
|
// file type atom
|
|
|
|
currentOffset = fileTypeAtom->totalSize();
|
|
|
|
|
|
|
|
// progressive download information atom
|
2018-03-07 01:17:50 +01:00
|
|
|
if (progressiveDownloadInfoAtom) {
|
2015-12-21 00:04:56 +01:00
|
|
|
currentOffset += progressiveDownloadInfoAtom->totalSize();
|
|
|
|
}
|
|
|
|
|
|
|
|
// if writing tags before data: movie atom (contains tag)
|
2018-03-07 01:17:50 +01:00
|
|
|
switch (newTagPos) {
|
2015-12-21 00:04:56 +01:00
|
|
|
case ElementPosition::BeforeData:
|
|
|
|
case ElementPosition::Keep:
|
|
|
|
currentOffset += movieAtomSize;
|
|
|
|
break;
|
2018-03-07 01:17:50 +01:00
|
|
|
default:;
|
2015-12-21 00:04:56 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
// check whether there is sufficiant space before the next atom
|
2018-03-07 01:17:50 +01:00
|
|
|
if (!(rewriteRequired = firstMediaDataAtom && currentOffset > firstMediaDataAtom->startOffset())) {
|
2015-12-21 00:04:56 +01:00
|
|
|
// there is sufficiant space
|
|
|
|
// -> check whether the padding matches specifications
|
2015-12-27 18:05:50 +01:00
|
|
|
// min padding: says "at least ... byte should be reserved to prepend further tag info", so the padding at the end
|
|
|
|
// shouldn't be tanken into account (it can't be used to prepend further tag info)
|
2017-03-01 18:21:00 +01:00
|
|
|
// max padding: says "do not waste more than ... byte", so here all padding should be taken into account
|
2015-12-27 18:05:50 +01:00
|
|
|
newPadding = firstMediaDataAtom->startOffset() - currentOffset;
|
2018-03-07 01:17:50 +01:00
|
|
|
rewriteRequired = (newPadding > 0 && newPadding < 8) || newPadding < fileInfo().minPadding()
|
|
|
|
|| (newPadding + newPaddingEnd) > fileInfo().maxPadding();
|
2015-12-21 00:04:56 +01:00
|
|
|
}
|
2018-03-07 01:17:50 +01:00
|
|
|
if (rewriteRequired) {
|
2015-12-21 00:04:56 +01:00
|
|
|
// can't put the tags before media data
|
2018-03-07 01:17:50 +01:00
|
|
|
if (!firstMovieFragmentAtom && !fileInfo().forceTagPosition() && !fileInfo().forceIndexPosition()
|
|
|
|
&& newTagPos != ElementPosition::AfterData) {
|
2016-01-16 16:47:18 +01:00
|
|
|
// writing tag before media data is not forced, its not a DASH file and tags aren't already at the end
|
|
|
|
// -> try to put the tags at the end
|
2015-12-21 00:04:56 +01:00
|
|
|
newTagPos = ElementPosition::AfterData;
|
|
|
|
rewriteRequired = false;
|
|
|
|
} else {
|
|
|
|
// writing tag before media data is forced -> rewrite the file
|
2016-01-16 16:47:18 +01:00
|
|
|
// when rewriting anyways, ensure the preferred tag position is used
|
2016-11-19 21:25:18 +01:00
|
|
|
newTagPos = initialNewTagPos == ElementPosition::Keep ? currentTagPos : initialNewTagPos;
|
2015-12-21 00:04:56 +01:00
|
|
|
}
|
|
|
|
// in any case: recalculate padding
|
|
|
|
goto calculatePadding;
|
|
|
|
} else {
|
|
|
|
// tags can be put before the media data
|
|
|
|
// -> ensure newTagPos is not ElementPosition::Keep
|
2018-03-07 01:17:50 +01:00
|
|
|
if (newTagPos == ElementPosition::Keep) {
|
2015-12-21 00:04:56 +01:00
|
|
|
newTagPos = ElementPosition::BeforeData;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-12-22 23:54:35 +01:00
|
|
|
// setup stream(s) for writing
|
|
|
|
// -> update status
|
2018-03-05 17:49:29 +01:00
|
|
|
progress.nextStepOrStop("Preparing streams ...");
|
2015-12-22 23:54:35 +01:00
|
|
|
|
|
|
|
// -> define variables needed to handle output stream and backup stream (required when rewriting the file)
|
2015-12-21 00:04:56 +01:00
|
|
|
string backupPath;
|
2016-12-18 20:17:50 +01:00
|
|
|
NativeFileStream &outputStream = fileInfo().stream();
|
|
|
|
NativeFileStream backupStream; // create a stream to open the backup/original file for the case rewriting the file is required
|
2015-12-21 00:04:56 +01:00
|
|
|
BinaryWriter outputWriter(&outputStream);
|
|
|
|
|
2018-03-07 01:17:50 +01:00
|
|
|
if (rewriteRequired) {
|
|
|
|
if (fileInfo().saveFilePath().empty()) {
|
2016-05-01 20:02:44 +02:00
|
|
|
// move current file to temp dir and reopen it as backupStream, recreate original file
|
|
|
|
try {
|
2018-07-10 16:34:57 +02:00
|
|
|
BackupHelper::createBackupFile(fileInfo().backupDirectory(), fileInfo().path(), backupPath, outputStream, backupStream);
|
2016-05-01 20:02:44 +02:00
|
|
|
// recreate original file, define buffer variables
|
2021-01-30 21:53:06 +01:00
|
|
|
outputStream.open(BasicFileInfo::pathForOpen(fileInfo().path()).data(), ios_base::out | ios_base::binary | ios_base::trunc);
|
2019-03-13 19:06:42 +01:00
|
|
|
} catch (const std::ios_base::failure &failure) {
|
|
|
|
diag.emplace_back(
|
|
|
|
DiagLevel::Critical, argsToString("Creation of temporary file (to rewrite the original file) failed: ", failure.what()), context);
|
|
|
|
throw;
|
2016-05-01 20:02:44 +02:00
|
|
|
}
|
|
|
|
} else {
|
|
|
|
// open the current file as backupStream and create a new outputStream at the specified "save file path"
|
|
|
|
try {
|
|
|
|
backupStream.exceptions(ios_base::badbit | ios_base::failbit);
|
2021-01-30 21:53:06 +01:00
|
|
|
backupStream.open(BasicFileInfo::pathForOpen(fileInfo().path()).data(), ios_base::in | ios_base::binary);
|
2016-05-01 20:02:44 +02:00
|
|
|
fileInfo().close();
|
2021-01-30 21:53:06 +01:00
|
|
|
outputStream.open(BasicFileInfo::pathForOpen(fileInfo().saveFilePath()).data(), ios_base::out | ios_base::binary | ios_base::trunc);
|
2019-03-13 19:06:42 +01:00
|
|
|
} catch (const std::ios_base::failure &failure) {
|
|
|
|
diag.emplace_back(DiagLevel::Critical, argsToString("Opening streams to write output file failed: ", failure.what()), context);
|
|
|
|
throw;
|
2016-05-01 20:02:44 +02:00
|
|
|
}
|
2015-12-21 00:04:56 +01:00
|
|
|
}
|
|
|
|
|
2016-05-01 20:02:44 +02:00
|
|
|
// set backup stream as associated input stream since we need the original elements to write the new file
|
|
|
|
setStream(backupStream);
|
|
|
|
|
|
|
|
// TODO: reduce code duplication
|
|
|
|
|
2015-12-21 00:04:56 +01:00
|
|
|
} else { // !rewriteRequired
|
2017-06-17 00:31:35 +02:00
|
|
|
// ensure everything to make track atoms is buffered before altering the source file
|
2018-03-07 01:17:50 +01:00
|
|
|
for (const auto &track : tracks()) {
|
2018-03-05 17:49:29 +01:00
|
|
|
track->bufferTrackAtoms(diag);
|
2017-06-17 00:31:35 +02:00
|
|
|
}
|
|
|
|
|
2015-12-21 00:04:56 +01:00
|
|
|
// reopen original file to ensure it is opened for writing
|
|
|
|
try {
|
|
|
|
fileInfo().close();
|
|
|
|
outputStream.open(fileInfo().path(), ios_base::in | ios_base::out | ios_base::binary);
|
2019-03-13 19:06:42 +01:00
|
|
|
} catch (const std::ios_base::failure &failure) {
|
|
|
|
diag.emplace_back(DiagLevel::Critical, argsToString("Opening the file with write permissions failed: ", failure.what()), context);
|
|
|
|
throw;
|
2015-12-21 00:04:56 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// start actual writing
|
|
|
|
try {
|
|
|
|
// write header
|
2018-03-05 17:49:29 +01:00
|
|
|
progress.nextStepOrStop("Writing header and tags ...");
|
2015-12-21 00:04:56 +01:00
|
|
|
// -> make file type atom
|
|
|
|
fileTypeAtom->copyBuffer(outputStream);
|
|
|
|
fileTypeAtom->discardBuffer();
|
|
|
|
// -> make progressive download info atom
|
2018-03-07 01:17:50 +01:00
|
|
|
if (progressiveDownloadInfoAtom) {
|
2015-12-21 00:04:56 +01:00
|
|
|
progressiveDownloadInfoAtom->copyBuffer(outputStream);
|
|
|
|
progressiveDownloadInfoAtom->discardBuffer();
|
|
|
|
}
|
|
|
|
|
2017-05-28 21:01:16 +02:00
|
|
|
// set input/output streams of each track
|
2018-03-07 01:17:50 +01:00
|
|
|
for (auto &track : tracks()) {
|
2017-05-28 21:01:16 +02:00
|
|
|
// ensure the track reads from the original file
|
2018-03-07 01:17:50 +01:00
|
|
|
if (&track->inputStream() == &outputStream) {
|
2017-05-28 21:01:16 +02:00
|
|
|
track->setInputStream(backupStream);
|
|
|
|
}
|
|
|
|
// ensure the track writes to the output file
|
|
|
|
track->setOutputStream(outputStream);
|
|
|
|
}
|
|
|
|
|
2015-12-21 00:04:56 +01:00
|
|
|
// write movie atom / padding and media data
|
2019-03-13 19:06:42 +01:00
|
|
|
for (std::uint8_t pass = 0; pass != 2; ++pass) {
|
2018-03-07 01:17:50 +01:00
|
|
|
if (newTagPos == (pass ? ElementPosition::AfterData : ElementPosition::BeforeData)) {
|
2019-04-16 21:48:33 +02:00
|
|
|
// define function to write tracks
|
|
|
|
bool tracksWritten = false;
|
|
|
|
const auto writeTracks = [&] {
|
|
|
|
if (tracksWritten) {
|
|
|
|
return;
|
|
|
|
}
|
2015-12-21 00:04:56 +01:00
|
|
|
|
2019-04-16 21:48:33 +02:00
|
|
|
for (auto &track : tracks()) {
|
|
|
|
track->makeTrack(diag);
|
|
|
|
}
|
|
|
|
tracksWritten = true;
|
|
|
|
};
|
|
|
|
|
|
|
|
// define function to write user data
|
|
|
|
bool userDataWritten = false;
|
|
|
|
const auto writeUserData = [&] {
|
|
|
|
if (userDataWritten || !userDataAtomSize) {
|
|
|
|
return;
|
2015-11-07 15:23:36 +01:00
|
|
|
}
|
2015-12-21 00:04:56 +01:00
|
|
|
|
|
|
|
// write user data atom header
|
|
|
|
Mp4Atom::makeHeader(userDataAtomSize, Mp4AtomIds::UserData, outputWriter);
|
|
|
|
|
2019-04-16 21:48:33 +02:00
|
|
|
// write children of user data atom
|
|
|
|
bool metaAtomWritten = false;
|
|
|
|
for (Mp4Atom *level0Atom = movieAtom; level0Atom; level0Atom = level0Atom->siblingById(Mp4AtomIds::Movie, diag)) {
|
|
|
|
for (Mp4Atom *level1Atom = level0Atom->childById(Mp4AtomIds::UserData, diag); level1Atom;
|
2018-03-07 01:17:50 +01:00
|
|
|
level1Atom = level1Atom->siblingById(Mp4AtomIds::UserData, diag)) {
|
2019-04-16 21:48:33 +02:00
|
|
|
for (Mp4Atom *level2Atom = level1Atom->firstChild(); level2Atom; level2Atom = level2Atom->nextSibling()) {
|
2018-03-07 01:17:50 +01:00
|
|
|
switch (level2Atom->id()) {
|
2015-12-21 00:04:56 +01:00
|
|
|
case Mp4AtomIds::Meta:
|
2019-04-16 21:48:33 +02:00
|
|
|
// write meta atom
|
|
|
|
for (auto &maker : tagMaker) {
|
|
|
|
maker.make(outputStream, diag);
|
|
|
|
}
|
|
|
|
metaAtomWritten = true;
|
2015-12-21 00:04:56 +01:00
|
|
|
break;
|
|
|
|
default:
|
|
|
|
// write buffered data
|
|
|
|
level2Atom->copyBuffer(outputStream);
|
|
|
|
level2Atom->discardBuffer();
|
|
|
|
}
|
|
|
|
}
|
2015-11-07 15:23:36 +01:00
|
|
|
}
|
|
|
|
}
|
2015-12-21 00:04:56 +01:00
|
|
|
|
2019-04-16 21:48:33 +02:00
|
|
|
// write meta atom if not already written
|
|
|
|
if (!metaAtomWritten) {
|
|
|
|
for (auto &maker : tagMaker) {
|
|
|
|
maker.make(outputStream, diag);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
userDataWritten = true;
|
|
|
|
};
|
|
|
|
|
|
|
|
// write movie atom
|
|
|
|
// -> write movie atom header
|
|
|
|
Mp4Atom::makeHeader(movieAtomSize, Mp4AtomIds::Movie, outputWriter);
|
|
|
|
|
|
|
|
// -> write children of movie atom preserving the original order
|
|
|
|
for (level0Atom = movieAtom; level0Atom; level0Atom = level0Atom->siblingById(Mp4AtomIds::Movie, diag)) {
|
|
|
|
for (level1Atom = level0Atom->firstChild(); level1Atom; level1Atom = level1Atom->nextSibling()) {
|
|
|
|
switch (level1Atom->id()) {
|
|
|
|
case Mp4AtomIds::Track:
|
|
|
|
writeTracks();
|
|
|
|
break;
|
|
|
|
case Mp4AtomIds::UserData:
|
|
|
|
writeUserData();
|
|
|
|
break;
|
|
|
|
default:
|
|
|
|
// write buffered data
|
|
|
|
level1Atom->copyBuffer(outputStream);
|
|
|
|
level1Atom->discardBuffer();
|
|
|
|
}
|
2015-12-21 00:04:56 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-04-16 21:48:33 +02:00
|
|
|
// -> write tracks and user data atoms if not already happened within the loop
|
|
|
|
writeTracks();
|
|
|
|
writeUserData();
|
|
|
|
|
2015-12-21 00:04:56 +01:00
|
|
|
} else {
|
|
|
|
// write padding
|
2018-03-07 01:17:50 +01:00
|
|
|
if (newPadding) {
|
2015-12-21 00:04:56 +01:00
|
|
|
// write free atom header
|
2019-03-13 19:06:42 +01:00
|
|
|
if (newPadding < numeric_limits<std::uint32_t>::max()) {
|
|
|
|
outputWriter.writeUInt32BE(static_cast<std::uint32_t>(newPadding));
|
2015-12-21 00:04:56 +01:00
|
|
|
outputWriter.writeUInt32BE(Mp4AtomIds::Free);
|
|
|
|
newPadding -= 8;
|
|
|
|
} else {
|
|
|
|
outputWriter.writeUInt32BE(1);
|
|
|
|
outputWriter.writeUInt32BE(Mp4AtomIds::Free);
|
|
|
|
outputWriter.writeUInt64BE(newPadding);
|
|
|
|
newPadding -= 16;
|
|
|
|
}
|
|
|
|
|
|
|
|
// write zeroes
|
2018-03-07 01:17:50 +01:00
|
|
|
for (; newPadding; --newPadding) {
|
2015-12-21 00:04:56 +01:00
|
|
|
outputStream.put(0);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// write media data
|
2018-03-07 01:17:50 +01:00
|
|
|
if (rewriteRequired) {
|
|
|
|
for (level0Atom = firstMediaDataAtom; level0Atom; level0Atom = level0Atom->nextSibling()) {
|
2018-03-05 17:49:29 +01:00
|
|
|
level0Atom->parse(diag);
|
2018-03-07 01:17:50 +01:00
|
|
|
switch (level0Atom->id()) {
|
|
|
|
case Mp4AtomIds::FileType:
|
|
|
|
case Mp4AtomIds::ProgressiveDownloadInformation:
|
|
|
|
case Mp4AtomIds::Movie:
|
|
|
|
case Mp4AtomIds::Free:
|
|
|
|
case Mp4AtomIds::Skip:
|
2015-12-21 00:04:56 +01:00
|
|
|
break;
|
|
|
|
case Mp4AtomIds::MediaData:
|
2018-03-07 01:17:50 +01:00
|
|
|
if (writeChunkByChunk) {
|
2015-12-21 00:04:56 +01:00
|
|
|
// write actual data separately when writing chunk-by-chunk
|
|
|
|
break;
|
|
|
|
} else {
|
|
|
|
// store media data offsets when not writing chunk-by-chunk to be able to update chunk offset table
|
2019-03-13 19:06:42 +01:00
|
|
|
origMediaDataOffsets.push_back(static_cast<std::int64_t>(level0Atom->startOffset()));
|
2015-12-21 00:04:56 +01:00
|
|
|
newMediaDataOffsets.push_back(outputStream.tellp());
|
|
|
|
}
|
2019-06-12 20:40:45 +02:00
|
|
|
[[fallthrough]];
|
2015-12-21 00:04:56 +01:00
|
|
|
default:
|
|
|
|
// update status
|
2018-03-05 17:49:29 +01:00
|
|
|
progress.updateStep("Writing atom: " + level0Atom->idToString());
|
2015-12-21 00:04:56 +01:00
|
|
|
// copy atom entirely and forward status update calls
|
2018-03-05 17:49:29 +01:00
|
|
|
level0Atom->copyEntirely(outputStream, diag, &progress);
|
2015-11-07 15:23:36 +01:00
|
|
|
}
|
2015-12-21 00:04:56 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
// when writing chunk-by-chunk write media data now
|
2018-03-07 01:17:50 +01:00
|
|
|
if (writeChunkByChunk) {
|
2015-12-21 00:04:56 +01:00
|
|
|
// read chunk offset and chunk size table from the old file which are required to get chunks
|
2018-03-05 17:49:29 +01:00
|
|
|
progress.updateStep("Reading chunk offsets and sizes from the original file ...");
|
2015-12-21 00:04:56 +01:00
|
|
|
trackInfos.reserve(trackCount);
|
2019-03-13 19:06:42 +01:00
|
|
|
std::uint64_t totalChunkCount = 0;
|
|
|
|
std::uint64_t totalMediaDataSize = 0;
|
2018-03-07 01:17:50 +01:00
|
|
|
for (auto &track : tracks()) {
|
2018-03-05 17:49:29 +01:00
|
|
|
progress.stopIfAborted();
|
2015-12-21 00:04:56 +01:00
|
|
|
|
|
|
|
// emplace information
|
2018-03-07 01:17:50 +01:00
|
|
|
trackInfos.emplace_back(
|
|
|
|
&track->inputStream(), track->readChunkOffsets(fileInfo().isForcingFullParse(), diag), track->readChunkSizes(diag));
|
2015-12-21 00:04:56 +01:00
|
|
|
|
|
|
|
// check whether the chunks could be parsed correctly
|
2019-03-13 19:06:42 +01:00
|
|
|
const vector<std::uint64_t> &chunkOffsetTable = get<1>(trackInfos.back());
|
|
|
|
const vector<std::uint64_t> &chunkSizesTable = get<2>(trackInfos.back());
|
2018-03-07 01:17:50 +01:00
|
|
|
if (track->chunkCount() != chunkOffsetTable.size() || track->chunkCount() != chunkSizesTable.size()) {
|
|
|
|
diag.emplace_back(DiagLevel::Critical,
|
2019-03-13 19:06:42 +01:00
|
|
|
"Chunks of track " % numberToString<std::uint64_t, string>(track->id()) + " could not be parsed correctly.",
|
|
|
|
context);
|
2015-12-21 00:04:56 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
// increase total chunk count and size
|
|
|
|
totalChunkCount += track->chunkCount();
|
2017-09-14 18:16:40 +02:00
|
|
|
totalMediaDataSize += accumulate(chunkSizesTable.cbegin(), chunkSizesTable.cend(), 0ul);
|
2015-11-07 15:23:36 +01:00
|
|
|
}
|
2015-12-21 00:04:56 +01:00
|
|
|
|
|
|
|
// write media data chunk-by-chunk
|
|
|
|
// -> write header of media data atom
|
|
|
|
Mp4Atom::addHeaderSize(totalMediaDataSize);
|
|
|
|
Mp4Atom::makeHeader(totalMediaDataSize, Mp4AtomIds::MediaData, outputWriter);
|
|
|
|
|
|
|
|
// -> copy chunks
|
|
|
|
CopyHelper<0x2000> copyHelper;
|
2019-03-13 19:06:42 +01:00
|
|
|
std::uint64_t chunkIndexWithinTrack = 0, totalChunksCopied = 0;
|
2015-12-21 00:04:56 +01:00
|
|
|
bool anyChunksCopied;
|
|
|
|
do {
|
2018-03-05 17:49:29 +01:00
|
|
|
progress.stopIfAborted();
|
2015-12-21 00:04:56 +01:00
|
|
|
|
|
|
|
// copy a chunk from each track
|
|
|
|
anyChunksCopied = false;
|
2018-03-07 01:17:50 +01:00
|
|
|
for (size_t trackIndex = 0; trackIndex < trackCount; ++trackIndex) {
|
2015-12-21 00:04:56 +01:00
|
|
|
// get source stream and tables for current track
|
|
|
|
auto &trackInfo = trackInfos[trackIndex];
|
|
|
|
istream &sourceStream = *get<0>(trackInfo);
|
2019-03-13 19:06:42 +01:00
|
|
|
vector<std::uint64_t> &chunkOffsetTable = get<1>(trackInfo);
|
|
|
|
const vector<std::uint64_t> &chunkSizesTable = get<2>(trackInfo);
|
2015-12-21 00:04:56 +01:00
|
|
|
|
|
|
|
// still chunks to be copied (of this track)?
|
2018-03-07 01:17:50 +01:00
|
|
|
if (chunkIndexWithinTrack < chunkOffsetTable.size() && chunkIndexWithinTrack < chunkSizesTable.size()) {
|
2015-12-21 00:04:56 +01:00
|
|
|
// copy chunk, update entry in chunk offset table
|
2018-06-02 22:56:08 +02:00
|
|
|
sourceStream.seekg(static_cast<streamoff>(chunkOffsetTable[chunkIndexWithinTrack]));
|
2019-03-13 19:06:42 +01:00
|
|
|
chunkOffsetTable[chunkIndexWithinTrack] = static_cast<std::uint64_t>(outputStream.tellp());
|
2015-12-21 00:04:56 +01:00
|
|
|
copyHelper.copy(sourceStream, outputStream, chunkSizesTable[chunkIndexWithinTrack]);
|
|
|
|
|
|
|
|
// update counter / status
|
|
|
|
anyChunksCopied = true;
|
|
|
|
++totalChunksCopied;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// increase chunk index within track, update progress percentage
|
2018-03-07 01:17:50 +01:00
|
|
|
if (!(++chunkIndexWithinTrack % 10)) {
|
2019-03-13 19:06:42 +01:00
|
|
|
progress.updateStepPercentage(static_cast<std::uint8_t>(totalChunksCopied * 100 / totalChunkCount));
|
2015-12-21 00:04:56 +01:00
|
|
|
}
|
|
|
|
|
2018-03-07 01:17:50 +01:00
|
|
|
} while (anyChunksCopied);
|
2015-12-21 00:04:56 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
} else {
|
2015-12-27 18:05:50 +01:00
|
|
|
// can't just skip next movie sibling
|
2018-03-07 01:17:50 +01:00
|
|
|
for (Mp4Atom *level0Atom = firstMediaDataAtom; level0Atom; level0Atom = level0Atom->nextSibling()) {
|
2018-03-05 17:49:29 +01:00
|
|
|
level0Atom->parse(diag);
|
2018-03-07 01:17:50 +01:00
|
|
|
switch (level0Atom->id()) {
|
|
|
|
case Mp4AtomIds::FileType:
|
|
|
|
case Mp4AtomIds::ProgressiveDownloadInformation:
|
|
|
|
case Mp4AtomIds::Movie:
|
2015-12-27 18:05:50 +01:00
|
|
|
// must void these if they occur "between" the media data
|
|
|
|
outputStream.seekp(4, ios_base::cur);
|
|
|
|
outputWriter.writeUInt32BE(Mp4AtomIds::Free);
|
|
|
|
break;
|
|
|
|
default:
|
2018-02-05 00:59:44 +01:00
|
|
|
outputStream.seekp(static_cast<iostream::off_type>(level0Atom->totalSize()), ios_base::cur);
|
2015-12-27 18:05:50 +01:00
|
|
|
}
|
2018-03-07 01:17:50 +01:00
|
|
|
if (level0Atom == lastAtomToBeWritten) {
|
2015-12-27 18:05:50 +01:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
2015-04-22 19:22:01 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2015-12-21 00:04:56 +01:00
|
|
|
|
|
|
|
// reparse what is written so far
|
2018-03-05 17:49:29 +01:00
|
|
|
progress.updateStep("Reparsing output file ...");
|
2018-03-07 01:17:50 +01:00
|
|
|
if (rewriteRequired) {
|
2015-12-22 17:00:14 +01:00
|
|
|
// report new size
|
2019-03-13 19:06:42 +01:00
|
|
|
fileInfo().reportSizeChanged(static_cast<std::uint64_t>(outputStream.tellp()));
|
2016-05-01 20:02:44 +02:00
|
|
|
// "save as path" is now the regular path
|
2018-03-07 01:17:50 +01:00
|
|
|
if (!fileInfo().saveFilePath().empty()) {
|
2016-05-01 20:02:44 +02:00
|
|
|
fileInfo().reportPathChanged(fileInfo().saveFilePath());
|
|
|
|
fileInfo().setSaveFilePath(string());
|
|
|
|
}
|
2015-12-22 17:00:14 +01:00
|
|
|
// the outputStream needs to be reopened to be able to read again
|
|
|
|
outputStream.close();
|
2021-01-30 21:53:06 +01:00
|
|
|
outputStream.open(BasicFileInfo::pathForOpen(fileInfo().path()).data(), ios_base::in | ios_base::out | ios_base::binary);
|
2015-12-21 00:04:56 +01:00
|
|
|
setStream(outputStream);
|
|
|
|
} else {
|
2019-03-13 19:06:42 +01:00
|
|
|
const auto newSize = static_cast<std::uint64_t>(outputStream.tellp());
|
2018-03-07 01:17:50 +01:00
|
|
|
if (newSize < fileInfo().size()) {
|
2015-12-22 17:00:14 +01:00
|
|
|
// file is smaller after the modification -> truncate
|
|
|
|
// -> close stream before truncating
|
2015-12-21 00:04:56 +01:00
|
|
|
outputStream.close();
|
2015-12-22 17:00:14 +01:00
|
|
|
// -> truncate file
|
2021-01-30 21:53:06 +01:00
|
|
|
if (truncate(BasicFileInfo::pathForOpen(fileInfo().path()).data(), static_cast<iostream::off_type>(newSize)) == 0) {
|
2015-12-21 00:04:56 +01:00
|
|
|
fileInfo().reportSizeChanged(newSize);
|
|
|
|
} else {
|
2018-03-05 17:49:29 +01:00
|
|
|
diag.emplace_back(DiagLevel::Critical, "Unable to truncate the file.", context);
|
2015-12-21 00:04:56 +01:00
|
|
|
}
|
2015-12-22 17:00:14 +01:00
|
|
|
// -> reopen the stream again
|
2021-01-30 21:53:06 +01:00
|
|
|
outputStream.open(BasicFileInfo::pathForOpen(fileInfo().path()).data(), ios_base::in | ios_base::out | ios_base::binary);
|
2015-12-22 17:00:14 +01:00
|
|
|
} else {
|
|
|
|
// file is longer after the modification -> just report new size
|
|
|
|
fileInfo().reportSizeChanged(newSize);
|
2015-12-21 00:04:56 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
reset();
|
2015-04-22 19:22:01 +02:00
|
|
|
try {
|
2018-03-05 17:49:29 +01:00
|
|
|
parseTracks(diag);
|
2018-03-07 01:17:50 +01:00
|
|
|
} catch (const Failure &) {
|
2018-03-05 17:49:29 +01:00
|
|
|
diag.emplace_back(DiagLevel::Critical, "Unable to reparse the new file.", context);
|
2015-04-22 19:22:01 +02:00
|
|
|
throw;
|
|
|
|
}
|
2015-12-21 00:04:56 +01:00
|
|
|
|
2018-03-07 01:17:50 +01:00
|
|
|
if (rewriteRequired) {
|
2019-12-15 19:44:44 +01:00
|
|
|
// check whether the track count of the new file equals the track count of old file
|
2018-03-07 01:17:50 +01:00
|
|
|
if (trackCount != tracks().size()) {
|
2018-03-05 17:49:29 +01:00
|
|
|
diag.emplace_back(DiagLevel::Critical,
|
2019-12-15 19:44:44 +01:00
|
|
|
argsToString("Unable to update chunk offsets (\"stco\"/\"co64\"-atom): Number of tracks in the output file (", tracks().size(),
|
2018-03-07 01:17:50 +01:00
|
|
|
") differs from the number of tracks in the original file (", trackCount, ")."),
|
|
|
|
context);
|
2015-12-21 00:04:56 +01:00
|
|
|
throw Failure();
|
|
|
|
}
|
|
|
|
|
|
|
|
// update chunk offset table
|
2018-03-07 01:17:50 +01:00
|
|
|
if (writeChunkByChunk) {
|
2018-03-05 17:49:29 +01:00
|
|
|
progress.updateStep("Updating chunk offset table for each track ...");
|
2018-03-07 01:17:50 +01:00
|
|
|
for (size_t trackIndex = 0; trackIndex != trackCount; ++trackIndex) {
|
2015-12-21 00:04:56 +01:00
|
|
|
const auto &track = tracks()[trackIndex];
|
|
|
|
const auto &chunkOffsetTable = get<1>(trackInfos[trackIndex]);
|
2018-03-07 01:17:50 +01:00
|
|
|
if (track->chunkCount() == chunkOffsetTable.size()) {
|
2015-12-21 00:04:56 +01:00
|
|
|
track->updateChunkOffsets(chunkOffsetTable);
|
|
|
|
} else {
|
2018-03-07 01:17:50 +01:00
|
|
|
diag.emplace_back(DiagLevel::Critical,
|
|
|
|
argsToString("Unable to update chunk offsets of track ", (trackIndex + 1),
|
|
|
|
": Number of chunks in the output file differs from the number of chunks in the orignal file."),
|
|
|
|
context);
|
2015-12-21 00:04:56 +01:00
|
|
|
throw Failure();
|
|
|
|
}
|
2015-04-22 19:22:01 +02:00
|
|
|
}
|
2015-12-21 00:04:56 +01:00
|
|
|
} else {
|
2018-03-05 17:49:29 +01:00
|
|
|
progress.updateStep("Updating chunk offset table for each track ...");
|
|
|
|
updateOffsets(origMediaDataOffsets, newMediaDataOffsets, diag);
|
2015-04-22 19:22:01 +02:00
|
|
|
}
|
|
|
|
}
|
2015-12-21 00:04:56 +01:00
|
|
|
|
2019-12-15 19:43:16 +01:00
|
|
|
// prevent deferring final write operations (to catch and handle possible errors here)
|
2015-04-22 19:22:01 +02:00
|
|
|
outputStream.flush();
|
2015-12-21 00:04:56 +01:00
|
|
|
|
2018-07-23 14:44:06 +02:00
|
|
|
// handle errors (which might have been occurred after renaming/creating backup file)
|
2018-03-07 01:17:50 +01:00
|
|
|
} catch (...) {
|
2018-03-05 17:49:29 +01:00
|
|
|
BackupHelper::handleFailureAfterFileModified(fileInfo(), backupPath, outputStream, backupStream, diag, context);
|
2015-04-22 19:22:01 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/*!
|
|
|
|
* \brief Update the chunk offsets for each track of the file.
|
|
|
|
* \param oldMdatOffsets Specifies a vector holding the old offsets of the "mdat"-atoms.
|
|
|
|
* \param newMdatOffsets Specifies a vector holding the new offsets of the "mdat"-atoms.
|
|
|
|
*
|
2015-11-07 15:23:36 +01:00
|
|
|
* Uses internally Mp4Track::updateOffsets(). Offsets stored in the "tfhd"-atom are also
|
2015-04-22 19:22:01 +02:00
|
|
|
* updated (this is not tested yet since I don't have files using this atom).
|
|
|
|
*
|
|
|
|
* \throws Throws std::ios_base::failure when an IO error occurs.
|
2018-06-03 20:38:32 +02:00
|
|
|
* \throws Throws TagParser::Failure or a derived exception when a making
|
2015-04-22 19:22:01 +02:00
|
|
|
* error occurs.
|
|
|
|
*/
|
2019-03-13 19:06:42 +01:00
|
|
|
void Mp4Container::updateOffsets(const std::vector<std::int64_t> &oldMdatOffsets, const std::vector<std::int64_t> &newMdatOffsets, Diagnostics &diag)
|
2015-04-22 19:22:01 +02:00
|
|
|
{
|
|
|
|
// do NOT invalidate the status here since this method is internally called by internalMakeFile(), just update the status
|
|
|
|
const string context("updating MP4 container chunk offset table");
|
2018-03-07 01:17:50 +01:00
|
|
|
if (!firstElement()) {
|
2018-03-05 17:49:29 +01:00
|
|
|
diag.emplace_back(DiagLevel::Critical, "No MP4 atoms could be found.", context);
|
2015-04-22 19:22:01 +02:00
|
|
|
throw InvalidDataException();
|
|
|
|
}
|
2015-11-07 15:23:36 +01:00
|
|
|
// update "base-data-offset-present" of "tfhd"-atom (NOT tested properly)
|
2015-04-22 19:22:01 +02:00
|
|
|
try {
|
2018-03-07 01:17:50 +01:00
|
|
|
for (Mp4Atom *moofAtom = firstElement()->siblingById(Mp4AtomIds::MovieFragment, diag); moofAtom;
|
|
|
|
moofAtom = moofAtom->siblingById(Mp4AtomIds::MovieFragment, diag)) {
|
2018-03-05 17:49:29 +01:00
|
|
|
moofAtom->parse(diag);
|
2015-04-22 19:22:01 +02:00
|
|
|
try {
|
2018-03-07 01:17:50 +01:00
|
|
|
for (Mp4Atom *trafAtom = moofAtom->childById(Mp4AtomIds::TrackFragment, diag); trafAtom;
|
|
|
|
trafAtom = trafAtom->siblingById(Mp4AtomIds::TrackFragment, diag)) {
|
2018-03-05 17:49:29 +01:00
|
|
|
trafAtom->parse(diag);
|
2015-04-22 19:22:01 +02:00
|
|
|
int tfhdAtomCount = 0;
|
2018-03-07 01:17:50 +01:00
|
|
|
for (Mp4Atom *tfhdAtom = trafAtom->childById(Mp4AtomIds::TrackFragmentHeader, diag); tfhdAtom;
|
|
|
|
tfhdAtom = tfhdAtom->siblingById(Mp4AtomIds::TrackFragmentHeader, diag)) {
|
2018-03-05 17:49:29 +01:00
|
|
|
tfhdAtom->parse(diag);
|
2015-04-22 19:22:01 +02:00
|
|
|
++tfhdAtomCount;
|
2018-03-07 01:17:50 +01:00
|
|
|
if (tfhdAtom->dataSize() < 8) {
|
2018-03-05 17:49:29 +01:00
|
|
|
diag.emplace_back(DiagLevel::Warning, "tfhd atom is truncated.", context);
|
2018-03-07 01:11:42 +01:00
|
|
|
continue;
|
|
|
|
}
|
|
|
|
stream().seekg(static_cast<iostream::off_type>(tfhdAtom->dataOffset()) + 1);
|
2019-03-13 19:06:42 +01:00
|
|
|
std::uint32_t flags = reader().readUInt24BE();
|
2018-03-07 01:17:50 +01:00
|
|
|
if (!(flags & 1)) {
|
2018-03-07 01:11:42 +01:00
|
|
|
continue;
|
|
|
|
}
|
2018-03-07 01:17:50 +01:00
|
|
|
if (tfhdAtom->dataSize() < 16) {
|
2018-03-07 01:11:42 +01:00
|
|
|
diag.emplace_back(DiagLevel::Warning, "tfhd atom (denoting base-data-offset-present) is truncated.", context);
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
stream().seekg(4, ios_base::cur); // skip track ID
|
2019-03-13 19:06:42 +01:00
|
|
|
std::uint64_t off = reader().readUInt64BE();
|
2018-03-07 01:17:50 +01:00
|
|
|
for (auto iOld = oldMdatOffsets.cbegin(), iNew = newMdatOffsets.cbegin(), end = oldMdatOffsets.cend(); iOld != end;
|
|
|
|
++iOld, ++iNew) {
|
2019-03-13 19:06:42 +01:00
|
|
|
if (off < static_cast<std::uint64_t>(*iOld)) {
|
2018-06-02 22:56:08 +02:00
|
|
|
continue;
|
2018-03-07 01:11:42 +01:00
|
|
|
}
|
2019-03-13 19:06:42 +01:00
|
|
|
off += static_cast<std::uint64_t>(*iNew - *iOld);
|
2018-06-02 22:56:08 +02:00
|
|
|
stream().seekp(static_cast<iostream::off_type>(tfhdAtom->dataOffset()) + 8);
|
|
|
|
writer().writeUInt64BE(off);
|
|
|
|
break;
|
2015-04-22 19:22:01 +02:00
|
|
|
}
|
|
|
|
}
|
2018-03-07 01:17:50 +01:00
|
|
|
switch (tfhdAtomCount) {
|
2015-04-22 19:22:01 +02:00
|
|
|
case 0:
|
2018-03-05 17:49:29 +01:00
|
|
|
diag.emplace_back(DiagLevel::Warning, "traf atom doesn't contain mandatory tfhd atom.", context);
|
2015-04-22 19:22:01 +02:00
|
|
|
break;
|
|
|
|
case 1:
|
|
|
|
break;
|
|
|
|
default:
|
2018-03-07 01:17:50 +01:00
|
|
|
diag.emplace_back(
|
|
|
|
DiagLevel::Warning, "traf atom stores multiple tfhd atoms but it should only contain exactly one tfhd atom.", context);
|
2015-04-22 19:22:01 +02:00
|
|
|
}
|
|
|
|
}
|
2018-03-07 01:17:50 +01:00
|
|
|
} catch (const Failure &) {
|
2019-12-30 22:54:11 +01:00
|
|
|
diag.emplace_back(DiagLevel::Critical, "Unable to parse children of top-level atom moof.", context);
|
2015-04-22 19:22:01 +02:00
|
|
|
}
|
|
|
|
}
|
2018-03-07 01:17:50 +01:00
|
|
|
} catch (const Failure &) {
|
2018-03-05 17:49:29 +01:00
|
|
|
diag.emplace_back(DiagLevel::Critical, "Unable to parse top-level atom moof.", context);
|
2015-04-22 19:22:01 +02:00
|
|
|
}
|
|
|
|
// update each track
|
2018-03-07 01:17:50 +01:00
|
|
|
for (auto &track : tracks()) {
|
|
|
|
if (!track->isHeaderValid()) {
|
2015-04-22 19:22:01 +02:00
|
|
|
try {
|
2018-03-05 17:49:29 +01:00
|
|
|
track->parseHeader(diag);
|
2018-03-07 01:17:50 +01:00
|
|
|
} catch (const Failure &) {
|
|
|
|
diag.emplace_back(DiagLevel::Warning,
|
|
|
|
"The chunk offsets of track " % track->name() + " couldn't be updated because the track seems to be invalid..", context);
|
2015-11-07 15:23:36 +01:00
|
|
|
throw;
|
2015-04-22 19:22:01 +02:00
|
|
|
}
|
|
|
|
}
|
2018-03-07 01:17:50 +01:00
|
|
|
if (track->isHeaderValid()) {
|
2015-04-22 19:22:01 +02:00
|
|
|
try {
|
|
|
|
track->updateChunkOffsets(oldMdatOffsets, newMdatOffsets);
|
2018-03-07 01:17:50 +01:00
|
|
|
} catch (const Failure &) {
|
2018-03-05 17:49:29 +01:00
|
|
|
diag.emplace_back(DiagLevel::Warning, "The chunk offsets of track " % track->name() + " couldn't be updated.", context);
|
2015-11-07 15:23:36 +01:00
|
|
|
throw;
|
2015-04-22 19:22:01 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-03-07 01:17:50 +01:00
|
|
|
} // namespace TagParser
|