mirror of
https://gitlab.com/pholy/OSCAR-code.git
synced 2025-04-06 11:10:44 +00:00
Check duplicate PRS1 chunks on import to confirm that they are identical, warn if not.
This commit is contained in:
parent
6ee83576b2
commit
42948b2d3c
@ -820,6 +820,28 @@ Machine* PRS1Loader::CreateMachineFromProperties(QString propertyfile)
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// Reduce a full file path to its last three components (e.g. "P1162474/P0/0000CBE8.002")
// so that log/warning messages about duplicate chunks stay short and readable.
//
// @param inpath  absolute or relative path to a PRS1 data file
// @return the trailing (up to) three path components joined with the native separator
static QString relativePath(const QString & inpath)
{
    QStringList pathlist = QDir::toNativeSeparators(inpath).split(QDir::separator(), QString::SkipEmptyParts);
    // Clamp the start index: for paths with fewer than 3 components,
    // size()-3 would be negative and must not be handed to QList::mid().
    QString relative = pathlist.mid(qMax(0, pathlist.size()-3)).join(QDir::separator());
    return relative;
}
|
||||||
|
|
||||||
|
static bool chunksIdentical(const PRS1DataChunk* a, const PRS1DataChunk* b)
|
||||||
|
{
|
||||||
|
return (a->timestamp == b->timestamp && a->storedCrc == b->storedCrc);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build a human-readable one-line description of how chunk a relates to
// chunk b (same session ID), for use in qDebug/qWarning messages when a
// duplicate chunk is skipped during import.
static QString chunkComparison(const PRS1DataChunk* a, const PRS1DataChunk* b)
{
    // Describe the relationship up front so the format call below stays flat.
    const QString relation = chunksIdentical(a, b) ? "is identical to" : "differs from";

    return QString("Session %1 in %2 @ %3 %4 %5 @ %6, skipping")
        .arg(a->sessionid)
        .arg(relativePath(a->m_path))
        .arg(a->m_filepos)
        .arg(relation)
        .arg(relativePath(b->m_path))
        .arg(b->m_filepos);
}
|
||||||
|
|
||||||
void PRS1Loader::ScanFiles(const QStringList & paths, int sessionid_base, Machine * m)
|
void PRS1Loader::ScanFiles(const QStringList & paths, int sessionid_base, Machine * m)
|
||||||
{
|
{
|
||||||
SessionID sid;
|
SessionID sid;
|
||||||
@ -966,7 +988,12 @@ void PRS1Loader::ScanFiles(const QStringList & paths, int sessionid_base, Machin
|
|||||||
switch (ext) {
|
switch (ext) {
|
||||||
case 0:
|
case 0:
|
||||||
if (task->compliance) {
|
if (task->compliance) {
|
||||||
qWarning() << path << "duplicate compliance?";
|
if (chunksIdentical(chunk, task->summary)) {
|
||||||
|
// Never seen identical compliance chunks, so keep logging this for now.
|
||||||
|
qDebug() << chunkComparison(chunk, task->summary);
|
||||||
|
} else {
|
||||||
|
qWarning() << chunkComparison(chunk, task->summary);
|
||||||
|
}
|
||||||
delete chunk;
|
delete chunk;
|
||||||
continue; // (skipping to avoid duplicates)
|
continue; // (skipping to avoid duplicates)
|
||||||
}
|
}
|
||||||
@ -974,7 +1001,18 @@ void PRS1Loader::ScanFiles(const QStringList & paths, int sessionid_base, Machin
|
|||||||
break;
|
break;
|
||||||
case 1:
|
case 1:
|
||||||
if (task->summary) {
|
if (task->summary) {
|
||||||
qWarning() << path << "duplicate summary?";
|
if (chunksIdentical(chunk, task->summary)) {
|
||||||
|
// This seems to be benign. It happens most often when a single file contains
|
||||||
|
// a bunch of chunks and subsequent files each contain a single chunk that was
|
||||||
|
// already covered by the first file. It also sometimes happens with entirely
|
||||||
|
// duplicate files between e.g. a P1 and P0 directory.
|
||||||
|
//
|
||||||
|
// It's common enough that we don't emit a message about it by default.
|
||||||
|
//qDebug() << chunkComparison(chunk, task->summary);
|
||||||
|
} else {
|
||||||
|
// Warn about any non-identical duplicate session IDs.
|
||||||
|
qWarning() << chunkComparison(chunk, task->summary);
|
||||||
|
}
|
||||||
delete chunk;
|
delete chunk;
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
@ -982,10 +1020,20 @@ void PRS1Loader::ScanFiles(const QStringList & paths, int sessionid_base, Machin
|
|||||||
break;
|
break;
|
||||||
case 2:
|
case 2:
|
||||||
if (task->event) {
|
if (task->event) {
|
||||||
|
if (chunksIdentical(chunk, task->event)) {
|
||||||
|
// See comment above regarding identical summary chunks.
|
||||||
|
//qDebug() << chunkComparison(chunk, task->event);
|
||||||
|
} else {
|
||||||
// TODO: This happens on F3V3 events, which are formatted as waveforms,
|
// TODO: This happens on F3V3 events, which are formatted as waveforms,
|
||||||
// with one chunk per mask-on slice, and thus multiple chunks per session.
|
// with one chunk per mask-on slice, and thus multiple chunks per session.
|
||||||
// We need to add support for this scenario.
|
// We need to add support for this scenario instead of just dropping
|
||||||
qWarning() << path << "duplicate events?";
|
// the additional chunks.
|
||||||
|
|
||||||
|
// Warn about any other non-identical duplicate session IDs.
|
||||||
|
if (!(chunk->family == 3 && chunk->familyVersion == 3)) {
|
||||||
|
qWarning() << chunkComparison(chunk, task->event);
|
||||||
|
}
|
||||||
|
}
|
||||||
delete chunk;
|
delete chunk;
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
@ -336,7 +336,7 @@ void parseAndEmitChunkYaml(const QString & path)
|
|||||||
|
|
||||||
void PRS1Tests::testChunksToYaml()
{
    // NOTE(review): the chunk-to-YAML regeneration pass is currently disabled
    // by this commented-out call, leaving the test a no-op. Presumably it is
    // only meant to run when regenerating test fixtures — confirm whether it
    // should be re-enabled, guarded by a flag, or removed outright rather
    // than left silently inert.
    //iterateTestCards(TESTDATA_PATH "prs1/input/", parseAndEmitChunkYaml);
}
|
||||||
|
|
||||||
|
|
||||||
|
Loading…
Reference in New Issue
Block a user