fs: fix duplicate files causing spurious copies

Before this fix, duplicate files (on Google Drive) caused the next file
to be spuriously copied. `rclone dedupe` worked around the problem.
This commit is contained in:
Nick Craig-Wood 2017-10-02 16:52:53 +01:00
parent b7521c0fe2
commit 00fe6d95da
2 changed files with 12 additions and 20 deletions

View File

@ -268,20 +268,22 @@ func matchListings(srcListEntries, dstListEntries DirEntries, transforms []match
prev := srcList[iSrc-1].name
if srcName == prev {
Logf(src, "Duplicate %s found in source - ignoring", DirEntryType(src))
src = nil // ignore the src
iDst-- // ignore the src and retry the dst
continue
} else if srcName < prev {
Errorf(src, "Out of order listing in source")
src = nil // ignore the src
// this should never happen since we sort the listings
panic("Out of order listing in source")
}
}
if dst != nil && iDst > 0 {
prev := dstList[iDst-1].name
if dstName == prev {
Logf(dst, "Duplicate %s found in destination - ignoring", DirEntryType(dst))
dst = nil // ignore the dst
iSrc-- // ignore the dst and retry the src
continue
} else if dstName < prev {
Errorf(dst, "Out of order listing in destination")
dst = nil // ignore the dst
// this should never happen since we sort the listings
panic("Out of order listing in destination")
}
}
if src != nil && dst != nil {

View File

@ -101,11 +101,15 @@ func TestMatchListings(t *testing.T) {
{
what: "One duplicate",
input: DirEntries{
A, A,
a, a,
a, nil,
b, b,
},
matches: []matchPair{
{A, A},
{a, a},
{b, b},
},
},
{
@ -141,20 +145,6 @@ func TestMatchListings(t *testing.T) {
},
transforms: []matchTransformFn{strings.ToLower},
},
/*{
what: "Out of order",
input: DirEntries{
c, nil,
b, b,
a, nil,
},
srcOnly: DirEntries{
c,
},
dstOnly: DirEntries{
b,
},
},*/
} {
var srcList, dstList DirEntries
for i := 0; i < len(test.input); i += 2 {