When resuming an HTTP download, we failed to verify that the range returned by the server matched what we requested, and blindly appended what we received to what we already had. This could go two ways: if the offset the server delivered was higher than expected, data would be missing from the middle of the local file, while if it was lower than expected, the local file would contain duplicate data. Furthermore, if the transfer was interrupted again, each subsequent attempt would compound the error.

Fix the first problem by restarting the transfer from scratch if there is a gap, and the second by explicitly seeking to the correct location in the local file so as to overwrite any duplicated data.

PR:		bin/117277
Approved by:	re (kib)
MFC after:	3 weeks
commit a17e51c108
parent ab7e59168c
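For readers who do not have fetch.c in front of them, the following is a minimal, hedged sketch of the decision described above, assuming the simplest possible model of a resumed transfer. The function name resume_write_offset and the variables local_size and server_offset are hypothetical and do not appear in the real code; the actual change is in the diff below.

#include <stdio.h>

/*
 * Illustrative sketch only: we asked the server to resume at local_size
 * bytes; given the offset the server actually returned, decide where new
 * data should be written.  Returns the write position, or -1 if the
 * transfer has to be restarted from scratch.
 */
static long
resume_write_offset(long local_size, long server_offset)
{

	if (server_offset > local_size)
		/* gap: appending would leave a hole in the file */
		return (-1);
	/*
	 * At or below what we already have: write at server_offset so any
	 * overlapping bytes are overwritten instead of duplicated.
	 */
	return (server_offset);
}

int
main(void)
{

	printf("%ld\n", resume_write_offset(1000, 1000));	/* 1000: normal resume */
	printf("%ld\n", resume_write_offset(1000, 400));	/* 400: seek back and overwrite */
	printf("%ld\n", resume_write_offset(1000, 1500));	/* -1: gap, restart from scratch */
	return (0);
}

In the real code the two cases map to the two hunks below: a gap closes the output file so the transfer is restarted ("picked up again later"), and the overlap case is handled by seeking to url->offset before any data is written.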
@@ -522,6 +522,12 @@ fetch(char *URL, const char *path)
				    "does not match remote", path);
				goto failure_keep;
			}
		} else if (url->offset > sb.st_size) {
			/* gap between what we asked for and what we got */
			warnx("%s: gap in resume mode", URL);
			fclose(of);
			of = NULL;
			/* picked up again later */
		} else if (us.size != -1) {
			if (us.size == sb.st_size)
				/* nothing to do */
@@ -551,6 +557,14 @@ fetch(char *URL, const char *path)
				fclose(of);
				of = NULL;
				sb = nsb;
				/* picked up again later */
			}
			/* seek to where we left off */
			if (of != NULL && fseek(of, url->offset, SEEK_SET) != 0) {
				warn("%s: fseek()", path);
				fclose(of);
				of = NULL;
				/* picked up again later */
			}
		}
	} else if (m_flag && sb.st_size != -1) {