When you have multiple build roots sharing the download cache,
simultaneous downloads of the same file can result in download.pl
failing with:

    Could not generate file hash

Use File::Temp for the download and hash output files, and then move the
download into place after the file hash is verified.

Signed-off-by: Dustin Lundquist <dustin@null-ptr.net>
---
 scripts/download.pl | 41 ++++++++++++++++-------------------------
 1 file changed, 16 insertions(+), 25 deletions(-)

diff --git a/scripts/download.pl b/scripts/download.pl
--- a/scripts/download.pl
+++ b/scripts/download.pl
@@ -1,5 +1,5 @@
#!/usr/bin/env perl
-#
+#
# Copyright (C) 2006 OpenWrt.org
# Copyright (C) 2016 LEDE project
#
@@ -11,6 +11,7 @@ use strict;
use warnings;
use File::Basename;
use File::Copy;
+use File::Temp;
use Text::ParseWords;
@ARGV > 2 or die "Syntax: $0 <target dir> <filename> <hash> <url filename> [<mirror> ...]\n";
@@ -149,10 +150,12 @@ sub download
$mirror =~ s!/$!!;
+ my $hash_tmp = File::Temp->new();
+ my $dl_tmp = File::Temp->new();
+
if ($mirror =~ s!^file://!!) {
if (! -d "$mirror") {
print STDERR "Wrong local cache directory -$mirror-.\n";
- cleanup();
return;
}
@@ -183,10 +186,10 @@ sub download
}
print("Copying $filename from $link\n");
- copy($link, "$target/$filename.dl");
+ copy($link, $dl_tmp);
$hash_cmd and do {
- if (system("cat '$target/$filename.dl' | $hash_cmd > '$target/$filename.hash'")) {
+ if (system("cat '$dl_tmp' | $hash_cmd > '$hash_tmp'")) {
print("Failed to generate hash for $filename\n");
return;
}
@@ -196,46 +199,35 @@ sub download
print STDERR "+ ".join(" ",@cmd)."\n";
open(FETCH_FD, '-|', @cmd) or die "Cannot launch aria2c, curl or wget.\n";
$hash_cmd and do {
- open MD5SUM, "| $hash_cmd > '$target/$filename.hash'" or die "Cannot launch $hash_cmd.\n";
+ open MD5SUM, "| $hash_cmd > '$hash_tmp'" or die "Cannot launch $hash_cmd.\n";
};
- open OUTPUT, "> $target/$filename.dl" or die "Cannot create file $target/$filename.dl: $!\n";
my $buffer;
while (read FETCH_FD, $buffer, 1048576) {
$hash_cmd and print MD5SUM $buffer;
- print OUTPUT $buffer;
+ print $dl_tmp $buffer;
}
$hash_cmd and close MD5SUM;
close FETCH_FD;
- close OUTPUT;
if ($? >> 8) {
print STDERR "Download failed.\n";
- cleanup();
return;
}
}
$hash_cmd and do {
- my $sum = `cat "$target/$filename.hash"`;
+ my $sum = `cat "$hash_tmp"`;
$sum =~ /^(\w+)\s*/ or die "Could not generate file hash\n";
$sum = $1;
if ($sum ne $file_hash) {
print STDERR "Hash of the downloaded file does not match (file: $sum, requested: $file_hash) - deleting download.\n";
- cleanup();
return;
}
};
- unlink "$target/$filename";
- system("mv", "$target/$filename.dl", "$target/$filename");
- cleanup();
-}
-
-sub cleanup
-{
- unlink "$target/$filename.dl";
- unlink "$target/$filename.hash";
+ $dl_tmp->flush();
+ system("mv", "-f", "$dl_tmp", "$target/$filename");
}
@mirrors = localmirrors();
@@ -332,19 +324,19 @@ push @mirrors, 'https://mirror2.openwrt.org/sources';
if (-f "$target/$filename") {
$hash_cmd and do {
- if (system("cat '$target/$filename' | $hash_cmd > '$target/$filename.hash'")) {
+ my $hash_tmp = File::Temp->new();
+
+ if (system("cat '$target/$filename' | $hash_cmd > '$hash_tmp'")) {
die "Failed to generate hash for $filename\n";
}
- my $sum = `cat "$target/$filename.hash"`;
+ my $sum = `cat "$hash_tmp"`;
$sum =~ /^(\w+)\s*/ or die "Could not generate file hash\n";
$sum = $1;
- cleanup();
exit 0 if $sum eq $file_hash;
die "Hash of the local file $filename does not match (file: $sum, requested: $file_hash) - deleting download.\n";
- unlink "$target/$filename";
};
}
@@ -360,4 +352,3 @@ while (!-f "$target/$filename") {
}
}
-$SIG{INT} = \&cleanup;
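For anyone reusing the approach outside download.pl, a minimal sketch of the
pattern (write into a File::Temp file, flush it, hash the file on disk, and
move it into place only once the hash checks out) might look as follows. The
names are invented for illustration (fetch_verified, $dest, $want_hash), the
transfer loop is faked with a literal payload, and hashing is pinned to
SHA-256 via the core Digest::SHA module rather than download.pl's
configurable $hash_cmd:

#!/usr/bin/env perl
# Sketch of the temp-file-then-rename pattern; not download.pl itself.
use strict;
use warnings;
use File::Basename;
use File::Copy;
use File::Temp;
use Digest::SHA;

sub fetch_verified {
	my ($dest, $want_hash) = @_;

	# Temp file in the destination directory: the final move then stays
	# on one filesystem, and UNLINK => 1 (the default) removes the file
	# automatically on any early return.
	my $tmp = File::Temp->new(DIR => dirname($dest));

	# Stand-in for the real transfer loop download.pl runs over FETCH_FD;
	# a real caller would stream the downloaded bytes into $tmp here.
	print $tmp "downloaded payload\n";
	$tmp->flush();    # force buffered data to disk before hashing

	# "$tmp" stringifies to the temporary file's path.
	my $sum = Digest::SHA->new(256)->addfile("$tmp")->hexdigest;
	if ($sum ne $want_hash) {
		warn "hash mismatch (file: $sum, requested: $want_hash)\n";
		return 0;    # temp file is unlinked when $tmp goes out of scope
	}

	chmod 0644, "$tmp";    # File::Temp creates files mode 0600
	move("$tmp", $dest) or die "move: $!\n";
	return 1;
}

my $expected = Digest::SHA->new(256)->add("downloaded payload\n")->hexdigest;
fetch_verified("/tmp/fetch-demo.bin", $expected) or die "verification failed\n";

Two details matter in a shared dl directory: File::Temp creates its files
with mode 0600, so permissions need loosening before the file is published,
and creating the temporary file in the destination directory (instead of
File::Temp's default temp directory) keeps the final move on a single
filesystem, where the rename is atomic and other build roots can never
observe a partially written file.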