#! /usr/bin/env nix-shell
#! nix-shell -i perl -p perl perlPackages.NetAmazonS3 perlPackages.FileSlurp nixUnstable

# This command uploads tarballs to tarballs.nixos.org, the
# content-addressed cache used by fetchurl as a fallback for when
# upstream tarballs disappear or change. Usage:
#
# 1) To upload a single file:
#
#    $ copy-tarballs.pl --file /path/to/tarball.tar.gz
#
# 2) To upload all files obtained via calls to fetchurl in a Nix derivation:
#
#    $ copy-tarballs.pl --expr '(import <nixpkgs> {}).hello'
|
use strict;
use warnings;

# File handling helpers.
use File::Basename;
use File::Path;
use File::Slurp;

# Serialization, S3 access, and Nix store hashing.
use JSON;
use Net::Amazon::S3;
use Nix::Store;
|
|
|
|
|
2015-12-10 15:53:48 +01:00
|
|
|
# S3 setup.  Credentials must come from the environment; fail early
# with a clear message instead of a bare "Died" when they are missing.
my $aws_access_key_id = $ENV{'AWS_ACCESS_KEY_ID'} or die "AWS_ACCESS_KEY_ID is not set\n";
my $aws_secret_access_key = $ENV{'AWS_SECRET_ACCESS_KEY'} or die "AWS_SECRET_ACCESS_KEY is not set\n";

my $s3 = Net::Amazon::S3->new(
    { aws_access_key_id     => $aws_access_key_id,
      aws_secret_access_key => $aws_secret_access_key,
      retry                 => 1,  # retry transient S3 failures
    });

my $bucket = $s3->bucket("nixpkgs-tarballs") or die "cannot open S3 bucket 'nixpkgs-tarballs'\n";

# Local cache of keys known to exist in the bucket, persisted across
# runs (see the END block) to avoid one HEAD request per key.  A
# missing cache file is fine: err_mode => 'quiet' makes read_file
# return an empty list instead of dying.
my $cacheFile = "/tmp/copy-tarballs-cache";
my %cache;
$cache{$_} = 1 foreach read_file($cacheFile, err_mode => 'quiet', chomp => 1);
|
|
|
|
|
|
|
|
# Persist the known-key cache on exit so later runs can skip
# existence checks.  Note: the canonical special-block syntax per
# perlmod is `END BLOCK` — the original `END() {` form is
# non-standard and fragile.
END {
    write_file($cacheFile, map { "$_\n" } keys %cache);
}
|
|
|
|
|
2015-12-10 15:53:48 +01:00
|
|
|
# Return true if a tarball with the given hash (of the given type) is
# already present in the S3 bucket.  Consults — and updates — the
# local %cache so each key costs at most one round trip per run.
sub alreadyMirrored {
    my ($algo, $hash) = @_;
    my $key = $algo . "/" . $hash;
    if (defined $cache{$key}) {
        return 1;
    }
    my $exists = defined $bucket->get_key($key);
    if ($exists) {
        $cache{$key} = 1;
    }
    return $exists;
}
|
2014-02-19 13:01:33 +01:00
|
|
|
|
2015-12-10 15:53:48 +01:00
|
|
|
# Upload the file $fn to the bucket under sha512/<hash-in-base-16>,
# recording $name (typically the original URL or basename) in the
# 'x-amz-meta-original-name' header, then create redirect objects for
# every other hash encoding fetchurl might look up.
sub uploadFile {
    my ($fn, $name) = @_;

    # Compute all hash encodings under which the file can be requested.
    my $md5_16 = hashFile("md5", 0, $fn) or die;
    my $sha1_16 = hashFile("sha1", 0, $fn) or die;
    my $sha256_32 = hashFile("sha256", 1, $fn) or die;
    my $sha256_16 = hashFile("sha256", 0, $fn) or die;
    my $sha512_32 = hashFile("sha512", 1, $fn) or die;
    my $sha512_16 = hashFile("sha512", 0, $fn) or die;

    my $mainKey = "sha512/$sha512_16";

    # Upload the file itself FIRST, so the redirects created below can
    # never point at a key that does not exist yet (the original
    # ordering could leave dangling redirects if the upload died).
    print STDERR "uploading $fn to $mainKey...\n";
    $bucket->add_key_filename($mainKey, $fn, { 'x-amz-meta-original-name' => $name })
        or die "failed to upload $fn to $mainKey\n";
    $cache{$mainKey} = 1;

    # Create redirects from the other hash types.  ($key renamed from
    # $name to avoid shadowing uploadFile's $name parameter.)
    sub redirect {
        my ($key, $dest) = @_;
        #print STDERR "linking $key to $dest...\n";
        $bucket->add_key($key, "", { 'x-amz-website-redirect-location' => "/" . $dest })
            or die "failed to create redirect from $key to $dest\n";
        $cache{$key} = 1;
    }
    redirect "md5/$md5_16", $mainKey;
    redirect "sha1/$sha1_16", $mainKey;
    redirect "sha256/$sha256_32", $mainKey;
    redirect "sha256/$sha256_16", $mainKey;
    redirect "sha512/$sha512_32", $mainKey;
}
|
2014-02-19 13:01:33 +01:00
|
|
|
|
2015-12-15 11:10:59 +01:00
|
|
|
# Default to "" so an invocation with no arguments falls through to
# the usage message instead of warning about an uninitialized $op.
my $op = shift(@ARGV) // "";

if ($op eq "--file") {
    # Upload each named file, continuing past per-file failures but
    # exiting non-zero if any upload failed.
    my $res = 0;
    foreach my $fn (@ARGV) {
        eval {
            if (alreadyMirrored("sha512", hashFile("sha512", 0, $fn))) {
                print STDERR "$fn is already mirrored\n";
            } else {
                uploadFile($fn, basename $fn);
            }
        };
        if ($@) {
            warn "$@\n";
            $res = 1;
        }
    }
    exit $res;
}

elsif ($op eq "--expr") {

    # Evaluate find-tarballs.nix to enumerate every fetchurl call
    # reachable from the given expression.
    my $expr = $ARGV[0] // die "$0: --expr requires a Nix expression\n";
    open(my $json_fh, "-|", "nix-instantiate", "--eval", "--json", "--strict",
         "<nixpkgs/maintainers/scripts/find-tarballs.nix>",
         "--arg", "expr", $expr)
        or die "$0: cannot run nix-instantiate: $!\n";
    # Slurp the entire output (not just the first line); closing a
    # pipe handle reaps the child and leaves its status in $?.
    my $stdout = do { local $/; <$json_fh> };
    close $json_fh;
    die "$0: evaluation failed\n" if $?;

    my $fetches = decode_json($stdout);

    print STDERR "evaluation returned ", scalar(@{$fetches}), " tarballs\n";

    # Check every fetchurl call discovered by find-tarballs.nix.
    my $mirrored = 0;
    my $have = 0;
    foreach my $fetch (@{$fetches}) {
        my $url = $fetch->{url};
        my $algo = $fetch->{type};
        my $hash = $fetch->{hash};

        # With DEBUG set, only list what would be considered.
        if (defined $ENV{DEBUG}) {
            print "$url $algo $hash\n";
            next;
        }

        if ($url !~ /^http:/ && $url !~ /^https:/ && $url !~ /^ftp:/ && $url !~ /^mirror:/) {
            print STDERR "skipping $url (unsupported scheme)\n";
            next;
        }

        if (alreadyMirrored($algo, $hash)) {
            $have++;
            next;
        }

        print STDERR "mirroring $url...\n";

        next if $ENV{DRY_RUN};

        # Download the file using nix-prefetch-url.  QUIET suppresses
        # progress output; PRINT_PATH makes it print the store path on
        # a second line after the hash.
        $ENV{QUIET} = 1;
        $ENV{PRINT_PATH} = 1;
        my $fh;
        my $pid = open($fh, "-|", "nix-prefetch-url", "--type", $algo, $url, $hash) or die;
        waitpid($pid, 0) or die;
        if ($? != 0) {
            print STDERR "failed to fetch $url: $?\n";
            next;
        }
        <$fh>; my $storePath = <$fh>; close $fh;
        # Guard against malformed output so chomp never warns on undef.
        if (!defined $storePath) {
            print STDERR "failed to fetch $url: no store path printed\n";
            next;
        }
        chomp $storePath;

        uploadFile($storePath, $url);
        $mirrored++;
    }

    print STDERR "mirrored $mirrored files, already have $have files\n";
}

else {
    die "Syntax: $0 --file FILENAMES... | --expr EXPR\n";
}
|