+ if (! -d $dir) {
+ mkdir ($dir) || error ("mkdir $dir: $!");
+ print STDERR "$progname: mkdir $dir/\n" if ($verbose);
+ }
+
+  # Use the MD5 hash of the feed URL as the name of its cache subdirectory.
+ $dir .= '/' . md5_file ($url);
+
+ if (! -d $dir) {
+ mkdir ($dir) || error ("mkdir $dir: $!");
+ print STDERR "$progname: mkdir $dir/ for $url\n" if ($verbose);
+ }
+
+ # At this point, we have the directory corresponding to this URL.
+ # Now check to see if the files in it are up to date, and download
+ # them if not.
+
+ my $stamp = '.timestamp';
+ my $lock = "$dir/$stamp";
+
+ print STDERR "$progname: awaiting lock: $lock\n"
+ if ($verbose > 1);
+
+ my $mtime = ((stat($lock))[9]) || 0;
+
+ my $lock_fd;
+ open ($lock_fd, '+>>', $lock) || error ("unable to write $lock: $!");
+ flock ($lock_fd, LOCK_EX) || error ("unable to lock $lock: $!");
+ seek ($lock_fd, 0, 0) || error ("unable to rewind $lock: $!");
+
+ my $poll_p = ($mtime + $feed_max_age < time);
+
+ $poll_p = 1 unless ($cache_p); # poll again now with --no-cache cmd line arg.
+
+  # Even if the cache is young, make sure it contains at least one
+  # non-hidden file, and re-poll the feed if it does not.
+  #
+ if (! $poll_p) {
+ my $count = 0;
+ opendir (my $dirh, $dir) || error ("$dir: $!");
+ foreach my $f (readdir ($dirh)) {
+ next if ($f =~ m/^\./s);
+ $count++;
+ last;
+ }
+ closedir $dirh;
+
+ if ($count <= 0) {
+ print STDERR "$progname: no files in cache of $url\n" if ($verbose);
+ $poll_p = 1;
+ }
+ }
+
+ if ($poll_p) {
+
+ print STDERR "$progname: loading $url\n" if ($verbose);
+
+ my %files;
+ opendir (my $dirh, $dir) || error ("$dir: $!");
+ foreach my $f (readdir ($dirh)) {
+ next if ($f eq '.' || $f eq '..');
+ $files{$f} = 0; # 0 means "file exists, should be deleted"
+ }
+ closedir $dirh;
+
+ $files{$stamp} = 1;
+
+ # Download each image currently in the feed.
+ #
+ my $count = 0;
+ my @urls = parse_feed ($url);
+ foreach my $p (@urls) {
+ my ($furl, $id) = @$p;
+ my $f = download_image ($furl, $id, $dir);
+ next unless $f;
+ $files{$f} = 1; # Got it, don't delete
+ $count++;
+ }
+
+ print STDERR "$progname: empty feed: $url\n" if ($count <= 0);
+
+ # Now delete any files that are no longer in the feed.
+ # But if there was nothing in the feed (network failure?)
+ # then don't blow away the old files.
+ #
+ my $kept = 0;
+ foreach my $f (keys(%files)) {
+ if ($count <= 0) {
+ $kept++;
+ } elsif ($files{$f}) {
+ $kept++;
+ } else {
+ if (unlink ("$dir/$f")) {
+ print STDERR "$progname: rm $dir/$f\n" if ($verbose > 1);
+ } else {
+ print STDERR "$progname: rm $dir/$f: $!\n"; # don't bail
+ }
+ }
+ }
+
+    # Both feed and cache are empty: only the .timestamp entry was kept.
+ error ("empty feed: $url") if ($kept <= 1);
+
+ $mtime = time(); # update the timestamp
+
+ } else {
+
+ # Not yet time to re-check the URL.
+ print STDERR "$progname: using cache: $url\n" if ($verbose);
+
+ }
+
+ # Unlock and update the write date on the .timestamp file.
+ #
+ truncate ($lock_fd, 0) || error ("unable to truncate $lock: $!");
+ seek ($lock_fd, 0, 0) || error ("unable to rewind $lock: $!");
+ utime ($mtime, $mtime, $lock_fd) || error ("unable to touch $lock: $!");
+ flock ($lock_fd, LOCK_UN) || error ("unable to unlock $lock: $!");
+ close ($lock_fd);
+ $lock_fd = undef;
+ print STDERR "$progname: unlocked $lock\n" if ($verbose > 1);
+
+ # Don't bother using the imageDirectory cache. We know that this directory
+ # is flat, and we can assume that an RSS feed doesn't contain 100,000 images
+ # like ~/Pictures/ might.
+ #
+ $cache_p = 0;
+
+ # Return the URL and directory name of the files of that URL's local cache.
+ #
+ return ($url, $dir);
+}
+
+
+sub find_random_file($) {
+ my ($dir) = @_;
+
+ if ($use_spotlight_p == -1) {
+ $use_spotlight_p = 0;
+ if (-x '/usr/bin/mdfind') {
+ $use_spotlight_p = 1;
+ }
+ }
+
+ my $url;
+ ($url, $dir) = mirror_feed ($dir);
+
+ if ($url) {
+ $use_spotlight_p = 0;
+ print STDERR "$progname: $dir is cache for $url\n" if ($verbose > 1);
+ }
+
+ @all_files = read_cache ($dir);
+
+ if ($#all_files >= 0) {
+ # got it from the cache...
+
+ } elsif ($use_spotlight_p) {
+ print STDERR "$progname: spotlighting $dir...\n" if ($verbose);
+ spotlight_all_files ($dir);
+ print STDERR "$progname: found " . ($#all_files+1) .
+ " file" . ($#all_files == 0 ? "" : "s") .
+ " via Spotlight\n"
+ if ($verbose);
+ } else {
+ print STDERR "$progname: recursively reading $dir...\n" if ($verbose);
+ find_all_files ($dir);
+ print STDERR "$progname: " .
+ "f=" . ($#all_files+1) . "; " .
+ "d=$dir_count; " .
+ "s=$stat_count; " .
+ "skip=${skip_count_unstat}+$skip_count_stat=" .
+ ($skip_count_unstat + $skip_count_stat) .
+ ".\n"