2 # Copyright © 2001-2011 Jamie Zawinski <jwz@jwz.org>.
4 # Permission to use, copy, modify, distribute, and sell this software and its
5 # documentation for any purpose is hereby granted without fee, provided that
6 # the above copyright notice appear in all copies and that both that
7 # copyright notice and this permission notice appear in supporting
8 # documentation. No representations are made about the suitability of this
9 # software for any purpose. It is provided "as is" without express or
12 # This program chooses a random file from under the given directory, and
13 # prints its name. The file will be an image file whose dimensions are
14 # larger than a certain minimum size.
16 # If the directory is a URL, it is assumed to be an RSS or Atom feed.
17 # The images from that feed will be downloaded, cached, and selected from
18 # at random. The feed will be re-polled periodically, as needed.
20 # The various xscreensaver hacks that manipulate images ("jigsaw", etc.) get
21 # the image to manipulate by running the "xscreensaver-getimage" program.
23 # Under X11, the "xscreensaver-getimage" program invokes this script,
24 # depending on the value of the "chooseRandomImages" and "imageDirectory"
25 # settings in the ~/.xscreensaver file (or .../app-defaults/XScreenSaver).
26 # The screen savers invoke "xscreensaver-getimage" via utils/grabclient.c,
27 # which then invokes this script.
29 # Under Cocoa, this script lives inside the .saver bundle, and is invoked
30 # directly from utils/grabclient.c.
35 #use diagnostics; # Fails on some MacOS 10.5 systems
41 use Fcntl ':flock'; # import LOCK_* constants
43 use POSIX ':fcntl_h'; # S_ISDIR was here in Perl 5.6
44 import Fcntl ':mode' unless defined &S_ISUID; # but it is here in Perl 5.8
45 # but in Perl 5.10, both of these load, and cause errors!
46 # So we have to check for S_ISUID instead of S_ISDIR? WTF?
48 use bytes; # Larry can take Unicode and shove it up his ass sideways.
49 # Perl 5.8.0 causes us to start getting incomprehensible
50 # errors about UTF-8 all over the place without this.
52 use Digest::MD5 qw(md5_base64);
53 use LWP::Simple qw($ua);
# Derive the program's display name (basename of how we were invoked)
# and its version number (the digits-and-dots part of the RCS tag).
my $progname = $0;
$progname =~ s@.*/@@g;                 # strip leading path components

my $version = q{ $Revision: 1.30 $ };
$version =~ s/^[^0-9]+([0-9.]+).*$/$1/;   # e.g. "1.30"
# Whether to use MacOS X's Spotlight to generate the list of files.
# When set to -1, uses Spotlight if "mdfind" exists.
#
# (In my experience, this isn't actually any faster, and might not find
# everything if your Spotlight index is out of date, which happens often.)
#
my $use_spotlight_p = 0;

# Whether to cache the results of the last run.

# Regenerate the cache if it is older than this many seconds.
# (Used by read_cache() below to decide when the file list is stale.)
#
my $cache_max_age = 60 * 60 * 3; # 3 hours

# Re-poll RSS/Atom feeds when local copy is older than this many seconds.
# Defaults to the same staleness window as the file-list cache.
#
my $feed_max_age = $cache_max_age;
# This matches files that we are allowed to use as images (case-insensitive).
83 # Anything not matching this is ignored. This is so you can point your
84 # imageDirectory at directory trees that have things other than images in
85 # them, but it assumes that you gave your images sensible file extensions.
# File extensions accepted as images; compiled into $good_file_re, which
# is matched case-insensitively against candidate file names.
my @good_extensions = qw(jpg jpeg pjpeg pjpg png gif
                         tif tiff xbm xpm);
my $good_file_re = sprintf '\.(%s)$', join('|', @good_extensions);
91 # This matches file extensions that might occur in an image directory,
92 # and that are never used in the name of a subdirectory. This is an
93 # optimization that prevents us from having to stat() those files to
94 # tell whether they are directories or not. (It speeds things up a
95 # lot. Don't give your directories stupid names.)
# Extensions that never name a subdirectory; anything matching $nondir_re
# can be skipped without a stat() call (see the optimization note above).
my @nondir_extensions = qw(ai bmp bz2 cr2 crw db
    dmg eps gz hqx htm html icns ilbm mov
    nef pbm pdf pl ppm ps psd sea sh shtml
    tar tgz thb txt xcf xmp Z zip);
my $nondir_re = sprintf '\.(%s)$', join('|', @nondir_extensions);
# JPEG, GIF, and PNG files that are smaller than this are rejected:
# this is so that you can use an image directory that contains both big
# images and thumbnails, and have it only select the big versions.
# (Enforced by large_enough_p(), below.)
#
my $min_image_width = 255;
my $min_image_height = 255;
# Shared state for the directory scan.
my @all_files = (); # list of "good" files we've collected
my %seen_inodes;    # for breaking recursive symlink loops (keyed "dev:ino")

# For diagnostic messages:
my $dir_count = 1;         # number of directories seen
my $stat_count = 0;        # number of files/dirs stat'ed
my $skip_count_unstat = 0; # number of files skipped without stat'ing
my $skip_count_stat = 0;   # number of files skipped after stat
# Forward declaration: find_all_files() recurses into subdirectories.
sub find_all_files($);

# Walk a directory tree, appending every file that matches $good_file_re
# to the global @all_files.  Dot files, backup files (~ % #) and files
# matching $nondir_re are skipped cheaply; other names are stat()ed to
# decide whether they are subdirectories.  Updates the $skip_count_* /
# $stat_count diagnostics along the way.
sub find_all_files($) {

  print STDERR "$progname: + reading dir $dir/...\n" if ($verbose > 1);

  # Unreadable directory: log and give up on this branch.
  if (! opendir ($dd, $dir)) {
    print STDERR "$progname: couldn't open $dir: $!\n" if ($verbose);

  my @files = readdir ($dd);

  foreach my $file (@files) {
    next if ($file =~ m/^\./); # silently ignore dot files/dirs

    if ($file =~ m/[~%\#]$/) { # ignore backup files (and dirs...)
      $skip_count_unstat++;
      print STDERR "$progname: - skip file $file\n" if ($verbose > 1);

    # From here on, work with the full path.
    $file = "$dir/$file";

    if ($file =~ m/$good_file_re/io) {
      # Assume that files ending in .jpg exist and are not directories.
      push @all_files, $file;
      print STDERR "$progname: - found file $file\n" if ($verbose > 1);

    } elsif ($file =~ m/$nondir_re/io) {
      # Assume that files ending in .html are not directories.
      $skip_count_unstat++;
      print STDERR "$progname: -- skip file $file\n" if ($verbose > 1);

    # Now we need to stat the file to see if it's a subdirectory.
    # Note: we could use the trick of checking "nlinks" on the parent
    # directory to see if this directory contains any subdirectories,
    # but that would exclude any symlinks to directories.
    my @st = stat($file);
    my ($dev,$ino,$mode,$nlink,$uid,$gid,$rdev,$size,
        $atime,$mtime,$ctime,$blksize,$blocks) = @st;

    # NOTE(review): the following diagnostics are presumably guarded by a
    # stat()-failure check (readlink distinguishes a dangling symlink from
    # a plainly unreadable entry) -- confirm against the full source.
    my $ll = readlink $file;
    print STDERR "$progname: + dangling symlink: $file -> $ll\n";
    print STDERR "$progname: + unreadable: $file\n";

    next if ($seen_inodes{"$dev:$ino"}); # break symlink loops
    $seen_inodes{"$dev:$ino"} = 1;

    if (S_ISDIR($mode)) {
      # Subdirectory: this is where the recursive descent happens.
      print STDERR "$progname: + found dir $file\n" if ($verbose > 1);

      # Stat'ed but neither a good image nor a directory: skip it.
      print STDERR "$progname: + skip file $file\n" if ($verbose > 1);
# Populate the global @all_files by asking Spotlight (mdfind) for files
# under $dir whose names end in one of @good_extensions.  Alternative to
# find_all_files() on MacOS; only used when $use_spotlight_p is set.
sub spotlight_all_files($) {

  # "public.image" matches all (indexed) images, including Photoshop, etc.
#  push @terms, "kMDItemContentTypeTree == 'public.image'";
  foreach (@good_extensions) {

    # kMDItemFSName hits the file system every time: much worse than "find".
#    push @terms, "kMDItemFSName == '*.$_'";

    # kMDItemDisplayName matches against the name in the Spotlight index,
    # but won't find files that (for whatever reason) didn't get indexed.
    push @terms, "kMDItemDisplayName == '*.$_'";

  # Shell-quote the directory before interpolating it into the command.
  $dir =~ s@([^-_/a-z\d.,])@\\$1@gsi; # quote for sh
  my $cmd = "mdfind -onlyin $dir \"" . join (' || ', @terms) . "\"";

  print STDERR "$progname: executing: $cmd\n" if ($verbose > 1);
  # mdfind prints one path per line; that becomes our file list.
  @all_files = split (/[\r\n]+/, `$cmd`);
# If we're using cacheing, read the cache file and return its contents,
# if any. This also holds an exclusive lock on the cache file, which
# has the additional benefit that if two copies of this program are
# running at once, one will wait for the other, instead of both of
# them spanking the same file system at the same time.
#
# Cache file format: first line is the directory the cache was built for;
# each subsequent line is one file name relative to that directory.

my $cache_fd = undef;        # held open (and locked) between read and write
my $cache_file_name = undef; # chosen per-platform below
my $read_cache_p = 0;        # set when the cache was valid and used

  return () unless ($cache_p);

  # Pick a cache location: MacOS Caches dir, else ~/tmp, else a dot file.
  my $dd = "$ENV{HOME}/Library/Caches"; # MacOS location
    $cache_file_name = "$dd/org.jwz.xscreensaver.getimage.cache";
  } elsif (-d "$ENV{HOME}/tmp") {
    $cache_file_name = "$ENV{HOME}/tmp/.xscreensaver-getimage.cache";
    $cache_file_name = "$ENV{HOME}/.xscreensaver-getimage.cache";

  print STDERR "$progname: awaiting lock: $cache_file_name\n"

  # Open append-mode so the file is created if missing, then take an
  # exclusive lock and rewind so we can read it from the start.
  my $file = $cache_file_name;
  open ($cache_fd, '+>>', $file) || error ("unable to write $file: $!");
  flock ($cache_fd, LOCK_EX) || error ("unable to lock $file: $!");
  seek ($cache_fd, 0, 0) || error ("unable to rewind $file: $!");

  my $mtime = (stat($cache_fd))[9];

  # Stale cache: caller will regenerate the list.
  if ($mtime + $cache_max_age < time) {
    print STDERR "$progname: cache is too old\n" if ($verbose);

  # First line of the cache names the directory it was built for;
  # a mismatch means the cache belongs to some other imageDirectory.
  my $odir = <$cache_fd>;
  $odir =~ s/[\r\n]+$//s if defined ($odir);
  if (!defined ($odir) || ($dir ne $odir)) {
    print STDERR "$progname: cache is for $odir, not $dir\n"
      if ($verbose && $odir);

  # Remaining lines are file names relative to $odir.
  while (<$cache_fd>) {
    push @files, "$odir/$_";

  print STDERR "$progname: " . ($#files+1) . " files in cache\n"
  return unless ($cache_p);

  # If we read the cache, just close it without rewriting it.
  # If we didn't read it, then write it now.

  if (! $read_cache_p) {

    # Rewrite from scratch: empty the file and go back to the start.
    truncate ($cache_fd, 0) ||
      error ("unable to truncate $cache_file_name: $!");
    seek ($cache_fd, 0, 0) ||
      error ("unable to rewind $cache_file_name: $!");

    if ($#all_files >= 0) {
      # Directory name first, then one relative file name per line.
      print $cache_fd "$dir\n";
      foreach (@all_files) {
        my $f = $_; # stupid Perl. do this to avoid modifying @all_files!
        # NOTE(review): "\L" here looks like it should be "\E" (end of
        # quotemeta) -- confirm against the canonical source.
        $f =~ s@^\Q$dir\L/@@so || die; # remove $dir from front
        print $cache_fd "$f\n";

    print STDERR "$progname: cached " . ($#all_files+1) . " files\n"

  # Release the lock taken in read_cache().
  flock ($cache_fd, LOCK_UN) ||
    error ("unable to unlock $cache_file_name: $!");
# Returns a list of the image enclosures in the RSS or Atom feed.
# Elements of the list are references, [ "url", "guid" ].
# Fetches the feed over HTTP, splits it into <entry>/<item> chunks, and
# pulls an image URL plus a unique ID out of each chunk.

  $ua->agent ("$progname/$version");
  $ua->timeout (10); # bail sooner than the default of 3 minutes

  my $body = (LWP::Simple::get($url) || '');

  error ("not an RSS or Atom feed: $url")
    unless ($body =~ m@^<\?xml\s@si);

  # Chop it up into <entry> (Atom) and <item> (RSS) sections, using a
  # \001 byte as a split marker.
  $body =~ s@(<ENTRY|<ITEM)@\001$1@gsi;
  my @items = split(/\001/, $body);

  foreach my $item (@items) {

    # First look for <link rel="enclosure" href="...">
    $item =~ s!(<LINK[^<>]*>)!{
      my ($rel) = ($link =~ m/\bREL\s*=\s*[\"\']?([^<>\'\"]+)/si);
      my ($type) = ($link =~ m/\bTYPE\s*=\s*[\"\']?([^<>\'\"]+)/si);
      my ($href) = ($link =~ m/\bHREF\s*=\s*[\"\']([^<>\'\"]+)/si);

      if ($rel && lc($rel) eq 'enclosure') {
        $href = undef unless ($type =~ m@^image/@si); # omit videos
        $iurl = $href if ($href);

    # Then look for <media:content url="...">
    $item =~ s!(<MEDIA:CONTENT[^<>]*>)!{
      my ($href) = ($link =~ m/\bURL\s*=\s*[\"\']([^<>\'\"]+)/si);
      $iurl = $href if $href;

    # Then look for <description>... with an <img href="..."> inside.
    $item =~ s!(<description[^<>]*>.*?</description>)!{
      # NOTE(review): the five substitutions below are identity rewrites as
      # written; they look like HTML-entity decodes (&lt; &gt; &quot;
      # &apos; &amp;) whose entities were themselves decoded when this file
      # was transcribed -- confirm against the canonical source.
      $desc =~ s/</</gs;
      $desc =~ s/>/>/gs;
      $desc =~ s/"/\"/gs;
      $desc =~ s/'/\'/gs;
      $desc =~ s/&/&/gs;
      my ($href) = ($desc =~ m@<IMG[^<>]*\bSRC=[\"\']?([^\"\'<>]+)@si);
      $iurl = $href if ($href);

    # Could also do <content:encoded>, but the above probably covers all
    # of the real-world possibilities.

    # Find a unique ID for this image, to defeat image farms.
    # First look for <id>...</id>
    ($id) = ($item =~ m!<ID\b[^<>]*>\s*([^<>]+?)\s*</ID>!si) unless $id;

    # Then look for <guid isPermaLink=...> ... </guid>
    ($id) = ($item =~ m!<GUID\b[^<>]*>\s*([^<>]+?)\s*</GUID>!si) unless $id;

    # Then look for <link> ... </link>
    ($id) = ($item =~ m!<LINK\b[^<>]*>\s*([^<>]+?)\s*</LINK>!si) unless $id;

    # Last resort: the image URL itself is the ID.
    $id = $iurl unless $id;

    my @P = ($iurl, $id);

    } elsif ($iurl ne $o) {
      # Same ID seen with a different URL: warn, keep the first one.
      print STDERR "$progname: WARNING: dup ID \"$id\"" .
        " for \"$o\" and \"$iurl\"\n";
432 # Like md5_base64 but uses filename-safe characters.
# Given the URL of an image, download it into the given directory
# and return the file name.
# The local name is the MD5 of the feed's unique ID ($uid) plus the
# URL's extension, so re-runs find previously-downloaded images.
#
sub download_image($$$) {
  my ($url, $uid, $dir) = @_;

  my ($ext) = ($url =~ m@\.([a-z\d]+)$@si);
  my $file = md5_file ($uid);
  $file .= '.' . lc($ext) if $ext;

  # Don't bother doing If-Modified-Since to see if the URL has changed.
  # If we have already downloaded it, assume it's good.
  if (-f "$dir/$file") {
    print STDERR "$progname: exists: $dir/$file for $uid / $url\n"

  # Special-case kludge for Flickr:
  # Their RSS feeds sometimes include only the small versions of the images.
  # So if the URL ends in "s" (75x75), "t" (100x100) or "m" (240x240), then
  # munge it to be "b" (1024x1024).
  #
  $url =~ s@_[stm](\.[a-z]+)$@_b$1@si
    if ($url =~ m@^https?://[^/?#&]*?flickr\.com/@si);

  print STDERR "$progname: downloading: $dir/$file for $uid / $url\n"

  $ua->agent ("$progname/$version");
  my $status = LWP::Simple::mirror ($url, "$dir/$file");
  if (!LWP::Simple::is_success ($status)) {
    print STDERR "$progname: error $status: $url\n"; # keep going
  # Not a URL at all: it's a plain imageDirectory, so there is no feed
  # cache to manage.
  if ($url !~ m/^https?:/si) { # not a URL: local directory.
    return (undef, $url);

  # Pick a per-platform parent directory for feed caches.
  my $dir = "$ENV{HOME}/Library/Caches"; # MacOS location
    $dir = "$dir/org.jwz.xscreensaver.feeds";
  } elsif (-d "$ENV{HOME}/tmp") {
    $dir = "$ENV{HOME}/tmp/.xscreensaver-feeds";
    $dir = "$ENV{HOME}/.xscreensaver-feeds";

    mkdir ($dir) || error ("mkdir $dir: $!");
    print STDERR "$progname: mkdir $dir/\n" if ($verbose);

  # MD5 for directory name to use for cache of a feed URL.
  $dir .= '/' . md5_file ($url);

    mkdir ($dir) || error ("mkdir $dir: $!");
    print STDERR "$progname: mkdir $dir/ for $url\n" if ($verbose);

  # At this point, we have the directory corresponding to this URL.
  # Now check to see if the files in it are up to date, and download

  # The ".timestamp" file doubles as the lock file and the record of
  # when we last polled this feed.
  my $stamp = '.timestamp';
  my $lock = "$dir/$stamp";

  print STDERR "$progname: awaiting lock: $lock\n"

  my $mtime = ((stat($lock))[9]) || 0;

  open ($lock_fd, '+>>', $lock) || error ("unable to write $lock: $!");
  flock ($lock_fd, LOCK_EX) || error ("unable to lock $lock: $!");
  seek ($lock_fd, 0, 0) || error ("unable to rewind $lock: $!");

  my $poll_p = ($mtime + $feed_max_age < time);

  $poll_p = 1 unless ($cache_p); # poll again now with --no-cache cmd line arg.

  # Even if the cache is young, let's make sure there are at least
  # a few files in it, and re-check if not.
    opendir (my $dirh, $dir) || error ("$dir: $!");
    foreach my $f (readdir ($dirh)) {
      next if ($f =~ m/^\./s);

      print STDERR "$progname: no files in cache of $url\n" if ($verbose);

    print STDERR "$progname: loading $url\n" if ($verbose);

    # Remember every file currently in the cache directory; anything
    # still marked 0 after the download pass is no longer in the feed.
    opendir (my $dirh, $dir) || error ("$dir: $!");
    foreach my $f (readdir ($dirh)) {
      next if ($f eq '.' || $f eq '..');
      $files{$f} = 0; # 0 means "file exists, should be deleted"

    # Download each image currently in the feed.
    my @urls = parse_feed ($url);
    foreach my $p (@urls) {
      my ($furl, $id) = @$p;
      my $f = download_image ($furl, $id, $dir);
      $files{$f} = 1; # Got it, don't delete

    print STDERR "$progname: empty feed: $url\n" if ($count <= 0);

    # Now delete any files that are no longer in the feed.
    # But if there was nothing in the feed (network failure?)
    # then don't blow away the old files.
    foreach my $f (keys(%files)) {
      } elsif ($files{$f}) {
        if (unlink ("$dir/$f")) {
          print STDERR "$progname: rm $dir/$f\n" if ($verbose > 1);
          print STDERR "$progname: rm $dir/$f: $!\n"; # don't bail

    # Both feed and cache are empty. No files at all.
    error ("empty feed: $url") if ($kept <= 1);

    $mtime = time(); # update the timestamp

    # Not yet time to re-check the URL.
    print STDERR "$progname: using cache: $url\n" if ($verbose);

  # Unlock and update the write date on the .timestamp file.
  truncate ($lock_fd, 0) || error ("unable to truncate $lock: $!");
  seek ($lock_fd, 0, 0) || error ("unable to rewind $lock: $!");
  utime ($mtime, $mtime, $lock_fd) || error ("unable to touch $lock: $!");
  flock ($lock_fd, LOCK_UN) || error ("unable to unlock $lock: $!");

  print STDERR "$progname: unlocked $lock\n" if ($verbose > 1);

  # Don't bother using the imageDirectory cache. We know that this directory
  # is flat, and we can assume that an RSS feed doesn't contain 100,000 images
  # like ~/Pictures/ might.
# Return the URL and directory name of the files of that URL's local cache.

# Pick one sufficiently-large image at random from $dir (or from the local
# cache of $dir, if $dir is a feed URL) and return its name relative to
# $dir.  Tries up to $max_tries random candidates before giving up.
sub find_random_file($) {

  # -1 means "auto-detect": use Spotlight only if mdfind is installed.
  if ($use_spotlight_p == -1) {
    $use_spotlight_p = 0;
    if (-x '/usr/bin/mdfind') {
      $use_spotlight_p = 1;

    # Feed URL: swap $dir for the flat local mirror of that feed.
    ($url, $dir) = mirror_feed ($dir);

      # Feed caches are flat; Spotlight indexing is pointless for them.
      $use_spotlight_p = 0;
      print STDERR "$progname: $dir is cache for $url\n" if ($verbose > 1);

  @all_files = read_cache ($dir);

  if ($#all_files >= 0) {
    # got it from the cache...

  } elsif ($use_spotlight_p) {
    print STDERR "$progname: spotlighting $dir...\n" if ($verbose);
    spotlight_all_files ($dir);
    print STDERR "$progname: found " . ($#all_files+1) .
      " file" . ($#all_files == 0 ? "" : "s") .

    # Fallback: the recursive directory walk.
    print STDERR "$progname: recursively reading $dir...\n" if ($verbose);
    find_all_files ($dir);
    print STDERR "$progname: " .
      "f=" . ($#all_files+1) . "; " .
      "skip=${skip_count_unstat}+$skip_count_stat=" .
      ($skip_count_unstat + $skip_count_stat) .

  # @all_files = sort(@all_files);

  if ($#all_files < 0) {
    print STDERR "$progname: no files in $dir\n";

  # Random probing: pick candidates until one passes the size check.
  for (my $i = 0; $i < $max_tries; $i++) {
    my $n = int (rand ($#all_files + 1));
    my $file = $all_files[$n];
    if (large_enough_p ($file)) {
      # NOTE(review): "\L" here looks like it should be "\E" (end of
      # quotemeta) -- confirm against the canonical source.
      $file =~ s@^\Q$dir\L/@@so || die; # remove $dir from front

  print STDERR "$progname: no suitable images in $dir " .
    "(after $max_tries tries)\n";
# Returns whether the image file meets the $min_image_width/height
# thresholds.  Files whose dimensions cannot be determined are accepted.
sub large_enough_p($) {

  my ($w, $h) = image_file_size ($file);

    print STDERR "$progname: $file: unable to determine image size\n"
    # Assume that unknown files are of good sizes: this will happen if
    # they matched $good_file_re, but we don't have code to parse them.
    # (This will also happen if the file is junk...)

  if ($w < $min_image_width || $h < $min_image_height) {
    print STDERR "$progname: $file: too small ($w x $h)\n" if ($verbose);

  print STDERR "$progname: $file: $w x $h\n" if ($verbose);
# Given the raw body of a GIF document, returns the dimensions of the image.
# Width and height are 16-bit little-endian values in the screen descriptor
# immediately after the 6-byte signature.

  my $type = substr($body, 0, 6);
  # NOTE(review): the character class [7,9] also admits a literal comma;
  # harmless for real GIF signatures, but [79] was probably intended.
  return () unless ($type =~ /GIF8[7,9]a/);
  $s = substr ($body, 6, 10);
  my ($a,$b,$c,$d) = unpack ("C"x4, $s);
  return (($b<<8|$a), ($d<<8|$c));
# Given the raw body of a JPEG document, returns the dimensions of the image.
# Scans the marker stream for a SOFn (start-of-frame) segment, which holds
# the height and width as 16-bit big-endian values.

  my $L = length($body);

  # SOI marker (0xFFD8) must open the stream.
  my $c1 = substr($body, $i, 1); $i++;
  my $c2 = substr($body, $i, 1); $i++;
  return () unless (ord($c1) == 0xFF && ord($c2) == 0xD8);

  # Walk markers until SOS (0xDA) or end of buffer.
  while (ord($ch) != 0xDA && $i < $L) {

    # Find next marker, beginning with 0xFF.
    while (ord($ch) != 0xFF) {
      return () if (length($body) <= $i);
      $ch = substr($body, $i, 1); $i++;

    # markers can be padded with any number of 0xFF.
    while (ord($ch) == 0xFF) {
      return () if (length($body) <= $i);
      $ch = substr($body, $i, 1); $i++;

    # $ch contains the value of the marker.
    my $marker = ord($ch);

    if (($marker >= 0xC0) &&
        ($marker != 0xCC)) { # it's a SOFn marker

      # Skip past the sample-precision byte to the height/width words.
      return () if (length($body) <= $i);
      my $s = substr($body, $i, 4); $i += 4;
      my ($a,$b,$c,$d) = unpack("C"x4, $s);
      return (($c<<8|$d), ($a<<8|$b));

      # We must skip variables, since FFs in variable names aren't
      # valid JPEG markers.
      return () if (length($body) <= $i);
      my $s = substr($body, $i, 2); $i += 2;
      my ($c1, $c2) = unpack ("C"x2, $s);
      my $length = ($c1 << 8) | $c2;
      return () if ($length < 2);
# Given the raw body of a PNG document, returns the dimensions of the image.
# The IHDR chunk starts at byte 8; width and height are the two 32-bit
# big-endian words following the 4-byte "IHDR" tag.

  return () unless ($body =~ m/^\211PNG\r/s);
  my ($bits) = ($body =~ m/^.{12}(.{12})/s);
  return () unless defined ($bits);
  return () unless ($bits =~ /^IHDR/);
  my ($ign, $w, $h) = unpack("a4N2", $bits);
# Given the raw body of a GIF, JPEG, or PNG document, returns the dimensions
# by trying each format's parser in turn; returns () if none recognize it.

  return () if (length($body) < 10);
  my ($w, $h) = gif_size ($body);
  if ($w && $h) { return ($w, $h); }
  ($w, $h) = jpeg_size ($body);
  if ($w && $h) { return ($w, $h); }
  # #### TODO: need image parsers for TIFF, XPM, XBM.
  return png_size ($body);
# Returns the dimensions of the image file.
# Reads only the first 50k of the file, which is enough for the header
# parsers in image_size() above.
#
sub image_file_size($) {

  if (! open ($in, '<', $file)) {
    print STDERR "$progname: $file: $!\n" if ($verbose);

  binmode ($in); # Larry can take Unicode and shove it up his ass sideways.

  sysread ($in, $body, 1024 * 50); # The first 50k should be enough.
  close $in; # (It's not for certain huge jpegs...
  return image_size ($body); # but we know they're huge!)
  # Report a fatal error to stderr.
  # NOTE(review): part of error(); presumably followed by a nonzero exit
  # (callers treat error() as non-returning) -- confirm.
  print STDERR "$progname: $err\n";
  # Print the usage summary to stderr (usage() then terminates).
  print STDERR "usage: $progname [--verbose] directory\n" .
    " Prints the name of a randomly-selected image file. The directory\n" .
    " is searched recursively. Images smaller than " .
    "${min_image_width}x${min_image_height} are excluded.\n" .
    " The directory may also be the URL of an RSS/Atom feed. Enclosed\n" .
    " images will be downloaded cached locally.\n" .
# Command-line parsing.  First non-option argument is the directory (or
# feed URL); any unrecognized option prints the usage message.
while ($_ = $ARGV[0]) {
  if ($_ eq "--verbose") { $verbose++; }
  elsif (m/^-v+$/) { $verbose += length($_)-1; }  # -v, -vv, -vvv ...
  elsif ($_ eq "--name") { } # ignored, for compatibility
  elsif ($_ eq "--spotlight") { $use_spotlight_p = 1; }
  elsif ($_ eq "--no-spotlight") { $use_spotlight_p = 0; }
  elsif ($_ eq "--cache") { $cache_p = 1; }
  elsif ($_ eq "--no-cache") { $cache_p = 0; }
  elsif (m/^-./) { usage; }
  elsif (!defined($dir)) { $dir = $_; }

usage unless (defined($dir));
# "feed:" is an alias scheme for http feeds.
$dir =~ s@^feed:@http:@si;

if ($dir =~ m/^https?:/si) {

  # Local directory: expand "~/" and strip trailing slashes.
  $dir =~ s@^~/@$ENV{HOME}/@s; # allow literal "~/"
  $dir =~ s@/+$@@s; # omit trailing /

  print STDERR "$progname: $dir: not a directory or URL\n";

# The chosen file name (relative to $dir) is the program's only output.
my $file = find_random_file ($dir);
print STDOUT "$file\n";