# Copyright © 2001-2013 Jamie Zawinski <jwz@jwz.org>.
# Permission to use, copy, modify, distribute, and sell this software and its
# documentation for any purpose is hereby granted without fee, provided that
# the above copyright notice appear in all copies and that both that
# copyright notice and this permission notice appear in supporting
# documentation. No representations are made about the suitability of this
# software for any purpose. It is provided "as is" without express or
# implied warranty.
# This program chooses a random file from under the given directory, and
# prints its name. The file will be an image file whose dimensions are
# larger than a certain minimum size.
# If the directory is a URL, it is assumed to be an RSS or Atom feed.
# The images from that feed will be downloaded, cached, and selected from
# at random. The feed will be re-polled periodically, as needed.
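#
# For example (hypothetical paths and feed URL; "$progname" stands for
# whatever name this script is installed under):
#
#   $progname --verbose ~/Pictures
#   $progname http://www.example.com/photos.rss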
# The various xscreensaver hacks that manipulate images ("jigsaw", etc.) get
# the image to manipulate by running the "xscreensaver-getimage" program.
# Under X11, the "xscreensaver-getimage" program invokes this script,
# depending on the value of the "chooseRandomImages" and "imageDirectory"
# settings in the ~/.xscreensaver file (or .../app-defaults/XScreenSaver).
# The screen savers invoke "xscreensaver-getimage" via utils/grabclient.c,
# which then invokes this script.
# Under Cocoa, this script lives inside the .saver bundle, and is invoked
# directly from utils/grabclient.c.
#use diagnostics; # Fails on some MacOS 10.5 systems
use Fcntl ':flock'; # import LOCK_* constants
use POSIX ':fcntl_h'; # S_ISDIR was here in Perl 5.6
import Fcntl ':mode' unless defined &S_ISUID; # but it is here in Perl 5.8
# but in Perl 5.10, both of these load, and cause errors!
# So we have to check for S_ISUID instead of S_ISDIR? WTF?
use bytes; # Larry can take Unicode and shove it up his ass sideways.
# Perl 5.8.0 causes us to start getting incomprehensible
# errors about UTF-8 all over the place without this.
use Digest::MD5 qw(md5_base64);
# Some Linux systems don't install LWP by default!
# Only error out if we're actually loading a URL instead of local data.
BEGIN { eval 'use LWP::Simple;' }
my $progname = $0; $progname =~ s@.*/@@g;
my $version = q{ $Revision: 1.38 $ }; $version =~ s/^[^0-9]+([0-9.]+).*$/$1/;
# Whether to use MacOS X's Spotlight to generate the list of files.
# When set to -1, uses Spotlight if "mdfind" exists.
# (In my experience, this isn't actually any faster, and might not find
# everything if your Spotlight index is out of date, which happens often.)
my $use_spotlight_p = 0;
# Whether to cache the results of the last run.
# Regenerate the cache if it is older than this many seconds.
my $cache_max_age = 60 * 60 * 3; # 3 hours
# Re-poll RSS/Atom feeds when local copy is older than this many seconds.
my $feed_max_age = $cache_max_age;
# This matches files that we are allowed to use as images (case-insensitive.)
# Anything not matching this is ignored. This is so you can point your
# imageDirectory at directory trees that have things other than images in
# them, but it assumes that you gave your images sensible file extensions.
my @good_extensions = ('jpg', 'jpeg', 'pjpeg', 'pjpg', 'png', 'gif',
'tif', 'tiff', 'xbm', 'xpm');
my $good_file_re = '\.(' . join("|", @good_extensions) . ')$';
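#
# For example, "IMG_1234.JPG" and "tile.png" match (the check below is
# case-insensitive), while "notes.txt" and "IMG_1234.jpg.bak" do not.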
# This matches file extensions that might occur in an image directory,
# and that are never used in the name of a subdirectory. This is an
# optimization that prevents us from having to stat() those files to
# tell whether they are directories or not. (It speeds things up a
# lot. Don't give your directories stupid names.)
my @nondir_extensions = ('ai', 'bmp', 'bz2', 'cr2', 'crw', 'db',
'dmg', 'eps', 'gz', 'hqx', 'htm', 'html', 'icns', 'ilbm', 'mov',
'nef', 'pbm', 'pdf', 'pl', 'ppm', 'ps', 'psd', 'sea', 'sh', 'shtml',
'tar', 'tgz', 'thb', 'txt', 'xcf', 'xmp', 'Z', 'zip' );
my $nondir_re = '\.(' . join("|", @nondir_extensions) . ')$';
# JPEG, GIF, and PNG files that are smaller than this are rejected:
# this is so that you can use an image directory that contains both big
# images and thumbnails, and have it only select the big versions.
my $min_image_width = 255;
my $min_image_height = 255;
my @all_files = (); # list of "good" files we've collected
my %seen_inodes; # for breaking recursive symlink loops
# For diagnostic messages:
my $dir_count = 1; # number of directories seen
my $stat_count = 0; # number of files/dirs stat'ed
my $skip_count_unstat = 0; # number of files skipped without stat'ing
my $skip_count_stat = 0; # number of files skipped after stat
sub find_all_files($);
sub find_all_files($) {
print STDERR "$progname: + reading dir $dir/...\n" if ($verbose > 1);
if (! opendir ($dd, $dir)) {
print STDERR "$progname: couldn't open $dir: $!\n" if ($verbose);
my @files = readdir ($dd);
foreach my $file (@files) {
next if ($file =~ m/^\./); # silently ignore dot files/dirs
if ($file =~ m/[~%\#]$/) { # ignore backup files (and dirs...)
$skip_count_unstat++;
print STDERR "$progname: - skip file $file\n" if ($verbose > 1);
$file = "$dir/$file";
if ($file =~ m/$good_file_re/io) {
# Assume that files ending in .jpg exist and are not directories.
push @all_files, $file;
print STDERR "$progname: - found file $file\n" if ($verbose > 1);
} elsif ($file =~ m/$nondir_re/io) {
# Assume that files ending in .html are not directories.
$skip_count_unstat++;
print STDERR "$progname: -- skip file $file\n" if ($verbose > 1);
# Now we need to stat the file to see if it's a subdirectory.
# Note: we could use the trick of checking "nlinks" on the parent
# directory to see if this directory contains any subdirectories,
# but that would exclude any symlinks to directories.
my @st = stat($file);
my ($dev,$ino,$mode,$nlink,$uid,$gid,$rdev,$size,
$atime,$mtime,$ctime,$blksize,$blocks) = @st;
my $ll = readlink $file;
print STDERR "$progname: + dangling symlink: $file -> $ll\n";
print STDERR "$progname: + unreadable: $file\n";
next if ($seen_inodes{"$dev:$ino"}); # break symlink loops
$seen_inodes{"$dev:$ino"} = 1;
if (S_ISDIR($mode)) {
print STDERR "$progname: + found dir $file\n" if ($verbose > 1);
print STDERR "$progname: + skip file $file\n" if ($verbose > 1);
sub spotlight_all_files($) {
# "public.image" matches all (indexed) images, including Photoshop, etc.
# push @terms, "kMDItemContentTypeTree == 'public.image'";
foreach (@good_extensions) {
# kMDItemFSName hits the file system every time: much worse than "find".
# push @terms, "kMDItemFSName == '*.$_'";
# kMDItemDisplayName matches against the name in the Spotlight index,
# but won't find files that (for whatever reason) didn't get indexed.
push @terms, "kMDItemDisplayName == '*.$_'";
$dir =~ s@([^-_/a-z\d.,])@\\$1@gsi; # quote for sh
my $cmd = "mdfind -onlyin $dir \"" . join (' || ', @terms) . "\"";
print STDERR "$progname: executing: $cmd\n" if ($verbose > 1);
@all_files = split (/[\r\n]+/, `$cmd`);
# If we're using caching, read the cache file and return its contents,
# if any. This also holds an exclusive lock on the cache file, which
# has the additional benefit that if two copies of this program are
# running at once, one will wait for the other, instead of both of
# them spanking the same file system at the same time.
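#
# The cache itself is plain text: the absolute image directory on the
# first line, then one file name per line, relative to that directory
# (that is the format written back out further below).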
my $cache_fd = undef;
my $cache_file_name = undef;
my $read_cache_p = 0;
return () unless ($cache_p);
my $dd = "$ENV{HOME}/Library/Caches"; # MacOS location
$cache_file_name = "$dd/org.jwz.xscreensaver.getimage.cache";
} elsif (-d "$ENV{HOME}/.cache") { # Gnome "FreeDesktop XDG" location
$dd = "$ENV{HOME}/.cache/xscreensaver";
if (! -d $dd) { mkdir ($dd) || error ("mkdir $dd: $!"); }
$cache_file_name = "$dd/xscreensaver-getimage.cache"
} elsif (-d "$ENV{HOME}/tmp") { # If ~/tmp/ exists, use it.
$cache_file_name = "$ENV{HOME}/tmp/.xscreensaver-getimage.cache";
$cache_file_name = "$ENV{HOME}/.xscreensaver-getimage.cache";
print STDERR "$progname: awaiting lock: $cache_file_name\n"
my $file = $cache_file_name;
open ($cache_fd, '+>>', $file) || error ("unable to write $file: $!");
flock ($cache_fd, LOCK_EX) || error ("unable to lock $file: $!");
seek ($cache_fd, 0, 0) || error ("unable to rewind $file: $!");
my $mtime = (stat($cache_fd))[9];
if ($mtime + $cache_max_age < time) {
print STDERR "$progname: cache is too old\n" if ($verbose);
my $odir = <$cache_fd>;
$odir =~ s/[\r\n]+$//s if defined ($odir);
if (!defined ($odir) || ($dir ne $odir)) {
print STDERR "$progname: cache is for $odir, not $dir\n"
if ($verbose && $odir);
while (<$cache_fd>) {
push @files, "$odir/$_";
print STDERR "$progname: " . ($#files+1) . " files in cache\n"
return unless ($cache_p);
# If we read the cache, just close it without rewriting it.
# If we didn't read it, then write it now.
if (! $read_cache_p) {
truncate ($cache_fd, 0) ||
error ("unable to truncate $cache_file_name: $!");
seek ($cache_fd, 0, 0) ||
error ("unable to rewind $cache_file_name: $!");
if ($#all_files >= 0) {
print $cache_fd "$dir\n";
foreach (@all_files) {
my $f = $_; # stupid Perl. do this to avoid modifying @all_files!
$f =~ s@^\Q$dir/@@so || die; # remove $dir from front
print $cache_fd "$f\n";
print STDERR "$progname: cached " . ($#all_files+1) . " files\n"
flock ($cache_fd, LOCK_UN) ||
error ("unable to unlock $cache_file_name: $!");
sub html_unquote($) {
# This only needs to handle entities that occur in RSS, not full HTML.
my %ent = ( 'amp' => '&', 'lt' => '<', 'gt' => '>',
'quot' => '"', 'apos' => "'" );
$h =~ s/(&(\#)?([[:alpha:]\d]+);?)/
my ($o, $c) = ($1, $3);
$c = $ent{$c}; # for &lt;
if ($c =~ m@^x([\dA-F]+)$@si) { # for &#x41;
} elsif ($c =~ m@^\d+$@si) { # for &#65;
if (!defined($ENV{http_proxy}) && !defined($ENV{HTTP_PROXY})) {
my $proxy_data = `scutil --proxy 2>/dev/null`;
my ($server) = ($proxy_data =~ m/\bHTTPProxy\s*:\s*([^\s]+)/s);
my ($port) = ($proxy_data =~ m/\bHTTPPort\s*:\s*([^\s]+)/s);
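# The "scutil --proxy" output being parsed above looks roughly like this
# (a sketch; field order and contents vary by MacOS version):
#
#   <dictionary> {
#     HTTPEnable : 1
#     HTTPPort : 8080
#     HTTPProxy : proxy.example.com
#   }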
# Note: this ignores the "ExceptionsList".
$ENV{http_proxy} = "http://" . $server . ($port ? ":$port" : "") . "/";
print STDERR "$progname: MacOS proxy: $ENV{http_proxy}\n"
if (! defined ($LWP::Simple::ua)) {
error ("\n\n\tPerl is broken. Do this to repair it:\n" .
"\n\tsudo cpan LWP::Simple\n");
set_proxy ($LWP::Simple::ua);
# Returns a list of the image enclosures in the RSS or Atom feed.
# Elements of the list are references, [ "url", "guid" ].
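# For example, a feed with two image enclosures might yield (hypothetical
# URLs):
#
#   ( [ "http://www.example.com/a.jpg", "http://www.example.com/post/1" ],
#     [ "http://www.example.com/b.jpg", "http://www.example.com/post/2" ] )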
$LWP::Simple::ua->agent ("$progname/$version");
$LWP::Simple::ua->timeout (10); # bail sooner than the default of 3 minutes
my $body = (LWP::Simple::get($url) || '');
if ($body !~ m@^<\?xml\s@si) {
# Not an RSS/Atom feed. Try RSS autodiscovery.
# (Great news, everybody: Flickr no longer provides RSS for "Sets",
# only for "Photostreams", and only the first 20 images of those.
error ("not an RSS or Atom feed, or HTML: $url")
unless ($body =~ m@<(HEAD|BODY|A|IMG)\b@si);
# Find the first <link> with RSS or Atom in it, and use that instead.
$body =~ s@<LINK\s+([^<>]*)>@{
if ($p =~ m! \b REL \s* = \s* ['"]? alternate \b!six &&
$p =~ m! \b TYPE \s* = \s* ['"]? application/(atom|rss) !six &&
$p =~ m! \b HREF \s* = \s* ['"] ( [^<>'"]+ ) !six
my $u2 = html_unquote ($1);
my ($h) = ($url =~ m!^([a-z]+://[^/]+)!si);
print STDERR "$progname: found feed: $u2\n"
return parse_feed ($u2);
error ("no RSS or Atom feed for HTML page: $url");
$body =~ s@(<ENTRY|<ITEM)@\001$1@gsi;
my @items = split(/\001/, $body);
foreach my $item (@items) {
# First look for <link rel="enclosure" href="...">
$item =~ s!(<LINK[^<>]*>)!{
my ($rel) = ($link =~ m/\bREL\s*=\s*[\"\']?([^<>\'\"]+)/si);
my ($type) = ($link =~ m/\bTYPE\s*=\s*[\"\']?([^<>\'\"]+)/si);
my ($href) = ($link =~ m/\bHREF\s*=\s*[\"\']([^<>\'\"]+)/si);
if ($rel && lc($rel) eq 'enclosure') {
$href = undef unless ($type =~ m@^image/@si); # omit videos
$iurl = html_unquote($href) if $href;
# Then look for <media:content url="...">
$item =~ s!(<MEDIA:CONTENT[^<>]*>)!{
my ($href) = ($link =~ m/\bURL\s*=\s*[\"\']([^<>\'\"]+)/si);
$iurl = html_unquote($href) if $href;
# Then look for <enclosure url="..."/>
$item =~ s!(<ENCLOSURE[^<>]*>)!{
my ($type) = ($link =~ m/\bTYPE\s*=\s*[\"\']?([^<>\'\"]+)/si);
my ($href) = ($link =~ m/\bURL\s*=\s*[\"\']([^<>\'\"]+)/si);
$iurl = html_unquote($href)
if ($href && $type && $type =~ m@^image/@si); # omit videos
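# The three scans above cover enclosure markup of roughly these forms
# (hypothetical URLs):
#
#   <link rel="enclosure" type="image/jpeg" href="http://example.com/a.jpg"/>
#   <media:content url="http://example.com/a.jpg"/>
#   <enclosure type="image/jpeg" url="http://example.com/a.jpg"/>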
# Ok, maybe there's an image in the <url> field?
$item =~ s!((<URL\b[^<>]*>)([^<>]*))!{
my ($all, $u2) = ($1, $3);
$iurl = html_unquote($u2) if ($u2 =~ m/$good_file_re/io);
# Then look for <description>... with an <img src="..."> inside.
$item =~ s!(<description[^<>]*>.*?</description>)!{
$desc = html_unquote($desc);
my ($href) = ($desc =~ m@<IMG[^<>]*\bSRC=[\"\']?([^\"\'<>]+)@si);
$iurl = $href if ($href);
# Could also do <content:encoded>, but the above probably covers all
# of the real-world possibilities.
# Find a unique ID for this image, to defeat image farms.
# First look for <id>...</id>
($id) = ($item =~ m!<ID\b[^<>]*>\s*([^<>]+?)\s*</ID>!si) unless $id;
# Then look for <guid isPermaLink=...> ... </guid>
($id) = ($item =~ m!<GUID\b[^<>]*>\s*([^<>]+?)\s*</GUID>!si) unless $id;
# Then look for <link> ... </link>
($id) = ($item =~ m!<LINK\b[^<>]*>\s*([^<>]+?)\s*</LINK>!si) unless $id;
$id = $iurl unless $id;
my @P = ($iurl, $id);
} elsif ($iurl ne $o) {
print STDERR "$progname: WARNING: dup ID \"$id\"" .
" for \"$o\" and \"$iurl\"\n";
# Like md5_base64 but uses filename-safe characters.
# Given the URL of an image, download it into the given directory
# and return the file name.
sub download_image($$$) {
my ($url, $uid, $dir) = @_;
$url2 =~ s/\#.*$//s; # Omit search terms after file extension
my ($ext) = ($url2 =~ m@\.([a-z\d]+)$@si);
# If the feed hasn't put a sane extension on their URLs, nothing's going
# to work. This code assumes that file names have extensions, even the
# ones in the cache directory.
print STDERR "$progname: skipping extensionless URL: $url\n"
# Don't bother downloading files that we will reject anyway.
if (! ($url2 =~ m/$good_file_re/io)) {
print STDERR "$progname: skipping non-image URL: $url\n"
my $file = md5_file ($uid);
$file .= '.' . lc($ext) if $ext;
# Don't bother doing If-Modified-Since to see if the URL has changed.
# If we have already downloaded it, assume it's good.
if (-f "$dir/$file") {
print STDERR "$progname: exists: $dir/$file for $uid / $url\n"
# Special-case kludge for Flickr:
# Their RSS feeds sometimes include only the small versions of the images.
# So if the URL ends in one of the "small-size" letters, change it to "b".
$url =~ s@_[sqtmnzc](\.[a-z]+)$@_b$1@si
if ($url =~ m@^https?://[^/?#&]*?flickr\.com/@si);
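# E.g., a hypothetical ".../1234_abcd_m.jpg" becomes ".../1234_abcd_b.jpg",
# requesting the large rendition instead of the small one.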
print STDERR "$progname: downloading: $dir/$file for $uid / $url\n"
$LWP::Simple::ua->agent ("$progname/$version");
my $status = LWP::Simple::mirror ($url, "$dir/$file");
if (!LWP::Simple::is_success ($status)) {
print STDERR "$progname: error $status: $url\n"; # keep going
if ($url !~ m/^https?:/si) { # not a URL: local directory.
return (undef, $url);
my $dir = "$ENV{HOME}/Library/Caches"; # MacOS location
$dir = "$dir/org.jwz.xscreensaver.feeds";
} elsif (-d "$ENV{HOME}/.cache") { # Gnome "FreeDesktop XDG" location
$dir = "$ENV{HOME}/.cache/xscreensaver";
if (! -d $dir) { mkdir ($dir) || error ("mkdir $dir: $!"); }
if (! -d $dir) { mkdir ($dir) || error ("mkdir $dir: $!"); }
} elsif (-d "$ENV{HOME}/tmp") { # If ~/tmp/ exists, use it.
$dir = "$ENV{HOME}/tmp/.xscreensaver-feeds";
$dir = "$ENV{HOME}/.xscreensaver-feeds";
mkdir ($dir) || error ("mkdir $dir: $!");
print STDERR "$progname: mkdir $dir/\n" if ($verbose);
# MD5 for directory name to use for cache of a feed URL.
$dir .= '/' . md5_file ($url);
mkdir ($dir) || error ("mkdir $dir: $!");
print STDERR "$progname: mkdir $dir/ for $url\n" if ($verbose);
# At this point, we have the directory corresponding to this URL.
# Now check to see if the files in it are up to date, and download
my $stamp = '.timestamp';
my $lock = "$dir/$stamp";
print STDERR "$progname: awaiting lock: $lock\n"
my $mtime = ((stat($lock))[9]) || 0;
open ($lock_fd, '+>>', $lock) || error ("unable to write $lock: $!");
flock ($lock_fd, LOCK_EX) || error ("unable to lock $lock: $!");
seek ($lock_fd, 0, 0) || error ("unable to rewind $lock: $!");
my $poll_p = ($mtime + $feed_max_age < time);
# --no-cache cmd line arg means poll again right now.
$poll_p = 1 unless ($cache_p);
# Even if the cache is young, make sure there is at least one file,
# and re-check if not.
opendir (my $dirh, $dir) || error ("$dir: $!");
foreach my $f (readdir ($dirh)) {
next if ($f =~ m/^\./s);
print STDERR "$progname: no files in cache of $url\n" if ($verbose);
print STDERR "$progname: loading $url\n" if ($verbose);
opendir (my $dirh, $dir) || error ("$dir: $!");
foreach my $f (readdir ($dirh)) {
next if ($f eq '.' || $f eq '..');
$files{$f} = 0; # 0 means "file exists, should be deleted"
# Download each image currently in the feed.
my @urls = parse_feed ($url);
print STDERR "$progname: " . ($#urls + 1) . " images\n"
foreach my $p (@urls) {
my ($furl, $id) = @$p;
my $f = download_image ($furl, $id, $dir);
$files{$f} = 1; # Got it, don't delete
my $empty_p = ($count <= 0);
# Now delete any files that are no longer in the feed.
# But if there was nothing in the feed (network failure?)
# then don't blow away the old files.
foreach my $f (keys(%files)) {
} elsif ($files{$f}) {
if (unlink ("$dir/$f")) {
print STDERR "$progname: rm $dir/$f\n" if ($verbose > 1);
print STDERR "$progname: rm $dir/$f: $!\n"; # don't bail
# Both feed and cache are empty. No files at all. Bail.
error ("empty feed: $url") if ($kept <= 1);
# Feed is empty, but we have some files from last time. Warn.
print STDERR "$progname: empty feed: using cache: $url\n"
$mtime = time(); # update the timestamp
# Not yet time to re-check the URL.
print STDERR "$progname: using cache: $url\n" if ($verbose);
# Unlock and update the write date on the .timestamp file.
truncate ($lock_fd, 0) || error ("unable to truncate $lock: $!");
seek ($lock_fd, 0, 0) || error ("unable to rewind $lock: $!");
utime ($mtime, $mtime, $lock_fd) || error ("unable to touch $lock: $!");
flock ($lock_fd, LOCK_UN) || error ("unable to unlock $lock: $!");
print STDERR "$progname: unlocked $lock\n" if ($verbose > 1);
# Don't bother using the imageDirectory cache. We know that this directory
# is flat, and we can assume that an RSS feed doesn't contain 100,000 images
# like ~/Pictures/ might.
# Return the URL and the directory holding that URL's locally cached files.
sub find_random_file($) {
if ($use_spotlight_p == -1) {
$use_spotlight_p = 0;
if (-x '/usr/bin/mdfind') {
$use_spotlight_p = 1;
($url, $dir) = mirror_feed ($dir);
$use_spotlight_p = 0;
print STDERR "$progname: $dir is cache for $url\n" if ($verbose > 1);
@all_files = read_cache ($dir);
if ($#all_files >= 0) {
# got it from the cache...
} elsif ($use_spotlight_p) {
print STDERR "$progname: spotlighting $dir...\n" if ($verbose);
spotlight_all_files ($dir);
print STDERR "$progname: found " . ($#all_files+1) .
" file" . ($#all_files == 0 ? "" : "s") .
print STDERR "$progname: recursively reading $dir...\n" if ($verbose);
find_all_files ($dir);
print STDERR "$progname: " .
"f=" . ($#all_files+1) . "; " .
"skip=${skip_count_unstat}+$skip_count_stat=" .
($skip_count_unstat + $skip_count_stat) .
if ($#all_files < 0) {
print STDERR "$progname: no files in $dir\n";
for (my $i = 0; $i < $max_tries; $i++) {
my $n = int (rand ($#all_files + 1));
my $file = $all_files[$n];
if (large_enough_p ($file)) {
$file =~ s@^\Q$dir/@@so || die; # remove $dir from front
print STDERR "$progname: no suitable images in $dir " .
"(after $max_tries tries)\n";
# If we got here, blow away the cache. Maybe it's stale.
unlink $cache_file_name if $cache_file_name;
sub large_enough_p($) {
my ($w, $h) = image_file_size ($file);
# Nonexistent files are obviously too small!
# Already printed $verbose message about the file not existing.
return 0 unless -f $file;
print STDERR "$progname: $file: unable to determine image size\n"
# Assume that unknown files are of good sizes: this will happen if
# they matched $good_file_re, but we don't have code to parse them.
# (This will also happen if the file is junk...)
if ($w < $min_image_width || $h < $min_image_height) {
print STDERR "$progname: $file: too small ($w x $h)\n" if ($verbose);
print STDERR "$progname: $file: $w x $h\n" if ($verbose);
# Given the raw body of a GIF document, returns the dimensions of the image.
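# (The GIF "logical screen descriptor" stores width and height as
# little-endian 16-bit values at byte offsets 6-9; the byte swizzling
# below reassembles them.)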
my $type = substr($body, 0, 6);
return () unless ($type =~ /GIF8[79]a/);
$s = substr ($body, 6, 10);
my ($a,$b,$c,$d) = unpack ("C"x4, $s);
return (($b<<8|$a), ($d<<8|$c));
# Given the raw body of a JPEG document, returns the dimensions of the image.
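# (The dimensions live in a SOFn "start of frame" segment: 0xFF, 0xCn,
# a 2-byte segment length, a 1-byte sample precision, then the 2-byte
# height and 2-byte width, big-endian. The loop below scans the marker
# stream for that segment.)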
my $L = length($body);
my $c1 = substr($body, $i, 1); $i++;
my $c2 = substr($body, $i, 1); $i++;
return () unless (ord($c1) == 0xFF && ord($c2) == 0xD8);
while (ord($ch) != 0xDA && $i < $L) {
# Find next marker, beginning with 0xFF.
while (ord($ch) != 0xFF) {
return () if (length($body) <= $i);
$ch = substr($body, $i, 1); $i++;
# markers can be padded with any number of 0xFF.
while (ord($ch) == 0xFF) {
return () if (length($body) <= $i);
$ch = substr($body, $i, 1); $i++;
# $ch contains the value of the marker.
my $marker = ord($ch);
if (($marker >= 0xC0) &&
($marker != 0xCC)) { # it's a SOFn marker
return () if (length($body) <= $i);
my $s = substr($body, $i, 4); $i += 4;
my ($a,$b,$c,$d) = unpack("C"x4, $s);
return (($c<<8|$d), ($a<<8|$b));
# We must skip over this segment's data, since a 0xFF byte inside it
# is not a valid JPEG marker.
return () if (length($body) <= $i);
my $s = substr($body, $i, 2); $i += 2;
my ($c1, $c2) = unpack ("C"x2, $s);
my $length = ($c1 << 8) | $c2;
return () if ($length < 2);
# Given the raw body of a PNG document, returns the dimensions of the image.
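# (A PNG begins with an 8-byte signature followed by the IHDR chunk:
# 4-byte length, the literal "IHDR", then 4-byte width and 4-byte height,
# big-endian -- which is what the unpack("a4N2") below pulls apart.)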
return () unless ($body =~ m/^\211PNG\r/s);
my ($bits) = ($body =~ m/^.{12}(.{12})/s);
return () unless defined ($bits);
return () unless ($bits =~ /^IHDR/);
my ($ign, $w, $h) = unpack("a4N2", $bits);
# Given the raw body of a GIF, JPEG, or PNG document, returns the dimensions
return () if (length($body) < 10);
my ($w, $h) = gif_size ($body);
if ($w && $h) { return ($w, $h); }
($w, $h) = jpeg_size ($body);
if ($w && $h) { return ($w, $h); }
# #### TODO: need image parsers for TIFF, XPM, XBM.
return png_size ($body);
# Returns the dimensions of the image file.
sub image_file_size($) {
if (! open ($in, '<:raw', $file)) {
print STDERR "$progname: $file: $!\n" if ($verbose);
sysread ($in, $body, 1024 * 50); # The first 50k should be enough.
close $in; # (It's not for certain huge jpegs...
return image_size ($body); # but we know they're huge!)
print STDERR "$progname: $err\n";
print STDERR "usage: $progname [--verbose] directory-or-feed-url\n\n" .
" Prints the name of a randomly-selected image file. The directory\n" .
" is searched recursively. Images smaller than " .
"${min_image_width}x${min_image_height} are excluded.\n" .
" The directory may also be the URL of an RSS/Atom feed. Enclosed\n" .
" images will be downloaded and cached locally.\n" .
while ($_ = $ARGV[0]) {
if (m/^--?verbose$/s) { $verbose++; }
elsif (m/^-v+$/s) { $verbose += length($_)-1; }
elsif (m/^--?name$/s) { } # ignored, for compatibility
elsif (m/^--?spotlight$/s) { $use_spotlight_p = 1; }
elsif (m/^--?no-spotlight$/s) { $use_spotlight_p = 0; }
elsif (m/^--?cache$/s) { $cache_p = 1; }
elsif (m/^--?no-?cache$/s) { $cache_p = 0; }
elsif (m/^-./) { usage; }
elsif (!defined($dir)) { $dir = $_; }
usage unless (defined($dir));
$dir =~ s@^feed:@http:@si;
if ($dir =~ m/^https?:/si) {
$dir =~ s@^~/@$ENV{HOME}/@s; # allow literal "~/"
$dir =~ s@/+$@@s; # omit trailing /
print STDERR "$progname: $dir: not a directory or URL\n";
my $file = find_random_file ($dir);
print STDOUT "$file\n";