X-Git-Url: http://git.hungrycats.org/cgi-bin/gitweb.cgi?a=blobdiff_plain;f=hacks%2Fwebcollage;h=f53804b855de45916fac9b62f20060d9a4f4d25f;hb=39809ded547bdbb08207d3e514950425215b4410;hp=22f6cec6d60a2234ece3d724273d0d8bccef11e2;hpb=6a1da724858673ac40aa13a9612340d8bed8c7b9;p=xscreensaver diff --git a/hacks/webcollage b/hacks/webcollage index 22f6cec6..f53804b8 100755 --- a/hacks/webcollage +++ b/hacks/webcollage @@ -1,6 +1,6 @@ #!/usr/bin/perl -w # -# webcollage, Copyright (c) 1999-2003 by Jamie Zawinski +# webcollage, Copyright © 1999-2017 by Jamie Zawinski # This program decorates the screen with random images from the web. # One satisfied customer described it as "a nonstop pop culture brainbath." # @@ -15,10 +15,21 @@ # To run this as a display mode with xscreensaver, add this to `programs': # -# default-n: webcollage -root \n\ -# default-n: webcollage -root -filter 'vidwhacker -stdin -stdout' \n\ - - +# webcollage --root +# webcollage --root --filter 'vidwhacker --stdin --stdout' +# +# +# You can see this in action at https://www.jwz.org/webcollage/ -- +# it auto-reloads about once a minute. To make a page similar to +# that on your own system, do this: +# +# webcollage --size '800x600' --imagemap $HOME/www/webcollage/index +# +# +# Requires that either the provided "webcollage-helper" program or +# ImageMagick's "convert" be available on $PATH. +# +# # If you have the "driftnet" program installed, webcollage can display a # collage of images sniffed off your local ethernet, instead of pulled out # of search engines: in that way, your screensaver can display the images @@ -27,7 +38,7 @@ # Driftnet is available here: http://www.ex-parrot.com/~chris/driftnet/ # Use it like so: # -# default-n: webcollage -root -driftnet \n\ +# webcollage --root --driftnet # # Driftnet is the Unix implementation of the MacOS "EtherPEG" program. @@ -41,57 +52,107 @@ use strict; #use diagnostics; -use Socket; require Time::Local; require POSIX; use Fcntl ':flock'; # import LOCK_* constants use POSIX qw(strftime); - -use bytes; # Larry can take Unicode and shove it up his ass sideways. - # Perl 5.8.0 causes us to start getting incomprehensible - # errors about UTF-8 all over the place without this. +use LWP::UserAgent; my $progname = $0; $progname =~ s@.*/@@g; -my $version = q{ $Revision: 1.104 $ }; $version =~ s/^[^0-9]+([0-9.]+).*$/$1/; -my $copyright = "WebCollage $version, Copyright (c) 1999-2002" . +my ($version) = ('$Revision: 1.181 $' =~ m/\s(\d[.\d]+)\s/s); +my $copyright = "WebCollage $version, Copyright (c) 1999-2017" . " Jamie Zawinski \n" . - " http://www.jwz.org/xscreensaver/\n"; + " https://www.jwz.org/webcollage/\n"; -my @search_methods = ( 77, "altavista", \&pick_from_alta_vista_random_link, - 14, "yahoorand", \&pick_from_yahoo_random_link, - 9, "yahoonews", \&pick_from_yahoo_news_text, +my @search_methods = ( + # Google is rate-limiting us now, so this works ok from + # a short-running screen saver, but not as a batch job. + # I haven't found a workaround. + # + 5, "googlephotos", \&pick_from_google_image_photos, + 3, "googleimgs", \&pick_from_google_images, + 3, "googlenums", \&pick_from_google_image_numbers, - # Alta Vista has a new "random link" URL now. - # They added it specifically to better support webcollage! - # That was super cool of them. This is how we used to do - # it, before: + # So let's try Bing instead. No rate limiting yet! 
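+                        # (The numbers in this table are relative weights.
+                        # An illustrative sketch of picking one entry from
+                        # such a (weight, name, \&code) triple list in
+                        # proportion to its weight -- not the selector this
+                        # script actually uses:
+                        #
+                        #   sub pick_weighted_method_example(@) {
+                        #     my @m = @_;
+                        #     my $total = 0;
+                        #     for (my $i = 0; $i < @m; $i += 3) { $total += $m[$i]; }
+                        #     my $r = rand ($total);
+                        #     for (my $i = 0; $i < @m; $i += 3) {
+                        #       $r -= $m[$i];
+                        #       return ($m[$i+1], $m[$i+2]) if ($r < 0);
+                        #     }
+                        #     return ();    # all weights were zero
+                        #   }
+                        # )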
+ # + 13, "bingphotos", \&pick_from_bing_image_photos, + 11, "bingimgs", \&pick_from_bing_images, + 10, "bingnums", \&pick_from_bing_image_numbers, + + 20, "flickr_recent", \&pick_from_flickr_recent, + 15, "flickr_random", \&pick_from_flickr_random, + 6, "livejournal", \&pick_from_livejournal_images, + + 11, "imgur", \&pick_from_imgur, + + # Tumblr doesn't have an "or" search, so this isn't great. + 3, "tumblr", \&pick_from_tumblr, + + # I ran out of usable access tokens, May 2017 + # 0, "instagram", \&pick_from_instagram, + + # No longer exists, as of Apr 2014 + # 0, "yahoorand", \&pick_from_yahoo_random_link, + + # Twitter destroyed their whole API in 2013. + # 0, "twitpic", \&pick_from_twitpic_images, + # 0, "twitter", \&pick_from_twitter_images, + + # This is a cute way to search for a certain webcams. + # Not included in default methods, since these images + # aren't terribly interesting by themselves. + # See also "SurveillanceSaver". # - # 0, "avimages", \&pick_from_alta_vista_images, - # 0, "avtext", \&pick_from_alta_vista_text, + 0, "securitycam", \&pick_from_security_camera, - # Google asked (nicely) for me to stop searching them. + # Nonfunctional as of June 2011. + # 0, "altavista", \&pick_from_alta_vista_random_link, + + # In Apr 2002, Google asked me to stop searching them. # I asked them to add a "random link" url. They said # "that would be easy, we'll think about it" and then - # never wrote back. Booo Google! Booooo! + # never wrote back. Booo Google! Booooo! So, screw + # those turkeys, I've turned Google searching back on. + # I'm sure they can take it. (Jan 2005.) + + # Jan 2005: Yahoo fucked up their search form so that + # it's no longer possible to do "or" searches on news + # images, so we rarely get any hits there any more. + # + # 0, "yahoonews", \&pick_from_yahoo_news_text, + + # Dec 2004: the ircimages guy's server can't take the + # heat, so he started banning the webcollage user agent. + # I tried to convince him to add a lighter-weight page to + # support webcollage better, but he doesn't care. + # + # 0, "ircimages", \&pick_from_ircimages, + + # Dec 2002: Alta Vista has a new "random link" URL now. + # They added it specifically to better support webcollage! + # That was super cool of them. This is how we used to do + # it, before: # - # 0, "googlenums", \&pick_from_google_image_numbers, - # 0, "googleimgs", \&pick_from_google_images, + # 0, "avimages", \&pick_from_alta_vista_images, + # 0, "avtext", \&pick_from_alta_vista_text, - # I suspect Hotbot is actually the same search engine - # data as Lycos. + # This broke in 2004. Eh, Lycos sucks anyway. # - # 0, "hotbot", \&pick_from_hotbot_text, + # 0, "lycos", \&pick_from_lycos_text, - # Eh, Lycos sucks anyway. - # 0, "lycos", \&pick_from_lycos_text, + # This broke in 2003, I think. I suspect Hotbot is + # actually the same search engine data as Lycos. + # + # 0, "hotbot", \&pick_from_hotbot_text, ); # programs we can use to write to the root window (tried in ascending order.) # my @root_displayers = ( + "xscreensaver-getimage -root -file", "chbg -once -xscreensaver -max_size 100", "xv -root -quit -viewonly +noresetroot -quick24 -rmode 5" . " -rfg black -rbg black", @@ -108,26 +169,17 @@ my @root_displayers = ( my %cookies = ( "www.altavista.com" => "AV_ALL=1", # request uncensored searches "web.altavista.com" => "AV_ALL=1", - - # log in as "cipherpunk" - "www.nytimes.com" => 'NYT-S=18cHMIlJOn2Y1bu5xvEG3Ufuk6E1oJ.' . - 'FMxWaQV0igaB5Yi/Q/guDnLeoL.pe7i1oakSb' . 
- '/VqfdUdb2Uo27Vzt1jmPn3cpYRlTw9', + "ircimages.com" => 'disclaimer=1', ); # If this is set, it's a helper program to use for pasting images together: -# this is a lot faster and more efficient than using PPM pipelines, which is -# what we do if this program doesn't exist. (We check for "webcollage-helper" -# on $PATH at startup, and set this variable appropriately.) +# this is somewhat faster than using ImageMagick. # my $webcollage_helper = undef; +my $convert_cmd = 'convert'; - -# If we have the webcollage-helper program, then it will paste the images -# together with transparency! 0.0 is invisible, 1.0 is totally opaque. -# -my $opacity = 0.85; +my $opacity = 0.85; # Opacity when pasting images together. # Some sites have managed to poison the search engines. These are they. @@ -153,6 +205,11 @@ my %poisoners = ( # (I don't see how they did it, though!) "alwayshotels.com" => 1, # Poisoned Lycos pretty heavily. "nextag.com" => 1, # Poisoned Alta Vista real good. + "ghettodriveby.com" => 1, # Poisoned Google Images. + "crosswordsolver.org" => 1, # Poisoned Google Images. + "xona.com" => 1, # Poisoned Google Images. + "freepatentsonline.com" => 1, # Poisoned Google Images. + "herbdatanz.com" => 1, # Poisoned Google Images. ); @@ -163,16 +220,98 @@ my %poisoners = ( # site" diagnostic message. # my %warningless_sites = ( - "home.earthlink.net" => 1, # Lots of home pages here. - "www.geocities.com" => 1, + "home.earthlink.net" => 1, "www.angelfire.com" => 1, "members.aol.com" => 1, + "img.photobucket.com" => 1, + "pics.livejournal.com" => 1, + "tinypic.com" => 1, + "flickr.com" => 1, + "staticflickr.com" => 1, + "pbase.com" => 1, + "blogger.com" => 1, + "multiply.com" => 1, + "wikimedia.org" => 1, + "twitpic.com" => 1, + "amazonaws.com" => 1, + "blogspot.com" => 1, + "photoshelter.com" => 1, + "myspacecdn.com" => 1, + "feedburner.com" => 1, + "wikia.com" => 1, + "ljplus.ru" => 1, + "yandex.ru" => 1, + "imgur.com" => 1, + "tumblr.com" => 1, + "yfrog.com" => 1, + "cdninstagram.com" => 1, + "gstatic.com" => 1, "yimg.com" => 1, # This is where dailynews.yahoo.com stores "eimg.com" => 1, # its images, so pick_from_yahoo_news_text() # hits this every time. + "images.quizfarm.com" => 1, # damn those LJ quizzes... + "images.quizilla.com" => 1, + "images.quizdiva.net" => 1, + "driftnet" => 1, # builtin... + "local-directory" => 1, # builtin... +); + + +# For decoding HTML-encoded character entities to URLs. +# In This Modern World, probably we should use HTML::Entities instead. 
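+# For example (an illustrative sketch; this script sticks with the table
+# below):
+#
+#   use HTML::Entities;
+#   my $text = decode_entities ("Ben &amp; Jerry&#39;s");  # "Ben & Jerry's"
+#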
+# +my %entity_table = ( + "apos" => '\'', + "quot" => '"', "amp" => '&', "lt" => '<', + "gt" => '>', "nbsp" => ' ', "iexcl" => '', + "cent" => "\xA2", "pound" => "\xA3", "curren" => "\xA4", + "yen" => "\xA5", "brvbar" => "\xA6", "sect" => "\xA7", + "uml" => "\xA8", "copy" => "\xA9", "ordf" => "\xAA", + "laquo" => "\xAB", "not" => "\xAC", "shy" => "\xAD", + "reg" => "\xAE", "macr" => "\xAF", "deg" => "\xB0", + "plusmn" => "\xB1", "sup2" => "\xB2", "sup3" => "\xB3", + "acute" => "\xB4", "micro" => "\xB5", "para" => "\xB6", + "middot" => "\xB7", "cedil" => "\xB8", "sup1" => "\xB9", + "ordm" => "\xBA", "raquo" => "\xBB", "frac14" => "\xBC", + "frac12" => "\xBD", "frac34" => "\xBE", "iquest" => "\xBF", + "Agrave" => "\xC0", "Aacute" => "\xC1", "Acirc" => "\xC2", + "Atilde" => "\xC3", "Auml" => "\xC4", "Aring" => "\xC5", + "AElig" => "\xC6", "Ccedil" => "\xC7", "Egrave" => "\xC8", + "Eacute" => "\xC9", "Ecirc" => "\xCA", "Euml" => "\xCB", + "Igrave" => "\xCC", "Iacute" => "\xCD", "Icirc" => "\xCE", + "Iuml" => "\xCF", "ETH" => "\xD0", "Ntilde" => "\xD1", + "Ograve" => "\xD2", "Oacute" => "\xD3", "Ocirc" => "\xD4", + "Otilde" => "\xD5", "Ouml" => "\xD6", "times" => "\xD7", + "Oslash" => "\xD8", "Ugrave" => "\xD9", "Uacute" => "\xDA", + "Ucirc" => "\xDB", "Uuml" => "\xDC", "Yacute" => "\xDD", + "THORN" => "\xDE", "szlig" => "\xDF", "agrave" => "\xE0", + "aacute" => "\xE1", "acirc" => "\xE2", "atilde" => "\xE3", + "auml" => "\xE4", "aring" => "\xE5", "aelig" => "\xE6", + "ccedil" => "\xE7", "egrave" => "\xE8", "eacute" => "\xE9", + "ecirc" => "\xEA", "euml" => "\xEB", "igrave" => "\xEC", + "iacute" => "\xED", "icirc" => "\xEE", "iuml" => "\xEF", + "eth" => "\xF0", "ntilde" => "\xF1", "ograve" => "\xF2", + "oacute" => "\xF3", "ocirc" => "\xF4", "otilde" => "\xF5", + "ouml" => "\xF6", "divide" => "\xF7", "oslash" => "\xF8", + "ugrave" => "\xF9", "uacute" => "\xFA", "ucirc" => "\xFB", + "uuml" => "\xFC", "yacute" => "\xFD", "thorn" => "\xFE", + "yuml" => "\xFF", + + # HTML 4 entities that do not have 1:1 Latin1 mappings. 
+ "bull" => "*", "hellip"=> "...", "prime" => "'", "Prime" => "\"", + "frasl" => "/", "trade" => "[tm]", "larr" => "<-", "rarr" => "->", + "harr" => "<->", "lArr" => "<=", "rArr" => "=>", "hArr" => "<=>", + "empty" => "\xD8", "minus" => "-", "lowast"=> "*", "sim" => "~", + "cong" => "=~", "asymp" => "~", "ne" => "!=", "equiv" => "==", + "le" => "<=", "ge" => ">=", "lang" => "<", "rang" => ">", + "loz" => "<>", "OElig" => "OE", "oelig" => "oe", "Yuml" => "Y", + "circ" => "^", "tilde" => "~", "ensp" => " ", "emsp" => " ", + "thinsp"=> " ", "ndash" => "-", "mdash" => "--", "lsquo" => "`", + "rsquo" => "'", "sbquo" => "'", "ldquo" => "\"", "rdquo" => "\"", + "bdquo" => "\"", "lsaquo"=> "<", "rsaquo"=> ">", ); @@ -194,14 +333,14 @@ my $verbose_warnings = 0; # print out warnings when things go wrong my $verbose_load = 0; # diagnostics about loading of URLs my $verbose_filter = 0; # diagnostics about page selection/rejection my $verbose_net = 0; # diagnostics about network I/O -my $verbose_pbm = 0; # diagnostics about PBM pipelines +my $verbose_decode = 0; # diagnostics about img conversion pipelines my $verbose_http = 0; # diagnostics about all HTTP activity my $verbose_exec = 0; # diagnostics about executing programs my $report_performance_interval = 60 * 15; # print some stats every 15 minutes my $http_proxy = undef; -my $http_timeout = 30; +my $http_timeout = 20; my $cvt_timeout = 10; my $min_width = 50; @@ -213,6 +352,8 @@ my $min_gif_area = (120 * 120); my $no_output_p = 0; my $urls_only_p = 0; +my $cocoa_p = 0; +my $imagemap_base = undef; my @pids_to_kill = (); # forked pids we should kill when we exit, if any. @@ -220,6 +361,9 @@ my $driftnet_magic = 'driftnet'; my $driftnet_dir = undef; my $default_driftnet_cmd = "driftnet -a -m 100"; +my $local_magic = 'local-directory'; +my $local_dir = undef; + my $wordlist; my %rejected_urls; @@ -239,168 +383,105 @@ my @tripwire_words = ("aberrate", "abode", "amorphous", "antioch", # returns three values: the HTTP response line; the document headers; # and the document body. # -sub get_document_1 { - my ( $url, $referer, $timeout ) = @_; +sub get_document_1($$$) { + my ($url, $referer, $timeout) = @_; if (!defined($timeout)) { $timeout = $http_timeout; } if ($timeout > $http_timeout) { $timeout = $http_timeout; } - if ($timeout <= 0) { - LOG (($verbose_net || $verbose_load), "timed out for $url"); - return (); - } + my $user_agent = "$progname/$version"; + + if ($url =~ m@^https?://www\.altavista\.com/@s || + $url =~ m@^https?://random\.yahoo\.com/@s || + $url =~ m@^https?://[^./]+\.google\.com/@s || + $url =~ m@^https?://www\.livejournal\.com/@s) { + # block this, you turkeys. + $user_agent = 'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.1.7)' . 
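+    # (Note: several of the URLs this script fetches are https; fetching
+    # those through LWP::UserAgent requires LWP::Protocol::https (which
+    # pulls in IO::Socket::SSL) to be installed.  A sketch of a startup
+    # check, not something this script does:
+    #
+    #   eval { require LWP::Protocol::https; 1 } ||
+    #     print STDERR "$progname: LWP::Protocol::https missing; " .
+    #                  "https URLs will fail\n";
+    # )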
+ ' Gecko/20070914 Firefox/2.0.0.7'; + } + + my $ua = LWP::UserAgent->new ( agent => $user_agent, + keep_alive => 0, + env_proxy => 0, + ); + $ua->proxy ('http', $http_proxy) if $http_proxy; + $ua->default_header ('Referer' => $referer) if $referer; + $ua->default_header ('Accept' => '*/*'); + $ua->timeout($timeout) if $timeout; + + if (0) { + $ua->add_handler ("request_send", + sub($$$) { + my ($req, $ua, $h) = @_; + print "\n>>[[\n"; $req->dump; print "\n]]\n"; + return; + }); + $ua->add_handler ("response_data", + sub($$$$) { + my ($req, $ua, $h, $data) = @_; + #print "\n<<[[\n"; print $data; print "\n]]\n"; + return 1; + }); + $ua->add_handler ("request_done", + sub($$$) { + my ($req, $ua, $h) = @_; + print "\n<<[[\n"; $req->dump; print "\n]]\n"; + return; + }); + } + + if ($verbose_http) { + LOG (1, " ==> GET $url"); + LOG (1, " ==> User-Agent: $user_agent"); + LOG (1, " ==> Referer: $referer") if $referer; + } + + my $res = $ua->get ($url); + + my $http = ($res ? $res->status_line : '') || ''; + my $head = ($res ? $res->headers() : '') || ''; + $head = $head->as_string() if $head; + my $body = ($res && $res->is_success ? $res->decoded_content : '') || ''; LOG ($verbose_net, "get_document_1 $url " . ($referer ? $referer : "")); - if (! ($url =~ m@^http://@i)) { - LOG ($verbose_net, "not an HTTP URL: $url"); - return (); + $head =~ s/\r\n/\n/gs; + $head =~ s/\r/\n/gs; + if ($verbose_http) { + foreach (split (/\n/, $head)) { + LOG ($verbose_http, " <== $_"); + } } - my ($url_proto, $dummy, $serverstring, $path) = split(/\//, $url, 4); - $path = "" unless $path; + my @L = split(/\r\n|\r|\n/, $body); + my $lines = @L; + LOG ($verbose_http, + " <== [ body ]: $lines lines, " . length($body) . " bytes"); - my ($them,$port) = split(/:/, $serverstring); - $port = 80 unless $port; - - my $them2 = $them; - my $port2 = $port; - if ($http_proxy) { - $serverstring = $http_proxy if $http_proxy; - ($them2,$port2) = split(/:/, $serverstring); - $port2 = 80 unless $port2; - } - - my ($remote, $iaddr, $paddr, $proto, $line); - $remote = $them2; - if ($port2 =~ /\D/) { $port2 = getservbyname($port2, 'tcp') } - if (!$port2) { - LOG (($verbose_net || $verbose_load), "unrecognised port in $url"); + if (!$http) { + LOG (($verbose_net || $verbose_load), "null response: $url"); return (); } - $iaddr = inet_aton($remote); - if (!$iaddr) { - LOG (($verbose_net || $verbose_load), "host not found: $remote"); - return (); - } - $paddr = sockaddr_in($port2, $iaddr); - - - my $head = ""; - my $body = ""; - - @_ = - eval { - local $SIG{ALRM} = sub { - LOG (($verbose_net || $verbose_load), "timed out ($timeout) for $url"); - die "alarm\n"; - }; - alarm $timeout; - - $proto = getprotobyname('tcp'); - if (!socket(S, PF_INET, SOCK_STREAM, $proto)) { - LOG (($verbose_net || $verbose_load), "socket: $!"); - return (); - } - if (!connect(S, $paddr)) { - LOG (($verbose_net || $verbose_load), "connect($serverstring): $!"); - return (); - } - - select(S); $| = 1; select(STDOUT); - - my $cookie = $cookies{$them}; - - my $user_agent = "$progname/$version"; - - if ($url =~ m@^http://www\.altavista\.com/@ || - $url =~ m@^http://random\.yahoo\.com/@) { - # block this, you turkeys. - $user_agent = "Mozilla/4.76 [en] (X11; U; Linux 2.2.16-22 i686; Nav)"; - } - - my $hdrs = "GET " . ($http_proxy ? $url : "/$path") . " HTTP/1.0\r\n" . - "Host: $them\r\n" . - "User-Agent: $user_agent\r\n"; - if ($referer) { - $hdrs .= "Referer: $referer\r\n"; - } - if ($cookie) { - my @cc = split(/\r?\n/, $cookie); - $hdrs .= "Cookie: " . 
join('; ', @cc) . "\r\n"; - } - $hdrs .= "\r\n"; - - foreach (split('\r?\n', $hdrs)) { - LOG ($verbose_http, " ==> $_"); - } - print S $hdrs; - my $http = || ""; - # Kludge: the Yahoo Random Link is now returning as its first - # line "Status: 301" instead of "HTTP/1.0 301 Found". Fix it... - # - $http =~ s@^Status:\s+(\d+)\b@HTTP/1.0 $1@i; - - $_ = $http; - s/[\r\n]+$//s; - LOG ($verbose_http, " <== $_"); - - while () { - $head .= $_; - s/[\r\n]+$//s; - last if m@^$@; - LOG ($verbose_http, " <== $_"); - - if (m@^Set-cookie:\s*([^;\r\n]+)@i) { - set_cookie($them, $1) - } - } - - my $lines = 0; - while () { - $body .= $_; - $lines++; - } - - LOG ($verbose_http, - " <== [ body ]: $lines lines, " . length($body) . " bytes"); - - close S; - - if (!$http) { - LOG (($verbose_net || $verbose_load), "null response: $url"); - return (); - } - - return ( $http, $head, $body ); - }; - die if ($@ && $@ ne "alarm\n"); # propagate errors - if ($@) { - # timed out - $head = undef; - $body = undef; - $suppress_audit = 1; - return (); - } else { - # didn't - alarm 0; - return @_; - } + return ( $http, $head, $body ); } # returns two values: the document headers; and the document body. # if the given URL did a redirect, returns the redirected-to document. # -sub get_document { - my ( $url, $referer, $timeout ) = @_; +sub get_document($$;$) { + my ($url, $referer, $timeout) = @_; my $start = time; if (defined($referer) && $referer eq $driftnet_magic) { return get_driftnet_file ($url); } + if (defined($referer) && $referer eq $local_magic) { + return get_local_file ($url); + } + my $orig_url = $url; my $loop_count = 0; my $max_loop_count = 4; @@ -437,12 +518,12 @@ sub get_document { $url = $location; if ($url =~ m@^/@) { - $referer =~ m@^(http://[^/]+)@i; + $referer =~ m@^(https?://[^/]+)@i; $url = $1 . $url; } elsif (! ($url =~ m@^[a-z]+:@i)) { $_ = $referer; - s@[^/]+$@@g if m@^http://[^/]+/@i; - $_ .= "/" if m@^http://[^/]+$@i; + s@[^/]+$@@g if m@^https?://[^/]+/@i; + $_ .= "/" if m@^https?://[^/]+$@i; $url = $_ . $url; } @@ -486,7 +567,7 @@ sub get_document { # in again, but you have to present the old cookie to get the new cookie. # So, by doing this, the built-in cypherpunks cookie will never go "stale". # -sub set_cookie { +sub set_cookie($$) { my ($host, $cookie) = @_; my $oc = $cookies{$host}; return unless $oc; @@ -512,20 +593,20 @@ sub set_cookie { # given a URL and the body text at that URL, selects and returns a random # image from it. returns () if no suitable images found. # -sub pick_image_from_body { - my ( $url, $body ) = @_; +sub pick_image_from_body($$) { + my ($url, $body) = @_; my $base = $url; $_ = $url; # if there's at least one slash after the host, take off the last # pathname component - if ( m@^http://[^/]+/@io ) { + if ( m@^https?://[^/]+/@io ) { $base =~ s@[^/]+$@@go; } # if there are no slashes after the host at all, put one on the end. - if ( m@^http://[^/]+$@io ) { + if ( m@^https?://[^/]+$@io ) { $base .= "/"; } @@ -546,9 +627,9 @@ sub pick_image_from_body { # randomly from the set of images on the web. All the logic here for # rejecting some images is really a set of heuristics for rejecting # images that are not really images: for rejecting *text* that is in - # GIF/JPEG form. I don't want text, I want pictures, and I want the - # content of the pictures to be randomly selected from among all the - # available content. + # GIF/JPEG/PNG form. 
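+  # (An aside on the URL juggling above: get_document() resolves relative
+  # Location: headers, and the code below resolves relative image links,
+  # with regexes.  The URI module, a prerequisite of LWP, can do the same
+  # job; an illustrative sketch, not what this script does:
+  #
+  #   use URI;
+  #   my $abs = URI->new_abs ($link, $base)->as_string;
+  #
+  # e.g. URI->new_abs ("../pix/a.jpg", "http://example.com/b/c/index.html")
+  # is "http://example.com/b/pix/a.jpg".)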
I don't want text, I want pictures, and I want + # the content of the pictures to be randomly selected from among all + # the available content. # # So, filtering out "dirty" pictures by looking for "dirty" keywords # would be wrong: dirty pictures exist, like it or not, so webcollage @@ -584,37 +665,46 @@ sub pick_image_from_body { my %unique_urls; foreach (split(/ * 1000) { - LOG (($verbose_filter || $verbose_load), - "excessive keywords ($L bytes) in $url: rejecting."); - $rejected_urls{$url} = $L; - $body = undef; - $_ = undef; - return (); - } else { - LOG ($verbose_filter, " keywords ($L bytes) in $url (ok)"); - } + my $L = length($_); + if ($L > 1000) { + LOG (($verbose_filter || $verbose_load), + "excessive keywords ($L bytes) in $url: rejecting."); + $rejected_urls{$url} = $L; + $body = undef; + $_ = undef; + return (); + } else { + LOG ($verbose_filter, " keywords ($L bytes) in $url (ok)"); } - } elsif ( m/^(img|a) .*(src|href) ?= ?\"? ?(.*?)[ >\"]/io ) { + } elsif (m/^ (IMG|A) \b .* (SRC|HREF) \s* = \s* ["']? (.*?) [ "'<>] /six || + m/^ (LINK|META) \b .* (REL|PROPERTY) \s* = \s* + ["']? (image_src|og:image) ["']? /six) { - my $was_inline = (! ( "$1" eq "a" || "$1" eq "A" )); + my $was_inline = (lc($1) eq 'img'); + my $was_meta = (lc($1) eq 'link' || lc($1) eq 'meta'); my $link = $3; + + # For + # and + # + if ($was_meta) { + next unless (m/ (HREF|CONTENT) \s* = \s* ["']? (.*?) [ "'<>] /six); + $link = $2; + } + my ( $width ) = m/width ?=[ \"]*(\d+)/oi; my ( $height ) = m/height ?=[ \"]*(\d+)/oi; $_ = $link; if ( m@^/@o ) { my $site; - ( $site = $base ) =~ s@^(http://[^/]*).*@$1@gio; + ( $site = $base ) =~ s@^(https?://[^/]*).*@$1@gio; $_ = "$site$link"; } elsif ( ! m@^[^/:?]+:@ ) { $_ = "$base$link"; @@ -623,12 +713,12 @@ sub pick_image_from_body { } # skip non-http - if ( ! m@^http://@io ) { + if ( ! m@^https?://@io ) { next; } # skip non-image - if ( ! m@[.](gif|jpg|jpeg|pjpg|pjpeg)$@io ) { + if ( ! m@[.](gif|jpg|jpeg|pjpg|pjpeg|png)$@io ) { next; } @@ -661,6 +751,13 @@ sub pick_image_from_body { next; } + # skip images with a URL that indicates a Yahoo thumbnail. + if (m@\.yimg\.com/.*/t/@) { + if (!$width) { $width = "?"; } + if (!$height) { $height = "?"; } + LOG ($verbose_filter, " skip yahoo thumb $_ (${width}x$height)"); + next; + } my $url = $_; @@ -672,20 +769,24 @@ sub pick_image_from_body { LOG ($verbose_filter, " image $url" . ($width && $height ? " (${width}x${height})" : "") . - ($was_inline ? " (inline)" : "")); + ($was_meta ? " (meta)" : $was_inline ? " (inline)" : "")); - $urls[++$#urls] = $url; - $unique_urls{$url}++; - # jpegs are preferable to gifs. - $_ = $url; - if ( ! m@[.]gif$@io ) { - $urls[++$#urls] = $url; + my $weight = 1; + + if ($was_meta) { + $weight = 20; # meta tag images are far preferable to inline images. + } else { + if ($url !~ m@[.](gif|png)$@io ) { + $weight += 2; # JPEGs are preferable to GIFs and PNGs. + } + if (! $was_inline) { + $weight += 4; # pointers to images are preferable to inlined images. + } } - # pointers to images are preferable to inlined images. - if ( ! $was_inline ) { - $urls[++$#urls] = $url; + $unique_urls{$url}++; + for (my $i = 0; $i < $weight; $i++) { $urls[++$#urls] = $url; } } @@ -713,6 +814,34 @@ sub pick_image_from_body { } +# Given a URL and the RSS feed from that URL, pick a random image from +# the feed. This is a lot simpler than extracting images out of a page: +# we already know we have reasonable images, so we just pick one. 
+# Returns: the real URL of the page (preferably not the RSS version), +# and the image. + +sub pick_image_from_rss($$) { + my ($url, $body) = @_; + + my ($base) = ($body =~ m@([^<>]+)@si); # root link + + my @items = ($body =~ m@]*>(.*?)@gsi); + return unless @items; + + my $n = @items; + my $i = int(rand($n)); + my $item = $items[$i]; + + $base = $1 if ($item =~ m@([^<>]+)@si); # item link + $base = $url unless $base; + + ($url) = ($item =~ m/]*\burl="(.*?)"/si); + return unless $url; + + LOG ($verbose_load, "picked image $i/$n: $url"); + return ($base, $url); +} + ############################################################################ # @@ -721,10 +850,13 @@ sub pick_image_from_body { ############################################################################ -sub pick_dictionary { +sub pick_dictionary() { my @dicts = ("/usr/dict/words", "/usr/share/dict/words", - "/usr/share/lib/dict/words"); + "/usr/share/lib/dict/words", + "/usr/share/dict/cracklib-small", + "/usr/share/dict/cracklib-words" + ); foreach my $f (@dicts) { if (-f $f) { $wordlist = $f; @@ -736,65 +868,94 @@ sub pick_dictionary { # returns a random word from the dictionary # -sub random_word { - my $word = 0; - if (open (IN, "<$wordlist")) { - my $size = (stat(IN))[7]; - my $pos = rand $size; - if (seek (IN, $pos, 0)) { - $word = ; # toss partial line - $word = ; # keep next line - } - if (!$word) { - seek( IN, 0, 0 ); - $word = ; - } - close (IN); - } +sub random_word() { - return 0 if (!$word); + return undef unless open (my $in, '<', $wordlist); - $word =~ s/^[ \t\n\r]+//; - $word =~ s/[ \t\n\r]+$//; - $word =~ s/ys$/y/; - $word =~ s/ally$//; - $word =~ s/ly$//; - $word =~ s/ies$/y/; - $word =~ s/ally$/al/; - $word =~ s/izes$/ize/; - $word =~ tr/A-Z/a-z/; + my $size = (stat($in))[7]; + my $word = undef; + my $count = 0; - if ( $word =~ s/[ \t\n\r]/\+/g ) { # convert intra-word spaces to "+". - $word = "\%22$word\%22"; # And put quotes (%22) around it. + while (1) { + error ("looping ($count) while reading $wordlist") + if (++$count > 100); + + my $pos = int (rand ($size)); + if (seek ($in, $pos, 0)) { + $word = <$in>; # toss partial line + $word = <$in>; # keep next line } - return $word; + next unless ($word); + next if ($word =~ m/^[-\']/); + + $word = lc($word); + $word =~ s/^.*-//s; + $word =~ s/^[^a-z]+//s; + $word =~ s/[^a-z]+$//s; + $word =~ s/\'s$//s; + $word =~ s/ys$/y/s; + $word =~ s/ally$//s; + $word =~ s/ly$//s; + $word =~ s/ies$/y/s; + $word =~ s/ally$/al/s; + $word =~ s/izes$/ize/s; + $word =~ s/esses$/ess/s; + $word =~ s/(.{5})ing$/$1/s; + + next if (length ($word) > 14); + last if ($word); + } + + close ($in); + + if ( $word =~ s/\s/\+/gs ) { # convert intra-word spaces to "+". + $word = "\%22$word\%22"; # And put quotes (%22) around it. + } + + return $word; } -sub random_words { - my ($or_p) = @_; - my $sep = ($or_p ? "%20OR%20" : "%20"); - return (random_word . $sep . - random_word . $sep . - random_word . $sep . - random_word . $sep . - random_word); + +sub random_words($) { + my ($sep) = @_; + return (random_word() . $sep . + random_word() . $sep . + random_word() . $sep . + random_word() . $sep . 
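+  # (The random seek in random_word() above picks each word with probability
+  # roughly proportional to the length of the line before it -- close enough
+  # to uniform for a word list.  A strictly uniform, if slurpier, sketch for
+  # comparison, not what this script does:
+  #
+  #   open (my $in, '<', $wordlist) or return undef;
+  #   my @all = <$in>;
+  #   close ($in);
+  #   my $word = $all[int (rand (scalar @all))];
+  # )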
+ random_word()); } -sub url_quote { +sub url_quote($) { my ($s) = @_; $s =~ s|([^-a-zA-Z0-9.\@/_\r\n])|sprintf("%%%02X", ord($1))|ge; return $s; } -sub url_unquote { +sub url_unquote($) { my ($s) = @_; $s =~ s/[+]/ /g; $s =~ s/%([a-z0-9]{2})/chr(hex($1))/ige; return $s; } +sub html_quote($) { + my ($s) = @_; + $s =~ s/&/&/gi; + $s =~ s//>/gi; + $s =~ s/\"/"/gi; + return $s; +} + +sub html_unquote($) { + my ($s) = @_; + $s =~ s/(&([a-z]+);)/{ $entity_table{$2} || $1; }/gexi; # e.g., ' + $s =~ s/(&\#(\d+);)/{ chr($2) }/gexi; # e.g., ' + return $s; +} + # Loads the given URL (a search on some search engine) and returns: # - the total number of hits the search engine claimed it had; @@ -802,7 +963,7 @@ sub url_unquote { # Note that this list contains all kinds of internal search engine # junk URLs too -- caller must prune them. # -sub pick_from_search_engine { +sub pick_from_search_engine($$$) { my ( $timeout, $search_url, $words ) = @_; $_ = $words; @@ -862,11 +1023,10 @@ sub pick_from_search_engine { 1 while ($search_count =~ s/^(\d+)(\d{3})/$1,$2/); # if ($search_count eq "?" || $search_count eq "0") { -# local *OUT; # my $file = "/tmp/wc.html"; -# open(OUT, ">$file") || error ("writing $file: $!"); -# print OUT $body; -# close OUT; +# open (my $out, '>', $file) || error ("writing $file: $!"); +# print $out $body; +# close $out; # print STDERR blurb() . "###### wrote $file\n"; # } @@ -882,11 +1042,22 @@ sub pick_from_search_engine { s/(]+)>@i; + my ($u) = m@]+)@i; next unless $u; + my ($u2) = m@]+)@i; - if ($u =~ m/^\"([^\"]*)\"/) { $u = $1; } # quoted string - elsif ($u =~ m/^([^\s]*)\s/) { $u = $1; } # or token + if (m/\bm="\{(.*?)\}"/s) { # Bing info is inside JSON crud + my $json = html_unquote($1); + my ($href) = ($json =~ m/\b(?:surl|purl)\"?:\s*"(.*?)"/s); + my ($img) = ($json =~ m/\b(?:imgurl|murl)\"?:\s*"(.*?)"/s); + $u = "$img\t$href" if ($img && $href); + + } elsif ($u2 && $u2 =~ m@://[^/]*\.gstatic\.com/@s) { $u = $u2; + $u =~ s/^\"|\"$//s; + + } elsif ($u =~ m/^\"([^\"]*)\"/) { $u = $1 # quoted string + } elsif ($u =~ m/^([^\s]*)\s/) { $u = $1; # or token + } if ( $rejected_urls{$u} ) { LOG ($verbose_filter, " pre-rejecting candidate: $u"); @@ -910,11 +1081,11 @@ sub pick_from_search_engine { } -sub depoison { +sub depoison(@) { my (@urls) = @_; my @urls2 = (); foreach (@urls) { - my ($h) = m@^http://([^/: \t\r\n]+)@i; + my ($h) = m@^https?://([^/: \t\r\n]+)@i; next unless defined($h); @@ -941,10 +1112,9 @@ sub depoison { # given a list of URLs, picks one at random; loads it; and returns a # random image from it. -# returns the url of the page loaded; the url of the image chosen; -# and a debugging description string. +# returns the url of the page loaded; the url of the image chosen. # -sub pick_image_from_pages { +sub pick_image_from_pages($$$$@) { my ($base, $total_hit_count, $unfiltered_link_count, $timeout, @pages) = @_; $total_hit_count = "?" 
unless defined($total_hit_count); @@ -981,52 +1151,53 @@ sub pick_image_from_pages { } -############################################################################ +############################################################################# +## +## Pick images from random pages returned by the Yahoo Random Link +## +############################################################################# +# +## yahoorand +#my $yahoo_random_link = "http://random.yahoo.com/fast/ryl"; # -# Pick images from random pages returned by the Yahoo Random Link # -############################################################################ - -# yahoorand -my $yahoo_random_link = "http://random.yahoo.com/fast/ryl"; - - # Picks a random page; picks a random image on that page; # returns two URLs: the page containing the image, and the image. # Returns () if nothing found this time. # -sub pick_from_yahoo_random_link { - my ( $timeout ) = @_; - - print STDERR "\n\n" if ($verbose_load); - LOG ($verbose_load, "URL: $yahoo_random_link"); - - $last_search = $yahoo_random_link; # for warnings - - $suppress_audit = 1; - - my ( $base, $body ) = get_document ($yahoo_random_link, undef, $timeout); - if (!$base || !$body) { - $body = undef; - return; - } - - LOG ($verbose_load, "redirected to: $base"); - - my $img = pick_image_from_body ($base, $body); - $body = undef; - - if ($img) { - return ($base, $img); - } else { - return (); - } -} +#sub pick_from_yahoo_random_link($) { +# my ($timeout) = @_; +# +# print STDERR "\n\n" if ($verbose_load); +# LOG ($verbose_load, "URL: $yahoo_random_link"); +# +# $last_search = $yahoo_random_link; # for warnings +# +# $suppress_audit = 1; +# +# my ( $base, $body ) = get_document ($yahoo_random_link, undef, $timeout); +# if (!$base || !$body) { +# $body = undef; +# return; +# } +# +# LOG ($verbose_load, "redirected to: $base"); +# +# my $img = pick_image_from_body ($base, $body); +# $body = undef; +# +# if ($img) { +# return ($base, $img); +# } else { +# return (); +# } +#} ############################################################################ # # Pick images from random pages returned by the Alta Vista Random Link +# Note: this seems to have gotten a *lot* less random lately (2007). # ############################################################################ @@ -1038,8 +1209,8 @@ my $alta_vista_random_link = "http://www.altavista.com/image/randomlink"; # returns two URLs: the page containing the image, and the image. # Returns () if nothing found this time. # -sub pick_from_alta_vista_random_link { - my ( $timeout ) = @_; +sub pick_from_alta_vista_random_link($) { + my ($timeout) = @_; print STDERR "\n\n" if ($verbose_load); LOG ($verbose_load, "URL: $alta_vista_random_link"); @@ -1085,10 +1256,10 @@ my $alta_vista_images_url = "http://www.altavista.com/image/results" . "&q="; # avimages -sub pick_from_alta_vista_images { - my ( $timeout ) = @_; +sub pick_from_alta_vista_images($) { + my ($timeout) = @_; - my $words = random_words(0); + my $words = random_word(); my $page = (int(rand(9)) + 1); my $search_url = $alta_vista_images_url . $words; @@ -1103,12 +1274,13 @@ sub pick_from_alta_vista_images { my @candidates = (); foreach my $u (@subpages) { - # avtext is encoding their URLs now. - next unless ($u =~ m@^/r.*\&r=([^&]+).*@); - $u = url_unquote($1); + # avimages is encoding their URLs now. 
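+    # For illustration (hypothetical result link): a candidate ending in
+    #   ...**http%3a%2f%2fexample.com%2fpix%2f
+    # is stripped down to the part after "**" and url_unquote()d below,
+    # leaving http://example.com/pix/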
+ next unless ($u =~ s/^.*\*\*(http%3a.*$)/$1/gsi); + $u = url_unquote($u); - next unless ($u =~ m@^http://@i); # skip non-HTTP or relative URLs + next unless ($u =~ m@^https?://@i); # skip non-HTTP or relative URLs next if ($u =~ m@[/.]altavista\.com\b@i); # skip altavista builtins + next if ($u =~ m@[/.]yahoo\.com\b@i); # yahoo and av in cahoots? next if ($u =~ m@[/.]doubleclick\.net\b@i); # you cretins next if ($u =~ m@[/.]clicktomarket\.com\b@i); # more cretins @@ -1127,65 +1299,108 @@ sub pick_from_alta_vista_images { ############################################################################ # -# Pick images by feeding random words into Google Image Search. -# By Charles Gales +# Pick images from Aptix security cameras +# Cribbed liberally from google image search code. +# By Jason Sullivan # ############################################################################ +my $aptix_images_url = ("http://www.google.com/search" . + "?q=inurl:%22jpg/image.jpg%3Fr%3D%22"); -my $google_images_url = "http://images.google.com/images" . - "?site=images" . # photos - "&btnG=Search" . # graphics - "&safe=off" . # no screening - "&imgsafe=off" . - "&q="; - -# googleimgs -sub pick_from_google_images { - my ( $timeout ) = @_; +# securitycam +sub pick_from_security_camera($) { + my ($timeout) = @_; - my $words = random_word; # only one word for Google my $page = (int(rand(9)) + 1); - my $num = 20; # 20 images per page - my $search_url = $google_images_url . $words; + my $num = 20; # 20 images per page + my $search_url = $aptix_images_url; if ($page > 1) { $search_url .= "&start=" . $page*$num; # page number - $search_url .= "&num=" . $num; #images per page + $search_url .= "&num=" . $num; #images per page } my ($search_hit_count, @subpages) = - pick_from_search_engine ($timeout, $search_url, $words); + pick_from_search_engine ($timeout, $search_url, ''); my @candidates = (); + my %referers; foreach my $u (@subpages) { - next unless ($u =~ m@imgres\?imgurl@i); # All pics start with this - next if ($u =~ m@[/.]google\.com\b@i); # skip google builtins + next if ($u =~ m@[/.]google\.com\b@i); # skip google builtins (most links) + next unless ($u =~ m@jpg/image.jpg\?r=@i); # All pics contain this - if ($u =~ m@^/imgres\?imgurl=(.*?)\&imgrefurl=(.*?)\&@) { - my $urlf = $2; - LOG ($verbose_filter, " candidate: $urlf"); - push @candidates, $urlf; + LOG ($verbose_filter, " candidate: $u"); + push @candidates, $u; + $referers{$u} = $u; } + + @candidates = depoison (@candidates); + return () if ($#candidates < 0); + my $i = int(rand($#candidates+1)); + my $img = $candidates[$i]; + my $ref = $referers{$img}; + + LOG ($verbose_load, "picked image " . ($i+1) . ": $img (on $ref)"); + return ($ref, $img); +} + + +############################################################################ +# +# Pick images by feeding random words into Google Image Search. +# By Charles Gales +# +############################################################################ + +my $google_images_url = 'https://www.google.com/search' . + '?source=lnms&tbm=isch&tbs=isz:l&q='; + +# googleimgs +sub pick_from_google_images($;$$) { + my ($timeout, $words, $max_page) = @_; + + if (!defined($words)) { + $words = random_word(); # only one word for Google } - return pick_image_from_pages ($search_url, $search_hit_count, $#subpages+1, - $timeout, @candidates); + my $off = int(rand(40)); + my $search_url = $google_images_url . $words . "&start=" . 
$off; + + my ($search_hit_count, @subpages) = + pick_from_search_engine ($timeout, $search_url, $words); + + my @candidates = (); + foreach my $u (@subpages) { + $u = html_unquote($u); + # next if ($u =~ m@^https?://[^.]*\.(google|youtube)\.com/@s); + next unless ($u =~ m@^https?://[^/]*\.gstatic\.com@s); + LOG ($verbose_filter, " candidate: $u"); + push @candidates, $u; + } + + @candidates = depoison (@candidates); + return () if ($#candidates < 0); + my $i = int(rand($#candidates+1)); + my $img = $candidates[$i]; + + LOG ($verbose_load, "picked image " . ($i+1) . ": $img"); + return ($img, $img); } ############################################################################ # -# Pick images by feeding random *numbers* into Google Image Search. -# By jwz, suggested by from Ian O'Donnell. +# Pick images by feeding random numbers into Google Image Search. +# By jwz, suggested by Ian O'Donnell. # ############################################################################ # googlenums -sub pick_from_google_image_numbers { - my ( $timeout ) = @_; +sub pick_from_google_image_numbers($) { + my ($timeout) = @_; my $max = 9999; my $number = int(rand($max)); @@ -1193,33 +1408,111 @@ sub pick_from_google_image_numbers { $number = sprintf("%04d", $number) if (rand() < 0.3); - my $words = "$number"; - my $page = (int(rand(40)) + 1); - my $num = 20; # 20 images per page - my $search_url = $google_images_url . $words; + pick_from_google_images ($timeout, "$number"); +} - if ($page > 1) { - $search_url .= "&start=" . $page*$num; # page number - $search_url .= "&num=" . $num; #images per page + + +############################################################################ +# +# Pick images by feeding random digital camera file names into +# Google Image Search. +# By jwz, inspired by the excellent Random Personal Picture Finder +# at http://www.diddly.com/random/ +# May 2017: Commented out a bunch of formats that have fallen out of favor. +# +############################################################################ + +my @photomakers = ( + # + # Common digital camera file name formats, as described at + # http://www.diddly.com/random/about.html + # +# sub { sprintf ("dcp%05d.jpg", int(rand(4000))); }, # Kodak + sub { sprintf ("dsc%05d.jpg", int(rand(4000))); }, # Nikon + sub { sprintf ("dscn%04d.jpg", int(rand(4000))); }, # Nikon +# sub { sprintf ("mvc-%03d.jpg", int(rand(999))); }, # Sony Mavica +# sub { sprintf ("mvc%05d.jpg", int(rand(9999))); }, # Sony Mavica +# sub { sprintf ("P101%04d.jpg", int(rand(9999))); }, # Olympus w/ date=101 +# sub { sprintf ("P%x%02d%04d.jpg", # Olympus +# int(rand(0xC)), int(rand(30))+1, +# rand(9999)); }, + sub { sprintf ("IMG_%03d.jpg", int(rand(999))); }, # ? +# sub { sprintf ("IMAG%04d.jpg", int(rand(9999))); }, # RCA and Samsung +# sub { my $n = int(rand(9999)); # Canon +# sprintf ("1%02d-%04d.jpg", int($n/100), $n); }, +# sub { my $n = int(rand(9999)); # Canon +# sprintf ("1%02d-%04d_IMG.jpg", +# int($n/100), $n); }, + sub { sprintf ("IMG_%04d.jpg", int(rand(9999))); }, # Canon + sub { sprintf ("dscf%04d.jpg", int(rand(9999))); }, # Fuji Finepix +# sub { sprintf ("pdrm%04d.jpg", int(rand(9999))); }, # Toshiba PDR +# sub { sprintf ("IM%06d.jpg", int(rand(9999))); }, # HP Photosmart +# sub { sprintf ("EX%06d.jpg", int(rand(9999))); }, # HP Photosmart +# sub { my $n = int(rand(3)); # Kodak DC-40,50,120 +# sprintf ("DC%04d%s.jpg", int(rand(9999)), +# $n == 0 ? 'S' : $n == 1 ? 
'M' : 'L'); }, + sub { sprintf ("pict%04d.jpg", int(rand(9999))); }, # Minolta Dimage +# sub { sprintf ("P%07d.jpg", int(rand(9999))); }, # Kodak DC290 +# sub { sprintf ("%02d%02d%04d.jpg", # Casio QV3000, QV4000 +# int(rand(12))+1, int(rand(31))+1, +# int(rand(999))); }, +# sub { sprintf ("%02d%x%02d%04d.jpg", # Casio QV7000 +# int(rand(6)), # year +# int(rand(12))+1, int(rand(31))+1, +# int(rand(999))); }, + sub { sprintf ("IMGP%04d.jpg", int(rand(9999))); }, # Pentax Optio S +# sub { sprintf ("PANA%04d.jpg", int(rand(9999))); }, # Panasonic vid still + sub { sprintf ("HPIM%04d.jpg", int(rand(9999))); }, # HP Photosmart +# sub { sprintf ("PCDV%04d.jpg", int(rand(9999))); }, # ? + ); + + +# googlephotos +sub pick_from_google_image_photos($) { + my ($timeout) = @_; + + my $i = int(rand($#photomakers + 1)); + my $fn = $photomakers[$i]; + my $file = &$fn; + #$file .= "%20filetype:jpg"; + + pick_from_google_images ($timeout, $file); +} + + +############################################################################ +# +# Pick images by feeding random words into Google Image Search. +# By the way: fuck Microsoft. +# +############################################################################ + +my $bing_images_url = "http://www.bing.com/images/async?q="; + + +# bingimgs +sub pick_from_bing_images($;$$) { + my ($timeout, $words, $max_page) = @_; + + if (!defined($words)) { + $words = random_word(); # only one word for Bing } + my $off = int(rand(300)); + my $search_url = $bing_images_url . $words . "&first=" . $off; + my ($search_hit_count, @subpages) = pick_from_search_engine ($timeout, $search_url, $words); my @candidates = (); my %referers; foreach my $u (@subpages) { - next unless ($u =~ m@imgres\?imgurl@i); # All pics start with this - next if ($u =~ m@[/.]google\.com\b@i); # skip google builtins - - if ($u =~ m@^/imgres\?imgurl=(.*?)\&imgrefurl=(.*?)\&@) { - my $ref = $2; - my $img = "http://$1"; - - LOG ($verbose_filter, " candidate: $ref"); - push @candidates, $img; - $referers{$img} = $ref; - } + my ($img, $ref) = ($u =~ m/^(.*?)\t(.*)$/s); + next unless $img; + LOG ($verbose_filter, " candidate: $ref"); + push @candidates, $img; + $referers{$img} = $ref; } @candidates = depoison (@candidates); @@ -1233,10 +1526,49 @@ sub pick_from_google_image_numbers { } + ############################################################################ # -# Pick images by feeding random words into Alta Vista Text Search +# Pick images by feeding random numbers into Bing Image Search. +# +############################################################################ + +# bingnums +sub pick_from_bing_image_numbers($) { + my ($timeout) = @_; + + my $max = 9999; + my $number = int(rand($max)); + + $number = sprintf("%04d", $number) + if (rand() < 0.3); + + pick_from_bing_images ($timeout, "$number"); +} + + +############################################################################ +# +# Pick images by feeding random numbers into Bing Image Search. 
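+# (This section reuses the @photomakers generators above: a random entry is
+# called to produce a default camera file name -- e.g. "dscn0412.jpg" or
+# "IMG_2317.jpg", to pick two arbitrary examples of those patterns -- and
+# that string is handed to pick_from_bing_images() as the query.)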
+# +############################################################################ + +# bingphotos +sub pick_from_bing_image_photos($) { + my ($timeout) = @_; + + my $i = int(rand($#photomakers + 1)); + my $fn = $photomakers[$i]; + my $file = &$fn; + + pick_from_bing_images ($timeout, $file); +} + + +############################################################################ +# +# Pick images by feeding random words into Alta Vista Text Search # ############################################################################ @@ -1250,10 +1582,10 @@ my $alta_vista_url = "http://www.altavista.com/web/results" . "&aqo="; # avtext -sub pick_from_alta_vista_text { - my ( $timeout ) = @_; +sub pick_from_alta_vista_text($) { + my ($timeout) = @_; - my $words = random_words(0); + my $words = random_words('%20'); my $page = (int(rand(9)) + 1); my $search_url = $alta_vista_url . $words; @@ -1273,8 +1605,12 @@ sub pick_from_alta_vista_text { # onMouseOver to make it look like they're not! Well, it makes it # easier for us to identify search results... # - next unless ($u =~ m@^/r.*\&r=([^&]+).*@); - $u = url_unquote($1); + next unless ($u =~ s/^.*\*\*(http%3a.*$)/$1/gsi); + $u = url_unquote($u); + + next unless ($u =~ m@^https?://@i); # skip non-HTTP or relative URLs + next if ($u =~ m@[/.]altavista\.com\b@i); # skip altavista builtins + next if ($u =~ m@[/.]yahoo\.com\b@i); # yahoo and av in cahoots? LOG ($verbose_filter, " candidate: $u"); push @candidates, $u; @@ -1304,8 +1640,10 @@ my $hotbot_search_url =("http://hotbot.lycos.com/default.asp" . "&cobrand=undefined" . "&query="); -sub pick_from_hotbot_text { - my ( $timeout ) = @_; +sub pick_from_hotbot_text($) { + my ($timeout) = @_; + + $last_search = $hotbot_search_url; # for warnings # lycos seems to always give us back dictionaries and word lists if # we search for more than one word... @@ -1322,8 +1660,14 @@ sub pick_from_hotbot_text { foreach my $u (@subpages) { # Hotbot plays redirection games too - next unless ($u =~ m@/director.asp\?.*\btarget=([^&]+)@); - $u = url_decode($1); + # (not any more?) +# next unless ($u =~ m@/director.asp\?.*\btarget=([^&]+)@); +# $u = url_decode($1); + + next unless ($u =~ m@^https?://@i); # skip non-HTTP or relative URLs + next if ($u =~ m@[/.]hotbot\.com\b@i); # skip hotbot builtins + next if ($u =~ m@[/.]lycos\.com\b@i); # skip hotbot builtins + next if ($u =~ m@[/.]inktomi\.com\b@i); # skip hotbot builtins LOG ($verbose_filter, " candidate: $u"); push @candidates, $u; @@ -1347,8 +1691,10 @@ my $lycos_search_url = "http://search.lycos.com/default.asp" . "&tab=web" . "&query="; -sub pick_from_lycos_text { - my ( $timeout ) = @_; +sub pick_from_lycos_text($) { + my ($timeout) = @_; + + $last_search = $lycos_search_url; # for warnings # lycos seems to always give us back dictionaries and word lists if # we search for more than one word... @@ -1365,12 +1711,20 @@ sub pick_from_lycos_text { foreach my $u (@subpages) { # Lycos plays redirection games. - next unless ($u =~ m@^http://click.lycos.com/director.asp - .* - \btarget=([^&]+) - .* - @x); - $u = url_decode($1); + # (not any more?) 
+# next unless ($u =~ m@^https?://click.lycos.com/director.asp +# .* +# \btarget=([^&]+) +# .* +# @x); +# $u = url_decode($1); + + next unless ($u =~ m@^https?://@i); # skip non-HTTP or relative URLs + next if ($u =~ m@[/.]hotbot\.com\b@i); # skip lycos builtins + next if ($u =~ m@[/.]lycos\.com\b@i); # skip lycos builtins + next if ($u =~ m@[/.]terralycos\.com\b@i); # skip lycos builtins + next if ($u =~ m@[/.]inktomi\.com\b@i); # skip lycos builtins + LOG ($verbose_filter, " candidate: $u"); push @candidates, $u; @@ -1388,21 +1742,17 @@ sub pick_from_lycos_text { # ############################################################################ -my $yahoo_news_url = "http://search.news.yahoo.com/search/news" . - "?a=1" . - "&c=news_photos" . - "&s=-%24s%2C-date" . - "&n=100" . - "&o=o" . - "&2=" . - "&3=" . +my $yahoo_news_url = "http://news.search.yahoo.com/search/news" . + "?c=news_photos" . "&p="; # yahoonews -sub pick_from_yahoo_news_text { - my ( $timeout ) = @_; +sub pick_from_yahoo_news_text($) { + my ($timeout) = @_; - my $words = random_words(0); + $last_search = $yahoo_news_url; # for warnings + + my $words = random_word(); my $search_url = $yahoo_news_url . $words; my ($search_hit_count, @subpages) = @@ -1410,9 +1760,14 @@ sub pick_from_yahoo_news_text { my @candidates = (); foreach my $u (@subpages) { + + # de-redirectize the URLs + $u =~ s@^https?://rds\.yahoo\.com/.*-http%3A@http:@s; + # only accept URLs on Yahoo's news site - next unless ($u =~ m@^http://dailynews\.yahoo\.com/@i || - $u =~ m@^http://story\.news\.yahoo\.com/@i); + next unless ($u =~ m@^https?://dailynews\.yahoo\.com/@i || + $u =~ m@^https?://story\.news\.yahoo\.com/@i); + next unless ($u =~ m@&u=/@); LOG ($verbose_filter, " candidate: $u"); push @candidates, $u; @@ -1423,6 +1778,640 @@ sub pick_from_yahoo_news_text { } + +############################################################################ +# +# Pick images from LiveJournal's list of recently-posted images. +# +############################################################################ + +my $livejournal_img_url = "http://www.livejournal.com/stats/latest-img.bml"; + +# With most of our image sources, we get a random page and then select +# from the images on it. However, in the case of LiveJournal, the page +# of images tends to update slowly; so we'll remember the last N entries +# on it and randomly select from those, to get a wider variety each time. + +my $lj_cache_size = 1000; +my @lj_cache = (); # fifo, for ordering by age +my %lj_cache = (); # hash, for detecting dups + +# livejournal +sub pick_from_livejournal_images($) { + my ($timeout) = @_; + + $last_search = $livejournal_img_url; # for warnings + + my ( $base, $body ) = get_document ($livejournal_img_url, undef, $timeout); + + # Often the document comes back empty. If so, just use the cache. + # return () unless $body; + $body = '' unless defined($body); + + $body =~ s/\n/ /gs; + $body =~ s/(= $lj_cache_size) { + my $pairP = shift @lj_cache; + my $img = $pairP->[0]; + delete $lj_cache{$img}; + } + + LOG ($verbose_load, "picked image " .($i+1) . "/$n: $img"); + + return ($page, $img); +} + + +############################################################################ +# +# Pick images from ircimages.com (images that have been in the /topic of +# various IRC channels.) 
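+# (The cache used by pick_from_livejournal_images() above -- and by the
+# Twitpic and Flickr pickers below -- is a FIFO array for age ordering plus
+# a parallel hash for cheap duplicate checks.  The essential moves, as a
+# sketch:
+#
+#   unless ($cache{$item}) {                   # admit unseen entries
+#     push @cache, $item;
+#     $cache{$item} = 1;
+#   }
+#   while (@cache > $cache_size) {             # evict the oldest
+#     my $old = shift @cache;
+#     delete $cache{$old};
+#   }
+#   my $pick = $cache[int (rand (scalar @cache))];
+# )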
+# +############################################################################ + +my $ircimages_url = "http://ircimages.com/"; + +# ircimages +sub pick_from_ircimages($) { + my ($timeout) = @_; + + $last_search = $ircimages_url; # for warnings + + my $n = int(rand(2900)); + my $search_url = $ircimages_url . "page-$n"; + + my ( $base, $body ) = get_document ($search_url, undef, $timeout); + return () unless $body; + + my @candidates = (); + + $body =~ s/\n/ /gs; + $body =~ s/(]+)>@i; + next unless $u; + + if ($u =~ m/^\"([^\"]*)\"/) { $u = $1; } # quoted string + elsif ($u =~ m/^([^\s]*)\s/) { $u = $1; } # or token + + next unless ($u =~ m/^https?:/i); + next if ($u =~ m@^https?://(searchirc\.com\|ircimages\.com)@i); + next unless ($u =~ m@[.](gif|jpg|jpeg|pjpg|pjpeg|png)$@i); + + LOG ($verbose_http, " HREF: $u"); + push @candidates, $u; + } + + LOG ($verbose_filter, "" . $#candidates+1 . " links on $search_url"); + + return () if ($#candidates == -1); + + my $i = int(rand($#candidates+1)); + my $img = $candidates[$i]; + + LOG ($verbose_load, "picked image " .($i+1) . "/" . ($#candidates+1) . + ": $img"); + + $search_url = $img; # hmm... + return ($search_url, $img); +} + + +############################################################################ +# +# Pick images from Twitpic's list of recently-posted images. +# +############################################################################ + +my $twitpic_img_url = "http://twitpic.com/public_timeline/feed.rss"; + +# With most of our image sources, we get a random page and then select +# from the images on it. However, in the case of Twitpic, the page +# of images tends to update slowly; so we'll remember the last N entries +# on it and randomly select from those, to get a wider variety each time. + +my $twitpic_cache_size = 1000; +my @twitpic_cache = (); # fifo, for ordering by age +my %twitpic_cache = (); # hash, for detecting dups + +# twitpic +sub pick_from_twitpic_images($) { + my ($timeout) = @_; + + $last_search = $twitpic_img_url; # for warnings + + my ( $base, $body ) = get_document ($twitpic_img_url, undef, $timeout); + + # Update the cache. + + if ($body) { + $body =~ s/\n/ /gs; + $body =~ s/(([^<>]*)@si); + my $page = html_unquote ($1); + + $page =~ s@/$@@s; + $page .= '/full'; + + next if ($twitpic_cache{$page}); # already have it + + LOG ($verbose_filter, " candidate: $page"); + push @twitpic_cache, $page; + $twitpic_cache{$page} = $page; + } + } + + # Pull from the cache. + + return () if ($#twitpic_cache == -1); + + my $n = $#twitpic_cache+1; + my $i = int(rand($n)); + my $page = $twitpic_cache[$i]; + + # delete this one from @twitpic_cache and from %twitpic_cache. + # + @twitpic_cache = ( @twitpic_cache[0 .. $i-1], + @twitpic_cache[$i+1 .. $#twitpic_cache] ); + delete $twitpic_cache{$page}; + + # Keep the size of the cache under the limit by nuking older entries + # + while ($#twitpic_cache >= $twitpic_cache_size) { + my $page = shift @twitpic_cache; + delete $twitpic_cache{$page}; + } + + ( $base, $body ) = get_document ($page, undef, $timeout); + my $img = undef; + $body = '' unless defined($body); + + foreach (split (/= $twitter_cache_size) { + my $page = shift @twitter_cache; + delete $twitter_cache{$page}; + } + + LOG ($verbose_load, "picked page $url"); + + $suppress_audit = 1; + + return ($page, $url); +} + + +############################################################################ +# +# Pick images from Flickr's page of recently-posted photos. 
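+# (The flickr_random picker further below takes a different route: it builds
+# a tag-search RSS feed URL from five random dictionary words, e.g. -- with
+# arbitrary words --
+#   http://www.flickr.com/services/feeds/photos_public.gne?format=rss_200_enc&tagmode=any&tags=amorphous,abode,antioch,bucolic,lozenge
+# and then hands the feed to pick_image_from_rss().)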
+# +############################################################################ + +my $flickr_img_url = "http://www.flickr.com/explore/"; + +# Like LiveJournal, the Flickr page of images tends to update slowly, +# so remember the last N entries on it and randomly select from those. + +# I know that Flickr has an API (http://www.flickr.com/services/api/) +# but it was easy enough to scrape the HTML, so I didn't bother exploring. + +my $flickr_cache_size = 1000; +my @flickr_cache = (); # fifo, for ordering by age +my %flickr_cache = (); # hash, for detecting dups + + +# flickr_recent +sub pick_from_flickr_recent($) { + my ($timeout) = @_; + + my $start = 16 * int(rand(100)); + + $last_search = $flickr_img_url; # for warnings + $last_search .= "?start=$start" if ($start > 0); + + my ( $base, $body ) = get_document ($last_search, undef, $timeout); + + # If the document comes back empty. just use the cache. + # return () unless $body; + $body = '' unless defined($body); + + my $count = 0; + my $count2 = 0; + + if ($body =~ m@{ *"_data": \[ ( .*? \} ) \]@six) { + $body = $1; + } else { + LOG ($verbose_load, "flickr unparsable: $last_search"); + return (); + } + + $body =~ s/[\r\n]/ /gs; + $body =~ s/(\},) *(\{)/$1\n$2/gs; # "_flickrModelRegistry" + + foreach my $chunk (split (/\n/, $body)) { + my ($img) = ($chunk =~ m@"displayUrl": *"(.*?)"@six); + next unless defined ($img); + $img =~ s/\\//gs; + $img = "//" unless ($img =~ m@^/@s); + $img = "http:$img" unless ($img =~ m/^http/s); + + my ($user) = ($chunk =~ m/"pathAlias": *"(.*?)"/si); + next unless defined ($user); + + my ($id) = ($img =~ m@/\d+/(\d+)_([\da-f]+)_@si); + my ($page) = "https://www.flickr.com/photos/$user/$id/"; + + # $img =~ s/_[a-z](\.[a-z\d]+)$/$1/si; # take off "thumb" suffix + + $count++; + next if ($flickr_cache{$img}); # already have it + + my @pair = ($img, $page, $start); + LOG ($verbose_filter, " candidate: $img"); + push @flickr_cache, \@pair; + $flickr_cache{$img} = \@pair; + $count2++; + } + + return () if ($#flickr_cache == -1); + + my $n = $#flickr_cache+1; + my $i = int(rand($n)); + my ($img, $page) = @{$flickr_cache[$i]}; + + # delete this one from @flickr_cache and from %flickr_cache. + # + @flickr_cache = ( @flickr_cache[0 .. $i-1], + @flickr_cache[$i+1 .. $#flickr_cache] ); + delete $flickr_cache{$img}; + + # Keep the size of the cache under the limit by nuking older entries + # + while ($#flickr_cache >= $flickr_cache_size) { + my $pairP = shift @flickr_cache; + my $img = $pairP->[0]; + delete $flickr_cache{$img}; + } + + LOG ($verbose_load, "picked image " .($i+1) . "/$n: $img"); + + return ($page, $img); +} + + +############################################################################ +# +# Pick images from a random RSS feed on Flickr. +# +############################################################################ + +my $flickr_rss_base = ("http://www.flickr.com/services/feeds/" . + "photos_public.gne" . + "?format=rss_200_enc&tagmode=any&tags="); + +# Picks a random RSS feed; picks a random image from that feed; +# returns 2 URLs: the page containing the image, and the image. +# Mostly by Joe Mcmahon +# +# flickr_random +sub pick_from_flickr_random($) { + my $timeout = shift; + + my $words = random_words(','); + my $rss = $flickr_rss_base . 
$words; + $last_search = $rss; + + $_ = $words; + s/,/ /g; + + print STDERR "\n\n" if ($verbose_load); + LOG ($verbose_load, "words: $_"); + LOG ($verbose_load, "URL: $last_search"); + + $suppress_audit = 1; + + my ( $base, $body ) = get_document ($last_search, undef, $timeout); + if (!$base || !$body) { + $body = undef; + return; + } + + my $img; + ($base, $img) = pick_image_from_rss ($base, $body); + $body = undef; + return () unless defined ($img); + + LOG ($verbose_load, "redirected to: $base"); + return ($base, $img); +} + + +############################################################################ +# +# Pick random images from Instagram. +# +############################################################################ + +my $instagram_url_base = "https://api.instagram.com/v1/media/popular"; + +# instagram_random +sub pick_from_instagram($) { + my $timeout = shift; + + # Liberated access tokens. + # jsdo.it search for: instagram client_id + # Google search for: instagram "&client_id=" site:jsfiddle.net + my @tokens = (#'b59fbe4563944b6c88cced13495c0f49', # gramfeed.com + #'fa26679250df49c48a33fbcf30aae989', # instac.at + #'d9494686198d4dfeb954979a3e270e5e', # iconosquare.com + #'793ef48bb18e4197b61afce2d799b81c', # jsdo.it + #'67b8a3e0073449bba70600d0fc68e6cb', # jsdo.it + #'26a098e0df4d4b9ea8b4ce6c505b7742', # jsdo.it + #'2437cbcd906a4c10940f990d283d3cd5', # jsdo.it + #'191c7d7d5312464cbd92134f36ffdab5', # jsdo.it + #'acfec809437b4340b2c38f66503af774', # jsdo.it + #'e9f77604a3a24beba949c12d18130988', # jsdo.it + #'2cd7bcf68ae346529770073d311575b3', # jsdo.it + #'830c600fe8d742e2ab3f3b94f9bb22b7', # jsdo.it + #'55865a0397ad41e5997dd95ef4df8da1', # jsdo.it + #'192a5742f3644ea8bed1d25e439286a8', # jsdo.it + #'38ed1477e7a44595861b8842cdb8ba23', # jsdo.it + #'e52f79f645f54488ad0cc47f6f55ade6', # jsfiddle.net + ); + + my $tok = $tokens[int(rand($#tokens+1))]; + $last_search = $instagram_url_base . "?client_id=" . $tok; + + print STDERR "\n\n" if ($verbose_load); + LOG ($verbose_load, "URL: $last_search"); + + my ( $base, $body ) = get_document ($last_search, undef, $timeout); + if (!$base || !$body) { + $body = undef; + return; + } + + $body =~ s/("link")/\001$1/gs; + my @chunks = split(/\001/, $body); + shift @chunks; + my @urls = (); + foreach (@chunks) { + s/\\//gs; + my ($url) = m/"link":\s*"(.*?)"/s; + my ($img) = m/"standard_resolution":\{"url":\s*"(.*?)"/s; + ($img) = m/"url":\s*"(.*?)"/s unless $url; + next unless ($url && $img); + push @urls, [ $url, $img ]; + } + + if ($#urls < 0) { + LOG ($verbose_load, "no images on $last_search"); + return (); + } + + my $i = int(rand($#urls+1)); + my ($url, $img) = @{$urls[$i]}; + + LOG ($verbose_load, "picked image " .($i+1) . "/" . ($#urls+1) . ": $url"); + return ($url, $img); +} + + +############################################################################ +# +# Pick images from Imgur. +# +############################################################################ + +my $imgur_base = 'http://imgur.com/search?qs=thumb&q_any='; + +sub pick_from_imgur($) { + my $timeout = shift; + + my $words = random_words('%20'); + $last_search = $imgur_base . 
$words; + + $_ = $words; + s/%20/ /g; + + print STDERR "\n\n" if ($verbose_load); + LOG ($verbose_load, "words: $_"); + LOG ($verbose_load, "URL: $last_search"); + + $suppress_audit = 1; + + my ( $base, $body ) = get_document ($last_search, undef, $timeout); + if (!$base || !$body) { + $body = undef; + return; + } + + my @imgs = ($body =~ m@\bHREF=[\"\']([^\'\"<>]*/gallery/[^\'\"<>]+)@gsi); + return () unless @imgs; + + my $n = @imgs; + my $i = int(rand($n)); + my $page = $imgs[$i]; + $page =~ s/[?&].*$//s; + $page = "http://imgur.com$page" if ($page =~ m@^/@s); + + my ($id) = ($page =~ m@([^/?&]+)$@s); + my $img = "http://i.imgur.com/$id.jpg"; + + LOG ($verbose_load, "picked image " .($i+1) . "/$n: $img"); + + return ($page, $img); +} + + +############################################################################ +# +# Pick images from Tumblr. +# +############################################################################ + +my $tumblr_base = 'https://www.tumblr.com/search/'; + +sub pick_from_tumblr($) { + my $timeout = shift; + + # Tumblr doesn't have an "or" search, which means our vocabulary is + # a bit too extensive to work well... + + my $words = random_word(); + $last_search = $tumblr_base . $words; + + print STDERR "\n\n" if ($verbose_load); + LOG ($verbose_load, "words: $words"); + LOG ($verbose_load, "URL: $last_search"); + + $suppress_audit = 1; + + my ( $base, $body ) = get_document ($last_search, undef, $timeout); + if (!$base || !$body) { + $body = undef; + return; + } + + my @imgs0 = ($body =~ m@]*)>@gsi); + return () unless @imgs0; + my @imgs; + foreach my $img (@imgs0) { + my ($src) = ($img =~ m@\bsrc=[\"\'](.*?)[\"\']@si); + my ($href) = ($img =~ m@\bdata-pin-url=[\"\'](.*?)[\"\']@si); + next unless ($src && $href); + next if ($src =~ m/^data:/s); + foreach ($src, $href) { $_ = "http://www.tumblr.com$_" if (m@^/@s); } + push @imgs, [$href, $src]; + } + return () unless @imgs; + + my $n = @imgs; + my $i = int(rand($n)); + my $page = $imgs[$i]->[0]; + my $img = $imgs[$i]->[1]; + + LOG ($verbose_load, "picked image " .($i+1) . 
"/$n: $img"); + + return ($page, $img); +} ############################################################################ @@ -1436,8 +2425,8 @@ sub pick_from_yahoo_news_text { ############################################################################ # driftnet -sub pick_from_driftnet { - my ( $timeout ) = @_; +sub pick_from_driftnet($) { + my ($timeout) = @_; my $id = $driftnet_magic; my $dir = $driftnet_dir; @@ -1453,43 +2442,41 @@ sub pick_from_driftnet { $last_search = $id; while ($now = time, $now < $start + $timeout) { - local *DIR; - opendir (DIR, $dir) || error ("$dir: $!"); - while (my $file = readdir(DIR)) { + opendir (my $dir, $dir) || error ("$dir: $!"); + while (my $file = readdir($dir)) { next if ($file =~ m/^\./); $file = "$dir/$file"; - closedir DIR; + closedir ($dir); LOG ($verbose_load, "picked file $file ($id)"); return ($id, $file); } - closedir DIR; + closedir ($dir); } LOG (($verbose_net || $verbose_load), "timed out for $id"); return (); } -sub get_driftnet_file { +sub get_driftnet_file($) { my ($file) = @_; error ("\$driftnet_dir unset?") unless ($driftnet_dir); my $id = $driftnet_magic; - my $re = qr/$driftnet_dir/; error ("$id: $file not in $driftnet_dir?") - unless ($file =~ m@^$re@o); + unless ($file =~ m@^\Q$driftnet_dir@o); - local *IN; - open (IN, $file) || error ("$id: $file: $!"); + open (my $in, '<', $file) || error ("$id: $file: $!"); my $body = ''; - while () { $body .= $_; } - close IN || error ("$id: $file: $!"); + local $/ = undef; # read entire file + $body = <$in>; + close ($in) || error ("$id: $file: $!"); unlink ($file) || error ("$id: $file: rm: $!"); return ($id, $body); } -sub spawn_driftnet { +sub spawn_driftnet($) { my ($cmd) = @_; # make a directory to use. @@ -1524,13 +2511,53 @@ sub spawn_driftnet { nontrapping_system ($cmd) || error ("exec: $!"); } - # wait a bit, then make sure the process actually started up. - # - sleep (1); - error ("pid $pid failed to start \"$cmd\"") - unless (1 == kill (0, $pid)); + # wait a bit, then make sure the process actually started up. + # + sleep (1); + error ("pid $pid failed to start \"$cmd\"") + unless (1 == kill (0, $pid)); +} + +# local-directory +sub pick_from_local_dir($) { + my ($timeout) = @_; + + my $id = $local_magic; + $last_search = $id; + + my $dir = $local_dir; + error ("\$local_dir unset?") unless ($dir); + $dir =~ s@/+$@@; + + error ("$dir unreadable") unless (-d "$dir/."); + + my $v = ($verbose_exec ? "-v" : ""); + my $pick = `xscreensaver-getimage-file $v "$dir"`; + $pick =~ s/\s+$//s; + $pick = "$dir/$pick" unless ($pick =~ m@^/@s); # relative path + + LOG ($verbose_load, "picked file $pick ($id)"); + return ($id, $pick); +} + + +sub get_local_file($) { + my ($file) = @_; + + error ("\$local_dir unset?") unless ($local_dir); + + my $id = $local_magic; + error ("$id: $file not in $local_dir?") + unless ($file =~ m@^\Q$local_dir@o); + + open (my $in, '<:raw', $file) || error ("$id: $file: $!"); + local $/ = undef; # read entire file + my $body = <$in>; + close ($in) || error ("$id: $file: $!"); + return ($id, $body); } + ############################################################################ # @@ -1544,8 +2571,8 @@ sub spawn_driftnet { # Returns () if nothing found this time. 
# -sub pick_image { - my ( $timeout ) = @_; +sub pick_image(;$) { + my ($timeout) = @_; $current_state = "select"; $load_method = "none"; @@ -1583,28 +2610,39 @@ sub pick_image { # ############################################################################ -sub timestr { +sub timestr() { return strftime ("%H:%M:%S: ", localtime); } -sub blurb { +sub blurb() { return "$progname: " . timestr() . "$current_state: "; } -sub error { +sub error($) { my ($err) = @_; print STDERR blurb() . "$err\n"; exit 1; } +sub stacktrace() { + my $i = 1; + print STDERR "$progname: stack trace:\n"; + while (1) { + my ($package, $filename, $line, $subroutine) = caller($i++); + last unless defined($package); + $filename =~ s@^.*/@@; + print STDERR " $filename#$line, $subroutine\n"; + } +} + my $lastlog = ""; -sub clearlog { +sub clearlog() { $lastlog = ""; } -sub showlog { +sub showlog() { my $head = "$progname: DEBUG: "; foreach (split (/\n/, $lastlog)) { print STDERR "$head$_\n"; @@ -1612,7 +2650,7 @@ sub showlog { $lastlog = ""; } -sub LOG { +sub LOG($$) { my ($print, $msg) = @_; my $blurb = timestr() . "$current_state: "; $lastlog .= "$blurb$msg\n"; @@ -1625,7 +2663,7 @@ my %stats_successes; my %stats_elapsed; my $last_state = undef; -sub record_attempt { +sub record_attempt($) { my ($name) = @_; if ($last_state) { @@ -1641,7 +2679,7 @@ sub record_attempt { $suppress_audit = 0; } -sub record_success { +sub record_success($$$) { my ($name, $url, $base) = @_; if (defined($stats_successes{$name})) { $stats_successes{$name}++; @@ -1659,7 +2697,7 @@ sub record_success { } -sub record_failure { +sub record_failure($) { my ($name) = @_; return if $image_succeeded; @@ -1691,7 +2729,7 @@ sub record_failure { -sub stats_of { +sub stats_of($) { my ($name) = @_; my $i = $stats_successes{$name}; my $j = $stats_attempts{$name}; @@ -1703,7 +2741,7 @@ sub stats_of { my $current_start_time = 0; -sub start_timer { +sub start_timer($) { my ($name) = @_; $current_start_time = time; @@ -1717,14 +2755,14 @@ sub start_timer { } } -sub stop_timer { +sub stop_timer($$) { my ($name, $success) = @_; $stats_elapsed{$name} += time - $current_start_time; } my $last_report_time = 0; -sub report_performance { +sub report_performance() { return unless $verbose_warnings; @@ -1745,8 +2783,9 @@ sub report_performance { my $suc = $stats_successes{$name} || 0; my $pct = int($suc * 100 / $try); my $secs = $stats_elapsed{$name}; - my $secs_link = int($secs / $try); - print STDERR sprintf ("$blurb %-12s %4s (%d/%d);\t %2d secs/link\n", + my $secs_link = $secs / $try; + print STDERR sprintf ("$blurb %-14s %4s (%d/%d);" . + " \t %.1f secs/link\n", "$name:", "$pct%", $suc, $try, $secs_link); } } @@ -1758,17 +2797,17 @@ my $max_recent_sites = 20; my @recent_images = (); my @recent_sites = (); -sub save_recent_url { +sub save_recent_url($$) { my ($url, $base) = @_; return unless ($verbose_warnings); $_ = $url; - my ($site) = m@^http://([^ \t\n\r/:]+)@; + my ($site) = m@^https?://([^ \t\n\r/:]+)@; return unless defined ($site); - if ($base eq $driftnet_magic) { - $site = $driftnet_magic; + if ($base eq $driftnet_magic || $base eq $local_magic) { + $site = $base; @recent_images = (); } @@ -1820,7 +2859,7 @@ sub save_recent_url { # Does %-decoding. # -sub url_decode { +sub url_decode($) { ($_) = @_; tr/+/ /; s/%([a-fA-F0-9][a-fA-F0-9])/pack("C", hex($1))/eg; @@ -1830,19 +2869,20 @@ sub url_decode { # Given the raw body of a GIF document, returns the dimensions of the image. 
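#
# The GIF case below only needs the logical screen descriptor: the four
# bytes immediately after the 6-byte "GIF87a"/"GIF89a" signature, stored
# little-endian.  A tiny worked example of that byte arithmetic (the sample
# bytes are made up for illustration):
#
#   my $header = "GIF89a" . pack ("C4", 0x80, 0x02, 0xE0, 0x01);
#   my ($a, $b, $c, $d) = unpack ("C4", substr ($header, 6, 4));
#   my $w = ($b << 8) | $a;     # 0x02 << 8 | 0x80  ==  640
#   my $h = ($d << 8) | $c;     # 0x01 << 8 | 0xE0  ==  480
#   # yields 640 x 480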
# -sub gif_size { +sub gif_size($) { my ($body) = @_; my $type = substr($body, 0, 6); my $s; return () unless ($type =~ /GIF8[7,9]a/); $s = substr ($body, 6, 10); my ($a,$b,$c,$d) = unpack ("C"x4, $s); + return () unless defined ($d); return (($b<<8|$a), ($d<<8|$c)); } # Given the raw body of a JPEG document, returns the dimensions of the image. # -sub jpeg_size { +sub jpeg_size($) { my ($body) = @_; my $i = 0; my $L = length($body); @@ -1891,24 +2931,51 @@ sub jpeg_size { return (); } -# Given the raw body of a GIF or JPEG document, returns the dimensions of -# the image. +# Given the raw body of a PNG document, returns the dimensions of the image. +# +sub png_size($) { + my ($body) = @_; + return () unless ($body =~ m/^\211PNG\r/); + my ($bits) = ($body =~ m/^.{12}(.{12})/s); + return () unless defined ($bits); + return () unless ($bits =~ /^IHDR/); + my ($ign, $w, $h) = unpack("a4N2", $bits); + return ($w, $h); +} + + +# Given the raw body of a PNM document, returns the dimensions of the image. +# +sub pnm_size($) { + my ($body) = @_; + return () unless ($body =~ m/^P[1-6]\r?\n(\d+) +(\d+)\r?\n/s); + return ($1, $2); +} + + +# Given the raw body of a GIF, JPEG, or PNG document, returns the dimensions +# of the image. # -sub image_size { +sub image_size($) { my ($body) = @_; my ($w, $h) = gif_size ($body); if ($w && $h) { return ($w, $h); } - return jpeg_size ($body); + ($w, $h) = jpeg_size ($body); + if ($w && $h) { return ($w, $h); } + return png_size ($body); + if ($w && $h) { return ($w, $h); } + return pnm_size ($body); } # returns the full path of the named program, or undef. # -sub which { +sub which($) { my ($prog) = @_; foreach (split (/:/, $ENV{PATH})) { - if (-x "$_/$prog") { - return $prog; + my $path = "$_/$prog"; + if (-x $path) { + return $path; } } return undef; @@ -1916,7 +2983,7 @@ sub which { # Like rand(), but chooses numbers with a bell curve distribution. -sub bellrand { +sub bellrand(;$) { ($_) = @_; $_ = 1.0 unless defined($_); $_ /= 3.0; @@ -1924,31 +2991,34 @@ sub bellrand { } -sub signal_cleanup { - my ($sig) = @_; - print STDERR blurb() . (defined($sig) - ? "caught signal $sig." - : "exiting.") - . "\n" - if ($verbose_exec); - +sub exit_cleanup() { x_cleanup(); - + print STDERR "$progname: exiting\n" if ($verbose_warnings); if (@pids_to_kill) { print STDERR blurb() . "killing: " . join(' ', @pids_to_kill) . "\n"; kill ('TERM', @pids_to_kill); } +} +sub signal_cleanup($) { + my ($sig) = @_; + print STDERR blurb() . (defined($sig) + ? "caught signal $sig." + : "exiting.") + . "\n" + if ($verbose_exec || $verbose_warnings); exit 1; } + + ############################################################################## # # Generating a list of urls only # ############################################################################## -sub url_only_output { +sub url_only_output() { do { my ($base, $img) = pick_image; if ($img) { @@ -1961,32 +3031,47 @@ sub url_only_output { ############################################################################## # -# Running as an xscreensaver module +# Running as an xscreensaver module, or as a web page imagemap # ############################################################################## -my $image_ppm = ($ENV{TMPDIR} ? $ENV{TMPDIR} : "/tmp") . "/webcollage." . $$; -my $image_tmp1 = $image_ppm . "-1"; -my $image_tmp2 = $image_ppm . "-2"; +my ($image_png, $image_tmp1, $image_tmp2); +{ + my $seed = rand(0xFFFFFFFF); + $image_png = sprintf ("%s/webcollage-%08x", + ($ENV{TMPDIR} ? 
$ENV{TMPDIR} : "/tmp"), + $seed); + $image_tmp1 = $image_png . '-1.png'; + $image_tmp2 = $image_png . '-2.png'; + $image_png .= '.png'; +} + my $filter_cmd = undef; my $post_filter_cmd = undef; my $background = undef; +my @imagemap_areas = (); +my $imagemap_html_tmp = undef; +my $imagemap_jpg_tmp = undef; + + my $img_width; # size of the image being generated. my $img_height; my $delay = 2; -sub x_cleanup { - unlink $image_ppm, $image_tmp1, $image_tmp2; +sub x_cleanup() { + unlink $image_png, $image_tmp1, $image_tmp2; + unlink $imagemap_html_tmp, $imagemap_jpg_tmp + if (defined ($imagemap_html_tmp)); } # Like system, but prints status about exit codes, and kills this process # with whatever signal killed the sub-process, if any. # -sub nontrapping_system { +sub nontrapping_system(@) { $! = 0; $_ = join(" ", @_); @@ -2015,99 +3100,33 @@ sub nontrapping_system { } -# Given the URL of a GIF or JPEG image, and the body of that image, writes a -# PPM to the given output file. Returns the width/height of the image if -# successful. +# Creates a solid-colored PNG. # -sub image_to_pnm { - my ($url, $body, $output) = @_; - my ($cmd, $cmd2, $w, $h); +sub pngmake($$$$) { + my ($outfile, $bgcolor, $w, $h) = @_; - if ((@_ = gif_size ($body))) { - ($w, $h) = @_; - $cmd = "giftopnm"; - } elsif ((@_ = jpeg_size ($body))) { - ($w, $h) = @_; - $cmd = "djpeg"; + my @cmd; + if ($webcollage_helper) { + @cmd = ($webcollage_helper, $bgcolor, $w, $h, $outfile); } else { - LOG (($verbose_pbm || $verbose_load), - "not a GIF or JPG" . - (($body =~ m@<(base|html|head|body|script|table|a href)>@i) - ? " (looks like HTML)" : "") . - ": $url"); - $suppress_audit = 1; - return (); + @cmd = ($convert_cmd, '-size', "${w}x${h}", "xc:$bgcolor", $outfile); } - - $cmd2 = "exec $cmd"; # yes, this really is necessary. if we don't - # do this, the process doesn't die properly. - if (!$verbose_pbm) { - # - # We get a "giftopnm: got a 'Application Extension' extension" - # warning any time it's an animgif. - # - # Note that "giftopnm: EOF / read error on image data" is not - # always a fatal error -- sometimes the image looks fine anyway. - # - $cmd2 .= " 2>/dev/null"; + + my $rc = nontrapping_system (@cmd); + if ($rc != 0) { + LOG(0, "failed to create $bgcolor image: \"$outfile\""); + exit(1); } +} - # There exist corrupted GIF and JPEG files that can make giftopnm and - # djpeg lose their minds and go into a loop. So this gives those programs - # a small timeout -- if they don't complete in time, kill them. - # - my $pid; - @_ = eval { - my $timed_out; - - local $SIG{ALRM} = sub { - LOG ($verbose_pbm, - "timed out ($cvt_timeout) for $cmd on \"$url\" in pid $pid"); - kill ('TERM', $pid) if ($pid); - $timed_out = 1; - $body = undef; - }; - - if (($pid = open(PIPE, "| $cmd2 > $output"))) { - $timed_out = 0; - alarm $cvt_timeout; - print PIPE $body; - $body = undef; - close PIPE; - - LOG ($verbose_exec, "awaiting $pid"); - waitpid ($pid, 0); - LOG ($verbose_exec, "$pid completed"); - my $size = (stat($output))[7]; - $size = -1 unless defined($size); - if ($size < 5) { - LOG ($verbose_pbm, "$cmd on ${w}x$h \"$url\" failed ($size bytes)"); - return (); - } +sub pick_root_displayer() { + my @names = (); - LOG ($verbose_pbm, "created ${w}x$h $output ($cmd)"); - return ($w, $h); - } else { - print STDERR blurb() . 
"$cmd failed: $!\n"; - return (); - } - }; - die if ($@ && $@ ne "alarm\n"); # propagate errors - if ($@) { - # timed out - $body = undef; - return (); - } else { - # didn't - alarm 0; - $body = undef; - return @_; + if ($cocoa_p) { + # see "xscreensaver/hacks/webcollage-cocoa.m" + return "echo COCOA LOAD "; } -} - -sub pick_root_displayer { - my @names = (); foreach my $cmd (@root_displayers) { $_ = $cmd; @@ -2125,30 +3144,48 @@ sub pick_root_displayer { } -my $ppm_to_root_window_cmd = undef; +my $png_to_root_window_cmd = undef; -sub x_or_pbm_output { +sub x_or_image_output($) { + my ($window_id) = @_; - # Check for our helper program, to see whether we need to use PPM pipelines. + # Adjust the PATH for OS X 10.10. + # + $_ = $0; + s:/[^/]*$::; + s/([^a-zA-Z0-9._\-+\/])/\\$1/g; + $ENV{PATH} = "$_:$ENV{PATH}"; + + # Check for our helper program, to see whether we need to use imagemagick. # $_ = "webcollage-helper"; - if (defined ($webcollage_helper) || which ($_)) { - $webcollage_helper = $_ unless (defined($webcollage_helper)); - LOG ($verbose_pbm, "found \"$webcollage_helper\""); - $webcollage_helper .= " -v"; + + if (! defined ($webcollage_helper)) { +# $webcollage_helper = which ($_); + } + + if (defined ($webcollage_helper)) { + LOG ($verbose_decode, "found \"$webcollage_helper\""); } else { - LOG (($verbose_pbm || $verbose_load), "no $_ program"); + LOG (($verbose_decode || $verbose_load), "no $_ program"); + } + + if ($cocoa_p && !defined ($webcollage_helper)) { + error ("webcollage-helper not found in Cocoa-mode!"); + } + + if (!$cocoa_p && defined ($webcollage_helper)) { + foreach ($image_png, $image_tmp1, $image_tmp2) { + s/\.png$/.jpg/si; + } } # make sure the various programs we execute exist, right up front. # - my @progs = ("ppmmake"); # always need this one + my @progs = (); - if (!defined($webcollage_helper)) { - # Only need these others if we don't have the helper. - @progs = (@progs, "giftopnm", "djpeg", "pnmpaste", "pnmscale", "pnmcut"); - } + push @progs, $convert_cmd unless defined($webcollage_helper); foreach (@progs) { which ($_) || error "$_ not found on \$PATH."; @@ -2156,15 +3193,45 @@ sub x_or_pbm_output { # find a root-window displayer program. 
# - $ppm_to_root_window_cmd = pick_root_displayer(); + if (!$no_output_p) { + $png_to_root_window_cmd = pick_root_displayer(); + } + + if (defined ($window_id)) { + error ("-window-id only works if xscreensaver-getimage is installed") + unless ($png_to_root_window_cmd =~ m/^xscreensaver-getimage\b/); + + error ("unparsable window id: $window_id") + unless ($window_id =~ m/^\d+$|^0x[\da-f]+$/i); + $png_to_root_window_cmd =~ s/--?root\b/$window_id/ || + error ("unable to munge displayer: $png_to_root_window_cmd"); + } if (!$img_width || !$img_height) { - $_ = "xdpyinfo"; - which ($_) || error "$_ not found on \$PATH."; - $_ = `$_`; - ($img_width, $img_height) = m/dimensions: *(\d+)x(\d+) /; - if (!defined($img_height)) { - error "xdpyinfo failed."; + + if (!defined ($window_id) && + defined ($ENV{XSCREENSAVER_WINDOW})) { + $window_id = $ENV{XSCREENSAVER_WINDOW}; + } + + if (!defined ($window_id)) { + $_ = "xdpyinfo"; + which ($_) || error "$_ not found on \$PATH."; + $_ = `$_`; + ($img_width, $img_height) = m/dimensions: *(\d+)x(\d+) /; + if (!defined($img_height)) { + error "xdpyinfo failed."; + } + } else { # we have a window id + $_ = "xwininfo"; + which ($_) || error "$_ not found on \$PATH."; + $_ .= " -id $window_id"; + $_ = `$_`; + ($img_width, $img_height) = m/^\s*Width:\s*(\d+)\n\s*Height:\s*(\d+)\n/m; + + if (!defined($img_height)) { + error "xwininfo failed."; + } } } @@ -2189,52 +3256,17 @@ sub x_or_pbm_output { # Create the sold-colored base image. # - $_ = "ppmmake '$bgcolor' $img_width $img_height"; - LOG ($verbose_pbm, "creating base image: $_"); - nontrapping_system "$_ > $image_ppm"; + LOG ($verbose_decode, "creating base image: ${img_width}x${img_height}"); + $_ = pngmake ($image_png, $bgcolor, $img_width, $img_height); # Paste the default background image in the middle of it. # if ($bgimage) { - my ($iw, $ih); - - my $body = ""; - local *IMG; - open(IMG, "<$bgimage") || error "couldn't open $bgimage: $!"; - my $cmd; - while () { $body .= $_; } - close (IMG); - - if ((@_ = gif_size ($body))) { - ($iw, $ih) = @_; - $cmd = "giftopnm |"; - - } elsif ((@_ = jpeg_size ($body))) { - ($iw, $ih) = @_; - $cmd = "djpeg |"; - - } elsif ($body =~ m/^P\d\n(\d+) (\d+)\n/) { - $iw = $1; - $ih = $2; - $cmd = ""; - - } else { - error "$bgimage is not a GIF, JPEG, or PPM."; - } - - my $x = int (($img_width - $iw) / 2); - my $y = int (($img_height - $ih) / 2); - LOG ($verbose_pbm, - "pasting $bgimage (${iw}x$ih) into base image at $x,$y"); - - $cmd .= "pnmpaste - $x $y $image_ppm > $image_tmp1"; - open (IMG, "| $cmd") || error "running $cmd: $!"; - print IMG $body; - $body = undef; - close (IMG); - LOG ($verbose_exec, "subproc exited normally."); - rename ($image_tmp1, $image_ppm) || - error "renaming $image_tmp1 to $image_ppm: $!"; + open (my $in, '<:raw', $bgimage) || error ("$bgimage: $!"); + local $/ = undef; # read entire file + my $body = <$in>; + close ($in) || error ("$bgimage: $!"); + paste_image ('init', $image_png, $body, 'init', 1); } clearlog(); @@ -2258,53 +3290,40 @@ sub x_or_pbm_output { } } -sub paste_image { - my ($base, $img, $body, $source) = @_; +sub paste_image($$$$;$) { + my ($base, $img, $body, $source, $init_p) = @_; $current_state = "paste"; $suppress_audit = 0; - LOG ($verbose_pbm, "got $img (" . length($body) . ")"); - - my ($iw, $ih); - - # If we are using the webcollage-helper, then we do not need to convert this - # image to a PPM. 
But, if we're using a filter command, we still must, since - # that's what the filters expect (webcollage-helper can read PPMs, so that's - # fine.) - # - if (defined ($webcollage_helper) && - !defined ($filter_cmd)) { - - ($iw, $ih) = image_size ($body); - if (!$iw || !$ih) { - LOG (($verbose_pbm || $verbose_load), - "not a GIF or JPG" . - (($body =~ m@<(base|html|head|body|script|table|a href)>@i) - ? " (looks like HTML)" : "") . - ": $img"); - $suppress_audit = 1; - $body = undef; - return 0; - } + LOG ($verbose_decode, "got $img (" . length($body) . ")"); - local *OUT; - open (OUT, ">$image_tmp1") || error ("writing $image_tmp1: $!"); - print OUT $body || error ("writing $image_tmp1: $!"); - close OUT || error ("writing $image_tmp1: $!"); + my ($iw, $ih) = image_size ($body); + if (!$iw || !$ih) { + LOG (($verbose_decode || $verbose_load), + "not a GIF, JPG, or PNG" . + (($body =~ m@<(base|html|head|body|script|table|a href)>@i) + ? " (looks like HTML)" : "") . + ": $img"); + $suppress_audit = 1; + $body = undef; + return 0; + } - } else { - ($iw, $ih) = image_to_pnm ($img, $body, $image_tmp1); + if ($iw <= 0 || $ih <= 0 || $iw > 9999 || $ih > 9999) { + LOG (($verbose_decode || $verbose_load), + "ludicrous image dimensions: $iw x $ih (" . length($body) . + "): $img"); $body = undef; - if (!$iw || !$ih) { - LOG ($verbose_pbm, "unable to make PBM from $img"); - return 0; - } + return 0; } - record_success ($load_method, $img, $base); + open (my $out, '>:raw', $image_tmp1) || error ("writing $image_tmp1: $!"); + (print $out $body) || error ("writing $image_tmp1: $!"); + close ($out) || error ("writing $image_tmp1: $!"); + record_success ($load_method, $img, $base); my $ow = $iw; # used only for error messages my $oh = $ih; @@ -2312,217 +3331,240 @@ sub paste_image { # don't just tack this onto the front of the pipeline -- we want it to # be able to change the size of the input image. # - if ($filter_cmd) { - LOG ($verbose_pbm, "running $filter_cmd"); + if ($filter_cmd && !$init_p) { + LOG ($verbose_decode, "running $filter_cmd"); + + # #### Historically, $filter_cmd read and write PPM files. + # This is doing PNG or JPEG now. I'll bet nobody uses this. my $rc = nontrapping_system "($filter_cmd) < $image_tmp1 >$image_tmp2"; if ($rc != 0) { - LOG(($verbose_pbm || $verbose_load), "failed command: \"$filter_cmd\""); - LOG(($verbose_pbm || $verbose_load), "failed URL: \"$img\" (${ow}x$oh)"); + LOG(($verbose_decode || $verbose_load), + "failed command: \"$filter_cmd\""); + LOG(($verbose_decode || $verbose_load), + "failed URL: \"$img\" (${ow}x$oh)"); return; } rename ($image_tmp2, $image_tmp1); # re-get the width/height in case the filter resized it. - local *IMG; - open(IMG, "<$image_tmp1") || return 0; - $_ = ; - $_ = ; - ($iw, $ih) = m/^(\d+) (\d+)$/; - close (IMG); + open (my $imgf, '<:raw', $image_tmp1) || return 0; + my $b = ''; + sysread ($imgf, $b, 10240); + close $imgf; + ($iw, $ih) = image_size ($b); return 0 unless ($iw && $ih); } - my $target_w = $img_width; + my $target_w = $img_width; # max rectangle into which the image must fit my $target_h = $img_height; - my $cmd = ""; my $scale = 1.0; - - # Usually scale the image to fit on the screen -- but sometimes scale it - # to fit on half or a quarter of the screen. Note that we don't merely - # scale it to fit, we instead cut it in half until it fits -- that should - # give a wider distribution of sizes. 
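#
# In other words: rather than computing an exact fit, both dimensions are
# halved until the image fits inside the target rectangle, and the
# cumulative factor is remembered as the scale.  A small worked example
# with made-up numbers (a 3000x2000 source, a 1280x720 target):
#
#   my ($iw, $ih) = (3000, 2000);
#   my ($target_w, $target_h) = (1280, 720);
#   my $scale = 1.0;
#   while ($iw > $target_w || $ih > $target_h) {
#     $iw = int ($iw / 2);
#     $ih = int ($ih / 2);
#     $scale /= 2;
#   }
#   # two halvings later: 750x500, $scale == 0.25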
- # - if (rand() < 0.3) { $target_w /= 2; $target_h /= 2; $scale /= 2; } - if (rand() < 0.3) { $target_w /= 2; $target_h /= 2; $scale /= 2; } - - if ($iw > $target_w || $ih > $target_h) { - while ($iw > $target_w || - $ih > $target_h) { - $iw = int($iw / 2); - $ih = int($ih / 2); - } - if ($iw <= 10 || $ih <= 10) { - LOG ($verbose_pbm, "scaling to ${iw}x$ih would have been bogus."); - return 0; - } - - LOG ($verbose_pbm, "scaling to ${iw}x$ih"); - - $cmd .= " | pnmscale -xsize $iw -ysize $ih"; - } - - - my $src = $image_tmp1; - my $crop_x = 0; # the sub-rectangle of the image my $crop_y = 0; # that we will actually paste. my $crop_w = $iw; my $crop_h = $ih; + my $x = 0; + my $y = 0; - # The chance that we will randomly crop out a section of an image starts - # out fairly low, but goes up for images that are very large, or images - # that have ratios that make them look like banners (we try to avoid - # banner images entirely, but they slip through when the IMG tags didn't - # have WIDTH and HEIGHT specified.) - # - my $crop_chance = 0.2; - if ($iw > $img_width * 0.4 || $ih > $img_height * 0.4) { - $crop_chance += 0.2; - } - if ($iw > $img_width * 0.7 || $ih > $img_height * 0.7) { - $crop_chance += 0.2; - } - if ($min_ratio && ($iw * $min_ratio) > $ih) { - $crop_chance += 0.7; - } + if (!$init_p) { - if ($crop_chance > 0.1) { - LOG ($verbose_pbm, "crop chance: $crop_chance"); - } + # Usually scale the image to fit on the screen -- but sometimes scale it + # to fit on half or a quarter of the screen. (We do this by reducing the + # size of the target rectangle.) Note that the image is not merely scaled + # to fit; we instead cut the image in half repeatedly until it fits in the + # target rectangle -- that gives a wider distribution of sizes. + # + if (rand() < 0.3) { $target_w /= 2; $target_h /= 2; } # reduce target rect + if (rand() < 0.3) { $target_w /= 2; $target_h /= 2; } + + if ($iw > $target_w || $ih > $target_h) { + while ($iw > $target_w || + $ih > $target_h) { + $iw = int($iw / 2); + $ih = int($ih / 2); + $scale /= 2; + } + if ($iw <= 10 || $ih <= 10) { + LOG ($verbose_decode, "scaling ${ow}x${oh} to ${iw}x$ih" . + " would have been bogus."); + return 0; + } - if (rand() < $crop_chance) { + $crop_w = $iw; + $crop_h = $ih; + + LOG ($verbose_decode, "scaling ${ow}x${oh} to ${iw}x$ih ($scale)"); + } - my $ow = $crop_w; - my $oh = $crop_h; - if ($crop_w > $min_width) { - # if it's a banner, select the width linearly. - # otherwise, select a bell. - my $r = (($min_ratio && ($iw * $min_ratio) > $ih) - ? rand() - : bellrand()); - $crop_w = $min_width + int ($r * ($crop_w - $min_width)); - $crop_x = int (rand() * ($ow - $crop_w)); + my $src = $image_tmp1; + + # The chance that we will randomly crop out a section of an image starts + # out fairly low, but goes up for images that are very large, or images + # that have ratios that make them look like banners (we try to avoid + # banner images entirely, but they slip through when the IMG tags didn't + # have WIDTH and HEIGHT specified.) + # + my $crop_chance = 0.2; + if ($iw > $img_width * 0.4 || $ih > $img_height * 0.4) { + $crop_chance += 0.2; } - if ($crop_h > $min_height) { - # height always selects as a bell. 
- $crop_h = $min_height + int (bellrand() * ($crop_h - $min_height)); - $crop_y = int (rand() * ($oh - $crop_h)); + if ($iw > $img_width * 0.7 || $ih > $img_height * 0.7) { + $crop_chance += 0.2; + } + if ($min_ratio && ($iw * $min_ratio) > $ih) { + $crop_chance += 0.7; } - if ($crop_x != 0 || $crop_y != 0 || - $crop_w != $iw || $crop_h != $ih) { - LOG ($verbose_pbm, - "randomly cropping to ${crop_w}x$crop_h \@ $crop_x,$crop_y"); + if ($crop_chance > 0.1) { + LOG ($verbose_decode, "crop chance: $crop_chance"); } - } - # Where the image should logically land -- this might be negative. - # - my $x = int((rand() * ($img_width + $crop_w/2)) - $crop_w*3/4); - my $y = int((rand() * ($img_height + $crop_h/2)) - $crop_h*3/4); + if (rand() < $crop_chance) { - # if we have chosen to paste the image outside of the rectangle of the - # screen, then we need to crop it. - # - if ($x < 0 || - $y < 0 || - $x + $crop_w > $img_width || - $y + $crop_h > $img_height) { + my $ow = $crop_w; + my $oh = $crop_h; + + if ($crop_w > $min_width) { + # if it's a banner, select the width linearly. + # otherwise, select a bell. + my $r = (($min_ratio && ($iw * $min_ratio) > $ih) + ? rand() + : bellrand()); + $crop_w = $min_width + int ($r * ($crop_w - $min_width)); + $crop_x = int (rand() * ($ow - $crop_w)); + } + if ($crop_h > $min_height) { + # height always selects as a bell. + $crop_h = $min_height + int (bellrand() * ($crop_h - $min_height)); + $crop_y = int (rand() * ($oh - $crop_h)); + } - LOG ($verbose_pbm, - "cropping for effective paste of ${crop_w}x$crop_h \@ $x,$y"); + # Clip it to the actual post-scaling image size. + if ($crop_x + $crop_w > $iw) { $crop_w = $iw - $crop_x; } + if ($crop_y + $crop_h > $ih) { $crop_h = $ih - $crop_y; } + if ($crop_x < 0) { $crop_w += $crop_x; $crop_x = 0; } + if ($crop_y < 0) { $crop_h += $crop_y; $crop_y = 0; } - if ($x < 0) { $crop_x -= $x; $crop_w += $x; $x = 0; } - if ($y < 0) { $crop_y -= $y; $crop_h += $y; $y = 0; } + if ($crop_x != 0 || $crop_y != 0 || + $crop_w != $iw || $crop_h != $ih) { + LOG ($verbose_decode, + "randomly cropping to ${crop_w}x$crop_h \@ $crop_x,$crop_y"); + } + } - if ($x + $crop_w >= $img_width) { $crop_w = $img_width - $x - 1; } - if ($y + $crop_h >= $img_height) { $crop_h = $img_height - $y - 1; } - } + # Where the image should logically land -- this might be negative. + # + $x = int((rand() * ($img_width + $crop_w/2)) - $crop_w*3/4); + $y = int((rand() * ($img_height + $crop_h/2)) - $crop_h*3/4); - # If any cropping needs to happen, add pnmcut. - # - if ($crop_x != 0 || $crop_y != 0 || - $crop_w != $iw || $crop_h != $ih) { - $iw = $crop_w; - $ih = $crop_h; - $cmd .= " | pnmcut $crop_x $crop_y $iw $ih"; - LOG ($verbose_pbm, "cropping to ${crop_w}x$crop_h \@ $crop_x,$crop_y"); - } + # if we have chosen to paste the image outside of the rectangle of the + # screen, then we need to crop it. + # + if ($x < 0 || + $y < 0 || + $x + $crop_w > $img_width || + $y + $crop_h > $img_height) { - LOG ($verbose_pbm, "pasting ${iw}x$ih \@ $x,$y in $image_ppm"); + LOG ($verbose_decode, + "cropping for effective paste of ${crop_w}x${crop_h} \@ $x,$y"); - $cmd .= " | pnmpaste - $x $y $image_ppm"; + if ($x < 0) { $crop_x -= $x; $crop_w += $x; $x = 0; } + if ($y < 0) { $crop_y -= $y; $crop_h += $y; $y = 0; } - $cmd =~ s@^ *\| *@@; + if ($x + $crop_w >= $img_width) { $crop_w = $img_width - $x - 1; } + if ($y + $crop_h >= $img_height) { $crop_h = $img_height - $y - 1; } + } - if (defined ($webcollage_helper)) { - $cmd = "$webcollage_helper $image_tmp1 $image_ppm " . 
- "$scale $opacity " . - "$crop_x $crop_y $x $y " . - "$iw $ih"; - $_ = $cmd; + # If any cropping needs to happen, add pnmcut. + # + if ($crop_x != 0 || $crop_y != 0 || + $crop_w != $iw || $crop_h != $ih) { + $iw = $crop_w; + $ih = $crop_h; + LOG ($verbose_decode, "cropping to ${crop_w}x$crop_h \@ " . + "$crop_x,$crop_y"); + } - } else { - # use a PPM pipeline - $_ = "($cmd)"; - $_ .= " < $image_tmp1 > $image_tmp2"; + LOG ($verbose_decode, "pasting ${iw}x$ih \@ $x,$y in $image_png"); } - if ($verbose_pbm) { - $_ = "($_) 2>&1 | sed s'/^/" . blurb() . "/'"; + my @cmd; + if (defined ($webcollage_helper)) { + @cmd = ($webcollage_helper, + $image_tmp1, $image_png, + $scale, $opacity, + $crop_x, $crop_y, $x, $y, + $iw, $ih); } else { - $_ .= " 2> /dev/null"; - } - - my $rc = nontrapping_system ($_); - - if (defined ($webcollage_helper) && -z $image_ppm) { - LOG (1, "failed command: \"$cmd\""); + @cmd = ($convert_cmd, + $image_png, + '(', + $image_tmp1 . '[0]', + '-scale', sprintf("%.2f%%", 100 * $scale), + '-crop', "${iw}x${ih}+${crop_x}+${crop_y}", + '-geometry', "+${x}+${y}", + + ($init_p ? () : + ( + # Blurry edges with rounded corners + '-alpha', 'set', + '-virtual-pixel', 'transparent', + '-channel', 'A', + '-blur', '0x12', + '-level', '50%,100%', + + # Overall transparency + '-evaluate', 'multiply', $opacity, + + '+channel', + )), + ')', + '-composite', + '+repage', + '-strip', + $image_png); + } + + #### $verbose_decode should mean 2>/dev/null + + my $rc = nontrapping_system (@cmd); + + if (-z $image_png) { + LOG (1, "failed command: \"@cmd\""); print STDERR "\naudit log:\n\n\n"; print STDERR ("#" x 78) . "\n"; - print STDERR blurb() . "$image_ppm has zero size\n"; + print STDERR blurb() . "$image_png has zero size\n"; showlog(); print STDERR "\n\n"; exit (1); } if ($rc != 0) { - LOG (($verbose_pbm || $verbose_load), "failed command: \"$cmd\""); - LOG (($verbose_pbm || $verbose_load), "failed URL: \"$img\" (${ow}x$oh)"); + LOG (($verbose_decode || $verbose_load), "failed command: \"@cmd\""); + LOG (($verbose_decode || $verbose_load), + "failed URL: \"$img\" (${ow}x$oh)"); return; } - if (!defined ($webcollage_helper)) { - rename ($image_tmp2, $image_ppm) || return; - } - - my $target = "$image_ppm"; + my $target = "$image_png"; # don't just tack this onto the end of the pipeline -- we don't want it - # to end up in $image_ppm, because we don't want the results to be + # to end up in $image_png, because we don't want the results to be # cumulative. # if ($post_filter_cmd) { - my $cmd; + # #### Historically, $post_filter_cmd read and write PPM files. + # This is doing PNG or JPEG now. I'll bet nobody uses this. $target = $image_tmp1; - if (!defined ($webcollage_helper)) { - $cmd = "($post_filter_cmd) < $image_ppm > $target"; - } else { - # Blah, my scripts need the JPEG data, but some other folks need - # the PPM data -- what to do? Ignore the problem, that's what! -# $cmd = "djpeg < $image_ppm | ($post_filter_cmd) > $target"; - $cmd = "($post_filter_cmd) < $image_ppm > $target"; - } - + my $cmd = "($post_filter_cmd) < $image_png > $target"; $rc = nontrapping_system ($cmd); if ($rc != 0) { - LOG ($verbose_pbm, "filter failed: \"$post_filter_cmd\"\n"); + LOG ($verbose_decode, "filter failed: \"$post_filter_cmd\"\n"); return; } } @@ -2530,7 +3572,7 @@ sub paste_image { if (!$no_output_p) { my $tsize = (stat($target))[7]; if ($tsize > 200) { - $cmd = "$ppm_to_root_window_cmd $target"; + my $cmd = "$png_to_root_window_cmd $target"; # xv seems to hate being killed. 
it tends to forget to clean # up after itself, and leaves windows around and colors allocated. @@ -2542,23 +3584,29 @@ sub paste_image { # the next network retrieval, which is probably a better thing # to do anyway. # - $cmd .= " &"; + $cmd .= " &" unless ($cocoa_p); $rc = nontrapping_system ($cmd); if ($rc != 0) { - LOG (($verbose_pbm || $verbose_load), "display failed: \"$cmd\""); + LOG (($verbose_decode || $verbose_load), "display failed: \"$cmd\""); return; } } else { - LOG ($verbose_pbm, "$target size is $tsize"); + LOG ($verbose_decode, "$target size is $tsize"); } } - $source .= "-" . stats_of($source); - print STDOUT "image: ${iw}x${ih} @ $x,$y $base $source\n" - if ($verbose_imgmap); + if (defined($source)) { + $source .= "-" . stats_of($source); + print STDOUT "image: ${iw}x${ih} @ $x,$y $base $source\n" + if ($verbose_imgmap); + if ($imagemap_base) { + update_imagemap ($base, $x, $y, $iw, $ih, + $image_png, $img_width, $img_height); + } + } clearlog(); @@ -2566,7 +3614,218 @@ sub paste_image { } -sub init_signals { +sub update_imagemap($$$$$$$$) { + my ($url, $x, $y, $w, $h, $image_png, $image_width, $image_height) = @_; + + $current_state = "imagemap"; + + my $max_areas = 200; + + $url = html_quote ($url); + push @imagemap_areas, [$x, $y, $w, $h, $url]; + shift @imagemap_areas if (@imagemap_areas > $max_areas); + LOG ($verbose_decode, "area: $x,$y,$w,$h"); + + my $map_name = $imagemap_base; + $map_name =~ s@^.*/@@; + $map_name = 'collage' if ($map_name eq ''); + + my $imagemap_html = $imagemap_base . ".html"; + my $imagemap_jpg = $imagemap_base . ".jpg"; + my $imagemap_jpg2 = $imagemap_jpg; + $imagemap_jpg2 =~ s@^.*/@@gs; + + if (!defined ($imagemap_html_tmp)) { + $imagemap_html_tmp = $imagemap_html . sprintf (".%08x", rand(0xffffffff)); + $imagemap_jpg_tmp = $imagemap_jpg . sprintf (".%08x", rand(0xffffffff)); + } + + # Read the imagemap html file (if any) to get a template. + # + my $template_html = ''; + { + if (open (my $in, '<', $imagemap_html)) { + local $/ = undef; # read entire file + $template_html = <$in>; + close $in; + LOG ($verbose_decode, "read template $imagemap_html"); + } + + if (! ($template_html =~ m/\.webcollage_box\b/s)) { # missing or old + $template_html = + ' + + + + + WebCollage + + + +
+
+
+
+';
+
+      LOG ($verbose_decode, "created dummy template");
+    }
+  }
+
+  # Write the jpg to a tmp file
+  #
+  {
+    my @cmd;
+    if (defined($webcollage_helper)) {
+      @cmd = ('cp', '-p', $image_png, $imagemap_jpg_tmp);
+    } else {
+      @cmd = ($convert_cmd, $image_png, 'jpeg:' . $imagemap_jpg_tmp);
+    }
+    my $rc = nontrapping_system (@cmd);
+    if ($rc != 0) {
+      error ("imagemap jpeg failed: \"@cmd\"\n");
+    }
+  }
+
+  # Write the html to a tmp file
+  #
+  {
+    my $body = $template_html;
+    my $img = ("
" . + "" . + "
\n"); + foreach my $a (@imagemap_areas) { + my ($x, $y, $w, $h, $u) = @$a; + $x /= $img_width / 100; + $y /= $img_height / 100; + $w /= $img_width / 100; + $h /= $img_height / 100; + foreach ($x, $y, $w, $h) { $_ = sprintf("%.1f%%", $_); } + $u = html_quote($u); + $img .= ("
\n"); + } + + $img = ("
\n" . + $img . + "
\n"); + $body =~ s@
+ .*?
.*? @$img@sex || + error ("$imagemap_html_tmp: unable to splice image"); + + + # if there are magic webcollage spans in the html, update those too. + # + { + my @st = stat ($imagemap_jpg_tmp); + my $date = strftime("%d-%b-%Y %l:%M:%S %p %Z", localtime($st[9])); + my $size = int(($st[7] / 1024) + 0.5) . "K"; + $body =~ s@().*?()@$1$date$2@si; + $body =~ s@().*?()@$1$size$2@si; + } + + open (my $out, '>', $imagemap_html_tmp) || error("$imagemap_html_tmp: $!"); + (print $out $body) || error("$imagemap_html_tmp: $!"); + close ($out) || error("$imagemap_html_tmp: $!"); + LOG ($verbose_decode, "wrote $imagemap_html_tmp"); + } + + # Rename the two tmp files to the real files + # + rename ($imagemap_html_tmp, $imagemap_html) || + error "renaming $imagemap_html_tmp to $imagemap_html"; + LOG ($verbose_decode, "wrote $imagemap_html"); + + if ($imagemap_jpg_tmp ne $image_png) { + rename ($imagemap_jpg_tmp, $imagemap_jpg) || + error "renaming $imagemap_jpg_tmp to $imagemap_jpg"; + LOG ($verbose_decode, "wrote $imagemap_jpg"); + } +} + + +# Figure out what the proxy server should be, either from environment +# variables or by parsing the output of the (MacOS) program "scutil", +# which tells us what the system-wide proxy settings are. +# +sub set_proxy() { + + if (! defined($http_proxy)) { + # historical suckage: the environment variable name is lower case. + $http_proxy = $ENV{http_proxy} || $ENV{HTTP_PROXY}; + } + + if (defined ($http_proxy)) { + if ($http_proxy && $http_proxy =~ m@^https?://([^/]*)/?$@ ) { + # historical suckage: allow "http://host:port" as well as "host:port". + $http_proxy = $1; + } + + } else { + my $proxy_data = `scutil --proxy 2>/dev/null`; + my ($server) = ($proxy_data =~ m/\bHTTPProxy\s*:\s*([^\s]+)/s); + my ($port) = ($proxy_data =~ m/\bHTTPPort\s*:\s*([^\s]+)/s); + # Note: this ignores the "ExceptionsList". + if ($server) { + $http_proxy = $server; + $http_proxy .= ":$port" if $port; + } + } + + delete $ENV{http_proxy}; + delete $ENV{HTTP_PROXY}; + delete $ENV{https_proxy}; + delete $ENV{HTTPS_PROXY}; + delete $ENV{PERL_LWP_ENV_PROXY}; + + if ($http_proxy) { + $http_proxy = 'http://' . $http_proxy; + LOG ($verbose_net, "proxy server: $http_proxy"); + } else { + $http_proxy = undef; # for --proxy '' + } +} + + +sub init_signals() { $SIG{HUP} = \&signal_cleanup; $SIG{INT} = \&signal_cleanup; @@ -2575,14 +3834,14 @@ sub init_signals { $SIG{KILL} = \&signal_cleanup; $SIG{TERM} = \&signal_cleanup; - # Need this so that if giftopnm dies, we don't die. + # Need this so that if subprocess filters die, we don't die. $SIG{PIPE} = 'IGNORE'; } -END { signal_cleanup(); } +END { exit_cleanup(); } -sub main { +sub main() { $| = 1; srand(time ^ $$); @@ -2594,60 +3853,75 @@ sub main { $load_method = "none"; my $root_p = 0; + my $window_id = undef; - # historical suckage: the environment variable name is lower case. 
- $http_proxy = $ENV{http_proxy} || $ENV{HTTP_PROXY}; - - while ($_ = $ARGV[0]) { - shift @ARGV; - if ($_ eq "-display" || - $_ eq "-displ" || - $_ eq "-disp" || - $_ eq "-dis" || - $_ eq "-dpy" || - $_ eq "-d") { + while ($#ARGV >= 0) { + $_ = shift @ARGV; + if (m/^--?d(i(s(p(l(a(y)?)?)?)?)?)?$/s) { $ENV{DISPLAY} = shift @ARGV; - } elsif ($_ eq "-root") { + } elsif (m/^--?root$/s) { + $root_p = 1; + } elsif (m/^--?window-id$/s) { + $window_id = shift @ARGV; $root_p = 1; - } elsif ($_ eq "-no-output") { + } elsif (m/^--?no-output$/s) { $no_output_p = 1; - } elsif ($_ eq "-urls-only") { + } elsif (m/^--?urls(-only)?$/s) { $urls_only_p = 1; $no_output_p = 1; - } elsif ($_ eq "-verbose") { + } elsif (m/^--?cocoa$/s) { + $cocoa_p = 1; + } elsif (m/^--?imagemap$/s) { + $imagemap_base = shift @ARGV; + $no_output_p = 1; + } elsif (m/^--?verbose$/s) { $verbose++; } elsif (m/^-v+$/) { $verbose += length($_)-1; - } elsif ($_ eq "-delay") { + } elsif (m/^--?delay$/s) { $delay = shift @ARGV; - } elsif ($_ eq "-timeout") { + } elsif (m/^--?timeout$/s) { $http_timeout = shift @ARGV; - } elsif ($_ eq "-filter") { + } elsif (m/^--?filter$/s) { $filter_cmd = shift @ARGV; - } elsif ($_ eq "-filter2") { + } elsif (m/^--?filter2$/s) { $post_filter_cmd = shift @ARGV; - } elsif ($_ eq "-background" || $_ eq "-bg") { + } elsif (m/^--?(background|bg)$/s) { $background = shift @ARGV; - } elsif ($_ eq "-size") { + } elsif (m/^--?size$/s) { $_ = shift @ARGV; if (m@^(\d+)x(\d+)$@) { $img_width = $1; $img_height = $2; } else { - error "argument to \"-size\" must be of the form \"640x400\""; + error "argument to \"--size\" must be of the form \"640x400\""; } - } elsif ($_ eq "-proxy" || $_ eq "-http-proxy") { + } elsif (m/^--?(http-)?proxy$/s) { $http_proxy = shift @ARGV; - } elsif ($_ eq "-dictionary" || $_ eq "-dict") { + } elsif (m/^--?dict(ionary)?$/s) { $dict = shift @ARGV; - } elsif ($_ eq "-driftnet" || $_ eq "--driftnet") { + } elsif (m/^--?opacity$/s) { + $opacity = shift @ARGV; + error ("opacity must be between 0.0 and 1.0") + if ($opacity <= 0 || $opacity > 1); + } elsif (m/^--?driftnet$/s) { @search_methods = ( 100, "driftnet", \&pick_from_driftnet ); if (! ($ARGV[0] =~ m/^-/)) { $driftnet_cmd = shift @ARGV; } else { $driftnet_cmd = $default_driftnet_cmd; } - } elsif ($_ eq "-debug" || $_ eq "--debug") { + } elsif (m/^--?dir(ectory)?$/s) { + @search_methods = ( 100, "local", \&pick_from_local_dir ); + if (! ($ARGV[0] =~ m/^-/)) { + $local_dir = shift @ARGV; + } else { + error ("local directory path must be set") + } + } elsif (m/^--?fps$/s) { + # -fps only works on MacOS, via "webcollage-cocoa.m". + # Ignore it if passed to this script in an X11 context. + } elsif (m/^--?debug$/s) { my $which = shift @ARGV; my @rest = @search_methods; my $ok = 0; @@ -2664,33 +3938,30 @@ sub main { } error "no such search method as \"$which\"" unless ($ok); LOG (1, "DEBUG: using only \"$which\""); + $report_performance_interval = 30; } else { + print STDERR "unknown option: $_\n\n"; print STDERR "$copyright\nusage: $progname " . - "[-root] [-display dpy] [-verbose] [-debug which]\n" . - "\t\t [-timeout secs] [-delay secs] [-filter cmd] [-filter2 cmd]\n" . - "\t\t [-no-output] [-urls-only] [-background color] [-size WxH]\n" . - "\t\t [-dictionary dictionary-file] [-http-proxy host[:port]]\n" . - "\t\t [-driftnet [driftnet-program-and-args]]\n" . + "[--root] [--display dpy] [--verbose] [--debug which]\n" . + "\t\t [--timeout secs] [--delay secs] [--size WxH]\n" . + "\t\t [--no-output] [--urls-only] [--imagemap filename]\n" . 
+ "\t\t [--background color] [--opacity f]\n" . + "\t\t [--filter cmd] [--filter2 cmd]\n" . + "\t\t [--dictionary dictionary-file] [--http-proxy host[:port]]\n" . + "\t\t [--driftnet [driftnet-program-and-args]]\n" . + "\t\t [--directory local-image-directory]\n" . "\n"; exit 1; } } - if ($http_proxy && $http_proxy eq "") { - $http_proxy = undef; - } - if ($http_proxy && $http_proxy =~ m@^http://([^/]*)/?$@ ) { - # historical suckage: allow "http://host:port" as well as "host:port". - $http_proxy = $1; - } - - if (!$root_p && !$no_output_p) { + if (!$root_p && !$no_output_p && !$cocoa_p) { print STDERR $copyright; - error "the -root argument is mandatory (for now.)"; + error "the --root argument is mandatory (for now.)"; } - if (!$no_output_p && !$ENV{DISPLAY}) { + if (!$no_output_p && !$cocoa_p && !$ENV{DISPLAY}) { error "\$DISPLAY is not set."; } @@ -2723,7 +3994,7 @@ sub main { $verbose_load = 1; $verbose_filter = 1; $verbose_net = 1; - $verbose_pbm = 1; + $verbose_decode = 1; } elsif ($verbose == 6) { $verbose_imgmap = 1; @@ -2731,7 +4002,7 @@ sub main { $verbose_load = 1; $verbose_filter = 1; $verbose_net = 1; - $verbose_pbm = 1; + $verbose_decode = 1; $verbose_http = 1; } elsif ($verbose >= 7) { @@ -2740,7 +4011,7 @@ sub main { $verbose_load = 1; $verbose_filter = 1; $verbose_net = 1; - $verbose_pbm = 1; + $verbose_decode = 1; $verbose_http = 1; $verbose_exec = 1; } @@ -2752,16 +4023,26 @@ sub main { pick_dictionary(); } + if ($imagemap_base && !($img_width && $img_height)) { + error ("--size WxH is required with --imagemap"); + } + + if (defined ($local_dir)) { + $_ = "xscreensaver-getimage-file"; + which ($_) || error "$_ not found on \$PATH."; + } + init_signals(); + set_proxy(); spawn_driftnet ($driftnet_cmd) if ($driftnet_cmd); if ($urls_only_p) { - url_only_output; + url_only_output (); } else { - x_or_pbm_output; + x_or_image_output ($window_id); } } -main; +main(); exit (0);