#!/usr/bin/perl -w
#
-# webcollage, Copyright (c) 1999-2001 by Jamie Zawinski <jwz@jwz.org>
+# webcollage, Copyright (c) 1999-2002 by Jamie Zawinski <jwz@jwz.org>
# This program decorates the screen with random images from the web.
# One satisfied customer described it as "a nonstop pop culture brainbath."
#
my $progname = $0; $progname =~ s@.*/@@g;
-my $version = q{ $Revision: 1.82 $ }; $version =~ s/^[^0-9]+([0-9.]+).*$/$1/;
-my $copyright = "WebCollage $version, Copyright (c) 1999-2001" .
+my $version = q{ $Revision: 1.93 $ }; $version =~ s/^[^0-9]+([0-9.]+).*$/$1/;
+my $copyright = "WebCollage $version, Copyright (c) 1999-2002" .
" Jamie Zawinski <jwz\@jwz.org>\n" .
" http://www.jwz.org/xscreensaver/\n";
-my @search_methods = ( 30, "imagevista", \&pick_from_alta_vista_images,
- 28, "altavista", \&pick_from_alta_vista_text,
- 18, "yahoorand", \&pick_from_yahoo_random_link,
- 14, "googleimgs", \&pick_from_google_images,
+my @search_methods = ( 40, "imagevista", \&pick_from_alta_vista_images,
+ 30, "altavista", \&pick_from_alta_vista_text,
+ 19, "yahoorand", \&pick_from_yahoo_random_link,
+ 9, "lycos", \&pick_from_lycos_text,
2, "yahoonews", \&pick_from_yahoo_news_text,
- 8, "lycos", \&pick_from_lycos_text,
# Hotbot gives me "no matches" just about every time.
# Then I try the same URL again, and it works. I guess
# it caches searches, and webcollage always busts its
# cache and time out? Or it just sucks.
# 0, "hotbot", \&pick_from_hotbot_text,
- );
-#@search_methods=(100, "googleimgs",\&pick_from_google_images);
+ # Google asked (nicely) for me to stop searching them.
+ # 0, "googlenums", \&pick_from_google_image_numbers,
+ # 0, "googleimgs", \&pick_from_google_images,
+
+ );
# programs we can use to write to the root window (tried in ascending order.)
#
my @root_displayers = (
- "xloadimage -quiet -onroot -center -border black",
- "xli -quiet -onroot -center -border black",
- "xv -root -quit -viewonly +noresetroot -rmode 5" .
+ "chbg -once -xscreensaver -max_size 100",
+ "xv -root -quit -viewonly +noresetroot -quick24 -rmode 5" .
" -rfg black -rbg black",
- "chbg -once -xscreensaver",
+ "xli -quiet -onroot -center -border black",
+ "xloadimage -quiet -onroot -center -border black",
# this lame program wasn't built with vroot.h:
# "xsri -scale -keep-aspect -center-horizontal -center-vertical",
);
+# If this is set, it's a helper program to use for pasting images together:
+# this is a lot faster and more efficient than using PPM pipelines, which is
+# what we do if this program doesn't exist. (We check for "webcollage-helper"
+# on $PATH at startup, and set this variable appropriately.)
+#
+my $webcollage_helper = undef;
+
+
+# If we have the webcollage-helper program, then it will paste the images
+# together with transparency! 0.0 is invisible, 1.0 is totally opaque.
+#
+my $opacity = 0.85;
+
+
# Some sites have managed to poison the search engines. These are they.
# (We auto-detect sites that have poisoned the search engines via excessive
# keywords or dictionary words, but these are ones that slip through
"bartleby.com" => 1, # Dictionary, cluttering altavista.
"encyclopedia.com" => 1, # Dictionary, cluttering altavista.
"onlinedictionary.datasegment.com" => 1, # Dictionary, cluttering altavista.
+ "hotlinkpics.com" => 1, # Porn site that has poisoned imagevista
+ # (I don't see how they did it, though!)
+ "alwayshotels.com" => 1, # Poisoned Lycos pretty heavily.
);
"yimg.com" => 1, # This is where dailynews.yahoo.com stores
"eimg.com" => 1, # its images, so pick_from_yahoo_news_text()
# hits this every time.
+
+ "driftnet" => 1, # builtin...
);
my $no_output_p = 0;
my $urls_only_p = 0;
+my @pids_to_kill = (); # forked pids we should kill when we exit, if any.
+
+my $driftnet_magic = 'driftnet';
+my $driftnet_dir = undef;
+my $default_driftnet_cmd = "driftnet -a -m 100";
+
my $wordlist;
my %rejected_urls;
LOG ($verbose_http, " ==> $_");
}
print S $hdrs;
- my $http = <S>;
+ my $http = <S> || "";
$_ = $http;
s/[\r\n]+$//s;
my ( $url, $referer, $timeout ) = @_;
my $start = time;
+ if (defined($referer) && $referer eq $driftnet_magic) {
+ return get_driftnet_file ($url);
+ }
+
my $orig_url = $url;
my $loop_count = 0;
my $max_loop_count = 4;
}
sub random_words {
- return (random_word . "%20" .
- random_word . "%20" .
- random_word . "%20" .
- random_word . "%20" .
+ my ($or_p) = @_;
+ my $sep = ($or_p ? "%20OR%20" : "%20");
+ return (random_word . $sep .
+ random_word . $sep .
+ random_word . $sep .
+ random_word . $sep .
random_word);
}
sub pick_from_alta_vista_images {
my ( $timeout ) = @_;
- my $words = random_words;
+ my $words = random_words(1);
my $page = (int(rand(9)) + 1);
my $search_url = $alta_vista_images_url . $words;
foreach my $u (@subpages) {
# altavista is encoding their URLs now.
- next unless ($u =~
- m@^/r\?ck_sm=[a-zA-Z0-9]+\&ref=[a-zA-Z0-9]+(\&uid=[a-zA-Z0-9]+)?\&r=(.*)@);
- $u = url_unquote($2);
+ next unless ($u =~ m@^/r.*\&r=([^&]+).*@);
+ $u = url_unquote($1);
next unless ($u =~ m@^http://@i); # skip non-HTTP or relative URLs
next if ($u =~ m@[/.]altavista\.com\b@i); # skip altavista builtins
\f
############################################################################
#
-# Pick images by feeding random words into Google Image Search
+# Pick images by feeding random words into Google Image Search.
# By Charles Gales <gales@us.ibm.com>
#
############################################################################
}
+\f
+############################################################################
+#
+# Pick images by feeding random *numbers* into Google Image Search.
+# By jwz, suggested by Ian O'Donnell.
+#
+############################################################################
+
+
+# googlenums
+# Feed a random 1-4 digit number into Google Image Search and return a
+# random hit as ($referer_url, $image_url), or () if nothing usable was
+# found this time. (Disabled in @search_methods at Google's request.)
+sub pick_from_google_image_numbers {
+ my ( $timeout ) = @_;
+
+ # Pick a random number in [0, 9999) to use as the search term.
+ my $max = 9999;
+ my $number = int(rand($max));
+
+ # Sometimes zero-pad to 4 digits so both "42" and "0042" get searched.
+ $number = sprintf("%04d", $number)
+ if (rand() < 0.3);
+
+ my $words = "$number";
+ my $page = (int(rand(40)) + 1);
+ my $num = 20; # 20 images per page
+ my $search_url = $google_images_url . $words;
+
+ if ($page > 1) {
+ $search_url .= "&start=" . $page*$num; # page number
+ $search_url .= "&num=" . $num; # images per page
+ }
+
+ my ($search_hit_count, @subpages) =
+ pick_from_search_engine ($timeout, $search_url, $words);
+
+ my @candidates = ();
+ my %referers; # maps image URL -> page URL it was found on
+ foreach my $u (@subpages) {
+ next unless ($u =~ m@imgres\?imgurl@i); # All pics start with this
+ next if ($u =~ m@[/.]google\.com\b@i); # skip google builtins
+
+ # Result links look like /imgres?imgurl=HOST/PATH&imgrefurl=PAGE&...;
+ # pull out both the image URL and the referring page.
+ if ($u =~ m@^/imgres\?imgurl=(.*?)\&imgrefurl=(.*?)\&@) {
+ my $ref = $2;
+ my $img = "http://$1";
+
+ LOG ($verbose_filter, " candidate: $ref");
+ push @candidates, $img;
+ $referers{$img} = $ref;
+ }
+ }
+
+ # Strip known-poisoned hosts, then pick one survivor at random.
+ @candidates = depoison (@candidates);
+ return () if ($#candidates < 0);
+ my $i = int(rand($#candidates+1));
+ my $img = $candidates[$i];
+ my $ref = $referers{$img};
+
+ LOG ($verbose_load, "picked image " . ($i+1) . ": $img (on $ref)");
+ return ($ref, $img);
+}
+
+
\f
############################################################################
#
sub pick_from_alta_vista_text {
my ( $timeout ) = @_;
- my $words = random_words;
+ my $words = random_words(1);
my $page = (int(rand(9)) + 1);
my $search_url = $alta_vista_url . $words;
# onMouseOver to make it look like they're not! Well, it makes it
# easier for us to identify search results...
#
- next unless ($u =~
- m@^/r\?ck_sm=[a-zA-Z0-9]+\&ref=[a-zA-Z0-9]+\&uid=[a-zA-Z0-9]+\&r=(.*)@);
+ next unless ($u =~ m@^/r.*\&r=([^&]+).*@);
$u = url_unquote($1);
LOG ($verbose_filter, " candidate: $u");
sub pick_from_hotbot_text {
my ( $timeout ) = @_;
- my $words = random_words;
+ my $words = random_words(0);
my $search_url = $hotbot_search_url . $words;
my ($search_hit_count, @subpages) =
sub pick_from_lycos_text {
my ( $timeout ) = @_;
- my $words = random_words;
+ my $words = random_words(0);
my $start = int(rand(8)) * 10 + 1;
my $search_url = $lycos_search_url . $words . "&start=$start";
# Lycos plays exact the same redirection game as hotbot.
# Note that "id=0" is used for internal advertising links,
# and 1+ are used for search results.
- next unless ($u =~ m@^http://click.hotbot.com/director.asp\?id=[1-9]\d*&target=([^&]+)@);
+ next unless ($u =~ m@^http://click.hotbot.com/director.asp
+ .*
+ [?&]id=[1-9]\d*
+ .*
+ \&target=([^&]+)
+ .*
+ @x);
$u = url_decode($1);
LOG ($verbose_filter, " candidate: $u");
sub pick_from_yahoo_news_text {
my ( $timeout ) = @_;
- my $words = random_words;
+ my $words = random_words(1);
my $search_url = $yahoo_news_url . $words;
my ($search_hit_count, @subpages) =
my @candidates = ();
foreach my $u (@subpages) {
# only accept URLs on Yahoo's news site
- next unless ($u =~ m@^http://dailynews.yahoo.com/@i);
+ next unless ($u =~ m@^http://dailynews\.yahoo\.com/@i ||
+ $u =~ m@^http://story\.news\.yahoo\.com/@i);
LOG ($verbose_filter, " candidate: $u");
push @candidates, $u;
+\f
+############################################################################
+#
+# Pick images by waiting for driftnet to populate a temp dir with files.
+# Requires driftnet version 0.1.5 or later.
+# (Driftnet is a program by Chris Lightfoot that sniffs your local ethernet
+# for images being downloaded by others.)
+# Driftnet/webcollage integration by jwz.
+#
+############################################################################
+
+# driftnet
+# Poll the directory that our forked driftnet process writes into, until
+# a sniffed image file shows up or $timeout seconds elapse. Returns
+# ($driftnet_magic, $filename) so the loader knows to read a local file
+# instead of doing an HTTP fetch; returns () on timeout.
+sub pick_from_driftnet {
+ my ( $timeout ) = @_;
+
+ my $id = $driftnet_magic;
+ my $dir = $driftnet_dir;
+ my $start = time;
+ my $now;
+
+ error ("\$driftnet_dir unset?") unless ($dir);
+ $dir =~ s@/+$@@;
+
+ error ("$dir unreadable") unless (-d "$dir/.");
+
+ $timeout = $http_timeout unless ($timeout);
+ $last_search = $id;
+
+ # NOTE(review): this loop re-scans $dir without sleeping between
+ # iterations, so it spins at full speed until a file appears -- confirm
+ # whether a short sleep is wanted here.
+ while ($now = time, $now < $start + $timeout) {
+ local *DIR;
+ opendir (DIR, $dir) || error ("$dir: $!");
+ while (my $file = readdir(DIR)) {
+ next if ($file =~ m/^\./); # skip dotfiles ("." and "..")
+ $file = "$dir/$file";
+ closedir DIR; # take the first file found
+ LOG ($verbose_load, "picked file $file ($id)");
+ return ($id, $file);
+ }
+ closedir DIR;
+ }
+ LOG (($verbose_net || $verbose_load), "timed out for $id");
+ return ();
+}
+
+
+# Slurp the contents of a file that pick_from_driftnet() handed back,
+# then delete it so it is never picked twice.
+# Returns ($driftnet_magic, $body).
+sub get_driftnet_file {
+ my ($file) = @_;
+
+ error ("\$driftnet_dir unset?") unless ($driftnet_dir);
+
+ my $id = $driftnet_magic;
+ # Sanity-check that the file really lives under our private driftnet
+ # directory before we read and unlink it.
+ my $re = qr/$driftnet_dir/;
+ error ("$id: $file not in $driftnet_dir?")
+ unless ($file =~ m@^$re@o);
+
+ local *IN;
+ open (IN, $file) || error ("$id: $file: $!");
+ my $body = '';
+ while (<IN>) { $body .= $_; }
+ close IN;
+ unlink ($file);
+ return ($id, $body);
+}
+
+
+# Fork a driftnet process that deposits sniffed images into a freshly
+# created private temporary directory (recorded in $driftnet_dir). The
+# child's pid is pushed onto @pids_to_kill so signal_cleanup() can kill
+# it when we exit. Dies via error() if the child fails to start.
+sub spawn_driftnet {
+ my ($cmd) = @_;
+
+ # make a directory to use.
+ while (1) {
+ my $tmp = $ENV{TEMPDIR} || "/tmp";
+ $driftnet_dir = sprintf ("$tmp/driftcollage-%08x", rand(0xffffffff));
+ LOG ($verbose_exec, "mkdir $driftnet_dir");
+ last if mkdir ($driftnet_dir, 0700);
+ }
+
+ if (! ($cmd =~ m/\s/)) {
+ # if the command didn't have any arguments in it, then it must be just
+ # a pointer to the executable. Append the default args to it.
+ my $dargs = $default_driftnet_cmd;
+ $dargs =~ s/^[^\s]+//;
+ $cmd .= $dargs;
+ }
+
+ # point the driftnet command at our newly-minted private directory.
+ #
+ $cmd .= " -d $driftnet_dir";
+ $cmd .= ">/dev/null" unless ($verbose_exec);
+
+ my $pid = fork();
+ # NOTE(review): Perl's fork() returns undef on failure, not a negative
+ # number, so this "< 0" check can never fire -- confirm and fix upstream.
+ if ($pid < 0) { error ("fork: $!\n"); }
+ if ($pid) {
+ # parent fork
+ push @pids_to_kill, $pid;
+ LOG ($verbose_exec, "forked for \"$cmd\"");
+ } else {
+ # child fork
+ nontrapping_system ($cmd) || error ("exec: $!");
+ }
+
+ # wait a bit, then make sure the process actually started up.
+ #
+ sleep (1);
+ error ("pid $pid failed to start \"$cmd\"")
+ unless (1 == kill (0, $pid));
+}
+
\f
############################################################################
#
# Picks a random image on a random page, and returns two URLs:
# the page containing the image, and the image.
# Returns () if nothing found this time.
-# Uses the url-randomizer 1 time in 5, else the image randomizer.
#
sub pick_image {
$_ = $url;
my ($site) = m@^http://([^ \t\n\r/:]+)@;
+ if ($base eq $driftnet_magic) {
+ $site = $driftnet_magic;
+ @recent_images = ();
+ }
+
my $done = 0;
foreach (@recent_images) {
if ($_ eq $url) {
while (ord($ch) != 0xDA && $i < $L) {
# Find next marker, beginning with 0xFF.
while (ord($ch) != 0xFF) {
+ return () if (length($body) <= $i);
$ch = substr($body, $i, 1); $i++;
}
# markers can be padded with any number of 0xFF.
while (ord($ch) == 0xFF) {
+ return () if (length($body) <= $i);
$ch = substr($body, $i, 1); $i++;
}
($marker != 0xC4) &&
($marker != 0xCC)) { # it's a SOFn marker
$i += 3;
+ return () if (length($body) <= $i);
my $s = substr($body, $i, 4); $i += 4;
my ($a,$b,$c,$d) = unpack("C"x4, $s);
return (($c<<8|$d), ($a<<8|$b));
} else {
# We must skip variables, since FFs in variable names aren't
# valid JPEG markers.
+ return () if (length($body) <= $i);
my $s = substr($body, $i, 2); $i += 2;
my ($c1, $c2) = unpack ("C"x2, $s);
my $length = ($c1 << 8) | $c2;
}
+# Common exit path: installed as a %SIG handler (called with a signal
+# name) and also invoked from the END block (with no argument). Removes
+# temp files via x_cleanup(), TERMs any forked helper processes (e.g.
+# driftnet), then exits with a failure status.
+sub signal_cleanup {
+ my ($sig) = @_;
+ print STDERR blurb() . (defined($sig)
+ ? "caught signal $sig."
+ : "exiting.")
+ . "\n"
+ if ($verbose_exec);
+
+ x_cleanup();
+
+ if (@pids_to_kill) {
+ print STDERR blurb() . "killing: " . join(' ', @pids_to_kill) . "\n";
+ kill ('TERM', @pids_to_kill);
+ }
+
+ # NOTE(review): because the END block calls this on every exit, exit(1)
+ # here forces a nonzero status even on a normal exit -- confirm intended.
+ exit 1;
+}
+
##############################################################################
#
# Generating a list of urls only
my $img_width; # size of the image being generated.
my $img_height;
-my $delay = 0;
-
+my $delay = 2;
sub x_cleanup {
- my ($sig) = @_;
- print STDERR blurb() . "caught signal $sig.\n" if ($verbose_exec);
unlink $image_ppm, $image_tmp1, $image_tmp2;
- exit 1;
}
sub x_or_pbm_output {
+ # Check for our helper program, to see whether we need to use PPM pipelines.
+ #
+ $_ = "webcollage-helper";
+ if (defined ($webcollage_helper) || which ($_)) {
+ $webcollage_helper = $_ unless (defined($webcollage_helper));
+ LOG ($verbose_pbm, "found \"$webcollage_helper\"");
+ $webcollage_helper .= " -v";
+ } else {
+ LOG (($verbose_pbm || $verbose_load), "no $_ program");
+ }
+
# make sure the various programs we execute exist, right up front.
#
- foreach ("ppmmake", "giftopnm", "djpeg", "pnmpaste", "pnmscale", "pnmcut") {
+ my @progs = ("ppmmake"); # always need this one
+
+ if (!defined($webcollage_helper)) {
+ # Only need these others if we don't have the helper.
+ @progs = (@progs, "giftopnm", "djpeg", "pnmpaste", "pnmscale", "pnmcut");
+ }
+
+ foreach (@progs) {
which ($_) || error "$_ not found on \$PATH.";
}
#
$ppm_to_root_window_cmd = pick_root_displayer();
-
- $SIG{HUP} = \&x_cleanup;
- $SIG{INT} = \&x_cleanup;
- $SIG{QUIT} = \&x_cleanup;
- $SIG{ABRT} = \&x_cleanup;
- $SIG{KILL} = \&x_cleanup;
- $SIG{TERM} = \&x_cleanup;
-
- # Need this so that if giftopnm dies, we don't die.
- $SIG{PIPE} = 'IGNORE';
-
if (!$img_width || !$img_height) {
$_ = "xdpyinfo";
which ($_) || error "$_ not found on \$PATH.";
LOG ($verbose_pbm, "got $img (" . length($body) . ")");
- my ($iw, $ih) = image_to_pnm ($img, $body, $image_tmp1);
- $body = undef;
- if (!$iw || !$ih) {
- LOG ($verbose_pbm, "unable to make PBM from $img");
- return 0;
+ my ($iw, $ih);
+
+ # If we are using the webcollage-helper, then we do not need to convert this
+ # image to a PPM. But, if we're using a filter command, we still must, since
+ # that's what the filters expect (webcollage-helper can read PPMs, so that's
+ # fine.)
+ #
+ if (defined ($webcollage_helper) &&
+ !defined ($filter_cmd)) {
+
+ ($iw, $ih) = image_size ($body);
+ if (!$iw || !$ih) {
+ LOG (($verbose_pbm || $verbose_load),
+ "not a GIF or JPG" .
+ (($body =~ m@<(base|html|head|body|script|table|a href)>@i)
+ ? " (looks like HTML)" : "") .
+ ": $img");
+ $suppress_audit = 1;
+ $body = undef;
+ return 0;
+ }
+
+ local *OUT;
+ open (OUT, ">$image_tmp1") || error ("writing $image_tmp1: $!");
+ print OUT $body || error ("writing $image_tmp1: $!");
+ close OUT || error ("writing $image_tmp1: $!");
+
+ } else {
+ ($iw, $ih) = image_to_pnm ($img, $body, $image_tmp1);
+ $body = undef;
+ if (!$iw || !$ih) {
+ LOG ($verbose_pbm, "unable to make PBM from $img");
+ return 0;
+ }
}
record_success ($load_method, $img, $base);
my $target_h = $img_height;
my $cmd = "";
+ my $scale = 1.0;
# Usually scale the image to fit on the screen -- but sometimes scale it
# scale it to fit, we instead cut it in half until it fits -- that should
# give a wider distribution of sizes.
#
- if (rand() < 0.3) { $target_w /= 2; $target_h /= 2; }
- if (rand() < 0.3) { $target_w /= 2; $target_h /= 2; }
+ if (rand() < 0.3) { $target_w /= 2; $target_h /= 2; $scale /= 2; }
+ if (rand() < 0.3) { $target_w /= 2; $target_h /= 2; $scale /= 2; }
if ($iw > $target_w || $ih > $target_h) {
while ($iw > $target_w ||
$cmd =~ s@^ *\| *@@;
- $_ = "($cmd)";
- $_ .= " < $image_tmp1 > $image_tmp2";
+ if (defined ($webcollage_helper)) {
+ $cmd = "$webcollage_helper $image_tmp1 $image_ppm " .
+ "$scale $opacity " .
+ "$crop_x $crop_y $x $y " .
+ "$iw $ih";
+ $_ = $cmd;
+
+ } else {
+ # use a PPM pipeline
+ $_ = "($cmd)";
+ $_ .= " < $image_tmp1 > $image_tmp2";
+ }
if ($verbose_pbm) {
$_ = "($_) 2>&1 | sed s'/^/" . blurb() . "/'";
} else {
$_ .= " 2> /dev/null";
}
+
my $rc = nontrapping_system ($_);
+ if (defined ($webcollage_helper) && -z $image_ppm) {
+ LOG (1, "failed command: \"$cmd\"");
+ print STDERR "\naudit log:\n\n\n";
+ print STDERR ("#" x 78) . "\n";
+ print STDERR blurb() . "$image_ppm has zero size\n";
+ showlog();
+ print STDERR "\n\n";
+ exit (1);
+ }
+
if ($rc != 0) {
LOG (($verbose_pbm || $verbose_load), "failed command: \"$cmd\"");
LOG (($verbose_pbm || $verbose_load), "failed URL: \"$img\" (${ow}x$oh)");
return;
}
- rename ($image_tmp2, $image_ppm) || return;
+ if (!defined ($webcollage_helper)) {
+ rename ($image_tmp2, $image_ppm) || return;
+ }
my $target = "$image_ppm";
# cumulative.
#
if ($post_filter_cmd) {
+
+ my $cmd;
+
$target = $image_tmp1;
- $rc = nontrapping_system "($post_filter_cmd) < $image_ppm > $target";
+ if (!defined ($webcollage_helper)) {
+ $cmd = "($post_filter_cmd) < $image_ppm > $target";
+ } else {
+ # Blah, my scripts need the JPEG data, but some other folks need
+ # the PPM data -- what to do? Ignore the problem, that's what!
+# $cmd = "djpeg < $image_ppm | ($post_filter_cmd) > $target";
+ $cmd = "($post_filter_cmd) < $image_ppm > $target";
+ }
+
+ $rc = nontrapping_system ($cmd);
if ($rc != 0) {
LOG ($verbose_pbm, "filter failed: \"$post_filter_cmd\"\n");
return;
}
+sub init_signals {
+
+ $SIG{HUP} = \&signal_cleanup;
+ $SIG{INT} = \&signal_cleanup;
+ $SIG{QUIT} = \&signal_cleanup;
+ $SIG{ABRT} = \&signal_cleanup;
+ $SIG{KILL} = \&signal_cleanup;
+ $SIG{TERM} = \&signal_cleanup;
+
+ # Need this so that if giftopnm dies, we don't die.
+ $SIG{PIPE} = 'IGNORE';
+}
+
+END { signal_cleanup(); }
+
+
sub main {
$| = 1;
srand(time ^ $$);
my $verbose = 0;
my $dict;
+ my $driftnet_cmd = 0;
$current_state = "init";
$load_method = "none";
$http_proxy = shift @ARGV;
} elsif ($_ eq "-dictionary" || $_ eq "-dict") {
$dict = shift @ARGV;
+ } elsif ($_ eq "-driftnet" || $_ eq "--driftnet") {
+ @search_methods = ( 100, "driftnet", \&pick_from_driftnet );
+ if (! ($ARGV[0] =~ m/^-/)) {
+ $driftnet_cmd = shift @ARGV;
+ } else {
+ $driftnet_cmd = $default_driftnet_cmd;
+ }
+ } elsif ($_ eq "-debug" || $_ eq "--debug") {
+ my $which = shift @ARGV;
+ my @rest = @search_methods;
+ my $ok = 0;
+ while (@rest) {
+ my $pct = shift @rest;
+ my $name = shift @rest;
+ my $tfn = shift @rest;
+
+ if ($name eq $which) {
+ @search_methods = (100, $name, $tfn);
+ $ok = 1;
+ last;
+ }
+ }
+ error "no such search method as \"$which\"" unless ($ok);
+ LOG (1, "DEBUG: using only \"$which\"");
+
} else {
- print STDERR "$copyright\nusage: $progname [-root]" .
- " [-display dpy] [-root] [-verbose] [-timeout secs]\n" .
- "\t\t [-delay secs] [-filter cmd] [-filter2 cmd]\n" .
- "\t\t [-dictionary dictionary-file]\n" .
- "\t\t [-http-proxy host[:port]]\n";
+ print STDERR "$copyright\nusage: $progname " .
+ "[-root] [-display dpy] [-verbose] [-debug which]\n" .
+ "\t\t [-timeout secs] [-delay secs] [-filter cmd] [-filter2 cmd]\n" .
+ "\t\t [-no-output] [-urls-only] [-background color] [-size WxH]\n" .
+ "\t\t [-dictionary dictionary-file] [-http-proxy host[:port]]\n" .
+ "\t\t [-driftnet [driftnet-program-and-args]]\n" .
+ "\n";
exit 1;
}
}
pick_dictionary();
}
+ init_signals();
+
+ spawn_driftnet ($driftnet_cmd) if ($driftnet_cmd);
+
if ($urls_only_p) {
url_only_output;
} else {