#!/usr/bin/perl -w
#
-# webcollage, Copyright © 1999-2014 by Jamie Zawinski <jwz@jwz.org>
+# webcollage, Copyright © 1999-2015 by Jamie Zawinski <jwz@jwz.org>
# This program decorates the screen with random images from the web.
# One satisfied customer described it as "a nonstop pop culture brainbath."
#
# To run this as a display mode with xscreensaver, add this to `programs':
#
-# webcollage -root
-# webcollage -root -filter 'vidwhacker -stdin -stdout'
+# webcollage --root
+# webcollage --root --filter 'vidwhacker --stdin --stdout'
#
#
-# You can see this in action at http://www.jwz.org/webcollage/ --
+# You can see this in action at https://www.jwz.org/webcollage/ --
# it auto-reloads about once a minute. To make a page similar to
# that on your own system, do this:
#
-# webcollage -size '800x600' -imagemap $HOME/www/webcollage/index
+# webcollage --size '800x600' --imagemap $HOME/www/webcollage/index
#
#
# If you have the "driftnet" program installed, webcollage can display a
# Driftnet is available here: http://www.ex-parrot.com/~chris/driftnet/
# Use it like so:
#
-# webcollage -root -driftnet
+# webcollage --root --driftnet
#
# Driftnet is the Unix implementation of the MacOS "EtherPEG" program.
my $progname = $0; $progname =~ s@.*/@@g;
-my ($version) = ('$Revision: 1.164 $' =~ m/\s(\d[.\d]+)\s/s);
-my $copyright = "WebCollage $version, Copyright (c) 1999-2014" .
+my ($version) = ('$Revision: 1.173 $' =~ m/\s(\d[.\d]+)\s/s);
+my $copyright = "WebCollage $version, Copyright (c) 1999-2015" .
" Jamie Zawinski <jwz\@jwz.org>\n" .
- " http://www.jwz.org/webcollage/\n";
+ " https://www.jwz.org/webcollage/\n";
"yandex.ru" => 1,
"imgur.com" => 1,
"yfrog.com" => 1,
+ "cdninstagram.com" => 1,
"yimg.com" => 1, # This is where dailynews.yahoo.com stores
"eimg.com" => 1, # its images, so pick_from_yahoo_news_text()
my $user_agent = "$progname/$version";
- if ($url =~ m@^http://www\.altavista\.com/@ ||
- $url =~ m@^http://random\.yahoo\.com/@ ||
- $url =~ m@^http://images\.google\.com/@ ||
- $url =~ m@^http://www\.google\.com/@) {
+ if ($url =~ m@^https?://www\.altavista\.com/@s ||
+ $url =~ m@^https?://random\.yahoo\.com/@s ||
+ $url =~ m@^https?://[^./]+\.google\.com/@s ||
+ $url =~ m@^https?://www\.livejournal\.com/@s) {
# block this, you turkeys.
- $user_agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.1.7)" .
- " Gecko/20070914 Firefox/2.0.0.7";
-
- # 28-Jun-2007: Google Images now emits the entire page in JS if
- # you claim to be Gecko. They also still block "webcollage".
- # They serve non-JS for unrecognised agents, so let's try this...
- $user_agent = "NoJavascriptPlease/1.0"
- if ($url =~ m@^http://[a-z]+\.google\.com/@);
+ $user_agent = 'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.1.7)' .
+ ' Gecko/20070914 Firefox/2.0.0.7';
}
- my $ua = LWP::UserAgent->new;
- $ua->env_proxy();
- $ua->agent ("$progname/$version");
- $ua->default_header ('Referer' => $referer);
+ my $ua = LWP::UserAgent->new ( agent => $user_agent,
+ keep_alive => 0,
+ env_proxy => 0,
+ );
+ $ua->proxy ('http', $http_proxy) if $http_proxy;
+ $ua->default_header ('Referer' => $referer) if $referer;
+ $ua->default_header ('Accept' => '*/*');
$ua->timeout($timeout) if $timeout;
+ if (0) {
+ $ua->add_handler ("request_send",
+ sub($$$) {
+ my ($req, $ua, $h) = @_;
+ print "\n>>[[\n"; $req->dump; print "\n]]\n";
+ return;
+ });
+ $ua->add_handler ("response_data",
+ sub($$$$) {
+ my ($req, $ua, $h, $data) = @_;
+ #print "\n<<[[\n"; print $data; print "\n]]\n";
+ return 1;
+ });
+ $ua->add_handler ("request_done",
+ sub($$$) {
+ my ($req, $ua, $h) = @_;
+ print "\n<<[[\n"; $req->dump; print "\n]]\n";
+ return;
+ });
+ }
+
if ($verbose_http) {
LOG (1, " ==> GET $url");
LOG (1, " ==> User-Agent: $user_agent");
my $res = $ua->get ($url);
my $http = ($res ? $res->status_line : '') || '';
- my $head = ($res ? $res->headers()->as_string : '') || '';
+ my $head = ($res ? $res->headers() : '') || '';
+ $head = $head->as_string() if $head;
my $body = ($res && $res->is_success ? $res->decoded_content : '') || '';
LOG ($verbose_net, "get_document_1 $url " . ($referer ? $referer : ""));
$url = $location;
if ($url =~ m@^/@) {
- $referer =~ m@^(http://[^/]+)@i;
+ $referer =~ m@^(https?://[^/]+)@i;
$url = $1 . $url;
} elsif (! ($url =~ m@^[a-z]+:@i)) {
$_ = $referer;
- s@[^/]+$@@g if m@^http://[^/]+/@i;
- $_ .= "/" if m@^http://[^/]+$@i;
+ s@[^/]+$@@g if m@^https?://[^/]+/@i;
+ $_ .= "/" if m@^https?://[^/]+$@i;
$url = $_ . $url;
}
# if there's at least one slash after the host, take off the last
# pathname component
- if ( m@^http://[^/]+/@io ) {
+ if ( m@^https?://[^/]+/@io ) {
$base =~ s@[^/]+$@@go;
}
# if there are no slashes after the host at all, put one on the end.
- if ( m@^http://[^/]+$@io ) {
+ if ( m@^https?://[^/]+$@io ) {
$base .= "/";
}
if ( m@^/@o ) {
my $site;
- ( $site = $base ) =~ s@^(http://[^/]*).*@$1@gio;
+ ( $site = $base ) =~ s@^(https?://[^/]*).*@$1@gio;
$_ = "$site$link";
} elsif ( ! m@^[^/:?]+:@ ) {
$_ = "$base$link";
}
# skip non-http
- if ( ! m@^http://@io ) {
+ if ( ! m@^https?://@io ) {
next;
}
my ($u) = m@<A\s.*\bHREF\s*=\s*([^>]+)>@i;
next unless $u;
- if (m/\bm="{(.*?)}"/s) { # Bing info is inside JSON crud
+ if (m/\bm="\{(.*?)\}"/s) { # Bing info is inside JSON crud
my $json = html_unquote($1);
my ($href) = ($json =~ m/\bsurl:"(.*?)"/s);
my ($img) = ($json =~ m/\bimgurl:"(.*?)"/s);
my (@urls) = @_;
my @urls2 = ();
foreach (@urls) {
- my ($h) = m@^http://([^/: \t\r\n]+)@i;
+ my ($h) = m@^https?://([^/: \t\r\n]+)@i;
next unless defined($h);
next unless ($u =~ s/^.*\*\*(http%3a.*$)/$1/gsi);
$u = url_unquote($u);
- next unless ($u =~ m@^http://@i); # skip non-HTTP or relative URLs
+ next unless ($u =~ m@^https?://@i); # skip non-HTTP or relative URLs
next if ($u =~ m@[/.]altavista\.com\b@i); # skip altavista builtins
next if ($u =~ m@[/.]yahoo\.com\b@i); # yahoo and av in cahoots?
next if ($u =~ m@[/.]doubleclick\.net\b@i); # you cretins
#
############################################################################
-my $bing_images_url = "http://www.bing.com/images/async" .
- "?CW=0" .
- "&CH=0" .
- "&q=";
+my $bing_images_url = "http://www.bing.com/images/async?q=";
# bingimgs
next unless ($u =~ s/^.*\*\*(http%3a.*$)/$1/gsi);
$u = url_unquote($u);
- next unless ($u =~ m@^http://@i); # skip non-HTTP or relative URLs
+ next unless ($u =~ m@^https?://@i); # skip non-HTTP or relative URLs
next if ($u =~ m@[/.]altavista\.com\b@i); # skip altavista builtins
next if ($u =~ m@[/.]yahoo\.com\b@i); # yahoo and av in cahoots?
# next unless ($u =~ m@/director.asp\?.*\btarget=([^&]+)@);
# $u = url_decode($1);
- next unless ($u =~ m@^http://@i); # skip non-HTTP or relative URLs
+ next unless ($u =~ m@^https?://@i); # skip non-HTTP or relative URLs
next if ($u =~ m@[/.]hotbot\.com\b@i); # skip hotbot builtins
next if ($u =~ m@[/.]lycos\.com\b@i); # skip hotbot builtins
next if ($u =~ m@[/.]inktomi\.com\b@i); # skip hotbot builtins
# Lycos plays redirection games.
# (not any more?)
-# next unless ($u =~ m@^http://click.lycos.com/director.asp
+# next unless ($u =~ m@^https?://click.lycos.com/director.asp
# .*
# \btarget=([^&]+)
# .*
# @x);
# $u = url_decode($1);
- next unless ($u =~ m@^http://@i); # skip non-HTTP or relative URLs
+ next unless ($u =~ m@^https?://@i); # skip non-HTTP or relative URLs
next if ($u =~ m@[/.]hotbot\.com\b@i); # skip lycos builtins
next if ($u =~ m@[/.]lycos\.com\b@i); # skip lycos builtins
next if ($u =~ m@[/.]terralycos\.com\b@i); # skip lycos builtins
foreach my $u (@subpages) {
# de-redirectize the URLs
- $u =~ s@^http://rds\.yahoo\.com/.*-http%3A@http:@s;
+ $u =~ s@^https?://rds\.yahoo\.com/.*-http%3A@http:@s;
# only accept URLs on Yahoo's news site
- next unless ($u =~ m@^http://dailynews\.yahoo\.com/@i ||
- $u =~ m@^http://story\.news\.yahoo\.com/@i);
+ next unless ($u =~ m@^https?://dailynews\.yahoo\.com/@i ||
+ $u =~ m@^https?://story\.news\.yahoo\.com/@i);
next unless ($u =~ m@&u=/@);
LOG ($verbose_filter, " candidate: $u");
$last_search = $livejournal_img_url; # for warnings
my ( $base, $body ) = get_document ($livejournal_img_url, undef, $timeout);
- return () unless $body;
+
+ # Often the document comes back empty. If so, just use the cache.
+ # return () unless $body;
+ $body = '' unless defined($body);
$body =~ s/\n/ /gs;
$body =~ s/(<recent-image)\b/\n$1/gsi;
if ($u =~ m/^\"([^\"]*)\"/) { $u = $1; } # quoted string
elsif ($u =~ m/^([^\s]*)\s/) { $u = $1; } # or token
- next unless ($u =~ m/^http:/i);
- next if ($u =~ m@^http://(searchirc\.com\|ircimages\.com)@i);
+ next unless ($u =~ m/^https?:/i);
+ next if ($u =~ m@^https?://(searchirc\.com\|ircimages\.com)@i);
next unless ($u =~ m@[.](gif|jpg|jpeg|pjpg|pjpeg|png)$@i);
LOG ($verbose_http, " HREF: $u");
# Parsing JSON is a pain in the ass. So we halfass it as usual.
$body =~ s/^\[|\]$//s;
$body =~ s/(\[.*?\])/{ $_ = $1; s@\},@\} @gs; $_; }/gsexi;
- my @items = split (/},{/, $body);
+ my @items = split (/\},\{/, $body);
foreach (@items) {
my ($name) = m@"screen_name":"([^\"]+)"@si;
my ($img) = m@"media_url":"([^\"]+)"@si;
$last_search .= "?start=$start" if ($start > 0);
my ( $base, $body ) = get_document ($last_search, undef, $timeout);
- return () unless $body;
- $body =~ s/[\r\n]/ /gs;
- $body =~ s/(<a)\b/\n$1/gsi;
+  # If the document comes back empty, just use the cache.
+ # return () unless $body;
+ $body = '' unless defined($body);
my $count = 0;
my $count2 = 0;
- foreach (split (/\n/, $body)) {
- my ($page, $thumb) = m@<A \s [^<>]* \b HREF=\"([^<>\"]+)\" [^<>]* > \s*
- <IMG \s [^<>]* \b
- data-defer-src = \"([^<>\"]+)\" @xsi;
- next unless defined ($thumb);
- $page = html_unquote ($page);
- $thumb = html_unquote ($thumb);
+ if ($body =~ m@{ *"_data": \[ ( .*? \} ) \]@six) {
+ $body = $1;
+ } else {
+ LOG ($verbose_load, "flickr unparsable: $last_search");
+ return ();
+ }
- next unless ($thumb =~ m@^https?://[^/.]+\d*\.static\.?flickr\.com/@);
+ $body =~ s/[\r\n]/ /gs;
+ $body =~ s/(\},) *(\{)/$1\n$2/gs; # "_flickrModelRegistry"
- my $base = "http://www.flickr.com/";
- $page =~ s@^/@$base@;
- $thumb =~ s@^/@$base@;
+ foreach my $chunk (split (/\n/, $body)) {
+ my ($img) = ($chunk =~ m@"displayUrl": *"(.*?)"@six);
+ next unless defined ($img);
+ $img =~ s/\\//gs;
+ $img = "//" unless ($img =~ m@^/@s);
+ $img = "http:$img" unless ($img =~ m/^http/s);
- my $img = $thumb;
- $img =~ s/_[a-z](\.[a-z\d]+)$/$1/si; # take off "thumb" suffix
+ my ($user) = ($chunk =~ m/"pathAlias": *"(.*?)"/si);
+ next unless defined ($user);
+
+ my ($id) = ($img =~ m@/\d+/(\d+)_([\da-f]+)_@si);
+ my ($page) = "https://www.flickr.com/photos/$user/$id/";
+
+ # $img =~ s/_[a-z](\.[a-z\d]+)$/$1/si; # take off "thumb" suffix
$count++;
next if ($flickr_cache{$img}); # already have it
\f
############################################################################
#
-# Pick random images from Instagram, via gramfeed.com's key.
+# Pick random images from Instagram.
#
############################################################################
-my $instagram_url_base = "https://api.instagram.com/v1/media/popular" .
- "?client_id=b59fbe4563944b6c88cced13495c0f49";
+my $instagram_url_base = "https://api.instagram.com/v1/media/popular";
# instagram_random
sub pick_from_instagram($) {
my $timeout = shift;
- $last_search = $instagram_url_base;
+ # Liberated access tokens.
+ # jsdo.it search for: instagram client_id
+ # Google search for: instagram "&client_id=" site:jsfiddle.net
+ my @tokens = ('b59fbe4563944b6c88cced13495c0f49', # gramfeed.com
+ 'fa26679250df49c48a33fbcf30aae989', # instac.at
+ 'd9494686198d4dfeb954979a3e270e5e', # iconosquare.com
+ '793ef48bb18e4197b61afce2d799b81c', # jsdo.it
+ '67b8a3e0073449bba70600d0fc68e6cb', # jsdo.it
+ '26a098e0df4d4b9ea8b4ce6c505b7742', # jsdo.it
+ '2437cbcd906a4c10940f990d283d3cd5', # jsdo.it
+ '191c7d7d5312464cbd92134f36ffdab5', # jsdo.it
+ 'acfec809437b4340b2c38f66503af774', # jsdo.it
+ 'e9f77604a3a24beba949c12d18130988', # jsdo.it
+ '2cd7bcf68ae346529770073d311575b3', # jsdo.it
+ '830c600fe8d742e2ab3f3b94f9bb22b7', # jsdo.it
+ '55865a0397ad41e5997dd95ef4df8da1', # jsdo.it
+ '192a5742f3644ea8bed1d25e439286a8', # jsdo.it
+ '38ed1477e7a44595861b8842cdb8ba23', # jsdo.it
+ 'e52f79f645f54488ad0cc47f6f55ade6', # jsfiddle.net
+ );
+
+ my $tok = $tokens[int(rand($#tokens+1))];
+ $last_search = $instagram_url_base . "?client_id=" . $tok;
print STDERR "\n\n" if ($verbose_load);
LOG ($verbose_load, "URL: $last_search");
foreach (@chunks) {
s/\\//gs;
my ($url) = m/"link":\s*"(.*?)"/s;
- my ($img) = m/"standard_resolution":{"url":\s*"(.*?)"/s;
+ my ($img) = m/"standard_resolution":\{"url":\s*"(.*?)"/s;
($img) = m/"url":\s*"(.*?)"/s unless $url;
next unless ($url && $img);
push @urls, [ $url, $img ];
return unless ($verbose_warnings);
$_ = $url;
- my ($site) = m@^http://([^ \t\n\r/:]+)@;
+ my ($site) = m@^https?://([^ \t\n\r/:]+)@;
return unless defined ($site);
if ($base eq $driftnet_magic || $base eq $local_magic) {
sub which($) {
my ($prog) = @_;
foreach (split (/:/, $ENV{PATH})) {
- if (-x "$_/$prog") {
- return $prog;
+ my $path = "$_/$prog";
+ if (-x $path) {
+ return $path;
}
}
return undef;
#
##############################################################################
-my $image_ppm = sprintf ("%s/webcollage-%08x.ppm",
- ($ENV{TMPDIR} ? $ENV{TMPDIR} : "/tmp"),
- rand(0xFFFFFFFF));
-my $image_tmp1 = sprintf ("%s/webcollage-1-%08x.ppm",
- ($ENV{TMPDIR} ? $ENV{TMPDIR} : "/tmp"),
- rand(0xFFFFFFFF));
-my $image_tmp2 = sprintf ("%s/webcollage-2-%08x.ppm",
- ($ENV{TMPDIR} ? $ENV{TMPDIR} : "/tmp"),
- rand(0xFFFFFFFF));
+my ($image_ppm, $image_tmp1, $image_tmp2);
+{
+ my $seed = rand(0xFFFFFFFF);
+ $image_ppm = sprintf ("%s/webcollage-%08x",
+ ($ENV{TMPDIR} ? $ENV{TMPDIR} : "/tmp"),
+ $seed);
+ $image_tmp1 = $image_ppm . '-1.ppm';
+ $image_tmp2 = $image_ppm . '-2.ppm';
+ $image_ppm .= '.ppm';
+}
+
my $filter_cmd = undef;
my $post_filter_cmd = undef;
sub x_or_pbm_output($) {
my ($window_id) = @_;
+ # Adjust the PATH for OS X 10.10.
+ #
+ $_ = $0;
+ s:/[^/]*$::;
+ s/([^a-zA-Z0-9._\-+\/])/\\$1/g;
+ $ENV{PATH} = "$_:$ENV{PATH}";
+
# Check for our helper program, to see whether we need to use PPM pipelines.
#
$_ = "webcollage-helper";
- if (defined ($webcollage_helper) || which ($_)) {
- $webcollage_helper = $_ unless (defined($webcollage_helper));
+
+ if (! defined ($webcollage_helper)) {
+ $webcollage_helper = which ($_);
+ }
+
+ if (defined ($webcollage_helper)) {
LOG ($verbose_pbm, "found \"$webcollage_helper\"");
- $webcollage_helper .= " -v";
+ $webcollage_helper = "'$webcollage_helper' -v";
} else {
LOG (($verbose_pbm || $verbose_load), "no $_ program");
}
which ($_) || error "$_ not found on \$PATH.";
}
+ # If we're using webcollage-helper and not a filter, then the tmp files
+ # are JPEGs, not PPMs.
+ #
+ if (defined ($webcollage_helper) && !defined ($filter_cmd)) {
+ foreach ($image_ppm, $image_tmp1, $image_tmp2) {
+ s/\.ppm$/.jpg/s;
+ }
+ }
+
+
# find a root-window displayer program.
#
if (!$no_output_p) {
return 0;
}
+ if ($iw <= 0 || $ih <= 0 || $iw > 9999 || $ih > 9999) {
+ LOG (($verbose_pbm || $verbose_load),
+ "ludicrous image dimensions: $iw x $ih (" . length($body) .
+ "): $img");
+ $body = undef;
+ return 0;
+ }
+
open (my $out, '>', $image_tmp1) || error ("writing $image_tmp1: $!");
(print $out $body) || error ("writing $image_tmp1: $!");
close ($out) || error ("writing $image_tmp1: $!");
#
sub set_proxy() {
- if (! $http_proxy) {
+ if (! defined($http_proxy)) {
# historical suckage: the environment variable name is lower case.
$http_proxy = $ENV{http_proxy} || $ENV{HTTP_PROXY};
}
if (defined ($http_proxy)) {
- if ($http_proxy && $http_proxy =~ m@^http://([^/]*)/?$@ ) {
+ if ($http_proxy && $http_proxy =~ m@^https?://([^/]*)/?$@ ) {
# historical suckage: allow "http://host:port" as well as "host:port".
$http_proxy = $1;
}
}
}
+ delete $ENV{http_proxy};
+ delete $ENV{HTTP_PROXY};
+ delete $ENV{https_proxy};
+ delete $ENV{HTTPS_PROXY};
+ delete $ENV{PERL_LWP_ENV_PROXY};
+
if ($http_proxy) {
+ $http_proxy = 'http://' . $http_proxy;
LOG ($verbose_net, "proxy server: $http_proxy");
+ } else {
+ $http_proxy = undef; # for --proxy ''
}
}
my $root_p = 0;
my $window_id = undef;
- while ($_ = $ARGV[0]) {
- shift @ARGV;
- if ($_ eq "-display" ||
- $_ eq "-displ" ||
- $_ eq "-disp" ||
- $_ eq "-dis" ||
- $_ eq "-dpy" ||
- $_ eq "-d") {
+ while ($#ARGV >= 0) {
+ $_ = shift @ARGV;
+ if (m/^--?d(i(s(p(l(a(y)?)?)?)?)?)?$/s) {
$ENV{DISPLAY} = shift @ARGV;
- } elsif ($_ eq "-root") {
+ } elsif (m/^--?root$/s) {
$root_p = 1;
- } elsif ($_ eq "-window-id" || $_ eq "--window-id") {
+ } elsif (m/^--?window-id$/s) {
$window_id = shift @ARGV;
$root_p = 1;
- } elsif ($_ eq "-no-output") {
+ } elsif (m/^--?no-output$/s) {
$no_output_p = 1;
- } elsif ($_ eq "-urls-only") {
+ } elsif (m/^--?urls(-only)?$/s) {
$urls_only_p = 1;
$no_output_p = 1;
- } elsif ($_ eq "-cocoa") {
+ } elsif (m/^--?cocoa$/s) {
$cocoa_p = 1;
- } elsif ($_ eq "-imagemap") {
+ } elsif (m/^--?imagemap$/s) {
$imagemap_base = shift @ARGV;
$no_output_p = 1;
- } elsif ($_ eq "-verbose") {
+ } elsif (m/^--?verbose$/s) {
$verbose++;
} elsif (m/^-v+$/) {
$verbose += length($_)-1;
- } elsif ($_ eq "-delay") {
+ } elsif (m/^--?delay$/s) {
$delay = shift @ARGV;
- } elsif ($_ eq "-timeout") {
+ } elsif (m/^--?timeout$/s) {
$http_timeout = shift @ARGV;
- } elsif ($_ eq "-filter") {
+ } elsif (m/^--?filter$/s) {
$filter_cmd = shift @ARGV;
- } elsif ($_ eq "-filter2") {
+ } elsif (m/^--?filter2$/s) {
$post_filter_cmd = shift @ARGV;
- } elsif ($_ eq "-background" || $_ eq "-bg") {
+ } elsif (m/^--?(background|bg)$/s) {
$background = shift @ARGV;
- } elsif ($_ eq "-size") {
+ } elsif (m/^--?size$/s) {
$_ = shift @ARGV;
if (m@^(\d+)x(\d+)$@) {
$img_width = $1;
$img_height = $2;
} else {
- error "argument to \"-size\" must be of the form \"640x400\"";
+ error "argument to \"--size\" must be of the form \"640x400\"";
}
- } elsif ($_ eq "-proxy" || $_ eq "-http-proxy") {
+ } elsif (m/^--?(http-)?proxy$/s) {
$http_proxy = shift @ARGV;
- } elsif ($_ eq "-dictionary" || $_ eq "-dict") {
+ } elsif (m/^--?dict(ionary)?$/s) {
$dict = shift @ARGV;
- } elsif ($_ eq "-opacity") {
+ } elsif (m/^--?opacity$/s) {
$opacity = shift @ARGV;
error ("opacity must be between 0.0 and 1.0")
if ($opacity <= 0 || $opacity > 1);
- } elsif ($_ eq "-driftnet" || $_ eq "--driftnet") {
+ } elsif (m/^--?driftnet$/s) {
@search_methods = ( 100, "driftnet", \&pick_from_driftnet );
if (! ($ARGV[0] =~ m/^-/)) {
$driftnet_cmd = shift @ARGV;
} else {
$driftnet_cmd = $default_driftnet_cmd;
}
- } elsif ($_ eq "-directory" || $_ eq "--directory") {
+ } elsif (m/^--?dir(ectory)?$/s) {
@search_methods = ( 100, "local", \&pick_from_local_dir );
if (! ($ARGV[0] =~ m/^-/)) {
$local_dir = shift @ARGV;
} else {
error ("local directory path must be set")
}
- } elsif ($_ eq "-fps") {
+ } elsif (m/^--?fps$/s) {
# -fps only works on MacOS, via "webcollage-cocoa.m".
# Ignore it if passed to this script in an X11 context.
- } elsif ($_ eq "-debug" || $_ eq "--debug") {
+ } elsif (m/^--?debug$/s) {
my $which = shift @ARGV;
my @rest = @search_methods;
my $ok = 0;
}
error "no such search method as \"$which\"" unless ($ok);
LOG (1, "DEBUG: using only \"$which\"");
+ $report_performance_interval = 30;
} else {
+ print STDERR "unknown option: $_\n\n";
print STDERR "$copyright\nusage: $progname " .
- "[-root] [-display dpy] [-verbose] [-debug which]\n" .
- "\t\t [-timeout secs] [-delay secs] [-size WxH]\n" .
- "\t\t [-no-output] [-urls-only] [-imagemap filename]\n" .
- "\t\t [-background color] [-opacity f]\n" .
- "\t\t [-filter cmd] [-filter2 cmd]\n" .
- "\t\t [-dictionary dictionary-file] [-http-proxy host[:port]]\n" .
- "\t\t [-driftnet [driftnet-program-and-args]]\n" .
- "\t\t [-directory local-image-directory]\n" .
+ "[--root] [--display dpy] [--verbose] [--debug which]\n" .
+ "\t\t [--timeout secs] [--delay secs] [--size WxH]\n" .
+ "\t\t [--no-output] [--urls-only] [--imagemap filename]\n" .
+ "\t\t [--background color] [--opacity f]\n" .
+ "\t\t [--filter cmd] [--filter2 cmd]\n" .
+ "\t\t [--dictionary dictionary-file] [--http-proxy host[:port]]\n" .
+ "\t\t [--driftnet [driftnet-program-and-args]]\n" .
+ "\t\t [--directory local-image-directory]\n" .
"\n";
exit 1;
}
if (!$root_p && !$no_output_p && !$cocoa_p) {
print STDERR $copyright;
- error "the -root argument is mandatory (for now.)";
+ error "the --root argument is mandatory (for now.)";
}
if (!$no_output_p && !$cocoa_p && !$ENV{DISPLAY}) {
}
if ($imagemap_base && !($img_width && $img_height)) {
- error ("-size WxH is required with -imagemap");
+ error ("--size WxH is required with --imagemap");
}
if (defined ($local_dir)) {