#!/usr/local/bin/perl
#
# url_get: script to obtain documents given their URL (Universal Resource
#          Locator) and write them to stdout
#
# syntax: url_get [-bdh] <url> [userid password]
#
# where -b means binary transfer when doing ftp transactions,
#       -h means keep the MIME header in HTTP 1.0 transactions (url_get
#          strips this by default). Header & data go to stdout.
#       -d means "debug" mode on HTTP transfers (HTTP MIME header goes
#          to stderr, body of document goes to stdout), or FTP transfers
#          (FTP replies from remote host go to stderr, data to stdout)
#
# return code: url_get returns 0 upon successful completion. If the
#	       server returns an HTTP error, url_get returns one of
#	       the following codes:
#
#	       HTTP code:     	url_get returns:
#	       ---------	---------------
#	        400		 1
#		401		 2
#		402		 3
#		403		 4
#		404		 5
#		500		 6
#		501		 7
#		502		 8
#		503		 9
#
#              For a full explanation of the HTTP codes, see url:
#	       http://info.cern.ch/hypertext/WWW/Protocols/HTTP/HTRESP.html
#	       If url_get aborts for any other reason, it returns 255
#	       (from the PERL "die" command).
########################################################################
# construct the path name from the location of this executable
# ----------------------------------------------------------------------
# Main script body: locate the directory holding this executable, pull
# in the companion url_get.pl library from there, parse the command
# line, and hand the URL off to &url_get for the actual transfer.
# ----------------------------------------------------------------------

# Resolve our own directory so url_get.pl is found regardless of the
# caller's current working directory.
require 'pwd.pl';
&initpwd;                       # refreshes $ENV{'PWD'} to the real cwd
$my_path = $0;
if (substr($my_path, 0, 1) ne '/') {
  # Invoked via a relative path: qualify it with the refreshed PWD.
  $my_path = $ENV{'PWD'} . '/' . $my_path;
}
# Strip the trailing filename component, leaving just the directory.
$my_path =~ s/\/[^\/]*$//;
$ENV{'PERLLIB'} = $my_path;

require "$my_path/url_get.pl";
require "getopts.pl";

$usage = "Usage: url_get [-bdh] <url> [userid password]\n";

# Parse -b/-d/-h into the globals $opt_b/$opt_d/$opt_h, which
# url_get.pl reads.  BUGFIX: the option string used to be ':bdh' (a
# stray leading colon -- in getopts.pl a colon FOLLOWS a letter that
# takes an argument) and the return value was ignored, so an unknown
# option only printed a warning and the transfer went ahead anyway.
# Getopts returns false on any parse error; abort with usage then.
&Getopts('bdh') || die $usage;

die $usage unless @ARGV;        # the URL argument is mandatory

$url    = shift;
$userid = shift;                # optional: authentication user id
$passwd = shift;                # optional: authentication password

# A userid without a password is an error.  BUGFIX: test with
# defined() rather than truth so a literal userid of "0" is still
# recognized as having been supplied.
die $usage if (defined($userid) && !defined($passwd));

# The fourth argument names the output filehandle; "&STDOUT" is the
# string convention url_get.pl expects for an already-open handle.
# &url_get's return value becomes our exit status (0 on success,
# 1-9 mapping the HTTP error codes listed in the header comment).
$status = &url_get($url, $userid, $passwd, "&STDOUT");
exit $status;

__END__
