* implement -R; restart non-proxied command-line FTP xfers

* fix fetch_url() so that hcode parsing is not done for file:// URLs
  (a } in the wrong place, and code at the wrong indent level...)
* change outfile into a global (so it gets correctly reset)
* change parse_url() to not remove the leading '/' for non-ftp URLs.
  Whilst this is not totally RFC 1738 compliant, other code assumes
  this is the case, and it doesn't hurt (see the sketch below).
lukem 1999-03-22 07:36:40 +00:00
parent 1337db796b
commit bed9aad370
5 changed files with 130 additions and 101 deletions
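
The parse_url() change in the last bullet can be pictured with a minimal standalone sketch. This is not the actual parse_url() from fetch.c; url_path(), after_host and is_ftp are invented names used only to show the slash rule described in the diff below (ftp:// URLs lose every leading '/', http:// and file:// URLs keep theirs):

#include <stdio.h>

/*
 * Sketch of the leading-'/' rule only, not code from fetch.c.
 * Given the part of a URL that follows "scheme://host", ftp:// URLs
 * have every leading '/' stripped; other URLs keep the leading '/'.
 */
static const char *
url_path(const char *after_host, int is_ftp)
{
    if (is_ftp)
        while (*after_host == '/')  /* strip all leading /'s for ftp */
            after_host++;
    return (after_host);            /* otherwise keep the leading / */
}

int
main(void)
{
    /* "/pub/file" is what follows the host in scheme://host/pub/file */
    printf("ftp:  `%s'\n", url_path("/pub/file", 1));   /* pub/file  */
    printf("http: `%s'\n", url_path("/pub/file", 0));   /* /pub/file */
    return (0);
}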

extern.h

@@ -1,4 +1,4 @@
/* $NetBSD: extern.h,v 1.27 1999/02/07 13:14:06 lukem Exp $ */
/* $NetBSD: extern.h,v 1.28 1999/03/22 07:36:40 lukem Exp $ */
/*-
* Copyright (c) 1994 The Regents of the University of California.
@@ -46,7 +46,7 @@ void abortsend __P((int));
void account __P((int, char **));
void alarmtimer __P((int));
int another __P((int *, char ***, const char *));
int auto_fetch __P((int, char **, char *));
int auto_fetch __P((int, char **));
void blkfree __P((char **));
void cd __P((int, char **));
void cdup __P((int, char **));

fetch.c

@@ -1,4 +1,4 @@
/* $NetBSD: fetch.c,v 1.51 1999/03/15 08:52:17 christos Exp $ */
/* $NetBSD: fetch.c,v 1.52 1999/03/22 07:36:40 lukem Exp $ */
/*-
* Copyright (c) 1997, 1998, 1999 The NetBSD Foundation, Inc.
@@ -38,7 +38,7 @@
#include <sys/cdefs.h>
#ifndef lint
__RCSID("$NetBSD: fetch.c,v 1.51 1999/03/15 08:52:17 christos Exp $");
__RCSID("$NetBSD: fetch.c,v 1.52 1999/03/22 07:36:40 lukem Exp $");
#endif /* not lint */
/*
@@ -81,10 +81,9 @@ typedef enum {
void aborthttp __P((int));
static int auth_url __P((const char *, char **));
static void base64_encode __P((const char *, size_t, char *));
static int go_fetch __P((const char *, const char *));
static int fetch_ftp __P((const char *, const char *));
static int fetch_url __P((const char *, const char *, const char *,
char *, char *));
static int go_fetch __P((const char *));
static int fetch_ftp __P((const char *));
static int fetch_url __P((const char *, const char *, char *, char *));
static int parse_url __P((const char *, const char *, url_t *, char **,
char **, char **, in_port_t *, char **));
static void url_decode __P((char *));
@@ -246,6 +245,11 @@ url_decode(url)
* Sets type to url_t, each of the given char ** pointers to a
* malloc(3)ed strings of the relevant section, and port to
* the number given, or ftpport if ftp://, or httpport if http://.
*
* XXX: this is not totally RFC1738 compliant; path will have the
* leading `/' unless it's an ftp:// URL; this makes things easier
* for file:// and http:// URLs. ftp:// URLs have all leading `/'s
* removed.
*/
static int
parse_url(url, desc, type, user, pass, host, port, path)
@@ -302,7 +306,6 @@ cleanup_parse_url:
thost = (char *)xmalloc(len + 1);
strncpy(thost, url, len);
thost[len] = '\0';
ep++; /* skip first / for all URLs */
if (*type == FTP_URL_T) /* skip all leading /'s for ftp URLs */
while (*ep && *ep == '/')
ep++;
@@ -359,9 +362,8 @@ jmp_buf httpabort;
* is still open (e.g, ftp xfer with trailing /)
*/
static int
fetch_url(url, outfile, proxyenv, proxyauth, wwwauth)
fetch_url(url, proxyenv, proxyauth, wwwauth)
const char *url;
const char *outfile;
const char *proxyenv;
char *proxyauth;
char *wwwauth;
@@ -387,7 +389,7 @@ fetch_url(url, outfile, proxyenv, proxyauth, wwwauth)
s = -1;
buf = savefile = NULL;
auth = location = message = NULL;
ischunked = isproxy = 0;
ischunked = isproxy = hcode = 0;
rval = 1;
hp = NULL;
user = pass = host = path = decodedpath = NULL;
@@ -420,7 +422,7 @@ fetch_url(url, outfile, proxyenv, proxyauth, wwwauth)
if (EMPTYSTRING(path)) {
if (urltype == FTP_URL_T) {
rval = fetch_ftp(url, outfile);
rval = fetch_ftp(url);
goto cleanup_fetch_url;
}
if (urltype != HTTP_URL_T || outfile == NULL) {
@@ -443,11 +445,14 @@ fetch_url(url, outfile, proxyenv, proxyauth, wwwauth)
}
if (EMPTYSTRING(savefile)) {
if (urltype == FTP_URL_T) {
rval = fetch_ftp(url, outfile);
rval = fetch_ftp(url);
goto cleanup_fetch_url;
}
warnx("Invalid URL (no file after directory) `%s'", url);
goto cleanup_fetch_url;
} else {
if (debug)
fprintf(ttyout, "got savefile as `%s'\n", savefile);
}
filesize = -1;
@@ -786,7 +791,6 @@ fetch_url(url, outfile, proxyenv, proxyauth, wwwauth)
}
FREEPTR(buf);
}
switch (hcode) {
case 200:
@@ -797,7 +801,8 @@ fetch_url(url, outfile, proxyenv, proxyauth, wwwauth)
case 303:
case 305:
if (EMPTYSTRING(location)) {
warnx("No redirection Location provided by server");
warnx(
"No redirection Location provided by server");
goto cleanup_fetch_url;
}
if (redirect_loop++ > 5) {
@@ -808,12 +813,13 @@ fetch_url(url, outfile, proxyenv, proxyauth, wwwauth)
if (verbose)
fprintf(ttyout, "Redirected via %s\n",
location);
rval = fetch_url(url, outfile, location, proxyauth,
wwwauth);
rval = fetch_url(url, location,
proxyauth, wwwauth);
} else {
if (verbose)
fprintf(ttyout, "Redirected to %s\n", location);
rval = go_fetch(location, outfile);
fprintf(ttyout, "Redirected to %s\n",
location);
rval = go_fetch(location);
}
goto cleanup_fetch_url;
case 401:
@@ -823,30 +829,36 @@ fetch_url(url, outfile, proxyenv, proxyauth, wwwauth)
fprintf(ttyout, "%s\n", message);
if (EMPTYSTRING(auth)) {
warnx("No authentication challenge provided by server");
warnx(
"No authentication challenge provided by server");
goto cleanup_fetch_url;
}
authp = (hcode == 401) ? &wwwauth : &proxyauth;
if (*authp != NULL) {
char reply[10];
fprintf(ttyout, "Authorization failed. Retry (y/n)? ");
if (fgets(reply, sizeof(reply), stdin) != NULL &&
tolower(reply[0]) != 'y')
fprintf(ttyout,
"Authorization failed. Retry (y/n)? ");
if (fgets(reply, sizeof(reply), stdin) != NULL
&& tolower(reply[0]) != 'y')
goto cleanup_fetch_url;
}
if (auth_url(auth, authp) == 0) {
rval = fetch_url(url, outfile, proxyenv, proxyauth,
wwwauth);
rval = fetch_url(url, proxyenv,
proxyauth, wwwauth);
memset(*authp, '\0', strlen(*authp));
FREEPTR(*authp);
}
goto cleanup_fetch_url;
}
default:
if (message)
warnx("Error retrieving file - `%s'", message);
else
warnx("Unknown error retrieving file");
goto cleanup_fetch_url;
}
} /* end of ftp:// or http:// specific setup */
oldintr = oldintp = NULL;
@@ -1012,9 +1024,8 @@ aborthttp(notused)
* is still open (e.g, ftp xfer with trailing /)
*/
static int
fetch_ftp(url, outfile)
fetch_ftp(url)
const char *url;
const char *outfile;
{
char *cp, *xargv[5], rempath[MAXPATHLEN];
char portnum[6]; /* large enough for "65535\0" */
@@ -1148,10 +1159,10 @@ fetch_ftp(url, outfile)
xargv[3] = NULL;
xargc++;
}
if (restartautofetch)
reget(xargc, xargv);
else
get(xargc, xargv);
if (outfile != NULL && strcmp(outfile, "-") != 0
&& outfile[0] != '|')
outfile = NULL;
}
if ((code / 100) == COMPLETE)
@@ -1178,9 +1189,8 @@ cleanup_fetch_ftp:
* is still open (e.g, ftp xfer with trailing /)
*/
static int
go_fetch(url, outfile)
go_fetch(url)
const char *url;
const char *outfile;
{
#ifndef SMALL
@@ -1209,7 +1219,7 @@ go_fetch(url, outfile)
*/
if (strncasecmp(url, HTTP_URL, sizeof(HTTP_URL) - 1) == 0 ||
strncasecmp(url, FILE_URL, sizeof(FILE_URL) - 1) == 0)
return (fetch_url(url, outfile, NULL, NULL, NULL));
return (fetch_url(url, NULL, NULL, NULL));
/*
* Try FTP URL-style and host:file arguments next.
@@ -1217,9 +1227,9 @@ go_fetch(url, outfile)
* Othewise, use fetch_ftp().
*/
if (ftpproxy && strncasecmp(url, FTP_URL, sizeof(FTP_URL) - 1) == 0)
return (fetch_url(url, outfile, NULL, NULL, NULL));
return (fetch_url(url, NULL, NULL, NULL));
return (fetch_ftp(url, outfile));
return (fetch_ftp(url));
}
/*
@@ -1235,10 +1245,9 @@ go_fetch(url, outfile)
* Otherwise, 0 is returned if all files retrieved successfully.
*/
int
auto_fetch(argc, argv, outfile)
auto_fetch(argc, argv)
int argc;
char *argv[];
char *outfile;
{
volatile int argpos;
int rval;
@@ -1261,7 +1270,10 @@ auto_fetch(argc, argv, outfile)
break;
redirect_loop = 0;
anonftp = 1; /* Handle "automatic" transfers. */
rval = go_fetch(argv[argpos], outfile);
rval = go_fetch(argv[argpos]);
if (outfile != NULL && strcmp(outfile, "-") != 0
&& outfile[0] != '|')
outfile = NULL;
if (rval > 0)
rval = argpos + 1;
}
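
Taken together, the -R flag and the now-global outfile changed in the fetch.c diff above behave roughly as in the following simplified, standalone sketch. It is not the real ftp(1) code: fetch_one(), do_get() and do_reget() are stand-ins for the fetch_ftp()/auto_fetch() path and for ftp's get and reget commands; only the names outfile and restartautofetch come from the actual sources.

#include <stdio.h>
#include <string.h>

/* globals, mirroring ftp_var.h: outfile is now global so it can be reset */
char *outfile = NULL;       /* filename to output URLs to (-o) */
int restartautofetch = 0;   /* set by the new -R flag */

/* stand-ins for ftp's real get and reget commands */
static void do_get(const char *file)   { printf("get %s\n", file); }
static void do_reget(const char *file) { printf("reget %s\n", file); }

/* hypothetical stand-in for one fetch_ftp()/auto_fetch() transfer step */
static void
fetch_one(const char *file)
{
    if (restartautofetch)       /* -R given: restart with reget */
        do_reget(file);
    else
        do_get(file);

    /*
     * Reset outfile after each transfer unless it is "-" (stdout) or a
     * "|command" pipe, so a plain -o filename does not carry over to
     * later fetches.
     */
    if (outfile != NULL && strcmp(outfile, "-") != 0 &&
        outfile[0] != '|')
        outfile = NULL;
}

int
main(void)
{
    restartautofetch = 1;       /* as if -R had been given */
    outfile = "local.bin";      /* as if -o local.bin had been given */
    fetch_one("ftp://host/pub/a.bin");
    fetch_one("ftp://host/pub/b.bin");  /* outfile already reset here */
    return (0);
}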

ftp.1

@@ -1,4 +1,4 @@
.\" $NetBSD: ftp.1,v 1.35 1999/03/15 08:22:20 garbled Exp $
.\" $NetBSD: ftp.1,v 1.36 1999/03/22 07:36:40 lukem Exp $
.\"
.\" Copyright (c) 1985, 1989, 1990, 1993
.\" The Regents of the University of California. All rights reserved.
@@ -35,7 +35,7 @@
.\"
.\" @(#)ftp.1 8.3 (Berkeley) 10/9/94
.\"
.Dd March 8, 1999
.Dd March 22, 1999
.Dt FTP 1
.Os
.Sh NAME
@@ -73,12 +73,14 @@ file transfer program
file:///\fIfile\fR
.Nm ftp
.Op Fl f
.Op Fl R
.Bk -words
.Op Fl o Ar output
.Ek
ftp://[\fIuser\fR[:\fIpassword]\fR@]\fIhost\fR[:\fIport\fR]/\fIfile\fR[/]
.Nm ftp
.Op Fl f
.Op Fl R
.Bk -words
.Op Fl o Ar output
.Ek
@@ -184,6 +186,8 @@ Sets the port number to
Retry the connection attempt if it failed, pausing for
.Ar wait
seconds.
.It Fl R
Restart all non-proxied ftp auto-fetches.
.It Fl t
Enables packet tracing.
.It Fl v
@@ -1173,9 +1177,15 @@ will connect to the site and
to the directory given as the path, and leave the user in interactive
mode ready for further input.
.Pp
If successive auto-fetch ftp elements refer to the same host, then
the connection is maintained between transfers, reducing overhead on
connection creation and deletion.
If
.Fl R
is given, all ftp auto-fetches that don't go via the
.Ev ftp_proxy
will be restarted.
This is implemented by using
.Nm reget
instead of
.Nm get .
.Pp
If
.Ic file

ftp_var.h

@@ -1,4 +1,4 @@
/* $NetBSD: ftp_var.h,v 1.30 1999/03/08 04:36:13 lukem Exp $ */
/* $NetBSD: ftp_var.h,v 1.31 1999/03/22 07:36:40 lukem Exp $ */
/*
* Copyright (c) 1985, 1989, 1993, 1994
@@ -152,6 +152,9 @@ const char *ftpproxy; /* ftp:// proxy server */
const char *httpproxy; /* http:// proxy server */
const char *no_proxy; /* list of domains not to proxy */
char *outfile; /* filename to output URLs to */
int restartautofetch; /* restart auto-fetch */
jmp_buf toplevel; /* non-local goto stuff for cmd scanner */
char line[FTPBUFLEN]; /* input line buffer */

main.c

@@ -1,4 +1,4 @@
/* $NetBSD: main.c,v 1.38 1999/03/08 04:36:13 lukem Exp $ */
/* $NetBSD: main.c,v 1.39 1999/03/22 07:36:41 lukem Exp $ */
/*
* Copyright (c) 1985, 1989, 1993, 1994
@@ -43,7 +43,7 @@ __COPYRIGHT("@(#) Copyright (c) 1985, 1989, 1993, 1994\n\
#if 0
static char sccsid[] = "@(#)main.c 8.6 (Berkeley) 10/9/94";
#else
__RCSID("$NetBSD: main.c,v 1.38 1999/03/08 04:36:13 lukem Exp $");
__RCSID("$NetBSD: main.c,v 1.39 1999/03/22 07:36:41 lukem Exp $");
#endif
#endif /* not lint */
@@ -82,7 +82,6 @@ main(argc, argv)
long port;
struct passwd *pw = NULL;
char *cp, *ep, homedir[MAXPATHLEN];
char *outfile = NULL;
int dumbterm;
sp = getservbyname("ftp", "tcp");
@@ -124,6 +123,8 @@
verbose = 0;
progress = 0;
gatemode = 0;
outfile = NULL;
restartautofetch = 0;
#ifndef SMALL
editing = 0;
el = NULL;
@@ -186,7 +187,7 @@
#endif
}
while ((ch = getopt(argc, argv, "Aadefgino:pP:r:tvV")) != -1) {
while ((ch = getopt(argc, argv, "Aadefgino:pP:r:RtvV")) != -1) {
switch (ch) {
case 'A':
activefallback = 0;
@@ -245,11 +246,14 @@
case 'r':
retry_connect = strtol(optarg, &ep, 10);
if (retry_connect < 1 || retry_connect > MAX_IN_PORT_T
|| *ep != '\0')
if (retry_connect < 1 || *ep != '\0')
errx(1, "bad retry value: %s", optarg);
break;
case 'R':
restartautofetch = 1;
break;
case 't':
trace = 1;
break;
@@ -299,7 +303,7 @@
if (argc > 0) {
if (strchr(argv[0], ':') != NULL) {
rval = auto_fetch(argc, argv, outfile);
rval = auto_fetch(argc, argv);
if (rval >= 0) /* -1 == connected and cd-ed */
exit(rval);
} else {
@@ -747,9 +751,9 @@ usage()
(void)fprintf(stderr,
"usage: %s [-AadeginptvV] [-r retry] [-P port] [host [port]]\n"
" %s [-f] [-o outfile] file:///file\n"
" %s [-f] [-o outfile] ftp://[user[:pass]@]host[:port]/path[/]\n"
" %s [-fR] [-o outfile] ftp://[user[:pass]@]host[:port]/path[/]\n"
" %s [-f] [-o outfile] http://host[:port]/path\n"
" %s [-f] [-o outfile] host:path[/]\n",
" %s [-fR] [-o outfile] host:path[/]\n",
__progname, __progname, __progname, __progname, __progname);
exit(1);
}