author     Robert James Kaes <rjkaes@users.sourceforge.net>  2002-11-29 19:25:59 +0000
committer  Robert James Kaes <rjkaes@users.sourceforge.net>  2002-11-29 19:25:59 +0000
commit     d069b4fce99642a6675cf9312060744f978d6a03 (patch)
tree       dbba1b37f9bb9dd65492f5fc5e9449fc4a49d93e
parent     05b16a78db158f30c1cc65d458dd197f78fc3a66 (diff)
download   tinyproxy-d069b4fce99642a6675cf9312060744f978d6a03.tar.gz
           tinyproxy-d069b4fce99642a6675cf9312060744f978d6a03.zip
(extract_http_url): Removed the leading "http://" from all the tests,
since it's skipped by the caller before the URL is passed to this
function.

(process_request): Include code to handle proxied FTP requests as well.
This also led to a bit of a cleanup in the calling conventions of the
extract_http_url function: tinyproxy can handle both types of resources
by skipping the leading "scheme://" part.
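For illustration, a minimal standalone sketch of the new convention (the
skip_scheme() helper and the main() driver are hypothetical, not tinyproxy
code; the real process_request() also requires an upstream proxy to be
configured before accepting FTP): the scheme is tested with strncasecmp()
and everything up to and including the "//" is skipped with strstr()
before the URL reaches the parser.

#include <stdio.h>
#include <string.h>
#include <strings.h>            /* strncasecmp() */

/*
 * Return a pointer just past the "scheme://" prefix, or NULL if the
 * scheme is not one we proxy.  Mirrors the strstr(url, "//") + 2
 * idiom the patch introduces in process_request().
 */
static const char *
skip_scheme(const char *url)
{
        if (strncasecmp(url, "http://", 7) != 0
            && strncasecmp(url, "ftp://", 6) != 0)
                return NULL;

        /* The scheme check above guarantees "//" is present. */
        return strstr(url, "//") + 2;
}

int
main(void)
{
        /* Both resource types reduce to a host-first string. */
        printf("%s\n", skip_scheme("http://example.com/index.html"));
        printf("%s\n", skip_scheme("ftp://ftp.example.com/pub/README"));
        return 0;
}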
Diffstat
-rw-r--r--  src/reqs.c  23
1 file changed, 12 insertions, 11 deletions
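For reference, the sscanf() cascade that the first hunk below arrives at
can be exercised on its own.  This is a sketch only: the struct, its
buffer sizes, and the field widths in the formats are illustrative
assumptions (the real code instead bounds the URL length before parsing),
not tinyproxy's actual request_s.

#include <stdio.h>
#include <string.h>

struct request_s {
        char host[128];
        unsigned short port;
        char path[256];
};

/* Parse a URL whose "scheme://" prefix has already been skipped. */
static int
parse_url(const char *url, struct request_s *req)
{
        if (sscanf(url, "%127[^:/]:%hu%255s", req->host, &req->port,
                   req->path) == 3) ;             /* host:port/path */
        else if (sscanf(url, "%127[^/]%255s", req->host, req->path) == 2)
                req->port = 80;                   /* host/path */
        else if (sscanf(url, "%127[^:/]:%hu", req->host, &req->port) == 2)
                strcpy(req->path, "/");           /* host:port */
        else if (sscanf(url, "%127[^/]", req->host) == 1) {
                req->port = 80;                   /* bare host */
                strcpy(req->path, "/");
        } else
                return -1;

        return 0;
}

int
main(void)
{
        struct request_s req;

        if (parse_url("example.com:8080/index.html", &req) == 0)
                printf("%s %hu %s\n", req.host, req.port, req.path);
        if (parse_url("ftp.example.com/pub/README", &req) == 0)
                printf("%s %hu %s\n", req.host, req.port, req.path);
        return 0;
}

Note that the default port stays 80 even for FTP URLs, presumably because
such requests are only accepted when they will be forwarded to an
upstream HTTP proxy anyway.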
diff --git a/src/reqs.c b/src/reqs.c
index 276f975..f47fcc0 100644
--- a/src/reqs.c
+++ b/src/reqs.c
@@ -1,4 +1,4 @@
-/* $Id: reqs.c,v 1.86 2002-11-26 21:44:43 rjkaes Exp $
+/* $Id: reqs.c,v 1.87 2002-11-29 19:25:59 rjkaes Exp $
  *
  * This is where all the work in tinyproxy is actually done. Incoming
  * connections have a new child created for them. The child then
@@ -190,7 +190,8 @@ free_request_struct(struct request_s *request)
 }
 
 /*
- * Pull the information out of the URL line.
+ * Pull the information out of the URL line. This will handle both HTTP
+ * and FTP (proxied) URLs.
  */
 static int
 extract_http_url(const char *url, struct request_s *request)
@@ -202,14 +203,14 @@ extract_http_url(const char *url, struct request_s *request)
                goto ERROR_EXIT;
 
        if (sscanf
-           (url, "http://%[^:/]:%hu%s", request->host, &request->port,
+           (url, "%[^:/]:%hu%s", request->host, &request->port,
             request->path) == 3) ;
-       else if (sscanf(url, "http://%[^/]%s", request->host, request->path) == 2)
+       else if (sscanf(url, "%[^/]%s", request->host, request->path) == 2)
                request->port = 80;
-       else if (sscanf(url, "http://%[^:/]:%hu", request->host, &request->port)
+       else if (sscanf(url, "%[^:/]:%hu", request->host, &request->port)
                 == 2)
                strcpy(request->path, "/");
-       else if (sscanf(url, "http://%[^/]", request->host) == 1) {
+       else if (sscanf(url, "%[^/]", request->host) == 1) {
                request->port = 80;
                strcpy(request->path, "/");
        } else {
@@ -347,13 +348,13 @@ process_request(struct conn_s *connptr, hashmap_t hashofheaders)
                return NULL;
        }
 
-       if (strncasecmp(url, "http://", 7) == 0) {
-               /* Make sure the first four characters are lowercase */
-               memcpy(url, "http", 4);
+       if (strncasecmp(url, "http://", 7) == 0
+           || (UPSTREAM_CONFIGURED() && strncasecmp(url, "ftp://", 6) == 0)) {
+               char *skipped_type = strstr(url, "//") + 2;
 
-               if (extract_http_url(url, request) < 0) {
+               if (extract_http_url(skipped_type, request) < 0) {
                        indicate_http_error(connptr, 400,
-                                           "Bad Request. Could not parse URL.");
+                                           "Bad Request. Could not parse URL.");
 
                        safefree(url);
                        free_request_struct(request);