changeset 7094:2343c3aa1dec

[gaim-migrate @ 7659] grab_url() and parse_url() are gone, replaced with gaim_url_fetch() and gaim_url_parse(). They were also moved to util.[ch]. committer: Tailor Script <tailor@pidgin.im>
author Christian Hammond <chipx86@chipx86.com>
date Wed, 01 Oct 2003 03:01:25 +0000
parents 3650612c7daa
children c8bf2da398e3
files src/gtkprefs.c src/html.c src/html.h src/protocols/msn/msn.c src/protocols/toc/toc.c src/protocols/yahoo/yahoo.c src/util.c src/util.h
diffstat 8 files changed, 502 insertions(+), 381 deletions(-) [+]
line wrap: on
line diff
--- a/src/gtkprefs.c	Wed Oct 01 02:06:12 2003 +0000
+++ b/src/gtkprefs.c	Wed Oct 01 03:01:25 2003 +0000
@@ -44,9 +44,6 @@
 
 #include "ui.h"
 
-/* XXX for grab_url */
-#include "gaim.h"
-
 #define PROXYHOST 0
 #define PROXYPORT 1
 #define PROXYUSER 2
@@ -428,7 +425,10 @@
 				   1, description,
 				   2, theme->path,
 				   -1);
-		g_object_unref(G_OBJECT(pixbuf));
+
+		if (pixbuf != NULL)
+			g_object_unref(G_OBJECT(pixbuf));
+
 		g_free(description);
 		themes = themes->next;
 		if (current_smiley_theme && !strcmp(theme->path, current_smiley_theme->path)) {
@@ -489,7 +489,9 @@
 	theme_refresh_theme_list();
 }
 
-static void theme_got_url(gpointer data, char *themedata, unsigned long len) {
+static void
+theme_got_url(void *data, const char *themedata, size_t len)
+{
 	FILE *f;
 	gchar *path;
 
@@ -534,7 +536,7 @@
 
 			/* We'll check this just to make sure. This also lets us do something different on
 			 * other platforms, if need be */
-			grab_url(name, TRUE, theme_got_url, ".tgz", NULL, 0);
+			gaim_url_fetch(name, TRUE, NULL, FALSE, theme_got_url, ".tgz");
 		}
 
 		gtk_drag_finish(dc, TRUE, FALSE, t);
--- a/src/html.c	Wed Oct 01 02:06:12 2003 +0000
+++ b/src/html.c	Wed Oct 01 03:01:25 2003 +0000
@@ -71,281 +71,6 @@
 	return text2;
 }
 
-struct g_url *parse_url(char *url)
-{
-	struct g_url *test = g_new0(struct g_url, 1);
-	char scan_info[255];
-	char port[5];
-	int f;
-	char* turl;
-	/* hyphen at end includes it in control set */
-	char addr_ctrl[] = "A-Za-z0-9.-";
-	char port_ctrl[] = "0-9";
-	char page_ctrl[] = "A-Za-z0-9.~_/:*!@&%%?=+^-";
-
-	if((turl=strstr(url, "http://")) || (turl=strstr(url, "HTTP://")))
-		url=turl+=7;
-
-	snprintf(scan_info, sizeof(scan_info),
-		 "%%[%s]:%%[%s]/%%[%s]",
-		 addr_ctrl, port_ctrl, page_ctrl);
-
-	f = sscanf(url, scan_info, test->address, port, test->page);
-	if (f == 1) {
-		snprintf(scan_info, sizeof(scan_info),
-			 "%%[%s]/%%[%s]",
-			 addr_ctrl, page_ctrl);
-		f = sscanf(url, scan_info, test->address, test->page);
-		snprintf(port, sizeof(port), "80");
-	}
-	if (f == 1)
-		test->page[0] = '\0';
-
-	sscanf(port, "%d", &test->port);
-	return test;
-}
-
-struct grab_url_data {
-	void (* callback)(gpointer, char *, unsigned long);
-	gpointer data;
-	struct g_url *website;
-	char *url;
-	gboolean full;
-	char *user_agent;
-	int http11;
-
-	int inpa;
-
-	gboolean sentreq;
-	gboolean newline;
-	gboolean startsaving;
-	char *webdata;
-	unsigned long len;
-	unsigned long data_len;
-};
-
-static gboolean
-parse_redirect(const char *data, size_t data_len, gint sock,
-			   struct grab_url_data *gunk)
-{
-	gchar *s;
-
-	if ((s = g_strstr_len(data, data_len, "Location: ")) != NULL) {
-		gchar *new_url, *temp_url, *end;
-		gboolean full;
-		int len;
-
-		s += strlen("Location: ");
-		end = strchr(s, '\r');
-
-		/* Just in case :) */
-		if (end == NULL)
-			end = strchr(s, '\n');
-
-		len = end - s;
-
-		new_url = g_malloc(len + 1);
-		strncpy(new_url, s, len);
-		new_url[len] = '\0';
-
-		full = gunk->full;
-
-		if (*new_url == '/' || g_strstr_len(new_url, len, "://") == NULL) {
-			temp_url = new_url;
-
-			new_url = g_strdup_printf("%s:%d%s", gunk->website->address,
-									  gunk->website->port, temp_url);
-
-			g_free(temp_url);
-
-			full = FALSE;
-		}
-
-		/* Close the existing stuff. */
-		gaim_input_remove(gunk->inpa);
-		close(sock);
-
-		gaim_debug(GAIM_DEBUG_INFO, "grab_url",
-				   "Redirecting to %s\n", new_url);
-
-		/* Try again, with this new location. */
-		grab_url(new_url, full, gunk->callback, gunk->data, gunk->user_agent, gunk->http11);
-
-		/* Free up. */
-		g_free(new_url);
-		g_free(gunk->webdata);
-		g_free(gunk->website);
-		g_free(gunk->url);
-		g_free(gunk->user_agent);
-		g_free(gunk);
-
-		return TRUE;
-	}
-
-	return FALSE;
-}
-
-static size_t
-parse_content_len(const char *data, size_t data_len)
-{
-	size_t content_len = 0;
-
-	sscanf(data, "Content-Length: %d", &content_len);
-
-	return content_len;
-}
-
-static void grab_url_callback(gpointer dat, gint sock, GaimInputCondition cond)
-{
-	struct grab_url_data *gunk = dat;
-	char data;
-
-	if (sock == -1) {
-		gunk->callback(gunk->data, NULL, 0);
-		g_free(gunk->website);
-		g_free(gunk->url);
-		g_free(gunk->user_agent);
-		g_free(gunk);
-		return;
-	}
-
-	if (!gunk->sentreq) {
-		char buf[1024];
-
-		if(gunk->user_agent) {
-			if(gunk->http11)
-				g_snprintf(buf, sizeof(buf), "GET %s%s HTTP/1.1\r\nUser-Agent: \"%s\"\r\nHost: %s\r\n\r\n", gunk->full ? "" : "/",
-						gunk->full ? gunk->url : gunk->website->page, gunk->user_agent, gunk->website->address);
-			else
-				g_snprintf(buf, sizeof(buf), "GET %s%s HTTP/1.0\r\nUser-Agent: \"%s\"\r\n\r\n", gunk->full ? "" : "/",
-						gunk->full ? gunk->url : gunk->website->page, gunk->user_agent);
-		}
-		else {
-			if(gunk->http11)
-				g_snprintf(buf, sizeof(buf), "GET %s%s HTTP/1.1\r\nHost: %s\r\n\r\n", gunk->full ? "" : "/",
-						gunk->full ? gunk->url : gunk->website->page, gunk->website->address);
-			else
-				g_snprintf(buf, sizeof(buf), "GET %s%s HTTP/1.0\r\n\r\n", gunk->full ? "" : "/",
-						gunk->full ? gunk->url : gunk->website->page);
-
-		}
-		gaim_debug(GAIM_DEBUG_MISC, "grab_url_callback",
-				   "Request: %s\n", buf);
-
-		write(sock, buf, strlen(buf));
-		fcntl(sock, F_SETFL, O_NONBLOCK);
-		gunk->sentreq = TRUE;
-		gunk->inpa = gaim_input_add(sock, GAIM_INPUT_READ, grab_url_callback, dat);
-		gunk->data_len = 4096;
-		gunk->webdata = g_malloc(gunk->data_len);
-		return;
-	}
-
-	if (read(sock, &data, 1) > 0 || errno == EWOULDBLOCK) {
-		if (errno == EWOULDBLOCK) {
-			errno = 0;
-			return;
-		}
-
-		gunk->len++;
-
-		if (gunk->len == gunk->data_len + 1) {
-			gunk->data_len += (gunk->data_len) / 2;
-
-			gunk->webdata = g_realloc(gunk->webdata, gunk->data_len);
-		}
-
-		gunk->webdata[gunk->len - 1] = data;
-
-		if (!gunk->startsaving) {
-			if (data == '\r')
-				return;
-			if (data == '\n') {
-				if (gunk->newline) {
-					size_t content_len;
-					gunk->startsaving = TRUE;
-
-					/* See if we can find a redirect. */
-					if (parse_redirect(gunk->webdata, gunk->len, sock, gunk))
-						return;
-
-					/* No redirect. See if we can find a content length. */
-					content_len = parse_content_len(gunk->webdata, gunk->len);
-
-					if (content_len == 0) {
-						/* We'll stick with an initial 8192 */
-						content_len = 8192;
-					}
-
-					/* Out with the old... */
-					gunk->len = 0;
-					g_free(gunk->webdata);
-					gunk->webdata = NULL;
-
-					/* In with the new. */
-					gunk->data_len = content_len;
-					gunk->webdata = g_malloc(gunk->data_len);
-				}
-				else
-					gunk->newline = TRUE;
-				return;
-			}
-			gunk->newline = FALSE;
-		}
-	} else if (errno != ETIMEDOUT) {
-		gunk->webdata = g_realloc(gunk->webdata, gunk->len + 1);
-		gunk->webdata[gunk->len] = 0;
-
-		gaim_debug(GAIM_DEBUG_MISC, "grab_url_callback",
-				   "Received: '%s'\n", gunk->webdata);
-
-		gaim_input_remove(gunk->inpa);
-		close(sock);
-		gunk->callback(gunk->data, gunk->webdata, gunk->len);
-		if (gunk->webdata)
-			g_free(gunk->webdata);
-		g_free(gunk->website);
-		g_free(gunk->url);
-		g_free(gunk->user_agent);
-		g_free(gunk);
-	} else {
-		gaim_input_remove(gunk->inpa);
-		close(sock);
-		gunk->callback(gunk->data, NULL, 0);
-		if (gunk->webdata)
-			g_free(gunk->webdata);
-		g_free(gunk->website);
-		g_free(gunk->url);
-		g_free(gunk->user_agent);
-		g_free(gunk);
-	}
-}
-
-void grab_url(char *url, gboolean full, void callback(gpointer, char *, unsigned long),
-		gpointer data, char *user_agent, int http11)
-{
-	int sock;
-	struct grab_url_data *gunk = g_new0(struct grab_url_data, 1);
-
-	gunk->callback = callback;
-	gunk->data = data;
-	gunk->url = g_strdup(url);
-	gunk->user_agent = (user_agent) ? g_strdup(user_agent) : NULL;
-	gunk->http11 = http11;
-	gunk->website = parse_url(url);
-	gunk->full = full;
-
-	if ((sock = gaim_proxy_connect(NULL, gunk->website->address,
-								   gunk->website->port, grab_url_callback,
-								   gunk)) < 0) {
-		g_free(gunk->website);
-		g_free(gunk->url);
-		g_free(gunk->user_agent);
-		g_free(gunk);
-		callback(data, g_strdup(_("g003: Error opening connection.\n")), 0);
-	}
-}
-
 struct gaim_parse_tag {
 	char *src_tag;
 	char *dest_tag;
@@ -655,7 +380,7 @@
 	g_string_free(plain, TRUE);
 }
 
-int info_extract_field(char *original, char *add_to, char *start_tok,
+int info_extract_field(const char *original, char *add_to, char *start_tok,
 		int skip, char *end_tok, char check_value, char *no_value_tok,
 		char *display_name, int islink, char *link_prefix)
 {
--- a/src/html.h	Wed Oct 01 02:06:12 2003 +0000
+++ b/src/html.h	Wed Oct 01 03:01:25 2003 +0000
@@ -23,20 +23,9 @@
 #ifndef _GAIM_HTML_H_
 #define _GAIM_HTML_H_
 
-struct g_url {
-	char address[255];
-	int port;
-	char page[255];
-};
-
-void grab_url(char *url, gboolean full,
-			  void (*callback)(gpointer, char *, unsigned long),
-			  gpointer data, char *, int);
-
 gchar *strip_html(const gchar *text);
 void html_to_xhtml(const char *html, char **xhtml_out, char **plain_out);
-struct g_url *parse_url(char *url);
-int info_extract_field(char *, char *, char *, int,
+int info_extract_field(const char *, char *, char *, int,
 				char *, char, char *, char *, int, char *);
 
 #endif /* _GAIM_HTML_H_ */
--- a/src/protocols/msn/msn.c	Wed Oct 01 02:06:12 2003 +0000
+++ b/src/protocols/msn/msn.c	Wed Oct 01 03:01:25 2003 +0000
@@ -1215,15 +1215,16 @@
 }
 
 static void
-msn_got_info(gpointer data, char *url_text, unsigned long len)
+msn_got_info(void *data, const char *url_text, size_t len)
 {
 	GaimConnection *gc = (GaimConnection *)data;
 	char *stripped, *p, *q;
 	char buf[1024];
 	char *user_url = NULL;
 	gboolean found;
+	char *url_buffer;
 
-	if (url_text == NULL || strcmp(url_text,"") == 0)
+	if (url_text == NULL || strcmp(url_text, "") == 0)
 	{
 		gaim_notify_formatted(gc, NULL, _("Buddy Information"), NULL,
 			_("<html><body><b>Error retrieving profile</b></body></html>"),
@@ -1232,6 +1233,8 @@
 		return;
 	}
 
+	url_buffer = g_strdup(url_text);
+
 	/* If they have a homepage link, MSN masks it such that we need to
 	 * fetch the url out before strip_html() nukes it */
 	if ((p = strstr(url_text,
@@ -1247,61 +1250,61 @@
 	 * strip_html() doesn't strip out character entities like &nbsp;
 	 * and &#183;
 	 */
-	while ((p = strstr(url_text, "&nbsp;")) != NULL)
+	while ((p = strstr(url_buffer, "&nbsp;")) != NULL)
 	{
 		memmove(p, p + 6, strlen(p + 6));
-		url_text[strlen(url_text) - 6] = '\0';
+		url_buffer[strlen(url_buffer) - 6] = '\0';
 	}
 
-	while ((p = strstr(url_text, "&#183;")) != NULL)
+	while ((p = strstr(url_buffer, "&#183;")) != NULL)
 	{
 		memmove(p, p + 6, strlen(p + 6));
-		url_text[strlen(url_text) - 6] = '\0';
+		url_buffer[strlen(url_buffer) - 6] = '\0';
 	}
 
 	/* Nuke the nasty \r's that just get in the way */
-	while ((p = strchr(url_text, '\r')) != NULL)
+	while ((p = strchr(url_buffer, '\r')) != NULL)
 	{
 		memmove(p, p + 1, strlen(p + 1));
-		url_text[strlen(url_text) - 1] = '\0';
+		url_buffer[strlen(url_buffer) - 1] = '\0';
 	}
 
 	/* MSN always puts in &#39; for apostrophies...replace them */
-	while ((p = strstr(url_text, "&#39;")) != NULL)
+	while ((p = strstr(url_buffer, "&#39;")) != NULL)
 	{
 		*p = '\'';
 		memmove(p + 1, p + 5, strlen(p + 5));
-		url_text[strlen(url_text) - 4] = '\0';
+		url_buffer[strlen(url_buffer) - 4] = '\0';
 	}
 
 	/* Nuke the html, it's easier than trying to parse the horrid stuff */
-	stripped = strip_html(url_text);
+	stripped = strip_html(url_buffer);
 
-	/* Gonna re-use the memory we've already got for url_text */
-	strcpy(url_text, "<html><body>\n");
+	/* Gonna re-use the memory we've already got for url_buffer */
+	strcpy(url_buffer, "<html><body>\n");
 
 	/* Extract their Name and put it in */
-	info_extract_field(stripped, url_text, "\tName", 0, "\t", '\n',
+	info_extract_field(stripped, url_buffer, "\tName", 0, "\t", '\n',
 					   "Undisclosed", _("Name"), 0, NULL);
 
 	/* Extract their Age and put it in */
-	info_extract_field(stripped, url_text, "\tAge", 0, "\t", '\n',
+	info_extract_field(stripped, url_buffer, "\tAge", 0, "\t", '\n',
 					   "Undisclosed", _("Age"), 0, NULL);
 
 	/* Extract their Gender and put it in */
-	info_extract_field(stripped, url_text, "\tGender", 6, "\t", '\n',
+	info_extract_field(stripped, url_buffer, "\tGender", 6, "\t", '\n',
 					   "Undisclosed", _("Gender"), 0, NULL);
 
 	/* Extract their MaritalStatus and put it in */
-	info_extract_field(stripped, url_text, "\tMaritalStatus", 0, "\t", '\n',
+	info_extract_field(stripped, url_buffer, "\tMaritalStatus", 0, "\t", '\n',
 					   "Undisclosed", _("Marital Status"), 0, NULL);
 
 	/* Extract their Location and put it in */
-	info_extract_field(stripped, url_text, "\tLocation", 0, "\t", '\n',
+	info_extract_field(stripped, url_buffer, "\tLocation", 0, "\t", '\n',
 					   "Undisclosed", _("Location"), 0, NULL);
 
 	/* Extract their Occupation and put it in */
-	info_extract_field(stripped, url_text, "\t Occupation", 6, "\t", '\n',
+	info_extract_field(stripped, url_buffer, "\t Occupation", 6, "\t", '\n',
 					   "Undisclosed", _("Occupation"), 0, NULL);
 
 	/*
@@ -1313,13 +1316,13 @@
 	 */
 
 	/* Check if they have A Little About Me */
-	found = info_extract_field(stripped, url_text, "\tA Little About Me",
+	found = info_extract_field(stripped, url_buffer, "\tA Little About Me",
 							   1, "Favorite Things", '\n', NULL,
 							   _("A Little About Me"), 0, NULL);
 
 	if (!found)
 	{
-		found = info_extract_field(stripped, url_text,
+		found = info_extract_field(stripped, url_buffer,
 								   "\tA Little About Me", 1,
 								   "Hobbies and Interests", '\n', NULL,
 								   _("A Little About Me"), 0, NULL);
@@ -1327,7 +1330,7 @@
 
 	if (!found)
 	{
-		found = info_extract_field(stripped, url_text,
+		found = info_extract_field(stripped, url_buffer,
 								   "\tA Little About Me", 1,
 								   "Favorite Quote", '\n', NULL,
 								   _("A Little About Me"), 0, NULL);
@@ -1335,7 +1338,7 @@
 
 	if (!found)
 	{
-		found = info_extract_field(stripped, url_text,
+		found = info_extract_field(stripped, url_buffer,
 								   "\tA Little About Me", 1,
 								   "My Homepage\tTake a look", '\n', NULL,
 								   _("A Little About Me"), 0, NULL);
@@ -1343,45 +1346,45 @@
 
 	if (!found)
 	{
-		info_extract_field(stripped, url_text, "\tA Little About Me", 1,
+		info_extract_field(stripped, url_buffer, "\tA Little About Me", 1,
 						   "last updated", '\n',
 							NULL, _("A Little About Me"), 0, NULL);
 	}
 
 	/* Check if they have Favorite Things */
-	found = info_extract_field(stripped, url_text, "Favorite Things", 1,
+	found = info_extract_field(stripped, url_buffer, "Favorite Things", 1,
 							   "Hobbies and Interests", '\n', NULL,
 							   _("Favorite Things"), 0, NULL);
 
 	if (!found)
 	{
-		found = info_extract_field(stripped, url_text, "Favorite Things", 1,
+		found = info_extract_field(stripped, url_buffer, "Favorite Things", 1,
 								   "Favorite Quote", '\n', NULL,
 								   "Favorite Things", 0, NULL);
 	}
 
 	if (!found)
 	{
-		found = info_extract_field(stripped, url_text, "Favorite Things", 1,
+		found = info_extract_field(stripped, url_buffer, "Favorite Things", 1,
 								   "My Homepage\tTake a look", '\n', NULL,
 								   _("Favorite Things"), 0, NULL);
 	}
 
 	if (!found)
 	{
-		info_extract_field(stripped, url_text, "Favorite Things", 1,
+		info_extract_field(stripped, url_buffer, "Favorite Things", 1,
 						   "last updated", '\n', NULL,
 						   _("Favorite Things"), 0, NULL);
 	}
 
 	/* Check if they have Hobbies and Interests */
-	found = info_extract_field(stripped, url_text, "Hobbies and Interests",
+	found = info_extract_field(stripped, url_buffer, "Hobbies and Interests",
 							   1, "Favorite Quote", '\n', NULL,
 							   _("Hobbies and Interests"), 0, NULL);
 
 	if (!found)
 	{
-		found = info_extract_field(stripped, url_text,
+		found = info_extract_field(stripped, url_buffer,
 								   "Hobbies and Interests", 1,
 								   "My Homepage\tTake a look", '\n', NULL,
 								   _("Hobbies and Interests"), 0, NULL);
@@ -1389,25 +1392,25 @@
 
 	if (!found)
 	{
-		info_extract_field(stripped, url_text, "Hobbies and Interests",
+		info_extract_field(stripped, url_buffer, "Hobbies and Interests",
 						   1, "last updated", '\n', NULL,
 						   _("Hobbies and Interests"), 0, NULL);
 	}
 
 	/* Check if they have Favorite Quote */
-	found = info_extract_field(stripped, url_text, "Favorite Quote", 1,
+	found = info_extract_field(stripped, url_buffer, "Favorite Quote", 1,
 							   "My Homepage\tTake a look", '\n', NULL,
 							   _("Favorite Quote"), 0, NULL);
 
 	if (!found)
 	{
-		info_extract_field(stripped, url_text, "Favorite Quote", 1,
+		info_extract_field(stripped, url_buffer, "Favorite Quote", 1,
 						   "last updated", '\n', NULL,
 						   _("Favorite Quote"), 0, NULL);
 	}
 
 	/* Extract the last updated date and put it in */
-	info_extract_field(stripped, url_text, "\tlast updated:", 1, "\n", '\n',
+	info_extract_field(stripped, url_buffer, "\tlast updated:", 1, "\n", '\n',
 					   NULL, _("Last Updated"), 0, NULL);
 
 	/* If we were able to fetch a homepage url earlier, stick it in there */
@@ -1417,16 +1420,17 @@
 				   "<b>%s:</b><br><a href=\"%s\">%s</a><br>\n",
 				   _("Homepage"), user_url, user_url);
 
-		strcat(url_text, buf);
+		strcat(url_buffer, buf);
 	}
 
 	/* Finish it off, and show it to them */
-	strcat(url_text, "</body></html>\n");
+	strcat(url_buffer, "</body></html>\n");
 
 	gaim_notify_formatted(gc, NULL, _("Buddy Information"), NULL,
-						  url_text, NULL, NULL);
+						  url_buffer, NULL, NULL);
 
 	g_free(stripped);
+	g_free(url_buffer);
 }
 
 static void
@@ -1434,8 +1438,9 @@
 {
 	char url[256];
 	g_snprintf(url, sizeof url, "%s%s", PROFILE_URL, name);
-	grab_url(url, FALSE, msn_got_info, gc,
-			 "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)",1);
+	gaim_url_fetch(url, FALSE,
+				   "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)", TRUE,
+				   msn_got_info, gc);
 }
 
 static GaimPluginProtocolInfo prpl_info =
--- a/src/protocols/toc/toc.c	Wed Oct 01 02:06:12 2003 +0000
+++ b/src/protocols/toc/toc.c	Wed Oct 01 03:01:25 2003 +0000
@@ -491,7 +491,7 @@
 	return rp;
 }
 
-static void toc_got_info(gpointer data, char *url_text, unsigned long len)
+static void toc_got_info(void *data, const char *url_text, size_t len)
 {
 	if (!url_text)
 		return;
@@ -897,7 +897,7 @@
 		g_snprintf(tmp, sizeof(tmp), "http://%s:%d/%s", tdt->toc_ip,
 				gaim_account_get_int(gc->account, "port", TOC_PORT),
 				url);
-		grab_url(tmp, FALSE, toc_got_info, gc, NULL, 0);
+		gaim_url_fetch(tmp, FALSE, NULL, FALSE, toc_got_info, gc);
 	} else if (!g_ascii_strcasecmp(c, "DIR_STATUS")) {
 	} else if (!g_ascii_strcasecmp(c, "ADMIN_NICK_STATUS")) {
 	} else if (!g_ascii_strcasecmp(c, "ADMIN_PASSWD_STATUS")) {
--- a/src/protocols/yahoo/yahoo.c	Wed Oct 01 02:06:12 2003 +0000
+++ b/src/protocols/yahoo/yahoo.c	Wed Oct 01 03:01:25 2003 +0000
@@ -2387,10 +2387,11 @@
 	return TRUE;
 }
 
-static void yahoo_got_info(gpointer data, char *url_text, unsigned long len)
+static void yahoo_got_info(void *data, const char *url_text, size_t len)
 {
-	char *stripped,*p;
+	char *stripped, *p;
 	char buf[1024];
+	char *url_buffer;
 
 	/* we failed to grab the profile URL */
 	if (!url_text) {
@@ -2436,63 +2437,65 @@
 		return;
 	}
 
+	url_buffer = g_strdup(url_text);
+
 	/* strip_html() doesn't strip out character entities like &nbsp; and &#183;
 	*/
-	while ((p = strstr(url_text, "&nbsp;")) != NULL) {
+	while ((p = strstr(url_buffer, "&nbsp;")) != NULL) {
 		memmove(p, p + 6, strlen(p + 6));
-		url_text[strlen(url_text) - 6] = '\0';
+		url_buffer[strlen(url_buffer) - 6] = '\0';
 	}
-	while ((p = strstr(url_text, "&#183;")) != NULL) {
+	while ((p = strstr(url_buffer, "&#183;")) != NULL) {
 		memmove(p, p + 6, strlen(p + 6));
-		url_text[strlen(url_text) - 6] = '\0';
+		url_buffer[strlen(url_buffer) - 6] = '\0';
 	}
 
 	/* nuke the nasty \r's */
-	while ((p = strchr(url_text, '\r')) != NULL) {
+	while ((p = strchr(url_buffer, '\r')) != NULL) {
 		memmove(p, p + 1, strlen(p + 1));
-		url_text[strlen(url_text) - 1] = '\0';
+		url_buffer[strlen(url_buffer) - 1] = '\0';
 	}
 
 	/* nuke the html, it's easier than trying to parse the horrid stuff */
-	stripped = strip_html(url_text);
+	stripped = strip_html(url_buffer);
 
-	/* gonna re-use the memory we've already got for url_text */
-	strcpy(url_text, "<html><body>\n");
+	/* gonna re-use the memory we've already got for url_buffer */
+	strcpy(url_buffer, "<html><body>\n");
 
 	/* extract their Yahoo! ID and put it in */
-	info_extract_field(stripped, url_text, "Yahoo! ID:", 2, "\n", 0,
+	info_extract_field(stripped, url_buffer, "Yahoo! ID:", 2, "\n", 0,
 			NULL, _("Yahoo! ID"), 0, NULL);
 
 	/* extract their Email address and put it in */
-	info_extract_field(stripped, url_text, "My Email", 5, "\n", 0,
+	info_extract_field(stripped, url_buffer, "My Email", 5, "\n", 0,
 			"Private", _("Email"), 0, NULL);
 
 	/* extract the Nickname if it exists */
-	info_extract_field(stripped, url_text, "Nickname:", 1, "\n", '\n',
+	info_extract_field(stripped, url_buffer, "Nickname:", 1, "\n", '\n',
 			NULL, _("Nickname"), 0, NULL);
 
 	/* extract their RealName and put it in */
-	info_extract_field(stripped, url_text, "RealName:", 1, "\n", '\n',
+	info_extract_field(stripped, url_buffer, "RealName:", 1, "\n", '\n',
 			NULL, _("Realname"), 0, NULL);
 
 	/* extract their Location and put it in */
-	info_extract_field(stripped, url_text, "Location:", 2, "\n", '\n',
+	info_extract_field(stripped, url_buffer, "Location:", 2, "\n", '\n',
 			NULL, _("Location"), 0, NULL);
 
 	/* extract their Age and put it in */
-	info_extract_field(stripped, url_text, "Age:", 3, "\n", '\n',
+	info_extract_field(stripped, url_buffer, "Age:", 3, "\n", '\n',
 			NULL, _("Age"), 0, NULL);
 
 	/* extract their MaritalStatus and put it in */
-	info_extract_field(stripped, url_text, "MaritalStatus:", 3, "\n", '\n',
+	info_extract_field(stripped, url_buffer, "MaritalStatus:", 3, "\n", '\n',
 			"No Answer", _("Marital Status"), 0, NULL);
 
 	/* extract their Gender and put it in */
-	info_extract_field(stripped, url_text, "Gender:", 3, "\n", '\n',
+	info_extract_field(stripped, url_buffer, "Gender:", 3, "\n", '\n',
 			"No Answer", _("Gender"), 0, NULL);
 
 	/* extract their Occupation and put it in */
-	info_extract_field(stripped, url_text, "Occupation:", 2, "\n", '\n',
+	info_extract_field(stripped, url_buffer, "Occupation:", 2, "\n", '\n',
 			NULL, _("Occupation"), 0, NULL);
 
 	/* Hobbies, Latest News, and Favorite Quote are a bit different, since the
@@ -2502,24 +2505,24 @@
 	 * looking for the 'Links' heading, which is the next thing to follow this
 	 * bunch.
 	 */
-	if (!info_extract_field(stripped, url_text, "Hobbies:", 1, "Latest News",
+	if (!info_extract_field(stripped, url_buffer, "Hobbies:", 1, "Latest News",
 				'\n', NULL, _("Hobbies"), 0, NULL))
-		if (!info_extract_field(stripped, url_text, "Hobbies:", 1, "Favorite Quote",
+		if (!info_extract_field(stripped, url_buffer, "Hobbies:", 1, "Favorite Quote",
 					'\n', NULL, _("Hobbies"), 0, NULL))
-			info_extract_field(stripped, url_text, "Hobbies:", 1, "Links",
+			info_extract_field(stripped, url_buffer, "Hobbies:", 1, "Links",
 					'\n', NULL, _("Hobbies"), 0, NULL);
-	if (!info_extract_field(stripped, url_text, "Latest News:", 1, "Favorite Quote",
+	if (!info_extract_field(stripped, url_buffer, "Latest News:", 1, "Favorite Quote",
 				'\n', NULL, _("Latest News"), 0, NULL))
-		info_extract_field(stripped, url_text, "Latest News:", 1, "Links",
+		info_extract_field(stripped, url_buffer, "Latest News:", 1, "Links",
 				'\n', NULL, _("Latest News"), 0, NULL);
-	info_extract_field(stripped, url_text, "Favorite Quote:", 0, "Links",
+	info_extract_field(stripped, url_buffer, "Favorite Quote:", 0, "Links",
 			'\n', NULL, _("Favorite Quote"), 0, NULL);
 
 	/* Home Page will either be "No home page specified",
 	 * or "Home Page: " and a link. */
 	p = strstr(stripped, "No home page specified");
 	if (!p)
-		info_extract_field(stripped, url_text, "Home Page:", 1, " ", 0, NULL,
+		info_extract_field(stripped, url_buffer, "Home Page:", 1, " ", 0, NULL,
 				_("Home Page"), 1, NULL);
 
 	/* Cool Link {1,2,3} is also different.  If "No cool link specified" exists,
@@ -2529,28 +2532,30 @@
 	 */
 	p = strstr(stripped,"No cool link specified");
 	if (!p)
-		if (info_extract_field(stripped, url_text, "Cool Link 1:", 1, " ", 0, NULL,
+		if (info_extract_field(stripped, url_buffer, "Cool Link 1:", 1, " ", 0, NULL,
 					_("Cool Link 1"), 1, NULL))
-			if (info_extract_field(stripped, url_text, "Cool Link 2:", 1, " ", 0, NULL,
+			if (info_extract_field(stripped, url_buffer, "Cool Link 2:", 1, " ", 0, NULL,
 						_("Cool Link 2"), 1, NULL))
-				info_extract_field(stripped, url_text, "Cool Link 3:", 1, " ", 0, NULL,
+				info_extract_field(stripped, url_buffer, "Cool Link 3:", 1, " ", 0, NULL,
 						_("Cool Link 3"), 1, NULL);
 
 	/* see if Member Since is there, and if so, extract it. */
-	info_extract_field(stripped, url_text, "Member Since:", 1, "Last Updated:",
+	info_extract_field(stripped, url_buffer, "Member Since:", 1, "Last Updated:",
 			'\n', NULL, _("Member Since"), 0, NULL);
 
 	/* extract the Last Updated date and put it in */
-	info_extract_field(stripped, url_text, "Last Updated:", 1, "\n", '\n', NULL,
+	info_extract_field(stripped, url_buffer, "Last Updated:", 1, "\n", '\n', NULL,
 			_("Last Updated"), 0, NULL);
 
 	/* finish off the html */
-	strcat(url_text, "</body></html>\n");
+	strcat(url_buffer, "</body></html>\n");
 	g_free(stripped);
 
 	/* show it to the user */
 	gaim_notify_formatted(data, NULL, _("Buddy Information"), NULL,
-						  url_text, NULL, NULL);
+						  url_buffer, NULL, NULL);
+
+	g_free(url_buffer);
 }
 
 static void yahoo_get_info(GaimConnection *gc, const char *name)
@@ -2558,7 +2563,7 @@
 	/* struct yahoo_data *yd = (struct yahoo_data *)gc->proto_data; */
 	char url[256];
 	g_snprintf(url, sizeof url, "%s%s", YAHOO_PROFILE_URL, name);
-	grab_url(url, FALSE, yahoo_got_info, gc, NULL, 0);
+	gaim_url_fetch(url, FALSE, NULL, FALSE, yahoo_got_info, gc);
 }
 
 static void yahoo_change_buddys_group(GaimConnection *gc, const char *who,
--- a/src/util.c	Wed Oct 01 02:06:12 2003 +0000
+++ b/src/util.c	Wed Oct 01 03:01:25 2003 +0000
@@ -1,7 +1,9 @@
 /*
- * gaim
+ * @file util.h Utility Functions
+ * @ingroup core
  *
  * Copyright (C) 1998-1999, Mark Spencer <markster@marko.net>
+ * Copyright (C) 2003 Christian Hammond <chipx86@gnupdate.org>
  *
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
@@ -16,15 +18,44 @@
  * You should have received a copy of the GNU General Public License
  * along with this program; if not, write to the Free Software
  * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
- *
  */
-
 #include "internal.h"
 
 #include "conversation.h"
 #include "debug.h"
 #include "prpl.h"
 #include "prefs.h"
+#include "util.h"
+
+typedef struct
+{
+	void (*callback)(void *, const char *, size_t);
+	void *user_data;
+
+	struct
+	{
+		char *address;
+		int port;
+		char *page;
+
+	} website;
+
+	char *url;
+	gboolean full;
+	char *user_agent;
+	gboolean http11;
+
+	int inpa;
+
+	gboolean sentreq;
+	gboolean newline;
+	gboolean startsaving;
+	char *webdata;
+	unsigned long len;
+	unsigned long data_len;
+
+} GaimFetchUrlData;
+
 
 static char home_dir[MAXPATHLEN];
 
@@ -996,14 +1027,17 @@
 	}
 }
 
-gboolean gaim_markup_find_tag(const char *needle, const char *haystack, const char **start, const char **end, GData **attributes) {
+gboolean
+gaim_markup_find_tag(const char *needle, const char *haystack,
+					 const char **start, const char **end, GData **attributes)
+{
 	GData *attribs;
 	const char *cur = haystack;
 	char *name = NULL;
 	gboolean found = FALSE;
 	gboolean in_tag = FALSE;
 	gboolean in_attr = FALSE;
-	char *in_quotes = NULL;
+	const char *in_quotes = NULL;
 	size_t needlelen = strlen(needle);
 
 	g_datalist_init(&attribs);
@@ -1133,3 +1167,332 @@
 
 	return found;
 }
+
+/*
+ * Splits a URL into host, port, and page, skipping any leading "http://".
+ * Each returned string is g_strdup()'d and must be freed by the caller.
+ * Returns FALSE if not even a host could be extracted.
+ */
+gboolean
+gaim_url_parse(const char *url, char **ret_host, int *ret_port,
+			   char **ret_path)
+{
+	char scan_info[255];
+	char port_str[6];      /* Big enough for "65535" plus the NUL. */
+	int f;
+	const char *turl;
+	char host[256] = "", path[256] = "";
+	int port = 0;
+	/* hyphen at end includes it in control set */
+	static char addr_ctrl[] = "A-Za-z0-9.-";
+	static char port_ctrl[] = "0-9";
+	static char page_ctrl[] = "A-Za-z0-9.~_/:*!@&%%?=+^-";
+
+	g_return_val_if_fail(url != NULL, FALSE);
+
+	if ((turl = strstr(url, "http://")) != NULL ||
+		(turl = strstr(url, "HTTP://")) != NULL)
+	{
+		turl += 7;
+		url = turl;
+	}
+
+	/* Bound every scanset conversion so a long URL cannot overflow the
+	 * stack buffers: at most 255 chars of host/page and 5 of port. */
+	g_snprintf(scan_info, sizeof(scan_info),
+			   "%%255[%s]:%%5[%s]/%%255[%s]", addr_ctrl, port_ctrl, page_ctrl);
+
+	f = sscanf(url, scan_info, host, port_str, path);
+
+	if (f == 1)
+	{
+		/* No ":port" present; rescan without it and default to port 80. */
+		g_snprintf(scan_info, sizeof(scan_info),
+				   "%%255[%s]/%%255[%s]",
+				   addr_ctrl, page_ctrl);
+		f = sscanf(url, scan_info, host, path);
+		g_snprintf(port_str, sizeof(port_str), "80");
+	}
+
+	/* Nothing matched at all; host and path would be empty garbage. */
+	if (f == 0)
+		return FALSE;
+
+	/* path was zero-initialized, so a missing "/page" leaves it "". */
+	sscanf(port_str, "%d", &port);
+
+	if (ret_host != NULL) *ret_host = g_strdup(host);
+	if (ret_port != NULL) *ret_port = port;
+	if (ret_path != NULL) *ret_path = g_strdup(path);
+
+	return TRUE;
+}
+
+/* Frees a GaimFetchUrlData and everything it owns. */
+static void
+destroy_fetch_url_data(GaimFetchUrlData *gfud)
+{
+	/* g_free() is documented to ignore NULL, so no guards are needed. */
+	g_free(gfud->webdata);
+	g_free(gfud->url);
+	g_free(gfud->user_agent);
+	g_free(gfud->website.address);
+	g_free(gfud->website.page);
+
+	g_free(gfud);
+}
+
+/*
+ * Scans the response headers in @data for a "Location:" redirect.
+ *
+ * If one is found, the current connection is closed and a fresh
+ * gaim_url_fetch() is started for the redirect target; @gfud is freed.
+ *
+ * @return TRUE if a redirect was followed (gfud is gone), FALSE otherwise.
+ */
+static gboolean
+parse_redirect(const char *data, size_t data_len, gint sock,
+			   GaimFetchUrlData *gfud)
+{
+	gchar *s;
+
+	if ((s = g_strstr_len(data, data_len, "Location: ")) != NULL)
+	{
+		gchar *new_url, *temp_url, *end;
+		gboolean full;
+		int len;
+
+		s += strlen("Location: ");
+		end = strchr(s, '\r');
+
+		/* Just in case :) */
+		if (end == NULL)
+			end = strchr(s, '\n');
+
+		/* A Location value with no line terminator at all is malformed;
+		 * bail out rather than computing a bogus length from NULL. */
+		if (end == NULL)
+			return FALSE;
+
+		len = end - s;
+
+		new_url = g_malloc(len + 1);
+		strncpy(new_url, s, len);
+		new_url[len] = '\0';
+
+		full = gfud->full;
+
+		/* A relative target: re-anchor it on the original host/port. */
+		if (*new_url == '/' || g_strstr_len(new_url, len, "://") == NULL)
+		{
+			temp_url = new_url;
+
+			new_url = g_strdup_printf("%s:%d%s", gfud->website.address,
+									  gfud->website.port, temp_url);
+
+			g_free(temp_url);
+
+			full = FALSE;
+		}
+
+		/* Close the existing stuff. */
+		gaim_input_remove(gfud->inpa);
+		close(sock);
+
+		gaim_debug_info("gaim_url_fetch", "Redirecting to %s\n", new_url);
+
+		/* Try again, with this new location. */
+		gaim_url_fetch(new_url, full, gfud->user_agent, gfud->http11,
+					   gfud->callback, gfud->user_data);
+
+		/* Free up. */
+		g_free(new_url);
+		destroy_fetch_url_data(gfud);
+
+		return TRUE;
+	}
+
+	return FALSE;
+}
+
+/*
+ * Scans the response headers in @data for a Content-Length header.
+ *
+ * @return The advertised content length, or 0 if the header is missing
+ *         or unparseable.
+ *
+ * NOTE(review): the match is case-sensitive; servers sending
+ * "Content-length:" will fall back to the 8192-byte default.
+ */
+static size_t
+parse_content_len(const char *data, size_t data_len)
+{
+	int content_len = 0;
+	const char *p;
+
+	/* sscanf() anchors its literal prefix at the start of the buffer, so
+	 * scanning @data directly never matched a header mid-block (and read
+	 * into a size_t through %d, which is undefined). Locate the header
+	 * first with a length-bounded search, then scan into an int. */
+	p = g_strstr_len(data, data_len, "Content-Length: ");
+
+	if (p != NULL)
+		sscanf(p, "Content-Length: %d", &content_len);
+
+	if (content_len < 0)
+		content_len = 0;
+
+	return (size_t)content_len;
+}
+
+/*
+ * Input callback driving a gaim_url_fetch() transfer.
+ *
+ * Called first when the (proxy) connection completes, at which point it
+ * writes the GET request; thereafter it is called whenever the socket is
+ * readable. The response is read one byte at a time; the headers are
+ * scanned for a redirect and a Content-Length before the body is saved.
+ */
+static void
+url_fetched_cb(gpointer url_data, gint sock, GaimInputCondition cond)
+{
+	GaimFetchUrlData *gfud = url_data;
+	char data;
+
+	/* The connection attempt failed outright. */
+	if (sock == -1)
+	{
+		gfud->callback(gfud->user_data, NULL, 0);
+
+		destroy_fetch_url_data(gfud);
+
+		return;
+	}
+
+	/* First call: build and send the GET request. */
+	if (!gfud->sentreq)
+	{
+		char buf[1024];
+
+		if (gfud->user_agent)
+		{
+			if (gfud->http11)
+			{
+				g_snprintf(buf, sizeof(buf),
+						   "GET %s%s HTTP/1.1\r\n"
+						   "User-Agent: \"%s\"\r\n"
+						   "Host: %s\r\n\r\n",
+						   (gfud->full ? "" : "/"),
+						   (gfud->full ? gfud->url : gfud->website.page),
+						   gfud->user_agent, gfud->website.address);
+			}
+			else
+			{
+				g_snprintf(buf, sizeof(buf),
+						   "GET %s%s HTTP/1.0\r\n"
+						   "User-Agent: \"%s\"\r\n\r\n",
+						   (gfud->full ? "" : "/"),
+						   (gfud->full ? gfud->url : gfud->website.page),
+						   gfud->user_agent);
+			}
+		}
+		else
+		{
+			if (gfud->http11)
+			{
+				g_snprintf(buf, sizeof(buf),
+						   "GET %s%s HTTP/1.1\r\n"
+						   "Host: %s\r\n\r\n",
+						   (gfud->full ? "" : "/"),
+						   (gfud->full ? gfud->url : gfud->website.page),
+						   gfud->website.address);
+			}
+			else
+			{
+				g_snprintf(buf, sizeof(buf),
+						   "GET %s%s HTTP/1.0\r\n\r\n",
+						   (gfud->full ? "" : "/"),
+						   (gfud->full ? gfud->url : gfud->website.page));
+			}
+		}
+
+		gaim_debug_misc("gaim_url_fetch", "Request: %s\n", buf);
+
+		/* NOTE(review): the write() result is unchecked; a short write
+		 * would silently truncate the request. */
+		write(sock, buf, strlen(buf));
+		fcntl(sock, F_SETFL, O_NONBLOCK);
+		gfud->sentreq = TRUE;
+		gfud->inpa = gaim_input_add(sock, GAIM_INPUT_READ,
+									url_fetched_cb, url_data);
+		gfud->data_len = 4096;
+		gfud->webdata = g_malloc(gfud->data_len);
+
+		return;
+	}
+
+	/* NOTE(review): errno is only meaningful when read() returned -1;
+	 * testing it after a successful read relies on the errno = 0 reset
+	 * in the EWOULDBLOCK branch below. */
+	if (read(sock, &data, 1) > 0 || errno == EWOULDBLOCK)
+	{
+		if (errno == EWOULDBLOCK)
+		{
+			errno = 0;
+
+			return;
+		}
+
+		gfud->len++;
+
+		/* Grow the buffer by half whenever it fills up. */
+		if (gfud->len == gfud->data_len + 1)
+		{
+			gfud->data_len += (gfud->data_len) / 2;
+
+			gfud->webdata = g_realloc(gfud->webdata, gfud->data_len);
+		}
+
+		gfud->webdata[gfud->len - 1] = data;
+
+		/* Still in the headers: look for the blank line ("\n\n", with
+		 * '\r' bytes skipped) that separates headers from the body. */
+		if (!gfud->startsaving)
+		{
+			if (data == '\r')
+				return;
+
+			if (data == '\n')
+			{
+				if (gfud->newline)
+				{
+					size_t content_len;
+					gfud->startsaving = TRUE;
+
+					/* See if we can find a redirect. */
+					if (parse_redirect(gfud->webdata, gfud->len, sock, gfud))
+						return;
+
+					/* No redirect. See if we can find a content length. */
+					content_len = parse_content_len(gfud->webdata, gfud->len);
+
+					if (content_len == 0)
+					{
+						/* We'll stick with an initial 8192 */
+						content_len = 8192;
+					}
+
+					/* Out with the old (header) buffer... */
+					gfud->len = 0;
+					g_free(gfud->webdata);
+					gfud->webdata = NULL;
+
+					/* In with the new (body) buffer. */
+					gfud->data_len = content_len;
+					gfud->webdata = g_malloc(gfud->data_len);
+				}
+				else
+					gfud->newline = TRUE;
+
+				return;
+			}
+
+			gfud->newline = FALSE;
+		}
+	}
+	else if (errno != ETIMEDOUT)
+	{
+		/* EOF (or a non-timeout error): the transfer is complete.
+		 * NUL-terminate the data and hand it to the callback. */
+		gfud->webdata = g_realloc(gfud->webdata, gfud->len + 1);
+		gfud->webdata[gfud->len] = 0;
+
+		gaim_debug_misc("gaim_url_fetch", "Received: '%s'\n", gfud->webdata);
+
+		gaim_input_remove(gfud->inpa);
+		close(sock);
+		gfud->callback(gfud->user_data, gfud->webdata, gfud->len);
+
+		/* Clear the pointer after freeing so destroy_fetch_url_data()
+		 * does not free it a second time (double free in the original). */
+		g_free(gfud->webdata);
+		gfud->webdata = NULL;
+
+		destroy_fetch_url_data(gfud);
+	}
+	else
+	{
+		/* Timed out: report failure to the caller. */
+		gaim_input_remove(gfud->inpa);
+		close(sock);
+
+		gfud->callback(gfud->user_data, NULL, 0);
+
+		destroy_fetch_url_data(gfud);
+	}
+}
+
+/*
+ * Fetches the data at @url asynchronously and passes it to @cb when the
+ * transfer completes. On a connection-setup failure the callback is
+ * invoked immediately with an error string and a length of 0.
+ */
+void
+gaim_url_fetch(const char *url, gboolean full,
+			   const char *user_agent, gboolean http11,
+			   void (*cb)(gpointer, const char *, size_t),
+			   void *user_data)
+{
+	int sock;
+	GaimFetchUrlData *gfud;
+
+	g_return_if_fail(url != NULL);
+	g_return_if_fail(cb  != NULL);
+
+	gfud = g_new0(GaimFetchUrlData, 1);
+
+	gfud->callback   = cb;
+	gfud->user_data  = user_data;
+	gfud->url        = g_strdup(url);
+	gfud->user_agent = (user_agent != NULL ? g_strdup(user_agent) : NULL);
+	gfud->http11     = http11;
+	gfud->full       = full;
+
+	/* NOTE(review): the return value of gaim_url_parse() is ignored; a
+	 * failed parse would leave the website fields NULL (from g_new0). */
+	gaim_url_parse(url, &gfud->website.address, &gfud->website.port,
+				   &gfud->website.page);
+
+	if ((sock = gaim_proxy_connect(NULL, gfud->website.address,
+								   gfud->website.port, url_fetched_cb,
+								   gfud)) < 0)
+	{
+		destroy_fetch_url_data(gfud);
+
+		/* NOTE(review): this g_strdup()'d error string is not freed by
+		 * the callers visible here -- confirm the intended ownership. */
+		cb(user_data, g_strdup(_("g003: Error opening connection.\n")), 0);
+	}
+}
--- a/src/util.h	Wed Oct 01 02:06:12 2003 +0000
+++ b/src/util.h	Wed Oct 01 03:01:25 2003 +0000
@@ -5,7 +5,7 @@
  * gaim
  *
  * Copyright (C) 2002-2003, Christian Hammond <chipx86@gnupdate.org>
- * 
+ *
  * This program is free software; you can redistribute it and/or modify
  * it under the terms of the GNU General Public License as published by
  * the Free Software Foundation; either version 2 of the License, or
@@ -332,10 +332,11 @@
 char *gaim_get_size_string(size_t size);
 
 /**
- * Finds a HTML tag matching the given name, locating its start
- * and end, and storing its attributes in a GData hash table.
- * The names of the attributes are lower-cased in the hash table,
- * and the name of the tag is case insensitive.
+ * Finds a HTML tag matching the given name.
+ *
+ * This locates an HTML tag's start and end, and stores its attributes
+ * in a GData hash table. The names of the attributes are lower-cased
+ * in the hash table, and the name of the tag is case insensitive.
  *
  * @param needle	the name of the tag
  * @param haystack	the null-delimited string to search in
@@ -344,7 +345,38 @@
  * @param attributes	the attributes, if the tag was found
  * @return TRUE if the tag was found
  */
-gboolean gaim_markup_find_tag(const char *needle, const char *haystack, const char **start, const char **end, GData **attributes);
+gboolean gaim_markup_find_tag(const char *needle, const char *haystack,
+							  const char **start, const char **end,
+							  GData **attributes);
+
+/**
+ * Parses a URL, returning its host, port, and file path.
+ *
+ * The returned data must be freed.
+ *
+ * @param url      The URL to parse.
+ * @param ret_host The returned host.
+ * @param ret_port The returned port.
+ * @param ret_path The returned path.
+ *
+ * @return TRUE if the URL was parsed successfully, or FALSE otherwise.
+ */
+gboolean gaim_url_parse(const char *url, char **ret_host, int *ret_port,
+						char **ret_path);
+
+/**
+ * Fetches the data from a URL, and passes it to a callback function.
+ *
+ * @param url        The URL.
+ * @param full       TRUE if this is the full URL, or FALSE if it's a
+ *                   partial URL.
+ * @param user_agent The user agent field to use, or NULL.
+ * @param http11     TRUE if HTTP/1.1 should be used to download the file.
+ * @param cb         The callback function.
+ * @param data       The user data to pass to the callback function.
+ */
+void gaim_url_fetch(const char *url, gboolean full,
+					const char *user_agent, gboolean http11,
+					void (*cb)(void *, const char *, size_t),
+					void *data);
 
 #ifdef __cplusplus
 }