changeset 54695:3fb37923e567

Initial revision
author Stefan Monnier <monnier@iro.umontreal.ca>
date Sun, 04 Apr 2004 01:21:46 +0000
parents 253149f265f2
children cdf086a7739d
files lisp/url/.cvsignore lisp/url/url-about.el lisp/url/url-auth.el lisp/url/url-cache.el lisp/url/url-cid.el lisp/url/url-cookie.el lisp/url/url-dav.el lisp/url/url-dired.el lisp/url/url-expand.el lisp/url/url-file.el lisp/url/url-ftp.el lisp/url/url-gw.el lisp/url/url-handlers.el lisp/url/url-history.el lisp/url/url-http.el lisp/url/url-https.el lisp/url/url-imap.el lisp/url/url-irc.el lisp/url/url-ldap.el lisp/url/url-mailto.el lisp/url/url-methods.el lisp/url/url-misc.el lisp/url/url-news.el lisp/url/url-nfs.el lisp/url/url-ns.el lisp/url/url-parse.el lisp/url/url-privacy.el lisp/url/url-proxy.el lisp/url/url-util.el lisp/url/url-vars.el lisp/url/url.el lisp/url/vc-dav.el
diffstat 32 files changed, 7513 insertions(+), 0 deletions(-)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/.cvsignore	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,4 @@
+Makefile
+auto-autoloads.el
+custom-load.el
+url-auto.el
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-about.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,100 @@
+;;; url-about.el --- Show internal URLs
+;; Author: $Author: wmperry $
+;; Created: $Date: 2001/11/24 22:30:21 $
+;; Version: $Revision: 1.1 $
+;; Keywords: comm, data, processes, hypermedia
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 2001 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+(eval-when-compile
+  (require 'cl))
+(require 'url-util)
+(require 'url-parse)
+
+(defun url-probe-protocols ()
+  "Returns a list of all potential URL schemes."
+  (or (get 'url-extension-protocols 'probed)
+      (mapc (lambda (s) (url-scheme-get-property s 'name))
+	    (or (get 'url-extension-protocols 'schemes)
+		(let ((schemes '("info" "man" "rlogin" "telnet"
+				 "tn3270" "data" "snews")))
+		  (mapc (lambda (d)
+			  (mapc (lambda (f)
+				  (if (string-match "url-\\(.*\\).el$" f)
+				      (push (match-string 1 f) schemes)))
+				(directory-files d nil "^url-.*\\.el$")))
+			load-path)
+		  (put 'url-extension-protocols 'schemes schemes)
+		  schemes)))))
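+
+;; Usage sketch (assuming the url-*.el files above are on `load-path'):
+;;   (url-probe-protocols)
+;;   (get 'url-extension-protocols 'schemes)
+;;   ;; => ("https" "http" ... "info" "man" "rlogin" "telnet" "tn3270" "data" "snews")
+;; The scan is cached on the `url-extension-protocols' plist, so later
+;; calls reuse the stored list instead of rescanning `load-path'.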
+
+(defun url-about-protocols (url)
+  (url-probe-protocols)
+  (insert "<html>\n"
+	  " <head>\n"
+	  "  <title>Supported Protocols</title>\n"
+	  " </head>\n"
+	  " <body>\n"
+	  "  <h1>Supported Protocols - URL v" url-version "</h1>\n"
+	  "  <table width='100%' border='1'>\n"
+	  "   <tr>\n"
+	  "    <td>Protocol\n"
+	  "    <td>Properties\n"
+	  "    <td>Description\n"
+	  "   </tr>\n")
+  (mapc (lambda (k)
+	  (if (string= k "proxy")
+	      ;; Ignore the proxy setting... it's magic!
+	      nil
+	    (insert "   <tr>\n")
+	    ;; The name of the protocol
+	    (insert "    <td valign=top>" (or (url-scheme-get-property k 'name) k) "\n")
+
+	    ;; Now the properties.  Currently just asynchronous
+	    ;; status, default port number, and proxy status.
+	    (insert "    <td valign=top>"
+		    (if (url-scheme-get-property k 'asynchronous-p) "As" "S")
+		    "ynchronous<br>\n"
+		    (if (url-scheme-get-property k 'default-port)
+			(format "Default Port: %d<br>\n"
+				(url-scheme-get-property k 'default-port)) "")
+		    (if (assoc k url-proxy-services)
+			(format "Proxy: %s<br>\n" (assoc k url-proxy-services)) ""))
+	    ;; Now the description...
+	    (insert "    <td valign=top>"
+		    (or (url-scheme-get-property k 'description) "N/A"))))
+	(sort (let (x) (maphash (lambda (k v) (push k x)) url-scheme-registry) x) 'string-lessp))
+  (insert "  </table>\n"
+	  " </body>\n"
+	  "</html>\n"))
+
+(defun url-about (url)
+  "Show internal URLs."
+  (let* ((item (downcase (url-filename url)))
+	 (func (intern (format "url-about-%s" item))))
+    (if (fboundp func)
+	(progn
+	  (set-buffer (generate-new-buffer " *about-data*"))
+	  (insert "Content-type: text/html\n\n")
+	  (funcall func url)
+	  (current-buffer))
+      (error "URL does not know about `%s'" item))))
+
+(provide 'url-about)
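+
+;; Dispatch sketch: for a URL like "about:protocols" the filename part
+;; is "protocols", so `url-about' calls `url-about-protocols' and
+;; returns a buffer whose contents begin with a Content-type header.
+;; Illustrative call (any unsupported about: item signals an error):
+;;   (url-about (url-generic-parse-url "about:protocols"))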
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-auth.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,318 @@
+;;; url-auth.el --- Uniform Resource Locator authorization modules
+;; Author: $Author: wmperry $
+;; Created: $Date: 2001/12/05 19:05:51 $
+;; Version: $Revision: 1.4 $
+;; Keywords: comm, data, processes, hypermedia
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1993 - 1996 by William M. Perry <wmperry@cs.indiana.edu>
+;;; Copyright (c) 1996 - 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(require 'url-vars)
+(require 'url-parse)
+(autoload 'url-warn "url")
+
+(defsubst url-auth-user-prompt (url realm)
+  "String to usefully prompt for a username."
+  (concat "Username [for "
+	  (or realm (url-truncate-url-for-viewing
+		     (url-recreate-url url)
+		     (- (window-width) 10 20)))
+	  "]: "))
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Basic authorization code
+;;; ------------------------
+;;; This implements the BASIC authorization type.  See the online
+;;; documentation at
+;;; http://www.w3.org/hypertext/WWW/AccessAuthorization/Basic.html
+;;; for the complete documentation on this type.
+;;;
+;;; This is very insecure, but it works as a proof-of-concept
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+(defvar url-basic-auth-storage 'url-http-real-basic-auth-storage
+  "Where usernames and passwords are stored.
+
+Must be a symbol pointing to another variable that will actually store
+the information.  The value of this variable is an assoc list of assoc
+lists.  The first assoc list is keyed by the server name.  The cdr of
+this is an assoc list based on the 'directory' specified by the url we
+are looking up.")
+
+(defun url-basic-auth (url &optional prompt overwrite realm args)
+  "Get the username/password for the specified URL.
+If optional argument PROMPT is non-nil, ask for the username/password
+to use for the url and its descendants.  If optional third argument
+OVERWRITE is non-nil, overwrite the old username/password pair if it
+is found in the assoc list.  If REALM is specified, use that as the realm
+instead of the pathname inheritance method."
+  (let* ((href (if (stringp url)
+		   (url-generic-parse-url url)
+		 url))
+	 (server (url-host href))
+	 (port (url-port href))
+	 (path (url-filename href))
+	 user pass byserv retval data)
+    (setq server (format "%s:%d" server port)
+	  path (cond
+		(realm realm)
+		((string-match "/$" path) path)
+		(t (url-basepath path)))
+	  byserv (cdr-safe (assoc server
+				  (symbol-value url-basic-auth-storage))))
+    (cond
+     ((and prompt (not byserv))
+      (setq user (read-string (url-auth-user-prompt url realm)
+			      (user-real-login-name))
+	    pass (funcall url-passwd-entry-func "Password: "))
+      (set url-basic-auth-storage
+	   (cons (list server
+		       (cons path
+			     (setq retval
+				   (base64-encode-string
+				    (format "%s:%s" user pass)))))
+		 (symbol-value url-basic-auth-storage))))
+     (byserv
+      (setq retval (cdr-safe (assoc path byserv)))
+      (if (and (not retval)
+	       (string-match "/" path))
+ 	  (while (and byserv (not retval))
+	    (setq data (car (car byserv)))
+	    (if (or (not (string-match "/" data)) ; It's a realm - take it!
+		    (and
+		     (>= (length path) (length data))
+		     (string= data (substring path 0 (length data)))))
+		(setq retval (cdr (car byserv))))
+	    (setq byserv (cdr byserv))))
+      (if (or (and (not retval) prompt) overwrite)
+	  (progn
+	    (setq user (read-string (url-auth-user-prompt url realm)
+				    (user-real-login-name))
+		  pass (funcall url-passwd-entry-func "Password: ")
+		  retval (base64-encode-string (format "%s:%s" user pass))
+		  byserv (assoc server (symbol-value url-basic-auth-storage)))
+	    (setcdr byserv
+		    (cons (cons path retval) (cdr byserv))))))
+     (t (setq retval nil)))
+    (if retval (setq retval (concat "Basic " retval)))
+    retval))
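+
+;; Sketch of the value handed back (credentials illustrative):
+;;   (base64-encode-string "aladdin:opensesame")
+;;     => "YWxhZGRpbjpvcGVuc2VzYW1l"
+;; so the caller receives the header value
+;;   "Basic YWxhZGRpbjpvcGVuc2VzYW1l"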
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Digest authorization code
+;;; ------------------------
+;;; This implements the DIGEST authorization type.  See the internet draft
+;;; ftp://ds.internic.net/internet-drafts/draft-ietf-http-digest-aa-01.txt
+;;; for the complete documentation on this type.
+;;;
+;;; This is very secure
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+(defvar url-digest-auth-storage nil
+  "Where usernames and passwords are stored.  Its value is an assoc list of
+assoc lists.  The first assoc list is keyed by the server name.  The cdr of
+this is an assoc list based on the 'directory' specified by the url we are
+looking up.")
+
+(defun url-digest-auth-create-key (username password realm method uri)
+  "Create a key for digest authentication method"
+  (let* ((info (if (stringp uri)
+		   (url-generic-parse-url uri)
+		 uri))
+	 (a1 (md5 (concat username ":" realm ":" password)))
+	 (a2 (md5 (concat method ":" (url-filename info)))))
+    (list a1 a2)))
+
+(defun url-digest-auth (url &optional prompt overwrite realm args)
+  "Get the username/password for the specified URL.
+If optional argument PROMPT is non-nil, ask for the username/password
+to use for the url and its descendants.  If optional third argument
+OVERWRITE is non-nil, overwrite the old username/password pair if it
+is found in the assoc list.  If REALM is specified, use that as the realm
+instead of hostname:portnum."
+  (if args
+      (let* ((href (if (stringp url)
+		       (url-generic-parse-url url)
+		     url))
+	     (server (url-host href))
+	     (port (url-port href))
+	     (path (url-filename href))
+	     user pass byserv retval data)
+	(setq path (cond
+		    (realm realm)
+		    ((string-match "/$" path) path)
+		    (t (url-basepath path)))
+	      server (format "%s:%d" server port)
+	      byserv (cdr-safe (assoc server url-digest-auth-storage)))
+	(cond
+	 ((and prompt (not byserv))
+	  (setq user (read-string (url-auth-user-prompt url realm)
+				  (user-real-login-name))
+		pass (funcall url-passwd-entry-func "Password: ")
+		url-digest-auth-storage
+		(cons (list server
+			    (cons path
+				  (setq retval
+					(cons user
+					      (url-digest-auth-create-key
+					       user pass realm
+					       (or url-request-method "GET")
+					       url)))))
+		      url-digest-auth-storage)))
+	 (byserv
+	  (setq retval (cdr-safe (assoc path byserv)))
+	  (if (and (not retval)		; no exact match, check directories
+		   (string-match "/" path)) ; not looking for a realm
+	      (while (and byserv (not retval))
+		(setq data (car (car byserv)))
+		(if (or (not (string-match "/" data))
+			(and
+			 (>= (length path) (length data))
+			 (string= data (substring path 0 (length data)))))
+		    (setq retval (cdr (car byserv))))
+		(setq byserv (cdr byserv))))
+	  (if (or (and (not retval) prompt) overwrite)
+	      (progn
+		(setq user (read-string (url-auth-user-prompt url realm)
+					(user-real-login-name))
+		      pass (funcall url-passwd-entry-func "Password: ")
+		      retval (setq retval
+				   (cons user
+					 (url-digest-auth-create-key
+					  user pass realm
+					  (or url-request-method "GET")
+					  url)))
+		      byserv (assoc server url-digest-auth-storage))
+		(setcdr byserv
+			(cons (cons path retval) (cdr byserv))))))
+	 (t (setq retval nil)))
+	(if retval
+	    (let ((nonce (or (cdr-safe (assoc "nonce" args)) "nonegiven"))
+		  (opaque (or (cdr-safe (assoc "opaque" args)) "nonegiven")))
+	      (format
+	       (concat "Digest username=\"%s\", realm=\"%s\","
+		       "nonce=\"%s\", uri=\"%s\","
+		       "response=\"%s\", opaque=\"%s\"")
+	       (nth 0 retval) realm nonce (url-filename href)
+	       (md5 (concat (nth 1 retval) ":" nonce ":"
+			    (nth 2 retval))) opaque))))))
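+
+;; Sketch of the resulting header (old-style digest, no qop/cnonce;
+;; all values below are placeholders):
+;;   A1       = (md5 "USER:REALM:PASSWORD")
+;;   A2       = (md5 "GET:/protected/index.html")
+;;   response = (md5 (concat A1 ":" NONCE ":" A2))
+;; sent back as
+;;   Digest username="USER", realm="REALM", nonce="NONCE",
+;;     uri="/protected/index.html", response="...", opaque="..."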
+
+(defvar url-registered-auth-schemes nil
+  "A list of the registered authorization schemes and various and sundry
+information associated with them.")
+
+;;;###autoload
+(defun url-get-authentication (url realm type prompt &optional args)
+  "Return an authorization string suitable for use in the WWW-Authenticate
+header in an HTTP/1.0 request.
+
+URL    is the url you are requesting authorization to.  This can be either a
+       string representing the URL, or the parsed representation returned by
+       `url-generic-parse-url'
+REALM  is the realm at a specific site we are looking for.  This should be a
+       string specifying the exact realm, or nil or the symbol 'any' to
+       specify that the filename portion of the URL should be used as the
+       realm
+TYPE   is the type of authentication to be returned.  This is either a string
+       representing the type (basic, digest, etc), or nil or the symbol 'any'
+       to specify that any authentication is acceptable.  If requesting 'any'
+       the strongest matching authentication will be returned.  If this is
+       wrong, it's no big deal; the error from the server will specify exactly
+       what type of auth to use
+PROMPT is boolean - specifies whether to ask the user for a username/password
+       if one cannot be found in the cache"
+  (if (not realm)
+      (setq realm (cdr-safe (assoc "realm" args))))
+  (if (stringp url)
+      (setq url (url-generic-parse-url url)))
+  (if (or (null type) (eq type 'any))
+      ;; Whooo doogies!
+      ;; Go through and get _all_ the authorization strings that could apply
+      ;; to this URL, store them along with the 'rating' we have in the list
+      ;; of schemes, then sort them so that the 'best' is at the front of the
+      ;; list, then get the car, then get the cdr.
+      ;; Zooom zooom zoooooom
+      (cdr-safe
+       (car-safe
+	(sort
+	 (mapcar
+	  (function
+	   (lambda (scheme)
+	     (if (fboundp (car (cdr scheme)))
+		 (cons (cdr (cdr scheme))
+		       (funcall (car (cdr scheme)) url nil nil realm))
+	       (cons 0 nil))))
+	  url-registered-auth-schemes)
+	 (function
+	  (lambda (x y)
+	    (cond
+	     ((null (cdr x)) nil)
+	     ((and (cdr x) (null (cdr y))) t)
+	     ((and (cdr x) (cdr y))
+	      (>= (car x) (car y)))
+	     (t nil)))))))
+    (if (symbolp type) (setq type (symbol-name type)))
+    (let* ((scheme (car-safe
+		    (cdr-safe (assoc (downcase type)
+				     url-registered-auth-schemes)))))
+      (if (and scheme (fboundp scheme))
+	  (funcall scheme url prompt
+		   (and prompt
+			(funcall scheme url nil nil realm args))
+		   realm args)))))
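+
+;; Illustrative call (hypothetical URL): fetch the strongest cached
+;; credentials for a resource without prompting the user:
+;;   (url-get-authentication "http://www.example.com/private/" nil 'any nil)
+;; With TYPE `any' and PROMPT nil this yields a header value such as
+;; "Basic ..." or "Digest ...", or nil when nothing is cached.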
+
+;;;###autoload
+(defun url-register-auth-scheme (type &optional function rating)
+  "Register an HTTP authentication method.
+
+TYPE     is a string or symbol specifying the name of the method.   This
+         should be the same thing you expect to get returned in an Authenticate
+         header in HTTP/1.0 - it will be downcased.
+FUNCTION is the function to call to get the authorization information.  This
+         defaults to `url-?-auth', where ? is TYPE.
+RATING   is a rating between 1 and 10 of the strength of the authentication.
+         This is used when asking for the best authentication for a specific
+         URL.  The item with the highest rating is returned."
+  (let* ((type (cond
+		((stringp type) (downcase type))
+		((symbolp type) (downcase (symbol-name type)))
+		(t (error "Bad call to `url-register-auth-scheme'"))))
+	 (function (or function (intern (concat "url-" type "-auth"))))
+	 (rating (cond
+		  ((null rating) 2)
+		  ((stringp rating) (string-to-int rating))
+		  (t rating)))
+	 (node (assoc type url-registered-auth-schemes)))
+    (if (not (fboundp function))
+	(url-warn 'security
+		  (format (eval-when-compile
+			    (concat "Tried to register `%s' as an auth scheme"
+				    ", but it is not a function!"))
+			  function)))
+
+    (if node
+	(setcdr node (cons function rating))
+      (setq url-registered-auth-schemes
+	    (cons (cons type (cons function rating))
+		  url-registered-auth-schemes)))))
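+
+;; Example registrations (ratings illustrative; FUNCTION defaults to
+;; `url-TYPE-auth', so nil picks up the functions defined above):
+;;   (url-register-auth-scheme "basic"  nil 4)
+;;   (url-register-auth-scheme "digest" nil 7)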
+
+(defun url-auth-registered (scheme)
+  "Return non-nil if SCHEME is registered as an auth type."
+  (assoc scheme url-registered-auth-schemes))
+
+(provide 'url-auth)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-cache.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,203 @@
+;;; url-cache.el --- Uniform Resource Locator retrieval tool
+;; Author: $Author: fx $
+;; Created: $Date: 2002/01/22 17:53:45 $
+;; Version: $Revision: 1.4 $
+;; Keywords: comm, data, processes, hypermedia
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1993 - 1996 by William M. Perry <wmperry@cs.indiana.edu>
+;;; Copyright (c) 1996 - 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+(require 'url-parse)
+
+(defcustom url-cache-directory
+  (expand-file-name "cache" url-configuration-directory)
+  "*The directory where cache files should be stored."
+  :type 'directory
+  :group 'url-file)
+
+;; Cache manager
+(defun url-cache-file-writable-p (file)
+  "Follows the documentation of `file-writable-p', unlike `file-writable-p'."
+  (and (file-writable-p file)
+       (if (file-exists-p file)
+           (not (file-directory-p file))
+         (file-directory-p (file-name-directory file)))))
+                
+(defun url-cache-prepare (file)
+  "Makes it possible to cache data in FILE.
+Creates any necessary parent directories, deleting any non-directory files
+that would stop this.  Returns nil if parent directories can not be
+created.  If FILE already exists as a non-directory, it changes
+permissions of FILE or deletes FILE to make it possible to write a new
+version of FILE.  Returns nil if this can not be done.  Returns nil if
+FILE already exists as a directory.  Otherwise, returns t, indicating that
+FILE can be created or overwritten."
+  (cond
+   ((url-cache-file-writable-p file)
+    t)
+   ((file-directory-p file)
+    nil)
+   (t
+    (condition-case ()
+	(or (make-directory (file-name-directory file) t) t)
+      (error nil)))))
+
+;;;###autoload
+(defun url-store-in-cache (&optional buff)
+  "Store buffer BUFF in the cache."
+  (if (not (and buff (get-buffer buff)))
+      nil
+    (save-excursion
+      (and buff (set-buffer buff))
+      (let* ((fname (url-cache-create-filename (url-view-url t))))
+	(if (url-cache-prepare fname)
+	    (let ((coding-system-for-write 'binary))
+	      (write-region (point-min) (point-max) fname nil 5)))))))
+	
+;;;###autoload
+(defun url-is-cached (url)
+  "Return non-nil if the URL is cached."
+  (let* ((fname (url-cache-create-filename url))
+	 (attribs (file-attributes fname)))
+    (and fname				; got a filename
+	 (file-exists-p fname)		; file exists
+	 (not (eq (nth 0 attribs) t))	; It's not a directory
+	 (nth 5 attribs))))		; Can get last mod-time
+
+(defun url-cache-create-filename-human-readable (url)
+  "Return a filename in the local cache for URL"
+  (if url
+      (let* ((url (if (vectorp url) (url-recreate-url url) url))
+	     (urlobj (url-generic-parse-url url))
+	     (protocol (url-type urlobj))
+	     (hostname (url-host urlobj))
+	     (host-components
+	      (cons
+	       (user-real-login-name)
+	       (cons (or protocol "file")
+		     (reverse (split-string (or hostname "localhost")
+					    (eval-when-compile
+					      (regexp-quote ".")))))))
+	     (fname    (url-filename urlobj)))
+	(if (and fname (/= (length fname) 0) (= (aref fname 0) ?/))
+	    (setq fname (substring fname 1 nil)))
+	(if fname
+	    (let ((slash nil))
+	      (setq fname
+		    (mapconcat
+		     (function
+		      (lambda (x)
+			(cond
+			 ((and (= ?/ x) slash)
+			  (setq slash nil)
+			  "%2F")
+			 ((= ?/ x)
+			  (setq slash t)
+			  "/")
+			 (t
+			  (setq slash nil)
+			  (char-to-string x))))) fname ""))))
+
+	(setq fname (and fname
+			 (mapconcat
+			  (function (lambda (x)
+				      (if (= x ?~) "" (char-to-string x))))
+			  fname ""))
+	      fname (cond
+		     ((null fname) nil)
+		     ((or (string= "" fname) (string= "/" fname))
+		      url-directory-index-file)
+		     ((= (string-to-char fname) ?/)
+		      (if (string= (substring fname -1 nil) "/")
+			  (concat fname url-directory-index-file)
+			(substring fname 1 nil)))
+		     (t
+		      (if (string= (substring fname -1 nil) "/")
+			  (concat fname url-directory-index-file)
+			fname))))
+	(and fname
+	     (expand-file-name fname
+			       (expand-file-name
+				(mapconcat 'identity host-components "/")
+				url-cache-directory))))))
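+
+;; Mapping sketch (LOGIN stands for `user-real-login-name', URL purely
+;; illustrative):
+;;   "http://www.example.com/foo/bar.html"
+;;   => <url-cache-directory>/LOGIN/http/com/example/www/foo/bar.html
+;; A URL ending in "/" gets `url-directory-index-file' appended.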
+
+(defun url-cache-create-filename-using-md5 (url)
+  "Create a cached filename using MD5.
+ Very fast if you are in XEmacs, suitably fast otherwise."
+  (require 'md5)
+  (if url
+      (let* ((url (if (vectorp url) (url-recreate-url url) url))
+	     (checksum (md5 url))
+	     (urlobj (url-generic-parse-url url))
+	     (protocol (url-type urlobj))
+	     (hostname (url-host urlobj))
+	     (host-components
+	      (cons
+	       (user-real-login-name)
+	       (cons (or protocol "file")
+		     (nreverse
+		      (delq nil
+			    (split-string (or hostname "localhost")
+					  (eval-when-compile
+					    (regexp-quote "."))))))))
+	     (fname    (url-filename urlobj)))
+	(and fname
+	     (expand-file-name checksum
+			       (expand-file-name
+				(mapconcat 'identity host-components "/")
+				url-cache-directory))))))
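+
+;; Same host layout as the human-readable variant, but the leaf name is
+;; the MD5 of the full URL (URL illustrative):
+;;   "http://www.example.com/foo/bar.html"
+;;   => <url-cache-directory>/LOGIN/http/com/example/www/<md5 of URL>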
+
+(defcustom url-cache-creation-function 'url-cache-create-filename-using-md5
+  "*What function to use to create a cached filename."
+  :type '(choice (const :tag "MD5 of filename (low collision rate)"
+			:value url-cache-create-filename-using-md5)
+		 (const :tag "Human readable filenames (higher collision rate)"
+			:value url-cache-create-filename-human-readable)
+		 (function :tag "Other"))
+  :group 'url-cache)
+
+(defun url-cache-create-filename (url)
+  (funcall url-cache-creation-function url))
+
+;;;###autoload
+(defun url-cache-extract (fnam)
+  "Extract FNAM from the local disk cache"
+  (erase-buffer)
+  (insert-file-contents-literally fnam))
+
+;;;###autoload
+(defun url-cache-expired (url mod)
+  "Return t iff a cached file has expired."
+  (let* ((urlobj (if (vectorp url) url (url-generic-parse-url url)))
+	 (type (url-type urlobj)))
+    (cond
+     (url-standalone-mode
+      (not (file-exists-p (url-cache-create-filename url))))
+     ((string= type "http")
+      t)
+     ((member type '("file" "ftp"))
+      (if (or (equal mod '(0 0)) (not mod))
+	  t
+	(or (> (nth 0 mod) (nth 0 (current-time)))
+	    (> (nth 1 mod) (nth 1 (current-time))))))
+     (t nil))))
+
+(provide 'url-cache)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-cid.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,65 @@
+;;; url-cid.el --- Content-ID URL loader
+;; Author: $Author: fx $
+;; Created: $Date: 2001/05/05 16:35:58 $
+;; Version: $Revision: 1.3 $
+;; Keywords: comm, data, processes
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1998 - 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(require 'url-vars)
+(require 'url-parse)
+
+(require 'mm-decode)
+
+(defun url-cid-gnus (cid)
+  (let ((content-type nil)
+ 	(encoding nil)
+ 	(part nil)
+ 	(data nil))
+    (setq part (mm-get-content-id cid))
+    (if (not part)
+	(message "Unknown CID encountered: %s" cid)
+      (setq data (save-excursion
+		   (set-buffer (mm-handle-buffer part))
+		   (buffer-string))
+	    content-type (mm-handle-type part)
+	    encoding (symbol-name (mm-handle-encoding part)))
+      (if (= 0 (length content-type)) (setq content-type "text/plain"))
+      (if (= 0 (length encoding)) (setq encoding "8bit"))
+      (if (listp content-type)
+	  (setq content-type (car content-type)))
+      (insert (format "Content-length: %d\r\n"  (length data))
+	      "Content-type: " content-type "\r\n"
+	      "Content-transfer-encoding: " encoding "\r\n"
+	      "\r\n"
+	      (or data "")))))
+
+;;;###autoload
+(defun url-cid (url)
+  (cond
+   ((fboundp 'mm-get-content-id)
+    ;; Using Pterodactyl Gnus or later
+    (save-excursion
+      (set-buffer (generate-new-buffer " *url-cid*"))
+      (url-cid-gnus (url-filename url))))
+   (t
+    (message "Unable to handle CID URL: %s" url))))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-cookie.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,468 @@
+;;; url-cookie.el --- Netscape Cookie support
+;; Author: $Author: wmperry $
+;; Created: $Date: 2002/10/29 14:44:59 $
+;; Version: $Revision: 1.7 $
+;; Keywords: comm, data, processes, hypermedia
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1996 by William M. Perry <wmperry@cs.indiana.edu>
+;;; Copyright (c) 1996 - 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(require 'timezone)
+(require 'url-util)
+(require 'url-parse)
+(eval-when-compile (require 'cl))
+
+;; See http://home.netscape.com/newsref/std/cookie_spec.html for the
+;; 'open standard' defining this crap.
+;;
+;; A cookie is stored internally as a vector of 7 slots
+;; [ 'cookie name value expires path domain secure ]
+
+(defsubst url-cookie-name    (cookie) (aref cookie 1))
+(defsubst url-cookie-value   (cookie) (aref cookie 2))
+(defsubst url-cookie-expires (cookie) (aref cookie 3))
+(defsubst url-cookie-path    (cookie) (aref cookie 4))
+(defsubst url-cookie-domain  (cookie) (aref cookie 5))
+(defsubst url-cookie-secure  (cookie) (aref cookie 6))
+
+(defsubst url-cookie-set-name    (cookie val) (aset cookie 1 val))
+(defsubst url-cookie-set-value   (cookie val) (aset cookie 2 val))
+(defsubst url-cookie-set-expires (cookie val) (aset cookie 3 val))
+(defsubst url-cookie-set-path    (cookie val) (aset cookie 4 val))
+(defsubst url-cookie-set-domain  (cookie val) (aset cookie 5 val))
+(defsubst url-cookie-set-secure  (cookie val) (aset cookie 6 val))
+(defsubst url-cookie-retrieve-arg (key args) (nth 1 (memq key args)))
+
+(defsubst url-cookie-create (&rest args)
+  (let ((retval (make-vector 7 nil)))
+    (aset retval 0 'cookie)
+    (url-cookie-set-name retval (url-cookie-retrieve-arg :name args))
+    (url-cookie-set-value retval (url-cookie-retrieve-arg :value args))
+    (url-cookie-set-expires retval (url-cookie-retrieve-arg :expires args))
+    (url-cookie-set-path retval (url-cookie-retrieve-arg :path args))
+    (url-cookie-set-domain retval (url-cookie-retrieve-arg :domain args))
+    (url-cookie-set-secure retval (url-cookie-retrieve-arg :secure args))
+    retval))
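+
+;; Construction sketch (field values illustrative):
+;;   (url-cookie-create :name "session" :value "abc123"
+;;                      :domain ".example.com" :path "/" :secure nil)
+;;   ;; => [cookie "session" "abc123" nil "/" ".example.com" nil]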
+
+(defun url-cookie-p (obj)
+  (and (vectorp obj) (= (length obj) 7) (eq (aref obj 0) 'cookie)))
+
+(defgroup url-cookie nil
+  "URL cookies"
+  :prefix "url-"
+  :prefix "url-cookie-"
+  :group 'url)
+
+(defvar url-cookie-storage nil         "Where cookies are stored.")
+(defvar url-cookie-secure-storage nil  "Where secure cookies are stored.")
+(defcustom url-cookie-file nil            "*Where cookies are stored on disk."
+  :type '(choice (const :tag "Default" :value nil) file)
+  :group 'url-file
+  :group 'url-cookie)
+
+(defcustom url-cookie-confirmation nil
+  "*If non-nil, confirmation by the user is required to accept HTTP cookies."
+  :type 'boolean
+  :group 'url-cookie)
+
+(defcustom url-cookie-multiple-line nil
+  "*If nil, HTTP requests put all cookies for the server on one line.
+Some web servers, such as http://www.hotmail.com/, only accept cookies
+when they are on one line.  This is broken behaviour, but just try
+telling Microsoft that."
+  :type 'boolean
+  :group 'url-cookie)
+
+(defvar url-cookies-changed-since-last-save nil
+  "Whether the cookies list has changed since the last save operation.")
+
+;;;###autoload
+(defun url-cookie-parse-file (&optional fname)
+  (setq fname (or fname url-cookie-file))
+  (condition-case ()
+      (load fname nil t)
+    (error (message "Could not load cookie file %s" fname))))
+
+(defun url-cookie-clean-up (&optional secure)
+  (let* (
+	 (var (if secure 'url-cookie-secure-storage 'url-cookie-storage))
+	 (val (symbol-value var))
+	 (cur nil)
+	 (new nil)
+	 (cookies nil)
+	 (cur-cookie nil)
+	 (new-cookies nil)
+	 )
+    (while val
+      (setq cur (car val)
+	    val (cdr val)
+	    new-cookies nil
+	    cookies (cdr cur))
+      (while cookies
+	(setq cur-cookie (car cookies)
+	      cookies (cdr cookies))
+	(if (or (not (url-cookie-p cur-cookie))
+		(url-cookie-expired-p cur-cookie)
+		(null (url-cookie-expires cur-cookie)))
+	    nil
+	  (setq new-cookies (cons cur-cookie new-cookies))))
+      (if (not new-cookies)
+	  nil
+	(setcdr cur new-cookies)
+	(setq new (cons cur new))))
+    (set var new)))
+
+;;;###autoload
+(defun url-cookie-write-file (&optional fname)
+  (setq fname (or fname url-cookie-file))
+  (cond
+   ((not url-cookies-changed-since-last-save) nil)
+   ((not (file-writable-p fname))
+    (message "Cookies file %s (see variable `url-cookie-file') is unwritable." fname))
+   (t
+    (url-cookie-clean-up)
+    (url-cookie-clean-up t)
+    (save-excursion
+      (set-buffer (get-buffer-create " *cookies*"))
+      (erase-buffer)
+      (fundamental-mode)
+      (insert ";; Emacs-W3 HTTP cookies file\n"
+	      ";; Automatically generated file!!! DO NOT EDIT!!!\n\n"
+	      "(setq url-cookie-storage\n '")
+      (pp url-cookie-storage (current-buffer))
+      (insert ")\n(setq url-cookie-secure-storage\n '")
+      (pp url-cookie-secure-storage (current-buffer))
+      (insert ")\n")
+      (write-file fname)
+      (kill-buffer (current-buffer))))))
+
+(defun url-cookie-store (name value &optional expires domain path secure)
+  "Stores a netscape-style cookie"
+  (let* ((storage (if secure url-cookie-secure-storage url-cookie-storage))
+	 (tmp storage)
+	 (cur nil)
+	 (found-domain nil))
+
+    ;; First, look for a matching domain
+    (setq found-domain (assoc domain storage))
+
+    (if found-domain
+	;; Need to either stick the new cookie in existing domain storage
+	;; or possibly replace an existing cookie if the names match.
+	(progn
+	  (setq storage (cdr found-domain)
+		tmp nil)
+	  (while storage
+	    (setq cur (car storage)
+		  storage (cdr storage))
+	    (if (and (equal path (url-cookie-path cur))
+		     (equal name (url-cookie-name cur)))
+		(progn
+		  (url-cookie-set-expires cur expires)
+		  (url-cookie-set-value cur value)
+		  (setq tmp t))))
+	  (if (not tmp)
+	      ;; New cookie
+	      (setcdr found-domain (cons
+				    (url-cookie-create :name name
+						       :value value
+						       :expires expires
+						       :domain domain
+						       :path path
+						       :secure secure)
+				    (cdr found-domain)))))
+      ;; Need to add a new top-level domain
+      (setq tmp (url-cookie-create :name name
+				   :value value
+				   :expires expires
+				   :domain domain
+				   :path path
+				   :secure secure))
+      (cond
+       (storage
+	(setcdr storage (cons (list domain tmp) (cdr storage))))
+       (secure
+	(setq url-cookie-secure-storage (list (list domain tmp))))
+       (t
+	(setq url-cookie-storage (list (list domain tmp))))))))
+
+(defun url-cookie-expired-p (cookie)
+  (let* (
+	 (exp (url-cookie-expires cookie))
+	 (cur-date (and exp (timezone-parse-date (current-time-string))))
+	 (exp-date (and exp (timezone-parse-date exp)))
+	 (cur-greg (and cur-date (timezone-absolute-from-gregorian
+				  (string-to-int (aref cur-date 1))
+				  (string-to-int (aref cur-date 2))
+				  (string-to-int (aref cur-date 0)))))
+	 (exp-greg (and exp (timezone-absolute-from-gregorian
+			     (string-to-int (aref exp-date 1))
+			     (string-to-int (aref exp-date 2))
+			     (string-to-int (aref exp-date 0)))))
+	 (diff-in-days (and exp (- cur-greg exp-greg)))
+	 )
+    (cond
+     ((not exp)	nil)			; No expiry == expires at browser quit
+     ((< diff-in-days 0) nil)		; Expires sometime after today
+     ((> diff-in-days 0) t)		; Expired before today
+     (t					; Expires sometime today, check times
+      (let* ((cur-time (timezone-parse-time (aref cur-date 3)))
+	     (exp-time (timezone-parse-time (aref exp-date 3)))
+	     (cur-norm (+ (* 3600 (string-to-int (aref cur-time 0)))
+			  (*   60 (string-to-int (aref cur-time 1)))
+			  (*    1 (string-to-int (aref cur-time 2)))))
+	     (exp-norm (+ (* 3600 (string-to-int (aref exp-time 0)))
+			  (*   60 (string-to-int (aref exp-time 1)))
+			  (*    1 (string-to-int (aref exp-time 2))))))
+	(> (- cur-norm exp-norm) 1))))))
+
+;;;###autoload
+(defun url-cookie-retrieve (host path &optional secure)
+  "Retrieves all the netscape-style cookies for a specified HOST and PATH"
+  (let ((storage (if secure
+		     (append url-cookie-secure-storage url-cookie-storage)
+		   url-cookie-storage))
+	(case-fold-search t)
+	(cookies nil)
+	(cur nil)
+	(retval nil)
+	(path-regexp nil))
+    (while storage
+      (setq cur (car storage)
+	    storage (cdr storage)
+	    cookies (cdr cur))
+      (if (and (car cur)
+	       (string-match (concat "^.*" (regexp-quote (car cur)) "$") host))
+	  ;; The domains match - a possible hit!
+	  (while cookies
+	    (setq cur (car cookies)
+		  cookies (cdr cookies)
+		  path-regexp (concat "^" (regexp-quote
+					   (url-cookie-path cur))))
+	    (if (and (string-match path-regexp path)
+		     (not (url-cookie-expired-p cur)))
+		(setq retval (cons cur retval))))))
+    retval))
+
+;;;###autoload
+(defun url-cookie-generate-header-lines (host path secure)
+  (let* ((cookies (url-cookie-retrieve host path secure))
+	(retval nil)
+	(cur nil)
+	(chunk nil))
+    ;; Have to sort this for sending most specific cookies first
+    (setq cookies (and cookies
+		       (sort cookies
+			     (function
+			      (lambda (x y)
+				(> (length (url-cookie-path x))
+				   (length (url-cookie-path y))))))))
+    (while cookies
+      (setq cur (car cookies)
+	    cookies (cdr cookies)
+	    chunk (format "%s=%s" (url-cookie-name cur) (url-cookie-value cur))
+	    retval (if (and url-cookie-multiple-line
+			    (< 80 (+ (length retval) (length chunk) 4)))
+		       (concat retval "\r\nCookie: " chunk)
+		     (if retval
+			 (concat retval "; " chunk)
+		       (concat "Cookie: " chunk)))))
+    (if retval
+	(concat retval "\r\n")
+      "")))
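+
+;; Output sketch: with two matching cookies (names and values
+;; illustrative), sorted most-specific path first, the default
+;; single-line result looks like
+;;   "Cookie: session=abc123; lang=en\r\n"
+;; With `url-cookie-multiple-line' non-nil, long sets may instead be
+;; split across several "Cookie:" header lines.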
+
+(defvar url-cookie-two-dot-domains
+  (concat "\\.\\("
+   (mapconcat 'identity (list "com" "edu" "net" "org" "gov" "mil" "int")
+	      "\\|")
+   "\\)$")
+  "A regular expression of top-level domains that only require two matching
+'.'s in the domain name in order to set a cookie.")
+
+(defcustom url-cookie-trusted-urls nil
+  "*A list of regular expressions matching URLs to always accept cookies from."
+  :type '(repeat regexp)
+  :group 'url-cookie)
+
+(defcustom url-cookie-untrusted-urls nil
+  "*A list of regular expressions matching URLs to never accept cookies from."
+  :type '(repeat regexp)
+  :group 'url-cookie)
+
+(defun url-cookie-host-can-set-p (host domain)
+  (let ((numdots 0)
+	(tmp domain)
+	(last nil)
+	(case-fold-search t)
+	(mindots 3))
+    (while (setq last (string-match "\\." domain last))
+      (setq numdots (1+ numdots)
+	    last (1+ last)))
+    (if (string-match url-cookie-two-dot-domains domain)
+	(setq mindots 2))
+    (cond
+     ((string= host domain)		; Apparently netscape lets you do this
+      t)
+     ((>= numdots mindots)		; We have enough dots in domain name
+      ;; Need to check and make sure the host is actually _in_ the
+      ;; domain it wants to set a cookie for though.
+      (string-match (concat (regexp-quote domain) "$") host))
+     (t
+      nil))))
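+
+;; Examples of the check (host/domain pairs illustrative):
+;;   host "www.example.com", domain "www.example.com"  => allowed
+;;   host "www.example.com", domain ".example.com"     => allowed
+;;     (".com" is in `url-cookie-two-dot-domains', so two dots suffice)
+;;   host "www.example.com", domain ".com"             => rejected (too few dots)
+;;   host "www.evil.org",    domain ".example.com"     => rejected (host not in domain)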
+
+;;;###autoload
+(defun url-cookie-handle-set-cookie (str)
+  (setq url-cookies-changed-since-last-save t)
+  (let* ((args (url-parse-args str t))
+	 (case-fold-search t)
+	 (secure (and (assoc-ignore-case "secure" args) t))
+	 (domain (or (cdr-safe (assoc-ignore-case "domain" args))
+		     (url-host url-current-object)))
+	 (current-url (url-view-url t))
+	 (trusted url-cookie-trusted-urls)
+	 (untrusted url-cookie-untrusted-urls)
+	 (expires (cdr-safe (assoc-ignore-case "expires" args)))
+	 (path (or (cdr-safe (assoc-ignore-case "path" args))
+		   (file-name-directory
+		    (url-filename url-current-object))))
+	 (rest nil))
+    (while args
+      (if (not (member (downcase (car (car args)))
+		       '("secure" "domain" "expires" "path")))
+	  (setq rest (cons (car args) rest)))
+      (setq args (cdr args)))
+
+    ;; Sometimes we get dates that the timezone package cannot handle very
+    ;; gracefully - take care of this here, instead of in url-cookie-expired-p
+    ;; to speed things up.
+    (if (and expires
+	     (string-match
+	      (concat "^[^,]+, +\\(..\\)-\\(...\\)-\\(..\\) +"
+		      "\\(..:..:..\\) +\\[*\\([^\]]+\\)\\]*$")
+	      expires))
+	(setq expires (concat (match-string 1 expires) " "
+			      (match-string 2 expires) " "
+			      (match-string 3 expires) " "
+			      (match-string 4 expires) " ["
+			      (match-string 5 expires) "]")))
+
+    ;; This one is for older Emacs/XEmacs variants that don't
+    ;; understand this format without tenths of a second in it.
+    ;; Wednesday, 30-Dec-2037 16:00:00 GMT
+    ;;       - vs -
+    ;; Wednesday, 30-Dec-2037 16:00:00.00 GMT
+    (if (and expires
+	     (string-match
+	      "\\([0-9]+\\)-\\([A-Za-z]+\\)-\\([0-9]+\\)[ \t]+\\([0-9]+:[0-9]+:[0-9]+\\)\\(\\.[0-9]+\\)*[ \t]+\\([-+a-zA-Z0-9]+\\)"
+	      expires))
+	(setq expires (concat (match-string 1 expires) "-"	; day
+			      (match-string 2 expires) "-"	; month
+			      (match-string 3 expires) " "	; year
+			      (match-string 4 expires) ".00 "	; hour:minutes:seconds
+			      (match-string 6 expires))))	; timezone
+    
+    (while (consp trusted)
+      (if (string-match (car trusted) current-url)
+	  (setq trusted (- (match-end 0) (match-beginning 0)))
+	(pop trusted)))
+    (while (consp untrusted)
+      (if (string-match (car untrusted) current-url)
+	  (setq untrusted (- (match-end 0) (match-beginning 0)))
+	(pop untrusted)))
+    (if (and trusted untrusted)
+	;; Choose the more specific match
+	(if (> trusted untrusted)
+	    (setq untrusted nil)
+	  (setq trusted nil)))
+    (cond
+     (untrusted
+      ;; The site was explicitly marked as untrusted by the user
+      nil)
+     ((or (eq url-privacy-level 'paranoid)
+	  (and (listp url-privacy-level) (memq 'cookies url-privacy-level)))
+      ;; user never wants cookies
+      nil)
+     ((and url-cookie-confirmation
+	   (not trusted)
+	   (save-window-excursion
+	     (with-output-to-temp-buffer "*Cookie Warning*"
+	       (mapcar
+		(function
+		 (lambda (x)
+		   (princ (format "%s - %s" (car x) (cdr x))))) rest))
+	     (prog1
+		 (not (funcall url-confirmation-func
+			       (format "Allow %s to set these cookies? "
+				       (url-host url-current-object))))
+	       (if (get-buffer "*Cookie Warning*")
+		   (kill-buffer "*Cookie Warning*")))))
+      ;; user wants to be asked, and declined.
+      nil)
+     ((url-cookie-host-can-set-p (url-host url-current-object) domain)
+      ;; Cookie is accepted by the user, and passes our security checks
+      (let ((cur nil))
+	(while rest
+	  (setq cur (pop rest))
+	  (url-cookie-store (car cur) (cdr cur)
+			    expires domain path secure))))
+     (t
+      (message "%s tried to set a cookie for domain %s - rejected."
+	       (url-host url-current-object) domain)))))
+
+(defvar url-cookie-timer nil)
+
+(defcustom url-cookie-save-interval 3600
+  "*The number of seconds between automatic saves of cookies.
+Default is 1 hour.  Note that if you change this variable outside of
+the `customize' interface after `url-do-setup' has been run, you need
+to run the `url-cookie-setup-save-timer' function manually."
+  :set (function (lambda (var val)
+		   (set-default var val)
+		   (and (featurep 'url)
+			(fboundp 'url-cookie-setup-save-timer)
+			(url-cookie-setup-save-timer))))
+  :type 'integer
+  :group 'url)
+
+;;;###autoload
+(defun url-cookie-setup-save-timer ()
+  "Reset the cookie saver timer."
+  (interactive)
+  (cond
+   ((featurep 'itimer)
+    (ignore-errors (delete-itimer url-cookie-timer))
+    (setq url-cookie-timer nil)
+    (if url-cookie-save-interval
+	(setq url-cookie-timer
+	      (start-itimer "url-cookie-saver" 'url-cookie-write-file
+			    url-cookie-save-interval
+			    url-cookie-save-interval))))
+   ((fboundp 'run-at-time)
+    (ignore-errors (cancel-timer url-cookie-timer))
+    (setq url-cookie-timer nil)
+    (if url-cookie-save-interval
+	(setq url-cookie-timer
+	      (run-at-time url-cookie-save-interval
+			   url-cookie-save-interval
+			   'url-cookie-write-file))))
+   (t nil)))
+
+(provide 'url-cookie)
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-dav.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,973 @@
+;;; url-dav.el --- WebDAV support
+
+;; Copyright (C) 2001 Free Software Foundation, Inc.
+
+;; Author: Bill Perry <wmperry@gnu.org>
+;; Maintainer: Bill Perry <wmperry@gnu.org>
+;; Version: $Revision: 1.6 $
+;; Keywords: url, vc
+
+;; GNU Emacs is free software; you can redistribute it and/or modify
+;; it under the terms of the GNU General Public License as published by
+;; the Free Software Foundation; either version 2, or (at your option)
+;; any later version.
+
+;; GNU Emacs is distributed in the hope that it will be useful,
+;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;; GNU General Public License for more details.
+
+;; You should have received a copy of the GNU General Public License
+;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;; Boston, MA 02111-1307, USA.
+
+(eval-when-compile
+  (require 'cl))
+
+(require 'xml)
+(require 'url-util)
+(require 'url-handlers)
+
+(defvar url-dav-supported-protocols '(1 2)
+  "List of supported DAV versions.")
+
+;;;###autoload
+(defun url-dav-supported-p (url)
+  (and (featurep 'xml)
+       (fboundp 'xml-expand-namespace)
+       (intersection url-dav-supported-protocols
+		     (plist-get (url-http-options url) 'dav))))
+
+(defun url-dav-node-text (node)
+  "Return the text data from the XML node NODE."
+  (mapconcat (lambda (txt)
+	       (if (stringp txt)
+		   txt
+		 "")) (xml-node-children node) " "))
+
+
+;;; Parsing routines for the actual node contents.
+;;;
+;;; I am not incredibly happy with how this code looks/works right
+;;; now, but it DOES work, and if we get the API right, our callers
+;;; won't have to worry about the internal representation.
+
+(defconst url-dav-datatype-attribute
+  'urn:uuid:c2f41010-65b3-11d1-a29f-00aa00c14882/dt)
+
+(defun url-dav-process-integer-property (node)
+  (truncate (string-to-number (url-dav-node-text node))))
+
+(defun url-dav-process-number-property (node)
+  (string-to-number (url-dav-node-text node)))
+
+(defconst url-dav-iso8601-regexp
+  (let* ((dash "-?")
+	 (colon ":?")
+	 (4digit "\\([0-9][0-9][0-9][0-9]\\)")
+	 (2digit "\\([0-9][0-9]\\)")
+	 (date-fullyear 4digit)
+	 (date-month 2digit)
+	 (date-mday 2digit)
+	 (time-hour 2digit)
+	 (time-minute 2digit)
+	 (time-second 2digit)
+	 (time-secfrac "\\(\\.[0-9]+\\)?")
+	 (time-numoffset (concat "[-+]\\(" time-hour "\\):" time-minute))
+	 (time-offset (concat "Z" time-numoffset))
+	 (partial-time (concat time-hour colon time-minute colon time-second
+			       time-secfrac))
+	 (full-date (concat date-fullyear dash date-month dash date-mday))
+	 (full-time (concat partial-time time-offset))
+	 (date-time (concat full-date "T" full-time)))
+    (list (concat "^" full-date)
+	  (concat "T" partial-time)
+	  (concat "Z" time-numoffset)))
+  "List of regular expressions matching iso8601 dates.
+1st regular expression matches the date.
+2nd regular expression matches the time.
+3rd regular expression matches the (optional) timezone specification.")
+
+(defun url-dav-process-date-property (node)
+  (require 'parse-time)
+  (let* ((date-re (nth 0 url-dav-iso8601-regexp))
+	 (time-re (nth 1 url-dav-iso8601-regexp))
+	 (tz-re (nth 2 url-dav-iso8601-regexp))
+	 (date-string (url-dav-node-text node))
+	 re-start
+	 time seconds minute hour fractional-seconds
+	 day month year day-of-week dst tz)
+    ;; We need to populate 'time' with
+    ;; (SEC MIN HOUR DAY MON YEAR DOW DST TZ)
+
+    ;; Nobody else handles iso8601 correctly, let's do it ourselves.
+    (when (string-match date-re date-string re-start)
+      (setq year (string-to-int (match-string 1 date-string))
+	    month (string-to-int (match-string 2 date-string))
+	    day (string-to-int (match-string 3 date-string))
+	    re-start (match-end 0))
+      (when (string-match time-re date-string re-start)
+	(setq hour (string-to-int (match-string 1 date-string))
+	      minute (string-to-int (match-string 2 date-string))
+	      seconds (string-to-int (match-string 3 date-string))
+	      fractional-seconds (string-to-int (or
+						 (match-string 4 date-string)
+						 "0"))
+	      re-start (match-end 0))
+	(when (string-match tz-re date-string re-start)
+	  (setq tz (match-string 1 date-string)))
+	(url-debug 'dav "Parsed iso8601%s date" (if tz "tz" ""))
+	(setq time (list seconds minute hour day month year day-of-week dst tz))))
+
+    ;; Fall back to having Gnus do fancy things for us.
+    (when (not time)
+      (setq time (parse-time-string date-string)))
+
+    (if time
+	(setq time (apply 'encode-time time))
+      (url-debug 'dav "Unable to decode date (%S) (%s)"
+		 (xml-node-name node) date-string))
+    time))
+
+(defun url-dav-process-boolean-property (node)
+  (/= 0 (string-to-int (url-dav-node-text node))))
+
+(defun url-dav-process-uri-property (node)
+  ;; Returns a parsed representation of the URL...
+  (url-generic-parse-url (url-dav-node-text node)))
+
+(defun url-dav-find-parser (node)
+  "Find a function to parse the XML node NODE."
+  (or (get (xml-node-name node) 'dav-parser)
+      (let ((fn (intern (format "url-dav-process-%s" (xml-node-name node)))))
+	(if (not (fboundp fn))
+	    (setq fn 'url-dav-node-text)
+	  (put (xml-node-name node) 'dav-parser fn))
+	fn)))
+
+(defmacro url-dav-dispatch-node (node)
+  `(funcall (url-dav-find-parser ,node) ,node))
+
+(defun url-dav-process-DAV:prop (node)
+  ;; A prop node has content model of ANY
+  ;;
+  ;; Some predefined nodes have special meanings though.
+  ;;
+  ;; DAV:supportedlock    - list of DAV:lockentry
+  ;; DAV:source
+  ;; DAV:iscollection     - boolean
+  ;; DAV:getcontentlength - integer
+  ;; DAV:ishidden         - boolean
+  ;; DAV:getcontenttype   - string
+  ;; DAV:resourcetype     - node whose name is the resource type
+  ;; DAV:getlastmodified  - date
+  ;; DAV:creationdate     - date
+  ;; DAV:displayname      - string
+  ;; DAV:getetag          - unknown
+  (let ((children (xml-node-children node))
+	(node-type nil)
+	(props nil)
+	(value nil)
+	(handler-func nil))
+    (when (not children)
+      (error "No child nodes in DAV:prop"))
+
+    (while children
+      (setq node (car children)
+	    node-type (intern
+		       (or 
+			(cdr-safe (assq url-dav-datatype-attribute
+					(xml-node-attributes node)))
+			"unknown"))
+	    value nil)
+
+      (case node-type
+	((dateTime.iso8601tz
+	  dateTime.iso8601
+	  dateTime.tz
+	  dateTime.rfc1123
+	  dateTime
+	  date)				; date is our 'special' one...
+	 ;; Some type of date/time string.
+	 (setq value (url-dav-process-date-property node)))
+	(int
+	 ;; Integer type...
+	 (setq value (url-dav-process-integer-property node)))
+	((number float)
+	 (setq value (url-dav-process-number-property node)))
+	(boolean
+	 (setq value (url-dav-process-boolean-property node)))
+	(uri
+	 (setq value (url-dav-process-uri-property node)))
+	(otherwise
+	 (if (not (eq node-type 'unknown))
+	     (url-debug 'dav "Unknown data type in url-dav-process-prop: %s"
+			node-type))
+	 (setq value (url-dav-dispatch-node node))))
+
+      (setq props (plist-put props (xml-node-name node) value)
+	    children (cdr children)))
+    props))
+
+(defun url-dav-process-DAV:supportedlock (node)
+  ;; DAV:supportedlock is a list of DAV:lockentry items.
+  ;; DAV:lockentry in turn contains a DAV:lockscope and DAV:locktype.
+  ;; The DAV:lockscope must have a single node beneath it, ditto for
+  ;; DAV:locktype.
+  (let ((children (xml-node-children node))
+	(results nil)
+	scope type)
+    (while children
+      (when (and (not (stringp (car children)))
+		 (eq (xml-node-name (car children)) 'DAV:lockentry))
+	(setq scope (assq 'DAV:lockscope (xml-node-children (car children)))
+	      type (assq 'DAV:locktype (xml-node-children (car children))))
+	(when (and scope type)
+	  (setq scope (xml-node-name (car (xml-node-children scope)))
+		type (xml-node-name (car (xml-node-children type))))
+	  (push (cons type scope) results)))
+      (setq children (cdr children)))
+    results))
+
+(defun url-dav-process-subnode-property (node)
+  ;; Returns a list of child node names.
+  (delq nil (mapcar 'car-safe (xml-node-children node))))
+
+(defalias 'url-dav-process-DAV:depth 'url-dav-process-integer-property)
+(defalias 'url-dav-process-DAV:resourcetype 'url-dav-process-subnode-property)
+(defalias 'url-dav-process-DAV:locktype 'url-dav-process-subnode-property)
+(defalias 'url-dav-process-DAV:lockscope 'url-dav-process-subnode-property)
+(defalias 'url-dav-process-DAV:getcontentlength 'url-dav-process-integer-property)
+(defalias 'url-dav-process-DAV:getlastmodified 'url-dav-process-date-property)
+(defalias 'url-dav-process-DAV:creationdate 'url-dav-process-date-property)
+(defalias 'url-dav-process-DAV:iscollection 'url-dav-process-boolean-property)
+(defalias 'url-dav-process-DAV:ishidden 'url-dav-process-boolean-property)
+
+(defun url-dav-process-DAV:locktoken (node)
+  ;; DAV:locktoken can have one or more DAV:href children.
+  (delq nil (mapcar (lambda (n)
+		      (if (stringp n)
+			  n
+			(url-dav-dispatch-node n)))
+		    (xml-node-children node))))
+
+(defun url-dav-process-DAV:owner (node)
+  ;; DAV:owner can contain anything.
+  (delq nil (mapcar (lambda (n)
+		      (if (stringp n)
+			  n
+			(url-dav-dispatch-node n)))
+		    (xml-node-children node))))
+
+(defun url-dav-process-DAV:activelock (node)
+  ;; DAV:activelock can contain:
+  ;;  DAV:lockscope
+  ;;  DAV:locktype
+  ;;  DAV:depth
+  ;;  DAV:owner (optional)
+  ;;  DAV:timeout (optional)
+  ;;  DAV:locktoken (optional)
+  (let ((children (xml-node-children node))
+	(results nil))
+    (while children
+      (if (listp (car children))
+	  (push (cons (xml-node-name (car children))
+		      (url-dav-dispatch-node (car children)))
+		results))
+      (setq children (cdr children)))
+    results))
+
+(defun url-dav-process-DAV:lockdiscovery (node)
+  ;; Can only contain a list of DAV:activelock objects.
+  (let ((children (xml-node-children node))
+	(results nil))
+    (while children
+      (cond
+       ((stringp (car children))
+	;; text node? why?
+	nil)
+       ((eq (xml-node-name (car children)) 'DAV:activelock)
+	(push (url-dav-dispatch-node (car children)) results))
+       (t
+	;; Ignore unknown nodes...
+	nil))
+      (setq children (cdr children)))
+    results))
+
+(defun url-dav-process-DAV:status (node)
+  ;; The node contains a standard HTTP/1.1 response line... we really
+  ;; only care about the numeric status code.
+  (let ((status (url-dav-node-text node)))
+    (if (string-match "\\`[ \r\t\n]*HTTP/[0-9.]+ \\([0-9]+\\)" status)
+	(string-to-int (match-string 1 status))
+      500)))
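+
+;; Example: a node containing "HTTP/1.1 207 Multi-Status" parses to the
+;; integer 207; anything that does not look like a status line falls
+;; back to 500.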
+
+(defun url-dav-process-DAV:propstat (node)
+  ;; A propstat node can have the following children...
+  ;;
+  ;; DAV:prop - a list of properties and values
+  ;; DAV:status - An HTTP/1.1 status line
+  (let ((children (xml-node-children node))
+	(props nil)
+	(status nil))
+    (when (not children)
+      (error "No child nodes in DAV:propstat"))
+
+    (setq props (url-dav-dispatch-node (assq 'DAV:prop children))
+	  status (url-dav-dispatch-node (assq 'DAV:status children)))
+
+    ;; Need to parse out the HTTP status
+    (setq props (plist-put props 'DAV:status status))
+    props))
+
+(defun url-dav-process-DAV:response (node)
+  (let ((children (xml-node-children node))
+	(propstat nil)
+	(href))
+    (when (not children)
+      (error "No child nodes in DAV:response"))
+
+    ;; A response node can have the following children...
+    ;;
+    ;; DAV:href     - URL the response is for.
+    ;; DAV:propstat - see url-dav-process-propstat
+    ;; DAV:responsedescription - text description of the response
+    (setq propstat (assq 'DAV:propstat children)
+	  href (assq 'DAV:href children))
+
+    (when (not href)
+      (error "No href in DAV:response"))
+
+    (when (not propstat)
+      (error "No propstat in DAV:response"))
+
+    (setq propstat (url-dav-dispatch-node propstat)
+	  href (url-dav-dispatch-node href))
+    (cons href propstat)))
+
+(defun url-dav-process-DAV:multistatus (node)
+  (let ((children (xml-node-children node))
+	(results nil))
+    (while children
+      (push (url-dav-dispatch-node (car children)) results)
+      (setq children (cdr children)))
+    results))
+
+
+;;; DAV request/response generation/processing
+(defun url-dav-process-response (buffer url)
+  "Parses a WebDAV response from BUFFER, interpreting it relative to URL.
+
+The buffer must have been retrieved by HTTP or HTTPS and contain an
+XML document.
+"
+  (declare (special url-http-content-type
+		    url-http-response-status
+		    url-http-end-of-headers))
+  (let ((tree nil)
+	(overall-status nil))
+    (when buffer
+      (unwind-protect
+	  (save-excursion
+	    (set-buffer buffer)
+	    (goto-char url-http-end-of-headers)
+	    (setq overall-status url-http-response-status)
+
+	    ;; XML documents can be transferred as either text/xml or
+	    ;; application/xml, and we are required to accept both of
+	    ;; them.
+	    (if (and
+		 url-http-content-type
+		 (or (string-match "^text/xml" url-http-content-type)
+		     (string-match "^application/xml" url-http-content-type)))
+		(setq tree (xml-parse-region (point) (point-max)))))
+	;; Clean up after ourselves.
+	'(kill-buffer buffer)))
+
+    ;; We should now have a parse tree; check whether the server gave us
+    ;; a full DAV:multistatus response or a singleton response.
+    (if (eq (xml-node-name (car tree)) 'DAV:multistatus)
+	(url-dav-dispatch-node (car tree))
+      (url-debug 'dav "Got back singleton response for URL(%S)" url)
+      (let ((properties (url-dav-dispatch-node (car tree))))
+	;; We need to make sure we have a DAV:status node in there for
+	;; higher-level code;
+	(setq properties (plist-put properties 'DAV:status overall-status))
+	;; Make this look like a DAV:multistatus parse tree so that
+	;; nobody but us needs to know the difference.
+	(list (cons url properties))))))
+
+(defun url-dav-request (url method tag body
+				 &optional depth headers namespaces)
+  "Performs WebDAV operation METHOD on URL.  Returns the parsed responses.
+Automatically creates an XML request body if TAG is non-nil.
+BODY is the XML document fragment to be enclosed by <TAG></TAG>.
+
+DEPTH is how deep the request should propagate.  Default is 0, meaning
+it should apply only to URL.  A negative number means to use
+`Infinity' for the depth.  Not all WebDAV servers support this depth
+though.
+
+HEADERS is an assoc list of extra headers to send in the request.
+
+NAMESPACES is an assoc list of (NAMESPACE . EXPANSION), and these are
+added to the <TAG> element.  The DAV=DAV: namespace is automatically
+added to this list, so most requests can just pass in nil.
+"
+  ;; Take care of the default value for depth...
+  (setq depth (or depth 0))
+
+  ;; Now lets translate it into something webdav can understand.
+  (if (< depth 0)
+      (setq depth "Infinity")
+    (setq depth (int-to-string depth)))
+  (if (not (assoc "DAV" namespaces))
+      (setq namespaces (cons '("DAV" . "DAV:") namespaces)))
+
+  (let* ((url-request-extra-headers `(("Depth" . ,depth)
+				      ("Content-type" . "text/xml")
+				      ,@headers))
+	 (url-request-method method)
+	 (url-request-data
+	  (if tag
+	      (concat
+	       "<?xml version=\"1.0\" encoding=\"utf-8\" ?>\n"
+	       "<" (symbol-name tag) " "
+	       ;; add in the appropriate namespaces...
+	       (mapconcat (lambda (ns)
+			    (concat "xmlns:" (car ns) "='" (cdr ns) "'"))
+			  namespaces "\n    ")
+	       ">\n"
+	       body
+	       "</" (symbol-name tag) ">\n"))))
+    (url-dav-process-response (url-retrieve-synchronously url) url)))
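+
+;; Illustrative sketch of how the pieces above combine (the URL below is
+;; hypothetical, not part of this file); this is essentially what
+;; `url-dav-get-properties' does for a PROPFIND:
+;;
+;;   (url-dav-request "http://example.com/dav/" "PROPFIND" 'DAV:propfind
+;;                    "  <DAV:allprop/>" 1)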
+
+;;;###autoload
+(defun url-dav-get-properties (url &optional attributes depth namespaces)
+  "Return properties for URL, up to DEPTH levels deep.
+
+Returns an assoc list, where the key is the filename (possibly a full
+URI), and the value is a standard property list of DAV property
+names (e.g. DAV:resourcetype) and their values.
+"
+  (url-dav-request url "PROPFIND" 'DAV:propfind
+		   (if attributes
+		       (mapconcat (lambda (attr)
+				    (concat "<DAV:prop><"
+					    (symbol-name attr)
+					    "/></DAV:prop>"))
+				  attributes "\n  ")
+		     "  <DAV:allprop/>")
+		   depth nil namespaces))
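+
+;; Usage sketch (illustrative only; the URL is hypothetical).  The
+;; result is an assoc list keyed by href, whose values are property
+;; lists as described in the docstring above:
+;;
+;;   (url-dav-get-properties "http://example.com/dav/" '(DAV:resourcetype) 1)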
+
+(defmacro url-dav-http-success-p (status)
+  "Return whether PROPERTIES was the result of a successful DAV request."
+  `(= (/ (or ,status 500) 100) 2))
+
+
+;;; Locking support
+(defvar url-dav-lock-identifier (concat "mailto:" user-mail-address)
+  "*URL used as contact information when creating locks in DAV.
+This will be used as the contents of the DAV:owner/DAV:href tag to
+identify the owner of a LOCK when requesting it.  This will be shown
+to other users when the DAV:lockdiscovery property is requested, so
+make sure you are comfortable with it leaking to the outside world.
+")
+
+;;;###autoload
+(defun url-dav-lock-resource (url exclusive &optional depth)
+  "Request a lock on URL.  If EXCLUSIVE is non-nil, get an exclusive lock.
+Optional 3rd argument DEPTH says how deep the lock should go, default is 0
+\(lock only the resource and none of its children\).
+
+Returns a cons-cell of (SUCCESSFUL-RESULTS . FAILURE-RESULTS).
+SUCCESSFUL-RESULTS is a list of (URL STATUS locktoken).
+FAILURE-RESULTS is a list of (URL STATUS).
+"
+  (setq	exclusive (if exclusive "<DAV:exclusive/>" "<DAV:shared/>"))
+  (let* ((body
+	  (concat
+	   "  <DAV:lockscope>" exclusive "</DAV:lockscope>\n"
+	   "  <DAV:locktype> <DAV:write/> </DAV:locktype>\n"
+	   "  <DAV:owner>\n"
+	   "    <DAV:href>" url-dav-lock-identifier "</DAV:href>\n"
+	   "  </DAV:owner>\n"))
+	 (response nil)		  ; Responses to the LOCK request
+	 (result nil)		  ; For walking thru the response list
+	 (child-url nil)
+	 (child-status nil)
+	 (failures nil)		  ; List of failure cases (URL . STATUS)
+	 (successes nil))	  ; List of success cases (URL . STATUS)
+    (setq response (url-dav-request url "LOCK" 'DAV:lockinfo body
+				    depth '(("Timeout" . "Infinite"))))
+
+    ;; Get the parent URL ready for expand-file-name
+    (if (not (vectorp url))
+	(setq url (url-generic-parse-url url)))
+
+    ;; Walk thru the response list, fully expand the URL, and grab the
+    ;; status code.
+    (while response
+      (setq result (pop response)
+	    child-url (url-expand-file-name (pop result) url)
+	    child-status (or (plist-get result 'DAV:status) 500))
+      (if (url-dav-http-success-p child-status)
+	  (push (list child-url child-status "huh") successes)
+	(push (list child-url child-status) failures)))
+    (cons successes failures)))
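+
+;; Usage sketch (hypothetical URL): request an exclusive lock on a
+;; single resource; keep the returned lock tokens for a later UNLOCK or
+;; "If:" header.
+;;
+;;   (url-dav-lock-resource "http://example.com/dav/report.txt" t 0)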
+
+;;;###autoload
+(defun url-dav-active-locks (url &optional depth)
+  "Return an assoc list of all active locks on URL."
+  (let ((response (url-dav-get-properties url '(DAV:lockdiscovery) depth))
+	(properties nil)
+	(child nil)
+	(child-url nil)
+	(child-results nil)
+	(results nil))
+    (if (not (vectorp url))
+	(setq url (url-generic-parse-url url)))
+
+    (while response
+      (setq child (pop response)
+	    child-url (pop child)
+	    child-results nil)
+      (when (and (url-dav-http-success-p (plist-get child 'DAV:status))
+		 (setq child (plist-get child 'DAV:lockdiscovery)))
+	;; After our parser has had its way with it, the
+	;; DAV:lockdiscovery property is a list of DAV:activelock
+	;; objects, each of which is an assoc list of properties and
+	;; values.
+	(while child
+	  (if (assq 'DAV:locktoken (car child))
+	      (let ((tokens (cdr (assq 'DAV:locktoken (car child))))
+		    (owners (cdr (assq 'DAV:owner (car child)))))
+		(dolist (token tokens)
+		  (dolist (owner owners)
+		    (push (cons token owner) child-results)))))
+	  (pop child)))
+      (if child-results
+	  (push (cons (url-expand-file-name child-url url) child-results)
+		results)))
+    results))
+
+;;;###autoload
+(defun url-dav-unlock-resource (url lock-token)
+  "Release the lock on URL represented by LOCK-TOKEN.
+Returns `t' iff the lock was successfully released.
+"
+  (declare (special url-http-response-status))
+  (let* ((url-request-extra-headers (list (cons "Lock-Token"
+						(concat "<" lock-token ">"))))
+	 (url-request-method "UNLOCK")
+	 (url-request-data nil)
+	 (buffer (url-retrieve-synchronously url))
+	 (result nil))
+    (when buffer
+      (unwind-protect
+	  (save-excursion
+	    (set-buffer buffer)
+	    (setq result (url-dav-http-success-p url-http-response-status)))
+	(kill-buffer buffer)))
+    result))
+
+
+;;; file-name-handler stuff
+(defun url-dav-file-attributes-mode-string (properties)
+  (let ((modes (make-string 10 ?-))
+	(supported-locks (plist-get properties 'DAV:supportedlock))
+	(executable-p (equal (plist-get properties 'http://apache.org/dav/props/executable)
+			     "T"))
+	(directory-p (memq 'DAV:collection (plist-get properties 'DAV:resourcetype)))
+	(readable t)
+	(lock nil))
+    ;; Assume we can read this, otherwise the PROPFIND would have
+    ;; failed.
+    (when readable
+      (aset modes 1 ?r)
+      (aset modes 4 ?r)
+      (aset modes 7 ?r))
+
+    (when directory-p
+      (aset modes 0 ?d))
+
+    (when executable-p
+      (aset modes 3 ?x)
+      (aset modes 6 ?x)
+      (aset modes 9 ?x))
+
+    (while supported-locks
+      (setq lock (car supported-locks)
+	    supported-locks (cdr supported-locks))
+      (case (car lock)
+	(DAV:write
+	 (case (cdr lock)
+	   (DAV:shared			; group permissions (possibly world)
+	    (aset modes 5 ?w))
+	   (DAV:exclusive
+	    (aset modes 2 ?w))		; owner permissions?
+	   (otherwise
+	    (url-debug 'dav "Unrecognized DAV:lockscope (%S)" (cdr lock)))))
+	(otherwise
+	 (url-debug 'dav "Unrecognized DAV:locktype (%S)" (car lock)))))
+    modes))
+
+;;;###autoload
+(defun url-dav-file-attributes (url)
+  (let ((properties (cdar (url-dav-get-properties url)))
+	(attributes nil))
+    (if (and properties
+	     (url-dav-http-success-p (plist-get properties 'DAV:status)))
+	;; We got a good DAV response back..
+	(setq attributes
+	      (list
+	       ;; t for directory, string for symbolic link, or nil
+	       ;; Need to support DAV Bindings to figure out the
+	       ;; symbolic link issues.
+	       (if (memq 'DAV:collection (plist-get properties 'DAV:resourcetype)) t nil)
+
+	       ;; Number of links to file... Needs DAV Bindings.
+	       1
+
+	       ;; File uid - no way to figure out?
+	       0
+
+	       ;; File gid - no way to figure out?
+	       0
+
+	       ;; Last access time - ???
+	       nil
+
+	       ;; Last modification time
+	       (plist-get properties 'DAV:getlastmodified)
+
+	       ;; Last status change time... just reuse last-modified
+	       ;; for now.
+	       (plist-get properties 'DAV:getlastmodified)
+
+	       ;; size in bytes
+	       (or (plist-get properties 'DAV:getcontentlength) 0)
+
+	       ;; file modes as a string like `ls -l'
+	       ;; 
+	       ;; Should be able to build this up from the
+	       ;; DAV:supportedlock attribute pretty easily.  Getting
+	       ;; the group info could be impossible though.
+	       (url-dav-file-attributes-mode-string properties)
+
+	       ;; t iff file's gid would change if it were deleted &
+	       ;; recreated.  No way for us to know that thru DAV.
+	       nil
+
+	       ;; inode number - meaningless
+	       nil
+
+	       ;; device number - meaningless
+	       nil))
+      ;; Fall back to just the normal http way of doing things.
+      (setq attributes (url-http-head-file-attributes url)))
+    attributes))
+
+;;;###autoload
+(defun url-dav-save-resource (url obj &optional content-type lock-token)
+  "Save OBJ as URL using WebDAV.
+URL must be a fully qualified URL.
+OBJ may be a buffer or a string."
+  (let ((buffer nil)
+	(result nil)
+	(url-request-extra-headers nil)
+	(url-request-method "PUT")
+	(url-request-data
+	 (cond
+	  ((bufferp obj)
+	   (save-excursion
+	     (set-buffer obj)
+	     (buffer-string)))
+	  ((stringp obj)
+	   obj)
+	  (t
+	   (error "Invalid object to url-dav-save-resource")))))
+
+    (if lock-token
+	(push
+	 (cons "If" (concat "(<" lock-token ">)"))
+	 url-request-extra-headers))
+
+    ;; Everything must always have a content-type when we submit it.
+    (push
+     (cons "Content-type" (or content-type "application/octet-stream"))
+     url-request-extra-headers)
+
+    ;; Do the save...
+    (setq buffer (url-retrieve-synchronously url))
+
+    ;; Sanity checking
+    (when buffer
+      (unwind-protect
+	  (save-excursion
+	    (set-buffer buffer)
+	    (setq result (url-dav-http-success-p url-http-response-status)))
+	(kill-buffer buffer)))
+    result))
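+
+;; Usage sketch (hypothetical URL and content); a lock token obtained
+;; from `url-dav-lock-resource' may be passed as the last argument.
+;;
+;;   (url-dav-save-resource "http://example.com/dav/notes.txt"
+;;                          "hello, world\n" "text/plain")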
+
+(eval-when-compile
+  (defmacro url-dav-delete-something (url lock-token &rest error-checking)
+    "Delete URL completely, with no sanity checking whatsoever.  DO NOT USE.
+This is defined as a macro that will not be visible from compiled files.
+Use with care, and even then think three times.
+"
+    `(progn
+       ,@error-checking
+       (url-dav-request ,url "DELETE" nil nil -1
+			(if ,lock-token
+			    (list
+			     (cons "If"
+				   (concat "(<" ,lock-token ">)"))))))))
+
+
+;;;###autoload
+(defun url-dav-delete-directory (url &optional recursive lock-token)
+  "Delete the WebDAV collection URL.
+If optional second argument RECURSIVE is non-nil, then delete all
+files in the collection as well.
+"
+  (let ((status nil)
+	(props nil))
+    (setq props (url-dav-delete-something
+		 url lock-token
+		 (setq props (url-dav-get-properties url '(DAV:getcontenttype) 1))
+		 (if (and (not recursive)
+			  (/= (length props) 1))
+		     (signal 'file-error (list "Removing directory"
+					       "directory not empty" url)))))
+
+     (mapc (lambda (result)
+	     (setq status (plist-get (cdr result) 'DAV:status))
+	     (if (not (url-dav-http-success-p status))
+		 (signal 'file-error (list "Removing directory"
+					   "Error removing"
+					   (car result) status))))
+	   props))
+  nil)
+
+;;;###autoload
+(defun url-dav-delete-file (url &optional lock-token)
+  "Delete file named URL."
+  (let ((props nil)
+	(status nil))
+    (setq props (url-dav-delete-something
+		 url lock-token
+		 (setq props (url-dav-get-properties url))
+		 (if (memq 'DAV:collection (plist-get (cdar props) 'DAV:resourcetype))
+		     (signal 'file-error (list "Removing old name" "is a collection" url)))))
+
+    (mapc (lambda (result)
+	    (setq status (plist-get (cdr result) 'DAV:status))
+	    (if (not (url-dav-http-success-p status))
+		(signal 'file-error (list "Removing old name"
+					  "Error removing"
+					  (car result) status))))
+	  props))
+  nil)
+
+;;;###autoload
+(defun url-dav-directory-files (url &optional full match nosort files-only)
+  "Return a list of names of files in DIRECTORY.
+There are three optional arguments:
+If FULL is non-nil, return absolute file names.  Otherwise return names
+ that are relative to the specified directory.
+If MATCH is non-nil, mention only file names that match the regexp MATCH.
+If NOSORT is non-nil, the list is not sorted--its order is unpredictable.
+ NOSORT is useful if you plan to sort the result yourself.
+"
+  (let ((properties (url-dav-get-properties url '(DAV:resourcetype) 1))
+	(child-url nil)
+	(child-props nil)
+	(files nil)
+	(parsed-url (url-generic-parse-url url)))
+
+    (if (= (length properties) 1)
+	(signal 'file-error (list "Opening directory" "not a directory" url)))
+
+    (while properties
+      (setq child-props (pop properties)
+	    child-url (pop child-props))
+      (if (and (memq 'DAV:collection (plist-get child-props 'DAV:resourcetype))
+	       files-only)
+	  ;; It is a directory, and we were told to return just files.
+	  nil
+
+	;; Fully expand the URL and then rip off the beginning if we
+	;; are not supposed to return fully-qualified names.
+	(setq child-url (url-expand-file-name child-url parsed-url))
+	(if (not full)
+	    (setq child-url (substring child-url (length url))))
+
+	;; We don't want '/' as the last character in filenames...
+	(if (string-match "/$" child-url)
+	    (setq child-url (substring child-url 0 -1)))
+
+	;; If we have a match criteria, then apply it.
+	(if (or (and match (not (string-match match child-url)))
+		(string= child-url "")
+		(string= child-url url))
+	    nil
+	  (push child-url files))))
+
+    (if nosort
+	files
+      (sort files 'string-lessp))))
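+
+;; Usage sketch (hypothetical URL): list the Lisp files one level below
+;; a collection, skipping sub-collections.
+;;
+;;   (url-dav-directory-files "http://example.com/dav/" nil "\\.el$" nil t)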
+
+;;;###autoload
+(defun url-dav-file-directory-p (url)
+  "Return t if URL names an existing DAV collection."
+  (let ((properties (cdar (url-dav-get-properties url '(DAV:resourcetype)))))
+    (and (memq 'DAV:collection (plist-get properties 'DAV:resourcetype)) t)))
+
+;;;###autoload
+(defun url-dav-make-directory (url &optional parents)
+  "Create the directory DIR and any nonexistent parent dirs."
+  (declare (special url-http-response-status))
+  (let* ((url-request-extra-headers nil)
+	 (url-request-method "MKCOL")
+	 (url-request-data nil)
+	 (buffer (url-retrieve-synchronously url))
+	 (result nil))
+    (when buffer
+      (unwind-protect
+	  (save-excursion
+	    (set-buffer buffer)
+	    (case url-http-response-status
+	      (201			; Collection created in its entirety
+	       (setq result t))
+	      (403			; Forbidden
+	       nil)
+	      (405			; Method not allowed
+	       nil)
+	      (409			; Conflict
+	       nil)
+	      (415 			; Unsupported media type (WTF?)
+	       nil)
+	      (507			; Insufficient storage
+	       nil)
+	      (otherwise
+	       nil)))
+	(kill-buffer buffer)))
+    result))
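+
+;; Usage sketch (hypothetical URL); note that, unlike `make-directory',
+;; this returns non-nil only when the collection was actually created
+;; (HTTP 201).
+;;
+;;   (url-dav-make-directory "http://example.com/dav/new-collection/")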
+
+;;;###autoload
+(defun url-dav-rename-file (oldname newname &optional overwrite)
+  (if (not (and (string-match url-handler-regexp oldname)
+		(string-match url-handler-regexp newname)))
+      (signal 'file-error "Cannot rename between different URL backends" oldname newname))
+
+  (let* ((headers nil)
+	 (props nil)
+	 (status nil)
+	 (directory-p (url-dav-file-directory-p oldname))
+	 (exists-p (url-http-file-exists-p newname)))
+
+    (if (and exists-p
+	     (or 
+	      (null overwrite)
+	      (and (numberp overwrite)
+		   (not (yes-or-no-p
+			 (format "File %s already exists; rename to it anyway? "
+				 newname))))))
+	(signal 'file-already-exists (list "File already exists" newname)))
+
+    ;; Honor the overwrite flag...
+    (if overwrite (push '("Overwrite" . "T") headers))
+
+    ;; Have to tell them where to copy it to!
+    (push (cons "Destination" newname) headers)
+
+    ;; Always send a depth of -1 in case we are moving a collection.
+    (setq props (url-dav-request oldname "MOVE" nil nil (if directory-p -1 0)
+				 headers))
+
+    (mapc (lambda (result)
+	    (setq status (plist-get (cdr result) 'DAV:status))
+
+	    (if (not (url-dav-http-success-p status))
+		(signal 'file-error (list "Renaming" oldname newname status))))
+	  props)
+    t))
+
+;;;###autoload
+(defun url-dav-file-name-all-completions (file url)
+  "Return a list of all completions of file name FILE in directory DIRECTORY.
+These are all file names in directory DIRECTORY which begin with FILE.
+"
+  (url-dav-directory-files url nil (concat "^" file ".*")))
+
+;;;###autoload
+(defun url-dav-file-name-completion (file url)
+  "Complete file name FILE in directory DIRECTORY.
+Returns the longest string
+common to all file names in DIRECTORY that start with FILE.
+If there is only one and FILE matches it exactly, returns t.
+Returns nil if DIR contains no name starting with FILE.
+"
+  (let ((matches (url-dav-file-name-all-completions file url))
+	(result nil))
+    (cond
+     ((null matches)
+      ;; No matches
+      nil)
+     ((and (= (length matches) 1)
+	   (string= file (car matches)))
+      ;; Only one file and FILE matches it exactly...
+      t)
+     (t
+      ;; Need to figure out the longest string that they have in common
+      (setq matches (sort matches (lambda (a b) (> (length a) (length b)))))
+      (let ((n (length file))
+	    (searching t)
+	    (regexp nil)
+	    (failed nil))
+	(while (and searching
+		    (< n (length (car matches))))
+	  (setq regexp (concat "^" (substring (car matches) 0 (1+ n)))
+		failed nil)
+	  (dolist (potential matches)
+	    (if (not (string-match regexp potential))
+		(setq failed t)))
+	  (if failed
+	      (setq searching nil)
+	    (incf n)))
+	(substring (car matches) 0 n))))))
+
+(defun url-dav-register-handler (op)
+  (put op 'url-file-handlers (intern-soft (format "url-dav-%s" op))))
+
+(mapcar 'url-dav-register-handler
+	'(file-name-all-completions
+	  file-name-completion
+	  rename-file
+	  make-directory
+	  file-directory-p
+	  directory-files
+	  delete-file
+	  delete-directory
+	  file-attributes))
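+
+;; Once registered, `url-file-handler' (defined in url-handlers.el)
+;; consults the `url-file-handlers' symbol property of an operation, so
+;; a call such as (rename-file URL1 URL2) on URLs matched by
+;; `url-handler-regexp' is routed to `url-dav-rename-file'.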
+
+
+;;; Version Control backend cruft
+
+;(put 'vc-registered 'url-file-handlers 'url-dav-vc-registered)
+
+;;;###autoload
+(defun url-dav-vc-registered (url)
+  (if (and (string-match "\\`https?" url)
+	   (plist-get (url-http-options url) 'dav))
+      (progn
+	(vc-file-setprop url 'vc-backend 'dav)
+	t)))
+
+
+;;; Miscellaneous stuff.
+
+(provide 'url-dav)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-dired.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,102 @@
+;;; url-dired.el --- URL Dired minor mode
+;; Author: $Author: fx $
+;; Created: $Date: 2001/05/05 16:44:20 $
+;; Version: $Revision: 1.3 $
+;; Keywords: comm, files
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1993 - 1996 by William M. Perry <wmperry@cs.indiana.edu>
+;;; Copyright (c) 1996 - 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(autoload 'w3-fetch "w3")
+(autoload 'w3-open-local "w3")
+(autoload 'dired-get-filename "dired")
+
+(defvar url-dired-minor-mode-map
+  (let ((map (make-sparse-keymap)))
+    (define-key map "\C-m" 'url-dired-find-file)
+    (if (featurep 'xemacs)
+	(define-key map [button2] 'url-dired-find-file-mouse)
+      (define-key map [mouse-2] 'url-dired-find-file-mouse))
+    map)
+  "Keymap used when browsing directories.")
+
+(defvar url-dired-minor-mode nil
+  "Whether we are in url-dired-minor-mode")
+
+(make-variable-buffer-local 'url-dired-minor-mode)
+
+(defun url-dired-find-file ()
+  "In dired, visit the file or directory named on this line, using Emacs-W3."
+  (interactive)
+  (let ((filename (dired-get-filename)))
+    (cond ((string-match "/\\(.*@.*\\):\\(/.*\\)" filename)
+	   (w3-fetch (concat "file://" (match-string 1 filename) (match-string 2 filename))))
+	  (t
+	   (w3-open-local filename)))))
+
+(defun url-dired-find-file-mouse (event)
+  "In dired, visit the file or directory name you click on, using Emacs-W3."
+  (interactive "@e")
+  (mouse-set-point event)
+  (url-dired-find-file))
+
+(defun url-dired-minor-mode (&optional arg)
+  "Minor mode for directory browsing with Emacs-W3."
+  (interactive "P")
+  (cond
+   ((null arg)
+    (setq url-dired-minor-mode (not url-dired-minor-mode)))
+   ((equal 0 arg)
+    (setq url-dired-minor-mode nil))
+   (t
+    (setq url-dired-minor-mode t))))
+
+(if (not (fboundp 'add-minor-mode))
+    (defun add-minor-mode (toggle name &optional keymap after toggle-fun)
+      "Add a minor mode to `minor-mode-alist' and `minor-mode-map-alist'.
+TOGGLE is a symbol which is used as the variable which toggles the minor mode,
+NAME is the name that should appear in the modeline (it should be a string
+beginning with a space), KEYMAP is a keymap to make active when the minor
+mode is active, and AFTER is the toggling symbol used for another minor
+mode.  If AFTER is non-nil, then it is used to position the new mode in the
+minor-mode alists.  TOGGLE-FUN specifies an interactive function that
+is called to toggle the mode on and off; this affects what happens when
+button2 is pressed on the mode, and when button3 is pressed somewhere
+in the list of modes.  If TOGGLE-FUN is nil and TOGGLE names an
+interactive function, TOGGLE is used as the toggle function.
+
+Example:  (add-minor-mode 'view-minor-mode \" View\" view-mode-map)"
+      (if (not (assq toggle minor-mode-alist))
+	  (setq minor-mode-alist (cons (list toggle name) minor-mode-alist)))
+      (if (and keymap (not (assq toggle minor-mode-map-alist)))
+	  (setq minor-mode-map-alist (cons (cons toggle keymap)
+					   minor-mode-map-alist)))))
+
+(add-minor-mode 'url-dired-minor-mode " URL" url-dired-minor-mode-map)
+
+(defun url-find-file-dired (dir)
+  "\"Edit\" directory DIR, but with additional URL-friendly bindings."
+  (interactive "DURL Dired (directory): ")
+  (find-file dir)
+  (url-dired-minor-mode t))
+
+(provide 'url-dired)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-expand.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,143 @@
+;;; url-expand.el --- expand-file-name for URLs
+;; Author: $Author: wmperry $
+;; Created: $Date: 1999/12/05 08:09:15 $
+;; Version: $Revision: 1.3 $
+;; Keywords: comm, data, processes
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(require 'url-methods)
+(require 'url-util)
+(require 'url-parse)
+
+(defun url-expander-remove-relative-links (name)
+  ;; Strip . and .. from pathnames
+  (let ((new (if (not (string-match "^/" name))
+		 (concat "/" name)
+	       name)))
+
+    ;; If it ends with a '/.' or '/..', tack on a trailing '/' so that
+    ;; the tests that follow are not too complicated in terms of
+    ;; looking for '..' or '../', etc.
+    (if (string-match "/\\.+$" new)
+	(setq new (concat new "/")))
+
+    ;; Remove '/./' first
+    (while (string-match "/\\(\\./\\)" new)
+      (setq new (concat (substring new 0 (match-beginning 1))
+			(substring new (match-end 1)))))
+
+    ;; Then remove '/../'
+    (while (string-match "/\\([^/]*/\\.\\./\\)" new)
+      (setq new (concat (substring new 0 (match-beginning 1))
+			(substring new (match-end 1)))))
+
+    ;; Remove cruft at the beginning of the string, so people who put
+    ;; in extraneous '..' won't lose.
+    (while (string-match "^/\\.\\.\\(/\\)" new)
+      (setq new (substring new (match-beginning 1) nil)))
+    new))
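+
+;; For example, the canonicalization above turns
+;;   (url-expander-remove-relative-links "/a/b/../c/./d")
+;; into "/a/c/d".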
+
+(defun url-expand-file-name (url &optional default)
+  "Convert URL to a fully specified URL, and canonicalize it.
+Second arg DEFAULT is a URL to start with if URL is relative.
+If DEFAULT is nil or missing, the current buffer's URL is used.
+Path components that are `.' are removed, and 
+path components followed by `..' are removed, along with the `..' itself."
+  (if (and url (not (string-match "^#" url)))
+      ;; Need to nuke newlines and spaces in the URL, or we open
+      ;; ourselves up to potential security holes.
+      (setq url (mapconcat (function (lambda (x)
+				       (if (memq x '(?  ?\n ?\r))
+					   ""
+					 (char-to-string x))))
+			   url "")))
+
+  ;; Need to figure out how/where to expand the fragment relative to
+  (setq default (cond
+		 ((vectorp default)
+		  ;; Default URL has already been parsed
+		  default)
+		 (default
+		   ;; They gave us a default URL in non-parsed format
+		   (url-generic-parse-url default))
+		 (url-current-object
+		  ;; We are in a URL-based buffer, use the pre-parsed object
+		  url-current-object)
+		 ((string-match url-nonrelative-link url)
+		  ;; The URL they gave us is absolute, go for it.
+		  nil)
+		 (t
+		  ;; Hmmm - this shouldn't ever happen.
+		  (error "url-expand-file-name confused - no default?"))))
+
+  (cond
+   ((= (length url) 0)			; nil or empty string
+    (url-recreate-url default))
+   ((string-match "^#" url)		; Offset link, use it raw
+    url)
+   ((string-match url-nonrelative-link url) ; Fully-qualified URL, return it immediately
+    url)
+   (t
+    (let* ((urlobj (url-generic-parse-url url))
+	   (inhibit-file-name-handlers t)
+	   (expander (url-scheme-get-property (url-type default) 'expand-file-name)))
+      (if (string-match "^//" url)
+	  (setq urlobj (url-generic-parse-url (concat (url-type default) ":"
+						      url))))
+      (funcall expander urlobj default)
+      (url-recreate-url urlobj)))))
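+
+;; Usage sketch (hypothetical URLs): resolving a relative reference
+;; against an absolute base should give roughly
+;;
+;;   (url-expand-file-name "../other.html" "http://example.com/dir/page.html")
+;;   ;; => "http://example.com/other.html"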
+
+(defun url-identity-expander (urlobj defobj)
+  (url-set-type urlobj (or (url-type urlobj) (url-type defobj))))
+
+(defun url-default-expander (urlobj defobj)
+  ;; The default expansion routine - urlobj is modified by side effect!
+  (if (url-type urlobj)
+      ;; Well, they told us the scheme, let's just go with it.
+      nil
+    (url-set-type urlobj (or (url-type urlobj) (url-type defobj)))
+    (url-set-port urlobj (or (url-port urlobj)
+			     (and (string= (url-type urlobj)
+					   (url-type defobj))
+				  (url-port defobj))))
+    (if (not (string= "file" (url-type urlobj)))
+	(url-set-host urlobj (or (url-host urlobj) (url-host defobj))))
+    (if (string= "ftp"  (url-type urlobj))
+	(url-set-user urlobj (or (url-user urlobj) (url-user defobj))))
+    (if (string= (url-filename urlobj) "")
+	(url-set-filename urlobj "/"))
+    (if (string-match "^/" (url-filename urlobj))
+	nil
+      (let ((query nil)
+	    (file nil)
+	    (sepchar nil))
+	(if (string-match "[?#]" (url-filename urlobj))
+	    (setq query (substring (url-filename urlobj) (match-end 0))
+		  file (substring (url-filename urlobj) 0 (match-beginning 0))
+		  sepchar (substring (url-filename urlobj) (match-beginning 0) (match-end 0)))
+	  (setq file (url-filename urlobj)))
+	(setq file (url-expander-remove-relative-links
+		    (concat (url-basepath (url-filename defobj)) file)))
+	(url-set-filename urlobj (if query (concat file sepchar query) file))))))
+
+(provide 'url-expand)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-file.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,239 @@
+;;; url-file.el --- File retrieval code
+;; Author: $Author: fx $
+;; Created: $Date: 2002/04/22 09:14:24 $
+;; Version: $Revision: 1.11 $
+;; Keywords: comm, data, processes
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1993 - 1996 by William M. Perry <wmperry@cs.indiana.edu>
+;;; Copyright (c) 1996 - 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(eval-when-compile (require 'cl))
+(require 'mailcap)
+(require 'url-vars)
+(require 'url-parse)
+(require 'url-dired)
+
+(defconst url-file-default-port 21 "Default FTP port.")
+(defconst url-file-asynchronous-p t "FTP transfers are asynchronous.")
+(defalias 'url-file-expand-file-name 'url-default-expander)
+
+(defun url-file-find-possibly-compressed-file (fname &rest args)
+  "Find the exact file referenced by `fname'.
+This tries the common compression extensions, because things like
+ange-ftp and efs are not quite smart enough to realize when a server
+can do automatic decompression for them, and won't find 'foo' if
+'foo.gz' exists, even though the ftp server would happily serve it up
+to them."
+  (let ((scratch nil)
+	(compressed-extensions '("" ".gz" ".z" ".Z" ".bz2"))
+	(found nil))
+    (while (and compressed-extensions (not found))
+      (if (file-exists-p (setq scratch (concat fname (pop compressed-extensions))))
+	  (setq found scratch)))
+    found))
+
+(defun url-file-host-is-local-p (host)
+  "Return t iff HOST references our local machine."
+  (let ((case-fold-search t))
+    (or
+     (null host)
+     (string= "" host)
+     (equal (downcase host) (downcase (system-name)))
+     (and (string-match "^localhost$" host) t)
+     (and (not (string-match (regexp-quote ".") host))
+	  (equal (downcase host) (if (string-match (regexp-quote ".")
+						   (system-name))
+				     (substring (system-name) 0
+						(match-beginning 0))
+				   (system-name)))))))
+
+(defun url-file-asynch-callback (x y name buff func args &optional efs)
+  (if (not (featurep 'ange-ftp))
+      ;; EFS passes us an extra argument
+      (setq name buff
+	    buff func
+	    func args
+	    args efs))
+  (let ((size (nth 7 (file-attributes name))))
+    (save-excursion
+      (set-buffer buff)
+      (goto-char (point-max))
+      (if (/= -1 size)
+	  (insert (format "Content-length: %d\n" size)))
+      (insert "\n")
+      (insert-file-contents-literally name)
+      (if (not (url-file-host-is-local-p (url-host url-current-object)))
+	  (condition-case ()
+	      (delete-file name)
+	    (error nil)))
+      (apply func args))))
+
+(defun url-file-build-filename (url)
+  (if (not (vectorp url))
+      (setq url (url-generic-parse-url url)))
+  (let* ((user (url-user url))
+	 (pass (url-password url))
+	 (port (url-port url))
+	 (host (url-host url))
+	 (site (if (and port (/= port 21))
+		   (if (featurep 'ange-ftp)
+		       (format "%s %d" host port)
+		     ;; This works in Emacs 21's ange-ftp too.
+		     (format "%s#%d" host port))
+		 host))
+	 (file (url-unhex-string (url-filename url)))
+	 (filename (if (or user (not (url-file-host-is-local-p host)))
+		       (concat "/" (or user "anonymous") "@" site ":" file)
+		     (if (and (memq system-type
+				    '(emx ms-dos windows-nt ms-windows))
+			      (string-match "^/[a-zA-Z]:/" file))
+			 (substring file 1)
+		       file)))
+	 pos-index)
+
+    (and user pass
+	 (cond
+	  ((featurep 'ange-ftp)
+	   (ange-ftp-set-passwd host user pass))
+	  ((or (featurep 'efs) (featurep 'efs-auto))
+	   (efs-set-passwd host user pass))
+	  (t
+	   nil)))
+
+    ;; This makes sure that directories have a trailing directory
+    ;; separator on them so URL expansion works right.
+    ;;
+    ;; FIXME?  What happens if the remote system doesn't use our local
+    ;; directory-sep-char as its separator?  Would it be safer to just
+    ;; use '/' unconditionally and rely on the FTP server to
+    ;; straighten it out for us?
+    (if (and (file-directory-p filename)
+	     (not (string-match (format "%c$" directory-sep-char) filename)))
+	(url-set-filename url
+			  (format "%s%c" filename directory-sep-char)))
+
+    ;; If it is a directory, look for an index file first.
+    (if (and (file-directory-p filename)
+	     url-directory-index-file
+	     (setq pos-index (expand-file-name url-directory-index-file filename))
+	     (file-exists-p pos-index)
+	     (file-readable-p pos-index))
+	(setq filename pos-index))
+
+    ;; Find the (possibly compressed) file
+    (setq filename (url-file-find-possibly-compressed-file filename))
+    filename))
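+
+;; Sketch of the translation above (hypothetical host): an FTP URL such
+;; as "ftp://jdoe@ftp.example.com/pub/file.txt" is first turned into the
+;; ange-ftp/efs style name "/jdoe@ftp.example.com:/pub/file.txt", which
+;; is then checked for index files and compressed variants.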
+
+;;;###autoload
+(defun url-file (url callback cbargs)
+  "Handle file: and ftp: URLs."
+  (let* ((buffer nil)
+	 (filename nil)
+	 (uncompressed-filename nil)
+	 (content-type nil)
+	 (content-encoding nil)
+	 (coding-system-for-read 'binary))
+
+    (setq filename (url-file-build-filename url))
+
+    (if (not filename)
+	(error "File does not exist: %s" (url-recreate-url url)))
+
+    ;; Need to figure out the content-type from the real extension,
+    ;; not the compressed one.
+    (setq uncompressed-filename (if (string-match "\\.\\(gz\\|Z\\|z\\)$" filename)
+				    (substring filename 0 (match-beginning 0))
+				  filename))
+    (setq content-type (mailcap-extension-to-mime
+			(url-file-extension uncompressed-filename))
+	  content-encoding (case (intern (url-file-extension filename))
+			     ((\.z \.gz) "gzip")
+			     (\.Z "compress")
+			     (\.uue "x-uuencoded")
+			     (\.hqx "x-hqx")
+			     (\.bz2 "x-bzip2")
+			     (otherwise nil)))
+
+    (if (file-directory-p filename)
+	;; A directory is handled the same way whether we are local or remote.
+	(url-find-file-dired filename)
+      (save-excursion
+	(setq buffer (generate-new-buffer " *url-file*"))
+	(set-buffer buffer)
+	(mm-disable-multibyte)
+	(setq url-current-object url)
+	(insert "Content-type: " (or content-type "application/octet-stream") "\n")
+	(if content-encoding
+	    (insert "Content-transfer-encoding: " content-encoding "\n"))
+	(if (url-file-host-is-local-p (url-host url))
+	      ;; Local files are handled slightly oddly
+	    (if (featurep 'ange-ftp)
+		(url-file-asynch-callback nil nil
+					  filename
+					  (current-buffer)
+					  callback cbargs)
+	      (url-file-asynch-callback nil nil nil
+					filename
+					(current-buffer)
+					callback cbargs))
+	  ;; FTP handling
+	  (let* ((extension (url-file-extension filename))
+		 (new (url-generate-unique-filename
+		       (and (> (length extension) 0)
+			    (concat "%s." extension)))))
+	    (if (featurep 'ange-ftp)
+		(ange-ftp-copy-file-internal filename (expand-file-name new) t
+					     nil t
+					     (list 'url-file-asynch-callback
+						   new (current-buffer)
+						   callback cbargs)
+					     t)
+	      (autoload 'efs-copy-file-internal "efs")
+	      (efs-copy-file-internal filename (efs-ftp-path filename)
+				      new (efs-ftp-path new)
+				      t nil 0
+				      (list 'url-file-asynch-callback
+					    new (current-buffer)
+					    callback cbargs)
+				      0 nil))))))
+    buffer))
+
+(defmacro url-file-create-wrapper (method args)
+  `(defalias ',(intern (format "url-ftp-%s" method))
+     (defun ,(intern (format "url-file-%s" method)) ,args
+       ,(format "FTP/FILE URL wrapper around `%s' call." method)
+       (setq url (url-file-build-filename url))
+       (and url (,method ,@(remove '&rest (remove '&optional args)))))))
+
+(url-file-create-wrapper file-exists-p (url))
+(url-file-create-wrapper file-attributes (url))
+(url-file-create-wrapper file-symlink-p (url))
+(url-file-create-wrapper file-readable-p (url))
+(url-file-create-wrapper file-writable-p (url))
+(url-file-create-wrapper file-executable-p (url))
+(if (featurep 'xemacs)
+    (progn
+      (url-file-create-wrapper directory-files (url &optional full match nosort files-only))
+      (url-file-create-wrapper file-truename (url &optional default)))
+  (url-file-create-wrapper directory-files (url &optional full match nosort))
+  (url-file-create-wrapper file-truename (url &optional counter prev-dirs)))
+
+(provide 'url-file)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-ftp.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,44 @@
+;;; url-ftp.el --- FTP wrapper
+;; Author: $Author: wmperry $
+;; Created: $Date: 1999/11/30 12:47:21 $
+;; Version: $Revision: 1.1 $
+;; Keywords: comm, data, processes
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1993 - 1996 by William M. Perry <wmperry@cs.indiana.edu>
+;;; Copyright (c) 1996 - 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+;; We knew not what we did when we overloaded 'file' to mean 'file'
+;; and 'ftp' back in the dark ages of the web.
+;;
+;; This stub file is just here to please the auto-scheme-loading code
+;; in url-methods.el and just maps everything onto the code in
+;; url-file.
+
+(require 'url-parse)
+(require 'url-file)
+
+(defconst url-ftp-default-port 21 "Default FTP port.")
+(defconst url-ftp-asynchronous-p t "FTP transfers are asynchronous.")
+(defalias 'url-ftp-expand-file-name 'url-default-expander)
+(defalias 'url-ftp 'url-file)
+
+(provide 'url-ftp)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-gw.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,264 @@
+;;; url-gw.el --- Gateway munging for URL loading
+;; Author: Bill Perry <wmperry@gnu.org>
+;; Created: $Date: 2002/04/22 09:26:46 $
+;; $Revision: 1.8 $
+;; Keywords: comm, data, processes
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1997, 1998 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+(eval-when-compile (require 'cl))
+(require 'url-vars)
+
+;; Fixme: support SSH explicitly or via a url-gateway-rlogin-program?
+
+(autoload 'socks-open-network-stream "socks")
+(autoload 'open-ssl-stream "ssl")
+
+(defgroup url-gateway nil
+  "URL gateway variables"
+  :group 'url)
+
+(defcustom url-gateway-local-host-regexp nil
+  "*A regular expression specifying local hostnames/machines."
+  :type '(choice (const nil) regexp)
+  :group 'url-gateway)
+
+(defcustom url-gateway-prompt-pattern
+  "^[^#$%>;]*[#$%>;] *" ;; "bash\\|\$ *\r?$\\|> *\r?"
+  "*A regular expression matching a shell prompt."
+  :type 'regexp
+  :group 'url-gateway)
+
+(defcustom url-gateway-rlogin-host nil
+  "*What hostname to actually rlog into before doing a telnet."
+  :type '(choice (const nil) string)
+  :group 'url-gateway)
+
+(defcustom url-gateway-rlogin-user-name nil
+  "*Username to log into the remote machine with when using rlogin."
+  :type '(choice (const nil) string)
+  :group 'url-gateway)
+
+(defcustom url-gateway-rlogin-parameters '("telnet" "-8")
+  "*Parameters to `url-open-rlogin'.
+This list will be used as the parameter list given to rsh."
+  :type '(repeat string)
+  :group 'url-gateway)
+
+(defcustom url-gateway-telnet-host nil
+  "*What hostname to actually login to before doing a telnet."
+  :type '(choice (const nil) string)
+  :group 'url-gateway)
+
+(defcustom url-gateway-telnet-parameters '("exec" "telnet" "-8")
+  "*Parameters to `url-open-telnet'.
+This list will be executed as a command after logging in via telnet."
+  :type '(repeat string)
+  :group 'url-gateway)
+
+(defcustom url-gateway-telnet-login-prompt "^\r*.?login:"
+  "*Prompt that tells us we should send our username when loggin in w/telnet."
+  :type 'regexp
+  :group 'url-gateway)
+
+(defcustom url-gateway-telnet-password-prompt "^\r*.?password:"
+  "*Prompt that tells us we should send our password when loggin in w/telnet."
+  :type 'regexp
+  :group 'url-gateway)
+
+(defcustom url-gateway-telnet-user-name nil
+  "User name to log in via telnet with."
+  :type '(choice (const nil) string)
+  :group 'url-gateway)
+
+(defcustom url-gateway-telnet-password nil
+  "Password to use to log in via telnet with."
+  :type '(choice (const nil) string)
+  :group 'url-gateway)
+
+(defcustom url-gateway-broken-resolution nil
+  "*Whether to use nslookup to resolve hostnames.
+This should be used when your version of Emacs cannot correctly use DNS,
+but your machine can.  This usually happens if you are running a statically
+linked Emacs under SunOS 4.x."
+  :type 'boolean
+  :group 'url-gateway)
+
+(defcustom url-gateway-nslookup-program "nslookup"
+  "*If non-NIL then a string naming nslookup program."
+  :type '(choice (const :tag "None" :value nil) string)
+  :group 'url-gateway)
+
+;; Stolen from ange-ftp
+;;;###autoload
+(defun url-gateway-nslookup-host (host)
+  "Attempt to resolve the given HOST using nslookup if possible."
+  (interactive "sHost:  ")
+  (if url-gateway-nslookup-program
+      (let ((proc (start-process " *nslookup*" " *nslookup*"
+				 url-gateway-nslookup-program host))
+	    (res host))
+	(process-kill-without-query proc)
+	(save-excursion
+	  (set-buffer (process-buffer proc))
+	  (while (memq (process-status proc) '(run open))
+	    (accept-process-output proc))
+	  (goto-char (point-min))
+	  (if (re-search-forward "Name:.*\nAddress: *\\(.*\\)$" nil t)
+	      (setq res (buffer-substring (match-beginning 1)
+					  (match-end 1))))
+	  (kill-buffer (current-buffer)))
+	res)
+    host))
+
+;; Stolen from red gnus nntp.el
+(defun url-wait-for-string (regexp proc)
+  "Wait until string matching REGEXP arrives in process PROC's buffer."
+  (let ((buf (current-buffer)))
+    (goto-char (point-min))
+    (while (not (re-search-forward regexp nil t))
+      (accept-process-output proc)
+      (set-buffer buf)
+      (goto-char (point-min)))))
+
+;; Stolen from red gnus nntp.el
+(defun url-open-rlogin (name buffer host service)
+  "Open a connection using rsh."
+  (if (not (stringp service))
+      (setq service (int-to-string service)))
+  (let ((proc (if url-gateway-rlogin-user-name
+		  (start-process
+		   name buffer "rsh"
+		   url-gateway-rlogin-host "-l" url-gateway-rlogin-user-name
+		   (mapconcat 'identity
+			      (append url-gateway-rlogin-parameters
+				      (list host service)) " "))
+		(start-process
+		 name buffer "rsh" url-gateway-rlogin-host
+		 (mapconcat 'identity
+			    (append url-gateway-rlogin-parameters
+				    (list host service))
+			    " ")))))
+    (set-buffer buffer)
+    (url-wait-for-string "^\r*200" proc)
+    (beginning-of-line)
+    (delete-region (point-min) (point))
+    proc))
+
+;; Stolen from red gnus nntp.el
+(defun url-open-telnet (name buffer host service)
+  (if (not (stringp service))
+      (setq service (int-to-string service)))
+  (save-excursion
+    (set-buffer (get-buffer-create buffer))
+    (erase-buffer)
+    (let ((proc (start-process name buffer "telnet" "-8"))
+	  (case-fold-search t))
+      (when (memq (process-status proc) '(open run))
+	(process-send-string proc "set escape \^X\n")
+	(process-send-string proc (concat
+				   "open " url-gateway-telnet-host "\n"))
+	(url-wait-for-string url-gateway-telnet-login-prompt proc)
+	(process-send-string
+	 proc (concat
+	       (or url-gateway-telnet-user-name
+		   (setq url-gateway-telnet-user-name (read-string "login: ")))
+	       "\n"))
+	(url-wait-for-string url-gateway-telnet-password-prompt proc)
+	(process-send-string
+	 proc (concat
+	       (or url-gateway-telnet-password
+		   (setq url-gateway-telnet-password
+			 (funcall url-passwd-entry-func "Password: ")))
+	       "\n"))
+	(erase-buffer)
+	(url-wait-for-string url-gateway-prompt-pattern proc)
+	(process-send-string
+	 proc (concat (mapconcat 'identity
+				 (append url-gateway-telnet-parameters
+					 (list host service)) " ") "\n"))
+	(url-wait-for-string "^\r*Escape character.*\r*\n+" proc)
+	(delete-region (point-min) (match-end 0))
+	(process-send-string proc "\^]\n")
+	(url-wait-for-string "^telnet" proc)
+	(process-send-string proc "mode character\n")
+	(accept-process-output proc 1)
+	(sit-for 1)
+	(goto-char (point-min))
+	(forward-line 1)
+	(delete-region (point) (point-max)))
+      proc)))
+
+;;;###autoload
+(defun url-open-stream (name buffer host service)
+  "Open a stream to HOST, possibly via a gateway.
+Args per `open-network-stream'.
+Will not make a connection if `url-gateway-unplugged' is non-nil."
+  (unless url-gateway-unplugged
+    (let ((gw-method (if (and url-gateway-local-host-regexp
+			      (not (eq 'ssl url-gateway-method))
+			      (string-match
+			       url-gateway-local-host-regexp
+			       host))
+			 'native
+		       url-gateway-method))
+;;; 	;; This hack is for OS/2 Emacs so that it will not do bogus CRLF
+;;; 	;; conversions while trying to be 'helpful'
+;;; 	(tcp-binary-process-output-services (if (stringp service)
+;;; 						(list service)
+;;; 					      (list service
+;;; 						    (int-to-string service))))
+
+	  ;; An attempt to deal with denied connections, and attempt
+	  ;; to reconnect
+	  (cur-retries 0)
+	  (retry t)
+	  (errobj nil)
+	  (conn nil))
+
+      ;; If the user told us to do DNS for them, do it.
+      (if url-gateway-broken-resolution
+	  (setq host (url-gateway-nslookup-host host)))
+
+      (condition-case errobj
+	  ;; This is a clean way to ensure the new process inherits the
+	  ;; right coding systems in both Emacs and XEmacs.
+	  (let ((coding-system-for-read 'binary)
+		(coding-system-for-write 'binary))
+	    (setq conn (case gw-method
+			 (ssl
+			  (open-ssl-stream name buffer host service))
+			 ((native)
+			  (open-network-stream name buffer host service))
+			 (socks
+			  (socks-open-network-stream name buffer host service))
+			 (telnet
+			  (url-open-telnet name buffer host service))
+			 (rlogin
+			  (url-open-rlogin name buffer host service))
+			 (otherwise
+			  (error "Bad setting of url-gateway-method: %s"
+				 url-gateway-method)))))
+	(error
+	 (setq conn nil)))
+      conn)))
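+
+;; Usage sketch: with a `native' gateway method (the common case) this
+;; reduces to `open-network-stream' with binary coding systems, e.g.
+;; (the host below is hypothetical):
+;;
+;;   (url-open-stream "web" (generate-new-buffer " *web*") "example.com" 80)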
+
+(provide 'url-gw)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-handlers.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,252 @@
+;;; url-handlers.el --- file-name-handler stuff for URL loading
+;; Author: $Author: sds $
+;; Created: $Date: 2003/06/26 18:45:45 $
+;; Version: $Revision: 1.10 $
+;; Keywords: comm, data, processes, hypermedia
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1993 - 1996 by William M. Perry <wmperry@cs.indiana.edu>
+;;; Copyright (c) 1996 - 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(require 'url)
+(require 'url-parse)
+(require 'url-util)
+(require 'mm-decode)
+(require 'mailcap)
+
+(eval-when-compile
+  (require 'cl))
+
+;; Implementation status
+;; ---------------------
+;; Function				Status
+;; ------------------------------------------------------------
+;; add-name-to-file			Needs DAV Bindings
+;; copy-file				Broken (assumes 1st item is URL)
+;; delete-directory			Finished (DAV)
+;; delete-file				Finished (DAV)
+;; diff-latest-backup-file
+;; directory-file-name			unnecessary (what about VMS)?
+;; directory-files			Finished (DAV)
+;; dired-call-process
+;; dired-compress-file
+;; dired-uncache
+;; expand-file-name			Finished
+;; file-accessible-directory-p
+;; file-attributes			Finished, better with DAV
+;; file-directory-p			Needs DAV, finished
+;; file-executable-p			Finished
+;; file-exists-p			Finished
+;; file-local-copy
+;; file-modes
+;; file-name-all-completions		Finished (DAV)
+;; file-name-as-directory
+;; file-name-completion			Finished (DAV)
+;; file-name-directory
+;; file-name-nondirectory
+;; file-name-sans-versions		why?
+;; file-newer-than-file-p
+;; file-ownership-preserved-p		No way to know
+;; file-readable-p			Finished
+;; file-regular-p			!directory_p
+;; file-symlink-p			Needs DAV bindings
+;; file-truename			Needs DAV bindings
+;; file-writable-p			Check for LOCK?
+;; find-backup-file-name		why?
+;; get-file-buffer			why?
+;; insert-directory			Use DAV
+;; insert-file-contents			Finished
+;; load
+;; make-directory			Finished (DAV)
+;; make-symbolic-link			Needs DAV bindings
+;; rename-file				Finished (DAV)
+;; set-file-modes			Use mod_dav specific executable flag?
+;; set-visited-file-modtime		Impossible?
+;; shell-command			Impossible?
+;; unhandled-file-name-directory
+;; vc-registered			Finished (DAV)
+;; verify-visited-file-modtime
+;; write-region
+
+(defvar url-handler-regexp
+  "\\`\\(https?\\|ftp\\|file\\|nfs\\)://"
+  "*A regular expression for matching  URLs handled by file-name-handler-alist.
+Some valid URL protocols just do not make sense to visit interactively
+\(about, data, info, irc, mailto, etc\).  This regular expression
+avoids conflicts with local files that look like URLs \(Gnus is
+particularly bad at this\).")
+
+;;;###autoload
+(defun url-setup-file-name-handlers ()
+  "Setup file-name handlers."
+  (cond
+   ((not (boundp 'file-name-handler-alist))
+    nil)				; Don't load if no alist
+   ((rassq 'url-file-handler file-name-handler-alist)
+    nil)				; Don't load twice
+   (t
+    (push (cons url-handler-regexp 'url-file-handler)
+	  file-name-handler-alist))))
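+
+;; After this setup, ordinary file primitives work on URLs matched by
+;; `url-handler-regexp' (hypothetical URL):
+;;
+;;   (url-setup-file-name-handlers)
+;;   (file-exists-p "http://example.com/index.html")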
+
+(defun url-run-real-handler (operation args)
+  (let ((inhibit-file-name-handlers (cons 'url-file-handler
+					  (if (eq operation inhibit-file-name-operation)
+					      inhibit-file-name-handlers)))
+	(inhibit-file-name-operation operation))
+    (apply operation args)))
+
+(defun url-file-handler (operation &rest args)
+  "Function called from the `file-name-handler-alist' routines.
+OPERATION is what needs to be done (`file-exists-p', etc).  ARGS are
+the arguments that would have been passed to OPERATION."
+  (let ((fn (or (get operation 'url-file-handlers)
+		(intern-soft (format "url-%s" operation))))
+	(val nil)
+	(hooked nil))
+    (if (and fn (fboundp fn))
+	(setq hooked t
+	      val (apply fn args))
+      (setq hooked nil
+	    val (url-run-real-handler operation args)))
+    (url-debug 'handlers "%s %S%S => %S" (if hooked "Hooked" "Real")
+	       operation args val)
+    val))
+
+(defun url-file-handler-identity (&rest args)
+  ;; Identity function
+  (car args))
+
+;; These are operations that we can fully support
+(put 'file-readable-p 'url-file-handlers 'url-file-exists-p)
+(put 'substitute-in-file-name 'url-file-handlers 'url-file-handler-identity)
+(put 'file-name-absolute-p 'url-file-handlers (lambda (&rest ignored) t))
+(put 'expand-file-name 'url-file-handlers 'url-handler-expand-file-name)
+
+;; These are operations that we do not support yet (DAV!!!)
+(put 'file-writable-p 'url-file-handlers 'ignore)
+(put 'file-symlink-p 'url-file-handlers 'ignore)
+
+(defun url-handler-expand-file-name (file &optional base)
+  (if (file-name-absolute-p file)
+      (expand-file-name file "/")
+    (url-expand-file-name file base)))
+
+;; The actual implementation
+;;;###autoload
+(defun url-copy-file (url newname &optional ok-if-already-exists keep-time)
+  "Copy URL to NEWNAME.  Both args must be strings.
+Signals a `file-already-exists' error if file NEWNAME already exists,
+unless a third argument OK-IF-ALREADY-EXISTS is supplied and non-nil.
+A number as third arg means request confirmation if NEWNAME already exists.
+This is what happens in interactive use with M-x.
+Fourth arg KEEP-TIME non-nil means give the new file the same
+last-modified time as the old one.  (This works on only some systems.)
+A prefix arg makes KEEP-TIME non-nil."
+  (if (and (file-exists-p newname)
+	   (not ok-if-already-exists))
+      (error "Opening output file: File already exists, %s" newname))
+  (let ((buffer (url-retrieve-synchronously url))
+	(handle nil))
+    (if (not buffer)
+	(error "Opening input file: No such file or directory, %s" url))
+    (save-excursion
+      (set-buffer buffer)
+      (setq handle (mm-dissect-buffer t)))
+    (mm-save-part-to-file handle newname)
+    (kill-buffer buffer)
+    (mm-destroy-parts handle)))
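+
+;; Illustrative use (the URL and file name are invented):
+;;   (url-copy-file "http://example.org/index.html" "/tmp/index.html" t)
+;; retrieves the document synchronously and writes the decoded body to
+;; /tmp/index.html; the non-nil third argument suppresses the
+;; file-already-exists error.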
+
+;;;###autoload
+(defun url-file-local-copy (url &rest ignored)
+  "Copy URL into a temporary file on this machine.
+Returns the name of the local copy, or nil if URL is directly
+accessible."
+  (let ((filename (make-temp-name "url")))
+    (url-copy-file url filename)
+    filename))
+
+;;;###autoload
+(defun url-insert-file-contents (url &optional visit beg end replace)
+  (let ((buffer (url-retrieve-synchronously url))
+	(handle nil)
+	(data nil))
+    (if (not buffer)
+	(error "Opening input file: No such file or directory, %s" url))
+    (if visit (setq buffer-file-name url))
+    (save-excursion
+      (set-buffer buffer)
+      (setq handle (mm-dissect-buffer t))
+      (set-buffer (mm-handle-buffer handle))
+      (if beg
+	  (setq data (buffer-substring beg end))
+	(setq data (buffer-string))))
+    (kill-buffer buffer)
+    (mm-destroy-parts handle)
+    (if replace (delete-region (point-min) (point-max)))
+    (save-excursion
+      (insert data))
+    (list url (length data))))
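+
+;; A sketch of typical use (added commentary; the URL is invented):
+;;   (with-temp-buffer
+;;     (url-insert-file-contents "http://example.org/motd.txt"))
+;; inserts the response body into the current buffer and returns a list
+;; of the URL and the number of characters inserted, mirroring the
+;; return convention of `insert-file-contents'.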
+
+(defun url-file-name-completion (url directory)
+  (error "Unimplemented"))
+
+(defun url-file-name-all-completions (file directory)
+  (error "Unimplemented"))
+
+;; All other handlers map onto their respective backends.
+(defmacro url-handlers-create-wrapper (method args)
+  `(defun ,(intern (format "url-%s" method)) ,args
+     ,(format "URL file-name-handler wrapper for `%s' call.\n---\n%s" method
+              (or (documentation method t) "No original documentation."))
+     (setq url (url-generic-parse-url url))
+     (when (url-type url)
+       (funcall (url-scheme-get-property (url-type url) (quote ,method))
+                ,@(remove '&rest (remove '&optional args))))))
+
+(url-handlers-create-wrapper file-exists-p (url))
+(url-handlers-create-wrapper file-attributes (url))
+(url-handlers-create-wrapper file-symlink-p (url))
+(url-handlers-create-wrapper file-writable-p (url))
+(url-handlers-create-wrapper file-directory-p (url))
+(url-handlers-create-wrapper file-executable-p (url))
+
+(if (featurep 'xemacs)
+    (progn
+      ;; XEmacs specific prototypes
+      (url-handlers-create-wrapper
+       directory-files (url &optional full match nosort files-only))
+      (url-handlers-create-wrapper
+       file-truename (url &optional default)))
+  ;; Emacs specific prototypes
+  (url-handlers-create-wrapper
+   directory-files (url &optional full match nosort))
+  (url-handlers-create-wrapper
+   file-truename (url &optional counter prev-dirs)))
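+
+;; For reference (added commentary): each wrapper above is a simple
+;; scheme dispatcher.  For instance,
+;;   (url-handlers-create-wrapper file-exists-p (url))
+;; expands to roughly:
+;;   (defun url-file-exists-p (url)
+;;     (setq url (url-generic-parse-url url))
+;;     (when (url-type url)
+;;       (funcall (url-scheme-get-property (url-type url) 'file-exists-p)
+;;                url)))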
+
+(add-hook 'find-file-hooks 'url-handlers-set-buffer-mode)
+
+(defun url-handlers-set-buffer-mode ()
+  "Set correct modes for the current buffer if visiting a remote file."
+  (and (stringp buffer-file-name)
+       (string-match url-handler-regexp buffer-file-name)
+       (auto-save-mode 0)))
+
+(provide 'url-handlers)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-history.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,199 @@
+;;; url-history.el --- Global history tracking for URL package
+;; Author: $Author: fx $
+;; Created: $Date: 2001/05/05 16:49:52 $
+;; Version: $Revision: 1.6 $
+;; Keywords: comm, data, processes, hypermedia
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1993 - 1996 by William M. Perry <wmperry@cs.indiana.edu>
+;;; Copyright (c) 1996 - 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+;; This can get a recursive require.
+;;(require 'url)
+(eval-when-compile (require 'cl))
+(require 'url-parse)
+(autoload 'url-do-setup "url")
+
+(defgroup url-history nil
+  "History variables in the URL package"
+  :prefix "url-history"
+  :group 'url)
+
+(defcustom url-history-track nil
+  "*Controls whether to keep a list of all the URLS being visited.
+If non-nil, url will keep track of all the URLS visited.
+If eq to `t', then the list is saved to disk at the end of each emacs
+session."
+  :type 'boolean
+  :group 'url-history)
+
+(defcustom url-history-file nil
+  "*The global history file for the URL package.
+This file contains a list of all the URLs you have visited.  This file
+is parsed at startup and used to provide URL completion."
+  :type '(choice (const :tag "Default" :value nil) file)
+  :group 'url-history)
+
+(defcustom url-history-save-interval 3600
+  "*The number of seconds between automatic saves of the history list.
+Default is 1 hour.  Note that if you change this variable outside of
+the `customize' interface after `url-do-setup' has been run, you need
+to run the `url-history-setup-save-timer' function manually."
+  :set (function (lambda (var val)
+		   (set-default var val)
+		   (and (featurep 'url)
+			(fboundp 'url-history-setup-save-timer)
+                        (let ((def (symbol-function
+                                    'url-history-setup-save-timer)))
+                          (not (and (listp def) (eq 'autoload (car def)))))
+			(url-history-setup-save-timer))))
+  :type 'integer
+  :group 'url-history)
+
+(defvar url-history-timer nil)
+
+(defvar url-history-list nil
+  "List of urls visited this session.")
+
+(defvar url-history-changed-since-last-save nil
+  "Whether the history list has changed since the last save operation.")
+
+(defvar url-history-hash-table nil
+  "Hash table for global history completion.")
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;;###autoload
+(defun url-history-setup-save-timer ()
+  "Reset the history list timer."
+  (interactive)
+  (cond
+   ((featurep 'itimer)
+    (ignore-errors (delete-itimer url-history-timer))
+    (setq url-history-timer nil)
+    (if url-history-save-interval
+	(setq url-history-timer
+	      (start-itimer "url-history-saver" 'url-history-save-history
+			    url-history-save-interval
+			    url-history-save-interval))))
+   ((fboundp 'run-at-time)
+    (ignore-errors (cancel-timer url-history-timer))
+    (setq url-history-timer nil)
+    (if url-history-save-interval
+	(setq url-history-timer
+	      (run-at-time url-history-save-interval
+			   url-history-save-interval
+			   'url-history-save-history))))
+   (t nil)))
+
+;;;###autoload
+(defun url-history-parse-history (&optional fname)
+  "Parse a history file stored in FNAME."
+  ;; Parse out the mosaic global history file for completions, etc.
+  (or fname (setq fname (expand-file-name url-history-file)))
+  (cond
+   ((not (file-exists-p fname))
+    (message "%s does not exist." fname))
+   ((not (file-readable-p fname))
+    (message "%s is unreadable." fname))
+   (t
+    (condition-case nil
+	(load fname nil t)
+      (error (message "Could not load %s" fname)))))
+  (if (not url-history-hash-table)
+      (setq url-history-hash-table (make-hash-table :size 31 :test 'equal))))
+
+(defun url-history-update-url (url time)
+  (setq url-history-changed-since-last-save t)
+  (puthash (if (vectorp url) (url-recreate-url url) url) time url-history-hash-table))
+
+;;;###autoload
+(defun url-history-save-history (&optional fname)
+  "Write the global history file into `url-history-file'.
+The type of data written is determined by what is in the file to begin
+with.  If the type of storage cannot be determined, then prompt the
+user for what type to save as."
+  (interactive)
+  (or fname (setq fname (expand-file-name url-history-file)))
+  (cond
+   ((not url-history-changed-since-last-save) nil)
+   ((not (file-writable-p fname))
+    (message "%s is unwritable." fname))
+   (t
+    (let ((make-backup-files nil)
+	  (version-control nil)
+	  (require-final-newline t))
+      (save-excursion
+	(set-buffer (get-buffer-create " *url-tmp*"))
+	(erase-buffer)
+	(let ((count 0))
+	  (maphash (function
+		       (lambda (key value)
+			 (while (string-match "[\r\n]+" key)
+			   (setq key (concat (substring key 0 (match-beginning 0))
+					     (substring key (match-end 0) nil))))
+			 (setq count (1+ count))
+			 (insert "(puthash \"" key "\""
+				 (if (not (stringp value)) " '" "")
+				 (prin1-to-string value)
+				 " url-history-hash-table)\n")))
+		      url-history-hash-table)
+	  (goto-char (point-min))
+	  (insert (format
+		   "(setq url-history-hash-table (make-hash-table :size %d :test 'equal))\n"
+		   (/ count 4)))
+	  (goto-char (point-max))
+	  (insert "\n")
+	  (write-file fname))
+	(kill-buffer (current-buffer))))))
+  (setq url-history-changed-since-last-save nil))
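+
+;; The file written above is plain Lisp that rebuilds the hash table when
+;; it is loaded again by `url-history-parse-history'.  Illustrative
+;; contents (URL, size and timestamp are invented):
+;;   (setq url-history-hash-table (make-hash-table :size 123 :test 'equal))
+;;   (puthash "http://example.org/" '(16585 22134) url-history-hash-table)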
+
+(defun url-have-visited-url (url)
+  (url-do-setup)
+  (gethash url url-history-hash-table nil))
+
+(defun url-completion-function (string predicate function)
+  (url-do-setup)
+  (cond
+   ((eq function nil)
+    (let ((list nil))
+      (maphash (function (lambda (key val)
+			      (setq list (cons (cons key val)
+					       list))))
+		  url-history-hash-table)
+      (try-completion string (nreverse list) predicate)))
+   ((eq function t)
+    (let ((stub (concat "^" (regexp-quote string)))
+	  (retval nil))
+      (maphash
+       (function
+	(lambda (url time)
+	  (if (string-match stub url)
+	      (setq retval (cons url retval)))))
+       url-history-hash-table)
+      retval))
+   ((eq function 'lambda)
+    (and url-history-hash-table
+	 (gethash string url-history-hash-table)
+	 t))
+   (t
+    (error "url-completion-function very confused."))))
+
+(provide 'url-history)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-http.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,1223 @@
+;;; url-http.el --- HTTP retrieval routines
+;; Author: Bill Perry <wmperry@gnu.org>
+;; Version: $Revision: 1.39 $
+;; Keywords: comm, data, processes
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1999, 2001 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(eval-when-compile
+  (require 'cl)
+  (defvar url-http-extra-headers))
+(require 'url-gw)
+(require 'url-util)
+(require 'url-parse)
+(require 'url-cookie)
+(require 'mail-parse)
+(require 'url-auth)
+(autoload 'url-retrieve-synchronously "url")
+(autoload 'url-retrieve "url")
+(autoload 'url-cache-create-filename "url-cache")
+(autoload 'url-mark-buffer-as-dead "url")
+
+(defconst url-http-default-port 80 "Default HTTP port.")
+(defconst url-http-asynchronous-p t "HTTP retrievals are asynchronous.")
+(defalias 'url-http-expand-file-name 'url-default-expander)
+
+(defvar url-http-real-basic-auth-storage nil)
+(defvar url-http-proxy-basic-auth-storage nil)
+
+(defvar url-http-open-connections (make-hash-table :test 'equal
+						   :size 17)
+  "A hash table of all open network connections.")
+
+(defvar url-http-version "1.1"
+  "What version of HTTP we advertise, as a string.
+Valid values are 1.1 and 1.0.
+This is only useful when debugging the HTTP subsystem.
+
+Setting this to 1.0 will tell servers not to use chunked encoding or
+other HTTP/1.1-specific features.
+")
+
+(defvar url-http-attempt-keepalives t
+  "Whether to use a single TCP connection multiple times in HTTP.
+This is only useful when debugging the HTTP subsystem.  Setting to
+`nil' will explicitly close the connection to the server after every
+request.
+")
+
+;(eval-when-compile
+;; These are all macros so that they are hidden from external sight
+;; when the file is byte-compiled.
+;;
+;; This allows us to expose just the entry points we want.
+
+;; These routines will allow us to implement persistent HTTP
+;; connections.
+(defsubst url-http-debug (&rest args)
+  (if quit-flag
+      (let ((proc (get-buffer-process (current-buffer))))
+	;; The user hit C-g, honor it!  Some things can get in an
+	;; incredibly tight loop (chunked encoding)
+	(if proc
+	    (progn
+	      (set-process-sentinel proc nil)
+	      (set-process-filter proc nil)))
+	(error "Transfer interrupted!")))
+  (apply 'url-debug 'http args))
+
+(defun url-http-mark-connection-as-busy (host port proc)
+  (url-http-debug "Marking connection as busy: %s:%d %S" host port proc)
+  (puthash (cons host port)
+	      (delq proc (gethash (cons host port) url-http-open-connections))
+	      url-http-open-connections)
+  proc)
+
+(defun url-http-mark-connection-as-free (host port proc)
+  (url-http-debug "Marking connection as free: %s:%d %S" host port proc)
+  (set-process-buffer proc nil)
+  (set-process-sentinel proc 'url-http-idle-sentinel)
+  (puthash (cons host port)
+	      (cons proc (gethash (cons host port) url-http-open-connections))
+	      url-http-open-connections)
+  nil)
+
+(defun url-http-find-free-connection (host port)
+  (let ((conns (gethash (cons host port) url-http-open-connections))
+	(found nil))
+    (while (and conns (not found))
+      (if (not (memq (process-status (car conns)) '(run open)))
+	  (progn
+	    (url-http-debug "Cleaning up dead process: %s:%d %S"
+			    host port (car conns))
+	    (url-http-idle-sentinel (car conns) nil))
+	(setq found (car conns))
+	(url-http-debug "Found existing connection: %s:%d %S" host port found))
+      (pop conns))
+    (if found
+	(url-http-debug "Reusing existing connection: %s:%d" host port)
+      (url-http-debug "Contacting host: %s:%d" host port))
+    (url-lazy-message "Contacting host: %s:%d" host port)
+    (url-http-mark-connection-as-busy host port
+				      (or found
+					  (url-open-stream host nil host
+							   port)))))
+
+;; Building an HTTP request
+(defun url-http-user-agent-string ()
+  (if (or (eq url-privacy-level 'paranoid)
+	  (and (listp url-privacy-level)
+	       (memq 'agent url-privacy-level)))
+      ""
+    (format "User-Agent: %sURL/%s%s\r\n"
+	    (if url-package-name
+		(concat url-package-name "/" url-package-version " ")
+	      "")
+	    url-version
+	    (cond
+	     ((and url-os-type url-system-type)
+	      (concat " (" url-os-type "; " url-system-type ")"))
+	     ((or url-os-type url-system-type)
+	      (concat " (" (or url-system-type url-os-type) ")"))
+	     (t "")))))
+
+(defun url-http-create-request (url &optional ref-url)
+  "Create an HTTP request for URL, referred to by REF-URL."
+  (declare (special proxy-object proxy-info))
+  (let* ((extra-headers)
+	 (request nil)
+	 (no-cache (cdr-safe (assoc "Pragma" url-request-extra-headers)))
+	 (proxy-obj (and (boundp 'proxy-object) proxy-object))
+	 (proxy-auth (if (or (cdr-safe (assoc "Proxy-Authorization"
+					      url-request-extra-headers))
+			     (not proxy-obj))
+			 nil
+		       (let ((url-basic-auth-storage
+			      'url-http-proxy-basic-auth-storage))
+			 (url-get-authentication url nil 'any nil))))
+	 (real-fname (if proxy-obj (url-recreate-url proxy-obj)
+		       (url-filename url)))
+	 (host (url-host (or proxy-obj url)))
+	 (auth (if (cdr-safe (assoc "Authorization" url-request-extra-headers))
+		   nil
+		 (url-get-authentication (or
+					  (and (boundp 'proxy-info)
+					       proxy-info)
+					  url) nil 'any nil))))
+    (if (equal "" real-fname)
+	(setq real-fname "/"))
+    (setq no-cache (and no-cache (string-match "no-cache" no-cache)))
+    (if auth
+	(setq auth (concat "Authorization: " auth "\r\n")))
+    (if proxy-auth
+	(setq proxy-auth (concat "Proxy-Authorization: " proxy-auth "\r\n")))
+
+    ;; Protection against stupid values in the referer
+    (if (and ref-url (stringp ref-url) (or (string= ref-url "file:nil")
+					   (string= ref-url "")))
+	(setq ref-url nil))
+
+    ;; We do not want to expose the referer if the user is paranoid.
+    (if (or (memq url-privacy-level '(low high paranoid))
+	    (and (listp url-privacy-level)
+		 (memq 'lastloc url-privacy-level)))
+	(setq ref-url nil))
+
+    ;; url-request-extra-headers contains an assoc-list of
+    ;; header/value pairs that we need to put into the request.
+    (setq extra-headers (mapconcat
+			 (lambda (x)
+			   (concat (car x) ": " (cdr x)))
+			 url-request-extra-headers "\r\n"))
+    (if (not (equal extra-headers ""))
+	(setq extra-headers (concat extra-headers "\r\n")))
+
+    ;; This was done with a call to `format'.  Concatting parts has
+    ;; the advantage of keeping the parts of each header together and
+    ;; allows us to elide null lines directly, at the cost of making
+    ;; the layout less clear.
+    (setq request
+	  (concat
+	   ;; The request
+	   (or url-request-method "GET") " " real-fname " HTTP/" url-http-version "\r\n"
+	   ;; Version of MIME we speak
+	   "MIME-Version: 1.0\r\n"
+	   ;; (maybe) Try to keep the connection open
+	   "Connection: " (if (or proxy-obj
+				  (not url-http-attempt-keepalives))
+			      "close" "keep-alive") "\r\n"
+	   ;; HTTP extensions we support
+	   (if url-extensions-header
+	       (format
+		"Extension: %s\r\n" url-extensions-header))
+	   ;; Who we want to talk to
+	   (if (/= (url-port (or proxy-obj url))
+		   (url-scheme-get-property
+		    (url-type (or proxy-obj url)) 'default-port))
+	       (format
+		"Host: %s:%d\r\n" host (url-port (or proxy-obj url)))
+	     (format "Host: %s\r\n" host))
+	   ;; Who it's from
+	   (if url-personal-mail-address
+	       (concat
+		"From: " url-personal-mail-address "\r\n"))
+	   ;; Encodings we understand
+	   (if url-mime-encoding-string
+	       (concat
+		"Accept-encoding: " url-mime-encoding-string "\r\n"))
+	   (if url-mime-charset-string
+	       (concat
+		"Accept-charset: " url-mime-charset-string "\r\n"))
+	   ;; Languages we understand
+	   (if url-mime-language-string
+	       (concat
+		"Accept-language: " url-mime-language-string "\r\n"))
+	   ;; Types we understand
+	   "Accept: " (or url-mime-accept-string "*/*") "\r\n"
+	   ;; User agent
+	   (url-http-user-agent-string)
+	   ;; Proxy Authorization
+	   proxy-auth
+	   ;; Authorization
+	   auth
+	   ;; Cookies
+	   (url-cookie-generate-header-lines host real-fname
+					     (equal "https" (url-type url)))
+	   ;; If-modified-since
+	   (if (and (not no-cache)
+		    (member url-request-method '("GET" nil)))
+	       (let ((tm (url-is-cached (or proxy-obj url))))
+		 (if tm
+		     (concat "If-modified-since: "
+			     (url-get-normalized-date tm) "\r\n"))))
+	   ;; Whence we came
+	   (if ref-url (concat
+			"Referer: " ref-url "\r\n"))
+	   extra-headers
+	   ;; Any data
+	   (if url-request-data
+	       (concat
+		"Content-length: " (number-to-string
+				    (length url-request-data))
+		"\r\n\r\n"
+		url-request-data))
+	   ;; End request
+	   "\r\n"))
+    (url-http-debug "Request is: \n%s" request)
+    request))
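+
+;; An illustrative request produced by the function above (host, path and
+;; header values are invented; the exact set of headers depends on the
+;; url-* variables in effect):
+;;
+;;   GET /index.html HTTP/1.1
+;;   MIME-Version: 1.0
+;;   Connection: keep-alive
+;;   Host: example.org
+;;   Accept: */*
+;;   User-Agent: URL/<url-version>
+;;
+;; Every header line ends in CRLF and the request is terminated by a
+;; blank line (plus any `url-request-data' for POST-style requests).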
+
+;; Parsing routines
+(defun url-http-clean-headers ()
+  "Remove trailing \r from header lines.
+This allows us to use `mail-fetch-field', etc."
+  (declare (special url-http-end-of-headers))
+  (goto-char (point-min))
+  (while (re-search-forward "\r$" url-http-end-of-headers t)
+    (replace-match "")))
+
+(defun url-http-handle-authentication (proxy)
+  (declare (special status success url-http-method url-http-data
+		    url-callback-function url-callback-arguments))
+  (url-http-debug "Handling %s authentication" (if proxy "proxy" "normal"))
+  (let ((auth (or (mail-fetch-field (if proxy "proxy-authenticate" "www-authenticate"))
+		  "basic"))
+	(type nil)
+	(url (url-recreate-url url-current-object))
+	(url-basic-auth-storage 'url-http-real-basic-auth-storage)
+	)
+
+    ;; Cheating, but who cares? :)
+    (if proxy
+	(setq url-basic-auth-storage 'url-http-proxy-basic-auth-storage))
+
+    (setq auth (url-eat-trailing-space (url-strip-leading-spaces auth)))
+    (if (string-match "[ \t]" auth)
+	(setq type (downcase (substring auth 0 (match-beginning 0))))
+      (setq type (downcase auth)))
+
+    (if (not (url-auth-registered type))
+	(progn
+	  (widen)
+	  (goto-char (point-max))
+	  (insert "<hr>Sorry, but I do not know how to handle " type
+		  " authentication.  If you'd like to write it,"
+		  " send it to " url-bug-address ".<hr>")
+	  (setq status t))
+      (let* ((args auth)
+	     (ctr (1- (length args)))
+	     auth)
+	(while (/= 0 ctr)
+	  (if (char-equal ?, (aref args ctr))
+	      (aset args ctr ?\;))
+	  (setq ctr (1- ctr)))
+	(setq args (url-parse-args args)
+	      auth (url-get-authentication url (cdr-safe (assoc "realm" args))
+					   type t args))
+	(if (not auth)
+	    (setq success t)
+	  (push (cons (if proxy "Proxy-Authorization" "Authorization") auth)
+		url-http-extra-headers)
+	  (let ((url-request-method url-http-method)
+		(url-request-data url-http-data)
+		(url-request-extra-headers url-http-extra-headers))
+	    (url-retrieve url url-callback-function url-callback-arguments))))
+      (kill-buffer (current-buffer)))))
+
+(defun url-http-parse-response ()
+  "Parse just the response code."
+  (declare (special url-http-end-of-headers url-http-response-status))
+  (if (not url-http-end-of-headers)
+      (error "Trying to parse HTTP response code in odd buffer: %s" (buffer-name)))
+  (url-http-debug "url-http-parse-response called in (%s)" (buffer-name))
+  (goto-char (point-min))
+  (skip-chars-forward " \t\n")		; Skip any blank crap
+  (skip-chars-forward "HTTP/")		; Skip HTTP Version
+  (read (current-buffer))
+  (setq url-http-response-status (read (current-buffer))))
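+
+;; For example (added commentary), with a buffer starting
+;; "HTTP/1.1 404 Not Found", the first `read' above consumes the version
+;; number 1.1 and the second leaves `url-http-response-status' set to 404.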
+
+(defun url-http-handle-cookies ()
+  "Handle all set-cookie / set-cookie2 headers in an HTTP response.
+The buffer must already be narrowed to the headers, so mail-fetch-field will
+work correctly."
+  (let ((cookies (mail-fetch-field "Set-Cookie" nil nil t))
+	(cookies2 (mail-fetch-field "Set-Cookie2" nil nil t)))
+    (and cookies (url-http-debug "Found %d Set-Cookie headers" (length cookies)))
+    (and cookies2 (url-http-debug "Found %d Set-Cookie2 headers" (length cookies2)))
+    (while cookies
+      (url-cookie-handle-set-cookie (pop cookies)))
+;;;     (while cookies2
+;;;       (url-cookie-handle-set-cookie2 (pop cookies)))
+    )
+  )
+
+(defun url-http-parse-headers ()
+ "Parse and handle HTTP specific headers.
+Return t if and only if the current buffer is still active and
+should be shown to the user."
+  ;; The comments after each status code handled are taken from RFC
+  ;; 2616 (HTTP/1.1)
+  (declare (special url-http-end-of-headers url-http-response-status
+		    url-http-method url-http-data url-http-process
+		    url-callback-function url-callback-arguments))
+
+  (url-http-mark-connection-as-free (url-host url-current-object)
+				    (url-port url-current-object)
+				    url-http-process)
+
+  (if (or (not (boundp 'url-http-end-of-headers))
+	  (not url-http-end-of-headers))
+      (error "Trying to parse headers in odd buffer: %s" (buffer-name)))
+  (goto-char (point-min))
+  (url-http-debug "url-http-parse-headers called in (%s)" (buffer-name))
+  (url-http-parse-response)
+  (mail-narrow-to-head)
+  ;;(narrow-to-region (point-min) url-http-end-of-headers)
+  (let ((version nil)
+	(class nil)
+	(success nil))
+    (setq class (/ url-http-response-status 100))
+    (url-http-debug "Parsed HTTP headers: class=%d status=%d" class url-http-response-status)
+    (url-http-handle-cookies)
+
+    (case class
+      ;; Classes of response codes
+      ;;
+      ;; 5xx = Server Error
+      ;; 4xx = Client Error
+      ;; 3xx = Redirection
+      ;; 2xx = Successful
+      ;; 1xx = Informational
+      (1				; Information messages
+       ;; 100 = Continue with request
+       ;; 101 = Switching protocols
+       ;; 102 = Processing (Added by DAV)
+       (url-mark-buffer-as-dead (current-buffer))
+       (error "HTTP responses in class 1xx not supported (%d)" url-http-response-status))
+      (2				; Success
+       ;; 200 Ok
+       ;; 201 Created
+       ;; 202 Accepted
+       ;; 203 Non-authoritative information
+       ;; 204 No content
+       ;; 205 Reset content
+       ;; 206 Partial content
+       ;; 207 Multi-status (Added by DAV)
+       (case url-http-response-status
+	 ((204 205)
+	  ;; No new data, just stay at the same document
+	  (url-mark-buffer-as-dead (current-buffer))
+	  (setq success t))
+	 (otherwise
+	  ;; Generic success for all others.  Store in the cache, and
+	  ;; mark it as successful.
+	  (widen)
+	  (if (equal url-http-method "GET")
+	      (url-store-in-cache (current-buffer)))
+	  (setq success t))))
+      (3				; Redirection
+       ;; 300 Multiple choices
+       ;; 301 Moved permanently
+       ;; 302 Found
+       ;; 303 See other
+       ;; 304 Not modified
+       ;; 305 Use proxy
+       ;; 307 Temporary redirect
+       (let ((redirect-uri (or (mail-fetch-field "Location")
+			       (mail-fetch-field "URI"))))
+	 (case url-http-response-status
+	   (300
+	    ;; Quoth the spec (section 10.3.1)
+	    ;; -------------------------------
+	    ;; The requested resource corresponds to any one of a set of
+	    ;; representations, each with its own specific location and
+	    ;; agent-driven negotiation information is being provided so
+	    ;; that the user can select a preferred representation and
+	    ;; redirect its request to that location.
+	    ;; [...]
+	    ;; If the server has a preferred choice of representation, it
+	    ;; SHOULD include the specific URI for that representation in
+	    ;; the Location field; user agents MAY use the Location field
+	    ;; value for automatic redirection.
+	    ;; -------------------------------
+	    ;; We do not support agent-driven negotiation, so we just
+	    ;; redirect to the preferred URI if one is provided.
+	    nil)
+	   ((301 302 307)
+	    ;; If the 301|302 status code is received in response to a
+	    ;; request other than GET or HEAD, the user agent MUST NOT
+	    ;; automatically redirect the request unless it can be
+	    ;; confirmed by the user, since this might change the
+	    ;; conditions under which the request was issued.
+	    (if (member url-http-method '("HEAD" "GET"))
+		;; Automatic redirection is ok
+		nil
+	      ;; It is just too big of a pain in the ass to get this
+	      ;; prompt all the time.  We will just silently lose our
+	      ;; data and convert to a GET method.
+	      (url-http-debug "Converting `%s' request to `GET' because of REDIRECT(%d)"
+			      url-http-method url-http-response-status)
+	      (setq url-http-method "GET"
+		    url-http-data nil)))
+	   (303
+	    ;; The response to the request can be found under a different
+	    ;; URI and SHOULD be retrieved using a GET method on that
+	    ;; resource.
+	    (setq url-http-method "GET"
+		  url-http-data nil))
+	   (304
+	    ;; The 304 response MUST NOT contain a message-body.
+	    (url-http-debug "Extracting document from cache... (%s)"
+			    (url-cache-create-filename (url-view-url t)))
+	    (url-cache-extract (url-cache-create-filename (url-view-url t)))
+	    (setq redirect-uri nil
+		  success t))
+	   (305
+	    ;; The requested resource MUST be accessed through the
+	    ;; proxy given by the Location field.  The Location field
+	    ;; gives the URI of the proxy.  The recipient is expected
+	    ;; to repeat this single request via the proxy.  305
+	    ;; responses MUST only be generated by origin servers.
+	    (error "Redirection through a proxy server not supported: %s"
+		   redirect-uri))
+	   (otherwise
+	    ;; Treat everything like '300'
+	    nil))
+	 (when redirect-uri
+	   ;; Clean off any whitespace and/or <...> cruft.
+	   (if (string-match "\\([^ \t]+\\)[ \t]" redirect-uri)
+	       (setq redirect-uri (match-string 1 redirect-uri)))
+	   (if (string-match "^<\\(.*\\)>$" redirect-uri)
+	       (setq redirect-uri (match-string 1 redirect-uri)))
+
+	   ;; Some stupid sites (like sourceforge) send a
+	   ;; non-fully-qualified URL (i.e. /), which royally confuses
+	   ;; the URL library.
+	   (if (not (string-match url-nonrelative-link redirect-uri))
+	       (setq redirect-uri (url-expand-file-name redirect-uri)))
+	   (let ((url-request-method url-http-method)
+		 (url-request-data url-http-data)
+		 (url-request-extra-headers url-http-extra-headers))
+	     (url-retrieve redirect-uri url-callback-function
+			   url-callback-arguments)
+	     (url-mark-buffer-as-dead (current-buffer))))))
+      (4				; Client error
+       ;; 400 Bad Request
+       ;; 401 Unauthorized
+       ;; 402 Payment required
+       ;; 403 Forbidden
+       ;; 404 Not found
+       ;; 405 Method not allowed
+       ;; 406 Not acceptable
+       ;; 407 Proxy authentication required
+       ;; 408 Request time-out
+       ;; 409 Conflict
+       ;; 410 Gone
+       ;; 411 Length required
+       ;; 412 Precondition failed
+       ;; 413 Request entity too large
+       ;; 414 Request-URI too large
+       ;; 415 Unsupported media type
+       ;; 416 Requested range not satisfiable
+       ;; 417 Expectation failed
+       ;; 422 Unprocessable Entity (Added by DAV)
+       ;; 423 Locked
+       ;; 424 Failed Dependency
+       (case url-http-response-status
+	 (401
+	  ;; The request requires user authentication.  The response
+	  ;; MUST include a WWW-Authenticate header field containing a
+	  ;; challenge applicable to the requested resource.  The
+	  ;; client MAY repeat the request with a suitable
+	  ;; Authorization header field.
+	  (url-http-handle-authentication nil))
+	 (402
+	  ;; This code is reserved for future use
+	  (url-mark-buffer-as-dead (current-buffer))
+	  (error "Somebody wants you to give them money"))
+	 (403
+	  ;; The server understood the request, but is refusing to
+	  ;; fulfill it.  Authorization will not help and the request
+	  ;; SHOULD NOT be repeated.
+	  (setq success t))
+	 (404
+	  ;; Not found
+	  (setq success t))
+	 (405
+	  ;; The method specified in the Request-Line is not allowed
+	  ;; for the resource identified by the Request-URI.  The
+	  ;; response MUST include an Allow header containing a list of
+	  ;; valid methods for the requested resource.
+	  (setq success t))
+	 (406
+	  ;; The resource identified by the request is only capable of
+	  ;; generating response entities which have content
+	  ;; characteristics not acceptable according to the accept
+	  ;; headers sent in the request.
+	  (setq success t))
+	 (407
+	  ;; This code is similar to 401 (Unauthorized), but indicates
+	  ;; that the client must first authenticate itself with the
+	  ;; proxy.  The proxy MUST return a Proxy-Authenticate header
+	  ;; field containing a challenge applicable to the proxy for
+	  ;; the requested resource.
+	  (url-http-handle-authentication t))
+	 (408
+	  ;; The client did not produce a request within the time that
+	  ;; the server was prepared to wait.  The client MAY repeat
+	  ;; the request without modifications at any later time.
+	  (setq success t))
+	 (409
+	  ;; The request could not be completed due to a conflict with
+	  ;; the current state of the resource.  This code is only
+	  ;; allowed in situations where it is expected that the user
+	  ;; might be able to resolve the conflict and resubmit the
+	  ;; request.  The response body SHOULD include enough
+	  ;; information for the user to recognize the source of the
+	  ;; conflict.
+	  (setq success t))
+	 (410
+	  ;; The requested resource is no longer available at the
+	  ;; server and no forwarding address is known.
+	  (setq success t))
+	 (411
+	  ;; The server refuses to accept the request without a defined
+	  ;; Content-Length.  The client MAY repeat the request if it
+	  ;; adds a valid Content-Length header field containing the
+	  ;; length of the message-body in the request message.
+	  ;;
+	  ;; NOTE - this will never happen because
+	  ;; `url-http-create-request' automatically calculates the
+	  ;; content-length.
+	  (setq success t))
+	 (412
+	  ;; The precondition given in one or more of the
+	  ;; request-header fields evaluated to false when it was
+	  ;; tested on the server.
+	  (setq success t))
+	 ((413 414)
+	  ;; The server is refusing to process a request because the
+	  ;; request entity|URI is larger than the server is willing or
+	  ;; able to process.
+	  (setq success t))
+	 (415
+	  ;; The server is refusing to service the request because the
+	  ;; entity of the request is in a format not supported by the
+	  ;; requested resource for the requested method.
+	  (setq success t))
+	 (416
+	  ;; A server SHOULD return a response with this status code if
+	  ;; a request included a Range request-header field, and none
+	  ;; of the range-specifier values in this field overlap the
+	  ;; current extent of the selected resource, and the request
+	  ;; did not include an If-Range request-header field.
+	  (setq success t))
+	 (417
+	  ;; The expectation given in an Expect request-header field
+	  ;; could not be met by this server, or, if the server is a
+	  ;; proxy, the server has unambiguous evidence that the
+	  ;; request could not be met by the next-hop server.
+	  (setq success t))
+	 (otherwise
+	  ;; The request could not be understood by the server due to
+	  ;; malformed syntax.  The client SHOULD NOT repeat the
+	  ;; request without modifications.
+	  (setq success t))))
+      (5
+       ;; 500 Internal server error
+       ;; 501 Not implemented
+       ;; 502 Bad gateway
+       ;; 503 Service unavailable
+       ;; 504 Gateway time-out
+       ;; 505 HTTP version not supported
+       ;; 507 Insufficient storage
+       (setq success t)
+       (case url-http-response-status
+	 (501
+	  ;; The server does not support the functionality required to
+	  ;; fulfill the request.
+	  nil)
+	 (502
+	  ;; The server, while acting as a gateway or proxy, received
+	  ;; an invalid response from the upstream server it accessed
+	  ;; in attempting to fulfill the request.
+	  nil)
+	 (503
+	  ;; The server is currently unable to handle the request due
+	  ;; to a temporary overloading or maintenance of the server.
+	  ;; The implication is that this is a temporary condition
+	  ;; which will be alleviated after some delay.  If known, the
+	  ;; length of the delay MAY be indicated in a Retry-After
+	  ;; header.  If no Retry-After is given, the client SHOULD
+	  ;; handle the response as it would for a 500 response.
+	  nil)
+	 (504
+	  ;; The server, while acting as a gateway or proxy, did not
+	  ;; receive a timely response from the upstream server
+	  ;; specified by the URI (e.g. HTTP, FTP, LDAP) or some other
+	  ;; auxiliary server (e.g. DNS) it needed to access in
+	  ;; attempting to complete the request.
+	  nil)
+	 (505
+	  ;; The server does not support, or refuses to support, the
+	  ;; HTTP protocol version that was used in the request
+	  ;; message.
+	  nil)
+	 (507				; DAV
+	  ;; The method could not be performed on the resource
+	  ;; because the server is unable to store the representation
+	  ;; needed to successfully complete the request.  This
+	  ;; condition is considered to be temporary.  If the request
+	  ;; which received this status code was the result of a user
+	  ;; action, the request MUST NOT be repeated until it is
+	  ;; requested by a separate user action.
+	  nil)))
+      (otherwise
+       (error "Unknown class of HTTP response code: %d (%d)"
+	      class url-http-response-status)))
+    (if (not success)
+	(url-mark-buffer-as-dead (current-buffer)))
+    (url-http-debug "Finished parsing HTTP headers: %S" success)
+    (widen)
+    success))
+
+;; Miscellaneous
+(defun url-http-activate-callback ()
+  "Activate callback specified when this buffer was created."
+  (declare (special url-http-process
+		    url-callback-function
+		    url-callback-arguments))
+  (url-http-mark-connection-as-free (url-host url-current-object)
+				    (url-port url-current-object)
+				    url-http-process)
+  (url-http-debug "Activating callback in buffer (%s)" (buffer-name))
+  (apply url-callback-function url-callback-arguments))
+
+;; )
+
+;; These unfortunately cannot be macros... please ignore them!
+(defun url-http-idle-sentinel (proc why)
+  "Remove this (now defunct) process PROC from the list of open connections."
+  (maphash (lambda (key val)
+		(if (memq proc val)
+		    (puthash key (delq proc val) url-http-open-connections)))
+	      url-http-open-connections))
+
+(defun url-http-end-of-document-sentinel (proc why)
+  ;; Sentinel used for old HTTP/0.9 or connections we know are going
+  ;; to die as the 'end of document' notifier.
+  (url-http-debug "url-http-end-of-document-sentinel in buffer (%s)"
+		  (process-buffer proc))
+  (url-http-idle-sentinel proc why)
+  (save-excursion
+    (set-buffer (process-buffer proc))
+    (goto-char (point-min))
+    (if (not (looking-at "HTTP/"))
+	;; HTTP/0.9 just gets passed back no matter what
+	(url-http-activate-callback)
+      (if (url-http-parse-headers)
+	  (url-http-activate-callback)))))
+
+(defun url-http-simple-after-change-function (st nd length)
+  ;; Function used when we do NOT know how long the document is going to be
+  ;; Just _very_ simple 'downloaded %d' type of info.
+  (declare (special url-http-end-of-headers))
+  (url-lazy-message "Reading %s..." (url-pretty-length nd)))
+
+(defun url-http-content-length-after-change-function (st nd length)
+  "Function used when we DO know how long the document is going to be.
+More sophisticated percentage downloaded, etc.
+Also does minimal parsing of HTTP headers and will actually cause
+the callback to be triggered."
+  (declare (special url-current-object
+		    url-http-end-of-headers
+		    url-http-content-length
+		    url-http-content-type
+		    url-http-process))
+  (if url-http-content-type
+      (url-display-percentage
+       "Reading [%s]... %s of %s (%d%%)"
+       (url-percentage (- nd url-http-end-of-headers)
+		       url-http-content-length)
+       url-http-content-type
+       (url-pretty-length (- nd url-http-end-of-headers))
+       (url-pretty-length url-http-content-length)
+       (url-percentage (- nd url-http-end-of-headers)
+		       url-http-content-length))
+    (url-display-percentage
+     "Reading... %s of %s (%d%%)"
+     (url-percentage (- nd url-http-end-of-headers)
+		     url-http-content-length)
+     (url-pretty-length (- nd url-http-end-of-headers))
+     (url-pretty-length url-http-content-length)
+     (url-percentage (- nd url-http-end-of-headers)
+		     url-http-content-length)))
+
+  (if (> (- nd url-http-end-of-headers) url-http-content-length)
+      (progn
+	;; Found the end of the document!  Wheee!
+	(url-display-percentage nil nil)
+	(message "Reading... done.")
+	(if (url-http-parse-headers)
+	    (url-http-activate-callback)))))
+
+(defun url-http-chunked-encoding-after-change-function (st nd length)
+  "Function used when dealing with 'chunked' encoding.
+Cannot give a sophisticated percentage, but we need a different
+function to look for the special 0-length chunk that signifies
+the end of the document."
+  (declare (special url-current-object
+		    url-http-end-of-headers
+		    url-http-content-type
+		    url-http-chunked-length
+		    url-http-chunked-counter
+		    url-http-process url-http-chunked-start))
+  (save-excursion
+    (goto-char st)
+    (let ((read-next-chunk t)
+	  (case-fold-search t)
+	  (regexp nil)
+	  (no-initial-crlf nil))
+      ;; We need to loop thru looking for more chunks even within
+      ;; one after-change-function call.
+      (while read-next-chunk
+	(setq no-initial-crlf (= 0 url-http-chunked-counter))
+	(if url-http-content-type
+	    (url-display-percentage nil
+	     "Reading [%s]... chunk #%d"
+	     url-http-content-type url-http-chunked-counter)
+	  (url-display-percentage nil
+	   "Reading... chunk #%d"
+	   url-http-chunked-counter))
+	(url-http-debug "Reading chunk %d (%d %d %d)"
+			url-http-chunked-counter st nd length)
+	(setq regexp (if no-initial-crlf
+			 "\\([0-9a-z]+\\).*\r?\n"
+		       "\r?\n\\([0-9a-z]+\\).*\r?\n"))
+
+	(if url-http-chunked-start
+	    ;; We know how long the chunk is supposed to be, skip over
+	    ;; leading crap if possible.
+	    (if (> nd (+ url-http-chunked-start url-http-chunked-length))
+		(progn
+		  (url-http-debug "Got to the end of chunk #%d!"
+				  url-http-chunked-counter)
+		  (goto-char (+ url-http-chunked-start
+				url-http-chunked-length)))
+	      (url-http-debug "Still need %d bytes to hit end of chunk"
+			      (- (+ url-http-chunked-start
+				    url-http-chunked-length)
+				 nd))
+	      (setq read-next-chunk nil)))
+	(if (not read-next-chunk)
+	    (url-http-debug "Still spinning for next chunk...")
+	  (if no-initial-crlf (skip-chars-forward "\r\n"))
+	  (if (not (looking-at regexp))
+	      (progn
+		;; Must not have received the entirety of the chunk header;
+		;; need to spin some more.
+		(url-http-debug "Did not see start of chunk @ %d!" (point))
+		(setq read-next-chunk nil))
+ 	    (add-text-properties (match-beginning 0) (match-end 0)
+				 (list 'start-open t
+				       'end-open t
+				       'chunked-encoding t
+				       'face (if (featurep 'xemacs)
+						 'text-cursor
+					       'cursor)
+				       'invisible t))
+	    (setq url-http-chunked-length (string-to-int (buffer-substring
+							  (match-beginning 1)
+							  (match-end 1))
+							 16)
+		  url-http-chunked-counter (1+ url-http-chunked-counter)
+		  url-http-chunked-start (set-marker
+					  (or url-http-chunked-start
+					      (make-marker))
+					  (match-end 0)))
+;	    (if (not url-http-debug)
+		(delete-region (match-beginning 0) (match-end 0));)
+	    (url-http-debug "Saw start of chunk %d (length=%d, start=%d)"
+			    url-http-chunked-counter url-http-chunked-length
+			    (marker-position url-http-chunked-start))
+	    (if (= 0 url-http-chunked-length)
+		(progn
+		  ;; Found the end of the document!  Wheee!
+		  (url-http-debug "Saw end of stream chunk!")
+		  (setq read-next-chunk nil)
+		  (url-display-percentage nil nil)
+		  (goto-char (match-end 1))
+		  (if (re-search-forward "^\r*$" nil t)
+		      (message "Saw end of trailers..."))
+		  (if (url-http-parse-headers)
+		      (url-http-activate-callback))))))))))
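+
+;; For reference (added commentary), a chunked body on the wire looks like:
+;;
+;;   7\r\n
+;;   Mozilla\r\n
+;;   0\r\n
+;;   \r\n
+;;
+;; a hexadecimal length, CRLF, that many bytes of data, CRLF, repeated
+;; until a zero-length chunk and optional trailers.  The function above
+;; scans for those length lines and deletes them from the buffer.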
+
+(defun url-http-wait-for-headers-change-function (st nd length)
+  ;; This will wait for the headers to arrive and then splice in the
+  ;; next appropriate after-change-function, etc.
+  (declare (special url-current-object
+		    url-http-end-of-headers
+		    url-http-content-type
+		    url-http-content-length
+		    url-http-transfer-encoding
+		    url-callback-function
+		    url-callback-arguments
+		    url-http-process
+		    url-http-method
+		    url-http-after-change-function
+		    url-http-response-status))
+  (url-http-debug "url-http-wait-for-headers-change-function (%s)"
+		  (buffer-name))
+  (if (not (bobp))
+      (let ((end-of-headers nil)
+	    (old-http nil)
+	    (content-length nil))
+	(goto-char (point-min))
+	(if (not (looking-at "^HTTP/[1-9]\\.[0-9]"))
+	    ;; Not HTTP/x.y data, must be 0.9
+	    ;; God, I wish this could die.
+	    (setq end-of-headers t
+		  url-http-end-of-headers 0
+		  old-http t)
+	  (if (re-search-forward "^\r*$" nil t)
+	      ;; Saw the end of the headers
+	      (progn
+		(url-http-debug "Saw end of headers... (%s)" (buffer-name))
+		(setq url-http-end-of-headers (set-marker (make-marker)
+							  (point))
+		      end-of-headers t)
+		(url-http-clean-headers))))
+
+	(if (not end-of-headers)
+	    ;; Haven't seen the end of the headers yet, need to wait
+	    ;; for more data to arrive.
+	    nil
+	  (if old-http
+	      (message "HTTP/0.9 How I hate thee!")
+	    (progn
+	      (url-http-parse-response)
+	      (mail-narrow-to-head)
+	      ;;(narrow-to-region (point-min) url-http-end-of-headers)
+	      (setq url-http-transfer-encoding (mail-fetch-field
+						"transfer-encoding")
+		    url-http-content-type (mail-fetch-field "content-type"))
+	      (if (mail-fetch-field "content-length")
+		  (setq url-http-content-length
+			(string-to-int (mail-fetch-field "content-length"))))
+	      (widen)))
+	  (if url-http-transfer-encoding
+	      (setq url-http-transfer-encoding
+		    (downcase url-http-transfer-encoding)))
+
+	  (cond
+	   ((or (= url-http-response-status 204)
+		(= url-http-response-status 205))
+	    (url-http-debug "%d response must have headers only (%s)."
+			    url-http-response-status (buffer-name))
+	    (if (url-http-parse-headers)
+		(url-http-activate-callback)))
+	   ((string= "HEAD" url-http-method)
+	    ;; A HEAD request is _ALWAYS_ terminated by the header
+	    ;; information, regardless of any entity headers,
+	    ;; according to section 4.4 of the HTTP/1.1 draft.
+	    (url-http-debug "HEAD request must have headers only (%s)."
+			    (buffer-name))
+	    (if (url-http-parse-headers)
+		(url-http-activate-callback)))
+	   ((string= "CONNECT" url-http-method)
+	    ;; A CONNECT request is finished, but we cannot stick this
+	    ;; back on the free connection list.
+	    (url-http-debug "CONNECT request must have headers only.")
+	    (if (url-http-parse-headers)
+		(url-http-activate-callback)))
+	   ((equal url-http-response-status 304)
+	    ;; Only allowed to have a header section.  We have to handle
+	    ;; this here instead of in url-http-parse-headers because if
+	    ;; you have a cached copy of something without a known
+	    ;; content-length, and try to retrieve it from the cache, we'd
+	    ;; fall into the 'being dumb' section and wait for the
+	    ;; connection to terminate, which means we'd wait for 10
+	    ;; seconds for the keep-alives to time out on some servers.
+	    (if (url-http-parse-headers)
+		(url-http-activate-callback)))
+	   (old-http
+	    ;; HTTP/0.9 always signaled end-of-connection by closing the
+	    ;; connection.
+	    (url-http-debug
+	     "Saw HTTP/0.9 response, connection closed means end of document.")
+	    (setq url-http-after-change-function
+		  'url-http-simple-after-change-function))
+	   ((equal url-http-transfer-encoding "chunked")
+	    (url-http-debug "Saw chunked encoding.")
+	    (setq url-http-after-change-function
+		  'url-http-chunked-encoding-after-change-function)
+	    (if (> nd url-http-end-of-headers)
+		(progn
+		  (url-http-debug
+		   "Calling initial chunked-encoding for extra data at end of headers")
+		  (url-http-chunked-encoding-after-change-function
+		   (marker-position url-http-end-of-headers) nd
+		   (- nd url-http-end-of-headers)))))
+	   ((integerp url-http-content-length)
+	    (url-http-debug
+	     "Got a content-length, being smart about document end.")
+	    (setq url-http-after-change-function
+		  'url-http-content-length-after-change-function)
+	    (cond
+	     ((= 0 url-http-content-length)
+	      ;; We got a NULL body!  Activate the callback
+	      ;; immediately!
+	      (url-http-debug
+	       "Got 0-length content-length, activating callback immediately.")
+	      (if (url-http-parse-headers)
+		  (url-http-activate-callback)))
+	     ((> nd url-http-end-of-headers)
+	      ;; Have some leftover data
+	      (url-http-debug "Calling initial content-length for extra data at end of headers")
+	      (url-http-content-length-after-change-function
+	       (marker-position url-http-end-of-headers)
+	       nd
+	       (- nd url-http-end-of-headers)))
+	     (t
+	      nil)))
+	   (t
+	    (url-http-debug "No content-length, being dumb.")
+	    (setq url-http-after-change-function
+		  'url-http-simple-after-change-function)))))
+    ;; We are still at the beginning of the buffer... must just be
+    ;; waiting for a response.
+    (url-http-debug "Spinning waiting for headers..."))
+  (goto-char (point-max)))
+
+;;;###autoload
+(defun url-http (url callback cbargs)
+  "Retrieve URL via HTTP asynchronously.
+URL must be a parsed URL.  See `url-generic-parse-url' for details.
+When retrieval is completed, the function CALLBACK is executed with
+CBARGS as the arguments."
+  (check-type url vector "Need a pre-parsed URL.")
+  (declare (special url-current-object
+		    url-http-end-of-headers
+		    url-http-content-type
+		    url-http-content-length
+		    url-http-transfer-encoding
+		    url-http-after-change-function
+		    url-callback-function
+		    url-callback-arguments
+		    url-http-method
+		    url-http-extra-headers
+		    url-http-data
+		    url-http-chunked-length
+		    url-http-chunked-start
+		    url-http-chunked-counter
+		    url-http-process))
+  (let ((connection (url-http-find-free-connection (url-host url)
+						   (url-port url)))
+	(buffer (generate-new-buffer (format " *http %s:%d*"
+					     (url-host url)
+					     (url-port url)))))
+    (if (not connection)
+	;; Failed to open the connection for some reason
+	(progn
+	  (kill-buffer buffer)
+	  (setq buffer nil)
+	  (error "Could not create connection to %s:%d" (url-host url)
+		 (url-port url)))
+      (save-excursion
+	(set-buffer buffer)
+	(mm-disable-multibyte)
+	(setq url-current-object url
+	      mode-line-format "%b [%s]")
+
+	(dolist (var '(url-http-end-of-headers
+		       url-http-content-type
+		       url-http-content-length
+		       url-http-transfer-encoding
+		       url-http-after-change-function
+		       url-http-response-status
+		       url-http-chunked-length
+		       url-http-chunked-counter
+		       url-http-chunked-start
+		       url-callback-function
+		       url-callback-arguments
+		       url-http-process
+		       url-http-method
+		       url-http-extra-headers
+		       url-http-data))
+	  (set (make-local-variable var) nil))
+
+	(setq url-http-method (or url-request-method "GET")
+	      url-http-extra-headers url-request-extra-headers
+	      url-http-data url-request-data
+	      url-http-process connection
+	      url-http-chunked-length nil
+	      url-http-chunked-start nil
+	      url-http-chunked-counter 0
+	      url-callback-function callback
+	      url-callback-arguments cbargs
+	      url-http-after-change-function 'url-http-wait-for-headers-change-function)
+
+	(set-process-buffer connection buffer)
+	(set-process-sentinel connection 'url-http-end-of-document-sentinel)
+	(set-process-filter connection 'url-http-generic-filter)
+	(process-send-string connection (url-http-create-request url))))
+    buffer))
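+
+;; Illustrative use (added commentary; the URL and callback are invented):
+;;   (url-http (url-generic-parse-url "http://example.org/")
+;;             (lambda (&rest _ignore)
+;;               (message "Retrieved %d bytes" (buffer-size)))
+;;             nil)
+;; Most callers reach this function indirectly through `url-retrieve',
+;; which dispatches on the URL scheme.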
+
+;; Since Emacs 19/20 does not allow you to change the
+;; `after-change-functions' hook in the midst of running them, we fake
+;; an after change by hooking into the process filter and inserting
+;; the data ourselves.  This is slightly less efficient, but there
+;; were tons of weird ways the after-change code was biting us in the
+;; shorts.
+(defun url-http-generic-filter (proc data)
+  ;; Sometimes we get a zero-length data chunk after the process has
+  ;; been changed to 'free', which means it has no buffer associated
+  ;; with it.  Do nothing if there is no buffer, or 0 length data.
+  (declare (special url-http-after-change-function))
+  (and (process-buffer proc)
+       (/= (length data) 0)
+       (save-excursion
+	 (set-buffer (process-buffer proc))
+	 (url-http-debug "Calling after change function `%s' for `%S'" url-http-after-change-function proc)
+	 (funcall url-http-after-change-function
+		  (point-max)
+		  (progn
+		    (goto-char (point-max))
+		    (insert data)
+		    (point-max))
+		  (length data)))))
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; file-name-handler stuff from here on out
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+(if (not (fboundp 'symbol-value-in-buffer))
+    (defun url-http-symbol-value-in-buffer (symbol buffer
+						   &optional unbound-value)
+      "Return the value of SYMBOL in BUFFER, or UNBOUND-VALUE if it is unbound."
+      (save-excursion
+	(set-buffer buffer)
+	(if (not (boundp symbol))
+	    unbound-value
+	  (symbol-value symbol))))
+  (defalias 'url-http-symbol-value-in-buffer 'symbol-value-in-buffer))
+
+(defun url-http-head (url)
+  (let ((url-request-method "HEAD")
+	(url-request-data nil))
+    (url-retrieve-synchronously url)))
+
+;;;###autoload
+(defun url-http-file-exists-p (url)
+  (let ((version nil)
+	(status nil)
+	(exists nil)
+	(buffer (url-http-head url)))
+    (if (not buffer)
+	(setq exists nil)
+      (setq status (url-http-symbol-value-in-buffer 'url-http-response-status
+						    buffer 500)
+	    exists (and (>= status 200) (< status 300)))
+      (kill-buffer buffer))
+    exists))
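+
+;; Illustrative use (URL invented): a HEAD request is issued and any 2xx
+;; response counts as existence:
+;;   (url-http-file-exists-p "http://example.org/index.html") ;=> t or nil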
+
+;;;###autoload
+(defalias 'url-http-file-readable-p 'url-http-file-exists-p)
+
+(defun url-http-head-file-attributes (url)
+  (let ((buffer (url-http-head url))
+	(attributes nil))
+    (when buffer
+      (setq attributes (make-list 11 nil))
+      (setf (nth 1 attributes) 1)	; Number of links to file
+      (setf (nth 2 attributes) 0)	; file uid
+      (setf (nth 3 attributes) 0)	; file gid
+      (setf (nth 7 attributes)		; file size
+	    (url-http-symbol-value-in-buffer 'url-http-content-length
+					     buffer -1))
+      (setf (nth 8 attributes) (eval-when-compile (make-string 10 ?-)))
+      (kill-buffer buffer))
+    attributes))
+
+;;;###autoload
+(defun url-http-file-attributes (url)
+  (if (url-dav-supported-p url)
+      (url-dav-file-attributes url)
+    (url-http-head-file-attributes url)))
+
+;;;###autoload
+(defun url-http-options (url)
+  "Returns a property list describing options available for URL.
+This list is retrieved using the `OPTIONS' HTTP method.
+
+Property list members:
+
+methods
+  A list of symbols specifying what HTTP methods the resource
+  supports.
+
+dav
+  A list of numbers specifying what DAV protocol/schema versions are
+  supported.
+
+dasl
+  A list of supported DASL search types (string form).
+
+ranges
+  A list of the units available for use in partial document fetches.
+
+p3p
+  The `Platform for Privacy Preferences' (P3P) description for the resource.
+  Currently this is just the raw header contents.  This is likely to
+  change once P3P is formally supported by the URL package or
+  Emacs/W3.
+"
+  (let* ((url-request-method "OPTIONS")
+	 (url-request-data nil)
+	 (buffer (url-retrieve-synchronously url))
+	 (header nil)
+	 (options nil))
+    (when (and buffer (= 2 (/ (url-http-symbol-value-in-buffer
+			       'url-http-response-status buffer 0) 100)))
+      ;; Only parse the options if we got a 2xx response code!
+      (save-excursion
+	(save-restriction
+	  (save-match-data
+	    (set-buffer buffer)
+	    (mail-narrow-to-head)
+
+	    ;; Figure out what methods are supported.
+	    (when (setq header (mail-fetch-field "allow"))
+	      (setq options (plist-put
+			     options 'methods
+			     (mapcar 'intern (split-string header "[ ,]+")))))
+
+	    ;; Check for DAV
+	    (when (setq header (mail-fetch-field "dav"))
+	      (setq options (plist-put
+			     options 'dav
+			     (delq 0
+				   (mapcar 'string-to-number
+					   (split-string header "[, ]+"))))))
+
+	    ;; Now for DASL
+	    (when (setq header (mail-fetch-field "dasl"))
+	      (setq options (plist-put
+			     options 'dasl
+			     (split-string header "[, ]+"))))
+
+	    ;; P3P - should get more detailed here.  FIXME
+	    (when (setq header (mail-fetch-field "p3p"))
+	      (setq options (plist-put options 'p3p header)))
+
+	    ;; Check for whether they accept byte-range requests.
+	    (when (setq header (mail-fetch-field "accept-ranges"))
+	      (setq options (plist-put
+			     options 'ranges
+			     (delq 'none
+				   (mapcar 'intern
+					   (split-string header "[, ]+"))))))
+	    ))))
+    (if buffer (kill-buffer buffer))
+    options))
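+
+;; Illustrative usage (not part of the original code; the URL is a
+;; placeholder and the result depends entirely on the server):
+;;
+;;   (plist-get (url-http-options "http://www.example.com/") 'methods)
+;;   ;; => e.g. (GET HEAD POST OPTIONS), taken from the Allow header of
+;;   ;;    a 2xx response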
+
+(provide 'url-http)
+
+;;; url-http.el ends here
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-https.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,53 @@
+;;; url-https.el --- HTTP over SSL routines
+;; Author: $Author: wmperry $
+;; Created: $Date: 2001/11/22 14:32:13 $
+;; Version: $Revision: 1.3 $
+;; Keywords: comm, data, processes
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(require 'url-gw)
+(require 'url-util)
+(require 'url-parse)
+(require 'url-cookie)
+(require 'url-http)
+
+(defconst url-https-default-port 443 "Default HTTPS port.")
+(defconst url-https-asynchronous-p t "HTTPS retrievals are asynchronous.")
+(defalias 'url-https-expand-file-name 'url-http-expand-file-name)
+
+(defmacro url-https-create-secure-wrapper (method args)
+  (` (defun (, (intern (format (if method "url-https-%s" "url-https") method))) (, args)
+       (, (format "HTTPS wrapper around `%s' call." (or method "url-http")))
+       (condition-case ()
+	   (require 'ssl)
+	 (error
+	  (error "HTTPS support could not find `ssl' library.")))
+       (let ((url-gateway-method 'ssl))
+	 ((, (intern (format (if method "url-http-%s" "url-http") method))) (,@ (remove '&rest (remove '&optional args))))))))
+
+(url-https-create-secure-wrapper nil (url callback cbargs))
+(url-https-create-secure-wrapper file-exists-p (url))
+(url-https-create-secure-wrapper file-readable-p (url))
+(url-https-create-secure-wrapper file-attributes (url))
+
+(provide 'url-https)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-imap.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,81 @@
+;;; url-imap.el --- IMAP retrieval routines
+;; Author: Simon Josefsson <jas@pdc.kth.se>
+;; Created: $Date: 2002/01/22 17:52:16 $
+;; Version: $Revision: 1.4 $
+;; Keywords: comm, data, processes
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+; Anyway, here's a teaser. It's quite broken in lots of regards, but at
+; least it seems to work. At least a little. At least when called
+; manually like this (I've no idea how it's supposed to be called):
+
+; (url-imap (url-generic-parse-url "imap://cyrus.andrew.cmu.edu/archive.c-client;UID=1021"))
+
+(eval-when-compile (require 'cl))
+(require 'url-util)
+(require 'url-parse)
+(require 'nnimap)
+(require 'mm-util)
+
+(defconst url-imap-default-port 143 "Default IMAP port")
+
+(defun url-imap-open-host (host port user pass)
+  ;; xxx use user and password
+  (if (fboundp 'nnheader-init-server-buffer)
+      (nnheader-init-server-buffer))
+  (let ((imap-username user)
+	(imap-password pass)
+	(authenticator (if user 'login 'anonymous)))
+    (if (stringp port)
+	(setq port (string-to-int port)))
+    (nnimap-open-server host
+			`((nnimap-server-port ,port)
+			  (nnimap-stream 'network)
+			  (nnimap-authenticator ,authenticator)))))
+
+(defun url-imap (url)
+  (check-type url vector "Need a pre-parsed URL.")
+  (save-excursion
+    (set-buffer (generate-new-buffer " *url-imap*"))
+    (mm-disable-multibyte)
+    (let* ((host (url-host url))
+	   (port (url-port url))
+	   ;; xxx decode mailbox (see rfc2192)
+	   (mailbox (url-filename url))
+	   (coding-system-for-read 'binary))
+      (and (eq (string-to-char mailbox) ?/)
+	   (setq mailbox (substring mailbox 1)))
+      (url-imap-open-host host port (url-user url) (url-password url))
+      (cond ((assoc "TYPE" (url-attributes url))
+	     ;; xxx list mailboxes (start gnus?)
+	     )
+	    ((assoc "UID" (url-attributes url))
+	     ;; fetch message part
+	     ;; xxx handle partial fetches
+	     (insert "Content-type: message/rfc822\n\n")
+	     (nnimap-request-article (cdr (assoc "UID" (url-attributes url))) 
+				     mailbox host (current-buffer)))
+	    (t
+	     ;; xxx list messages in mailbox (start gnus?)
+	     )))
+    (current-buffer)))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-irc.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,78 @@
+;;; url-irc.el --- IRC URL interface
+;; Author: $Author: wmperry $
+;; Created: $Date: 1999/12/24 12:13:33 $
+;; Version: $Revision: 1.2 $
+;; Keywords: comm, data, processes
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1996 by William M. Perry <wmperry@cs.indiana.edu>
+;;; Copyright (c) 1996 - 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+;;; IRC URLs are defined in http://www.w3.org/Addressing/draft-mirashi-url-irc-01.txt
+
+(require 'url-vars)
+(require 'url-parse)
+
+(defconst url-irc-default-port 6667 "Default port for IRC connections")
+
+(defcustom url-irc-function 'url-irc-zenirc
+  "*Function to actually open an IRC connection.
+Should be a function that takes several arguments:
+    HOST - the hostname of the IRC server to contact
+    PORT - the port number of the IRC server to contact
+ CHANNEL - What channel on the server to visit right away (can be nil)
+    USER - What username to use
+PASSWORD - What password to use"
+  :type '(choice (const :tag "ZEN IRC" :value 'url-irc-zenirc)
+		 (function :tag "Other"))
+  :group 'url)
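+;; A minimal sketch of an alternative handler that obeys the calling
+;; convention documented above (purely illustrative; `my-url-irc-echo'
+;; is a made-up name, not part of the original code):
+;;
+;;   (defun my-url-irc-echo (host port channel user password)
+;;     "Report the requested IRC connection instead of opening one."
+;;     (message "IRC request: %s@%s:%d channel=%s"
+;;              (or user "anonymous") host port (or channel "<none>")))
+;;
+;;   (setq url-irc-function 'my-url-irc-echo)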
+
+(defun url-irc-zenirc (host port channel user password)
+  (let ((zenirc-buffer-name (if (and user host port)
+				(format "%s@%s:%d" user host port)
+			      (format "%s:%d" host port)))
+	(zenirc-server-alist
+	 (list
+	  (list host port password nil user))))
+    (zenirc)
+    (goto-char (point-max))
+    (if (not channel)
+	nil
+      (insert "/join " channel)
+      (zenirc-send-line))))
+
+;;;###autoload
+(defun url-irc (url)
+  (let* ((host (url-host url))
+	 (port (url-port url))
+	 (pass (url-password url))
+	 (user (url-user url))
+	 (chan (url-filename url)))
+    (if (url-target url)
+	(setq chan (concat chan "#" (url-target url))))
+    (if (string-match "^/" chan)
+	(setq chan (substring chan 1 nil)))
+    (if (= (length chan) 0)
+	(setq chan nil))
+    (funcall url-irc-function host port chan user pass)
+    nil))
+    
+(provide 'url-irc)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-ldap.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,233 @@
+;;; url-ldap.el --- LDAP Uniform Resource Locator retrieval code
+;; Author: $Author: wmperry $
+;; Created: $Date: 1999/11/26 12:11:50 $
+;; Version: $Revision: 1.1.1.1 $
+;; Keywords: comm, data, processes
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1998 - 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(require 'url-vars)
+(require 'url-parse)
+(require 'url-util)
+
+;; This has been implemented from RFC2255 'The LDAP URL Format' (Dec 1997)
+;;
+;; basic format is: ldap://host:port/dn?attributes?scope?filter?extensions
+;;
+;; Test URLs:
+;; ldap://ldap.itd.umich.edu/cn%3Dumbflabmanager%2C%20ou%3DUser%20Groups%2C%20ou%3DGroups%2C%20o%3DUniversity%20of%20Michigan%2C%20c%3DUS
+;; ldap://ldap.itd.umich.edu/o=University%20of%20Michigan,c=US
+;;
+;; For simple queries, I have verified compatibility with Netscape
+;; Communicator v4.5 under linux.
+;;
+;; For anything _useful_ though, like specifying the attributes,
+;; scope, filter, or extensions, netscape claims the URL format is
+;; unrecognized.  So I don't think it supports anything other than the
+;; defaults (scope=base, attributes=*, filter=(objectClass=*)).
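+;;
+;; For illustration (not part of the original code), the second test URL
+;; above parses like this with `url-generic-parse-url':
+;;
+;;   (url-host (url-generic-parse-url
+;;              "ldap://ldap.itd.umich.edu/o=University%20of%20Michigan,c=US"))
+;;   ;; => "ldap.itd.umich.edu"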
+
+(defconst url-ldap-default-port 389 "Default LDAP port.")
+(defalias 'url-ldap-expand-file-name 'url-default-expander)
+
+(defvar url-ldap-pretty-names
+  '(("l"           . "City")
+    ("objectclass" . "Object Class")
+    ("o"           . "Organization")
+    ("ou"          . "Organizational Unit")
+    ("cn"          . "Name")
+    ("sn"          . "Last Name")
+    ("givenname"   . "First Name")
+    ("mail"        . "Email")
+    ("title"       . "Title")
+    ("c"           . "Country")
+    ("postalcode"  . "ZIP Code")
+    ("telephonenumber"          . "Phone Number")
+    ("facsimiletelephonenumber" . "Fax")
+    ("postaladdress"            . "Mailing Address")
+    ("description"              . "Notes"))
+  "*An assoc list mapping LDAP attribute names to pretty descriptions of them.")
+
+(defvar url-ldap-attribute-formatters
+  '(("mail"       . (lambda (x) (format "<a href='mailto:%s'>%s</a>" x x)))
+    ("owner"      . url-ldap-dn-formatter)
+    ("creatorsname" . url-ldap-dn-formatter)
+    ("jpegphoto"     . url-ldap-image-formatter)
+    ("usercertificate" . url-ldap-certificate-formatter)
+    ("modifiersname" . url-ldap-dn-formatter)
+    ("namingcontexts" . url-ldap-dn-formatter)
+    ("defaultnamingcontext" . url-ldap-dn-formatter)
+    ("member"     . url-ldap-dn-formatter))
+  "*An assoc list mapping LDAP attribute names to pretty formatters for them.")
+
+(defsubst url-ldap-attribute-pretty-name (n)
+  (or (cdr-safe (assoc (downcase n) url-ldap-pretty-names)) n))
+
+(defsubst url-ldap-attribute-pretty-desc (n v)
+  (if (string-match "^\\([^;]+\\);" n)
+      (setq n (match-string 1 n)))
+  (funcall (or (cdr-safe (assoc (downcase n) url-ldap-attribute-formatters)) 'identity) v))
+
+(defun url-ldap-dn-formatter (dn)
+  (concat "<a href='/"
+	  (url-hexify-string dn)
+	  "'>" dn "</a>"))
+
+(defun url-ldap-certificate-formatter (data)
+  (condition-case ()
+      (require 'ssl)
+    (error nil))
+  (let ((vals (and (fboundp 'ssl-certificate-information)
+		   (ssl-certificate-information data))))
+    (if (not vals)
+	"<b>Unable to parse certificate</b>"
+      (concat "<table border=0>\n"
+	      (mapconcat
+	       (lambda (ava)
+		 (format "<tr><td>%s</td><td>%s</td></tr>\n" (car ava) (cdr ava)))
+	       vals "\n")
+	      "</table>\n"))))
+
+(defun url-ldap-image-formatter (data)
+  (format "<img alt='JPEG Photo' src='data:image/jpeg;base64,%s'>" 
+	  (url-hexify-string (base64-encode-string data))))
+
+;;;###autoload
+(defun url-ldap (url)
+  (save-excursion
+    (set-buffer (generate-new-buffer " *url-ldap*"))
+    (setq url-current-object url)
+    (insert "Content-type: text/html\r\n\r\n")
+    (if (not (fboundp 'ldap-search-internal))
+	(insert "<html>\n"
+		" <head>\n"
+		"  <title>LDAP Not Supported</title>\n"
+		"  <base href='" (url-recreate-url url) "'>\n"
+		" </head>\n"
+		" <body>\n"
+		"  <h1>LDAP Not Supported</h1>\n"
+		"  <p>\n"
+		"    This version of Emacs does not support LDAP.\n"
+		"  </p>\n"
+		" </body>\n"
+		"</html>\n")
+      (let* ((binddn nil)
+	     (data (url-filename url))
+	     (host (url-host url))
+	     (port (url-port url))
+	     (base-object nil)
+	     (attributes nil)
+	     (scope nil)
+	     (filter nil)
+	     (extensions nil)
+	     (connection nil)
+	     (results nil)
+	     (extract-dn (and (fboundp 'function-max-args)
+			      (= (function-max-args 'ldap-search-internal) 7))))
+
+	;; Get rid of leading /
+	(if (string-match "^/" data)
+	    (setq data (substring data 1)))
+
+	(setq data (mapcar (lambda (x) (if (/= (length x) 0) x nil)) (split-string data "\\?"))
+	      base-object (nth 0 data)
+	      attributes (nth 1 data)
+	      scope (nth 2 data)
+	      filter (nth 3 data)
+	      extensions (nth 4 data))
+
+	;; fill in the defaults
+	(setq base-object (url-unhex-string (or base-object ""))
+	      scope (intern (url-unhex-string (or scope "base")))
+	      filter (url-unhex-string (or filter "(objectClass=*)")))
+
+	(if (not (memq scope '(base one sub)))
+	    (error "Malformed LDAP URL: Unknown scope: %S" scope))
+
+	;; Convert to the internal LDAP support scoping names.
+	(setq scope (cdr (assq scope '((base . base) (one . onelevel) (sub . subtree)))))
+
+	(if attributes
+	    (setq attributes (mapcar 'url-unhex-string (split-string attributes ","))))
+
+	;; Parse out the extensions
+	(if extensions
+	    (setq extensions (mapcar (lambda (ext)
+				       (if (string-match "\\([^=]*\\)=\\(.*\\)" ext)
+					   (cons (match-string 1 ext) (match-string 2 ext))
+					 (cons ext ext)))
+				     (split-string extensions ","))
+		  extensions (mapcar (lambda (ext)
+				       (cons (url-unhex-string (car ext))
+					     (url-unhex-string (cdr ext))))
+				     extensions)))
+
+	(setq binddn (cdr-safe (or (assoc "bindname" extensions)
+				   (assoc "!bindname" extensions))))
+    
+	;; Now, let's actually do something with it.
+	(setq connection (ldap-open host (if binddn (list 'binddn binddn)))
+	      results (if extract-dn
+			  (ldap-search-internal connection filter base-object scope attributes nil t)
+			(ldap-search-internal connection filter base-object scope attributes nil)))
+		      
+	(ldap-close connection)
+	(insert "<html>\n"
+		" <head>\n"
+		"  <title>LDAP Search Results</title>\n"
+		"  <base href='" (url-recreate-url url) "'>\n"
+		" </head>\n"
+		" <body>\n"
+		"  <h1>" (int-to-string (length results)) " matches</h1>\n")
+
+	(mapc (lambda (obj)
+		(insert "  <hr>\n"
+			"  <table border=1>\n")
+		(if extract-dn
+		    (insert "   <tr><th colspan=2>" (car obj) "</th></tr>\n"))
+		(mapc (lambda (attr)
+			(if (= (length (cdr attr)) 1)
+			    ;; single match, easy
+			    (insert "   <tr><td>"
+				    (url-ldap-attribute-pretty-name (car attr))
+				    "</td><td>"
+				    (url-ldap-attribute-pretty-desc (car attr) (car (cdr attr)))
+				    "</td></tr>\n")
+			  ;; Multiple matches, slightly uglier
+			  (insert "   <tr>\n"
+				  "    <td valign=top>"
+				  (url-ldap-attribute-pretty-name (car attr)) "</td><td>"
+				  (mapconcat (lambda (x)
+					       (url-ldap-attribute-pretty-desc (car attr) x))
+					     (cdr attr)
+					     "<br>\n")
+				  "</td>"
+				  "   </tr>\n")))
+		      (if extract-dn (cdr obj) obj))
+		(insert "  </table>\n"))
+	      results)
+
+	(insert "  <hr>\n"
+		" </body>\n"
+		"</html>\n")))
+    (current-buffer)))
+
+(provide 'url-ldap)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-mailto.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,129 @@
+;;; url-mailto.el --- Mail Uniform Resource Locator retrieval code
+;; Author: $Author: fx $
+;; Created: $Date: 2001/10/05 17:04:06 $
+;; Version: $Revision: 1.4 $
+;; Keywords: comm, data, processes
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1993 - 1996 by William M. Perry <wmperry@cs.indiana.edu>
+;;; Copyright (c) 1996 - 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(eval-when-compile (require 'cl))
+(require 'url-vars)
+(require 'url-parse)
+(require 'url-util)
+
+;;;###autoload
+(defun url-mail (&rest args)
+  (interactive "P")
+  (if (fboundp 'message-mail)
+      (apply 'message-mail args)
+    (or (apply 'mail args)
+	(error "Mail aborted"))))
+
+(defun url-mail-goto-field (field)
+  (if (not field)
+      (goto-char (point-max))
+    (let ((dest nil)
+	  (lim nil)
+	  (case-fold-search t))
+      (save-excursion
+	(goto-char (point-min))
+	(if (re-search-forward (regexp-quote mail-header-separator) nil t)
+	    (setq lim (match-beginning 0)))
+	(goto-char (point-min))
+	(if (re-search-forward (concat "^" (regexp-quote field) ":") lim t)
+	    (setq dest (match-beginning 0))))
+      (if dest
+	  (progn
+	    (goto-char dest)
+	    (end-of-line))
+	(goto-char lim)
+	(insert (capitalize field) ": ")
+	(save-excursion
+	  (insert "\n"))))))
+
+;;;###autoload
+(defun url-mailto (url)
+  "Handle the mailto: URL syntax."
+  (if (url-user url)
+      ;; malformed mailto URL (mailto://wmperry@gnu.org instead of
+      ;; mailto:wmperry@gnu.org)
+      (url-set-filename url (concat (url-user url) "@" (url-filename url))))
+  (setq url (url-filename url))
+  (let (to args source-url subject func headers-start)
+    (if (string-match (regexp-quote "?") url)
+	(setq headers-start (match-end 0)
+	      to (url-unhex-string (substring url 0 (match-beginning 0)))
+	      args (url-parse-query-string
+		    (substring url headers-start nil) t))
+      (setq to (url-unhex-string url)))
+    (setq source-url (url-view-url t))
+    (if (and url-request-data (not (assoc "subject" args)))
+	(setq args (cons (list "subject"
+			       (concat "Automatic submission from "
+				       url-package-name "/"
+				       url-package-version)) args)))
+    (if (and source-url (not (assoc "x-url-from" args)))
+	(setq args (cons (list "x-url-from" source-url) args)))
+
+    (if (assoc "to" args)
+	(push to (cdr (assoc "to" args)))
+      (setq args (cons (list "to" to) args)))
+    (setq subject (cdr-safe (assoc "subject" args)))
+    (if (fboundp url-mail-command) (funcall url-mail-command) (mail))
+    (while args
+      (if (string= (caar args) "body")
+	  (progn
+	    (goto-char (point-max))
+	    (insert (mapconcat 'identity (cdar args) "\n")))
+	(url-mail-goto-field (caar args))
+	(setq func (intern-soft (concat "mail-" (caar args))))
+	(insert (mapconcat 'identity (cdar args) ", ")))
+      (setq args (cdr args)))
+    ;; (url-mail-goto-field "User-Agent")
+;;     (insert url-package-name "/" url-package-version " URL/" url-version)
+    (if (not url-request-data)
+	(progn
+	  (set-buffer-modified-p nil)
+	  (if subject
+	      (url-mail-goto-field nil)
+	    (url-mail-goto-field "subject")))
+      (if url-request-extra-headers
+	  (mapconcat
+	   (lambda (x)
+	     (url-mail-goto-field (car x))
+	     (insert (cdr x)))
+	   url-request-extra-headers ""))
+      (goto-char (point-max))
+      (insert url-request-data)
+      ;; It seems Microsoft-ish to send without warning.
+      ;; Fixme: presumably this should depend on a privacy setting.
+      (if (y-or-n-p "Send this auto-generated mail? ")
+	  (cond ((eq url-mail-command 'compose-mail)
+		 (funcall (get mail-user-agent 'sendfunc) nil))
+		;; otherwise, we can't be sure
+		((fboundp 'message-mail)
+		 (message-send-and-exit))
+		(t (mail-send-and-exit nil)))))
+    nil))
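+
+;; Illustrative call (not part of the original code; the address and
+;; subject are placeholders):
+;;
+;;   (url-mailto (url-generic-parse-url
+;;                "mailto:someone@example.com?subject=Hello"))
+;;   ;; sets up a mail buffer with To: someone@example.com and
+;;   ;; Subject: Hello already filled in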
+
+(provide 'url-mailto)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-methods.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,149 @@
+;;; url-methods.el --- Load URL schemes as needed
+;; Author: $Author: wmperry $
+;; Created: $Date: 2002/11/04 14:40:32 $
+;; Version: $Revision: 1.14 $
+;; Keywords: comm, data, processes, hypermedia
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1993 - 1996 by William M. Perry <wmperry@cs.indiana.edu>
+;;; Copyright (c) 1996 - 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(eval-when-compile
+  (require 'cl))
+
+;; This loads up some of the small, silly URLs that I really don't
+;; want to bother putting in their own separate files.
+(require 'url-auto)
+(require 'url-parse)
+
+(defvar url-scheme-registry (make-hash-table :size 7 :test 'equal))
+
+(defconst url-scheme-methods
+  '((default-port      . variable)
+    (asynchronous-p    . variable)
+    (expand-file-name  . function)
+    (file-exists-p     . function)
+    (file-attributes   . function)
+    (parse-url         . function)
+    (file-symlink-p    . function)
+    (file-writable-p   . function)
+    (file-directory-p  . function)
+    (file-executable-p . function)
+    (directory-files   . function)
+    (file-truename     . function))
+  "Assoc-list of methods that each URL loader can provide.")
+
+(defconst url-scheme-default-properties
+  (list 'name "unknown"
+	'loader 'url-scheme-default-loader
+	'default-port 0
+	'expand-file-name 'url-identity-expander
+	'parse-url 'url-generic-parse-url
+	'asynchronous-p nil
+	'file-directory-p 'ignore
+	'file-truename (lambda (&rest args)
+			 (url-recreate-url (car args)))
+	'file-exists-p 'ignore
+	'file-attributes 'ignore))
+
+(defun url-scheme-default-loader (url &optional callback cbargs)
+  "Signal an error for an unknown URL scheme."
+  (error "Unkown URL scheme: %s" (url-type url)))
+
+(defun url-scheme-register-proxy (scheme)
+  "Automatically find a proxy for SCHEME and put it in `url-proxy-services'."
+  (let* ((env-var (concat scheme "_proxy"))
+	 (env-proxy (or (getenv (upcase env-var))
+			(getenv (downcase env-var))))
+	 (cur-proxy (assoc scheme url-proxy-services))
+	 (urlobj nil))
+
+    ;; Store any proxying information - this will not overwrite an old
+    ;; entry, so that people can still set this information in their
+    ;; .emacs file
+    (cond
+     (cur-proxy nil)			; Keep their old settings
+     ((null env-proxy) nil)		; No proxy setup
+     ;; First check if it's something like hostname:port
+     ((string-match "^\\([^:]+\\):\\([0-9]+\\)$" env-proxy)
+      (setq urlobj (url-generic-parse-url nil)) ; Get a blank object
+      (url-set-type urlobj "http")
+      (url-set-host urlobj (match-string 1 env-proxy))
+      (url-set-port urlobj (string-to-number (match-string 2 env-proxy))))
+     ;; Then check if it's a fully specified URL
+     ((string-match url-nonrelative-link env-proxy)
+      (setq urlobj (url-generic-parse-url env-proxy))
+      (url-set-type urlobj "http")
+      (url-set-target urlobj nil))
+     ;; Finally, fall back on the assumption that it's just a hostname
+     (t
+      (setq urlobj (url-generic-parse-url nil)) ; Get a blank object
+      (url-set-type urlobj "http")
+      (url-set-host urlobj env-proxy)))
+
+     (if (and (not cur-proxy) urlobj)
+	 (progn
+	   (setq url-proxy-services
+		 (cons (cons scheme (format "%s:%d" (url-host urlobj)
+					    (url-port urlobj)))
+		       url-proxy-services))
+	   (message "Using a proxy for %s..." scheme)))))
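+
+;; For illustration (not part of the original code): with the
+;; environment variable http_proxy set to "proxy.example.com:8080"
+;; (a placeholder host), (url-scheme-register-proxy "http") adds
+;; ("http" . "proxy.example.com:8080") to `url-proxy-services'.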
+
+(defun url-scheme-get-property (scheme property)
+  "Get property of a URL SCHEME.
+Will automatically try to load a backend from url-SCHEME.el if
+it has not already been loaded."
+  (setq scheme (downcase scheme))
+  (let ((desc (gethash scheme url-scheme-registry)))
+    (if (not desc)
+	(let* ((stub (concat "url-" scheme))
+	       (loader (intern stub)))
+	  (condition-case ()
+	      (require loader)
+	    (error nil))
+	  (if (fboundp loader)
+	      (progn
+		;; Found the module to handle <scheme> URLs
+		(url-scheme-register-proxy scheme)
+		(setq desc (list 'name scheme
+				 'loader loader))
+		(dolist (cell url-scheme-methods)
+		  (let ((symbol (intern-soft (format "%s-%s" stub (car cell))))
+			(type (cdr cell)))
+		    (if symbol
+			(case type
+			  (function
+			   ;; Store the symbol name of a function
+			   (if (fboundp symbol)
+			       (setq desc (plist-put desc (car cell) symbol))))
+			  (variable
+			   ;; Store the VALUE of a variable
+			   (if (boundp symbol)
+			       (setq desc (plist-put desc (car cell)
+						     (symbol-value symbol)))))
+			  (otherwise
+			   (error "Malformed url-scheme-methods entry: %S"
+				  cell))))))
+		(puthash scheme desc url-scheme-registry)))))
+    (or (plist-get desc property)
+	(plist-get url-scheme-default-properties property))))
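+
+;; For illustration (not part of the original code):
+;;
+;;   (url-scheme-get-property "http" 'default-port)
+;;   ;; => 80, loading url-http.el first if it is not already loaded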
+
+(provide 'url-methods)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-misc.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,119 @@
+;;; url-misc.el --- Misc Uniform Resource Locator retrieval code
+;; Author: $Author: fx $
+;; Created: $Date: 2002/04/22 22:23:59 $
+;; Version: $Revision: 1.5 $
+;; Keywords: comm, data, processes
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1993 - 1996 by William M. Perry <wmperry@cs.indiana.edu>
+;;; Copyright (c) 1996, 97, 98, 99, 2002 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(require 'url-vars)
+(require 'url-parse)
+(autoload 'Info-goto-node "info" "" t)
+(autoload 'man "man" nil t)
+
+;;;###autoload
+(defun url-man (url)
+  "Fetch a Unix manual page URL."
+  (man (url-filename url))
+  nil)
+
+;;;###autoload
+(defun url-info (url)
+  "Fetch a GNU Info URL."
+  ;; Fetch an info node
+  (let* ((fname (url-filename url))
+	 (node (url-unhex-string (or (url-target url) "Top"))))
+    (if (and fname node)
+	(Info-goto-node (concat "(" fname ")" node))
+      (error "Malformed url: %s" (url-recreate-url url)))
+    nil))
+
+(defun url-do-terminal-emulator (type server port user)
+  (terminal-emulator
+   (generate-new-buffer (format "%s%s" (if user (concat user "@") "") server))
+   (case type
+     (rlogin "rlogin")
+     (telnet "telnet")
+     (tn3270 "tn3270")
+     (otherwise
+      (error "Unknown terminal emulator required: %s" type)))
+   (case type
+     (rlogin
+      (if user
+	  (list server "-l" user)
+	(list server)))
+     (telnet
+      (if user (message "Please log in as user: %s" user))
+      (if port
+	  (list server port)
+	(list server)))
+     (tn3270
+      (if user (message "Please log in as user: %s" user))
+      (list server)))))
+
+;;;###autoload
+(defun url-generic-emulator-loader (url)
+  (let* ((type (intern (downcase (url-type url))))
+	 (server (url-host url))
+	 (name (url-user url))
+	 (port (url-port url)))
+    (url-do-terminal-emulator type server port name))
+  nil)
+
+;;;###autoload
+(defalias 'url-rlogin 'url-generic-emulator-loader)
+;;;###autoload
+(defalias 'url-telnet 'url-generic-emulator-loader)
+;;;###autoload
+(defalias 'url-tn3270 'url-generic-emulator-loader)
+
+;; RFC 2397
+;;;###autoload
+(defun url-data (url)
+  "Fetch a data URL (RFC 2397)."
+  (let ((mediatype nil)
+	;; The mediatype may need to be hex-encoded too -- see the RFC.
+	(desc (url-unhex-string (url-filename url)))
+	(encoding "8bit")
+	(data nil))
+    (save-excursion
+      (if (not (string-match "\\([^,]*\\)?," desc))
+	  (error "Malformed data URL: %s" desc)
+	(setq mediatype (match-string 1 desc))
+	(if (and mediatype (string-match ";base64\\'" mediatype))
+	    (setq mediatype (substring mediatype 0 (match-beginning 0))
+		  encoding "base64"))
+	(if (or (null mediatype)
+		(eq ?\; (aref mediatype 0)))
+	  (setq mediatype (concat "text/plain" mediatype)))
+	(setq data (url-unhex-string (substring desc (match-end 0)))))
+      (set-buffer (generate-new-buffer " *url-data*"))
+      (mm-disable-multibyte)
+      (insert (format "Content-Length: %d\n" (length data))
+	      "Content-Type: " mediatype "\n"
+	      "Content-Encoding: " encoding "\n"
+	      "\n")
+      (if data (insert data))
+      (current-buffer))))
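+
+;; Illustrative RFC 2397 example (not part of the original code; the
+;; base64 text decodes to "Hello, World"):
+;;
+;;   (url-data (url-generic-parse-url
+;;              "data:text/plain;base64,SGVsbG8sIFdvcmxk"))
+;;   ;; => a buffer whose headers declare Content-Type: text/plain and
+;;   ;;    Content-Encoding: base64, followed by the still-encoded data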
+
+(provide 'url-misc)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-news.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,135 @@
+;;; url-news.el --- News Uniform Resource Locator retrieval code
+;; Author: $Author: fx $
+;; Created: $Date: 2001/05/22 16:13:00 $
+;; Version: $Revision: 1.3 $
+;; Keywords: comm, data, processes
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1993 - 1996 by William M. Perry <wmperry@cs.indiana.edu>
+;;; Copyright (c) 1996 - 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+(require 'url-vars)
+(require 'url-util)
+(require 'url-parse)
+(require 'nntp)
+(autoload 'url-warn "url")
+(autoload 'gnus-group-read-ephemeral-group "gnus-group")
+(eval-when-compile (require 'cl))
+
+(defgroup url-news nil
+  "News related options"
+  :group 'url)
+
+(defun url-news-open-host (host port user pass)
+  (if (fboundp 'nnheader-init-server-buffer)
+      (nnheader-init-server-buffer))
+  (nntp-open-server host (list (string-to-int port)))
+  (if (and user pass)
+      (progn
+	(nntp-send-command "^.*\r?\n" "AUTHINFO USER" user)
+	(nntp-send-command "^.*\r?\n" "AUTHINFO PASS" pass)
+	(if (not (nntp-server-opened host))
+	    (url-warn 'url (format "NNTP authentication to `%s' as `%s' failed"
+				   host user))))))
+
+(defun url-news-fetch-message-id (host message-id)
+  (let ((buf (generate-new-buffer " *url-news*")))
+    (if (eq ?> (aref message-id (1- (length message-id))))
+	nil
+      (setq message-id (concat "<" message-id ">")))
+    (if (cdr-safe (nntp-request-article message-id nil host buf))
+	;; Successfully retrieved the article
+	nil
+      (save-excursion
+	(set-buffer buf)
+	(insert "Content-type: text/html\n\n"
+		"<html>\n"
+		" <head>\n"
+		"  <title>Error</title>\n"
+		" </head>\n"
+		" <body>\n"
+		"  <div>\n"
+		"   <h1>Error requesting article...</h1>\n"
+		"   <p>\n"
+		"    The status message returned by the NNTP server was:"
+		"<br><hr>\n"
+		"    <xmp>\n"
+		(nntp-status-message)
+		"    </xmp>\n"
+		"   </p>\n"
+		"   <p>\n"
+		"    If you feel this is an error, <a href=\""
+		"mailto:" url-bug-address "\">send me mail</a>\n"
+		"   </p>\n"
+		"  </div>\n"
+		" </body>\n"
+		"</html>\n"
+		"<!-- Automatically generated by URL v" url-version " -->\n"
+		)))
+    buf))
+
+(defun url-news-fetch-newsgroup (newsgroup host)
+  (declare (special gnus-group-buffer))
+  (if (string-match "^/+" newsgroup)
+      (setq newsgroup (substring newsgroup (match-end 0))))
+  (if (string-match "/+$" newsgroup)
+      (setq newsgroup (substring newsgroup 0 (match-beginning 0))))
+
+  ;; This saves us from checking new news if GNUS is already running
+  ;; FIXME - is it relatively safe to use gnus-alive-p here? FIXME
+  (if (or (not (get-buffer gnus-group-buffer))
+	  (save-excursion
+	    (set-buffer gnus-group-buffer)
+	    (not (eq major-mode 'gnus-group-mode))))
+      (gnus))
+  (set-buffer gnus-group-buffer)
+  (goto-char (point-min))
+  (gnus-group-read-ephemeral-group newsgroup
+				   (list 'nntp host
+					 'nntp-open-connection-function
+					 nntp-open-connection-function)
+				   nil
+				   (cons (current-buffer) 'browse)))
+  
+;;;###autoload
+(defun url-news (url)
+  ;; Find a news reference
+  (let* ((host (or (url-host url) url-news-server))
+	 (port (url-port url))
+	 (article-brackets nil)
+	 (buf nil)
+	 (article (url-filename url)))
+    (url-news-open-host host port (url-user url) (url-password url))
+    (setq article (url-unhex-string article))
+    (cond
+     ((string-match "@" article)	; Its a specific article
+      (setq buf (url-news-fetch-message-id host article)))
+     ((string= article "")		; List all newsgroups
+      (gnus))
+     (t					; Whole newsgroup
+      (url-news-fetch-newsgroup article host)))
+    buf))
+
+;;;###autoload
+(defun url-snews (url)
+  (let ((nntp-open-connection-function 'nntp-open-ssl-stream))
+    (url-news url)))
+
+(provide 'url-news)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-nfs.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,97 @@
+;;; url-nfs.el --- NFS URL interface
+;; Author: $Author: fx $
+;; Created: $Date: 2001/05/22 16:10:50 $
+;; Version: $Revision: 1.3 $
+;; Keywords: comm, data, processes
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1996 by William M. Perry <wmperry@cs.indiana.edu>
+;;; Copyright (c) 1996 - 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(eval-when-compile (require 'cl))
+(require 'url-parse)
+(require 'url-file)
+
+(defvar url-nfs-automounter-directory-spec
+  "file:/net/%h%f"
+  "*How to invoke the NFS automounter.  Certain % sequences are recognized.
+
+%h -- the hostname of the NFS server
+%n -- the port # of the NFS server
+%u -- the username to use to authenticate
+%p -- the password to use to authenticate
+%f -- the filename on the remote server
+%% -- a literal %
+
+Each can be used any number of times.")
+
+(defun url-nfs-unescape (format host port user pass file)
+  (save-excursion
+    (set-buffer (get-buffer-create " *nfs-parse*"))
+    (erase-buffer)
+    (insert format)
+    (goto-char (point-min))
+    (while (re-search-forward "%\\(.\\)" nil t)
+       (let ((escape (aref (match-string 1) 0)))
+	 (replace-match "" t t)
+	 (case escape
+	   (?% (insert "%"))
+	   (?h (insert host))
+	   (?n (insert (or port "")))
+	   (?u (insert (or user "")))
+	   (?p (insert (or pass "")))
+	   (?f (insert (or file "/"))))))
+    (buffer-string)))
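+
+;; For illustration (not part of the original code), with the default
+;; spec above and a made-up host and path:
+;;
+;;   (url-nfs-unescape url-nfs-automounter-directory-spec
+;;                     "fileserver" nil nil nil "/pub/emacs")
+;;   ;; => "file:/net/fileserver/pub/emacs"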
+
+(defun url-nfs-build-filename (url)
+  (let* ((host (url-host url))
+	 (port (string-to-int (url-port url)))
+	 (pass (url-password url))
+	 (user (url-user url))
+	 (file (url-filename url)))
+    (url-generic-parse-url
+     (url-nfs-unescape url-nfs-automounter-directory-spec
+		       host port user pass file))))
+
+(defun url-nfs (url callback cbargs)
+  (url-file (url-nfs-build-filename url) callback cbargs))
+
+(defmacro url-nfs-create-wrapper (method args)
+  (` (defun (, (intern (format "url-nfs-%s" method))) (, args)
+       (, (format "NFS URL wrapper around `%s' call." method))
+       (setq url (url-nfs-build-filename url))
+       (and url ((, (intern (format "url-file-%s" method)))
+		 (,@ (remove '&rest (remove '&optional args))))))))
+
+(url-nfs-create-wrapper file-exists-p (url))
+(url-nfs-create-wrapper file-attributes (url))
+(url-nfs-create-wrapper file-symlink-p (url))
+(url-nfs-create-wrapper file-readable-p (url))
+(url-nfs-create-wrapper file-writable-p (url))
+(url-nfs-create-wrapper file-executable-p (url))
+(if (featurep 'xemacs)
+    (progn
+      (url-nfs-create-wrapper directory-files (url &optional full match nosort files-only))
+      (url-nfs-create-wrapper file-truename (url &optional default)))
+  (url-nfs-create-wrapper directory-files (url &optional full match nosort))
+  (url-nfs-create-wrapper file-truename (url &optional counter prev-dirs)))
+
+(provide 'url-nfs)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-ns.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,106 @@
+;;; url-ns.el --- Various netscape-ish functions for proxy definitions
+;; Author: $Author: fx $
+;; Created: $Date: 2000/12/20 21:08:02 $
+;; Version: $Revision: 1.2 $
+;; Keywords: comm, data, processes, hypermedia
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1997 - 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(require 'url-gw)
+
+;;;###autoload
+(defun isPlainHostName (host)
+  (not (string-match "\\." host)))
+
+;;;###autoload
+(defun dnsDomainIs (host dom)
+  (string-match (concat (regexp-quote dom) "$") host))
+
+;;;###autoload
+(defun dnsResolve (host)
+  (url-gateway-nslookup-host host))
+
+;;;###autoload
+(defun isResolvable (host)
+  (if (string-match "^[0-9.]+$" host)
+      t
+    (not (string= host (url-gateway-nslookup-host host)))))
+
+;;;###autoload
+(defun isInNet (ip net mask)
+  (let ((ipc   (split-string ip "\\."))
+	(netc  (split-string net "\\."))
+	(maskc (split-string mask "\\.")))
+    (if (or (/= (length netc) (length ipc))
+	    (/= (length ipc) (length maskc)))
+	nil
+      (setq netc (mapcar 'string-to-int netc)
+	    ipc (mapcar 'string-to-int ipc)
+	    maskc (mapcar 'string-to-int maskc))
+      (and
+       (= (logand (nth 0 netc) (nth 0 maskc))
+	  (logand (nth 0 ipc)  (nth 0 maskc)))
+       (= (logand (nth 1 netc) (nth 1 maskc))
+	  (logand (nth 1 ipc)  (nth 1 maskc)))
+       (= (logand (nth 2 netc) (nth 2 maskc))
+	  (logand (nth 2 ipc)  (nth 2 maskc)))
+       (= (logand (nth 3 netc) (nth 3 maskc))
+	  (logand (nth 3 ipc)  (nth 3 maskc)))))))
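+
+;; For illustration (not part of the original code):
+;;
+;;   (isInNet "192.168.1.17" "192.168.1.0" "255.255.255.0")  ; => t
+;;   (isInNet "10.0.0.5"     "192.168.1.0" "255.255.255.0")  ; => nil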
+
+;; Netscape configuration file parsing
+(defvar url-ns-user-prefs nil
+  "Internal, do not use.")
+
+;;;###autoload
+(defun url-ns-prefs (&optional file)
+  (if (not file)
+      (setq file (expand-file-name "~/.netscape/preferences.js")))
+  (if (not (and (file-exists-p file)
+		(file-readable-p file)))
+      (message "Could not open %s for reading" file)
+    (save-excursion
+      (let ((false nil)
+	    (true t))
+	(setq url-ns-user-prefs (make-hash-table :size 13 :test 'equal))
+	(set-buffer (get-buffer-create " *ns-parse*"))
+	(erase-buffer)
+	(insert-file-contents file)
+	(goto-char (point-min))
+	(while (re-search-forward "^//" nil t)
+	  (replace-match ";;"))
+	(goto-char (point-min))
+	(while (re-search-forward "^user_pref(" nil t)
+	  (replace-match "(url-ns-set-user-pref "))
+	(goto-char (point-min))
+	(while (re-search-forward "\"," nil t)
+	  (replace-match "\""))
+	(goto-char (point-min))
+	(eval-buffer)))))
+
+(defun url-ns-set-user-pref (key val)
+  (puthash key val url-ns-user-prefs))
+
+;;;###autoload
+(defun url-ns-user-pref (key &optional default)
+  (gethash key url-ns-user-prefs default))
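+
+;; For illustration (not part of the original code): a preferences.js
+;; line such as
+;;
+;;   user_pref("network.proxy.http", "proxy.example.com");
+;;
+;; is rewritten by `url-ns-prefs' into
+;;
+;;   (url-ns-set-user-pref "network.proxy.http" "proxy.example.com");
+;;
+;; after which (url-ns-user-pref "network.proxy.http") returns
+;; "proxy.example.com".  The host name here is a placeholder.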
+
+(provide 'url-ns)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-parse.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,207 @@
+;;; url-parse.el --- Uniform Resource Locator parser
+;; Author: $Author: fx $
+;; Created: $Date: 2001/10/01 11:52:06 $
+;; Version: $Revision: 1.4 $
+;; Keywords: comm, data, processes
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1993 - 1996 by William M. Perry <wmperry@cs.indiana.edu>
+;;; Copyright (c) 1996 - 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+(require 'url-auto)
+(require 'url-vars)
+
+(autoload 'url-scheme-get-property "url-methods")
+
+(defmacro url-type (urlobj)
+  `(aref ,urlobj 0))
+
+(defmacro url-user (urlobj)
+  `(aref ,urlobj 1))
+
+(defmacro url-password (urlobj)
+  `(aref ,urlobj 2))
+
+(defmacro url-host (urlobj)
+  `(aref ,urlobj 3))
+
+(defmacro url-port (urlobj)
+  `(or (aref ,urlobj 4)
+      (if (url-fullness ,urlobj)
+	  (url-scheme-get-property (url-type ,urlobj) 'default-port))))
+
+(defmacro url-filename (urlobj)
+  `(aref ,urlobj 5))
+
+(defmacro url-target (urlobj)
+  `(aref ,urlobj 6))
+
+(defmacro url-attributes (urlobj)
+  `(aref ,urlobj 7))
+
+(defmacro url-fullness (urlobj)
+  `(aref ,urlobj 8))
+
+(defmacro url-set-type (urlobj type)
+  `(aset ,urlobj 0 ,type))
+
+(defmacro url-set-user (urlobj user)
+  `(aset ,urlobj 1 ,user))
+
+(defmacro url-set-password (urlobj pass)
+  `(aset ,urlobj 2 ,pass))
+
+(defmacro url-set-host (urlobj host)
+  `(aset ,urlobj 3 ,host))
+
+(defmacro url-set-port (urlobj port)
+  `(aset ,urlobj 4 ,port))
+
+(defmacro url-set-filename (urlobj file)
+  `(aset ,urlobj 5 ,file))
+
+(defmacro url-set-target (urlobj targ)
+  `(aset ,urlobj 6 ,targ))
+
+(defmacro url-set-attributes (urlobj targ)
+  `(aset ,urlobj 7 ,targ))
+
+(defmacro url-set-full (urlobj val)
+  `(aset ,urlobj 8 ,val))
+  
+;;;###autoload
+(defun url-recreate-url (urlobj)
+  (concat (url-type urlobj) ":" (if (url-host urlobj) "//" "")
+	  (if (url-user urlobj)
+	      (concat (url-user urlobj)
+		      (if (url-password urlobj)
+			  (concat ":" (url-password urlobj)))
+		      "@"))
+	  (url-host urlobj)
+	  (if (and (url-port urlobj)
+		   (not (equal (url-port urlobj)
+			       (url-scheme-get-property (url-type urlobj) 'default-port))))
+	      (format ":%d" (url-port urlobj)))
+	  (or (url-filename urlobj) "/")
+	  (if (url-target urlobj)
+	      (concat "#" (url-target urlobj)))
+	  (if (url-attributes urlobj)
+	      (concat ";"
+		      (mapconcat
+		       (function
+			(lambda (x)
+			  (if (cdr x)
+			      (concat (car x) "=" (cdr x))
+			    (car x)))) (url-attributes urlobj) ";")))))
+
+;;;###autoload
+(defun url-generic-parse-url (url)
+  "Return a vector of the parts of URL.
+Format is:
+\[proto username password hostname portnumber file reference attributes fullp\]"
+  (cond
+   ((null url)
+    (make-vector 9 nil))
+   ((or (not (string-match url-nonrelative-link url))
+	(= ?/ (string-to-char url)))
+    (let ((retval (make-vector 9 nil)))
+      (url-set-filename retval url)
+      (url-set-full retval nil)
+      retval))
+   (t
+    (save-excursion
+      (set-buffer (get-buffer-create " *urlparse*"))
+      (set-syntax-table url-parse-syntax-table)
+      (let ((save-pos nil)
+	    (prot nil)
+	    (user nil)
+	    (pass nil)
+	    (host nil)
+	    (port nil)
+	    (file nil)
+	    (refs nil)
+	    (attr nil)
+	    (full nil)
+	    (inhibit-read-only t))
+	(erase-buffer)
+	(insert url)
+	(goto-char (point-min))
+	(setq save-pos (point))
+	(if (not (looking-at "//"))
+	    (progn
+	      (skip-chars-forward "a-zA-Z+.\\-")
+	      (downcase-region save-pos (point))
+	      (setq prot (buffer-substring save-pos (point)))
+	      (skip-chars-forward ":")
+	      (setq save-pos (point))))
+
+	;; We are doing a fully specified URL, with hostname and all
+	(if (looking-at "//")
+	    (progn
+	      (setq full t)
+	      (forward-char 2)
+	      (setq save-pos (point))
+	      (skip-chars-forward "^/")
+	      (setq host (buffer-substring save-pos (point)))
+	      (if (string-match "^\\([^@]+\\)@" host)
+		  (setq user (match-string 1 host)
+			host (substring host (match-end 0) nil)))
+	      (if (and user (string-match "\\([^:]+\\):\\(.*\\)" user))
+		  (setq pass (match-string 2 user)
+			user (match-string 1 user)))
+	      (if (string-match ":\\([0-9+]+\\)" host)
+		  (setq port (string-to-int (match-string 1 host))
+			host (substring host 0 (match-beginning 0))))
+	      (if (string-match ":$" host)
+		  (setq host (substring host 0 (match-beginning 0))))
+	      (setq host (downcase host)
+		    save-pos (point))))
+
+	(if (not port)
+	    (setq port (url-scheme-get-property prot 'default-port)))
+
+	;; Gross hack to preserve ';' in data URLs
+
+	(setq save-pos (point))
+
+	(if (string= "data" prot)
+	    (goto-char (point-max))
+	  ;; Now check for references
+	  (skip-chars-forward "^#")
+	  (if (eobp)
+	      nil
+	    (delete-region
+	     (point)
+	     (progn
+	       (skip-chars-forward "#")
+	       (setq refs (buffer-substring (point) (point-max)))
+	       (point-max))))
+	  (goto-char save-pos)
+	  (skip-chars-forward "^;")
+	  (if (not (eobp))
+	      (setq attr (url-parse-args (buffer-substring (point) (point-max)) t)
+		    attr (nreverse attr))))
+
+	(setq file (buffer-substring save-pos (point)))
+	(if (and host (string-match "%[0-9][0-9]" host))
+	    (setq host (url-unhex-string host)))
+	(vector prot user pass host port file refs attr full))))))
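+
+;; Illustrative parses (not part of the original code; the host is a
+;; placeholder):
+;;
+;;   (url-generic-parse-url "http://jo:secret@www.example.com:8080/a/b.html#top")
+;;   ;; => ["http" "jo" "secret" "www.example.com" 8080 "/a/b.html" "top" nil t]
+;;
+;;   (url-recreate-url (url-generic-parse-url "http://www.example.com/a/b.html"))
+;;   ;; => "http://www.example.com/a/b.html"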
+
+(provide 'url-parse)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-privacy.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,83 @@
+;;; url-privacy.el --- Privacy level support for the URL package
+;; Author: $Author: fx $
+;; Created: $Date: 2001/10/05 17:10:26 $
+;; Version: $Revision: 1.4 $
+;; Keywords: comm, data, processes, hypermedia
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1993 - 1996 by William M. Perry <wmperry@cs.indiana.edu>
+;;; Copyright (c) 1996 - 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(eval-when-compile (require 'cl))
+(require 'url-vars)
+
+(if (fboundp 'device-type)
+    (defalias 'url-device-type 'device-type)
+  (defun url-device-type (&optional device) (or window-system 'tty)))
+
+;;;###autoload
+(defun url-setup-privacy-info ()
+  (interactive)
+  (setq url-system-type
+	(cond
+	 ((or (eq url-privacy-level 'paranoid)
+	      (and (listp url-privacy-level)
+		   (memq 'os url-privacy-level)))
+	  nil)
+	 ;; First, we handle the inseparable OS/Windowing system
+	 ;; combinations
+	 ((eq system-type 'Apple-Macintosh) "Macintosh")
+	 ((eq system-type 'next-mach) "NeXT")
+	 ((eq system-type 'windows-nt) "Windows-NT; 32bit")
+	 ((eq system-type 'ms-windows) "Windows; 16bit")
+	 ((eq system-type 'ms-dos) "MS-DOS; 32bit")
+	 ((memq (url-device-type) '(win32 w32)) "Windows; 32bit")
+	 ((eq (url-device-type) 'pm) "OS/2; 32bit")
+	 (t
+	  (case (url-device-type)
+	    (x "X11")
+	    (ns "OpenStep")
+	    (tty "TTY")
+	    (otherwise nil)))))
+
+  (setq url-personal-mail-address (or url-personal-mail-address
+				      user-mail-address
+				      (format "%s@%s"  (user-real-login-name)
+					      (system-name))))
+
+  (if (or (memq url-privacy-level '(paranoid high))
+	  (and (listp url-privacy-level)
+	       (memq 'email url-privacy-level)))
+      (setq url-personal-mail-address nil))
+
+  (setq url-os-type
+	(cond
+	 ((or (eq url-privacy-level 'paranoid)
+	      (and (listp url-privacy-level)
+		   (memq 'os url-privacy-level)))
+	  nil)
+	 ((boundp 'system-configuration)
+	  system-configuration)
+	 ((boundp 'system-type)
+	  (symbol-name system-type))
+	 (t nil))))
+
+(provide 'url-privacy)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-proxy.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,78 @@
+;;; url-proxy.el --- Proxy server support
+;; Author: $Author: fx $
+;; Created: $Date: 2001/10/11 21:09:35 $
+;; Version: $Revision: 1.5 $
+;; Keywords: comm, data, processes, hypermedia
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1999 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(require 'url-parse)
+(autoload 'url-warn "url")
+
+(defun url-default-find-proxy-for-url (urlobj host)
+  (cond
+   ((or (and (assoc "no_proxy" url-proxy-services)
+	     (string-match
+	      (cdr
+	       (assoc "no_proxy" url-proxy-services))
+	      host))
+	(equal "www" (url-type urlobj)))
+    "DIRECT")
+   ((cdr (assoc (url-type urlobj) url-proxy-services))
+    (concat "PROXY " (cdr (assoc (url-type urlobj) url-proxy-services))))
+   ;;
+   ;; Should check for socks
+   ;;
+   (t
+    "DIRECT")))
+
+(defvar url-proxy-locator 'url-default-find-proxy-for-url)
+
+(defun url-find-proxy-for-url (url host)
+  (let ((proxies (split-string (funcall url-proxy-locator url host) " *; *"))
+	(proxy nil)
+	(case-fold-search t))
+    ;; Not sure how I should handle gracefully degrading from one proxy to
+    ;; another, so for now just deal with the first one
+    ;; (while proxies
+    (if (listp proxies)
+	(setq proxy (car proxies))
+      (setq proxy proxies))
+    (cond
+     ((string-match "^direct" proxy) nil)
+     ((string-match "^proxy +" proxy)
+      (concat "http://" (substring proxy (match-end 0)) "/"))
+     ((string-match "^socks +" proxy)
+      (concat "socks://" (substring proxy (match-end 0))))
+     (t
+      (url-warn 'url (format "Unknown proxy directive: %s" proxy) 'critical)
+      nil))))
+
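+;; Illustrative example (the host names and port are hypothetical):
+;;
+;;   (let ((url-proxy-services '(("http" . "proxy.example.com:8080"))))
+;;     (url-find-proxy-for-url (url-generic-parse-url "http://gnu.org/")
+;;                             "gnu.org"))
+;;   => "http://proxy.example.com:8080/"
+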
+(defun url-proxy (url callback &optional cbargs)
+  ;; Retrieve URL from a proxy.
+  ;; Expects `url-using-proxy' to be bound to the specific proxy to use.
+  (setq url-using-proxy (url-generic-parse-url url-using-proxy))
+  (let ((proxy-object (copy-sequence url)))
+    (url-set-target proxy-object nil)
+    (url-http url-using-proxy callback cbargs)))
+
+(provide 'url-proxy)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-util.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,487 @@
+;;; url-util.el --- Miscellaneous helper routines for URL library
+;; Author: Bill Perry <wmperry@gnu.org>
+;; Created: $Date: 2002/04/22 09:16:11 $
+;; Version: $Revision: 1.14 $
+;; Keywords: comm, data, processes
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1993 - 1996 by William M. Perry <wmperry@cs.indiana.edu>
+;;; Copyright (c) 1996, 97, 98, 99, 2001 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(require 'url-parse)
+(autoload 'timezone-parse-date "timezone")
+(autoload 'timezone-make-date-arpa-standard "timezone")
+
+(defvar url-parse-args-syntax-table
+  (copy-syntax-table emacs-lisp-mode-syntax-table)
+  "A syntax table for parsing sgml attributes.")
+
+(modify-syntax-entry ?' "\"" url-parse-args-syntax-table)
+(modify-syntax-entry ?` "\"" url-parse-args-syntax-table)
+(modify-syntax-entry ?{ "(" url-parse-args-syntax-table)
+(modify-syntax-entry ?} ")" url-parse-args-syntax-table)
+
+;;;###autoload
+(defcustom url-debug nil
+  "*What types of debug messages from the URL library to show.
+Debug messages are logged to the *URL-DEBUG* buffer.
+
+If t, all messages will be logged.
+If a number, all messages will be logged, as well as shown via `message'.
+If a list, it is a list of the types of messages to be logged."
+  :type '(choice (const :tag "none" nil)
+		 (const :tag "all" t)
+		 (checklist :tag "custom"
+			    (const :tag "HTTP" :value http)
+			    (const :tag "DAV" :value dav)
+			    (const :tag "General" :value retrieval)
+			    (const :tag "Filename handlers" :value handlers)
+			    (symbol :tag "Other")))
+  :group 'url-hairy)
+
+;;;###autoload
+(defun url-debug (tag &rest args)
+  (if quit-flag
+      (error "Interrupted!"))
+  (if (or (eq url-debug t)
+	  (numberp url-debug)
+	  (and (listp url-debug) (memq tag url-debug)))
+      (save-excursion
+	(set-buffer (get-buffer-create "*URL-DEBUG*"))
+	(goto-char (point-max))
+	(insert (symbol-name tag) " -> " (apply 'format args) "\n")
+	(if (numberp url-debug)
+	    (apply 'message args)))))
+
+;;;###autoload
+(defun url-parse-args (str &optional nodowncase)
+  ;; Return an assoc list of attribute/value pairs from an RFC822-type string
+  (let (
+	name				; From name=
+	value				; its value
+	results				; Assoc list of results
+	name-pos			; Start of XXXX= position
+	val-pos				; Start of value position
+	st
+	nd
+	)
+    (save-excursion
+      (save-restriction
+	(set-buffer (get-buffer-create " *urlparse-temp*"))
+	(set-syntax-table url-parse-args-syntax-table)
+	(erase-buffer)
+	(insert str)
+	(setq st (point-min)
+	      nd (point-max))
+	(set-syntax-table url-parse-args-syntax-table)
+	(narrow-to-region st nd)
+	(goto-char (point-min))
+	(while (not (eobp))
+	  (skip-chars-forward "; \n\t")
+	  (setq name-pos (point))
+	  (skip-chars-forward "^ \n\t=;")
+	  (if (not nodowncase)
+	      (downcase-region name-pos (point)))
+	  (setq name (buffer-substring name-pos (point)))
+	  (skip-chars-forward " \t\n")
+	  (if (/= (or (char-after (point)) 0)  ?=) ; There is no value
+	      (setq value nil)
+	    (skip-chars-forward " \t\n=")
+	    (setq val-pos (point)
+		  value
+		  (cond
+		   ((or (= (or (char-after val-pos) 0) ?\")
+			(= (or (char-after val-pos) 0) ?'))
+		    (buffer-substring (1+ val-pos)
+				      (condition-case ()
+					  (prog2
+					      (forward-sexp 1)
+					      (1- (point))
+					    (skip-chars-forward "\""))
+					(error
+					 (skip-chars-forward "^ \t\n")
+					 (point)))))
+		   (t
+		    (buffer-substring val-pos
+				      (progn
+					(skip-chars-forward "^;")
+					(skip-chars-backward " \t")
+					(point)))))))
+	  (setq results (cons (cons name value) results))
+	  (skip-chars-forward "; \n\t"))
+	results))))
+
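+;; Illustrative example of the returned alist (the input string is made up):
+;;
+;;   (url-parse-args "text/html; charset=iso-8859-1")
+;;   => (("charset" . "iso-8859-1") ("text/html"))
+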
+;;;###autoload
+(defun url-insert-entities-in-string (string)
+  "Convert HTML markup-start characters to entity references in STRING.
+Also replaces the \" character, so that the result may be safely used as
+an attribute value in a tag.  Returns a new string with the result of the
+conversion.  Replaces these characters as follows:
+    &  ==>  &amp;
+    <  ==>  &lt;
+    >  ==>  &gt;
+    \"  ==>  &quot;"
+  (if (string-match "[&<>\"]" string)
+      (save-excursion
+	(set-buffer (get-buffer-create " *entity*"))
+	(erase-buffer)
+	(buffer-disable-undo (current-buffer))
+	(insert string)
+	(goto-char (point-min))
+	(while (progn
+		 (skip-chars-forward "^&<>\"")
+		 (not (eobp)))
+	  (insert (cdr (assq (char-after (point))
+			     '((?\" . "&quot;")
+			       (?& . "&amp;")
+			       (?< . "&lt;")
+			       (?> . "&gt;")))))
+	  (delete-char 1))
+	(buffer-string))
+    string))
+
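+;; For example:
+;;   (url-insert-entities-in-string "1 < 2 & 3") => "1 &lt; 2 &amp; 3"
+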
+;;;###autoload
+(defun url-normalize-url (url)
+  "Return a 'normalized' version of URL.
+Strips out default port numbers, etc."
+  (let (type data grok retval)
+    (setq data (url-generic-parse-url url)
+	  type (url-type data))
+    (if (member type '("www" "about" "mailto" "info"))
+	(setq retval url)
+      (url-set-target data nil)
+      (setq retval (url-recreate-url data)))
+    retval))
+
+;;;###autoload
+(defun url-lazy-message (&rest args)
+  "Just like `message', but is a no-op if called more than once a second.
+Will not do anything if url-show-status is nil."
+  (if (or (null url-show-status)
+	  (active-minibuffer-window)
+	  (= url-lazy-message-time
+	     (setq url-lazy-message-time (nth 1 (current-time)))))
+      nil
+    (apply 'message args)))
+
+;;;###autoload
+(defun url-get-normalized-date (&optional specified-time)
+  "Return a 'real' date string that most HTTP servers can understand."
+  (require 'timezone)
+  (let* ((raw (if specified-time (current-time-string specified-time)
+		(current-time-string)))
+	 (gmt (timezone-make-date-arpa-standard raw
+						(nth 1 (current-time-zone))
+						"GMT"))
+	 (parsed (timezone-parse-date gmt))
+	 (day (cdr-safe (assoc (substring raw 0 3) weekday-alist)))
+	 (year nil)
+	 (month (car
+		 (rassoc
+		  (string-to-int (aref parsed 1)) monthabbrev-alist)))
+	 )
+    (setq day (or (car-safe (rassoc day weekday-alist))
+		  (substring raw 0 3))
+	  year (aref parsed 0))
+    ;; This is needed for plexus servers, or the server will hang trying to
+    ;; parse the if-modified-since header.  Hopefully, I can take this out
+    ;; soon.
+    (if (and year (> (length year) 2))
+	(setq year (substring year -2 nil)))
+
+    (concat day ", " (aref parsed 2) "-" month "-" year " "
+	    (aref parsed 3) " " (or (aref parsed 4)
+				    (concat "[" (nth 1 (current-time-zone))
+					    "]")))))
+
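+;; `url-get-normalized-date' returns a string roughly of the form
+;; "Sunday, 04-Apr-04 01:21:46 GMT"; the exact value naturally depends on
+;; the current time and time zone.
+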
+;;;###autoload
+(defun url-eat-trailing-space (x)
+  "Remove spaces/tabs at the end of a string."
+  (let ((y (1- (length x)))
+	(skip-chars (list ?  ?\t ?\n)))
+    (while (and (>= y 0) (memq (aref x y) skip-chars))
+      (setq y (1- y)))
+    (substring x 0 (1+ y))))
+
+;;;###autoload
+(defun url-strip-leading-spaces (x)
+  "Remove spaces at the front of a string."
+  (let ((y (1- (length x)))
+	(z 0)
+	(skip-chars (list ?  ?\t ?\n)))
+    (while (and (<= z y) (memq (aref x z) skip-chars))
+      (setq z (1+ z)))
+    (substring x z nil)))
+
+;;;###autoload
+(defun url-pretty-length (n)
+  (cond
+   ((< n 1024)
+    (format "%d bytes" n))
+   ((< n (* 1024 1024))
+    (format "%dk" (/ n 1024.0)))
+   (t
+    (format "%2.2fM" (/ n (* 1024 1024.0))))))
+
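+;; For example:
+;;   (url-pretty-length 512)      => "512 bytes"
+;;   (url-pretty-length 123456)   => "120k"
+;;   (url-pretty-length 10485760) => "10.00M"
+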
+;;;###autoload
+(defun url-display-percentage (fmt perc &rest args)
+  (if (null fmt)
+      (if (fboundp 'clear-progress-display)
+	  (clear-progress-display))
+    (if (and (fboundp 'progress-display) perc)
+	(apply 'progress-display fmt perc args)
+      (apply 'message fmt args))))
+
+;;;###autoload
+(defun url-percentage (x y)
+  (if (fboundp 'float)
+      (round (* 100 (/ x (float y))))
+    (/ (* x 100) y)))
+
+;;;###autoload
+(defun url-basepath (file &optional x)
+  "Return the base pathname of FILE, or the actual filename if X is true."
+  (cond
+   ((null file) "")
+   ((string-match (eval-when-compile (regexp-quote "?")) file)
+    (if x
+	(file-name-nondirectory (substring file 0 (match-beginning 0)))
+      (file-name-directory (substring file 0 (match-beginning 0)))))
+   (x (file-name-nondirectory file))
+   (t (file-name-directory file))))
+
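+;; For example:
+;;   (url-basepath "/foo/bar/baz.html")   => "/foo/bar/"
+;;   (url-basepath "/foo/bar/baz.html" t) => "baz.html"
+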
+;;;###autoload
+(defun url-parse-query-string (query &optional downcase)
+  (let (retval pairs cur key val)
+    (setq pairs (split-string query "&"))
+    (while pairs
+      (setq cur (car pairs)
+	    pairs (cdr pairs))
+      (if (not (string-match "=" cur))
+	  nil				; Grace
+	(setq key (url-unhex-string (substring cur 0 (match-beginning 0)))
+	      val (url-unhex-string (substring cur (match-end 0) nil)))
+	(if downcase
+	    (setq key (downcase key)))
+	(setq cur (assoc key retval))
+	(if cur
+	    (setcdr cur (cons val (cdr cur)))
+	  (setq retval (cons (list key val) retval)))))
+    retval))
+
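+;; Illustrative example (the query string is made up):
+;;   (url-parse-query-string "foo=bar&baz=qux")
+;;   => (("baz" "qux") ("foo" "bar"))
+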
+(defun url-unhex (x)
+  (if (> x ?9)
+      (if (>= x ?a)
+	  (+ 10 (- x ?a))
+	(+ 10 (- x ?A)))
+    (- x ?0)))
+
+;;;###autoload
+(defun url-unhex-string (str &optional allow-newlines)
+  "Remove %XXX embedded spaces, etc in a url.
+If optional second argument ALLOW-NEWLINES is non-nil, then allow the
+decoding of carriage returns and line feeds in the string, which is normally
+forbidden in URL encoding."
+  (setq str (or str ""))
+  (let ((tmp "")
+	(case-fold-search t))
+    (while (string-match "%[0-9a-f][0-9a-f]" str)
+      (let* ((start (match-beginning 0))
+	     (ch1 (url-unhex (elt str (+ start 1))))
+	     (code (+ (* 16 ch1)
+		      (url-unhex (elt str (+ start 2))))))
+	(setq tmp (concat
+		   tmp (substring str 0 start)
+		   (cond
+		    (allow-newlines
+		     (char-to-string code))
+		    ((or (= code ?\n) (= code ?\r))
+		     " ")
+		    (t (char-to-string code))))
+	      str (substring str (match-end 0)))))
+    (setq tmp (concat tmp str))
+    tmp))
+
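+;; For example:
+;;   (url-unhex-string "foo%20bar%2fbaz") => "foo bar/baz"
+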
+(defconst url-unreserved-chars
+  '(
+    ?a ?b ?c ?d ?e ?f ?g ?h ?i ?j ?k ?l ?m ?n ?o ?p ?q ?r ?s ?t ?u ?v ?w ?x ?y ?z
+    ?A ?B ?C ?D ?E ?F ?G ?H ?I ?J ?K ?L ?M ?N ?O ?P ?Q ?R ?S ?T ?U ?V ?W ?X ?Y ?Z
+    ?0 ?1 ?2 ?3 ?4 ?5 ?6 ?7 ?8 ?9
+    ?- ?_ ?. ?! ?~ ?* ?' ?\( ?\))
+  "A list of characters that are _NOT_ reserved in the URL spec.
+This is taken from RFC 2396.")
+
+;;;###autoload
+(defun url-hexify-string (str)
+  "Escape characters in a string."
+  (mapconcat
+   (lambda (char)
+     ;; Fixme: use a char table instead.
+     (if (not (memq char url-unreserved-chars))
+	 (if (< char 16)
+	     (format "%%0%X" char)
+	   (if (> char 255)
+	       (error "Hexifying multibyte character %s" str))
+	   (format "%%%X" char))
+       (char-to-string char)))
+   str ""))
+
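+;; For example:
+;;   (url-hexify-string "foo bar/baz") => "foo%20bar%2Fbaz"
+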
+;;;###autoload
+(defun url-file-extension (fname &optional x)
+  "Return the filename extension of FNAME.
+If optional argument X is t, return the basename of the file with the
+extension stripped off."
+  (if (and fname
+	   (setq fname (url-basepath fname t))
+	   (string-match "\\.[^./]+$" fname))
+      (if x (substring fname 0 (match-beginning 0))
+	(substring fname (match-beginning 0) nil))
+    ;;
+    ;; If FNAME has no extension and X is non-nil, return FNAME itself
+    ;; instead of the empty string.  When caching, this allows the correct
+    ;; .hdr file to be produced for filenames without an extension.
+    ;;
+    (if x
+ 	fname
+      "")))
+
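+;; For example:
+;;   (url-file-extension "/usr/local/index.html")   => ".html"
+;;   (url-file-extension "/usr/local/index.html" t) => "index"
+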
+;;;###autoload
+(defun url-truncate-url-for-viewing (url &optional width)
+  "Return a shortened version of URL that is WIDTH characters or less wide.
+WIDTH defaults to the current frame width."
+  (let* ((fr-width (or width (frame-width)))
+	 (str-width (length url))
+	 (tail (file-name-nondirectory url))
+	 (fname nil)
+	 (modified 0)
+	 (urlobj nil))
+    ;; The first thing that can go are the search strings
+    (if (and (>= str-width fr-width)
+	     (string-match "?" url))
+	(setq url (concat (substring url 0 (match-beginning 0)) "?...")
+	      str-width (length url)
+	      tail (file-name-nondirectory url)))
+    (if (< str-width fr-width)
+	nil				; Hey, we are done!
+      (setq urlobj (url-generic-parse-url url)
+	    fname (url-filename urlobj)
+	    fr-width (- fr-width 4))
+      (while (and (>= str-width fr-width)
+		  (string-match "/" fname))
+	(setq fname (substring fname (match-end 0) nil)
+	      modified (1+ modified))
+	(url-set-filename urlobj fname)
+	(setq url (url-recreate-url urlobj)
+	      str-width (length url)))
+      (if (> modified 1)
+	  (setq fname (concat "/.../" fname))
+	(setq fname (concat "/" fname)))
+      (url-set-filename urlobj fname)
+      (setq url (url-recreate-url urlobj)))
+    url))
+
+;;;###autoload
+(defun url-view-url (&optional no-show)
+  "View the current document's URL.
+Optional argument NO-SHOW means just return the URL, don't show it in
+the minibuffer.
+
+This uses `url-current-object', set locally to the buffer."
+  (interactive)
+  (if (not url-current-object)
+      nil
+    (if no-show
+	(url-recreate-url url-current-object)
+      (message "%s" (url-recreate-url url-current-object)))))
+
+(eval-and-compile
+  (defvar url-get-url-filename-chars "-%.?@a-zA-Z0-9()_/:~=&"
+    "Valid characters in a URL")
+  )
+
+(defun url-get-url-at-point (&optional pt)
+  "Get the URL closest to point, but don't change position.
+Has a preference for looking backward when not directly on a symbol."
+  ;; Not at all perfect - point must be right in the name.
+  (save-excursion
+    (if pt (goto-char pt))
+    (let (start url)
+      (save-excursion
+	;; first see if you're just past a filename
+	(if (not (eobp))
+	    (if (looking-at "[] \t\n[{}()]") ; whitespace or some parens
+		(progn
+		  (skip-chars-backward " \n\t\r({[]})")
+		  (if (not (bobp))
+		      (backward-char 1)))))
+	(if (and (char-after (point))
+		 (string-match (eval-when-compile
+				 (concat "[" url-get-url-filename-chars "]"))
+			       (char-to-string (char-after (point)))))
+	    (progn
+	      (skip-chars-backward url-get-url-filename-chars)
+	      (setq start (point))
+	      (skip-chars-forward url-get-url-filename-chars))
+	  (setq start (point)))
+	(setq url (buffer-substring-no-properties start (point))))
+      (if (and url (string-match "^(.*)\\.?$" url))
+	  (setq url (match-string 1 url)))
+      (if (and url (string-match "^URL:" url))
+	  (setq url (substring url 4 nil)))
+      (if (and url (string-match "\\.$" url))
+	  (setq url (substring url 0 -1)))
+      (if (and url (string-match "^www\\." url))
+	  (setq url (concat "http://" url)))
+      (if (and url (not (string-match url-nonrelative-link url)))
+	  (setq url nil))
+      url)))
+
+(defun url-generate-unique-filename (&optional fmt)
+  "Generate a unique filename in `url-temporary-directory'."
+  (if (not fmt)
+      (let ((base (format "url-tmp.%d" (user-real-uid)))
+	    (fname "")
+	    (x 0))
+	(setq fname (format "%s%d" base x))
+	(while (file-exists-p
+		(expand-file-name fname url-temporary-directory))
+	  (setq x (1+ x)
+		fname (concat base (int-to-string x))))
+	(expand-file-name fname url-temporary-directory))
+    (let ((base (concat "url" (int-to-string (user-real-uid))))
+	  (fname "")
+	  (x 0))
+      (setq fname (format fmt (concat base (int-to-string x))))
+      (while (file-exists-p
+	      (expand-file-name fname url-temporary-directory))
+	(setq x (1+ x)
+	      fname (format fmt (concat base (int-to-string x)))))
+      (expand-file-name fname url-temporary-directory))))
+
+(defun url-extract-mime-headers ()
+  "Set `url-current-mime-headers' in current buffer."
+  (save-excursion
+    (goto-char (point-min))
+    (unless url-current-mime-headers
+      (set (make-local-variable 'url-current-mime-headers)
+	   (mail-header-extract)))))
+
+(provide 'url-util)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url-vars.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,435 @@
+;;; url-vars.el --- Variables for Uniform Resource Locator tool
+;; Author: $Author: fx $
+;; Created: $Date: 2002/04/22 09:25:02 $
+;; Version: $Revision: 1.14 $
+;; Keywords: comm, data, processes, hypermedia
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1993 - 1996 by William M. Perry <wmperry@cs.indiana.edu>
+;;; Copyright (c) 1996, 97, 98, 99, 2001 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(require 'mm-util)
+(eval-when-compile (require 'cl))
+
+(defconst url-version (let ((x "$State: Exp $"))
+			(if (string-match "State: \\([^ \t\n]+\\)" x)
+			    (substring x (match-beginning 1) (match-end 1))
+			  x))
+  "Version number of URL package.")
+
+(defgroup url nil
+  "Uniform Resource Locator tool"
+  :group 'hypermedia)
+
+(defgroup url-file nil
+  "URL storage"
+  :prefix "url-"
+  :group 'url)
+
+(defgroup url-cache nil
+  "URL cache"
+  :prefix "url-"
+  :prefix "url-cache-"
+  :group 'url)
+
+(defgroup url-mime nil
+  "MIME options of URL"
+  :prefix "url-"
+  :group 'url)
+
+(defgroup url-hairy nil
+  "Hairy options of URL"
+  :prefix "url-"
+  :group 'url)
+
+
+(defvar url-current-object nil
+  "A parsed representation of the current url.")
+
+(defvar url-current-mime-headers nil
+  "A parsed representation of the MIME headers for the current url.")
+
+(mapcar 'make-variable-buffer-local
+	'(
+	  url-current-object
+	  url-current-referer
+	  url-current-mime-headers
+	  ))
+
+(defcustom url-honor-refresh-requests t
+  "*Whether to do automatic page reloads.
+These are done at the request of the document author or the server via
+the `Refresh' header in an HTTP response.  If nil, no refresh
+requests will be honored.  If t, all refresh requests will be honored.
+If non-nil and not t, the user will be asked for each refresh
+request."
+  :type '(choice (const :tag "off" nil)
+		 (const :tag "on" t)
+		 (const :tag "ask" 'ask))
+  :group 'url-hairy)
+
+(defcustom url-automatic-caching nil
+  "*If non-nil, all documents will be automatically cached to the local disk."
+  :type 'boolean
+  :group 'url-cache)
+
+;; Fixme: sanitize this.
+(defcustom url-cache-expired
+  (lambda (t1 t2) (>= (- (car t2) (car t1)) 5))
+  "*A function determining if a cached item has expired.
+It takes two Emacs time values as its arguments, and returns non-nil if
+the second time is 'too old' when compared to the first time."
+  :type 'function
+  :group 'url-cache)
+
+(defvar url-bug-address "w3-bugs@xemacs.org"
+  "Where to send bug reports.")
+
+(defcustom url-personal-mail-address nil
+  "*Your full email address.
+This is what is sent to HTTP servers as the FROM field in an HTTP
+request."
+  :type '(choice (const :tag "Unspecified" nil) string)
+  :group 'url)
+
+(defcustom url-directory-index-file "index.html"
+  "*The filename to look for when indexing a directory.
+If this file exists, and is readable, then it will be viewed instead of
+using `dired' to view the directory."
+  :type 'string
+  :group 'url-file)
+
+;; Fixme: this should have a setter which calls url-setup-privacy-info.
+(defcustom url-privacy-level '(email)
+  "*How private you want your requests to be.
+HTTP has header fields for various information about the user, including
+operating system information, email addresses, the last page you visited, etc.
+This variable controls how much of this information is sent.
+
+This should be a symbol or a list.
+Valid values if a symbol are:
+none     -- Send all information
+low      -- Don't send the last location
+high     -- Don't send the email address or last location
+paranoid -- Don't send anything
+
+If a list, this should be a list of symbols of what NOT to send.
+Valid symbols are:
+email    -- the email address
+os       -- the operating system info
+lastloc  -- the last location
+agent    -- Do not send the User-Agent string
+cookie   -- never accept HTTP cookies
+
+Samples:
+
+ (setq url-privacy-level 'high)
+ (setq url-privacy-level '(email lastloc))    ;; equivalent to 'high
+ (setq url-privacy-level '(os))
+
+::NOTE::
+This variable controls several other variables and is _NOT_ automatically
+updated.  Call the function `url-setup-privacy-info' after modifying this
+variable."
+  :type '(radio (const :tag "None (you believe in the basic goodness of humanity)"
+		       :value none)
+		(const :tag "Low (do not reveal last location)"
+		       :value low)
+		(const :tag "High (no email address or last location)"
+		       :value high)
+		(const :tag "Paranoid (reveal nothing!)"
+		       :value paranoid)
+		(checklist :tag "Custom"
+			   (const :tag "Email address" :value email)
+			   (const :tag "Operating system" :value os)
+			   (const :tag "Last location" :value lastloc)
+			   (const :tag "Browser identification" :value agent)
+			   (const :tag "No cookies" :value cookie)))
+  :group 'url)
+
+(defvar url-inhibit-uncompression nil "Do not do decompression if non-nil.")
+
+(defcustom url-uncompressor-alist '((".z"  . "x-gzip")
+				    (".gz" . "x-gzip")
+				    (".uue" . "x-uuencoded")
+				    (".hqx" . "x-hqx")
+				    (".Z"  . "x-compress")
+				    (".bz2"  . "x-bzip2"))
+  "*An alist of file extensions and appropriate content-transfer-encodings."
+  :type '(repeat (cons :format "%v"
+		       (string :tag "Extension")
+		       (string :tag "Encoding")))
+  :group 'url-mime)
+
+(defcustom url-mail-command (if (fboundp 'compose-mail)
+				'compose-mail
+			      'url-mail)
+  "*This function will be called whenever url needs to send mail.
+It should enter a mail-mode-like buffer in the current window.
+The commands `mail-to' and `mail-subject' should still work in this
+buffer, and it should use `mail-header-separator' if possible."
+  :type 'function
+  :group 'url)
+
+(defcustom url-proxy-services nil
+  "*An alist of schemes and proxy servers that gateway them.
+Looks like ((\"http\" . \"hostname:portnumber\") ...).  This is set up
+from the ACCESS_proxy environment variables."
+  :type '(repeat (cons :format "%v"
+		       (string :tag "Protocol")
+		       (string :tag "Proxy")))
+  :group 'url)
+
+(defcustom url-passwd-entry-func nil
+  "*Symbol indicating which function to call to read in a password.
+It will be set up depending on whether you are running EFS or ange-ftp
+at startup if it is nil.  This function should accept the prompt
+string as its first argument, and the default value as its second
+argument."
+  :type '(choice (const :tag "Guess" :value nil)
+		 (const :tag "Use Ange-FTP" :value ange-ftp-read-passwd)
+		 (const :tag "Use EFS"      :value efs-read-passwd)
+		 (const :tag "Use Password Package" :value read-passwd)
+		 (function :tag "Other"))
+  :group 'url-hairy)
+
+(defcustom url-standalone-mode nil
+  "*Rely solely on the cache?"
+  :type 'boolean
+  :group 'url-cache)
+
+(defvar url-mime-separator-chars (mapcar 'identity
+					(concat "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+						"abcdefghijklmnopqrstuvwxyz"
+						"0123456789'()+_,-./=?"))
+  "Characters allowable in a MIME multipart separator.")
+
+(defcustom url-bad-port-list
+  '("25" "119" "19")
+  "*List of ports to warn the user about connecting to.
+Defaults to just the mail, chargen, and NNTP ports so you cannot be
+tricked into sending fake mail or forging messages by a malicious HTML
+document."
+  :type '(repeat (string :tag "Port"))
+  :group 'url-hairy)
+
+(defvar url-mime-content-type-charset-regexp
+  ";[ \t]*charset=\"?\\([^\"]+\\)\"?"
+  "Regexp used in parsing `Content-Type' for a charset indication.")
+
+(defvar url-request-data nil "Any data to send with the next request.")
+
+(defvar url-request-extra-headers nil
+  "A list of extra headers to send with the next request.
+Should be an assoc list of headers/contents.")
+
+(defvar url-request-method nil "The method to use for the next request.")
+
+;; FIXME!!  (RFC 2616 gives examples like `compress, gzip'.)
+(defvar url-mime-encoding-string nil
+  "*String to send in the Accept-encoding: field in HTTP requests.")
+
+;; `mm-mime-mule-charset-alist' in Gnus 5.8/9 contains elements whose
+;; cars aren't valid MIME charsets/coding systems, at least in Emacs.
+;; This gets it correct by construction in Emacs.  Fixme: DTRT for
+;; XEmacs -- its `coding-system-list' doesn't have the BASE-ONLY arg.
+(when (and (not (featurep 'xemacs))
+	   (fboundp 'coding-system-list))
+  (setq mm-mime-mule-charset-alist
+	(apply
+	 'nconc
+	 (mapcar
+	  (lambda (cs)
+	    (when (and (coding-system-get cs 'mime-charset)
+		       (not (eq t (coding-system-get cs 'safe-charsets))))
+	      (list (cons (coding-system-get cs 'mime-charset)
+			  (delq 'ascii
+				(coding-system-get cs 'safe-charsets))))))
+	  (coding-system-list 'base-only)))))
+
+;; Perhaps the first few should actually be given decreasing `q's and
+;; the list should be trimmed significantly.
+;; Fixme: do something sane if we don't have `sort-coding-systems'
+;; (Emacs 20, XEmacs).
+(defun url-mime-charset-string ()
+  "Generate a list of preferred MIME charsets for HTTP requests.
+Generated according to current coding system priorities."
+  (if (fboundp 'sort-coding-systems)
+      (let ((ordered (sort-coding-systems
+		      (let (accum)
+			(dolist (elt mm-mime-mule-charset-alist)
+			  (if (mm-coding-system-p (car elt))
+			      (push (car elt) accum)))
+			(nreverse accum)))))
+	(concat (format "%s;q=1, " (pop ordered))
+		(mapconcat 'symbol-name ordered ";q=0.5, ")
+		";q=0.5"))))
+
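+;; The string captured below is typically of the form
+;; "utf-8;q=1, iso-8859-1;q=0.5, ...;q=0.5" (the charsets shown here are
+;; only illustrative; the actual ones depend on the current coding system
+;; priorities).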
+(defvar url-mime-charset-string (url-mime-charset-string)
+  "*String to send in the Accept-charset: field in HTTP requests.
+The MIME charset corresponding to the most preferred coding system is
+given priority 1 and the rest are given priority 0.5.")
+
+(defun url-set-mime-charset-string ()
+  (setq url-mime-charset-string (url-mime-charset-string)))
+;; Regenerate if the language environment changes.
+(add-hook 'set-language-environment-hook 'url-set-mime-charset-string)
+
+;; Fixme: set from the locale.
+(defcustom url-mime-language-string nil
+  "*String to send in the Accept-language: field in HTTP requests.
+
+Specifies the preferred language when servers can serve documents in
+several languages.  Use RFC 1766 abbreviations, e.g.@: `en' for
+English, `de' for German.  A comma-separated list specifies descending
+order of preference.  The ordering can be made explicit using `q'
+factors defined by HTTP, e.g. `de,en-gb;q=0.8,en;q=0.7'.  `*' means
+get the first available language (as opposed to the default)."
+  :type '(radio
+	  (const :tag "None (get default language version)" :value nil)
+	  (const :tag "Any (get first available language version)" :value "*")
+	  (string :tag "Other"))
+  :group 'url-mime
+  :group 'i18n)
+
+(defvar url-mime-accept-string nil
+  "String to send to the server in the Accept: field in HTTP requests.")
+
+(defvar url-package-version nil
+  "Version number of package using URL.")
+
+(defvar url-package-name nil "Name of the package using URL.")
+
+(defvar url-system-type nil
+  "What type of system we are on.")
+(defvar url-os-type nil
+  "What OS we are on.")
+
+(defcustom url-max-password-attempts 5
+  "*Maximum number of times a password will be prompted for.
+Applies when a protected document is denied by the server."
+  :type 'integer
+  :group 'url)
+
+(defcustom url-temporary-directory (or (getenv "TMPDIR") "/tmp")
+  "*Where temporary files go."
+  :type 'directory
+  :group 'url-file)
+
+(defcustom url-show-status t
+  "*Whether to show a running total of bytes transferred.
+Can cause a large hit if using a remote X display over a slow link, or
+a terminal with a slow modem."
+  :type 'boolean
+  :group 'url)
+
+(defvar url-using-proxy nil
+  "Either nil or the fully qualified proxy URL in use, e.g.
+http://www.domain.com/")
+
+(defcustom url-news-server nil
+  "*The default news server from which to get newsgroups/articles.
+Applies if no server is specified in the URL.  Defaults to the
+environment variable NNTPSERVER or \"news\" if NNTPSERVER is
+undefined."
+  :type '(choice (const :tag "None" :value nil) string)
+  :group 'url)
+
+(defvar url-nonrelative-link
+  "\\`\\([-a-zA-Z0-9+.]+:\\)"
+  "A regular expression that will match an absolute URL.")
+
+(defcustom url-confirmation-func 'y-or-n-p
+  "*What function to use for asking yes or no functions.
+Possible values are `yes-or-no-p' or `y-or-n-p', or any function that
+takes a single argument (the prompt), and returns t only if a positive
+answer is given."
+  :type '(choice (const :tag "Short (y or n)" :value y-or-n-p)
+		 (const :tag "Long (yes or no)" :value yes-or-no-p)
+		 (function :tag "Other"))
+  :group 'url-hairy)
+
+(defcustom url-gateway-method 'native
+  "*The type of gateway support to use.
+Should be a symbol specifying how to get a connection from the local machine.
+
+Currently supported methods:
+`telnet': Run telnet in a subprocess to connect;
+`rlogin': Rlogin to another machine to connect;
+`socks': Connect through a socks server;
+`ssl': Connect with SSL;
+`native': Connect directly."
+  :type '(radio (const :tag "Telnet to gateway host" :value telnet)
+		(const :tag "Rlogin to gateway host" :value rlogin)
+		(const :tag "Use SOCKS proxy" :value socks)
+		(const :tag "Use SSL for all connections" :value ssl)
+		(const :tag "Direct connection" :value native))
+  :group 'url-hairy)
+
+(defvar url-setup-done nil "Has setup configuration been done?")
+
+(defconst weekday-alist
+  '(("Sunday" . 0) ("Monday" . 1) ("Tuesday" . 2) ("Wednesday" . 3)
+    ("Thursday" . 4) ("Friday" . 5) ("Saturday" . 6)
+    ("Tues" . 2) ("Thurs" . 4)
+    ("Sun" . 0) ("Mon" . 1) ("Tue" . 2) ("Wed" . 3)
+    ("Thu" . 4) ("Fri" . 5) ("Sat" . 6)))
+
+(defconst monthabbrev-alist
+  '(("Jan" . 1) ("Feb" . 2) ("Mar" . 3) ("Apr" . 4) ("May" . 5) ("Jun" . 6)
+    ("Jul" . 7) ("Aug" . 8) ("Sep" . 9) ("Oct" . 10) ("Nov" . 11)
+    ("Dec" . 12)))
+
+(defvar url-lazy-message-time 0)
+
+;; Fixme: We may not be able to run SSL.
+(defvar url-extensions-header "Security/Digest Security/SSL")
+
+(defvar url-parse-syntax-table
+  (copy-syntax-table emacs-lisp-mode-syntax-table)
+  "*A syntax table for parsing URLs.")
+
+(modify-syntax-entry ?' "\"" url-parse-syntax-table)
+(modify-syntax-entry ?` "\"" url-parse-syntax-table)
+(modify-syntax-entry ?< "(>" url-parse-syntax-table)
+(modify-syntax-entry ?> ")<" url-parse-syntax-table)
+(modify-syntax-entry ?/ " " url-parse-syntax-table)
+
+(defvar url-load-hook nil
+  "*Hooks to be run after initalizing the URL library.")
+
+;;; Make OS/2 happy - yeeks
+;; (defvar	tcp-binary-process-input-services nil
+;;   "*Make OS/2 happy with our CRLF pairs...")
+
+(defconst url-working-buffer " *url-work")
+
+(defvar url-gateway-unplugged nil
+  "Non-nil means don't open new network connexions.
+This should be set, e.g. by mail user agents rendering HTML to avoid
+`bugs' which call home.")
+
+(defun url-vars-unload-hook ()
+  (remove-hook 'set-language-environment-hook 'url-set-mime-charset-string))
+
+(provide 'url-vars)
+
+;;; url-vars.el ends here
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/url.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,269 @@
+;;; url.el --- Uniform Resource Locator retrieval tool
+;; Author: Bill Perry <wmperry@gnu.org>
+;; Version: $Revision: 1.15 $
+;; Keywords: comm, data, processes, hypermedia
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Copyright (c) 1993 - 1996 by William M. Perry <wmperry@cs.indiana.edu>
+;;; Copyright (c) 1996, 97, 98, 99, 2001 Free Software Foundation, Inc.
+;;;
+;;; This file is part of GNU Emacs.
+;;;
+;;; GNU Emacs is free software; you can redistribute it and/or modify
+;;; it under the terms of the GNU General Public License as published by
+;;; the Free Software Foundation; either version 2, or (at your option)
+;;; any later version.
+;;;
+;;; GNU Emacs is distributed in the hope that it will be useful,
+;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;;; GNU General Public License for more details.
+;;;
+;;; You should have received a copy of the GNU General Public License
+;;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;;; Boston, MA 02111-1307, USA.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+;; Registered URI schemes: http://www.iana.org/assignments/uri-schemes
+
+(eval-when-compile (require 'cl))
+;; Don't require CL at runtime if we can avoid it (Emacs 21).
+;; Otherwise we need it for hashing functions.  `puthash' was never
+;; defined in the Emacs 20 cl.el for some reason.
+(if (fboundp 'puthash)
+    nil					; internal or CL is loaded
+  (defalias 'puthash 'cl-puthash)
+  (autoload 'cl-puthash "cl")
+  (autoload 'gethash "cl")
+  (autoload 'maphash "cl")
+  (autoload 'make-hash-table "cl"))
+
+(eval-when-compile
+  (require 'mm-decode)
+  (require 'mm-view))
+
+(require 'mailcap)
+(require 'url-vars)
+(require 'url-cookie)
+(require 'url-history)
+(require 'url-expand)
+(require 'url-privacy)
+(require 'url-methods)
+(require 'url-proxy)
+(require 'url-parse)
+(require 'url-util)
+
+;; Fixme: customize? convert-standard-filename? 
+;;;###autoload
+(defvar url-configuration-directory "~/.url")
+
+(defun url-do-setup ()
+  "Setup the url package.
+This is to avoid conflict with user settings if URL is dumped with
+Emacs."
+  (unless url-setup-done
+
+    ;; Make OS/2 happy
+    ;;(push '("http" "80") tcp-binary-process-input-services)
+
+    (mailcap-parse-mailcaps)
+    (mailcap-parse-mimetypes)
+    
+    ;; Register all the authentication schemes we can handle
+    (url-register-auth-scheme "basic" nil 4)
+    (url-register-auth-scheme "digest" nil 7)
+
+    (setq url-cookie-file
+	  (or url-cookie-file
+	      (expand-file-name "cookies" url-configuration-directory)))
+    
+    (setq url-history-file
+	  (or url-history-file
+	      (expand-file-name "history" url-configuration-directory)))
+  
+    ;; Parse the global history file if it exists, so that it can be used
+    ;; for URL completion, etc.
+    (url-history-parse-history)
+    (url-history-setup-save-timer)
+
+    ;; Ditto for cookies
+    (url-cookie-setup-save-timer)
+    (url-cookie-parse-file url-cookie-file)
+
+    ;; Read in proxy gateways
+    (let ((noproxy (and (not (assoc "no_proxy" url-proxy-services))
+			(or (getenv "NO_PROXY")
+			    (getenv "no_PROXY")
+			    (getenv "no_proxy")))))
+      (if noproxy
+	  (setq url-proxy-services
+		(cons (cons "no_proxy"
+			    (concat "\\("
+				    (mapconcat
+				     (lambda (x)
+				       (cond
+					((= x ?,) "\\|")
+					((= x ? ) "")
+					((= x ?.) (regexp-quote "."))
+					((= x ?*) ".*")
+					((= x ??) ".")
+					(t (char-to-string x))))
+				     noproxy "") "\\)"))
+		      url-proxy-services))))
+
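+    ;; The conversion above would turn, e.g., NO_PROXY="*.example.com, localhost"
+    ;; (a hypothetical value) into the regexp \(.*\.example\.com\|localhost\)
+    ;; (shown without Lisp string escapes).
+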
+    ;; Set the password entry function based on user defaults or guess
+    ;; based on which remote-file-access package they are using.
+    (cond
+     (url-passwd-entry-func nil)	; Already been set
+     ((fboundp 'read-passwd)		; Use secure password if available
+      (setq url-passwd-entry-func 'read-passwd))
+     ((or (featurep 'efs)		; Using EFS
+	  (featurep 'efs-auto))		; or autoloading efs
+      (if (not (fboundp 'read-passwd))
+	  (autoload 'read-passwd "passwd" "Read in a password" nil))
+      (setq url-passwd-entry-func 'read-passwd))
+     ((or (featurep 'ange-ftp)		; Using ange-ftp
+	  (and (boundp 'file-name-handler-alist)
+	       (not (featurep 'xemacs)))) ; ??
+      (setq url-passwd-entry-func 'ange-ftp-read-passwd))
+     (t
+      (url-warn
+       'security
+       "(url-setup): Can't determine how to read passwords, winging it.")))
+  
+    (url-setup-privacy-info)
+    (run-hooks 'url-load-hook)
+    (setq url-setup-done t)))
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Retrieval functions
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+(defun url-retrieve (url callback &optional cbargs)
+  "Retrieve URL asynchronously and call CALLBACK with CBARGS when finished.
+The callback is called when the object has been completely retrieved, with
+the current buffer containing the object, and any MIME headers associated
+with it.  URL is either a string or a parsed URL.
+
+Return the buffer URL will load into, or nil if the process has
+already completed."
+  (url-do-setup)
+  (url-gc-dead-buffers)
+  (if (stringp url)
+       (set-text-properties 0 (length url) nil url))
+  (if (not (vectorp url))
+      (setq url (url-generic-parse-url url)))
+  (if (not (functionp callback))
+      (error "Must provide a callback function to url-retrieve"))
+  (unless (url-type url)
+    (error "Bad url: %s" (url-recreate-url url)))
+  (let ((loader (url-scheme-get-property (url-type url) 'loader))
+	(url-using-proxy (if (url-host url)
+			     (url-find-proxy-for-url url (url-host url))))
+	(buffer nil)
+	(asynch (url-scheme-get-property (url-type url) 'asynchronous-p)))
+    (if url-using-proxy
+	(setq asynch t
+	      loader 'url-proxy))
+    (if asynch
+	(setq buffer (funcall loader url callback cbargs))
+      (setq buffer (funcall loader url))
+      (if buffer
+	  (save-excursion
+	    (set-buffer buffer)
+	    (apply callback cbargs))))
+    (url-history-update-url url (current-time))
+    buffer))
+
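+;; Minimal usage sketch (the URL and callback body are hypothetical):
+;;
+;;   (url-retrieve "http://www.gnu.org/"
+;;                 (lambda (&rest cbargs)
+;;                   ;; Called in the buffer holding the response
+;;                   ;; headers and body.
+;;                   (message "Fetched %d characters" (buffer-size))))
+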
+(defun url-retrieve-synchronously (url)
+  "Retrieve URL synchronously.
+Return the buffer containing the data, or nil if there are no data
+associated with it (the case for dired, info, or mailto URLs that need
+no further processing).  URL is either a string or a parsed URL."
+  (url-do-setup)
+
+  (lexical-let ((retrieval-done nil)
+		(asynch-buffer nil))
+    (setq asynch-buffer
+	  (url-retrieve url (lambda (&rest ignored)
+			      (url-debug 'retrieval "Synchronous fetching done (%S)" (current-buffer))
+			      (setq retrieval-done t
+				    asynch-buffer (current-buffer)))))
+    (if (not asynch-buffer)
+	;; We do not need to do anything, it was a mailto or something
+	;; similar that takes processing completely outside of the URL
+	;; package.
+	nil
+      (while (not retrieval-done)
+	(url-debug 'retrieval "Spinning in url-retrieve-synchronously: %S (%S)"
+		   retrieval-done asynch-buffer)
+	;; Quoth monnier:
+	;; It turns out that the problem seems to be that the (sit-for
+	;; 0.1) below doesn't actually process the data: instead it
+	;; returns immediately because there is keyboard input
+	;; waiting, so we end up spinning endlessly waiting for the
+	;; process to finish while not letting it finish.
+
+	;; However, raman claims that it blocks Emacs with Emacspeak
+	;; for unexplained reasons.  Put back for his benefit until
+	;; someone can understand it.
+	;; (sleep-for 0.1)
+	(sit-for 0.1))
+      asynch-buffer)))
+
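+;; Minimal usage sketch (the URL is hypothetical):
+;;
+;;   (let ((buf (url-retrieve-synchronously "http://www.gnu.org/")))
+;;     (when buf
+;;       (with-current-buffer buf
+;;         ;; Buffer contains the MIME headers followed by the body.
+;;         (buffer-string))))
+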
+(defun url-mm-callback (&rest ignored)
+  (let ((handle (mm-dissect-buffer t)))
+    (save-excursion
+      (url-mark-buffer-as-dead (current-buffer))
+      (set-buffer (generate-new-buffer (url-recreate-url url-current-object)))
+      (if (eq (mm-display-part handle) 'external)
+	  (progn
+	    (set-process-sentinel
+	     ;; Fixme: this shouldn't have to know the form of the
+	     ;; undisplayer produced by `mm-display-part'.
+	     (get-buffer-process (cdr (mm-handle-undisplayer handle)))
+	     `(lambda (proc event)
+		(mm-destroy-parts (quote ,handle))))
+	    (message "Viewing externally")
+	    (kill-buffer (current-buffer)))
+	(display-buffer (current-buffer))
+	(mm-destroy-parts handle)))))
+
+(defun url-mm-url (url)
+  "Retrieve URL and pass to the appropriate viewing application."
+  (require 'mm-decode)
+  (require 'mm-view)
+  (url-retrieve url 'url-mm-callback nil))
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;; Miscellaneous
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+(defvar url-dead-buffer-list nil)
+
+(defun url-mark-buffer-as-dead (buff)
+  (push buff url-dead-buffer-list))
+
+(defun url-gc-dead-buffers ()
+  (let ((buff))
+    (while (setq buff (pop url-dead-buffer-list))
+      (if (buffer-live-p buff)
+	  (kill-buffer buff)))))
+
+(cond
+ ((fboundp 'display-warning)
+  (defalias 'url-warn 'display-warning))
+ ((fboundp 'warn)
+  (defun url-warn (class message &optional level)
+    (warn "(%s/%s) %s" class (or level 'warning) message)))
+ (t
+  (defun url-warn (class message &optional level)
+    (save-excursion
+      (set-buffer (get-buffer-create "*URL-WARNINGS*"))
+      (goto-char (point-max))
+      (save-excursion
+	(insert (format "(%s/%s) %s\n" class (or level 'warning) message)))
+      (display-buffer (current-buffer))))))
+
+(provide 'url)
+
+;;; url.el ends here
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lisp/url/vc-dav.el	Sun Apr 04 01:21:46 2004 +0000
@@ -0,0 +1,177 @@
+;;; vc-dav.el --- vc.el support for WebDAV
+
+;; Copyright (C) 2001 Free Software Foundation, Inc.
+
+;; Author: Bill Perry <wmperry@gnu.org>
+;; Maintainer: Bill Perry <wmperry@gnu.org>
+;; Version: $Revision: 1.3 $
+;; Keywords: url, vc
+
+;; GNU Emacs is free software; you can redistribute it and/or modify
+;; it under the terms of the GNU General Public License as published by
+;; the Free Software Foundation; either version 2, or (at your option)
+;; any later version.
+
+;; GNU Emacs is distributed in the hope that it will be useful,
+;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+;; GNU General Public License for more details.
+
+;; You should have received a copy of the GNU General Public License
+;; along with GNU Emacs; see the file COPYING.  If not, write to the
+;; Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+;; Boston, MA 02111-1307, USA.
+
+(require 'url)
+(require 'url-dav)
+
+;;; Required functions for a vc backend
+(defun vc-dav-registered (url)
+  "Return t iff URL is registered with a DAV aware server."
+  (url-dav-vc-registered url))
+
+(defun vc-dav-state (url)
+  "Return the current version control state of URL.
+For a list of possible values, see `vc-state'."
+  ;; Things we can support for WebDAV
+  ;;
+  ;; up-to-date - use lockdiscovery
+  ;; edited     - check for an active lock by us
+  ;; USER       - use lockdiscovery + owner
+  ;;
+  ;; These don't make sense for WebDAV
+  ;; needs-patch
+  ;; needs-merge
+  ;; unlocked-changes
+  (let ((locks (url-dav-active-locks url)))
+    (cond
+     ((null locks) 'up-to-date)
+     ((assoc url locks)
+      ;; SOMEBODY has a lock... let's find out who.
+      (setq locks (cdr (assoc url locks)))
+      (if (rassoc url-dav-lock-identifier locks)
+	  ;; _WE_ have a lock
+	  'edited
+	(cdr (car locks)))))))
+	
+(defun vc-dav-checkout-model (url)
+  "Indicate whether URL needs to be \"checked out\" before it can be edited.
+See `vc-checkout-model' for a list of possible values."
+  ;; The only thing we can support with webdav is 'locking
+  'locking)
+
+;; This should figure out the version # of the file somehow.  What is
+;; the most appropriate property in WebDAV to look at for this?
+(defun vc-dav-workfile-version (url)
+  "Return the current workfile version of URL."
+  "Unknown")
+
+(defun vc-dav-register (url &optional rev comment)
+  "Register URL in the DAV backend."
+  ;; Do we need to do anything here?  FIXME?
+  )
+
+(defun vc-dav-checkin (url rev comment)
+  "Commit changes in URL to WebDAV.
+If REV is non-nil, that should become the new revision number.
+COMMENT is used as a check-in comment."
+  ;; This should PUT the resource and release any locks that we hold.
+  )
+
+(defun vc-dav-checkout (url &optional editable rev destfile)
+  "Check out revision REV of URL into the working area.
+
+If EDITABLE is non-nil URL should be writable by the user and if
+locking is used for URL, a lock should also be set.
+
+If REV is non-nil, that is the revision to check out.  If REV is the
+empty string, that means to check out the head of the trunk.
+
+If optional arg DESTFILE is given, it is an alternate filename to
+write the contents to."
+  ;; This should LOCK the resource.
+  )
+
+(defun vc-dav-revert (url &optional contents-done)
+  "Revert URL back to the current workfile version.
+
+If optional arg CONTENTS-DONE is non-nil, then the contents of URL
+have already been reverted from a version backup, and this function
+only needs to update the status of URL within the backend."
+  ;; Should do a GET if !contents_done
+  ;; Should UNLOCK the file.
+  )
+
+(defun vc-dav-print-log (url)
+  "Insert the revision log of URL into the *vc* buffer."
+  )
+
+(defun vc-dav-diff (url &optional rev1 rev2)
+  "Insert the diff for URL into the *vc-diff* buffer.
+If REV1 and REV2 are non-nil, report differences from REV1 to REV2.
+If REV1 is nil, use the current workfile version as the older version.
+If REV2 is nil, use the current workfile contents as the newer version.
+
+It should return a status of either 0 (no differences found), or
+1 (either non-empty diff or the diff is run asynchronously)."
+  ;; We should do this asynchronously...
+  ;; How would we do it at all, that is the question!
+  )
+
+
+
+;;; Optional functions
+;; Should be faster than vc-dav-state - but how?
+(defun vc-dav-state-heuristic (url)
+  "Estimate the version control state of URL at visiting time."
+  (vc-dav-state url))
+
+;; This should use url-dav-get-properties with a depth of `1' to get
+;; all the properties.
+(defun vc-dav-dir-state (url)
+  "find the version control state of all files in DIR in a fast way."
+  )
+
+(defun vc-dav-workfile-unchanged-p (url)
+  "Return non-nil if URL is unchanged from its current workfile version."
+  ;; Probably impossible with webdav
+  )
+
+(defun vc-dav-responsible-p (url)
+  "Return non-nil if DAV considers itself `responsible' for URL."
+  ;; Check for DAV support on the web server.
+  t)
+
+(defun vc-dav-could-register (url)
+  "Return non-nil if URL could be registered under this backend."
+  ;; Check for DAV support on the web server.
+  t)
+
+;;; Unimplemented functions
+;;
+;; vc-dav-latest-on-branch-p(URL)
+;;    Return non-nil if the current workfile version of FILE is the
+;;    latest on its branch.  There are no branches in webdav yet.
+;;
+;; vc-dav-mode-line-string(url)
+;;    Return a dav-specific mode line string for URL. Are there any
+;;    specific states that we want exposed?
+;;
+;; vc-dav-dired-state-info(url)
+;;    Translate the `vc-state' property of URL into a string that can
+;;    be used in a vc-dired buffer.  Are there any extra states that
+;;    we want exposed?
+;;
+;; vc-dav-receive-file(url rev)
+;;    Let this backend `receive' a file that is already registered
+;;    under another backend.  The default just calls `register', which
+;;    should be sufficient for WebDAV.
+;;
+;; vc-dav-unregister(url)
+;;    Unregister URL.  Not possible with WebDAV, other than by
+;;    deleting the resource.
+
+(provide 'vc-dav)