From b74c9672c2413714a6b601ce6ef12ad7765e6fa9 Mon Sep 17 00:00:00 2001
From: Glenn Morris
Date: Thu, 9 Feb 2012 22:23:47 -0500
Subject: Document url-queue-retrieve

* doc/misc/url.texi (Retrieving URLs): Update url-retrieve arguments.
Mention url-queue-retrieve.
* lisp/url/url-queue.el (url-queue-retrieve): Doc fix.
* etc/NEWS: Edits.
---
 lisp/url/ChangeLog    | 4 ++++
 lisp/url/url-queue.el | 9 +++++----
 2 files changed, 9 insertions(+), 4 deletions(-)

(limited to 'lisp/url')

diff --git a/lisp/url/ChangeLog b/lisp/url/ChangeLog
index 179148a089..d7a22b7212 100644
--- a/lisp/url/ChangeLog
+++ b/lisp/url/ChangeLog
@@ -1,3 +1,7 @@
+2012-02-10  Glenn Morris
+
+	* url-queue.el (url-queue-retrieve): Doc fix.
+
 2012-02-08  Lars Ingebrigtsen
 
 	* url-parse.el (url): Add the `use-cookies' slot to the URL struct
diff --git a/lisp/url/url-queue.el b/lisp/url/url-queue.el
index c9c6c4fe69..62e5e2f84d 100644
--- a/lisp/url/url-queue.el
+++ b/lisp/url/url-queue.el
@@ -56,9 +56,10 @@
 ;;;###autoload
 (defun url-queue-retrieve (url callback &optional cbargs silent inhibit-cookies)
   "Retrieve URL asynchronously and call CALLBACK with CBARGS when finished.
-Like `url-retrieve' (which see for details of the arguments), but
-controls the level of parallelism via the
-`url-queue-parallel-processes' variable."
+This is like `url-retrieve' (which see for details of the arguments),
+but downloads in parallel. The variable `url-queue-parallel-processes'
+sets the number of concurrent processes. The variable `url-queue-timeout'
+sets a timeout."
   (setq url-queue
 	(append url-queue
 		(list (make-url-queue :url url
@@ -127,7 +128,7 @@ controls the level of parallelism via the
 	(push job jobs)))
     (dolist (job jobs)
       (setq url-queue (delq job url-queue)))))
-
+
 (defun url-queue-start-retrieve (job)
   (setf (url-queue-buffer job)
 	(ignore-errors
--
cgit v1.2.3
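
For readers unfamiliar with the API this patch documents, here is a minimal usage sketch. It is not part of the commit; the URL, the callback body, the CBARGS value, and the option settings are illustrative assumptions, not values taken from the patch.

;; Minimal sketch of calling url-queue-retrieve as described in the
;; docstring above.  The URL and values below are made-up examples.
(require 'url-queue)

;; User options mentioned in the docstring; these values are arbitrary.
(setq url-queue-parallel-processes 4   ; max concurrent fetches
      url-queue-timeout 10)            ; seconds before a slow job is killed

(url-queue-retrieve
 "https://example.org/"                ; hypothetical URL
 (lambda (status &rest cbargs)
   ;; Called in the retrieval buffer when the fetch finishes;
   ;; STATUS describes redirects/errors, CBARGS is passed through.
   (message "Fetched %S: %d bytes" cbargs (buffer-size)))
 '("example-tag")                      ; CBARGS
 t                                     ; SILENT: suppress progress messages
 t)                                    ; INHIBIT-COOKIES

As with url-retrieve, the call returns immediately; the callback runs later in the buffer holding the response, once one of the queued parallel fetches completes or times out.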