;;; url-queue.el --- Fetching web pages in parallel
-;; Copyright (C) 2011-2012 Free Software Foundation, Inc.
+;; Copyright (C) 2011-2015 Free Software Foundation, Inc.
;; Author: Lars Magne Ingebrigtsen <larsi@gnus.org>
;; Keywords: comm
;;; Code:
-(eval-when-compile (require 'cl))
+(eval-when-compile (require 'cl-lib))
(require 'browse-url)
(require 'url-parse)
(defvar url-queue nil)
-(defstruct url-queue
+(cl-defstruct url-queue
url callback cbargs silentp
buffer start-time pre-triggered
inhibit-cookiesp)
:inhibit-cookiesp inhibit-cookies))))
(url-queue-setup-runners))
-;; To ensure asynch behaviour, we start the required number of queue
+;; To ensure asynch behavior, we start the required number of queue
;; runners from `run-with-idle-timer'. So we're basically going
;; through the queue in two ways: 1) synchronously when a program
;; calls `url-queue-retrieve' (which will then start the required
(cond
((or (url-queue-start-time entry)
(url-queue-pre-triggered entry))
- (incf running))
+ (cl-incf running))
((not waiting)
(setq waiting entry))))
(when (and waiting
(dolist (entry url-queue)
(cond
((url-queue-start-time entry)
- (incf running))
+ (cl-incf running))
((not waiting)
(setq waiting entry))))
(when (and waiting
(url-queue-start-retrieve waiting))))
(defun url-queue-callback-function (status job)
+ ;; Completion callback handed to `url-retrieve' for every queued JOB.
+ ;; Dequeue JOB first (moved up from below) so that the
+ ;; connection-failed flush cannot see or re-process a job that has
+ ;; already finished; then restart the queue and deliver STATUS to the
+ ;; user's callback with its saved arguments.
+ (setq url-queue (delq job url-queue))
(when (and (eq (car status) :error)
(eq (cadr (cadr status)) 'connection-failed))
;; If we get a connection error, then flush all other jobs from
;; synchronously and totally halts Emacs.
(url-queue-remove-jobs-from-host
(plist-get (nthcdr 3 (cadr status)) :host)))
- (setq url-queue (delq job url-queue))
(url-queue-run-queue)
(apply (url-queue-callback job) (cons status (url-queue-cbargs job))))
host)
(push job jobs)))
(dolist (job jobs)
+ (url-queue-kill-job job)
(setq url-queue (delq job url-queue)))))
(defun url-queue-start-retrieve (job)
+ ;; Kick off the asynchronous fetch for JOB and stash the resulting
+ ;; fetch buffer in the JOB struct (nil when `url-retrieve' errors out,
+ ;; thanks to the `ignore-errors' wrapper).
(setf (url-queue-buffer job)
(ignore-errors
- (url-retrieve (url-queue-url job)
- #'url-queue-callback-function (list job)
- (url-queue-silentp job)
- (url-queue-inhibit-cookiesp job)))))
+ ;; Bind `url-request-noninteractive' around the retrieval —
+ ;; presumably so background queue fetches never prompt the user
+ ;; (e.g. for authentication); TODO confirm against the url library.
+ (let ((url-request-noninteractive t))
+ (url-retrieve (url-queue-url job)
+ #'url-queue-callback-function (list job)
+ (url-queue-silentp job)
+ (url-queue-inhibit-cookiesp job))))))
(defun url-queue-prune-old-entries ()
(let (dead-jobs)
url-queue-timeout))
(push job dead-jobs)))
(dolist (job dead-jobs)
- (when (bufferp (url-queue-buffer job))
- (while (get-buffer-process (url-queue-buffer job))
- (ignore-errors
- (delete-process (get-buffer-process (url-queue-buffer job)))))
- (ignore-errors
- (kill-buffer (url-queue-buffer job))))
+ (url-queue-kill-job job)
(setq url-queue (delq job url-queue)))))
+(defun url-queue-kill-job (job)
+ ;; Forcibly terminate JOB: tear down any process attached to its fetch
+ ;; buffer, then report failure to the caller via the job's callback.
+ (when (bufferp (url-queue-buffer job))
+ (let (process)
+ (while (setq process (get-buffer-process (url-queue-buffer job)))
+ ;; Neutralize the sentinel first so deleting the process does not
+ ;; fire the normal completion machinery for a job we are killing.
+ (set-process-sentinel process 'ignore)
+ (ignore-errors
+ (delete-process process)))))
+ ;; Call the callback with an error message to ensure that the caller
+ ;; is notified that the job has failed.
+ (with-current-buffer
+ (if (and (bufferp (url-queue-buffer job))
+ (buffer-live-p (url-queue-buffer job)))
+ ;; Use the (partially filled) process buffer if it exists.
+ (url-queue-buffer job)
+ ;; If not, just create a new buffer, which will probably be
+ ;; killed again by the caller.
+ (generate-new-buffer " *temp*"))
+ ;; Synthesize an error STATUS plist shaped like the one
+ ;; `url-retrieve' passes to callbacks on failure.
+ (apply (url-queue-callback job)
+ (cons (list :error (list 'error 'url-queue-timeout
+ "Queue timeout exceeded"))
+ (url-queue-cbargs job)))))
+
(provide 'url-queue)
;;; url-queue.el ends here