
Feeds: Fix org-feed when retrieve-method is curl or wget

- the test on line 312 failed because these methods return a buffer name
  (a string) instead of a buffer object

- requesting 'wget actually executed "curl", with parameters meant for wget

- curl needs --silent, so that its progress output doesn't end up mixed
  into the retrieved content

- the Atom parser had code to skip HTTP headers, but these are present only
  when using url-retrieve-synchronously; this caused errors with curl/wget.
  Instead, HTTP headers are now removed right after the feed buffer is
  populated.
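
For context, a minimal sketch of the header-stripping step the last point
describes, assuming the response buffer starts with HTTP headers terminated
by a blank line (the URL below is a placeholder, not part of the commit):

    (require 'url)
    (with-current-buffer (url-retrieve-synchronously "https://example.org/feed.atom")
      (goto-char (point-min))
      ;; HTTP headers end at the first blank line
      (search-forward "\n\n")
      ;; drop the headers, leaving only the feed XML
      (delete-region (point-min) (point))
      (buffer-string))

Buffers filled by curl or wget never contain such headers, which is why the
stripping now happens in `org-feed-get-feed' only for the
url-retrieve-synchronously case.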
Christopher League, 16 years ago
commit 9384f9d741

2 changed files with 22 additions and 8 deletions:
  1. lisp/ChangeLog    (+8, -0)
  2. lisp/org-feed.el  (+14, -8)

lisp/ChangeLog  (+8, -0)

@@ -1,3 +1,11 @@
+2009-06-30  Carsten Dominik  <carsten.dominik@gmail.com>
+
+	* org-feed.el (org-feed-update): fix problem with non-existing
+	buffer.
+	(org-feed-skip-http-headers): New function.
+	(org-feed-get-feed): Call `org-feed-skip-http-headers'.
+	(org-feed-get-feed): Fix command line arguments for curl.
+
 2009-06-28  Carsten Dominik  <carsten.dominik@gmail.com>
 
 	* org-latex.el (org-export-latex-inline-image-extensions): Add ps

lisp/org-feed.el  (+14, -8)

@@ -309,7 +309,7 @@ it can be a list structured like an entry in `org-feed-alist'."
 	  feed-buffer inbox-pos new-formatted
 	  entries old-status status new changed guid-alist e guid olds)
       (setq feed-buffer (org-feed-get-feed url))
-      (unless (and feed-buffer (bufferp feed-buffer))
+      (unless (and feed-buffer (bufferp (get-buffer feed-buffer)))
 	(error "Cannot get feed %s" name))
       (when retrieve-only
 	(throw 'exit feed-buffer))
@@ -549,18 +549,28 @@ If that property is already present, nothing changes."
 	       (org-split-string s "\n")
 	       (concat "\n" (make-string n ?\ )))))
 
+(defun org-feed-skip-http-headers (buffer)
+  "Remove HTTP headers from BUFFER, and return it.
+Assumes headers are indeed present!"
+  (with-current-buffer buffer
+    (widen)
+    (goto-char (point-min))
+    (search-forward "\n\n")
+    (delete-region (point-min) (point))
+    buffer))
+
 (defun org-feed-get-feed (url)
   "Get the RSS feed file at URL and return the buffer."
   (cond
    ((eq org-feed-retrieve-method 'url-retrieve-synchronously)
-    (url-retrieve-synchronously url))
+    (org-feed-skip-http-headers (url-retrieve-synchronously url)))
    ((eq org-feed-retrieve-method 'curl)
     (ignore-errors (kill-buffer org-feed-buffer))
-    (call-process "curl" nil org-feed-buffer nil url)
+    (call-process "curl" nil org-feed-buffer nil "--silent" url)
     org-feed-buffer)
    ((eq org-feed-retrieve-method 'wget)
     (ignore-errors (kill-buffer org-feed-buffer))
-    (call-process "curl" nil org-feed-buffer nil "-q" "-O" "-" url)
+    (call-process "wget" nil org-feed-buffer nil "-q" "-O" "-" url)
     org-feed-buffer)
    ((functionp org-feed-retrieve-method)
     (funcall org-feed-retrieve-method url))))
@@ -610,10 +620,6 @@ The `:item-full-text' property actually contains the sexp
 formatted as a string, not the original XML data."
   (with-current-buffer buffer
     (widen)
-    (goto-char (point-min))
-    ;; Skip HTTP headers
-    (search-forward "\n\n")
-    (delete-region (point-min) (point))
     (let ((feed (car (xml-parse-region (point-min) (point-max)))))
       (mapcar
        (lambda (entry)
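
A hedged usage sketch of how the fixed retrieval methods are selected (the
feed entry below is hypothetical, purely for illustration):

    (require 'org-feed)
    ;; Any of 'curl, 'wget, or 'url-retrieve-synchronously now works;
    ;; a function taking the URL as argument is also accepted.
    (setq org-feed-retrieve-method 'curl)
    ;; Hypothetical feed: name, URL, target Org file, and headline.
    (setq org-feed-alist
          '(("ExampleFeed" "https://example.org/feed.atom"
             "~/org/feeds.org" "Example Feed Entries")))
    (org-feed-update-all)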