Reworking the exceptions for requests. - zs - Zeitungsschau rss to email converter
       
 (DIR) Log
 (DIR) Files
 (DIR) Refs
 (DIR) LICENSE
       ---
 (DIR) commit 61f5d5bbba7c474b4a7188f448cf9cbac9479485
 (DIR) parent f29afa30f2666e89c18c0b6c4db3a405bdc9aa13
 (HTM) Author: Christoph Lohmann <20h@r-36.net>
       Date:   Sat, 24 Oct 2015 22:33:29 +0200
       
       Reworking the exceptions for requests.
       
       * Requests has a different status code handling.
               * the status code is returned by fetch()
        * file:/// now has to be handled separately
       
       Diffstat:
         zeitungsschau/feed.py               |      15 ++++++++++++---
         zs                                  |      24 +++++++++++-------------
       
       2 files changed, 23 insertions(+), 16 deletions(-)
       ---
 (DIR) diff --git a/zeitungsschau/feed.py b/zeitungsschau/feed.py
       @@ -229,7 +229,16 @@ def parse(astr):
                return feed
        
        def fetch(uri):
       -        return parse(requests.get(uri, timeout=5,\
       -                        headers={"User-Agent": "Zeitungsschau/1.0"}).text.\
       -                                                encode("utf-8"))
       +        if "file://" in uri:
       +                fd = open(uri[7:], "r")
       +                fval = fd.read().encode("utf-8")
       +                fd.close()
       +                rcode = 200
       +        else:
       +                fd = requests.get(uri, timeout=20,\
       +                        headers={"User-Agent": "Zeitungsschau/1.0"})
       +                fval = fd.text.encode("utf-8")
       +                rcode = fd.status_code
       +
       +        return (rcode, parse(fval))
        
 (DIR) diff --git a/zs b/zs
       @@ -11,12 +11,13 @@ import zeitungsschau.feed as feed
        import zeitungsschau.feeddb as feeddb
        import zeitungsschau.opml as opml
        import zeitungsschau.feedemail as feedemail
       -import urllib.error
        import socket
        import http.client
        import ssl
        import getopt
        import pprint
       +import requests.exceptions
       +import requests
        
        dodebug = False
        
       @@ -49,29 +50,26 @@ def run(db, selfeed=None, dryrun=False, onlychanges=False):
                                print("fetch %s" % (feeduri))
                        curfeed = None
                        try:
       -                        curfeed = feed.fetch(feeduri)
       -                except urllib.error.HTTPError as err:
       -                        if err.code == 404:
       -                                estr = "404"
       -                                retries += 1
       +                        (rcode, curfeed) = feed.fetch(feeduri)
                        except socket.gaierror:
                                continue
                        except socket.timeout:
                                continue
       -                except urllib.error.URLError:
       -                        continue
                        except TimeoutError:
                                continue
                        except ConnectionResetError:
                                estr = "connreset"
                                retries += 1
       -                except http.client.IncompleteRead:
       -                        estr = "incompleteread"
       -                        continue
       -                except http.client.BadStatusLine:
       -                        estr = "badstatusline"
       +                except requests.exceptions.ConnectionError:
       +                        estr = "connreset"
       +                        retries += 1
       +                except requests.exceptions.ReadTimeout:
                                continue
        
       +                if rcode == 404:
       +                        estr = "404"
       +                        retries += 1
       +
                        if curfeed == None:
                                continue