commit 00605bf63c3cd85acc5dff471584dc3831e642ce
parent e0285dcddaf5fa0d48d9a9bdeea8fa50ca3109c8
Author: Nils Gillmann <ng0@n0.is>
Date: Sun, 7 Oct 2018 14:56:39 +0000

updateCache: lint

Signed-off-by: Nils Gillmann <ng0@n0.is>

Diffstat:
1 file changed, 18 insertions(+), 18 deletions(-)
diff --git a/updateCache.py b/updateCache.py
--- a/updateCache.py
+++ b/updateCache.py
@@ -19,8 +19,8 @@ import socket
 import errno
 import httplib
 
-FILE_TYPES = [ "txt", "html", "pdf", "ps", "ps.gz", "abstract" ]
-BIN_FILE_TYPES = [ 'pdf', 'ps.gz' ]
+FILE_TYPES = ["txt", "html", "pdf", "ps", "ps.gz", "abstract"]
+BIN_FILE_TYPES = ['pdf', 'ps.gz']
 
 class UIError(Exception):
     pass
@@ -32,11 +32,11 @@ def tryUnlink(fn):
         pass
 
 def getCacheFname(key, ftype, section):
-    return BibTeX.smartJoin(config.OUTPUT_DIR,config.CACHE_DIR,
+    return BibTeX.smartJoin(config.OUTPUT_DIR, config.CACHE_DIR,
                             section,
-                            "%s.%s"%(key,ftype))
+                            "%s.%s"%(key, ftype))
 
-def downloadFile(key, ftype, section, url,timeout=None):
+def downloadFile(key, ftype, section, url, timeout=None):
     if timeout is None:
         timeout = config.DOWNLOAD_CONNECT_TIMEOUT
     fname = getCacheFname(key, ftype, section)
@@ -48,7 +48,7 @@ def downloadFile(key, ftype, section, url,timeout=None):
     fnameURL = fname+".url"
     tryUnlink(fnameTmp)
 
-    def sigalrmHandler(sig,_):
+    def sigalrmHandler(sig, _):
        pass
     signal.signal(signal.SIGALRM, sigalrmHandler)
     signal.alarm(timeout)
@@ -56,11 +56,11 @@ def downloadFile(key, ftype, section, url,timeout=None):
     try:
         infile = urllib2.urlopen(url)
     except httplib.InvalidURL, e:
-        raise UIError("Invalid URL %s: %s"%(url,e))
+        raise UIError("Invalid URL %s: %s"%(url, e))
     except IOError, e:
-        raise UIError("Cannot connect to url %s: %s"%(url,e))
+        raise UIError("Cannot connect to url %s: %s"%(url, e))
     except socket.error, e:
-        if getattr(e,"errno",-1) == errno.EINTR:
+        if getattr(e, "errno", -1) == errno.EINTR:
             raise UIError("Connection timed out to url %s"%url)
         else:
             raise UIError("Error connecting to %s: %s"%(url, e))
@@ -120,29 +120,29 @@ def downloadAll(bibtex, missingOnly=0):
             if missingOnly:
                 cachedURL = getCachedURL(key, ftype, section)
                 if cachedURL == url:
-                    print >>sys.stderr,"Skipping",url
+                    print >>sys.stderr, "Skipping", url
                     continue
                 elif cachedURL is not None:
-                    print >>sys.stderr,"URL for %s.%s has changed"%(key,ftype)
+                    print >>sys.stderr, "URL for %s.%s has changed"%(key, ftype)
                 else:
-                    print >>sys.stderr,"I have no copy of %s.%s"%(key,ftype)
+                    print >>sys.stderr, "I have no copy of %s.%s"%(key, ftype)
             try:
                 downloadFile(key, ftype, section, url)
-                print "Downloaded",url
+                print "Downloaded", url
             except UIError, e:
                 print >>sys.stderr, str(e)
-                errors.append((key,ftype,url,str(e)))
+                errors.append((key, ftype, url, str(e)))
             except (IOError, socket.error, ssl.CertificateError), e:
-                msg = "Error downloading %s: %s"%(url,str(e))
+                msg = "Error downloading %s: %s"%(url, str(e))
                 print >>sys.stderr, msg
-                errors.append((key,ftype,url,msg))
+                errors.append((key, ftype, url, msg))
         if urls.has_key("ps") and not urls.has_key("ps.gz"):
             # Say, this is something we'd like to have gzipped locally.
             psFname = getCacheFname(key, "ps", section)
             psGzFname = getCacheFname(key, "ps.gz", section)
             if os.path.exists(psFname) and not os.path.exists(psGzFname):
                 # This is something we haven't gzipped yet.
-                print "Compressing a copy of",psFname
+                print "Compressing a copy of", psFname
                 outf = gzip.GzipFile(psGzFname, "wb")
                 inf = open(psFname, "rb")
                 while 1:
@@ -167,4 +167,4 @@ if __name__ == '__main__':
         os.umask(config.CACHE_UMASK)
 
     bib = BibTeX.parseFile(config.MASTER_BIB)
-    downloadAll(bib,missingOnly=1)
+    downloadAll(bib, missingOnly=1)