[core,1/3] fetch/url: use logger methods instead of prints

Message ID 1390471322-24372-2-git-send-email-jabk@prevas.dk
State Superseded
Delegated to: Esben Haabendal

Commit Message

Jacob Kjaergaard Jan. 23, 2014, 10:02 a.m. UTC
From: Jacob Kjaergaard <jacob.kjaergaard@prevas.dk>

---
 lib/oelite/fetch/url.py |   11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

Patch

diff --git a/lib/oelite/fetch/url.py b/lib/oelite/fetch/url.py
index b76eb11..a7bf3fc 100644
--- a/lib/oelite/fetch/url.py
+++ b/lib/oelite/fetch/url.py
@@ -4,6 +4,7 @@  import os
 import urlgrabber
 import urlgrabber.progress
 import hashlib
+from oebakery import die, err, warn, info, debug
 
 class UrlFetcher():
 
@@ -114,17 +115,17 @@  class UrlFetcher():
 
 class SimpleProgress(urlgrabber.progress.BaseMeter):
     def _do_end(self, amount_read, now=None):
-        print "grabbed %d bytes in %.2f seconds" %(amount_read,self.re.elapsed_time())
+        debug("grabbed %d bytes in %.2f seconds" %(amount_read,self.re.elapsed_time()))
 
 
 def grab(url, filename, timeout=120, retry=5, proxy=None, ftpmode=False):
-    print "Grabbing", url
+    debug("Grabbing %s"%(url))
     def grab_fail_callback(data):
         # Only print debug here when non fatal retries, debug in other cases
         # is already printed
         if (data.exception.errno in retrycodes) and (data.tries != data.retry):
-            print "grabbing retry %d/%d, exception %s"%(
-                data.tries, data.retry, data.exception)
+            debug("grabbing retry %d/%d, exception %s"%(
+                data.tries, data.retry, data.exception))
     try:
         retrycodes = urlgrabber.grabber.URLGrabberOptions().retrycodes
         if 12 not in retrycodes:
@@ -138,7 +139,7 @@  def grab(url, filename, timeout=120, retry=5, proxy=None, ftpmode=False):
         if not downloaded_file:
             return False
     except urlgrabber.grabber.URLGrabError as e:
-        print 'URLGrabError %i: %s' % (e.errno, e.strerror)
+        info('URLGrabError %i: %s' % (e.errno, e.strerror))
         if os.path.exists(filename):
             os.unlink(filename)
         return False
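
For reference, here is a minimal sketch of the calling convention the patch moves to: each former print statement becomes a single pre-formatted string handed to one of the oebakery helpers (die, err, warn, info, debug). The stand-in definitions below are hypothetical and only illustrate the calling style; the real oebakery implementations may filter by log level or route output differently.

import sys

# Hypothetical stand-ins for the oebakery helpers imported in the patch
# (debug, info, warn, err, die); the real functions may behave differently.
def debug(msg):
    sys.stderr.write("DEBUG: %s\n" % msg)

def info(msg):
    sys.stderr.write("INFO: %s\n" % msg)

def report_grab(url, amount_read, elapsed):
    # Before: print "Grabbing", url
    # After:  one pre-formatted string per call, as in the patch.
    debug("Grabbing %s" % url)
    debug("grabbed %d bytes in %.2f seconds" % (amount_read, elapsed))

def report_error(errno, strerror):
    # Non-fatal fetch errors go to info(), matching the patch.
    info("URLGrabError %i: %s" % (errno, strerror))

if __name__ == "__main__":
    report_grab("http://example.com/src.tar.gz", 4096, 0.12)
    report_error(14, "HTTP Error 404: Not Found")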