path: root/scripts/urlgrabber-ext-down
blob: 9ea0e709e03a8dca7e7a051df0ff00d455068efd
#! /usr/bin/python
#  A very simple external downloader
#  Copyright 2011-2012 Zdenek Pavlas

#   This library is free software; you can redistribute it and/or
#   modify it under the terms of the GNU Lesser General Public
#   License as published by the Free Software Foundation; either
#   version 2.1 of the License, or (at your option) any later version.
#
#   This library is distributed in the hope that it will be useful,
#   but WITHOUT ANY WARRANTY; without even the implied warranty of
#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
#   Lesser General Public License for more details.
#
#   You should have received a copy of the GNU Lesser General Public
#   License along with this library; if not, write to the
#      Free Software Foundation, Inc.,
#      59 Temple Place, Suite 330,
#      Boston, MA  02111-1307  USA
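
#  This helper runs as a child process of urlgrabber.  Each line read from
#  stdin describes one download request as space-separated k=v pairs, with
#  values decoded by urlgrabber's _loads helper.  For every request it may
#  write throttled progress lines ("<id> <bytes-read>") and always writes
#  one final result line ("<id> <size> <dlsz> <dltm> <status>") to stdout.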

import time, os, errno, sys
from urlgrabber.grabber import \
    _readlines, URLGrabberOptions, _loads, \
    PyCurlFileObject, URLGrabError

# Emit a formatted message on stdout (fd 1); exit quietly if the parent
# has closed the pipe (EPIPE) rather than raising.
def write(fmt, *arg):
    try: os.write(1, fmt % arg)
    except OSError, e:
        if e.args[0] != errno.EPIPE: raise
        sys.exit(1)

# Stand-in progress meter: rather than drawing anything, forward the amount
# read as "<id> <bytes>" lines to the parent, throttled to roughly three
# updates per second.
class ProxyProgress:
    def start(self, *d1, **d2):
        self.next_update = 0
    def update(self, _amount_read):
        t = time.time()
        if t < self.next_update: return
        self.next_update = t + 0.31
        write('%d %d\n', self._id, _amount_read)

def main():
    import signal
    signal.signal(signal.SIGINT, lambda n, f: sys.exit(1))
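    # read batches of request lines from stdin (fd 0) until EOF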
    cnt = 0
    while True:
        lines = _readlines(0)
        if not lines: break
        for line in lines:
            cnt += 1
            opts = URLGrabberOptions()
            opts._id = cnt
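            # each request line is space-separated k=v option pairs;
            # values are decoded with urlgrabber's _loads helper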
            for k in line.split(' '):
                k, v = k.split('=', 1)
                setattr(opts, k, _loads(v))
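            # replace the parent's progress meter with a proxy that
            # forwards updates over the pipe under this request's id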
            if opts.progress_obj:
                opts.progress_obj = ProxyProgress()
                opts.progress_obj._id = cnt

            # dlsz/dltm: byte and time deltas between the transfer's
            # first and last timing samples
            dlsz = dltm = 0
            try:
                fo = PyCurlFileObject(opts.url, opts.filename, opts)
                fo._do_grab()
                fo.fo.close()
                size = fo._amount_read
                if fo._tm_last:
                    dlsz = fo._tm_last[0] - fo._tm_first[0]
                    dltm = fo._tm_last[1] - fo._tm_first[1]
                ug_err = 'OK'
            except URLGrabError, e:
                size = 0
                ug_err = '%d %d %s' % (e.errno, getattr(e, 'code', 0), e.strerror)
            write('%d %d %d %.3f %s\n', opts._id, size, dlsz, dltm, ug_err)

if __name__ == '__main__':
    main()
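
# A minimal parent-side sketch (hypothetical, for illustration only): it
# assumes request values are encoded with the _dumps counterpart of _loads
# in urlgrabber.grabber; inside urlgrabber this script is normally spawned
# by the library itself rather than driven by hand.
#
#     import subprocess, sys
#     from urlgrabber.grabber import _dumps
#
#     p = subprocess.Popen(['./scripts/urlgrabber-ext-down'],
#                          stdin=subprocess.PIPE, stdout=subprocess.PIPE)
#     p.stdin.write('url=%s filename=%s\n' %
#                   (_dumps('http://example.com/file'), _dumps('/tmp/file')))
#     p.stdin.close()
#     for reply in p.stdout:    # one result line per request
#         sys.stdout.write(reply)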