
Python: how to stream/pipe data out of gzip compression?

I need to do something like this, but in python:

dd if=/dev/sdb | gzip -c | curl ftp upload

I can't use the entire command with Popen because:

  1. I need non-blocking operation
  2. I need progress information (tried looping through proc.stderr to no avail)

The other big thing is I can't create a compressed gzip file in memory or on disk prior to uploading.

So this is roughly what I'm trying to do, with gzip_stream_of_strings(input) being the unknown:

import os, pycurl
filename = '/path/to/super/large/file.img'
filesize = os.path.getsize(filename)

def progress(dl_left, dl_completed, ul_left, ul_completed):
    return (ul_completed/filesize)*100

def main():
    c = pycurl.Curl()
    c.setopt(c.URL, 'ftp://IP/save_as.img.gz')
    c.setopt(pycurl.NOPROGRESS, 0)
    c.setopt(pycurl.PROGRESSFUNCTION, progress)
    c.setopt(pycurl.UPLOAD, 1)
    c.setopt(pycurl.INFILESIZE, filesize)
    c.setopt(pycurl.USERPWD, 'user:passwd')
    with open(filename, 'rb') as input:
        c.setopt(pycurl.READFUNCTION, gzip_stream_of_strings(input))
        c.perform()
        c.close()

Any help is greatly appreciated!

EDIT: Here's the solution:

from gzip import GzipFile
from StringIO import StringIO

CHUNK_SIZE = 1024

class GZipPipe(StringIO):
    """This class implements a compression pipe suitable for asynchronous 
    process.
    Credit to cdvddt @ http://snippets.dzone.com/posts/show/5644

    @param source: this is the input file to compress
    @param name: this is stored as the name in the gzip header
    @function read: call this to read(size) chunks from the gzip stream        
    """
    def __init__(self, source = None, name = "data"):
        StringIO.__init__(self)

        self.source = source
        self.source_eof = False
        self.buffer = ""
        self.zipfile = GzipFile(name, 'wb', 9, self)

    def write(self, data):
        # GzipFile writes its compressed output back into this buffer.
        self.buffer += data

    def read(self, size=-1):
        # Pull raw data from the source and compress it until enough
        # compressed bytes are buffered to satisfy the request (or the
        # source is exhausted).
        while ((len(self.buffer) < size) or (size == -1)) and not self.source_eof:
            if self.source is None:
                break
            chunk = self.source.read(CHUNK_SIZE)
            self.zipfile.write(chunk)
            if len(chunk) < CHUNK_SIZE:
                # A short read means end of input: close the gzip stream so
                # its trailer (CRC and length) is written into the buffer.
                self.source_eof = True
                self.zipfile.flush()
                self.zipfile.close()
                break

        if size == 0:
            result = ""
        elif size >= 1:
            result = self.buffer[0:size]
            self.buffer = self.buffer[size:]
        else:
            result = self.buffer
            self.buffer = ""

        return result

Used like so:

with open(filename, 'rb') as input:
    c.setopt(pycurl.READFUNCTION, GZipPipe(input).read)
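
To sanity-check the pipe without involving pycurl, something along these lines should work (it reuses the GZipPipe class and CHUNK_SIZE constant above; test_output.img.gz is just an illustrative path), and the resulting file should be readable with zcat:

with open(filename, 'rb') as source:
    pipe = GZipPipe(source, name='file.img')
    with open('test_output.img.gz', 'wb') as out:
        while True:
            # Pull compressed chunks exactly the way pycurl's READFUNCTION would.
            chunk = pipe.read(CHUNK_SIZE)
            if not chunk:
                break
            out.write(chunk)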
asked Jan 11 '12 by Brian


1 Answer

The built-in zlib module can compress data read from any file-like object, including streams.

import os, pycurl, zlib
from cStringIO import StringIO
filename = '/path/to/super/large/file.img'
filesize = os.path.getsize(filename)

def progress(dl_left, dl_completed, ul_left, ul_completed):
    return (ul_completed/filesize)*100

def main():
    c = pycurl.Curl()
    c.setopt(c.URL, 'ftp://IP/save_as.img.gz')
    c.setopt(pycurl.NOPROGRESS, 0)
    c.setopt(pycurl.PROGRESSFUNCTION, progress)
    c.setopt(pycurl.UPLOAD, 1)
    c.setopt(pycurl.INFILESIZE, filesize)
    c.setopt(pycurl.USERPWD, 'user:passwd')
    with open(filename, 'rb') as input:
        # Compress into an in-memory buffer and hand pycurl the buffer's
        # read method as the READFUNCTION callback.
        s = StringIO(zlib.compress(input.read()))
        c.setopt(pycurl.READFUNCTION, s.read)
        c.perform()
        c.close()

I have not tested this. See this SO question for additional info.
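
For a fully streaming variant that never holds the whole compressed image in memory, zlib.compressobj can compress incrementally (zlib.compress, by contrast, buffers everything and emits a raw zlib stream rather than gzip framing). Below is a minimal, untested sketch that relies on the common recipe of passing wbits = 16 + MAX_WBITS to compressobj to request gzip-format output; the GzipStream name and chunk size are illustrative, not part of the answer above:

import zlib

class GzipStream(object):
    """Wraps a file object and serves gzip-compressed chunks via read()."""
    def __init__(self, source, chunk_size=64 * 1024):
        self.source = source
        self.chunk_size = chunk_size
        # 16 + MAX_WBITS asks zlib for gzip framing (header + CRC trailer).
        self.compressor = zlib.compressobj(9, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
        self.buffer = b""
        self.finished = False

    def read(self, size=-1):
        # Compress more input until the request can be satisfied or the
        # source is exhausted.
        while not self.finished and (size < 0 or len(self.buffer) < size):
            chunk = self.source.read(self.chunk_size)
            if chunk:
                self.buffer += self.compressor.compress(chunk)
            else:
                self.buffer += self.compressor.flush()
                self.finished = True
        if size < 0:
            result, self.buffer = self.buffer, b""
        else:
            result, self.buffer = self.buffer[:size], self.buffer[size:]
        return result

# Usage, e.g.: c.setopt(pycurl.READFUNCTION, GzipStream(open(filename, 'rb')).read)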

answered Oct 19 '22 by Spencer Rathbun