Python gzip CRC校验失败

5
我有一个文件夹,里面有很多巨大的文本文件。每个文件都被压缩成 gzip 格式,大小达到数 GB。我编写了一段代码来分割每个 gzip 文件的内容:用 gzip 打开每个文件,按指定的块大小逐块读取若干行,并写入到一个新的 gzip 文件中。
以下是代码,保存在file_compression.py文件中:
import sys, os, file_manipulation as fm
import gzip


def splitGzipFile(fileName, dest=None, chunkPerSplit=100, linePerChunk=4, file_field_separator="_", zfill=3
                  , verbose=False, file_permission=None, execute=True):
    """
    Splits a gz file into chunk files.

    :param fileName: path of the gzip file to split.
    :param dest: destination folder; defaults to the source file's folder.
    :param chunkPerSplit: number of chunks written per output file.
    :param linePerChunk: number of lines per chunk.
    :param file_field_separator: separator between fields of the base name.
    :param zfill: zero-padding width of the chunk counter in output names.
    :param verbose: if True, print each created file name.
    :param file_permission: optional integer mode forwarded to writeGzipFile.
    :param execute: if False, do a dry run without writing any file.
    :return: None
    """
    absPath = os.path.abspath(fileName)
    baseName = os.path.basename(absPath)
    dirName = os.path.dirname(absPath)
    destFolder = dirName if dest is None else dest

    ## Compute file fields
    rawBaseName, extensions = baseName.split(os.extsep, 1)

    if not extensions.startswith("."):
        extensions = "." + extensions

    file_fields = rawBaseName.split(file_field_separator)
    first_fields = file_fields[:-1] if len(file_fields) > 1 else file_fields
    first_file_part = file_field_separator.join(first_fields)
    last_file_field = file_fields[-1] if len(file_fields) > 1 else ""
    current_chunk = getCurrentChunkNumber(last_file_field)
    # If the last name field is not a valid chunk number, keep the whole
    # base name and start numbering from scratch.
    if current_chunk is None or current_chunk < 0:
        first_file_part = rawBaseName

    ## Initialize chunk variables
    linePerSplit = chunkPerSplit * linePerChunk
    # Resume numbering from the chunk number encoded in the file name, if any.
    chunkCounter = 0 if current_chunk is None else current_chunk - 1

    for chunk in getFileChunks(fileName, linePerSplit):
        print("writing %d ..." % len(chunk))
        chunkCounter += 1
        oFile = (fm.buildPath(destFolder) + first_file_part + file_field_separator
                 + str(chunkCounter).zfill(zfill) + extensions)

        if execute:
            writeGzipFile(oFile, chunk, file_permission)
        if verbose:
            print("Splitting: created file %s" % oFile)



def getCurrentChunkNumber(chunk_field):
    """
    Tries to guess an integer from a string.

    :param chunk_field: value to interpret as a chunk number.
    :return: the parsed integer, or None on failure.
    """
    try:
        return int(chunk_field)
    # TypeError covers non-string/non-number inputs (e.g. None), which the
    # original let escape despite the documented "None if failure" contract.
    except (ValueError, TypeError):
        return None


def getFileChunks(fileName, linePerSplit):
    """
    Lazily read a gzip file and yield blocks of at most linePerSplit lines.

    :param fileName: path of the gzip file to read.
    :param linePerSplit: maximum number of lines per yielded chunk.
    :return: generator of byte-string chunks (bytes and str are the same
             type on Python 2, so behavior there is unchanged).
    """
    with gzip.open(fileName, 'rb') as f:
        print("gzip open")
        # Accumulate lines in a list and join once per chunk: the original
        # repeated string += is quadratic on multi-gigabyte inputs.
        lines = []
        for line in f:
            lines.append(line)
            if len(lines) >= linePerSplit:
                yield b"".join(lines)
                lines = []
        if lines:
            yield b"".join(lines)


def writeGzipFile(file_name, content, file_permission=None):
    """
    Write content into a new gzip file, optionally setting its mode.

    :param file_name: path of the gzip file to create (truncated if present).
    :param content: byte string to compress; nothing is written when empty.
    :param file_permission: optional integer mode passed to os.chmod.
    """
    # The redundant function-local `import gzip` was dropped: the module is
    # already imported at file level.
    with gzip.open(file_name, 'wb') as f:
        if content:
            f.write(content)

    if file_permission is not None and isinstance(file_permission, int):
        os.chmod(file_name, file_permission)

这个任务是多进程的,在拆分之前为每个文件创建一个进程。每个文件只被打开和拆分一次,然后被删除,在此之前我通过将它们记录在列表中来确保了这一点:

from tools.file_utils import file_compression as fc, file_manipulation as fm
import multiprocessing
from multiprocessing import Process, Queue, Manager

manager = Manager()
# Proxy list shared across worker processes to record already-split files.
split_seen = manager.list()

files = [...] # list is full of gzip files.
processList = []
sampleDir = "sample/dir/"

# NOTE(review): processFile must be defined before this loop executes,
# otherwise Process(target=processFile, ...) raises NameError.
for file in files:
    filePath = sampleDir + str(file)  # fixed typo: was assigned to 'fielPath'
    p = Process(target=processFile, args=(filePath, sampleDir, True))
    p.start()
    processList.append(p)

## Join the processes
for p in processList:
    p.join()

def processFile(filePath, destFolder, verbose=True):
    """
    Split one gzip file into chunk files, then delete the original.

    :param filePath: path of the gzip file to process.
    :param destFolder: folder receiving the chunk files.
    :param verbose: unused here; splitting is always verbose below.
    """
    # NOTE(review): this snippet also needs `import os` at module level to
    # run stand-alone.
    global split_seen
    if filePath in split_seen:
        # Return instead of just logging and sleeping: the original went on
        # to split AND delete the file a second time, so two processes could
        # read/remove the same gzip concurrently (a plausible source of the
        # CRC failures). The check-and-append below is still not atomic;
        # guard it with a manager.Lock() if duplicates are possible.
        print("Duplicate file processed: " + str(filePath))
        return
    print("adding %s %d" % (filePath, len(split_seen)))
    split_seen.append(filePath)
    # 0o770 is the portable (Py2.6+/Py3) spelling of the old 0770 literal;
    # the stray line-continuation backslash before the comma was removed.
    fc.splitGzipFile(filePath, dest=destFolder, chunkPerSplit=4000000,
                     linePerChunk=4,
                     verbose=True,
                     file_permission=0o770,
                     zfill=3)

    os.remove(filePath)

到目前为止,代码一直运行良好。但今天我遇到了gzip文件的CRC损坏问题:

Process Process-3:72:

Traceback (most recent call last):

  ...

  File "/.../tools/file_utils/file_compression.py", line 43, in splitGzipFile

    for chunk in getFileChunks(fileName, linePerSplit):

  File "/.../tools/file_utils/file_compression.py", line 70, in getFileChunks

    for line in f:

  File "/.../python2.7/lib/python2.7/gzip.py", line 450, in readline

    c = self.read(readsize)

  File "/.../python2.7/lib/python2.7/gzip.py", line 256, in read

    self._read(readsize)

  File "/.../python2.7/lib/python2.7/gzip.py", line 320, in _read

    self._read_eof()

  File "/.../python2.7/lib/python2.7/gzip.py", line 342, in _read_eof

    hex(self.crc)))

IOError: CRC check failed 0xddbb6045 != 0x34fd5580L

这个问题的起源是什么?我必须再次声明,到目前为止一直都是正常的,文件夹和文件始终具有相同的结构。这种情况的不同之处可能在于我的脚本正在处理比平时多两倍的gzip文件。
可能是同时访问了相同的文件吗?但我严重怀疑这一点,我通过在我的split_seen列表中注册每个访问的文件来确保不是这种情况。
我会接受任何提示,因为我已经没有线索可以查找了。
编辑1
也许有些打开的文件被其他人或另一个程序访问了?我无法要求并依赖证言。所以,如果我要放置一个multiprocess.Lock,它会防止任何其他线程、进程、程序、用户等修改文件吗?还是只限于Python?我找不到任何文档。

你是否已经通过例如 gunzip 来检查 fileName 文件是否损坏? - xiaohan2012
2个回答

1

我在运行了数月的代码中遇到了完全相同的错误。结果发现那个特定文件的源文件已经损坏了。我回到了一个旧文件,它正常工作,我使用了一个新文件,它也正常工作。


1

我遇到了同样的问题。我只是删除了旧文件并重新运行了代码。

rm -rf /tmp/imagenet/

HTH


网页内容由 Stack Overflow 提供,点击上面的“原文链接”可以查看英文原文。