After fixing the uninitialized variable (or silencing the linter), the
following test failures occur. All of them bottom out in the same gzip
traceback, so a short reproduction sketch is included before the full log:
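Every traceback ends in problem_report.py reading back a gzip blob that was
written through a gzip.GzipFile that is never explicitly closed. A minimal
sketch of the suspected trigger, assuming Python 3.12's buffered gzip writes
(python/cpython#101251) are what delays the flush:

import gzip
import io

out = io.BytesIO()
# Write without closing, the way problem_report.CompressedValue builds its
# gzipvalue. On Python <= 3.11 the temporary GzipFile is finalized (and
# flushed) as soon as this statement finishes under CPython refcounting;
# on 3.12 the stream in `out` can still be incomplete on the next line.
gzip.GzipFile(mode="wb", fileobj=out).write(b"FooFoo!")
blob = out.getvalue()  # complete gzip stream on 3.11, truncated on 3.12

try:
    print(gzip.GzipFile(fileobj=io.BytesIO(blob)).read())  # b'FooFoo!' on 3.11
except EOFError as err:
    # Python 3.12: "Compressed file ended before the end-of-stream marker
    # was reached" -- the same error as the failures below.
    print(err)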


=================================== FAILURES ===================================
_________________________ TestApportUnpack.test_unpack _________________________

self = <tests.integration.test_apport_unpack.TestApportUnpack testMethod=test_unpack>

    def test_unpack(self):
        """apport-unpack for all possible data types"""
        process = self._call_apport_unpack([self.report_file, self.unpack_dir])
        self.assertEqual(process.returncode, 0)
        self.assertEqual(process.stderr, "")
        self.assertEqual(process.stdout, "")
    
        self.assertEqual(self._get_unpack("utf8"), self.utf8_str)
        self.assertEqual(self._get_unpack("unicode"), self.utf8_str)
        self.assertEqual(self._get_unpack("binary"), self.bindata)
>       self.assertEqual(self._get_unpack("compressed"), b"FooFoo!")
E       AssertionError: b'' != b'FooFoo!'

tests/integration/test_apport_unpack.py:65: AssertionError
___________________________ T.test_compressed_values ___________________________

self = <tests.integration.test_problem_report.T testMethod=test_compressed_values>

    def test_compressed_values(self):
        """Handle of CompressedValue values."""
        large_val = b"A" * 5000000
    
        pr = problem_report.ProblemReport()
        pr["Foo"] = problem_report.CompressedValue(b"FooFoo!")
        pr["Bin"] = problem_report.CompressedValue()
        pr["Bin"].set_value(bin_data)
        pr["Large"] = problem_report.CompressedValue(large_val)
    
        self.assertTrue(isinstance(pr["Foo"], problem_report.CompressedValue))
        self.assertTrue(isinstance(pr["Bin"], problem_report.CompressedValue))
>       self.assertEqual(pr["Foo"].get_value(), b"FooFoo!")

tests/integration/test_problem_report.py:42: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
problem_report.py:79: in get_value
    return gzip.GzipFile(fileobj=io.BytesIO(self.gzipvalue)).read()
/usr/lib/python3.12/gzip.py:324: in read
    return self._buffer.read(size)
/usr/lib/python3.12/_compression.py:118: in readall
    while data := self.read(sys.maxsize):
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <gzip._GzipReader object at 0x7f287a41e260>, size = 9223372036854775807

    def read(self, size=-1):
        if size < 0:
            return self.readall()
        # size=0 is special because decompress(max_length=0) is not supported
        if not size:
            return b""
    
        # For certain input data, a single
        # call to decompress() may not return
        # any data. In this case, retry until we get some data or reach EOF.
        while True:
            if self._decompressor.eof:
                # Ending case: we've come to the end of a member in the file,
                # so finish up this member, and read a new gzip header.
                # Check the CRC and file size, and set the flag so we read
                # a new member
                self._read_eof()
                self._new_member = True
                self._decompressor = self._decomp_factory(
                    **self._decomp_args)
    
            if self._new_member:
                # If the _new_member flag is set, we have to
                # jump to the next member, if there is one.
                self._init_read()
                if not self._read_gzip_header():
                    self._size = self._pos
                    return b""
                self._new_member = False
    
            # Read a chunk of data from the file
            if self._decompressor.needs_input:
                buf = self._fp.read(READ_BUFFER_SIZE)
                uncompress = self._decompressor.decompress(buf, size)
            else:
                uncompress = self._decompressor.decompress(b"", size)
    
            if self._decompressor.unused_data != b"":
                # Prepend the already read bytes to the fileobj so they can
                # be seen by _read_eof() and _read_gzip_header()
                self._fp.prepend(self._decompressor.unused_data)
    
            if uncompress != b"":
                break
            if buf == b"":
>               raise EOFError("Compressed file ended before the "
                               "end-of-stream marker was reached")
E               EOFError: Compressed file ended before the end-of-stream marker was reached

/usr/lib/python3.12/gzip.py:547: EOFError
_____________________________ T.test_extract_keys ______________________________

self = <tests.integration.test_problem_report.T testMethod=test_extract_keys>

    def test_extract_keys(self):
        """extract_keys() with various binary elements."""
        # create a test report with binary elements
        large_val = b"A" * 5000000
    
        pr = problem_report.ProblemReport()
        pr["Txt"] = "some text"
        pr["MoreTxt"] = "some more text"
        pr["Foo"] = problem_report.CompressedValue(b"FooFoo!")
        pr["Uncompressed"] = bin_data
        pr["Bin"] = problem_report.CompressedValue()
        pr["Bin"].set_value(bin_data)
        pr["Large"] = problem_report.CompressedValue(large_val)
        pr["Multiline"] = 
problem_report.CompressedValue(b"\1\1\1\n\2\2\n\3\3\3")
    
        report = io.BytesIO()
        pr.write(report)
        report.seek(0)
    
        self.assertRaises(
            OSError,
            pr.extract_keys,
            report,
            "Bin",
            os.path.join(self.workdir, "nonexistent"),
        )
        # Test exception handling: Non-binary and nonexistent key
        tests = [
            (ValueError, "Txt"),
            (ValueError, ["Foo", "Txt"]),
            (KeyError, "Bar"),
            (KeyError, ["Foo", "Bar"]),
        ]
        for exc, keys_arg in tests:
            report.seek(0)
            self.assertRaises(exc, pr.extract_keys, report, keys_arg, self.workdir)
    
        # Check valid single elements
        tests = {
            "Foo": b"FooFoo!",
            "Uncompressed": bin_data,
            "Bin": bin_data,
            "Large": large_val,
            "Multiline": b"\1\1\1\n\2\2\n\3\3\3",
        }
        for key, expected in tests.items():
            report.seek(0)
            pr.extract_keys(report, key, self.workdir)
            with open(os.path.join(self.workdir, key), "rb") as f:
>               self.assertEqual(f.read(), expected)
E               AssertionError: b'' != b'FooFoo!'

tests/integration/test_problem_report.py:167: AssertionError
___________________________ T.test_write_mime_binary ___________________________

self = <tests.integration.test_problem_report.T testMethod=test_write_mime_binary>

    def test_write_mime_binary(self):
        """write_mime() for binary values and file references."""
        with tempfile.NamedTemporaryFile() as temp:
            with tempfile.NamedTemporaryFile() as tempgz:
                temp.write(bin_data)
                temp.flush()
    
                with gzip.GzipFile("File1", "w", fileobj=tempgz) as gz:
                    gz.write(bin_data)
                tempgz.flush()
    
                pr = problem_report.ProblemReport(date="now!")
                pr["Context"] = "Test suite"
                pr["File1"] = (temp.name,)
                pr["File1.gz"] = (tempgz.name,)
                pr["Value1"] = bin_data
                with open(tempgz.name, "rb") as f:
                    pr["Value1.gz"] = f.read()
                pr["ZValue"] = problem_report.CompressedValue(bin_data)
                out = io.BytesIO()
                pr.write_mime(out)
                out.seek(0)
    
        msg = email.message_from_binary_file(out)
        parts = list(msg.walk())
        self.assertEqual(len(parts), 7)
    
        # first part is the multipart container
        self.assertTrue(parts[0].is_multipart())
    
        # second part should be an inline text/plain attachments with all short
        # fields
        self.assertTrue(not parts[1].is_multipart())
        self.assertEqual(parts[1].get_content_type(), "text/plain")
        self.assertEqual(parts[1].get_content_charset(), "utf-8")
        self.assertEqual(parts[1].get_filename(), None)
        self.assertEqual(
            parts[1].get_payload(decode=True),
            b"ProblemType: Crash\nContext: Test suite\nDate: now!\n",
        )
    
        # third part should be the File1: file contents as gzip'ed attachment
        self.assertTrue(not parts[2].is_multipart())
        self.assertEqual(parts[2].get_content_type(), "application/x-gzip")
        self.assertEqual(parts[2].get_filename(), "File1.gz")
        self.assertEqual(self.decode_gzipped_message(parts[2]), bin_data)
    
        # fourth part should be the File1.gz: file contents as gzip'ed
        # attachment; write_mime() should not compress it again
        self.assertTrue(not parts[3].is_multipart())
        self.assertEqual(parts[3].get_content_type(), "application/x-gzip")
        self.assertEqual(parts[3].get_filename(), "File1.gz")
        self.assertEqual(self.decode_gzipped_message(parts[3]), bin_data)
    
        # fifth part should be the Value1: value as gzip'ed attachment
        self.assertTrue(not parts[4].is_multipart())
        self.assertEqual(parts[4].get_content_type(), "application/x-gzip")
        self.assertEqual(parts[4].get_filename(), "Value1.gz")
>       self.assertEqual(self.decode_gzipped_message(parts[4]), bin_data)

tests/integration/test_problem_report.py:480: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
tests/integration/test_problem_report.py:540: in decode_gzipped_message
    return gzip.GzipFile(mode="rb", fileobj=payload).read()
/usr/lib/python3.12/gzip.py:324: in read
    return self._buffer.read(size)
/usr/lib/python3.12/_compression.py:118: in readall
    while data := self.read(sys.maxsize):
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <gzip._GzipReader object at 0x7f287a899f90>, size = 9223372036854775807

    def read(self, size=-1):
        if size < 0:
            return self.readall()
        # size=0 is special because decompress(max_length=0) is not supported
        if not size:
            return b""
    
        # For certain input data, a single
        # call to decompress() may not return
        # any data. In this case, retry until we get some data or reach EOF.
        while True:
            if self._decompressor.eof:
                # Ending case: we've come to the end of a member in the file,
                # so finish up this member, and read a new gzip header.
                # Check the CRC and file size, and set the flag so we read
                # a new member
                self._read_eof()
                self._new_member = True
                self._decompressor = self._decomp_factory(
                    **self._decomp_args)
    
            if self._new_member:
                # If the _new_member flag is set, we have to
                # jump to the next member, if there is one.
                self._init_read()
                if not self._read_gzip_header():
                    self._size = self._pos
                    return b""
                self._new_member = False
    
            # Read a chunk of data from the file
            if self._decompressor.needs_input:
                buf = self._fp.read(READ_BUFFER_SIZE)
                uncompress = self._decompressor.decompress(buf, size)
            else:
                uncompress = self._decompressor.decompress(b"", size)
    
            if self._decompressor.unused_data != b"":
                # Prepend the already read bytes to the fileobj so they can
                # be seen by _read_eof() and _read_gzip_header()
                self._fp.prepend(self._decompressor.unused_data)
    
            if uncompress != b"":
                break
            if buf == b"":
>               raise EOFError("Compressed file ended before the "
                               "end-of-stream marker was reached")
E               EOFError: Compressed file ended before the end-of-stream marker was reached

/usr/lib/python3.12/gzip.py:547: EOFError
___________________________ T.test_write_mime_filter ___________________________

self = <tests.integration.test_problem_report.T testMethod=test_write_mime_filter>

    def test_write_mime_filter(self):
        """write_mime() with key filters."""
        pr = problem_report.ProblemReport(date="now!")
        pr["GoodText"] = "Hi"
        pr["BadText"] = "YouDontSeeMe"
        pr["GoodBin"] = bin_data
        pr["BadBin"] = "Y" + "\x05" * 10 + "-"
        out = io.BytesIO()
        pr.write_mime(out, skip_keys=["BadText", "BadBin"])
        out.seek(0)
    
        msg = email.message_from_binary_file(out)
        parts = list(msg.walk())
        self.assertEqual(len(parts), 3)
    
        # first part is the multipart container
        self.assertTrue(parts[0].is_multipart())
    
        # second part should be an inline text/plain attachments with all short
        # fields
        self.assertTrue(not parts[1].is_multipart())
        self.assertEqual(parts[1].get_content_type(), "text/plain")
        self.assertEqual(parts[1].get_content_charset(), "utf-8")
        self.assertEqual(parts[1].get_filename(), None)
        self.assertEqual(
            parts[1].get_payload(decode=True),
            textwrap.dedent(
                """\
                ProblemType: Crash
                Date: now!
                GoodText: Hi
                """
            ).encode(),
        )
    
        # third part should be the GoodBin: field as attachment
        self.assertTrue(not parts[2].is_multipart())
>       self.assertEqual(self.decode_gzipped_message(parts[2]), bin_data)

tests/integration/test_problem_report.py:533: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
tests/integration/test_problem_report.py:540: in decode_gzipped_message
    return gzip.GzipFile(mode="rb", fileobj=payload).read()
/usr/lib/python3.12/gzip.py:324: in read
    return self._buffer.read(size)
/usr/lib/python3.12/_compression.py:118: in readall
    while data := self.read(sys.maxsize):
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <gzip._GzipReader object at 0x7f287a02d630>, size = 9223372036854775807

    def read(self, size=-1):
        if size < 0:
            return self.readall()
        # size=0 is special because decompress(max_length=0) is not supported
        if not size:
            return b""
    
        # For certain input data, a single
        # call to decompress() may not return
        # any data. In this case, retry until we get some data or reach EOF.
        while True:
            if self._decompressor.eof:
                # Ending case: we've come to the end of a member in the file,
                # so finish up this member, and read a new gzip header.
                # Check the CRC and file size, and set the flag so we read
                # a new member
                self._read_eof()
                self._new_member = True
                self._decompressor = self._decomp_factory(
                    **self._decomp_args)
    
            if self._new_member:
                # If the _new_member flag is set, we have to
                # jump to the next member, if there is one.
                self._init_read()
                if not self._read_gzip_header():
                    self._size = self._pos
                    return b""
                self._new_member = False
    
            # Read a chunk of data from the file
            if self._decompressor.needs_input:
                buf = self._fp.read(READ_BUFFER_SIZE)
                uncompress = self._decompressor.decompress(buf, size)
            else:
                uncompress = self._decompressor.decompress(b"", size)
    
            if self._decompressor.unused_data != b"":
                # Prepend the already read bytes to the fileobj so they can
                # be seen by _read_eof() and _read_gzip_header()
                self._fp.prepend(self._decompressor.unused_data)
    
            if uncompress != b"":
                break
            if buf == b"":
>               raise EOFError("Compressed file ended before the "
                               "end-of-stream marker was reached")
E               EOFError: Compressed file ended before the end-of-stream marker was reached

/usr/lib/python3.12/gzip.py:547: EOFError
_____________________ T.test_add_gdb_info_short_core_file ______________________

self = <tests.integration.test_report.T testMethod=test_add_gdb_info_short_core_file>

    def test_add_gdb_info_short_core_file(self):
        """add_gdb_info() with damaged core dump in gzip file"""
        pr = self._generate_sigsegv_report()
        del pr["Stacktrace"]
        del pr["StacktraceTop"]
        del pr["ThreadStacktrace"]
        del pr["Disassembly"]
    
        core = pr["CoreDump"][0]
        os.truncate(core, 10000)
        with open(core, "rb") as f:
            pr["CoreDump"] = problem_report.CompressedValue(f.read())
    
>       self.assertRaises(OSError, pr.add_gdb_info)

tests/integration/test_report.py:791: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apport/report.py:903: in add_gdb_info
    gdb_cmd, environ = self.gdb_command(rootdir, gdb_sandbox)
apport/report.py:1910: in gdb_command
    self["CoreDump"].write(f)
problem_report.py:91: in write
    block = gz.read(1048576)
/usr/lib/python3.12/gzip.py:324: in read
    return self._buffer.read(size)
/usr/lib/python3.12/_compression.py:68: in readinto
    data = self.read(len(byte_view))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

    def read(self, size=-1):
        if size < 0:
            return self.readall()
        # size=0 is special because decompress(max_length=0) is not supported
        if not size:
            return b""
    
        # For certain input data, a single
        # call to decompress() may not return
        # any data. In this case, retry until we get some data or reach EOF.
        while True:
            if self._decompressor.eof:
                # Ending case: we've come to the end of a member in the file,
                # so finish up this member, and read a new gzip header.
                # Check the CRC and file size, and set the flag so we read
                # a new member
                self._read_eof()
                self._new_member = True
                self._decompressor = self._decomp_factory(
                    **self._decomp_args)
    
            if self._new_member:
                # If the _new_member flag is set, we have to
                # jump to the next member, if there is one.
                self._init_read()
                if not self._read_gzip_header():
                    self._size = self._pos
                    return b""
                self._new_member = False
    
            # Read a chunk of data from the file
            if self._decompressor.needs_input:
                buf = self._fp.read(READ_BUFFER_SIZE)
                uncompress = self._decompressor.decompress(buf, size)
            else:
                uncompress = self._decompressor.decompress(b"", size)
    
            if self._decompressor.unused_data != b"":
                # Prepend the already read bytes to the fileobj so they can
                # be seen by _read_eof() and _read_gzip_header()
                self._fp.prepend(self._decompressor.unused_data)
    
            if uncompress != b"":
                break
            if buf == b"":
>               raise EOFError("Compressed file ended before the "
                               "end-of-stream marker was reached")
E               EOFError: Compressed file ended before the end-of-stream marker was reached

/usr/lib/python3.12/gzip.py:547: EOFError
----------------------------- Captured stderr call -----------------------------
warning: Memory read failed for corefile section, 4096 bytes at 0xffffffffff600000.
WARNING: Please install gdb-multiarch for processing reports from foreign architectures. Results with "gdb" will be very poor.
WARNING: Please install gdb-multiarch for processing reports from foreign architectures. Results with "gdb" will be very poor.
__________________________ T.test_search_bug_patterns __________________________

self = <tests.integration.test_report.T testMethod=test_search_bug_patterns>

    def test_search_bug_patterns(self):
        # TODO: Split into separate test cases
        # pylint: disable=too-many-statements
        """search_bug_patterns()."""
        # create some test patterns
        patterns = textwrap.dedent(
            """\
            <?xml version="1.0"?>
            <patterns>
                <pattern url="http://bugtracker.net/bugs/1";>
                    <re key="Package">^bash </re>
                    <re key="Foo">ba.*r</re>
                </pattern>
                <pattern url="http://bugtracker.net/bugs/2";>
                    <re key="Package">^bash 1-2$</re>
                    <re key="Foo">write_(hello|goodbye)</re>
                </pattern>
                <pattern url="http://bugtracker.net/bugs/3";>
                    <re key="Package">^coreutils </re>
                    <re key="Bar">^1$</re>
                </pattern>
                <pattern url="http://bugtracker.net/bugs/4";>
                    <re key="Package">^coreutils </re>
                    <re></re>
                    <re key="Bar">*</re> <!-- invalid RE -->
                    <re key="broken">+[1^</re>
                </pattern>
                <pattern url="http://bugtracker.net/bugs/5";>
                    <re key="SourcePackage">^bazaar$</re>
                    <re key="LogFile">AssertionError</re>
                </pattern>
                <pattern url="http://bugtracker.net/bugs/6";>
                    <re key="Package">^update-notifier</re>
                    <re key="LogFile">AssertionError ‽</re>
                </pattern>
            </patterns>"""
        ).encode()
    
        # invalid XML
        invalid = b'<?xml version="1.0"?>\n</patterns>'
    
        # create some reports
        r_bash = apport.report.Report()
        r_bash["Package"] = "bash 1-2"
        r_bash["Foo"] = "bazaar"
    
        r_bazaar = apport.report.Report()
        r_bazaar["Package"] = "bazaar 2-1"
        r_bazaar["SourcePackage"] = "bazaar"
        r_bazaar["LogFile"] = "AssertionError"
    
        r_coreutils = apport.report.Report()
        r_coreutils["Package"] = "coreutils 1"
        r_coreutils["Bar"] = "1"
    
        r_invalid = apport.report.Report()
        r_invalid["Package"] = "invalid 1"
    
        r_unicode = apport.report.Report()
        r_unicode["Package"] = "update-notifier"
        r_unicode["LogFile"] = "AssertionError ‽"
    
        with tempfile.NamedTemporaryFile(prefix="apport-") as bug_pattern:
            bug_pattern.write(patterns)
            bug_pattern.flush()
            pattern_url = f"file://{bug_pattern.name}"
    
            # positive match cases
            self.assertEqual(
                r_bash.search_bug_patterns(pattern_url), "http://bugtracker.net/bugs/1"
            )
            r_bash["Foo"] = "write_goodbye"
            self.assertEqual(
                r_bash.search_bug_patterns(pattern_url), "http://bugtracker.net/bugs/2"
            )
            self.assertEqual(
                r_coreutils.search_bug_patterns(pattern_url),
                "http://bugtracker.net/bugs/3";,
            )
            self.assertEqual(
                r_bazaar.search_bug_patterns(pattern_url),
                "http://bugtracker.net/bugs/5";,
            )
            self.assertEqual(
                r_unicode.search_bug_patterns(pattern_url),
                "http://bugtracker.net/bugs/6";,
            )
    
            # also works for CompressedValues
            r_bash_compressed = r_bash.copy()
            r_bash_compressed["Foo"] = problem_report.CompressedValue(b"bazaar")
            self.assertEqual(
>               r_bash_compressed.search_bug_patterns(pattern_url),
                "http://bugtracker.net/bugs/1";,
            )

tests/integration/test_report.py:1150: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apport/report.py:1199: in search_bug_patterns
    url = _check_bug_patterns(self, patterns)
apport/report.py:238: in _check_bug_patterns
    url = _check_bug_pattern(report, pattern)
apport/report.py:217: in _check_bug_pattern
    v = v.get_value()
problem_report.py:79: in get_value
    return gzip.GzipFile(fileobj=io.BytesIO(self.gzipvalue)).read()
/usr/lib/python3.12/gzip.py:324: in read
    return self._buffer.read(size)
/usr/lib/python3.12/_compression.py:118: in readall
    while data := self.read(sys.maxsize):
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <gzip._GzipReader object at 0x7f2879fd6a40>, size = 9223372036854775807

    def read(self, size=-1):
        if size < 0:
            return self.readall()
        # size=0 is special because decompress(max_length=0) is not supported
        if not size:
            return b""
    
        # For certain input data, a single
        # call to decompress() may not return
        # any data. In this case, retry until we get some data or reach EOF.
        while True:
            if self._decompressor.eof:
                # Ending case: we've come to the end of a member in the file,
                # so finish up this member, and read a new gzip header.
                # Check the CRC and file size, and set the flag so we read
                # a new member
                self._read_eof()
                self._new_member = True
                self._decompressor = self._decomp_factory(
                    **self._decomp_args)
    
            if self._new_member:
                # If the _new_member flag is set, we have to
                # jump to the next member, if there is one.
                self._init_read()
                if not self._read_gzip_header():
                    self._size = self._pos
                    return b""
                self._new_member = False
    
            # Read a chunk of data from the file
            if self._decompressor.needs_input:
                buf = self._fp.read(READ_BUFFER_SIZE)
                uncompress = self._decompressor.decompress(buf, size)
            else:
                uncompress = self._decompressor.decompress(b"", size)
    
            if self._decompressor.unused_data != b"":
                # Prepend the already read bytes to the fileobj so they can
                # be seen by _read_eof() and _read_gzip_header()
                self._fp.prepend(self._decompressor.unused_data)
    
            if uncompress != b"":
                break
            if buf == b"":
>               raise EOFError("Compressed file ended before the "
                               "end-of-stream marker was reached")
E               EOFError: Compressed file ended before the end-of-stream marker was reached

/usr/lib/python3.12/gzip.py:547: EOFError
______________________________ T.test_load_report ______________________________

self = <tests.integration.test_ui.T testMethod=test_load_report>

    def test_load_report(self):
        """load_report()"""
        # valid report
        self.ui.load_report(self.report_file.name)
        self.assertEqual(set(self.ui.report.keys()), set(self.report.keys()))
        self.assertEqual(self.ui.report["Package"], self.report["Package"])
        self.assertEqual(
>           self.ui.report["CoreDump"].get_value(), self.report["CoreDump"].get_value()
        )

tests/integration/test_ui.py:358: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
problem_report.py:79: in get_value
    return gzip.GzipFile(fileobj=io.BytesIO(self.gzipvalue)).read()
/usr/lib/python3.12/gzip.py:324: in read
    return self._buffer.read(size)
/usr/lib/python3.12/_compression.py:118: in readall
    while data := self.read(sys.maxsize):
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <gzip._GzipReader object at 0x7f2879fd2aa0>, size = 9223372036854775807

    def read(self, size=-1):
        if size < 0:
            return self.readall()
        # size=0 is special because decompress(max_length=0) is not supported
        if not size:
            return b""
    
        # For certain input data, a single
        # call to decompress() may not return
        # any data. In this case, retry until we get some data or reach EOF.
        while True:
            if self._decompressor.eof:
                # Ending case: we've come to the end of a member in the file,
                # so finish up this member, and read a new gzip header.
                # Check the CRC and file size, and set the flag so we read
                # a new member
                self._read_eof()
                self._new_member = True
                self._decompressor = self._decomp_factory(
                    **self._decomp_args)
    
            if self._new_member:
                # If the _new_member flag is set, we have to
                # jump to the next member, if there is one.
                self._init_read()
                if not self._read_gzip_header():
                    self._size = self._pos
                    return b""
                self._new_member = False
    
            # Read a chunk of data from the file
            if self._decompressor.needs_input:
                buf = self._fp.read(READ_BUFFER_SIZE)
                uncompress = self._decompressor.decompress(buf, size)
            else:
                uncompress = self._decompressor.decompress(b"", size)
    
            if self._decompressor.unused_data != b"":
                # Prepend the already read bytes to the fileobj so they can
                # be seen by _read_eof() and _read_gzip_header()
                self._fp.prepend(self._decompressor.unused_data)
    
            if uncompress != b"":
                break
            if buf == b"":
>               raise EOFError("Compressed file ended before the "
                               "end-of-stream marker was reached")
E               EOFError: Compressed file ended before the end-of-stream marker was reached

/usr/lib/python3.12/gzip.py:547: EOFError
=========================== short test summary info ============================
SKIPPED [1] tests/unit/test_problem_report.py:75: Missing German locale support
SKIPPED [1] tests/integration/test_apport_checkreports.py:71: this test needs to be run as root
SKIPPED [1] tests/integration/test_hooks.py:312: this test needs to be run as root
SKIPPED [1] tests/integration/test_hookutils.py:378: no logind session
SKIPPED [1] tests/integration/test_hookutils.py:70: kernel module 'isofs' not available
SKIPPED [1] tests/integration/test_packaging_rpm.py:20: rpm not installed
SKIPPED [1] tests/integration/test_packaging_rpm.py:25: rpm not installed
SKIPPED [1] tests/integration/test_packaging_rpm.py:31: rpm not installed
SKIPPED [1] tests/integration/test_packaging_rpm.py:38: rpm not installed
SKIPPED [1] tests/integration/test_packaging_rpm.py:45: rpm not installed
SKIPPED [1] tests/integration/test_report.py:550: twistd not installed
SKIPPED [1] tests/integration/test_signal_crashes.py:680: this test needs to be run as root
SKIPPED [1] tests/integration/test_signal_crashes.py:740: this test needs to be run as root
SKIPPED [1] tests/integration/test_signal_crashes.py:658: this test needs to be run as root
SKIPPED [1] tests/integration/test_signal_crashes.py:691: this test needs to be run as root
SKIPPED [1] tests/integration/test_signal_crashes.py:383: this test needs to be run as root
SKIPPED [1] tests/integration/test_signal_crashes.py:218: fix test as multiple instances can be started within 30s
SKIPPED [1] tests/integration/test_ui.py:1650: not running in logind session
FAILED tests/integration/test_apport_unpack.py::TestApportUnpack::test_unpack
FAILED tests/integration/test_problem_report.py::T::test_compressed_values - ...
FAILED tests/integration/test_problem_report.py::T::test_extract_keys - Asser...
FAILED tests/integration/test_problem_report.py::T::test_write_mime_binary - ...
FAILED tests/integration/test_problem_report.py::T::test_write_mime_filter - ...
FAILED tests/integration/test_report.py::T::test_add_gdb_info_short_core_file
FAILED tests/integration/test_report.py::T::test_search_bug_patterns - EOFErr...
FAILED tests/integration/test_ui.py::T::test_load_report - EOFError: Compress...
============ 8 failed, 421 passed, 18 skipped in 110.28s (0:01:50) =============

https://launchpadlibrarian.net/712420168/buildlog_ubuntu-noble-amd64.apport_2.27.0-0ubuntu7~ppa2_BUILDING.txt.gz
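
On the apport side, the straightforward remedy is to close the GzipFile
deterministically instead of relying on the garbage collector. A sketch
under that assumption (compress_value is an invented helper name; in
problem_report.py the equivalent code lives in CompressedValue.set_value):

import gzip
import io

def compress_value(value: bytes) -> bytes:
    """Gzip `value`, flushing deterministically (Python 3.12-safe)."""
    out = io.BytesIO()
    # Closing the GzipFile flushes the compressor and writes the gzip
    # trailer (CRC32 + length) before the buffer is read back.
    with gzip.GzipFile(mode="wb", fileobj=out) as gz:
        gz.write(value)
    return out.getvalue()

assert gzip.decompress(compress_value(b"FooFoo!")) == b"FooFoo!"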

** Also affects: python3-defaults (Ubuntu)
   Importance: Undecided
       Status: New

** Tags added: update-excuse

https://bugs.launchpad.net/bugs/2051512

Title:
  apport ftbfs with Python 3.12 as the default

Status in apport package in Ubuntu:
  Confirmed
Status in python3-defaults package in Ubuntu:
  New

Bug description:
     debian/rules override_dh_auto_test
  make[1]: Entering directory '/<<PKGBUILDDIR>>'
  tests/run-linters --errors-only
  Skipping mypy tests, mypy is not installed
  Running pylint...
  ************* Module apport-retrace
  bin/apport-retrace:577:44: E0601: Using variable 'crashid' before assignment (used-before-assignment)
  make[1]: *** [debian/rules:23: override_dh_auto_test] Error 2
  make[1]: Leaving directory '/<<PKGBUILDDIR>>'
  make: *** [debian/rules:4: binary] Error 2

To manage notifications about this bug go to:
https://bugs.launchpad.net/ubuntu/+source/apport/+bug/2051512/+subscriptions
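
As for the E0601 that aborts the build in the description: it is the usual
shape of a name bound only on some control-flow paths. An illustrative
sketch (names invented, not the actual bin/apport-retrace code):

def pick_crash(crashes):
    for candidate in crashes:
        if candidate.endswith(".crash"):
            crashid = candidate  # bound only when this branch is taken
            break
    return crashid  # pylint E0601: used-before-assignment

def pick_crash_fixed(crashes):
    crashid = None  # bind the name on every path before use
    for candidate in crashes:
        if candidate.endswith(".crash"):
            crashid = candidate
            break
    return crashid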

