commit: f329e886b7fc7dbfe2f8ceef0138c84646e6a67b
Author: Brian Harring <ferringb <AT> gmail <DOT> com>
AuthorDate: Sun Nov 30 21:02:14 2025 +0000
Commit: Brian Harring <ferringb <AT> gmail <DOT> com>
CommitDate: Sun Nov 30 21:25:18 2025 +0000
URL:
https://gitweb.gentoo.org/proj/pkgcore/snakeoil.git/commit/?id=f329e886
chore: delete snakeoil.pickling
Signed-off-by: Brian Harring <ferringb <AT> gmail.com>
src/snakeoil/pickling.py | 42 ------------------------------------------
src/snakeoil/test/mixins.py | 1 -
2 files changed, 43 deletions(-)
diff --git a/src/snakeoil/pickling.py b/src/snakeoil/pickling.py
deleted file mode 100644
index 9707ecd..0000000
--- a/src/snakeoil/pickling.py
+++ /dev/null
@@ -1,42 +0,0 @@
-"""
-pickling convenience module
-"""
-
-__all__ = (
- "iter_stream",
- "dump_stream",
-)
-
-from pickle import dump, load
-
-
-def iter_stream(stream):
- """
- given a filehandle to consume from, yield pickled objects from it.
-
- This is useful in conjunction with :py:func:`dump_stream` to serialize
- items as you go, rather than in one single shot.
-
- :param stream: file like object to continually try consuming pickled
- data from until EOF is reached.
- """
- try:
- while True:
- yield load(stream)
- except EOFError:
- pass
-
-
-def dump_stream(handle, stream):
- """
- given a filehandle to write to, write pickled objects to it.
-
- This is useful in conjunction with :py:func:`iter_stream` to deserialize
- the results of this function- specifically you use dump_stream to flush it
- to disk as you go, and iter_stream to load it back as you go.
-
- :param handle: file like object to write to
- :param stream: iterable of objects to pickle and write to handle
- """
- for item in stream:
- dump(item, handle)
diff --git a/src/snakeoil/test/mixins.py b/src/snakeoil/test/mixins.py
index e6f2e02..01dcfc2 100644
--- a/src/snakeoil/test/mixins.py
+++ b/src/snakeoil/test/mixins.py
@@ -23,7 +23,6 @@ class PythonNamespaceWalker:
module_blacklist = frozenset(
{
"snakeoil.cli.arghparse",
- "snakeoil.pickling",
}
)