Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Archive pytest conversion #7661

Merged
merged 4 commits into from
Jul 10, 2023
Merged
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
63 changes: 31 additions & 32 deletions src/borg/testsuite/archive.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,23 +34,22 @@ def test_stats_basic(stats):
assert stats.usize == 20


def tests_stats_progress(stats, monkeypatch, columns=80):
@pytest.mark.parametrize(
"item_path, update_size, expected_output",
[
("", 0, "20 B O 20 B U 1 N "), # test unchanged 'stats' fixture
("foo", 10**3, "1.02 kB O 20 B U 1 N foo"), # test updated original size and set item path
("foo" * 40, 10**3, "1.02 kB O 20 B U 1 N foofoofoofoofoofoofoofoofo...foofoofoofoofoofoofoofoofoofoo"),
], # test long item path which exceeds 80 characters
ThomasWaldmann marked this conversation as resolved.
Show resolved Hide resolved
)
def test_stats_progress(item_path, update_size, expected_output, stats, monkeypatch, columns=80):
monkeypatch.setenv("COLUMNS", str(columns))
out = StringIO()
stats.show_progress(stream=out)
s = "20 B O 20 B U 1 N "
buf = " " * (columns - len(s))
assert out.getvalue() == s + buf + "\r"
item = Item(path=item_path) if item_path else None
s = expected_output

out = StringIO()
stats.update(10**3, unique=False)
stats.show_progress(item=Item(path="foo"), final=False, stream=out)
ThomasWaldmann marked this conversation as resolved.
Show resolved Hide resolved
s = "1.02 kB O 20 B U 1 N foo"
buf = " " * (columns - len(s))
assert out.getvalue() == s + buf + "\r"
out = StringIO()
stats.show_progress(item=Item(path="foo" * 40), final=False, stream=out)
s = "1.02 kB O 20 B U 1 N foofoofoofoofoofoofoofoofo...foofoofoofoofoofoofoofoofoofoo"
stats.update(update_size, unique=False)
stats.show_progress(item=item, stream=out)
buf = " " * (columns - len(s))
assert out.getvalue() == s + buf + "\r"

Expand Down Expand Up @@ -103,6 +102,22 @@ def test_stats_progress_json(stats):
assert "nfiles" not in result


@pytest.mark.parametrize(
    "isoformat, expected",
    [
        ("1970-01-01T00:00:01.000001", datetime(1970, 1, 1, 0, 0, 1, 1, timezone.utc)),  # with microseconds
        ("1970-01-01T00:00:01", datetime(1970, 1, 1, 0, 0, 1, 0, timezone.utc)),  # without microseconds
    ],
)
def test_timestamp_parsing(isoformat, expected):
    """Archive.ts parses the metadata 'time' string into a tz-aware UTC datetime.

    Covers ISO 8601 timestamps both with and without a microseconds component.
    Note: the unused ``monkeypatch`` fixture parameter was removed — nothing in
    the body patched anything.
    """
    # A mocked repository with a plaintext key is the minimal setup needed;
    # only the archive's metadata timestamp is exercised here.
    repository = Mock()
    key = PlaintextKey(repository)
    manifest = Manifest(key, repository)
    archive = Archive(manifest, "test", create=True)
    # Overwrite whatever metadata create=True produced with the value under test.
    archive.metadata = ArchiveItem(time=isoformat)
    assert archive.ts == expected


class MockCache:
class MockRepo:
def async_response(self, wait=True):
Expand All @@ -117,24 +132,8 @@ def add_chunk(self, id, meta, data, stats=None, wait=True):
return id, len(data)


class ArchiveTimestampTestCase(BaseTestCase):
    """Checks that Archive.ts converts ISO 8601 metadata timestamps to UTC datetimes."""

    def _test_timestamp_parsing(self, isoformat, expected):
        # Minimal archive setup: a mocked repository with a plaintext key is
        # sufficient, since only the metadata timestamp is read afterwards.
        mock_repo = Mock()
        archive = Archive(Manifest(PlaintextKey(mock_repo), mock_repo), "test", create=True)
        archive.metadata = ArchiveItem(time=isoformat)
        self.assert_equal(archive.ts, expected)

    def test_with_microseconds(self):
        expected = datetime(1970, 1, 1, 0, 0, 1, 1, timezone.utc)
        self._test_timestamp_parsing("1970-01-01T00:00:01.000001", expected)

    def test_without_microseconds(self):
        expected = datetime(1970, 1, 1, 0, 0, 1, 0, timezone.utc)
        self._test_timestamp_parsing("1970-01-01T00:00:01", expected)


class ChunkBufferTestCase(BaseTestCase):
def test(self):
def test_cache_chunk_buffer(self):
data = [Item(path="p1"), Item(path="p2")]
cache = MockCache()
key = PlaintextKey(None)
Expand All @@ -149,7 +148,7 @@ def test(self):
unpacker.feed(cache.objects[id])
self.assert_equal(data, [Item(internal_dict=d) for d in unpacker])

def test_partial(self):
def test_partial_cache_chunk_buffer(self):
big = "0123456789abcdefghijklmnopqrstuvwxyz" * 25000
data = [Item(path="full", target=big), Item(path="partial", target=big)]
cache = MockCache()
Expand Down