remove Chunk()

commit 2ff75d58f2 (parent 69fb9bd403)
10 changed files with 91 additions and 97 deletions
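This commit drops the Chunk namedtuple wrapper from borg.helpers: chunk payloads now travel as plain bytes, key.encrypt()/key.decrypt() take and return raw data, and call sites stop unwrapping .data. A minimal sketch of the removed helper (copied from the helpers.py hunk below) together with the call-site change it implies; the assert lines are illustrative only, not part of the diff:

from collections import namedtuple

# The wrapper removed by this commit: a (meta dict, data bytes) pair
# that every consumer had to unwrap via .data.
_Chunk = namedtuple('_Chunk', 'meta data')

def Chunk(data, **meta):
    return _Chunk(meta, data)

data = b'example payload'
chunk = Chunk(data)
assert chunk.data == data and chunk.meta == {}   # before: wrap, then unwrap
# after: no wrapper -- the bytes object itself is hashed, compressed and
# stored, e.g. key.id_hash(data) instead of key.id_hash(chunk.data)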
diff --git a/src/borg/archive.py b/src/borg/archive.py
@@ -25,7 +25,7 @@ from .compress import Compressor, CompressionSpec
 from .constants import *  # NOQA
 from .hashindex import ChunkIndex, ChunkIndexEntry
 from .helpers import Manifest
-from .helpers import Chunk, ChunkIteratorFileWrapper, open_item
+from .helpers import ChunkIteratorFileWrapper, open_item
 from .helpers import Error, IntegrityError, set_ec
 from .helpers import uid2user, user2uid, gid2group, group2gid
 from .helpers import parse_timestamp, to_localtime
@@ -195,7 +195,7 @@ class DownloadPipeline:
         otherwise preloaded chunks will accumulate in RemoteRepository and create a memory leak.
         """
         unpacker = msgpack.Unpacker(use_list=False)
-        for _, data in self.fetch_many(ids):
+        for data in self.fetch_many(ids):
             unpacker.feed(data)
             items = [Item(internal_dict=item) for item in unpacker]
             for item in items:
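With the wrapper gone, fetch_many() yields bytes instead of (meta, data) pairs, which is why the unpacking loop above loses its underscore. A toy illustration of that calling-convention change (hypothetical stand-in generators, not borg's repository API):

def fetch_many_old(ids):
    for i in ids:
        yield ({}, b'payload-%d' % i)    # _Chunk(meta, data) shape

def fetch_many_new(ids):
    for i in ids:
        yield b'payload-%d' % i          # plain bytes

for _, data in fetch_many_old(range(3)):     # old: unpack the pair
    assert isinstance(data, bytes)
for data in fetch_many_new(range(3)):        # new: use the bytes directly
    assert isinstance(data, bytes)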
@@ -237,7 +237,9 @@ class ChunkBuffer:
         if self.buffer.tell() == 0:
             return
         self.buffer.seek(0)
-        chunks = list(Chunk(bytes(s)) for s in self.chunker.chunkify(self.buffer))
+        # The chunker returns a memoryview to its internal buffer,
+        # thus a copy is needed before resuming the chunker iterator.
+        chunks = list(bytes(s) for s in self.chunker.chunkify(self.buffer))
         self.buffer.seek(0)
         self.buffer.truncate(0)
         # Leave the last partial chunk in the buffer unless flush is True
@@ -245,7 +247,7 @@ class ChunkBuffer:
         for chunk in chunks[:end]:
             self.chunks.append(self.write_chunk(chunk))
         if end == -1:
-            self.buffer.write(chunks[-1].data)
+            self.buffer.write(chunks[-1])
 
     def is_full(self):
         return self.buffer.tell() > self.BUFFER_SIZE
@@ -259,7 +261,7 @@ class CacheChunkBuffer(ChunkBuffer):
         self.stats = stats
 
     def write_chunk(self, chunk):
-        id_, _, _ = self.cache.add_chunk(self.key.id_hash(chunk.data), chunk, self.stats, wait=False)
+        id_, _, _ = self.cache.add_chunk(self.key.id_hash(chunk), chunk, self.stats, wait=False)
         self.cache.repository.async_response(wait=False)
         return id_
 
@@ -325,7 +327,7 @@ class Archive:
         self.zeros = None
 
     def _load_meta(self, id):
-        _, data = self.key.decrypt(id, self.repository.get(id))
+        data = self.key.decrypt(id, self.repository.get(id))
         metadata = ArchiveItem(internal_dict=msgpack.unpackb(data, unicode_errors='surrogateescape'))
         if metadata.version != 1:
             raise Exception('Unknown archive metadata version')
@@ -464,7 +466,7 @@ Utilization of max. archive size: {csize_max:.0%}
         metadata = ArchiveItem(metadata)
         data = self.key.pack_and_authenticate_metadata(metadata.as_dict(), context=b'archive')
         self.id = self.key.id_hash(data)
-        self.cache.add_chunk(self.id, Chunk(data), self.stats)
+        self.cache.add_chunk(self.id, data, self.stats)
         while self.repository.async_response(wait=True) is not None:
             pass
         self.manifest.archives[name] = (self.id, metadata.time)
@@ -490,7 +492,7 @@ Utilization of max. archive size: {csize_max:.0%}
         add(self.id)
         for id, chunk in zip(self.metadata.items, self.repository.get_many(self.metadata.items)):
             add(id)
-            _, data = self.key.decrypt(id, chunk)
+            data = self.key.decrypt(id, chunk)
             unpacker.feed(data)
             for item in unpacker:
                 chunks = item.get(b'chunks')
@@ -520,7 +522,7 @@ Utilization of max. archive size: {csize_max:.0%}
         if dry_run or stdout:
             if 'chunks' in item:
                 item_chunks_size = 0
-                for _, data in self.pipeline.fetch_many([c.id for c in item.chunks], is_preloaded=True):
+                for data in self.pipeline.fetch_many([c.id for c in item.chunks], is_preloaded=True):
                     if pi:
                         pi.show(increase=len(data), info=[remove_surrogates(item.path)])
                     if stdout:
@@ -584,7 +586,7 @@ Utilization of max. archive size: {csize_max:.0%}
                     self.zeros = b'\0' * (1 << self.chunker_params[1])
                 with fd:
                     ids = [c.id for c in item.chunks]
-                    for _, data in self.pipeline.fetch_many(ids, is_preloaded=True):
+                    for data in self.pipeline.fetch_many(ids, is_preloaded=True):
                         if pi:
                             pi.show(increase=len(data), info=[remove_surrogates(item.path)])
                         with backup_io('write'):
@@ -712,7 +714,7 @@ Utilization of max. archive size: {csize_max:.0%}
             setattr(metadata, key, value)
         data = msgpack.packb(metadata.as_dict(), unicode_errors='surrogateescape')
         new_id = self.key.id_hash(data)
-        self.cache.add_chunk(new_id, Chunk(data), self.stats)
+        self.cache.add_chunk(new_id, data, self.stats)
         self.manifest.archives[self.name] = (new_id, metadata.time)
         self.cache.chunk_decref(self.id, self.stats)
         self.id = new_id
@@ -759,7 +761,7 @@ Utilization of max. archive size: {csize_max:.0%}
         for (i, (items_id, data)) in enumerate(zip(items_ids, self.repository.get_many(items_ids))):
             if progress:
                 pi.show(i)
-            _, data = self.key.decrypt(items_id, data)
+            data = self.key.decrypt(items_id, data)
             unpacker.feed(data)
             chunk_decref(items_id, stats)
             try:
@@ -874,10 +876,10 @@ Utilization of max. archive size: {csize_max:.0%}
                 self.write_checkpoint()
         return length, number
 
-    def chunk_file(self, item, cache, stats, chunk_iter, chunk_processor=None, **chunk_kw):
+    def chunk_file(self, item, cache, stats, chunk_iter, chunk_processor=None):
         if not chunk_processor:
             def chunk_processor(data):
-                chunk_entry = cache.add_chunk(self.key.id_hash(data), Chunk(data, **chunk_kw), stats, wait=False)
+                chunk_entry = cache.add_chunk(self.key.id_hash(data), data, stats, wait=False)
                 self.cache.repository.async_response(wait=False)
                 return chunk_entry
 
@@ -1205,9 +1207,9 @@ class ArchiveChecker:
                 chunk_ids = list(reversed(chunk_ids_revd))
                 chunk_data_iter = self.repository.get_many(chunk_ids)
             else:
-                _chunk_id = None if chunk_id == Manifest.MANIFEST_ID else chunk_id
                 try:
+                    _chunk_id = None if chunk_id == Manifest.MANIFEST_ID else chunk_id
-                    _, data = self.key.decrypt(_chunk_id, encrypted_data)
+                    self.key.decrypt(_chunk_id, encrypted_data)
                 except IntegrityError as integrity_error:
                     self.error_found = True
                     errors += 1
@@ -1277,7 +1279,7 @@ class ArchiveChecker:
         for chunk_id, _ in self.chunks.iteritems():
             cdata = self.repository.get(chunk_id)
             try:
-                _, data = self.key.decrypt(chunk_id, cdata)
+                data = self.key.decrypt(chunk_id, cdata)
             except IntegrityError as exc:
                 logger.error('Skipping corrupted chunk: %s', exc)
                 self.error_found = True
@@ -1322,9 +1324,9 @@ class ArchiveChecker:
                 self.possibly_superseded.add(id_)
 
         def add_callback(chunk):
-            id_ = self.key.id_hash(chunk.data)
+            id_ = self.key.id_hash(chunk)
             cdata = self.key.encrypt(chunk)
-            add_reference(id_, len(chunk.data), len(cdata), cdata)
+            add_reference(id_, len(chunk), len(cdata), cdata)
             return id_
 
         def add_reference(id_, size, csize, cdata=None):
@@ -1345,7 +1347,7 @@ class ArchiveChecker:
         def replacement_chunk(size):
             data = bytes(size)
             chunk_id = self.key.id_hash(data)
-            cdata = self.key.encrypt(Chunk(data))
+            cdata = self.key.encrypt(data)
             csize = len(cdata)
             return chunk_id, size, csize, cdata
 
@@ -1454,7 +1456,7 @@ class ArchiveChecker:
             if state > 0:
                 unpacker.resync()
             for chunk_id, cdata in zip(items, repository.get_many(items)):
-                _, data = self.key.decrypt(chunk_id, cdata)
+                data = self.key.decrypt(chunk_id, cdata)
                 unpacker.feed(data)
                 try:
                     for item in unpacker:
@@ -1504,7 +1506,7 @@ class ArchiveChecker:
                     continue
                 mark_as_possibly_superseded(archive_id)
                 cdata = self.repository.get(archive_id)
-                _, data = self.key.decrypt(archive_id, cdata)
+                data = self.key.decrypt(archive_id, cdata)
                 archive = ArchiveItem(internal_dict=msgpack.unpackb(data))
                 if archive.version != 1:
                     raise Exception('Unknown archive metadata version')
@@ -1521,7 +1523,7 @@ class ArchiveChecker:
                 archive.items = items_buffer.chunks
                 data = msgpack.packb(archive.as_dict(), unicode_errors='surrogateescape')
                 new_archive_id = self.key.id_hash(data)
-                cdata = self.key.encrypt(Chunk(data))
+                cdata = self.key.encrypt(data)
                 add_reference(new_archive_id, len(data), len(cdata), cdata)
                 self.manifest.archives[info.name] = (new_archive_id, info.ts)
 
@@ -1655,11 +1657,10 @@ class ArchiveRecreater:
         if self.recompress and not self.always_recompress and chunk_id in self.cache.chunks:
             # Check if this chunk is already compressed the way we want it
             old_chunk = self.key.decrypt(None, self.repository.get(chunk_id), decompress=False)
-            if Compressor.detect(old_chunk.data).name == self.key.compressor.decide(data).name:
+            if Compressor.detect(old_chunk).name == self.key.compressor.decide(data).name:
                 # Stored chunk has the same compression we wanted
                 overwrite = False
-        chunk = Chunk(data)
-        chunk_entry = self.cache.add_chunk(chunk_id, chunk, target.stats, overwrite=overwrite, wait=False)
+        chunk_entry = self.cache.add_chunk(chunk_id, data, target.stats, overwrite=overwrite, wait=False)
         self.cache.repository.async_response(wait=False)
         self.seen_chunks.add(chunk_entry.id)
         return chunk_entry
@@ -1673,7 +1674,7 @@ class ArchiveRecreater:
             yield from target.chunker.chunkify(file)
         else:
             for chunk in chunk_iterator:
-                yield chunk.data
+                yield chunk
 
     def save(self, archive, target, comment=None, replace_original=True):
         if self.dry_run:

diff --git a/src/borg/archiver.py b/src/borg/archiver.py
@@ -177,14 +177,14 @@ class Archiver:
                 a = next(chunks1, end)
                 if a is end:
                     return not blen - bi and next(chunks2, end) is end
-                a = memoryview(a.data)
+                a = memoryview(a)
                 alen = len(a)
                 ai = 0
             if not blen - bi:
                 b = next(chunks2, end)
                 if b is end:
                     return not alen - ai and next(chunks1, end) is end
-                b = memoryview(b.data)
+                b = memoryview(b)
                 blen = len(b)
                 bi = 0
             slicelen = min(alen - ai, blen - bi)
|
@@ -1395,7 +1395,7 @@ class Archiver:
         archive = Archive(repository, key, manifest, args.location.archive,
                           consider_part_files=args.consider_part_files)
         for i, item_id in enumerate(archive.metadata.items):
-            _, data = key.decrypt(item_id, repository.get(item_id))
+            data = key.decrypt(item_id, repository.get(item_id))
             filename = '%06d_%s.items' % (i, bin_to_hex(item_id))
             print('Dumping', filename)
             with open(filename, 'wb') as fd:
|
@@ -1425,7 +1425,7 @@ class Archiver:
             fd.write(do_indent(prepare_dump_dict(archive_meta_orig)))
             fd.write(',\n')
 
-            _, data = key.decrypt(archive_meta_orig[b'id'], repository.get(archive_meta_orig[b'id']))
+            data = key.decrypt(archive_meta_orig[b'id'], repository.get(archive_meta_orig[b'id']))
             archive_org_dict = msgpack.unpackb(data, object_hook=StableDict, unicode_errors='surrogateescape')
 
             fd.write(' "_meta":\n')
|
@@ -1436,7 +1436,7 @@ class Archiver:
             unpacker = msgpack.Unpacker(use_list=False, object_hook=StableDict)
             first = True
             for item_id in archive_org_dict[b'items']:
-                _, data = key.decrypt(item_id, repository.get(item_id))
+                data = key.decrypt(item_id, repository.get(item_id))
                 unpacker.feed(data)
                 for item in unpacker:
                     item = prepare_dump_dict(item)
|
@@ -1460,7 +1460,7 @@ class Archiver:
     def do_debug_dump_manifest(self, args, repository, manifest, key):
         """dump decoded repository manifest"""
 
-        _, data = key.decrypt(None, repository.get(manifest.MANIFEST_ID))
+        data = key.decrypt(None, repository.get(manifest.MANIFEST_ID))
 
         meta = prepare_dump_dict(msgpack.fallback.unpackb(data, object_hook=StableDict, unicode_errors='surrogateescape'))
 
|
@@ -1484,7 +1484,7 @@ class Archiver:
         for id in result:
             cdata = repository.get(id)
             give_id = id if id != Manifest.MANIFEST_ID else None
-            _, data = key.decrypt(give_id, cdata)
+            data = key.decrypt(give_id, cdata)
             filename = '%06d_%s.obj' % (i, bin_to_hex(id))
             print('Dumping', filename)
             with open(filename, 'wb') as fd:

diff --git a/src/borg/cache.py b/src/borg/cache.py
@@ -424,14 +424,14 @@ Chunk index: {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
 
         def fetch_and_build_idx(archive_id, repository, key, chunk_idx):
             cdata = repository.get(archive_id)
-            _, data = key.decrypt(archive_id, cdata)
+            data = key.decrypt(archive_id, cdata)
             chunk_idx.add(archive_id, 1, len(data), len(cdata))
             archive = ArchiveItem(internal_dict=msgpack.unpackb(data))
             if archive.version != 1:
                 raise Exception('Unknown archive metadata version')
             unpacker = msgpack.Unpacker()
             for item_id, chunk in zip(archive.items, repository.get_many(archive.items)):
-                _, data = key.decrypt(item_id, chunk)
+                data = key.decrypt(item_id, chunk)
                 chunk_idx.add(item_id, 1, len(data), len(chunk))
                 unpacker.feed(data)
                 for item in unpacker:
|
@@ -527,7 +527,7 @@ Chunk index: {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
     def add_chunk(self, id, chunk, stats, overwrite=False, wait=True):
         if not self.txn_active:
             self.begin_txn()
-        size = len(chunk.data)
+        size = len(chunk)
         refcount = self.seen_chunk(id, size)
         if refcount and not overwrite:
             return self.chunk_incref(id, stats)

diff --git a/src/borg/fuse.py b/src/borg/fuse.py
@@ -144,7 +144,7 @@ class FuseOperations(llfuse.Operations):
         self.file_versions = {}  # for versions mode: original path -> version
         unpacker = msgpack.Unpacker()
         for key, chunk in zip(archive.metadata.items, self.repository.get_many(archive.metadata.items)):
-            _, data = self.key.decrypt(key, chunk)
+            data = self.key.decrypt(key, chunk)
             unpacker.feed(data)
             for item in unpacker:
                 item = Item(internal_dict=item)
|
@@ -340,7 +340,7 @@ class FuseOperations(llfuse.Operations):
                 # evict fully read chunk from cache
                 del self.data_cache[id]
         else:
-            _, data = self.key.decrypt(id, self.repository.get(id))
+            data = self.key.decrypt(id, self.repository.get(id))
             if offset + n < len(data):
                 # chunk was only partially read, cache it
                 self.data_cache[id] = data

diff --git a/src/borg/helpers.py b/src/borg/helpers.py
@@ -44,13 +44,6 @@ from . import hashindex
 from . import shellpattern
 from .constants import *  # NOQA
 
-# meta dict, data bytes
-_Chunk = namedtuple('_Chunk', 'meta data')
-
-
-def Chunk(data, **meta):
-    return _Chunk(meta, data)
-
 
 '''
 The global exit_code variable is used so that modules other than archiver can increase the program exit code if a
@@ -247,7 +240,7 @@ class Manifest:
         if not key:
             key = key_factory(repository, cdata)
         manifest = cls(key, repository)
-        data = key.decrypt(None, cdata).data
+        data = key.decrypt(None, cdata)
         manifest_dict, manifest.tam_verified = key.unpack_and_verify_manifest(data, force_tam_not_required=force_tam_not_required)
         m = ManifestItem(internal_dict=manifest_dict)
         manifest.id = key.id_hash(data)
@@ -292,7 +285,7 @@ class Manifest:
             self.tam_verified = True
         data = self.key.pack_and_authenticate_metadata(manifest.as_dict())
         self.id = self.key.id_hash(data)
-        self.repository.put(self.MANIFEST_ID, self.key.encrypt(Chunk(data, compression={'name': 'none'})))
+        self.repository.put(self.MANIFEST_ID, self.key.encrypt(data))
 
 
 def prune_within(archives, within):
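Note the side effect visible in this hunk and in chunk_file() earlier: per-chunk metadata that used to ride in Chunk.meta, such as the manifest's compression={'name': 'none'} hint and chunk_file()'s **chunk_kw pass-through, no longer has a carrier and is dropped rather than forwarded to encrypt().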
@@ -1909,7 +1902,7 @@ class ItemFormatter(BaseFormatter):
         if 'chunks' not in item:
             return ""
         hash = hashlib.new(hash_function)
-        for _, data in self.archive.pipeline.fetch_many([c.id for c in item.chunks]):
+        for data in self.archive.pipeline.fetch_many([c.id for c in item.chunks]):
             hash.update(data)
         return hash.hexdigest()
 
@@ -1934,7 +1927,7 @@ class ChunkIteratorFileWrapper:
         if not remaining:
             try:
                 chunk = next(self.chunk_iterator)
-                self.chunk = memoryview(chunk.data)
+                self.chunk = memoryview(chunk)
             except StopIteration:
                 self.exhausted = True
                 return 0  # EOF

diff --git a/src/borg/key.py b/src/borg/key.py
@@ -15,7 +15,7 @@ logger = create_logger()
 from .constants import *  # NOQA
 from .compress import Compressor
 from .crypto import AES, bytes_to_long, bytes_to_int, num_aes_blocks, hmac_sha256, blake2b_256, hkdf_hmac_sha512
-from .helpers import Chunk, StableDict
+from .helpers import StableDict
 from .helpers import Error, IntegrityError
 from .helpers import yes
 from .helpers import get_keys_dir, get_security_dir
@@ -252,7 +252,7 @@ class PlaintextKey(KeyBase):
         return sha256(data).digest()
 
     def encrypt(self, chunk):
-        data = self.compressor.compress(chunk.data)
+        data = self.compressor.compress(chunk)
         return b''.join([self.TYPE_STR, data])
 
     def decrypt(self, id, data, decompress=True):
@@ -261,10 +261,10 @@ class PlaintextKey(KeyBase):
             raise IntegrityError('Chunk %s: Invalid encryption envelope' % id_str)
         payload = memoryview(data)[1:]
         if not decompress:
-            return Chunk(payload)
+            return payload
         data = self.decompress(payload)
         self.assert_id(id, data)
-        return Chunk(data)
+        return data
 
     def _tam_key(self, salt, context):
         return salt + context
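Across PlaintextKey, AESKeyBase and AuthenticatedKey the contract is now bytes in, bytes out. A runnable toy stand-in (not borg's real key classes; compression, MACs and id verification elided) showing the new shape of the encrypt()/decrypt() pair above:

import hashlib

class ToyPlaintextKey:
    TYPE_STR = b'\x02'

    def id_hash(self, data):
        return hashlib.sha256(data).digest()

    def encrypt(self, data):
        # was: self.compressor.compress(chunk.data) -- now bytes come in directly
        return self.TYPE_STR + data

    def decrypt(self, id, data, decompress=True):
        payload = bytes(memoryview(data)[1:])
        # was: return Chunk(payload) -- now bytes go out directly
        return payload

key = ToyPlaintextKey()
data = b'foo'
assert key.decrypt(key.id_hash(data), key.encrypt(data)) == data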
@@ -330,7 +330,7 @@ class AESKeyBase(KeyBase):
     MAC = hmac_sha256
 
     def encrypt(self, chunk):
-        data = self.compressor.compress(chunk.data)
+        data = self.compressor.compress(chunk)
         self.nonce_manager.ensure_reservation(num_aes_blocks(len(data)))
         self.enc_cipher.reset()
         data = b''.join((self.enc_cipher.iv[8:], self.enc_cipher.encrypt(data)))
@@ -355,10 +355,10 @@ class AESKeyBase(KeyBase):
         self.dec_cipher.reset(iv=PREFIX + data[33:41])
         payload = self.dec_cipher.decrypt(data_view[41:])
         if not decompress:
-            return Chunk(payload)
+            return payload
         data = self.decompress(payload)
         self.assert_id(id, data)
-        return Chunk(data)
+        return data
 
     def extract_nonce(self, payload):
         if not (payload[0] == self.TYPE or
@@ -742,7 +742,7 @@ class AuthenticatedKey(ID_BLAKE2b_256, RepoKey):
     STORAGE = KeyBlobStorage.REPO
 
     def encrypt(self, chunk):
-        data = self.compressor.compress(chunk.data)
+        data = self.compressor.compress(chunk)
         return b''.join([self.TYPE_STR, data])
 
     def decrypt(self, id, data, decompress=True):
@@ -750,10 +750,10 @@ class AuthenticatedKey(ID_BLAKE2b_256, RepoKey):
             raise IntegrityError('Chunk %s: Invalid envelope' % bin_to_hex(id))
         payload = memoryview(data)[1:]
         if not decompress:
-            return Chunk(payload)
+            return payload
         data = self.decompress(payload)
         self.assert_id(id, data)
-        return Chunk(data)
+        return data
 
 
 AVAILABLE_KEY_TYPES = (

diff --git a/src/borg/testsuite/archive.py b/src/borg/testsuite/archive.py
@@ -72,8 +72,8 @@ class MockCache:
         self.repository = self.MockRepo()
 
     def add_chunk(self, id, chunk, stats=None, wait=True):
-        self.objects[id] = chunk.data
-        return id, len(chunk.data), len(chunk.data)
+        self.objects[id] = chunk
+        return id, len(chunk), len(chunk)
 
 
 class ArchiveTimestampTestCase(BaseTestCase):

diff --git a/src/borg/testsuite/archiver.py b/src/borg/testsuite/archiver.py
@@ -34,7 +34,7 @@ from ..cache import Cache
 from ..constants import *  # NOQA
 from ..crypto import bytes_to_long, num_aes_blocks
 from ..helpers import PatternMatcher, parse_pattern, Location, get_security_dir
-from ..helpers import Chunk, Manifest
+from ..helpers import Manifest
 from ..helpers import EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR
 from ..helpers import bin_to_hex
 from ..item import Item
@@ -2449,7 +2449,7 @@ class ArchiverCheckTestCase(ArchiverTestCaseBase):
                 'version': 1,
             })
             archive_id = key.id_hash(archive)
-            repository.put(archive_id, key.encrypt(Chunk(archive)))
+            repository.put(archive_id, key.encrypt(archive))
             repository.commit()
         self.cmd('check', self.repository_location, exit_code=1)
         self.cmd('check', '--repair', self.repository_location, exit_code=0)
@@ -2537,12 +2537,12 @@ class ManifestAuthenticationTest(ArchiverTestCaseBase):
     def spoof_manifest(self, repository):
         with repository:
             _, key = Manifest.load(repository)
-            repository.put(Manifest.MANIFEST_ID, key.encrypt(Chunk(msgpack.packb({
+            repository.put(Manifest.MANIFEST_ID, key.encrypt(msgpack.packb({
                 'version': 1,
                 'archives': {},
                 'config': {},
                 'timestamp': (datetime.utcnow() + timedelta(days=1)).isoformat(),
-            }))))
+            })))
             repository.commit()
 
     def test_fresh_init_tam_required(self):
@@ -2550,11 +2550,11 @@ class ManifestAuthenticationTest(ArchiverTestCaseBase):
         repository = Repository(self.repository_path, exclusive=True)
         with repository:
             manifest, key = Manifest.load(repository)
-            repository.put(Manifest.MANIFEST_ID, key.encrypt(Chunk(msgpack.packb({
+            repository.put(Manifest.MANIFEST_ID, key.encrypt(msgpack.packb({
                 'version': 1,
                 'archives': {},
                 'timestamp': (datetime.utcnow() + timedelta(days=1)).isoformat(),
-            }))))
+            })))
             repository.commit()
 
         with pytest.raises(TAMRequiredError):
@@ -2570,9 +2570,9 @@ class ManifestAuthenticationTest(ArchiverTestCaseBase):
             key.tam_required = False
             key.change_passphrase(key._passphrase)
 
-            manifest = msgpack.unpackb(key.decrypt(None, repository.get(Manifest.MANIFEST_ID)).data)
+            manifest = msgpack.unpackb(key.decrypt(None, repository.get(Manifest.MANIFEST_ID)))
             del manifest[b'tam']
-            repository.put(Manifest.MANIFEST_ID, key.encrypt(Chunk(msgpack.packb(manifest))))
+            repository.put(Manifest.MANIFEST_ID, key.encrypt(msgpack.packb(manifest)))
             repository.commit()
         output = self.cmd('list', '--debug', self.repository_location)
         assert 'archive1234' in output
@@ -2844,8 +2844,8 @@ def test_get_args():
 
 def test_compare_chunk_contents():
     def ccc(a, b):
-        chunks_a = [Chunk(data) for data in a]
-        chunks_b = [Chunk(data) for data in b]
+        chunks_a = [data for data in a]
+        chunks_b = [data for data in b]
         compare1 = Archiver.compare_chunk_contents(iter(chunks_a), iter(chunks_b))
         compare2 = Archiver.compare_chunk_contents(iter(chunks_b), iter(chunks_a))
         assert compare1 == compare2

diff --git a/src/borg/testsuite/helpers.py b/src/borg/testsuite/helpers.py
@@ -21,7 +21,7 @@ from ..helpers import get_cache_dir, get_keys_dir, get_security_dir
 from ..helpers import is_slow_msgpack
 from ..helpers import yes, TRUISH, FALSISH, DEFAULTISH
 from ..helpers import StableDict, int_to_bigint, bigint_to_int, bin_to_hex
-from ..helpers import parse_timestamp, ChunkIteratorFileWrapper, ChunkerParams, Chunk
+from ..helpers import parse_timestamp, ChunkIteratorFileWrapper, ChunkerParams
 from ..helpers import ProgressIndicatorPercent, ProgressIndicatorEndless
 from ..helpers import load_exclude_file, load_pattern_file
 from ..helpers import parse_pattern, PatternMatcher
@@ -1158,7 +1158,7 @@ def test_partial_format():
 
 
 def test_chunk_file_wrapper():
-    cfw = ChunkIteratorFileWrapper(iter([Chunk(b'abc'), Chunk(b'def')]))
+    cfw = ChunkIteratorFileWrapper(iter([b'abc', b'def']))
     assert cfw.read(2) == b'ab'
    assert cfw.read(50) == b'cdef'
     assert cfw.exhausted

diff --git a/src/borg/testsuite/key.py b/src/borg/testsuite/key.py
@@ -9,7 +9,7 @@ import msgpack
 
 from ..crypto import bytes_to_long, num_aes_blocks
 from ..helpers import Location
-from ..helpers import Chunk, StableDict
+from ..helpers import StableDict
 from ..helpers import IntegrityError
 from ..helpers import get_security_dir
 from ..key import PlaintextKey, PassphraseKey, KeyfileKey, RepoKey, Blake2KeyfileKey, Blake2RepoKey, AuthenticatedKey
@@ -104,17 +104,17 @@ class TestKey:
 
     def test_plaintext(self):
         key = PlaintextKey.create(None, None)
-        chunk = Chunk(b'foo')
-        assert hexlify(key.id_hash(chunk.data)) == b'2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae'
-        assert chunk == key.decrypt(key.id_hash(chunk.data), key.encrypt(chunk))
+        chunk = b'foo'
+        assert hexlify(key.id_hash(chunk)) == b'2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae'
+        assert chunk == key.decrypt(key.id_hash(chunk), key.encrypt(chunk))
 
     def test_keyfile(self, monkeypatch, keys_dir):
         monkeypatch.setenv('BORG_PASSPHRASE', 'test')
         key = KeyfileKey.create(self.MockRepository(), self.MockArgs())
         assert bytes_to_long(key.enc_cipher.iv, 8) == 0
-        manifest = key.encrypt(Chunk(b'ABC'))
+        manifest = key.encrypt(b'ABC')
         assert key.extract_nonce(manifest) == 0
-        manifest2 = key.encrypt(Chunk(b'ABC'))
+        manifest2 = key.encrypt(b'ABC')
         assert manifest != manifest2
         assert key.decrypt(None, manifest) == key.decrypt(None, manifest2)
         assert key.extract_nonce(manifest2) == 1
@@ -124,8 +124,8 @@ class TestKey:
         # Key data sanity check
         assert len({key2.id_key, key2.enc_key, key2.enc_hmac_key}) == 3
         assert key2.chunk_seed != 0
-        chunk = Chunk(b'foo')
-        assert chunk == key2.decrypt(key.id_hash(chunk.data), key.encrypt(chunk))
+        chunk = b'foo'
+        assert chunk == key2.decrypt(key.id_hash(chunk), key.encrypt(chunk))
 
     def test_keyfile_nonce_rollback_protection(self, monkeypatch, keys_dir):
         monkeypatch.setenv('BORG_PASSPHRASE', 'test')
@@ -133,9 +133,9 @@ class TestKey:
         with open(os.path.join(get_security_dir(repository.id_str), 'nonce'), "w") as fd:
             fd.write("0000000000002000")
         key = KeyfileKey.create(repository, self.MockArgs())
-        data = key.encrypt(Chunk(b'ABC'))
+        data = key.encrypt(b'ABC')
         assert key.extract_nonce(data) == 0x2000
-        assert key.decrypt(None, data).data == b'ABC'
+        assert key.decrypt(None, data) == b'ABC'
 
     def test_keyfile_kfenv(self, tmpdir, monkeypatch):
         keyfile = tmpdir.join('keyfile')
@@ -144,8 +144,8 @@ class TestKey:
         assert not keyfile.exists()
         key = KeyfileKey.create(self.MockRepository(), self.MockArgs())
         assert keyfile.exists()
-        chunk = Chunk(b'ABC')
-        chunk_id = key.id_hash(chunk.data)
+        chunk = b'ABC'
+        chunk_id = key.id_hash(chunk)
         chunk_cdata = key.encrypt(chunk)
         key = KeyfileKey.detect(self.MockRepository(), chunk_cdata)
         assert chunk == key.decrypt(chunk_id, chunk_cdata)
@@ -158,7 +158,7 @@ class TestKey:
             fd.write(self.keyfile2_key_file)
         monkeypatch.setenv('BORG_PASSPHRASE', 'passphrase')
         key = KeyfileKey.detect(self.MockRepository(), self.keyfile2_cdata)
-        assert key.decrypt(self.keyfile2_id, self.keyfile2_cdata).data == b'payload'
+        assert key.decrypt(self.keyfile2_id, self.keyfile2_cdata) == b'payload'
 
     def test_keyfile2_kfenv(self, tmpdir, monkeypatch):
         keyfile = tmpdir.join('keyfile')
@@ -167,14 +167,14 @@ class TestKey:
         monkeypatch.setenv('BORG_KEY_FILE', str(keyfile))
         monkeypatch.setenv('BORG_PASSPHRASE', 'passphrase')
         key = KeyfileKey.detect(self.MockRepository(), self.keyfile2_cdata)
-        assert key.decrypt(self.keyfile2_id, self.keyfile2_cdata).data == b'payload'
+        assert key.decrypt(self.keyfile2_id, self.keyfile2_cdata) == b'payload'
 
     def test_keyfile_blake2(self, monkeypatch, keys_dir):
         with keys_dir.join('keyfile').open('w') as fd:
             fd.write(self.keyfile_blake2_key_file)
         monkeypatch.setenv('BORG_PASSPHRASE', 'passphrase')
         key = Blake2KeyfileKey.detect(self.MockRepository(), self.keyfile_blake2_cdata)
-        assert key.decrypt(self.keyfile_blake2_id, self.keyfile_blake2_cdata).data == b'payload'
+        assert key.decrypt(self.keyfile_blake2_id, self.keyfile_blake2_cdata) == b'payload'
 
     def test_passphrase(self, keys_dir, monkeypatch):
         monkeypatch.setenv('BORG_PASSPHRASE', 'test')
@@ -184,9 +184,9 @@ class TestKey:
         assert hexlify(key.enc_hmac_key) == b'b885a05d329a086627412a6142aaeb9f6c54ab7950f996dd65587251f6bc0901'
         assert hexlify(key.enc_key) == b'2ff3654c6daf7381dbbe718d2b20b4f1ea1e34caa6cc65f6bb3ac376b93fed2a'
         assert key.chunk_seed == -775740477
-        manifest = key.encrypt(Chunk(b'ABC'))
+        manifest = key.encrypt(b'ABC')
         assert key.extract_nonce(manifest) == 0
-        manifest2 = key.encrypt(Chunk(b'ABC'))
+        manifest2 = key.encrypt(b'ABC')
         assert manifest != manifest2
         assert key.decrypt(None, manifest) == key.decrypt(None, manifest2)
         assert key.extract_nonce(manifest2) == 1
@@ -197,9 +197,9 @@ class TestKey:
         assert key.enc_hmac_key == key2.enc_hmac_key
         assert key.enc_key == key2.enc_key
         assert key.chunk_seed == key2.chunk_seed
-        chunk = Chunk(b'foo')
-        assert hexlify(key.id_hash(chunk.data)) == b'818217cf07d37efad3860766dcdf1d21e401650fed2d76ed1d797d3aae925990'
-        assert chunk == key2.decrypt(key2.id_hash(chunk.data), key.encrypt(chunk))
+        chunk = b'foo'
+        assert hexlify(key.id_hash(chunk)) == b'818217cf07d37efad3860766dcdf1d21e401650fed2d76ed1d797d3aae925990'
+        assert chunk == key2.decrypt(key2.id_hash(chunk), key.encrypt(chunk))
 
     def _corrupt_byte(self, key, data, offset):
         data = bytearray(data)
@@ -224,7 +224,7 @@ class TestKey:
             key.decrypt(id, data)
 
     def test_decrypt_decompress(self, key):
-        plaintext = Chunk(b'123456789')
+        plaintext = b'123456789'
         encrypted = key.encrypt(plaintext)
         assert key.decrypt(None, encrypted, decompress=False) != plaintext
         assert key.decrypt(None, encrypted) == plaintext
@@ -244,11 +244,11 @@ class TestKey:
     def test_authenticated_encrypt(self, monkeypatch):
         monkeypatch.setenv('BORG_PASSPHRASE', 'test')
         key = AuthenticatedKey.create(self.MockRepository(), self.MockArgs())
-        plaintext = Chunk(b'123456789')
+        plaintext = b'123456789'
         authenticated = key.encrypt(plaintext)
         # 0x06 is the key TYPE, 0x0100 identifies LZ4 compression, 0x90 is part of LZ4 and means that an uncompressed
         # block of length nine follows (the plaintext).
-        assert authenticated == b'\x06\x01\x00\x90' + plaintext.data
+        assert authenticated == b'\x06\x01\x00\x90' + plaintext
 
 
 class TestPassphrase: