Skip to content

Improved inference of archive names #53

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 9 commits into from
Apr 8, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions archiver/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,5 +12,7 @@
ENCRYPTION_ALGORITHM = "AES256"
ENV_VAR_MAPPER_MAX_CPUS = "ARCHIVER_MAX_CPUS_ENV_VAR"
DEFAULT_COMPRESSION_LEVEL = 6
# Regex fragments for every suffix the archiver may append to a base name
# (split parts, tar, hashes, compression, encryption, listings).
# Raw strings are required: '\.' in a normal string literal is an invalid
# escape sequence (DeprecationWarning now, a SyntaxError in future CPython).
ARCHIVE_SUFFIXES = [r'\.part[0-9]+', r'\.tar', r'\.md5', r'\.lz', r'\.gpg', r'\.lst', r'\.parts', r'\.txt']
# Alternation matching exactly one archive suffix anchored at end of string,
# e.g. ".tar" or ".part3".
ARCHIVE_SUFFIXES_REG = '$|'.join(ARCHIVE_SUFFIXES) + '$'

# Matches one "<hash> <filename>" line of an md5 listing file.
MD5_LINE_REGEX = re.compile(r'(\S+)\s+(\S.*)')
6 changes: 4 additions & 2 deletions archiver/extract.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def decrypt_existing_archive(archive_path, destination_dir=None, remove_unencryp
decrypt_list_of_archives([archive_path], destination_dir, delete=remove_unencrypted, threads=threads)


def extract_archive(source_path, destination_directory_path, partial_extraction_path=None, threads=None, force=False, extract_at_destination=False):
def extract_archive(source_path, destination_directory_path, partial_extraction_path=None, threads=None, force=False, extract_at_destination=False, archive_name=None):
# Create destination folder if nonexistent or overwrite if --force option used
helpers.handle_destination_directory_creation(destination_directory_path, force)

Expand Down Expand Up @@ -64,7 +64,9 @@ def extract_archive(source_path, destination_directory_path, partial_extraction_
uncompress_and_extract(archive_files, destination_directory_path, threads, partial_extraction_path=partial_extraction_path)

logging.info("Archive extracted to: " + helpers.get_absolute_path_string(destination_directory_path))
return destination_directory_path / helpers.filename_without_extensions(source_path)
if not archive_name:
archive_name = helpers.filename_without_archive_extensions(source_path)
return destination_directory_path / archive_name


def uncompress_and_extract(archive_file_paths, destination_directory_path, threads, partial_extraction_path=None, encrypted=False):
Expand Down
28 changes: 14 additions & 14 deletions archiver/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,8 @@
import unicodedata

from .constants import READ_CHUNK_BYTE_SIZE, COMPRESSED_ARCHIVE_SUFFIX, \
ENCRYPTED_ARCHIVE_SUFFIX, ENV_VAR_MAPPER_MAX_CPUS, MD5_LINE_REGEX
ENCRYPTED_ARCHIVE_SUFFIX, ENV_VAR_MAPPER_MAX_CPUS, MD5_LINE_REGEX, \
ARCHIVE_SUFFIXES_REG


def get_files_with_type_in_directory_or_terminate(directory, file_type):
Expand Down Expand Up @@ -338,36 +339,35 @@ def file_is_valid_archive_or_terminate(file_path):
terminate_with_message(f"File {file_path.as_posix()} is not a valid archive of type {COMPRESSED_ARCHIVE_SUFFIX} or {ENCRYPTED_ARCHIVE_SUFFIX} or doesn't exist.")


def filename_without_extensions(path):
"""Removes every suffix, including .partX"""
suffixes_string = "".join(path.suffixes)
def filepath_without_archive_extensions(path: Path) -> Path:
    """Return *path* with every trailing archiver-added suffix stripped.

    Suffixes are removed one at a time from the end (e.g. ".lz", ".tar",
    ".partN") until the final suffix no longer matches ARCHIVE_SUFFIXES_REG.
    """
    stripped = path
    while re.match(ARCHIVE_SUFFIXES_REG, stripped.suffix):
        stripped = stripped.with_suffix('')
    return stripped

return path.name[:-len(suffixes_string)]

def filename_without_archive_extensions(path):
    """Return only the final path component, with archiving suffixes removed."""
    stripped_path = filepath_without_archive_extensions(path)
    return stripped_path.name

def filepath_without_extensions(path: Path) -> Path:
    """Return *path* with every suffix removed, including .partX.

    Unlike filepath_without_archive_extensions, this strips ALL suffixes,
    not only the ones the archiver itself appends.
    """
    suffixes_string = "".join(path.suffixes)
    # Guard the no-suffix case: name[:-len("")] is name[:-0], which slices
    # to the empty string instead of leaving the name intact.
    if not suffixes_string:
        return path.parent / path.name
    return path.parent / path.name[:-len(suffixes_string)]

def infer_source_name(source_path: Path) -> Path:
    """Infer the archive's base name (as a path) from *source_path*.

    For a single file, strip the archiver suffixes from it.  For a
    directory, strip suffixes from each contained file: every file must
    reduce to one common base name, otherwise inference is ambiguous and
    the program terminates with an explanatory message.
    """
    # NOTE(review): the original diff interleaved old and new versions of the
    # calls below; this is the new side (archive-suffix-aware stripping).
    if not source_path.is_dir():
        return filepath_without_archive_extensions(source_path)
    else:
        all_files = [p for p in source_path.iterdir() if p.is_file()]
        unique_names = list(set([filepath_without_archive_extensions(f) for f in all_files]))

        if len(unique_names) == 0:
            terminate_with_message('There are no archive files present')
        elif len(unique_names) > 1:
            terminate_with_message(f'Automatic archive name detection has failed. More than one possible archive name detected: {str(unique_names)}\noptionally use --archive_name to specify archive name.')

        return unique_names[0]


def filename_without_archive_extensions(path):
def filename_without_archive_extensions_multipart(path):
"""Removes known archive extensions but keeps extensions like .partX"""
name = path.name

Expand Down
25 changes: 14 additions & 11 deletions archiver/integrity.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
from .listing import parse_tar_listing


def check_integrity(source_path, deep_flag=False, threads=None, work_dir=None):
def check_integrity(source_path, deep_flag=False, threads=None, work_dir=None, archive_name=None):

archives_with_hashes = get_archives_with_hashes_from_path(source_path)
is_encrypted = helpers.path_target_is_encrypted(source_path)
Expand All @@ -20,9 +20,9 @@ def check_integrity(source_path, deep_flag=False, threads=None, work_dir=None):
check_result = shallow_integrity_check(archives_with_hashes, workers=threads)

if source_path.is_dir():
integrity_result = check_archive_list_integrity(source_path)
integrity_result = check_archive_list_integrity(source_path, archive_name)
else:
file_path = source_path.parent / Path(helpers.filename_without_archive_extensions(source_path))
file_path = source_path.parent / Path(helpers.filename_without_archive_extensions_multipart(source_path))
integrity_result = check_archive_part_integrity(file_path)

if not integrity_result:
Expand All @@ -34,7 +34,7 @@ def check_integrity(source_path, deep_flag=False, threads=None, work_dir=None):
if deep_flag:
# with deep flag still continue, no matter what the result of the previous test was
deep_check_result = deep_integrity_check(archives_with_hashes,
is_encrypted, threads, work_dir)
is_encrypted, threads, work_dir, archive_name)

if check_result and deep_check_result:
logging.info("Deep integrity check successful.")
Expand Down Expand Up @@ -74,10 +74,13 @@ def check_archive_part_integrity(source_name: Path) -> bool:

return check_result

def check_archive_list_integrity(source_path: Path) -> bool:
def check_archive_list_integrity(source_path: Path, archive_name: str = None) -> bool:

parts = helpers.get_parts(source_path)
source_name = helpers.infer_source_name(source_path)
if archive_name:
source_name = source_path / Path(archive_name)
else:
source_name = helpers.infer_source_name(source_path)

logging.info(f'Found {parts} parts in archive {source_path.as_posix()}')
check_result = True
Expand Down Expand Up @@ -123,7 +126,7 @@ def verify_relative_symbolic_links(archives_with_hashes):
symlink_dict = {} # all symlinks found across listing
for archive in archives_with_hashes:
part_path = archive[0]
part_listing = part_path.parent / (helpers.filename_without_archive_extensions(part_path) + LISTING_SUFFIX)
part_listing = part_path.parent / (helpers.filename_without_archive_extensions_multipart(part_path) + LISTING_SUFFIX)
entries = parse_tar_listing(part_listing)

file_set.update([str(e.path).rstrip('/') for e in entries])
Expand All @@ -148,7 +151,7 @@ def verify_relative_symbolic_links(archives_with_hashes):
return missing


def deep_integrity_check(archives_with_hashes, is_encrypted, threads, work_dir):
def deep_integrity_check(archives_with_hashes, is_encrypted, threads, work_dir, archive_name=None):
# verify link structure
missing_links = verify_relative_symbolic_links(archives_with_hashes)

Expand All @@ -164,7 +167,7 @@ def deep_integrity_check(archives_with_hashes, is_encrypted, threads, work_dir):
# Create temporary directory to unpack archive
with tempfile.TemporaryDirectory(dir=work_dir) as temp_path_string:
temp_path = Path(temp_path_string) / "extraction-folder"
archive_content_path = extract_archive(archive_file_path, temp_path, threads=threads, extract_at_destination=True)
archive_content_path = extract_archive(archive_file_path, temp_path, threads=threads, extract_at_destination=True, archive_name=archive_name)

terminate_if_extracted_archive_not_existing(archive_content_path)

Expand Down Expand Up @@ -234,7 +237,7 @@ def get_hashes_for_archive(archive_path):
hash_file_path = archive_path.parent / (archive_path.name + ".md5")
helpers.terminate_if_path_nonexistent(hash_file_path)

hash_listing_path = archive_path.parent / (helpers.filename_without_archive_extensions(archive_path) + ".md5")
hash_listing_path = archive_path.parent / (helpers.filename_without_archive_extensions_multipart(archive_path) + ".md5")
helpers.terminate_if_path_nonexistent(hash_listing_path)

return [(archive_file_path, hash_file_path, hash_listing_path)]
Expand All @@ -257,7 +260,7 @@ def get_archives_with_hashes_from_directory(source_path):
hash_path = archive.parent / (archive.name + ".md5")
helpers.terminate_if_path_nonexistent(hash_path)

hash_listing_path = Path(archive.parent) / (helpers.filename_without_archive_extensions(archive) + ".md5")
hash_listing_path = Path(archive.parent) / (helpers.filename_without_archive_extensions_multipart(archive) + ".md5")
helpers.terminate_if_path_nonexistent(hash_listing_path)

archive_with_hash_path = (archive, hash_path, hash_listing_path)
Expand Down
2 changes: 1 addition & 1 deletion archiver/listing.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ def get_listing_files_for_path(path):

# If specific file is used, maybe not all results of search path will be shown (since they could be in different file)
helpers.file_is_valid_archive_or_terminate(path)
listing_path = path.parent / (helpers.filename_without_archive_extensions(path) + ".tar.lst")
listing_path = path.parent / (helpers.filename_without_archive_extensions_multipart(path) + ".tar.lst")
helpers.terminate_if_path_nonexistent(path)

return [listing_path]
Expand Down
6 changes: 4 additions & 2 deletions archiver/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -144,6 +144,7 @@ def parse_arguments(args):
parser_extract.add_argument("archive_dir", type=str, help="Select source archive tar.lz file")
parser_extract.add_argument("destination", type=str, help="Path to directory where archive will be extracted")
parser_extract.add_argument("-s", "--subpath", type=str, help="Directory or file inside archive to extract")
parser_extract.add_argument("--archive-name", type=str, help="Provide explicit source name of the archive (if it contains suffixes used by the archiver - e.g., .part1)")
parser_extract.add_argument("-n", "--threads", type=int, help=thread_help)
parser_extract.add_argument("-f", "--force", action="store_true", default=False, help=force_help)
parser_extract.set_defaults(func=handle_extract)
Expand All @@ -160,6 +161,7 @@ def parse_arguments(args):
parser_check.add_argument("archive_dir", type=str, help="Select source archive directory or .tar.lz file")
parser_check.add_argument("-d", "--deep", action="store_true", help="Verify integrity by unpacking archive and hashing each file")
parser_check.add_argument("-n", "--threads", type=int, help=thread_help)
parser_check.add_argument("--archive-name", type=str, help="Provide explicit source name of the archive (if contains suffixes used by the archiver - e.g., .part1)")
parser_check.set_defaults(func=handle_check)

# Preparation checks
Expand Down Expand Up @@ -270,7 +272,7 @@ def handle_extract(args):

threads = helpers.get_threads_from_args_or_environment(args.threads)

extract_archive(source_path, destination_directory_path, args.subpath, threads, args.force)
extract_archive(source_path, destination_directory_path, args.subpath, threads, args.force, archive_name=args.archive_name)


def handle_list(args):
Expand All @@ -285,7 +287,7 @@ def handle_check(args):
source_path = Path(args.archive_dir)
threads = helpers.get_threads_from_args_or_environment(args.threads)

if not check_integrity(source_path, args.deep, threads, args.work_dir):
if not check_integrity(source_path, args.deep, threads, args.work_dir, args.archive_name):
# return a different error code to the default code of 1 to be able to distinguish
# general errors from a successful run of the program with an unsuccessful outcome
# not taking 2, as it usually stands for command line argument errors
Expand Down
1 change: 1 addition & 0 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
install_requires=requirements,
license="MIT license",
long_description=readme,
long_description_content_type='text/markdown',
include_package_data=True,
keywords=['archiving', 'data lifecycle', 'research'],
name='project-archiver',
Expand Down
13 changes: 9 additions & 4 deletions tests/features/test_integrity.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,11 @@ def test_integrity_check_deep_on_split_archive(caplog):

assert_successful_deep_check(archive_file, caplog)

def test_integrity_check_deep_on_split_archive_name_conflict(caplog):
    # Archive whose base name itself ends in ".part1", so automatic name
    # inference is ambiguous; the explicit archive_name resolves it.
    conflict_dir = get_directory_with_name("split-archive-name-conflict")
    conflict_archive = conflict_dir / "project.part1.part1.tar.lz"

    assert_successful_deep_check(conflict_archive, caplog, archive_name="project.part1")

def test_integrity_check_deep_on_split_encrypted_archive(caplog, setup_gpg):
archive_dir = get_directory_with_name("split-encrypted-archive")
Expand Down Expand Up @@ -182,18 +187,18 @@ def test_verify_relative_symbolic_links():

# MARK: Helpers

def assert_successful_deep_check(archive_path, caplog, archive_name=None):
    # Deep checks unpack the archive and re-hash every contained file;
    # archive_name optionally overrides automatic name inference.
    expected_output = "Deep integrity check successful."
    assert_integrity_check_with_output(archive_path, expected_output, True, caplog, DEEP, archive_name)


def assert_successful_shallow_check(archive_path, caplog):
    # Shallow checks only verify hashes of the archive files themselves.
    assert_integrity_check_with_output(
        archive_path, "Basic integrity check successful.", True, caplog)


def assert_integrity_check_with_output(archive_path, expected_output, expected_return, caplog, deep=False, archive_name=None):
    # Run the integrity check, then confirm both its boolean outcome and
    # that the last log line carries the expected summary message.
    assert check_integrity(archive_path, deep, threads=2, archive_name=archive_name) == expected_return

    assert caplog.messages[-1] == expected_output

Expand Down
50 changes: 25 additions & 25 deletions tests/test-ressources/encryption-keys/public_second.pub
Original file line number Diff line number Diff line change
Expand Up @@ -13,18 +13,18 @@ jLOz8mwPIAsP6g6uXXt1XVlOlWI+EiFLspBZHyBzybSQSKGhEPy/N+JvwAj8a/bw
w6fraFyc77hhFADq0UszGyqSQXil+Vx/oqjl0KFISyj8gi1qPeMyfWyRXwARAQAB
tChBbmRyw6kgS2FobGVzIDxhbmRyZS5rYWhsZXNAaW5mLmV0aHouY2g+iQJUBBMB
CAA+AhsDBQsJCAcCBhUKCQgLAgQWAgMBAh4BAheAFiEExglJhE3/liqvCxf+4kOY
i4gFdC4FAmQixm0FCQqKrhIACgkQ4kOYi4gFdC4o8A//V1ItBcJ9Zde0vrZcwjtN
y75MiV8dJ4430rj1Lm1NI8By6lPlnzBOo9nU0KCRlgZz461FoLSPScYSI+Osy5Eo
dZL0uOYicFLls/bt1xx4fQY1l7TbvO2rPPABhhxhl/yh3lVfJfBKIVXMw4dAP6fC
BWHZwQpbSSInUAadTMLd+sjMC5eLteBTkSEFVMv57PXB6AZN/f+GmP9fwWtGb2YI
1EQSmwdWNIIsZvsZsPO/blEltbvaxUnZ1+On4Ke/o18Eaav9VVnBHcpaGKYuJkCv
QTBnsSvBz1vVGTJWlP3Ner9Qu9chN1s7dZFzAaGA23FjlgQX7L3BKE7hGjrRIsnl
Ucr+JSNdBWxm27X8it6+xxor47HxNYwVMBhux0/j53doaL1tUjzvPWZFiE+ZNAGd
mAEQqUmE0tvT0ofqOuIpnVi3g4kkLgjyWP11uX3RvOXT8W8vYOpAVZP0iam0tpQ+
qGq8Sij8GyNmbB1CZNP0c0jM8P8Vkp/5VTX/sD7+avAk3ozR89zNnU3OjjV9RxuD
Rr4b+Pl+f/ficZ8duSfpdNlWAyjpRY3JDYAkq2Kbdt49TDJXFle4eJ2Cqn1ZA5xl
EWB7tYAAXNIBQYbQi2IHBDijqySRwcrvgvlWaBjgPhicc6dqcTvcWcpXCU+aQB3O
llqbB3q5JJW26qckX9zy9665Ag0EXVp/WwEQAMRvcFvMpPj2dep70eUQCu0qmtiW
i4gFdC4FAmf0PwgFCQx0W60ACgkQ4kOYi4gFdC6MgA/9FxiRKU2QWvyCqwHRRKwR
9iT8syIrjhAIIEVq6YkBzaUmVOTPlog5O6AInr6rWReV+WCxCuEMq9o7peNSTnAY
MHIwN05uHTE9Q9f72kGYZCWApNWYu09EDexR3+hDiMqa4UONC847GRs40ZT6kIig
Fz1iD1WCIPkV3I1idQTF0y5WvpGl3ynHyBsQCYhOav+6On8I9EFlrFlQx1iFLhoO
N4MygyNZ09dwf2DnCWFI6MGMQ5f/VF3Y/D6EKSMwH4p1/EC/Rv72y/QHuE9T6HkX
LNDz5vMzR6E0EchuPLo0WAi51m/FIqZvCR2GPkg0tovMmbq16o6ve3T4VI8+paB7
6xCiRPV0cZxXhSpQefFYUWfvF7Q1JVxTRFy1AT/RL03P1fWWCjjBzUqdrnp17qSh
Zp+vdgBKZcht2hwooezqsUC7G9TdjdiaX2O93KSt5Af7hp16dp0Xj3cQVN0OLtuZ
KhAcjNN9QsQ8/8sef1XXSW/IcXm5NqZDIgmw7jHcdlZpZXDAmsf6uphBDvtEwZL5
vYjVw0PQCB4XdLdYefcXWqQQRTek6sK/S2QsUF1dnqh6y81Y6+Wj9u8oN55g5mOE
iAo1TIvZ/ypDMSuUR5OSMqWIUs2b/j90itS6B5VWUjGLnYb0jVoMUxZRaa5eh4+7
BY7So8rh8yPqEuVO+hygw3W5Ag0EXVp/WwEQAMRvcFvMpPj2dep70eUQCu0qmtiW
hBPBkaoHeq/N8n4L9c5nUBnf1s8Hsx7W4X1bIYS3pSq+oKX3hMvSYDUVhZrzgFhK
pi4MV5GRUXiyxwOf0ieDuyzELKL14KxozMG3jtyg9w686CgRe+0dg/LEOt68jKQv
QCc3yro2gTJzEymIKDKdkdCMz096u2zAxzudc6ckMoDV3bfemfhGNre+sOH2yr66
Expand All @@ -36,17 +36,17 @@ xddjHOWLp2hFFFREuNl+mgmjGUPOvdH5vk1ll2icgGtLXgGKJWBM7ybTQfoVN7HB
72hGktM3VANKoESJUwcuNd4tGx5xo/sQMc6VcDebdoB0e9Ph4nVole5CkDLU1N2V
ilbkM2JV7bHXpCyCYf/av/N7AhETZcfOp1VTDlJT21stJC9ZQ9eJ9BHTHHCHSzzf
1uZgXFscvJPeFO7RABEBAAGJAjwEGAEIACYCGwwWIQTGCUmETf+WKq8LF/7iQ5iL
iAV0LgUCZCLGTAUJCoqt8QAKCRDiQ5iLiAV0LvofD/9jCwUbRRtGHRRo/2rxlJYL
BAnhcrLliZS2ZnPA/dwD1CmE8ycJiDYn0HF2/6n6FocEBnUzCAcNbNYsC78Udd3n
kCEqe4eMnAfMszaNCKMpfYYo6n0dO/k3ZxPYqkN3q9NN5UJpORnYzEKGYhEaw+L3
9+z7c3ULrP2fkWgAo0fYJepgtwr68zQS6YirBg25KeAL237e/JPrIu2Bz68i5/lI
jU0p/0vKjHCBfLOWlW8T+mDh63EUv8Jr2e6y+kRU4N+P1TwpsukuNRxn7tFFw4rw
J8W46gqeWOtnABgQfSthNdiBh99oc8pW+ASLbbH2FPDshHi6rV0jcXHRx8nUAy8r
ZWX9vCc1iElV/xKmCZLvBp7gKAWKfMtn+i8Uh32hgcZWrfCYdAY1mXLs4KcfaPFe
a3gPQFtmAvSwVd/Z+AQKS2kvFLLemICa5HEEWDmy5Y2AfuEDYkxaHcJxJArd+OeK
4vlOjM+hbGaRLovOaAixx22o8TsAjCDzQbRcai5fs+nKE5IW12H+mSriwObBDFTC
HIrwJOWJmju/0vL9KJpsnt5EAR5KzPVupYzM6P6SOXuEv4pHm6taUAh/WSRHMzPk
UQX/K/ezFHjpcp06uz6rasM1QDfcGqm4CnS5uxtPjOOb5wUhZMEVNp/vGJx7AiTu
mEpD3jyU/Klb/JMtJ0/axQ==
=z8Ex
iAV0LgUCZ/Q/GQUJDHRbvgAKCRDiQ5iLiAV0LgLyEACz4DosPN6CzhMC02w3rpTL
HrpiBNZy3HOh+ESV7ldKeopM6sq0f54z2GpkHz+94kELbPb1pbe34i4NYJTR8vp+
rdDHiU5f2yhf9GGnoYSHiXcxpOp0ou2QJZzHrIGtmMdFkTQAvghHlDWuwXl1emes
zMv1K8WCKnytNmvy/9tBzwPpVty/FrdyT2eFoyhz8PSoGq11jmo7hFCIfXHiGkwe
J8wk21wPyXrsSZC1Ihc6Lu6YoGs5dwf+rbB9bxkjBIXvU6/Mom1NpTOalQUK1CEc
yIZ5jpXguDb+dcVeAXjnqyQc9uzrFY7F9AWb3g7Uzj2n91p/ifPY7mEAComvTbq5
//mgV0Q3TYJcU8cCxPwqhFcuD5x6COM7IN6hZT4wU9USpVNNVbDtOSpdJjJ2IAjn
vgo1imq0RmPaOIivXsSYLKIA4S7RuK2m0j0ejVnuR7+uKEd7t3juiiWUmw5t09qs
B+RqUXjHVnoN18dKnSnlE6nTFZNzYH2JzNiTp9h8ohQ4hmarUvJVH5ycxOq8gDL3
QXu1ovG/l5DvZvjbcCiR6TN7tdjjZL9RuFDCuBm8/amaiQlgY7AwQNixNUrDL9ro
LmCKBP56AyEXRjNGyAnmlt/ESdD1ZzH0JhcbxnKwQ5EUrQt/8M48zvgQgHvLA/6S
k5aMVrkVWG8rMtLlY8Qn/A==
=PkgM
-----END PGP PUBLIC KEY BLOCK-----
Binary file not shown.
Binary file not shown.
Binary file not shown.
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
project.part1/subfolder
project.part1/subfolder/file_c.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
d1dd210d6b1312cb342b56d02bd5e651 project.part1/subfolder/file_c.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
drwxr-xr-x 0 akahles staff 0 Apr 7 22:02 project.part1/subfolder/
-rw-r--r-- 0 akahles staff 3145728 Apr 7 22:02 project.part1/subfolder/file_c.txt
Binary file not shown.
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
cd0e06ed572a6691ed828de219421e04 project.part1.part1.tar.lz
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
8e8226c3eeb3e3ac2edc0c81cb025ba8 project.part1.part1.tar
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
project.part1/file_a.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
b2d1236c286a3c0704224fe4105eca49 project.part1/file_a.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
-rw-r--r-- 0 akahles staff 2097152 Apr 7 22:02 project.part1/file_a.txt
Binary file not shown.
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
f08b2bf1cd538616f67f0fb13432f783 project.part1.part2.tar.lz
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
66e6d6a94f892f9b096aad4a3e11ee75 project.part1.part2.tar
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
project.part1/file_b.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
d1dd210d6b1312cb342b56d02bd5e651 project.part1/file_b.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
-rw-r--r-- 0 akahles staff 3145728 Apr 7 22:02 project.part1/file_b.txt
Binary file not shown.
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
858b7349d661074d1abbe912d24f480b project.part1.part3.tar.lz
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
f1b5211054560aaec6b34ae36f1195bf project.part1.part3.tar
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
3
Loading