mirror of https://bitbucket.org/smil3y/kde-extraapps.git (synced 2025-02-24 02:42:52 +00:00)
fixup zip and 7z archives support
TODO: the format does not get recognized via archive_format() when called on an existing archive created with Ark, what the heck? Is it a libarchive bug (test with a git checkout)? If it is, then checking the format with another switch in the ARCHIVE_FILTER_NONE case would do the job without manually checking the extensions for zip and 7z.
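A rough sketch of that alternative, purely illustrative and assuming the libarchive behaviour turns out not to be a bug: in the ARCHIVE_FILTER_NONE case, dispatch on archive_format() instead of the filename suffix. The helper name setWriterFromReaderFormat and its standalone signature are made up for the sketch; in the plugin this logic would sit inline in addFiles()/deleteFiles(), where arch_reader and arch_writer already exist.

// Sketch only: pick the write format from the reader's detected format when
// there is no compression filter, instead of looking at the file extension.
// Note: archive_format() typically only reports a useful value after
// archive_read_next_header() has been called at least once, which might
// explain the behaviour described in the TODO above.
#include <archive.h>

static int setWriterFromReaderFormat(struct archive* arch_reader, struct archive* arch_writer)
{
    if (archive_filter_code(arch_reader, 0) != ARCHIVE_FILTER_NONE) {
        return ARCHIVE_FAILED; // other filters are handled by the existing switch
    }
    switch (archive_format(arch_reader) & ARCHIVE_FORMAT_BASE_MASK) {
    case ARCHIVE_FORMAT_ZIP:
        return archive_write_set_format_zip(arch_writer);
    case ARCHIVE_FORMAT_7ZIP:
        return archive_write_set_format_7zip(arch_writer);
    default:
        // plain tar or anything else uncompressed: keep writing without a filter
        return archive_write_add_filter_none(arch_writer);
    }
}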
parent 4f8a8a436c
commit 063252de21
1 changed file with 30 additions and 12 deletions
@@ -400,6 +400,12 @@ bool LibArchiveInterface::addFiles(const QStringList& files, const CompressionOp
         } else if (filename().right(3).toUpper() == QLatin1String( "TAR" )) {
             kDebug() << "Detected no compression for new file (pure tar)";
             ret = archive_write_add_filter_none(arch_writer.data());
+        } else if (filename().right(3).toUpper() == QLatin1String( "ZIP" )) {
+            kDebug() << "Detected zip compression for new file";
+            ret = archive_write_set_format_zip(arch_writer.data());
+        } else if (filename().right(2).toUpper() == QLatin1String( "7Z" )) {
+            kDebug() << "Detected 7z compression for new file";
+            ret = archive_write_set_format_7zip(arch_writer.data());
         } else {
             kDebug() << "Falling back to gzip";
             ret = archive_write_add_filter_gzip(arch_writer.data());
@@ -413,24 +419,30 @@ bool LibArchiveInterface::addFiles(const QStringList& files, const CompressionOp
         }
     } else {
         switch (archive_filter_code(arch_reader.data(), 0)) {
-        case ARCHIVE_COMPRESSION_GZIP:
+        case ARCHIVE_FILTER_GZIP:
             ret = archive_write_add_filter_gzip(arch_writer.data());
             break;
-        case ARCHIVE_COMPRESSION_BZIP2:
+        case ARCHIVE_FILTER_BZIP2:
             ret = archive_write_add_filter_bzip2(arch_writer.data());
             break;
 #ifdef HAVE_LIBARCHIVE_XZ_SUPPORT
-        case ARCHIVE_COMPRESSION_XZ:
+        case ARCHIVE_FILTER_XZ:
             ret = archive_write_add_filter_xz(arch_writer.data());
             break;
 #endif
 #ifdef HAVE_LIBARCHIVE_LZMA_SUPPORT
-        case ARCHIVE_COMPRESSION_LZMA:
+        case ARCHIVE_FILTER_LZMA:
             ret = archive_write_add_filter_lzma(arch_writer.data());
             break;
 #endif
-        case ARCHIVE_COMPRESSION_NONE:
+        case ARCHIVE_FILTER_NONE:
+            if (filename().right(3).toUpper() == QLatin1String( "ZIP" )) {
+                ret = archive_write_set_format_zip(arch_writer.data());
+            } else if (filename().right(2).toUpper() == QLatin1String( "7Z" )) {
+                ret = archive_write_set_format_7zip(arch_writer.data());
+            } else {
             ret = archive_write_add_filter_none(arch_writer.data());
+            }
             break;
         default:
             emit error(i18n("The compression type '%1' is not supported by Ark.", QLatin1String(archive_filter_name(arch_reader.data(), 0))));
@@ -558,24 +570,30 @@ bool LibArchiveInterface::deleteFiles(const QVariantList& files)
 
     int ret;
     switch (archive_filter_code(arch_reader.data(), 0)) {
-    case ARCHIVE_COMPRESSION_GZIP:
+    case ARCHIVE_FILTER_GZIP:
         ret = archive_write_add_filter_gzip(arch_writer.data());
         break;
-    case ARCHIVE_COMPRESSION_BZIP2:
+    case ARCHIVE_FILTER_BZIP2:
         ret = archive_write_add_filter_bzip2(arch_writer.data());
         break;
 #ifdef HAVE_LIBARCHIVE_XZ_SUPPORT
-    case ARCHIVE_COMPRESSION_XZ:
+    case ARCHIVE_FILTER_XZ:
         ret = archive_write_add_filter_xz(arch_writer.data());
         break;
 #endif
 #ifdef HAVE_LIBARCHIVE_LZMA_SUPPORT
-    case ARCHIVE_COMPRESSION_LZMA:
+    case ARCHIVE_FILTER_LZMA:
         ret = archive_write_add_filter_lzma(arch_writer.data());
         break;
 #endif
-    case ARCHIVE_COMPRESSION_NONE:
+    case ARCHIVE_FILTER_NONE:
+        if (filename().right(3).toUpper() == QLatin1String( "ZIP" )) {
+            ret = archive_write_set_format_zip(arch_writer.data());
+        } else if (filename().right(2).toUpper() == QLatin1String( "7Z" )) {
+            ret = archive_write_set_format_7zip(arch_writer.data());
+        } else {
         ret = archive_write_add_filter_none(arch_writer.data());
+        }
         break;
     default:
         emit error(i18n("The compression type '%1' is not supported by Ark.", QLatin1String(archive_filter_name(arch_reader.data(), 0))));