using System.IO.Compression;
using GisConverter.Lib.Converters;
namespace GisConverter.TestsApp.Converters
{
/// <summary>
/// Unit tests for <see cref="ConverterUtils"/>, focusing on archive helpers and cleanup helpers.
/// </summary>
///
///
/// Purpose
/// - Provide a focused, easy-to-read suite of unit tests that document and validate the behavior of
/// the small, deterministic helper surface exposed by <see cref="ConverterUtils"/>.
/// - Ensure archive recognition, archive inspection, required-component detection (for multi-file formats
/// such as Shapefile and FileGDB), deterministic output path construction, and best-effort cleanup semantics
/// are stable and well-documented.
///
///
///
/// Scope
/// - These tests exercise filesystem-facing logic only: extension heuristics, archive entry listing,
/// detection of required components inside archives and safe extraction assumptions implicitly relied on
/// by higher-level conversion flows.
/// - Tests intentionally avoid invoking third-party conversion drivers (for example Aspose). Integration tests
/// that require licensed drivers or heavy sample data belong in a gated test suite.
///
///
///
/// Test design principles
/// - Determinism: tests create concise synthetic artifacts programmatically (for example zip archives created
/// from small temporary directories) to avoid external dependencies and keep runs reproducible.
/// - Isolation: each test runs in an isolated GUID-named temp root under Path.GetTempPath(). The
/// test class performs best-effort cleanup in <see cref="Dispose"/> but tolerates transient failures.
/// - Small & focused: each test targets a single helper or a small, related set of helpers so failures are actionable.
/// - Stable assertions: prefer asserting on stable tokens (presence of extensions, boolean results) rather than
/// exact messages or ordering that may change as logging prose evolves.
///
///
///
/// Test cases covered
/// - Heuristic archive detection:
/// - Positive cases for supported archive extensions including composite forms (for example .tar.gz, .tgz).
/// - Negative cases for unsupported archive-like extensions and files without extensions.
/// - Case and whitespace tolerance.
/// - Archive entry inspection:
/// - Read entry names from a ZIP using <see cref="ConverterUtils.TryListArchiveEntries"/>.
/// - Verify presence of required component files for a format (e.g. .shp, .shx, .dbf).
/// - Folder-marker detection:
/// - Confirm detection of zipped folder markers such as .gdb when the archive preserves base directories.
/// - Cleanup helpers:
/// - Ensure <see cref="ConverterUtils.CleanupExtractedFiles"/> deletes files and
/// <see cref="ConverterUtils.TryCleanupTempFolder"/> does not throw when called with missing or empty paths.
/// - Output path construction:
/// - Validate <see cref="ConverterUtils.BuildOutputPath"/> preserves base names,
/// handles multi-extension inputs, falls back to sensible defaults and returns null for unknown formats.
///
///
///
/// Implementation notes for tests
/// - Use <see cref="ZipFile.CreateFromDirectory(string, string, CompressionLevel, bool)"/> with
/// includeBaseDirectory when testing folder-segment detection (for example zipped .gdb folders).
/// - Use CompressionLevel.Fastest in tests to minimize CPU and IO overhead while keeping deterministic output.
/// - When enumerating archive entries prefer canonicalizing comparisons (for example <see cref="StringComparison.OrdinalIgnoreCase"/>).
///
///
///
/// Reliability & CI considerations
/// - Keep artifacts small to avoid slow CI agents and storage pressure.
/// - Tests perform best-effort cleanup; if CI agents report intermittent file-locks, investigate AV/antivirus interactions
/// or agent permissions rather than making cleanup strict.
/// - Gate any tests that require real conversion drivers or large sample data behind environment flags so public CI remains fast.
///
///
///
/// Extensibility & maintenance
/// - When adding support for new archive heuristics or new required-component mappings:
/// - Add theory data covering both positive and negative filename patterns (composite and single extensions, casing, whitespace).
/// - Add small archive samples (programmatically created) that exercise the new detection logic (preserve base dir vs flattened).
/// - Update tests that assert component detection to include the new required extension set.
/// - Keep tests asserting outcomes (true/false, presence/absence) rather than log text to reduce brittleness.
///
///
///
/// Troubleshooting failing tests
/// - If an extraction/listing test fails:
/// - Inspect the temp root printed in test diagnostics to reproduce locally.
/// - Verify the archive was created with the expected includeBaseDirectory setting.
/// - Confirm agent permissions allow creating and deleting files in the system temp folder.
/// - If BuildOutputPath tests fail:
/// - Confirm FileExtensionHelpers mappings are current and that the expected target extension list matches the converter mapping.
///
///
///
/// Security & safety
/// - Tests operate only on synthetic, ephemeral data created under a GUID-named temp root. Do not add production artifacts or sensitive data.
/// - Treat any extracted files as untrusted; tests never execute or open binaries beyond simple read/write operations.
///
///
public class ConverterUtilsTests : IDisposable
{
    // GUID-named temp root under the system temp folder; every artifact a test
    // creates lives under here so parallel test classes cannot collide.
    private readonly string _root;

    /// <summary>
    /// Create an isolated temp directory for each test.
    /// </summary>
    public ConverterUtilsTests()
    {
        _root = Path.Combine(Path.GetTempPath(), "GisConverter.ConverterUtilsTests", Guid.NewGuid().ToString("N"));
        Directory.CreateDirectory(_root);
    }

    /// <summary>
    /// Clean up the test directory tree after each test run.
    /// Cleanup is best-effort: transient failures (for example antivirus file locks
    /// on CI agents) are logged to the debug listener, never rethrown.
    /// </summary>
    public void Dispose()
    {
        try
        {
            if (Directory.Exists(_root))
                Directory.Delete(_root, true);
        }
        catch (Exception ex)
        {
            System.Diagnostics.Debug.WriteLine($"Cleanup failed for {_root}: {ex.Message}");
        }
    }

    /// <summary>
    /// Verifies that the archive detection heuristic recognizes supported archive extensions.
    /// Supported formats: ZIP, TAR, TGZ, GZ, BZ2, XZ, 7Z, RAR.
    /// </summary>
    [Fact(DisplayName = "IsArchiveFile_RecognizesSupportedExtensions")]
    public void IsArchiveFile_RecognizesSupportedExtensions()
    {
        // Core supported formats
        Assert.True(ConverterUtils.IsArchiveFile("test.zip"));
        Assert.True(ConverterUtils.IsArchiveFile("package.7z"));
        Assert.True(ConverterUtils.IsArchiveFile("archive.tar"));
        Assert.True(ConverterUtils.IsArchiveFile("data.gz"));
        Assert.True(ConverterUtils.IsArchiveFile("file.bz2"));
        Assert.True(ConverterUtils.IsArchiveFile("compressed.xz"));
        Assert.True(ConverterUtils.IsArchiveFile("archive.rar"));
        // Composite extension (tar.gz)
        Assert.True(ConverterUtils.IsArchiveFile("archive.tar.gz"));
        // Common alias for tar.gz
        Assert.True(ConverterUtils.IsArchiveFile("package.tgz"));
        // Non-archive files
        Assert.False(ConverterUtils.IsArchiveFile("file.json"));
        Assert.False(ConverterUtils.IsArchiveFile("noext"));
        Assert.False(ConverterUtils.IsArchiveFile("document.txt"));
        // Unsupported archive formats should return false
        Assert.False(ConverterUtils.IsArchiveFile("disk.iso"));
        Assert.False(ConverterUtils.IsArchiveFile("installer.cab"));
        Assert.False(ConverterUtils.IsArchiveFile("setup.msi"));
        Assert.False(ConverterUtils.IsArchiveFile("image.dmg"));
        Assert.False(ConverterUtils.IsArchiveFile("example.kmz")); // KMZ not in supported list
    }

    /// <summary>
    /// Verifies that all explicitly supported formats are recognized,
    /// including the composite .tar.gz form and mixed-case names.
    /// </summary>
    [Theory(DisplayName = "IsArchiveFile_RecognizesSupportedFormats")]
    [InlineData("archive.zip")]
    [InlineData("archive.tar")]
    [InlineData("archive.tgz")]
    [InlineData("archive.gz")]
    [InlineData("archive.bz2")]
    [InlineData("archive.xz")]
    [InlineData("archive.7z")]
    [InlineData("archive.rar")]
    [InlineData("archive.tar.gz")] // Composite
    [InlineData("ARCHIVE.ZIP")] // Case insensitive
    [InlineData("File.TAR")]
    [InlineData("Data.7Z")]
    public void IsArchiveFile_RecognizesSupportedFormats(string filename)
    {
        // Include the offending filename in the failure message so a failing
        // theory row is immediately identifiable in CI output.
        Assert.True(ConverterUtils.IsArchiveFile(filename),
            $"Expected IsArchiveFile to recognize supported format: {filename}");
    }

    /// <summary>
    /// Verifies that unsupported archive formats are rejected.
    /// </summary>
    [Theory(DisplayName = "IsArchiveFile_RejectsUnsupportedArchiveFormats")]
    [InlineData("disk.iso")]
    [InlineData("archive.kmz")]
    [InlineData("installer.cab")]
    [InlineData("setup.msi")]
    [InlineData("package.rpm")]
    [InlineData("image.dmg")]
    [InlineData("virtual.vdi")]
    [InlineData("disk.vhd")]
    [InlineData("vm.vmdk")]
    [InlineData("old.arj")]
    [InlineData("archive.lzh")]
    [InlineData("compressed.z")]
    [InlineData("system.wim")]
    [InlineData("backup.cpio")]
    [InlineData("help.chm")]
    [InlineData("partition.ext")]
    [InlineData("disk.fat")]
    [InlineData("volume.ntfs")]
    public void IsArchiveFile_RejectsUnsupportedArchiveFormats(string filename)
    {
        Assert.False(ConverterUtils.IsArchiveFile(filename),
            $"Expected IsArchiveFile to reject unsupported archive format: {filename}");
    }

    /// <summary>
    /// Verifies that null, empty, and whitespace-only inputs return false.
    /// </summary>
    [Theory(DisplayName = "IsArchiveFile_RejectsInvalidInput")]
    [InlineData(null)]
    [InlineData("")]
    [InlineData(" ")]
    [InlineData("\t")]
    [InlineData("\n")]
    public void IsArchiveFile_RejectsInvalidInput(string? path)
    {
        Assert.False(ConverterUtils.IsArchiveFile(path!),
            "Expected IsArchiveFile to return false for invalid input");
    }

    /// <summary>
    /// Verifies that files without extensions or with non-archive extensions return false.
    /// </summary>
    [Theory(DisplayName = "IsArchiveFile_RejectsNonArchiveFiles")]
    [InlineData("noextension")]
    [InlineData("document.txt")]
    [InlineData("image.png")]
    [InlineData("video.mp4")]
    [InlineData("data.json")]
    [InlineData("style.css")]
    [InlineData("script.js")]
    [InlineData("code.cs")]
    [InlineData("README.md")]
    public void IsArchiveFile_RejectsNonArchiveFiles(string filename)
    {
        Assert.False(ConverterUtils.IsArchiveFile(filename),
            $"Expected IsArchiveFile to reject non-archive file: {filename}");
    }

    /// <summary>
    /// Verifies case-insensitive detection for mixed case inputs.
    /// </summary>
    [Theory(DisplayName = "IsArchiveFile_CaseInsensitiveDetection")]
    [InlineData("Archive.ZIP")]
    [InlineData("PACKAGE.7Z")]
    [InlineData("Backup.Tar.Gz")]
    [InlineData("DATA.RAR")]
    [InlineData("file.TGZ")]
    public void IsArchiveFile_CaseInsensitiveDetection(string filename)
    {
        Assert.True(ConverterUtils.IsArchiveFile(filename),
            $"Expected IsArchiveFile to recognize case-insensitive format: {filename}");
    }

    /// <summary>
    /// Verifies that paths with leading/trailing whitespace are handled correctly.
    /// </summary>
    [Theory(DisplayName = "IsArchiveFile_HandlesWhitespace")]
    [InlineData(" archive.zip")]
    [InlineData("package.7z ")]
    [InlineData(" backup.tar.gz ")]
    [InlineData("\tfile.rar\t")]
    public void IsArchiveFile_HandlesWhitespace(string path)
    {
        Assert.True(ConverterUtils.IsArchiveFile(path),
            $"Expected IsArchiveFile to handle whitespace in path: '{path}'");
    }

    /// <summary>
    /// Creates a zip archive containing shapefile components and verifies entries are listed
    /// and that ArchiveContainsAllRequiredExtensions returns true for .shp/.shx/.dbf.
    /// </summary>
    [Fact(DisplayName = "TryListArchiveEntries_and_ArchiveContainsAllRequiredExtensions")]
    public void TryListArchiveEntries_and_ArchiveContainsAllRequiredExtensions()
    {
        // Create a zip with the three mandatory shapefile components
        var zipPath = Path.Combine(_root, "shp.zip");
        var tempDir = Path.Combine(_root, "ztemp");
        Directory.CreateDirectory(tempDir);
        var shp = Path.Combine(tempDir, "a.shp");
        var shx = Path.Combine(tempDir, "a.shx");
        var dbf = Path.Combine(tempDir, "a.dbf");
        File.WriteAllText(shp, "shp");
        File.WriteAllText(shx, "shx");
        File.WriteAllText(dbf, "dbf");
        ZipFile.CreateFromDirectory(tempDir, zipPath);

        var entries = ConverterUtils.TryListArchiveEntries(zipPath)?.ToList();
        Assert.NotNull(entries);
        Assert.Contains(entries, e => e.EndsWith("a.shp", StringComparison.OrdinalIgnoreCase));
        Assert.Contains(entries, e => e.EndsWith("a.shx", StringComparison.OrdinalIgnoreCase));
        Assert.Contains(entries, e => e.EndsWith("a.dbf", StringComparison.OrdinalIgnoreCase));

        var required = new[] { ".shp", ".shx", ".dbf" };
        Assert.True(ConverterUtils.ArchiveContainsAllRequiredExtensions(zipPath, required));
    }

    /// <summary>
    /// When an archive contains a folder named "*.gdb" and the base directory is preserved
    /// in the zip entries, the collector should observe the .gdb segment and requirement checks
    /// should succeed for .gdb.
    /// </summary>
    [Fact(DisplayName = "ArchiveContainingGdbFolder_IsRecognizedAsGdb")]
    public void ArchiveContainingGdbFolder_IsRecognizedAsGdb()
    {
        // Create a parent folder and a child folder named MyGdb.gdb with a dummy file inside,
        // then create the zip including the base directory so the .gdb folder name is preserved in entries.
        var parent = Path.Combine(_root, "parent");
        Directory.CreateDirectory(parent);
        var src = Path.Combine(parent, "MyGdb.gdb");
        Directory.CreateDirectory(src);
        var dummy = Path.Combine(src, "gdbtable");
        File.WriteAllText(dummy, "x");
        var zipPath = Path.Combine(_root, "gdb.zip");
        // includeBaseDirectory = true preserves the MyGdb.gdb segment inside the archive entries
        ZipFile.CreateFromDirectory(parent, zipPath, CompressionLevel.Fastest, includeBaseDirectory: true);

        // Entries listing should contain the path segment with .gdb
        var entries = ConverterUtils.TryListArchiveEntries(zipPath)?.ToList();
        Assert.NotNull(entries);
        Assert.True(entries.Any(e => e.Contains(".gdb", StringComparison.OrdinalIgnoreCase)),
            "Expected archive entries to include a .gdb folder segment (created with includeBaseDirectory).");

        // ArchiveContainsAllRequiredExtensions should detect .gdb when checking for folder markers
        Assert.True(ConverterUtils.ArchiveContainsAllRequiredExtensions(zipPath, new[] { ".gdb" }));
    }

    /// <summary>
    /// Verifies that CleanupExtractedFiles deletes the specified files and does not throw when files are missing.
    /// </summary>
    [Fact(DisplayName = "CleanupExtractedFiles_DeletesFiles")]
    public void CleanupExtractedFiles_DeletesFiles()
    {
        var f1 = Path.Combine(_root, "t1.tmp");
        var f2 = Path.Combine(_root, "t2.tmp");
        File.WriteAllText(f1, "x");
        File.WriteAllText(f2, "y");

        ConverterUtils.CleanupExtractedFiles(new[] { f1, f2 });
        Assert.False(File.Exists(f1));
        Assert.False(File.Exists(f2));

        // Calling again with missing files (and an empty path) should not throw
        ConverterUtils.CleanupExtractedFiles(new[] { f1, f2, "" });
    }

    /// <summary>
    /// TryListArchiveEntries should return null for a non-existent archive and ArchiveContainsAllRequiredExtensions
    /// should return false when required components are missing.
    /// </summary>
    [Fact(DisplayName = "TryListArchiveEntries_NonexistentOrMissingComponents")]
    public void TryListArchiveEntries_NonexistentOrMissingComponents()
    {
        var missingPath = Path.Combine(_root, "doesnotexist.zip");
        var entries = ConverterUtils.TryListArchiveEntries(missingPath);
        Assert.Null(entries);

        // Create an archive that lacks required components for a shapefile
        var zipPath = Path.Combine(_root, "incomplete.zip");
        var tempDir = Path.Combine(_root, "inc");
        Directory.CreateDirectory(tempDir);
        File.WriteAllText(Path.Combine(tempDir, "only.txt"), "x");
        ZipFile.CreateFromDirectory(tempDir, zipPath);

        var contains = ConverterUtils.ArchiveContainsAllRequiredExtensions(zipPath, new[] { ".shp", ".shx", ".dbf" });
        Assert.False(contains);
    }

    /// <summary>
    /// Verifies BuildOutputPath keeps the input's base name and swaps in the
    /// target format's extension, for bare names and absolute paths alike.
    /// </summary>
    [Theory(DisplayName = "BuildOutputPath_PreservesOriginalFilename")]
    [InlineData("Cities.geojson", "Shapefile", "Cities.shp")]
    [InlineData("MyData.zip", "GeoJson", "MyData.json")]
    [InlineData("Roads.shp", "Kml", "Roads.kml")]
    [InlineData("Points.csv", "GeoPackage", "Points.gpkg")]
    [InlineData("/path/to/Districts.gml", "EsriJson", "Districts.json")]
    [InlineData("C:\\temp\\Boundaries.gpx", "Shapefile", "Boundaries.shp")]
    public void BuildOutputPath_PreservesOriginalFilename(string inputPath, string targetFormat, string expectedFilename)
    {
        var outputFolder = Path.Combine(_root, "output");
        Directory.CreateDirectory(outputFolder);

        var result = ConverterUtils.BuildOutputPath(outputFolder, inputPath, targetFormat);

        Assert.NotNull(result);
        Assert.Equal(Path.Combine(outputFolder, expectedFilename), result);
    }

    /// <summary>
    /// Verifies BuildOutputPath falls back to an "output"-based filename when the
    /// input path is empty or whitespace-only.
    /// </summary>
    [Theory(DisplayName = "BuildOutputPath_FallsBackToOutput_WhenFilenameEmpty")]
    [InlineData("", "GeoJson")]
    [InlineData(" ", "Shapefile")]
    [InlineData("\t", "Kml")]
    public void BuildOutputPath_FallsBackToOutput_WhenFilenameEmpty(string inputPath, string targetFormat)
    {
        var outputFolder = Path.Combine(_root, "output");
        Directory.CreateDirectory(outputFolder);

        var result = ConverterUtils.BuildOutputPath(outputFolder, inputPath, targetFormat);

        Assert.NotNull(result);
        Assert.Contains("output", Path.GetFileName(result));
    }

    /// <summary>
    /// Verifies BuildOutputPath returns null when the target format has no known extension mapping.
    /// </summary>
    [Fact(DisplayName = "BuildOutputPath_ReturnsNull_ForUnknownFormat")]
    public void BuildOutputPath_ReturnsNull_ForUnknownFormat()
    {
        var outputFolder = Path.Combine(_root, "output");
        Directory.CreateDirectory(outputFolder);

        var result = ConverterUtils.BuildOutputPath(outputFolder, "test.geojson", "UnknownFormat");

        Assert.Null(result);
    }

    /// <summary>
    /// Verifies that only the last extension of a multi-extension input is replaced
    /// (e.g. "archive.tar.gz" becomes "archive.tar.json").
    /// </summary>
    [Theory(DisplayName = "BuildOutputPath_HandlesMultipleExtensions")]
    [InlineData("archive.tar.gz", "GeoJson", "archive.tar.json")]
    [InlineData("data.backup.shp", "Kml", "data.backup.kml")]
    public void BuildOutputPath_HandlesMultipleExtensions(string inputPath, string targetFormat, string expectedFilename)
    {
        var outputFolder = Path.Combine(_root, "output");
        Directory.CreateDirectory(outputFolder);

        var result = ConverterUtils.BuildOutputPath(outputFolder, inputPath, targetFormat);

        Assert.NotNull(result);
        Assert.Equal(Path.Combine(outputFolder, expectedFilename), result);
    }

    /// <summary>
    /// Verifies that when an extracted source path is supplied, the output filename is
    /// derived from the extracted data file rather than from the archive name.
    /// </summary>
    [Theory(DisplayName = "BuildOutputPath_UsesExtractedSourcePath_WhenProvided")]
    [InlineData("ShapeFiles.zip", "iofr0c1nypl.shp", "GeoJson", "iofr0c1nypl.json")]
    [InlineData("archive.tar.gz", "Cities.geojson", "Shapefile", "Cities.shp")]
    [InlineData("MyData.7z", "Roads.kml", "GeoPackage", "Roads.gpkg")]
    [InlineData("bundle.rar", "Points.csv", "Kml", "Points.kml")]
    public void BuildOutputPath_UsesExtractedSourcePath_WhenProvided(
        string archivePath,
        string extractedPath,
        string targetFormat,
        string expectedFilename)
    {
        var outputFolder = Path.Combine(_root, "output");
        Directory.CreateDirectory(outputFolder);

        // When an extracted source path is provided, it should be used instead of the archive name
        var result = ConverterUtils.BuildOutputPath(outputFolder, archivePath, targetFormat, extractedPath);

        Assert.NotNull(result);
        Assert.Equal(Path.Combine(outputFolder, expectedFilename), result);
    }

    /// <summary>
    /// Verifies that a null extracted source path falls back to the original input path.
    /// </summary>
    [Theory(DisplayName = "BuildOutputPath_UsesOriginalPath_WhenExtractedSourceIsNull")]
    [InlineData("Cities.geojson", null, "Shapefile", "Cities.shp")]
    [InlineData("MyData.zip", null, "GeoJson", "MyData.json")]
    public void BuildOutputPath_UsesOriginalPath_WhenExtractedSourceIsNull(
        string inputPath,
        string? extractedPath,
        string targetFormat,
        string expectedFilename)
    {
        var outputFolder = Path.Combine(_root, "output");
        Directory.CreateDirectory(outputFolder);

        // When no extracted source path is provided, should fall back to original input path
        var result = ConverterUtils.BuildOutputPath(outputFolder, inputPath, targetFormat, extractedPath);

        Assert.NotNull(result);
        Assert.Equal(Path.Combine(outputFolder, expectedFilename), result);
    }

    /// <summary>
    /// Documents the archive-vs-extracted naming contract side by side: without an
    /// extracted path the archive's name is used; with it, the data file's name wins.
    /// </summary>
    [Fact(DisplayName = "BuildOutputPath_PreservesOriginalDataFileName_NotArchiveName")]
    public void BuildOutputPath_PreservesOriginalDataFileName_NotArchiveName()
    {
        var outputFolder = Path.Combine(_root, "output");
        Directory.CreateDirectory(outputFolder);

        // Simulating real scenario: archive named "ShapeFiles.zip" contains "iofr0c1nypl.shp"
        var archiveName = "ShapeFiles.zip";
        var extractedFile = Path.Combine(_root, "temp", "iofr0c1nypl.shp");

        // Without extracted path (incorrect - uses archive name)
        var resultWithoutExtracted = ConverterUtils.BuildOutputPath(outputFolder, archiveName, "GeoJson");
        Assert.NotNull(resultWithoutExtracted);
        Assert.Equal("ShapeFiles.json", Path.GetFileName(resultWithoutExtracted));

        // With extracted path (correct - uses actual data file name)
        var resultWithExtracted = ConverterUtils.BuildOutputPath(outputFolder, archiveName, "GeoJson", extractedFile);
        Assert.NotNull(resultWithExtracted);
        Assert.Equal("iofr0c1nypl.json", Path.GetFileName(resultWithExtracted));
    }

    /// <summary>
    /// Verifies that TryCleanupTempFolder removes folder contents but preserves the folder itself.
    /// This ensures consistent behavior between single-file and archive processing.
    /// </summary>
    [Fact(DisplayName = "TryCleanupTempFolder_RemovesContentsButPreservesFolder")]
    public void TryCleanupTempFolder_RemovesContentsButPreservesFolder()
    {
        var tempFolder = Path.Combine(_root, "cleanup_test");
        Directory.CreateDirectory(tempFolder);

        // Create files and subdirectories in temp folder
        var file1 = Path.Combine(tempFolder, "file1.txt");
        var file2 = Path.Combine(tempFolder, "file2.dat");
        File.WriteAllText(file1, "content1");
        File.WriteAllText(file2, "content2");
        var subDir = Path.Combine(tempFolder, "subdir");
        Directory.CreateDirectory(subDir);
        var nestedFile = Path.Combine(subDir, "nested.txt");
        File.WriteAllText(nestedFile, "nested content");

        // Verify setup
        Assert.True(Directory.Exists(tempFolder));
        Assert.True(File.Exists(file1));
        Assert.True(File.Exists(file2));
        Assert.True(Directory.Exists(subDir));
        Assert.True(File.Exists(nestedFile));

        // Execute cleanup
        ConverterUtils.TryCleanupTempFolder(tempFolder);

        // Verify results: folder exists but is empty
        Assert.True(Directory.Exists(tempFolder), "Temp folder should be preserved");
        Assert.False(File.Exists(file1), "File1 should be deleted");
        Assert.False(File.Exists(file2), "File2 should be deleted");
        Assert.False(Directory.Exists(subDir), "Subdirectory should be deleted");
        Assert.False(File.Exists(nestedFile), "Nested file should be deleted");
        Assert.Empty(Directory.GetFiles(tempFolder, "*", SearchOption.AllDirectories));
        Assert.Empty(Directory.GetDirectories(tempFolder, "*", SearchOption.AllDirectories));
    }

    /// <summary>
    /// Verifies that TryCleanupTempFolder handles non-existent folders gracefully without throwing.
    /// </summary>
    [Fact(DisplayName = "TryCleanupTempFolder_HandlesNonExistentFolder")]
    public void TryCleanupTempFolder_HandlesNonExistentFolder()
    {
        var nonExistentFolder = Path.Combine(_root, "does_not_exist");

        // Should not throw
        ConverterUtils.TryCleanupTempFolder(nonExistentFolder);

        // Folder should still not exist (cleanup must not create it)
        Assert.False(Directory.Exists(nonExistentFolder));
    }

    /// <summary>
    /// Verifies that TryCleanupTempFolder handles null/empty/whitespace paths gracefully.
    /// </summary>
    [Theory(DisplayName = "TryCleanupTempFolder_HandlesInvalidPaths")]
    [InlineData(null)]
    [InlineData("")]
    [InlineData(" ")]
    [InlineData("\t")]
    public void TryCleanupTempFolder_HandlesInvalidPaths(string? invalidPath)
    {
        // Should not throw
        ConverterUtils.TryCleanupTempFolder(invalidPath!);
    }

    /// <summary>
    /// Verifies that TryCleanupTempFolder handles empty folders correctly (preserves the folder).
    /// </summary>
    [Fact(DisplayName = "TryCleanupTempFolder_HandlesEmptyFolder")]
    public void TryCleanupTempFolder_HandlesEmptyFolder()
    {
        var emptyFolder = Path.Combine(_root, "empty_folder");
        Directory.CreateDirectory(emptyFolder);
        Assert.True(Directory.Exists(emptyFolder));

        // Execute cleanup on empty folder
        ConverterUtils.TryCleanupTempFolder(emptyFolder);

        // Folder should still exist and remain empty
        Assert.True(Directory.Exists(emptyFolder), "Empty folder should be preserved");
        Assert.Empty(Directory.GetFiles(emptyFolder));
        Assert.Empty(Directory.GetDirectories(emptyFolder));
    }

    /// <summary>
    /// Verifies that TryCleanupTempFolder can be called multiple times idempotently.
    /// </summary>
    [Fact(DisplayName = "TryCleanupTempFolder_IsIdempotent")]
    public void TryCleanupTempFolder_IsIdempotent()
    {
        var tempFolder = Path.Combine(_root, "idempotent_test");
        Directory.CreateDirectory(tempFolder);
        var file = Path.Combine(tempFolder, "test.txt");
        File.WriteAllText(file, "content");

        // First cleanup
        ConverterUtils.TryCleanupTempFolder(tempFolder);
        Assert.True(Directory.Exists(tempFolder));
        Assert.False(File.Exists(file));

        // Second cleanup on already-cleaned folder should not throw
        ConverterUtils.TryCleanupTempFolder(tempFolder);
        Assert.True(Directory.Exists(tempFolder));

        // Third cleanup should also work
        ConverterUtils.TryCleanupTempFolder(tempFolder);
        Assert.True(Directory.Exists(tempFolder));
    }

    /// <summary>
    /// Verifies that TryCleanupTempFolder handles deeply nested directory structures.
    /// </summary>
    [Fact(DisplayName = "TryCleanupTempFolder_HandlesDeeplyNestedStructures")]
    public void TryCleanupTempFolder_HandlesDeeplyNestedStructures()
    {
        var tempFolder = Path.Combine(_root, "nested_test");
        Directory.CreateDirectory(tempFolder);

        // Create deeply nested structure
        var level1 = Path.Combine(tempFolder, "level1");
        var level2 = Path.Combine(level1, "level2");
        var level3 = Path.Combine(level2, "level3");
        Directory.CreateDirectory(level3);
        var file1 = Path.Combine(level1, "file1.txt");
        var file2 = Path.Combine(level2, "file2.txt");
        var file3 = Path.Combine(level3, "file3.txt");
        File.WriteAllText(file1, "level1");
        File.WriteAllText(file2, "level2");
        File.WriteAllText(file3, "level3");

        // Execute cleanup
        ConverterUtils.TryCleanupTempFolder(tempFolder);

        // Verify all nested content is removed but folder is preserved
        Assert.True(Directory.Exists(tempFolder), "Temp folder should be preserved");
        Assert.False(Directory.Exists(level1), "Level1 directory should be deleted");
        Assert.False(File.Exists(file1), "File1 should be deleted");
        Assert.False(File.Exists(file2), "File2 should be deleted");
        Assert.False(File.Exists(file3), "File3 should be deleted");
        Assert.Empty(Directory.GetFileSystemEntries(tempFolder));
    }
}
}