using System.IO;
using System.Text;
using System.Globalization;
using SharepointToolbox.Core.Models;
using SharepointToolbox.Localization;

namespace SharepointToolbox.Services.Export;

/// <summary>
/// Exports a <see cref="DuplicateGroup"/> list to CSV. Each duplicate item becomes one row;
/// the Group column ties copies together and a Copies column gives the group size.
/// The header row is built at write-time so culture switches are honoured.
/// </summary>
public class DuplicatesCsvExportService
{
    /// <summary>
    /// Writes the CSV to <paramref name="filePath"/> with UTF-8 BOM (Excel-compatible,
    /// delegated to <see cref="ExportFileWriter.WriteCsvAsync"/>).
    /// </summary>
    /// <param name="groups">Duplicate groups to export; one row is written per item.</param>
    /// <param name="filePath">Destination file path.</param>
    /// <param name="ct">Cancellation token, forwarded to the file writer.</param>
    public async Task WriteAsync(
        IReadOnlyList<DuplicateGroup> groups,
        string filePath,
        CancellationToken ct)
    {
        var csv = BuildCsv(groups);
        await ExportFileWriter.WriteCsvAsync(filePath, csv, ct);
    }

    /// <summary>
    /// Writes one or more CSVs depending on <paramref name="splitMode"/>.
    /// Single → <paramref name="basePath"/> as-is. BySite → one file per site,
    /// filenames derived from <paramref name="basePath"/> with a site suffix.
    /// </summary>
    /// <param name="groups">Duplicate groups to export.</param>
    /// <param name="basePath">Base output path; may be suffixed per site by the split helper.</param>
    /// <param name="splitMode">Whether to emit a single file or one file per site.</param>
    /// <param name="ct">Cancellation token.</param>
    public Task WriteAsync(
        IReadOnlyList<DuplicateGroup> groups,
        string basePath,
        ReportSplitMode splitMode,
        CancellationToken ct)
        => ReportSplitHelper.WritePartitionedAsync(
            groups,
            basePath,
            splitMode,
            PartitionBySite,
            (part, path, c) => WriteAsync(part, path, c),
            ct);

    /// <summary>
    /// Partitions groups by the site of each group's first item. Groups with no items
    /// fall into a single partition keyed by the empty url/title pair.
    /// </summary>
    /// <param name="groups">Groups to partition.</param>
    /// <returns>One (label, partition) pair per distinct site.</returns>
    internal static IEnumerable<(string Label, IReadOnlyList<DuplicateGroup> Partition)> PartitionBySite(
        IReadOnlyList<DuplicateGroup> groups)
    {
        return groups
            .GroupBy(g =>
            {
                // Site identity is taken from the first item; all copies in a duplicate
                // group are assumed to share a site for partitioning purposes — TODO confirm.
                var first = g.Items.FirstOrDefault();
                return (Url: first?.SiteUrl ?? string.Empty, Title: first?.SiteTitle ?? string.Empty);
            })
            .Select(g => (
                Label: ReportSplitHelper.DeriveSiteLabel(g.Key.Url, g.Key.Title),
                Partition: (IReadOnlyList<DuplicateGroup>)g.ToList()));
    }

    /// <summary>
    /// Builds the CSV payload. Emits a header summary (group count, generated
    /// timestamp), then one row per duplicate item with its group index and
    /// group size. All fields — including the summary — are escaped via
    /// <see cref="CsvSanitizer.Escape"/>, so translated strings containing
    /// quotes, commas or newlines cannot corrupt the CSV structure.
    /// </summary>
    /// <param name="groups">Duplicate groups to render.</param>
    /// <returns>The complete CSV document as a string.</returns>
    public string BuildCsv(IReadOnlyList<DuplicateGroup> groups)
    {
        var T = TranslationSource.Instance;
        var sb = new StringBuilder();

        // Summary block. Escaped with Csv() (not hand-wrapped in quotes) so embedded
        // quotes in translations are handled; invariant culture keeps the timestamp
        // machine-readable regardless of the active UI culture.
        sb.AppendLine(Csv(T["report.title.duplicates_short"]));
        sb.AppendLine(string.Join(",", new[]
        {
            Csv(T["report.text.duplicate_groups_found"]),
            Csv(groups.Count.ToString(CultureInfo.InvariantCulture)),
        }));
        sb.AppendLine(string.Join(",", new[]
        {
            Csv(T["report.text.generated"]),
            Csv(DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture)),
        }));
        sb.AppendLine();

        // Header row — localized at write-time so a culture switch before export
        // is reflected in the column titles.
        sb.AppendLine(string.Join(",", new[]
        {
            Csv(T["report.col.number"]),
            Csv(T["report.col.group"]),
            Csv(T["report.text.copies"]),
            Csv(T["report.col.site"]),
            Csv(T["report.col.name"]),
            Csv(T["report.col.library"]),
            Csv(T["report.col.path"]),
            Csv(T["report.col.size_bytes"]),
            Csv(T["report.col.created"]),
            Csv(T["report.col.modified"]),
        }));

        foreach (var g in groups)
        {
            // i is the 1-based index of the copy within its group (resets per group).
            int i = 0;
            foreach (var item in g.Items)
            {
                i++;
                sb.AppendLine(string.Join(",", new[]
                {
                    Csv(i.ToString(CultureInfo.InvariantCulture)),
                    Csv(g.Name),
                    Csv(g.Items.Count.ToString(CultureInfo.InvariantCulture)),
                    Csv(item.SiteTitle),
                    Csv(item.Name),
                    Csv(item.Library),
                    Csv(item.Path),
                    Csv(item.SizeBytes?.ToString(CultureInfo.InvariantCulture) ?? string.Empty),
                    Csv(item.Created?.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture) ?? string.Empty),
                    Csv(item.Modified?.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture) ?? string.Empty),
                }));
            }
        }

        return sb.ToString();
    }

    // Single escape point so every field goes through the shared sanitizer.
    private static string Csv(string value) => CsvSanitizer.Escape(value);
}