using System.IO;
using System.Text;
using SharepointToolbox.Core.Models;
using SharepointToolbox.Localization;
namespace SharepointToolbox.Services.Export;
/// <summary>
/// Exports a <c>DuplicateGroup</c> list to CSV. Each duplicate item becomes one row;
/// the Group column ties copies together and a Copies column gives the group size.
/// The header row is built at write-time so culture switches are honoured.
/// </summary>
public class DuplicatesCsvExportService
{
    /// <summary>
    /// Writes the CSV report to <paramref name="filePath"/>. Encoding (UTF-8 with
    /// BOM for Excel compatibility) is handled by <c>ExportFileWriter.WriteCsvAsync</c>.
    /// </summary>
    /// <param name="groups">Duplicate groups to export.</param>
    /// <param name="filePath">Destination path for the CSV file.</param>
    /// <param name="ct">Token to cancel the file write.</param>
    public async Task WriteAsync(
        IReadOnlyList<DuplicateGroup> groups,
        string filePath,
        CancellationToken ct)
    {
        var csv = BuildCsv(groups);
        await ExportFileWriter.WriteCsvAsync(filePath, csv, ct);
    }

    /// <summary>
    /// Writes one or more CSVs depending on <paramref name="splitMode"/>.
    /// Single → one file at <paramref name="basePath"/> as-is; BySite → one file
    /// per site, with filenames derived from <paramref name="basePath"/> plus a
    /// site suffix (see <c>ReportSplitHelper</c>).
    /// </summary>
    public Task WriteAsync(
        IReadOnlyList<DuplicateGroup> groups,
        string basePath,
        ReportSplitMode splitMode,
        CancellationToken ct)
        => ReportSplitHelper.WritePartitionedAsync(
            groups, basePath, splitMode,
            PartitionBySite,
            (part, path, c) => WriteAsync(part, path, c),
            ct);

    /// <summary>
    /// Partitions groups by the site of each group's first item. A group with no
    /// items falls into the empty-site partition. Labels come from
    /// <c>ReportSplitHelper.DeriveSiteLabel</c>.
    /// </summary>
    internal static IEnumerable<(string Label, IReadOnlyList<DuplicateGroup> Partition)> PartitionBySite(
        IReadOnlyList<DuplicateGroup> groups)
    {
        return groups
            .GroupBy(g =>
            {
                var first = g.Items.FirstOrDefault();
                return (Url: first?.SiteUrl ?? string.Empty, Title: first?.SiteTitle ?? string.Empty);
            })
            .Select(g => (
                Label: ReportSplitHelper.DeriveSiteLabel(g.Key.Url, g.Key.Title),
                Partition: (IReadOnlyList<DuplicateGroup>)g.ToList()));
    }

    /// <summary>
    /// Builds the CSV payload. Emits a summary block (title, group count,
    /// generated timestamp), then a localized header row, then one row per
    /// duplicate item with its 1-based index within the group and the group
    /// size. Fields are escaped via <c>CsvSanitizer.Escape</c>.
    /// </summary>
    public string BuildCsv(IReadOnlyList<DuplicateGroup> groups)
    {
        var T = TranslationSource.Instance;
        var sb = new StringBuilder();

        // Summary
        sb.AppendLine($"\"{T["report.title.duplicates_short"]}\"");
        sb.AppendLine($"\"{T["report.text.duplicate_groups_found"]}\",\"{groups.Count}\"");
        sb.AppendLine($"\"{T["report.text.generated"]}\",\"{DateTime.Now:yyyy-MM-dd HH:mm:ss}\"");
        sb.AppendLine();

        // Header — looked up per call so a runtime culture switch is honoured.
        sb.AppendLine(string.Join(",", new[]
        {
            Csv(T["report.col.number"]),
            Csv(T["report.col.group"]),
            Csv(T["report.text.copies"]),
            Csv(T["report.col.site"]),
            Csv(T["report.col.name"]),
            Csv(T["report.col.library"]),
            Csv(T["report.col.path"]),
            Csv(T["report.col.size_bytes"]),
            Csv(T["report.col.created"]),
            Csv(T["report.col.modified"]),
        }));

        // Rows: the Group column (g.Name) ties copies together; Copies is the group size.
        foreach (var g in groups)
        {
            int i = 0;
            foreach (var item in g.Items)
            {
                i++;
                sb.AppendLine(string.Join(",", new[]
                {
                    Csv(i.ToString()),
                    Csv(g.Name),
                    Csv(g.Items.Count.ToString()),
                    Csv(item.SiteTitle),
                    Csv(item.Name),
                    Csv(item.Library),
                    Csv(item.Path),
                    Csv(item.SizeBytes?.ToString() ?? string.Empty),
                    Csv(item.Created?.ToString("yyyy-MM-dd") ?? string.Empty),
                    Csv(item.Modified?.ToString("yyyy-MM-dd") ?? string.Empty),
                }));
            }
        }

        return sb.ToString();
    }

    // Centralized escaping so all export services quote/escape identically.
    private static string Csv(string value) => CsvSanitizer.Escape(value);
}