Fix inaccurate storage metrics in CSV export
This commit is contained in:
@@ -1,5 +1,6 @@
|
||||
using SharepointToolbox.Core.Models;
|
||||
using SharepointToolbox.Localization;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
|
||||
@@ -18,34 +19,58 @@ public class StorageCsvExportService
|
||||
/// </summary>
|
||||
public string BuildCsv(IReadOnlyList<StorageNode> nodes)
{
    // Pre-size: ~110 chars/row + header avoids most StringBuilder growth.
    var sb = new StringBuilder(128 + nodes.Count * 110);
    WriteCsv(sb, nodes);
    return sb.ToString();
}
|
||||
|
||||
// Writes the header row plus one data row per node into <paramref name="sb"/>.
private static void WriteCsv(StringBuilder sb, IReadOnlyList<StorageNode> nodes)
{
    var T = TranslationSource.Instance;
    // Hoist resource lookups out of the row loop: ResourceManager.GetString
    // is a culture-aware dictionary probe — caching once per export saves
    // O(rows × columns) lookups on large tenants.
    string colLibrary = T["report.col.library"];
    string colKind = T["stor.col.kind"];
    string colSite = T["report.col.site"];
    string colFiles = T["report.stat.files"];
    string colTotalMb = T["report.col.total_size_mb"];
    string colVerMb = T["report.col.version_size_mb"];
    string colLastMod = T["report.col.last_modified"];

    // Header
    sb.Append(colLibrary).Append(',')
      .Append(colKind).Append(',')
      .Append(colSite).Append(',')
      .Append(colFiles).Append(',')
      .Append(colTotalMb).Append(',')
      .Append(colVerMb).Append(',')
      .AppendLine(colLastMod);

    // Resolve each kind's localized label once, indexed by enum value.
    var kindLabels = BuildKindLabelCache();

    foreach (var node in nodes)
    {
        AppendCsvField(sb, node.Name).Append(',');
        AppendCsvField(sb, kindLabels[(int)node.Kind]).Append(',');
        AppendCsvField(sb, node.SiteTitle).Append(',');
        sb.Append(node.TotalFileCount).Append(',');
        AppendMb(sb, node.TotalSizeBytes).Append(',');
        AppendMb(sb, node.VersionSizeBytes).Append(',');
        // Missing date ⇒ empty last column (trailing comma already emitted).
        if (node.LastModified.HasValue)
            AppendCsvField(sb, node.LastModified.Value.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture));
        sb.AppendLine();
    }
}
|
||||
|
||||
/// <summary>Writes the library-level CSV to <paramref name="filePath"/> with UTF-8 BOM.</summary>
public async Task WriteAsync(IReadOnlyList<StorageNode> nodes, string filePath, CancellationToken ct)
{
    // Stream straight to disk: skip the StringBuilder→string copy and the
    // separate UTF-8 buffer that File.WriteAllTextAsync materializes.
    var sb = new StringBuilder(128 + nodes.Count * 110);
    WriteCsv(sb, nodes);
    await ExportFileWriter.WriteCsvChunksAsync(filePath, sb, ct);
}
|
||||
|
||||
/// <summary>
|
||||
@@ -53,44 +78,68 @@ public class StorageCsvExportService
|
||||
/// </summary>
|
||||
public string BuildCsv(IReadOnlyList<StorageNode> nodes, IReadOnlyList<FileTypeMetric> fileTypeMetrics)
{
    // Pre-size for both sections (library rows + file-type rows) to avoid
    // most StringBuilder regrowth on large exports.
    var sb = new StringBuilder(192 + nodes.Count * 100 + fileTypeMetrics.Count * 40);
    WriteCsv(sb, nodes, fileTypeMetrics);
    return sb.ToString();
}
|
||||
|
||||
// Writes the two-section CSV — library rows, then an optional file-type
// breakdown — into <paramref name="sb"/>.
private static void WriteCsv(StringBuilder sb, IReadOnlyList<StorageNode> nodes, IReadOnlyList<FileTypeMetric> fileTypeMetrics)
{
    var T = TranslationSource.Instance;
    // Cache localized column labels once per export instead of per row.
    string colLibrary = T["report.col.library"];
    string colSite = T["report.col.site"];
    string colFiles = T["report.stat.files"];
    string colTotalMb = T["report.col.total_size_mb"];
    string colVerMb = T["report.col.version_size_mb"];
    string colLastMod = T["report.col.last_modified"];

    sb.Append(colLibrary).Append(',')
      .Append(colSite).Append(',')
      .Append(colFiles).Append(',')
      .Append(colTotalMb).Append(',')
      .Append(colVerMb).Append(',')
      .AppendLine(colLastMod);

    // Library details
    foreach (var node in nodes)
    {
        AppendCsvField(sb, node.Name).Append(',');
        AppendCsvField(sb, node.SiteTitle).Append(',');
        sb.Append(node.TotalFileCount).Append(',');
        AppendMb(sb, node.TotalSizeBytes).Append(',');
        AppendMb(sb, node.VersionSizeBytes).Append(',');
        // Missing date ⇒ empty last column (trailing comma already emitted).
        if (node.LastModified.HasValue)
            AppendCsvField(sb, node.LastModified.Value.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture));
        sb.AppendLine();
    }

    // File type breakdown (second section, separated by a blank line).
    if (fileTypeMetrics.Count > 0)
    {
        string colFileType = T["report.col.file_type"];
        string colSizeMb = T["report.col.size_mb"];
        string colFileCnt = T["report.col.file_count"];
        string noExtLabel = T["report.text.no_extension"];

        sb.AppendLine();
        sb.Append(colFileType).Append(',')
          .Append(colSizeMb).Append(',')
          .AppendLine(colFileCnt);

        foreach (var m in fileTypeMetrics)
        {
            // Blank extensions get the localized "(no extension)" label.
            string label = string.IsNullOrEmpty(m.Extension) ? noExtLabel : m.Extension;
            AppendCsvField(sb, label).Append(',');
            AppendMb(sb, m.TotalSizeBytes).Append(',');
            sb.Append(m.FileCount).AppendLine();
        }
    }
}
|
||||
|
||||
/// <summary>Writes the two-section CSV (libraries + file-type breakdown) with UTF-8 BOM.</summary>
public async Task WriteAsync(IReadOnlyList<StorageNode> nodes, IReadOnlyList<FileTypeMetric> fileTypeMetrics, string filePath, CancellationToken ct)
{
    // Stream straight to disk: skip the StringBuilder→string copy and the
    // separate UTF-8 buffer that File.WriteAllTextAsync materializes.
    var sb = new StringBuilder(192 + nodes.Count * 100 + fileTypeMetrics.Count * 40);
    WriteCsv(sb, nodes, fileTypeMetrics);
    await ExportFileWriter.WriteCsvChunksAsync(filePath, sb, ct);
}
|
||||
|
||||
/// <summary>
|
||||
@@ -139,11 +188,27 @@ public class StorageCsvExportService
|
||||
|
||||
// ── Helpers ───────────────────────────────────────────────────────────────
|
||||
|
||||
/// <summary>Formats <paramref name="bytes"/> as megabytes with two decimals.
/// Invariant culture keeps the decimal point a '.' regardless of UI locale,
/// so the CSV column never gains a ',' that would shift fields.</summary>
private static string FormatMb(long bytes)
    => (bytes / (1024.0 * 1024.0)).ToString("F2", CultureInfo.InvariantCulture);

/// <summary>Appends <paramref name="bytes"/> as megabytes (two decimals,
/// invariant culture) and returns <paramref name="sb"/> for call chaining.</summary>
private static StringBuilder AppendMb(StringBuilder sb, long bytes)
    => sb.Append((bytes / (1024.0 * 1024.0)).ToString("F2", CultureInfo.InvariantCulture));
|
||||
|
||||
/// <summary>RFC 4180 CSV field quoting with formula-injection guard.</summary>
private static string Csv(string value) => CsvSanitizer.EscapeMinimal(value);

/// <summary>Appends <paramref name="value"/> escaped per RFC 4180 (with
/// formula-injection guard) and returns <paramref name="sb"/> for chaining —
/// avoids the intermediate string that <see cref="Csv"/> would allocate.</summary>
private static StringBuilder AppendCsvField(StringBuilder sb, string value)
    => sb.Append(CsvSanitizer.EscapeMinimal(value));
|
||||
|
||||
/// <summary>
/// Pre-resolves localized labels for every <see cref="StorageNodeKind"/>
/// once per export, indexed by the enum's int value. Avoids a
/// <c>ResourceManager.GetString</c> call per row in hot CSV loops.
/// </summary>
private static string[] BuildKindLabelCache()
{
    var values = (StorageNodeKind[])Enum.GetValues(typeof(StorageNodeKind));
    // Size the array to the largest enum value (enum need not be contiguous).
    int max = 0;
    foreach (var v in values) { int i = (int)v; if (i > max) max = i; }
    var cache = new string[max + 1];
    // Fill every slot with the enum name first so gaps (ints with no declared
    // member) never yield null, then overwrite declared members with labels.
    for (int i = 0; i < cache.Length; i++) cache[i] = ((StorageNodeKind)i).ToString();
    foreach (var v in values) cache[(int)v] = KindLabel(v);
    return cache;
}
|
||||
|
||||
private static string KindLabel(StorageNodeKind kind)
|
||||
{
|
||||
|
||||
Reference in New Issue
Block a user