add project

GuilhermeStrice
2025-07-09 19:24:12 +01:00
parent 8d7c2d4b04
commit 1108bf3ef6
17 changed files with 4752 additions and 0 deletions

DatabaseSnapshots.csproj Normal file

@@ -0,0 +1,22 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net9.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="BCrypt.Net-Next" Version="4.0.3" />
<PackageReference Include="EasyCompressor.LZ4" Version="2.1.0" />
<PackageReference Include="MySqlConnector" Version="2.4.0" />
<PackageReference Include="CommandLineParser" Version="2.9.1" />
<PackageReference Include="Crayon" Version="2.0.60" />
<PackageReference Include="System.Text.Json" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.Json" Version="9.0.0" />
</ItemGroup>
</Project>

Models/Configuration.cs Normal file

@@ -0,0 +1,143 @@
using System.Text.Json.Serialization;
namespace DatabaseSnapshotsService.Models
{
public class SnapshotConfiguration
{
[JsonPropertyName("connectionString")]
public string ConnectionString { get; set; } = "Server=localhost;Database=trading;Uid=root;Pwd=password;";
[JsonPropertyName("binlogReader")]
public BinlogReaderConfig BinlogReader { get; set; } = new();
[JsonPropertyName("snapshotStorage")]
public SnapshotStorageConfig SnapshotStorage { get; set; } = new();
[JsonPropertyName("eventStore")]
public EventStoreConfig EventStore { get; set; } = new();
[JsonPropertyName("security")]
public SecurityConfig Security { get; set; } = new();
}
public class SecurityConfig
{
[JsonPropertyName("encryption")]
public bool Encryption { get; set; } = false;
[JsonPropertyName("encryptionKey")]
public string? EncryptionKey { get; set; }
}
public class BinlogReaderConfig
{
[JsonPropertyName("host")]
public string Host { get; set; } = "localhost";
[JsonPropertyName("port")]
public int Port { get; set; } = 3306;
[JsonPropertyName("username")]
public string Username { get; set; } = "binlog_reader";
[JsonPropertyName("password")]
public string Password { get; set; } = "secure_password";
[JsonPropertyName("serverId")]
public int ServerId { get; set; } = 999;
[JsonPropertyName("startPosition")]
public long StartPosition { get; set; } = 4;
[JsonPropertyName("heartbeatInterval")]
public int HeartbeatInterval { get; set; } = 30;
}
public class SnapshotStorageConfig
{
[JsonPropertyName("path")]
public string Path { get; set; } = "./snapshots";
[JsonPropertyName("compression")]
public bool Compression { get; set; } = true;
[JsonPropertyName("retentionDays")]
public int RetentionDays { get; set; } = 30;
[JsonPropertyName("maxFileSize")]
public long MaxFileSize { get; set; } = 100 * 1024 * 1024; // 100MB
[JsonPropertyName("dumpOptimizations")]
public DumpOptimizationConfig DumpOptimizations { get; set; } = new();
}
public class DumpOptimizationConfig
{
[JsonPropertyName("singleTransaction")]
public bool SingleTransaction { get; set; } = true;
[JsonPropertyName("includeRoutines")]
public bool IncludeRoutines { get; set; } = true;
[JsonPropertyName("includeTriggers")]
public bool IncludeTriggers { get; set; } = true;
[JsonPropertyName("includeEvents")]
public bool IncludeEvents { get; set; } = true;
[JsonPropertyName("extendedInsert")]
public bool ExtendedInsert { get; set; } = true;
[JsonPropertyName("completeInsert")]
public bool CompleteInsert { get; set; } = true;
[JsonPropertyName("hexBlob")]
public bool HexBlob { get; set; } = true;
[JsonPropertyName("netBufferLength")]
public int NetBufferLength { get; set; } = 16384;
[JsonPropertyName("maxAllowedPacket")]
public string MaxAllowedPacket { get; set; } = "1G";
[JsonPropertyName("excludeTables")]
public List<string> ExcludeTables { get; set; } = new();
[JsonPropertyName("includeTables")]
public List<string> IncludeTables { get; set; } = new();
// New options
[JsonPropertyName("quick")]
public bool Quick { get; set; } = true;
[JsonPropertyName("orderByPrimary")]
public bool OrderByPrimary { get; set; } = true;
[JsonPropertyName("flushLogs")]
public bool FlushLogs { get; set; } = true;
[JsonPropertyName("masterData")]
public int MasterData { get; set; } = 2;
[JsonPropertyName("compact")]
public bool Compact { get; set; } = false;
[JsonPropertyName("noAutocommit")]
public bool NoAutocommit { get; set; } = false;
[JsonPropertyName("lockTables")]
public bool LockTables { get; set; } = false;
}
public class EventStoreConfig
{
[JsonPropertyName("path")]
public string Path { get; set; } = "./events";
[JsonPropertyName("maxFileSize")]
public long MaxFileSize { get; set; } = 50 * 1024 * 1024; // 50MB
[JsonPropertyName("retentionDays")]
public int RetentionDays { get; set; } = 90;
[JsonPropertyName("batchSize")]
public int BatchSize { get; set; } = 1000;
[JsonPropertyName("flushInterval")]
public int FlushInterval { get; set; } = 5; // seconds
}
}
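
A minimal sketch of loading this file, assuming `System.Text.Json` and the SDK's implicit usings (the real entry point is in Program.cs, whose diff is suppressed below, and may do this differently):

```csharp
using System.Text.Json;
using DatabaseSnapshotsService.Models;

// Deserialize config.json onto the POCOs above; the [JsonPropertyName]
// attributes map the camelCase keys to the PascalCase properties.
var json = File.ReadAllText("config.json");
var config = JsonSerializer.Deserialize<SnapshotConfiguration>(json)
             ?? new SnapshotConfiguration(); // fall back to built-in defaults

Console.WriteLine(config.SnapshotStorage.Path); // "./snapshots" unless overridden
```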

Models/ConfigurationValidation.cs Normal file

@@ -0,0 +1,252 @@
using System.ComponentModel.DataAnnotations;
using System.Text.RegularExpressions;
namespace DatabaseSnapshotsService.Models
{
public static class ConfigurationValidation
{
public static List<ValidationResult> ValidateConfiguration(SnapshotConfiguration config)
{
var errors = new List<ValidationResult>();
if (config == null)
{
errors.Add(new ValidationResult("Configuration cannot be null"));
return errors;
}
ValidateConnectionString(config.ConnectionString, errors);
ValidateBinlogReader(config.BinlogReader, errors);
ValidateSnapshotStorage(config.SnapshotStorage, errors);
ValidateEventStore(config.EventStore, errors);
ValidateSecurity(config.Security, errors);
return errors;
}
private static void ValidateConnectionString(string connectionString, List<ValidationResult> errors)
{
if (string.IsNullOrWhiteSpace(connectionString))
{
errors.Add(new ValidationResult("Connection string cannot be empty"));
return;
}
// Basic connection string validation
var requiredParams = new[] { "Server", "Database", "Uid", "Pwd" };
foreach (var param in requiredParams)
{
if (!connectionString.Contains($"{param}="))
{
errors.Add(new ValidationResult($"Connection string must contain {param} parameter"));
}
}
// Check for potentially dangerous patterns
var dangerousPatterns = new[]
{
@"--.*", // SQL comments
@";\s*DROP\s+", // DROP statements
@";\s*DELETE\s+", // DELETE statements
@";\s*TRUNCATE\s+", // TRUNCATE statements
@";\s*ALTER\s+", // ALTER statements
@";\s*CREATE\s+", // CREATE statements
};
foreach (var pattern in dangerousPatterns)
{
if (Regex.IsMatch(connectionString, pattern, RegexOptions.IgnoreCase))
{
errors.Add(new ValidationResult($"Connection string contains potentially dangerous SQL pattern: {pattern}"));
}
}
}
private static void ValidateBinlogReader(BinlogReaderConfig config, List<ValidationResult> errors)
{
if (string.IsNullOrWhiteSpace(config.Host))
{
errors.Add(new ValidationResult("Binlog reader host cannot be empty"));
}
if (config.Port < 1 || config.Port > 65535)
{
errors.Add(new ValidationResult("Binlog reader port must be between 1 and 65535"));
}
if (string.IsNullOrWhiteSpace(config.Username))
{
errors.Add(new ValidationResult("Binlog reader username cannot be empty"));
}
if (string.IsNullOrWhiteSpace(config.Password))
{
errors.Add(new ValidationResult("Binlog reader password cannot be empty"));
}
if (config.ServerId < 1)
{
errors.Add(new ValidationResult("Binlog reader server ID must be a positive integer"));
}
if (config.StartPosition < 4)
{
errors.Add(new ValidationResult("Binlog reader start position must be at least 4"));
}
if (config.HeartbeatInterval < 1 || config.HeartbeatInterval > 3600)
{
errors.Add(new ValidationResult("Binlog reader heartbeat interval must be between 1 and 3600 seconds"));
}
}
private static void ValidateSnapshotStorage(SnapshotStorageConfig config, List<ValidationResult> errors)
{
if (string.IsNullOrWhiteSpace(config.Path))
{
errors.Add(new ValidationResult("Snapshot storage path cannot be empty"));
}
else
{
// Check for path traversal attempts
var normalizedPath = Path.GetFullPath(config.Path);
if (normalizedPath.Contains("..") || normalizedPath.Contains("~"))
{
errors.Add(new ValidationResult("Snapshot storage path contains invalid characters"));
}
}
if (config.RetentionDays < 1 || config.RetentionDays > 3650) // Max 10 years
{
errors.Add(new ValidationResult("Snapshot retention days must be between 1 and 3650"));
}
if (config.MaxFileSize < 1024 * 1024 || config.MaxFileSize > 10L * 1024 * 1024 * 1024) // 1MB to 10GB
{
errors.Add(new ValidationResult("Snapshot max file size must be between 1MB and 10GB"));
}
// Validate dump optimizations
ValidateDumpOptimizations(config.DumpOptimizations, errors);
}
private static void ValidateDumpOptimizations(DumpOptimizationConfig config, List<ValidationResult> errors)
{
if (config.NetBufferLength < 1024 || config.NetBufferLength > 1048576) // 1KB to 1MB
{
errors.Add(new ValidationResult("Net buffer length must be between 1024 and 1048576 bytes"));
}
// Validate max allowed packet format (e.g., "1G", "512M", "1024K")
if (!string.IsNullOrWhiteSpace(config.MaxAllowedPacket))
{
var packetPattern = @"^\d+[KMGT]?$";
if (!Regex.IsMatch(config.MaxAllowedPacket, packetPattern))
{
errors.Add(new ValidationResult("Max allowed packet must be in format: number[K|M|G|T] (e.g., '1G', '512M')"));
}
}
// Validate table names in exclude/include lists
foreach (var table in config.ExcludeTables.Concat(config.IncludeTables))
{
if (string.IsNullOrWhiteSpace(table))
{
errors.Add(new ValidationResult("Table names cannot be empty"));
}
else if (!Regex.IsMatch(table, @"^[a-zA-Z_][a-zA-Z0-9_]*$"))
{
errors.Add(new ValidationResult($"Invalid table name format: {table}"));
}
}
// Check for conflicts between include and exclude tables
var conflicts = config.IncludeTables.Intersect(config.ExcludeTables, StringComparer.OrdinalIgnoreCase);
if (conflicts.Any())
{
errors.Add(new ValidationResult($"Table(s) cannot be both included and excluded: {string.Join(", ", conflicts)}"));
}
}
private static void ValidateEventStore(EventStoreConfig config, List<ValidationResult> errors)
{
if (string.IsNullOrWhiteSpace(config.Path))
{
errors.Add(new ValidationResult("Event store path cannot be empty"));
}
else
{
// Check the raw path for traversal attempts (Path.GetFullPath would resolve ".." away)
if (config.Path.Contains("..") || config.Path.Contains("~"))
{
errors.Add(new ValidationResult("Event store path contains invalid characters"));
}
}
if (config.MaxFileSize < 1024 * 1024 || config.MaxFileSize > 5L * 1024 * 1024 * 1024) // 1MB to 5GB
{
errors.Add(new ValidationResult("Event store max file size must be between 1MB and 5GB"));
}
if (config.RetentionDays < 1 || config.RetentionDays > 3650) // Max 10 years
{
errors.Add(new ValidationResult("Event store retention days must be between 1 and 3650"));
}
if (config.BatchSize < 1 || config.BatchSize > 10000)
{
errors.Add(new ValidationResult("Event store batch size must be between 1 and 10000"));
}
if (config.FlushInterval < 1 || config.FlushInterval > 300) // 1 second to 5 minutes
{
errors.Add(new ValidationResult("Event store flush interval must be between 1 and 300 seconds"));
}
}
private static void ValidateSecurity(SecurityConfig config, List<ValidationResult> errors)
{
if (config.Encryption && string.IsNullOrWhiteSpace(config.EncryptionKey))
{
errors.Add(new ValidationResult("Encryption key is required when encryption is enabled"));
}
if (!string.IsNullOrWhiteSpace(config.EncryptionKey))
{
if (config.EncryptionKey.Length < 32)
{
errors.Add(new ValidationResult("Encryption key must be at least 32 characters long"));
}
// Check for weak encryption keys
if (IsWeakEncryptionKey(config.EncryptionKey))
{
errors.Add(new ValidationResult("Encryption key is too weak. Use a stronger key with mixed characters"));
}
}
}
private static bool IsWeakEncryptionKey(string key)
{
// Check for common weak patterns
if (key.Length < 32) return true;
// Check for repeated characters
if (Regex.IsMatch(key, @"(.)\1{10,}")) return true;
// Check for sequential characters
if (Regex.IsMatch(key, @"(?:abc|bcd|cde|def|efg|fgh|ghi|hij|ijk|jkl|klm|lmn|mno|nop|opq|pqr|qrs|rst|stu|tuv|uvw|vwx|wxy|xyz)", RegexOptions.IgnoreCase)) return true;
// Check for common weak keys
var weakKeys = new[]
{
"password", "123456", "qwerty", "admin", "root", "secret", "key",
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"12345678901234567890123456789012"
};
return weakKeys.Any(wk => key.Equals(wk, StringComparison.OrdinalIgnoreCase));
}
}
}
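
A hypothetical call site for the validator (the actual wiring lives in Program.cs):

```csharp
using DatabaseSnapshotsService.Models;

// Validate a loaded configuration and report every problem before exiting.
var config = new SnapshotConfiguration(); // or one deserialized from config.json
var errors = ConfigurationValidation.ValidateConfiguration(config);

foreach (var error in errors)
{
    Console.Error.WriteLine(error.ErrorMessage);
}

if (errors.Count > 0)
{
    Environment.Exit(1);
}
```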

Models/DataModels.cs Normal file

@@ -0,0 +1,242 @@
using System.Text.Json.Serialization;
namespace DatabaseSnapshotsService.Models
{
public class SnapshotInfo
{
[JsonPropertyName("id")]
public int Id { get; set; }
[JsonPropertyName("type")]
public string Type { get; set; } = string.Empty;
[JsonPropertyName("timestamp")]
public long Timestamp { get; set; }
[JsonPropertyName("dataSize")]
public long DataSize { get; set; }
[JsonPropertyName("status")]
public string Status { get; set; } = string.Empty;
[JsonPropertyName("description")]
public string? Description { get; set; }
[JsonPropertyName("userId")]
public int? UserId { get; set; }
[JsonPropertyName("createdAt")]
public DateTime CreatedAt { get; set; }
[JsonPropertyName("filePath")]
public string FilePath { get; set; } = string.Empty;
[JsonPropertyName("checksum")]
public string Checksum { get; set; } = string.Empty;
// Binlog fields for incremental snapshots
[JsonPropertyName("binlogFile")]
public string? BinlogFile { get; set; }
[JsonPropertyName("binlogPosition")]
public long? BinlogPosition { get; set; }
[JsonPropertyName("parentSnapshotId")]
public int? ParentSnapshotId { get; set; }
[JsonPropertyName("incrementalBinlogStartFile")]
public string? IncrementalBinlogStartFile { get; set; }
[JsonPropertyName("incrementalBinlogStartPosition")]
public long? IncrementalBinlogStartPosition { get; set; }
[JsonPropertyName("incrementalBinlogEndFile")]
public string? IncrementalBinlogEndFile { get; set; }
[JsonPropertyName("incrementalBinlogEndPosition")]
public long? IncrementalBinlogEndPosition { get; set; }
}
public class DatabaseEvent
{
[JsonPropertyName("id")]
public long Id { get; set; }
[JsonPropertyName("timestamp")]
public long Timestamp { get; set; }
[JsonPropertyName("type")]
public string Type { get; set; } = string.Empty;
[JsonPropertyName("table")]
public string Table { get; set; } = string.Empty;
[JsonPropertyName("operation")]
public string Operation { get; set; } = string.Empty;
[JsonPropertyName("data")]
public string Data { get; set; } = string.Empty;
[JsonPropertyName("binlogPosition")]
public long BinlogPosition { get; set; }
[JsonPropertyName("serverId")]
public int ServerId { get; set; }
[JsonPropertyName("checksum")]
public string Checksum { get; set; } = string.Empty;
}
public class RecoveryPoint
{
[JsonPropertyName("id")]
public int Id { get; set; }
[JsonPropertyName("name")]
public string Name { get; set; } = string.Empty;
[JsonPropertyName("timestamp")]
public long Timestamp { get; set; }
[JsonPropertyName("description")]
public string? Description { get; set; }
[JsonPropertyName("eventCount")]
public long EventCount { get; set; }
[JsonPropertyName("createdAt")]
public DateTime CreatedAt { get; set; }
[JsonPropertyName("lastEventId")]
public long LastEventId { get; set; }
}
public class ServiceStatus
{
[JsonPropertyName("status")]
public string Status { get; set; } = "Unknown";
[JsonPropertyName("databaseConnected")]
public bool DatabaseConnected { get; set; }
[JsonPropertyName("binlogReaderStatus")]
public string BinlogReaderStatus { get; set; } = "Unknown";
[JsonPropertyName("lastEventProcessed")]
public long LastEventProcessed { get; set; }
[JsonPropertyName("totalEvents")]
public long TotalEvents { get; set; }
[JsonPropertyName("activeSnapshots")]
public int ActiveSnapshots { get; set; }
[JsonPropertyName("uptime")]
public TimeSpan Uptime { get; set; }
[JsonPropertyName("lastSnapshot")]
public DateTime? LastSnapshot { get; set; }
}
public class HealthStatus
{
[JsonPropertyName("isHealthy")]
public bool IsHealthy { get; set; }
[JsonPropertyName("errorMessage")]
public string? ErrorMessage { get; set; }
[JsonPropertyName("checks")]
public Dictionary<string, bool> Checks { get; set; } = new();
[JsonPropertyName("timestamp")]
public DateTime Timestamp { get; set; } = DateTime.UtcNow;
}
public class RestorePreview
{
[JsonPropertyName("targetTimestamp")]
public long TargetTimestamp { get; set; }
[JsonPropertyName("eventCount")]
public long EventCount { get; set; }
[JsonPropertyName("affectedTables")]
public List<string> AffectedTables { get; set; } = new();
[JsonPropertyName("estimatedDuration")]
public TimeSpan EstimatedDuration { get; set; }
[JsonPropertyName("snapshotId")]
public int? SnapshotId { get; set; }
[JsonPropertyName("warnings")]
public List<string> Warnings { get; set; } = new();
}
public class SnapshotMetadata
{
[JsonPropertyName("version")]
public string Version { get; set; } = "1.0";
[JsonPropertyName("createdAt")]
public DateTime CreatedAt { get; set; }
[JsonPropertyName("databaseVersion")]
public string DatabaseVersion { get; set; } = string.Empty;
[JsonPropertyName("tables")]
public List<TableInfo> Tables { get; set; } = new();
[JsonPropertyName("checksum")]
public string Checksum { get; set; } = string.Empty;
[JsonPropertyName("compression")]
public bool Compression { get; set; }
[JsonPropertyName("encryption")]
public bool Encryption { get; set; }
}
public class TableInfo
{
[JsonPropertyName("name")]
public string Name { get; set; } = string.Empty;
[JsonPropertyName("rowCount")]
public long RowCount { get; set; }
[JsonPropertyName("dataSize")]
public long DataSize { get; set; }
[JsonPropertyName("indexSize")]
public long IndexSize { get; set; }
[JsonPropertyName("checksum")]
public string Checksum { get; set; } = string.Empty;
}
public enum SnapshotType
{
Full,
Trading,
User,
Incremental
}
public enum EventOperation
{
Insert,
Update,
Delete,
Truncate
}
public enum SnapshotStatus
{
Creating,
Completed,
Failed,
Corrupted
}
}
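
For orientation, serializing one of these models (a sketch; the field values are made up):

```csharp
using System.Text.Json;
using DatabaseSnapshotsService.Models;

var info = new SnapshotInfo
{
    Id = 1,
    Type = nameof(SnapshotType.Full),
    Timestamp = DateTimeOffset.UtcNow.ToUnixTimeSeconds(),
    Status = nameof(SnapshotStatus.Completed),
    CreatedAt = DateTime.UtcNow,
    FilePath = "./snapshots/full_1.sql.lz4",
};

// The [JsonPropertyName] attributes yield camelCase keys:
// {"id":1,"type":"Full","timestamp":...,"status":"Completed",...}
Console.WriteLine(JsonSerializer.Serialize(info));
```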

Models/InputValidation.cs Normal file

@@ -0,0 +1,438 @@
using System.ComponentModel.DataAnnotations;
using System.Text.RegularExpressions;
namespace DatabaseSnapshotsService.Models
{
public static class InputValidation
{
// Validation patterns
private static readonly Regex ValidFileNamePattern = new(@"^[a-zA-Z0-9._-]+$", RegexOptions.Compiled);
private static readonly Regex ValidPathPattern = new(@"^[a-zA-Z0-9/._-]+$", RegexOptions.Compiled);
private static readonly Regex ValidNamePattern = new(@"^[a-zA-Z0-9\s._-]{1,100}$", RegexOptions.Compiled);
private static readonly Regex ValidDescriptionPattern = new(@"^[a-zA-Z0-9\s.,!?@#$%^&*()_+-=:;'""<>/\\|`~]{0,500}$", RegexOptions.Compiled);
private static readonly Regex ValidTableNamePattern = new(@"^[a-zA-Z][a-zA-Z0-9_]*$", RegexOptions.Compiled);
private static readonly Regex ValidOperationPattern = new(@"^(insert|update|delete|truncate|query)$", RegexOptions.IgnoreCase | RegexOptions.Compiled);
private static readonly Regex ValidEventTypePattern = new(@"^(binlog|status|error|info)$", RegexOptions.IgnoreCase | RegexOptions.Compiled);
// Dangerous patterns to detect
private static readonly Regex[] DangerousPatterns = {
new(@"\.\./", RegexOptions.Compiled), // Path traversal
new(@"\.\.\\", RegexOptions.Compiled), // Windows path traversal
new(@"[<>""'&]", RegexOptions.Compiled), // HTML/XML injection
new(@"(union|select|insert|update|delete|drop|create|alter|exec|execute|script|javascript|vbscript|onload|onerror)", RegexOptions.IgnoreCase | RegexOptions.Compiled), // SQL/script injection
new(@"(\x00|\x01|\x02|\x03|\x04|\x05|\x06|\x07|\x08|\x0B|\x0C|\x0E|\x0F|\x10|\x11|\x12|\x13|\x14|\x15|\x16|\x17|\x18|\x19|\x1A|\x1B|\x1C|\x1D|\x1E|\x1F)", RegexOptions.Compiled), // Control characters
};
public static class SnapshotValidation
{
public static ValidationResult ValidateSnapshotName(string name)
{
if (string.IsNullOrWhiteSpace(name))
{
return new ValidationResult("Snapshot name cannot be empty");
}
if (name.Length > 100)
{
return new ValidationResult("Snapshot name cannot exceed 100 characters");
}
if (!ValidNamePattern.IsMatch(name))
{
return new ValidationResult("Snapshot name contains invalid characters. Use only letters, numbers, spaces, dots, underscores, and hyphens");
}
if (ContainsDangerousPatterns(name))
{
return new ValidationResult("Snapshot name contains potentially dangerous content");
}
return ValidationResult.Success!;
}
public static ValidationResult ValidateSnapshotDescription(string? description)
{
if (string.IsNullOrWhiteSpace(description))
{
return ValidationResult.Success!; // Description is optional
}
if (description.Length > 500)
{
return new ValidationResult("Snapshot description cannot exceed 500 characters");
}
if (!ValidDescriptionPattern.IsMatch(description))
{
return new ValidationResult("Snapshot description contains invalid characters");
}
if (ContainsDangerousPatterns(description))
{
return new ValidationResult("Snapshot description contains potentially dangerous content");
}
return ValidationResult.Success!;
}
public static ValidationResult ValidateSnapshotId(int id)
{
if (id <= 0)
{
return new ValidationResult("Snapshot ID must be a positive integer");
}
return ValidationResult.Success!;
}
public static ValidationResult ValidateSnapshotType(string type)
{
if (string.IsNullOrWhiteSpace(type))
{
return new ValidationResult("Snapshot type cannot be empty");
}
var validTypes = new[] { "full", "incremental", "trading", "user" };
if (!validTypes.Contains(type.ToLowerInvariant()))
{
return new ValidationResult($"Invalid snapshot type. Must be one of: {string.Join(", ", validTypes)}");
}
return ValidationResult.Success!;
}
}
public static class EventValidation
{
public static ValidationResult ValidateTableName(string tableName)
{
if (string.IsNullOrWhiteSpace(tableName))
{
return new ValidationResult("Table name cannot be empty");
}
if (tableName.Length > 64)
{
return new ValidationResult("Table name cannot exceed 64 characters");
}
if (!ValidTableNamePattern.IsMatch(tableName))
{
return new ValidationResult("Table name contains invalid characters. Use only letters, numbers, and underscores, starting with a letter");
}
if (ContainsDangerousPatterns(tableName))
{
return new ValidationResult("Table name contains potentially dangerous content");
}
return ValidationResult.Success!;
}
public static ValidationResult ValidateOperation(string operation)
{
if (string.IsNullOrWhiteSpace(operation))
{
return new ValidationResult("Operation cannot be empty");
}
if (!ValidOperationPattern.IsMatch(operation))
{
return new ValidationResult("Invalid operation. Must be one of: insert, update, delete, truncate, query");
}
return ValidationResult.Success!;
}
public static ValidationResult ValidateEventType(string eventType)
{
if (string.IsNullOrWhiteSpace(eventType))
{
return new ValidationResult("Event type cannot be empty");
}
if (!ValidEventTypePattern.IsMatch(eventType))
{
return new ValidationResult("Invalid event type. Must be one of: binlog, status, error, info");
}
return ValidationResult.Success!;
}
public static ValidationResult ValidateEventData(string data)
{
if (string.IsNullOrWhiteSpace(data))
{
return new ValidationResult("Event data cannot be empty");
}
if (data.Length > 10000) // 10KB limit
{
return new ValidationResult("Event data cannot exceed 10KB");
}
if (ContainsDangerousPatterns(data))
{
return new ValidationResult("Event data contains potentially dangerous content");
}
return ValidationResult.Success!;
}
public static ValidationResult ValidateLimit(int limit)
{
if (limit <= 0)
{
return new ValidationResult("Limit must be a positive integer");
}
if (limit > 10000)
{
return new ValidationResult("Limit cannot exceed 10000");
}
return ValidationResult.Success!;
}
public static ValidationResult ValidateTimestamp(long timestamp)
{
if (timestamp < 0)
{
return new ValidationResult("Timestamp cannot be negative");
}
var maxTimestamp = DateTimeOffset.MaxValue.ToUnixTimeSeconds();
if (timestamp > maxTimestamp)
{
return new ValidationResult("Timestamp is too far in the future");
}
return ValidationResult.Success!;
}
}
public static class RecoveryValidation
{
public static ValidationResult ValidateRecoveryPointName(string name)
{
if (string.IsNullOrWhiteSpace(name))
{
return new ValidationResult("Recovery point name cannot be empty");
}
if (name.Length > 100)
{
return new ValidationResult("Recovery point name cannot exceed 100 characters");
}
if (!ValidNamePattern.IsMatch(name))
{
return new ValidationResult("Recovery point name contains invalid characters. Use only letters, numbers, spaces, dots, underscores, and hyphens");
}
if (ContainsDangerousPatterns(name))
{
return new ValidationResult("Recovery point name contains potentially dangerous content");
}
return ValidationResult.Success!;
}
public static ValidationResult ValidateRecoveryPointDescription(string? description)
{
if (string.IsNullOrWhiteSpace(description))
{
return ValidationResult.Success!; // Description is optional
}
if (description.Length > 500)
{
return new ValidationResult("Recovery point description cannot exceed 500 characters");
}
if (!ValidDescriptionPattern.IsMatch(description))
{
return new ValidationResult("Recovery point description contains invalid characters");
}
if (ContainsDangerousPatterns(description))
{
return new ValidationResult("Recovery point description contains potentially dangerous content");
}
return ValidationResult.Success!;
}
}
public static class FileValidation
{
public static ValidationResult ValidateFilePath(string filePath)
{
if (string.IsNullOrWhiteSpace(filePath))
{
return new ValidationResult("File path cannot be empty");
}
if (filePath.Length > 260) // Windows path limit
{
return new ValidationResult("File path cannot exceed 260 characters");
}
if (!ValidPathPattern.IsMatch(filePath))
{
return new ValidationResult("File path contains invalid characters");
}
if (ContainsDangerousPatterns(filePath))
{
return new ValidationResult("File path contains potentially dangerous content");
}
// Check the raw path for traversal attempts (Path.GetFullPath would resolve ".." away)
if (filePath.Contains("..") || filePath.Contains("~"))
{
return new ValidationResult("File path contains invalid path traversal characters");
}
return ValidationResult.Success!;
}
public static ValidationResult ValidateFileName(string fileName)
{
if (string.IsNullOrWhiteSpace(fileName))
{
return new ValidationResult("File name cannot be empty");
}
if (fileName.Length > 255)
{
return new ValidationResult("File name cannot exceed 255 characters");
}
if (!ValidFileNamePattern.IsMatch(fileName))
{
return new ValidationResult("File name contains invalid characters. Use only letters, numbers, dots, underscores, and hyphens");
}
if (ContainsDangerousPatterns(fileName))
{
return new ValidationResult("File name contains potentially dangerous content");
}
// Check for reserved Windows filenames
var reservedNames = new[]
{
"CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9",
"LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9"
};
var fileNameWithoutExtension = Path.GetFileNameWithoutExtension(fileName).ToUpperInvariant();
if (reservedNames.Contains(fileNameWithoutExtension))
{
return new ValidationResult("File name is a reserved system name");
}
return ValidationResult.Success!;
}
}
public static class DataValidation
{
public static ValidationResult ValidateDataContains(string dataContains)
{
if (string.IsNullOrWhiteSpace(dataContains))
{
return new ValidationResult("Data contains filter cannot be empty");
}
if (dataContains.Length > 1000)
{
return new ValidationResult("Data contains filter cannot exceed 1000 characters");
}
if (ContainsDangerousPatterns(dataContains))
{
return new ValidationResult("Data contains filter contains potentially dangerous content");
}
return ValidationResult.Success!;
}
public static ValidationResult ValidateOutputFile(string outputFile)
{
if (string.IsNullOrWhiteSpace(outputFile))
{
return new ValidationResult("Output file path cannot be empty");
}
return FileValidation.ValidateFilePath(outputFile);
}
}
private static bool ContainsDangerousPatterns(string input)
{
return DangerousPatterns.Any(pattern => pattern.IsMatch(input));
}
public static string SanitizeString(string input)
{
if (string.IsNullOrWhiteSpace(input))
{
return string.Empty;
}
// Remove control characters
var sanitized = Regex.Replace(input, @"[\x00-\x1F\x7F]", "");
// Trim whitespace
sanitized = sanitized.Trim();
return sanitized;
}
public static string SanitizeFileName(string fileName)
{
if (string.IsNullOrWhiteSpace(fileName))
{
return string.Empty;
}
// Remove invalid characters for file names
var sanitized = Regex.Replace(fileName, @"[<>:""/\\|?*\x00-\x1F]", "");
// Trim and limit length
sanitized = sanitized.Trim();
if (sanitized.Length > 255)
{
sanitized = sanitized.Substring(0, 255);
}
return sanitized;
}
public static string SanitizePath(string path)
{
if (string.IsNullOrWhiteSpace(path))
{
return string.Empty;
}
// Remove path traversal attempts; loop so sequences like "...." cannot
// survive a single replacement pass
var sanitized = path;
while (sanitized.Contains(".."))
{
sanitized = sanitized.Replace("..", "");
}
sanitized = sanitized.Replace("~", "");
// Remove invalid characters
sanitized = Regex.Replace(sanitized, @"[<>""|?\x00-\x1F]", "");
// Normalize path separators
sanitized = sanitized.Replace('\\', '/');
return sanitized;
}
}
}
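
A hypothetical call site, illustrating the convention that these methods return `ValidationResult.Success` (which is `null`, hence the `!` above) on success:

```csharp
using DatabaseSnapshotsService.Models;

var result = InputValidation.SnapshotValidation.ValidateSnapshotName("nightly-full");
if (result is not null) // non-null means validation failed
{
    Console.Error.WriteLine(result.ErrorMessage);
    return;
}

// Strip characters that are invalid in file names:
Console.WriteLine(InputValidation.SanitizeFileName("nightly:full*.sql")); // nightlyfull.sql
```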

Program.cs Normal file

File diff suppressed because it is too large.

README.md

@@ -0,0 +1,297 @@
# Database Snapshots Service
A command-line utility for creating, managing, and restoring database snapshots for MySQL. It provides features for full and incremental backups, historical binlog event reading, and point-in-time recovery.
## Features
* **Full and Incremental Snapshots**: Create full database backups or smaller, incremental snapshots that capture changes since the last backup.
* **Historical Binlog Reading**: Read and process historical binlog events from MySQL for auditing and analysis.
* **Point-in-Time Recovery**: Restore the database to a specific state using a combination of full snapshots and incremental event data.
* **File-Based Event Store**: All database events are captured and stored locally in a file-based event store for auditing and recovery.
* **Optimized Performance**: Includes optimizations for file handling, such as streaming for large files and parallel processing where appropriate.
* **Security**: Supports optional AES-256-CBC encryption for snapshot files to protect sensitive data.
* **Compression**: Supports optional LZ4 compression to reduce the storage footprint of snapshots.
* **Rich CLI**: A comprehensive command-line interface for interacting with the service.
## Prerequisites
- .NET 9.0 SDK or later (the project targets `net9.0`)
- MySQL 5.7+ or MariaDB 10.2+ with binlog enabled
- `mysqldump` and `mysqlbinlog` utilities installed
- Appropriate MySQL user permissions for:
- `SELECT`, `SHOW VIEW`, `RELOAD`, `LOCK TABLES`, `REPLICATION CLIENT`, `REPLICATION SLAVE` (for binlog reading)
- `SELECT`, `SHOW VIEW`, `RELOAD`, `LOCK TABLES` (for snapshots)
## Installation
1. Clone the repository:
```sh
git clone <repository-url>
cd DatabaseSnapshotsService
```
2. Build the project:
```sh
dotnet build
```
3. Create a configuration file:
```sh
cp config.example.json config.json
```
4. Edit `config.json` with your database settings (see Configuration section below).
## Project Structure
```
DatabaseSnapshotsService/
├── Program.cs # Main CLI entry point
├── DatabaseSnapshots.cs # Snapshot management logic
├── Models/
│ ├── Configuration.cs # Configuration classes
│ ├── ConfigurationValidation.cs # Configuration validation
│ ├── DataModels.cs # Data transfer objects
│ └── InputValidation.cs # Input validation logic
├── Services/
│ ├── BinlogReader.cs # MySQL binlog reading service
│ ├── EncryptionService.cs # File encryption/decryption
│ ├── EventStore.cs # Event storage service
│ ├── OptimizedFileService.cs # Optimized file operations
│ ├── RecoveryService.cs # Database recovery service
│ └── SnapshotService.cs # Snapshot creation and management
└── README.md # This file
```
## Configuration
The service is configured via a `config.json` file in the root directory. Here's a complete example with all available options:
```json
{
"connectionString": "Server=localhost;Database=trading;Uid=root;Pwd=password;",
"binlogReader": {
"host": "localhost",
"port": 3306,
"username": "binlog_reader",
"password": "secure_password",
"serverId": 999,
"startPosition": 4,
"heartbeatInterval": 30
},
"snapshotStorage": {
"path": "./snapshots",
"compression": true,
"retentionDays": 30,
"maxFileSize": 104857600,
"dumpOptimizations": {
"singleTransaction": true,
"includeRoutines": true,
"includeTriggers": true,
"includeEvents": true,
"extendedInsert": true,
"completeInsert": true,
"hexBlob": true,
"netBufferLength": 16384,
"maxAllowedPacket": "1G",
"excludeTables": [],
"includeTables": [],
"quick": true,
"orderByPrimary": true,
"flushLogs": true,
"masterData": 2,
"compact": false,
"noAutocommit": false,
"lockTables": false
}
},
"eventStore": {
"path": "./events",
"maxFileSize": 52428800,
"retentionDays": 90,
"batchSize": 1000,
"flushInterval": 5
},
"security": {
"encryption": false,
"encryptionKey": null
}
}
```
### Configuration Options
#### Root Level
- **connectionString** (string, required): MySQL connection string for the target database.
#### binlogReader
- **host** (string, default: "localhost"): MySQL server hostname
- **port** (int, default: 3306): MySQL server port
- **username** (string, default: "binlog_reader"): Username for binlog reader
- **password** (string, default: "secure_password"): Password for binlog reader
- **serverId** (int, default: 999): Server ID for binlog reader (must be unique)
- **startPosition** (long, default: 4): Starting position in binlog
- **heartbeatInterval** (int, default: 30): Interval in seconds to send heartbeats
#### snapshotStorage
- **path** (string, default: "./snapshots"): Directory where snapshots are stored
- **compression** (bool, default: true): Enable LZ4 compression for snapshots
- **retentionDays** (int, default: 30): Number of days to retain snapshots
- **maxFileSize** (long, default: 100MB): Maximum file size for snapshots in bytes
#### snapshotStorage.dumpOptimizations
- **singleTransaction** (bool, default: true): Use --single-transaction for consistent backups
- **includeRoutines** (bool, default: true): Include stored procedures and functions
- **includeTriggers** (bool, default: true): Include triggers
- **includeEvents** (bool, default: true): Include events
- **extendedInsert** (bool, default: true): Use extended INSERT syntax
- **completeInsert** (bool, default: true): Use complete INSERT syntax
- **hexBlob** (bool, default: true): Dump binary columns in hexadecimal
- **netBufferLength** (int, default: 16384): TCP/IP buffer length
- **maxAllowedPacket** (string, default: "1G"): Maximum allowed packet size
- **excludeTables** (string[], default: []): Tables to exclude from backup
- **includeTables** (string[], default: []): Tables to include in backup
- **quick** (bool, default: true): Use --quick option for faster dumps
- **orderByPrimary** (bool, default: true): Order by primary key
- **flushLogs** (bool, default: true): Flush logs before dump
- **masterData** (int, default: 2): --master-data option value
- **compact** (bool, default: false): Use compact dump format
- **noAutocommit** (bool, default: false): Disable autocommit
- **lockTables** (bool, default: false): Lock all tables before dump
#### eventStore
- **path** (string, default: "./events"): Directory where events are stored
- **maxFileSize** (long, default: 50MB): Maximum file size for event files
- **retentionDays** (int, default: 90): Number of days to retain events
- **batchSize** (int, default: 1000): Number of events to batch before writing
- **flushInterval** (int, default: 5): Interval in seconds to flush events
#### security
- **encryption** (bool, default: false): Enable AES-256-CBC encryption
- **encryptionKey** (string, optional): Base64-encoded encryption key (required if encryption is enabled)
## Usage
The service is used via the command line.
### Snapshots
Manage database snapshots.
* **Create a full snapshot:**
```sh
dotnet run -- snapshot -c create -n "MyFullSnapshot" --type "full"
```
* **Create an incremental snapshot:**
```sh
dotnet run -- snapshot -c create -n "MyIncrementalSnapshot" --type "incremental"
```
* **List all snapshots:**
```sh
dotnet run -- snapshot -c list
```
* **Show details for a specific snapshot:**
```sh
dotnet run -- snapshot -c show -n <SNAPSHOT_ID>
```
* **Delete a snapshot:**
```sh
dotnet run -- snapshot -c delete -n <SNAPSHOT_ID>
```
### Recovery
Manage recovery points.
* **Create a recovery point:**
```sh
dotnet run -- recovery -c create-point -n "MyRecoveryPoint"
```
* **List all recovery points:**
```sh
dotnet run -- recovery -c list-points
```
### Restore
Restore the database from a snapshot.
* **Restore from a snapshot:**
```sh
dotnet run -- restore --from-snapshot <SNAPSHOT_ID>
```
* **Perform a dry run of a restore:**
```sh
dotnet run -- restore --from-snapshot <SNAPSHOT_ID> --dry-run
```
### Events
Query events from incremental snapshots.
* **Query events from a snapshot:**
```sh
dotnet run -- events -s <SNAPSHOT_ID> --table "my_table" -l 50
```
* **Filter events by operation type:**
```sh
dotnet run -- events -s <SNAPSHOT_ID> --operation "insert"
```
### Binlog
Read historical binlog events.
* **Read historical binlog events:**
```sh
dotnet run -- binlog
```
### Configuration
Display the current configuration.
* **Show current configuration:**
```sh
dotnet run -- config
```
## Troubleshooting
### Common Issues
1. **Connection Failed**: Ensure MySQL is running and the connection string is correct
2. **Permission Denied**: Verify the MySQL user has the required permissions
3. **Binlog Not Found**: Ensure binlog is enabled in MySQL configuration
4. **Encryption Errors**: Check that the encryption key is valid Base64 and at least 32 characters
5. **File System Errors**: Ensure the service has write permissions to the configured paths
### Logs
The service outputs colored console messages:
- **Green**: Success messages
- **Yellow**: Warnings and informational messages
- **Red**: Error messages
- **White**: General output
- **Gray**: Debug information
### Performance Tips
- Use `quick` and `orderByPrimary` options for faster dumps
- Enable compression for large databases
- Adjust `batchSize` and `flushInterval` for optimal event store performance
## Security Considerations
- Store encryption keys securely and never commit them to version control
- Use strong, unique encryption keys (generate with `EncryptionService.GenerateEncryptionKey()`)
- Ensure proper file system permissions for snapshot and event storage
- Consider network security for remote MySQL connections
- Regularly rotate encryption keys and update passwords
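The key generator mentioned above can be driven from a small scratch program referencing this project (a sketch, not part of the shipped CLI):

```csharp
using DatabaseSnapshotsService.Services;

// Prints a Base64-encoded 256-bit AES key suitable for "security.encryptionKey".
Console.WriteLine(EncryptionService.GenerateEncryptionKey(256));
```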

Services/BinlogReader.cs Normal file

@@ -0,0 +1,345 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using MySqlConnector;
using DatabaseSnapshotsService.Models;
namespace DatabaseSnapshotsService.Services
{
public class BinlogReader
{
private readonly BinlogReaderConfig _config;
private readonly EventStore _eventStore;
private MySqlConnection _connection = null!; // assigned in ConnectAsync
private bool _isConnected;
private bool _isReading;
private CancellationTokenSource _cancellationTokenSource = null!; // assigned in StartReadingAsync
public event EventHandler<BinlogEvent>? EventReceived;
public event EventHandler<string>? LogMessage;
public BinlogReader(BinlogReaderConfig config, EventStore eventStore)
{
_config = config;
_eventStore = eventStore;
}
public async Task<bool> ConnectAsync()
{
try
{
LogMessage?.Invoke(this, $"Attempting to connect to {_config.Host}:{_config.Port}");
var connectionString = $"Server={_config.Host};Port={_config.Port};User ID={_config.Username};Password={_config.Password};";
_connection = new MySqlConnection(connectionString);
await _connection.OpenAsync();
_isConnected = true;
LogMessage?.Invoke(this, $"Connected to MySQL at {_config.Host}:{_config.Port}");
return true;
}
catch (Exception ex)
{
LogMessage?.Invoke(this, $"Connection failed: {ex.Message}");
return false;
}
}
public async Task StartReadingAsync(string? binlogFile = null, long position = 4)
{
if (!_isConnected)
{
throw new InvalidOperationException("Not connected to MySQL");
}
_cancellationTokenSource = new CancellationTokenSource();
_isReading = true;
try
{
LogMessage?.Invoke(this, $"Starting binlog read from position {position}");
// Get current binlog status
await ReadBinlogStatusAsync();
// Read binlog events
await ReadBinlogEventsAsync(binlogFile, position);
}
catch (OperationCanceledException)
{
LogMessage?.Invoke(this, "Binlog reading stopped");
}
catch (Exception ex)
{
LogMessage?.Invoke(this, $"Error reading binlog: {ex.Message}");
}
finally
{
_isReading = false;
}
}
public void StopReading()
{
_cancellationTokenSource?.Cancel();
_isReading = false;
}
public void Disconnect()
{
StopReading();
_connection?.Close();
_connection?.Dispose();
_isConnected = false;
}
private async Task ReadBinlogStatusAsync()
{
using var command = _connection.CreateCommand();
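// NOTE: newer MySQL releases (8.2+) prefer SHOW BINARY LOG STATUS; 8.4 removes SHOW MASTER STATUS.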
command.CommandText = "SHOW MASTER STATUS";
using var reader = await command.ExecuteReaderAsync();
if (await reader.ReadAsync())
{
var file = reader.GetString("File");
var position = reader.GetInt64("Position");
LogMessage?.Invoke(this, $"Current binlog: {file} at position {position}");
// Create a status event and store it
var statusEvent = new DatabaseEvent
{
Type = "status",
Table = "system",
Operation = "binlog_status",
Data = $"SHOW MASTER STATUS - File: {file}, Position: {position}",
BinlogPosition = (long)position,
ServerId = _config.ServerId
};
await _eventStore.StoreEventAsync(statusEvent);
// Create a binlog event for the status
var evt = new BinlogEvent
{
Timestamp = DateTimeOffset.UtcNow,
EventType = BinlogEventType.QUERY_EVENT,
LogPosition = (uint)position,
EventSize = 0,
Flags = 0,
EventData = Encoding.UTF8.GetBytes($"SHOW MASTER STATUS - File: {file}, Position: {position}"),
RawPacket = new byte[0]
};
EventReceived?.Invoke(this, evt);
}
}
private async Task ReadBinlogEventsAsync(string? binlogFile, long position)
{
try
{
// Get the binlog file to read from
var targetFile = binlogFile;
if (string.IsNullOrEmpty(targetFile))
{
targetFile = await GetCurrentBinlogFileAsync();
}
LogMessage?.Invoke(this, $"Reading binlog events from {targetFile} starting at position {position}");
using var command = _connection.CreateCommand();
command.CommandText = $"SHOW BINLOG EVENTS IN '{targetFile}' FROM {position}";
LogMessage?.Invoke(this, $"Executing query: {command.CommandText}");
using var reader = await command.ExecuteReaderAsync();
var eventCount = 0;
LogMessage?.Invoke(this, "Starting to read binlog events...");
while (await reader.ReadAsync())
{
if (_cancellationTokenSource.Token.IsCancellationRequested)
break;
var logName = reader.GetString("Log_name");
var logPos = reader.GetInt64("Pos");
var eventType = reader.GetString("Event_type");
var serverId = reader.GetInt32("Server_id");
var endLogPos = reader.GetInt64("End_log_pos");
var info = reader.GetString("Info");
// Only log every 100 events to reduce console output
eventCount++;
if (eventCount % 100 == 0)
{
LogMessage?.Invoke(this, $"Processed {eventCount} events...");
}
// Parse event type
var binlogEventType = ParseEventType(eventType);
// Create and store database event
var databaseEvent = new DatabaseEvent
{
Type = "binlog",
Table = ExtractTableName(info),
Operation = ExtractOperation(eventType, info),
Data = info,
BinlogPosition = logPos,
ServerId = serverId
};
await _eventStore.StoreEventAsync(databaseEvent);
// Create binlog event for real-time processing
var evt = new BinlogEvent
{
Timestamp = DateTimeOffset.UtcNow,
EventType = binlogEventType,
LogPosition = (uint)logPos,
EventSize = (uint)(endLogPos - logPos),
Flags = 0,
EventData = Encoding.UTF8.GetBytes(info),
RawPacket = new byte[0]
};
EventReceived?.Invoke(this, evt);
}
LogMessage?.Invoke(this, $"Completed reading binlog events. Total events processed: {eventCount}");
}
catch (Exception ex)
{
LogMessage?.Invoke(this, $"Error reading binlog events: {ex.Message}");
throw;
}
}
private async Task<string> GetCurrentBinlogFileAsync()
{
using var command = _connection.CreateCommand();
command.CommandText = "SHOW MASTER STATUS";
using var reader = await command.ExecuteReaderAsync();
if (await reader.ReadAsync())
{
return reader.GetString("File");
}
throw new InvalidOperationException("Could not determine current binlog file");
}
private string ExtractTableName(string info)
{
// Try to extract table name from various event types
if (info.Contains("table_id:"))
{
var match = System.Text.RegularExpressions.Regex.Match(info, @"table_id: \d+ \(([^)]+)\)");
if (match.Success)
{
return match.Groups[1].Value;
}
}
// For query events, try to extract table name from SQL
if (info.Contains("INSERT INTO") || info.Contains("UPDATE") || info.Contains("DELETE FROM"))
{
var match = System.Text.RegularExpressions.Regex.Match(info, @"(?:INSERT INTO|UPDATE|DELETE FROM)\s+(\w+)");
if (match.Success)
{
return match.Groups[1].Value;
}
}
return "unknown";
}
private string ExtractOperation(string eventType, string info)
{
return eventType.ToUpper() switch
{
"WRITE_ROWS_V1" => "insert",
"UPDATE_ROWS_V1" => "update",
"DELETE_ROWS_V1" => "delete",
"QUERY" => "query",
"ANNOTATE_ROWS" => "query",
_ => eventType.ToLower()
};
}
private BinlogEventType ParseEventType(string eventType)
{
return eventType.ToUpper() switch
{
"QUERY" => BinlogEventType.QUERY_EVENT,
"XID" => BinlogEventType.XID_EVENT,
"GTID" => BinlogEventType.GTID_EVENT,
"TABLE_MAP" => BinlogEventType.TABLE_MAP_EVENT,
"WRITE_ROWS_V1" => BinlogEventType.WRITE_ROWS_EVENT_V1,
"UPDATE_ROWS_V1" => BinlogEventType.UPDATE_ROWS_EVENT_V1,
"DELETE_ROWS_V1" => BinlogEventType.DELETE_ROWS_EVENT_V1,
"ANNOTATE_ROWS" => BinlogEventType.QUERY_EVENT, // Treat as query event
"ROTATE" => BinlogEventType.ROTATE_EVENT,
"FORMAT_DESCRIPTION" => BinlogEventType.FORMAT_DESCRIPTION_EVENT,
_ => BinlogEventType.UNKNOWN_EVENT
};
}
}
public class BinlogEvent
{
public DateTimeOffset Timestamp { get; set; }
public BinlogEventType EventType { get; set; }
public uint LogPosition { get; set; }
public uint EventSize { get; set; }
public ushort Flags { get; set; }
public byte[] EventData { get; set; }
public byte[] RawPacket { get; set; }
}
public enum BinlogEventType : byte
{
UNKNOWN_EVENT = 0x00,
START_EVENT_V3 = 0x01,
QUERY_EVENT = 0x02,
STOP_EVENT = 0x03,
ROTATE_EVENT = 0x04,
INTVAR_EVENT = 0x05,
LOAD_EVENT = 0x06,
SLAVE_EVENT = 0x07,
CREATE_FILE_EVENT = 0x08,
APPEND_BLOCK_EVENT = 0x09,
EXEC_LOAD_EVENT = 0x0A,
DELETE_FILE_EVENT = 0x0B,
NEW_LOAD_EVENT = 0x0C,
RAND_EVENT = 0x0D,
USER_VAR_EVENT = 0x0E,
FORMAT_DESCRIPTION_EVENT = 0x0F,
XID_EVENT = 0x10,
BEGIN_LOAD_QUERY_EVENT = 0x11,
EXECUTE_LOAD_QUERY_EVENT = 0x12,
TABLE_MAP_EVENT = 0x13,
WRITE_ROWS_EVENT_V0 = 0x14,
UPDATE_ROWS_EVENT_V0 = 0x15,
DELETE_ROWS_EVENT_V0 = 0x16,
WRITE_ROWS_EVENT_V1 = 0x17,
UPDATE_ROWS_EVENT_V1 = 0x18,
DELETE_ROWS_EVENT_V1 = 0x19,
INCIDENT_EVENT = 0x1A,
HEARTBEAT_EVENT = 0x1B,
IGNORABLE_EVENT = 0x1C,
ROWS_QUERY_EVENT = 0x1D,
WRITE_ROWS_EVENT_V2 = 0x1E,
UPDATE_ROWS_EVENT_V2 = 0x1F,
DELETE_ROWS_EVENT_V2 = 0x20,
GTID_EVENT = 0x21,
ANONYMOUS_GTID_EVENT = 0x22,
PREVIOUS_GTIDS_EVENT = 0x23
}
}
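
A hypothetical harness for the reader (the configuration values are placeholders):

```csharp
using DatabaseSnapshotsService.Models;
using DatabaseSnapshotsService.Services;

var store = new EventStore(new EventStoreConfig { Path = "./events" });
var reader = new BinlogReader(new BinlogReaderConfig { Host = "localhost" }, store);

reader.LogMessage += (_, message) => Console.WriteLine(message);
reader.EventReceived += (_, evt) =>
    Console.WriteLine($"{evt.EventType} at position {evt.LogPosition}");

if (await reader.ConnectAsync())
{
    await reader.StartReadingAsync(); // current binlog file, from position 4
    reader.Disconnect();
}
```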

Services/EncryptionService.cs Normal file

@@ -0,0 +1,324 @@
using System.Security.Cryptography;
using System.Text;
namespace DatabaseSnapshotsService.Services
{
public class EncryptionService
{
private readonly string _encryptionKey;
private readonly bool _encryptionEnabled;
public EncryptionService(string? encryptionKey, bool encryptionEnabled = false)
{
_encryptionEnabled = encryptionEnabled;
if (encryptionEnabled && string.IsNullOrWhiteSpace(encryptionKey))
{
throw new ArgumentException("Encryption key is required when encryption is enabled");
}
_encryptionKey = encryptionKey ?? string.Empty;
}
public bool IsEncryptionEnabled => _encryptionEnabled;
/// <summary>
/// Encrypts data using AES-256-CBC
/// </summary>
/// <param name="plaintext">Data to encrypt</param>
/// <returns>Encrypted data as base64 string</returns>
public async Task<string> EncryptAsync(string plaintext)
{
if (!_encryptionEnabled)
{
return plaintext;
}
if (string.IsNullOrEmpty(plaintext))
{
return string.Empty;
}
try
{
using var aes = Aes.Create();
aes.KeySize = 256;
aes.Mode = CipherMode.CBC;
aes.Padding = PaddingMode.PKCS7;
// Derive key from the provided encryption key
var key = DeriveKey(_encryptionKey, aes.KeySize / 8);
aes.Key = key;
// Generate random IV
aes.GenerateIV();
using var encryptor = aes.CreateEncryptor();
var plaintextBytes = Encoding.UTF8.GetBytes(plaintext);
var ciphertext = encryptor.TransformFinalBlock(plaintextBytes, 0, plaintextBytes.Length);
// Combine IV and ciphertext
var result = new byte[aes.IV.Length + ciphertext.Length];
Buffer.BlockCopy(aes.IV, 0, result, 0, aes.IV.Length);
Buffer.BlockCopy(ciphertext, 0, result, aes.IV.Length, ciphertext.Length);
return Convert.ToBase64String(result);
}
catch (Exception ex)
{
throw new InvalidOperationException($"Encryption failed: {ex.Message}", ex);
}
}
/// <summary>
/// Decrypts data using AES-256-CBC
/// </summary>
/// <param name="ciphertext">Encrypted data as base64 string</param>
/// <returns>Decrypted data</returns>
public async Task<string> DecryptAsync(string ciphertext)
{
if (!_encryptionEnabled)
{
return ciphertext;
}
if (string.IsNullOrEmpty(ciphertext))
{
return string.Empty;
}
try
{
var encryptedData = Convert.FromBase64String(ciphertext);
using var aes = Aes.Create();
aes.KeySize = 256;
aes.Mode = CipherMode.CBC;
aes.Padding = PaddingMode.PKCS7;
// Derive key from the provided encryption key
var key = DeriveKey(_encryptionKey, aes.KeySize / 8);
aes.Key = key;
// Extract IV and ciphertext
var ivSize = aes.IV.Length;
var ciphertextSize = encryptedData.Length - ivSize;
if (ciphertextSize < 0)
{
throw new ArgumentException("Invalid encrypted data format");
}
var iv = new byte[ivSize];
var ciphertextBytes = new byte[ciphertextSize];
Buffer.BlockCopy(encryptedData, 0, iv, 0, ivSize);
Buffer.BlockCopy(encryptedData, ivSize, ciphertextBytes, 0, ciphertextSize);
aes.IV = iv;
using var decryptor = aes.CreateDecryptor();
var plaintext = decryptor.TransformFinalBlock(ciphertextBytes, 0, ciphertextBytes.Length);
return Encoding.UTF8.GetString(plaintext);
}
catch (Exception ex)
{
throw new InvalidOperationException($"Decryption failed: {ex.Message}", ex);
}
}
/// <summary>
/// Encrypts a file
/// </summary>
/// <param name="sourceFilePath">Path to the source file</param>
/// <param name="destinationFilePath">Path for the encrypted file</param>
public async Task EncryptFileAsync(string sourceFilePath, string destinationFilePath)
{
if (!_encryptionEnabled)
{
// If encryption is disabled, just copy the file
File.Copy(sourceFilePath, destinationFilePath, true);
return;
}
try
{
using var sourceStream = File.OpenRead(sourceFilePath);
using var destinationStream = File.Create(destinationFilePath);
using var aes = Aes.Create();
aes.KeySize = 256;
aes.Mode = CipherMode.CBC;
aes.Padding = PaddingMode.PKCS7;
var key = DeriveKey(_encryptionKey, aes.KeySize / 8);
aes.Key = key;
aes.GenerateIV();
// Write IV to the beginning of the file
await destinationStream.WriteAsync(aes.IV);
using var encryptor = aes.CreateEncryptor();
using var cryptoStream = new CryptoStream(destinationStream, encryptor, CryptoStreamMode.Write);
await sourceStream.CopyToAsync(cryptoStream);
await cryptoStream.FlushFinalBlockAsync();
}
catch (Exception ex)
{
throw new InvalidOperationException($"File encryption failed: {ex.Message}", ex);
}
}
/// <summary>
/// Decrypts a file
/// </summary>
/// <param name="sourceFilePath">Path to the encrypted file</param>
/// <param name="destinationFilePath">Path for the decrypted file</param>
public async Task DecryptFileAsync(string sourceFilePath, string destinationFilePath)
{
if (!_encryptionEnabled)
{
// If encryption is disabled, just copy the file
File.Copy(sourceFilePath, destinationFilePath, true);
return;
}
try
{
using var sourceStream = File.OpenRead(sourceFilePath);
using var destinationStream = File.Create(destinationFilePath);
using var aes = Aes.Create();
aes.KeySize = 256;
aes.Mode = CipherMode.CBC;
aes.Padding = PaddingMode.PKCS7;
var key = DeriveKey(_encryptionKey, aes.KeySize / 8);
aes.Key = key;
// Read the IV from the beginning of the file; ReadExactlyAsync guarantees
// the full IV is consumed (ReadAsync may return fewer bytes than requested)
var iv = new byte[aes.BlockSize / 8];
await sourceStream.ReadExactlyAsync(iv);
aes.IV = iv;
using var decryptor = aes.CreateDecryptor();
using var cryptoStream = new CryptoStream(sourceStream, decryptor, CryptoStreamMode.Read);
await cryptoStream.CopyToAsync(destinationStream);
}
catch (Exception ex)
{
throw new InvalidOperationException($"File decryption failed: {ex.Message}", ex);
}
}
/// <summary>
/// Generates a secure encryption key
/// </summary>
/// <param name="keySize">Size of the key in bits (default: 256)</param>
/// <returns>Base64 encoded encryption key</returns>
public static string GenerateEncryptionKey(int keySize = 256)
{
if (keySize != 128 && keySize != 192 && keySize != 256)
{
throw new ArgumentException("Key size must be 128, 192, or 256 bits");
}
using var aes = Aes.Create();
aes.KeySize = keySize;
aes.GenerateKey();
return Convert.ToBase64String(aes.Key);
}
/// <summary>
/// Validates an encryption key
/// </summary>
/// <param name="key">The encryption key to validate</param>
/// <returns>True if the key is valid, false otherwise</returns>
public static bool ValidateEncryptionKey(string key)
{
if (string.IsNullOrWhiteSpace(key))
{
return false;
}
try
{
var keyBytes = Convert.FromBase64String(key);
return keyBytes.Length == 16 || keyBytes.Length == 24 || keyBytes.Length == 32; // 128, 192, or 256 bits
}
catch
{
return false;
}
}
/// <summary>
/// Derives a key from a password using PBKDF2
/// </summary>
/// <param name="password">The password to derive the key from</param>
/// <param name="keySize">Size of the derived key in bytes</param>
/// <returns>The derived key</returns>
private static byte[] DeriveKey(string password, int keySize)
{
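// NOTE: the salt is derived from the password itself so the same key can be
// re-derived at decryption time without storing a salt; the trade-off is that
// identical passwords always produce identical keys.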
// Create a deterministic salt from the password hash
using var sha256 = SHA256.Create();
var passwordHash = sha256.ComputeHash(Encoding.UTF8.GetBytes(password));
var salt = new byte[32];
Array.Copy(passwordHash, salt, Math.Min(passwordHash.Length, salt.Length));
using var pbkdf2 = new Rfc2898DeriveBytes(password, salt, 10000, HashAlgorithmName.SHA256);
return pbkdf2.GetBytes(keySize);
}
/// <summary>
/// Creates a checksum of encrypted data for integrity verification
/// </summary>
/// <param name="data">The data to create a checksum for</param>
/// <returns>SHA-256 hash of the data</returns>
public static string CreateChecksum(byte[] data)
{
using var sha256 = SHA256.Create();
var hash = sha256.ComputeHash(data);
return Convert.ToBase64String(hash);
}
/// <summary>
/// Creates a checksum of a string
/// </summary>
/// <param name="data">The string to create a checksum for</param>
/// <returns>SHA-256 hash of the string</returns>
public static string CreateChecksum(string data)
{
var bytes = Encoding.UTF8.GetBytes(data);
return CreateChecksum(bytes);
}
/// <summary>
/// Verifies the integrity of encrypted data
/// </summary>
/// <param name="data">The data to verify</param>
/// <param name="expectedChecksum">The expected checksum</param>
/// <returns>True if the checksum matches, false otherwise</returns>
public static bool VerifyChecksum(byte[] data, string expectedChecksum)
{
var actualChecksum = CreateChecksum(data);
return actualChecksum.Equals(expectedChecksum, StringComparison.Ordinal);
}
/// <summary>
/// Verifies the integrity of a string
/// </summary>
/// <param name="data">The string to verify</param>
/// <param name="expectedChecksum">The expected checksum</param>
/// <returns>True if the checksum matches, false otherwise</returns>
public static bool VerifyChecksum(string data, string expectedChecksum)
{
var actualChecksum = CreateChecksum(data);
return actualChecksum.Equals(expectedChecksum, StringComparison.Ordinal);
}
}
}
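
A round-trip sketch of the service (hypothetical driver code):

```csharp
using DatabaseSnapshotsService.Services;

var key = EncryptionService.GenerateEncryptionKey(); // Base64, 256-bit
var crypto = new EncryptionService(key, encryptionEnabled: true);

var ciphertext = await crypto.EncryptAsync("sensitive row data");
var plaintext = await crypto.DecryptAsync(ciphertext);

var checksum = EncryptionService.CreateChecksum(plaintext);
Console.WriteLine(EncryptionService.VerifyChecksum(plaintext, checksum)); // True
```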

Services/EventStore.cs Normal file

@@ -0,0 +1,140 @@
using System.Text;
using System.Text.Json;
using DatabaseSnapshotsService.Models;
namespace DatabaseSnapshotsService.Services
{
public class EventStore
{
private readonly EventStoreConfig _config;
private readonly string _eventsPath;
private readonly string _indexPath;
private readonly object _writeLock = new object();
private long _currentEventId = 0;
private string _currentEventFile = string.Empty;
private StreamWriter? _currentWriter;
private long _currentFileSize = 0;
public EventStore(EventStoreConfig config)
{
_config = config;
_eventsPath = Path.GetFullPath(config.Path);
_indexPath = Path.Combine(_eventsPath, "index");
// Ensure directories exist
Directory.CreateDirectory(_eventsPath);
Directory.CreateDirectory(_indexPath);
// Load next event ID and initialize current file
LoadNextEventId();
InitializeCurrentFile();
}
public Task<long> StoreEventAsync(DatabaseEvent evt)
{
    // No awaits occur inside the lock, so the method completes synchronously
    lock (_writeLock)
    {
        evt.Id = ++_currentEventId;
        evt.Timestamp = DateTimeOffset.UtcNow.ToUnixTimeSeconds();
        evt.Checksum = CalculateEventChecksum(evt);
        // Check if we need to rotate the file
        if (_currentFileSize > _config.MaxFileSize || _currentWriter == null)
        {
            RotateEventFile();
        }
        // Write event to current file
        var json = JsonSerializer.Serialize(evt);
        _currentWriter?.WriteLine(json);
        _currentWriter?.Flush();
        // Character count approximates the byte count; serialized JSON is ASCII-dominant
        _currentFileSize += json.Length + Environment.NewLine.Length;
        // Update index
        UpdateEventIndex(evt);
        return Task.FromResult(evt.Id);
    }
}
public async Task<long> GetLastEventIdAsync()
{
var eventFiles = Directory.GetFiles(_eventsPath, "events_*.json");
long lastId = 0;
foreach (var file in eventFiles.OrderByDescending(f => f))
{
var lines = await File.ReadAllLinesAsync(file);
if (lines.Length > 0)
{
var lastLine = lines.Last();
try
{
var lastEvent = JsonSerializer.Deserialize<DatabaseEvent>(lastLine);
if (lastEvent != null && lastEvent.Id > lastId)
{
lastId = lastEvent.Id;
}
}
catch (JsonException)
{
// Skip malformed JSON
continue;
}
}
}
return lastId;
}
private void LoadNextEventId()
{
var lastId = GetLastEventIdAsync().GetAwaiter().GetResult();
_currentEventId = lastId;
}
private void InitializeCurrentFile()
{
var timestamp = DateTimeOffset.UtcNow.ToUnixTimeSeconds();
_currentEventFile = Path.Combine(_eventsPath, $"events_{timestamp}.json");
_currentWriter = new StreamWriter(_currentEventFile, true, Encoding.UTF8);
_currentFileSize = 0;
}
private void RotateEventFile()
{
// Close current writer and delete file if empty
if (_currentWriter != null)
{
_currentWriter.Close();
if (!string.IsNullOrEmpty(_currentEventFile) && File.Exists(_currentEventFile))
{
var fileInfo = new FileInfo(_currentEventFile);
if (fileInfo.Length == 0)
{
File.Delete(_currentEventFile);
}
}
}
_currentEventFile = Path.Combine(_eventsPath, $"events_{DateTime.UtcNow:yyyyMMdd_HHmmss}_{_currentEventId}.json");
_currentWriter = new StreamWriter(_currentEventFile, append: false, Encoding.UTF8);
_currentFileSize = 0;
}
private string CalculateEventChecksum(DatabaseEvent evt)
{
var data = $"{evt.Id}{evt.Timestamp}{evt.Type}{evt.Table}{evt.Operation}{evt.Data}{evt.BinlogPosition}{evt.ServerId}";
using var sha256 = System.Security.Cryptography.SHA256.Create();
var hash = sha256.ComputeHash(Encoding.UTF8.GetBytes(data));
return Convert.ToHexString(hash).ToLower();
}
private void UpdateEventIndex(DatabaseEvent evt)
{
var indexFile = Path.Combine(_indexPath, Path.GetFileNameWithoutExtension(_currentEventFile) + ".idx");
var indexEntry = $"{evt.Id},{evt.Timestamp},{evt.Table},{evt.Operation}";
File.AppendAllText(indexFile, indexEntry + Environment.NewLine);
}
}
}

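A minimal usage sketch for EventStore (not part of the commit). The DatabaseEvent property names come from CalculateEventChecksum above, and EventStoreConfig.Path/MaxFileSize are the two fields this class reads; the property types are assumptions where the models are not visible here.

using DatabaseSnapshotsService.Models;
using DatabaseSnapshotsService.Services;

var store = new EventStore(new EventStoreConfig
{
    Path = "./events",               // eventStore.path in appsettings.test.json
    MaxFileSize = 50 * 1024 * 1024   // 50MB, eventStore.maxFileSize
});

// Id, Timestamp, and Checksum are assigned by StoreEventAsync itself.
var id = await store.StoreEventAsync(new DatabaseEvent
{
    Type = "row",                    // illustrative values only
    Table = "orders",
    Operation = "INSERT",
    Data = "{\"orderId\":42}",
    BinlogPosition = 4,
    ServerId = 999
});
Console.WriteLine($"stored event {id}, last id = {await store.GetLastEventIdAsync()}");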
172
Services/OptimizedFileService.cs Normal file
View File

@ -0,0 +1,172 @@
using EasyCompressor;
using System.Security.Cryptography;
using System.Text;
namespace DatabaseSnapshotsService.Services
{
public class OptimizedFileService
{
private const int DefaultBufferSize = 64 * 1024; // 64KB buffer
private const int LargeFileThreshold = 100 * 1024 * 1024; // 100MB
private const int ParallelThreshold = 50 * 1024 * 1024; // 50MB
private readonly LZ4Compressor _lz4 = new LZ4Compressor();
/// <summary>
/// Streaming LZ4 compression using EasyCompressor.LZ4
/// </summary>
public async Task CompressFileStreamingAsync(string sourcePath, string destinationPath, int bufferSize = DefaultBufferSize)
{
try
{
using var sourceStream = new FileStream(sourcePath, FileMode.Open, FileAccess.Read, FileShare.Read, bufferSize, FileOptions.Asynchronous);
using var destinationStream = new FileStream(destinationPath, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize, FileOptions.Asynchronous);
await Task.Run(() => _lz4.Compress(sourceStream, destinationStream));
}
catch (Exception ex)
{
throw new InvalidOperationException($"Streaming LZ4 compression failed: {ex.Message}", ex);
}
}
/// <summary>
/// Streaming LZ4 decompression using EasyCompressor.LZ4
/// </summary>
public async Task DecompressFileStreamingAsync(string sourcePath, string destinationPath, int bufferSize = DefaultBufferSize)
{
try
{
using var sourceStream = new FileStream(sourcePath, FileMode.Open, FileAccess.Read, FileShare.Read, bufferSize, FileOptions.Asynchronous);
using var destinationStream = new FileStream(destinationPath, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize, FileOptions.Asynchronous);
await Task.Run(() => _lz4.Decompress(sourceStream, destinationStream));
}
catch (Exception ex)
{
throw new InvalidOperationException($"Streaming LZ4 decompression failed: {ex.Message}", ex);
}
}
/// <summary>
/// Optimized checksum calculation using streaming
/// </summary>
public async Task<string> CalculateChecksumStreamingAsync(string filePath, int bufferSize = DefaultBufferSize)
{
try
{
using var sha256 = SHA256.Create();
using var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read, bufferSize, FileOptions.Asynchronous);
var buffer = new byte[bufferSize];
int bytesRead;
while ((bytesRead = await stream.ReadAsync(buffer, 0, buffer.Length)) > 0)
{
sha256.TransformBlock(buffer, 0, bytesRead, null, 0);
}
sha256.TransformFinalBlock(Array.Empty<byte>(), 0, 0);
return Convert.ToBase64String(sha256.Hash!);
}
catch (Exception ex)
{
throw new InvalidOperationException($"Checksum calculation failed: {ex.Message}", ex);
}
}
/// <summary>
/// Whole-file checksum for large files, hashed on a background thread
/// </summary>
public async Task<string> CalculateChecksumParallelAsync(string filePath)
{
try
{
var fileInfo = new FileInfo(filePath);
if (fileInfo.Length < LargeFileThreshold)
{
return await CalculateChecksumStreamingAsync(filePath);
}
// For large files, read the whole file up front and hash it on a background thread
using var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read, DefaultBufferSize, FileOptions.Asynchronous);
var fileBytes = new byte[fileInfo.Length];
var totalRead = 0;
// A single ReadAsync call may return fewer bytes than requested, so loop until the buffer is full
while (totalRead < fileBytes.Length)
{
    var bytesRead = await stream.ReadAsync(fileBytes, totalRead, fileBytes.Length - totalRead);
    if (bytesRead == 0)
        throw new EndOfStreamException($"Unexpected end of file while reading {filePath}");
    totalRead += bytesRead;
}
return await Task.Run(() =>
{
    using var sha256 = SHA256.Create();
    return Convert.ToBase64String(sha256.ComputeHash(fileBytes));
});
}
catch (Exception ex)
{
throw new InvalidOperationException($"Parallel checksum calculation failed: {ex.Message}", ex);
}
}
/// <summary>
/// Optimized file reading: buffered streaming for large files, direct read for small ones
/// </summary>
public async Task<byte[]> ReadFileOptimizedAsync(string filePath)
{
try
{
var fileInfo = new FileInfo(filePath);
if (fileInfo.Length > LargeFileThreshold)
{
// For very large files, use streaming
using var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read, DefaultBufferSize, FileOptions.Asynchronous);
using var memoryStream = new MemoryStream();
await stream.CopyToAsync(memoryStream);
return memoryStream.ToArray();
}
else
{
// For smaller files, use direct read
return await File.ReadAllBytesAsync(filePath);
}
}
catch (Exception ex)
{
throw new InvalidOperationException($"File reading failed: {ex.Message}", ex);
}
}
/// <summary>
/// Optimized file writing with buffering
/// </summary>
public async Task WriteFileOptimizedAsync(string filePath, byte[] data, int bufferSize = DefaultBufferSize)
{
try
{
using var stream = new FileStream(filePath, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize, FileOptions.Asynchronous);
await stream.WriteAsync(data, 0, data.Length);
await stream.FlushAsync();
}
catch (Exception ex)
{
throw new InvalidOperationException($"File writing failed: {ex.Message}", ex);
}
}
/// <summary>
/// Get optimal buffer size based on file size
/// </summary>
public static int GetOptimalBufferSize(long fileSize)
{
if (fileSize < 1024 * 1024) // < 1MB
return 8 * 1024; // 8KB
else if (fileSize < 100 * 1024 * 1024) // < 100MB
return 64 * 1024; // 64KB
else
return 256 * 1024; // 256KB
}
/// <summary>
/// Determine if parallel processing should be used
/// </summary>
public static bool ShouldUseParallelProcessing(long fileSize)
{
return fileSize >= ParallelThreshold;
}
}
}

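A minimal round-trip sketch for OptimizedFileService (not part of the commit), assuming a snapshot.sql file exists in the working directory:

using DatabaseSnapshotsService.Services;

var files = new OptimizedFileService();

// Compress, decompress, and confirm the checksum survives the round trip.
await files.CompressFileStreamingAsync("snapshot.sql", "snapshot.sql.lz4");
await files.DecompressFileStreamingAsync("snapshot.sql.lz4", "roundtrip.sql");

var before = await files.CalculateChecksumStreamingAsync("snapshot.sql");
var after = await files.CalculateChecksumStreamingAsync("roundtrip.sql");
Console.WriteLine(before == after); // True when the round trip is lossless

// Buffer sizing and the parallel-processing threshold are exposed for manual streaming.
var size = new FileInfo("snapshot.sql").Length;
Console.WriteLine($"{OptimizedFileService.GetOptimalBufferSize(size)} bytes, parallel: {OptimizedFileService.ShouldUseParallelProcessing(size)}");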
599
Services/RecoveryService.cs Normal file
View File

@ -0,0 +1,599 @@
using System.Text.Json;
using DatabaseSnapshotsService.Models;
using MySqlConnector;
using System.Text;
namespace DatabaseSnapshotsService.Services
{
public class RecoveryService
{
private readonly SnapshotConfiguration _config;
private readonly string _recoveryPointsPath;
private readonly string _eventsPath;
private readonly OptimizedFileService _fileService;
private readonly EncryptionService _encryptionService;
private int _nextPointId = 1;
public RecoveryService(SnapshotConfiguration config)
{
_config = config;
_recoveryPointsPath = Path.Combine(config.EventStore.Path, "recovery_points");
_eventsPath = config.EventStore.Path;
_fileService = new OptimizedFileService();
// Initialize encryption service - match SnapshotService pattern
_encryptionService = new EncryptionService(
config.Security.EncryptionKey,
config.Security.Encryption
);
// Ensure directories exist
Directory.CreateDirectory(_recoveryPointsPath);
Directory.CreateDirectory(_eventsPath);
// Load next ID from existing recovery points
LoadNextPointId();
}
public async Task<RecoveryPoint> CreateRecoveryPointAsync(string name, string? description = null)
{
// Check if name already exists
if (await GetRecoveryPointAsync(name) != null)
{
throw new ArgumentException($"Recovery point '{name}' already exists");
}
var point = new RecoveryPoint
{
Id = _nextPointId++,
Name = name,
Timestamp = DateTimeOffset.UtcNow.ToUnixTimeSeconds(),
Description = description,
CreatedAt = DateTime.UtcNow,
EventCount = await GetTotalEventCountAsync(),
LastEventId = await GetLastEventIdAsync()
};
// Save recovery point
await SaveRecoveryPointAsync(point);
return point;
}
public async Task<List<RecoveryPoint>> ListRecoveryPointsAsync()
{
var points = new List<RecoveryPoint>();
var pointFiles = Directory.GetFiles(_recoveryPointsPath, "*.json");
foreach (var file in pointFiles)
{
try
{
var jsonBytes = await _fileService.ReadFileOptimizedAsync(file);
var json = Encoding.UTF8.GetString(jsonBytes);
var point = JsonSerializer.Deserialize<RecoveryPoint>(json);
if (point != null)
{
points.Add(point);
}
}
catch (Exception ex)
{
Console.WriteLine($"Warning: Could not load recovery point from {file}: {ex.Message}");
}
}
return points.OrderByDescending(p => p.CreatedAt).ToList();
}
public async Task<RecoveryPoint?> GetRecoveryPointAsync(string name)
{
var pointFiles = Directory.GetFiles(_recoveryPointsPath, "*.json");
foreach (var file in pointFiles)
{
try
{
var jsonBytes = await _fileService.ReadFileOptimizedAsync(file);
var json = Encoding.UTF8.GetString(jsonBytes);
var point = JsonSerializer.Deserialize<RecoveryPoint>(json);
if (point?.Name == name)
{
return point;
}
}
catch (Exception ex)
{
Console.WriteLine($"Warning: Could not load recovery point from {file}: {ex.Message}");
}
}
return null;
}
public async Task<RestorePreview> PreviewRestoreAsync(long timestamp)
{
var preview = new RestorePreview
{
TargetTimestamp = timestamp,
EventCount = 0,
AffectedTables = new List<string>(),
EstimatedDuration = TimeSpan.Zero,
Warnings = new List<string>()
};
// Find the closest snapshot before the target timestamp
var snapshotService = new SnapshotService(_config);
var snapshots = await snapshotService.ListSnapshotsAsync();
var closestSnapshot = snapshots
.Where(s => s.Timestamp <= timestamp)
.OrderByDescending(s => s.Timestamp)
.FirstOrDefault();
if (closestSnapshot != null)
{
preview.SnapshotId = closestSnapshot.Id;
preview.Warnings.Add($"Will use snapshot {closestSnapshot.Id} as base");
}
else
{
preview.Warnings.Add("No suitable snapshot found - will restore from scratch");
}
// Count events that would be applied
var events = await GetEventsInRangeAsync(closestSnapshot?.Timestamp ?? 0, timestamp);
preview.EventCount = events.Count;
// Get affected tables
preview.AffectedTables = events
.Select(e => e.Table)
.Distinct()
.ToList();
// Estimate duration (rough calculation)
preview.EstimatedDuration = TimeSpan.FromSeconds(events.Count * 0.001); // 1ms per event
return preview;
}
public async Task RestoreAsync(long timestamp)
{
try
{
Console.WriteLine("=== PERFORMING ACTUAL RECOVERY ===");
Console.WriteLine("This will modify the target database!");
Console.WriteLine($"Starting restore to timestamp {timestamp}...");
// Find the target snapshot and build restore chain
var (targetSnapshot, restoreChain) = await BuildRestoreChainAsync(timestamp);
if (targetSnapshot == null)
{
throw new Exception($"No snapshot found for timestamp {timestamp}");
}
Console.WriteLine($"Target snapshot: {targetSnapshot.Id} ({targetSnapshot.Type})");
Console.WriteLine($"Restore chain: {restoreChain.Count} snapshots");
// Restore the full snapshot (first in chain)
var fullSnapshot = restoreChain.First();
Console.WriteLine($"Restoring full snapshot {fullSnapshot.Id}...");
await RestoreFromSnapshotAsync(fullSnapshot);
// Apply incremental snapshots in order
var incrementals = restoreChain.Skip(1).ToList();
if (incrementals.Any())
{
Console.WriteLine($"Applying {incrementals.Count} incremental snapshots...");
foreach (var incremental in incrementals)
{
Console.WriteLine($"Applying incremental snapshot {incremental.Id}...");
await ApplyIncrementalSnapshotAsync(incremental);
}
}
Console.WriteLine("Validating restore...");
await ValidateRestoreAsync();
Console.WriteLine("Database validation passed");
Console.WriteLine("Restore completed successfully");
}
catch (Exception ex)
{
Console.WriteLine($"Restore failed: {ex.Message}");
throw;
}
}
private async Task<(SnapshotInfo? TargetSnapshot, List<SnapshotInfo> RestoreChain)> BuildRestoreChainAsync(long timestamp)
{
var snapshotService = new SnapshotService(_config);
var snapshots = await snapshotService.ListSnapshotsAsync();
// Find the target snapshot (closest to timestamp)
var targetSnapshot = snapshots
.Where(s => s.Timestamp <= timestamp)
.OrderByDescending(s => s.Timestamp)
.FirstOrDefault();
if (targetSnapshot == null)
return (null, new List<SnapshotInfo>());
// Build restore chain: full snapshot + all incrementals up to target
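// Worked example: with a chain Full#1 <- Inc#2 <- Inc#3 and Inc#3 as the target,
// the walk below visits 3 -> 2 -> 1 via ParentSnapshotId, inserting at the front
// each time, so the chain comes out in apply order: [Full#1, Inc#2, Inc#3].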
var restoreChain = new List<SnapshotInfo>();
if (targetSnapshot.Type.Equals("Full", StringComparison.OrdinalIgnoreCase))
{
// Target is a full snapshot, just restore it
restoreChain.Add(targetSnapshot);
}
else
{
// Target is incremental, need to find the full snapshot and all incrementals
var current = targetSnapshot;
var chain = new List<SnapshotInfo>();
// Walk backwards to find the full snapshot
while (current != null)
{
chain.Insert(0, current); // Add to front to maintain order
if (current.Type.Equals("Full", StringComparison.OrdinalIgnoreCase))
break;
// Find parent snapshot
current = snapshots.FirstOrDefault(s => s.Id == current.ParentSnapshotId);
}
restoreChain = chain;
}
return (targetSnapshot, restoreChain);
}
private async Task ApplyIncrementalSnapshotAsync(SnapshotInfo incremental)
{
Console.WriteLine($"Applying incremental snapshot {incremental.Id}...");
if (!File.Exists(incremental.FilePath))
{
throw new FileNotFoundException($"Incremental snapshot file not found: {incremental.FilePath}");
}
// Read and decompress/decrypt the snapshot file
var sqlContent = await ReadSnapshotFileAsync(incremental.FilePath);
// Extract connection details from configuration
var connectionString = _config.ConnectionString;
var server = ExtractValue(connectionString, "Server") ?? "localhost";
var port = ExtractValue(connectionString, "Port") ?? "3306";
var database = ExtractValue(connectionString, "Database") ?? "trading_platform";
var userId = ExtractValue(connectionString, "Uid") ?? "root";
var password = ExtractValue(connectionString, "Pwd") ?? "";
// Build mysql command arguments
var mysqlArgs = $"-h{server} -P{port} -u{userId}";
if (!string.IsNullOrEmpty(password))
{
mysqlArgs += $" -p{password}";
}
mysqlArgs += $" {database}";
// Apply the SQL content using mysql via stdin
var startInfo = new System.Diagnostics.ProcessStartInfo
{
FileName = "mysql",
Arguments = mysqlArgs,
RedirectStandardInput = true,
RedirectStandardOutput = true,
RedirectStandardError = true,
UseShellExecute = false,
CreateNoWindow = true
};
using var process = System.Diagnostics.Process.Start(startInfo);
if (process != null)
{
// Write the SQL content to mysql stdin
await process.StandardInput.WriteAsync(sqlContent);
await process.StandardInput.FlushAsync();
process.StandardInput.Close();
string stdOut = await process.StandardOutput.ReadToEndAsync();
string stdErr = await process.StandardError.ReadToEndAsync();
await process.WaitForExitAsync();
if (process.ExitCode != 0)
{
Console.WriteLine($"[mysql stdout] {stdOut}");
Console.WriteLine($"[mysql stderr] {stdErr}");
throw new Exception($"mysql failed with exit code {process.ExitCode}");
}
}
}
private async Task<long> GetTotalEventCountAsync()
{
var eventFiles = Directory.GetFiles(_eventsPath, "events_*.json");
long totalCount = 0;
foreach (var file in eventFiles)
{
try
{
var lines = await File.ReadAllLinesAsync(file);
totalCount += lines.Length;
}
catch (Exception ex)
{
Console.WriteLine($"Warning: Could not read event file {file}: {ex.Message}");
}
}
return totalCount;
}
private async Task<long> GetLastEventIdAsync()
{
var eventFiles = Directory.GetFiles(_eventsPath, "events_*.json");
long lastId = 0;
foreach (var file in eventFiles.OrderByDescending(f => f))
{
try
{
var lines = await File.ReadAllLinesAsync(file);
if (lines.Length > 0)
{
var lastLine = lines.Last();
var lastEvent = JsonSerializer.Deserialize<DatabaseEvent>(lastLine);
if (lastEvent != null && lastEvent.Id > lastId)
{
lastId = lastEvent.Id;
}
}
}
catch (Exception ex)
{
Console.WriteLine($"Warning: Could not read event file {file}: {ex.Message}");
}
}
return lastId;
}
private async Task<List<DatabaseEvent>> GetEventsInRangeAsync(long fromTimestamp, long toTimestamp)
{
var events = new List<DatabaseEvent>();
var eventFiles = Directory.GetFiles(_eventsPath, "events_*.json");
foreach (var file in eventFiles)
{
try
{
var lines = await File.ReadAllLinesAsync(file);
foreach (var line in lines)
{
var evt = JsonSerializer.Deserialize<DatabaseEvent>(line);
if (evt != null && evt.Timestamp >= fromTimestamp && evt.Timestamp <= toTimestamp)
{
events.Add(evt);
}
}
}
catch (Exception ex)
{
Console.WriteLine($"Warning: Could not read event file {file}: {ex.Message}");
}
}
return events.OrderBy(e => e.Timestamp).ToList();
}
private async Task RestoreFromSnapshotAsync(SnapshotInfo snapshot)
{
Console.WriteLine($"Restoring database from snapshot {snapshot.Id}...");
if (!File.Exists(snapshot.FilePath))
{
throw new FileNotFoundException($"Snapshot file not found: {snapshot.FilePath}");
}
// Use programmatic restoration (handles encryption/compression better)
await RestoreProgrammaticallyAsync(snapshot);
}
private async Task RestoreProgrammaticallyAsync(SnapshotInfo snapshot)
{
// Read and decompress the snapshot file
var sqlContent = await ReadSnapshotFileAsync(snapshot.FilePath);
// Extract connection details from configuration
var connectionString = _config.ConnectionString;
var server = ExtractValue(connectionString, "Server") ?? "localhost";
var port = ExtractValue(connectionString, "Port") ?? "3306";
var database = ExtractValue(connectionString, "Database") ?? "trading_platform";
var userId = ExtractValue(connectionString, "Uid") ?? "root";
var password = ExtractValue(connectionString, "Pwd") ?? "";
// Build mysql command arguments
var mysqlArgs = $"-h{server} -P{port} -u{userId}";
if (!string.IsNullOrEmpty(password))
{
mysqlArgs += $" -p{password}";
}
mysqlArgs += $" {database}";
// Use mysql command to restore the database
var startInfo = new System.Diagnostics.ProcessStartInfo
{
FileName = "mysql",
Arguments = mysqlArgs,
RedirectStandardInput = true,
RedirectStandardOutput = true,
RedirectStandardError = true,
UseShellExecute = false,
CreateNoWindow = true
};
using var process = new System.Diagnostics.Process { StartInfo = startInfo };
process.ErrorDataReceived += (sender, e) =>
{
if (!string.IsNullOrEmpty(e.Data))
{
Console.WriteLine($"[mysql restore] {e.Data}");
}
};
process.Start();
process.BeginErrorReadLine();
// Send the SQL content to mysql via stdin
using var writer = process.StandardInput;
await writer.WriteAsync(sqlContent);
await writer.FlushAsync();
writer.Close();
await process.WaitForExitAsync();
if (process.ExitCode != 0)
{
throw new Exception($"mysql restore failed with exit code {process.ExitCode}");
}
Console.WriteLine("Database restore completed successfully using mysql command");
}
private async Task<string> ReadSnapshotFileAsync(string filePath)
{
try
{
// Check if file is encrypted and compressed
if (filePath.EndsWith(".lz4.enc"))
{
// First decrypt, then decompress
var decryptedPath = filePath.Replace(".lz4.enc", ".lz4.tmp");
var decompressedPath = filePath.Replace(".lz4.enc", ".sql.tmp");
try
{
// Decrypt the file using the instance field
await _encryptionService.DecryptFileAsync(filePath, decryptedPath);
// Decompress the decrypted file
await _fileService.DecompressFileStreamingAsync(decryptedPath, decompressedPath);
// Read the final SQL content
var content = await _fileService.ReadFileOptimizedAsync(decompressedPath);
return Encoding.UTF8.GetString(content);
}
finally
{
// Clean up temporary files
if (File.Exists(decryptedPath)) File.Delete(decryptedPath);
if (File.Exists(decompressedPath)) File.Delete(decompressedPath);
}
}
else if (filePath.EndsWith(".lz4"))
{
// Only compressed, not encrypted
var tempPath = filePath.Replace(".lz4", ".tmp");
await _fileService.DecompressFileStreamingAsync(filePath, tempPath);
var content = await _fileService.ReadFileOptimizedAsync(tempPath);
File.Delete(tempPath); // Clean up temp file
return Encoding.UTF8.GetString(content);
}
else if (filePath.EndsWith(".enc"))
{
// Only encrypted, not compressed
var tempPath = filePath.Replace(".enc", ".tmp");
await _encryptionService.DecryptFileAsync(filePath, tempPath);
var content = await _fileService.ReadFileOptimizedAsync(tempPath);
File.Delete(tempPath); // Clean up temp file
return Encoding.UTF8.GetString(content);
}
else
{
// Plain text file
var content = await _fileService.ReadFileOptimizedAsync(filePath);
return Encoding.UTF8.GetString(content);
}
}
catch (Exception ex)
{
throw new InvalidOperationException($"Failed to read snapshot file {filePath}: {ex.Message}", ex);
}
}
private async Task ValidateRestoreAsync()
{
// Basic validation - check if database is accessible and has expected data
using var connection = new MySqlConnection(_config.ConnectionString);
await connection.OpenAsync();
// Check if we can query the database
using var command = new MySqlCommand("SELECT COUNT(*) FROM information_schema.tables WHERE table_schema = DATABASE()", connection);
var tableCount = await command.ExecuteScalarAsync();
if (Convert.ToInt32(tableCount) == 0)
{
throw new Exception("Database validation failed: No tables found after restore");
}
}
private async Task SaveRecoveryPointAsync(RecoveryPoint point)
{
var pointFile = Path.Combine(_recoveryPointsPath, $"{point.Name}.json");
var json = JsonSerializer.Serialize(point, new JsonSerializerOptions { WriteIndented = true });
var jsonBytes = Encoding.UTF8.GetBytes(json);
await _fileService.WriteFileOptimizedAsync(pointFile, jsonBytes);
}
private void LoadNextPointId()
{
    // Recovery points are saved as "{name}.json", so the next ID must be
    // read from the file contents rather than parsed from the file name
    var pointFiles = Directory.GetFiles(_recoveryPointsPath, "*.json");
    var maxId = 0;
    foreach (var file in pointFiles)
    {
        try
        {
            var point = JsonSerializer.Deserialize<RecoveryPoint>(File.ReadAllText(file));
            if (point != null && point.Id > maxId)
            {
                maxId = (int)point.Id;
            }
        }
        catch (JsonException)
        {
            // Skip malformed recovery point files
        }
    }
    _nextPointId = maxId + 1;
}
private string? ExtractValue(string connectionString, string key)
{
var pairs = connectionString.Split(';');
foreach (var pair in pairs)
{
var keyValue = pair.Split('=');
if (keyValue.Length == 2 && keyValue[0].Trim().Equals(key, StringComparison.OrdinalIgnoreCase))
{
return keyValue[1].Trim();
}
}
return null;
}
}
}

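A minimal usage sketch for RecoveryService (not part of the commit); the default SnapshotConfiguration is a placeholder, and a real restore additionally needs a reachable server plus the mysql client on PATH.

using DatabaseSnapshotsService.Models;
using DatabaseSnapshotsService.Services;

var recovery = new RecoveryService(new SnapshotConfiguration());

var point = await recovery.CreateRecoveryPointAsync("before_migration", "pre-deploy safety net");
Console.WriteLine($"recovery point {point.Id} @ {point.Timestamp}, {point.EventCount} events");

// Dry-run first: PreviewRestoreAsync reports the base snapshot, event count,
// and affected tables without touching the target database.
var preview = await recovery.PreviewRestoreAsync(point.Timestamp);
Console.WriteLine($"{preview.EventCount} events across {preview.AffectedTables.Count} tables");
foreach (var warning in preview.Warnings)
    Console.WriteLine($"warning: {warning}");

// RestoreAsync rewrites the target database; uncomment deliberately.
// await recovery.RestoreAsync(point.Timestamp);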
633
Services/SnapshotService.cs Normal file
View File

@ -0,0 +1,633 @@
using System.ComponentModel.DataAnnotations;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using DatabaseSnapshotsService.Models;
using MySqlConnector;
using DatabaseSnapshotsService.Services;
namespace DatabaseSnapshotsService.Services
{
public class SnapshotService
{
private readonly SnapshotConfiguration _config;
private readonly string _snapshotsPath;
private readonly string _metadataPath;
private readonly EncryptionService _encryptionService;
private readonly OptimizedFileService _fileService;
private int _nextId = 1;
public SnapshotService(SnapshotConfiguration config)
{
_config = config;
_snapshotsPath = Path.GetFullPath(config.SnapshotStorage.Path);
_metadataPath = Path.Combine(_snapshotsPath, "metadata");
// Initialize encryption service - match RecoveryService pattern
_encryptionService = new EncryptionService(
config.Security.EncryptionKey,
config.Security.Encryption
);
_fileService = new OptimizedFileService();
// Ensure directories exist
Directory.CreateDirectory(_snapshotsPath);
Directory.CreateDirectory(_metadataPath);
// Load next ID from existing snapshots
LoadNextId();
}
public async Task<SnapshotInfo> CreateSnapshotAsync(string name, string? description = null, int? userId = null)
{
// Validate and sanitize inputs
var nameValidation = InputValidation.SnapshotValidation.ValidateSnapshotName(name);
if (nameValidation != ValidationResult.Success)
{
throw new ArgumentException($"Invalid snapshot name: {nameValidation.ErrorMessage}");
}
var sanitizedName = InputValidation.SanitizeString(name);
var sanitizedDescription = InputValidation.SanitizeString(description ?? string.Empty);
if (!string.IsNullOrEmpty(sanitizedDescription))
{
var descriptionValidation = InputValidation.SnapshotValidation.ValidateSnapshotDescription(sanitizedDescription);
if (descriptionValidation != ValidationResult.Success)
{
throw new ArgumentException($"Invalid snapshot description: {descriptionValidation.ErrorMessage}");
}
}
var snapshot = new SnapshotInfo
{
Id = _nextId++,
Timestamp = DateTimeOffset.UtcNow.ToUnixTimeSeconds(),
Status = SnapshotStatus.Creating.ToString(),
Description = string.IsNullOrEmpty(sanitizedDescription) ? sanitizedName : sanitizedDescription,
UserId = userId,
CreatedAt = DateTime.UtcNow
};
try
{
await CreateFullSnapshotAsync(snapshot);
await SaveSnapshotMetadataAsync(snapshot);
snapshot.Status = SnapshotStatus.Completed.ToString();
await SaveSnapshotMetadataAsync(snapshot);
return snapshot;
}
catch (Exception)
{
snapshot.Status = SnapshotStatus.Failed.ToString();
await SaveSnapshotMetadataAsync(snapshot);
throw;
}
}
public async Task<List<SnapshotInfo>> ListSnapshotsAsync(string? type = null, int limit = 50)
{
var snapshots = new List<SnapshotInfo>();
var metadataFiles = Directory.GetFiles(_metadataPath, "*.json");
foreach (var file in metadataFiles)
{
try
{
var json = await File.ReadAllTextAsync(file);
var snapshot = JsonSerializer.Deserialize<SnapshotInfo>(json);
if (snapshot != null && (string.IsNullOrEmpty(type) || snapshot.Type.Equals(type, StringComparison.OrdinalIgnoreCase)))
{
snapshots.Add(snapshot);
}
}
catch (Exception ex)
{
Console.WriteLine($"Warning: Could not load snapshot metadata from {file}: {ex.Message}");
}
}
return snapshots.OrderByDescending(s => s.CreatedAt).Take(limit).ToList();
}
public async Task<SnapshotInfo?> GetSnapshotAsync(int id)
{
var metadataFile = Path.Combine(_metadataPath, $"{id}.json");
if (!File.Exists(metadataFile))
return null;
try
{
var json = await File.ReadAllTextAsync(metadataFile);
return JsonSerializer.Deserialize<SnapshotInfo>(json);
}
catch (Exception ex)
{
Console.WriteLine($"Error loading snapshot {id}: {ex.Message}");
return null;
}
}
public async Task DeleteSnapshotAsync(int id)
{
var snapshot = await GetSnapshotAsync(id);
if (snapshot == null)
throw new ArgumentException($"Snapshot {id} not found");
// Delete snapshot file
if (File.Exists(snapshot.FilePath))
{
File.Delete(snapshot.FilePath);
}
// Delete metadata
var metadataFile = Path.Combine(_metadataPath, $"{id}.json");
if (File.Exists(metadataFile))
{
File.Delete(metadataFile);
}
}
private async Task CreateFullSnapshotAsync(SnapshotInfo snapshot)
{
var fileName = $"snapshot_{snapshot.Id}_{snapshot.Timestamp}.sql";
var filePath = Path.Combine(_snapshotsPath, fileName);
snapshot.FilePath = filePath;
// Get current binlog status before creating snapshot
var binlogStatus = await GetCurrentBinlogStatusAsync();
snapshot.BinlogFile = binlogStatus.File;
snapshot.BinlogPosition = binlogStatus.Position;
snapshot.Type = "Full";
// Extract connection details from configuration
var connectionString = _config.ConnectionString;
var server = ExtractValue(connectionString, "Server") ?? "localhost";
var port = ExtractValue(connectionString, "Port") ?? "3306";
var database = ExtractValue(connectionString, "Database") ?? "trading_platform";
var dbUserId = ExtractValue(connectionString, "Uid") ?? "root";
var password = ExtractValue(connectionString, "Pwd") ?? "";
// Build mariadb-dump command arguments with optimizations
var dumpArgs = $"-h{server} -P{port} -u{dbUserId}";
if (!string.IsNullOrEmpty(password))
{
dumpArgs += $" -p{password}";
}
// Add performance and consistency optimizations based on configuration
var optimizations = _config.SnapshotStorage.DumpOptimizations;
if (optimizations.SingleTransaction)
{
dumpArgs += $" --single-transaction";
}
if (optimizations.IncludeRoutines)
{
dumpArgs += $" --routines";
}
if (optimizations.IncludeTriggers)
{
dumpArgs += $" --triggers";
}
if (optimizations.IncludeEvents)
{
dumpArgs += $" --events";
}
if (optimizations.Quick)
{
dumpArgs += $" --quick";
}
if (optimizations.OrderByPrimary)
{
dumpArgs += $" --order-by-primary";
}
if (optimizations.FlushLogs)
{
dumpArgs += $" --flush-logs";
}
if (optimizations.MasterData > 0)
{
dumpArgs += $" --master-data={optimizations.MasterData}";
}
if (optimizations.Compact)
{
dumpArgs += $" --compact";
}
if (optimizations.NoAutocommit)
{
dumpArgs += $" --no-autocommit";
}
if (optimizations.LockTables)
{
dumpArgs += $" --lock-tables";
}
else
{
    dumpArgs += $" --skip-lock-tables";
}
dumpArgs += $" --add-drop-database";
dumpArgs += $" --add-drop-table";
dumpArgs += $" --create-options";
if (optimizations.ExtendedInsert)
{
dumpArgs += $" --extended-insert";
}
if (optimizations.CompleteInsert)
{
dumpArgs += $" --complete-insert";
}
if (optimizations.HexBlob)
{
dumpArgs += $" --hex-blob";
}
dumpArgs += $" --net_buffer_length={optimizations.NetBufferLength}";
dumpArgs += $" --max_allowed_packet={optimizations.MaxAllowedPacket}";
if (optimizations.ExcludeTables.Any())
{
foreach (var table in optimizations.ExcludeTables)
{
dumpArgs += $" --ignore-table={database}.{table}";
}
}
if (optimizations.IncludeTables.Any())
{
    // mariadb-dump expects the database name first; --tables marks the
    // remaining name arguments as table names
    dumpArgs += $" {database} --tables";
    foreach (var table in optimizations.IncludeTables)
    {
        dumpArgs += $" {table}";
    }
}
else
{
dumpArgs += $" --databases {database}";
}
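// Illustrative assembled command under the appsettings.test.json defaults
// (one line in practice; wrapped here for readability):
//   mariadb-dump -hlocalhost -P3306 -uroot -p1 --single-transaction --routines
//     --triggers --events --quick --order-by-primary --flush-logs --master-data=2
//     --skip-lock-tables --add-drop-database --add-drop-table --create-options
//     --extended-insert --complete-insert --hex-blob --net_buffer_length=16384
//     --max_allowed_packet=1G --databases trading_platform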
// Use mariadb-dump to create a complete database dump
var startInfo = new System.Diagnostics.ProcessStartInfo
{
FileName = "mariadb-dump",
Arguments = dumpArgs,
RedirectStandardOutput = true,
RedirectStandardError = true,
UseShellExecute = false,
CreateNoWindow = true
};
using var process = new System.Diagnostics.Process { StartInfo = startInfo };
using var outputFile = new StreamWriter(filePath);
var outputComplete = new TaskCompletionSource<bool>();
var errorComplete = new TaskCompletionSource<bool>();
var errorMessages = new List<string>();
var startTime = DateTime.UtcNow;
Console.WriteLine($"Starting mariadb-dump with optimized settings...");
Console.WriteLine($"Command: mariadb-dump {dumpArgs}");
process.OutputDataReceived += (sender, e) =>
{
if (e.Data == null)
{
outputComplete.SetResult(true);
}
else
{
outputFile.WriteLine(e.Data);
outputFile.Flush(); // Ensure data is written immediately
// Report progress for large dumps
if (e.Data.StartsWith("-- Dump completed"))
{
var duration = DateTime.UtcNow - startTime;
Console.WriteLine($"Dump completed in {duration.TotalSeconds:F1} seconds");
}
}
};
process.ErrorDataReceived += (sender, e) =>
{
if (e.Data == null)
{
errorComplete.SetResult(true);
}
else
{
errorMessages.Add(e.Data);
Console.WriteLine($"[mariadb-dump] {e.Data}");
}
};
process.Start();
process.BeginOutputReadLine();
process.BeginErrorReadLine();
// Wait for both output and error streams to complete
await Task.WhenAll(outputComplete.Task, errorComplete.Task);
await process.WaitForExitAsync();
// Release the dump file's write handle before the file is hashed, compressed, or deleted below
outputFile.Close();
if (process.ExitCode != 0)
{
var errorSummary = string.Join("; ", errorMessages.Take(5)); // Show first 5 errors
throw new Exception($"mariadb-dump failed with exit code {process.ExitCode}. Errors: {errorSummary}");
}
var totalDuration = DateTime.UtcNow - startTime;
Console.WriteLine($"Database dump completed successfully in {totalDuration.TotalSeconds:F1} seconds");
// Calculate checksum
snapshot.Checksum = await CalculateFileChecksumAsync(filePath);
snapshot.DataSize = new FileInfo(filePath).Length;
// Compress if enabled
if (_config.SnapshotStorage.Compression)
{
await CompressFileAsync(filePath);
snapshot.DataSize = new FileInfo(filePath + ".lz4").Length;
snapshot.FilePath = filePath + ".lz4";
}
// Encrypt if enabled
if (_config.Security.Encryption && !string.IsNullOrEmpty(_config.Security.EncryptionKey))
{
var originalFilePath = snapshot.FilePath;
await EncryptFileAsync(snapshot.FilePath, _config.Security.EncryptionKey);
// Update the file path to point to the encrypted file
snapshot.FilePath = originalFilePath + ".enc";
// Update the file size to reflect the encrypted file size
snapshot.DataSize = new FileInfo(snapshot.FilePath).Length;
}
}
private string? ExtractValue(string connectionString, string key)
{
var pairs = connectionString.Split(';');
foreach (var pair in pairs)
{
var keyValue = pair.Split('=');
if (keyValue.Length == 2 && keyValue[0].Trim().Equals(key, StringComparison.OrdinalIgnoreCase))
{
return keyValue[1].Trim();
}
}
return null;
}
private async Task<(string File, long Position)> GetCurrentBinlogStatusAsync()
{
using var connection = new MySqlConnection(_config.ConnectionString);
await connection.OpenAsync();
using var command = new MySqlCommand("SHOW MASTER STATUS", connection);
using var reader = await command.ExecuteReaderAsync();
if (await reader.ReadAsync())
{
var file = reader.GetString("File");
var position = reader.GetInt64("Position");
return (file, position);
}
throw new Exception("Could not get current binlog status");
}
private async Task<string> CalculateFileChecksumAsync(string filePath)
{
try
{
var fileInfo = new FileInfo(filePath);
// Use optimized checksum calculation based on file size
if (OptimizedFileService.ShouldUseParallelProcessing(fileInfo.Length))
{
return await _fileService.CalculateChecksumParallelAsync(filePath);
}
else
{
return await _fileService.CalculateChecksumStreamingAsync(filePath);
}
}
catch (Exception ex)
{
throw new InvalidOperationException($"Failed to calculate file checksum: {ex.Message}", ex);
}
}
private async Task CompressFileAsync(string filePath)
{
try
{
var fileInfo = new FileInfo(filePath);
var compressedPath = filePath + ".lz4";
// Use optimized LZ4 compression based on file size
if (OptimizedFileService.ShouldUseParallelProcessing(fileInfo.Length))
{
await _fileService.CompressFileStreamingAsync(filePath, compressedPath);
}
else
{
var bufferSize = OptimizedFileService.GetOptimalBufferSize(fileInfo.Length);
await _fileService.CompressFileStreamingAsync(filePath, compressedPath, bufferSize);
}
// Delete the original uncompressed file
File.Delete(filePath);
}
catch (Exception ex)
{
throw new InvalidOperationException($"File compression failed: {ex.Message}", ex);
}
}
private async Task EncryptFileAsync(string filePath, string key)
{
try
{
// Use the encryption service for file encryption
var encryptedFilePath = filePath + ".enc";
await _encryptionService.EncryptFileAsync(filePath, encryptedFilePath);
// Delete the original file and update the path
File.Delete(filePath);
// Update the snapshot file path to point to the encrypted file
// This will be handled by the calling method
}
catch (Exception ex)
{
throw new InvalidOperationException($"File encryption failed: {ex.Message}", ex);
}
}
private async Task SaveSnapshotMetadataAsync(SnapshotInfo snapshot)
{
try
{
var metadataFile = Path.Combine(_metadataPath, $"{snapshot.Id}.json");
var json = JsonSerializer.Serialize(snapshot, new JsonSerializerOptions { WriteIndented = true });
var jsonBytes = Encoding.UTF8.GetBytes(json);
// Use optimized file writing
await _fileService.WriteFileOptimizedAsync(metadataFile, jsonBytes);
}
catch (Exception ex)
{
throw new InvalidOperationException($"Failed to save snapshot metadata: {ex.Message}", ex);
}
}
private void LoadNextId()
{
var metadataFiles = Directory.GetFiles(_metadataPath, "*.json");
if (metadataFiles.Length > 0)
{
var maxId = metadataFiles
.Select(f => Path.GetFileNameWithoutExtension(f))
.Where(name => int.TryParse(name, out _))
.Select(int.Parse)
.Max();
_nextId = maxId + 1;
}
}
public async Task<SnapshotInfo> CreateIncrementalSnapshotAsync(string name, string? description = null, int? userId = null)
{
// Find the last snapshot (full or incremental)
var snapshots = await ListSnapshotsAsync();
var lastSnapshot = snapshots.OrderByDescending(s => s.CreatedAt).FirstOrDefault();
if (lastSnapshot == null)
{
throw new Exception("No previous snapshot found. Create a full snapshot first.");
}
// Get the binlog position from the last snapshot
string? startBinlogFile;
long? startBinlogPosition;
if (lastSnapshot.Type.Equals("Full", StringComparison.OrdinalIgnoreCase))
{
startBinlogFile = lastSnapshot.BinlogFile;
startBinlogPosition = lastSnapshot.BinlogPosition;
}
else
{
// For incremental snapshots, use the end position as the start for the next incremental
startBinlogFile = lastSnapshot.IncrementalBinlogEndFile;
startBinlogPosition = lastSnapshot.IncrementalBinlogEndPosition;
}
if (string.IsNullOrEmpty(startBinlogFile) || startBinlogPosition == null)
{
throw new Exception("No previous snapshot with binlog info found. Create a full snapshot first.");
}
// Get current binlog status
var (endFile, endPos) = await GetCurrentBinlogStatusAsync();
// Prepare file paths
var snapshot = new SnapshotInfo
{
Id = _nextId++,
Timestamp = DateTimeOffset.UtcNow.ToUnixTimeSeconds(),
Status = SnapshotStatus.Creating.ToString(),
Description = description ?? name,
UserId = userId,
CreatedAt = DateTime.UtcNow,
Type = "Incremental",
ParentSnapshotId = lastSnapshot.Id,
IncrementalBinlogStartFile = startBinlogFile,
IncrementalBinlogStartPosition = startBinlogPosition,
IncrementalBinlogEndFile = endFile,
IncrementalBinlogEndPosition = endPos
};
var fileName = $"inc_{snapshot.Id}_{snapshot.Timestamp}.binlog";
var filePath = Path.Combine(_snapshotsPath, fileName);
snapshot.FilePath = filePath;
// Use mysqlbinlog to extract the binlog segment (no --raw, redirect output to file)
// Extract connection details from configuration
var connectionString = _config.ConnectionString;
var server = ExtractValue(connectionString, "Server") ?? "localhost";
var port = ExtractValue(connectionString, "Port") ?? "3306";
var database = ExtractValue(connectionString, "Database") ?? "trading_platform";
var dbUserId = ExtractValue(connectionString, "Uid") ?? "root";
var password = ExtractValue(connectionString, "Pwd") ?? "";
var args = $"--read-from-remote-server --host={server} --port={port} --user={dbUserId}";
if (!string.IsNullOrEmpty(password))
{
args += $" --password={password}";
}
args += $" --start-position={startBinlogPosition} --stop-position={endPos} {startBinlogFile}";
if (startBinlogFile != endFile)
{
// If binlog rotated, need to handle multiple files (not implemented here for brevity)
throw new NotImplementedException("Incremental snapshot across multiple binlog files is not yet supported.");
}
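// Illustrative assembled command (binlog file name and positions are hypothetical):
//   mysqlbinlog --read-from-remote-server --host=localhost --port=3306 --user=root
//     --password=1 --start-position=154 --stop-position=982 mysql-bin.000007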
var startInfo = new System.Diagnostics.ProcessStartInfo
{
FileName = "mysqlbinlog",
Arguments = args,
RedirectStandardOutput = true,
RedirectStandardError = true,
UseShellExecute = false,
CreateNoWindow = true
};
using var process = new System.Diagnostics.Process { StartInfo = startInfo };
using var outputFile = new StreamWriter(filePath);
var error = new StringBuilder();
process.ErrorDataReceived += (sender, e) => { if (e.Data != null) error.AppendLine(e.Data); };
process.Start();
process.BeginErrorReadLine();
// Write stdout to file
while (!process.StandardOutput.EndOfStream)
{
var line = await process.StandardOutput.ReadLineAsync();
if (line != null)
await outputFile.WriteLineAsync(line);
}
await process.WaitForExitAsync();
outputFile.Close();
if (process.ExitCode != 0)
{
throw new Exception($"mysqlbinlog failed: {error}");
}
// Calculate checksum and size
snapshot.Checksum = await CalculateFileChecksumAsync(filePath);
snapshot.DataSize = new FileInfo(filePath).Length;
// Compress if enabled
if (_config.SnapshotStorage.Compression)
{
await CompressFileAsync(filePath);
snapshot.DataSize = new FileInfo(filePath + ".lz4").Length;
snapshot.FilePath = filePath + ".lz4";
}
// Encrypt if enabled
if (_config.Security.Encryption && !string.IsNullOrEmpty(_config.Security.EncryptionKey))
{
var originalFilePath = snapshot.FilePath;
await EncryptFileAsync(snapshot.FilePath, _config.Security.EncryptionKey);
// Update the file path to point to the encrypted file
snapshot.FilePath = originalFilePath + ".enc";
// Update the file size to reflect the encrypted file size
snapshot.DataSize = new FileInfo(snapshot.FilePath).Length;
}
// Save metadata
await SaveSnapshotMetadataAsync(snapshot);
snapshot.Status = SnapshotStatus.Completed.ToString();
await SaveSnapshotMetadataAsync(snapshot);
return snapshot;
}
}
}

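A minimal usage sketch for SnapshotService (not part of the commit); mariadb-dump and mysqlbinlog must be on PATH and the connection string must point at a reachable server.

using DatabaseSnapshotsService.Models;
using DatabaseSnapshotsService.Services;

var service = new SnapshotService(new SnapshotConfiguration());

// A full snapshot records the binlog file/position it was taken at.
var full = await service.CreateSnapshotAsync("nightly_full", "baseline");
Console.WriteLine($"full #{full.Id}: {full.FilePath}, binlog {full.BinlogFile}@{full.BinlogPosition}");

// An incremental captures only binlog traffic since the previous snapshot.
var inc = await service.CreateIncrementalSnapshotAsync("nightly_inc");
Console.WriteLine($"inc #{inc.Id} covers {inc.IncrementalBinlogStartPosition}..{inc.IncrementalBinlogEndPosition}");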
49
appsettings.test.json Normal file
View File

@ -0,0 +1,49 @@
{
"connectionString": "Server=localhost;Port=3306;Database=trading_platform;Uid=root;Pwd=1;",
"binlogReader": {
"host": "localhost",
"port": 3306,
"username": "root",
"password": "1",
"serverId": 999,
"startPosition": 4,
"heartbeatInterval": 30
},
"snapshotStorage": {
"path": "./snapshots_test",
"compression": true,
"retentionDays": 7,
"maxFileSize": 104857600,
"dumpOptimizations": {
"singleTransaction": true,
"includeRoutines": true,
"includeTriggers": true,
"includeEvents": true,
"extendedInsert": true,
"completeInsert": true,
"hexBlob": true,
"netBufferLength": 16384,
"maxAllowedPacket": "1G",
"excludeTables": [],
"includeTables": [],
"quick": true,
"orderByPrimary": true,
"flushLogs": true,
"masterData": 2,
"compact": false,
"noAutocommit": false,
"lockTables": false
}
},
"eventStore": {
"path": "./events",
"maxFileSize": 52428800,
"retentionDays": 90,
"batchSize": 1000,
"flushInterval": 5
},
"security": {
"encryption": false,
"encryptionKey": null
}
}

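A sketch (not part of the commit) of binding this file onto SnapshotConfiguration with the Microsoft.Extensions.Configuration packages the project references; the actual wiring is not shown in this commit.

using DatabaseSnapshotsService.Models;
using Microsoft.Extensions.Configuration;

var configuration = new ConfigurationBuilder()
    .SetBasePath(Directory.GetCurrentDirectory())
    .AddJsonFile("appsettings.test.json", optional: false)
    .Build();

// Get<T>() binds JSON keys to matching property names, case-insensitively.
var config = configuration.Get<SnapshotConfiguration>() ?? new SnapshotConfiguration();
Console.WriteLine(config.SnapshotStorage.Path);        // ./snapshots_test
Console.WriteLine(config.SnapshotStorage.Compression); // True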
9
events/recovery_points/test_incremental_restore.json Normal file
View File

@ -0,0 +1,9 @@
{
"id": 1,
"name": "test_incremental_restore",
"timestamp": 1751237380,
"description": null,
"eventCount": 0,
"createdAt": "2025-06-29T22:49:40.92883Z",
"lastEventId": 0
}