ramdb/extension/src/RAMDb.cs

using System.Collections.Concurrent;
#pragma warning disable IDE0130 // Namespace does not match folder structure
namespace ArmaRAMDb
#pragma warning restore IDE0130 // Namespace does not match folder structure
{
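    // In-memory key/value, hash-table, and list store for the ArmaRAMDb extension, with
    // binary (.ardb) persistence, timed automatic backups, and backup rotation.
    // Illustrative usage (Main.Log and the callers live elsewhere in the extension):
    //   using var db = new RAMDb();   // optionally pass a custom .ardb path
    //   db.ImportFromArdb();          // load persisted data on startup
    //   db.ExportToArdb(true);        // save and keep a timestamped backup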
    internal class RAMDb(string ardbPath = RAMDb.DEFAULT_ARDB_PATH) : IDisposable
    {
        public const string DEFAULT_ARDB_PATH = "@ramdb\\ArmaRAMDb.ardb";
        private readonly string _ardbPath = Path.Combine(Environment.CurrentDirectory, ardbPath);
        public static readonly ConcurrentDictionary<string, string> _keyValues = new();
        public static readonly ConcurrentDictionary<string, ConcurrentDictionary<string, string>> _hashTables = new();
        public static readonly ConcurrentDictionary<string, List<string>> _lists = new();
        public static bool AutoBackupEnabled { get; set; } = false;
        public static int BackupFrequencyMinutes { get; set; } = 60;
        public static int MaxBackupsToKeep { get; set; } = 10;
        private static Timer _backupTimer;
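
        // Loads the on-disk .ardb file (if present) into the static dictionaries,
        // replacing any data currently held in memory.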
        public void ImportFromArdb()
        {
            try
            {
                if (File.Exists(_ardbPath))
                {
                    using var stream = new FileStream(_ardbPath, FileMode.Open);
                    using var reader = new BinaryReader(stream);
                    int version = reader.ReadInt32();
                    if (version != 1)
                    {
                        Main.Log($"Unsupported ARDB format version: {version}", "warning");
                        return;
                    }
                    _keyValues.Clear();
                    _hashTables.Clear();
                    _lists.Clear();
                    // The payload layout matches the backups, so reuse the shared reader.
                    ReadDataFromBinaryReader(reader);
                }
            }
            catch (Exception ex)
            {
                Main.Log($"Error during ARDB import: {ex.Message}", "error");
            }
        }
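
        // Serializes the in-memory data to the .ardb file and, optionally, writes a
        // timestamped copy into the "backups" subdirectory.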
        public void ExportToArdb(bool createBackup = false)
        {
            try
            {
                Directory.CreateDirectory(Path.GetDirectoryName(_ardbPath));
                using (var stream = new FileStream(_ardbPath, FileMode.Create))
                using (var writer = new BinaryWriter(stream))
                {
                    writer.Write(1);
                    WriteDataToBinaryWriter(writer);
                }
                if (createBackup)
                {
                    string timestamp = DateTime.Now.ToString("yyyyMMdd_HHmmss");
                    string backupDirectory = Path.Combine(Path.GetDirectoryName(_ardbPath), "backups");
                    string backupFileName = Path.GetFileNameWithoutExtension(_ardbPath) + "_" + timestamp + Path.GetExtension(_ardbPath);
                    string backupPath = Path.Combine(backupDirectory, backupFileName);
                    Directory.CreateDirectory(backupDirectory);
                    using (var stream = new FileStream(backupPath, FileMode.Create))
                    using (var writer = new BinaryWriter(stream))
                    {
                        // Write the format version header here as well; without it,
                        // RestoreFromBackup rejects the backup when it reads the version.
                        writer.Write(1);
                        WriteDataToBinaryWriter(writer);
                    }
                    Main.Log($"Created backup at: {backupPath}", "debug");
                }
                Main.Log("ARDB export complete", "debug");
            }
            catch (Exception ex)
            {
                Main.Log($"Error during ARDB export: {ex.Message}", "error");
            }
        }
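
        // Writes the shared payload layout: key-values, then hash tables, then lists,
        // each prefixed with its count. Callers write the version header themselves.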
        private static void WriteDataToBinaryWriter(BinaryWriter writer)
        {
            // Write KeyValues
            writer.Write(_keyValues.Count);
            foreach (var pair in _keyValues)
            {
                writer.Write(pair.Key);
                writer.Write(pair.Value);
            }
            // Write HashTables
            writer.Write(_hashTables.Count);
            foreach (var table in _hashTables)
            {
                writer.Write(table.Key);
                writer.Write(table.Value.Count);
                foreach (var entry in table.Value)
                {
                    writer.Write(entry.Key);
                    writer.Write(entry.Value);
                }
            }
            // Write Lists
            writer.Write(_lists.Count);
            foreach (var list in _lists)
            {
                writer.Write(list.Key);
                writer.Write(list.Value.Count);
                foreach (var item in list.Value)
                {
                    writer.Write(item);
                }
            }
        }
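
        // Returns the .ardb files in the backups directory, newest first when the
        // default timestamped naming is used.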
        public List<string> ListBackups()
        {
            string backupDirectory = Path.Combine(Path.GetDirectoryName(_ardbPath), "backups");
            List<string> backups = [];
            if (Directory.Exists(backupDirectory))
            {
                backups = [.. Directory.GetFiles(backupDirectory, "*.ardb").OrderByDescending(file => file)];
            }
            return backups;
        }
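
        // Replaces the in-memory data with the contents of the given backup file.
        // Returns false if the file is missing, unreadable, or has an unsupported version.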
        public static bool RestoreFromBackup(string backupPath)
        {
            if (File.Exists(backupPath))
            {
                try
                {
                    using var stream = new FileStream(backupPath, FileMode.Open);
                    using var reader = new BinaryReader(stream);
                    int version = reader.ReadInt32();
                    if (version != 1)
                    {
                        Main.Log($"Unsupported ARDB format version in backup: {version}", "warning");
                        return false;
                    }
                    _keyValues.Clear();
                    _hashTables.Clear();
                    _lists.Clear();
                    ReadDataFromBinaryReader(reader);
                    Main.Log($"Restored from backup: {backupPath}", "info");
                    return true;
                }
                catch (Exception ex)
                {
                    Main.Log($"Failed to restore from backup: {ex.Message}", "error");
                }
            }
            return false;
        }
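
        // Reads the payload produced by WriteDataToBinaryWriter into the static
        // dictionaries. Callers validate the version header and clear existing data first.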
        private static void ReadDataFromBinaryReader(BinaryReader reader)
        {
            // Read KeyValues
            int keyValueCount = reader.ReadInt32();
            for (int i = 0; i < keyValueCount; i++)
            {
                string key = reader.ReadString();
                string value = reader.ReadString();
                _keyValues.TryAdd(key, value);
                Main.Log($"Loaded key-value: {key} = {value[..Math.Min(50, value.Length)]}...", "debug");
            }
            // Read HashTables
            int tableCount = reader.ReadInt32();
            for (int i = 0; i < tableCount; i++)
            {
                string tableName = reader.ReadString();
                Main.Log($"Loading table: {tableName}", "debug");
                var concurrentDict = new ConcurrentDictionary<string, string>();
                int entryCount = reader.ReadInt32();
                for (int j = 0; j < entryCount; j++)
                {
                    string key = reader.ReadString();
                    string value = reader.ReadString();
                    concurrentDict.TryAdd(key, value);
                    Main.Log($"Loaded entry: {key} = {value[..Math.Min(50, value.Length)]}...", "debug");
                }
                _hashTables.TryAdd(tableName, concurrentDict);
            }
            // Read Lists
            int listCount = reader.ReadInt32();
            for (int i = 0; i < listCount; i++)
            {
                string listName = reader.ReadString();
                Main.Log($"Loading list: {listName}", "debug");
                var items = new List<string>();
                int itemCount = reader.ReadInt32();
                for (int j = 0; j < itemCount; j++)
                {
                    string value = reader.ReadString();
                    items.Add(value);
                    Main.Log($"Loaded item: {value[..Math.Min(50, value.Length)]}...", "debug");
                }
                _lists.TryAdd(listName, items);
            }
            Main.Log("ARDB import complete", "debug");
        }
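
        // Starts (or restarts) the periodic backup timer when AutoBackupEnabled is set.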
        public static void InitializeAutoBackup()
        {
            if (AutoBackupEnabled)
            {
                _backupTimer?.Dispose();
                _backupTimer = new Timer(BackupTimerCallback, null,
                    TimeSpan.FromMinutes(BackupFrequencyMinutes),
                    TimeSpan.FromMinutes(BackupFrequencyMinutes));
                Main.Log($"Automatic backup initialized (every {BackupFrequencyMinutes} minutes)", "info");
            }
        }
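
        // Timer callback: exports the current data with a backup (via a new instance
        // using the default .ardb path) and prunes old backups.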
        private static void BackupTimerCallback(object state)
        {
            try
            {
                var db = new RAMDb();
                db.ExportToArdb(true);
                ManageBackupRotation();
                Main.Log($"Automatic backup created at {DateTime.Now}", "info");
            }
            catch (Exception ex)
            {
                Main.Log($"Automatic backup failed: {ex.Message}", "error");
            }
        }
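
        // Deletes the oldest backups beyond MaxBackupsToKeep.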
        private static void ManageBackupRotation()
        {
            try
            {
                var db = new RAMDb();
                var backups = db.ListBackups();
                if (backups.Count > MaxBackupsToKeep)
                {
                    for (int i = MaxBackupsToKeep; i < backups.Count; i++)
                    {
                        File.Delete(backups[i]);
                        Main.Log($"Deleted old backup: {backups[i]}", "info");
                    }
                }
            }
            catch (Exception ex)
            {
                Main.Log($"Backup rotation failed: {ex.Message}", "error");
            }
        }
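
        // Stops the backup timer and persists the current data (with a final backup).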
        public void Dispose()
        {
            _backupTimer?.Dispose();
            ExportToArdb(createBackup: true);
        }
    }
}