Multiple new features and changes:

- Add a mock data generator;
- Add large object heap compaction (defragmentation) during GC (a sketch of the pattern follows this list);
- Improve log output and add more DEBUG monitoring items;
- Fix a serious bug in the database-sharding configuration logic used during output;
- Minor memory performance optimizations, reducing lambda closure allocations;
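
The GC defragmentation item pairs a one-shot large-object-heap compaction with a forced collection before the input side backs off (see the FileInputService hunk below). A minimal standalone sketch of that pattern; the class and member names and the threshold value here are illustrative, not the project's:

using System.Runtime;

static class MemoryBackpressure
{
    // Illustrative threshold; the real value comes from the application's options.
    private const long MemoryThreshold = 2L * 1024 * 1024 * 1024;

    public static async Task PauseIfMemoryHighAsync(CancellationToken ct)
    {
        if (GC.GetTotalMemory(forceFullCollection: false) > MemoryThreshold)
        {
            // Ask the next blocking collection to also compact the large object heap,
            // then run that collection and give the pipeline time to drain.
            GCSettings.LargeObjectHeapCompactionMode = GCLargeObjectHeapCompactionMode.CompactOnce;
            GC.Collect();
            await Task.Delay(TimeSpan.FromSeconds(3), ct);
        }
    }
}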
陈梓阳 2024-12-20 10:43:05 +08:00
parent fb3c4ac5f6
commit b20c56640f
20 changed files with 492 additions and 21 deletions

View File

@@ -1,4 +1,5 @@
using MesETL.App.Const;
using System.Runtime;
using MesETL.App.Const;
using MesETL.App.HostedServices.Abstractions;
using MesETL.App.Options;
using MesETL.App.Services;
@@ -65,7 +66,7 @@ public class FileInputService : IInputService
if (GC.GetTotalMemory(false) > _memoryThreshold)
{
_logger.LogWarning("内存使用率过高,暂缓输入");
GC.Collect();
GCSettings.LargeObjectHeapCompactionMode = GCLargeObjectHeapCompactionMode.CompactOnce;
GC.Collect();
await Task.Delay(3000, cancellationToken);
}

View File

@@ -1,4 +1,5 @@
using System.Buffers;
using System.Text;
using MesETL.App.Const;
using MesETL.App.Helpers;
using MesETL.App.HostedServices.Abstractions;
@@ -46,7 +47,7 @@ public class OutputService : IOutputService
public async Task ExecuteAsync(CancellationToken ct)
{
_logger.LogInformation("***** 输出服务已启动 *****");
var dbTaskManager = new TaskManager(5);
var dbTaskManager = new TaskManager(_queuePool.Queues.Count);
var dbTasks = new Dictionary<string, Task>();
while (!_context.IsTransformCompleted)
{
@@ -74,7 +75,7 @@ public class OutputService : IOutputService
_logger.LogInformation("*****开启输出线程,数据库: {db} *****", db);
var taskManager = new TaskManager(_outputOptions.Value.MaxDatabaseOutputTask);
var ignoreOutput = new HashSet<string>(_outputOptions.Value.NoOutput);
var tmp = new List<DataRecord>();
var tmp = new List<DataRecord>(_outputOptions.Value.FlushCount);
while (!_context.IsTransformCompleted || queue.Count > 0)
{
if (ct.IsCancellationRequested)
@@ -111,6 +112,8 @@ public class OutputService : IOutputService
// 等待所有子任务完成
await TaskExtensions.WaitUntil(() => taskManager.RunningTaskCount == 0, 10, ct);
_logger.LogDebug("输出线程结束,清理剩余记录[{Count}]", tmp.Count);
// 清理剩余记录
if (tmp.Count > 0)
{

View File

@@ -94,8 +94,11 @@ public class TaskMonitorService
});
var dict = _context.TableProgress
.ToDictionary(kv => kv.Key, kv => $"{kv.Value.input}/{kv.Value.output}");
logger.LogStatus("系统监控: 表处理进度", dict, ITaskMonitorLogger.LogLevel.Progress);
.ToDictionary(kv => kv.Key, kv => $"{kv.Value.input}:{kv.Value.output}");
logger.LogStatus("系统监控: 表处理进度(I:O)", dict, ITaskMonitorLogger.LogLevel.Progress);
logger.LogStatus("系统监控:输出队列状态",
_queuePool.Queues.ToDictionary(q => q.Key, q => q.Value.Count.ToString()),
ITaskMonitorLogger.LogLevel.Progress);
var sb = new StringBuilder("表处理进度:\n");
foreach (var kv in dict)
{

View File

@@ -1,4 +1,5 @@
using MesETL.App.HostedServices.Abstractions;
using System.Numerics;
using MesETL.App.HostedServices.Abstractions;
using MesETL.App.Services;
using Microsoft.Extensions.Logging;
@@ -11,6 +12,8 @@ public class VoidOutputService : IOutputService
private readonly RecordQueuePool _queuePool;
private readonly ProcessContext _context;
private BigInteger _total;
public VoidOutputService(
ProcessContext context, ILogger<VoidOutputService> logger, RecordQueuePool queuePool)
{
@@ -31,13 +34,18 @@ public class VoidOutputService : IOutputService
_queuePool.RemoveQueue(pair.Key);
continue;
}
if(!pair.Value.TryDequeue(out var record)) continue;
if (!pair.Value.TryDequeue(out var record))
continue;
_total += record.FieldCharCount;
_context.AddOutput();
}
}
_context.CompleteOutput();
_logger.LogInformation("***** Void Output Service Stopped *****");
_logger.LogInformation("平均列字符数:{Number}", _total / _context.OutputCount);
return Task.CompletedTask;
}
}

View File

@@ -22,6 +22,7 @@
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Hosting" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.ObjectPool" Version="9.0.0" />
<PackageReference Include="Serilog" Version="4.1.0" />
<PackageReference Include="Serilog.Extensions.Hosting" Version="8.0.0" />
<PackageReference Include="Serilog.Sinks.Console" Version="6.0.0" />

View File

@@ -31,7 +31,10 @@ public class TenantDbOptions
foreach (var (key, value) in DbGroup)
{
if (value > tenantKeyValue)
{
dbName = key;
break;
}
}
return dbName ??
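
The hunk above is the sharding fix called out in the commit message: the lookup now stops at the first group whose upper bound exceeds the tenant key, instead of letting every later (larger) group overwrite the result. A minimal sketch of the corrected selection, where PickDatabase stands in for TenantDbOptions.GetDbNameByTenantKeyValue and the group values are the Prod group configured later in this commit:

// Ascending upper bounds per database; a tenant key maps to the first bound it falls under.
var dbGroup = new Dictionary<string, int>
{
    { "mesdb_1", 5000 },
    { "mesdb_2", 10000 },
    { "mesdb_3", 15000 },
    { "mesdb_4", 20000 },
    { "mesdb_5", int.MaxValue },
};

string? PickDatabase(int tenantKeyValue)
{
    string? dbName = null;
    foreach (var (key, value) in dbGroup)
    {
        if (value > tenantKeyValue)
        {
            dbName = key;
            break; // without the break, later groups also match and the last one wins
        }
    }
    return dbName;
}

Console.WriteLine(PickDatabase(4321));  // mesdb_1
Console.WriteLine(PickDatabase(12345)); // mesdb_3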

View File

@@ -39,6 +39,10 @@ public class DataRecordQueue : IDisposable
{
if (_queue.TryTake(out record))
{
// if (record.Database is not null)
// {
// Console.WriteLine("out " + record.Database);
// }
Interlocked.Add(ref _currentCharCount, -record.FieldCharCount);
OnRecordRead?.Invoke();
return true;

View File

@@ -9,6 +9,7 @@ public sealed class OutputErrorRecorder : ErrorRecorder
private readonly string _outputDir = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, $"ErrorRecords/{UID}/Output");
private readonly string _database;
private readonly Dictionary<string, int> _logIndex = new();
private static readonly object Lock = new();
public OutputErrorRecorder(string database, ILogger logger) : base(logger)
{
@@ -50,7 +51,17 @@ public sealed class OutputErrorRecorder : ErrorRecorder
""";
await File.AppendAllTextAsync(filePath, content, Encoding.UTF8);
try
{
lock (Lock)
{
File.AppendAllText(filePath, content, Encoding.UTF8);
}
}
catch(Exception e)
{
Logger.LogError(e, "输出错误日志时发生错误");
}
}
/// <summary>

View File

@@ -59,11 +59,12 @@ public class ProcessContext
public void AddTableInput(string table, int count)
{
_tableProgress.AddOrUpdate(table, (input:count, output:0), (k, tuple) =>
if (!_tableProgress.TryAdd(table, (input: count, output: 0)))
{
var tuple = _tableProgress[table];
tuple.input += count;
return tuple;
});
_tableProgress[table] = tuple;
}
}
public void AddTableOutput(string table, int count)

View File

@@ -117,6 +117,7 @@ public class SeqService
/// </summary>
public async Task ApplyToDatabaseAsync()
{
if (_cachedSequence.Count == 0) return;
var sql = GenerateCachedSeqSql();
await DatabaseHelper.NonQueryAsync(_connectionString, sql);
}

View File

@@ -11,7 +11,7 @@
"InputDir": "D:\\Data\\DatabaseDump\\MyDumper-ZST 2024-12-3", // Csv
"UseMock": false, // 使
"MockCountMultiplier": 1, //
// "TableOrder": ["order_extra"], //
// "TableOrder": ["order_block_plan_item"], //
"TableIgnoreList": [] //
},
"Transform":{

View File

@ -10,6 +10,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MesETL.Shared", "MesETL.Sha
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MesETL.Clean", "MesETL.Clean\MesETL.Clean.csproj", "{E1B2BED0-EBA6-4A14-BAD5-8EC4E528D7E0}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Mesdb.DataGenerator", "Mesdb.DataGenerator\Mesdb.DataGenerator.csproj", "{2B7F3837-5ECD-4D24-B674-FDDA1C887A68}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -36,5 +38,9 @@ Global
{E1B2BED0-EBA6-4A14-BAD5-8EC4E528D7E0}.Debug|Any CPU.Build.0 = Debug|Any CPU
{E1B2BED0-EBA6-4A14-BAD5-8EC4E528D7E0}.Release|Any CPU.ActiveCfg = Release|Any CPU
{E1B2BED0-EBA6-4A14-BAD5-8EC4E528D7E0}.Release|Any CPU.Build.0 = Release|Any CPU
{2B7F3837-5ECD-4D24-B674-FDDA1C887A68}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{2B7F3837-5ECD-4D24-B674-FDDA1C887A68}.Debug|Any CPU.Build.0 = Debug|Any CPU
{2B7F3837-5ECD-4D24-B674-FDDA1C887A68}.Release|Any CPU.ActiveCfg = Release|Any CPU
{2B7F3837-5ECD-4D24-B674-FDDA1C887A68}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
EndGlobal

View File

@@ -0,0 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net9.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\MesETL.App\MesETL.App.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,11 @@
namespace Mesdb.DataGenerator;
public static class MockHelper
{
public static string RandomString(int length)
{
const string chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
return new string(Enumerable.Repeat(chars, length)
.Select(s => s[Random.Shared.Next(s.Length)]).ToArray());
}
}

View File

@@ -0,0 +1,26 @@
using MesETL.App;
namespace Mesdb.DataGenerator;
public class MockInputOptions
{
public IReadOnlyDictionary<string, TableMockOptions> Rules { get; set; } =
new Dictionary<string, TableMockOptions>();
}
public class TableMockOptions
{
public long Amount { get; set; }
public Func<TableMockContext, DataRecord> Generate { get; set; }
public TableMockOptions(long amount, Func<TableMockContext, DataRecord> generate)
{
Amount = amount;
Generate = generate;
}
}
public struct TableMockContext
{
public long Index { get; set; }
}

View File

@@ -0,0 +1,49 @@
using MesETL.App.Const;
using MesETL.App.HostedServices.Abstractions;
using MesETL.App.Services;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace Mesdb.DataGenerator;
public class MockInputService : IInputService
{
private readonly DataRecordQueue _producerQueue;
private readonly ProcessContext _context;
private readonly IOptions<MockInputOptions> _options;
private readonly ILogger _logger;
public MockInputService([FromKeyedServices(ConstVar.Producer)]DataRecordQueue producerQueue, ProcessContext context, IOptions<MockInputOptions> options,
ILogger<MockInputService> logger)
{
_producerQueue = producerQueue;
_context = context;
_options = options;
_logger = logger;
}
public async Task ExecuteAsync(CancellationToken cancellationToken)
{
_logger.LogInformation("***** 开始模拟输入数据 *****");
foreach (var (table, options) in _options.Value.Rules)
{
_logger.LogInformation("模拟表 '{TableName}' 输入,数量: {Amount}", table, options.Amount);
for (int i = 0; i < options.Amount; i++)
{
var ctx = new TableMockContext()
{
Index = i,
};
var record = options.Generate(ctx);
await _producerQueue.EnqueueAsync(record);
_context.AddInput(1);
_context.AddTableInput(table, 1);
}
_logger.LogInformation("表 '{TableName}' 输入完成", table);
}
_context.CompleteInput();
_logger.LogInformation("***** 模拟数据输入完成 *****");
}
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,288 @@
// See https://aka.ms/new-console-template for more information
using System.Text;
using Mesdb.DataGenerator;
using MesETL.App;
using MesETL.App.Cache;
using MesETL.App.Const;
using MesETL.App.HostedServices;
using MesETL.App.HostedServices.Abstractions;
using MesETL.App.Options;
using MesETL.App.Services;
using MesETL.App.Services.ErrorRecorder;
using MesETL.App.Services.ETL;
using MesETL.App.Services.Loggers;
using MesETL.App.Services.Seq;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Serilog;
using Serilog.Events;
await RunProgram();
return;
async Task RunProgram()
{
ThreadPool.SetMaxThreads(200, 200);
var host = Host.CreateApplicationBuilder(args);
var dbGroup = new Dictionary<string, int>()
{
{ "mesdb_1", 5000 },
{ "mesdb_2", 10000 },
{ "mesdb_3", 15000 },
{ "mesdb_4", 20000 },
{ "mesdb_5", 2147483647 },
};
var tenantDbOptions = new TenantDbOptions()
{
TenantKey = "CompanyID",
DbGroup = dbGroup,
UseDbGroup = "Prod",
};
host.Services.Configure<TenantDbOptions>(options =>
{
options.TenantKey = tenantDbOptions.TenantKey;
options.DbGroup = tenantDbOptions.DbGroup;
options.UseDbGroup = tenantDbOptions.UseDbGroup;
});
host.Services.Configure<MockInputOptions>(options =>
{
const float Multiplexer = 0.01F;
var SampleSharedKeys = Enumerable.Range(0, 11).Select(i => (23010 + i * 10).ToString()).Concat(
Enumerable.Range(0, 11).Select(i => (24010 + i * 10).ToString())).ToArray();
options.Rules = new Dictionary<string, TableMockOptions>()
{
{
TableNames.Order, new TableMockOptions((long)(2912406 * Multiplexer), context =>
{
string[] headers =
[
"OrderNo", "ShardKey", "CreateTime", "CompanyID", "SchduleDeliveryDate", "OrderType",
"OrderSort",
"CadDataType", "Deleted", "ProcessState"
];
string[] fields =
[
(20241210000000 + context.Index).ToString(),
SampleSharedKeys[Random.Shared.Next(SampleSharedKeys.Length)],
$"\"{DateTime.Now:yyyy-MM-dd HH:mm:ss}\"",
Random.Shared.Next(1, 28888).ToString(),
$"\"{DateTime.Now.AddDays(Random.Shared.Next(1, 30)):yyyy-MM-dd HH:mm:ss}\"",
Random.Shared.Next(0, 3).ToString(),
Random.Shared.Next(0, 3).ToString(),
Random.Shared.Next(0, 3).ToString(),
Random.Shared.Next(0, 2).ToString(),
Random.Shared.Next(0, 2).ToString()
];
return new DataRecord(fields, TableNames.Order, headers);
})
},
{
TableNames.OrderItem, new TableMockOptions((long)(820241144 * Multiplexer), context =>
{
string[] headers =
[
"ID", "ShardKey", "OrderNo", "ItemNo", "ItemType", "RoomID", "BoxID", "DataID", "PlanID",
"PackageID", "Num", "CompanyID"
];
string[] fields =
[
context.Index.ToString(),
SampleSharedKeys[Random.Shared.Next(SampleSharedKeys.Length)],
(20241210000000 + Random.Shared.Next(0, 2912406)).ToString(),
(2412000000000 + context.Index).ToString(),
Random.Shared.Next(0, 3).ToString(),
Random.Shared.Next(1, 1000000).ToString(),
Random.Shared.Next(1, 1000000).ToString(),
Random.Shared.Next(1, 1000000).ToString(),
Random.Shared.Next(1, 1306670366).ToString(),
Random.Shared.Next(1, 10000000).ToString(),
(Random.Shared.Next(1, 2000000) / 1000).ToString("F3"),
Random.Shared.Next(1, 28888).ToString(),
];
return new DataRecord(fields, TableNames.OrderItem, headers);
})
},
{
TableNames.OrderDataBlock, new TableMockOptions((long)(428568719 * Multiplexer), context =>
{
string[] headers =
[
"ID", "ShardKey", "OrderNo", "BoardName", "BoardType", "GoodsID", "Width", "Height",
"Thickness",
"SpliteWidth", "SpliteHeight", "SpliteThickness", "SealedLeft", "SealedRight", "SealedUp",
"SealedDown", "Area", "Wave", "HoleFace", "PaiKong", "RemarkJson", "UnRegularPointCount",
"FrontHoleCount", "BackHoleCount", "SideHoleCount", "FrontModelCount", "BackModelCount",
"IsDoor",
"OpenDoorType", "CompanyID", "BorderLengthHeavy", "BorderLengthLight", "IsHXDJX",
"ModuleTypeID",
"PlanFilterType"
];
string[] BoardNames = ["左侧板", "右侧板", "左开门板", "右开门板", "薄背板", "顶板", "底板", "地脚线", "后地脚", "层板", "立板"];
string[] fields =
[
context.Index.ToString(),
SampleSharedKeys[Random.Shared.Next(SampleSharedKeys.Length)],
(20241210000000 + Random.Shared.Next(0, 2912406)).ToString(),
$"\"{BoardNames[Random.Shared.Next(0, BoardNames.Length)]}\"",
Random.Shared.Next(0, 3).ToString(),
Random.Shared.Next(1, 1000000).ToString(),
(Random.Shared.Next(1, 2000000) / 1000).ToString("F3"),
(Random.Shared.Next(1, 1200000) / 1000).ToString("F3"),
(Random.Shared.Next(1, 18000) / 1000).ToString("F3"),
(Random.Shared.Next(1, 2000000) / 1000).ToString("F3"),
(Random.Shared.Next(1, 1200000) / 1000).ToString("F3"),
(Random.Shared.Next(1, 18000) / 1000).ToString("F3"),
(Random.Shared.Next(1, 2000) / 1000).ToString("F2"),
(Random.Shared.Next(1, 2000) / 1000).ToString("F2"),
(Random.Shared.Next(1, 2000) / 1000).ToString("F2"),
(Random.Shared.Next(1, 2000) / 1000).ToString("F2"),
(Random.Shared.Next(1, 3000) / 1000).ToString("F3"),
Random.Shared.Next(0, 3).ToString(),
Random.Shared.Next(0, 3).ToString(),
Random.Shared.Next(0, 3).ToString(),
Convert.ToHexString(Encoding.UTF8.GetBytes(MockHelper.RandomString(100))),
Random.Shared.Next(0, 4).ToString(),
Random.Shared.Next(0, 4).ToString(),
Random.Shared.Next(0, 4).ToString(),
Random.Shared.Next(0, 4).ToString(),
Random.Shared.Next(0, 4).ToString(),
Random.Shared.Next(0, 4).ToString(),
Random.Shared.Next(0, 2).ToString(),
Random.Shared.Next(0, 6).ToString(),
Random.Shared.Next(1, 28888).ToString(),
(Random.Shared.Next(1, 2000000) / 1000).ToString("F3"),
(Random.Shared.Next(1, 2000000) / 1000).ToString("F3"),
Random.Shared.Next(0, 2).ToString(),
Random.Shared.Next(0, 100000).ToString(),
Random.Shared.Next(0, 5).ToString(),
];
return new DataRecord(fields, TableNames.OrderDataBlock, headers);
})
},
{
TableNames.OrderBoxBlock, new TableMockOptions((long)(20163038 * Multiplexer), context =>
{
string[] headers = ["BoxID", "ShardKey", "OrderNo", "Data", "CompanyID"];
string[] fields =
[
context.Index.ToString(),
SampleSharedKeys[Random.Shared.Next(SampleSharedKeys.Length)],
(20241210000000 + Random.Shared.Next(0, 2912406)).ToString(),
OrderBoxBlockHelper.RandomPick(),
Random.Shared.Next(1, 28888).ToString(),
];
return new DataRecord(fields, TableNames.OrderBoxBlock, headers);
})
},
{
TableNames.OrderModuleExtra, new TableMockOptions((long)(33853580 * Multiplexer), context =>
{
string[] headers =
[
"ID", "ShardKey", "OrderNo", "RoomID", "BoxID", "PropType", "JsonStr", "CompanyID"
];
string[] fields =
[
context.Index.ToString(),
SampleSharedKeys[Random.Shared.Next(SampleSharedKeys.Length)],
(20241210000000 + Random.Shared.Next(0, 2912406)).ToString(),
Random.Shared.Next(1, 1000000).ToString(),
Random.Shared.Next(1, 1000000).ToString(),
Random.Shared.Next(0, 14).ToString(),
OrderModuleExtraHelper.PickJson(),
Random.Shared.Next(1, 28888).ToString(),
];
return new DataRecord(fields, TableNames.OrderModuleExtra, headers);
})
}
};
});
host.Services.Configure<DataTransformOptions>(options =>
{
options.DatabaseFilter = record =>
{
var companyId = int.Parse(record[tenantDbOptions.TenantKey]); // 每个实体都应存在CompanyID否则异常
return tenantDbOptions.GetDbNameByTenantKeyValue(companyId);
};
});
host.Services.Configure<DatabaseOutputOptions>(options =>
{
options.ConnectionString = "Server=192.168.1.246;Port=3333;UserId=root;Password=123456;";
options.FlushCount = 10000;
options.MaxAllowedPacket = 67108864;
options.MaxDatabaseOutputTask = 4;
options.ColumnTypeConfig = new Dictionary<string, ColumnType>
{
{ "machine.Settings", ColumnType.Text },
{ "order_block_plan.BlockInfo", ColumnType.Text },
{ "order_block_plan.OrderNos", ColumnType.Json },
{ "order_block_plan_result.PlaceData", ColumnType.Blob },
{ "order_box_block.Data", ColumnType.Blob },
{ "order_data_block.RemarkJson", ColumnType.Text },
{ "order_data_goods.ExtraProp", ColumnType.Json },
{ "order_extra.ConfigJson", ColumnType.Json },
{ "order_module_extra.Data", ColumnType.Blob },
{ "order_module_extra.JsonStr", ColumnType.Text },
{ "order_patch_detail.BlockDetail", ColumnType.Json },
{ "order_process_schdule.AreaName", ColumnType.Text },
{ "order_process_schdule.ConsigneeAddress", ColumnType.Text },
{ "order_process_schdule.ConsigneePhone", ColumnType.Text },
{ "order_process_schdule.CustomOrderNo", ColumnType.Text },
{ "order_process_schdule.OrderProcessStepName", ColumnType.Text },
{ "order_scrap_board.OutLineJson", ColumnType.Text },
{ "order_wave_group.ConfigJson", ColumnType.Json },
{ "process_info.Users", ColumnType.Text },
{ "process_item_exp.ItemJson", ColumnType.Text },
{ "report_template.SourceConfig", ColumnType.Text },
{ "report_template.Template", ColumnType.Text },
{ "simple_package.Items", ColumnType.Json },
{ "sys_config.JsonStr", ColumnType.Text },
{ "sys_config.Value", ColumnType.Text }
};
});
host.Services.AddLogging(builder =>
{
builder.ClearProviders();
builder.AddSerilog(new LoggerConfiguration()
.MinimumLevel.Debug()
.WriteTo.Console()
.WriteTo.File(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, $"./Log/Error/{ErrorRecorder.UID}.log"),
restrictedToMinimumLevel: LogEventLevel.Error)
// .WriteTo.File("./Log/Info/{ErrorRecorder.UID}.log", restrictedToMinimumLevel:LogEventLevel.Information) //性能考虑暂不使用
.CreateLogger()
);
});
host.Services.AddDataSourceFactory();
host.Services.AddErrorRecorderFactory();
host.Services.AddSingleton<ProcessContext>();
host.Services.AddSingleton<SeqService>();
var prodLen = host.Configuration.GetRequiredSection("RecordQueue").GetValue<int>("ProducerQueueLength");
var consLen = host.Configuration.GetRequiredSection("RecordQueue").GetValue<int>("ConsumerQueueLength");
var maxCharCount = host.Configuration.GetRequiredSection("RecordQueue").GetValue<long>("MaxByteCount") / 2;
host.Services.AddKeyedSingleton<DataRecordQueue>(ConstVar.Producer, new DataRecordQueue(prodLen, maxCharCount));
host.Services.AddRecordQueuePool(dbGroup.Keys
.Select(key => (key: key, queue: new DataRecordQueue(consLen, maxCharCount))).ToArray());
// host.Services.AddSingleton<ITaskMonitorLogger, CacheTaskMonitorLogger>();
host.Services.AddSingleton<ITaskMonitorLogger, LoggerTaskMonitorLogger>();
host.Services.AddHostedService<MainHostedService>();
host.Services.AddSingleton<IInputService, MockInputService>();
host.Services.AddSingleton<ITransformService, TransformService>();
host.Services.AddSingleton<IOutputService, OutputService>();
host.Services.AddSingleton<TaskMonitorService>();
// host.Services.AddRedisCache(redisOptions);
host.Services.AddSingleton<ICacher, MemoryCache>();
var app = host.Build();
await app.RunAsync();
}