diff --git a/c#/GlobalSuppressions.cs b/c#/GlobalSuppressions.cs
index f11166ce..551a02f9 100644
--- a/c#/GlobalSuppressions.cs
+++ b/c#/GlobalSuppressions.cs
@@ -11,6 +11,7 @@
 [assembly: SuppressMessage("Design", "CC0120:Your Switch maybe include default clause")]
 [assembly: SuppressMessage("Design", "MA0048:File name must match type name")]
 [assembly: SuppressMessage("Design", "MA0051:Method is too long")]
+[assembly: SuppressMessage("Design", "MA0076:Do not use implicit culture-sensitive ToString in interpolated strings", Justification = "https://stackoverflow.com/questions/8492449/is-int32-tostring-culture-specific")]
 [assembly: SuppressMessage("Documentation", "AV2305:Missing XML comment for internally visible type, member or parameter")]
 [assembly: SuppressMessage("Framework", "AV2220:Simple query should be replaced by extension method call")]
 [assembly: SuppressMessage("Maintainability", "AV1500:Member or local function contains too many statements")]
diff --git a/c#/crawler/src/EntryPoint.cs b/c#/crawler/src/EntryPoint.cs
index 91e8e103..e0deeb0e 100644
--- a/c#/crawler/src/EntryPoint.cs
+++ b/c#/crawler/src/EntryPoint.cs
@@ -34,10 +34,10 @@ protected override void ConfigureServices(HostBuilderContext context, IServiceCo
     [SuppressMessage("Style", "IDE0058:Expression value is never used")]
     protected override void ConfigureContainer(HostBuilderContext context, ContainerBuilder builder)
     {
-        builder.RegisterImplementsOfBaseTypes([
+        builder.RegisterImplementsOfBaseTypes(typeof(EntryPoint).Assembly, [
             typeof(BaseCrawler<,>), typeof(BaseCrawlFacade<,,,>),
-            typeof(BaseParser<,>), typeof(CommonInSavers<>)
+            typeof(BasePostParser<,>), typeof(BaseSaver<>)
         ]);
         builder.RegisterType();
         builder.RegisterType();
diff --git a/c#/crawler/src/Tieba/Crawl/CrawlPost.cs b/c#/crawler/src/Tieba/Crawl/CrawlPost.cs
index 17f92316..afd5491f 100644
--- a/c#/crawler/src/Tieba/Crawl/CrawlPost.cs
+++ b/c#/crawler/src/Tieba/Crawl/CrawlPost.cs
@@ -37,8 +37,8 @@ public async Task CrawlThreads
         {
             crawlingPage++;
             await using var facadeFactory = threadCrawlFacadeFactory();
-            var crawler = facadeFactory.Value(fid, forumName);
-            var currentPageChangeSet = (await crawler.CrawlPageRange(
+            var facade = facadeFactory.Value(fid, forumName);
+            var currentPageChangeSet = (await facade.CrawlPageRange(
                 crawlingPage, crawlingPage, stoppingToken)).SaveCrawled(stoppingToken);
             if (currentPageChangeSet != null)
             {
@@ -60,8 +60,8 @@ await Task.WhenAll(savedThreads.Select(async threads =>
             if (stoppingToken.IsCancellationRequested) return;
             var failureCountsKeyByTid = threads.NewlyAdded
                 .ToDictionary(th => th.Tid, _ => (FailureCount)0);
-            await using var threadLateFactory = threadLateCrawlFacadeFactory();
-            await threadLateFactory.Value(fid).CrawlThenSave(failureCountsKeyByTid, stoppingToken);
+            await using var threadLateFacade = threadLateCrawlFacadeFactory();
+            await threadLateFacade.Value(fid).CrawlThenSave(failureCountsKeyByTid, stoppingToken);
         }));

         return savedThreads;
@@ -89,10 +89,10 @@ await Task.WhenAll(shouldCrawlParentPosts.Select(async tid =>
         {
             if (stoppingToken.IsCancellationRequested) return;
             await using var facadeFactory = replyCrawlFacadeFactory();
-            var crawler = facadeFactory.Value(fid, tid).AddExceptionHandler(
+            var facade = facadeFactory.Value(fid, tid).AddExceptionHandler(
                 SaveThreadMissingFirstReply(fid, tid, savedThreads).Invoke);
             savedRepliesKeyByTid.SetIfNotNull(tid,
-                (await crawler.CrawlPageRange(1, stoppingToken: stoppingToken)).SaveCrawled(stoppingToken));
+                (await facade.CrawlPageRange(1, stoppingToken: stoppingToken)).SaveCrawled(stoppingToken));
         }));
         return savedRepliesKeyByTid;
     }
@@ -121,8 +121,8 @@ await Task.WhenAll(shouldCrawlParentPosts.Select(async t =>
             if (stoppingToken.IsCancellationRequested) return;
             var (tid, pid) = t;
             await using var facadeFactory = subReplyCrawlFacadeFactory();
-            var crawler = facadeFactory.Value(fid, tid, pid);
-            _ = (await crawler.CrawlPageRange(1, stoppingToken: stoppingToken))
+            var facade = facadeFactory.Value(fid, tid, pid);
+            _ = (await facade.CrawlPageRange(1, stoppingToken: stoppingToken))
                 .SaveCrawled(stoppingToken);
         }));
     }
diff --git a/c#/crawler/src/Tieba/Crawl/Saver/BaseSaver.cs b/c#/crawler/src/Tieba/Crawl/Saver/BaseSaver.cs
index 92dbfc1e..0dc2ef69 100644
--- a/c#/crawler/src/Tieba/Crawl/Saver/BaseSaver.cs
+++ b/c#/crawler/src/Tieba/Crawl/Saver/BaseSaver.cs
@@ -3,7 +3,9 @@ namespace tbm.Crawler.Tieba.Crawl.Saver;
 public abstract class BaseSaver<TBaseRevision>(ILogger<BaseSaver<TBaseRevision>> logger)
+#pragma warning disable S1939 // Inheritance list should not be redundant
     : SaverWithRevision<TBaseRevision>, IFieldChangeIgnorance
+#pragma warning restore S1939 // Inheritance list should not be redundant
     where TBaseRevision : class, IRevision
 {
     protected void SavePostsOrUsers(
diff --git a/c#/crawler/src/Tieba/Crawl/Saver/Post/BasePostSaver.cs b/c#/crawler/src/Tieba/Crawl/Saver/Post/BasePostSaver.cs
index 4421ddeb..21d5ee62 100644
--- a/c#/crawler/src/Tieba/Crawl/Saver/Post/BasePostSaver.cs
+++ b/c#/crawler/src/Tieba/Crawl/Saver/Post/BasePostSaver.cs
@@ -21,6 +21,7 @@ public virtual IFieldChangeIgnorance.FieldChangeIgnoranceDelegates
     protected ConcurrentDictionary Posts { get; } = posts;
     protected AuthorRevisionSaver AuthorRevisionSaver { get; } = authorRevisionSaverFactory(postType);

+    [SuppressMessage("Misc", "AV1225:Method that raises an event should be protected virtual and be named 'On' followed by event name")]
     public void OnPostSaveEvent() => PostSaveEvent();

     public abstract SaverChangeSet Save(CrawlerDbContext db);
diff --git a/c#/crawler/src/Worker/ArchiveCrawlWorker.cs b/c#/crawler/src/Worker/ArchiveCrawlWorker.cs
index 0879f910..37728aaf 100644
--- a/c#/crawler/src/Worker/ArchiveCrawlWorker.cs
+++ b/c#/crawler/src/Worker/ArchiveCrawlWorker.cs
@@ -118,8 +118,8 @@ private async Task?> CrawlThreads
         (Page page, string forumName, Fid fid, CancellationToken stoppingToken = default)
     {
         await using var facadeFactory = threadArchiveCrawlFacadeFactory();
-        var crawler = facadeFactory.Value(fid, forumName);
-        var savedThreads = (await crawler.CrawlPageRange(
+        var facade = facadeFactory.Value(fid, forumName);
+        var savedThreads = (await facade.CrawlPageRange(
             page, page, stoppingToken)).SaveCrawled(stoppingToken);

         // ReSharper disable once InvertIf
@@ -127,8 +127,8 @@ private async Task?> CrawlThreads
         {
             var failureCountsKeyByTid = savedThreads.NewlyAdded
                 .ToDictionary(th => th.Tid, _ => (FailureCount)0);
-            await using var threadLate = threadLateCrawlFacadeFactory();
-            await threadLate.Value(fid).CrawlThenSave(failureCountsKeyByTid, stoppingToken);
+            await using var threadLateFacade = threadLateCrawlFacadeFactory();
+            await threadLateFacade.Value(fid).CrawlThenSave(failureCountsKeyByTid, stoppingToken);
         }
         return savedThreads;
     }
@@ -147,9 +147,9 @@ await Task.WhenAll(savedThreads.AllAfter.Select(th => th.Tid).Distinct().Select(
         {
             if (stoppingToken.IsCancellationRequested) return;
             await using var facadeFactory = replyCrawlFacadeFactory();
-            var crawler = facadeFactory.Value(fid, tid);
+            var facade = facadeFactory.Value(fid, tid);
             savedRepliesKeyByTid.SetIfNotNull(tid,
-                (await crawler.CrawlPageRange(1, stoppingToken: stoppingToken)).SaveCrawled(stoppingToken));
+                (await facade.CrawlPageRange(1, stoppingToken: stoppingToken)).SaveCrawled(stoppingToken));
         }));
         return savedRepliesKeyByTid;
     }
diff --git a/c#/crawler/src/Worker/RetryCrawlWorker.cs b/c#/crawler/src/Worker/RetryCrawlWorker.cs
index 6aea9716..96e5e722 100644
--- a/c#/crawler/src/Worker/RetryCrawlWorker.cs
+++ b/c#/crawler/src/Worker/RetryCrawlWorker.cs
@@ -58,7 +58,7 @@ private async Task RetryThreadLate(
         IReadOnlyDictionary> failureCountWithPagesKeyByLockId,
         CancellationToken stoppingToken = default)
     {
-        await using var threadLate = threadLateCrawlFacadeFactory();
+        await using var threadLateFacade = threadLateCrawlFacadeFactory();
         foreach (var tidGroupByFid in failureCountWithPagesKeyByLockId
             .Keys.GroupBy(lockId => lockId.Fid, lockId => lockId.Tid))
         {
@@ -71,7 +71,7 @@ FailureCount FailureCountSelector(Tid tid) =>
                 .Cast().ToDictionary(tid => tid, FailureCountSelector);
             logger.LogTrace("Retrying previous failed thread late crawl with fid={}, threadsId={}",
                 fid, Helper.UnescapedJsonSerialize(tidGroupByFid));
-            await threadLate.Value(fid).CrawlThenSave(failureCountsKeyByTid, stoppingToken);
+            await threadLateFacade.Value(fid).CrawlThenSave(failureCountsKeyByTid, stoppingToken);
         }
     }
@@ -95,9 +95,9 @@ from f in dbFactory.Value().Forums.AsNoTracking()
         logger.LogTrace("Retrying previous failed {} pages in thread crawl for fid={}, forumName={}",
             failureCount, fid, forumName);
-        await using var crawlerFactory = threadCrawlFacadeFactory();
-        var crawler = crawlerFactory.Value(fid, forumName);
-        var savedThreads = await crawler.RetryThenSave(pages, failureCountSelector, stoppingToken);
+        await using var facadeFactory = threadCrawlFacadeFactory();
+        var facade = facadeFactory.Value(fid, forumName);
+        var savedThreads = await facade.RetryThenSave(pages, failureCountSelector, stoppingToken);
         if (savedThreads == null) return;
         var savedReplies = await crawlPost.CrawlReplies
             ([savedThreads], fid, stoppingToken);
@@ -113,9 +113,9 @@ private async Task RetryReply(
     {
         logger.LogTrace("Retrying previous failed {} pages reply crawl for fid={}, tid={}",
             failureCount, fid, tid);
-        await using var crawlerFactory = replyCrawlFacadeFactory();
-        var crawler = crawlerFactory.Value(fid, tid);
-        var savedReplies = await crawler.RetryThenSave(pages, failureCountSelector, stoppingToken);
+        await using var facadeFactory = replyCrawlFacadeFactory();
+        var facade = facadeFactory.Value(fid, tid);
+        var savedReplies = await facade.RetryThenSave(pages, failureCountSelector, stoppingToken);
         if (savedReplies == null) return;
         var savedRepliesKeyByTid = new Dictionary> {{tid, savedReplies}};
         await crawlPost.CrawlSubReplies(savedRepliesKeyByTid, fid, stoppingToken);
@@ -130,8 +130,8 @@ private async Task RetrySubReply(
     {
         logger.LogTrace("Retrying previous failed {} pages sub reply crawl for fid={}, tid={}, pid={}",
             failureCount, fid, tid, pid);
-        await using var crawlerFactory = subReplyCrawlFacadeFactory();
-        var crawler = crawlerFactory.Value(fid, tid, pid);
-        _ = await crawler.RetryThenSave(pages, failureCountSelector, stoppingToken);
+        await using var facadeFactory = subReplyCrawlFacadeFactory();
+        var facade = facadeFactory.Value(fid, tid, pid);
+        _ = await facade.RetryThenSave(pages, failureCountSelector, stoppingToken);
     }
 }
diff --git a/c#/imagePipeline/src/Consumer/MetadataConsumer.cs b/c#/imagePipeline/src/Consumer/MetadataConsumer.cs
index a4bc6867..4f298656 100644
--- a/c#/imagePipeline/src/Consumer/MetadataConsumer.cs
+++ b/c#/imagePipeline/src/Consumer/MetadataConsumer.cs
@@ -290,7 +290,7 @@ private static partial class ExifDateTimeTagValuesParser
                 ?? ParseAsUnixTimestamp(exifDateTime)
                 ?? throw new ArgumentException(
                     $"Failed to parse provided EXIF date time \"{exifDateTime}\""
-                    + $" with fractional seconds {fractionalSeconds.ToString(CultureInfo.InvariantCulture)}.");
+                    + $" with fractional seconds {fractionalSeconds}.");
             return fractionalSeconds == 0 ? ret : ret with
             {
                 DateTime = ret.DateTime.AddSeconds(fractionalSeconds / Math.Pow(10, CountDigits(fractionalSeconds)))
@@ -376,6 +376,7 @@ private static partial class ExifDateTimeTagValuesParser
                 }
                 : null;

+        [SuppressMessage("Performance", "CA1852:Seal internal types")]
         public record DateTimeAndOffset(DateTime DateTime, string? Offset);
     }
 }
diff --git a/c#/imagePipeline/src/EntryPoint.cs b/c#/imagePipeline/src/EntryPoint.cs
index 862ca38d..c0d924b7 100644
--- a/c#/imagePipeline/src/EntryPoint.cs
+++ b/c#/imagePipeline/src/EntryPoint.cs
@@ -53,7 +53,7 @@ protected override void ConfigureServices(HostBuilderContext context, IServiceCo
     [SuppressMessage("Style", "IDE0058:Expression value is never used")]
     protected override void ConfigureContainer(HostBuilderContext context, ContainerBuilder builder)
     {
-        builder.RegisterImplementsOfBaseTypes([typeof(IConsumer<>)]);
+        builder.RegisterImplementsOfBaseTypes(typeof(EntryPoint).Assembly, [typeof(IConsumer<>)]);
         builder.RegisterType();
         builder.RegisterType();
         builder.RegisterType();
diff --git a/c#/imagePipeline/src/ImageBatchConsumingWorker.cs b/c#/imagePipeline/src/ImageBatchConsumingWorker.cs
index 2cf61705..94ac996d 100644
--- a/c#/imagePipeline/src/ImageBatchConsumingWorker.cs
+++ b/c#/imagePipeline/src/ImageBatchConsumingWorker.cs
@@ -162,7 +162,7 @@ ImageKeyWithMatrix DecodeFrame(ImageFrame frame, int frameIndex)
 #pragma warning restore IDISP001 // Dispose created
             return frameMat.Empty()
                 ? throw new InvalidOperationException(
-                    $"Failed to decode frame {frameIndex.ToString(CultureInfo.InvariantCulture)} of image {imageId}.")
+                    $"Failed to decode frame {frameIndex} of image {imageId}.")
                 : new(imageId, (uint)frameIndex, frameMat);
         }
diff --git a/c#/shared/src/ExtensionMethods.cs b/c#/shared/src/ExtensionMethods.cs
index 377ea9ab..01e42b2f 100644
--- a/c#/shared/src/ExtensionMethods.cs
+++ b/c#/shared/src/ExtensionMethods.cs
@@ -30,8 +30,9 @@ public static void AddRange(this IList list, IEnumerable items)
 }

 public static partial class ExtensionMethods
 {
-    public static void RegisterImplementsOfBaseTypes(this ContainerBuilder builder, IEnumerable<Type> baseTypes) =>
-        builder.RegisterAssemblyTypes(Assembly.GetExecutingAssembly())
+    public static void RegisterImplementsOfBaseTypes
+        (this ContainerBuilder builder, Assembly assembly, IEnumerable<Type> baseTypes) =>
+        builder.RegisterAssemblyTypes(assembly)
             .Where(type => baseTypes.Any(baseType => baseType.IsSubTypeOfRawGeneric(type)))
             .AsSelf();
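
Note on the RegisterImplementsOfBaseTypes change in c#/shared/src/ExtensionMethods.cs: Assembly.GetExecutingAssembly() runs inside the shared assembly, so it returns the shared assembly itself rather than the crawler or imagePipeline assembly that actually contains the implementors to scan; passing the caller's assembly explicitly is presumably what this signature change addresses. A minimal usage sketch under that assumption (the bootstrap code around the call is illustrative only; the call itself mirrors the + line in imagePipeline/src/EntryPoint.cs):

    using Autofac;

    // The caller identifies its own assembly via typeof(EntryPoint).Assembly,
    // so Autofac scans the application assembly instead of the shared library
    // that merely defines the extension method.
    var builder = new ContainerBuilder();
    builder.RegisterImplementsOfBaseTypes(typeof(EntryPoint).Assembly, [typeof(IConsumer<>)]);
    using var container = builder.Build();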