diff --git a/Changelog.md b/Changelog.md
index 66ea9dd1eb..0593c18c64 100644
--- a/Changelog.md
+++ b/Changelog.md
@@ -3,6 +3,7 @@
 ## 3.9.48
 - General: Fix a bug where directory traversal could fail if the user does not have permission to read a directory ([#1508](https://github.com/fossas/fossa-cli/pull/1508)).
+- Performance: Fix timeout issues when uploading large numbers of license scans by processing them in smaller batches ([#1509](https://github.com/fossas/fossa-cli/pull/1509)).
 
 ## 3.9.47
 - Licensing: Adds support for Zeebe Community License v1.1 and Camunda License v1.0
diff --git a/src/Control/Carrier/FossaApiClient/Internal/FossaAPIV1.hs b/src/Control/Carrier/FossaApiClient/Internal/FossaAPIV1.hs
index 0068375888..69e36d9bad 100644
--- a/src/Control/Carrier/FossaApiClient/Internal/FossaAPIV1.hs
+++ b/src/Control/Carrier/FossaApiClient/Internal/FossaAPIV1.hs
@@ -179,11 +179,13 @@ import Fossa.API.Types (
   SignedURLWithKey (surlwkKey, surlwkSignedURL),
   TokenTypeResponse,
   UploadResponse,
+  chunkArchiveComponents,
   useApiOpts,
 )
 
 import Control.Effect.Reader
 import Data.Foldable (traverse_)
+import Data.List.Extra (chunk)
 import Fossa.API.CoreTypes qualified as CoreTypes
 import Network.HTTP.Client (responseStatus)
 import Network.HTTP.Client qualified as C
@@ -652,24 +654,25 @@ uploadNativeContainerScan apiOpts ProjectRevision{..} metadata scan =
     (baseUrl, baseOpts) <- useApiOpts apiOpts
     let locator = renderLocator $ Locator "custom" projectName (Just projectRevision)
         opts =
-          "locator" =: locator
+          baseOpts
+            <> "locator" =: locator
             <> "cliVersion" =: cliVersion
             <> "managedBuild" =: True
             <> maybe mempty ("branch" =:) projectBranch
             <> "scanType" =: ("native" :: Text)
             <> mkMetadataOpts metadata projectName
-        uploadScan url containerScan = req POST url (ReqBodyJson containerScan) jsonResponse (baseOpts <> opts)
-        sparkleAnalysisUrl = containerUploadUrl Sparkle baseUrl
-
-    resp <-
-      ( warnOnErr @Text "Container scan upload to new analysis service failed, falling back to core analysis."
-          . errCtx ("Upload to new analysis service at " <> renderUrl sparkleAnalysisUrl)
-          $ uploadScan sparkleAnalysisUrl scan
-      )
-        <||> context "Upload to CORE analysis service" (uploadScan (containerUploadUrl Core baseUrl) scan)
-
+    resp <- uploadToSparkle baseUrl opts <||> uploadToCore baseUrl opts
     pure $ responseBody resp
+  where
+    uploadScan url opts containerScan = req POST url (ReqBodyJson containerScan) jsonResponse opts
+    sparkleAnalysisUrl = containerUploadUrl Sparkle
+    coreAnalysisUrl = containerUploadUrl Core
+    uploadToSparkle baseUrl opts =
+      warnOnErr @Text "Container scan upload to new analysis service failed, falling back to core analysis."
+        . errCtx ("Upload to new analysis service at " <> renderUrl (sparkleAnalysisUrl baseUrl))
+        $ uploadScan (sparkleAnalysisUrl baseUrl) opts scan
+    uploadToCore baseUrl opts = context "Upload to CORE analysis service" $ uploadScan (coreAnalysisUrl baseUrl) opts scan
 
 -- | Replacement for @Data.HTTP.Req.req@ that additionally logs information about a request in a debug bundle.
 req ::
@@ -1015,8 +1018,9 @@ licenseScanFinalize ::
   ArchiveComponents ->
   m ()
 licenseScanFinalize apiOpts archiveProjects = do
-  _ <- licenseScanFinalize' apiOpts archiveProjects
-  pure ()
+  -- The latency for requests containing hundreds of license scans is too high, leading to spurious error messages.
+  -- https://fossa.atlassian.net/browse/ANE-2272
+  traverse_ (licenseScanFinalize' apiOpts) $ chunkArchiveComponents 100 archiveProjects
 
 licenseScanFinalize' ::
   APIClientEffs sig m =>
@@ -1580,12 +1584,18 @@ finalizePathDependencyScan ::
   m (Maybe ())
 finalizePathDependencyScan apiOpts locators forceRebuild = runEmpty $
   fossaReqAllow401 $ do
+    -- The latency for requests containing hundreds of license scans is too high, leading to spurious error messages.
+    -- Reference: https://fossa.atlassian.net/browse/ANE-2272
+    --
+    -- No similar issue was reported for this method,
+    -- but since it uses the same approach, the requests are batched here as well as a precaution.
     (baseUrl, baseOpts) <- useApiOpts apiOpts
-    let req' = PathDependencyFinalizeReq locators forceRebuild
-    _ <-
+    traverse_ (finalize baseUrl baseOpts) $ chunk 100 locators
+  where
+    mkReq locs = PathDependencyFinalizeReq locs forceRebuild
+    finalize baseUrl baseOpts locs =
       context "Queuing a build for all license scan uploads" $
-        req POST (pathDependencyFinalizeUrl baseUrl) (ReqBodyJson req') ignoreResponse (baseOpts)
-    pure ()
+        req POST (pathDependencyFinalizeUrl baseUrl) (ReqBodyJson (mkReq locs)) ignoreResponse (baseOpts)
 
 alreadyAnalyzedPathRevisionURLEndpoint :: Url 'Https -> Locator -> Url 'Https
 alreadyAnalyzedPathRevisionURLEndpoint baseUrl locator = baseUrl /: "api" /: "cli" /: "path_dependency_scan" /: renderLocator locator /: "analyzed"
diff --git a/src/Data/List/Extra.hs b/src/Data/List/Extra.hs
index b764da93ec..a17134d0fd 100644
--- a/src/Data/List/Extra.hs
+++ b/src/Data/List/Extra.hs
@@ -2,6 +2,7 @@ module Data.List.Extra (
   (!?),
   head',
   singleton,
+  chunk,
 ) where
 
 import Data.Maybe (listToMaybe)
@@ -21,3 +22,11 @@ head' = listToMaybe
 -- | Create a one-item list from the item given
 singleton :: a -> [a]
 singleton = (: [])
+
+-- | Chunk the list into a list of lists, each of at most the given size.
+-- If the chunk size is 0 or lower, returns a single chunk containing the entire list.
+chunk :: Int -> [a] -> [[a]]
+chunk _ [] = []
+chunk size as
+  | size <= 0 = [as]
+  | otherwise = take size as : chunk size (drop size as)
diff --git a/src/Fossa/API/Types.hs b/src/Fossa/API/Types.hs
index 1b0b14f21a..50935f7d80 100644
--- a/src/Fossa/API/Types.hs
+++ b/src/Fossa/API/Types.hs
@@ -44,6 +44,7 @@ module Fossa.API.Types (
   defaultApiPollDelay,
   blankOrganization,
   orgFileUpload,
+  chunkArchiveComponents,
 ) where
 
 import App.Fossa.Lernie.Types (GrepEntry)
@@ -66,7 +67,7 @@ import Data.Aeson (
 import Data.Coerce (coerce)
 import Data.Function (on)
 import Data.List (sort, sortBy)
-import Data.List.Extra ((!?))
+import Data.List.Extra (chunk, (!?))
 import Data.Map.Strict (Map)
 import Data.Map.Strict qualified as Map
 import Data.Maybe (catMaybes, fromMaybe)
@@ -149,6 +150,14 @@ data ArchiveComponents = ArchiveComponents
   }
   deriving (Eq, Ord, Show)
 
+-- | Split the @ArchiveComponents@ into chunks of the given size.
+-- If the chunk size is 0 or lower, returns a single chunk containing all of the archives.
+chunkArchiveComponents :: Int -> ArchiveComponents -> [ArchiveComponents]
+chunkArchiveComponents chunkSize ArchiveComponents{..} = map mkComponent $ chunk chunkSize archiveComponentsArchives
+  where
+    mkComponent :: [Archive] -> ArchiveComponents
+    mkComponent as = ArchiveComponents as archiveComponentsRebuild archiveComponentsUpload
+
 instance ToJSON ArchiveComponents where
   toJSON ArchiveComponents{..} =
     object
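
-- Note (not part of the patch above): a standalone sketch of the batching behavior this
-- diff introduces. `chunk` is copied from the new definition in src/Data/List/Extra.hs;
-- `ChunkSketch`, `sendBatch`, and the use of batch size 100 in `main` are illustrative
-- stand-ins for the real callers (licenseScanFinalize' and the `finalize` helper), which
-- additionally carry API options. Batching trades one large request for several smaller
-- ones, keeping each request short enough to avoid the timeouts described in ANE-2272.
module ChunkSketch (main) where

import Data.Foldable (traverse_)

-- Same definition as the chunk added to src/Data/List/Extra.hs in this diff.
chunk :: Int -> [a] -> [[a]]
chunk _ [] = []
chunk size as
  | size <= 0 = [as]
  | otherwise = take size as : chunk size (drop size as)

-- Stand-in for a finalize call: one request per batch.
sendBatch :: [a] -> IO ()
sendBatch batch = putStrLn ("would send one finalize request with " <> show (length batch) <> " items")

main :: IO ()
main = do
  print (map length (chunk 100 [1 .. 250 :: Int])) -- [100,100,50]
  print (map length (chunk 0 [1 .. 250 :: Int])) -- [250]: a non-positive size yields a single batch
  print (chunk 100 ([] :: [Int])) -- []: nothing to finalize, so no requests are made
  -- The new licenseScanFinalize / finalizePathDependencyScan follow this shape:
  traverse_ sendBatch (chunk 100 [1 .. 250 :: Int])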