diff --git a/generator/.DevConfigs/c49077d9-90b3-437f-b316-6d8d8833ae77.json b/generator/.DevConfigs/c49077d9-90b3-437f-b316-6d8d8833ae77.json new file mode 100644 index 000000000000..e75662d5e8d0 --- /dev/null +++ b/generator/.DevConfigs/c49077d9-90b3-437f-b316-6d8d8833ae77.json @@ -0,0 +1,12 @@ +{ + "services": [ + { + "serviceName": "S3", + "type": "minor", + "changeLogMessages": [ + "Add FailurePolicy property to TransferUtilityUploadDirectoryRequest to allow configuration of failure handling behavior during directory uploads. The default behavior is set to abort on failure. Users can now choose to either abort the entire operation or continue uploading remaining files when a failure occurs.", + "Add ObjectUploadFailedEvent event to TransferUtilityUploadDirectoryRequest to notify users when an individual file upload fails during a directory upload operation. This event provides details about the failed upload, including the original request, the specific file request and the exception encountered." + ] + } + ] +} \ No newline at end of file diff --git a/generator/ServiceClientGeneratorLib/Generators/SourceFiles/AssemblyInfo.cs b/generator/ServiceClientGeneratorLib/Generators/SourceFiles/AssemblyInfo.cs index 8eec59e0554a..5f1a040525fd 100644 --- a/generator/ServiceClientGeneratorLib/Generators/SourceFiles/AssemblyInfo.cs +++ b/generator/ServiceClientGeneratorLib/Generators/SourceFiles/AssemblyInfo.cs @@ -1,7 +1,7 @@ // ------------------------------------------------------------------------------ // // This code was generated by a tool. -// Runtime Version: 17.0.0.0 +// Runtime Version: 18.0.0.0 // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. @@ -15,8 +15,8 @@ namespace ServiceClientGenerator.Generators.SourceFiles /// Class to produce the template output /// - #line 1 "C:\dev\repos\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" - [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.TextTemplating", "17.0.0.0")] + #line 1 "D:\CodeBase\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.TextTemplating", "18.0.0.0")] public partial class AssemblyInfo : BaseGenerator { #line hidden @@ -36,35 +36,35 @@ public override string TransformText() // associated with an assembly. 
[assembly: AssemblyTitle("""); - #line 12 "C:\dev\repos\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" + #line 12 "D:\CodeBase\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" this.Write(this.ToStringHelper.ToStringWithCulture(this.Config.AssemblyTitle)); #line default #line hidden this.Write("\")]\r\n#if BCL\r\n[assembly: AssemblyDescription(\""); - #line 14 "C:\dev\repos\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" + #line 14 "D:\CodeBase\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" this.Write(this.ToStringHelper.ToStringWithCulture(this.Config.AssemblyDescription(versionIdentifier: "4.7.2"))); #line default #line hidden this.Write("\")]\r\n#elif NETSTANDARD20\r\n[assembly: AssemblyDescription(\""); - #line 16 "C:\dev\repos\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" + #line 16 "D:\CodeBase\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" this.Write(this.ToStringHelper.ToStringWithCulture(this.Config.AssemblyDescription(versionIdentifier: "NetStandard 2.0"))); #line default #line hidden this.Write("\")]\r\n#elif NETCOREAPP3_1\r\n[assembly: AssemblyDescription(\""); - #line 18 "C:\dev\repos\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" + #line 18 "D:\CodeBase\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" this.Write(this.ToStringHelper.ToStringWithCulture(this.Config.AssemblyDescription(versionIdentifier: ".NET Core 3.1"))); #line default #line hidden this.Write("\")]\r\n#elif NET8_0\r\n[assembly: AssemblyDescription(\""); - #line 20 "C:\dev\repos\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" + #line 20 "D:\CodeBase\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" this.Write(this.ToStringHelper.ToStringWithCulture(this.Config.AssemblyDescription(versionIdentifier: ".NET 8.0"))); #line default @@ -72,7 +72,7 @@ public override string TransformText() this.Write("\")]\r\n#else\r\n#error Unknown platform constant - unable to set correct AssemblyDesc" + "ription\r\n#endif\r\n\r\n"); - #line 25 "C:\dev\repos\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" + #line 25 "D:\CodeBase\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" if (this.Config.AssemblyTitle=="AWSSDK.DynamoDBv2") { #line default @@ -81,23 +81,43 @@ public override string TransformText() [assembly: InternalsVisibleTo(""AWSSDK.UnitTests.NetFramework, PublicKey=0024000004800000940000000602000000240000525341310004000001000100db5f59f098d27276c7833875a6263a3cc74ab17ba9a9df0b52aedbe7252745db7274d5271fd79c1f08f668ecfa8eaab5626fa76adc811d3c8fc55859b0d09d3bc0a84eecd0ba891f2b8a2fc55141cdcc37c2053d53491e650a479967c3622762977900eddbf1252ed08a2413f00a28f3a0752a81203f03ccb7f684db373518b4"")] "); - #line 28 "C:\dev\repos\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" + #line 28 "D:\CodeBase\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" } #line default #line hidden - #line 29 "C:\dev\repos\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" + #line 29 
"D:\CodeBase\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" if (this.Config.AssemblyTitle=="AWSSDK.S3") { #line default #line hidden - this.Write(@"[assembly: InternalsVisibleTo(""AWSSDK.UnitTests.S3.NetFramework, PublicKey=0024000004800000940000000602000000240000525341310004000001000100db5f59f098d27276c7833875a6263a3cc74ab17ba9a9df0b52aedbe7252745db7274d5271fd79c1f08f668ecfa8eaab5626fa76adc811d3c8fc55859b0d09d3bc0a84eecd0ba891f2b8a2fc55141cdcc37c2053d53491e650a479967c3622762977900eddbf1252ed08a2413f00a28f3a0752a81203f03ccb7f684db373518b4"")] -[assembly: InternalsVisibleTo(""AWSSDK.UnitTests.NetFramework, PublicKey=0024000004800000940000000602000000240000525341310004000001000100db5f59f098d27276c7833875a6263a3cc74ab17ba9a9df0b52aedbe7252745db7274d5271fd79c1f08f668ecfa8eaab5626fa76adc811d3c8fc55859b0d09d3bc0a84eecd0ba891f2b8a2fc55141cdcc37c2053d53491e650a479967c3622762977900eddbf1252ed08a2413f00a28f3a0752a81203f03ccb7f684db373518b4"")] -[assembly: InternalsVisibleTo(""DynamicProxyGenAssembly2, PublicKey=0024000004800000940000000602000000240000525341310004000001000100c547cac37abd99c8db225ef2f6c8a3602f3b3606cc9891605d02baa56104f4cfc0734aa39b93bf7852f7d9266654753cc297e7d2edfe0bac1cdcf9f717241550e0a7b191195b7667bb4f64bcb8e2121380fd1d9d46ad2d92d2d15605093924cceaf74c4861eff62abf69b9291ed0a340e113be11e6a7d3113e92484cf7045cc7"")] -"); - - #line 33 "C:\dev\repos\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" + this.Write("[assembly: InternalsVisibleTo(\"AWSSDK.UnitTests.S3.NetFramework, PublicKey=002400" + + "0004800000940000000602000000240000525341310004000001000100db5f59f098d27276c78338" + + "75a6263a3cc74ab17ba9a9df0b52aedbe7252745db7274d5271fd79c1f08f668ecfa8eaab5626fa7" + + "6adc811d3c8fc55859b0d09d3bc0a84eecd0ba891f2b8a2fc55141cdcc37c2053d53491e650a4799" + + "67c3622762977900eddbf1252ed08a2413f00a28f3a0752a81203f03ccb7f684db373518b4\")]\r\n[" + + "assembly: InternalsVisibleTo(\"AWSSDK.UnitTests.NetFramework, PublicKey=002400000" + + "4800000940000000602000000240000525341310004000001000100db5f59f098d27276c7833875a" + + "6263a3cc74ab17ba9a9df0b52aedbe7252745db7274d5271fd79c1f08f668ecfa8eaab5626fa76ad" + + "c811d3c8fc55859b0d09d3bc0a84eecd0ba891f2b8a2fc55141cdcc37c2053d53491e650a479967c" + + "3622762977900eddbf1252ed08a2413f00a28f3a0752a81203f03ccb7f684db373518b4\")]\r\n\r\n//" + + " We should remove this in the future when TransferUtility Upload/Download direct" + + "ory methods return responses.\r\n// We should update the Integration Tests in Tran" + + "sferUtilityTests.cs to not use the internal methods and instead use the new publ" + + "ic ones that return responses.\r\n[assembly: InternalsVisibleTo(\"AWSSDK.Integratio" + + "nTests.S3.NetFramework, PublicKey=0024000004800000940000000602000000240000525341" + + "310004000001000100db5f59f098d27276c7833875a6263a3cc74ab17ba9a9df0b52aedbe7252745" + + "db7274d5271fd79c1f08f668ecfa8eaab5626fa76adc811d3c8fc55859b0d09d3bc0a84eecd0ba89" + + "1f2b8a2fc55141cdcc37c2053d53491e650a479967c3622762977900eddbf1252ed08a2413f00a28" + + "f3a0752a81203f03ccb7f684db373518b4\")]\r\n[assembly: InternalsVisibleTo(\"DynamicPro" + + "xyGenAssembly2, PublicKey=002400000480000094000000060200000024000052534131000400" + + "0001000100c547cac37abd99c8db225ef2f6c8a3602f3b3606cc9891605d02baa56104f4cfc0734a" + + "a39b93bf7852f7d9266654753cc297e7d2edfe0bac1cdcf9f717241550e0a7b191195b7667bb4f64" + + "bcb8e2121380fd1d9d46ad2d92d2d15605093924cceaf74c4861eff62abf69b9291ed0a340e113be" + + 
"11e6a7d3113e92484cf7045cc7\")]\r\n"); + + #line 37 "D:\CodeBase\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" } #line default @@ -126,14 +146,14 @@ public override string TransformText() // [assembly: AssemblyVersion(""1.0.*"")] [assembly: AssemblyVersion("""); - #line 56 "C:\dev\repos\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" + #line 60 "D:\CodeBase\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" this.Write(this.ToStringHelper.ToStringWithCulture(this.Config.ServiceVersion)); #line default #line hidden this.Write("\")]\r\n[assembly: AssemblyFileVersion(\""); - #line 57 "C:\dev\repos\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" + #line 61 "D:\CodeBase\aws-sdk-net\generator\ServiceClientGeneratorLib\Generators\SourceFiles\AssemblyInfo.tt" this.Write(this.ToStringHelper.ToStringWithCulture(this.Config.ServiceFileVersion)); #line default diff --git a/generator/ServiceClientGeneratorLib/Generators/SourceFiles/AssemblyInfo.tt b/generator/ServiceClientGeneratorLib/Generators/SourceFiles/AssemblyInfo.tt index 31dceb950beb..fbed276512ca 100644 --- a/generator/ServiceClientGeneratorLib/Generators/SourceFiles/AssemblyInfo.tt +++ b/generator/ServiceClientGeneratorLib/Generators/SourceFiles/AssemblyInfo.tt @@ -29,6 +29,10 @@ using System.Runtime.CompilerServices; <# if (this.Config.AssemblyTitle=="AWSSDK.S3") { #> [assembly: InternalsVisibleTo("AWSSDK.UnitTests.S3.NetFramework, PublicKey=0024000004800000940000000602000000240000525341310004000001000100db5f59f098d27276c7833875a6263a3cc74ab17ba9a9df0b52aedbe7252745db7274d5271fd79c1f08f668ecfa8eaab5626fa76adc811d3c8fc55859b0d09d3bc0a84eecd0ba891f2b8a2fc55141cdcc37c2053d53491e650a479967c3622762977900eddbf1252ed08a2413f00a28f3a0752a81203f03ccb7f684db373518b4")] [assembly: InternalsVisibleTo("AWSSDK.UnitTests.NetFramework, PublicKey=0024000004800000940000000602000000240000525341310004000001000100db5f59f098d27276c7833875a6263a3cc74ab17ba9a9df0b52aedbe7252745db7274d5271fd79c1f08f668ecfa8eaab5626fa76adc811d3c8fc55859b0d09d3bc0a84eecd0ba891f2b8a2fc55141cdcc37c2053d53491e650a479967c3622762977900eddbf1252ed08a2413f00a28f3a0752a81203f03ccb7f684db373518b4")] + +// We should remove this in the future when TransferUtility Upload/Download directory methods return responses. +// We should update the Integration Tests in TransferUtilityTests.cs to not use the internal methods and instead use the new public ones that return responses. 
+[assembly: InternalsVisibleTo("AWSSDK.IntegrationTests.S3.NetFramework, PublicKey=0024000004800000940000000602000000240000525341310004000001000100db5f59f098d27276c7833875a6263a3cc74ab17ba9a9df0b52aedbe7252745db7274d5271fd79c1f08f668ecfa8eaab5626fa76adc811d3c8fc55859b0d09d3bc0a84eecd0ba891f2b8a2fc55141cdcc37c2053d53491e650a479967c3622762977900eddbf1252ed08a2413f00a28f3a0752a81203f03ccb7f684db373518b4")] [assembly: InternalsVisibleTo("DynamicProxyGenAssembly2, PublicKey=0024000004800000940000000602000000240000525341310004000001000100c547cac37abd99c8db225ef2f6c8a3602f3b3606cc9891605d02baa56104f4cfc0734aa39b93bf7852f7d9266654753cc297e7d2edfe0bac1cdcf9f717241550e0a7b191195b7667bb4f64bcb8e2121380fd1d9d46ad2d92d2d15605093924cceaf74c4861eff62abf69b9291ed0a340e113be11e6a7d3113e92484cf7045cc7")] <# } #> [assembly: AssemblyConfiguration("")] diff --git a/sdk/src/Services/S3/Custom/Transfer/Model/DirectoryResult.cs b/sdk/src/Services/S3/Custom/Transfer/DirectoryResult.cs similarity index 97% rename from sdk/src/Services/S3/Custom/Transfer/Model/DirectoryResult.cs rename to sdk/src/Services/S3/Custom/Transfer/DirectoryResult.cs index 3f8cbd84fb2e..5329b21e07f2 100644 --- a/sdk/src/Services/S3/Custom/Transfer/Model/DirectoryResult.cs +++ b/sdk/src/Services/S3/Custom/Transfer/DirectoryResult.cs @@ -20,7 +20,7 @@ * */ -namespace Amazon.S3.Transfer.Model +namespace Amazon.S3.Transfer { /// /// Overall outcome of a directory operation. diff --git a/sdk/src/Services/S3/Custom/Transfer/Model/FailurePolicy.cs b/sdk/src/Services/S3/Custom/Transfer/FailurePolicy.cs similarity index 97% rename from sdk/src/Services/S3/Custom/Transfer/Model/FailurePolicy.cs rename to sdk/src/Services/S3/Custom/Transfer/FailurePolicy.cs index fbb265ca1103..5bf16b176a75 100644 --- a/sdk/src/Services/S3/Custom/Transfer/Model/FailurePolicy.cs +++ b/sdk/src/Services/S3/Custom/Transfer/FailurePolicy.cs @@ -20,7 +20,7 @@ * */ -namespace Amazon.S3.Transfer.Model +namespace Amazon.S3.Transfer { /// /// Specifies the policy to apply when a failure occurs during a directory transfer operation. 
diff --git a/sdk/src/Services/S3/Custom/Transfer/Internal/DownloadDirectoryCommand.cs b/sdk/src/Services/S3/Custom/Transfer/Internal/DownloadDirectoryCommand.cs index d932a282b137..687167ab1dc1 100644 --- a/sdk/src/Services/S3/Custom/Transfer/Internal/DownloadDirectoryCommand.cs +++ b/sdk/src/Services/S3/Custom/Transfer/Internal/DownloadDirectoryCommand.cs @@ -31,7 +31,6 @@ using Amazon.S3.Util; using Amazon.Util.Internal; using Amazon.Runtime; -using Amazon.S3.Transfer.Model; namespace Amazon.S3.Transfer.Internal { diff --git a/sdk/src/Services/S3/Custom/Transfer/Internal/UploadDirectoryCommand.cs b/sdk/src/Services/S3/Custom/Transfer/Internal/UploadDirectoryCommand.cs index 693a9ef8325a..148e34798d47 100644 --- a/sdk/src/Services/S3/Custom/Transfer/Internal/UploadDirectoryCommand.cs +++ b/sdk/src/Services/S3/Custom/Transfer/Internal/UploadDirectoryCommand.cs @@ -20,6 +20,7 @@ * */ using System; +using System.Collections.Concurrent; using System.Collections.Generic; using System.IO; using System.Text; @@ -34,12 +35,15 @@ namespace Amazon.S3.Transfer.Internal /// internal partial class UploadDirectoryCommand : BaseCommand { + private IFailurePolicy _failurePolicy; + private ConcurrentBag _errors = new ConcurrentBag(); TransferUtilityUploadDirectoryRequest _request; TransferUtility _utility; TransferUtilityConfig _config; int _totalNumberOfFiles; int _numberOfFilesUploaded; + int _numberOfFilesSuccessfullyUploaded; long _totalBytes; long _transferredBytes; @@ -48,6 +52,10 @@ internal UploadDirectoryCommand(TransferUtility utility, TransferUtilityConfig c this._utility = utility; this._request = request; this._config = config; + _failurePolicy = + request.FailurePolicy == FailurePolicy.AbortOnFailure + ? new AbortOnFailurePolicy() + : new ContinueOnFailurePolicy(_errors); } internal TransferUtilityUploadRequest ConstructRequest(string basePath, string filepath, string prefix) diff --git a/sdk/src/Services/S3/Custom/Transfer/Internal/_async/BaseCommand.async.cs b/sdk/src/Services/S3/Custom/Transfer/Internal/_async/BaseCommand.async.cs index a687917f7d9f..a7a58a4b02c5 100644 --- a/sdk/src/Services/S3/Custom/Transfer/Internal/_async/BaseCommand.async.cs +++ b/sdk/src/Services/S3/Custom/Transfer/Internal/_async/BaseCommand.async.cs @@ -13,12 +13,6 @@ * permissions and limitations under the License. */ -using Amazon.S3.Model; -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Text; using System.Threading; using System.Threading.Tasks; @@ -30,24 +24,5 @@ internal abstract partial class BaseCommand where TResponse : class /// Executes the command and returns a typed response /// public abstract Task ExecuteAsync(CancellationToken cancellationToken); - - protected static async Task ExecuteCommandAsync(BaseCommand command, CancellationTokenSource internalCts) where T : class - { - try - { - await command.ExecuteAsync(internalCts.Token) - .ConfigureAwait(continueOnCapturedContext: false); - } - catch (Exception exception) - { - if (!(exception is OperationCanceledException)) - { - // Cancel scheduling any more tasks. - // Cancel other upload requests. 
- internalCts.Cancel(); - } - throw; - } - } } } diff --git a/sdk/src/Services/S3/Custom/Transfer/Internal/_bcl+netstandard/DownloadDirectoryCommand.cs b/sdk/src/Services/S3/Custom/Transfer/Internal/_bcl+netstandard/DownloadDirectoryCommand.cs index 11c210a95cd9..7c19d78ed2c3 100644 --- a/sdk/src/Services/S3/Custom/Transfer/Internal/_bcl+netstandard/DownloadDirectoryCommand.cs +++ b/sdk/src/Services/S3/Custom/Transfer/Internal/_bcl+netstandard/DownloadDirectoryCommand.cs @@ -15,7 +15,6 @@ using Amazon.S3.Model; using Amazon.S3.Util; -using Amazon.S3.Transfer.Model; using System; using System.Collections.Concurrent; using System.Collections.Generic; diff --git a/sdk/src/Services/S3/Custom/Transfer/Internal/_bcl+netstandard/UploadDirectoryCommand.cs b/sdk/src/Services/S3/Custom/Transfer/Internal/_bcl+netstandard/UploadDirectoryCommand.cs index 07c71c27363f..e56c811fbb36 100644 --- a/sdk/src/Services/S3/Custom/Transfer/Internal/_bcl+netstandard/UploadDirectoryCommand.cs +++ b/sdk/src/Services/S3/Custom/Transfer/Internal/_bcl+netstandard/UploadDirectoryCommand.cs @@ -20,23 +20,29 @@ using System.Text; using System.Threading; using System.Threading.Tasks; +using Amazon.Runtime.Internal.Util; namespace Amazon.S3.Transfer.Internal { internal partial class UploadDirectoryCommand : BaseCommand { public bool UploadFilesConcurrently { get; set; } + private readonly Logger _logger = Logger.GetLogger(typeof(UploadDirectoryCommand)); public override async Task ExecuteAsync(CancellationToken cancellationToken) { string prefix = GetKeyPrefix(); - string basePath = new DirectoryInfo(this._request.Directory).FullName; + _logger.DebugFormat("UploadDirectoryCommand starting. BasePath={0}, Prefix={1}, UploadFilesConcurrently={2}, ConcurrentServiceRequests={3}", + basePath, prefix, UploadFilesConcurrently, this._config.ConcurrentServiceRequests); + string[] filePaths = await GetFiles(basePath, this._request.SearchPattern, this._request.SearchOption, cancellationToken) - .ConfigureAwait(continueOnCapturedContext: false); + .ConfigureAwait(continueOnCapturedContext: false); this._totalNumberOfFiles = filePaths.Length; + _logger.DebugFormat("Discovered {0} file(s) to upload. TotalBytes={1}", _totalNumberOfFiles, _totalBytes); + // Two-level throttling architecture: // 1. File-level throttler: Controls how many files are uploaded concurrently // 2. HTTP-level throttler: Controls total HTTP requests across ALL file uploads @@ -54,11 +60,12 @@ public override async Task ExecuteAsync( try { var pendingTasks = new List(); - + // File-level throttler: Controls concurrent file operations - fileOperationThrottler = UploadFilesConcurrently ? + fileOperationThrottler = UploadFilesConcurrently ? new SemaphoreSlim(this._config.ConcurrentServiceRequests) : new SemaphoreSlim(1); + _logger.DebugFormat("Created fileOperationThrottler with initial count={0}", UploadFilesConcurrently ? this._config.ConcurrentServiceRequests : 1); // HTTP-level throttler: Shared across all uploads to control total HTTP concurrency sharedHttpRequestThrottler = this._utility.S3Client is Amazon.S3.Internal.IAmazonS3Encryption ? @@ -69,12 +76,22 @@ public override async Task ExecuteAsync( // Use a throttler which will be shared between simple and multipart uploads // to control total concurrent HTTP requests across all file operations. new SemaphoreSlim(this._config.ConcurrentServiceRequests); - + if (sharedHttpRequestThrottler == null) + { + _logger.Debug(null, "sharedHttpRequestThrottler disabled due to encryption client. 
Multipart uploads will be serial per file."); + } + else + { + _logger.DebugFormat("Created sharedHttpRequestThrottler with initial count={0}", this._config.ConcurrentServiceRequests); + } internalCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken); + foreach (string filepath in filePaths) { + _logger.DebugFormat("Waiting for fileOperationThrottler to schedule file."); await fileOperationThrottler.WaitAsync(cancellationToken).ConfigureAwait(continueOnCapturedContext: false); + _logger.DebugFormat("Acquired fileOperationThrottler. Currently scheduled: {0}", pendingTasks.Count + 1); try { @@ -85,30 +102,69 @@ public override async Task ExecuteAsync( // don't schedule any more upload tasks. // Don't throw an OperationCanceledException here as we want to process the // responses and throw the original exception. + _logger.Debug(null, "Internal cancellation requested; breaking out of scheduling loop."); break; } + var uploadRequest = ConstructRequest(basePath, filepath, prefix); - var uploadCommand = _utility.GetUploadCommand(uploadRequest, sharedHttpRequestThrottler); - var task = ExecuteCommandAsync(uploadCommand, internalCts); + Action<Exception> onFailure = (ex) => + { + this._request.OnRaiseObjectUploadFailedEvent( + new ObjectUploadFailedEventArgs( + this._request, + uploadRequest, + ex)); + }; + + var task = _failurePolicy.ExecuteAsync( + async () => { + _logger.DebugFormat("Starting upload command"); + var command = _utility.GetUploadCommand(uploadRequest, sharedHttpRequestThrottler); + await command.ExecuteAsync(internalCts.Token) + .ConfigureAwait(false); + var uploaded = Interlocked.Increment(ref _numberOfFilesSuccessfullyUploaded); + _logger.DebugFormat("Completed upload. FilesSuccessfullyUploaded={0}", uploaded); + }, + onFailure, + internalCts + ); + pendingTasks.Add(task); + _logger.DebugFormat("Scheduled upload task. PendingTasks={0}", pendingTasks.Count); } finally { fileOperationThrottler.Release(); } } + + _logger.DebugFormat("Awaiting completion of {0} scheduled task(s)", pendingTasks.Count); await TaskHelpers.WhenAllOrFirstExceptionAsync(pendingTasks, cancellationToken) .ConfigureAwait(continueOnCapturedContext: false); } finally - { + { internalCts.Dispose(); fileOperationThrottler.Dispose(); sharedHttpRequestThrottler?.Dispose(); + _logger.DebugFormat("UploadDirectoryCommand finished. FilesSuccessfullyUploaded={0}", _numberOfFilesSuccessfullyUploaded); } - return new TransferUtilityUploadDirectoryResponse(); + var response = new TransferUtilityUploadDirectoryResponse + { + ObjectsUploaded = _numberOfFilesSuccessfullyUploaded, + ObjectsFailed = _errors.Count, + Errors = _errors.ToList(), + Result = _errors.Count == 0 ? + DirectoryResult.Success : + (_numberOfFilesSuccessfullyUploaded > 0 ?
+ DirectoryResult.PartialSuccess : + DirectoryResult.Failure) + }; + + _logger.DebugFormat("Response summary: Uploaded={0}, Failed={1}, Result={2}", response.ObjectsUploaded, response.ObjectsFailed, response.Result); + return response; } private Task GetFiles(string path, string searchPattern, SearchOption searchOption, CancellationToken cancellationToken) diff --git a/sdk/src/Services/S3/Custom/Transfer/TransferUtilityDownloadDirectoryRequest.cs b/sdk/src/Services/S3/Custom/Transfer/TransferUtilityDownloadDirectoryRequest.cs index 12dffa4b2b86..9931d29c1e8c 100644 --- a/sdk/src/Services/S3/Custom/Transfer/TransferUtilityDownloadDirectoryRequest.cs +++ b/sdk/src/Services/S3/Custom/Transfer/TransferUtilityDownloadDirectoryRequest.cs @@ -30,7 +30,6 @@ using Amazon.Runtime.Internal; using System.Globalization; using System.Threading; -using Amazon.S3.Transfer.Model; namespace Amazon.S3.Transfer diff --git a/sdk/src/Services/S3/Custom/Transfer/TransferUtilityDownloadDirectoryResponse.cs b/sdk/src/Services/S3/Custom/Transfer/TransferUtilityDownloadDirectoryResponse.cs index 1bed1f94ffb2..63533406b4d2 100644 --- a/sdk/src/Services/S3/Custom/Transfer/TransferUtilityDownloadDirectoryResponse.cs +++ b/sdk/src/Services/S3/Custom/Transfer/TransferUtilityDownloadDirectoryResponse.cs @@ -15,7 +15,6 @@ using System; using System.Collections.Generic; -using Amazon.S3.Transfer.Model; namespace Amazon.S3.Transfer { diff --git a/sdk/src/Services/S3/Custom/Transfer/TransferUtilityUploadDirectoryRequest.cs b/sdk/src/Services/S3/Custom/Transfer/TransferUtilityUploadDirectoryRequest.cs index cf7be9f65437..004e83d1f81d 100644 --- a/sdk/src/Services/S3/Custom/Transfer/TransferUtilityUploadDirectoryRequest.cs +++ b/sdk/src/Services/S3/Custom/Transfer/TransferUtilityUploadDirectoryRequest.cs @@ -42,6 +42,44 @@ public class TransferUtilityUploadDirectoryRequest : BaseUploadRequest string _keyPrefix; private bool _uploadFilesConcurrently = false; SearchOption _searchOption = SearchOption.TopDirectoryOnly; + private FailurePolicy failurePolicy = FailurePolicy.AbortOnFailure; + + /// + /// Gets or sets the failure policy for the upload directory operation. + /// Determines whether the operation should abort or continue when a failure occurs during upload. + /// The default value is . + /// + public FailurePolicy FailurePolicy + { + get { return this.failurePolicy; } + set { this.failurePolicy = value; } + } + + /// + /// Occurs when an individual object fails to upload during an UploadDirectory operation. + /// + /// + /// Subscribers will receive a instance containing + /// the original , the failed + /// , and the exception that caused the failure. + /// This event is raised on a background thread by the transfer utility. + /// + /// + /// request.ObjectUploadFailedEvent += (sender, args) => + /// { + /// // inspect args.DirectoryRequest, args.ObjectRequest, args.Exception + /// }; + /// + public event EventHandler ObjectUploadFailedEvent; + + /// + /// Internal helper used by the transfer implementation to raise the . + /// + /// The details of the failed object upload. + internal void OnRaiseObjectUploadFailedEvent(ObjectUploadFailedEventArgs args) + { + ObjectUploadFailedEvent?.Invoke(this, args); + } /// /// Gets or sets the directory where files are uploaded from. 
@@ -382,4 +420,73 @@ public UploadDirectoryFileRequestArgs(TransferUtilityUploadRequest request) /// public TransferUtilityUploadRequest UploadRequest { get; set; } } + + /// + /// Provides data for + /// which is raised when an individual object fails to upload during an + /// UploadDirectory operation. + /// + /// + /// Instances of this class are created by the transfer implementation and + /// passed to event subscribers. The instance contains the original directory + /// upload request (), + /// the per-object upload request that failed (), + /// and the exception that caused the failure. + /// + /// + /// + /// var request = new TransferUtilityUploadDirectoryRequest { /* ... */ }; + /// request.ObjectUploadFailedEvent += (sender, args) => + /// { + /// // args.DirectoryRequest: original directory request + /// // args.ObjectRequest: upload request for the failed object + /// // args.Exception: exception thrown during the object upload + /// Console.WriteLine($"Failed to upload {args.ObjectRequest.Key}: {args.Exception}"); + /// }; + /// + /// + public class ObjectUploadFailedEventArgs : EventArgs + { + /// + /// Initializes a new instance of the class. + /// + /// The original that initiated the directory upload. + /// The representing the individual object upload that failed. + /// The that caused the object upload to fail. + internal ObjectUploadFailedEventArgs( + TransferUtilityUploadDirectoryRequest directoryRequest, + TransferUtilityUploadRequest objectRequest, + Exception exception) + { + DirectoryRequest = directoryRequest; + ObjectRequest = objectRequest; + Exception = exception; + } + + /// + /// Gets the original that initiated the directory upload. + /// + /// + /// The directory-level request that configured the overall UploadDirectory operation. + /// + public TransferUtilityUploadDirectoryRequest DirectoryRequest { get; private set; } + + /// + /// Gets the for the individual object that failed to upload. + /// + /// + /// Contains per-object parameters such as the S3 key and version id (if set). + /// + public TransferUtilityUploadRequest ObjectRequest { get; private set; } + + /// + /// Gets the that caused the object upload to fail. + /// + /// + /// The exception thrown by the underlying upload operation. Can be an , + /// , , or other exception type depending + /// on the failure mode. + /// + public Exception Exception { get; private set; } + } } diff --git a/sdk/src/Services/S3/Custom/Transfer/TransferUtilityUploadDirectoryResponse.cs b/sdk/src/Services/S3/Custom/Transfer/TransferUtilityUploadDirectoryResponse.cs index 94f32558d1fb..2c3912207060 100644 --- a/sdk/src/Services/S3/Custom/Transfer/TransferUtilityUploadDirectoryResponse.cs +++ b/sdk/src/Services/S3/Custom/Transfer/TransferUtilityUploadDirectoryResponse.cs @@ -20,7 +20,8 @@ * */ -using Amazon.Runtime; +using System; +using System.Collections.Generic; namespace Amazon.S3.Transfer { @@ -30,6 +31,24 @@ namespace Amazon.S3.Transfer /// public class TransferUtilityUploadDirectoryResponse { - // Empty placeholder class - properties will be added in future iterations + /// + /// The number of objects that have been successfully uploaded. + /// + public long ObjectsUploaded { get; set; } + + /// + /// The number of objects that failed to upload. Zero if all succeeded. + /// + public long ObjectsFailed { get; set; } + + /// + /// The collection of exceptions encountered when uploading individual objects. + /// + public IList Errors { get; set; } + + /// + /// Overall result of the directory upload operation. 
+ /// + public DirectoryResult Result { get; set; } } } diff --git a/sdk/src/Services/S3/Properties/AssemblyInfo.cs b/sdk/src/Services/S3/Properties/AssemblyInfo.cs index 980e732be31d..9c65f527a8c4 100644 --- a/sdk/src/Services/S3/Properties/AssemblyInfo.cs +++ b/sdk/src/Services/S3/Properties/AssemblyInfo.cs @@ -21,6 +21,10 @@ [assembly: InternalsVisibleTo("AWSSDK.UnitTests.S3.NetFramework, PublicKey=0024000004800000940000000602000000240000525341310004000001000100db5f59f098d27276c7833875a6263a3cc74ab17ba9a9df0b52aedbe7252745db7274d5271fd79c1f08f668ecfa8eaab5626fa76adc811d3c8fc55859b0d09d3bc0a84eecd0ba891f2b8a2fc55141cdcc37c2053d53491e650a479967c3622762977900eddbf1252ed08a2413f00a28f3a0752a81203f03ccb7f684db373518b4")] [assembly: InternalsVisibleTo("AWSSDK.UnitTests.NetFramework, PublicKey=0024000004800000940000000602000000240000525341310004000001000100db5f59f098d27276c7833875a6263a3cc74ab17ba9a9df0b52aedbe7252745db7274d5271fd79c1f08f668ecfa8eaab5626fa76adc811d3c8fc55859b0d09d3bc0a84eecd0ba891f2b8a2fc55141cdcc37c2053d53491e650a479967c3622762977900eddbf1252ed08a2413f00a28f3a0752a81203f03ccb7f684db373518b4")] + +// We should remove this in the future when TransferUtility Upload/Download directory methods return responses. +// We should update the Integration Tests in TransferUtilityTests.cs to not use the internal methods and instead use the new public ones that return responses. +[assembly: InternalsVisibleTo("AWSSDK.IntegrationTests.S3.NetFramework, PublicKey=0024000004800000940000000602000000240000525341310004000001000100db5f59f098d27276c7833875a6263a3cc74ab17ba9a9df0b52aedbe7252745db7274d5271fd79c1f08f668ecfa8eaab5626fa76adc811d3c8fc55859b0d09d3bc0a84eecd0ba891f2b8a2fc55141cdcc37c2053d53491e650a479967c3622762977900eddbf1252ed08a2413f00a28f3a0752a81203f03ccb7f684db373518b4")] [assembly: InternalsVisibleTo("DynamicProxyGenAssembly2, PublicKey=0024000004800000940000000602000000240000525341310004000001000100c547cac37abd99c8db225ef2f6c8a3602f3b3606cc9891605d02baa56104f4cfc0734aa39b93bf7852f7d9266654753cc297e7d2edfe0bac1cdcf9f717241550e0a7b191195b7667bb4f64bcb8e2121380fd1d9d46ad2d92d2d15605093924cceaf74c4861eff62abf69b9291ed0a340e113be11e6a7d3113e92484cf7045cc7")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyProduct("Amazon Web Services SDK for .NET")] diff --git a/sdk/test/IntegrationTests/AWSSDK.IntegrationTestUtilities.NetFramework.csproj b/sdk/test/IntegrationTests/AWSSDK.IntegrationTestUtilities.NetFramework.csproj index 5fb4162b7a2d..53471f142b89 100644 --- a/sdk/test/IntegrationTests/AWSSDK.IntegrationTestUtilities.NetFramework.csproj +++ b/sdk/test/IntegrationTests/AWSSDK.IntegrationTestUtilities.NetFramework.csproj @@ -16,9 +16,23 @@ false false true + true CS1591,CS0612,CS0618 true + + + + + ../../awssdk.dll.snk + + + + + $(AWSKeyFile) + + + diff --git a/sdk/test/Services/S3/IntegrationTests/AWSSDK.IntegrationTests.S3.NetFramework.csproj b/sdk/test/Services/S3/IntegrationTests/AWSSDK.IntegrationTests.S3.NetFramework.csproj index 09d7ecd49090..832f59d0fece 100644 --- a/sdk/test/Services/S3/IntegrationTests/AWSSDK.IntegrationTests.S3.NetFramework.csproj +++ b/sdk/test/Services/S3/IntegrationTests/AWSSDK.IntegrationTests.S3.NetFramework.csproj @@ -1,67 +1,81 @@  - net472 - $(DefineConstants);DEBUG;TRACE;BCL;ASYNC_AWAIT;LOCAL_FILE - portable - false - AWSSDK.IntegrationTests.S3.NetFramework - AWSSDK.IntegrationTests.S3.NetFramework + net472 + $(DefineConstants);DEBUG;TRACE;BCL;ASYNC_AWAIT;LOCAL_FILE + portable + false + AWSSDK.IntegrationTests.S3.NetFramework + 
AWSSDK.IntegrationTests.S3.NetFramework - false - false - false - false - false - false - false - false - true - true - CS1591,CS0612,CS0618 + false + false + false + false + false + false + false + false + true + true + true + CS1591,CS0612,CS0618 + + + + + ../../../../awssdk.dll.snk + + + + + $(AWSKeyFile) + + + - - - + + + - - - + + + - - - + + + - - - - - - - + + + + + + + - - - - - + + + + + - + - + \ No newline at end of file diff --git a/sdk/test/Services/S3/IntegrationTests/TransferUtilityTests.cs b/sdk/test/Services/S3/IntegrationTests/TransferUtilityTests.cs index bc42edaba5e3..e3f1fa4a2272 100644 --- a/sdk/test/Services/S3/IntegrationTests/TransferUtilityTests.cs +++ b/sdk/test/Services/S3/IntegrationTests/TransferUtilityTests.cs @@ -2571,6 +2571,118 @@ public override long Length } } } + + [TestMethod] + [TestCategory("S3")] + public async Task UploadDirectoryFailurePolicy_ContinueOnFailure_AllFailures() + { + var nonExistentBucket = "non-existent-" + Guid.NewGuid().ToString("N"); + var directory = CreateTestDirectory(1 * KILO_SIZE, numberOfTestFiles: 3); + + try + { + using (var transferUtility = new TransferUtility(Client)) + { + var request = new TransferUtilityUploadDirectoryRequest + { + BucketName = nonExistentBucket, + Directory = directory.FullName, + SearchPattern = "*", + SearchOption = SearchOption.AllDirectories, + FailurePolicy = FailurePolicy.ContinueOnFailure, + UploadFilesConcurrently = true + }; + + // ContinueOnFailure should not throw even if all uploads fail + var config = new TransferUtilityConfig(); + var command = new Amazon.S3.Transfer.Internal.UploadDirectoryCommand(transferUtility, config, request); + command.UploadFilesConcurrently = request.UploadFilesConcurrently; + var response = await command.ExecuteAsync(CancellationToken.None).ConfigureAwait(false); + + Assert.IsNotNull(response); + Assert.AreEqual(0, response.ObjectsUploaded); + Assert.AreEqual(3, response.ObjectsFailed); + Assert.AreEqual(DirectoryResult.Failure, response.Result); + } + } + finally + { + try { Directory.Delete(directory.FullName, true); } catch { } + } + } + + [TestMethod] + [TestCategory("S3")] + public async Task UploadDirectoryFailurePolicy_ContinueOnFailure_AllSuccess() + { + var directory = CreateTestDirectory(1 * KILO_SIZE, numberOfTestFiles: 3); + try + { + using (var transferUtility = new TransferUtility(Client)) + { + var request = new TransferUtilityUploadDirectoryRequest + { + BucketName = bucketName, + Directory = directory.FullName, + KeyPrefix = directory.Name, + SearchPattern = "*", + SearchOption = SearchOption.AllDirectories, + FailurePolicy = FailurePolicy.ContinueOnFailure, + UploadFilesConcurrently = true + }; + + var config = new TransferUtilityConfig(); + var command = new Amazon.S3.Transfer.Internal.UploadDirectoryCommand(transferUtility, config, request); + command.UploadFilesConcurrently = request.UploadFilesConcurrently; + var response = await command.ExecuteAsync(CancellationToken.None).ConfigureAwait(false); + + Assert.IsNotNull(response); + Assert.AreEqual(3, response.ObjectsUploaded); + Assert.AreEqual(0, response.ObjectsFailed); + Assert.AreEqual(DirectoryResult.Success, response.Result); + + // Validate uploaded contents + ValidateDirectoryContents(Client, bucketName, directory.Name, directory, plainTextContentType); + } + } + finally + { + try { Directory.Delete(directory.FullName, true); } catch { } + } + } + + [TestMethod] + [TestCategory("S3")] + public async Task UploadDirectoryFailurePolicy_AbortOnFailure_Throws() + { + var 
nonExistentBucket = "non-existent-" + Guid.NewGuid().ToString("N"); + var directory = CreateTestDirectory(1 * KILO_SIZE, numberOfTestFiles: 2); + + try + { + using (var transferUtility = new TransferUtility(Client)) + { + var request = new TransferUtilityUploadDirectoryRequest + { + BucketName = nonExistentBucket, + Directory = directory.FullName, + SearchPattern = "*", + SearchOption = SearchOption.AllDirectories, + FailurePolicy = FailurePolicy.AbortOnFailure, + UploadFilesConcurrently = true + }; + + var config = new TransferUtilityConfig(); + var command = new Amazon.S3.Transfer.Internal.UploadDirectoryCommand(transferUtility, config, request); + command.UploadFilesConcurrently = request.UploadFilesConcurrently; + await Assert.ThrowsExceptionAsync(() => command.ExecuteAsync(CancellationToken.None)); + } + } + finally + { + try { Directory.Delete(directory.FullName, true); } catch { } + } + } } } diff --git a/sdk/test/Services/S3/UnitTests/Custom/FailurePolicyTests.cs b/sdk/test/Services/S3/UnitTests/Custom/FailurePolicyTests.cs index 2be179501ae9..1bbdce15284f 100644 --- a/sdk/test/Services/S3/UnitTests/Custom/FailurePolicyTests.cs +++ b/sdk/test/Services/S3/UnitTests/Custom/FailurePolicyTests.cs @@ -1,7 +1,6 @@ using Amazon.S3; using Amazon.S3.Model; using Amazon.S3.Transfer; -using Amazon.S3.Transfer.Model; using Amazon.S3.Transfer.Internal; using Microsoft.VisualStudio.TestTools.UnitTesting; using Moq; @@ -30,6 +29,17 @@ private static TransferUtilityDownloadDirectoryRequest CreateRequest(string loca }; } + private static TransferUtilityUploadDirectoryRequest CreateUploadRequest(string localDir, FailurePolicy policy) + { + return new TransferUtilityUploadDirectoryRequest + { + BucketName = "test-bucket", + Directory = localDir, + FailurePolicy = policy, + UploadFilesConcurrently = true + }; + } + private static GetObjectResponse SuccessObject(string bucket, string key, string content = "data") { return new GetObjectResponse @@ -71,6 +81,29 @@ private static Mock CreateMockS3(IEnumerable keys, Func CreateMockS3ForUpload(IEnumerable keys, Func shouldFail) + { + var mock = new Mock(); + mock.Setup(m => m.Config).Returns(new AmazonS3Config()); + + foreach (var key in keys) + { + if (shouldFail(key)) + { + mock.Setup(m => m.PutObjectAsync(It.Is(r => r.Key == key && r.BucketName == "test-bucket"), It.IsAny())) + .ThrowsAsync(new AmazonS3Exception("Simulated failure for " + key)); + } + else + { + mock.Setup(m => m.PutObjectAsync(It.Is(r => r.Key == key && r.BucketName == "test-bucket"), It.IsAny())) + .ReturnsAsync(new PutObjectResponse()); + } + } + + mock.Setup(m => m.Dispose()); + return mock; + } + private static string CreateTempDirectory() { string dir = Path.Combine(Path.GetTempPath(), "FailurePolicyTests", Guid.NewGuid().ToString()); @@ -336,5 +369,260 @@ public async Task DownloadDirectory_ObjectDownloadFailedEvent_ArgsContainExpecte try { Directory.Delete(localDir, true); } catch { } } } + + [TestMethod] + [TestCategory("S3")] + public async Task UploadDirectory_ContinueOnFailure_PartialSuccess() + { + var fileNames = new[] { "file1.txt", "file2.txt", "file3.txt" }; + string localDir = CreateTempDirectory(); + try + { + // create files + foreach (var f in fileNames) + { + File.WriteAllText(Path.Combine(localDir, f), "data"); + } + + var mockS3 = CreateMockS3ForUpload(fileNames, k => k.EndsWith("file2.txt", StringComparison.Ordinal)); + var cancellationToken = new CancellationToken(); + var config = new TransferUtilityConfig(); + var tu = new TransferUtility(mockS3.Object); + var 
request = CreateUploadRequest(localDir, FailurePolicy.ContinueOnFailure); + var command = new UploadDirectoryCommand(tu, config, request); + command.UploadFilesConcurrently = request.UploadFilesConcurrently; + var response = await command.ExecuteAsync(cancellationToken).ConfigureAwait(false); + + Assert.IsNotNull(response); + Assert.AreEqual(2, response.ObjectsUploaded); + Assert.AreEqual(1, response.ObjectsFailed); + Assert.AreEqual(DirectoryResult.PartialSuccess, response.Result); + Assert.IsNotNull(response.Errors); + Assert.AreEqual(1, response.Errors.Count); + // local files remain + Assert.IsTrue(File.Exists(Path.Combine(localDir, "file1.txt"))); + Assert.IsTrue(File.Exists(Path.Combine(localDir, "file3.txt"))); + Assert.IsTrue(File.Exists(Path.Combine(localDir, "file2.txt"))); + } + finally + { + try { Directory.Delete(localDir, true); } catch { } + } + } + + [TestMethod] + [TestCategory("S3")] + public async Task UploadDirectory_ContinueOnFailure_AllFailures() + { + var fileNames = new[] { "fileA.txt", "fileB.txt" }; + string localDir = CreateTempDirectory(); + try + { + foreach (var f in fileNames) + File.WriteAllText(Path.Combine(localDir, f), "data"); + + var mockS3 = CreateMockS3ForUpload(fileNames, k => true); + var cancellationToken = new CancellationToken(); + var config = new TransferUtilityConfig(); + var tu = new TransferUtility(mockS3.Object); + var request = CreateUploadRequest(localDir, FailurePolicy.ContinueOnFailure); + var command = new UploadDirectoryCommand(tu, config, request); + command.UploadFilesConcurrently = request.UploadFilesConcurrently; + var response = await command.ExecuteAsync(cancellationToken).ConfigureAwait(false); + + Assert.IsNotNull(response); + Assert.AreEqual(0, response.ObjectsUploaded); + Assert.AreEqual(2, response.ObjectsFailed); + Assert.AreEqual(DirectoryResult.Failure, response.Result); + Assert.IsNotNull(response.Errors); + Assert.AreEqual(2, response.Errors.Count); + } + finally + { + try { Directory.Delete(localDir, true); } catch { } + } + } + + [TestMethod] + [TestCategory("S3")] + public async Task UploadDirectory_ContinueOnFailure_AllSuccess() + { + var fileNames = new[] { "ok1.txt", "ok2.txt" }; + string localDir = CreateTempDirectory(); + try + { + foreach (var f in fileNames) + File.WriteAllText(Path.Combine(localDir, f), "data"); + + var mockS3 = CreateMockS3ForUpload(fileNames, k => false); + var cancellationToken = new CancellationToken(); + var config = new TransferUtilityConfig(); + var tu = new TransferUtility(mockS3.Object); + var request = CreateUploadRequest(localDir, FailurePolicy.ContinueOnFailure); + var command = new UploadDirectoryCommand(tu, config, request); + command.UploadFilesConcurrently = request.UploadFilesConcurrently; + var response = await command.ExecuteAsync(cancellationToken).ConfigureAwait(false); + + Assert.IsNotNull(response); + Assert.AreEqual(2, response.ObjectsUploaded); + Assert.AreEqual(0, response.ObjectsFailed); + Assert.AreEqual(DirectoryResult.Success, response.Result); + } + finally + { + try { Directory.Delete(localDir, true); } catch { } + } + } + + [TestMethod] + [TestCategory("S3")] + public async Task UploadDirectory_AbortOnFailure_ThrowsOnFirstFailure() + { + var fileNames = new[] { "first.txt", "second.txt" }; + string localDir = CreateTempDirectory(); + try + { + foreach (var f in fileNames) + File.WriteAllText(Path.Combine(localDir, f), "data"); + + var mockS3 = CreateMockS3ForUpload(fileNames, k => k.EndsWith("second.txt", StringComparison.Ordinal)); + var tu = new 
TransferUtility(mockS3.Object); + var request = CreateUploadRequest(localDir, FailurePolicy.AbortOnFailure); + + var ex = await Assert.ThrowsExceptionAsync(() => tu.UploadDirectoryAsync(request)); + Assert.IsTrue(ex.Message.Contains("second.txt")); + // first file may or may not have uploaded depending on timing; ensure at least one file attempt occurred + Assert.IsTrue(Directory.GetFiles(localDir).Length >= 1); + } + finally + { + try { Directory.Delete(localDir, true); } catch { } + } + } + + [TestMethod] + [TestCategory("S3")] + public async Task UploadDirectory_ObjectUploadFailedEvent_CancelInHandler_ContinueOnFailure_Throws() + { + var fileNames = new[] { "file1.txt", "file2.txt", "file3.txt" }; + string localDir = CreateTempDirectory(); + try + { + foreach (var f in fileNames) + File.WriteAllText(Path.Combine(localDir, f), "data"); + + var mockS3 = CreateMockS3ForUpload(fileNames, k => k.EndsWith("file2.txt", StringComparison.Ordinal)); + var tu = new TransferUtility(mockS3.Object); + var request = CreateUploadRequest(localDir, FailurePolicy.ContinueOnFailure); + // Make sequential to make behavior deterministic for the test. + request.UploadFilesConcurrently = false; + + bool handlerInvoked = false; + request.ObjectUploadFailedEvent += (sender, args) => + { + handlerInvoked = true; + throw new AmazonS3Exception("Stop processing immediately"); + }; + + var ex = await Assert.ThrowsExceptionAsync(() => tu.UploadDirectoryAsync(request)); + Assert.IsTrue(ex.Message.Equals("Stop processing immediately")); + + Assert.IsTrue(handlerInvoked, "ObjectUploadFailedEvent handler was not invoked."); + } + finally + { + try { Directory.Delete(localDir, true); } catch { } + } + } + + [TestMethod] + [TestCategory("S3")] + public async Task UploadDirectory_ObjectUploadFailedEvent_ArgsContainExpectedData_ContinueOnFailure() + { + var fileNames = new[] { "a.txt", "b.txt" }; + string localDir = CreateTempDirectory(); + try + { + foreach (var f in fileNames) + File.WriteAllText(Path.Combine(localDir, f), "data"); + + var mockS3 = CreateMockS3ForUpload(new[] { "a.txt", "b.txt" }, k => k.EndsWith("b.txt", StringComparison.Ordinal)); + var config = new TransferUtilityConfig(); + var request = CreateUploadRequest(localDir, FailurePolicy.ContinueOnFailure); + // collect events + var captured = new List(); + var invoked = new ManualResetEventSlim(false); + request.ObjectUploadFailedEvent += (sender, args) => + { + captured.Add(args); + invoked.Set(); + }; + + var tu = new TransferUtility(mockS3.Object); + var command = new UploadDirectoryCommand(tu, config, request); + command.UploadFilesConcurrently = request.UploadFilesConcurrently; + var response = await command.ExecuteAsync(CancellationToken.None).ConfigureAwait(false); + + // wait briefly for any background event dispatch + invoked.Wait(1000); + + Assert.IsNotNull(response); + Assert.AreEqual(1, response.ObjectsFailed); + Assert.AreEqual(1, captured.Count); + + var evt = captured[0]; + Assert.AreSame(request, evt.DirectoryRequest); + Assert.IsNotNull(evt.ObjectRequest); + Assert.IsTrue(evt.ObjectRequest.Key.EndsWith("b.txt", StringComparison.Ordinal)); + Assert.IsNotNull(evt.Exception); + Assert.IsTrue(evt.Exception.Message.Contains("Simulated failure for")); + } + finally + { + try { Directory.Delete(localDir, true); } catch { } + } + } + + [TestMethod] + [TestCategory("S3")] + public async Task UploadDirectory_ObjectUploadFailedEvent_ArgsContainExpectedData_AbortOnFailure() + { + var fileNames = new[] { "x.txt", "y.txt" }; + string localDir = 
CreateTempDirectory(); + try + { + foreach (var f in fileNames) + File.WriteAllText(Path.Combine(localDir, f), "data"); + + var mockS3 = CreateMockS3ForUpload(new[] { "x.txt", "y.txt" }, k => k.EndsWith("y.txt", StringComparison.Ordinal)); + var request = CreateUploadRequest(localDir, FailurePolicy.AbortOnFailure); + var captured = new List(); + var invoked = new ManualResetEventSlim(false); + + request.ObjectUploadFailedEvent += (sender, args) => + { + captured.Add(args); + invoked.Set(); + }; + + var tu = new TransferUtility(mockS3.Object); + await Assert.ThrowsExceptionAsync(() => tu.UploadDirectoryAsync(request)); + + // wait for event + invoked.Wait(1000); + + Assert.AreEqual(1, captured.Count); + var evt = captured[0]; + Assert.AreSame(request, evt.DirectoryRequest); + Assert.IsNotNull(evt.ObjectRequest); + Assert.IsTrue(evt.ObjectRequest.Key.EndsWith("y.txt", StringComparison.Ordinal)); + Assert.IsNotNull(evt.Exception); + Assert.IsTrue(evt.Exception.Message.Contains("Simulated failure for")); + } + finally + { + try { Directory.Delete(localDir, true); } catch { } + } + } } } diff --git a/sdk/test/Services/S3/UnitTests/Custom/UploadDirectoryCommandTests.cs b/sdk/test/Services/S3/UnitTests/Custom/UploadDirectoryCommandTests.cs new file mode 100644 index 000000000000..f2e30d440455 --- /dev/null +++ b/sdk/test/Services/S3/UnitTests/Custom/UploadDirectoryCommandTests.cs @@ -0,0 +1,227 @@ +using Amazon.S3; +using Amazon.S3.Model; +using Amazon.S3.Transfer; +using Amazon.S3.Transfer.Internal; +using Microsoft.VisualStudio.TestTools.UnitTesting; +using Moq; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using AWSSDK_DotNet.IntegrationTests.Utils; + +namespace AWSSDK.UnitTests +{ + [TestClass] + public class UploadDirectoryCommandTests + { + private string _testDirectory; + private Mock _mockS3Client; + private TransferUtilityConfig _config; + + [TestInitialize] + public void Setup() + { + _testDirectory = Path.Combine(Path.GetTempPath(), "UploadDirectoryCommandTests_" + Guid.NewGuid().ToString("N").Substring(0, 8)); + Directory.CreateDirectory(_testDirectory); + + // Create some test files + File.WriteAllBytes(Path.Combine(_testDirectory, "file1.dat"), GenerateTestData(1024)); + File.WriteAllBytes(Path.Combine(_testDirectory, "file2.dat"), GenerateTestData(1024)); + File.WriteAllBytes(Path.Combine(_testDirectory, "file3.dat"), GenerateTestData(1024)); + File.WriteAllBytes(Path.Combine(_testDirectory, "file4.dat"), GenerateTestData(1024)); + File.WriteAllBytes(Path.Combine(_testDirectory, "file5.dat"), GenerateTestData(1024)); + + _mockS3Client = new Mock(); + _config = new TransferUtilityConfig + { + ConcurrentServiceRequests = 4 + }; + + var s3Config = new AmazonS3Config + { + BufferSize = 8192, + }; + _mockS3Client.Setup(c => c.Config).Returns(s3Config); + } + + [TestCleanup] + public void Cleanup() + { + if (Directory.Exists(_testDirectory)) + { + try + { + Directory.Delete(_testDirectory, true); + } + catch + { + // Ignore cleanup errors in tests + } + } + } + + #region Concurrency Control Tests + + /// + /// Tests that ConcurrentServiceRequests setting actually limits concurrent file uploads. 
+ /// Expected: Max 2 concurrent uploads (ConcurrentServiceRequests = 2) + /// + [TestMethod] + public async Task ExecuteAsync_ConcurrentServiceRequests_RespectsLimit() + { + // Arrange + var request = CreateUploadDirectoryRequest(); + request.UploadFilesConcurrently = true; + + var config = new TransferUtilityConfig + { + ConcurrentServiceRequests = 2 + }; + + var currentConcurrentUploads = 0; + var maxObservedConcurrency = 0; + var concurrencyLock = new object(); + + // Map filenames to sizes + var files = Directory.GetFiles(_testDirectory).ToDictionary(Path.GetFileName, f => new FileInfo(f).Length); + + // Mock PutObjectAsync to track concurrency + _mockS3Client.Setup(c => c.PutObjectAsync( + It.IsAny(), + It.IsAny())) + .Returns(async (PutObjectRequest req, CancellationToken ct) => + { + lock (concurrencyLock) + { + currentConcurrentUploads++; + maxObservedConcurrency = Math.Max(maxObservedConcurrency, currentConcurrentUploads); + } + + try + { + await Task.Delay(100, ct); + var fileName = Path.GetFileName(req.FilePath); + var fileSize = files[fileName]; + return new PutObjectResponse + { + ETag = "\"test-etag\"", + HttpStatusCode = System.Net.HttpStatusCode.OK, + }; + } + finally + { + lock (concurrencyLock) + { + currentConcurrentUploads--; + } + } + }); + + var utility = new TransferUtility(_mockS3Client.Object, config); + var command = new UploadDirectoryCommand(utility, config, request); + + // Act + await command.ExecuteAsync(CancellationToken.None); + + // Assert + Assert.AreEqual(2, config.ConcurrentServiceRequests, "Test setup verification"); + Assert.IsTrue(maxObservedConcurrency <= config.ConcurrentServiceRequests, + $"Max concurrent uploads ({maxObservedConcurrency}) should not exceed ConcurrentServiceRequests ({config.ConcurrentServiceRequests})"); + } + + /// + /// Tests that sequential mode (UploadFilesConcurrently = false) uploads only one file at a time. 
+ /// Expected: Max 1 concurrent upload (sequential mode) + /// + [TestMethod] + public async Task ExecuteAsync_SequentialMode_UploadsOneAtATime() + { + // Arrange + var request = CreateUploadDirectoryRequest(); + request.UploadFilesConcurrently = false; + + var config = new TransferUtilityConfig + { + ConcurrentServiceRequests = 10 + }; + + var currentConcurrentUploads = 0; + var maxObservedConcurrency = 0; + var concurrencyLock = new object(); + + var files = Directory.GetFiles(_testDirectory).Take(3).ToDictionary(Path.GetFileName, f => new FileInfo(f).Length); + + // Mock PutObjectAsync to track concurrency + _mockS3Client.Setup(c => c.PutObjectAsync( + It.IsAny(), + It.IsAny())) + .Returns(async (PutObjectRequest req, CancellationToken ct) => + { + lock (concurrencyLock) + { + currentConcurrentUploads++; + maxObservedConcurrency = Math.Max(maxObservedConcurrency, currentConcurrentUploads); + } + + try + { + await Task.Delay(50, ct); + return new PutObjectResponse + { + ETag = "\"test-etag\"", + HttpStatusCode = System.Net.HttpStatusCode.OK, + }; + } + finally + { + lock (concurrencyLock) + { + currentConcurrentUploads--; + } + } + }); + + var utility = new TransferUtility(_mockS3Client.Object, config); + var command = new UploadDirectoryCommand(utility, config, request); + + // Act + await command.ExecuteAsync(CancellationToken.None); + + // Assert + Assert.AreEqual(1, maxObservedConcurrency, + $"Sequential mode should only upload 1 file at a time, but observed {maxObservedConcurrency}"); + } + + #endregion + + #region Helper Methods + + private TransferUtilityUploadDirectoryRequest CreateUploadDirectoryRequest( + string bucketName = "test-bucket", + string s3Directory = "prefix", + string localDirectory = null) + { + localDirectory = localDirectory ?? _testDirectory; + + return new TransferUtilityUploadDirectoryRequest + { + BucketName = bucketName, + KeyPrefix = s3Directory, + Directory = localDirectory + }; + } + + private byte[] GenerateTestData(int size) + { + var data = new byte[size]; + var random = new Random(42); // Fixed seed for reproducible tests + random.NextBytes(data); + return data; + } + + #endregion + } +}
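Usage sketch for the new FailurePolicy property and ObjectUploadFailedEvent event introduced by this change (not part of the diff). The bucket name, local path, and client construction below are illustrative assumptions; the public UploadDirectoryAsync overload shown here still returns a plain Task, since the new TransferUtilityUploadDirectoryResponse is only produced by the internal UploadDirectoryCommand for now, as the AssemblyInfo comments in this diff note.

// Minimal, illustrative sketch of the new failure-handling surface.
using System;
using System.IO;
using System.Threading.Tasks;
using Amazon.S3;
using Amazon.S3.Transfer;

public static class UploadDirectoryFailurePolicySample
{
    public static async Task RunAsync()
    {
        using (var s3Client = new AmazonS3Client())
        using (var transferUtility = new TransferUtility(s3Client))
        {
            var request = new TransferUtilityUploadDirectoryRequest
            {
                BucketName = "example-bucket",            // illustrative placeholder
                Directory = @"C:\data\to-upload",         // illustrative placeholder
                SearchPattern = "*",
                SearchOption = SearchOption.AllDirectories,
                UploadFilesConcurrently = true,
                // New in this change: keep uploading the remaining files when one fails.
                // The default, FailurePolicy.AbortOnFailure, cancels the remaining uploads
                // and rethrows the original exception.
                FailurePolicy = FailurePolicy.ContinueOnFailure
            };

            // New in this change: notification for each object that fails to upload.
            request.ObjectUploadFailedEvent += (sender, args) =>
            {
                Console.WriteLine($"Failed to upload '{args.ObjectRequest.Key}': {args.Exception.Message}");
            };

            // With ContinueOnFailure the call completes even if some objects fail;
            // individual failures are observed through the event above.
            await transferUtility.UploadDirectoryAsync(request);
        }
    }
}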