@@ -4,6 +4,7 @@ const agent = require('../../dd-trace/test/plugins/agent')
 const { setup } = require('./spec_helpers')
 const axios = require('axios')
 const { rawExpectedSchema } = require('./s3-naming')
+const { S3_PTR_KIND, SPAN_POINTER_DIRECTION } = require('../../dd-trace/src/constants')
 
 const bucketName = 's3-bucket-name-test'
 
@@ -36,20 +37,19 @@ describe('Plugin', () => {
 
         before(done => {
           AWS = require(`../../../versions/${s3ClientName}@${version}`).get()
+          s3 = new AWS.S3({ endpoint: 'http://127.0.0.1:4566', s3ForcePathStyle: true, region: 'us-east-1' })
+
+          // Fix for LocationConstraint issue - only for SDK v2
+          if (s3ClientName === 'aws-sdk') {
+            s3.api.globalEndpoint = '127.0.0.1'
+          }
 
-          s3 = new AWS.S3({ endpoint: 'http://127.0.0.1:4567', s3ForcePathStyle: true, region: 'us-east-1' })
           s3.createBucket({ Bucket: bucketName }, (err) => {
             if (err) return done(err)
             done()
           })
         })
 
-        after(done => {
-          s3.deleteBucket({ Bucket: bucketName }, () => {
-            done()
-          })
-        })
-
         after(async () => {
           await resetLocalStackS3()
           return agent.close({ ritmReset: false })
@@ -74,6 +74,138 @@ describe('Plugin', () => {
           rawExpectedSchema.outbound
         )
 
+        describe('span pointers', () => {
+          it('should add span pointer for putObject operation', (done) => {
+            agent.use(traces => {
+              try {
+                const span = traces[0][0]
+                const links = JSON.parse(span.meta?.['_dd.span_links'] || '[]')
+
+                expect(links).to.have.lengthOf(1)
+                expect(links[0].attributes).to.deep.equal({
+                  'ptr.kind': S3_PTR_KIND,
+                  'ptr.dir': SPAN_POINTER_DIRECTION.DOWNSTREAM,
+                  'ptr.hash': '6d1a2fe194c6579187408f827f942be3',
+                  'link.kind': 'span-pointer'
+                })
+                done()
+              } catch (error) {
+                done(error)
+              }
+            }).catch(done)
+
+            s3.putObject({
+              Bucket: bucketName,
+              Key: 'test-key',
+              Body: 'test body'
+            }, (err) => {
+              if (err) {
+                done(err)
+              }
+            })
+          })
+
+          it('should add span pointer for copyObject operation', (done) => {
+            agent.use(traces => {
+              try {
+                const span = traces[0][0]
+                const links = JSON.parse(span.meta?.['_dd.span_links'] || '[]')
+
+                expect(links).to.have.lengthOf(1)
+                expect(links[0].attributes).to.deep.equal({
+                  'ptr.kind': S3_PTR_KIND,
+                  'ptr.dir': SPAN_POINTER_DIRECTION.DOWNSTREAM,
+                  'ptr.hash': '1542053ce6d393c424b1374bac1fc0c5',
+                  'link.kind': 'span-pointer'
+                })
+                done()
+              } catch (error) {
+                done(error)
+              }
+            }).catch(done)
+
+            s3.copyObject({
+              Bucket: bucketName,
+              Key: 'new-key',
+              CopySource: `${bucketName}/test-key`
+            }, (err) => {
+              if (err) {
+                done(err)
+              }
+            })
+          })
+
+          it('should add span pointer for completeMultipartUpload operation', (done) => {
+            // Create 5MiB+ buffers for parts
+            const partSize = 5 * 1024 * 1024
+            const part1Data = Buffer.alloc(partSize, 'a')
+            const part2Data = Buffer.alloc(partSize, 'b')
+
+            // Start the multipart upload process
+            s3.createMultipartUpload({
+              Bucket: bucketName,
+              Key: 'multipart-test'
+            }, (err, multipartData) => {
+              if (err) return done(err)
+
+              // Upload both parts in parallel
+              Promise.all([
+                new Promise((resolve, reject) => {
+                  s3.uploadPart({
+                    Bucket: bucketName,
+                    Key: 'multipart-test',
+                    PartNumber: 1,
+                    UploadId: multipartData.UploadId,
+                    Body: part1Data
+                  }, (err, data) => err ? reject(err) : resolve({ PartNumber: 1, ETag: data.ETag }))
+                }),
+                new Promise((resolve, reject) => {
+                  s3.uploadPart({
+                    Bucket: bucketName,
+                    Key: 'multipart-test',
+                    PartNumber: 2,
+                    UploadId: multipartData.UploadId,
+                    Body: part2Data
+                  }, (err, data) => err ? reject(err) : resolve({ PartNumber: 2, ETag: data.ETag }))
+                })
+              ]).then(parts => {
+                // Now complete the multipart upload
+                const completeParams = {
+                  Bucket: bucketName,
+                  Key: 'multipart-test',
+                  UploadId: multipartData.UploadId,
+                  MultipartUpload: {
+                    Parts: parts
+                  }
+                }
+
+                s3.completeMultipartUpload(completeParams, (err) => {
+                  if (err) return done(err)
+                  agent.use(traces => {
+                    const span = traces[0][0]
+                    const operation = span.meta?.['aws.operation']
+                    if (operation === 'completeMultipartUpload') {
+                      try {
+                        const links = JSON.parse(span.meta?.['_dd.span_links'] || '[]')
+                        expect(links).to.have.lengthOf(1)
+                        expect(links[0].attributes).to.deep.equal({
+                          'ptr.kind': S3_PTR_KIND,
+                          'ptr.dir': SPAN_POINTER_DIRECTION.DOWNSTREAM,
+                          'ptr.hash': '422412aa6b472a7194f3e24f4b12b4a6',
+                          'link.kind': 'span-pointer'
+                        })
+                        done()
+                      } catch (error) {
+                        done(error)
+                      }
+                    }
+                  })
+                })
+              }).catch(done)
+            })
+          })
+        })
+
         it('should allow disabling a specific span kind of a service', (done) => {
           let total = 0