From 4cfed39b21caebff786ff586c3ab51df10c35eab Mon Sep 17 00:00:00 2001 From: Hammerbeck Date: Tue, 5 Mar 2024 11:16:44 +0100 Subject: [PATCH 01/13] Don't process ttl change if value has not changed --- src/Altinn.Broker.API/Controllers/ServiceOwnerController.cs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/Altinn.Broker.API/Controllers/ServiceOwnerController.cs b/src/Altinn.Broker.API/Controllers/ServiceOwnerController.cs index 8f6d57d8..59b8c414 100644 --- a/src/Altinn.Broker.API/Controllers/ServiceOwnerController.cs +++ b/src/Altinn.Broker.API/Controllers/ServiceOwnerController.cs @@ -80,6 +80,10 @@ public async Task UpdateFileRetention([FromBody] ServiceOwnerUpdat } var fileTimeToLive = XmlConvert.ToTimeSpan(serviceOwnerUpdateFileRetentionExt.FileTransferTimeToLive); + if (fileTimeToLive == serviceOwner.FileTransferTimeToLive) + { + return Problem(detail: "The file transfer already has the requested retention time", statusCode: (int)HttpStatusCode.Conflict); + } await _serviceOwnerRepository.UpdateFileRetention(token.Consumer, fileTimeToLive); await updateFileRetentionHandler.Process(new UpdateFileRetentionRequest { From 20c6315121c0a07a9c91c1399f5dad02c481950d Mon Sep 17 00:00:00 2001 From: Hammerbeck Date: Wed, 6 Mar 2024 10:31:04 +0100 Subject: [PATCH 02/13] Optimize garbage collection on db connection --- .../Azure/AzureResourceManagerService.cs | 3 ++- .../Repositories/ActorRepository.cs | 6 +++--- .../Repositories/FileTransferRepository.cs | 18 +++++++++--------- .../FileTransferStatusRepository.cs | 6 +++--- .../Repositories/IdempotencyEventRepository.cs | 6 +++--- .../Repositories/ServiceOwnerRepository.cs | 2 +- 6 files changed, 21 insertions(+), 20 deletions(-) diff --git a/src/Altinn.Broker.Integrations/Azure/AzureResourceManagerService.cs b/src/Altinn.Broker.Integrations/Azure/AzureResourceManagerService.cs index 50d0784e..eedf2cbd 100644 --- a/src/Altinn.Broker.Integrations/Azure/AzureResourceManagerService.cs +++ 
b/src/Altinn.Broker.Integrations/Azure/AzureResourceManagerService.cs @@ -120,7 +120,7 @@ private async Task EnableMicrosoftDefender(string resourceGroupName, string stor }; var json = JsonSerializer.Serialize(requestBody); var content = new StringContent(json, Encoding.UTF8, "application/json"); - var response = await client.PutAsync(endpoint, content); + var response = await client.PutAsync(endpoint, content, cancellationToken); if (!response.IsSuccessStatusCode) { var errorMessage = await response.Content.ReadAsStringAsync(); @@ -128,6 +128,7 @@ private async Task EnableMicrosoftDefender(string resourceGroupName, string stor throw new HttpRequestException($"Failed to enable Defender Malware Scan. Error: {errorMessage}"); } _logger.LogInformation($"Microsoft Defender Malware scan enabled for storage account {storageAccountName}"); + client.Dispose(); } private string GenerateStorageAccountName() diff --git a/src/Altinn.Broker.Persistence/Repositories/ActorRepository.cs b/src/Altinn.Broker.Persistence/Repositories/ActorRepository.cs index 158ab6be..309f6d25 100644 --- a/src/Altinn.Broker.Persistence/Repositories/ActorRepository.cs +++ b/src/Altinn.Broker.Persistence/Repositories/ActorRepository.cs @@ -16,11 +16,11 @@ public ActorRepository(DatabaseConnectionProvider connectionProvider) public async Task GetActorAsync(string actorExternalId, CancellationToken cancellationToken) { - using var command = await _connectionProvider.CreateCommand( + await using var command = await _connectionProvider.CreateCommand( "SELECT actor_id_pk, actor_external_id FROM broker.actor WHERE actor_external_id = @actorExternalId"); command.Parameters.AddWithValue("@actorExternalId", actorExternalId); - using NpgsqlDataReader reader = await command.ExecuteReaderAsync(cancellationToken); + await using NpgsqlDataReader reader = await command.ExecuteReaderAsync(cancellationToken); ActorEntity? 
actor = null; while (await reader.ReadAsync(cancellationToken)) { @@ -36,7 +36,7 @@ public ActorRepository(DatabaseConnectionProvider connectionProvider) public async Task AddActorAsync(ActorEntity actor, CancellationToken cancellationToken) { - NpgsqlCommand command = await _connectionProvider.CreateCommand( + await using NpgsqlCommand command = await _connectionProvider.CreateCommand( "INSERT INTO broker.actor (actor_external_id) " + "VALUES (@actorExternalId) " + "RETURNING actor_id_pk"); diff --git a/src/Altinn.Broker.Persistence/Repositories/FileTransferRepository.cs b/src/Altinn.Broker.Persistence/Repositories/FileTransferRepository.cs index e77ab797..b3f6935f 100644 --- a/src/Altinn.Broker.Persistence/Repositories/FileTransferRepository.cs +++ b/src/Altinn.Broker.Persistence/Repositories/FileTransferRepository.cs @@ -25,7 +25,7 @@ public FileTransferRepository(DatabaseConnectionProvider connectionProvider, IAc { var fileTransfer = new FileTransferEntity(); - using var command = await _connectionProvider.CreateCommand( + await using var command = await _connectionProvider.CreateCommand( @" SELECT f.file_transfer_id_pk, @@ -73,7 +73,7 @@ GROUP BY f.file_transfer_id_pk = @fileTransferId;"); { command.Parameters.AddWithValue("@fileTransferId", fileTransferId); - using NpgsqlDataReader reader = await command.ExecuteReaderAsync(cancellationToken); + await using NpgsqlDataReader reader = await command.ExecuteReaderAsync(cancellationToken); if (await reader.ReadAsync(cancellationToken)) { fileTransfer = new FileTransferEntity @@ -132,7 +132,7 @@ FROM broker.file_transfer { command.Parameters.AddWithValue("@fileTransferId", fileTransferId); var commandText = command.CommandText; - using (var reader = await command.ExecuteReaderAsync(cancellationToken)) + await using (var reader = await command.ExecuteReaderAsync(cancellationToken)) { while (await reader.ReadAsync(cancellationToken)) { @@ -174,7 +174,7 @@ public async Task AddFileTransfer(ServiceOwnerEntity 
serviceOwner, Resourc actorId = actor.ActorId; } var fileTransferId = Guid.NewGuid(); - NpgsqlCommand command = await _connectionProvider.CreateCommand( + await using NpgsqlCommand command = await _connectionProvider.CreateCommand( "INSERT INTO broker.file_transfer (file_transfer_id_pk, resource_id, filename, checksum, file_transfer_size, external_file_transfer_reference, sender_actor_id_fk, created, storage_provider_id_fk, expiration_time, hangfire_job_id) " + "VALUES (@fileTransferId, @resourceId, @fileName, @checksum, @fileTransferSize, @externalFileTransferReference, @senderActorId, @created, @storageProviderId, @expirationTime, @hangfireJobId)"); @@ -261,7 +261,7 @@ public async Task> LegacyGetFilesForRecipientsWithRecipientStatus(Leg command.Parameters.AddWithValue("@recipientFileTransferStatus", (int)fileTransferSearch.RecipientStatus); var fileTransfers = new List(); - using (var reader = await command.ExecuteReaderAsync(cancellationToken)) + await using (var reader = await command.ExecuteReaderAsync(cancellationToken)) { while (await reader.ReadAsync(cancellationToken)) { @@ -338,7 +338,7 @@ public async Task> GetFileTransfersAssociatedWithActor(FileTransferSe command.Parameters.AddWithValue("@fileTransferStatus", (int)fileTransferSearch.Status); var files = new List(); - using (var reader = await command.ExecuteReaderAsync(cancellationToken)) + await using (var reader = await command.ExecuteReaderAsync(cancellationToken)) { while (await reader.ReadAsync(cancellationToken)) { @@ -390,7 +390,7 @@ public async Task> GetFileTransfersForRecipientWithRecipientStatus(Fi command.Parameters.AddWithValue("@recipientFileStatus", (int)fileTransferSearch.RecipientStatus); var files = new List(); - using (var reader = await command.ExecuteReaderAsync(cancellationToken)) + await using (var reader = await command.ExecuteReaderAsync(cancellationToken)) { while (await reader.ReadAsync(cancellationToken)) { @@ -431,7 +431,7 @@ private async Task> GetMetadata(Guid 
fileTransferId, { command.Parameters.AddWithValue("@fileTransferId", fileTransferId); var property = new Dictionary(); - using (var reader = await command.ExecuteReaderAsync(cancellationToken)) + await using (var reader = await command.ExecuteReaderAsync(cancellationToken)) { while (await reader.ReadAsync(cancellationToken)) { @@ -536,7 +536,7 @@ LIMIT 1 { command.Parameters.AddWithValue("@storageProviderId", storageProviderId); command.Parameters.AddWithValue("@deletedStatusId", (int)FileTransferStatus.Deleted); - using (var reader = await command.ExecuteReaderAsync(cancellationToken)) + await using (var reader = await command.ExecuteReaderAsync(cancellationToken)) { while (await reader.ReadAsync(cancellationToken)) { diff --git a/src/Altinn.Broker.Persistence/Repositories/FileTransferStatusRepository.cs b/src/Altinn.Broker.Persistence/Repositories/FileTransferStatusRepository.cs index 42506335..e2b57685 100644 --- a/src/Altinn.Broker.Persistence/Repositories/FileTransferStatusRepository.cs +++ b/src/Altinn.Broker.Persistence/Repositories/FileTransferStatusRepository.cs @@ -14,7 +14,7 @@ public FileTransferStatusRepository(DatabaseConnectionProvider connectionProvide public async Task InsertFileTransferStatus(Guid fileTransferId, FileTransferStatus status, string? 
detailedFileTransferStatus = null, CancellationToken cancellationToken = default) { - using var command = await _connectionProvider.CreateCommand( + await using var command = await _connectionProvider.CreateCommand( "INSERT INTO broker.file_transfer_status (file_transfer_id_fk, file_transfer_status_description_id_fk, file_transfer_status_date, file_transfer_status_detailed_description) " + "VALUES (@fileTransferId, @statusId, NOW(), @detailedFileTransferStatus) RETURNING file_transfer_status_id_pk;"); command.Parameters.AddWithValue("@fileTransferId", fileTransferId); @@ -30,14 +30,14 @@ public async Task InsertFileTransferStatus(Guid fileTransferId, FileTransferStat public async Task> GetFileTransferStatusHistory(Guid fileTransferId, CancellationToken cancellationToken) { - using (var command = await _connectionProvider.CreateCommand( + await using (var command = await _connectionProvider.CreateCommand( "SELECT file_transfer_id_fk, file_transfer_status_description_id_fk, file_transfer_status_date, file_transfer_status_detailed_description " + "FROM broker.file_transfer_status fis " + "WHERE fis.file_transfer_id_fk = @fileTransferId")) { command.Parameters.AddWithValue("@fileTransferId", fileTransferId); var fileTransferStatuses = new List(); - using (var reader = await command.ExecuteReaderAsync(cancellationToken)) + await using (var reader = await command.ExecuteReaderAsync(cancellationToken)) { while (await reader.ReadAsync(cancellationToken)) { diff --git a/src/Altinn.Broker.Persistence/Repositories/IdempotencyEventRepository.cs b/src/Altinn.Broker.Persistence/Repositories/IdempotencyEventRepository.cs index b8d0a8b0..c4c6a171 100644 --- a/src/Altinn.Broker.Persistence/Repositories/IdempotencyEventRepository.cs +++ b/src/Altinn.Broker.Persistence/Repositories/IdempotencyEventRepository.cs @@ -16,7 +16,7 @@ public IdempotencyEventRepository(DatabaseConnectionProvider connectionProvider) public async Task AddIdempotencyEventAsync(string IdempotencyEventId, 
CancellationToken cancellationToken) { - NpgsqlCommand command = await _connectionProvider.CreateCommand( + await using NpgsqlCommand command = await _connectionProvider.CreateCommand( "INSERT INTO broker.idempotency_event (idempotency_event_id_pk, created)" + "VALUES (@idempotency_event_id_pk, @created) "); command.Parameters.AddWithValue("@idempotency_event_id_pk", IdempotencyEventId); @@ -26,7 +26,7 @@ public async Task AddIdempotencyEventAsync(string IdempotencyEventId, Cancellati } public async Task DeleteIdempotencyEventAsync(string IdempotencyEventId, CancellationToken cancellationToken) { - NpgsqlCommand command = await _connectionProvider.CreateCommand( + await using NpgsqlCommand command = await _connectionProvider.CreateCommand( "DELETE FROM broker.idempotency_event " + "WHERE idempotency_event_id_pk = @idempotency_event_id_pk"); command.Parameters.AddWithValue("@idempotency_event_id_pk", IdempotencyEventId); @@ -35,7 +35,7 @@ public async Task DeleteIdempotencyEventAsync(string IdempotencyEventId, Cancell } public async Task DeleteOldIdempotencyEvents() { - NpgsqlCommand command = await _connectionProvider.CreateCommand( + await using NpgsqlCommand command = await _connectionProvider.CreateCommand( "DELETE FROM broker.idempotency_event " + "WHERE created < @created"); diff --git a/src/Altinn.Broker.Persistence/Repositories/ServiceOwnerRepository.cs b/src/Altinn.Broker.Persistence/Repositories/ServiceOwnerRepository.cs index 420f265d..61aa66d4 100644 --- a/src/Altinn.Broker.Persistence/Repositories/ServiceOwnerRepository.cs +++ b/src/Altinn.Broker.Persistence/Repositories/ServiceOwnerRepository.cs @@ -15,7 +15,7 @@ public ServiceOwnerRepository(DatabaseConnectionProvider connectionProvider) public async Task GetServiceOwner(string serviceOwnerId) { - using var command = await _connectionProvider.CreateCommand( + await using var command = await _connectionProvider.CreateCommand( "SELECT service_owner_id_pk, service_owner_name, file_transfer_time_to_live, 
" + "storage_provider_id_pk, created, resource_name, storage_provider_type " + "FROM broker.service_owner " + From 42f2dd6df54d895829c00f13160b8df629829ef2 Mon Sep 17 00:00:00 2001 From: Hammerbeck Date: Wed, 6 Mar 2024 10:31:45 +0100 Subject: [PATCH 03/13] Transfer with speed declines doens't cancel --- src/Altinn.Broker.API/Program.cs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/Altinn.Broker.API/Program.cs b/src/Altinn.Broker.API/Program.cs index d937fa4e..804d385a 100644 --- a/src/Altinn.Broker.API/Program.cs +++ b/src/Altinn.Broker.API/Program.cs @@ -155,6 +155,8 @@ static void ConfigureServices(IServiceCollection services, IConfiguration config services.Configure(options => { options.Limits.MaxRequestBodySize = null; + options.Limits.MaxRequestBufferSize = null; + options.Limits.MinRequestBodyDataRate = new MinDataRate(bytesPerSecond: 100, gracePeriod: TimeSpan.FromSeconds(10)); }); services.Configure(options => { From 6b59dd9224aef3a83b8c91cf08f7d3066c3f04d1 Mon Sep 17 00:00:00 2001 From: Hammerbeck Date: Wed, 6 Mar 2024 10:33:42 +0100 Subject: [PATCH 04/13] Add k6 load testing --- README.md | 9 +++ Test/Altinn.Broker.Tests/K6.Tests/test.js | 82 +++++++++++++++++++++++ docker-compose-test.yml | 9 +++ 3 files changed, 100 insertions(+) create mode 100644 Test/Altinn.Broker.Tests/K6.Tests/test.js create mode 100644 docker-compose-test.yml diff --git a/README.md b/README.md index 76c97877..a477d542 100644 --- a/README.md +++ b/README.md @@ -45,3 +45,12 @@ Formatting of the code base is handled by Dotnet format. [See how to configure i ## Deploy The build and push workflow produces a docker image that is pushed to Github packages. This image is then used by the release action found in the [altinn-broker-infra repository](https://github.com/Altinn/altinn-broker-infra). + + +### Load testing with k6 + +We run load tests using k6. 
To run without installing k6 you can use docker-compose(base url has to be http://host.docker.internal:5096): +```docker-compose -f docker-compose-test.yml up k6-test``` + +if you have k6 installed locally, you can run it by using the following command: +```"k6 run test.js"``` \ No newline at end of file diff --git a/Test/Altinn.Broker.Tests/K6.Tests/test.js b/Test/Altinn.Broker.Tests/K6.Tests/test.js new file mode 100644 index 00000000..05fda280 --- /dev/null +++ b/Test/Altinn.Broker.Tests/K6.Tests/test.js @@ -0,0 +1,82 @@ +import http from 'k6/http'; +import { sleep, check, fail } from 'k6'; + +export const options = { + vus: 25, + duration: '10m', + insecureSkipTLSVerify: true, + iterations: 25 + + //remove this line before doing a real test + //httpDebug: 'full', +}; + +var tokens = { + DUMMY_SENDER_TOKEN: "", + DUMMY_SERVICE_OWNER_TOKEN: "" +} +baseUrl = "http://localhost:5096" + + +const file = open("./data/testfile.txt", "b"); +function checkResult(res, status) { + if (!status) { + console.error(res) + } +} + +export function setup() { + let headers = generateHeaders(tokens.DUMMY_SERVICE_OWNER_TOKEN, 'application/json') + + //set fileTransfer TTL to 15 minutes. Should be longer than the test time + var fileRes = http.put(`${baseUrl}/broker/api/v1/serviceowner/fileretention`, JSON.stringify({ + fileTransferTimeToLive: "PT15M" + }), { headers: headers }); + + if ( + !check(fileRes, { + 'status code MUST be 200 or 409': (fileRes) => fileRes.status === 200 || fileRes.status === 409, + }) + ) { + fail('Could not update file transfer TTL. 
Exiting'); + } + +} + +export default async function () { + + var baseFile = { + resourceId: 'altinn-broker-test-resource-1', + checksum: null, + fileName: 'testfile.txt', + recipients: ['0192:986252932'], + sender: '0192:991825827', + sendersFileTransferReference: 'test-data' + } + sleep(1); + + let headers = generateHeaders(tokens.DUMMY_SENDER_TOKEN, 'application/json') + var res = http.post(`${baseUrl}/broker/api/v1/filetransfer`, JSON.stringify(baseFile), { headers: headers }); + var status = check(res, { 'Initialize: status was 200': (r) => r.status == 200 }); + checkResult(res, status) + sleep(1); + + headers = generateHeaders(tokens.DUMMY_SENDER_TOKEN, 'application/octet-stream') + const data = { + field: 'this is a standard form field', + file: http.file(file, 'testfile.txt') + } + var res2 = http.post(`${baseUrl}/broker/api/v1/filetransfer/${res.body}/upload`, data, { timeout: "600s", headers: headers }); + status = check(res2, { 'Upload: status was 200': (r) => r.status == 200 }); + checkResult(res, status) + +} +function generateHeaders(token, contentType) { + return { + 'Authorization': 'Bearer ' + token, + 'Content-Type': contentType, + 'Accept': '*/*, text/plain', + 'Accept-Encoding': 'gzip, deflate, br', + 'Connection': 'keep-alive' + } +} \ No newline at end of file diff --git a/docker-compose-test.yml b/docker-compose-test.yml new file mode 100644 index 00000000..3f6a064d --- /dev/null +++ b/docker-compose-test.yml @@ -0,0 +1,9 @@ +version: '3.4' + +services: + k6-test: + image: grafana/k6:latest + command: run /test.js + volumes: + - ./Test/Altinn.Broker.Tests/K6.Tests/test.js:/test.js + - ./Test/Altinn.Broker.Tests/K6.Tests/data:/data From 0fda9b1bb74af3c61ab7ed50dabdd2d4e04693b1 Mon Sep 17 00:00:00 2001 From: Hammerbeck Date: Tue, 5 Mar 2024 11:16:44 +0100 Subject: [PATCH 05/13] Don't process ttl change if value has not changed --- src/Altinn.Broker.API/Controllers/ServiceOwnerController.cs | 4 ++++ 1 file changed, 4 insertions(+) diff --git 
a/src/Altinn.Broker.API/Controllers/ServiceOwnerController.cs b/src/Altinn.Broker.API/Controllers/ServiceOwnerController.cs index 8f6d57d8..59b8c414 100644 --- a/src/Altinn.Broker.API/Controllers/ServiceOwnerController.cs +++ b/src/Altinn.Broker.API/Controllers/ServiceOwnerController.cs @@ -80,6 +80,10 @@ public async Task UpdateFileRetention([FromBody] ServiceOwnerUpdat } var fileTimeToLive = XmlConvert.ToTimeSpan(serviceOwnerUpdateFileRetentionExt.FileTransferTimeToLive); + if (fileTimeToLive == serviceOwner.FileTransferTimeToLive) + { + return Problem(detail: "The file transfer already has the requested retention time", statusCode: (int)HttpStatusCode.Conflict); + } await _serviceOwnerRepository.UpdateFileRetention(token.Consumer, fileTimeToLive); await updateFileRetentionHandler.Process(new UpdateFileRetentionRequest { From 8cacdb06a81ecc99ea8be821a77ea9811172040e Mon Sep 17 00:00:00 2001 From: Hammerbeck Date: Wed, 6 Mar 2024 10:31:04 +0100 Subject: [PATCH 06/13] Optimize garbage collection on db connection --- .../Azure/AzureResourceManagerService.cs | 3 ++- .../Repositories/ActorRepository.cs | 6 +++--- .../Repositories/FileTransferRepository.cs | 18 +++++++++--------- .../FileTransferStatusRepository.cs | 6 +++--- .../Repositories/IdempotencyEventRepository.cs | 6 +++--- .../Repositories/ServiceOwnerRepository.cs | 2 +- 6 files changed, 21 insertions(+), 20 deletions(-) diff --git a/src/Altinn.Broker.Integrations/Azure/AzureResourceManagerService.cs b/src/Altinn.Broker.Integrations/Azure/AzureResourceManagerService.cs index 6f1c965c..3f0aedb3 100644 --- a/src/Altinn.Broker.Integrations/Azure/AzureResourceManagerService.cs +++ b/src/Altinn.Broker.Integrations/Azure/AzureResourceManagerService.cs @@ -120,7 +120,7 @@ private async Task EnableMicrosoftDefender(string resourceGroupName, string stor }; var json = JsonSerializer.Serialize(requestBody); var content = new StringContent(json, Encoding.UTF8, "application/json"); - var response = await 
client.PutAsync(endpoint, content); + var response = await client.PutAsync(endpoint, content, cancellationToken); if (!response.IsSuccessStatusCode) { var errorMessage = await response.Content.ReadAsStringAsync(); @@ -128,6 +128,7 @@ private async Task EnableMicrosoftDefender(string resourceGroupName, string stor throw new HttpRequestException($"Failed to enable Defender Malware Scan. Error: {errorMessage}"); } _logger.LogInformation($"Microsoft Defender Malware scan enabled for storage account {storageAccountName}"); + client.Dispose(); } private string GenerateStorageAccountName() diff --git a/src/Altinn.Broker.Persistence/Repositories/ActorRepository.cs b/src/Altinn.Broker.Persistence/Repositories/ActorRepository.cs index 158ab6be..309f6d25 100644 --- a/src/Altinn.Broker.Persistence/Repositories/ActorRepository.cs +++ b/src/Altinn.Broker.Persistence/Repositories/ActorRepository.cs @@ -16,11 +16,11 @@ public ActorRepository(DatabaseConnectionProvider connectionProvider) public async Task GetActorAsync(string actorExternalId, CancellationToken cancellationToken) { - using var command = await _connectionProvider.CreateCommand( + await using var command = await _connectionProvider.CreateCommand( "SELECT actor_id_pk, actor_external_id FROM broker.actor WHERE actor_external_id = @actorExternalId"); command.Parameters.AddWithValue("@actorExternalId", actorExternalId); - using NpgsqlDataReader reader = await command.ExecuteReaderAsync(cancellationToken); + await using NpgsqlDataReader reader = await command.ExecuteReaderAsync(cancellationToken); ActorEntity? 
actor = null; while (await reader.ReadAsync(cancellationToken)) { @@ -36,7 +36,7 @@ public ActorRepository(DatabaseConnectionProvider connectionProvider) public async Task AddActorAsync(ActorEntity actor, CancellationToken cancellationToken) { - NpgsqlCommand command = await _connectionProvider.CreateCommand( + await using NpgsqlCommand command = await _connectionProvider.CreateCommand( "INSERT INTO broker.actor (actor_external_id) " + "VALUES (@actorExternalId) " + "RETURNING actor_id_pk"); diff --git a/src/Altinn.Broker.Persistence/Repositories/FileTransferRepository.cs b/src/Altinn.Broker.Persistence/Repositories/FileTransferRepository.cs index e77ab797..b3f6935f 100644 --- a/src/Altinn.Broker.Persistence/Repositories/FileTransferRepository.cs +++ b/src/Altinn.Broker.Persistence/Repositories/FileTransferRepository.cs @@ -25,7 +25,7 @@ public FileTransferRepository(DatabaseConnectionProvider connectionProvider, IAc { var fileTransfer = new FileTransferEntity(); - using var command = await _connectionProvider.CreateCommand( + await using var command = await _connectionProvider.CreateCommand( @" SELECT f.file_transfer_id_pk, @@ -73,7 +73,7 @@ GROUP BY f.file_transfer_id_pk = @fileTransferId;"); { command.Parameters.AddWithValue("@fileTransferId", fileTransferId); - using NpgsqlDataReader reader = await command.ExecuteReaderAsync(cancellationToken); + await using NpgsqlDataReader reader = await command.ExecuteReaderAsync(cancellationToken); if (await reader.ReadAsync(cancellationToken)) { fileTransfer = new FileTransferEntity @@ -132,7 +132,7 @@ FROM broker.file_transfer { command.Parameters.AddWithValue("@fileTransferId", fileTransferId); var commandText = command.CommandText; - using (var reader = await command.ExecuteReaderAsync(cancellationToken)) + await using (var reader = await command.ExecuteReaderAsync(cancellationToken)) { while (await reader.ReadAsync(cancellationToken)) { @@ -174,7 +174,7 @@ public async Task AddFileTransfer(ServiceOwnerEntity 
serviceOwner, Resourc actorId = actor.ActorId; } var fileTransferId = Guid.NewGuid(); - NpgsqlCommand command = await _connectionProvider.CreateCommand( + await using NpgsqlCommand command = await _connectionProvider.CreateCommand( "INSERT INTO broker.file_transfer (file_transfer_id_pk, resource_id, filename, checksum, file_transfer_size, external_file_transfer_reference, sender_actor_id_fk, created, storage_provider_id_fk, expiration_time, hangfire_job_id) " + "VALUES (@fileTransferId, @resourceId, @fileName, @checksum, @fileTransferSize, @externalFileTransferReference, @senderActorId, @created, @storageProviderId, @expirationTime, @hangfireJobId)"); @@ -261,7 +261,7 @@ public async Task> LegacyGetFilesForRecipientsWithRecipientStatus(Leg command.Parameters.AddWithValue("@recipientFileTransferStatus", (int)fileTransferSearch.RecipientStatus); var fileTransfers = new List(); - using (var reader = await command.ExecuteReaderAsync(cancellationToken)) + await using (var reader = await command.ExecuteReaderAsync(cancellationToken)) { while (await reader.ReadAsync(cancellationToken)) { @@ -338,7 +338,7 @@ public async Task> GetFileTransfersAssociatedWithActor(FileTransferSe command.Parameters.AddWithValue("@fileTransferStatus", (int)fileTransferSearch.Status); var files = new List(); - using (var reader = await command.ExecuteReaderAsync(cancellationToken)) + await using (var reader = await command.ExecuteReaderAsync(cancellationToken)) { while (await reader.ReadAsync(cancellationToken)) { @@ -390,7 +390,7 @@ public async Task> GetFileTransfersForRecipientWithRecipientStatus(Fi command.Parameters.AddWithValue("@recipientFileStatus", (int)fileTransferSearch.RecipientStatus); var files = new List(); - using (var reader = await command.ExecuteReaderAsync(cancellationToken)) + await using (var reader = await command.ExecuteReaderAsync(cancellationToken)) { while (await reader.ReadAsync(cancellationToken)) { @@ -431,7 +431,7 @@ private async Task> GetMetadata(Guid 
fileTransferId, { command.Parameters.AddWithValue("@fileTransferId", fileTransferId); var property = new Dictionary(); - using (var reader = await command.ExecuteReaderAsync(cancellationToken)) + await using (var reader = await command.ExecuteReaderAsync(cancellationToken)) { while (await reader.ReadAsync(cancellationToken)) { @@ -536,7 +536,7 @@ LIMIT 1 { command.Parameters.AddWithValue("@storageProviderId", storageProviderId); command.Parameters.AddWithValue("@deletedStatusId", (int)FileTransferStatus.Deleted); - using (var reader = await command.ExecuteReaderAsync(cancellationToken)) + await using (var reader = await command.ExecuteReaderAsync(cancellationToken)) { while (await reader.ReadAsync(cancellationToken)) { diff --git a/src/Altinn.Broker.Persistence/Repositories/FileTransferStatusRepository.cs b/src/Altinn.Broker.Persistence/Repositories/FileTransferStatusRepository.cs index 42506335..e2b57685 100644 --- a/src/Altinn.Broker.Persistence/Repositories/FileTransferStatusRepository.cs +++ b/src/Altinn.Broker.Persistence/Repositories/FileTransferStatusRepository.cs @@ -14,7 +14,7 @@ public FileTransferStatusRepository(DatabaseConnectionProvider connectionProvide public async Task InsertFileTransferStatus(Guid fileTransferId, FileTransferStatus status, string? 
detailedFileTransferStatus = null, CancellationToken cancellationToken = default) { - using var command = await _connectionProvider.CreateCommand( + await using var command = await _connectionProvider.CreateCommand( "INSERT INTO broker.file_transfer_status (file_transfer_id_fk, file_transfer_status_description_id_fk, file_transfer_status_date, file_transfer_status_detailed_description) " + "VALUES (@fileTransferId, @statusId, NOW(), @detailedFileTransferStatus) RETURNING file_transfer_status_id_pk;"); command.Parameters.AddWithValue("@fileTransferId", fileTransferId); @@ -30,14 +30,14 @@ public async Task InsertFileTransferStatus(Guid fileTransferId, FileTransferStat public async Task> GetFileTransferStatusHistory(Guid fileTransferId, CancellationToken cancellationToken) { - using (var command = await _connectionProvider.CreateCommand( + await using (var command = await _connectionProvider.CreateCommand( "SELECT file_transfer_id_fk, file_transfer_status_description_id_fk, file_transfer_status_date, file_transfer_status_detailed_description " + "FROM broker.file_transfer_status fis " + "WHERE fis.file_transfer_id_fk = @fileTransferId")) { command.Parameters.AddWithValue("@fileTransferId", fileTransferId); var fileTransferStatuses = new List(); - using (var reader = await command.ExecuteReaderAsync(cancellationToken)) + await using (var reader = await command.ExecuteReaderAsync(cancellationToken)) { while (await reader.ReadAsync(cancellationToken)) { diff --git a/src/Altinn.Broker.Persistence/Repositories/IdempotencyEventRepository.cs b/src/Altinn.Broker.Persistence/Repositories/IdempotencyEventRepository.cs index b8d0a8b0..c4c6a171 100644 --- a/src/Altinn.Broker.Persistence/Repositories/IdempotencyEventRepository.cs +++ b/src/Altinn.Broker.Persistence/Repositories/IdempotencyEventRepository.cs @@ -16,7 +16,7 @@ public IdempotencyEventRepository(DatabaseConnectionProvider connectionProvider) public async Task AddIdempotencyEventAsync(string IdempotencyEventId, 
CancellationToken cancellationToken) { - NpgsqlCommand command = await _connectionProvider.CreateCommand( + await using NpgsqlCommand command = await _connectionProvider.CreateCommand( "INSERT INTO broker.idempotency_event (idempotency_event_id_pk, created)" + "VALUES (@idempotency_event_id_pk, @created) "); command.Parameters.AddWithValue("@idempotency_event_id_pk", IdempotencyEventId); @@ -26,7 +26,7 @@ public async Task AddIdempotencyEventAsync(string IdempotencyEventId, Cancellati } public async Task DeleteIdempotencyEventAsync(string IdempotencyEventId, CancellationToken cancellationToken) { - NpgsqlCommand command = await _connectionProvider.CreateCommand( + await using NpgsqlCommand command = await _connectionProvider.CreateCommand( "DELETE FROM broker.idempotency_event " + "WHERE idempotency_event_id_pk = @idempotency_event_id_pk"); command.Parameters.AddWithValue("@idempotency_event_id_pk", IdempotencyEventId); @@ -35,7 +35,7 @@ public async Task DeleteIdempotencyEventAsync(string IdempotencyEventId, Cancell } public async Task DeleteOldIdempotencyEvents() { - NpgsqlCommand command = await _connectionProvider.CreateCommand( + await using NpgsqlCommand command = await _connectionProvider.CreateCommand( "DELETE FROM broker.idempotency_event " + "WHERE created < @created"); diff --git a/src/Altinn.Broker.Persistence/Repositories/ServiceOwnerRepository.cs b/src/Altinn.Broker.Persistence/Repositories/ServiceOwnerRepository.cs index 420f265d..61aa66d4 100644 --- a/src/Altinn.Broker.Persistence/Repositories/ServiceOwnerRepository.cs +++ b/src/Altinn.Broker.Persistence/Repositories/ServiceOwnerRepository.cs @@ -15,7 +15,7 @@ public ServiceOwnerRepository(DatabaseConnectionProvider connectionProvider) public async Task GetServiceOwner(string serviceOwnerId) { - using var command = await _connectionProvider.CreateCommand( + await using var command = await _connectionProvider.CreateCommand( "SELECT service_owner_id_pk, service_owner_name, file_transfer_time_to_live, 
" + "storage_provider_id_pk, created, resource_name, storage_provider_type " + "FROM broker.service_owner " + From 12b0705c1a042de772fec0d4f87c20eba8a2d16c Mon Sep 17 00:00:00 2001 From: Hammerbeck Date: Wed, 6 Mar 2024 10:31:45 +0100 Subject: [PATCH 07/13] Transfer with speed declines doens't cancel --- src/Altinn.Broker.API/Program.cs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/Altinn.Broker.API/Program.cs b/src/Altinn.Broker.API/Program.cs index cbc581d8..e16a5357 100644 --- a/src/Altinn.Broker.API/Program.cs +++ b/src/Altinn.Broker.API/Program.cs @@ -162,6 +162,8 @@ static void ConfigureServices(IServiceCollection services, IConfiguration config services.Configure(options => { options.Limits.MaxRequestBodySize = null; + options.Limits.MaxRequestBufferSize = null; + options.Limits.MinRequestBodyDataRate = new MinDataRate(bytesPerSecond: 100, gracePeriod: TimeSpan.FromSeconds(10)); }); services.Configure(options => { From 2345e2e4e40b02aec738a236198a093e8263533b Mon Sep 17 00:00:00 2001 From: Hammerbeck Date: Wed, 6 Mar 2024 10:33:42 +0100 Subject: [PATCH 08/13] Add k6 load testing --- README.md | 9 +++ Test/Altinn.Broker.Tests/K6.Tests/test.js | 82 +++++++++++++++++++++++ docker-compose-test.yml | 9 +++ 3 files changed, 100 insertions(+) create mode 100644 Test/Altinn.Broker.Tests/K6.Tests/test.js create mode 100644 docker-compose-test.yml diff --git a/README.md b/README.md index 76c97877..a477d542 100644 --- a/README.md +++ b/README.md @@ -45,3 +45,12 @@ Formatting of the code base is handled by Dotnet format. [See how to configure i ## Deploy The build and push workflow produces a docker image that is pushed to Github packages. This image is then used by the release action found in the [altinn-broker-infra repository](https://github.com/Altinn/altinn-broker-infra). + + +### Load testing with k6 + +We run load tests using k6. 
To run without installing k6 you can use docker-compose(base url has to be http://host.docker.internal:5096): +```docker-compose -f docker-compose-test.yml up k6-test``` + +if you have k6 installed locally, you can run it by using the following command: +```"k6 run test.js"``` \ No newline at end of file diff --git a/Test/Altinn.Broker.Tests/K6.Tests/test.js b/Test/Altinn.Broker.Tests/K6.Tests/test.js new file mode 100644 index 00000000..05fda280 --- /dev/null +++ b/Test/Altinn.Broker.Tests/K6.Tests/test.js @@ -0,0 +1,82 @@ +import http from 'k6/http'; +import { sleep, check, fail } from 'k6'; + +export const options = { + vus: 25, + duration: '10m', + insecureSkipTLSVerify: true, + iterations: 25 + + //remove this line before doing a real test + //httpDebug: 'full', +}; + +var tokens = { + DUMMY_SENDER_TOKEN: "", + DUMMY_SERVICE_OWNER_TOKEN: "" +} +baseUrl = "http://localhost:5096" + + +const file = open("./data/testfile.txt", "b"); +function checkResult(res, status) { + if (!status) { + console.error(res) + } +} + +export function setup() { + let headers = generateHeaders(tokens.DUMMY_SERVICE_OWNER_TOKEN, 'application/json') + + //set fileTransfer TTL to 15 minutes. Should be longer than the test time + var fileRes = http.put(`${baseUrl}/broker/api/v1/serviceowner/fileretention`, JSON.stringify({ + fileTransferTimeToLive: "PT15M" + }), { headers: headers }); + + if ( + !check(fileRes, { + 'status code MUST be 200 or 409': (fileRes) => fileRes.status === 200 || fileRes.status === 409, + }) + ) { + fail('Could not update file transfer TTL. 
Exiting'); + } + +} + +export default async function () { + + var baseFile = { + resourceId: 'altinn-broker-test-resource-1', + checksum: null, + fileName: 'testfile.txt', + recipients: ['0192:986252932'], + sender: '0192:991825827', + sendersFileTransferReference: 'test-data' + } + sleep(1); + + let headers = generateHeaders(tokens.DUMMY_SENDER_TOKEN, 'application/json') + var res = http.post(`${baseUrl}/broker/api/v1/filetransfer`, JSON.stringify(baseFile), { headers: headers }); + var status = check(res, { 'Initialize: status was 200': (r) => r.status == 200 }); + checkResult(res, status) + sleep(1); + + headers = generateHeaders(tokens.DUMMY_SENDER_TOKEN, 'application/octet-stream') + const data = { + field: 'this is a standard form field', + file: http.file(file, 'testfile.txt') + } + var res2 = http.post(`${baseUrl}/broker/api/v1/filetransfer/${res.body}/upload`, data, { timeout: "600s", headers: headers }); + status = check(res2, { 'Upload: status was 200': (r) => r.status == 200 }); + checkResult(res, status) + +} +function generateHeaders(token, contentType) { + return { + 'Authorization': 'Bearer ' + token, + 'Content-Type': contentType, + 'Accept': '*/*, text/plain', + 'Accept-Encoding': 'gzip, deflate, br', + 'Connection': 'keep-alive' + } +} \ No newline at end of file diff --git a/docker-compose-test.yml b/docker-compose-test.yml new file mode 100644 index 00000000..3f6a064d --- /dev/null +++ b/docker-compose-test.yml @@ -0,0 +1,9 @@ +version: '3.4' + +services: + k6-test: + image: grafana/k6:latest + command: run /test.js + volumes: + - ./Test/Altinn.Broker.Tests/K6.Tests/test.js:/test.js + - ./Test/Altinn.Broker.Tests/K6.Tests/data:/data From f69b59ee6949678783d32cce5fbd4e938bf30320 Mon Sep 17 00:00:00 2001 From: Hammerbeck Date: Wed, 6 Mar 2024 16:02:16 +0100 Subject: [PATCH 09/13] keep connection alive for 1 hour --- src/Altinn.Broker.API/Program.cs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/Altinn.Broker.API/Program.cs 
b/src/Altinn.Broker.API/Program.cs index e16a5357..5bd6c1a4 100644 --- a/src/Altinn.Broker.API/Program.cs +++ b/src/Altinn.Broker.API/Program.cs @@ -163,6 +163,7 @@ static void ConfigureServices(IServiceCollection services, IConfiguration config { options.Limits.MaxRequestBodySize = null; options.Limits.MaxRequestBufferSize = null; + options.Limits.KeepAliveTimeout = TimeSpan.FromMinutes(60); options.Limits.MinRequestBodyDataRate = new MinDataRate(bytesPerSecond: 100, gracePeriod: TimeSpan.FromSeconds(10)); }); services.Configure(options => From 9bff9e145404570b864b8bf0dff6a2ef45b3ade8 Mon Sep 17 00:00:00 2001 From: Hammerbeck Date: Thu, 7 Mar 2024 11:45:59 +0100 Subject: [PATCH 10/13] some changes to the test --- .../K6.Tests/data/testfile.txt | 1 + Test/Altinn.Broker.Tests/K6.Tests/test.js | 37 ++++++++++++------- 2 files changed, 24 insertions(+), 14 deletions(-) create mode 100644 Test/Altinn.Broker.Tests/K6.Tests/data/testfile.txt diff --git a/Test/Altinn.Broker.Tests/K6.Tests/data/testfile.txt b/Test/Altinn.Broker.Tests/K6.Tests/data/testfile.txt new file mode 100644 index 00000000..5daa488a --- /dev/null +++ b/Test/Altinn.Broker.Tests/K6.Tests/data/testfile.txt @@ -0,0 +1 @@ +this is a testfile \ No newline at end of file diff --git a/Test/Altinn.Broker.Tests/K6.Tests/test.js b/Test/Altinn.Broker.Tests/K6.Tests/test.js index 05fda280..b3e0c709 100644 --- a/Test/Altinn.Broker.Tests/K6.Tests/test.js +++ b/Test/Altinn.Broker.Tests/K6.Tests/test.js @@ -2,10 +2,9 @@ import http from 'k6/http'; import { sleep, check, fail } from 'k6'; export const options = { - vus: 25, + vus: 20, duration: '10m', - insecureSkipTLSVerify: true, - iterations: 25 + iterations: 20 //remove this line before doing a real test //httpDebug: 'full', @@ -15,12 +14,14 @@ var tokens = { DUMMY_SENDER_TOKEN: "", DUMMY_SERVICE_OWNER_TOKEN: "" } -baseUrl = "http://localhost:5096" + +var baseUrl = "http://localhost:5096" const file = open("./data/testfile.txt", "b"); function checkResult(res, 
status) { if (!status) { + console.error(status) console.error(res) } } @@ -53,22 +54,30 @@ export default async function () { sender: '0192:991825827', sendersFileTransferReference: 'test-data' } - sleep(1); let headers = generateHeaders(tokens.DUMMY_SENDER_TOKEN, 'application/json') - var res = http.post(`${baseUrl}/broker/api/v1/filetransfer`, JSON.stringify(baseFile), { headers: headers }); + var res = await http.asyncRequest('POST', + `${baseUrl}/broker/api/v1/filetransfer`, + JSON.stringify(baseFile), { headers: headers }); var status = check(res, { 'Initialize: status was 200': (r) => r.status == 200 }); - checkResult(res, status) sleep(1); + checkResult(res, status) - headers = generateHeaders(tokens.DUMMY_SENDER_TOKEN, 'application/octet-stream') - const data = { - field: 'this is a standard form field', - file: http.file(file, 'testfile.txt') + + if (status) { + headers = generateHeaders(tokens.DUMMY_SENDER_TOKEN, 'application/octet-stream') + const data = { + field: 'this is a standard form field', + file: http.file(file, 'testfile.txt') + } + var res2 = await http.asyncRequest('POST', + `${baseUrl}/broker/api/v1/filetransfer/${res.body}/upload`, data, { timeout: "600s", headers: headers }); + sleep(1); + status = check(res2, { 'Upload: status was 200': (r) => r.status == 200 }); + checkResult(res, status) } - var res2 = http.post(`${baseUrl}/broker/api/v1/filetransfer/${res.body}/upload`, data, { timeout: "600s", headers: headers }); - status = check(res2, { 'Upload: status was 200': (r) => r.status == 200 }); - checkResult(res, status) + + } function generateHeaders(token, contentType) { From df89e8501723cf0398ef48a90b09c9d7a6887938 Mon Sep 17 00:00:00 2001 From: Hammerbeck Date: Thu, 7 Mar 2024 12:01:44 +0100 Subject: [PATCH 11/13] update readme --- README.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/README.md b/README.md index a477d542..82776b04 100644 --- a/README.md +++ b/README.md @@ -48,6 +48,11 @@ The build and push workflow 
produces a docker image that is pushed to Github pac ### Load testing with k6 +Before running tests you should mock the following: +- AltinnAuthorization by setting the function CheckUserAccess to return true +- AltinnRegisterService to return a string +- AltinnResourceRegister to return a ResourceEntity +- Use the ConsoleLogEventBus We run load tests using k6. To run without installing k6 you can use docker-compose(base url has to be http://host.docker.internal:5096): ```docker-compose -f docker-compose-test.yml up k6-test``` From ef611d3b82632c4decdf322cdeabf31ae792f876 Mon Sep 17 00:00:00 2001 From: Hammerbeck Date: Thu, 7 Mar 2024 12:17:30 +0100 Subject: [PATCH 12/13] format js file --- Test/Altinn.Broker.Tests/K6.Tests/test.js | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/Test/Altinn.Broker.Tests/K6.Tests/test.js b/Test/Altinn.Broker.Tests/K6.Tests/test.js index b3e0c709..f18035a6 100644 --- a/Test/Altinn.Broker.Tests/K6.Tests/test.js +++ b/Test/Altinn.Broker.Tests/K6.Tests/test.js @@ -14,10 +14,8 @@ var tokens = { DUMMY_SENDER_TOKEN: "", DUMMY_SERVICE_OWNER_TOKEN: "" } - var baseUrl = "http://localhost:5096" - const file = open("./data/testfile.txt", "b"); function checkResult(res, status) { if (!status) { @@ -41,11 +39,9 @@ export function setup() { ) { fail('Could not update file transfer TTL. 
Exiting'); } - } export default async function () { - var baseFile = { resourceId: 'altinn-broker-test-resource-1', checksum: null, @@ -63,7 +59,6 @@ export default async function () { sleep(1); checkResult(res, status) - if (status) { headers = generateHeaders(tokens.DUMMY_SENDER_TOKEN, 'application/octet-stream') const data = { @@ -76,10 +71,8 @@ export default async function () { status = check(res2, { 'Upload: status was 200': (r) => r.status == 200 }); checkResult(res, status) } - - - } + function generateHeaders(token, contentType) { return { 'Authorization': 'Bearer ' + token, From e38fa77dff945950bf3e3d0e00caf97ac5b35192 Mon Sep 17 00:00:00 2001 From: Hammerbeck Date: Thu, 7 Mar 2024 13:47:23 +0100 Subject: [PATCH 13/13] Readme + some cleanup --- README.md | 10 ++++++++++ Test/Altinn.Broker.Tests/K6.Tests/test.js | 22 ++++++++++------------ 2 files changed, 20 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index 82776b04..eb41ea17 100644 --- a/README.md +++ b/README.md @@ -54,6 +54,16 @@ Before running tests you should mock the following: - AltinnResourceRegister to return a ResourceEntity - Use the ConsoleLogEventBus +Constants: +- BASE_URL: environment to test. +- TOKENS: tokens for a service owner(TOKENS.DUMMY_SERVICE_OWNER_TOKEN) and a sender(TOKENS.DUMMY_SENDER_TOKEN), which can be found in postman(Authenticate as Sender/serviceOwner) in the Authenticator folder. + +k6 option variables: +- VUs: How many virtual users running tests at the same time. +- iterations: how many tests TOTAL should be completed. vus/iterations=test per vus. 0 means infinite iterations for as long as the test will run. +- httpDebug: full/summary. Outputs information about http requests and responses +- duration: How long the test should be running. The test also adds a 30 seconds graceful stop period on top of this. + +We run load tests using k6. 
To run without installing k6 you can use docker-compose(base url has to be http://host.docker.internal:5096): ```docker-compose -f docker-compose-test.yml up k6-test``` diff --git a/Test/Altinn.Broker.Tests/K6.Tests/test.js b/Test/Altinn.Broker.Tests/K6.Tests/test.js index f18035a6..cb0e43be 100644 --- a/Test/Altinn.Broker.Tests/K6.Tests/test.js +++ b/Test/Altinn.Broker.Tests/K6.Tests/test.js @@ -4,17 +4,15 @@ import { sleep, check, fail } from 'k6'; export const options = { vus: 20, duration: '10m', - iterations: 20 - - //remove this line before doing a real test - //httpDebug: 'full', + iterations: 20 // can be set to 0 or removed to run indefinitely + //httpDebug: 'full', // information about the request and response }; -var tokens = { +var TOKENS = { DUMMY_SENDER_TOKEN: "", DUMMY_SERVICE_OWNER_TOKEN: "" } -var baseUrl = "http://localhost:5096" +const BASE_URL = "http://localhost:5096" const file = open("./data/testfile.txt", "b"); function checkResult(res, status) { @@ -25,10 +23,10 @@ function checkResult(res, status) { } export function setup() { - let headers = generateHeaders(tokens.DUMMY_SERVICE_OWNER_TOKEN, 'application/json') + let headers = generateHeaders(TOKENS.DUMMY_SERVICE_OWNER_TOKEN, 'application/json') //set fileTransfer TTL to 15 minutes. 
Should be longer than the test time - var fileRes = http.put(`${baseUrl}/broker/api/v1/serviceowner/fileretention`, JSON.stringify({ + var fileRes = http.put(`${BASE_URL}/broker/api/v1/serviceowner/fileretention`, JSON.stringify({ fileTransferTimeToLive: "PT15M" }), { headers: headers }); @@ -51,22 +49,22 @@ export default async function () { sendersFileTransferReference: 'test-data' } - let headers = generateHeaders(tokens.DUMMY_SENDER_TOKEN, 'application/json') + let headers = generateHeaders(TOKENS.DUMMY_SENDER_TOKEN, 'application/json') var res = await http.asyncRequest('POST', - `${baseUrl}/broker/api/v1/filetransfer`, + `${BASE_URL}/broker/api/v1/filetransfer`, JSON.stringify(baseFile), { headers: headers }); var status = check(res, { 'Initialize: status was 200': (r) => r.status == 200 }); sleep(1); checkResult(res, status) if (status) { - headers = generateHeaders(tokens.DUMMY_SENDER_TOKEN, 'application/octet-stream') + headers = generateHeaders(TOKENS.DUMMY_SENDER_TOKEN, 'application/octet-stream') const data = { field: 'this is a standard form field', file: http.file(file, 'testfile.txt') } var res2 = await http.asyncRequest('POST', - `${baseUrl}/broker/api/v1/filetransfer/${res.body}/upload`, data, { timeout: "600s", headers: headers }); + `${BASE_URL}/broker/api/v1/filetransfer/${res.body}/upload`, data, { timeout: "600s", headers: headers }); sleep(1); status = check(res2, { 'Upload: status was 200': (r) => r.status == 200 }); checkResult(res, status)