From 851056cf33283872794f45e1006f769f563a37fd Mon Sep 17 00:00:00 2001 From: Enno <458526+ennru@users.noreply.github.com> Date: Thu, 26 Sep 2024 15:03:43 +0200 Subject: [PATCH] docs: revise URL structure (libraries) --- .github/workflows/link-validator.yml | 2 +- CONTRIBUTING.md | 2 +- README.md | 8 ++--- .../src/test/java/docs/javadsl/Examples.java | 4 +-- .../test/scala/docs/scaladsl/Examples.scala | 4 +-- build.sbt | 8 ++--- .../AkkaDiscoverySessionProvider.scala | 2 +- contributor-advice.md | 2 +- .../couchbase/javadsl/DiscoverySupport.java | 2 +- .../couchbase/scaladsl/DiscoverySupport.scala | 4 +-- docs/release-train-issue-template.md | 4 +-- docs/src/main/paradox/.htaccess | 10 +++--- docs/src/main/paradox/external/grpc.md | 4 +-- docs/src/main/paradox/external/http.md | 2 +- docs/src/main/paradox/external/tcp.md | 2 +- docs/src/main/paradox/file.md | 2 +- .../src/main/paradox/google-cloud-bigquery.md | 2 +- docs/src/main/paradox/google-fcm.md | 2 +- docs/src/main/paradox/index.md | 2 +- docs/src/main/paradox/kafka.md | 2 +- docs/src/main/paradox/mqtt.md | 2 +- docs/src/main/paradox/other-docs/snapshots.md | 2 +- .../src/main/paradox/other-docs/versioning.md | 2 +- docs/src/main/paradox/overview.md | 2 +- docs/src/main/paradox/release-notes/1.0.x.md | 6 ++-- .../main/paradox/release-notes/1.0.x/kafka.md | 2 +- .../main/paradox/release-notes/1.1.x/kafka.md | 2 +- docs/src/main/paradox/release-notes/2.0.x.md | 6 ++-- .../paradox/release-notes/2.0.x/cassandra.md | 2 +- .../main/paradox/release-notes/2.0.x/kafka.md | 2 +- docs/src/main/paradox/release-notes/3.0.x.md | 2 +- .../main/paradox/release-notes/3.0.x/kafka.md | 2 +- docs/src/main/paradox/release-notes/4.0.x.md | 2 +- docs/src/main/paradox/sqs.md | 2 +- docs/src/main/paradox/unix-domain-socket.md | 4 +-- .../test/java/docs/javadsl/ExampleTest.java | 4 +-- .../scala/docs/scaladsl/ExampleSpec.scala | 4 +-- .../docs/javadsl/KinesisFirehoseSnippets.java | 2 +- .../java/docs/javadsl/KinesisSnippets.java | 2 
+- .../scaladsl/KinesisFirehoseSnippets.scala | 2 +- .../scala/docs/scaladsl/KinesisSnippets.scala | 2 +- project/Common.scala | 31 ++++++++++--------- project/Dependencies.scala | 8 ++++- scripts/link-validator.conf | 8 ++--- .../java/docs/javadsl/SnsPublisherTest.java | 4 +-- .../alpakka/sns/IntegrationTestContext.scala | 4 +-- .../alpakka/sqs/javadsl/SqsPublishFlow.scala | 2 +- .../alpakka/sqs/javadsl/SqsPublishSink.scala | 6 ++-- .../alpakka/sqs/scaladsl/SqsPublishFlow.scala | 2 +- .../alpakka/sqs/scaladsl/SqsPublishSink.scala | 6 ++-- .../alpakka/sqs/javadsl/BaseSqsTest.java | 4 +-- .../sqs/scaladsl/DefaultTestContext.scala | 4 +-- .../alpakka/testkit/CapturingAppender.scala | 4 +-- .../stream/alpakka/testkit/LogbackUtil.scala | 2 +- .../testkit/javadsl/LogCapturingJunit4.scala | 2 +- .../testkit/scaladsl/LogCapturing.scala | 2 +- 56 files changed, 112 insertions(+), 103 deletions(-) diff --git a/.github/workflows/link-validator.yml b/.github/workflows/link-validator.yml index 391401dae4..013ee1451d 100644 --- a/.github/workflows/link-validator.yml +++ b/.github/workflows/link-validator.yml @@ -37,4 +37,4 @@ jobs: run: sbt docs/makeSite - name: Run Link Validator - run: cs launch net.runne::site-link-validator:0.2.4 -- scripts/link-validator.conf + run: cs launch net.runne::site-link-validator:0.2.5 -- scripts/link-validator.conf diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 28c0eb2e59..9c87620480 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -56,7 +56,7 @@ Please have a look at our [contributor advice](contributor-advice.md). ## Binary compatibility (MiMa) Binary compatibility rules and guarantees are described in depth in the [Binary Compatibility Rules -](https://doc.akka.io/docs/akka/snapshot/common/binary-compatibility-rules.html) section of the Akka documentation. +](https://doc.akka.io/libraries/akka-core/snapshot/common/binary-compatibility-rules.html) section of the Akka documentation. 
Akka projects use [MiMa](https://github.com/lightbend/mima) to validate binary compatibility of incoming pull requests. If your PR fails due to binary compatibility issues, you may see an error like this: diff --git a/README.md b/README.md index 698dce9850..2028fc1852 100644 --- a/README.md +++ b/README.md @@ -3,16 +3,16 @@ Alpakka Systems don't come alone. In the modern world of microservices and cloud deployment, new components must interact with legacy systems, making integration an important key to success. Reactive Streams give us a technology-independent tool to let these heterogeneous systems communicate without overwhelming each other. -The Alpakka project implements stream-aware & reactive integration pipelines for Java and Scala. It is built on top of [Akka Streams](https://doc.akka.io/docs/akka/current/stream/index.html), and has been designed from the ground up to understand streaming natively and provide a DSL for reactive and stream-oriented programming, with built-in support for backpressure. Akka Streams is a [Reactive Streams](http://www.reactive-streams.org/) and JDK 9+ [java.util.concurrent.Flow](https://docs.oracle.com/javase/10/docs/api/java/util/concurrent/Flow.html)-compliant implementation and therefore [fully interoperable](https://doc.akka.io/docs/akka/current/general/stream/stream-design.html#interoperation-with-other-reactive-streams-implementations) with other implementations. +The Alpakka project implements stream-aware & reactive integration pipelines for Java and Scala. It is built on top of [Akka Streams](https://doc.akka.io/libraries/akka-core/current/stream/index.html), and has been designed from the ground up to understand streaming natively and provide a DSL for reactive and stream-oriented programming, with built-in support for backpressure. 
Akka Streams is a [Reactive Streams](http://www.reactive-streams.org/) and JDK 9+ [java.util.concurrent.Flow](https://docs.oracle.com/javase/10/docs/api/java/util/concurrent/Flow.html)-compliant implementation and therefore [fully interoperable](https://doc.akka.io/libraries/akka-core/current/general/stream/stream-design.html#interoperation-with-other-reactive-streams-implementations) with other implementations. The Akka family of projects is managed by teams at [Lightbend](https://lightbend.com) with help from the community. Documentation ------------- -- [Alpakka reference](https://doc.akka.io/docs/alpakka/current/) documentation +- [Alpakka reference](https://doc.akka.io/libraries/alpakka/current/) documentation -- [Alpakka Kafka connector reference](https://doc.akka.io/docs/akka-stream-kafka/current/) documentation +- [Alpakka Kafka connector reference](https://doc.akka.io/libraries/akka-stream-kafka/current/) documentation To keep up with the latest Alpakka releases check out [Alpakka releases](https://github.com/akka/alpakka/releases) and [Alpakka Kafka connector releases](https://github.com/akka/alpakka-kafka/releases). @@ -58,4 +58,4 @@ License ------- Alpakka is licensed under the [Business Source License (BSL) 1.1](https://github.com/akka/alpakka/blob/main/LICENSE), please see the [Akka License FAQ](https://www.lightbend.com/akka/license-faq). -Tests and documentation are under a separate license, see the LICENSE file in each documentation and test root directory for details. \ No newline at end of file +Tests and documentation are under a separate license, see the LICENSE file in each documentation and test root directory for details. 
diff --git a/awslambda/src/test/java/docs/javadsl/Examples.java b/awslambda/src/test/java/docs/javadsl/Examples.java index 3c0b08f0d3..886ad29c6f 100644 --- a/awslambda/src/test/java/docs/javadsl/Examples.java +++ b/awslambda/src/test/java/docs/javadsl/Examples.java @@ -38,7 +38,7 @@ public void initClient() { // #init-client // Don't encode credentials in your source code! - // see https://doc.akka.io/docs/alpakka/current/aws-shared-configuration.html + // see https://doc.akka.io/libraries/alpakka/current/aws-shared-configuration.html StaticCredentialsProvider credentialsProvider = StaticCredentialsProvider.create(AwsBasicCredentials.create("x", "x")); LambdaAsyncClient awsLambdaClient = @@ -46,7 +46,7 @@ public void initClient() { .credentialsProvider(credentialsProvider) .httpClient(AkkaHttpClient.builder().withActorSystem(system).build()) // Possibility to configure the retry policy - // see https://doc.akka.io/docs/alpakka/current/aws-shared-configuration.html + // see https://doc.akka.io/libraries/alpakka/current/aws-shared-configuration.html // .overrideConfiguration(...) .build(); diff --git a/awslambda/src/test/scala/docs/scaladsl/Examples.scala b/awslambda/src/test/scala/docs/scaladsl/Examples.scala index 6471ca0d79..26aeb61e40 100644 --- a/awslambda/src/test/scala/docs/scaladsl/Examples.scala +++ b/awslambda/src/test/scala/docs/scaladsl/Examples.scala @@ -23,14 +23,14 @@ object Examples { import software.amazon.awssdk.services.lambda.LambdaAsyncClient // Don't encode credentials in your source code! 
- // see https://doc.akka.io/docs/alpakka/current/aws-shared-configuration.html + // see https://doc.akka.io/libraries/alpakka/current/aws-shared-configuration.html val credentialsProvider = StaticCredentialsProvider.create(AwsBasicCredentials.create("x", "x")) implicit val lambdaClient: LambdaAsyncClient = LambdaAsyncClient .builder() .credentialsProvider(credentialsProvider) .httpClient(AkkaHttpClient.builder().withActorSystem(system).build()) // Possibility to configure the retry policy - // see https://doc.akka.io/docs/alpakka/current/aws-shared-configuration.html + // see https://doc.akka.io/libraries/alpakka/current/aws-shared-configuration.html // .overrideConfiguration(...) .build() diff --git a/build.sbt b/build.sbt index 1f85a5a850..98634c2487 100644 --- a/build.sbt +++ b/build.sbt @@ -387,7 +387,7 @@ lazy val docs = project ("http://www\\.scala-lang\\.org/".r, _ => "https://www\\.scala-lang\\.org/"), ("https://javadoc\\.io/page/".r, _ => "https://javadoc\\.io/static/") ), - Paradox / siteSubdirName := s"docs/alpakka/${projectInfoVersion.value}", + Paradox / siteSubdirName := s"libraries/alpakka/${projectInfoVersion.value}", // make use of https://github.com/scala/scala/pull/8663 Compile / doc / scalacOptions ++= Seq( "-jdk-api-doc-base", @@ -399,16 +399,16 @@ lazy val docs = project "hadoop.version" -> Dependencies.HadoopVersion, "extref.github.base_url" -> s"https://github.com/akka/alpakka/tree/${if (isSnapshot.value) "main" else "v" + version.value}/%s", - "extref.akka.base_url" -> s"https://doc.akka.io/docs/akka/${Dependencies.AkkaBinaryVersion}/%s", + "extref.akka.base_url" -> s"https://doc.akka.io/libraries/akka-core/${Dependencies.AkkaBinaryVersion}/%s", "scaladoc.akka.base_url" -> s"https://doc.akka.io/api/akka/${Dependencies.AkkaBinaryVersion}", "javadoc.akka.base_url" -> s"https://doc.akka.io/japi/akka/${Dependencies.AkkaBinaryVersion}/", "javadoc.akka.link_style" -> "direct", - "extref.akka-http.base_url" -> 
s"https://doc.akka.io/docs/akka-http/${Dependencies.AkkaHttpBinaryVersion}/%s", + "extref.akka-http.base_url" -> s"https://doc.akka.io/libraries/akka-http/${Dependencies.AkkaHttpBinaryVersion}/%s", "scaladoc.akka.http.base_url" -> s"https://doc.akka.io/api/akka-http/${Dependencies.AkkaHttpBinaryVersion}/", "javadoc.akka.http.base_url" -> s"https://doc.akka.io/japi/akka-http/${Dependencies.AkkaHttpBinaryVersion}/", // Akka gRPC "akka-grpc.version" -> Dependencies.AkkaGrpcBinaryVersion, - "extref.akka-grpc.base_url" -> s"https://doc.akka.io/docs/akka-grpc/${Dependencies.AkkaGrpcBinaryVersion}/%s", + "extref.akka-grpc.base_url" -> s"https://doc.akka.io/libraries/akka-grpc/${Dependencies.AkkaGrpcBinaryVersion}/%s", // Couchbase "couchbase.version" -> Dependencies.CouchbaseVersion, "extref.couchbase.base_url" -> s"https://docs.couchbase.com/java-sdk/${Dependencies.CouchbaseVersionForDocs}/%s", diff --git a/cassandra/src/main/scala/akka/stream/alpakka/cassandra/AkkaDiscoverySessionProvider.scala b/cassandra/src/main/scala/akka/stream/alpakka/cassandra/AkkaDiscoverySessionProvider.scala index d37d85a22c..aa07e6410d 100644 --- a/cassandra/src/main/scala/akka/stream/alpakka/cassandra/AkkaDiscoverySessionProvider.scala +++ b/cassandra/src/main/scala/akka/stream/alpakka/cassandra/AkkaDiscoverySessionProvider.scala @@ -17,7 +17,7 @@ import scala.jdk.DurationConverters._ import scala.jdk.FutureConverters._ /** - * [[https://doc.akka.io/docs/akka/current/discovery/index.html Akka Discovery]] + * [[https://doc.akka.io/libraries/akka-core/current/discovery/index.html Akka Discovery]] * is enabled by setting the `service-discovery.name` in the given `CassandraSession` config. 
 * * Akka Discovery overwrites the basic.contact-points` from the configuration with addresses diff --git a/contributor-advice.md b/contributor-advice.md index 841b6ddfea..a7a403a52a 100644 --- a/contributor-advice.md +++ b/contributor-advice.md @@ -105,7 +105,7 @@ All Akka APIs aim to evolve in a binary compatible way within minor versions. 1. To generate a case class replacement, consider using [Kaze Class](https://github.com/ktoso/kaze-class) -See [Binary Compatibilty Rules](https://doc.akka.io/docs/akka/current/common/binary-compatibility-rules.html) in the Akka documentation. +See [Binary Compatibility Rules](https://doc.akka.io/libraries/akka-core/current/common/binary-compatibility-rules.html) in the Akka documentation. See [Binary Compatibility for library authors](https://docs.scala-lang.org/overviews/core/binary-compatibility-for-library-authors.html) diff --git a/couchbase/src/main/java/akka/stream/alpakka/couchbase/javadsl/DiscoverySupport.java b/couchbase/src/main/java/akka/stream/alpakka/couchbase/javadsl/DiscoverySupport.java index a86630ca72..6d273cab4a 100644 --- a/couchbase/src/main/java/akka/stream/alpakka/couchbase/javadsl/DiscoverySupport.java +++ b/couchbase/src/main/java/akka/stream/alpakka/couchbase/javadsl/DiscoverySupport.java @@ -13,7 +13,7 @@ /** * Utility to delegate Couchbase node address lookup to - * [[https://doc.akka.io/docs/akka/current/discovery/index.html Akka Discovery]]. + * [[https://doc.akka.io/libraries/akka-core/current/discovery/index.html Akka Discovery]]. 
*/ public final class DiscoverySupport { diff --git a/couchbase/src/main/scala/akka/stream/alpakka/couchbase/scaladsl/DiscoverySupport.scala b/couchbase/src/main/scala/akka/stream/alpakka/couchbase/scaladsl/DiscoverySupport.scala index 2f51f656ac..6aaffad06d 100644 --- a/couchbase/src/main/scala/akka/stream/alpakka/couchbase/scaladsl/DiscoverySupport.scala +++ b/couchbase/src/main/scala/akka/stream/alpakka/couchbase/scaladsl/DiscoverySupport.scala @@ -20,7 +20,7 @@ import scala.concurrent.Future import scala.concurrent.duration.FiniteDuration /** - * Utility to delegate Couchbase node address lookup to [[https://doc.akka.io/docs/akka/current/discovery/index.html Akka Discovery]]. + * Utility to delegate Couchbase node address lookup to [[https://doc.akka.io/libraries/akka-core/current/discovery/index.html Akka Discovery]]. */ sealed class DiscoverySupport private { @@ -96,7 +96,7 @@ sealed class DiscoverySupport private { } /** - * Utility to delegate Couchbase node address lookup to [[https://doc.akka.io/docs/akka/current/discovery/index.html Akka Discovery]]. + * Utility to delegate Couchbase node address lookup to [[https://doc.akka.io/libraries/akka-core/current/discovery/index.html Akka Discovery]]. */ object DiscoverySupport extends DiscoverySupport { diff --git a/docs/release-train-issue-template.md b/docs/release-train-issue-template.md index d31d2450ed..da4eccbe12 100644 --- a/docs/release-train-issue-template.md +++ b/docs/release-train-issue-template.md @@ -30,7 +30,7 @@ Key links: ### Check availability - [ ] Check [API](https://doc.akka.io/api/alpakka/$VERSION$/) documentation -- [ ] Check [reference](https://doc.akka.io/docs/alpakka/$VERSION$/) documentation. Check that the reference docs were deployed and show a version warning (see section below on how to fix the version warning). +- [ ] Check [reference](https://doc.akka.io/libraries/alpakka/$VERSION$/) documentation. 
Check that the reference docs were deployed and show a version warning (see section below on how to fix the version warning). - [ ] Check the release on https://repo.akka.io/maven/com/lightbend/akka/akka-stream-alpakka-xml_2.13/$VERSION$/akka-stream-alpakka-xml_2.13-$VERSION$.pom ### When everything is on https://repo.akka.io/maven @@ -40,7 +40,7 @@ Key links: ``` cd ~/www git status - git add docs/alpakka/current docs/alpakka/$VERSION$ + git add libraries/alpakka/current libraries/alpakka/$VERSION$ git add api/alpakka/current api/alpakka/$VERSION$ git commit -m "Alpakka $VERSION$" ``` diff --git a/docs/src/main/paradox/.htaccess b/docs/src/main/paradox/.htaccess index ed4a1d6f32..e79d0249dd 100644 --- a/docs/src/main/paradox/.htaccess +++ b/docs/src/main/paradox/.htaccess @@ -1,9 +1,9 @@ # This file can be used for redirects like: -RedirectMatch 301 ^/docs/alpakka/([^/]+)/data-transformations/recordio.html https://doc.akka.io/docs/alpakka/$1/data-transformations/simple-codecs.html +RedirectMatch 301 ^/docs/alpakka/([^/]+)/data-transformations/recordio.html https://doc.akka.io/libraries/alpakka/$1/data-transformations/simple-codecs.html # Move CSV example to alpakka-samples RedirectMatch 301 ^/docs/alpakka/([^/]+)/examples/csv-samples.html https://akka.io/alpakka-samples/ # Drop pre-1.0 release notes -RedirectMatch 301 ^/docs/alpakka/([^/]+)/release-notes/1.0-RC1.html https://doc.akka.io/docs/alpakka/$1/release-notes/1.0.x.html -RedirectMatch 301 ^/docs/alpakka/([^/]+)/release-notes/1.0-M3.html https://doc.akka.io/docs/alpakka/$1/release-notes/1.0.x.html -RedirectMatch 301 ^/docs/alpakka/([^/]+)/release-notes/1.0-M2.html https://doc.akka.io/docs/alpakka/$1/release-notes/1.0.x.html -RedirectMatch 301 ^/docs/alpakka/([^/]+)/release-notes/1.0-M1.html https://doc.akka.io/docs/alpakka/$1/release-notes/1.0.x.html +RedirectMatch 301 ^/docs/alpakka/([^/]+)/release-notes/1.0-RC1.html https://doc.akka.io/libraries/alpakka/$1/release-notes/1.0.x.html +RedirectMatch 301 
^/docs/alpakka/([^/]+)/release-notes/1.0-M3.html https://doc.akka.io/libraries/alpakka/$1/release-notes/1.0.x.html +RedirectMatch 301 ^/docs/alpakka/([^/]+)/release-notes/1.0-M2.html https://doc.akka.io/libraries/alpakka/$1/release-notes/1.0.x.html +RedirectMatch 301 ^/docs/alpakka/([^/]+)/release-notes/1.0-M1.html https://doc.akka.io/libraries/alpakka/$1/release-notes/1.0.x.html diff --git a/docs/src/main/paradox/external/grpc.md b/docs/src/main/paradox/external/grpc.md index 5749c9cb50..c32ae71a7d 100644 --- a/docs/src/main/paradox/external/grpc.md +++ b/docs/src/main/paradox/external/grpc.md @@ -1,8 +1,8 @@ # gRPC -[gRPC](https://grpc.io/) is a transport mechanism for request/response and (non-persistent) streaming use cases. See [Why gRPC?](https://doc.akka.io/docs/akka-grpc/current/whygrpc.html) for more information about when to use gRPC as your transport. +[gRPC](https://grpc.io/) is a transport mechanism for request/response and (non-persistent) streaming use cases. See [Why gRPC?](https://doc.akka.io/libraries/akka-grpc/current/whygrpc.html) for more information about when to use gRPC as your transport. ## Akka gRPC Akka gRPC provides support for building Reactive Streams-compliant gRPC servers and clients on top of Akka Streams. -Learn more about it in the [Akka gRPC documentation](https://doc.akka.io/docs/akka-grpc/current/). +Learn more about it in the [Akka gRPC documentation](https://doc.akka.io/libraries/akka-grpc/current/). diff --git a/docs/src/main/paradox/external/http.md b/docs/src/main/paradox/external/http.md index 7ddc950655..4f7f1cc1d2 100644 --- a/docs/src/main/paradox/external/http.md +++ b/docs/src/main/paradox/external/http.md @@ -5,4 +5,4 @@ Akka comes with its Reactive Streams-compliant **HTTP server and client**. ## Akka HTTP This library is not maintained in the Alpakka repository. -Learn more about it in the [Akka HTTP documentation](https://doc.akka.io/docs/akka-http/current/). 
+Learn more about it in the [Akka HTTP documentation](https://doc.akka.io/libraries/akka-http/current/). diff --git a/docs/src/main/paradox/external/tcp.md b/docs/src/main/paradox/external/tcp.md index ca0ffca739..e6ea0ebae8 100644 --- a/docs/src/main/paradox/external/tcp.md +++ b/docs/src/main/paradox/external/tcp.md @@ -6,4 +6,4 @@ Akka Streams provides a way of handling **TCP connections** with streams. While ## Akka TCP Akka comes with its Reactive Streams-compliant TCP server and client. -Learn more about it in the [Akka Streaming TCP documentation](https://doc.akka.io/docs/akka/current/stream/stream-io.html#streaming-tcp). +Learn more about it in the [Akka Streaming TCP documentation](https://doc.akka.io/libraries/akka-core/current/stream/stream-io.html#streaming-tcp). diff --git a/docs/src/main/paradox/file.md b/docs/src/main/paradox/file.md index 5a62276b67..a6d773995c 100644 --- a/docs/src/main/paradox/file.md +++ b/docs/src/main/paradox/file.md @@ -38,7 +38,7 @@ The table below shows direct dependencies of this module and the second tab show Use the `FileIO` class to create streams reading from or writing to files. It is part part of Akka streams. -[Akka Streaming File IO documentation](https://doc.akka.io/docs/akka/current/stream/stream-io.html#streaming-file-io) +[Akka Streaming File IO documentation](https://doc.akka.io/libraries/akka-core/current/stream/stream-io.html#streaming-file-io) ## Tailing a file into a stream diff --git a/docs/src/main/paradox/google-cloud-bigquery.md b/docs/src/main/paradox/google-cloud-bigquery.md index 26fe9c7f87..f0f7a7be95 100644 --- a/docs/src/main/paradox/google-cloud-bigquery.md +++ b/docs/src/main/paradox/google-cloud-bigquery.md @@ -130,7 +130,7 @@ The BigQuery connector enables loading data into tables via real-time streaming For an overview of these strategies see the [BigQuery documentation](https://cloud.google.com/bigquery/docs/loading-data). 
The @scala[@apidoc[BigQuery.insertAll[In]](BigQuery$)] @java[@apidoc[BigQuery.insertAll](BigQuery$)] method creates a sink that accepts batches of @scala[`Seq[In]`] @java[`List`] -(for example created via the [`batch`](https://doc.akka.io/docs/akka/current/stream/operators/Source-or-Flow/batch.html) operator) and streams them directly into a table. +(for example created via the [`batch`](https://doc.akka.io/libraries/akka-core/current/stream/operators/Source-or-Flow/batch.html) operator) and streams them directly into a table. To enable/disable BigQuery’s best-effort deduplication feature use the appropriate @apidoc[InsertAllRetryPolicy$]. Scala diff --git a/docs/src/main/paradox/google-fcm.md b/docs/src/main/paradox/google-fcm.md index 6aa41f6019..06cdade287 100644 --- a/docs/src/main/paradox/google-fcm.md +++ b/docs/src/main/paradox/google-fcm.md @@ -71,7 +71,7 @@ Java With this type of send you can get responses from the server. These responses can be @scaladoc[FcmSuccessResponse](akka.stream.alpakka.google.firebase.fcm.FcmSuccessResponse) or @scaladoc[FcmErrorResponse](akka.stream.alpakka.google.firebase.fcm.FcmErrorResponse). You can choose what you want to do with this information, but keep in mind -if you try to resend the failed messages you will need to use exponential backoff! (see [Akka docs `RestartFlow.onFailuresWithBackoff`](https://doc.akka.io/docs/akka/current/stream/operators/RestartFlow/onFailuresWithBackoff.html)) +if you try to resend the failed messages you will need to use exponential backoff! (see [Akka docs `RestartFlow.onFailuresWithBackoff`](https://doc.akka.io/libraries/akka-core/current/stream/operators/RestartFlow/onFailuresWithBackoff.html)) If you don't care if the notification was sent successfully, you may use `fireAndForget`. 
diff --git a/docs/src/main/paradox/index.md b/docs/src/main/paradox/index.md index 16ef1f709b..ea674eaa3e 100644 --- a/docs/src/main/paradox/index.md +++ b/docs/src/main/paradox/index.md @@ -1,6 +1,6 @@ # Alpakka Documentation -The [Alpakka project](https://doc.akka.io/docs/alpakka/current/) is an initiative to implement stream-aware and reactive integration pipelines for Java and Scala. It is built on top of [Akka Streams](https://doc.akka.io/docs/akka/current/stream/index.html), and has been designed from the ground up to understand streaming natively and provide a DSL for reactive and stream-oriented programming, with built-in support for backpressure. Akka Streams is a [Reactive Streams](https://www.reactive-streams.org/) and JDK 9+ [java.util.concurrent.Flow](https://docs.oracle.com/javase/10/docs/api/java/util/concurrent/Flow.html)-compliant implementation and therefore [fully interoperable](https://doc.akka.io/docs/akka/current/general/stream/stream-design.html#interoperation-with-other-reactive-streams-implementations) with other implementations. +The [Alpakka project](https://doc.akka.io/libraries/alpakka/current/) is an initiative to implement stream-aware and reactive integration pipelines for Java and Scala. It is built on top of [Akka Streams](https://doc.akka.io/libraries/akka-core/current/stream/index.html), and has been designed from the ground up to understand streaming natively and provide a DSL for reactive and stream-oriented programming, with built-in support for backpressure. Akka Streams is a [Reactive Streams](https://www.reactive-streams.org/) and JDK 9+ [java.util.concurrent.Flow](https://docs.oracle.com/javase/10/docs/api/java/util/concurrent/Flow.html)-compliant implementation and therefore [fully interoperable](https://doc.akka.io/libraries/akka-core/current/general/stream/stream-design.html#interoperation-with-other-reactive-streams-implementations) with other implementations. 
@@toc { depth=2 } diff --git a/docs/src/main/paradox/kafka.md b/docs/src/main/paradox/kafka.md index a735d14c0f..deaf37926d 100644 --- a/docs/src/main/paradox/kafka.md +++ b/docs/src/main/paradox/kafka.md @@ -4,4 +4,4 @@ The **Alpakka Kafka connector** (originally known as **Reactive Kafka** or even ## Separate repository -Please read more about it in the [Alpakka Kafka documentation](https://doc.akka.io/docs/alpakka-kafka/current/). +Please read more about it in the [Alpakka Kafka documentation](https://doc.akka.io/libraries/alpakka-kafka/current/). diff --git a/docs/src/main/paradox/mqtt.md b/docs/src/main/paradox/mqtt.md index 3446fc826b..b6f55900d5 100644 --- a/docs/src/main/paradox/mqtt.md +++ b/docs/src/main/paradox/mqtt.md @@ -63,7 +63,7 @@ Java Most settings are passed on to Paho's `MqttConnectOptions` (@javadoc[API](org.eclipse.paho.client.mqttv3.MqttConnectOptions)) and documented there. @@@ warning { title='Use delayed stream restarts' } -Note that the following examples do not provide any connection management and are designed to get you going quickly. Consider empty client IDs to auto-generate unique identifiers and the use of [delayed stream restarts](https://doc.akka.io/docs/akka/current/stream/stream-error.html?language=scala#delayed-restarts-with-a-backoff-stage). The underlying Paho library's auto-reconnect feature [does not handle initial connections by design](https://github.com/eclipse/paho.mqtt.golang/issues/77). +Note that the following examples do not provide any connection management and are designed to get you going quickly. Consider empty client IDs to auto-generate unique identifiers and the use of [delayed stream restarts](https://doc.akka.io/libraries/akka-core/current/stream/stream-error.html?language=scala#delayed-restarts-with-a-backoff-stage). The underlying Paho library's auto-reconnect feature [does not handle initial connections by design](https://github.com/eclipse/paho.mqtt.golang/issues/77). 
@@@ diff --git a/docs/src/main/paradox/other-docs/snapshots.md b/docs/src/main/paradox/other-docs/snapshots.md index 9914ab1255..137927c5da 100644 --- a/docs/src/main/paradox/other-docs/snapshots.md +++ b/docs/src/main/paradox/other-docs/snapshots.md @@ -39,5 +39,5 @@ Gradle ## Documentation -The [snapshot documentation](https://doc.akka.io/docs/alpakka/snapshot/) is updated with every snapshot build. +The [snapshot documentation](https://doc.akka.io/libraries/alpakka/snapshot/) is updated with every snapshot build. diff --git a/docs/src/main/paradox/other-docs/versioning.md b/docs/src/main/paradox/other-docs/versioning.md index 931a2b466f..50c4166c8a 100644 --- a/docs/src/main/paradox/other-docs/versioning.md +++ b/docs/src/main/paradox/other-docs/versioning.md @@ -30,7 +30,7 @@ Read about the details in the @extref:[Akka documentation](akka:common/binary-co With Akka though, it is important to be strictly using one version (never blend eg. `akka-actor 2.5.21` and `akka-stream 2.5.12`), and do not use an Akka version lower than the one the Alpakka dependency requires (sometimes Alpakka modules depend on features of the latest Akka release). -Alpakka’s Akka and Akka HTTP dependencies are upgraded only if that version brings features leveraged by Alpakka or important fixes. As Akka itself is binary-compatible, the Akka version may be upgraded with an Alpakka patch release. See Akka's [Downstream upgrade strategy](https://doc.akka.io/docs/akka/current/project/downstream-upgrade-strategy.html) . +Alpakka’s Akka and Akka HTTP dependencies are upgraded only if that version brings features leveraged by Alpakka or important fixes. As Akka itself is binary-compatible, the Akka version may be upgraded with an Alpakka patch release. See Akka's [Downstream upgrade strategy](https://doc.akka.io/libraries/akka-core/current/project/downstream-upgrade-strategy.html) . 
@@@ note diff --git a/docs/src/main/paradox/overview.md b/docs/src/main/paradox/overview.md index f20ea89665..f6744e87fe 100644 --- a/docs/src/main/paradox/overview.md +++ b/docs/src/main/paradox/overview.md @@ -1,6 +1,6 @@ # Overview -The [Alpakka project](https://doc.akka.io/docs/alpakka/current/) is an open source initiative to implement stream-aware and reactive integration pipelines for Java and Scala. It is built on top of [Akka Streams](https://doc.akka.io/docs/akka/current/stream/index.html), and has been designed from the ground up to understand streaming natively and provide a DSL for reactive and stream-oriented programming, with built-in support for backpressure. Akka Streams is a [Reactive Streams](https://www.reactive-streams.org/) and JDK 9+ [java.util.concurrent.Flow](https://docs.oracle.com/javase/10/docs/api/java/util/concurrent/Flow.html)-compliant implementation and therefore [fully interoperable](https://doc.akka.io/docs/akka/current/general/stream/stream-design.html#interoperation-with-other-reactive-streams-implementations) with other implementations. +The [Alpakka project](https://doc.akka.io/libraries/alpakka/current/) is an open source initiative to implement stream-aware and reactive integration pipelines for Java and Scala. It is built on top of [Akka Streams](https://doc.akka.io/libraries/akka-core/current/stream/index.html), and has been designed from the ground up to understand streaming natively and provide a DSL for reactive and stream-oriented programming, with built-in support for backpressure. Akka Streams is a [Reactive Streams](https://www.reactive-streams.org/) and JDK 9+ [java.util.concurrent.Flow](https://docs.oracle.com/javase/10/docs/api/java/util/concurrent/Flow.html)-compliant implementation and therefore [fully interoperable](https://doc.akka.io/libraries/akka-core/current/general/stream/stream-design.html#interoperation-with-other-reactive-streams-implementations) with other implementations. 
If you'd like to know what integrations with Alpakka look like, have a look at our @ref[self-contained examples](examples/index.md) section. diff --git a/docs/src/main/paradox/release-notes/1.0.x.md b/docs/src/main/paradox/release-notes/1.0.x.md index 6bea432a55..43c44bda40 100644 --- a/docs/src/main/paradox/release-notes/1.0.x.md +++ b/docs/src/main/paradox/release-notes/1.0.x.md @@ -84,7 +84,7 @@ Released 2019-04-04 With this 1.0.0 release Alpakka is another member of the [Akka family](https://akka.io/docs/) promising great Open Source software with the [Reactive Manifesto](https://www.reactivemanifesto.org/) in mind. -By now all modules have been worked with to follow a similar structure so that both the user experience and the contributor experience are transferable across the many modules in Alpakka. The recommended structure for connectors is illustrated by the [Reference connector](https://doc.akka.io/docs/alpakka/current/reference.html). +By now all modules have been worked with to follow a similar structure so that both the user experience and the contributor experience are transferable across the many modules in Alpakka. The recommended structure for connectors is illustrated by the [Reference connector](https://doc.akka.io/libraries/alpakka/current/reference.html). Having that said, a few parts make use of the `@ApiMayChange` annotation to keep the door open for API changes, so that they can evolve more rapidly than other modules within Alpakka. @@ -92,7 +92,7 @@ If you have kept to older versions of Alpakka to avoid multiple upgrades, **now Most artifacts of Alpakka are available for Scala 2.11, 2.12 and 2.13.0-M5. -Alpakka 1.0.0 modules require at least [Akka](https://akka.io/blog/news-archive.html) 2.5.21. Some modules built up [Akka HTTP](https://doc.akka.io/docs/akka-http/current/release-notes/10.1.x.html) 10.1.7. +Alpakka 1.0.0 modules require at least [Akka](https://akka.io/blog/news-archive.html) 2.5.21. 
Some modules built up [Akka HTTP](https://doc.akka.io/libraries/akka-http/current/release-notes/10.1.x.html) 10.1.7. ### Compatibility @@ -104,7 +104,7 @@ All modules of Alpakka can be used independently, you may mix Alpakka versions f ## Highlights since 1.0-RC1 -* Official support is now available via [Lightbend Platform Subscription](https://www.lightbend.com/lightbend-subscription) for the first modules: Couchbase and CSV ([Alpakka Kafka](https://doc.akka.io/docs/alpakka-kafka/current/) has been supported for a while) +* Official support is now available via [Lightbend Platform Subscription](https://www.lightbend.com/lightbend-subscription) for the first modules: Couchbase and CSV ([Alpakka Kafka](https://doc.akka.io/libraries/alpakka-kafka/current/) has been supported for a while) * MQTT Streaming has been further stabilized * AWS SQS has been marked "API may change" as there are some improvements coming up (see [#1604](https://github.com/akka/alpakka/pull/1604)) diff --git a/docs/src/main/paradox/release-notes/1.0.x/kafka.md b/docs/src/main/paradox/release-notes/1.0.x/kafka.md index 48c8a57090..1001a8479c 100644 --- a/docs/src/main/paradox/release-notes/1.0.x/kafka.md +++ b/docs/src/main/paradox/release-notes/1.0.x/kafka.md @@ -2,4 +2,4 @@ Alpakka Kafka is managed separately and has its own releases. -[Alpakka Kafka release notes](https://doc.akka.io/docs/alpakka-kafka/current/release-notes/index.html) +[Alpakka Kafka release notes](https://doc.akka.io/libraries/alpakka-kafka/current/release-notes/index.html) diff --git a/docs/src/main/paradox/release-notes/1.1.x/kafka.md b/docs/src/main/paradox/release-notes/1.1.x/kafka.md index 48c8a57090..1001a8479c 100644 --- a/docs/src/main/paradox/release-notes/1.1.x/kafka.md +++ b/docs/src/main/paradox/release-notes/1.1.x/kafka.md @@ -2,4 +2,4 @@ Alpakka Kafka is managed separately and has its own releases. 
-[Alpakka Kafka release notes](https://doc.akka.io/docs/alpakka-kafka/current/release-notes/index.html) +[Alpakka Kafka release notes](https://doc.akka.io/libraries/alpakka-kafka/current/release-notes/index.html) diff --git a/docs/src/main/paradox/release-notes/2.0.x.md b/docs/src/main/paradox/release-notes/2.0.x.md index 899f94eab8..f8531e1eb5 100644 --- a/docs/src/main/paradox/release-notes/2.0.x.md +++ b/docs/src/main/paradox/release-notes/2.0.x.md @@ -12,7 +12,7 @@ To understand the forces on version numbers, read about @ref:[Alpakka's versioni @@@ @@@ note -In case you are browsing a specific version's documentation: check out the [latest release notes](https://doc.akka.io/docs/alpakka/current/release-notes/) +In case you are browsing a specific version's documentation: check out the [latest release notes](https://doc.akka.io/libraries/alpakka/current/release-notes/) @@@ All Alpakka modules are available for Scala 2.13 and all Alpakka modules are compatible with Akka 2.6.8+ and Akka 2.5.31+. An exception is MQTT streaming which is available for Akka 2.6 only. @@ -156,7 +156,7 @@ Released 2020-04-30 [Pravega](https://www.pravega.io/) provides a new storage abstraction - a stream - for continuous and unbounded data. A Pravega stream is an elastic set of durable and append-only segments, each segment being an unbounded sequence of bytes. Streams provide exactly-once semantics, and atomicity for groups of events using transactions. - Cassandra: rewrite on top of Java driver 4.5.0 [#2182](https://github.com/akka/alpakka/issues/2182) by [@ennru](https://github.com/ennru) - **Alpakka Cassandra is completely rewritten and now based on the Cassandra Java driver version 4.6. It now becomes the base for [Akka Persistence Cassandra](https://doc.akka.io/docs/akka-persistence-cassandra/1.0/).** + **Alpakka Cassandra is completely rewritten and now based on the Cassandra Java driver version 4.6. 
It now becomes the base for [Akka Persistence Cassandra](https://doc.akka.io/libraries/akka-persistence-cassandra/1.0/).** - AWS Kinesis KCL streams support [#1667](https://github.com/akka/alpakka/issues/1667) by [@aserrallerios](https://github.com/aserrallerios) @@ -273,7 +273,7 @@ Released 2020-03-20 ## Notable changes since 2.0.0-M3 -**Alpakka Cassandra is completely rewritten and now based on the Cassandra Java driver version 4.6. It now becomes the base for [Akka Persistence Cassandra](https://doc.akka.io/docs/akka-persistence-cassandra/1.0/).** +**Alpakka Cassandra is completely rewritten and now based on the Cassandra Java driver version 4.6. It now becomes the base for [Akka Persistence Cassandra](https://doc.akka.io/libraries/akka-persistence-cassandra/1.0/).** - Cassandra: rewrite on top of Java driver 4.5.0 [#2182](https://github.com/akka/alpakka/issues/2182) by [@ennru](https://github.com/ennru) diff --git a/docs/src/main/paradox/release-notes/2.0.x/cassandra.md b/docs/src/main/paradox/release-notes/2.0.x/cassandra.md index 8351b74dc2..80ab4a411c 100644 --- a/docs/src/main/paradox/release-notes/2.0.x/cassandra.md +++ b/docs/src/main/paradox/release-notes/2.0.x/cassandra.md @@ -20,7 +20,7 @@ ## 2.0.0-RC1 -**Alpakka Cassandra is completely rewritten and now based on the Cassandra Java driver version 4.6. It now becomes the base for [Akka Persistence Cassandra](https://doc.akka.io/docs/akka-persistence-cassandra/1.0/).** +**Alpakka Cassandra is completely rewritten and now based on the Cassandra Java driver version 4.6. 
It now becomes the base for [Akka Persistence Cassandra](https://doc.akka.io/libraries/akka-persistence-cassandra/1.0/).** - Cassandra: rewrite on top of Java driver 4.6.0 [#2182](https://github.com/akka/alpakka/issues/2182) by [@ennru](https://github.com/ennru) - Cassandra: new actors API in Metrics Registry [#2214](https://github.com/akka/alpakka/issues/2214) by [@ennru](https://github.com/ennru) diff --git a/docs/src/main/paradox/release-notes/2.0.x/kafka.md b/docs/src/main/paradox/release-notes/2.0.x/kafka.md index 48c8a57090..1001a8479c 100644 --- a/docs/src/main/paradox/release-notes/2.0.x/kafka.md +++ b/docs/src/main/paradox/release-notes/2.0.x/kafka.md @@ -2,4 +2,4 @@ Alpakka Kafka is managed separately and has its own releases. -[Alpakka Kafka release notes](https://doc.akka.io/docs/alpakka-kafka/current/release-notes/index.html) +[Alpakka Kafka release notes](https://doc.akka.io/libraries/alpakka-kafka/current/release-notes/index.html) diff --git a/docs/src/main/paradox/release-notes/3.0.x.md b/docs/src/main/paradox/release-notes/3.0.x.md index c103fd7433..25401650a5 100644 --- a/docs/src/main/paradox/release-notes/3.0.x.md +++ b/docs/src/main/paradox/release-notes/3.0.x.md @@ -12,7 +12,7 @@ To understand the forces on version numbers, read about @ref:[Alpakka's versioni @@@ @@@ note -In case you are browsing a specific version's documentation: check out the [latest release notes](https://doc.akka.io/docs/alpakka/current/release-notes/) +In case you are browsing a specific version's documentation: check out the [latest release notes](https://doc.akka.io/libraries/alpakka/current/release-notes/) @@@ All Alpakka modules are available for Scala 2.13 and all Alpakka modules are compatible with Akka 2.6.13+. 
diff --git a/docs/src/main/paradox/release-notes/3.0.x/kafka.md b/docs/src/main/paradox/release-notes/3.0.x/kafka.md index 48c8a57090..1001a8479c 100644 --- a/docs/src/main/paradox/release-notes/3.0.x/kafka.md +++ b/docs/src/main/paradox/release-notes/3.0.x/kafka.md @@ -2,4 +2,4 @@ Alpakka Kafka is managed separately and has its own releases. -[Alpakka Kafka release notes](https://doc.akka.io/docs/alpakka-kafka/current/release-notes/index.html) +[Alpakka Kafka release notes](https://doc.akka.io/libraries/alpakka-kafka/current/release-notes/index.html) diff --git a/docs/src/main/paradox/release-notes/4.0.x.md b/docs/src/main/paradox/release-notes/4.0.x.md index 632fbdcfbf..7da1712042 100644 --- a/docs/src/main/paradox/release-notes/4.0.x.md +++ b/docs/src/main/paradox/release-notes/4.0.x.md @@ -12,7 +12,7 @@ To understand the forces on version numbers, read about @ref:[Alpakka's versioni @@@ @@@ note -In case you are browsing a specific version's documentation: check out the [latest release notes](https://doc.akka.io/docs/alpakka/current/release-notes/) +In case you are browsing a specific version's documentation: check out the [latest release notes](https://doc.akka.io/libraries/alpakka/current/release-notes/) @@@ All Alpakka modules are available for Scala 2.13 and all Alpakka modules are compatible with Akka 2.6.13+. diff --git a/docs/src/main/paradox/sqs.md b/docs/src/main/paradox/sqs.md index 2a2e3f77f6..f08108a7d4 100644 --- a/docs/src/main/paradox/sqs.md +++ b/docs/src/main/paradox/sqs.md @@ -79,7 +79,7 @@ Scala Java : @@snip [snip](/sqs/src/test/java/docs/javadsl/SqsSourceTest.java) { #run } -In this example we use the `closeOnEmptyReceive` to let the stream complete when there are no more messages on the queue. 
In realistic scenarios, you should add a `KillSwitch` to the stream, see ["Controlling stream completion with KillSwitch" in the Akka documentation](https://doc.akka.io/docs/akka/current/stream/stream-dynamic.html#controlling-stream-completion-with-killswitch). +In this example we use the `closeOnEmptyReceive` to let the stream complete when there are no more messages on the queue. In realistic scenarios, you should add a `KillSwitch` to the stream, see ["Controlling stream completion with KillSwitch" in the Akka documentation](https://doc.akka.io/libraries/akka-core/current/stream/stream-dynamic.html#controlling-stream-completion-with-killswitch). ### Source configuration diff --git a/docs/src/main/paradox/unix-domain-socket.md b/docs/src/main/paradox/unix-domain-socket.md index 06dff2f218..c181629a7e 100644 --- a/docs/src/main/paradox/unix-domain-socket.md +++ b/docs/src/main/paradox/unix-domain-socket.md @@ -6,8 +6,8 @@ This connector provides an implementation of a Unix Domain Socket with interface The binding and connecting APIs are extremely similar to the `Tcp` Akka Streams class. `UnixDomainSocket` is generally substitutable for `Tcp` except that the `SocketAddress` is different (Unix Domain Sockets requires a `java.io.File` as opposed to a host and port). Please read the following for details: -* [Scala user reference for `Tcp`](https://doc.akka.io/docs/akka/current/stream/stream-io.html?language=scala) -* [Java user reference for `Tcp`](https://doc.akka.io/docs/akka/current/stream/stream-io.html?language=java) +* [Scala user reference for `Tcp`](https://doc.akka.io/libraries/akka-core/current/stream/stream-io.html?language=scala) +* [Java user reference for `Tcp`](https://doc.akka.io/libraries/akka-core/current/stream/stream-io.html?language=java) > Note that Unix Domain Sockets, as the name implies, do not apply to Windows. 
diff --git a/dynamodb/src/test/java/docs/javadsl/ExampleTest.java b/dynamodb/src/test/java/docs/javadsl/ExampleTest.java index 0c5d303d50..25ee07ec39 100644 --- a/dynamodb/src/test/java/docs/javadsl/ExampleTest.java +++ b/dynamodb/src/test/java/docs/javadsl/ExampleTest.java @@ -51,7 +51,7 @@ public static void setup() throws Exception { final ActorSystem system = ActorSystem.create(); // Don't encode credentials in your source code! - // see https://doc.akka.io/docs/alpakka/current/aws-shared-configuration.html + // see https://doc.akka.io/libraries/alpakka/current/aws-shared-configuration.html StaticCredentialsProvider credentialsProvider = StaticCredentialsProvider.create(AwsBasicCredentials.create("x", "x")); final DynamoDbAsyncClient client = @@ -60,7 +60,7 @@ public static void setup() throws Exception { .region(Region.AWS_GLOBAL) .httpClient(AkkaHttpClient.builder().withActorSystem(system).build()) // Possibility to configure the retry policy - // see https://doc.akka.io/docs/alpakka/current/aws-shared-configuration.html + // see https://doc.akka.io/libraries/alpakka/current/aws-shared-configuration.html // .overrideConfiguration(...) // #init-client .endpointOverride(new URI("http://localhost:8001/")) diff --git a/dynamodb/src/test/scala/docs/scaladsl/ExampleSpec.scala b/dynamodb/src/test/scala/docs/scaladsl/ExampleSpec.scala index 10ac506fcd..8154562a65 100644 --- a/dynamodb/src/test/scala/docs/scaladsl/ExampleSpec.scala +++ b/dynamodb/src/test/scala/docs/scaladsl/ExampleSpec.scala @@ -49,7 +49,7 @@ class ExampleSpec //#init-client // Don't encode credentials in your source code! 
- // see https://doc.akka.io/docs/alpakka/current/aws-shared-configuration.html + // see https://doc.akka.io/libraries/alpakka/current/aws-shared-configuration.html private val credentialsProvider = StaticCredentialsProvider.create(AwsBasicCredentials.create("x", "x")) implicit val client: DynamoDbAsyncClient = DynamoDbAsyncClient .builder() @@ -57,7 +57,7 @@ class ExampleSpec .credentialsProvider(credentialsProvider) .httpClient(AkkaHttpClient.builder().withActorSystem(system).build()) // Possibility to configure the retry policy - // see https://doc.akka.io/docs/alpakka/current/aws-shared-configuration.html + // see https://doc.akka.io/libraries/alpakka/current/aws-shared-configuration.html // .overrideConfiguration(...) //#init-client .endpointOverride(new URI("http://localhost:8001/")) diff --git a/kinesis/src/test/java/docs/javadsl/KinesisFirehoseSnippets.java b/kinesis/src/test/java/docs/javadsl/KinesisFirehoseSnippets.java index ce66f56f39..b820fe4b7d 100644 --- a/kinesis/src/test/java/docs/javadsl/KinesisFirehoseSnippets.java +++ b/kinesis/src/test/java/docs/javadsl/KinesisFirehoseSnippets.java @@ -29,7 +29,7 @@ public void snippets() { FirehoseAsyncClient.builder() .httpClient(AkkaHttpClient.builder().withActorSystem(system).build()) // Possibility to configure the retry policy - // see https://doc.akka.io/docs/alpakka/current/aws-shared-configuration.html + // see https://doc.akka.io/libraries/alpakka/current/aws-shared-configuration.html // .overrideConfiguration(...) 
.build(); diff --git a/kinesis/src/test/java/docs/javadsl/KinesisSnippets.java b/kinesis/src/test/java/docs/javadsl/KinesisSnippets.java index 40b355dd05..cf688184c4 100644 --- a/kinesis/src/test/java/docs/javadsl/KinesisSnippets.java +++ b/kinesis/src/test/java/docs/javadsl/KinesisSnippets.java @@ -41,7 +41,7 @@ public void snippets() { KinesisAsyncClient.builder() .httpClient(AkkaHttpClient.builder().withActorSystem(system).build()) // Possibility to configure the retry policy - // see https://doc.akka.io/docs/alpakka/current/aws-shared-configuration.html + // see https://doc.akka.io/libraries/alpakka/current/aws-shared-configuration.html // .overrideConfiguration(...) .build(); diff --git a/kinesis/src/test/scala/docs/scaladsl/KinesisFirehoseSnippets.scala b/kinesis/src/test/scala/docs/scaladsl/KinesisFirehoseSnippets.scala index fc69dda156..8d06feda70 100644 --- a/kinesis/src/test/scala/docs/scaladsl/KinesisFirehoseSnippets.scala +++ b/kinesis/src/test/scala/docs/scaladsl/KinesisFirehoseSnippets.scala @@ -24,7 +24,7 @@ object KinesisFirehoseSnippets { .builder() .httpClient(AkkaHttpClient.builder().withActorSystem(system).build()) // Possibility to configure the retry policy - // see https://doc.akka.io/docs/alpakka/current/aws-shared-configuration.html + // see https://doc.akka.io/libraries/alpakka/current/aws-shared-configuration.html // .overrideConfiguration(...) 
.build() diff --git a/kinesis/src/test/scala/docs/scaladsl/KinesisSnippets.scala b/kinesis/src/test/scala/docs/scaladsl/KinesisSnippets.scala index cab940e985..5178a259de 100644 --- a/kinesis/src/test/scala/docs/scaladsl/KinesisSnippets.scala +++ b/kinesis/src/test/scala/docs/scaladsl/KinesisSnippets.scala @@ -29,7 +29,7 @@ object KinesisSnippets { .builder() .httpClient(AkkaHttpClient.builder().withActorSystem(system).build()) // Possibility to configure the retry policy - // see https://doc.akka.io/docs/alpakka/current/aws-shared-configuration.html + // see https://doc.akka.io/libraries/alpakka/current/aws-shared-configuration.html // .overrideConfiguration(...) .build() diff --git a/project/Common.scala b/project/Common.scala index 76ced35404..c72f607dd8 100644 --- a/project/Common.scala +++ b/project/Common.scala @@ -24,11 +24,11 @@ object Common extends AutoPlugin { organization := "com.lightbend.akka", organizationName := "Lightbend Inc.", organizationHomepage := Some(url("https://www.lightbend.com/")), - homepage := Some(url("https://doc.akka.io/docs/alpakka/current")), + homepage := Some(url("https://doc.akka.io/libraries/alpakka/current")), scmInfo := Some(ScmInfo(url("https://github.com/akka/alpakka"), "git@github.com:akka/alpakka.git")), developers += Developer("contributors", "Contributors", - "https://gitter.im/akka/dev", + "", url("https://github.com/akka/alpakka/graphs/contributors")), releaseNotesURL := ( if ((ThisBuild / isSnapshot).value) None @@ -77,7 +77,13 @@ object Common extends AutoPlugin { "-doc-version", version.value, "-sourcepath", - (ThisBuild / baseDirectory).value.toString + (ThisBuild / baseDirectory).value.toString, + "-doc-source-url", { + val branch = if (isSnapshot.value) "main" else s"v${version.value}" + s"https://github.com/akka/alpakka/tree/${branch}€{FILE_PATH_EXT}#L€{FILE_LINE}" + }, + "-doc-canonical-base-url", + "https://doc.akka.io/api/alpakka/current/" ) ++ { // excluding generated grpc classes, except the model ones 
(com.google.pubsub) val skip = "akka.pattern:" + // for some reason Scaladoc creates this @@ -86,20 +92,17 @@ object Common extends AutoPlugin { "com.google.api:com.google.cloud:com.google.iam:com.google.logging:" + "com.google.longrunning:com.google.protobuf:com.google.rpc:com.google.type" if (scalaBinaryVersion.value.startsWith("3")) { - Seq(s"-skip-packages:$skip") // different usage in scala3 + Seq( + s"-external-mappings:https://docs.oracle.com/en/java/javase/${Dependencies.JavaDocLinkVersion}/docs/api/java.base/", + s"-skip-packages:$skip" + ) } else { - Seq("-skip-packages", skip) + Seq("-jdk-api-doc-base", + s"https://docs.oracle.com/en/java/javase/${Dependencies.JavaDocLinkVersion}/docs/api/java.base/", + "-skip-packages", + skip) } }, - Compile / doc / scalacOptions ++= - Seq( - "-doc-source-url", { - val branch = if (isSnapshot.value) "main" else s"v${version.value}" - s"https://github.com/akka/alpakka/tree/${branch}€{FILE_PATH_EXT}#L€{FILE_LINE}" - }, - "-doc-canonical-base-url", - "https://doc.akka.io/api/alpakka/current/" - ), Compile / doc / scalacOptions -= "-Werror", compile / javacOptions ++= Seq( "-Xlint:cast", diff --git a/project/Dependencies.scala b/project/Dependencies.scala index ff684c151c..b3f9ec6c7a 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -3,6 +3,10 @@ import Keys._ object Dependencies { + // Java Platform version for JavaDoc creation + // sync with Java version in .github/workflows/publish.yml#documentation + lazy val JavaDocLinkVersion = 17 + val CronBuild = sys.env.get("GITHUB_EVENT_NAME").contains("schedule") val Scala213 = "2.13.12" // update even in link-validator.conf @@ -31,7 +35,9 @@ object Dependencies { val hoverflyVersion = "0.14.1" val CouchbaseVersion = "2.7.23" - val CouchbaseVersionForDocs = "2.7" + val CouchbaseVersionForDocs = VersionNumber(CouchbaseVersion).numbers match { + case Seq(major, minor, _*) => s"$major.$minor" + } // https://github.com/jwt-scala/jwt-scala/releases val 
JwtScalaVersion = "9.4.6" diff --git a/scripts/link-validator.conf b/scripts/link-validator.conf index c9eb35163c..9fdd0aa796 100644 --- a/scripts/link-validator.conf +++ b/scripts/link-validator.conf @@ -2,13 +2,13 @@ site-link-validator { root-dir = "./docs/target/site/" # relative to `root-dir` - start-file = "docs/alpakka/snapshot/index.html" + start-file = "libraries/alpakka/snapshot/index.html" # Resolves URLs with the given prefix as local files instead link-mappings = [ { - prefix = "https://doc.akka.io/docs/alpakka/snapshot/" - replace = "/docs/alpakka/snapshot/" + prefix = "https://doc.akka.io/libraries/alpakka/snapshot/" + replace = "/libraries/alpakka/snapshot/" } { prefix = "https://doc.akka.io/api/alpakka/snapshot/" @@ -39,7 +39,7 @@ site-link-validator { ignore-files = [] non-https-whitelist = [ - "http://logback.qos.ch/" +// "http://logback.qos.ch/" "http://www.slf4j.org/" "http://reactivemongo.org" "http://www.thedevpiece.com/" diff --git a/sns/src/test/java/docs/javadsl/SnsPublisherTest.java b/sns/src/test/java/docs/javadsl/SnsPublisherTest.java index 4d3e53a734..a2dd4c690b 100644 --- a/sns/src/test/java/docs/javadsl/SnsPublisherTest.java +++ b/sns/src/test/java/docs/javadsl/SnsPublisherTest.java @@ -69,7 +69,7 @@ static SnsAsyncClient createSnsClient() { // #init-client // Don't encode credentials in your source code! 
- // see https://doc.akka.io/docs/alpakka/current/aws-shared-configuration.html + // see https://doc.akka.io/libraries/alpakka/current/aws-shared-configuration.html StaticCredentialsProvider credentialsProvider = StaticCredentialsProvider.create(AwsBasicCredentials.create("x", "x")); final SnsAsyncClient awsSnsClient = @@ -81,7 +81,7 @@ static SnsAsyncClient createSnsClient() { .region(Region.EU_CENTRAL_1) .httpClient(AkkaHttpClient.builder().withActorSystem(system).build()) // Possibility to configure the retry policy - // see https://doc.akka.io/docs/alpakka/current/aws-shared-configuration.html + // see https://doc.akka.io/libraries/alpakka/current/aws-shared-configuration.html // .overrideConfiguration(...) .build(); diff --git a/sns/src/test/scala/akka/stream/alpakka/sns/IntegrationTestContext.scala b/sns/src/test/scala/akka/stream/alpakka/sns/IntegrationTestContext.scala index bc88a8745b..628a84a6ee 100644 --- a/sns/src/test/scala/akka/stream/alpakka/sns/IntegrationTestContext.scala +++ b/sns/src/test/scala/akka/stream/alpakka/sns/IntegrationTestContext.scala @@ -59,7 +59,7 @@ trait IntegrationTestContext extends BeforeAndAfterAll with ScalaFutures { import software.amazon.awssdk.regions.Region // Don't encode credentials in your source code! 
- // see https://doc.akka.io/docs/alpakka/current/aws-shared-configuration.html + // see https://doc.akka.io/libraries/alpakka/current/aws-shared-configuration.html val credentialsProvider = StaticCredentialsProvider.create(AwsBasicCredentials.create("x", "x")) implicit val awsSnsClient: SnsAsyncClient = SnsAsyncClient @@ -71,7 +71,7 @@ trait IntegrationTestContext extends BeforeAndAfterAll with ScalaFutures { .region(Region.EU_CENTRAL_1) .httpClient(AkkaHttpClient.builder().withActorSystem(system).build()) // Possibility to configure the retry policy - // see https://doc.akka.io/docs/alpakka/current/aws-shared-configuration.html + // see https://doc.akka.io/libraries/alpakka/current/aws-shared-configuration.html // .overrideConfiguration(...) .build() diff --git a/sqs/src/main/scala/akka/stream/alpakka/sqs/javadsl/SqsPublishFlow.scala b/sqs/src/main/scala/akka/stream/alpakka/sqs/javadsl/SqsPublishFlow.scala index a6ce6d5a38..d122a6add6 100644 --- a/sqs/src/main/scala/akka/stream/alpakka/sqs/javadsl/SqsPublishFlow.scala +++ b/sqs/src/main/scala/akka/stream/alpakka/sqs/javadsl/SqsPublishFlow.scala @@ -43,7 +43,7 @@ object SqsPublishFlow { /** * creates a [[akka.stream.javadsl.Flow Flow]] that groups messages and publish them in batches to a SQS queue using an [[software.amazon.awssdk.services.sqs.SqsAsyncClient AmazonSQSAsync]] - * @see https://doc.akka.io/docs/akka/current/stream/operators/Source-or-Flow/groupedWithin.html#groupedwithin + * @see https://doc.akka.io/libraries/akka-core/current/stream/operators/Source-or-Flow/groupedWithin.html#groupedwithin */ def grouped( queueUrl: String, diff --git a/sqs/src/main/scala/akka/stream/alpakka/sqs/javadsl/SqsPublishSink.scala b/sqs/src/main/scala/akka/stream/alpakka/sqs/javadsl/SqsPublishSink.scala index 96984519e0..0985a65722 100644 --- a/sqs/src/main/scala/akka/stream/alpakka/sqs/javadsl/SqsPublishSink.scala +++ b/sqs/src/main/scala/akka/stream/alpakka/sqs/javadsl/SqsPublishSink.scala @@ -52,7 +52,7 @@ object 
SqsPublishSink { /** * creates a [[akka.stream.javadsl.Sink Sink]] that groups strings and publishes them as messages in batches to a SQS queue using an [[software.amazon.awssdk.services.sqs.SqsAsyncClient SqsAsyncClient]] - * @see https://doc.akka.io/docs/akka/current/stream/operators/Source-or-Flow/groupedWithin.html#groupedwithin + * @see https://doc.akka.io/libraries/akka-core/current/stream/operators/Source-or-Flow/groupedWithin.html#groupedwithin */ def grouped(queueUrl: String, settings: SqsPublishGroupedSettings, @@ -61,7 +61,7 @@ object SqsPublishSink { /** * creates a [[akka.stream.javadsl.Sink Sink]] that groups messages and publishes them in batches to a SQS queue using an [[software.amazon.awssdk.services.sqs.SqsAsyncClient SqsAsyncClient]] - * @see https://doc.akka.io/docs/akka/current/stream/operators/Source-or-Flow/groupedWithin.html#groupedwithin + * @see https://doc.akka.io/libraries/akka-core/current/stream/operators/Source-or-Flow/groupedWithin.html#groupedwithin */ def groupedMessageSink(queueUrl: String, settings: SqsPublishGroupedSettings, @@ -73,7 +73,7 @@ object SqsPublishSink { /** * creates a [[akka.stream.javadsl.Sink Sink]] that accepts an iterable of strings and publish them as messages in batches to a SQS queue using an [[software.amazon.awssdk.services.sqs.SqsAsyncClient SqsAsyncClient]] - * @see https://doc.akka.io/docs/akka/current/stream/operators/Source-or-Flow/groupedWithin.html#groupedwithin + * @see https://doc.akka.io/libraries/akka-core/current/stream/operators/Source-or-Flow/groupedWithin.html#groupedwithin */ def batch[B <: java.lang.Iterable[String]](queueUrl: String, settings: SqsPublishBatchSettings, diff --git a/sqs/src/main/scala/akka/stream/alpakka/sqs/scaladsl/SqsPublishFlow.scala b/sqs/src/main/scala/akka/stream/alpakka/sqs/scaladsl/SqsPublishFlow.scala index e56e1a9cb3..0b3e1310d0 100644 --- a/sqs/src/main/scala/akka/stream/alpakka/sqs/scaladsl/SqsPublishFlow.scala +++ 
b/sqs/src/main/scala/akka/stream/alpakka/sqs/scaladsl/SqsPublishFlow.scala @@ -61,7 +61,7 @@ object SqsPublishFlow { /** * creates a [[akka.stream.scaladsl.Flow Flow]] that groups messages and publishes them in batches to a SQS queue using an [[software.amazon.awssdk.services.sqs.SqsAsyncClient SqsAsyncClient]] * - * @see https://doc.akka.io/docs/akka/current/stream/operators/Source-or-Flow/groupedWithin.html#groupedwithin + * @see https://doc.akka.io/libraries/akka-core/current/stream/operators/Source-or-Flow/groupedWithin.html#groupedwithin */ def grouped(queueUrl: String, settings: SqsPublishGroupedSettings = SqsPublishGroupedSettings.Defaults)( implicit sqsClient: SqsAsyncClient diff --git a/sqs/src/main/scala/akka/stream/alpakka/sqs/scaladsl/SqsPublishSink.scala b/sqs/src/main/scala/akka/stream/alpakka/sqs/scaladsl/SqsPublishSink.scala index d4aebeb58c..d138a6460e 100644 --- a/sqs/src/main/scala/akka/stream/alpakka/sqs/scaladsl/SqsPublishSink.scala +++ b/sqs/src/main/scala/akka/stream/alpakka/sqs/scaladsl/SqsPublishSink.scala @@ -31,7 +31,7 @@ object SqsPublishSink { /** * creates a [[akka.stream.scaladsl.Sink Sink]] that groups strings and publishes them as messages in batches to a SQS queue using an [[software.amazon.awssdk.services.sqs.SqsAsyncClient SqsAsyncClient]] * - * @see https://doc.akka.io/docs/akka/current/stream/operators/Source-or-Flow/groupedWithin.html#groupedwithin + * @see https://doc.akka.io/libraries/akka-core/current/stream/operators/Source-or-Flow/groupedWithin.html#groupedwithin */ def grouped(queueUrl: String, settings: SqsPublishGroupedSettings = SqsPublishGroupedSettings.Defaults)( implicit sqsClient: SqsAsyncClient @@ -43,7 +43,7 @@ object SqsPublishSink { /** * creates a [[akka.stream.scaladsl.Sink Sink]] that accepts an iterable of strings and publish them as messages in batches to a SQS queue using an [[software.amazon.awssdk.services.sqs.SqsAsyncClient SqsAsyncClient]] * - * @see 
https://doc.akka.io/docs/akka/current/stream/operators/Source-or-Flow/groupedWithin.html#groupedwithin + * @see https://doc.akka.io/libraries/akka-core/current/stream/operators/Source-or-Flow/groupedWithin.html#groupedwithin */ def batch( queueUrl: String, @@ -82,7 +82,7 @@ object SqsPublishSink { /** * creates a [[akka.stream.scaladsl.Sink Sink]] that groups messages and publishes them in batches to a SQS queue using an [[software.amazon.awssdk.services.sqs.SqsAsyncClient SqsAsyncClient]] * - * @see https://doc.akka.io/docs/akka/current/stream/operators/Source-or-Flow/groupedWithin.html#groupedwithin + * @see https://doc.akka.io/libraries/akka-core/current/stream/operators/Source-or-Flow/groupedWithin.html#groupedwithin */ def groupedMessageSink( queueUrl: String, diff --git a/sqs/src/test/java/akka/stream/alpakka/sqs/javadsl/BaseSqsTest.java b/sqs/src/test/java/akka/stream/alpakka/sqs/javadsl/BaseSqsTest.java index 29a3667458..3ccf9c36bf 100644 --- a/sqs/src/test/java/akka/stream/alpakka/sqs/javadsl/BaseSqsTest.java +++ b/sqs/src/test/java/akka/stream/alpakka/sqs/javadsl/BaseSqsTest.java @@ -68,7 +68,7 @@ public void setupBefore() { private SqsAsyncClient createAsyncClient(String sqsEndpoint) { // #init-client // Don't encode credentials in your source code! 
- // see https://doc.akka.io/docs/alpakka/current/aws-shared-configuration.html + // see https://doc.akka.io/libraries/alpakka/current/aws-shared-configuration.html StaticCredentialsProvider credentialsProvider = StaticCredentialsProvider.create(AwsBasicCredentials.create("x", "x")); SqsAsyncClient sqsClient = @@ -80,7 +80,7 @@ private SqsAsyncClient createAsyncClient(String sqsEndpoint) { .region(Region.EU_CENTRAL_1) .httpClient(AkkaHttpClient.builder().withActorSystem(system).build()) // Possibility to configure the retry policy - // see https://doc.akka.io/docs/alpakka/current/aws-shared-configuration.html + // see https://doc.akka.io/libraries/alpakka/current/aws-shared-configuration.html // .overrideConfiguration(...) .build(); diff --git a/sqs/src/test/scala/akka/stream/alpakka/sqs/scaladsl/DefaultTestContext.scala b/sqs/src/test/scala/akka/stream/alpakka/sqs/scaladsl/DefaultTestContext.scala index 4b8575ba3d..551daef626 100644 --- a/sqs/src/test/scala/akka/stream/alpakka/sqs/scaladsl/DefaultTestContext.scala +++ b/sqs/src/test/scala/akka/stream/alpakka/sqs/scaladsl/DefaultTestContext.scala @@ -78,7 +78,7 @@ trait DefaultTestContext extends Matchers with BeforeAndAfterAll with ScalaFutur //#init-client // Don't encode credentials in your source code! 
- // see https://doc.akka.io/docs/alpakka/current/aws-shared-configuration.html + // see https://doc.akka.io/libraries/alpakka/current/aws-shared-configuration.html val credentialsProvider = StaticCredentialsProvider.create(AwsBasicCredentials.create("x", "x")) implicit val awsSqsClient = SqsAsyncClient .builder() @@ -89,7 +89,7 @@ trait DefaultTestContext extends Matchers with BeforeAndAfterAll with ScalaFutur .region(Region.EU_CENTRAL_1) .httpClient(AkkaHttpClient.builder().withActorSystem(system).build()) // Possibility to configure the retry policy - // see https://doc.akka.io/docs/alpakka/current/aws-shared-configuration.html + // see https://doc.akka.io/libraries/alpakka/current/aws-shared-configuration.html // .overrideConfiguration(...) .build() diff --git a/testkit/src/main/scala/akka/stream/alpakka/testkit/CapturingAppender.scala b/testkit/src/main/scala/akka/stream/alpakka/testkit/CapturingAppender.scala index 47ab1ce0af..f9a4b388ab 100644 --- a/testkit/src/main/scala/akka/stream/alpakka/testkit/CapturingAppender.scala +++ b/testkit/src/main/scala/akka/stream/alpakka/testkit/CapturingAppender.scala @@ -10,7 +10,7 @@ import ch.qos.logback.core.AppenderBase import org.slf4j.LoggerFactory /** - * See https://doc.akka.io/docs/akka/current/typed/testing-async.html#silence-logging-output-from-tests + * See https://doc.akka.io/libraries/akka-core/current/typed/testing-async.html#silence-logging-output-from-tests * * INTERNAL API */ @@ -39,7 +39,7 @@ import org.slf4j.LoggerFactory } /** - * See https://doc.akka.io/docs/akka/current/typed/testing-async.html#silence-logging-output-from-tests + * See https://doc.akka.io/libraries/akka-core/current/typed/testing-async.html#silence-logging-output-from-tests * * INTERNAL API * diff --git a/testkit/src/main/scala/akka/stream/alpakka/testkit/LogbackUtil.scala b/testkit/src/main/scala/akka/stream/alpakka/testkit/LogbackUtil.scala index dbea6d1258..bb058a9399 100644 --- 
a/testkit/src/main/scala/akka/stream/alpakka/testkit/LogbackUtil.scala +++ b/testkit/src/main/scala/akka/stream/alpakka/testkit/LogbackUtil.scala @@ -11,7 +11,7 @@ import org.slf4j.LoggerFactory import scala.annotation.tailrec /** - * See https://doc.akka.io/docs/akka/current/typed/testing-async.html#silence-logging-output-from-tests + * See https://doc.akka.io/libraries/akka-core/current/typed/testing-async.html#silence-logging-output-from-tests * * INTERNAL API */ diff --git a/testkit/src/main/scala/akka/stream/alpakka/testkit/javadsl/LogCapturingJunit4.scala b/testkit/src/main/scala/akka/stream/alpakka/testkit/javadsl/LogCapturingJunit4.scala index 08438d747f..296f9460de 100644 --- a/testkit/src/main/scala/akka/stream/alpakka/testkit/javadsl/LogCapturingJunit4.scala +++ b/testkit/src/main/scala/akka/stream/alpakka/testkit/javadsl/LogCapturingJunit4.scala @@ -13,7 +13,7 @@ import org.junit.runners.model.Statement import org.slf4j.LoggerFactory /** - * See https://doc.akka.io/docs/akka/current/typed/testing-async.html#silence-logging-output-from-tests + * See https://doc.akka.io/libraries/akka-core/current/typed/testing-async.html#silence-logging-output-from-tests * * JUnit `TestRule` to make log lines appear only when the test failed. 
* diff --git a/testkit/src/main/scala/akka/stream/alpakka/testkit/scaladsl/LogCapturing.scala b/testkit/src/main/scala/akka/stream/alpakka/testkit/scaladsl/LogCapturing.scala index 6b1bfdaa6e..6261443233 100644 --- a/testkit/src/main/scala/akka/stream/alpakka/testkit/scaladsl/LogCapturing.scala +++ b/testkit/src/main/scala/akka/stream/alpakka/testkit/scaladsl/LogCapturing.scala @@ -14,7 +14,7 @@ import org.slf4j.LoggerFactory import org.slf4j.MDC /** - * See https://doc.akka.io/docs/akka/current/typed/testing-async.html#silence-logging-output-from-tests + * See https://doc.akka.io/libraries/akka-core/current/typed/testing-async.html#silence-logging-output-from-tests * * Mixin this trait to a ScalaTest test to make log lines appear only when the test failed. *