diff --git a/build.sbt b/build.sbt index 44dd60fd19..aeb656c278 100644 --- a/build.sbt +++ b/build.sbt @@ -22,6 +22,7 @@ import com.lightbend.paradox.apidoc.ApidocPlugin.autoImport.apidocRootPackage sourceDistName := "apache-pekko-http" sourceDistIncubating := false +ThisBuild / resolvers += Resolver.ApacheMavenSnapshotsRepo ThisBuild / reproducibleBuildsCheckResolver := Resolver.ApacheMavenStagingRepo addCommandAlias("verifyCodeStyle", "scalafmtCheckAll; scalafmtSbtCheck; +headerCheckAll; javafmtCheckAll") diff --git a/http-core/src/main/mima-filters/2.0.x.backwards.excludes/uptake-pekko-core-2.0.0.excludes b/http-core/src/main/mima-filters/2.0.x.backwards.excludes/uptake-pekko-core-2.0.0.excludes new file mode 100644 index 0000000000..ca65208f1b --- /dev/null +++ b/http-core/src/main/mima-filters/2.0.x.backwards.excludes/uptake-pekko-core-2.0.0.excludes @@ -0,0 +1,21 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +# changes needed to uptake Pekko Core 2.0.0 +ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.pekko.http.impl.engine.client.PoolInterface#Logic.onDownstreamFinish") +ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.pekko.http.impl.engine.http2.Http2StreamHandling#IncomingStreamBuffer.onDownstreamFinish") +ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.pekko.http.impl.engine.http2.hpack.HandleOrPassOnStage#State.onDownstreamFinish") diff --git a/http-core/src/main/scala/org/apache/pekko/http/impl/engine/parsing/HttpHeaderParser.scala b/http-core/src/main/scala/org/apache/pekko/http/impl/engine/parsing/HttpHeaderParser.scala index efcefe5e5b..99b04c951f 100644 --- a/http-core/src/main/scala/org/apache/pekko/http/impl/engine/parsing/HttpHeaderParser.scala +++ b/http-core/src/main/scala/org/apache/pekko/http/impl/engine/parsing/HttpHeaderParser.scala @@ -165,37 +165,77 @@ private[engine] final class HttpHeaderParser private ( } private def parseRawHeader(input: ByteString, lineStart: Int, cursor: Int, nodeIx: Int): Int = { - val colonIx = scanHeaderNameAndReturnIndexOfColon(input, lineStart, lineStart + 1 + maxHeaderNameLength)(cursor) - val headerName = asciiString(input, lineStart, colonIx) - try { - val valueParser = new RawHeaderValueParser(headerName, maxHeaderValueLength, - headerValueCacheLimit(headerName), log, illegalResponseHeaderValueProcessingMode) - insert(input, valueParser)(cursor, colonIx + 1, nodeIx, colonIx) - parseHeaderLine(input, lineStart)(cursor, nodeIx) - } catch { - case OutOfTrieSpaceException => // if we cannot insert we drop back to simply creating new header instances - val (headerValue, endIx) = scanHeaderValue(this, input, colonIx + 1, colonIx + maxHeaderValueLength + 3, - log, settings.illegalResponseHeaderValueProcessingMode)() - resultHeader = RawHeader(headerName, headerValue.trim) - endIx + val colonIx = input.indexOf(':', cursor, lineStart + 1 + maxHeaderNameLength) + if (colonIx == -1) 
{ + scanIllegalHeaderNameCharacters(input, cursor, lineStart + 1 + maxHeaderNameLength) + fail(s"HTTP header name exceeds the configured limit of $maxHeaderNameLength characters", + StatusCodes.RequestHeaderFieldsTooLarge) + } else { + val headerName = scanAsciiString(input, lineStart, colonIx) + try { + val valueParser = new RawHeaderValueParser(headerName, maxHeaderValueLength, + headerValueCacheLimit(headerName), log, illegalResponseHeaderValueProcessingMode) + insert(input, valueParser)(cursor, colonIx + 1, nodeIx, colonIx) + parseHeaderLine(input, lineStart)(cursor, nodeIx) + } catch { + case OutOfTrieSpaceException => // if we cannot insert we drop back to simply creating new header instances + val (headerValue, endIx) = scanHeaderValue(this, input, colonIx + 1, colonIx + maxHeaderValueLength + 3, + log, settings.illegalResponseHeaderValueProcessingMode)() + resultHeader = RawHeader(headerName, headerValue.trim) + endIx + } } } - @tailrec private def scanHeaderNameAndReturnIndexOfColon(input: ByteString, start: Int, limit: Int)(ix: Int): Int = - if (ix < limit) - (byteChar(input, ix), settings.illegalResponseHeaderNameProcessingMode) match { - case (':', _) => ix - case (c, _) if tchar(c) => scanHeaderNameAndReturnIndexOfColon(input, start, limit)(ix + 1) - case (c, IllegalResponseHeaderNameProcessingMode.Error) => - fail(s"Illegal character '${escape(c)}' in header name") - case (c, IllegalResponseHeaderNameProcessingMode.Warn) => - log.warning(s"Header key contains illegal character '${escape(c)}'") - scanHeaderNameAndReturnIndexOfColon(input, start, limit)(ix + 1) - case (c, IllegalResponseHeaderNameProcessingMode.Ignore) => - scanHeaderNameAndReturnIndexOfColon(input, start, limit)(ix + 1) + // similar to asciiString function but it checks for illegal characters + private def scanAsciiString(input: ByteString, start: Int, end: Int): String = { + @tailrec def build(ix: Int = start, sb: JStringBuilder = new JStringBuilder(end - start)): String = + if (ix == 
end) { + sb.toString + } else { + val c = byteChar(input, ix) + if (tchar(c)) { + build(ix + 1, sb.append(c)) + } else { + settings.illegalResponseHeaderNameProcessingMode match { + case IllegalResponseHeaderNameProcessingMode.Error => + fail(s"Illegal character '${escape(c)}' in header name") + case IllegalResponseHeaderNameProcessingMode.Warn => + log.warning(s"Header key contains illegal character '${escape(c)}'") + build(ix + 1, sb.append(c)) + case IllegalResponseHeaderNameProcessingMode.Ignore => + build(ix + 1, sb.append(c)) + } + } } - else fail(s"HTTP header name exceeds the configured limit of ${limit - start - 1} characters", - StatusCodes.RequestHeaderFieldsTooLarge) + if (start == end) "" else build() + } + + // similar to scanAsciiString but only scans for illegal characters and fails or warns if it finds one + private def scanIllegalHeaderNameCharacters(input: ByteString, start: Int, end: Int): Unit = { + @tailrec def check(ix: Int = start): Unit = + if (ix == end) { + () + } else { + val c = byteChar(input, ix) + if (tchar(c)) { + check(ix + 1) + } else { + settings.illegalResponseHeaderNameProcessingMode match { + case IllegalResponseHeaderNameProcessingMode.Error => + fail(s"Illegal character '${escape(c)}' in header name") + case IllegalResponseHeaderNameProcessingMode.Warn => + log.warning(s"Header key contains illegal character '${escape(c)}'") + check(ix + 1) + case IllegalResponseHeaderNameProcessingMode.Ignore => + () + } + } + } + if (start == end || settings.illegalResponseHeaderNameProcessingMode == IllegalResponseHeaderNameProcessingMode.Ignore) + () + else check() + } @tailrec private def parseHeaderValue(input: ByteString, valueStart: Int, branch: ValueBranch)(cursor: Int = valueStart, diff --git a/http-core/src/main/scala/org/apache/pekko/http/impl/engine/server/HttpServerBluePrint.scala b/http-core/src/main/scala/org/apache/pekko/http/impl/engine/server/HttpServerBluePrint.scala index c9603f25ac..c0f7f68918 100644 --- 
a/http-core/src/main/scala/org/apache/pekko/http/impl/engine/server/HttpServerBluePrint.scala +++ b/http-core/src/main/scala/org/apache/pekko/http/impl/engine/server/HttpServerBluePrint.scala @@ -721,12 +721,11 @@ private[http] object HttpServerBluePrint { }) private var activeTimers = 0 - private val timeout: FiniteDuration = { + private val timeout: FiniteDuration = inheritedAttributes.get[ActorAttributes.StreamSubscriptionTimeout] match { case Some(attr) => attr.timeout case None => 5.minutes // should not happen } - } private def addTimeout(s: SubscriptionTimeout): Unit = { if (activeTimers == 0) setKeepGoing(true) activeTimers += 1 diff --git a/http-core/src/main/scala/org/apache/pekko/http/impl/util/StageLoggingWithOverride.scala b/http-core/src/main/scala/org/apache/pekko/http/impl/util/StageLoggingWithOverride.scala index c7ace934a2..dd6592eb00 100644 --- a/http-core/src/main/scala/org/apache/pekko/http/impl/util/StageLoggingWithOverride.scala +++ b/http-core/src/main/scala/org/apache/pekko/http/impl/util/StageLoggingWithOverride.scala @@ -19,8 +19,7 @@ package org.apache.pekko.http.impl.util import org.apache.pekko import pekko.annotation.InternalApi import pekko.stream.stage.GraphStageLogic -import pekko.event.{ LogSource, LoggingAdapter, NoLogging } -import pekko.stream.ActorMaterializer +import pekko.event.{ LogSource, LoggingAdapter } // TODO Try to reconcile with what Pekko provides in StageLogging. 
// We thought this could be removed when https://github.com/akka/akka/issues/18793 had been implemented @@ -43,10 +42,7 @@ private[pekko] trait StageLoggingWithOverride extends GraphStageLogic { _log = logOverride match { case DefaultNoLogging => - materializer match { - case a: ActorMaterializer => pekko.event.Logging(a.system, logSource)(LogSource.fromClass) - case _ => NoLogging - } + pekko.event.Logging(materializer.system, logSource)(LogSource.fromClass) case x => x } case _ => diff --git a/http/src/main/java/org/apache/pekko/http/javadsl/server/ExceptionHandlerBuilder.java b/http/src/main/java/org/apache/pekko/http/javadsl/server/ExceptionHandlerBuilder.java index ab0e7a5aa7..7ec3d606bd 100644 --- a/http/src/main/java/org/apache/pekko/http/javadsl/server/ExceptionHandlerBuilder.java +++ b/http/src/main/java/org/apache/pekko/http/javadsl/server/ExceptionHandlerBuilder.java @@ -13,7 +13,8 @@ package org.apache.pekko.http.javadsl.server; -import org.apache.pekko.japi.pf.FI; +import org.apache.pekko.japi.function.Function; +import org.apache.pekko.japi.function.Predicate; import org.apache.pekko.japi.pf.PFBuilder; public class ExceptionHandlerBuilder { @@ -35,7 +36,7 @@ private ExceptionHandlerBuilder(PFBuilder delegate) { * @return a builder with the case statement added */ public

<P extends Throwable> ExceptionHandlerBuilder match(
-      final Class<P> type, FI.Apply<P, Route> apply) {
+      final Class<P> type, final Function<P, Route> apply) {
     delegate.match(type, apply);
     return this;
   }
@@ -50,7 +51,7 @@ public <P extends Throwable> ExceptionHandlerBuilder match(
    * @return a builder with the case statement added
    */
   public <P extends Throwable> ExceptionHandlerBuilder match(
-      final Class<P> type, final FI.TypedPredicate<P> predicate, final FI.Apply<P, Route> apply) {
+      final Class<P> type, final Predicate<P> predicate, final Function<P, Route> apply) {
     delegate.match(type, predicate, apply);
     return this;
   }
@@ -63,7 +64,7 @@ public <P extends Throwable> ExceptionHandlerBuilder match(
    * @return a builder with the case statement added
    */
   public <P extends Throwable> ExceptionHandlerBuilder matchEquals(
-      final P object, final FI.Apply<P, Route> apply) {
+      final P object, final Function<P, Route> apply) {
     delegate.matchEquals(object, apply);
     return this;
   }
@@ -74,7 +75,7 @@ public <P extends Throwable>
ExceptionHandlerBuilder matchEquals(
    * @param apply an action to apply to the argument
    * @return a builder with the case statement added
    */
-  public ExceptionHandlerBuilder matchAny(final FI.Apply<Throwable, Route> apply) {
+  public ExceptionHandlerBuilder matchAny(final Function<Throwable, Route> apply) {
     delegate.matchAny(apply);
     return this;
   }
diff --git a/http/src/main/mima-filters/2.0.x.backwards.excludes/refactor-javadsl-exception-handler.excludes b/http/src/main/mima-filters/2.0.x.backwards.excludes/refactor-javadsl-exception-handler.excludes
new file mode 100644
index 0000000000..e9c10df5bd
--- /dev/null
+++ b/http/src/main/mima-filters/2.0.x.backwards.excludes/refactor-javadsl-exception-handler.excludes
@@ -0,0 +1,21 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+ +# refactor Java DSL ExceptionHandler (due to function changes in Pekko Core 2.0.0) +ProblemFilters.exclude[IncompatibleMethTypeProblem]("org.apache.pekko.http.javadsl.server.ExceptionHandlerBuilder.match") +ProblemFilters.exclude[IncompatibleMethTypeProblem]("org.apache.pekko.http.javadsl.server.ExceptionHandlerBuilder.matchEquals") +ProblemFilters.exclude[IncompatibleMethTypeProblem]("org.apache.pekko.http.javadsl.server.ExceptionHandlerBuilder.matchAny") diff --git a/http/src/main/resources/reference.conf b/http/src/main/resources/reference.conf index e3f50b7d9a..43ce790f69 100644 --- a/http/src/main/resources/reference.conf +++ b/http/src/main/resources/reference.conf @@ -7,6 +7,9 @@ # This is the reference config file that contains all the default settings. # Make your edits/overrides in your application.conf. +# Disable version checks (should not be merged to main branch) +pekko.fail-mixed-versions=off + pekko.http { routing { # Enables/disables the returning of more detailed error messages to the diff --git a/project/PekkoCoreDependency.scala b/project/PekkoCoreDependency.scala index 992921fe7c..a456021e2e 100644 --- a/project/PekkoCoreDependency.scala +++ b/project/PekkoCoreDependency.scala @@ -20,5 +20,5 @@ import com.github.pjfanning.pekkobuild.PekkoDependency object PekkoCoreDependency extends PekkoDependency { override val checkProject: String = "pekko-cluster-sharding-typed" override val module: Option[String] = None - override val currentVersion: String = "1.1.5" + override val currentVersion: String = "2.0.0-M0+194-a3a8cf32-SNAPSHOT" }