Skip to content

Commit

Permalink
=htc #18397 fix ArrayIndexOutOfBoundsException in HttpHeaderParser
Browse files Browse the repository at this point in the history
 This error could occur if the parser sees relatively few, relatively long headers on one connection.
  • Loading branch information
sirthias committed Oct 2, 2015
1 parent a95e98b commit 1498a7e
Show file tree
Hide file tree
Showing 2 changed files with 28 additions and 10 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -103,9 +103,12 @@ private[engine] final class HttpHeaderParser private (
try {
val valueIx = newValueIndex // compute early in order to trigger OutOfTrieSpaceExceptions before any change
unshareIfRequired()
values(rootValueIx) = ValueBranch(rootValueIx, valueParser, branchRootNodeIx = nodeCount, valueCount = 1)
val nodeIx = nodeCount
insertRemainingCharsAsNewNodes(input, header)(cursor, endIx, valueIx)
} catch { case OutOfTrieSpaceException ⇒ /* if we cannot insert then we simply don't */ }
values(rootValueIx) = ValueBranch(rootValueIx, valueParser, branchRootNodeIx = nodeIx, valueCount = 1)
} catch {
case OutOfTrieSpaceException ⇒ // if we cannot insert a value then we simply don't
}
resultHeader = header
endIx
}
Expand Down Expand Up @@ -208,10 +211,11 @@ private[engine] final class HttpHeaderParser private (
val valueIx = newValueIndex // compute early in order to trigger OutOfTrieSpaceExceptions before any change
val rowIx = newBranchDataRowIndex
unshareIfRequired()
val newNodeIx = nodeCount.toShort
insertRemainingCharsAsNewNodes(input, value)(cursor, endIx, valueIx, colonIx)
nodes(nodeIx) = nodeBits(rowIx, nodeChar)
branchData(rowIx + 1) = (nodeIx + 1).toShort
branchData(rowIx + 1 + signum) = nodeCount.toShort
insertRemainingCharsAsNewNodes(input, value)(cursor, endIx, valueIx, colonIx)
branchData(rowIx + 1 + signum) = newNodeIx
case msb ⇒
if (nodeChar == 0) { // leaf node
require(cursor == endIx, "Cannot insert key of which a prefix already has a value")
Expand All @@ -222,8 +226,9 @@ private[engine] final class HttpHeaderParser private (
case 0 ⇒ // branch doesn't exist yet, create
val valueIx = newValueIndex // compute early in order to trigger OutOfTrieSpaceExceptions before any change
unshareIfRequired()
branchData(branchIndex) = nodeCount.toShort // make the previously implicit "equals" sub node explicit
val newNodeIx = nodeCount.toShort
insertRemainingCharsAsNewNodes(input, value)(cursor, endIx, valueIx, colonIx)
branchData(branchIndex) = newNodeIx // make the previously implicit "equals" sub node explicit
case subNodeIx ⇒ // descend, but advance only on match
insert(input, value)(cursor + 1 - math.abs(signum), endIx, subNodeIx, colonIx)
}
Expand Down Expand Up @@ -260,9 +265,11 @@ private[engine] final class HttpHeaderParser private (

/**
 * Allocates the next free slot in the `nodes` array and returns its index.
 *
 * Node indices are stored into `branchData` as `Short`s, so the trie can address at most
 * `Short.MaxValue` nodes. Once that limit is reached we throw `OutOfTrieSpaceException`
 * (which callers catch and then simply skip caching the header) instead of letting the
 * index overflow the `Short` range and corrupt the trie (#18397).
 */
private def newNodeIndex: Int = {
  val index = nodeCount
  if (index < Short.MaxValue) {
    // grow the backing array by 1.5x on demand, but never beyond the addressable maximum
    if (index == nodes.length) nodes = copyOf(nodes, math.min(index * 3 / 2, Short.MaxValue))
    nodeCount = index + 1
    index
  } else throw OutOfTrieSpaceException
}

private def newBranchDataRowIndex: Int = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -168,7 +168,7 @@ class HttpHeaderParserSpec extends WordSpec with Matchers with BeforeAndAfterAll
"HTTP header value exceeds the configured limit of 1000 characters"
}

"continue parsing raw headers even if the overall cache capacity is reached" in new TestSetup() {
"continue parsing raw headers even if the overall cache value capacity is reached" in new TestSetup() {
val randomHeaders = Stream.continually {
val name = nextRandomString(nextRandomAlphaNumChar, nextRandomInt(4, 16))
val value = nextRandomString(nextRandomPrintableChar, nextRandomInt(4, 16))
Expand All @@ -179,7 +179,7 @@ class HttpHeaderParserSpec extends WordSpec with Matchers with BeforeAndAfterAll
} should be < 300 // number of cache hits is smaller than the number of headers successfully parsed
}

"continue parsing modelled headers even if the overall cache capacity is reached" in new TestSetup() {
"continue parsing modelled headers even if the overall cache value capacity is reached" in new TestSetup() {
val randomHostHeaders = Stream.continually {
Host(
host = nextRandomString(nextRandomAlphaNumChar, nextRandomInt(4, 8)),
Expand All @@ -190,6 +190,17 @@ class HttpHeaderParserSpec extends WordSpec with Matchers with BeforeAndAfterAll
} should be < 300 // number of cache hits is smaller than the number of headers successfully parsed
}

"continue parsing headers even if the overall cache node capacity is reached" in new TestSetup() {
val randomHostHeaders = Stream.continually {
RawHeader(
name = nextRandomString(nextRandomAlphaNumChar, 60),
value = nextRandomString(nextRandomAlphaNumChar, 1000))
}
randomHostHeaders.take(100).foldLeft(0) {
case (acc, header) ⇒ acc + parseAndCache(header.toString + "\r\nx", header)
} should be < 300 // number of cache hits is smaller than the number of headers successfully parsed
}

"continue parsing raw headers even if the header-specific cache capacity is reached" in new TestSetup() {
val randomHeaders = Stream.continually {
val value = nextRandomString(nextRandomPrintableChar, nextRandomInt(4, 16))
Expand Down

0 comments on commit 1498a7e

Please sign in to comment.