Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
40 changes: 37 additions & 3 deletions ext/standard/filters.c
Original file line number Diff line number Diff line change
Expand Up @@ -1781,7 +1781,8 @@ static const php_stream_filter_factory consumed_filter_factory = {
typedef enum _php_chunked_filter_state {
CHUNK_SIZE_START,
CHUNK_SIZE,
CHUNK_SIZE_EXT,
CHUNK_MAYBE_EXT,
CHUNK_VALID_EXT,
CHUNK_SIZE_CR,
CHUNK_SIZE_LF,
CHUNK_BODY,
Expand Down Expand Up @@ -1820,7 +1821,7 @@ static size_t php_dechunk(char *buf, size_t len, php_chunked_filter_data *data)
data->state = CHUNK_ERROR;
break;
} else {
data->state = CHUNK_SIZE_EXT;
data->state = CHUNK_MAYBE_EXT;
break;
}
data->state = CHUNK_SIZE;
Expand All @@ -1831,7 +1832,34 @@ static size_t php_dechunk(char *buf, size_t len, php_chunked_filter_data *data)
} else if (p == end) {
return out_len;
}
case CHUNK_SIZE_EXT:
// intentional fallthrough: state is CHUNK_MAYBE_EXT if we get here
ZEND_FALLTHROUGH;
case CHUNK_MAYBE_EXT:
// end of size, but chunk-ext may follow
if (*p == '\r' || *p == '\n') {
data->state = CHUNK_SIZE_CR;
continue;
} else {
// we are not at the end of the line, so we expect a valid chunk-ext
// skip whitespace
while (p < end && (*p == ' ' || *p == '\t')) {
p++;
}
if (p == end) {
return out_len;
}

// semicolon indicates start of chunk-ext
if (*p == ';') {
data->state = CHUNK_VALID_EXT;
} else {
data->state = CHUNK_ERROR;
continue;
}
}
// intentional fallthrough: state is CHUNK_VALID_EXT if we get here
ZEND_FALLTHROUGH;
case CHUNK_VALID_EXT:
/* skip extension */
while (p < end && *p != '\r' && *p != '\n') {
p++;
Expand Down Expand Up @@ -1915,6 +1943,12 @@ static size_t php_dechunk(char *buf, size_t len, php_chunked_filter_data *data)
p = end;
continue;
case CHUNK_ERROR:
// Unable to parse, which means that this did not look like chunked encoding.
// If we haven't output anything yet, return the buffer unchanged
if (out_len == 0) {
return len;
}
// Otherwise, append the remaining buffer and return
if (p != out) {
memmove(out, p, end - p);
}
Expand Down
25 changes: 25 additions & 0 deletions ext/standard/tests/filters/chunked_003.phpt
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
--TEST--
Chunked encoding (error handling)
--SKIPIF--
<?php
$filters = stream_get_filters();
if(! in_array( "dechunk", $filters )) die( "skip Chunked filter not available." );
?>
--INI--
allow_url_fopen=1
--FILE--
<?php
// Feed plain (non-chunked) payloads through the dechunk filter; the
// filter's error fallback should return each payload unchanged.
$sources = [
    "data://text/plain,apple",
    "data://text/plain,mango",
];
foreach ($sources as $uri) {
    $stream = fopen($uri, "r");
    stream_filter_append($stream, "dechunk", STREAM_FILTER_READ);
    var_dump(stream_get_contents($stream));
    fclose($stream);
}
?>
--EXPECT--
string(5) "apple"
string(5) "mango"
94 changes: 94 additions & 0 deletions ext/standard/tests/filters/chunked_004.phpt
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
--TEST--
Chunked encoding (multiple buckets)
--SKIPIF--
<?php
$filters = stream_get_filters();
if(! in_array( "dechunk", $filters )) die( "skip Chunked filter not available." );
?>
--INI--
allow_url_fopen=1
--FILE--
<?php

// Filter that splits each bucket in the input into two, with $chunkSize bytes in the first bucket.
// Stream filter that re-emits each incoming bucket as two buckets,
// cut at $chunkSize bytes, so downstream filters see split input.
final class Splitter extends php_user_filter {
    // Byte offset at which every bucket is cut in two.
    public static int $chunkSize;

    function filter($in, $out, &$consumed, $closing): int {
        // Handle one bucket per invocation and pass it on immediately.
        while ($bucket = stream_bucket_make_writeable($in)) {
            $data  = $bucket->data;
            $split = self::$chunkSize;

            stream_bucket_append($out, stream_bucket_new($this->stream, substr($data, 0, $split)));
            stream_bucket_append($out, stream_bucket_new($this->stream, substr($data, $split)));

            $consumed += strlen($data);
            return PSFS_PASS_ON;
        }
        // No bucket available: ask the stream layer for more data.
        return PSFS_FEED_ME;
    }
}
stream_filter_register("Splitter", "Splitter");

$testdata = "2;key=value\r\nte\r\n2\r\nst\r\n0\r\nTrailer: section\r\n\r\n";
Copy link
Copy Markdown
Contributor

@LamentXU123 LamentXU123 May 13, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think this is still missing a case where the chunk-size and the start of an invalid chunk-ext are split across two buckets; without coverage for that, the error fallback could drop prefix bytes that were already consumed in an earlier bucket. Could you add a test for it to confirm?

e.g. $testdata = "2x\r\nte\r\n0\r\n\r\n";


// Exercise every possible split position in the chunked payload, so
// each state transition of the dechunk parser is hit at a bucket
// boundary at least once.
for ($pos = 1; $pos < strlen($testdata); $pos++) {
    Splitter::$chunkSize = $pos;

    $fp = fopen("data:text/plain,$testdata", "r");
    stream_filter_append($fp, "Splitter", STREAM_FILTER_READ);
    stream_filter_append($fp, "dechunk", STREAM_FILTER_READ);

    var_dump(stream_get_contents($fp));
    fclose($fp);
}
?>
--EXPECT--
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
string(4) "test"
Loading