Commit b75026f
Plumb the total received bytes from the network. This value is retrieved by GenericURLRequestJob::GetTotalReceivedBytes() and eventually shown by the Network.loadingFinished event as encodedDataLength.

The encodedDataLength is important for Lighthouse to do performance audits.

Change-Id: Ib9302355508276c21edbae103debd8d21a8bae51
Reviewed-on: https://chromium-review.googlesource.com/875514
Reviewed-by: Alex Clarke <alexclarke@chromium.org>
Commit-Queue: Jianzhou Feng <jzfeng@chromium.org>
Cr-Commit-Position: refs/heads/master@{#530472}
jianzhoufeng authored and Commit Bot committed Jan 19, 2018
1 parent 4535852 commit b75026f
Showing 7 changed files with 33 additions and 10 deletions.
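In a nutshell, URLFetcher::ResultListener::OnFetchComplete() (and the OnFetchCompleteExtractHeaders() helper) gains a trailing size_t total_received_bytes parameter, GenericURLRequestJob stores the value, and GenericURLRequestJob::GetTotalReceivedBytes() hands it back to the network stack so DevTools can report it as encodedDataLength. A minimal fetcher-side sketch of the widened call; the ReportFetchDone helper and its arguments are illustrative assumptions, and only the OnFetchComplete() signature comes from this commit:

#include <string>
#include <utility>

#include "headless/public/util/url_fetcher.h"
#include "net/base/load_timing_info.h"
#include "net/http/http_response_headers.h"
#include "url/gurl.h"

namespace headless {

// Illustrative helper for a custom URLFetcher implementation (not part of
// this commit): forwards a finished fetch to the listener together with the
// new network-level byte count.
void ReportFetchDone(URLFetcher::ResultListener* listener,
                     const GURL& final_url,
                     scoped_refptr<net::HttpResponseHeaders> headers,
                     const std::string& body,
                     const net::LoadTimingInfo& timing,
                     size_t total_received_bytes) {
  // |total_received_bytes| is the raw byte count received from the network
  // for this request; GenericURLRequestJob stores it, exposes it through
  // GetTotalReceivedBytes(), and DevTools surfaces it as encodedDataLength.
  listener->OnFetchComplete(final_url, std::move(headers), body.c_str(),
                            body.size(), timing, total_received_bytes);
}

}  // namespace headless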
8 changes: 7 additions & 1 deletion headless/public/util/generic_url_request_job.cc
@@ -136,12 +136,14 @@ void GenericURLRequestJob::OnFetchComplete(
scoped_refptr<net::HttpResponseHeaders> response_headers,
const char* body,
size_t body_size,
- const net::LoadTimingInfo& load_timing_info) {
+ const net::LoadTimingInfo& load_timing_info,
+ size_t total_received_bytes) {
DCHECK(origin_task_runner_->RunsTasksInCurrentSequence());
response_headers_ = response_headers;
body_ = body;
body_size_ = body_size;
load_timing_info_ = load_timing_info;
+ total_received_bytes_ = total_received_bytes;

// Save any cookies from the response.
if (!(request_->load_flags() & net::LOAD_DO_NOT_SAVE_COOKIES) &&
@@ -216,6 +218,10 @@ void GenericURLRequestJob::GetLoadTimingInfo(
*load_timing_info = load_timing_info_;
}

+ int64_t GenericURLRequestJob::GetTotalReceivedBytes() const {
+ return total_received_bytes_;
+ }

uint64_t GenericURLRequestJob::GenericURLRequestJob::GetRequestId() const {
return request_->identifier() +
(static_cast<uint64_t>(request_->url_chain().size()) << 32);
5 changes: 4 additions & 1 deletion headless/public/util/generic_url_request_job.h
@@ -143,14 +143,16 @@ class HEADLESS_EXPORT GenericURLRequestJob
bool GetMimeType(std::string* mime_type) const override;
bool GetCharset(std::string* charset) override;
void GetLoadTimingInfo(net::LoadTimingInfo* load_timing_info) const override;
+ int64_t GetTotalReceivedBytes() const override;

// URLFetcher::ResultListener implementation:
void OnFetchStartError(net::Error error) override;
void OnFetchComplete(const GURL& final_url,
scoped_refptr<net::HttpResponseHeaders> response_headers,
const char* body,
size_t body_size,
- const net::LoadTimingInfo& load_timing_info) override;
+ const net::LoadTimingInfo& load_timing_info,
+ size_t total_received_bytes) override;

protected:
// Request implementation:
@@ -187,6 +189,7 @@ class HEADLESS_EXPORT GenericURLRequestJob
size_t body_size_ = 0;
size_t read_offset_ = 0;
net::LoadTimingInfo load_timing_info_;
+ size_t total_received_bytes_ = 0;

base::WeakPtrFactory<GenericURLRequestJob> weak_factory_;

12 changes: 10 additions & 2 deletions headless/public/util/generic_url_request_job_test.cc
@@ -124,9 +124,15 @@ class MockFetcher : public URLFetcher {
const base::Value* response_data_value = reply_dictionary->FindKey("data");
ASSERT_THAT(response_data_value, NotNull());
response_data_ = response_data_value->GetString();
+ const base::Value* total_received_bytes_value =
+ reply_dictionary->FindKey("total_received_bytes");
+ int total_received_bytes = 0;
+ if (total_received_bytes_value)
+ total_received_bytes = total_received_bytes_value->GetInt();
result_listener->OnFetchComplete(
GURL(final_url_value->GetString()), std::move(response_headers),
- response_data_.c_str(), response_data_.size(), load_timing_info);
+ response_data_.c_str(), response_data_.size(), load_timing_info,
+ total_received_bytes);
}

private:
@@ -391,7 +397,8 @@ TEST_F(GenericURLRequestJobTest, BasicRequestContents) {
"data": "Reply",
"headers": {
"Content-Type": "text/html; charset=UTF-8"
- }
+ },
+ "total_received_bytes": 100
})";

std::unique_ptr<net::URLRequest> request(
@@ -403,6 +410,7 @@ TEST_F(GenericURLRequestJobTest, BasicRequestContents) {
EXPECT_TRUE(request->Read(buffer.get(), kBufferSize, &bytes_read));
EXPECT_EQ(5, bytes_read);
EXPECT_EQ("Reply", std::string(buffer->data(), 5));
+ EXPECT_EQ(100, request->GetTotalReceivedBytes());

net::LoadTimingInfo load_timing_info;
request->GetLoadTimingInfo(&load_timing_info);
4 changes: 3 additions & 1 deletion headless/public/util/http_url_fetcher.cc
@@ -221,9 +221,11 @@ void HttpURLFetcher::Delegate::OnResponseCompleted(net::URLRequest* request,
// TODO(alexclarke) apart from the headers there's a lot of stuff in
// |request->response_info()| that we drop here. Find a way to pipe it
// through.
+ // TODO(jzfeng) fill in the real total received bytes from network.
result_listener_->OnFetchComplete(
request->url(), request->response_info().headers,
- bytes_read_so_far_.c_str(), bytes_read_so_far_.size(), load_timing_info);
+ bytes_read_so_far_.c_str(), bytes_read_so_far_.size(), load_timing_info,
+ 0);
}

HttpURLFetcher::HttpURLFetcher(
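The TODO above means HttpURLFetcher still passes a hard-coded 0 rather than the real wire size, so this commit lands the plumbing but not the final value for the built-in fetcher. A plausible follow-up, sketched here as an assumption rather than part of the change, would take the count straight from the completed net::URLRequest, which already tracks it:

// Hypothetical follow-up inside HttpURLFetcher::Delegate::OnResponseCompleted():
// net::URLRequest::GetTotalReceivedBytes() returns the total bytes received
// from the network for this request as an int64_t, which can replace the 0.
result_listener_->OnFetchComplete(
    request->url(), request->response_info().headers,
    bytes_read_so_far_.c_str(), bytes_read_so_far_.size(), load_timing_info,
    static_cast<size_t>(request->GetTotalReceivedBytes()));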
3 changes: 2 additions & 1 deletion headless/public/util/testing/test_in_memory_protocol_handler.cc
@@ -54,7 +54,8 @@ class TestInMemoryProtocolHandler::MockURLFetcher : public URLFetcher {
net::LoadTimingInfo load_timing_info;
load_timing_info.receive_headers_end = base::TimeTicks::Now();
result_listener->OnFetchCompleteExtractHeaders(
- url, response->data.c_str(), response->data.size(), load_timing_info);
+ url, response->data.c_str(), response->data.size(), load_timing_info,
+ 0);
} else {
result_listener->OnFetchStartError(net::ERR_FILE_NOT_FOUND);
}
5 changes: 3 additions & 2 deletions headless/public/util/url_fetcher.cc
@@ -16,7 +16,8 @@ void URLFetcher::ResultListener::OnFetchCompleteExtractHeaders(
const GURL& final_url,
const char* response_data,
size_t response_data_size,
- const net::LoadTimingInfo& load_timing_info) {
+ const net::LoadTimingInfo& load_timing_info,
+ size_t total_received_bytes) {
size_t read_offset = 0;
int header_size =
net::HttpUtil::LocateEndOfHeaders(response_data, response_data_size);
@@ -34,7 +35,7 @@
CHECK_LE(read_offset, response_data_size);
OnFetchComplete(final_url, std::move(response_headers),
response_data + read_offset, response_data_size - read_offset,
- load_timing_info);
+ load_timing_info, total_received_bytes);
}

} // namespace headless
6 changes: 4 additions & 2 deletions headless/public/util/url_fetcher.h
@@ -45,15 +45,17 @@ class HEADLESS_EXPORT URLFetcher {
scoped_refptr<net::HttpResponseHeaders> response_headers,
const char* body,
size_t body_size,
- const net::LoadTimingInfo& load_timing_info) = 0;
+ const net::LoadTimingInfo& load_timing_info,
+ size_t total_received_bytes) = 0;

// Helper function which extracts the headers from |response_data| and calls
// OnFetchComplete.
void OnFetchCompleteExtractHeaders(
const GURL& final_url,
const char* response_data,
size_t response_data_size,
- const net::LoadTimingInfo& load_timing_info);
+ const net::LoadTimingInfo& load_timing_info,
+ size_t total_received_bytes);

protected:
virtual ~ResultListener() {}
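For fetchers that hold the complete raw HTTP response (status line, headers, and body) in one buffer, the helper above carries the new argument as well; in that case the buffer size itself is a reasonable stand-in for the wire size. A hedged usage sketch, assuming |url|, |response_data| (a std::string with the raw response), and |result_listener| are already in scope inside a custom fetcher:

// Sketch only: OnFetchCompleteExtractHeaders() splits |response_data| into
// headers and body and forwards both, plus the byte count, to OnFetchComplete().
net::LoadTimingInfo load_timing_info;
load_timing_info.receive_headers_end = base::TimeTicks::Now();
result_listener->OnFetchCompleteExtractHeaders(
    url, response_data.c_str(), response_data.size(), load_timing_info,
    /*total_received_bytes=*/response_data.size());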
