Add experiment to measure time to hash extension content as we read it
This required adding two new methods to URLRequestFileJob that let
subclasses get notified when seeks and reads complete, so that our
URLRequestExtensionJob class can compute a hash of the content it reads
and know which bytes of the file that content corresponds to (e.g. if a
Range was specified and we didn't read all of the bytes).

BUG=360909

Review URL: https://codereview.chromium.org/227943003

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@263571 0039d316-1c4b-4281-b951-d872f2087c98
asargent@chromium.org committed Apr 14, 2014
1 parent 59cf671 commit 06dd723
Showing 5 changed files with 350 additions and 21 deletions.
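
Before the per-file diffs, a minimal standalone sketch of the hook pattern this commit introduces may help: the base job exposes no-op OnSeekComplete/OnReadComplete notifications, and a subclass overrides them to observe the bytes as they stream past. Everything below is illustrative plain C++ — FileJob, HashingJob, SimulateRequest and the toy checksum are stand-ins, not Chromium code (the real patch hashes with crypto::SecureHash/SHA-256):

#include <cstdint>
#include <cstdio>
#include <cstring>

class FileJob {
 public:
  virtual ~FileJob() {}

  // Drive one fake request: a seek followed by a single read.
  void SimulateRequest(const char* data, int64_t offset) {
    DidSeek(offset);
    DidRead(data, static_cast<int>(strlen(data)));
  }

 protected:
  // Notification hooks; the base-class defaults do nothing, so existing
  // subclasses are unaffected (this mirrors URLRequestFileJob).
  virtual void OnSeekComplete(int64_t result) {}
  virtual void OnReadComplete(const char* buf, int result) {}

 private:
  void DidSeek(int64_t result) { OnSeekComplete(result); }
  void DidRead(const char* buf, int result) { OnReadComplete(buf, result); }
};

// Subclass that observes the hooks, the way URLRequestExtensionJob does.
class HashingJob : public FileJob {
 public:
  int64_t seek_position() const { return seek_position_; }
  int64_t bytes_read() const { return bytes_read_; }

 protected:
  virtual void OnSeekComplete(int64_t result) { seek_position_ = result; }
  virtual void OnReadComplete(const char* buf, int result) {
    bytes_read_ += result;
    for (int i = 0; i < result; ++i)  // toy stand-in for hash_->Update(...)
      checksum_ = checksum_ * 31 + static_cast<unsigned char>(buf[i]);
  }

 private:
  int64_t seek_position_ = 0;
  int64_t bytes_read_ = 0;
  unsigned checksum_ = 0;
};

int main() {
  HashingJob job;
  job.SimulateRequest("extension file contents", 0);
  printf("hashed %lld bytes starting at offset %lld\n",
         static_cast<long long>(job.bytes_read()),
         static_cast<long long>(job.seek_position()));
  return 0;
}

The real URLRequestExtensionJob additionally creates its hasher only when the ExtensionContentHashMeasurement field trial reports "Yes", and reports its counters via UMA histograms in its destructor, as the first diff below shows.
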
85 changes: 72 additions & 13 deletions extensions/browser/extension_protocols.cc
@@ -5,6 +5,8 @@
#include "extensions/browser/extension_protocols.h"

#include <algorithm>
#include <string>
#include <vector>

#include "base/base64.h"
#include "base/compiler_specific.h"
@@ -14,9 +16,12 @@
#include "base/logging.h"
#include "base/memory/weak_ptr.h"
#include "base/message_loop/message_loop.h"
#include "base/metrics/field_trial.h"
#include "base/metrics/histogram.h"
#include "base/path_service.h"
#include "base/sha1.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "base/strings/utf_string_conversions.h"
#include "base/threading/sequenced_worker_pool.h"
@@ -25,6 +30,8 @@
#include "build/build_config.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/resource_request_info.h"
#include "crypto/secure_hash.h"
#include "crypto/sha2.h"
#include "extensions/browser/extensions_browser_client.h"
#include "extensions/browser/info_map.h"
#include "extensions/common/constants.h"
@@ -37,6 +44,7 @@
#include "extensions/common/manifest_handlers/incognito_info.h"
#include "extensions/common/manifest_handlers/shared_module_info.h"
#include "extensions/common/manifest_handlers/web_accessible_resources_info.h"
#include "net/base/io_buffer.h"
#include "net/base/net_errors.h"
#include "net/http/http_request_headers.h"
#include "net/http/http_response_headers.h"
@@ -157,20 +165,31 @@ class URLRequestExtensionJob : public net::URLRequestFileJob {
const std::string& content_security_policy,
bool send_cors_header,
bool follow_symlinks_anywhere)
: net::URLRequestFileJob(
request, network_delegate, base::FilePath(),
BrowserThread::GetBlockingPool()->GetTaskRunnerWithShutdownBehavior(
base::SequencedWorkerPool::SKIP_ON_SHUTDOWN)),
directory_path_(directory_path),
// TODO(tc): Move all of these files into resources.pak so we don't break
// when updating on Linux.
resource_(extension_id, directory_path, relative_path),
content_security_policy_(content_security_policy),
send_cors_header_(send_cors_header),
weak_factory_(this) {
: net::URLRequestFileJob(
request,
network_delegate,
base::FilePath(),
BrowserThread::GetBlockingPool()->GetTaskRunnerWithShutdownBehavior(
base::SequencedWorkerPool::SKIP_ON_SHUTDOWN)),
seek_position_(0),
bytes_read_(0),
directory_path_(directory_path),
// TODO(tc): Move all of these files into resources.pak so we don't
// break when updating on Linux.
resource_(extension_id, directory_path, relative_path),
content_security_policy_(content_security_policy),
send_cors_header_(send_cors_header),
weak_factory_(this) {
if (follow_symlinks_anywhere) {
resource_.set_follow_symlinks_anywhere();
}
const std::string& group =
base::FieldTrialList::FindFullName("ExtensionContentHashMeasurement");
if (group == "Yes") {
base::ElapsedTimer timer;
hash_.reset(crypto::SecureHash::Create(crypto::SecureHash::SHA256));
hashing_time_ = timer.Elapsed();
}
}

virtual void GetResponseInfo(net::HttpResponseInfo* info) OVERRIDE {
@@ -182,7 +201,8 @@ class URLRequestExtensionJob : public net::URLRequestFileJob {
base::Time* last_modified_time = new base::Time();
bool posted = BrowserThread::PostBlockingPoolTaskAndReply(
FROM_HERE,
base::Bind(&ReadResourceFilePathAndLastModifiedTime, resource_,
base::Bind(&ReadResourceFilePathAndLastModifiedTime,
resource_,
directory_path_,
base::Unretained(read_file_path),
base::Unretained(last_modified_time)),
@@ -193,8 +213,35 @@ class URLRequestExtensionJob : public net::URLRequestFileJob {
DCHECK(posted);
}

virtual void OnSeekComplete(int64 result) OVERRIDE {
DCHECK_EQ(seek_position_, 0);
seek_position_ = result;
}

virtual void OnReadComplete(net::IOBuffer* buffer, int result) OVERRIDE {
UMA_HISTOGRAM_COUNTS("ExtensionUrlRequest.OnReadCompleteResult", result);
if (result > 0) {
bytes_read_ += result;
if (hash_.get()) {
base::ElapsedTimer timer;
hash_->Update(buffer->data(), result);
hashing_time_ += timer.Elapsed();
}
}
}

private:
virtual ~URLRequestExtensionJob() {}
virtual ~URLRequestExtensionJob() {
if (hash_.get()) {
base::ElapsedTimer timer;
std::string hash_bytes(crypto::kSHA256Length, 0);
hash_->Finish(string_as_array(&hash_bytes), hash_bytes.size());
hashing_time_ += timer.Elapsed();
UMA_HISTOGRAM_TIMES("ExtensionUrlRequest.HashTimeMs", hashing_time_);
}
UMA_HISTOGRAM_COUNTS("ExtensionUrlRequest.TotalKbRead", bytes_read_ / 1024);
UMA_HISTOGRAM_COUNTS("ExtensionUrlRequest.SeekPosition", seek_position_);
}

void OnFilePathAndLastModifiedTimeRead(base::FilePath* read_file_path,
base::Time* last_modified_time) {
@@ -206,6 +253,18 @@ class URLRequestExtensionJob : public net::URLRequestFileJob {
URLRequestFileJob::Start();
}

// A hash of the contents we've read from the file.
scoped_ptr<crypto::SecureHash> hash_;

// The position we seeked to in the file.
int64 seek_position_;

// The number of bytes of content we read from the file.
int bytes_read_;

// Used to count the total time it takes to do hashing operations.
base::TimeDelta hashing_time_;

net::HttpResponseInfo response_info_;
base::FilePath directory_path_;
extensions::ExtensionResource resource_;
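
A note on the timing scheme above: the constructor, OnReadComplete, and the destructor each wrap only the hashing work (Create, Update, Finish) in a base::ElapsedTimer and add the result to hashing_time_. A rough std::chrono analogue of that accumulate-then-report-once pattern — illustrative only, with a busy loop standing in for hash_->Update():

#include <chrono>
#include <cstdio>

int main() {
  using Clock = std::chrono::steady_clock;
  std::chrono::nanoseconds hashing_time(0);

  for (int chunk = 0; chunk < 3; ++chunk) {
    // base::ElapsedTimer timer;
    Clock::time_point start = Clock::now();
    volatile unsigned sink = 0;
    for (int i = 0; i < 1000000; ++i)
      sink += i;  // stands in for hash_->Update(buffer->data(), result)
    // hashing_time_ += timer.Elapsed();
    hashing_time += std::chrono::duration_cast<std::chrono::nanoseconds>(
        Clock::now() - start);
  }

  // Reported once at teardown, like UMA_HISTOGRAM_TIMES in the destructor.
  printf("total hashing time: %lld us\n",
         static_cast<long long>(
             std::chrono::duration_cast<std::chrono::microseconds>(
                 hashing_time).count()));
  return 0;
}

The point of the pattern is that only the hashing work itself is timed; the waits between reads (disk I/O, scheduling) never enter hashing_time_, so the histogram isolates exactly the cost the experiment is trying to measure.
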
1 change: 1 addition & 0 deletions net/net.gypi
@@ -1638,6 +1638,7 @@
'url_request/url_fetcher_impl_unittest.cc',
'url_request/url_fetcher_response_writer_unittest.cc',
'url_request/url_request_context_builder_unittest.cc',
'url_request/url_request_file_job_unittest.cc',
'url_request/url_request_filter_unittest.cc',
'url_request/url_request_ftp_job_unittest.cc',
'url_request/url_request_http_job_unittest.cc',
22 changes: 17 additions & 5 deletions net/url_request/url_request_file_job.cc
@@ -83,8 +83,9 @@ void URLRequestFileJob::Kill() {
URLRequestJob::Kill();
}

bool URLRequestFileJob::ReadRawData(IOBuffer* dest, int dest_size,
int *bytes_read) {
bool URLRequestFileJob::ReadRawData(IOBuffer* dest,
int dest_size,
int* bytes_read) {
DCHECK_NE(dest_size, 0);
DCHECK(bytes_read);
DCHECK_GE(remaining_bytes_, 0);
@@ -99,9 +100,11 @@ bool URLRequestFileJob::ReadRawData(IOBuffer* dest, int dest_size,
return true;
}

int rv = stream_->Read(dest, dest_size,
int rv = stream_->Read(dest,
dest_size,
base::Bind(&URLRequestFileJob::DidRead,
weak_ptr_factory_.GetWeakPtr()));
weak_ptr_factory_.GetWeakPtr(),
make_scoped_refptr(dest)));
if (rv >= 0) {
// Data is immediately available.
*bytes_read = rv;
@@ -192,6 +195,12 @@ void URLRequestFileJob::SetExtraRequestHeaders(
}
}

void URLRequestFileJob::OnSeekComplete(int64 result) {
}

void URLRequestFileJob::OnReadComplete(net::IOBuffer* buf, int result) {
}

URLRequestFileJob::~URLRequestFileJob() {
}

@@ -273,6 +282,7 @@ void URLRequestFileJob::DidOpen(int result) {
}

void URLRequestFileJob::DidSeek(int64 result) {
OnSeekComplete(result);
if (result != byte_range_.first_byte_position()) {
NotifyDone(URLRequestStatus(URLRequestStatus::FAILED,
ERR_REQUEST_RANGE_NOT_SATISFIABLE));
@@ -283,7 +293,9 @@
NotifyHeadersComplete();
}

void URLRequestFileJob::DidRead(int result) {
void URLRequestFileJob::DidRead(scoped_refptr<net::IOBuffer> buf, int result) {
OnReadComplete(buf.get(), result);
buf = NULL;
if (result > 0) {
SetStatus(URLRequestStatus()); // Clear the IO_PENDING status
} else if (result == 0) {
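
The signature change to DidRead above is a lifetime fix as much as plumbing: the read buffer is now bound into the completion callback via make_scoped_refptr(dest), guaranteeing the IOBuffer is still alive (and identifiable) when the OnReadComplete hook runs after the asynchronous read finishes. A std::shared_ptr analogue of that bind-the-buffer-into-the-callback pattern — toy FileStream and names assumed, nothing here is Chromium's actual API:

#include <cstdio>
#include <functional>
#include <memory>
#include <vector>

typedef std::vector<char> IOBuffer;

// Toy stream whose Read() completes later via a stored callback.
struct FileStream {
  std::function<void(int)> pending;
  void Read(std::function<void(int)> callback) { pending = callback; }
  void CompleteRead(int result) { pending(result); }
};

int main() {
  FileStream stream;
  {
    std::shared_ptr<IOBuffer> buf(new IOBuffer(4096));
    // Bind the buffer into the callback so it outlives this scope, the way
    // make_scoped_refptr(dest) keeps the IOBuffer alive until DidRead runs.
    stream.Read([buf](int result) {
      printf("read %d bytes into a %zu-byte buffer\n", result, buf->size());
    });
  }  // the caller's reference is gone; the lambda's copy keeps the buffer alive
  stream.CompleteRead(512);
  return 0;
}

Releasing the reference at the end of DidRead (buf = NULL above) mirrors the lambda's copy going out of scope: once the hook has seen the data, the job drops its extra reference and the buffer's owner again controls its lifetime.
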
10 changes: 7 additions & 3 deletions net/url_request/url_request_file_job.h
@@ -16,7 +16,7 @@
#include "net/url_request/url_request.h"
#include "net/url_request/url_request_job.h"

namespace base{
namespace base {
class TaskRunner;
}
namespace file_util {
@@ -48,6 +48,10 @@ class NET_EXPORT URLRequestFileJob : public URLRequestJob {
virtual void SetExtraRequestHeaders(
const HttpRequestHeaders& headers) OVERRIDE;

// Hooks for subclasses that wish to observe when seeks and reads complete.
virtual void OnSeekComplete(int64 result);
virtual void OnReadComplete(net::IOBuffer* buf, int result);

protected:
virtual ~URLRequestFileJob();

@@ -88,8 +92,8 @@ class NET_EXPORT URLRequestFileJob : public URLRequestJob {
// on a background thread.
void DidSeek(int64 result);

// Callback after data is asynchronously read from the file.
void DidRead(int result);
// Callback after data is asynchronously read from the file into |buf|.
void DidRead(scoped_refptr<net::IOBuffer> buf, int result);

scoped_ptr<FileStream> stream_;
FileMetaInfo meta_info_;
253 changes: 253 additions & 0 deletions net/url_request/url_request_file_job_unittest.cc (new file; diff not loaded)
