Skip to content

Commit

Permalink
Use net::HttpRequestHeaders instead of std::string in URLRequest and …
Browse files Browse the repository at this point in the history
…friends.

BUG=22588

Review URL: http://codereview.chromium.org/1998001

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@46612 0039d316-1c4b-4281-b951-d872f2087c98
  • Loading branch information
willchan@chromium.org committed May 6, 2010
1 parent f3a696b commit ee1a29b
Show file tree
Hide file tree
Showing 13 changed files with 88 additions and 81 deletions.
31 changes: 19 additions & 12 deletions chrome/browser/automation/url_request_automation_job.cc
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
#include "net/base/cookie_monster.h"
#include "net/base/io_buffer.h"
#include "net/base/net_errors.h"
#include "net/http/http_request_headers.h"
#include "net/http/http_util.h"
#include "net/url_request/url_request_context.h"

Expand All @@ -23,7 +24,7 @@ using base::TimeDelta;

// The list of filtered headers that are removed from requests sent via
// StartAsync(). These must be lower case.
static const char* kFilteredHeaderStrings[] = {
static const char* const kFilteredHeaderStrings[] = {
"accept",
"cache-control",
"connection",
Expand Down Expand Up @@ -387,20 +388,26 @@ void URLRequestAutomationJob::StartAsync() {
message_filter_->RegisterRequest(this);

// Strip unwanted headers.
std::string new_request_headers(
net::HttpUtil::StripHeaders(request_->extra_request_headers(),
kFilteredHeaderStrings,
arraysize(kFilteredHeaderStrings)));
net::HttpRequestHeaders new_request_headers;
new_request_headers.MergeFrom(request_->extra_request_headers());
for (size_t i = 0; i < arraysize(kFilteredHeaderStrings); ++i)
new_request_headers.RemoveHeader(kFilteredHeaderStrings[i]);

if (request_->context()) {
// Only add default Accept-Language and Accept-Charset if the request
// didn't have them specified.
net::HttpUtil::AppendHeaderIfMissing(
"Accept-Language", request_->context()->accept_language(),
&new_request_headers);
net::HttpUtil::AppendHeaderIfMissing(
"Accept-Charset", request_->context()->accept_charset(),
&new_request_headers);
if (!new_request_headers.HasHeader(
net::HttpRequestHeaders::kAcceptLanguage) &&
!request_->context()->accept_language().empty()) {
new_request_headers.SetHeader(net::HttpRequestHeaders::kAcceptLanguage,
request_->context()->accept_language());
}
if (!new_request_headers.HasHeader(
net::HttpRequestHeaders::kAcceptCharset) &&
!request_->context()->accept_charset().empty()) {
new_request_headers.SetHeader(net::HttpRequestHeaders::kAcceptCharset,
request_->context()->accept_charset());
}
}

// Ensure that we do not send username and password fields in the referrer.
Expand All @@ -419,7 +426,7 @@ void URLRequestAutomationJob::StartAsync() {
request_->url().spec(),
request_->method(),
referrer.spec(),
new_request_headers,
new_request_headers.ToString(),
request_->get_upload()
};

Expand Down
2 changes: 1 addition & 1 deletion chrome/browser/renderer_host/resource_dispatcher_host.cc
Original file line number Diff line number Diff line change
Expand Up @@ -1120,7 +1120,7 @@ int ResourceDispatcherHost::CalculateApproximateMemoryCost(
// The following fields should be a minor size contribution (experimentally
// on the order of 100). However since they are variable length, it could
// in theory be a sizeable contribution.
int strings_cost = request->extra_request_headers().size() +
int strings_cost = request->extra_request_headers().ToString().size() +
request->original_url().spec().size() +
request->referrer().size() +
request->method().size();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -648,11 +648,11 @@ TEST_F(ResourceDispatcherHostTest, TestBlockedRequestsDontLeak) {
// Test the private helper method "CalculateApproximateMemoryCost()".
TEST_F(ResourceDispatcherHostTest, CalculateApproximateMemoryCost) {
URLRequest req(GURL("http://www.google.com"), NULL);
EXPECT_EQ(4425, ResourceDispatcherHost::CalculateApproximateMemoryCost(&req));
EXPECT_EQ(4427, ResourceDispatcherHost::CalculateApproximateMemoryCost(&req));

// Add 9 bytes of referrer.
req.set_referrer("123456789");
EXPECT_EQ(4434, ResourceDispatcherHost::CalculateApproximateMemoryCost(&req));
EXPECT_EQ(4436, ResourceDispatcherHost::CalculateApproximateMemoryCost(&req));

// Add 33 bytes of upload content.
std::string upload_content;
Expand All @@ -661,11 +661,11 @@ TEST_F(ResourceDispatcherHostTest, CalculateApproximateMemoryCost) {
req.AppendBytesToUpload(upload_content.data(), upload_content.size());

// Since the upload throttling is disabled, this has no effect on the cost.
EXPECT_EQ(4434, ResourceDispatcherHost::CalculateApproximateMemoryCost(&req));
EXPECT_EQ(4436, ResourceDispatcherHost::CalculateApproximateMemoryCost(&req));

// Add a file upload -- should have no effect.
req.AppendFileToUpload(FilePath(FILE_PATH_LITERAL("does-not-exist.png")));
EXPECT_EQ(4434, ResourceDispatcherHost::CalculateApproximateMemoryCost(&req));
EXPECT_EQ(4436, ResourceDispatcherHost::CalculateApproximateMemoryCost(&req));
}

// Test the private helper method "IncrementOutstandingRequestsMemoryCost()".
Expand Down
4 changes: 3 additions & 1 deletion net/http/http_request_headers.cc
Original file line number Diff line number Diff line change
Expand Up @@ -16,12 +16,14 @@ const char HttpRequestHeaders::kAcceptEncoding[] = "Accept-Encoding";
const char HttpRequestHeaders::kAcceptLanguage[] = "Accept-Language";
const char HttpRequestHeaders::kCacheControl[] = "Cache-Control";
const char HttpRequestHeaders::kConnection[] = "Connection";
const char HttpRequestHeaders::kCookie[] = "Cookie";
const char HttpRequestHeaders::kContentLength[] = "Content-Length";
const char HttpRequestHeaders::kContentType[] = "Content-Type";
const char HttpRequestHeaders::kCookie[] = "Cookie";
const char HttpRequestHeaders::kHost[] = "Host";
const char HttpRequestHeaders::kIfModifiedSince[] = "If-Modified-Since";
const char HttpRequestHeaders::kIfNoneMatch[] = "If-None-Match";
const char HttpRequestHeaders::kIfRange[] = "If-Range";
const char HttpRequestHeaders::kOrigin[] = "Origin";
const char HttpRequestHeaders::kPragma[] = "Pragma";
const char HttpRequestHeaders::kProxyConnection[] = "Proxy-Connection";
const char HttpRequestHeaders::kRange[] = "Range";
Expand Down
2 changes: 2 additions & 0 deletions net/http/http_request_headers.h
Original file line number Diff line number Diff line change
Expand Up @@ -60,12 +60,14 @@ class HttpRequestHeaders {
static const char kAcceptLanguage[];
static const char kCacheControl[];
static const char kConnection[];
static const char kContentType[];
static const char kCookie[];
static const char kContentLength[];
static const char kHost[];
static const char kIfModifiedSince[];
static const char kIfNoneMatch[];
static const char kIfRange[];
static const char kOrigin[];
static const char kPragma[];
static const char kProxyConnection[];
static const char kRange[];
Expand Down
41 changes: 19 additions & 22 deletions net/url_request/url_request.cc
Original file line number Diff line number Diff line change
Expand Up @@ -24,13 +24,26 @@ using net::UploadData;
using std::string;
using std::wstring;

namespace {

// Max number of http redirects to follow. Same number as gecko.
static const int kMaxRedirects = 20;
const int kMaxRedirects = 20;

static URLRequestJobManager* GetJobManager() {
// Returns the process-wide URLRequestJobManager singleton used to create
// and track jobs for all URLRequests.
URLRequestJobManager* GetJobManager() {
  URLRequestJobManager* manager = Singleton<URLRequestJobManager>::get();
  return manager;
}

// Discard headers which have meaning in POST (Content-Length, Content-Type,
// Origin).
void StripPostSpecificHeaders(net::HttpRequestHeaders* headers) {
  // Header names that only carry meaning on a POST request; each is removed
  // from |headers| if present.
  const char* const kPostOnlyHeaders[] = {
    net::HttpRequestHeaders::kContentLength,
    net::HttpRequestHeaders::kContentType,
    net::HttpRequestHeaders::kOrigin,
  };
  for (size_t i = 0;
       i < sizeof(kPostOnlyHeaders) / sizeof(kPostOnlyHeaders[0]); ++i) {
    headers->RemoveHeader(kPostOnlyHeaders[i]);
  }
}

} // namespace

///////////////////////////////////////////////////////////////////////////////
// URLRequest

Expand Down Expand Up @@ -126,14 +139,13 @@ void URLRequest::SetExtraRequestHeaderByName(const string& name,

void URLRequest::SetExtraRequestHeaders(const string& headers) {
DCHECK(!is_pending_);
if (headers.empty()) {
extra_request_headers_.clear();
} else {
extra_request_headers_.Clear();
if (!headers.empty()) {
#ifndef NDEBUG
size_t crlf = headers.rfind("\r\n", headers.size() - 1);
DCHECK(crlf != headers.size() - 2) << "headers must not end with CRLF";
#endif
extra_request_headers_ = headers + "\r\n";
extra_request_headers_.AddHeadersFromString(headers);
}

// NOTE: This method will likely become non-trivial once the other setters
Expand Down Expand Up @@ -434,18 +446,6 @@ void URLRequest::OrphanJob() {
job_ = NULL;
}

// static
std::string URLRequest::StripPostSpecificHeaders(const std::string& headers) {
// These are headers that may be attached to a POST.
static const char* const kPostHeaders[] = {
"content-type",
"content-length",
"origin"
};
return net::HttpUtil::StripHeaders(
headers, kPostHeaders, arraysize(kPostHeaders));
}

int URLRequest::Redirect(const GURL& location, int http_status_code) {
if (net_log_.HasListener()) {
net_log_.AddEvent(
Expand Down Expand Up @@ -492,10 +492,7 @@ int URLRequest::Redirect(const GURL& location, int http_status_code) {
// the inclusion of a multipart Content-Type header in GET can cause
// problems with some servers:
// http://code.google.com/p/chromium/issues/detail?id=843
//
// TODO(eroman): It would be better if this data was structured into
// specific fields/flags, rather than a stew of extra headers.
extra_request_headers_ = StripPostSpecificHeaders(extra_request_headers_);
StripPostSpecificHeaders(&extra_request_headers_);
}

if (!final_upload_progress_)
Expand Down
11 changes: 5 additions & 6 deletions net/url_request/url_request.h
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
#include "net/base/load_states.h"
#include "net/base/net_log.h"
#include "net/base/request_priority.h"
#include "net/http/http_request_headers.h"
#include "net/http/http_response_info.h"
#include "net/url_request/url_request_status.h"

Expand Down Expand Up @@ -337,7 +338,9 @@ class URLRequest {
// by \r\n.
void SetExtraRequestHeaders(const std::string& headers);

const std::string& extra_request_headers() { return extra_request_headers_; }
const net::HttpRequestHeaders& extra_request_headers() const {
return extra_request_headers_;
}

// Returns the current load state for the request.
net::LoadState GetLoadState() const;
Expand Down Expand Up @@ -571,10 +574,6 @@ class URLRequest {
// passed values.
void DoCancel(int os_error, const net::SSLInfo& ssl_info);

// Discard headers which have meaning in POST (Content-Length, Content-Type,
// Origin).
static std::string StripPostSpecificHeaders(const std::string& headers);

// Contextual information used for this request (can be NULL). This contains
// most of the dependencies which are shared between requests (disk cache,
// cookie store, socket poool, etc.)
Expand All @@ -590,7 +589,7 @@ class URLRequest {
GURL first_party_for_cookies_;
std::string method_; // "GET", "POST", etc. Should be all uppercase.
std::string referrer_;
std::string extra_request_headers_;
net::HttpRequestHeaders extra_request_headers_;
int load_flags_; // Flags indicating the request type for the load;
// expected values are LOAD_* enums above.

Expand Down
29 changes: 17 additions & 12 deletions net/url_request/url_request_file_job.cc
Original file line number Diff line number Diff line change
Expand Up @@ -188,18 +188,23 @@ bool URLRequestFileJob::GetMimeType(std::string* mime_type) const {
return net::GetMimeTypeFromFile(file_path_, mime_type);
}

void URLRequestFileJob::SetExtraRequestHeaders(const std::string& headers) {
// We only care about "Range" header here.
std::vector<net::HttpByteRange> ranges;
if (net::HttpUtil::ParseRanges(headers, &ranges)) {
if (ranges.size() == 1) {
byte_range_ = ranges[0];
} else {
// We don't support multiple range requests in one single URL request,
// because we need to do multipart encoding here.
// TODO(hclam): decide whether we want to support multiple range requests.
NotifyDone(URLRequestStatus(URLRequestStatus::FAILED,
net::ERR_REQUEST_RANGE_NOT_SATISFIABLE));
void URLRequestFileJob::SetExtraRequestHeaders(
    const net::HttpRequestHeaders& headers) {
  // The file job only honors the "Range" header; everything else is ignored.
  std::string range_value;
  if (!headers.GetHeader(net::HttpRequestHeaders::kRange, &range_value))
    return;

  std::vector<net::HttpByteRange> parsed_ranges;
  if (!net::HttpUtil::ParseRangeHeader(range_value, &parsed_ranges))
    return;

  if (parsed_ranges.size() == 1) {
    byte_range_ = parsed_ranges[0];
    return;
  }

  // We don't support multiple range requests in one single URL request,
  // because we need to do multipart encoding here.
  // TODO(hclam): decide whether we want to support multiple range requests.
  NotifyDone(URLRequestStatus(URLRequestStatus::FAILED,
                              net::ERR_REQUEST_RANGE_NOT_SATISFIABLE));
}
Expand Down
2 changes: 1 addition & 1 deletion net/url_request/url_request_file_job.h
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ class URLRequestFileJob : public URLRequestJob {
virtual bool GetContentEncodings(
std::vector<Filter::FilterType>* encoding_type);
virtual bool GetMimeType(std::string* mime_type) const;
virtual void SetExtraRequestHeaders(const std::string& headers);
virtual void SetExtraRequestHeaders(const net::HttpRequestHeaders& headers);

static URLRequest::ProtocolFactory Factory;

Expand Down
4 changes: 2 additions & 2 deletions net/url_request/url_request_http_job.cc
Original file line number Diff line number Diff line change
Expand Up @@ -131,9 +131,9 @@ void URLRequestHttpJob::SetUpload(net::UploadData* upload) {
}

void URLRequestHttpJob::SetExtraRequestHeaders(
const std::string& headers) {
const net::HttpRequestHeaders& headers) {
DCHECK(!transaction_.get()) << "cannot change once started";
request_info_.extra_headers.AddHeadersFromString(headers);
request_info_.extra_headers.CopyFrom(headers);
}

void URLRequestHttpJob::Start() {
Expand Down
2 changes: 1 addition & 1 deletion net/url_request/url_request_http_job.h
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ class URLRequestHttpJob : public URLRequestJob {

// URLRequestJob methods:
virtual void SetUpload(net::UploadData* upload);
virtual void SetExtraRequestHeaders(const std::string& headers);
virtual void SetExtraRequestHeaders(const net::HttpRequestHeaders& headers);
virtual void Start();
virtual void Kill();
virtual net::LoadState GetLoadState() const;
Expand Down
3 changes: 2 additions & 1 deletion net/url_request/url_request_job.h
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@

namespace net {
class AuthChallengeInfo;
class HttpRequestHeaders;
class HttpResponseInfo;
class IOBuffer;
class UploadData;
Expand Down Expand Up @@ -53,7 +54,7 @@ class URLRequestJob : public base::RefCountedThreadSafe<URLRequestJob>,
virtual void SetUpload(net::UploadData* upload) { }

// Sets extra request headers for Job types that support request headers.
virtual void SetExtraRequestHeaders(const std::string& headers) { }
virtual void SetExtraRequestHeaders(const net::HttpRequestHeaders& headers) {}

// If any error occurs while starting the Job, NotifyStartError should be
// called.
Expand Down
30 changes: 12 additions & 18 deletions webkit/appcache/appcache_update_job_unittest.cc
Original file line number Diff line number Diff line change
Expand Up @@ -241,24 +241,18 @@ class HttpHeadersRequestTestJob : public URLRequestTestJob {
const std::string& scheme) {
if (!already_checked_) {
already_checked_ = true; // only check once for a test
const std::string& extra_headers = request->extra_request_headers();
const std::string if_modified_since = "If-Modified-Since: ";
size_t pos = extra_headers.find(if_modified_since);
if (pos != std::string::npos) {
saw_if_modified_since_ = (0 == extra_headers.compare(
pos + if_modified_since.length(),
expect_if_modified_since_.length(),
expect_if_modified_since_));
}

const std::string if_none_match = "If-None-Match: ";
pos = extra_headers.find(if_none_match);
if (pos != std::string::npos) {
saw_if_none_match_ = (0 == extra_headers.compare(
pos + if_none_match.length(),
expect_if_none_match_.length(),
expect_if_none_match_));
}
const net::HttpRequestHeaders& extra_headers =
request->extra_request_headers();
std::string header_value;
saw_if_modified_since_ =
extra_headers.GetHeader(
net::HttpRequestHeaders::kIfModifiedSince, &header_value) &&
header_value == expect_if_modified_since_;

saw_if_none_match_ =
extra_headers.GetHeader(
net::HttpRequestHeaders::kIfNoneMatch, &header_value) &&
header_value == expect_if_none_match_;
}
return NULL;
}
Expand Down

0 comments on commit ee1a29b

Please sign in to comment.