| Index: net/url_request/url_request_job.cc
| diff --git a/net/url_request/url_request_job.cc b/net/url_request/url_request_job.cc
| index 259a2d8925f324bfc35c5095def697cc3c9661bd..5847edca7ac49e333b468685416b2f0a16c1dd1f 100644
| --- a/net/url_request/url_request_job.cc
| +++ b/net/url_request/url_request_job.cc
| @@ -163,6 +163,45 @@ void URLRequestJob::Kill() {
|  // This function calls ReadRawData to get stream data. If a filter exists, it
|  // passes the data to the attached filter. It then returns the output from
|  // filter back to the caller.
| +int URLRequestJob::Read(IOBuffer* buf, int buf_size) {
| +  DCHECK_LT(buf_size, 1000000);  // Sanity check.
| +  DCHECK(buf);
| +  DCHECK(!filtered_read_buffer_);
| +  DCHECK_EQ(0, filtered_read_buffer_len_);
| +
| +  Error error = OK;
| +  int bytes_read = 0;
| +
| +  // Skip Filter if not present.
| +  if (!filter_) {
| +    error = ReadRawDataHelper(buf, buf_size, &bytes_read);
| +  } else {
| +    // Save the caller's buffers while we do IO
| +    // in the filter's buffers.
| +    filtered_read_buffer_ = buf;
| +    filtered_read_buffer_len_ = buf_size;
| +
| +    error = ReadFilteredData(&bytes_read);
| +
| +    // Synchronous EOF from the filter.
| +    if (error == OK && bytes_read == 0)
| +      DoneReading();
| +  }
| +
| +  if (error == OK) {
| +    // If URLRequestJob read zero bytes, the job is at EOF.
| +    if (bytes_read == 0)
| +      NotifyDone(URLRequestStatus());
| +  } else if (error == ERR_IO_PENDING) {
| +    bytes_read = ERR_IO_PENDING;
| +  } else {
| +    NotifyDone(URLRequestStatus::FromError(error));
| +    bytes_read = error;
| +  }
| +  return bytes_read;
| +}
| +
| +// Deprecated.
|  bool URLRequestJob::Read(IOBuffer* buf, int buf_size, int *bytes_read) {
|    DCHECK_LT(buf_size, 1000000);  // Sanity check.
|    DCHECK(buf);
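The new overload folds the deprecated bool/out-parameter contract into a single int return value: a non-negative result is a synchronous byte count (0 meaning EOF), ERR_IO_PENDING signals that the read will complete asynchronously, and any other negative value is a net error for which the job has already called NotifyDone(). A minimal caller-side sketch of that mapping is below; the includes and net/ types are real, but AdaptLegacyRead is a hypothetical helper written for illustration, not code from this patch, and it assumes the old contract's convention that false covered both the pending and error cases.

// A minimal sketch, not part of the patch: maps the new int-returning
// Read() back onto the deprecated bool/out-parameter contract.
#include "net/base/io_buffer.h"
#include "net/base/net_errors.h"
#include "net/url_request/url_request_job.h"

namespace net {

// Hypothetical helper for illustration only.
bool AdaptLegacyRead(URLRequestJob* job,
                     IOBuffer* buf,
                     int buf_size,
                     int* bytes_read) {
  int result = job->Read(buf, buf_size);
  if (result >= 0) {
    *bytes_read = result;  // Synchronous success; 0 means EOF.
    return true;
  }
  *bytes_read = 0;
  // result is either ERR_IO_PENDING (the read completes asynchronously)
  // or another negative net error (already reported via NotifyDone()).
  // Both mapped to false in the old contract, which callers had to
  // disambiguate through the request's status.
  return false;
}

}  // namespace net

Returning a single int that is either a byte count or a net error also matches the convention used elsewhere in //net (for example, socket Read/Write), which is presumably why the bool/out-parameter form is being retired.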