// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Simple implementation of a data: protocol handler.
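//
// A data: URL carries its response body inline in the URL itself, e.g.
//   data:text/plain;charset=US-ASCII,Hello%20world
// (an illustrative URL; the exact decoding rules are implemented by
// net::DataURL::Parse(), used below).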

#include "net/url_request/url_request_data_job.h"

#include "net/base/data_url.h"
#include "net/base/net_errors.h"
#include "net/http/http_response_headers.h"
#include "url/gurl.h"

namespace net {

int URLRequestDataJob::BuildResponse(const GURL& url,
                                     std::string* mime_type,
                                     std::string* charset,
                                     std::string* data,
                                     HttpResponseHeaders* headers) {
  if (!net::DataURL::Parse(url, mime_type, charset, data))
    return net::ERR_INVALID_URL;

  // |mime_type| set by net::DataURL::Parse() is guaranteed to be in
  //     token "/" token
  // form. |charset| is also guaranteed to be a token.

  DCHECK(!mime_type->empty());
  DCHECK(!charset->empty());

  if (headers) {
    headers->ReplaceStatusLine("HTTP/1.1 200 OK");
33 // "charset" in the Content-Type header is specified explicitly to follow | |
34 // the "token" ABNF in the HTTP spec. When DataURL::Parse() call is | |
35 // successful, it's guaranteed that the string in |charset| follows the | |
36 // "token" ABNF. | |
    std::string content_type_header =
        "Content-Type: " + *mime_type + ";charset=" + *charset;
    headers->AddHeader(content_type_header);
    headers->AddHeader("Access-Control-Allow-Origin: *");
  }

  return net::OK;
}
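
// For example (an illustrative sketch of the behavior above): for the URL
// "data:text/plain;charset=US-ASCII,Hello", BuildResponse() sets |mime_type|
// to "text/plain", |charset| to "US-ASCII", |data| to "Hello", and, when
// |headers| is non-null, produces
//   HTTP/1.1 200 OK
//   Content-Type: text/plain;charset=US-ASCII
//   Access-Control-Allow-Origin: *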

URLRequestDataJob::URLRequestDataJob(
    URLRequest* request, NetworkDelegate* network_delegate)
    : URLRequestSimpleJob(request, network_delegate) {
}

int URLRequestDataJob::GetData(std::string* mime_type,
                               std::string* charset,
                               std::string* data,
                               const CompletionCallback& callback) const {
  // Check that the data URL is valid. If not, don't bother trying to extract
  // data; otherwise, parse the data out of the data URL.
  const GURL& url = request_->url();
  if (!url.is_valid())
    return ERR_INVALID_URL;

  // TODO(tyoshino): Get the headers and export via
  // URLRequestJob::GetResponseInfo().
  return BuildResponse(url, mime_type, charset, data, NULL);
}
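
// Note that GetData() completes synchronously here: it returns a net error
// code (OK or ERR_INVALID_URL) directly and never uses |callback|, which is
// presumably reserved for URLRequestSimpleJob implementations that complete
// asynchronously.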

URLRequestDataJob::~URLRequestDataJob() {
}

}  // namespace net