OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright 2011 Google Inc. | 2 * Copyright 2011 Google Inc. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
6 */ | 6 */ |
7 | 7 |
8 #include "SkDocument.h" | 8 #include "SkDocument.h" |
9 #include "SkPDFCanon.h" | 9 #include "SkPDFCanon.h" |
10 #include "SkPDFDevice.h" | 10 #include "SkPDFDevice.h" |
(...skipping 13 matching lines...) | |
24 static void emit_pdf_footer(SkWStream* stream, | 24 static void emit_pdf_footer(SkWStream* stream, |
25 const SkPDFObjNumMap& objNumMap, | 25 const SkPDFObjNumMap& objNumMap, |
26 const SkPDFSubstituteMap& substitutes, | 26 const SkPDFSubstituteMap& substitutes, |
27 SkPDFObject* docCatalog, | 27 SkPDFObject* docCatalog, |
28 int64_t objCount, | 28 int64_t objCount, |
29 int32_t xRefFileOffset) { | 29 int32_t xRefFileOffset) { |
30 SkPDFDict trailerDict; | 30 SkPDFDict trailerDict; |
31 // TODO(vandebo): Linearized format will take a Prev entry too. | 31 // TODO(vandebo): Linearized format will take a Prev entry too. |
32 // TODO(vandebo): PDF/A requires an ID entry. | 32 // TODO(vandebo): PDF/A requires an ID entry. |
33 trailerDict.insertInt("Size", int(objCount)); | 33 trailerDict.insertInt("Size", int(objCount)); |
34 trailerDict.insert("Root", new SkPDFObjRef(docCatalog))->unref(); | 34 trailerDict.insertObjRef("Root", SkRef(docCatalog)); |
35 | 35 |
36 stream->writeText("trailer\n"); | 36 stream->writeText("trailer\n"); |
37 trailerDict.emitObject(stream, objNumMap, substitutes); | 37 trailerDict.emitObject(stream, objNumMap, substitutes); |
38 stream->writeText("\nstartxref\n"); | 38 stream->writeText("\nstartxref\n"); |
39 stream->writeBigDecAsText(xRefFileOffset); | 39 stream->writeBigDecAsText(xRefFileOffset); |
40 stream->writeText("\n%%EOF"); | 40 stream->writeText("\n%%EOF"); |
41 } | 41 } |
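
The recurring change in this CL is visible right here: trailerDict.insert("Root", new SkPDFObjRef(docCatalog))->unref() becomes trailerDict.insertObjRef("Root", SkRef(docCatalog)). The new call adopts the reference it is handed, so a caller that still needs the object passes SkRef(obj) to add a reference on the dictionary's behalf. A minimal standalone sketch of that ownership convention (illustrative only, not Skia's actual classes):

```cpp
// Minimal sketch (not Skia's implementation) of the adopt-a-reference
// convention: insertObject()/insertObjRef() take ownership of the reference
// they are handed, so a caller that wants to keep using the object calls
// SkRef() to add a reference on the dictionary's behalf.
#include <cassert>
#include <map>
#include <string>

struct RefCounted {
    int fRefCnt = 1;                                // starts owned by its creator
    void ref() { ++fRefCnt; }
    void unref() { if (--fRefCnt == 0) { delete this; } }
    virtual ~RefCounted() = default;
};

template <typename T> T* Ref(T* obj) { obj->ref(); return obj; }   // like SkRef()

struct Dict : RefCounted {
    std::map<std::string, RefCounted*> fEntries;
    void insertObject(const std::string& key, RefCounted* value) {
        fEntries[key] = value;                      // adopts the caller's reference
    }
    ~Dict() override {
        for (auto& e : fEntries) { e.second->unref(); }
    }
};

int main() {
    RefCounted* catalog = new RefCounted;           // caller owns one reference
    Dict* trailer = new Dict;
    trailer->insertObject("Root", Ref(catalog));    // dict adopts the extra ref
    trailer->unref();                               // dict destroyed, releases its ref
    assert(catalog->fRefCnt == 1);                  // caller's reference still valid
    catalog->unref();
    return 0;
}
```

The same adopt-the-reference rule explains the SkRef(annots), SkRef(newNode.get()), and kids.detach() calls further down in the new column.
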
42 | 42 |
43 static void perform_font_subsetting( | 43 static void perform_font_subsetting( |
44 const SkTDArray<const SkPDFDevice*>& pageDevices, | 44 const SkTDArray<const SkPDFDevice*>& pageDevices, |
45 SkPDFSubstituteMap* substituteMap) { | 45 SkPDFSubstituteMap* substituteMap) { |
46 SkASSERT(substituteMap); | 46 SkASSERT(substituteMap); |
47 | 47 |
48 SkPDFGlyphSetMap usage; | 48 SkPDFGlyphSetMap usage; |
49 for (int i = 0; i < pageDevices.count(); ++i) { | 49 for (int i = 0; i < pageDevices.count(); ++i) { |
50 usage.merge(pageDevices[i]->getFontGlyphUsage()); | 50 usage.merge(pageDevices[i]->getFontGlyphUsage()); |
51 } | 51 } |
52 SkPDFGlyphSetMap::F2BIter iterator(usage); | 52 SkPDFGlyphSetMap::F2BIter iterator(usage); |
53 const SkPDFGlyphSetMap::FontGlyphSetPair* entry = iterator.next(); | 53 const SkPDFGlyphSetMap::FontGlyphSetPair* entry = iterator.next(); |
54 while (entry) { | 54 while (entry) { |
55 SkAutoTUnref<SkPDFFont> subsetFont( | 55 SkAutoTUnref<SkPDFFont> subsetFont( |
56 entry->fFont->getFontSubset(entry->fGlyphSet)); | 56 entry->fFont->getFontSubset(entry->fGlyphSet)); |
57 if (subsetFont) { | 57 if (subsetFont) { |
58 substituteMap->setSubstitute(entry->fFont, subsetFont.get()); | 58 substituteMap->setSubstitute(entry->fFont, subsetFont.get()); |
59 } | 59 } |
60 entry = iterator.next(); | 60 entry = iterator.next(); |
61 } | 61 } |
62 } | 62 } |
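
perform_font_subsetting() never rewrites page content; it merges per-page glyph usage and then records, for each font that can be subset, a replacement object in the SkPDFSubstituteMap. A rough sketch of that lookup-table idea (hypothetical names beyond setSubstitute(), which appears above):

```cpp
// Hypothetical sketch of the substitute-map idea: serialization asks the map
// about every object it is about to reference and gets back either the
// registered replacement (e.g. a subset font) or the original object.
#include <cassert>
#include <unordered_map>

struct Object { int id; };

class SubstituteMap {
public:
    void setSubstitute(const Object* original, Object* substitute) {
        fSubstitutes[original] = substitute;
    }
    Object* getSubstitute(Object* object) const {
        auto it = fSubstitutes.find(object);
        return it != fSubstitutes.end() ? it->second : object;
    }
private:
    std::unordered_map<const Object*, Object*> fSubstitutes;
};

int main() {
    Object fullFont{1}, subsetFont{2}, image{3};
    SubstituteMap substitutes;
    substitutes.setSubstitute(&fullFont, &subsetFont);
    assert(substitutes.getSubstitute(&fullFont) == &subsetFont);  // font swapped
    assert(substitutes.getSubstitute(&image) == &image);          // others untouched
    return 0;
}
```
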
63 | 63 |
64 static SkPDFObject* create_pdf_page_content(const SkPDFDevice* pageDevice) { | |
65 SkAutoTDelete<SkStreamAsset> content(pageDevice->content()); | |
66 return SkNEW_ARGS(SkPDFStream, (content.get())); | |
tomhudson 2015/05/04 18:15:04: Ugh, I really need to internalize your memory mana
hal.canary 2015/05/05 14:37:50: Yes. SkStreamRewindableFromSkStream should be alm
| |
67 } | |
68 | |
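
The review exchange above is about ownership in this new helper: pageDevice->content() hands back a heap-allocated stream that the helper needs only while constructing the SkPDFStream, so SkAutoTDelete frees it on return, and the SkPDFStream itself is returned for the caller (create_pdf_page) to adopt via insertObjRef(). A rough modern-C++ analogue of that split (illustrative only, not Skia API):

```cpp
// Illustrative analogue (not Skia API): the input stream lives only inside
// the helper and is freed on return, while the wrapper built from it is
// handed back with ownership transferred to the caller.
#include <memory>
#include <string>

struct StreamAsset { std::string bytes; };
struct PdfStream {
    std::string copied;
    explicit PdfStream(const StreamAsset& s) : copied(s.bytes) {}
};

std::unique_ptr<PdfStream> create_page_content(std::unique_ptr<StreamAsset> content) {
    // 'content' plays the role of SkAutoTDelete<SkStreamAsset>: destroyed on return.
    return std::make_unique<PdfStream>(*content);
}

int main() {
    auto pdf = create_page_content(
            std::make_unique<StreamAsset>(StreamAsset{"...page drawing ops..."}));
    return pdf->copied.empty() ? 1 : 0;
}
```
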
64 static SkPDFDict* create_pdf_page(const SkPDFDevice* pageDevice) { | 69 static SkPDFDict* create_pdf_page(const SkPDFDevice* pageDevice) { |
65 SkAutoTUnref<SkPDFDict> page(SkNEW_ARGS(SkPDFDict, ("Page"))); | 70 SkAutoTUnref<SkPDFDict> page(SkNEW_ARGS(SkPDFDict, ("Page"))); |
66 SkAutoTUnref<SkPDFDict> deviceResourceDict( | 71 page->insertObject("Resources", pageDevice->createResourceDict()); |
67 pageDevice->createResourceDict()); | 72 page->insertObject("MediaBox", pageDevice->copyMediaBox()); |
68 page->insert("Resources", deviceResourceDict.get()); | 73 if (SkPDFArray* annots = pageDevice->getAnnotations()) { |
69 | 74 SkASSERT(annots->size() > 0); |
70 SkAutoTUnref<SkPDFArray> mediaBox(pageDevice->copyMediaBox()); | 75 page->insertObject("Annots", SkRef(annots)); |
71 page->insert("MediaBox", mediaBox.get()); | |
72 | |
73 SkPDFArray* annots = pageDevice->getAnnotations(); | |
74 if (annots && annots->size() > 0) { | |
75 page->insert("Annots", annots); | |
76 } | 76 } |
77 | 77 page->insertObjRef("Contents", create_pdf_page_content(pageDevice)); |
78 SkAutoTDelete<SkStreamAsset> content(pageDevice->content()); | |
79 SkAutoTUnref<SkPDFStream> contentStream( | |
80 SkNEW_ARGS(SkPDFStream, (content.get()))); | |
81 page->insert("Contents", new SkPDFObjRef(contentStream.get()))->unref(); | |
82 return page.detach(); | 78 return page.detach(); |
83 } | 79 } |
84 | 80 |
85 static void generate_page_tree(const SkTDArray<SkPDFDict*>& pages, | 81 static void generate_page_tree(const SkTDArray<SkPDFDict*>& pages, |
86 SkTDArray<SkPDFDict*>* pageTree, | 82 SkTDArray<SkPDFDict*>* pageTree, |
87 SkPDFDict** rootNode) { | 83 SkPDFDict** rootNode) { |
88 // PDF wants a tree describing all the pages in the document. We arbitrarily | 84 // PDF wants a tree describing all the pages in the document. We arbitrarily |
89 // choose 8 (kNodeSize) as the number of allowed children. The internal | 85 // choose 8 (kNodeSize) as the number of allowed children. The internal |
90 // nodes have type "Pages" with an array of children, a parent pointer, and | 86 // nodes have type "Pages" with an array of children, a parent pointer, and |
91 // the number of leaves below the node as "Count." The leaves are passed | 87 // the number of leaves below the node as "Count." The leaves are passed |
(...skipping 15 matching lines...) | |
107 nextRoundNodes.setReserve((pages.count() + kNodeSize - 1)/kNodeSize); | 103 nextRoundNodes.setReserve((pages.count() + kNodeSize - 1)/kNodeSize); |
108 | 104 |
109 int treeCapacity = kNodeSize; | 105 int treeCapacity = kNodeSize; |
110 do { | 106 do { |
111 for (int i = 0; i < curNodes.count(); ) { | 107 for (int i = 0; i < curNodes.count(); ) { |
112 if (i > 0 && i + 1 == curNodes.count()) { | 108 if (i > 0 && i + 1 == curNodes.count()) { |
113 nextRoundNodes.push(curNodes[i]); | 109 nextRoundNodes.push(curNodes[i]); |
114 break; | 110 break; |
115 } | 111 } |
116 | 112 |
117 SkPDFDict* newNode = new SkPDFDict("Pages"); | 113 SkAutoTUnref<SkPDFDict> newNode(new SkPDFDict("Pages")); |
118 SkAutoTUnref<SkPDFObjRef> newNodeRef(new SkPDFObjRef(newNode)); | |
119 | |
120 SkAutoTUnref<SkPDFArray> kids(new SkPDFArray); | 114 SkAutoTUnref<SkPDFArray> kids(new SkPDFArray); |
121 kids->reserve(kNodeSize); | 115 kids->reserve(kNodeSize); |
122 | 116 |
123 int count = 0; | 117 int count = 0; |
124 for (; i < curNodes.count() && count < kNodeSize; i++, count++) { | 118 for (; i < curNodes.count() && count < kNodeSize; i++, count++) { |
125 curNodes[i]->insert("Parent", newNodeRef.get()); | 119 curNodes[i]->insertObjRef("Parent", SkRef(newNode.get())); |
126 kids->append(new SkPDFObjRef(curNodes[i]))->unref(); | 120 kids->appendObjRef(SkRef(curNodes[i])); |
127 | 121 |
128 // TODO(vandebo): put the objects in strict access order. | 122 // TODO(vandebo): put the objects in strict access order. |
129 // Probably doesn't matter because they are so small. | 123 // Probably doesn't matter because they are so small. |
130 if (curNodes[i] != pages[0]) { | 124 if (curNodes[i] != pages[0]) { |
131 pageTree->push(curNodes[i]); // Transfer reference. | 125 pageTree->push(curNodes[i]); // Transfer reference. |
132 } else { | 126 } else { |
133 SkSafeUnref(curNodes[i]); | 127 SkSafeUnref(curNodes[i]); |
134 } | 128 } |
135 } | 129 } |
136 | 130 |
137 // treeCapacity is the number of leaf nodes possible for the | 131 // treeCapacity is the number of leaf nodes possible for the |
138 // current set of subtrees being generated. (i.e. 8, 64, 512, ...). | 132 // current set of subtrees being generated. (i.e. 8, 64, 512, ...). |
139 // It is hard to count the number of leaf nodes in the current | 133 // It is hard to count the number of leaf nodes in the current |
140 // subtree. However, by construction, we know that unless it's the | 134 // subtree. However, by construction, we know that unless it's the |
141 // last subtree for the current depth, the leaf count will be | 135 // last subtree for the current depth, the leaf count will be |
142 // treeCapacity, otherwise it's whatever is left over after | 136 // treeCapacity, otherwise it's whatever is left over after |
143 // consuming treeCapacity chunks. | 137 // consuming treeCapacity chunks. |
144 int pageCount = treeCapacity; | 138 int pageCount = treeCapacity; |
145 if (i == curNodes.count()) { | 139 if (i == curNodes.count()) { |
146 pageCount = ((pages.count() - 1) % treeCapacity) + 1; | 140 pageCount = ((pages.count() - 1) % treeCapacity) + 1; |
147 } | 141 } |
148 newNode->insert("Count", new SkPDFInt(pageCount))->unref(); | 142 newNode->insertInt("Count", pageCount); |
149 newNode->insert("Kids", kids.get()); | 143 newNode->insertObject("Kids", kids.detach()); |
150 nextRoundNodes.push(newNode); // Transfer reference. | 144 nextRoundNodes.push(newNode.detach()); // Transfer reference. |
151 } | 145 } |
152 | 146 |
153 curNodes = nextRoundNodes; | 147 curNodes = nextRoundNodes; |
154 nextRoundNodes.rewind(); | 148 nextRoundNodes.rewind(); |
155 treeCapacity *= kNodeSize; | 149 treeCapacity *= kNodeSize; |
156 } while (curNodes.count() > 1); | 150 } while (curNodes.count() > 1); |
157 | 151 |
158 pageTree->push(curNodes[0]); // Transfer reference. | 152 pageTree->push(curNodes[0]); // Transfer reference. |
159 if (rootNode) { | 153 if (rootNode) { |
160 *rootNode = curNodes[0]; | 154 *rootNode = curNodes[0]; |
(...skipping 16 matching lines...) | |
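
The loop above builds the tree bottom-up: each pass groups at most kNodeSize (8) children under a new "Pages" node until a single root remains. A back-of-the-envelope sketch of the resulting shape (illustrative arithmetic only; it ignores the early exit at the top of the inner loop that hoists a lone trailing node instead of wrapping it):

```cpp
// Back-of-the-envelope sketch (not Skia code): how many "Pages" nodes each
// level of the tree needs for a given page count with a fan-out of 8.
#include <cstdio>

int main() {
    const int kNodeSize = 8;
    int nodes = 100;                                   // e.g. a 100-page document
    for (int level = 1; nodes > 1; ++level) {
        nodes = (nodes + kNodeSize - 1) / kNodeSize;   // parents needed at this level
        std::printf("level %d: %d Pages node(s)\n", level, nodes);
    }
    return 0;                                          // 100 -> 13 -> 2 -> 1 (root)
}
```
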
177 SkAutoTUnref<SkPDFDict> page(create_pdf_page(pageDevices[i])); | 171 SkAutoTUnref<SkPDFDict> page(create_pdf_page(pageDevices[i])); |
178 pageDevices[i]->appendDestinations(dests, page.get()); | 172 pageDevices[i]->appendDestinations(dests, page.get()); |
179 pages.push(page.detach()); | 173 pages.push(page.detach()); |
180 } | 174 } |
181 | 175 |
182 SkTDArray<SkPDFDict*> pageTree; | 176 SkTDArray<SkPDFDict*> pageTree; |
183 SkAutoTUnref<SkPDFDict> docCatalog(SkNEW_ARGS(SkPDFDict, ("Catalog"))); | 177 SkAutoTUnref<SkPDFDict> docCatalog(SkNEW_ARGS(SkPDFDict, ("Catalog"))); |
184 | 178 |
185 SkPDFDict* pageTreeRoot; | 179 SkPDFDict* pageTreeRoot; |
186 generate_page_tree(pages, &pageTree, &pageTreeRoot); | 180 generate_page_tree(pages, &pageTree, &pageTreeRoot); |
181 docCatalog->insertObjRef("Pages", SkRef(pageTreeRoot)); | |
187 | 182 |
188 docCatalog->insert("Pages", new SkPDFObjRef(pageTreeRoot))->unref(); | 183 if (dests->size() > 0) { |
184 docCatalog->insertObjRef("Dests", dests.detach()); | |
185 } | |
189 | 186 |
190 /* TODO(vandebo): output intent | 187 /* TODO(vandebo): output intent |
191 SkAutoTUnref<SkPDFDict> outputIntent = new SkPDFDict("OutputIntent"); | 188 SkAutoTUnref<SkPDFDict> outputIntent = new SkPDFDict("OutputIntent"); |
192 outputIntent->insert("S", new SkPDFName("GTS_PDFA1"))->unref(); | 189 outputIntent->insertName("S", "GTS_PDFA1"); |
193 outputIntent->insert("OutputConditionIdentifier", | 190 outputIntent->insertString("OutputConditionIdentifier", "sRGB"); |
194 new SkPDFString("sRGB"))->unref(); | 191 SkAutoTUnref<SkPDFArray> intentArray(new SkPDFArray); |
195 SkAutoTUnref<SkPDFArray> intentArray = new SkPDFArray; | 192 intentArray->appendObject(SkRef(outputIntent.get())); |
196 intentArray->append(outputIntent.get()); | 193 docCatalog->insertObject("OutputIntent", intentArray.detach()); |
197 docCatalog->insert("OutputIntent", intentArray.get()); | |
198 */ | 194 */ |
199 | 195 |
200 if (dests->size() > 0) { | |
201 docCatalog->insert("Dests", SkNEW_ARGS(SkPDFObjRef, (dests.get()))) | |
202 ->unref(); | |
203 } | |
204 | |
205 // Build font subsetting info before proceeding. | 196 // Build font subsetting info before proceeding. |
206 SkPDFSubstituteMap substitutes; | 197 SkPDFSubstituteMap substitutes; |
207 perform_font_subsetting(pageDevices, &substitutes); | 198 perform_font_subsetting(pageDevices, &substitutes); |
208 | 199 |
209 SkPDFObjNumMap objNumMap; | 200 SkPDFObjNumMap objNumMap; |
210 if (objNumMap.addObject(docCatalog.get())) { | 201 if (objNumMap.addObject(docCatalog.get())) { |
211 docCatalog->addResources(&objNumMap, substitutes); | 202 docCatalog->addResources(&objNumMap, substitutes); |
212 } | 203 } |
213 size_t baseOffset = SkToOffT(stream->bytesWritten()); | 204 size_t baseOffset = SkToOffT(stream->bytesWritten()); |
214 emit_pdf_header(stream); | 205 emit_pdf_header(stream); |
(...skipping 140 matching lines...) |
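
The elided emission code is what produces the two values emit_pdf_footer() consumes: objCount for the Size entry and xRefFileOffset for the startxref line. Because the PDF may not begin at byte 0 of the caller's stream, the baseOffset captured above presumably keeps the recorded offsets header-relative. A toy sketch of that bookkeeping (hypothetical, not the Skia code):

```cpp
// Toy sketch (hypothetical, not the Skia code): record the header-relative
// byte offset of every object as it is written; the offset where the
// cross-reference table itself starts is the startxref value in the trailer.
#include <cstdio>
#include <string>
#include <vector>

int main() {
    std::string out = "...bytes already written to the stream...";  // before the PDF
    const size_t baseOffset = out.size();                           // like SkToOffT(bytesWritten())

    out += "%PDF-1.4\n";                                            // header stand-in
    std::vector<size_t> objectOffsets;
    for (int i = 1; i <= 3; ++i) {
        objectOffsets.push_back(out.size() - baseOffset);           // start of object i
        out += std::to_string(i) + " 0 obj\n<< >>\nendobj\n";       // stand-in object body
    }
    const size_t xRefFileOffset = out.size() - baseOffset;          // where "xref" would begin
    std::printf("%zu objects, startxref %zu\n", objectOffsets.size(), xRefFileOffset);
    return 0;
}
```
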
355 | 346 |
356 SkDocument* SkDocument::CreatePDF(const char path[], SkScalar dpi) { | 347 SkDocument* SkDocument::CreatePDF(const char path[], SkScalar dpi) { |
357 SkFILEWStream* stream = SkNEW_ARGS(SkFILEWStream, (path)); | 348 SkFILEWStream* stream = SkNEW_ARGS(SkFILEWStream, (path)); |
358 if (!stream->isValid()) { | 349 if (!stream->isValid()) { |
359 SkDELETE(stream); | 350 SkDELETE(stream); |
360 return NULL; | 351 return NULL; |
361 } | 352 } |
362 auto delete_wstream = [](SkWStream* stream, bool) { SkDELETE(stream); }; | 353 auto delete_wstream = [](SkWStream* stream, bool) { SkDELETE(stream); }; |
363 return SkNEW_ARGS(SkDocument_PDF, (stream, delete_wstream, dpi)); | 354 return SkNEW_ARGS(SkDocument_PDF, (stream, delete_wstream, dpi)); |
364 } | 355 } |
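
The factory pairs the freshly created SkFILEWStream with a deleter lambda, so SkDocument_PDF can treat owning and non-owning streams uniformly: whatever callback it was constructed with runs once the document is done with the stream (the unnamed bool is presumably an aborted/succeeded flag). A generic sketch of that pattern (hypothetical names, not the Skia classes):

```cpp
// Generic sketch (hypothetical names, not the Skia classes) of the
// done-callback pattern: the document works against any stream, and the
// callback supplied at construction decides whether that stream is deleted
// when the document is finished with it.
#include <functional>

struct Sink { virtual ~Sink() = default; };
struct FileSink : Sink { /* would wrap a file handle */ };

class Document {
public:
    Document(Sink* sink, std::function<void(Sink*, bool)> done)
        : fSink(sink), fDone(std::move(done)) {}
    void close(bool aborted = false) {
        fDone(fSink, aborted);          // lets the creator reclaim the stream
        fSink = nullptr;
    }
private:
    Sink* fSink;
    std::function<void(Sink*, bool)> fDone;
};

int main() {
    // Owning variant, like CreatePDF(const char path[]): the factory made the
    // stream, so its callback deletes it.
    Document owning(new FileSink, [](Sink* s, bool) { delete s; });
    owning.close();

    // Non-owning variant: the caller keeps responsibility for the sink.
    FileSink callerOwned;
    Document borrowing(&callerOwned, [](Sink*, bool) { /* no-op */ });
    borrowing.close();
    return 0;
}
```
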