Mirror of https://github.com/mozilla/pdf.js.git (synced 2025-04-19 22:58:07 +02:00)
Correctly update the xref table when an annotation is deleted
parent aaa55d2b38
commit 901d995a7e
4 changed files with 131 additions and 18 deletions
src/core/document.js
@@ -46,7 +46,15 @@ import {
   XRefEntryException,
   XRefParseException,
 } from "./core_utils.js";
-import { Dict, isName, isRefsEqual, Name, Ref, RefSet } from "./primitives.js";
+import {
+  Dict,
+  isName,
+  isRefsEqual,
+  Name,
+  Ref,
+  RefSet,
+  RefSetCache,
+} from "./primitives.js";
 import { getXfaFontDict, getXfaFontName } from "./xfa_fonts.js";
 import { BaseStream } from "./base_stream.js";
 import { calculateMD5 } from "./crypto.js";
@@ -272,7 +280,7 @@ class Page {
           continue;
         }
         if (annotation.deleted) {
-          deletedAnnotations.put(ref);
+          deletedAnnotations.put(ref, ref);
           continue;
         }
         existingAnnotations?.put(ref);
@@ -300,7 +308,7 @@ class Page {
       options: this.evaluatorOptions,
     });
 
-    const deletedAnnotations = new RefSet();
+    const deletedAnnotations = new RefSetCache();
     const existingAnnotations = new RefSet();
     this.#replaceIdByRef(annotations, deletedAnnotations, existingAnnotations);
 
@@ -335,6 +343,9 @@ class Page {
       { ref: this.ref, data: buffer.join("") },
      ...newData.annotations
     );
+    for (const deletedRef of deletedAnnotations) {
+      objects.push({ ref: deletedRef, data: null });
+    }
 
     return objects;
   }
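The switch from RefSet to RefSetCache is what makes the last hunk possible: the page now has to iterate over the deleted references afterwards and emit a { ref, data: null } entry for each of them, so the container must hold values that can be walked, not just membership flags. A minimal, self-contained sketch of that idea follows; the DeletedRefCache class and its internals are illustrative assumptions, not pdf.js's actual primitives.js implementation.

// Stand-in for a RefSetCache-like container: entries are keyed by the
// reference's string form and the stored value is kept so callers can
// iterate over the collected refs later.
class DeletedRefCache {
  #map = new Map();

  put(ref, value) {
    this.#map.set(`${ref.num}R${ref.gen}`, value);
  }

  has(ref) {
    return this.#map.has(`${ref.num}R${ref.gen}`);
  }

  [Symbol.iterator]() {
    return this.#map.values();
  }
}

// Usage mirroring the diff: collect the refs of deleted annotations, then
// turn each one into a "write no body, just free the xref slot" entry.
const deletedAnnotations = new DeletedRefCache();
deletedAnnotations.put({ num: 26, gen: 0 }, { num: 26, gen: 0 });

const objects = [];
for (const deletedRef of deletedAnnotations) {
  objects.push({ ref: deletedRef, data: null });
}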
src/core/writer.js
@@ -293,10 +293,18 @@ async function getXRefTable(xrefInfo, baseOffset, newRefs, newXref, buffer) {
     }
     // The EOL is \r\n to make sure that every entry is exactly 20 bytes long.
     // (see 7.5.4 - Cross-Reference Table).
-    buffer.push(
-      `${baseOffset.toString().padStart(10, "0")} ${Math.min(ref.gen, 0xffff).toString().padStart(5, "0")} n\r\n`
-    );
-    baseOffset += data.length;
+    if (data !== null) {
+      buffer.push(
+        `${baseOffset.toString().padStart(10, "0")} ${Math.min(ref.gen, 0xffff).toString().padStart(5, "0")} n\r\n`
+      );
+      baseOffset += data.length;
+    } else {
+      buffer.push(
+        `0000000000 ${Math.min(ref.gen + 1, 0xffff)
+          .toString()
+          .padStart(5, "0")} f\r\n`
+      );
+    }
   }
   computeIDs(baseOffset, xrefInfo, newXref);
   buffer.push("trailer\n");
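Each entry in a classic cross-reference table is exactly 20 bytes: a 10-digit byte offset, a 5-digit generation number, a keyword (n for in-use, f for free) and a \r\n terminator (PDF 32000-1, 7.5.4). The hunk above writes an in-use entry when the object still has data to append, and a free entry with offset 0 and an incremented generation number when the object was deleted. A small sketch of that formatting rule, with formatXrefEntry as a hypothetical helper rather than pdf.js code:

// Format one 20-byte cross-reference table entry (see 7.5.4). A live object
// becomes an in-use ("n") entry pointing at its byte offset; a deleted one
// becomes a free ("f") entry with offset 0 and its generation bumped by one.
function formatXrefEntry(offset, gen, isDeleted) {
  const type = isDeleted ? "f" : "n";
  const off = (isDeleted ? 0 : offset).toString().padStart(10, "0");
  const g = Math.min(isDeleted ? gen + 1 : gen, 0xffff)
    .toString()
    .padStart(5, "0");
  return `${off} ${g} ${type}\r\n`; // 10 + 1 + 5 + 1 + 1 + 2 = 20 bytes
}

formatXrefEntry(1, 0x4e, false); // "0000000001 00078 n\r\n"
formatXrefEntry(0, 0x2d, true);  // "0000000000 00046 f\r\n"

The two sample values match the entries expected for refs 456 and 123 in the writer_spec.js test added further down.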
@@ -327,11 +335,17 @@ async function getXRefStreamTable(
   let maxOffset = 0;
   let maxGen = 0;
   for (const { ref, data } of newRefs) {
+    let gen;
     maxOffset = Math.max(maxOffset, baseOffset);
-    const gen = Math.min(ref.gen, 0xffff);
+    if (data !== null) {
+      gen = Math.min(ref.gen, 0xffff);
+      xrefTableData.push([1, baseOffset, gen]);
+      baseOffset += data.length;
+    } else {
+      gen = Math.min(ref.gen + 1, 0xffff);
+      xrefTableData.push([0, 0, gen]);
+    }
     maxGen = Math.max(maxGen, gen);
-    xrefTableData.push([1, baseOffset, gen]);
-    baseOffset += data.length;
   }
   newXref.set("Index", getIndexes(newRefs));
   const offsetSize = getSizeInBytes(maxOffset);
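A cross-reference stream encodes the same information as binary rows whose field widths come from the /W array: [1, offset, generation] marks an in-use object and [0, 0, generation] marks a free one (7.5.8). The sketch below shows how such a row could be packed with one-byte fields, as in the /W [1 1 1] case from the unit test further down; packXrefStreamRow is a hypothetical helper, not the actual getXRefStreamTable serializer.

// Pack one xref-stream row into big-endian bytes, given per-field widths
// taken from /W. Live object: [1, offset, gen]; deleted object: [0, 0, gen + 1].
function packXrefStreamRow([type, offset, gen], widths = [1, 1, 1]) {
  const out = [];
  const fields = [
    [type, widths[0]],
    [offset, widths[1]],
    [gen, widths[2]],
  ];
  for (const [value, width] of fields) {
    for (let i = width - 1; i >= 0; i--) {
      out.push((value >> (8 * i)) & 0xff);
    }
  }
  return Uint8Array.from(out);
}

packXrefStreamRow([0, 0, 0x2e]); // 00 00 2e -> free entry for ref 123 (gen 0x2d + 1)
packXrefStreamRow([1, 1, 0x4e]); // 01 01 4e -> in-use entry for ref 456 at offset 1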
@@ -420,15 +434,13 @@ async function incrementalUpdate({
     });
   }
 
-  let buffer, baseOffset;
+  const buffer = [];
+  let baseOffset = originalData.length;
   const lastByte = originalData.at(-1);
-  if (lastByte === /* \n */ 0x0a || lastByte === /* \r */ 0x0d) {
-    buffer = [];
-    baseOffset = originalData.length;
-  } else {
+  if (lastByte !== /* \n */ 0x0a && lastByte !== /* \r */ 0x0d) {
     // Avoid to concatenate %%EOF with an object definition
-    buffer = ["\n"];
-    baseOffset = originalData.length + 1;
+    buffer.push("\n");
+    baseOffset += 1;
   }
 
   const newXref = getTrailerDict(xrefInfo, newRefs, useXrefStream);
@@ -436,7 +448,9 @@ async function incrementalUpdate({
     (a, b) => /* compare the refs */ a.ref.num - b.ref.num
   );
   for (const { data } of newRefs) {
-    buffer.push(data);
+    if (data !== null) {
+      buffer.push(data);
+    }
   }
 
   await (useXrefStream
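Taken together, the last two hunks mean that offsets in the new xref section are measured from the end of the original file plus an optional separating newline, and that deleted objects contribute no bytes at all. A quick worked sketch of that bookkeeping, using the same inputs as the unit test below (an empty original file, one deleted ref, one 4-byte object body):

// Offset bookkeeping sketch: with an empty original file, at(-1) is
// undefined, so a "\n" separator is appended and content starts at offset 1.
const originalData = new Uint8Array();
const buffer = [];
let baseOffset = originalData.length; // 0

const lastByte = originalData.at(-1);
if (lastByte !== /* \n */ 0x0a && lastByte !== /* \r */ 0x0d) {
  buffer.push("\n"); // keep appended objects off the original %%EOF line
  baseOffset += 1;
}

// Only refs that still have data contribute bytes; deleted refs add nothing.
buffer.push("abc\n");
const xrefStart = baseOffset + "abc\n".length;
console.log(xrefStart); // 5 -> matches "startxref\n5" in the expected output below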
test/test_manifest.json
@@ -9919,5 +9919,26 @@
     "rounds": 1,
     "link": true,
     "type": "eq"
-  }
+  },
+  {
+    "id": "delete_freetexts",
+    "file": "pdfs/freetexts.pdf",
+    "md5": "da1310a25ab796c1201810070d5032a3",
+    "rounds": 1,
+    "type": "eq",
+    "save": true,
+    "print": true,
+    "annotationStorage": {
+      "pdfjs_internal_editor_0": {
+        "deleted": true,
+        "pageIndex": 0,
+        "id": "26R"
+      },
+      "pdfjs_internal_editor_3": {
+        "deleted": true,
+        "pageIndex": 0,
+        "id": "51R"
+      }
+    }
+  }
 ]
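The new manifest entry drives an integration test: freetexts.pdf is loaded, two existing FreeText annotations (objects 26 and 51) are marked as deleted through the annotation storage, and the document is saved and printed before the rendering comparison. A rough illustration of the same deletion via the public pdf.js API is sketched below; the storage keys and values simply mirror the manifest entry, and the snippet is not the test harness's actual code.

import { getDocument } from "pdfjs-dist";

const pdfDoc = await getDocument("pdfs/freetexts.pdf").promise;

// Mark two existing annotations as deleted, as the manifest entry does.
pdfDoc.annotationStorage.setValue("pdfjs_internal_editor_0", {
  deleted: true,
  pageIndex: 0,
  id: "26R",
});
pdfDoc.annotationStorage.setValue("pdfjs_internal_editor_3", {
  deleted: true,
  pageIndex: 0,
  id: "51R",
});

// Saving goes through the incremental-update path patched in this commit,
// so the two freed objects get proper free entries in the new xref section.
const savedBytes = await pdfDoc.saveDocument();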
test/unit/writer_spec.js
@@ -247,4 +247,71 @@ describe("Writer", function () {
       expect(data).toEqual(expected);
     });
   });
+
+  it("should update a file with a deleted object", async function () {
+    const originalData = new Uint8Array();
+    const newRefs = [
+      { ref: Ref.get(123, 0x2d), data: null },
+      { ref: Ref.get(456, 0x4e), data: "abc\n" },
+    ];
+    const xrefInfo = {
+      newRef: Ref.get(789, 0),
+      startXRef: 314,
+      fileIds: ["id", ""],
+      rootRef: null,
+      infoRef: null,
+      encryptRef: null,
+      filename: "foo.pdf",
+      info: {},
+    };
+
+    let data = await incrementalUpdate({
+      originalData,
+      xrefInfo,
+      newRefs,
+      useXrefStream: true,
+    });
+    data = bytesToString(data);
+
+    let expected =
+      "\nabc\n" +
+      "789 0 obj\n" +
+      "<< /Prev 314 /Size 790 /Type /XRef /Index [123 1 456 1 789 1] " +
+      "/W [1 1 1] /ID [(id) (\x01#Eg\x89\xab\xcd\xef\xfe\xdc\xba\x98vT2\x10)] " +
+      "/Length 9>> stream\n" +
+      "\x00\x00\x2e" +
+      "\x01\x01\x4e" +
+      "\x01\x05\x00\n" +
+      "endstream\n" +
+      "endobj\n" +
+      "startxref\n" +
+      "5\n" +
+      "%%EOF\n";
+    expect(data).toEqual(expected);
+
+    data = await incrementalUpdate({
+      originalData,
+      xrefInfo,
+      newRefs,
+      useXrefStream: false,
+    });
+    data = bytesToString(data);
+
+    expected =
+      "\nabc\n" +
+      "xref\n" +
+      "123 1\n" +
+      "0000000000 00046 f\r\n" +
+      "456 1\n" +
+      "0000000001 00078 n\r\n" +
+      "789 1\n" +
+      "0000000005 00000 n\r\n" +
+      "trailer\n" +
+      "<< /Prev 314 /Size 789 " +
+      "/ID [(id) (\x01#Eg\x89\xab\xcd\xef\xfe\xdc\xba\x98vT2\x10)]>>\n" +
+      "startxref\n" +
+      "5\n" +
+      "%%EOF\n";
+    expect(data).toEqual(expected);
+  });
 });
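Reading the expected strings back shows the new behaviour directly. With /W [1 1 1] every row of the xref stream is three one-byte fields, so "\x00\x00\x2e" is a free entry for object 123 (type 0, next free object 0, generation 0x2d + 1 = 46), "\x01\x01\x4e" is object 456 in use at offset 1 with generation 0x4e = 78, and "\x01\x05\x00" is the xref stream object 789 at offset 5. The classic table states the same facts as "0000000000 00046 f" and "0000000001 00078 n". A tiny decoder for such /W [1 1 1] rows, just to make the byte layout explicit (decodeXrefStreamRow is a hypothetical helper):

// Decode one 3-byte xref-stream row written with /W [1 1 1].
function decodeXrefStreamRow([type, second, gen]) {
  return type === 0
    ? { type: "free", nextFreeObject: second, generation: gen }
    : { type: "in-use", offset: second, generation: gen };
}

decodeXrefStreamRow([0x00, 0x00, 0x2e]); // { type: "free", nextFreeObject: 0, generation: 46 }
decodeXrefStreamRow([0x01, 0x01, 0x4e]); // { type: "in-use", offset: 1, generation: 78 }
decodeXrefStreamRow([0x01, 0x05, 0x00]); // { type: "in-use", offset: 5, generation: 0 }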