入口
// Patch generation entry point. Three phases:
// 1) diff old/new APK into per-type patch artifacts (dex/so/res/manifest/ark);
//gen patch
ApkDecoder decoder = new ApkDecoder(mConfig);
decoder.onAllPatchesStart();
decoder.patch(mConfig.mOldApkFile, mConfig.mNewApkFile);
decoder.onAllPatchesEnd();
// 2) write the patch meta file and version file describing this patch;
//gen meta file and version file
PatchInfo info = new PatchInfo(mConfig);
info.gen();
// 3) package all generated artifacts into the final patch package.
//build patch
PatchBuilder builder = new PatchBuilder(mConfig);
builder.buildPatch();
ApkDecoder构造函数
/**
 * Builds the top-level APK decoder and wires up one sub-decoder per patch
 * artifact type (manifest, dex, so, res, ark-hot). Each sub-decoder writes
 * its meta file under the assets directory of the patch package.
 *
 * @param config global patch configuration (paths, patterns, switches)
 * @throws IOException if any sub-decoder fails to initialize its output files
 */
public ApkDecoder(Configuration config) throws IOException {
super(config);
this.mNewApkDir = config.mTempUnzipNewDir;
this.mOldApkDir = config.mTempUnzipOldDir;
this.manifestDecoder = new ManifestDecoder(config);// decoder for AndroidManifest changes
//put meta files in assets
String prePath = TypedValue.FILE_ASSETS + File.separator;
dexPatchDecoder = new UniqueDexDiffDecoder(config, prePath + TypedValue.DEX_META_FILE, TypedValue.DEX_LOG_FILE);
soPatchDecoder = new BsDiffDecoder(config, prePath + TypedValue.SO_META_FILE, TypedValue.SO_LOG_FILE);
resPatchDecoder = new ResDiffDecoder(config, prePath + TypedValue.RES_META_TXT, TypedValue.RES_LOG_FILE);
arkHotDecoder = new ArkHotDecoder(config, prePath + TypedValue.ARKHOT_META_TXT);
Logger.d("config: " + config.mArkHotPatchPath + " " + config.mArkHotPatchName + prePath + TypedValue.ARKHOT_META_TXT);
resDuplicateFiles = new ArrayList<>();
}
BaseDecoder
ApkDecoder#patch()
/**
 * Drives the whole diff: checks the manifest, unzips both APKs, walks every
 * file of the new APK dispatching it to the matching sub-decoder, then lets
 * each sub-decoder finalize and clean up.
 *
 * @param oldFile the base APK
 * @param newFile the new APK to diff against the base
 * @return always {@code true}; failures are reported via exceptions
 * @throws Exception propagated from any sub-decoder
 */
public boolean patch(File oldFile, File newFile) throws Exception {
writeToLogFile(oldFile, newFile);
//check manifest change first
manifestDecoder.patch(oldFile, newFile);
unzipApkFiles(oldFile, newFile); // unzip both the old and the new apk
Files.walkFileTree(mNewApkDir.toPath(), new ApkFilesVisitor(config, mNewApkDir.toPath(), mOldApkDir.toPath(), dexPatchDecoder, soPatchDecoder, resPatchDecoder));
// get all duplicate resource file
// (files matching both the res pattern and a dex/library pattern are
// deliberately NOT patched as resources — only warned about)
for (File duplicateRes : resDuplicateFiles) {
// resPatchDecoder.patch(duplicateRes, null);
Logger.e("Warning: res file %s is also match at dex or library pattern, "
+ "we treat it as unchanged in the new resource_out.zip", getRelativePathStringToOldFile(duplicateRes));
}
soPatchDecoder.onAllPatchesEnd();
dexPatchDecoder.onAllPatchesEnd();
manifestDecoder.onAllPatchesEnd();
resPatchDecoder.onAllPatchesEnd();
arkHotDecoder.onAllPatchesEnd();
//clean resources
dexPatchDecoder.clean();
soPatchDecoder.clean();
resPatchDecoder.clean();
arkHotDecoder.clean();
return true;
}
DexDiffDecoder#onAllPatchesEnd()
@Override
public void onAllPatchesEnd() throws Exception {
if (!hasDexChanged) {
Logger.d("No dexes were changed, nothing needs to be done next.");
return;
}
checkIfLoaderClassesReferToNonLoaderClasses();
if (config.mIsProtectedApp) {
generateChangedClassesDexFile();
} else {
//生成补丁文件
generatePatchInfoFile();
}
addTestDex();
}
DexDiffDecoder#generatePatchInfoFile()
/**
 * Produces the dex patch artifacts: diffs each dex pair, logs every dex
 * into dex-meta, and verifies no class moved across dex files.
 *
 * @throws IOException if writing any patch or meta file fails
 */
private void generatePatchInfoFile() throws IOException {
generatePatchedDexInfoFile();
// generateSmallPatchedDexInfoFile is blocked by issue we found in ART environment
// which indicates that if inline optimization is done on patched class, some error
// such as crash, ClassCastException, mistaken string fetching, etc. would happen.
//
// Instead, we will log all classN dexes as 'copy directly' in dex-meta, so that
// tinker patch applying procedure will copy them out and load them in ART environment.
//generateSmallPatchedDexInfoFile();
logDexesToDexMeta();
checkCrossDexMovingClasses();
}
DexDiffDecoder#generatePatchedDexInfoFile()
/**
 * Walks every (old, new) dex pair: pairs whose MD5s differ are run through
 * the dex diff algorithm; identical pairs are still recorded as "patched"
 * dexes so downstream analysis (SmallPatchGenerator) can inspect them.
 *
 * @throws IOException if diffing or CRC computation fails
 */
private void generatePatchedDexInfoFile() throws IOException {
    for (AbstractMap.SimpleEntry<File, File> dexPair : oldAndNewDexFilePairList) {
        final File oldDexFile = dexPair.getKey();
        final File newDexFile = dexPair.getValue();
        final String dexName = getRelativeDexName(oldDexFile, newDexFile);
        final RelatedInfo relatedInfo = dexNameToRelatedInfoMap.get(dexName);
        if (relatedInfo.oldMd5.equals(relatedInfo.newMd5)) {
            // The new dex equals the old one, but we still treat it as a
            // patched dex file so that the SmallPatchGenerator can analyze
            // which classes of this dex should be kept in the small patch.
            relatedInfo.newOrFullPatchedFile = newDexFile;
            relatedInfo.newOrFullPatchedMd5 = relatedInfo.newMd5;
            relatedInfo.newOrFullPatchedCRC = FileOperation.getFileCrc32(newDexFile);
        } else {
            // Pair differs: generate dex diff output and the full patched dex.
            diffDexPairAndFillRelatedInfo(oldDexFile, newDexFile, relatedInfo);
        }
    }
}
DexDiffDecoder#diffDexPairAndFillRelatedInfo()
/**
 * Diffs one dex pair: generates the dex diff file, then applies it back to
 * the old dex to produce a temporary full-patched dex, verifies the patched
 * dex is logically identical to the real new dex, and records file/MD5/CRC
 * information into {@code relatedInfo}.
 *
 * @param oldDexFile  dex from the base APK
 * @param newDexFile  dex from the new APK
 * @param relatedInfo receives diff file, MD5s and CRC of the results
 */
private void diffDexPairAndFillRelatedInfo(File oldDexFile, File newDexFile, RelatedInfo relatedInfo) {
File tempFullPatchDexPath = new File(config.mOutFolder + File.separator + TypedValue.DEX_TEMP_PATCH_DIR);
final String dexName = getRelativeDexName(oldDexFile, newDexFile);
File dexDiffOut = getOutputPath(newDexFile).toFile();
ensureDirectoryExist(dexDiffOut.getParentFile());
try {
// create the dex patch generator for this old/new pair
DexPatchGenerator dexPatchGen = new DexPatchGenerator(oldDexFile, newDexFile);
dexPatchGen.setAdditionalRemovingClassPatterns(config.mDexLoaderPattern);
logWriter.writeLineToInfoFile(
String.format(
"Start diff between [%s] as old and [%s] as new:",
getRelativeStringBy(oldDexFile, config.mTempUnzipOldDir),
getRelativeStringBy(newDexFile, config.mTempUnzipNewDir)
)
);
// run the diff and write the dex diff file
dexPatchGen.executeAndSaveTo(dexDiffOut);
} catch (Exception e) {
throw new TinkerPatchException(e);
}
if (!dexDiffOut.exists()) {
throw new TinkerPatchException("can not find the diff file:" + dexDiffOut.getAbsolutePath());
}
relatedInfo.dexDiffFile = dexDiffOut;
relatedInfo.dexDiffMd5 = MD5.getMD5(dexDiffOut);
Logger.d("\nGen %s patch file:%s, size:%d, md5:%s", dexName, relatedInfo.dexDiffFile.getAbsolutePath(), relatedInfo.dexDiffFile.length(), relatedInfo.dexDiffMd5);
File tempFullPatchedDexFile = new File(tempFullPatchDexPath, dexName);
if (!tempFullPatchedDexFile.exists()) {
ensureDirectoryExist(tempFullPatchedDexFile.getParentFile());
}
try {
// Sanity check: re-apply the freshly generated diff onto the old dex and
// verify the result is logically the same as the real new dex.
new DexPatchApplier(oldDexFile, dexDiffOut).executeAndSaveTo(tempFullPatchedDexFile);
Logger.d(
String.format("Verifying if patched new dex is logically the same as original new dex: %s ...", getRelativeStringBy(newDexFile, config.mTempUnzipNewDir))
);
Dex origNewDex = new Dex(newDexFile);
Dex patchedNewDex = new Dex(tempFullPatchedDexFile);
checkDexChange(origNewDex, patchedNewDex);
relatedInfo.newOrFullPatchedFile = tempFullPatchedDexFile;
relatedInfo.newOrFullPatchedMd5 = MD5.getMD5(tempFullPatchedDexFile);
relatedInfo.newOrFullPatchedCRC = FileOperation.getFileCrc32(tempFullPatchedDexFile);
} catch (Exception e) {
e.printStackTrace();
throw new TinkerPatchException(
"Failed to generate temporary patched dex, which makes MD5 generating procedure of new dex failed, either.", e
);
}
if (!tempFullPatchedDexFile.exists()) {
throw new TinkerPatchException("can not find the temporary full patched dex file:" + tempFullPatchedDexFile.getAbsolutePath());
}
Logger.d("\nGen %s for dalvik full dex file:%s, size:%d, md5:%s", dexName, tempFullPatchedDexFile.getAbsolutePath(), tempFullPatchedDexFile.length(), relatedInfo.newOrFullPatchedMd5);
}
DexPatchGenerator构造函数
/**
 * Convenience constructor: parses both dex files from disk and delegates
 * to the {@code (Dex, Dex)} constructor.
 *
 * @throws IOException if either dex file cannot be read or parsed
 */
public DexPatchGenerator(File oldDexFile, File newDexFile) throws IOException {
this(new Dex(oldDexFile), new Dex(newDexFile));
}
/**
 * Builds the generator and instantiates one diff algorithm per dex section.
 * The four SparseIndexMaps record index/offset mappings (old→new,
 * old→patched, new→patched, plus a skip map) shared by all section
 * algorithms so that later sections can translate indices produced by
 * earlier ones.
 */
public DexPatchGenerator(Dex oldDex, Dex newDex) {
this.oldDex = oldDex;
this.newDex = newDex;
SparseIndexMap oldToNewIndexMap = new SparseIndexMap();
SparseIndexMap oldToPatchedIndexMap = new SparseIndexMap();
SparseIndexMap newToPatchedIndexMap = new SparseIndexMap();
SparseIndexMap selfIndexMapForSkip = new SparseIndexMap();
additionalRemovingClassPatternSet = new HashSet<>();
// create the per-section diff algorithms
this.stringDataSectionDiffAlg = new StringDataSectionDiffAlgorithm(
oldDex, newDex,
oldToNewIndexMap,
oldToPatchedIndexMap,
newToPatchedIndexMap,
selfIndexMapForSkip
);
// the remaining section algorithms are omitted in this excerpt
//...
}
Dex构造函数
/**
 * Loads a dex image either from an archive (a file with an archive suffix,
 * e.g. .jar/.apk/.zip, containing a classes.dex entry) or directly from a
 * .dex file.
 *
 * Uses try-with-resources instead of the original manual close-in-finally
 * blocks: the streams and the ZipFile are closed deterministically, and
 * close failures surface as suppressed exceptions instead of being
 * silently swallowed.
 *
 * @param file the archive or dex file to read; must not be null
 * @throws IllegalArgumentException if {@code file} is null
 * @throws DexException if the archive lacks a classes.dex entry, the file
 *                      extension is unknown, or a .dex file fails to load
 * @throws IOException  if reading the archive entry fails
 */
public Dex(File file) throws IOException {
    if (file == null) {
        throw new IllegalArgumentException("file is null.");
    }
    if (FileUtils.hasArchiveSuffix(file.getName())) {
        // Archive case: locate and read the embedded classes.dex entry.
        try (ZipFile zipFile = new ZipFile(file)) {
            ZipEntry entry = zipFile.getEntry(DexFormat.DEX_IN_JAR_NAME);
            if (entry == null) {
                throw new DexException("Expected " + DexFormat.DEX_IN_JAR_NAME + " in " + file);
            }
            try (InputStream inputStream = zipFile.getInputStream(entry)) {
                loadFrom(inputStream, (int) entry.getSize());
            }
        }
    } else if (file.getName().endsWith(".dex")) {
        // Plain dex case: buffered read of the whole file.
        try (InputStream in = new BufferedInputStream(new FileInputStream(file))) {
            loadFrom(in, (int) file.length());
        } catch (Exception e) {
            // Preserve original behavior: surface any read failure as DexException.
            throw new DexException(e);
        }
    } else {
        throw new DexException("unknown output extension: " + file);
    }
}
Dex#loadFrom()
/**
 * Reads the whole dex image from the stream into a little-endian
 * ByteBuffer and parses its table of contents.
 *
 * @param in       stream positioned at the start of the dex image
 * @param initSize initial buffer size hint (entry/file length)
 * @throws IOException if reading the stream or parsing the TOC fails
 */
private void loadFrom(InputStream in, int initSize) throws IOException {
byte[] rawData = FileUtils.readStream(in, initSize);
this.data = ByteBuffer.wrap(rawData);
this.data.order(ByteOrder.LITTLE_ENDIAN); // dex files are little-endian by spec
this.tableOfContents.readFrom(this);
}
TableOfContents
TableOfContents存储header相关数据
TableOfContents#readFrom()
/**
 * Populates the table of contents from a dex image: first the fixed-size
 * header, then the map_list, and finally the derived byte counts.
 */
public void readFrom(Dex dex) throws IOException {
readHeader(dex.openSection(header)); // read the header_item fields
// special case, since mapList.byteCount is available only after
// computeSizesFromOffsets() was invoked, so here we can't use
// dex.openSection(mapList) to get dex section. Or
// an {@code java.nio.BufferUnderflowException} will be thrown.
readMap(dex.openSection(mapList.off));
computeSizesFromOffsets();
}
TableOfContents.Section
通过解析map_list,获取每个部分的信息并封装到Section中。
// Describes one dex section as parsed from the header/map_list: its type
// code, element count, file offset and (derived) byte count.
// NOTE(review): compareTo(Section) is not shown in this excerpt.
public static class Section implements Comparable<Section>{
public static final int UNDEF_INDEX = -1;
public static final int UNDEF_OFFSET = -1;
public final short type; // section type code from the dex map_list
public boolean isElementFourByteAligned;
public int size = 0; // number of elements in this section
public int off = UNDEF_OFFSET; // byte offset of the section within the dex file
public int byteCount = 0;
}
TableOfContents#openSection()
/**
 * <b>IMPORTANT</b> To open a dex section by {@code TableOfContents.Section},
 * please use {@code openSection(TableOfContents.Section tocSec)} instead of
 * passing tocSec.off to this method.
 *
 * <b>Because dex section returned by this method never checks
 * tocSec's bound when reading or writing data.</b>
 */
// Opens a bounded view over this dex's data for the given TOC section.
public Section openSection(TableOfContents.Section tocSec) {
int position = tocSec.off; // starting byte offset of the section
if (position < 0 || position >= data.capacity()) {
throw new IllegalArgumentException(
"position=" + position + " length=" + data.capacity()
);
}
// duplicate() shares the backing bytes but gives an independent
// position/limit, so the returned Section cannot disturb other readers
ByteBuffer sectionData = data.duplicate();
sectionData.order(ByteOrder.LITTLE_ENDIAN); // necessary?
sectionData.position(position);
sectionData.limit(position + tocSec.byteCount);
return new Section("section", sectionData);
}
TableOfContents#readHeader()
/**
 * Parses the dex header_item: validates the magic, header size and endian
 * tag, then reads the size/offset of every id section plus the map_list
 * offset in the exact order the dex format defines.
 *
 * @throws DexException if any validated field has an unexpected value
 */
private void readHeader(Dex.Section headerIn) throws UnsupportedEncodingException {
byte[] magic = headerIn.readByteArray(8); // read the dex magic bytes
int apiTarget = DexFormat.magicToApi(magic);
if (apiTarget != DexFormat.API_NO_EXTENDED_OPCODES) {
throw new DexException("Unexpected magic: " + Arrays.toString(magic));
}
checksum = headerIn.readInt();
signature = headerIn.readByteArray(20);
fileSize = headerIn.readInt();
int headerSize = headerIn.readInt();
if (headerSize != SizeOf.HEADER_ITEM) {
throw new DexException("Unexpected header: 0x" + Integer.toHexString(headerSize));
}
int endianTag = headerIn.readInt();
if (endianTag != DexFormat.ENDIAN_TAG) {
throw new DexException("Unexpected endian tag: 0x" + Integer.toHexString(endianTag));
}
linkSize = headerIn.readInt();
linkOff = headerIn.readInt();
mapList.off = headerIn.readInt();
if (mapList.off == 0) {
throw new DexException("Cannot merge dex files that do not contain a map");
}
// size/offset pairs for each id section, in dex header order
stringIds.size = headerIn.readInt();
stringIds.off = headerIn.readInt();
typeIds.size = headerIn.readInt();
typeIds.off = headerIn.readInt();
protoIds.size = headerIn.readInt();
protoIds.off = headerIn.readInt();
fieldIds.size = headerIn.readInt();
fieldIds.off = headerIn.readInt();
methodIds.size = headerIn.readInt();
methodIds.off = headerIn.readInt();
classDefs.size = headerIn.readInt();
classDefs.off = headerIn.readInt();
dataSize = headerIn.readInt();
dataOff = headerIn.readInt();
}
TableOfContents#readMap()
private void readMap(Dex.Section in) throws IOException {
int mapSize = in.readInt(); //获取Map大小
Section previous = null;
for (int i = 0; i < mapSize; i++) {
short type = in.readShort(); //获取类型
in.readShort(); // unused
Section section = getSection(type);
int size = in.readInt(); //获取大小
int offset = in.readInt(); //获取偏移量
if ((section.size != 0 && section.size != size)
|| (section.off != Section.UNDEF_OFFSET && secti
throw new DexException("Unexpected map value for 0x"
}
section.size = size; //获取大小
section.off = offset; //获取偏移量
if (previous != null && previous.off > section.off) {
throw new DexException("Map is unsorted at " + previ
}
previous = section;
}
header.off = 0;
Arrays.sort(sections);
// Skip header section, since its offset must be zero.
for (int i = 1; i < sections.length; ++i) {
if (sections[i].off == Section.UNDEF_OFFSET) {
sections[i].off = sections[i - 1].off;
}
}
}
DexPatchGenerator#executeAndSaveTo()
/**
 * Runs the full dex diff and writes the resulting patch to the given file.
 *
 * @param file destination patch file (overwritten if it exists)
 * @throws IOException if the diff fails or the file cannot be written
 */
public void executeAndSaveTo(File file) throws IOException {
    OutputStream output = null;
    try {
        output = new BufferedOutputStream(new FileOutputStream(file));
        executeAndSaveTo(output);
    } finally {
        // Close failures are deliberately ignored here.
        IOHelper.closeQuietly(output);
    }
}
/**
 * Runs the dex diff pipeline and streams the resulting patch.
 *
 * Steps: (1) resolve class-removal patterns against the new dex;
 * (2) pre-compute id-section sizes from the new dex TOC; (3) run each
 * section's diff algorithm followed immediately by its patch simulation,
 * in dependency order, accumulating patched-section offsets;
 * (4) serialize everything to {@code out}.
 *
 * NOTE(review): the algorithms after typeLists are elided in this excerpt.
 *
 * @param out destination stream for the serialized patch
 * @throws IOException if writing the patch fails
 */
public void executeAndSaveTo(OutputStream out) throws IOException {
// Firstly, collect information of items we want to remove additionally
// in new dex and set them to corresponding diff algorithm implementations.
Pattern[] classNamePatterns = new Pattern[this.additionalRemovingClassPatternSet.size()];
int classNamePatternCount = 0;
for (String regExStr : this.additionalRemovingClassPatternSet) {
classNamePatterns[classNamePatternCount++] = Pattern.compile(regExStr);
}
List<Integer> typeIdOfClassDefsToRemove = new ArrayList<>(classNamePatternCount);
List<Integer> offsetOfClassDatasToRemove = new ArrayList<>(classNamePatternCount);
for (ClassDef classDef : this.newDex.classDefs()) {
String typeName = this.newDex.typeNames().get(classDef.typeIndex);
for (Pattern pattern : classNamePatterns) {
if (pattern.matcher(typeName).matches()) {
typeIdOfClassDefsToRemove.add(classDef.typeIndex);
offsetOfClassDatasToRemove.add(classDef.classDataOffset);
break;
}
}
}
((ClassDefSectionDiffAlgorithm) this.classDefSectionDiffAlg)
.setTypeIdOfClassDefsToRemove(typeIdOfClassDefsToRemove);
((ClassDataSectionDiffAlgorithm) this.classDataSectionDiffAlg)
.setOffsetOfClassDatasToRemove(offsetOfClassDatasToRemove);
// Then, run diff algorithms according to sections' dependencies.
// Use size calculated by algorithms above or from dex file definition to
// calculate sections' offset and patched dex size.
// Calculate header and id sections size, so that we can work out
// the base offset of typeLists Section.
int patchedheaderSize = SizeOf.HEADER_ITEM;
int patchedStringIdsSize = newDex.getTableOfContents().stringIds.size * SizeOf.STRING_ID_ITEM;
int patchedTypeIdsSize = newDex.getTableOfContents().typeIds.size * SizeOf.TYPE_ID_ITEM;
// Although simulatePatchOperation can calculate this value, since protoIds section
// depends on typeLists section, we can't run protoIds Section's simulatePatchOperation
// method so far. Instead we calculate protoIds section's size using information in newDex
// directly.
int patchedProtoIdsSize = newDex.getTableOfContents().protoIds.size * SizeOf.PROTO_ID_ITEM;
int patchedFieldIdsSize = newDex.getTableOfContents().fieldIds.size * SizeOf.MEMBER_ID_ITEM;
int patchedMethodIdsSize = newDex.getTableOfContents().methodIds.size * SizeOf.MEMBER_ID_ITEM;
int patchedClassDefsSize = newDex.getTableOfContents().classDefs.size * SizeOf.CLASS_DEF_ITEM;
int patchedIdSectionSize =
patchedStringIdsSize
+ patchedTypeIdsSize
+ patchedProtoIdsSize
+ patchedFieldIdsSize
+ patchedMethodIdsSize
+ patchedClassDefsSize;
this.patchedHeaderOffset = 0;
// The diff works on each sections obey such procedure:
// 1. Execute diff algorithms to calculate indices of items we need to add, del and replace.
// 2. Execute patch algorithm simulation to calculate indices and offsets mappings that is
// necessary to next section's diff works.
// Immediately do the patch simulation so that we can know:
// 1. Indices and offsets mapping between old dex and patched dex.
// 2. Indices and offsets mapping between new dex and patched dex.
// These information will be used to do next diff works.
this.patchedStringIdsOffset = patchedHeaderOffset + patchedheaderSize;
if (this.oldDex.getTableOfContents().stringIds.isElementFourByteAligned) {
this.patchedStringIdsOffset
= SizeOf.roundToTimesOfFour(this.patchedStringIdsOffset);
}
// run the string-data diff algorithm (execute), then simulate patching
this.stringDataSectionDiffAlg.execute();
this.patchedStringDataItemsOffset = patchedheaderSize + patchedIdSectionSize;
if (this.oldDex.getTableOfContents().stringDatas.isElementFourByteAligned) {
this.patchedStringDataItemsOffset
= SizeOf.roundToTimesOfFour(this.patchedStringDataItemsOffset);
}
this.stringDataSectionDiffAlg.simulatePatchOperation(this.patchedStringDataItemsOffset);
this.typeIdSectionDiffAlg.execute();
this.patchedTypeIdsOffset = this.patchedStringIdsOffset + patchedStringIdsSize;
if (this.oldDex.getTableOfContents().typeIds.isElementFourByteAligned) {
this.patchedTypeIdsOffset
= SizeOf.roundToTimesOfFour(this.patchedTypeIdsOffset);
}
this.typeIdSectionDiffAlg.simulatePatchOperation(this.patchedTypeIdsOffset);
this.typeListSectionDiffAlg.execute();
this.patchedTypeListsOffset
= patchedheaderSize
+ patchedIdSectionSize
+ this.stringDataSectionDiffAlg.getPatchedSectionSize();
// the remaining section algorithms are omitted in this excerpt
// Finally, write results to patch file.
writeResultToStream(out);
}
DexSectionDiffAlgorithm#execute()
/**
 * Core per-section diff: performs a two-way merge over the sorted items of
 * the old and new dex sections, emitting DEL/ADD operations for items only
 * present on one side and index/offset mappings for items present on both.
 * A post-pass fuses a DEL followed by an ADD at the same index into a
 * single REPLACE, then the operations are indexed into per-op maps used by
 * the later patch-simulation step.
 */
public void execute() {
// clear the operation list from any previous run
this.patchOperationList.clear();
// collect the old dex's items (original order preserved separately)
this.adjustedOldIndexedItemsWithOrigOrder = collectSectionItems(this.oldDex, true);
this.oldItemCount = this.adjustedOldIndexedItemsWithOrigOrder.length;
AbstractMap.SimpleEntry<Integer, T>[] adjustedOldIndexedItems = new AbstractMap.SimpleEntry[this.oldItemCount];
// copy so we can sort without disturbing the original-order array
System.arraycopy(this.adjustedOldIndexedItemsWithOrigOrder, 0, adjustedOldIndexedItems, 0, this.oldItemCount);
// sort by item value so both sides can be merged in one pass
Arrays.sort(adjustedOldIndexedItems, this.comparatorForItemDiff);
// collect and sort the new dex's items
AbstractMap.SimpleEntry<Integer, T>[] adjustedNewIndexedItems = collectSectionItems(this.newDex, false);
this.newItemCount = adjustedNewIndexedItems.length;
Arrays.sort(adjustedNewIndexedItems, this.comparatorForItemDiff);
int oldCursor = 0;
int newCursor = 0;
while (oldCursor < this.oldItemCount || newCursor < this.newItemCount) {
if (oldCursor >= this.oldItemCount) {
// rest item are all newItem.
while (newCursor < this.newItemCount) {
AbstractMap.SimpleEntry<Integer, T> newIndexedItem = adjustedNewIndexedItems[newCursor++];
this.patchOperationList.add(new PatchOperation<>(PatchOperation.OP_ADD, newIndexedItem.getKey(), newIndexedItem.getValue()));
}
} else if (newCursor >= newItemCount) {
// rest item are all oldItem.
while (oldCursor < oldItemCount) {
AbstractMap.SimpleEntry<Integer, T> oldIndexedItem = adjustedOldIndexedItems[oldCursor++];
int deletedIndex = oldIndexedItem.getKey();
int deletedOffset = getItemOffsetOrIndex(deletedIndex, oldIndexedItem.getValue());
this.patchOperationList.add(new PatchOperation<T>(PatchOperation.OP_DEL, deletedIndex));
markDeletedIndexOrOffset(this.oldToPatchedIndexMap, deletedIndex, deletedOffset);
}
} else {
AbstractMap.SimpleEntry<Integer, T> oldIndexedItem = adjustedOldIndexedItems[oldCursor];
AbstractMap.SimpleEntry<Integer, T> newIndexedItem = adjustedNewIndexedItems[newCursor];
int cmpRes = oldIndexedItem.getValue().compareTo(newIndexedItem.getValue());
// two-way merge over the two sorted item arrays
if (cmpRes < 0) {
// old item is smaller -> it was deleted: record an OP_DEL with the
// old item's index and append it to patchOperationList.
int deletedIndex = oldIndexedItem.getKey();
int deletedOffset = getItemOffsetOrIndex(deletedIndex, oldIndexedItem.getValue());
this.patchOperationList.add(new PatchOperation<T>(PatchOperation.OP_DEL, deletedIndex));
markDeletedIndexOrOffset(this.oldToPatchedIndexMap, deletedIndex, deletedOffset);
++oldCursor;
} else if (cmpRes > 0) {
// new item is smaller -> it was added: record an OP_ADD with the
// new item's index and value and append it to patchOperationList.
this.patchOperationList.add(new PatchOperation<>(PatchOperation.OP_ADD, newIndexedItem.getKey(), newIndexedItem.getValue()));
++newCursor;
} else {
// equal items: no operation, but record any index/offset remapping
int oldIndex = oldIndexedItem.getKey();
int newIndex = newIndexedItem.getKey();
int oldOffset = getItemOffsetOrIndex(oldIndexedItem.getKey(), oldIndexedItem.getValue());
int newOffset = getItemOffsetOrIndex(newIndexedItem.getKey(), newIndexedItem.getValue());
if (oldIndex != newIndex) {
this.oldIndexToNewIndexMap.put(oldIndex, newIndex);
}
if (oldOffset != newOffset) {
this.oldOffsetToNewOffsetMap.put(oldOffset, newOffset);
}
++oldCursor;
++newCursor;
}
}
}
// So far all diff works are done. Then we perform some optimize works.
// detail: {OP_DEL idx} followed by {OP_ADD the_same_idx newItem}
// will be replaced by {OP_REPLACE idx newItem}
Collections.sort(this.patchOperationList, comparatorForPatchOperationOpt);
Iterator<PatchOperation<T>> patchOperationIt = this.patchOperationList.iterator();
PatchOperation<T> prevPatchOperation = null;
while (patchOperationIt.hasNext()) {
PatchOperation<T> patchOperation = patchOperationIt.next();
if (prevPatchOperation != null
&& prevPatchOperation.op == PatchOperation.OP_DEL
&& patchOperation.op == PatchOperation.OP_ADD
) {
if (prevPatchOperation.index == patchOperation.index) {
prevPatchOperation.op = PatchOperation.OP_REPLACE;
prevPatchOperation.newItem = patchOperation.newItem;
patchOperationIt.remove();
prevPatchOperation = null;
} else {
prevPatchOperation = patchOperation;
}
} else {
prevPatchOperation = patchOperation;
}
}
// Finally we record some information for the final calculations.
patchOperationIt = this.patchOperationList.iterator();
while (patchOperationIt.hasNext()) {
PatchOperation<T> patchOperation = patchOperationIt.next();
switch (patchOperation.op) {
case PatchOperation.OP_DEL: {
indexToDelOperationMap.put(patchOperation.index, patchOperation);
break;
}
case PatchOperation.OP_ADD: {
indexToAddOperationMap.put(patchOperation.index, patchOperation);
break;
}
case PatchOperation.OP_REPLACE: {
indexToReplaceOperationMap.put(patchOperation.index, patchOperation);
break;
}
default: {
break;
}
}
}
}