/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
/*
* This file is part of the LibreOffice project.
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
* This file incorporates work covered by the following license notice:
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.apache.org/licenses/LICENSE-2.0 .
*/
#include <hintids.hxx>
#include <vcl/vclenum.hxx>
#include <editeng/crossedoutitem.hxx>
#include <editeng/colritem.hxx>
#include <editeng/boxitem.hxx>
#include <editeng/svxenum.hxx>
#include <editeng/udlnitem.hxx>
#include <swmodule.hxx>
#include <doc.hxx>
#include <IDocumentUndoRedo.hxx>
#include <DocumentContentOperationsManager.hxx>
#include <IDocumentRedlineAccess.hxx>
#include <IDocumentState.hxx>
#include <docary.hxx>
#include <pam.hxx>
#include <ndtxt.hxx>
#include <redline.hxx>
#include <UndoRedline.hxx>
#include <section.hxx>
#include <tox.hxx>
#include <docsh.hxx>
#include <fmtcntnt.hxx>
#include <modcfg.hxx>
#include <com/sun/star/document/XDocumentPropertiesSupplier.hpp>
#include <com/sun/star/document/XDocumentProperties.hpp>
#include <cstddef>
#include <list>
#include <memory>
#include <vector>
using namespace ::com::sun::star;
using std::vector;
class SwCompareLine
{
const SwNode& rNode;
public:
explicit SwCompareLine( const SwNode& rNd ) : rNode( rNd ) {}
sal_uLong GetHashValue() const;
bool Compare( const SwCompareLine& rLine ) const;
static sal_uLong GetTextNodeHashValue( const SwTextNode& rNd, sal_uLong nVal );
static bool CompareNode( const SwNode& rDstNd, const SwNode& rSrcNd );
static bool CompareTextNd( const SwTextNode& rDstNd,
const SwTextNode& rSrcNd );
bool ChangesInLine( const SwCompareLine& rLine,
SwPaM *& rpInsRing, SwPaM*& rpDelRing ) const;
const SwNode& GetNode() const { return rNode; }
const SwNode& GetEndNode() const;
// for debugging
OUString GetText() const;
};
class CompareData
{
protected:
SwDoc& rDoc;
private:
std::unique_ptr<size_t[]> pIndex;
std::unique_ptr<bool[]> pChangedFlag;
SwPaM *pInsRing, *pDelRing;
static sal_uLong PrevIdx( const SwNode* pNd );
static sal_uLong NextIdx( const SwNode* pNd );
vector< SwCompareLine* > aLines;
bool m_bRecordDiff;
// Truncate beginning and end and add all others to the LinesArray
void CheckRanges( CompareData& );
virtual const SwNode& GetEndOfContent() = 0;
public:
CompareData(SwDoc& rD, bool bRecordDiff)
: rDoc( rD ), pIndex( nullptr ), pChangedFlag( nullptr ), pInsRing(nullptr), pDelRing(nullptr)
, m_bRecordDiff(bRecordDiff)
{
}
virtual ~CompareData();
// Are there differences?
bool HasDiffs( const CompareData& rData ) const;
// Triggers the comparison of the two documents
void CompareLines( CompareData& rData );
// Display the differences - calls the methods ShowInsert and ShowDelete.
// These are passed the start and end line number.
// Displaying the actual content is to be handled by the subclass!
sal_uLong ShowDiffs( const CompareData& rData );
void ShowInsert( sal_uLong nStt, sal_uLong nEnd );
void ShowDelete( const CompareData& rData, sal_uLong nStt,
sal_uLong nEnd, sal_uLong nInsPos );
void CheckForChangesInLine( const CompareData& rData,
sal_uLong& nStt, sal_uLong& nEnd,
sal_uLong& nThisStt, sal_uLong& nThisEnd );
// Set an unambiguous index for a line. Identical lines get the same index, even in the other CompareData!
void SetIndex( size_t nLine, size_t nIndex );
size_t GetIndex( size_t nLine ) const
{ return nLine < aLines.size() ? pIndex[ nLine ] : 0; }
// Set/get whether a line has changed
void SetChanged( size_t nLine, bool bFlag = true );
bool GetChanged( size_t nLine ) const
{
return (pChangedFlag && nLine < aLines.size())
&& pChangedFlag[ nLine ];
}
size_t GetLineCount() const { return aLines.size(); }
const SwCompareLine* GetLine( size_t nLine ) const
{ return aLines[ nLine ]; }
void InsertLine( SwCompareLine* pLine )
{ aLines.push_back( pLine ); }
void SetRedlinesToDoc( bool bUseDocInfo );
};
class CompareMainText : public CompareData
{
public:
CompareMainText(SwDoc &rD, bool bRecordDiff=true)
: CompareData(rD, bRecordDiff)
{
}
virtual const SwNode& GetEndOfContent() override
{
return rDoc.GetNodes().GetEndOfContent();
}
};
class CompareFrameFormatText : public CompareData
{
const SwNodeIndex &m_rIndex;
public:
CompareFrameFormatText(SwDoc &rD, const SwNodeIndex &rIndex)
: CompareData(rD, true/*bRecordDiff*/)
, m_rIndex(rIndex)
{
}
virtual const SwNode& GetEndOfContent() override
{
return *m_rIndex.GetNode().EndOfSectionNode();
}
};
class Hash
{
struct HashData
{
sal_uLong nNext, nHash;
const SwCompareLine* pLine;
HashData()
: nNext( 0 ), nHash( 0 ), pLine(nullptr) {}
};
std::unique_ptr<sal_uLong[]> pHashArr;
std::unique_ptr<HashData[]> pDataArr;
sal_uLong nCount, nPrime;
public:
explicit Hash( sal_uLong nSize );
void CalcHashValue( CompareData& rData );
sal_uLong GetCount() const { return nCount; }
};
class Compare
{
public:
class MovedData
{
std::unique_ptr<sal_uLong[]> pIndex;
std::unique_ptr<sal_uLong[]> pLineNum;
sal_uLong nCount;
public:
MovedData( CompareData& rData, const sal_Char* pDiscard );
sal_uLong GetIndex( sal_uLong n ) const { return pIndex[ n ]; }
sal_uLong GetLineNum( sal_uLong n ) const { return pLineNum[ n ]; }
sal_uLong GetCount() const { return nCount; }
};
private:
/// Look for the moved lines
class CompareSequence
{
CompareData &rData1, &rData2;
const MovedData &rMoved1, &rMoved2;
std::unique_ptr<long[]> pMemory;
long *pFDiag, *pBDiag;
void Compare( sal_uLong nStt1, sal_uLong nEnd1, sal_uLong nStt2, sal_uLong nEnd2 );
sal_uLong CheckDiag( sal_uLong nStt1, sal_uLong nEnd1,
sal_uLong nStt2, sal_uLong nEnd2, sal_uLong* pCost );
public:
CompareSequence( CompareData& rD1, CompareData& rD2,
const MovedData& rMD1, const MovedData& rMD2 );
};
static void CountDifference( const CompareData& rData, sal_uLong* pCounts );
static void SetDiscard( const CompareData& rData,
sal_Char* pDiscard, const sal_uLong* pCounts );
static void CheckDiscard( sal_uLong nLen, sal_Char* pDiscard );
static void ShiftBoundaries( CompareData& rData1, CompareData& rData2 );
public:
Compare( sal_uLong nDiff, CompareData& rData1, CompareData& rData2 );
};
class ArrayComparator
{
public:
virtual bool Compare( int nIdx1, int nIdx2 ) const = 0;
virtual int GetLen1() const = 0;
virtual int GetLen2() const = 0;
virtual ~ArrayComparator() {}
};
/// Consider two lines equal if similar enough (e.g. look like different
/// versions of the same paragraph)
class LineArrayComparator : public ArrayComparator
{
private:
int nLen1, nLen2;
const CompareData &rData1, &rData2;
int nFirst1, nFirst2;
public:
LineArrayComparator( const CompareData &rD1, const CompareData &rD2,
int nStt1, int nEnd1, int nStt2, int nEnd2 );
virtual bool Compare( int nIdx1, int nIdx2 ) const override;
virtual int GetLen1() const override { return nLen1; }
virtual int GetLen2() const override { return nLen2; }
};
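/// Compares two text nodes word by word: CalcPositions records the start
/// position of every word, Compare() checks whether the nIdx1-th word of one
/// node equals the nIdx2-th word of the other, and GetCharSequence maps a
/// word-level LCS back to character positions (as used by ChangesInLine below).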
class WordArrayComparator : public ArrayComparator
{
private:
const SwTextNode *pTextNd1, *pTextNd2;
std::unique_ptr<int[]> pPos1, pPos2;
int nCnt1, nCnt2; // number of words
static void CalcPositions( int *pPos, const SwTextNode *pTextNd, int &nCnt );
public:
WordArrayComparator( const SwTextNode *pNode1, const SwTextNode *pNode2 );
virtual bool Compare( int nIdx1, int nIdx2 ) const override;
virtual int GetLen1() const override { return nCnt1; }
virtual int GetLen2() const override { return nCnt2; }
int GetCharSequence( const int *pWordLcs1, const int *pWordLcs2,
int *pSubseq1, int *pSubseq2, int nLcsLen );
};
class CharArrayComparator : public ArrayComparator
{
private:
const SwTextNode *pTextNd1, *pTextNd2;
public:
CharArrayComparator( const SwTextNode *pNode1, const SwTextNode *pNode2 )
: pTextNd1( pNode1 ), pTextNd2( pNode2 )
{
}
virtual bool Compare( int nIdx1, int nIdx2 ) const override;
virtual int GetLen1() const override { return pTextNd1->GetText().getLength(); }
virtual int GetLen2() const override { return pTextNd2->GetText().getLength(); }
};
/// Options set in Tools->Options->Writer->Comparison
struct CmpOptionsContainer
{
SwCompareMode eCmpMode;
int nIgnoreLen;
bool bUseRsid;
};
static CmpOptionsContainer CmpOptions;
class CommonSubseq
{
private:
std::unique_ptr<int[]> pData;
int nSize;
protected:
ArrayComparator &rCmp;
CommonSubseq( ArrayComparator &rComparator, int nMaxSize )
: nSize( nMaxSize ), rCmp( rComparator )
{
pData.reset( new int[ nSize ] );
}
~CommonSubseq()
{
}
int FindLCS( int *pLcs1, int *pLcs2, int nStt1,
int nEnd1 = 0, int nStt2 = 0, int nEnd2 = 0 );
public:
static int IgnoreIsolatedPieces( int *pLcs1, int *pLcs2, int nLen1, int nLen2,
int nLcsLen, int nPieceLen );
};
/// Use Hirschberg's algorithm to find LCS in linear space
class LgstCommonSubseq: public CommonSubseq
{
private:
static const int CUTOFF = 1<<20; // Stop recursion at this value
std::unique_ptr<int[]> pL1, pL2;
std::unique_ptr<int[]> pBuff1, pBuff2;
void FindL( int *pL, int nStt1, int nEnd1, int nStt2, int nEnd2 );
int HirschbergLCS( int *pLcs1, int *pLcs2, int nStt1, int nEnd1,
int nStt2, int nEnd2 );
public:
explicit LgstCommonSubseq( ArrayComparator &rComparator );
int Find( int *pSubseq1, int *pSubseq2 );
};
/// Find a common subsequence in linear time
class FastCommonSubseq: private CommonSubseq
{
private:
static const int CUTOFF = 2056;
int FindFastCS( int *pSeq1, int *pSeq2, int nStt1, int nEnd1,
int nStt2, int nEnd2 );
public:
explicit FastCommonSubseq( ArrayComparator &rComparator )
: CommonSubseq( rComparator, CUTOFF )
{
}
int Find( int *pSubseq1, int *pSubseq2 )
{
return FindFastCS( pSubseq1, pSubseq2, 0, rCmp.GetLen1(),
0, rCmp.GetLen2() );
}
};
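// Rough overview of the machinery implemented below:
//  1. CheckRanges trims nodes that are identical at the start and end of both
//     documents and wraps everything in between into SwCompareLines.
//  2. Hash/CalcHashValue give every line an index; lines with equal content
//     share an index, even across the two CompareData objects.
//  3. Compare discards lines that cannot match (CountDifference, SetDiscard,
//     CheckDiscard), diffs the remaining lines (CompareSequence) and shifts
//     the change boundaries (ShiftBoundaries).
//  4. ShowDiffs walks the changed regions; CheckForChangesInLine and
//     ChangesInLine refine paragraphs that are only slightly different.
//  5. SetRedlinesToDoc turns the collected insert/delete ranges into redlines.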
CompareData::~CompareData()
{
if( pDelRing )
{
while( pDelRing->GetNext() != pDelRing )
delete pDelRing->GetNext();
delete pDelRing;
}
if( pInsRing )
{
while( pInsRing->GetNext() != pInsRing )
delete pInsRing->GetNext();
delete pInsRing;
}
}
void CompareData::SetIndex( size_t nLine, size_t nIndex )
{
if( !pIndex )
{
pIndex.reset( new size_t[ aLines.size() ] );
memset( pIndex.get(), 0, aLines.size() * sizeof( size_t ) );
}
if( nLine < aLines.size() )
pIndex[ nLine ] = nIndex;
}
void CompareData::SetChanged( size_t nLine, bool bFlag )
{
if( !pChangedFlag )
{
pChangedFlag.reset( new bool[ aLines.size() +1 ] );
memset( pChangedFlag.get(), 0, (aLines.size() +1) * sizeof( bool ) );
}
if( nLine < aLines.size() )
pChangedFlag[ nLine ] = bFlag;
}
void CompareData::CompareLines( CompareData& rData )
{
CheckRanges( rData );
sal_uLong nDifferent;
{
Hash aH( GetLineCount() + rData.GetLineCount() + 1 );
aH.CalcHashValue( *this );
aH.CalcHashValue( rData );
nDifferent = aH.GetCount();
}
{
Compare aComp( nDifferent, *this, rData );
}
}
sal_uLong CompareData::ShowDiffs( const CompareData& rData )
{
sal_uLong nLen1 = rData.GetLineCount(), nLen2 = GetLineCount();
sal_uLong nStt1 = 0, nStt2 = 0;
sal_uLong nCnt = 0;
while( nStt1 < nLen1 || nStt2 < nLen2 )
{
if( rData.GetChanged( nStt1 ) || GetChanged( nStt2 ) )
{
// Find a region of different lines between two pairs of identical
// lines.
sal_uLong nSav1 = nStt1, nSav2 = nStt2;
while( nStt1 < nLen1 && rData.GetChanged( nStt1 )) ++nStt1;
while( nStt2 < nLen2 && GetChanged( nStt2 )) ++nStt2;
if (m_bRecordDiff)
{
// Check if there are changed lines (only slightly different) and
// compare them in detail.
CheckForChangesInLine( rData, nSav1, nStt1, nSav2, nStt2 );
}
++nCnt;
}
++nStt1;
++nStt2;
}
return nCnt;
}
bool CompareData::HasDiffs( const CompareData& rData ) const
{
bool bRet = false;
sal_uLong nLen1 = rData.GetLineCount(), nLen2 = GetLineCount();
sal_uLong nStt1 = 0, nStt2 = 0;
while( nStt1 < nLen1 || nStt2 < nLen2 )
{
if( rData.GetChanged( nStt1 ) || GetChanged( nStt2 ) )
{
bRet = true;
break;
}
++nStt1;
++nStt2;
}
return bRet;
}
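// Pick the first prime that is at least a third of the number of lines to be
// hashed; if the primes table is exhausted, pHashArr stays null and line
// indexing is effectively skipped.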
Hash::Hash( sal_uLong nSize )
: nCount(1)
{
static const sal_uLong primes[] =
{
509,
1021,
2039,
4093,
8191,
16381,
32749,
65521,
131071,
262139,
524287,
1048573,
2097143,
4194301,
8388593,
16777213,
33554393,
67108859, /* Preposterously large . . . */
134217689,
268435399,
536870909,
1073741789,
2147483647,
0
};
int i;
pDataArr.reset( new HashData[ nSize ] );
pDataArr[0].nNext = 0;
pDataArr[0].nHash = 0;
pDataArr[0].pLine = nullptr;
nPrime = primes[0];
for( i = 0; primes[i] < nSize / 3; i++)
if( !primes[i] )
{
pHashArr = nullptr;
return;
}
nPrime = primes[ i ];
pHashArr.reset( new sal_uLong[ nPrime ] );
memset( pHashArr.get(), 0, nPrime * sizeof( sal_uLong ) );
}
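// Assign each line an index such that lines with identical content receive
// the same index, also across both CompareData objects; slot 0 is reserved
// and new indices are allocated from the shared pDataArr chain.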
void Hash::CalcHashValue( CompareData& rData )
{
if( pHashArr )
{
for( size_t n = 0; n < rData.GetLineCount(); ++n )
{
const SwCompareLine* pLine = rData.GetLine( n );
OSL_ENSURE( pLine, "where is the line?" );
sal_uLong nH = pLine->GetHashValue();
sal_uLong* pFound = &pHashArr[ nH % nPrime ];
size_t i;
for( i = *pFound; ; i = pDataArr[i].nNext )
if( !i )
{
i = nCount++;
pDataArr[i].nNext = *pFound;
pDataArr[i].nHash = nH;
pDataArr[i].pLine = pLine;
*pFound = i;
break;
}
else if( pDataArr[i].nHash == nH &&
pDataArr[i].pLine->Compare( *pLine ))
break;
rData.SetIndex( n, i );
}
}
}
Compare::Compare( sal_uLong nDiff, CompareData& rData1, CompareData& rData2 )
{
MovedData *pMD1, *pMD2;
// Look for the differing lines
{
std::unique_ptr<sal_Char[]> pDiscard1( new sal_Char[ rData1.GetLineCount() ] );
std::unique_ptr<sal_Char[]> pDiscard2( new sal_Char[ rData2.GetLineCount() ] );
sal_uLong* pCount1 = new sal_uLong[ nDiff ];
sal_uLong* pCount2 = new sal_uLong[ nDiff ];
memset( pCount1, 0, nDiff * sizeof( sal_uLong ));
memset( pCount2, 0, nDiff * sizeof( sal_uLong ));
// find indices in CompareData which have been assigned multiple times
CountDifference( rData1, pCount1 );
CountDifference( rData2, pCount2 );
// All which occur only once now have either been inserted or deleted.
// All which are also contained in the other one have been moved.
SetDiscard( rData1, pDiscard1.get(), pCount2 );
SetDiscard( rData2, pDiscard2.get(), pCount1 );
// forget the arrays again
delete [] pCount1; delete [] pCount2;
CheckDiscard( rData1.GetLineCount(), pDiscard1.get() );
CheckDiscard( rData2.GetLineCount(), pDiscard2.get() );
pMD1 = new MovedData( rData1, pDiscard1.get() );
pMD2 = new MovedData( rData2, pDiscard2.get() );
}
{
CompareSequence aTmp( rData1, rData2, *pMD1, *pMD2 );
}
ShiftBoundaries( rData1, rData2 );
delete pMD1;
delete pMD2;
}
void Compare::CountDifference( const CompareData& rData, sal_uLong* pCounts )
{
sal_uLong nLen = rData.GetLineCount();
for( sal_uLong n = 0; n < nLen; ++n )
{
sal_uLong nIdx = rData.GetIndex( n );
++pCounts[ nIdx ];
}
}
void Compare::SetDiscard( const CompareData& rData,
sal_Char* pDiscard, const sal_uLong* pCounts )
{
const sal_uLong nLen = rData.GetLineCount();
// calculate Max with respect to the line count
sal_uLong nMax = 5;
for( sal_uLong n = nLen / 64; ( n = n >> 2 ) > 0; )
nMax <<= 1;
for( sal_uLong n = 0; n < nLen; ++n )
{
sal_uLong nIdx = rData.GetIndex( n );
if( nIdx )
{
nIdx = pCounts[ nIdx ];
pDiscard[ n ] = !nIdx ? 1 : nIdx > nMax ? 2 : 0;
}
else
pDiscard[ n ] = 0;
}
}
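// Heuristic pass, apparently adapted from GNU diff: pDiscard entries are 1
// for lines that do not occur in the other document at all and 2 for
// "provisional" discards (lines whose content occurs suspiciously often
// there). Provisional discards are cancelled unless they sit in a
// sufficiently long run of discardable lines, so that frequent lines do not
// fragment the match.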
void Compare::CheckDiscard( sal_uLong nLen, sal_Char* pDiscard )
{
for( sal_uLong n = 0; n < nLen; ++n )
{
if( 2 == pDiscard[ n ] )
pDiscard[n] = 0;
else if( pDiscard[ n ] )
{
sal_uLong j;
sal_uLong length;
sal_uLong provisional = 0;
/* Find end of this run of discardable lines.
Count how many are provisionally discardable. */
for (j = n; j < nLen; j++)
{
if( !pDiscard[j] )
break;
if( 2 == pDiscard[j] )
++provisional;
}
/* Cancel provisional discards at end, and shrink the run. */
while( j > n && 2 == pDiscard[j - 1] )
{
pDiscard[ --j ] = 0;
--provisional;
}
/* Now we have the length of a run of discardable lines
whose first and last are not provisional. */
length = j - n;
/* If 1/4 of the lines in the run are provisional,
cancel discarding of all provisional lines in the run. */
if (provisional * 4 > length)
{
while (j > n)
if (pDiscard[--j] == 2)
pDiscard[j] = 0;
}
else
{
sal_uLong consec;
sal_uLong minimum = 1;
sal_uLong tem = length / 4;
/* MINIMUM is approximate square root of LENGTH/4.
A subrun of two or more provisionals can stand
when LENGTH is at least 16.
A subrun of 4 or more can stand when LENGTH >= 64. */
while ((tem = tem >> 2) > 0)
minimum *= 2;
minimum++;
/* Cancel any subrun of MINIMUM or more provisionals
within the larger run. */
for (j = 0, consec = 0; j < length; j++)
if (pDiscard[n + j] != 2)
consec = 0;
else if (minimum == ++consec)
/* Back up to start of subrun, to cancel it all. */
j -= consec;
else if (minimum < consec)
pDiscard[n + j] = 0;
/* Scan from beginning of run
until we find 3 or more nonprovisionals in a row
or until the first nonprovisional at least 8 lines in.
Until that point, cancel any provisionals. */
for (j = 0, consec = 0; j < length; j++)
{
if (j >= 8 && pDiscard[n + j] == 1)
break;
if (pDiscard[n + j] == 2)
{
consec = 0;
pDiscard[n + j] = 0;
}
else if (pDiscard[n + j] == 0)
consec = 0;
else
consec++;
if (consec == 3)
break;
}
/* n advances to the last line of the run. */
n += length - 1;
/* Same thing, from end. */
for (j = 0, consec = 0; j < length; j++)
{
if (j >= 8 && pDiscard[n - j] == 1)
break;
if (pDiscard[n - j] == 2)
{
consec = 0;
pDiscard[n - j] = 0;
}
else if (pDiscard[n - j] == 0)
consec = 0;
else
consec++;
if (consec == 3)
break;
}
}
}
}
}
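// Compact a CompareData for the diff run: lines flagged in pDiscard are
// marked as changed right away; the surviving lines keep their content index
// (pIndex) and their original line number (pLineNum).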
Compare::MovedData::MovedData( CompareData& rData, const sal_Char* pDiscard )
: pIndex( nullptr ), pLineNum( nullptr ), nCount( 0 )
{
sal_uLong nLen = rData.GetLineCount();
sal_uLong n;
for( n = 0; n < nLen; ++n )
if( pDiscard[ n ] )
rData.SetChanged( n );
else
++nCount;
if( nCount )
{
pIndex.reset( new sal_uLong[ nCount ] );
pLineNum.reset( new sal_uLong[ nCount ] );
for( n = 0, nCount = 0; n < nLen; ++n )
if( !pDiscard[ n ] )
{
pIndex[ nCount ] = rData.GetIndex( n );
pLineNum[ nCount++ ] = n;
}
}
}
/// Find the differing lines
Compare::CompareSequence::CompareSequence(
CompareData& rD1, CompareData& rD2,
const MovedData& rMD1, const MovedData& rMD2 )
: rData1( rD1 ), rData2( rD2 ), rMoved1( rMD1 ), rMoved2( rMD2 )
{
sal_uLong nSize = rMD1.GetCount() + rMD2.GetCount() + 3;
pMemory.reset( new long[ nSize * 2 ] );
pFDiag = pMemory.get() + ( rMD2.GetCount() + 1 );
pBDiag = pMemory.get() + ( nSize + rMD2.GetCount() + 1 );
Compare( 0, rMD1.GetCount(), 0, rMD2.GetCount() );
}
void Compare::CompareSequence::Compare( sal_uLong nStt1, sal_uLong nEnd1,
sal_uLong nStt2, sal_uLong nEnd2 )
{
/* Slide down the bottom initial diagonal. */
while( nStt1 < nEnd1 && nStt2 < nEnd2 &&
rMoved1.GetIndex( nStt1 ) == rMoved2.GetIndex( nStt2 ))
{
++nStt1;
++nStt2;
}
/* Slide up the top initial diagonal. */
while( nEnd1 > nStt1 && nEnd2 > nStt2 &&
rMoved1.GetIndex( nEnd1 - 1 ) == rMoved2.GetIndex( nEnd2 - 1 ))
{
--nEnd1;
--nEnd2;
}
/* Handle simple cases. */
if( nStt1 == nEnd1 )
while( nStt2 < nEnd2 )
rData2.SetChanged( rMoved2.GetLineNum( nStt2++ ));
else if (nStt2 == nEnd2)
while (nStt1 < nEnd1)
rData1.SetChanged( rMoved1.GetLineNum( nStt1++ ));
else
{
sal_uLong c, d, b;
/* Find a point of correspondence in the middle of the files. */
d = CheckDiag( nStt1, nEnd1, nStt2, nEnd2, &c );
b = pBDiag[ d ];
if( 1 != c )
{
/* Use that point to split this problem into two subproblems. */
Compare( nStt1, b, nStt2, b - d );
/* This used to use f instead of b,
but that is incorrect!
It is not necessarily the case that diagonal d
has a snake from b to f. */
Compare( b, nEnd1, b - d, nEnd2 );
}
}
}
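// Bidirectional "middle snake" search over diagonals, essentially the diag()
// step of the Myers O(ND) algorithm as used by GNU diff: the forward and
// backward searches are extended one edit step at a time until they overlap.
// Returns the diagonal of the meeting point and the edit cost in *pCost.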
sal_uLong Compare::CompareSequence::CheckDiag( sal_uLong nStt1, sal_uLong nEnd1,
sal_uLong nStt2, sal_uLong nEnd2, sal_uLong* pCost )
{
const long dmin = nStt1 - nEnd2; /* Minimum valid diagonal. */
const long dmax = nEnd1 - nStt2; /* Maximum valid diagonal. */
const long fmid = nStt1 - nStt2; /* Center diagonal of top-down search. */
const long bmid = nEnd1 - nEnd2; /* Center diagonal of bottom-up search. */
long fmin = fmid, fmax = fmid; /* Limits of top-down search. */
long bmin = bmid, bmax = bmid; /* Limits of bottom-up search. */
long c; /* Cost. */
long odd = (fmid - bmid) & 1; /* True if southeast corner is on an odd
diagonal with respect to the northwest. */
pFDiag[fmid] = nStt1;
pBDiag[bmid] = nEnd1;
for (c = 1;; ++c)
{
long d; /* Active diagonal. */
/* Extend the top-down search by an edit step in each diagonal. */
fmin > dmin ? pFDiag[--fmin - 1] = -1 : ++fmin;
fmax < dmax ? pFDiag[++fmax + 1] = -1 : --fmax;
for (d = fmax; d >= fmin; d -= 2)
{
long x, y, tlo = pFDiag[d - 1], thi = pFDiag[d + 1];
if (tlo >= thi)
x = tlo + 1;
else
x = thi;
y = x - d;
while( sal_uLong(x) < nEnd1 && sal_uLong(y) < nEnd2 &&
rMoved1.GetIndex( x ) == rMoved2.GetIndex( y ))
{
++x;
++y;
}
pFDiag[d] = x;
if( odd && bmin <= d && d <= bmax && pBDiag[d] <= pFDiag[d] )
{
*pCost = 2 * c - 1;
return d;
}
}
/* Similarly extend the bottom-up search. */
bmin > dmin ? pBDiag[--bmin - 1] = INT_MAX : ++bmin;
bmax < dmax ? pBDiag[++bmax + 1] = INT_MAX : --bmax;
for (d = bmax; d >= bmin; d -= 2)
{
long x, y, tlo = pBDiag[d - 1], thi = pBDiag[d + 1];
if (tlo < thi)
x = tlo;
else
x = thi - 1;
y = x - d;
while( sal_uLong(x) > nStt1 && sal_uLong(y) > nStt2 &&
rMoved1.GetIndex( x - 1 ) == rMoved2.GetIndex( y - 1 ))
{
--x;
--y;
}
pBDiag[d] = x;
if (!odd && fmin <= d && d <= fmax && pBDiag[d] <= pFDiag[d])
{
*pCost = 2 * c;
return d;
}
}
}
}
namespace
{
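// Shift the boundaries of runs of changed lines forward where an equal line
// follows the run, so that the resulting hunks line up more naturally; called
// once per direction by Compare::ShiftBoundaries.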
inline void lcl_ShiftBoundariesOneway( CompareData* const pData, CompareData const * const pOtherData)
{
sal_uLong i = 0;
sal_uLong j = 0;
sal_uLong i_end = pData->GetLineCount();
sal_uLong preceding = ULONG_MAX;
sal_uLong other_preceding = ULONG_MAX;
while (true)
{
sal_uLong start, other_start;
/* Scan forwards to find beginning of another run of changes.
Also keep track of the corresponding point in the other file. */
while( i < i_end && !pData->GetChanged( i ) )
{
while( pOtherData->GetChanged( j++ ))
/* Non-corresponding lines in the other file
will count as the preceding batch of changes. */
other_preceding = j;
i++;
}
if (i == i_end)
break;
start = i;
other_start = j;
while (true)
{
/* Now find the end of this run of changes. */
while( pData->GetChanged( ++i ))
;
/* If the first changed line matches the following unchanged one,
and this run does not follow right after a previous run,
and there are no lines deleted from the other file here,
then classify the first changed line as unchanged
and the following line as changed in its place. */
/* You might ask, how could this run follow right after another?
Only because the previous run was shifted here. */
if( i != i_end &&
pData->GetIndex( start ) == pData->GetIndex( i ) &&
!pOtherData->GetChanged( j ) &&
!( start == preceding || other_start == other_preceding ))
{
pData->SetChanged( start++, false );
pData->SetChanged( i );
/* Since one line-that-matches is now before this run
instead of after, we must advance in the other file
to keep in sync. */
++j;
}
else
break;
}
preceding = i;
other_preceding = j;
}
}
}
void Compare::ShiftBoundaries( CompareData& rData1, CompareData& rData2 )
{
lcl_ShiftBoundariesOneway(&rData1, &rData2);
lcl_ShiftBoundariesOneway(&rData2, &rData1);
}
sal_uLong SwCompareLine::GetHashValue() const
{
sal_uLong nRet = 0;
switch( rNode.GetNodeType() )
{
case SwNodeType::Text:
nRet = GetTextNodeHashValue( *rNode.GetTextNode(), nRet );
break;
case SwNodeType::Table:
{
const SwNode* pEndNd = rNode.EndOfSectionNode();
SwNodeIndex aIdx( rNode );
while( &aIdx.GetNode() != pEndNd )
{
if( aIdx.GetNode().IsTextNode() )
nRet = GetTextNodeHashValue( *aIdx.GetNode().GetTextNode(), nRet );
++aIdx;
}
}
break;
case SwNodeType::Section:
{
OUString sStr( GetText() );
for( sal_Int32 n = 0; n < sStr.getLength(); ++n )
( nRet <<= 1 ) += sStr[ n ];
}
break;
case SwNodeType::Grf:
case SwNodeType::Ole:
// Fixed ID? Should never occur ...
break;
default: break;
}
return nRet;
}
const SwNode& SwCompareLine::GetEndNode() const
{
const SwNode* pNd = &rNode;
switch( rNode.GetNodeType() )
{
case SwNodeType::Table:
pNd = rNode.EndOfSectionNode();
break;
case SwNodeType::Section:
{
const SwSectionNode& rSNd = static_cast<const SwSectionNode&>(rNode);
const SwSection& rSect = rSNd.GetSection();
if( CONTENT_SECTION != rSect.GetType() || rSect.IsProtect() )
pNd = rNode.EndOfSectionNode();
}
break;
default: break;
}
return *pNd;
}
bool SwCompareLine::Compare( const SwCompareLine& rLine ) const
{
return CompareNode( rNode, rLine.rNode );
}
namespace
{
OUString SimpleTableToText(const SwNode &rNode)
{
OUStringBuffer sRet;
const SwNode* pEndNd = rNode.EndOfSectionNode();
SwNodeIndex aIdx( rNode );
while (&aIdx.GetNode() != pEndNd)
{
if (aIdx.GetNode().IsTextNode())
{
if (sRet.getLength())
{
sRet.append( '\n' );
}
sRet.append( aIdx.GetNode().GetTextNode()->GetExpandText() );
}
++aIdx;
}
return sRet.makeStringAndClear();
}
}
bool SwCompareLine::CompareNode( const SwNode& rDstNd, const SwNode& rSrcNd )
{
if( rSrcNd.GetNodeType() != rDstNd.GetNodeType() )
return false;
bool bRet = false;
switch( rDstNd.GetNodeType() )
{
case SwNodeType::Text:
bRet = CompareTextNd( *rDstNd.GetTextNode(), *rSrcNd.GetTextNode() )
&& ( !CmpOptions.bUseRsid || rDstNd.GetTextNode()->CompareParRsid( *rSrcNd.GetTextNode() ) );
break;
case SwNodeType::Table:
{
const SwTableNode& rTSrcNd = static_cast<const SwTableNode&>(rSrcNd);
const SwTableNode& rTDstNd = static_cast<const SwTableNode&>(rDstNd);
bRet = ( rTSrcNd.EndOfSectionIndex() - rTSrcNd.GetIndex() ) ==
( rTDstNd.EndOfSectionIndex() - rTDstNd.GetIndex() );
// --> #i107826#: compare actual table content
if (bRet)
{
bRet = (SimpleTableToText(rSrcNd) == SimpleTableToText(rDstNd));
}
}
break;
case SwNodeType::Section:
{
const SwSectionNode& rSSrcNd = static_cast<const SwSectionNode&>(rSrcNd),
& rSDstNd = static_cast<const SwSectionNode&>(rDstNd);
const SwSection& rSrcSect = rSSrcNd.GetSection(),
& rDstSect = rSDstNd.GetSection();
SectionType eSrcSectType = rSrcSect.GetType(),
eDstSectType = rDstSect.GetType();
switch( eSrcSectType )
{
case CONTENT_SECTION:
bRet = CONTENT_SECTION == eDstSectType &&
rSrcSect.IsProtect() == rDstSect.IsProtect();
if( bRet && rSrcSect.IsProtect() )
{
// then only check whether both have the same size
bRet = ( rSSrcNd.EndOfSectionIndex() - rSSrcNd.GetIndex() ) ==
( rSDstNd.EndOfSectionIndex() - rSDstNd.GetIndex() );
}
break;
case TOX_HEADER_SECTION:
case TOX_CONTENT_SECTION:
if( TOX_HEADER_SECTION == eDstSectType ||
TOX_CONTENT_SECTION == eDstSectType )
{
// the same type of TOX?
const SwTOXBase* pSrcTOX = rSrcSect.GetTOXBase();
const SwTOXBase* pDstTOX = rDstSect.GetTOXBase();
bRet = pSrcTOX && pDstTOX
&& pSrcTOX->GetType() == pDstTOX->GetType()
&& pSrcTOX->GetTitle() == pDstTOX->GetTitle()
&& pSrcTOX->GetTypeName() == pDstTOX->GetTypeName()
;
}
break;
case DDE_LINK_SECTION:
case FILE_LINK_SECTION:
bRet = eSrcSectType == eDstSectType &&
rSrcSect.GetLinkFileName() ==
rDstSect.GetLinkFileName();
break;
}
}
break;
case SwNodeType::End:
bRet = rSrcNd.StartOfSectionNode()->GetNodeType() ==
rDstNd.StartOfSectionNode()->GetNodeType();
// --> #i107826#: compare actual table content
if (bRet && rSrcNd.StartOfSectionNode()->GetNodeType() == SwNodeType::Table)
{
bRet = CompareNode(
*rSrcNd.StartOfSectionNode(), *rDstNd.StartOfSectionNode());
}
break;
default: break;
}
return bRet;
}
OUString SwCompareLine::GetText() const
{
OUString sRet;
switch( rNode.GetNodeType() )
{
case SwNodeType::Text:
sRet = rNode.GetTextNode()->GetExpandText();
break;
case SwNodeType::Table:
{
sRet = "Tabelle: " + SimpleTableToText(rNode);
}
break;
case SwNodeType::Section:
{
sRet = "Section - Node:";
const SwSectionNode& rSNd = static_cast<const SwSectionNode&>(rNode);
const SwSection& rSect = rSNd.GetSection();
switch( rSect.GetType() )
{
case CONTENT_SECTION:
if( rSect.IsProtect() )
sRet += OUString::number(
rSNd.EndOfSectionIndex() - rSNd.GetIndex() );
break;
case TOX_HEADER_SECTION:
case TOX_CONTENT_SECTION:
{
const SwTOXBase* pTOX = rSect.GetTOXBase();
if( pTOX )
sRet += pTOX->GetTitle() + pTOX->GetTypeName() +
OUString::number(pTOX->GetType());
}
break;
case DDE_LINK_SECTION:
case FILE_LINK_SECTION:
sRet += rSect.GetLinkFileName();
break;
}
}
break;
case SwNodeType::Grf:
sRet = "Grafik - Node:";
break;
case SwNodeType::Ole:
sRet = "OLE - Node:";
break;
default: break;
}
return sRet;
}
sal_uLong SwCompareLine::GetTextNodeHashValue( const SwTextNode& rNd, sal_uLong nVal )
{
OUString sStr( rNd.GetExpandText() );
for( sal_Int32 n = 0; n < sStr.getLength(); ++n )
( nVal <<= 1 ) += sStr[ n ];
return nVal;
}
bool SwCompareLine::CompareTextNd( const SwTextNode& rDstNd,
const SwTextNode& rSrcNd )
{
bool bRet = false;
// Very simple at first
if( rDstNd.GetText() == rSrcNd.GetText() )
2000-09-18 23:08:29 +00:00
{
// The text is the same, but are the "special attributes" (0xFF) also the same?
bRet = true;
}
return bRet;
}
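// Refine a pair of paragraphs that were matched as only slightly different:
// compute an LCS between them (word- or character-wise, depending on
// CmpOptions), give up if the matched runs are too short relative to the
// average paragraph length (the nSqSum heuristic below), and otherwise record
// the inserted and deleted pieces as PaMs on rpInsRing/rpDelRing, copying the
// deleted text over from the source node.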
bool SwCompareLine::ChangesInLine( const SwCompareLine& rLine,
SwPaM *& rpInsRing, SwPaM*& rpDelRing ) const
{
bool bRet = false;
// Only compare textnodes
if( SwNodeType::Text == rNode.GetNodeType() &&
SwNodeType::Text == rLine.GetNode().GetNodeType() )
{
SwTextNode& rDstNd = *const_cast<SwTextNode*>(rNode.GetTextNode());
const SwTextNode& rSrcNd = *rLine.GetNode().GetTextNode();
SwDoc* pDstDoc = rDstNd.GetDoc();
int nLcsLen = 0;
int nDstLen = rDstNd.GetText().getLength();
int nSrcLen = rSrcNd.GetText().getLength();
int nMinLen = std::min( nDstLen , nSrcLen );
int nAvgLen = ( nDstLen + nSrcLen )/2;
std::vector<int> aLcsDst( nMinLen + 1 );
std::vector<int> aLcsSrc( nMinLen + 1 );
if( CmpOptions.eCmpMode == SwCompareMode::ByWord )
{
std::vector<int> aTmpLcsDst( nMinLen + 1 );
std::vector<int> aTmpLcsSrc( nMinLen + 1 );
WordArrayComparator aCmp( &rDstNd, &rSrcNd );
LgstCommonSubseq aSeq( aCmp );
nLcsLen = aSeq.Find( &aTmpLcsDst[0], &aTmpLcsSrc[0] );
if( CmpOptions.nIgnoreLen )
{
nLcsLen = CommonSubseq::IgnoreIsolatedPieces( &aTmpLcsDst[0], &aTmpLcsSrc[0],
aCmp.GetLen1(), aCmp.GetLen2(),
nLcsLen, CmpOptions.nIgnoreLen );
}
nLcsLen = aCmp.GetCharSequence( &aTmpLcsDst[0], &aTmpLcsSrc[0],
&aLcsDst[0], &aLcsSrc[0], nLcsLen );
}
else
{
CharArrayComparator aCmp( &rDstNd, &rSrcNd );
LgstCommonSubseq aSeq( aCmp );
nLcsLen = aSeq.Find( &aLcsDst[0], &aLcsSrc[0] );
if( CmpOptions.nIgnoreLen )
{
nLcsLen = CommonSubseq::IgnoreIsolatedPieces( &aLcsDst[0], &aLcsSrc[0], nDstLen,
nSrcLen, nLcsLen,
CmpOptions.nIgnoreLen );
}
}
// find the sum of the squares of the continuous substrings
int nSqSum = 0;
int nCnt = 1;
for( int i = 0; i < nLcsLen; i++ )
{
if( i != nLcsLen - 1 && aLcsDst[i] + 1 == aLcsDst[i + 1]
&& aLcsSrc[i] + 1 == aLcsSrc[i + 1] )
{
nCnt++;
}
else
{
nSqSum += nCnt*nCnt;
nCnt = 1;
}
}
// Don't compare if there aren't enough similarities
if ( nAvgLen >= 8 && nSqSum*32 < nAvgLen*nAvgLen )
{
return false;
}
// Show the differences
int nSkip = 0;
for( int i = 0; i <= nLcsLen; i++ )
{
int nDstFrom = i ? (aLcsDst[i - 1] + 1) : 0;
int nDstTo = ( i == nLcsLen ) ? nDstLen : aLcsDst[i];
int nSrcFrom = i ? (aLcsSrc[i - 1] + 1) : 0;
int nSrcTo = ( i == nLcsLen ) ? nSrcLen : aLcsSrc[i];
SwPaM aPam( rDstNd, nDstTo + nSkip );
if ( nDstFrom < nDstTo )
{
SwPaM* pTmp = new SwPaM( *aPam.GetPoint(), rpInsRing );
if( !rpInsRing )
rpInsRing = pTmp;
pTmp->SetMark();
pTmp->GetMark()->nContent = nDstFrom + nSkip;
}
if ( nSrcFrom < nSrcTo )
{
bool bUndo = pDstDoc->GetIDocumentUndoRedo().DoesUndo();
pDstDoc->GetIDocumentUndoRedo().DoUndo( false );
SwPaM aCpyPam( rSrcNd, nSrcFrom );
aCpyPam.SetMark();
aCpyPam.GetPoint()->nContent = nSrcTo;
aCpyPam.GetDoc()->getIDocumentContentOperations().CopyRange( aCpyPam, *aPam.GetPoint(),
/*bCopyAll=*/false, /*bCheckPos=*/true );
pDstDoc->GetIDocumentUndoRedo().DoUndo( bUndo );
SwPaM* pTmp = new SwPaM( *aPam.GetPoint(), rpDelRing );
if( !rpDelRing )
rpDelRing = pTmp;
pTmp->SetMark();
pTmp->GetMark()->nContent = nDstTo + nSkip;
nSkip += nSrcTo - nSrcFrom;
if( rpInsRing )
{
SwPaM* pCorr = rpInsRing->GetPrev();
if( *pCorr->GetPoint() == *pTmp->GetPoint() )
*pCorr->GetPoint() = *pTmp->GetMark();
}
}
}
bRet = true;
}
return bRet;
}
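// NextIdx/PrevIdx step through the node array treating a table or a
// protected/non-content section as a single line: for such start/end nodes
// they jump to the matching end/start of the section.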
sal_uLong CompareData::NextIdx( const SwNode* pNd )
{
if( pNd->IsStartNode() )
{
const SwSectionNode* pSNd;
if( pNd->IsTableNode() ||
( nullptr != (pSNd = pNd->GetSectionNode() ) &&
( CONTENT_SECTION != pSNd->GetSection().GetType() ||
pSNd->GetSection().IsProtect() ) ) )
pNd = pNd->EndOfSectionNode();
}
return pNd->GetIndex() + 1;
}
sal_uLong CompareData::PrevIdx( const SwNode* pNd )
{
if( pNd->IsEndNode() )
{
const SwSectionNode* pSNd;
if( pNd->StartOfSectionNode()->IsTableNode() ||
( nullptr != (pSNd = pNd->StartOfSectionNode()->GetSectionNode() ) &&
( CONTENT_SECTION != pSNd->GetSection().GetType() ||
pSNd->GetSection().IsProtect() ) ) )
pNd = pNd->StartOfSectionNode();
}
return pNd->GetIndex() - 1;
}
void CompareData::CheckRanges( CompareData& rData )
{
const SwNodes& rSrcNds = rData.rDoc.GetNodes();
const SwNodes& rDstNds = rDoc.GetNodes();
const SwNode& rSrcEndNd = rData.GetEndOfContent();
const SwNode& rDstEndNd = GetEndOfContent();
sal_uLong nSrcSttIdx = NextIdx( rSrcEndNd.StartOfSectionNode() );
sal_uLong nSrcEndIdx = rSrcEndNd.GetIndex();
sal_uLong nDstSttIdx = NextIdx( rDstEndNd.StartOfSectionNode() );
sal_uLong nDstEndIdx = rDstEndNd.GetIndex();
while( nSrcSttIdx < nSrcEndIdx && nDstSttIdx < nDstEndIdx )
{
const SwNode* pSrcNd = rSrcNds[ nSrcSttIdx ];
const SwNode* pDstNd = rDstNds[ nDstSttIdx ];
if( !SwCompareLine::CompareNode( *pSrcNd, *pDstNd ))
break;
nSrcSttIdx = NextIdx( pSrcNd );
nDstSttIdx = NextIdx( pDstNd );
}
nSrcEndIdx = PrevIdx( &rSrcEndNd );
nDstEndIdx = PrevIdx( &rDstEndNd );
while( nSrcSttIdx < nSrcEndIdx && nDstSttIdx < nDstEndIdx )
{
const SwNode* pSrcNd = rSrcNds[ nSrcEndIdx ];
const SwNode* pDstNd = rDstNds[ nDstEndIdx ];
if( !SwCompareLine::CompareNode( *pSrcNd, *pDstNd ))
break;
nSrcEndIdx = PrevIdx( pSrcNd );
nDstEndIdx = PrevIdx( pDstNd );
}
while( nSrcSttIdx <= nSrcEndIdx )
{
const SwNode* pNd = rSrcNds[ nSrcSttIdx ];
rData.InsertLine( new SwCompareLine( *pNd ) );
nSrcSttIdx = NextIdx( pNd );
}
while( nDstSttIdx <= nDstEndIdx )
{
const SwNode* pNd = rDstNds[ nDstSttIdx ];
InsertLine( new SwCompareLine( *pNd ) );
nDstSttIdx = NextIdx( pNd );
}
}
void CompareData::ShowInsert( sal_uLong nStt, sal_uLong nEnd )
{
SwPaM* pTmp = new SwPaM( GetLine( nStt )->GetNode(), 0,
GetLine( nEnd-1 )->GetEndNode(), 0,
pInsRing );
if( !pInsRing )
pInsRing = pTmp;
// #i65201#: These SwPaMs are calculated smaller than needed, see comment below
}
void CompareData::ShowDelete(
const CompareData& rData,
sal_uLong nStt,
sal_uLong nEnd,
sal_uLong nInsPos )
{
SwNodeRange aRg(
rData.GetLine( nStt )->GetNode(), 0,
rData.GetLine( nEnd-1 )->GetEndNode(), 1 );
sal_uInt16 nOffset = 0;
const SwCompareLine* pLine = nullptr;
if( nInsPos >= 1 )
{
if( GetLineCount() == nInsPos )
{
pLine = GetLine( nInsPos-1 );
nOffset = 1;
}
else
pLine = GetLine( nInsPos );
}
const SwNode* pLineNd;
if( pLine )
{
if( nOffset )
pLineNd = &pLine->GetEndNode();
else
pLineNd = &pLine->GetNode();
}
else
{
pLineNd = &GetEndOfContent();
nOffset = 0;
}
SwNodeIndex aInsPos( *pLineNd, nOffset );
SwNodeIndex aSavePos( aInsPos, -1 );
rData.rDoc.GetDocumentContentOperationsManager().CopyWithFlyInFly( aRg, 0, aInsPos );
rDoc.getIDocumentState().SetModified();
++aSavePos;
// #i65201#: These SwPaMs are calculated when the (old) delete-redlines are hidden,
// they will be inserted when the delete-redlines are shown again.
// To avoid unwanted insertions of delete-redlines into these new redlines, what happens
// especially at the end of the document, I reduce the SwPaM by one node.
// Before the new redlines are inserted, they have to expand again.
SwPaM* pTmp = new SwPaM( aSavePos.GetNode(), aInsPos.GetNode(), 0, -1, pDelRing );
if( !pDelRing )
pDelRing = pTmp;
if( pInsRing )
{
SwPaM* pCorr = pInsRing->GetPrev();
if( *pCorr->GetPoint() == *pTmp->GetPoint() )
{
SwNodeIndex aTmpPos( pTmp->GetMark()->nNode, -1 );
*pCorr->GetPoint() = SwPosition( aTmpPos );
}
}
}
void CompareData::CheckForChangesInLine( const CompareData& rData,
sal_uLong& rStt, sal_uLong& rEnd,
sal_uLong& rThisStt, sal_uLong& rThisEnd )
{
LineArrayComparator aCmp( *this, rData, rThisStt, rThisEnd,
rStt, rEnd );
int nMinLen = std::min( aCmp.GetLen1(), aCmp.GetLen2() );
std::unique_ptr<int[]> pLcsDst(new int[ nMinLen ]);
std::unique_ptr<int[]> pLcsSrc(new int[ nMinLen ]);
FastCommonSubseq subseq( aCmp );
int nLcsLen = subseq.Find( pLcsDst.get(), pLcsSrc.get() );
for (int i = 0; i <= nLcsLen; i++)
{
// Beginning of inserted lines (inclusive)
int nDstFrom = i ? pLcsDst[i - 1] + 1 : 0;
// End of inserted lines (exclusive)
int nDstTo = ( i == nLcsLen ) ? aCmp.GetLen1() : pLcsDst[i];
// Beginning of deleted lines (inclusive)
int nSrcFrom = i ? pLcsSrc[i - 1] + 1 : 0;
// End of deleted lines (exclusive)
int nSrcTo = ( i == nLcsLen ) ? aCmp.GetLen2() : pLcsSrc[i];
if( i )
{
const SwCompareLine* pDstLn = GetLine( rThisStt + nDstFrom - 1 );
const SwCompareLine* pSrcLn = rData.GetLine( rStt + nSrcFrom - 1 );
// Show differences in detail for lines that
// were matched as only slightly different
if( !pDstLn->ChangesInLine( *pSrcLn, pInsRing, pDelRing ) )
{
ShowInsert( rThisStt + nDstFrom - 1, rThisStt + nDstFrom );
ShowDelete( rData, rStt + nSrcFrom - 1, rStt + nSrcFrom,
rThisStt + nDstFrom );
}
}
// Lines missing from source are inserted
if( nDstFrom != nDstTo )
{
ShowInsert( rThisStt + nDstFrom, rThisStt + nDstTo );
}
// Lines missing from destination are deleted
if( nSrcFrom != nSrcTo )
{
ShowDelete( rData, rStt + nSrcFrom, rStt + nSrcTo, rThisStt + nDstTo );
}
}
}
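// Turn the collected delete and insert PaM rings into redlines on the
// document; author and timestamp are taken from the other document's
// properties when bUseDocInfo is set, otherwise the current redline author
// and the current time are used.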
void CompareData::SetRedlinesToDoc( bool bUseDocInfo )
{
SwPaM* pTmp = pDelRing;
// get the Author / TimeStamp from the "other" document info
std::size_t nAuthor = rDoc.getIDocumentRedlineAccess().GetRedlineAuthor();
DateTime aTimeStamp( DateTime::SYSTEM );
SwDocShell *pDocShell(rDoc.GetDocShell());
OSL_ENSURE(pDocShell, "no SwDocShell");
if (pDocShell) {
uno::Reference<document::XDocumentPropertiesSupplier> xDPS(
pDocShell->GetModel(), uno::UNO_QUERY_THROW);
uno::Reference<document::XDocumentProperties> xDocProps(
xDPS->getDocumentProperties());
OSL_ENSURE(xDocProps.is(), "Doc has no DocumentProperties");
if( bUseDocInfo && xDocProps.is() ) {
OUString aTmp( 1 == xDocProps->getEditingCycles()
? xDocProps->getAuthor()
: xDocProps->getModifiedBy() );
util::DateTime uDT( 1 == xDocProps->getEditingCycles()
? xDocProps->getCreationDate()
: xDocProps->getModificationDate() );
if( !aTmp.isEmpty() )
{
nAuthor = rDoc.getIDocumentRedlineAccess().InsertRedlineAuthor( aTmp );
aTimeStamp = DateTime(uDT);
}
}
}
if( pTmp )
{
SwRedlineData aRedlnData( nsRedlineType_t::REDLINE_DELETE, nAuthor, aTimeStamp,
OUString(), nullptr );
do {
// #i65201#: Expand again, see comment above.
if( pTmp->GetPoint()->nContent == 0 )
{
++pTmp->GetPoint()->nNode;
pTmp->GetPoint()->nContent.Assign( pTmp->GetContentNode(), 0 );
}
// #i101009#
// prevent redlines that end on structural end node
if (& GetEndOfContent() ==
& pTmp->GetPoint()->nNode.GetNode())
{
--pTmp->GetPoint()->nNode;
SwContentNode *const pContentNode( pTmp->GetContentNode() );
pTmp->GetPoint()->nContent.Assign( pContentNode,
(pContentNode) ? pContentNode->Len() : 0 );
// tdf#106218 try to avoid losing a paragraph break here:
if (pTmp->GetMark()->nContent == 0)
{
SwNodeIndex const prev(pTmp->GetMark()->nNode, -1);
if (prev.GetNode().IsTextNode())
{
*pTmp->GetMark() = SwPosition(
*prev.GetNode().GetTextNode(),
prev.GetNode().GetTextNode()->Len());
}
}
}
rDoc.getIDocumentRedlineAccess().DeleteRedline( *pTmp, false, USHRT_MAX );
if (rDoc.GetIDocumentUndoRedo().DoesUndo())
{
SwUndo *const pUndo(new SwUndoCompDoc( *pTmp, false )) ;
rDoc.GetIDocumentUndoRedo().AppendUndo(pUndo);
}
rDoc.getIDocumentRedlineAccess().AppendRedline( new SwRangeRedline( aRedlnData, *pTmp ), true );
} while( pDelRing != ( pTmp = pTmp->GetNext()) );
}
pTmp = pInsRing;
if( pTmp )
{
do {
if( pTmp->GetPoint()->nContent == 0 )
{
++pTmp->GetPoint()->nNode;
pTmp->GetPoint()->nContent.Assign( pTmp->GetContentNode(), 0 );
}
// #i101009#
// prevent redlines that end on structural end node
if (& GetEndOfContent() ==
& pTmp->GetPoint()->nNode.GetNode())
{
--pTmp->GetPoint()->nNode;
SwContentNode *const pContentNode( pTmp->GetContentNode() );
pTmp->GetPoint()->nContent.Assign( pContentNode,
(pContentNode) ? pContentNode->Len() : 0 );
// tdf#106218 try to avoid losing a paragraph break here:
if (pTmp->GetMark()->nContent == 0)
{
SwNodeIndex const prev(pTmp->GetMark()->nNode, -1);
if (prev.GetNode().IsTextNode())
{
*pTmp->GetMark() = SwPosition(
*prev.GetNode().GetTextNode(),
prev.GetNode().GetTextNode()->Len());
}
}
}
} while( pInsRing != ( pTmp = pTmp->GetNext()) );
SwRedlineData aRedlnData( nsRedlineType_t::REDLINE_INSERT, nAuthor, aTimeStamp,
OUString(), nullptr );
// combine consecutive insert ranges
if( pTmp->GetNext() != pInsRing )
{
do {
SwPosition& rSttEnd = *pTmp->End(),
& rEndStt = *(pTmp->GetNext())->Start();
const SwContentNode* pCNd;
if( rSttEnd == rEndStt ||
(!rEndStt.nContent.GetIndex() &&
rEndStt.nNode.GetIndex() - 1 == rSttEnd.nNode.GetIndex() &&
nullptr != ( pCNd = rSttEnd.nNode.GetNode().GetContentNode() ) &&
rSttEnd.nContent.GetIndex() == pCNd->Len()))
{
if( pTmp->GetNext() == pInsRing )
{
// are consecutive, so combine
2000-09-18 23:08:29 +00:00
rEndStt = *pTmp->Start();
delete pTmp;
pTmp = pInsRing;
}
else
{
// are consecutive, so combine
rSttEnd = *(pTmp->GetNext())->End();
delete pTmp->GetNext();
}
}
else
pTmp = pTmp->GetNext();
} while( pInsRing != pTmp );
}
do {
if (IDocumentRedlineAccess::AppendResult::APPENDED ==
rDoc.getIDocumentRedlineAccess().AppendRedline(
new SwRangeRedline(aRedlnData, *pTmp), true) &&
rDoc.GetIDocumentUndoRedo().DoesUndo())
{
SwUndo *const pUndo(new SwUndoCompDoc( *pTmp, true ));
rDoc.GetIDocumentUndoRedo().AppendUndo(pUndo);
}
} while( pInsRing != ( pTmp = pTmp->GetNext()) );
}
}
typedef std::shared_ptr<CompareData> CompareDataPtr;
typedef std::pair<CompareDataPtr, CompareDataPtr> CompareDataPtrPair;
typedef std::vector<CompareDataPtrPair> Comparators;
namespace
{
Comparators buildComparators(SwDoc &rSrcDoc, SwDoc &rDestDoc)
{
Comparators aComparisons;
//compare main text
aComparisons.emplace_back(CompareDataPtr(new CompareMainText(rSrcDoc)),
CompareDataPtr(new CompareMainText(rDestDoc)));
//if we have the same number of frames then try to compare within them
const SwFrameFormats *pSrcFrameFormats = rSrcDoc.GetSpzFrameFormats();
const SwFrameFormats *pDestFrameFormats = rDestDoc.GetSpzFrameFormats();
if (pSrcFrameFormats->size() == pDestFrameFormats->size())
{
for (size_t i = 0; i < pSrcFrameFormats->size(); ++i)
{
const SwFrameFormat& rSrcFormat = *(*pSrcFrameFormats)[i];
const SwFrameFormat& rDestFormat = *(*pDestFrameFormats)[i];
const SwNodeIndex* pSrcIdx = rSrcFormat.GetContent().GetContentIdx();
const SwNodeIndex* pDestIdx = rDestFormat.GetContent().GetContentIdx();
if (!pSrcIdx && !pDestIdx)
continue;
if (!pSrcIdx || !pDestIdx)
break;
const SwNode* pSrcNode = pSrcIdx->GetNode().EndOfSectionNode();
const SwNode* pDestNode = pDestIdx->GetNode().EndOfSectionNode();
if (!pSrcNode && !pDestNode)
continue;
if (!pSrcNode || !pDestNode)
break;
aComparisons.emplace_back(CompareDataPtr(new CompareFrameFormatText(rSrcDoc, *pSrcIdx)),
CompareDataPtr(new CompareFrameFormatText(rDestDoc, *pDestIdx)));
}
}
return aComparisons;
}
}
// Returns a non-zero value if the documents differ
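// Rough flow: pick the comparison options (by-word/by-character, RSID usage),
// switch both documents to "show insert" redline mode, build a CompareData
// pair for the main text and for each matching fly frame, compare each pair
// and, if anything differs, convert the differences into redlines here.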
long SwDoc::CompareDoc( const SwDoc& rDoc )
{
if( &rDoc == this )
return 0;
long nRet = 0;
// Get comparison options
CmpOptions.eCmpMode = SW_MOD()->GetCompareMode();
if( CmpOptions.eCmpMode == SwCompareMode::Auto )
{
if( getRsidRoot() == rDoc.getRsidRoot() )
{
CmpOptions.eCmpMode = SwCompareMode::ByChar;
CmpOptions.bUseRsid = true;
CmpOptions.nIgnoreLen = 2;
}
else
{
CmpOptions.eCmpMode = SwCompareMode::ByWord;
CmpOptions.bUseRsid = false;
CmpOptions.nIgnoreLen = 3;
}
}
else
{
CmpOptions.bUseRsid = getRsidRoot() == rDoc.getRsidRoot() && SW_MOD()->IsUseRsid();
CmpOptions.nIgnoreLen = SW_MOD()->IsIgnorePieces() ? SW_MOD()->GetPieceLen() : 0;
}
GetIDocumentUndoRedo().StartUndo(SwUndoId::EMPTY, nullptr);
bool bDocWasModified = getIDocumentState().IsModified();
SwDoc& rSrcDoc = const_cast<SwDoc&>(rDoc);
bool bSrcModified = rSrcDoc.getIDocumentState().IsModified();
RedlineFlags eSrcRedlMode = rSrcDoc.getIDocumentRedlineAccess().GetRedlineFlags();
rSrcDoc.getIDocumentRedlineAccess().SetRedlineFlags( RedlineFlags::ShowInsert );
getIDocumentRedlineAccess().SetRedlineFlags(RedlineFlags::On | RedlineFlags::ShowInsert);
Comparators aComparisons(buildComparators(rSrcDoc, *this));
for (auto& a : aComparisons)
{
CompareData& rD0 = *a.first.get();
CompareData& rD1 = *a.second.get();
rD1.CompareLines( rD0 );
nRet |= rD1.ShowDiffs( rD0 );
}
if( nRet )
{
getIDocumentRedlineAccess().SetRedlineFlags(RedlineFlags::On |
RedlineFlags::ShowInsert | RedlineFlags::ShowDelete);
for (auto& a : aComparisons)
{
CompareData& rD1 = *a.second.get();
rD1.SetRedlinesToDoc( !bDocWasModified );
}
getIDocumentState().SetModified();
}
rSrcDoc.getIDocumentRedlineAccess().SetRedlineFlags( eSrcRedlMode );
getIDocumentRedlineAccess().SetRedlineFlags(RedlineFlags::ShowInsert | RedlineFlags::ShowDelete);
if( !bSrcModified )
rSrcDoc.getIDocumentState().ResetModified();
GetIDocumentUndoRedo().EndUndo(SwUndoId::EMPTY, nullptr);
return nRet;
}
namespace
{
struct SaveMergeRedline
{
const SwRangeRedline* pSrcRedl;
SwRangeRedline* pDestRedl;
SaveMergeRedline( const SwNode& rDstNd, const SwRangeRedline& rSrcRedl);
sal_uInt16 InsertRedline(SwPaM* pLastDestRedline);
};
}
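// Remembers one redline of the source document and builds the matching
// SwRangeRedline positioned at the corresponding node of the destination
// document; for delete redlines the destination range is given the same
// node/content extent as the source range.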
SaveMergeRedline::SaveMergeRedline( const SwNode& rDstNd,
const SwRangeRedline& rSrcRedl)
: pSrcRedl( &rSrcRedl )
{
SwPosition aPos( rDstNd );
const SwPosition* pStt = rSrcRedl.Start();
if( rDstNd.IsContentNode() )
aPos.nContent.Assign( const_cast<SwContentNode*>(static_cast<const SwContentNode*>(&rDstNd)), pStt->nContent.GetIndex() );
pDestRedl = new SwRangeRedline( rSrcRedl.GetRedlineData(), aPos );
if( nsRedlineType_t::REDLINE_DELETE == pDestRedl->GetType() )
{
// mark the area as deleted
const SwPosition* pEnd = pStt == rSrcRedl.GetPoint()
? rSrcRedl.GetMark()
: rSrcRedl.GetPoint();
pDestRedl->SetMark();
pDestRedl->GetPoint()->nNode += pEnd->nNode.GetIndex() -
pStt->nNode.GetIndex();
pDestRedl->GetPoint()->nContent.Assign( pDestRedl->GetContentNode(),
pEnd->nContent.GetIndex() );
}
}
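// Inserts pDestRedl into the destination document.
// For insert redlines the missing text is first copied over from the source
// document; for delete redlines the new range is clipped or split against
// redlines already present at that position. Returns the number of redlines
// actually appended.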
sal_uInt16 SaveMergeRedline::InsertRedline(SwPaM* pLastDestRedline)
{
sal_uInt16 nIns = 0;
SwDoc* pDoc = pDestRedl->GetDoc();
if( nsRedlineType_t::REDLINE_INSERT == pDestRedl->GetType() )
{
// the part was inserted so copy it from the SourceDoc
::sw::UndoGuard const undoGuard(pDoc->GetIDocumentUndoRedo());
SwNodeIndex aSaveNd( pDestRedl->GetPoint()->nNode, -1 );
const sal_Int32 nSaveCnt = pDestRedl->GetPoint()->nContent.GetIndex();
RedlineFlags eOld = pDoc->getIDocumentRedlineAccess().GetRedlineFlags();
pDoc->getIDocumentRedlineAccess().SetRedlineFlags_intern(eOld | RedlineFlags::Ignore);
pSrcRedl->GetDoc()->getIDocumentContentOperations().CopyRange(
*const_cast<SwPaM*>(static_cast<const SwPaM*>(pSrcRedl)),
*pDestRedl->GetPoint(), /*bCopyAll=*/false, /*bCheckPos=*/true );
pDoc->getIDocumentRedlineAccess().SetRedlineFlags_intern( eOld );
pDestRedl->SetMark();
++aSaveNd;
pDestRedl->GetMark()->nNode = aSaveNd;
pDestRedl->GetMark()->nContent.Assign( aSaveNd.GetNode().GetContentNode(),
nSaveCnt );
if( pLastDestRedline && *pLastDestRedline->GetPoint() == *pDestRedl->GetPoint() )
*pLastDestRedline->GetPoint() = *pDestRedl->GetMark();
}
else
{
//JP 21.09.98: Bug 55909
// If there already is a deleted or inserted one at the same position, we have to split it!
SwPosition* pDStt = pDestRedl->GetMark(),
* pDEnd = pDestRedl->GetPoint();
SwRedlineTable::size_type n = 0;
// find the first redline for StartPos
if( !pDoc->getIDocumentRedlineAccess().GetRedline( *pDStt, &n ) && n )
--n;
const SwRedlineTable& rRedlineTable = pDoc->getIDocumentRedlineAccess().GetRedlineTable();
for( ; n < rRedlineTable.size(); ++n )
{
SwRangeRedline* pRedl = rRedlineTable[ n ];
SwPosition* pRStt = pRedl->Start(),
* pREnd = pRStt == pRedl->GetPoint() ? pRedl->GetMark()
: pRedl->GetPoint();
if( nsRedlineType_t::REDLINE_DELETE == pRedl->GetType() ||
nsRedlineType_t::REDLINE_INSERT == pRedl->GetType() )
{
SwComparePosition eCmpPos = ComparePosition( *pDStt, *pDEnd, *pRStt, *pREnd );
switch( eCmpPos )
{
case SwComparePosition::CollideStart:
case SwComparePosition::Behind:
break;
case SwComparePosition::Inside:
case SwComparePosition::Equal:
delete pDestRedl;
pDestRedl = nullptr;
SAL_FALLTHROUGH;
case SwComparePosition::CollideEnd:
case SwComparePosition::Before:
n = rRedlineTable.size();
break;
case SwComparePosition::Outside:
assert(pDestRedl && "is this actually impossible");
if (pDestRedl)
{
SwRangeRedline* pCpyRedl = new SwRangeRedline(
pDestRedl->GetRedlineData(), *pDStt );
pCpyRedl->SetMark();
*pCpyRedl->GetPoint() = *pRStt;
SwUndoCompDoc *const pUndo =
(pDoc->GetIDocumentUndoRedo().DoesUndo())
? new SwUndoCompDoc( *pCpyRedl ) : nullptr;
// now modify doc: append redline, undo (and count)
pDoc->getIDocumentRedlineAccess().AppendRedline( pCpyRedl, true );
if( pUndo )
{
pDoc->GetIDocumentUndoRedo().AppendUndo(pUndo);
}
++nIns;
*pDStt = *pREnd;
// we should start over now
n = SwRedlineTable::npos;
}
break;
case SwComparePosition::OverlapBefore:
*pDEnd = *pRStt;
break;
case SwComparePosition::OverlapBehind:
*pDStt = *pREnd;
break;
}
}
else if( *pDEnd <= *pRStt )
break;
}
}
if( pDestRedl )
{
SwUndoCompDoc *const pUndo = (pDoc->GetIDocumentUndoRedo().DoesUndo())
? new SwUndoCompDoc( *pDestRedl ) : nullptr;
// now modify doc: append redline, undo (and count)
IDocumentRedlineAccess::AppendResult const result(
pDoc->getIDocumentRedlineAccess().AppendRedline(pDestRedl, true));
if( pUndo )
{
pDoc->GetIDocumentUndoRedo().AppendUndo( pUndo );
}
++nIns;
// if AppendRedline has deleted our redline, we may not keep a
// reference to it
if (IDocumentRedlineAccess::AppendResult::APPENDED != result)
pDestRedl = nullptr;
}
return nIns;
}
/// Merge two documents
long SwDoc::MergeDoc( const SwDoc& rDoc )
{
if( &rDoc == this )
return 0;
long nRet = 0;
GetIDocumentUndoRedo().StartUndo(SwUndoId::EMPTY, nullptr);
SwDoc& rSrcDoc = const_cast<SwDoc&>(rDoc);
bool bSrcModified = rSrcDoc.getIDocumentState().IsModified();
RedlineFlags eSrcRedlMode = rSrcDoc.getIDocumentRedlineAccess().GetRedlineFlags();
rSrcDoc.getIDocumentRedlineAccess().SetRedlineFlags( RedlineFlags::ShowDelete );
getIDocumentRedlineAccess().SetRedlineFlags( RedlineFlags::ShowDelete );
CompareMainText aD0(rSrcDoc, false);
CompareMainText aD1(*this, false);
aD1.CompareLines( aD0 );
if( !aD1.HasDiffs( aD0 ) )
{
// we want to get all redlines from the SourceDoc
        // look for all insert and delete redlines from the SourceDoc and determine their position in the DestDoc
std::list<SaveMergeRedline> vRedlines;
const SwRedlineTable& rSrcRedlTable = rSrcDoc.getIDocumentRedlineAccess().GetRedlineTable();
sal_uLong nEndOfExtra = rSrcDoc.GetNodes().GetEndOfExtras().GetIndex();
sal_uLong nMyEndOfExtra = GetNodes().GetEndOfExtras().GetIndex();
for(const SwRangeRedline* pRedl : rSrcRedlTable)
{
sal_uLong nNd = pRedl->GetPoint()->nNode.GetIndex();
RedlineType_t eType = pRedl->GetType();
if( nEndOfExtra < nNd &&
( nsRedlineType_t::REDLINE_INSERT == eType || nsRedlineType_t::REDLINE_DELETE == eType ))
{
const SwNode* pDstNd = GetNodes()[
nMyEndOfExtra + nNd - nEndOfExtra ];
// Found the position.
                // Then we also have to insert the redline at the corresponding line in the DestDoc.
vRedlines.emplace_back(*pDstNd, *pRedl);
}
}
if( !vRedlines.empty() )
{
// Carry over all into DestDoc
rSrcDoc.getIDocumentRedlineAccess().SetRedlineFlags(RedlineFlags::ShowInsert | RedlineFlags::ShowDelete);
getIDocumentRedlineAccess().SetRedlineFlags(
RedlineFlags::On |
RedlineFlags::ShowInsert |
RedlineFlags::ShowDelete);
SwPaM* pLastDestRedline(nullptr);
for(SaveMergeRedline& rRedline: vRedlines)
{
nRet += rRedline.InsertRedline(pLastDestRedline);
pLastDestRedline = rRedline.pDestRedl;
}
}
}
rSrcDoc.getIDocumentRedlineAccess().SetRedlineFlags( eSrcRedlMode );
if( !bSrcModified )
rSrcDoc.getIDocumentState().ResetModified();
getIDocumentRedlineAccess().SetRedlineFlags(RedlineFlags::ShowInsert | RedlineFlags::ShowDelete);
GetIDocumentUndoRedo().EndUndo(SwUndoId::EMPTY, nullptr);
return nRet;
}
LineArrayComparator::LineArrayComparator( const CompareData &rD1,
const CompareData &rD2, int nStt1,
int nEnd1, int nStt2, int nEnd2 )
: rData1( rD1 ), rData2( rD2 ), nFirst1( nStt1 ), nFirst2( nStt2 )
{
nLen1 = nEnd1 - nStt1;
nLen2 = nEnd2 - nStt2;
}
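// Heuristic paragraph comparison: two lines count as equal if their lengths
// differ by no more than a factor of three and they share at least one
// substring of nBorderLen characters, detected with a Rabin-Karp style
// rolling hash (hash collisions can cause rare false positives). With RSID
// comparison enabled the paragraph RSIDs have to match as well.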
bool LineArrayComparator::Compare( int nIdx1, int nIdx2 ) const
{
if( nIdx1 < 0 || nIdx2 < 0 || nIdx1 >= nLen1 || nIdx2 >= nLen2 )
{
OSL_ENSURE( false, "Index out of range!" );
return false;
}
const SwTextNode *pTextNd1 = rData1.GetLine( nFirst1 + nIdx1 )->GetNode().GetTextNode();
const SwTextNode *pTextNd2 = rData2.GetLine( nFirst2 + nIdx2 )->GetNode().GetTextNode();
if( !pTextNd1 || !pTextNd2
|| ( CmpOptions.bUseRsid && !pTextNd1->CompareParRsid( *pTextNd2 ) ) )
{
return false;
}
const sal_Int32 nPar1Len = pTextNd1->Len();
const sal_Int32 nPar2Len = pTextNd2->Len();
if( std::min( nPar1Len, nPar2Len ) * 3 < std::max( nPar1Len, nPar2Len ) )
{
return false;
}
sal_Int32 nBorderLen = ( nPar1Len + nPar2Len )/16;
if( nBorderLen < 3 )
{
nBorderLen = std::min<sal_Int32>( 3, std::min( nPar1Len, nPar2Len ) );
}
std::set<unsigned> aHashes;
unsigned nHash = 0;
unsigned nMul = 251;
unsigned nPow = 1;
sal_Int32 i;
for( i = 0; i < nBorderLen - 1; i++ )
{
nPow *= nMul;
}
for( i = 0; i < nBorderLen; i++ )
{
nHash = nHash*nMul + pTextNd1->GetText()[i];
}
aHashes.insert( nHash );
for( ; i < nPar1Len; i++ )
{
nHash = nHash - nPow*pTextNd1->GetText()[ i - nBorderLen ];
nHash = nHash*nMul + pTextNd1->GetText()[ i ];
aHashes.insert( nHash );
}
nHash = 0;
for( i = 0; i < nBorderLen; i++ )
{
nHash = nHash*nMul + pTextNd2->GetText()[ i ];
}
if( aHashes.find( nHash ) != aHashes.end() )
{
return true;
}
for( ; i < nPar2Len; i++ )
{
nHash = nHash - nPow*pTextNd2->GetText()[ i - nBorderLen ];
nHash = nHash*nMul + pTextNd2->GetText()[ i ];
if( aHashes.find( nHash ) != aHashes.end() )
{
return true;
}
}
return false;
}
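// Character-wise comparison of the two paragraphs: the characters have to
// match, and with RSID comparison enabled the character RSIDs as well.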
bool CharArrayComparator::Compare( int nIdx1, int nIdx2 ) const
{
if( nIdx1 < 0 || nIdx2 < 0 || nIdx1 >= GetLen1() || nIdx2 >= GetLen2() )
{
OSL_ENSURE( false, "Index out of range!" );
return false;
}
return ( !CmpOptions.bUseRsid
|| pTextNd1->CompareRsid( *pTextNd2, nIdx1 + 1, nIdx2 + 1 ) )
&& pTextNd1->GetText()[ nIdx1 ] == pTextNd2->GetText()[ nIdx2 ];
}
WordArrayComparator::WordArrayComparator( const SwTextNode *pNode1,
const SwTextNode *pNode2 )
: pTextNd1( pNode1 ), pTextNd2( pNode2 )
{
pPos1.reset( new int[ pTextNd1->GetText().getLength() + 1 ] );
pPos2.reset( new int[ pTextNd2->GetText().getLength() + 1 ] );
CalcPositions( pPos1.get(), pTextNd1, nCnt1 );
CalcPositions( pPos2.get(), pTextNd2, nCnt2 );
}
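// Word-wise comparison: the words starting at pPos1[nIdx1] and pPos2[nIdx2]
// are equal if they have the same length and match character by character
// (including the character RSIDs when RSID comparison is enabled).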
bool WordArrayComparator::Compare( int nIdx1, int nIdx2 ) const
{
int nLen = pPos1[ nIdx1 + 1 ] - pPos1[ nIdx1 ];
if( nLen != pPos2[ nIdx2 + 1 ] - pPos2[ nIdx2 ] )
{
return false;
}
for( int i = 0; i < nLen; i++)
{
if( pTextNd1->GetText()[ pPos1[ nIdx1 ] + i ]
!= pTextNd2->GetText()[ pPos2[ nIdx2 ] + i ]
|| ( CmpOptions.bUseRsid && !pTextNd1->CompareRsid( *pTextNd2,
pPos1[ nIdx1 ] + i, pPos2[ nIdx2 ] + i ) ) )
{
return false;
}
}
return true;
}
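// Expand a word-level LCS into a character-level common subsequence: the
// character positions of each matched word pair are appended to
// pSubseq1/pSubseq2, re-checking length and characters so that mismatching
// pairs are skipped; returns the length of the resulting subsequence.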
int WordArrayComparator::GetCharSequence( const int *pWordLcs1,
const int *pWordLcs2, int *pSubseq1, int *pSubseq2, int nLcsLen )
{
int nLen = 0;
for( int i = 0; i < nLcsLen; i++ )
{
        // Skip word pairs whose lengths differ (guards against an inexact word match)
if( pPos1[ pWordLcs1[i] + 1 ] - pPos1[ pWordLcs1[i] ]
!= pPos2[ pWordLcs2[i] + 1 ] - pPos2[ pWordLcs2[i] ] )
{
continue;
}
for( int j = 0; j < pPos1[pWordLcs1[i]+1] - pPos1[pWordLcs1[i]]; j++)
{
pSubseq1[ nLen ] = pPos1[ pWordLcs1[i] ] + j;
pSubseq2[ nLen ] = pPos2[ pWordLcs2[i] ] + j;
if( pTextNd1->GetText()[ pPos1[ pWordLcs1[i] ] + j ]
!= pTextNd2->GetText()[ pPos2[ pWordLcs2[i] ] + j ] )
{
nLen -= j;
break;
}
nLen++;
}
}
return nLen;
}
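// Fill pPos with the start positions of the "words" of pTextNd: a new word
// starts at the beginning of the text and wherever the previous or the
// current character is not alphanumeric, so runs of separators form words of
// their own. The last entry is the text length; nCnt receives the word count.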
void WordArrayComparator::CalcPositions( int *pPos, const SwTextNode *pTextNd,
int &nCnt )
{
nCnt = -1;
for (int i = 0; i <= pTextNd->GetText().getLength(); ++i)
{
if (i == 0 || i == pTextNd->GetText().getLength()
|| !rtl::isAsciiAlphanumeric( pTextNd->GetText()[ i - 1 ])
|| !rtl::isAsciiAlphanumeric( pTextNd->GetText()[ i ]))
{ // Begin new word
nCnt++;
pPos[ nCnt ] = i;
}
}
}
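// Classic dynamic-programming longest common subsequence over the ranges
// [nStt1, nEnd1) and [nStt2, nEnd2) (an end value of 0 means the whole
// sequence). The DP table is laid out in the preallocated pData buffer; the
// matched index pairs are written to pLcs1/pLcs2 if both are given, and the
// LCS length is returned.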
int CommonSubseq::FindLCS( int *pLcs1, int *pLcs2, int nStt1, int nEnd1,
int nStt2, int nEnd2 )
{
int nLen1 = nEnd1 ? nEnd1 - nStt1 : rCmp.GetLen1();
int nLen2 = nEnd2 ? nEnd2 - nStt2 : rCmp.GetLen2();
assert( nLen1 >= 0 );
assert( nLen2 >= 0 );
std::unique_ptr<int*[]> pLcs( new int*[ nLen1 + 1 ] );
pLcs[ 0 ] = pData.get();
for( int i = 1; i < nLen1 + 1; i++ )
pLcs[ i ] = pLcs[ i - 1 ] + nLen2 + 1;
for( int i = 0; i <= nLen1; i++ )
pLcs[i][0] = 0;
for( int j = 0; j <= nLen2; j++ )
pLcs[0][j] = 0;
// Find lcs
for( int i = 1; i <= nLen1; i++ )
{
for( int j = 1; j <= nLen2; j++ )
{
if( rCmp.Compare( nStt1 + i - 1, nStt2 + j - 1 ) )
pLcs[i][j] = pLcs[i - 1][j - 1] + 1;
else
pLcs[i][j] = std::max( pLcs[i][j - 1], pLcs[i - 1][j] );
}
}
int nLcsLen = pLcs[ nLen1 ][ nLen2 ];
// Recover the lcs in the two sequences
if( pLcs1 && pLcs2 )
{
int nIdx1 = nLen1;
int nIdx2 = nLen2;
int nIdx = nLcsLen - 1;
while( nIdx1 > 0 && nIdx2 > 0 )
{
if( pLcs[ nIdx1 ][ nIdx2 ] == pLcs[ nIdx1 - 1 ][ nIdx2 ] )
nIdx1--;
else if( pLcs[ nIdx1 ][ nIdx2 ] == pLcs[ nIdx1 ][ nIdx2 - 1 ] )
nIdx2--;
else
{
nIdx1--;
nIdx2--;
pLcs1[ nIdx ] = nIdx1 + nStt1;
pLcs2[ nIdx ] = nIdx2 + nStt2;
nIdx--;
}
}
}
return nLcsLen;
}
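// Filter an LCS: runs of at most nPieceLen consecutive matches are dropped
// as noise, except for runs starting at the very beginning or reaching the
// very end of both paragraphs; the surviving pairs are compacted to the
// front of pLcs1/pLcs2 and the new length is returned.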
int CommonSubseq::IgnoreIsolatedPieces( int *pLcs1, int *pLcs2, int nLen1,
int nLen2, int nLcsLen, int nPieceLen )
{
if( !nLcsLen )
{
return 0;
}
int nNext = 0;
// Don't ignore text at the beginning of the paragraphs
if( pLcs1[ 0 ] == 0 && pLcs2[ 0 ] == 0 )
{
while( nNext < nLcsLen - 1 && pLcs1[ nNext ] + 1 == pLcs1[ nNext + 1 ]
&& pLcs2[ nNext ] + 1 == pLcs2[ nNext + 1 ] )
{
nNext++;
}
nNext++;
}
int nCnt = 1;
for( int i = nNext; i < nLcsLen; i++ )
{
if( i != nLcsLen - 1 && pLcs1[ i ] + 1 == pLcs1[ i + 1 ]
&& pLcs2[ i ] + 1 == pLcs2[ i + 1 ] )
{
nCnt++;
}
else
{
if( nCnt > nPieceLen
// Don't ignore text at the end of the paragraphs
|| ( i == nLcsLen - 1
&& pLcs1[i] == nLen1 - 1 && pLcs2[i] == nLen2 - 1 ))
{
for( int j = i + 1 - nCnt; j <= i; j++ )
{
pLcs2[ nNext ] = pLcs2[ j ];
pLcs1[ nNext ] = pLcs1[ j ];
nNext++;
}
}
nCnt = 1;
}
}
return nNext;
}
LgstCommonSubseq::LgstCommonSubseq( ArrayComparator &rComparator )
: CommonSubseq( rComparator, CUTOFF )
{
pBuff1.reset( new int[ rComparator.GetLen2() + 1 ] );
pBuff2.reset( new int[ rComparator.GetLen2() + 1 ] );
pL1.reset( new int[ rComparator.GetLen2() + 1 ] );
pL2.reset( new int[ rComparator.GetLen2() + 1 ] );
}
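// One Hirschberg pass: compute, for every prefix of the second range, the
// length of the LCS with the whole first range, using only the two rolling
// rows pBuff1/pBuff2; the final row is copied into pL.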
void LgstCommonSubseq::FindL( int *pL, int nStt1, int nEnd1,
int nStt2, int nEnd2 )
{
int nLen1 = nEnd1 ? nEnd1 - nStt1 : rCmp.GetLen1();
int nLen2 = nEnd2 ? nEnd2 - nStt2 : rCmp.GetLen2();
int *currL = pBuff1.get();
int *prevL = pBuff2.get();
// Avoid memory corruption
if( nLen2 > rCmp.GetLen2() )
{
assert( false );
return;
}
memset( pBuff1.get(), 0, sizeof( *pBuff1.get() ) * ( nLen2 + 1 ) );
memset( pBuff2.get(), 0, sizeof( *pBuff2.get() ) * ( nLen2 + 1 ) );
// Find lcs
for( int i = 1; i <= nLen1; i++ )
{
for( int j = 1; j <= nLen2; j++ )
{
if( rCmp.Compare( nStt1 + i - 1, nStt2 + j - 1 ) )
currL[j] = prevL[j - 1] + 1;
else
currL[j] = std::max( currL[j - 1], prevL[j] );
}
int *tmp = currL;
currL = prevL;
prevL = tmp;
}
memcpy( pL, prevL, ( nLen2 + 1 ) * sizeof( *prevL ) );
}
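// Hirschberg's linear-space LCS: split the first range in the middle, use
// FindL on both halves to find the best split point of the second range,
// then recurse on the two sub-problems; small sub-problems fall back to the
// quadratic-space FindLCS. Matched index pairs are appended to pLcs1/pLcs2
// and the LCS length is returned.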
int LgstCommonSubseq::HirschbergLCS( int *pLcs1, int *pLcs2, int nStt1,
int nEnd1, int nStt2, int nEnd2 )
{
    int nLen1 = nEnd1 - nStt1;
    int nLen2 = nEnd2 - nStt2;
if( ( nLen1 + 1 ) * ( nLen2 + 1 ) <= CUTOFF )
{
if( !nLen1 || !nLen2 )
{
return 0;
}
return FindLCS(pLcs1, pLcs2, nStt1, nEnd1, nStt2, nEnd2);
}
int nMid = nLen1/2;
FindL( pL1.get(), nStt1, nStt1 + nMid, nStt2, nEnd2 );
FindL( pL2.get(), nStt1 + nMid, nEnd1, nStt2, nEnd2 );
int nMaxPos = 0;
    int nMaxVal = -1;
    for( int i = 0; i <= nLen2; i++ )
{
if( pL1[i] + ( pL2[nLen2] - pL2[i] ) > nMaxVal )
{
nMaxPos = i;
nMaxVal = pL1[i]+( pL2[nLen2] - pL2[i] );
}
}
int nRet = HirschbergLCS( pLcs1, pLcs2, nStt1, nStt1 + nMid,
nStt2, nStt2 + nMaxPos );
nRet += HirschbergLCS( pLcs1 + nRet, pLcs2 + nRet, nStt1 + nMid, nEnd1,
nStt2 + nMaxPos, nEnd2 );
return nRet;
}
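// Longest common subsequence of the two compared sequences: identical runs
// at the beginning and the end are matched directly, the remainder is handed
// to HirschbergLCS; returns the total number of matched entries.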
int LgstCommonSubseq::Find( int *pSubseq1, int *pSubseq2 )
{
int nStt = 0;
int nCutEnd = 0;
int nEnd1 = rCmp.GetLen1();
int nEnd2 = rCmp.GetLen2();
// Check for corresponding lines in the beginning of the sequences
while( nStt < nEnd1 && nStt < nEnd2 && rCmp.Compare( nStt, nStt ) )
{
pSubseq1[ nStt ] = nStt;
pSubseq2[ nStt ] = nStt;
nStt++;
}
pSubseq1 += nStt;
pSubseq2 += nStt;
// Check for corresponding lines in the end of the sequences
while( nStt < nEnd1 && nStt < nEnd2
&& rCmp.Compare( nEnd1 - 1, nEnd2 - 1 ) )
{
nCutEnd++;
nEnd1--;
nEnd2--;
}
int nLen = HirschbergLCS( pSubseq1, pSubseq2, nStt, nEnd1, nStt, nEnd2 );
for( int i = 0; i < nCutEnd; i++ )
{
pSubseq1[ nLen + i ] = nEnd1 + i;
pSubseq2[ nLen + i ] = nEnd2 + i;
}
return nStt + nLen + nCutEnd;
}
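// Fast, heuristic common subsequence (not necessarily the longest): match
// identical runs at both ends, fall back to the exact LCS for small ranges,
// and otherwise search outwards from the middle for a single point of
// correspondence and recurse on the pieces to its left and right.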
int FastCommonSubseq::FindFastCS( int *pSeq1, int *pSeq2, int nStt1,
int nEnd1, int nStt2, int nEnd2 )
{
int nCutBeg = 0;
int nCutEnd = 0;
// Check for corresponding lines in the beginning of the sequences
while( nStt1 < nEnd1 && nStt2 < nEnd2 && rCmp.Compare( nStt1, nStt2 ) )
{
pSeq1[ nCutBeg ] = nStt1++;
pSeq2[ nCutBeg ] = nStt2++;
nCutBeg++;
}
pSeq1 += nCutBeg;
pSeq2 += nCutBeg;
// Check for corresponding lines in the end of the sequences
while( nStt1 < nEnd1 && nStt2 < nEnd2
&& rCmp.Compare( nEnd1 - 1, nEnd2 - 1 ) )
{
nCutEnd++;
nEnd1--;
nEnd2--;
}
int nLen1 = nEnd1 - nStt1;
int nLen2 = nEnd2 - nStt2;
// Return if a sequence is empty
if( nLen1 <= 0 || nLen2 <= 0 )
{
for( int i = 0; i < nCutEnd; i++ )
{
pSeq1[ i ] = nEnd1 + i;
pSeq2[ i ] = nEnd2 + i;
}
return nCutBeg + nCutEnd;
}
// Cut to LCS for small values
if( nLen1 < 3 || nLen2 < 3 || ( nLen1 + 1 ) * ( nLen2 + 1 ) <= CUTOFF )
{
int nLcsLen = FindLCS( pSeq1, pSeq2, nStt1, nEnd1, nStt2, nEnd2);
for( int i = 0; i < nCutEnd; i++ )
{
pSeq1[ nLcsLen + i ] = nEnd1 + i;
pSeq2[ nLcsLen + i ] = nEnd2 + i;
}
return nCutBeg + nLcsLen + nCutEnd;
}
int nMid1 = nLen1/2;
int nMid2 = nLen2/2;
int nRad;
int nPos1 = -1, nPos2 = -1;
// Find a point of correspondence in the middle of the sequences
for( nRad = 0; nRad*nRad < std::min( nMid1, nMid2 ); nRad++ )
{
// Search to the left and to the right of the middle of the first sequence
for( int i = nMid1 - nRad; i <= nMid1 + nRad; i++ )
{
if( rCmp.Compare( nStt1 + i, nStt2 + nMid2 - nRad ) )
{
nPos1 = nStt1 + i;
nPos2 = nStt2 + nMid2 - nRad;
break;
}
            if( rCmp.Compare( nStt1 + i, nStt2 + nMid2 + nRad ) )
            {
                nPos1 = nStt1 + i;
                nPos2 = nStt2 + nMid2 + nRad;
                break;
            }
}
// Search to the left and to the right of the middle of the second sequence
for( int i = nMid2 - nRad; i <= nMid2 + nRad; i++ )
{
            if( rCmp.Compare( nStt1 + nMid1 - nRad, nStt2 + i ) )
            {
                nPos2 = nStt2 + i;
                nPos1 = nStt1 + nMid1 - nRad;
                break;
            }
            if( rCmp.Compare( nStt1 + nMid1 + nRad, nStt2 + i ) )
            {
                nPos2 = nStt2 + i;
                nPos1 = nStt1 + nMid1 + nRad;
                break;
            }
}
}
// return if no point of correspondence found
if( nPos1 == -1 )
{
for( int i = 0; i < nCutEnd; i++ )
{
pSeq1[ i ] = nEnd1 + i;
pSeq2[ i ] = nEnd2 + i;
}
return nCutBeg + nCutEnd;
}
// Run the same on the sequences to the left of the correspondence point
int nLen = FindFastCS( pSeq1, pSeq2, nStt1, nPos1, nStt2, nPos2 );
pSeq1[ nLen ] = nPos1;
pSeq2[ nLen ] = nPos2;
// Run the same on the sequences to the right of the correspondence point
nLen += FindFastCS( pSeq1 + nLen + 1, pSeq2 + nLen + 1,
nPos1 + 1, nEnd1, nPos2 + 1, nEnd2 ) + 1;
for( int i = 0; i < nCutEnd; i++ )
{
pSeq1[ nLen + i ] = nEnd1 + i;
pSeq2[ nLen + i ] = nEnd2 + i;
}
return nLen + nCutBeg + nCutEnd;
}
/* vim:set shiftwidth=4 softtabstop=4 expandtab: */