author     Andrea Gelmini <andrea.gelmini@gelma.net>    2014-11-10 15:05:25 +0100
committer  Noel Grandin <noelgrandin@gmail.com>         2014-11-12 11:04:11 +0000
commit     da40cac540e7d735edbe9069b3c8ec6af4530208 (patch)
tree       f2abda7281129e13f588c77b18780a7090c8405f /filter
parent     bb437029c1e5331bcc3f8fb2fc87837142a52f33 (diff)
Fix common typos. No automatic tools. Handmade…
Change-Id: I1ab4e23b0539f8d39974787f226e57a21f96e959
Reviewed-on: https://gerrit.libreoffice.org/12164
Reviewed-by: Noel Grandin <noelgrandin@gmail.com>
Tested-by: Noel Grandin <noelgrandin@gmail.com>
Diffstat (limited to 'filter')
-rw-r--r--  filter/source/config/cache/basecontainer.cxx                      4
-rw-r--r--  filter/source/config/cache/filtercache.cxx                       14
-rw-r--r--  filter/source/config/cache/filtercache.hxx                        2
-rw-r--r--  filter/source/config/cache/typedetection.cxx                      2
-rw-r--r--  filter/source/config/cache/typedetection.hxx                      2
-rw-r--r--  filter/source/graphicfilter/epict/epict.cxx                       2
-rw-r--r--  filter/source/graphicfilter/idxf/dxfvec.hxx                       2
-rw-r--r--  filter/source/graphicfilter/ios2met/ios2met.cxx                   4
-rw-r--r--  filter/source/graphicfilter/itiff/ccidecom.cxx                    2
-rw-r--r--  filter/source/msfilter/msdffimp.cxx                               8
-rw-r--r--  filter/source/msfilter/svdfppt.cxx                                4
-rw-r--r--  filter/source/svg/svgexport.cxx                                   4
-rw-r--r--  filter/source/xslt/odf2xhtml/export/common/table_of_content.xsl   2
-rw-r--r--  filter/source/xslt/odf2xhtml/export/xhtml/body.xsl                2
-rw-r--r--  filter/source/xsltdialog/xmlfiltersettingsdialog.cxx              2
-rw-r--r--  filter/source/xsltfilter/XSLTFilter.cxx                           4
16 files changed, 30 insertions, 30 deletions
diff --git a/filter/source/config/cache/basecontainer.cxx b/filter/source/config/cache/basecontainer.cxx
index cbf6e4685a67..e34b71d03522 100644
--- a/filter/source/config/cache/basecontainer.cxx
+++ b/filter/source/config/cache/basecontainer.cxx
@@ -128,7 +128,7 @@ void BaseContainer::impl_initFlushMode()
if (!m_pFlushCache)
m_pFlushCache = m_rCache->clone();
if (!m_pFlushCache)
- throw css::uno::RuntimeException( "Cant create write copy of internal used cache on demand.",
+ throw css::uno::RuntimeException( "Can not create write copy of internal used cache on demand.",
static_cast< OWeakObject* >(this));
// <- SAFE
}
@@ -484,7 +484,7 @@ void SAL_CALL BaseContainer::flush()
if (!m_pFlushCache)
throw css::lang::WrappedTargetRuntimeException(
- "Cant guarantee cache consistency. Special flush container does not exists!",
+ "Can not guarantee cache consistency. Special flush container does not exists!",
static_cast< OWeakObject* >(this),
css::uno::Any());
diff --git a/filter/source/config/cache/filtercache.cxx b/filter/source/config/cache/filtercache.cxx
index 2fc5cfb4935f..a1c37c5bc996 100644
--- a/filter/source/config/cache/filtercache.cxx
+++ b/filter/source/config/cache/filtercache.cxx
@@ -661,7 +661,7 @@ void FilterCache::impl_flushByList(const css::uno::Reference< css::container::XN
// special case. no exception - but not a valid item => set must be finalized or mandatory!
// Reject flush operation by throwing an exception. At least one item couldnt be flushed.
if (!xItem.is())
- throw css::uno::Exception("Cant add item. Set is finalized or mandatory!",
+ throw css::uno::Exception("Can not add item. Set is finalized or mandatory!",
css::uno::Reference< css::uno::XInterface >());
CacheItemList::const_iterator pItem = rCache.find(sItem);
@@ -678,7 +678,7 @@ void FilterCache::impl_flushByList(const css::uno::Reference< css::container::XN
// special case. no exception - but not a valid item => it must be finalized or mandatory!
// Reject flush operation by throwing an exception. At least one item couldnt be flushed.
if (!xItem.is())
- throw css::uno::Exception("Cant change item. Its finalized or mandatory!",
+ throw css::uno::Exception("Can not change item. Its finalized or mandatory!",
css::uno::Reference< css::uno::XInterface >());
CacheItemList::const_iterator pItem = rCache.find(sItem);
@@ -847,7 +847,7 @@ css::uno::Reference< css::uno::XInterface > FilterCache::impl_openConfig(EConfig
}
break;
- default : throw css::uno::Exception("These configuration node isnt supported here for open!", 0);
+ default : throw css::uno::Exception("These configuration node is not supported here for open!", 0);
}
{
@@ -1138,7 +1138,7 @@ void FilterCache::impl_validateAndOptimize()
(!bReferencedByHandler)
)
{
- sLog.append("Warning\t:\t" "The type \"" + sType + "\" isnt used by any filter, loader or content handler.\n");
+ sLog.append("Warning\t:\t" "The type \"" + sType + "\" is not used by any filter, loader or content handler.\n");
++nWarnings;
}
}
@@ -2124,7 +2124,7 @@ void FilterCache::impl_interpretDataVal4Filter(const OUString& sValue,
sal_Int32 nOrder = sValue.toInt32();
if (nOrder > 0)
{
- SAL_WARN( "filter.config", "FilterCache::impl_interpretDataVal4Filter()\nCant move Order value from filter to type on demand!");
+ SAL_WARN( "filter.config", "FilterCache::impl_interpretDataVal4Filter()\nCan not move Order value from filter to type on demand!");
_FILTER_CONFIG_LOG_2_("impl_interpretDataVal4Filter(%d, \"%s\") ... OK", (int)eType, _FILTER_CONFIG_TO_ASCII_(rItem).getStr())
}
}
@@ -2216,7 +2216,7 @@ CacheItem FilterCache::impl_readOldItem(const css::uno::Reference< css::containe
css::uno::Reference< css::container::XNameAccess > xItem;
xSet->getByName(sItem) >>= xItem;
if (!xItem.is())
- throw css::uno::Exception("Cant read old item.", css::uno::Reference< css::uno::XInterface >());
+ throw css::uno::Exception("Can not read old item.", css::uno::Reference< css::uno::XInterface >());
CacheItem aItem;
aItem[PROPNAME_NAME] <<= sItem;
@@ -2237,7 +2237,7 @@ CacheItem FilterCache::impl_readOldItem(const css::uno::Reference< css::containe
(lData.size()<1 )
)
{
- throw css::uno::Exception( "Cant read old item property DATA.", css::uno::Reference< css::uno::XInterface >());
+ throw css::uno::Exception( "Can not read old item property DATA.", css::uno::Reference< css::uno::XInterface >());
}
sal_Int32 nProp = 0;
diff --git a/filter/source/config/cache/filtercache.hxx b/filter/source/config/cache/filtercache.hxx
index c992424b591f..883ced96e553 100644
--- a/filter/source/config/cache/filtercache.hxx
+++ b/filter/source/config/cache/filtercache.hxx
@@ -791,7 +791,7 @@ class FilterCache : public BaseLock
@param eOption
regulate, which properties of the requested item should be read.
- See defintion of EReadOption for further information.
+ See definition of EReadOption for further information.
@throw [css::uno::Exception]
if an unrecoverable error occurs inside this operation.
diff --git a/filter/source/config/cache/typedetection.cxx b/filter/source/config/cache/typedetection.cxx
index 370f7cf980d3..4919fbaa1372 100644
--- a/filter/source/config/cache/typedetection.cxx
+++ b/filter/source/config/cache/typedetection.cxx
@@ -1089,7 +1089,7 @@ OUString TypeDetection::impl_askUserForTypeAndFilterIfAllowed(utl::MediaDescript
// Dont distrub the user for "non existing files - means empty URLs" or
// if we was forced to detect a stream.
// Reason behind: We must be sure to ask user for "unknown contents" only ...
- // and not for "missing files". Especialy if detection is done by a stream only
+ // and not for "missing files". Especially if detection is done by a stream only
// we can't check if the stream points to an "existing content"!
if (
(sURL.isEmpty() ) || // "non existing file" ?
diff --git a/filter/source/config/cache/typedetection.hxx b/filter/source/config/cache/typedetection.hxx
index 82f383aecdd8..c5c99d6ca16c 100644
--- a/filter/source/config/cache/typedetection.hxx
+++ b/filter/source/config/cache/typedetection.hxx
@@ -87,7 +87,7 @@ private:
@descr It steps over all flat detected types (given by the parameter lFlatTypes),
try it and search for most suitable one.
The specified MediaDescriptor will be patched, so it contain
- the right values everytime. Using of any deep detection service
+ the right values every time. Using of any deep detection service
can be enabled/disabled. And last but not least: If the results
wont be really clear (because a flat detected type has no deep
detection service), a "sugested" type name will be returned as "rLastChance".
diff --git a/filter/source/graphicfilter/epict/epict.cxx b/filter/source/graphicfilter/epict/epict.cxx
index 69407f6b4a82..993e1d36d67a 100644
--- a/filter/source/graphicfilter/epict/epict.cxx
+++ b/filter/source/graphicfilter/epict/epict.cxx
@@ -240,7 +240,7 @@ Polygon PictWriter::PolyPolygonToPolygon(const tools::PolyPolygon & rPolyPoly)
// At first we look for a point in aPoly1 (referenced by nBestIdx1) and a
// point in aPoly2 (referenced by nBestid2), which are as close together as
// possible. Because this is following square complexity and therefore some
- // pictures would need infinite time to export, we limit the the number of test
+ // pictures would need infinite time to export, we limit the number of test
// by the number of 1000 and cancel the search if necessary preliminarily.
// The result of this will not be wrong but rather not so beautiful.
nCountdownTests=1000;
diff --git a/filter/source/graphicfilter/idxf/dxfvec.hxx b/filter/source/graphicfilter/idxf/dxfvec.hxx
index ca84a1a9ba77..b75c25479c03 100644
--- a/filter/source/graphicfilter/idxf/dxfvec.hxx
+++ b/filter/source/graphicfilter/idxf/dxfvec.hxx
@@ -143,7 +143,7 @@ public:
// Transformation of a relative vector (so no translation)
bool TransCircleToEllipse(double fRadius, double & rEx, double & rEy) const;
- // Attemp to transform a circle (in xy plane) so that it results
+ // Attempt to transform a circle (in xy plane) so that it results
// in an aligned ellipse. If the does not work because a ellipse of
// arbitrary position would be created, sal_False is returned.
// (The center point will not be transformed, use Transform(..))
diff --git a/filter/source/graphicfilter/ios2met/ios2met.cxx b/filter/source/graphicfilter/ios2met/ios2met.cxx
index 80ea97be4496..2116ac10ca00 100644
--- a/filter/source/graphicfilter/ios2met/ios2met.cxx
+++ b/filter/source/graphicfilter/ios2met/ios2met.cxx
@@ -1078,7 +1078,7 @@ void OS2METReader::ReadArc(bool bGivenPos)
// now we still need the radius in x and y direction:
r=sqrt(q*q*(x1-cx)*(x1-cx)+p*p*(y1-cy)*(y1-cy));
rx=r/q; ry=r/p;
- // We now have to find out how the the starting and the end point
+ // We now have to find out how the starting and the end point
// have to be chosen so that point no. 2 lies inside the drawn arc:
w1=fmod((atan2(x1-cx,y1-cy)-atan2(x2-cx,y2-cy)),6.28318530718); if (w1<0) w1+=6.28318530718;
w3=fmod((atan2(x3-cx,y3-cy)-atan2(x2-cx,y2-cy)),6.28318530718); if (w3<0) w3+=6.28318530718;
@@ -2201,7 +2201,7 @@ void OS2METReader::ReadImageData(sal_uInt16 nDataID, sal_uInt16 nDataLen)
break;
case 0xfe92: { // Image Data
- // At the latest we now need the temprary BMP file and
+ // At the latest we now need the temporary BMP file and
// inside this file we need the header and the palette.
if (p->pBMP==NULL) {
p->pBMP=new SvMemoryStream();
diff --git a/filter/source/graphicfilter/itiff/ccidecom.cxx b/filter/source/graphicfilter/itiff/ccidecom.cxx
index f0b635393e8c..6cd2c4141533 100644
--- a/filter/source/graphicfilter/itiff/ccidecom.cxx
+++ b/filter/source/graphicfilter/itiff/ccidecom.cxx
@@ -311,7 +311,7 @@ const CCIHuffmanTableEntry CCIUncompTable[CCIUncompTableSize]={
// To make sure that the Huffman tables do not contain errors they were entered
// from two different sources (Phew) and compared.
// Since an error could creep in to the source code while maintaining it
-// (e.g. an accidentaly key press in the editor) the tables are listed twice
+// (e.g. an accidental key press in the editor) the tables are listed twice
// and are compared during runtime. (If the comparison fails CCIDcompressor
// throws an error) The whole thing may appear insane, but an error within the
// tables would otherwise be really hard to discover and it's very unlikely that
diff --git a/filter/source/msfilter/msdffimp.cxx b/filter/source/msfilter/msdffimp.cxx
index 7eecc3f1c2c1..466faf62ae22 100644
--- a/filter/source/msfilter/msdffimp.cxx
+++ b/filter/source/msfilter/msdffimp.cxx
@@ -3260,7 +3260,7 @@ bool SvxMSDffManager::SeekToRec( SvStream& rSt, sal_uInt16 nRecId, sal_uLong nMa
bool SvxMSDffManager::SeekToRec2( sal_uInt16 nRecId1, sal_uInt16 nRecId2, sal_uLong nMaxFilePos, DffRecordHeader* pRecHd, sal_uLong nSkipCount ) const
{
bool bRet = false;
- sal_uLong nFPosMerk = rStCtrl.Tell(); // remember FilePos for conditionally later restauration
+ sal_uLong nFPosMerk = rStCtrl.Tell(); // remember FilePos for conditionally later restoration
DffRecordHeader aHd;
do
{
@@ -5951,7 +5951,7 @@ bool SvxMSDffManager::GetShapeGroupContainerData( SvStream& rSt,
nReadSpGrCont += nLength;
}
while( nReadSpGrCont < nLenShapeGroupCont );
- // possition the steam correctly
+ // position the stream correctly
rSt.Seek( nStartShapeGroupCont + nLenShapeGroupCont );
return true;
}
@@ -5970,7 +5970,7 @@ bool SvxMSDffManager::GetShapeContainerData( SvStream& rSt,
sal_uLong nLenShapePropTbl = 0;
sal_uLong nReadSpCont = 0;
- // Store file offset of the shape containers or respectivly the group(!).
+ // Store file offset of the shape containers or respectively the group(!).
sal_uLong nStartOffs = (ULONG_MAX > nPosGroup) ?
nPosGroup : nStartShapeCont - DFF_COMMON_RECORD_HEADER_SIZE;
SvxMSDffShapeInfo aInfo( nStartOffs );
@@ -6983,7 +6983,7 @@ com::sun::star::uno::Reference < com::sun::star::embed::XEmbeddedObject > SvxMS
// needs the correct visarea
// If pName is set this is an own embedded object, it should have the correct size internally
- // TODO/LATER: it might make sence in future to set the size stored in internal object
+ // TODO/LATER: it might make sense in future to set the size stored in internal object
if( !pName && ( sStarName == "swriter" || sStarName == "scalc" ) )
{
MapMode aMapMode( VCLUnoHelper::UnoEmbed2VCLMapUnit( xObj->getMapUnit( nViewAspect ) ) );
diff --git a/filter/source/msfilter/svdfppt.cxx b/filter/source/msfilter/svdfppt.cxx
index 944514f192eb..4cc9956a3c4a 100644
--- a/filter/source/msfilter/svdfppt.cxx
+++ b/filter/source/msfilter/svdfppt.cxx
@@ -1207,7 +1207,7 @@ SdrObject* SdrEscherImport::ProcessObj( SvStream& rSt, DffObjData& rObjData, voi
}
}
}
- if ( pRet ) // sj: #i38501#, and and taking care of connections to group objects
+ if ( pRet ) // sj: #i38501#, and taking care of connections to group objects
{
if ( rObjData.nSpFlags & SP_FBACKGROUND )
{
@@ -6379,7 +6379,7 @@ PPTTextObj::PPTTextObj( SvStream& rIn, SdrPowerPointImport& rSdrPowerPointImport
{
DffRecordHeader aClientTextBoxHd( *rSdrPowerPointImport.maShapeRecords.Current() );
sal_uInt32 nTextRulerAtomOfs = 0; // case of zero -> this atom may be found in aClientDataContainerHd;
- // case of -1 -> ther is no atom of this kind
+ // case of -1 -> there is no atom of this kind
// else -> this is the fileofs where we can get it
// checkout if this is a referenced
diff --git a/filter/source/svg/svgexport.cxx b/filter/source/svg/svgexport.cxx
index a71e4700350b..ca9d4953d35a 100644
--- a/filter/source/svg/svgexport.cxx
+++ b/filter/source/svg/svgexport.cxx
@@ -1620,7 +1620,7 @@ bool SVGFilter::implExportPage( const OUString & sPageId,
} // insert the </g> closing tag related to the Background
}
- // In case we are dealing with a master page we need to to group all its shapes
+ // In case we are dealing with a master page we need to group all its shapes
// into a group element, this group will make up the so named "background objects"
if( bMaster )
{
@@ -1887,7 +1887,7 @@ bool SVGFilter::implCreateObjects()
#ifdef ENABLE_EXPORT_CUSTOM_SLIDE_BACKGROUND
// TODO complete the implementation for exporting custom background for each slide
// implementation status:
- // - hatch stroke color is set to 'none' so the hatch is not visible, why ?
+ // - hatch stroke color is set to 'none' so the hatch is not visible, why?
// - gradient look is not really awesome, too few colors are used;
// - stretched bitmap, gradient and hatch are not exported only once
// and then referenced in case more than one slide uses them.
diff --git a/filter/source/xslt/odf2xhtml/export/common/table_of_content.xsl b/filter/source/xslt/odf2xhtml/export/common/table_of_content.xsl
index d299c912955f..a9db7f922af7 100644
--- a/filter/source/xslt/odf2xhtml/export/common/table_of_content.xsl
+++ b/filter/source/xslt/odf2xhtml/export/common/table_of_content.xsl
@@ -282,7 +282,7 @@
Scenarios tabstops
1) style:type of style:tab-stop is 'right' and earlier tabStop is not right
- -> Earlier text-nodes and following text-nodes, will be put into an inner table, with two TD first aligned left, with proceding textnodes, the latter aligned right.
+ -> Earlier text-nodes and following text-nodes, will be put into an inner table, with two TD first aligned left, with preceding textnodes, the latter aligned right.
2) style:type is 'right' and earlier tabStop is right
-> following text-nodes, will be put into a right aligned TD
diff --git a/filter/source/xslt/odf2xhtml/export/xhtml/body.xsl b/filter/source/xslt/odf2xhtml/export/xhtml/body.xsl
index 3ecde5e377c0..32f0ffe7666f 100644
--- a/filter/source/xslt/odf2xhtml/export/xhtml/body.xsl
+++ b/filter/source/xslt/odf2xhtml/export/xhtml/body.xsl
@@ -669,7 +669,7 @@
<xsl:element name="{$elementName}">
<xsl:choose>
<!-- in ODF borders of paragraphs will be merged by default. Merging means the adjactend paragraphs are building a unit,
- where only the first and the last will have have a border to the surrounding (top / bottom border)
+ where only the first and the last will have a border to the surrounding (top / bottom border)
<xsl:variable name="precedingParagraphStyle" select="preceding-sibling::*[1][name() = 'text:p']/@text:style-name"/>
<xsl:variable name="followingParagraphStyle" select="following-sibling::*[1][name() = 'text:p']/@text:style-name"/>
-->
diff --git a/filter/source/xsltdialog/xmlfiltersettingsdialog.cxx b/filter/source/xsltdialog/xmlfiltersettingsdialog.cxx
index e3deef39b010..2153d115a7a4 100644
--- a/filter/source/xsltdialog/xmlfiltersettingsdialog.cxx
+++ b/filter/source/xsltdialog/xmlfiltersettingsdialog.cxx
@@ -1000,7 +1000,7 @@ void XMLFilterSettingsDialog::onClose()
bool XMLFilterSettingsDialog::Notify( NotifyEvent& rNEvt )
{
- // Because of of tab control first call the base class.
+ // Because of tab control first call the base class.
bool nRet = ModelessDialog::Notify( rNEvt );
if ( !nRet )
{
diff --git a/filter/source/xsltfilter/XSLTFilter.cxx b/filter/source/xsltfilter/XSLTFilter.cxx
index f2fe4fa7f6e8..7d3402b1c42f 100644
--- a/filter/source/xsltfilter/XSLTFilter.cxx
+++ b/filter/source/xsltfilter/XSLTFilter.cxx
@@ -432,7 +432,7 @@ namespace XSLT
OUString udStyleSheet = rel2abs(msUserData[5]);
// read source data
- // we are especialy interested in the output stream
+ // we are especially interested in the output stream
// since that is where our xml-writer will push the data
// from it's data-source interface
OUString aName, sURL;
@@ -519,7 +519,7 @@ namespace XSLT
}
}
- // for the DocumentHandler implementation, we just proxy the the
+ // for the DocumentHandler implementation, we just proxy the
// events to the XML writer that we created upon the output stream
// that was provided by the XMLFilterAdapter
void