summaryrefslogtreecommitdiff
path: root/bin
diff options
context:
space:
mode:
Diffstat (limited to 'bin')
-rw-r--r--bin/.gitignore1
-rw-r--r--bin/Makefile.am47
-rw-r--r--bin/README31
-rwxr-xr-xbin/amt-license-patch66
-rwxr-xr-xbin/analyse-commits23
-rwxr-xr-xbin/analyse-link66
-rwxr-xr-xbin/applyflags55
-rw-r--r--bin/bash-completion.in90
-rwxr-xr-xbin/build-galleries175
-rwxr-xr-xbin/build-ooo169
-rwxr-xr-xbin/build-tools27
-rwxr-xr-xbin/catch-endif45
-rwxr-xr-xbin/change-guids.sh468
-rwxr-xr-xbin/check-artwork.sh47
-rwxr-xr-xbin/create-gitignores.sh125
-rwxr-xr-xbin/create-ids8
-rwxr-xr-xbin/create-rpms34
-rwxr-xr-xbin/create-tests-lst.sh16
-rwxr-xr-xbin/cvs-tags-normalize10
-rwxr-xr-xbin/cvsclean84
-rwxr-xr-xbin/cws-commit-patch248
-rwxr-xr-xbin/cws-commit-patch-old345
-rwxr-xr-xbin/cws-cvsrebase60
-rwxr-xr-xbin/cws-extract152
-rwxr-xr-xbin/cws-extract-cvs124
-rwxr-xr-xbin/cws-extract-svn37
-rwxr-xr-xbin/cws-removal-fixup43
-rwxr-xr-xbin/defuzzpatch67
-rwxr-xr-xbin/deps.sh31
-rwxr-xr-xbin/diaconvert.pl364
-rw-r--r--bin/doxygen.cfg1314
-rwxr-xr-xbin/elfsum67
-rw-r--r--bin/exceptions/gnome-exclusions78
-rw-r--r--bin/exceptions/ooo-exclusions25
-rw-r--r--bin/exceptions/pam-exclusions8
-rwxr-xr-xbin/extract-gsi59
-rwxr-xr-xbin/extract-hunks115
-rwxr-xr-xbin/extract-new-strings48
-rwxr-xr-xbin/find-duplicates.pl97
-rwxr-xr-xbin/find_dup33
-rwxr-xr-xbin/finterpose279
-rwxr-xr-xbin/fix-deps18
-rwxr-xr-xbin/fixguard.py29
-rwxr-xr-xbin/g19
-rwxr-xr-xbin/gen110
-rwxr-xr-xbin/gen-xlsx-copy-of-xls.sh125
-rwxr-xr-xbin/genFromScratch194
-rwxr-xr-xbin/generate-bash-completion234
-rwxr-xr-xbin/genrelocs24
-rwxr-xr-xbin/git-new-workdir82
-rwxr-xr-xbin/gob1070
-rwxr-xr-xbin/gob-bump190
-rwxr-xr-xbin/gob-collapse-commits36
-rwxr-xr-xbin/gob-merge570
-rwxr-xr-xbin/gob-update25
-rw-r--r--bin/id-lang.map105
-rwxr-xr-xbin/inactivity100
-rwxr-xr-xbin/install-artwork70
-rwxr-xr-xbin/install-dictionaries204
-rwxr-xr-xbin/install-maps65
-rwxr-xr-xbin/install-mono105
-rwxr-xr-xbin/install-sdk101
-rwxr-xr-xbin/java-set-classpath.in64
-rwxr-xr-xbin/l10n-status307
-rwxr-xr-xbin/lo-commit-stat303
-rwxr-xr-xbin/lo-git-commit-summary136
-rwxr-xr-xbin/lo-pack-sources613
-rwxr-xr-xbin/lo-set-version224
-rwxr-xr-xbin/lo-unify-sdf45
-rwxr-xr-xbin/localize-ooo197
-rwxr-xr-xbin/lreloc39
-rwxr-xr-xbin/make-win32-iso162
-rwxr-xr-xbin/map-unused.pl54
-rwxr-xr-xbin/migrate-rawbuild-to-bootstrap16
-rwxr-xr-xbin/mkcppcheck.sh64
-rwxr-xr-xbin/mkdocs.sh131
-rw-r--r--bin/modules2.txt20
-rwxr-xr-xbin/ooconfig217
-rwxr-xr-xbin/ooconvwatch67
-rwxr-xr-xbin/oodocdiff.sh98
-rwxr-xr-xbin/ooinstall72
-rwxr-xr-xbin/ooo-news-filter-old45
-rwxr-xr-xbin/ooo-sdf-split154
-rwxr-xr-xbin/ooo-unpack-sources68
-rwxr-xr-xbin/ooo-update-po71
-rwxr-xr-xbin/oosize176
-rwxr-xr-xbin/oosmoketest.in16
-rwxr-xr-xbin/oostripimpl.pl24
-rwxr-xr-xbin/ootestapi.in10
-rwxr-xr-xbin/ootesttool.in5
-rwxr-xr-xbin/ootool.in152
-rwxr-xr-xbin/ootouch63
-rwxr-xr-xbin/openoffice-xlate-lang210
-rwxr-xr-xbin/owner63
-rwxr-xr-xbin/package-lang-win32160
-rwxr-xr-xbin/package-ooo1036
-rwxr-xr-xbin/patch-remove22
-rw-r--r--bin/piece/.gitignore1
-rw-r--r--bin/piece/Makefile.am22
-rwxr-xr-xbin/piece/build-bootstrap7
-rwxr-xr-xbin/piece/build-generic120
-rwxr-xr-xbin/piece/copyexcept80
-rwxr-xr-xbin/piece/desktop-support-app120
-rw-r--r--bin/piece/env-libs-gui2
-rw-r--r--bin/piece/env-libs_gui2
-rwxr-xr-xbin/piece/file-list-artwork16
-rwxr-xr-xbin/piece/file-list-base12
-rwxr-xr-xbin/piece/file-list-calc15
-rwxr-xr-xbin/piece/file-list-components28
-rwxr-xr-xbin/piece/file-list-extras44
-rwxr-xr-xbin/piece/file-list-filters12
-rwxr-xr-xbin/piece/file-list-help32
-rwxr-xr-xbin/piece/file-list-impress20
-rwxr-xr-xbin/piece/file-list-l10n41
-rwxr-xr-xbin/piece/file-list-libs-core65
-rwxr-xr-xbin/piece/file-list-libs-extern13
-rwxr-xr-xbin/piece/file-list-libs-gui15
-rwxr-xr-xbin/piece/file-list-postprocess62
-rwxr-xr-xbin/piece/file-list-sdk51
-rwxr-xr-xbin/piece/file-list-ure61
-rwxr-xr-xbin/piece/file-list-writer22
-rwxr-xr-xbin/piece/inst-artwork15
-rwxr-xr-xbin/piece/install-bootstrap84
-rwxr-xr-xbin/piece/install-devel-helper37
-rwxr-xr-xbin/piece/install-generic120
-rwxr-xr-xbin/piece/install-l10n-helper54
-rwxr-xr-xbin/piece/install-l10n-merged15
-rwxr-xr-xbin/piece/install-registry40
-rwxr-xr-xbin/piece/link-to-ooo-home59
-rwxr-xr-xbin/piece/merge-file-lists27
-rwxr-xr-xbin/piece/noulf29
-rwxr-xr-xbin/piece/noulfconv29
-rwxr-xr-xbin/piece/post-inst-components16
-rwxr-xr-xbin/piece/post-inst-postprocess65
-rwxr-xr-xbin/piece/post-inst-ure26
-rwxr-xr-xbin/piece/save-noarch75
-rwxr-xr-xbin/piece/save-registry48
-rwxr-xr-xbin/piece/sort-l10n32
-rwxr-xr-xbin/piece/sys-setup.in159
-rwxr-xr-xbin/piece/unpack-extras8
-rwxr-xr-xbin/po-cleanup97
-rwxr-xr-xbin/po2sdf87
-rwxr-xr-xbin/potores104
-rwxr-xr-xbin/preloc167
-rwxr-xr-xbin/pyunorc-update64.in91
-rwxr-xr-xbin/rcspack44
-rwxr-xr-xbin/re-root80
-rwxr-xr-xbin/relink14
-rwxr-xr-xbin/relocstat652
-rwxr-xr-xbin/reorder-link136
-rwxr-xr-xbin/run-tests.sh44
-rwxr-xr-xbin/setup.in386
-rwxr-xr-xbin/show-issues120
-rwxr-xr-xbin/show-obsolete-patches66
-rwxr-xr-xbin/sloppypatch.pl218
-rwxr-xr-xbin/snapshot-build38
-rwxr-xr-xbin/split-library.pl62
-rwxr-xr-xbin/split-to-projects22
-rwxr-xr-xbin/src-pack112
-rwxr-xr-xbin/src-pack-solaris105
-rwxr-xr-xbin/src-pack2125
-rwxr-xr-xbin/stat-localizations95
-rwxr-xr-xbin/stats.pl178
-rwxr-xr-xbin/strace-rewrite24
-rwxr-xr-xbin/strip-guards210
-rwxr-xr-xbin/svn-pack251
-rwxr-xr-xbin/tag-latest-master52
-rwxr-xr-xbin/tag-list93
-rwxr-xr-xbin/test-ooo151
-rwxr-xr-xbin/test-ooo-analyze1195
-rwxr-xr-xbin/timeout393
-rwxr-xr-xbin/tinbuild207
-rwxr-xr-xbin/unopkg-regenerate-cache103
-rwxr-xr-xbin/unpack853
-rwxr-xr-xbin/unused.pl205
-rwxr-xr-xbin/vtable-check204
-rwxr-xr-xbin/xmlunzip35
177 files changed, 0 insertions, 22281 deletions
diff --git a/bin/.gitignore b/bin/.gitignore
deleted file mode 100644
index 146f275e0..000000000
--- a/bin/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-setup
diff --git a/bin/Makefile.am b/bin/Makefile.am
deleted file mode 100644
index 628b96427..000000000
--- a/bin/Makefile.am
+++ /dev/null
@@ -1,47 +0,0 @@
-SUBDIRS=piece
-
-script_files = \
- amt-license-patch \
- applyflags \
- bash-completion.in \
- build-galleries \
- build-ooo \
- build-tools \
- cvs-tags-normalize \
- defuzzpatch \
- extract-gsi \
- extract-new-strings \
- fix-deps \
- generate-bash-completion \
- install-dictionaries \
- install-mono \
- install-sdk \
- install-artwork \
- java-set-classpath.in \
- localize-ooo \
- make-win32-iso \
- ooconfig \
- ooinstall \
- oosmoketest.in \
- ootestapi.in \
- ootesttool.in \
- ootool.in \
- ootouch \
- lo-pack-sources \
- ooo-unpack-sources \
- openoffice-xlate-lang \
- owner \
- package-lang-win32 \
- package-ooo \
- po2sdf \
- pyunorc-update64.in \
- re-root \
- stat-localizations \
- strace-rewrite \
- unopkg-regenerate-cache \
- unpack \
- setup.in \
- sloppypatch.pl \
- README
-
-EXTRA_DIST = $(script_files)
diff --git a/bin/README b/bin/README
deleted file mode 100644
index ef7523141..000000000
--- a/bin/README
+++ /dev/null
@@ -1,31 +0,0 @@
-In a simple build scenario / to get started use:
-
-./build-ooo OOO_STABLE_1 --checkout
-
-then (you need to be a sudoer):
-
-./package-ooo OOO_STABLE_1 --clean
-
-Other files:
-
- setup.in - misc. configuration options
- install-icons - install artwork and icons, re-scale
- existing icons using ImageMagic.
- tinder-build - used on the tinderbox to build and
- package OO.o in a loop.
-
- unused.pl - build lists of unused symbols in
- an installed set.
- map-unused.pl - turn unused symbol lists into linker
- maps.
- package-lang.pl - install lang-packs and build-per lang
- .spec files.
-
- openoffice-xlate-lang - map between several language identifiers.
-
- linkoo - program to link a build tree into an
- install image.
- ootouch - utility - give it some of your stack trace,
- and it will go touch files contianing that,
- meaning you can build debug=true easily to
- get debugging symbols...
diff --git a/bin/amt-license-patch b/bin/amt-license-patch
deleted file mode 100755
index d7dc617af..000000000
--- a/bin/amt-license-patch
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/bin/sh
-
-body=`mktemp`
-
-trap "rm $body" EXIT
-
-cat >$body <<'EOF'
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
-<html>
-<body>
-<hr style="width: 100%; height: 2px;">The following software may be
-included in this product: Agfa Monotype Corporation Font Software; Use of any of this
-software
-is governed
-by the terms of the license below:<br>
-<h2></a>Agfa Monotype Corporation<br></h2>
-<h3></a>Font Software</h3>
-
-
-<p>AGFA MONOTYPE CORPORATION <br>END USER LICENSE AGREEMENT</p>
-<p>We recommend that you print this End User Agreement for further reference.</p>
-<p>This Agfa Monotype Corporation End User Agreement (the &quot;Agreement&quot;) becomes a binding contract between you and Agfa Monotype Corporation (a) when you click on the area marked &quot;ACCEPT LICENSE AGREEMENT&quot;, or, (b) if you are acquiring Font Software on a floppy disk, when you open the package in which the font is contained. If you do not wish to be bound by the Agreement, you cannot access, use or download the Font Software. Please read all of the Agreement before you agree to be bound by its terms and conditions.</p>
-<p>You hereby agree to the following:</p>
-<p>1.You are bound by the Agreement and you acknowledge that all Use (as defined herein) of the Font Software (as defined herein) supplied to you by AMT is governed by the Agreement. </p>
-<p>2.&quot;AMT&quot; as used herein shall mean collectively Agfa Monotype Corporation, its successors and assigns, its parent and affiliated corporations, its authorized distributors, and any third party which has licensed to AMT any or all of the components of the Font Software supplied to you pursuant to the Agreement.</p>
-<p>3.&quot;Font Software&quot; as used herein shall mean software which, when used on an appropriate device or devices, generates typeface and typographic designs and ornaments. Font Software shall include all bitmap representations of typeface and typographic designs and ornaments created by or derived from the Font Software. Font Software includes upgrades, updates, related files, permitted modifications, permitted copies, and related documentation.</p>
-<p>4.&quot;Basic Licensed Unit&quot; as used herein shall mean up to five (5) Workstations (as defined herein) connected to no more than one (1) printer with a non-volatile memory (for example, a hard drive), all located at a single geographic location. If you intend to use the Font Software on more equipment than permitted by a Basic Licensed Unit, you must create an &quot;Expanded Licensed Unit&quot; by obtaining from AMT, for an additional fee, a site license for all such equipment. &quot;Licensed Unit&quot; as used herein shall mean a Basic Licensed Unit or an Expanded Licensed Unit as is appropriate to the context in which the term is used. If you have acquired an entire Font Software Library (that is, a single license for Font Software for 500 or more different typeface designs) and you use such Font Software Library only at a single geographic location, then &quot;Licensed Unit&quot; shall mean up to twenty (20) Workstations connected to no more than two (2) printers with non- volatile memories at such geographic location. </p>
-<p>5.&quot;Use&quot; of the Font Software shall occur when an individual is able to give commands (whether by keyboard or otherwise) that are followed by the Font Software, regardless of the location in which the Font Software resides.</p>
-<p>6.&quot;Derivative Work&quot; shall mean binary data based upon or derived from Font Software (or any portion of Font Software) in any form in which such binary data may be recast, transformed, or adapted including, but not limited to, binary data in any format into which Font Software may be converted.</p>
-<p>7.&quot;Personal or Internal Business Use&quot; shall mean Use of the Font Software for your customary personal or internal business purposes and shall not mean any distribution whatsoever of the Font Software or any component or Derivative Work thereof. &quot;Personal or Internal Business Use&quot; shall not include any Use of the Font Software by persons that are not members of your immediate household, your authorized employees, or your authorized agents. All such household members, employees and agents shall be notified by you as to the terms and conditions of the Agreement and shall agree to be bound by it before they can have Use of the Font Software.</p>
-<p>8.&quot;Workstation&quot; as used herein shall mean a component in which an individual is able to give commands (whether by keyboard or otherwise) that are followed by the Font Software, regardless of the location in which the Font Software resides.</p>
-<p>9.&quot;Commercial Product&quot; as used herein shall mean an electronic document or data file created by Use of the Font Software which is offered for distribution to the general public (or to some subset of the general public) as a commercial product in exchange for a separate fee or other consideration. By way of illustration and not by way of limitation, an electronic book or magazine distributed for a fee shall be considered a Commercial Product; a document distributed in connection with a commercial transaction in which the consideration is unrelated to such document (for example, a business letter, a ticket for an event, or a receipt for purchase of tangible goods such as clothing) shall not be considered a Commercial Product. </p>
-<p>10.You are hereby granted a non-exclusive, non-assignable, non-transferable (except as expressly permitted herein) license to access the Font Software (i) only in a Licensed Unit, (ii) only for your Personal or Internal Business Use, and (iii) only subject to all of the terms and conditions of the Agreement. You have no rights to the Font Software other than as expressly set forth in the Agreement. You agree that AMT owns all right, title and interest in and to the Font Software, its structure, organization, code, and related files, including all property rights therein such as copyright, design and trademarks rights. You agree that the Font Software, its structure, organization, code, and related files are valuable property of AMT and that any intentional Use of the Font Software not expressly permitted by the Agreement constitutes a theft of valuable property. All rights not expressly granted in the Agreement are expressly reserved to AMT. You may not use the Font Software to electronically distribute a Commercial Document without a separate license from AMT authorizing you to do so.</p>
-<p>11.You may install and Use the Font Software on a single file server for Use on a single local area network (&quot;LAN&quot;) only when the Use of such Font Software is limited to the Workstations and printers that are part of the Licensed Unit of which the server is a part. For the purpose of determining the proper number of Workstations for which a license is needed, the following example is supplied for illustration purposes only: If there are 100 Workstations connected to the server, with no more than 15 Workstations ever using the Font Software concurrently, but the Font Software will be used on 25 different Workstations at various points in time, a site license must be obtained creating a Licensed Unit for 25 Workstations. The Font Software may not be installed or Used on a server that can be accessed via the Internet or other external network system (a system other than a LAN) by Workstations which are not part of a Licensed Unit.</p>
-<p>12.You may electronically distribute Font Software embedded in a &quot;Personal or Internal Business Use&quot; document (that is, a document other than a &quot;Commercial Product&quot; as defined herein) only when the Font Software embedded in such document (i) is in a static graphic image (for example, a &quot;gif&quot;) or an embedded electronic document, and (ii) is distributed in a secure format that permits only the viewing and printing (and not the editing, altering, enhancing, or modifying) of such static graphic image or embedded document. You may not embed Font Software in a Commercial Product without a separate written license from AMT, and you may not embed Font Software in an electronic document or data file for any reason other than your own Personal or Internal Business Use.</p>
-<p>13.You may not alter Font Software for the purpose of adding any functionality which such Font Software did not have when delivered to you by AMT. If the Font Software contains embedding bits that limit the capabilities of the Font Software, you may not change or alter the embedding bits. Font Software may not be used to create or distribute any electronic document in which the Font Software, or any part thereof, is embedded in a format that permits editing, alterations, enhancements, or modifications by the recipient of such document. If you have reason to believe that a recipient of an electronic document possesses the capability to edit, alter, enhance, or modify such electronic document even though you have distributed it in a format which does not permit such editing, alteration, enhancement, or modification, you shall not transmit such document to such person. </p>
-<p>14.You may take a digitized copy of the Font Software used for a particular document, or Font Software embedded in an electronic document, to a commercial printer or service bureau for use by the printer or service in printing such document but only if the printer or service bureau represents to you that it has purchased or been granted a license to use that particular Font Software. </p>
-<p>15.You acknowledge that the Font Software is protected by the copyright and other intellectual property law of the United States and its various States, by the copyright and design laws of other nations, and by international treaties. You agree to treat the Font Software as you would any other copyrighted material, such as a book. You may not copy the Font Software, except as expressly provided herein. Any copies that you are expressly permitted to make pursuant to the Agreement must contain the same copyright, trademark, and other proprietary notices that appear on or in the Font Software. You agree not to adapt, modify, alter, translate, convert, or otherwise change the Font Software, or to create Derivative Works from Font Software or any portion thereof. You further agree not to use Font Software in connection with software and/or hardware which create Derivative Works of such Font Software. You agree not to reverse engineer, decompile, disassemble, or otherwise attempt to discover the source code of the Font Software, provided, however, that if you are located in a European Community member country or any other country which provides rights materially similar to the rights set forth in this proviso, you may reverse engineer or decompile the Font Software only to the extent that sufficient information is not available for the purpose of creating an interoperable software program (but only for such purpose and only to the extent that sufficient information is not provided by AMT upon written request). You agree to use trademarks associated with the Font Software according to accepted trademark practice, including identification of the trademark owner's name. Trademarks can only be used to identify printed output produced by the Font Software. The use of any trademark as herein authorized does not give you any rights of ownership in that trademark and all use of any trademark shall inure to the sole benefit of AMT. 
You may not change any trademark or trade name designation for the Font Software.</p>
-<p>16. You may not rent, lease, sublicense, give, lend, or further distribute the Font Software, or any copy thereof, except as expressly provided herein. You may transfer all your rights to use the Font Software to another person or legal entity provided that (i) the transferee accepts and agrees to be bound by all the terms and conditions of the Agreement, and (ii) you destroy all copies of the Font Software, including all copies stored in the memory of a hardware device. If you are a business or organization, you agree that upon request from AMT or AMT's authorized representative, you will with thirty (30) days fully document and certify that use of any and all AMT Font Software at the time of the request is in conformity with your valid licences from AMT.</p>
-<p>17.You may make one back-up copy of Font Software for archival purposes only, and you shall retain exclusive custody and control over such copy. Upon termination of the Agreement, you must destroy the original and any and all copies of the Font Software.</p>
-<p>18.AMT warrants to you that the Font Software will perform substantially in accordance with its documentation for the ninety (90) day period following delivery of the Font Software. To make a warranty claim, you must, within the ninety (90) day warranty period, return the Font Software to the location from which you obtained it along with a copy of your receipt or, if such Font Software is acquired on-line, contact the on-line provider with sufficient information regarding your acquisition of the Font Software so as to enable AMT to verify the existence and date of the transaction. If the Font Software does not perform substantially in accordance with its documentation, the entire, exclusive, and cumulative liability and remedy shall be limited to the refund of the license fee you paid to AMT to obtain delivery of the Font Software. AMT DOES NOT WARRANT THE PERFORMANCE OR RESULTS YOU MAY OBTAIN BY USING THE FONT SOFTWARE. THE FOREGOING STATES THE SOLE AND EXCLUSIVE REMEDIES FOR AMT'S BREACH OF WARRANTY. EXCEPT FOR THE FOREGOING LIMITED WARRANTY, AMT MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED, AS TO NON- NFRINGEMENT OF THIRD PARTY RIGHTS, MERCHANTABILITY, OR FITNESS FOR ANY PARTICULAR PURPOSE. IN NO EVENT WILL AMT BE LIABLE TO YOU OR ANYONE ELSE (I) FOR ANY CONSEQUENTIAL, INCIDENTAL OR SPECIAL DAMAGES, INCLUDING WITHOUT LIMITATION ANY LOST PROFITS, LOST DATA, LOST BUSINESS OPPORTUNITIES, OR LOST SAVINGS, EVEN IF AMT HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES, OR (II) FOR ANY CLAIM AGAINST YOU BY ANY THIRD PARTY SEEKING SUCH DAMAGES EVEN IF AMT HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.</p>
-<p>Some states or jurisdictions do not allow the exclusions of limitations of incidental, consequential or special damages, so the above exclusion may not apply to you. Also, some states or jurisdictions do not allow the exclusions of implied warranties or limitations on how long an implied warranty may last, so the above limitations may not apply to you. To the greatest extent permitted by law, any implied warranties not effectively excluded by the Agreement are limited to ninety (90) days. Some jurisdictions do not permit a limitation of implied warranties where the product results in physical injury or death so that such limitations may not apply to you. In those jurisdictions, you agree that AMT's liability for such physical injury or death shall not exceed One Hundred Thousand Dollars (U.S. $100,000), provided that such jurisdictions permit a limitation of such liability. This warranty gives you specific legal rights. You may have other rights that vary from state to state or jurisdiction to jurisdiction. The Font Software is non-returnable and nonrefundable.</p>
-<p>19. The Agreement will be governed by the laws of Illinois applicable to contracts wholly entered and performable within such state. All disputes related to the Agreement shall be heard in the Circuit Court of Cook County, Illinois, U.S.A. or the United States District Court for the Northern District of Illinois, Chicago, Illinois U.S.A. Both you and AMT agree to the personal jurisdiction and venue of these courts in any action related to the Agreement. The Agreement will not be governed by the United Nations Convention of Contracts for the International Sale of Goods, the application of which is expressly excluded. If any part of this Agreement is found void and unenforceable, it will not affect the validity of the balance of the Agreement, which shall remain valid and enforceable according to its terms.</p>
-<p>20.The Agreement shall automatically terminate upon failure by you (or any authorized person or member of your immediate household to whom you have given permission to Use the Font Software) to comply with its terms. The termination of the Agreement shall not preclude AMT from suing you for damages of any breach of the Agreement. The Agreement may only be modified in writing signed by an authorized officer of AMT. You agree that the Font Software will not be shipped, transferred or exported into any country or used in any manner prohibited by the United States Export Administration or any applicable export laws, restrictions or regulations.</p>
-<p>21.You have the rights expressly set forth in the Agreement and no other. All rights in and to the Font Software, including unpublished rights, are reserved under the copyright laws of the United States and other jurisdictions. All rights reserved. Notwithstanding the foregoing, to the extent that any law, statute, treaty, or governmental regulation shall be deemed by a court of competent jurisdiction to provide you with any additional or different rights from those provided herein and such rights shall be deemed non-waiveable as a matter of law and to supersede the rights specifically provided herein, then such law, statute, treaty, or governmental regulation shall be deemed to be made a part of the Agreement. To the extent that any such rights created by any law, statute, treaty or governmental regulation are waiveable, you agree that your acceptance of the Agreement shall constitute an effective and irrevocable waiver of such rights. The Agreement may be enforced by AMT or by an authorized dealer acting on behalf of AMT.</p>
-<p>22.If this product is acquired under the terms of a (i) GSA contract - use, reproduction or disclosure is subject to the restrictions set forth in the applicable ADP Schedule contract, (ii) DOD contract - use, duplication or disclosure by the Government is subject to the applicable restrictions set forth in DFARS 252.277-7013; (iii) Civilian agency contract - use, reproduction, or disclosure is subject to FAR 52.277-19(a) through (d) and restrictions set forth in the Agreement. </p>
-<p>&quot;Monotype&quot; is a trademark of Agfa Monotype Limited registered in the U.S. Patent and Trademark Office and elsewhere. All other trademarks are the property of their respective owners.
-</body>
-</html>
-EOF
-
-gawk ' BEGIN { doctype=0;}
- /^<!DOCTYPE/ {
- doctype++;
- if (doctype==2) {
- line=$0;
- while (getline <"'$body'")
- print;
- close ("'$body'");
- print line;
- print;
- next;
- }
- }
- { print; }
-'
diff --git a/bin/analyse-commits b/bin/analyse-commits
deleted file mode 100755
index 594f1ac8c..000000000
--- a/bin/analyse-commits
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/sh
-
-names=false
-if test "z$1" = "z--names"; then
- names=true
-fi
-
-format="%an: %s%n"
-uniq=''
-if test "$names" = true; then
- format="%an%n"
- uniq='-c'
-fi
-
-tmp=`mktemp`
-start='2010-09-28'
-
-git log --after=$start --pretty=format:"$format" 2>&1 > $tmp
-for i in clone/*; do (cd $i 2>/dev/null && git log --after=$start --pretty=format:"$format"); done >> $tmp
-
-eval sort $tmp | uniq $uniq | sort -n | nl
-echo "$tmp"
-rm -f $tmp
diff --git a/bin/analyse-link b/bin/analyse-link
deleted file mode 100755
index f2a6c698e..000000000
--- a/bin/analyse-link
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env perl
-
-
-my $log = shift (@ARGV) || '../map/relocation-log.bz2';
-my $bz = ($log =~ /\.bz2$/ && "bz") || '';
-
-my $stream;
-
-my %src_links = ();
-my %dest_links = ();
-my %self_links = ();
-
-my $svt_to_svl = 0;
-my $svl_to_svt = 0;
-
-open ($stream, $bz."cat $log |") || die "Can't cat $log: $!";
-
-while (<$stream>) {
-# 20884: binding file /usr/lib/libxml2.so.2 to /usr/lib/libxml2.so.2: normal symbol `xmlXPathSubstringFunction'
- if (/\d+:\s+binding file\s+([^\s]+)\s+to\s+([^\s]+):[^\`]+\`(.*)\'/) {
- my $src = $1;
- my $dest = $2;
- my $sym = $3;
-
- if ($src =~ /libsvl/ &&
- $dest =~ /libsvt/) {
- $svl_to_svt++;
- }
-
- if ($src =~ /libsvt/ &&
- $dest =~ /libsvl/) {
- $svt_to_svl++;
- }
-
- if ($src eq $dest) {
- $self_links{$src}++;
- } else {
- $src_links{$src}++;
- $dest_links{$dest}++;
- }
-# print "binding $src to $dest: $sym\n";
- }
-}
-
-close ($stream);
-
-print "src dest self name\n";
-print "-----------------------------\n";
-my $total_src = 0;
-my $total_dest = 0;
-my $total_self = 0;
-for $lib (sort (keys %src_links)) {
- my $src = $src_links{$lib};
- my $dest = exists $dest_links{$lib} && $dest_links{$lib} || '0';
- my $self = exists $self_links{$lib} && $self_links{$lib} || '0';
- print "$src $dest $self $lib\n";
- $total_src += $src;
- $total_dest += $dest;
- $total_self += $self;
-}
-print "-----------------------------\n";
-print "$total_src $total_dest $total_self : Total\n";
-
-print "\n";
-print "svl -> svt: $svl_to_svt\n";
-print "svt -> svl: $svt_to_svl\n";
diff --git a/bin/applyflags b/bin/applyflags
deleted file mode 100755
index 53aa0a1b1..000000000
--- a/bin/applyflags
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/bin/sh
-
-# generate flags for apply.pl - from various pieces ...
-# highly dependent on a custom environment ...
-
-bindir=$1
-# optional to force another OOBUILDDIR in the split build
-builddir=$2
-
-if test "z$bindir" = "z"; then
- echo "Serious error - requires path to bindir"
- exit 1
-fi
-
-cd $bindir
-. ./setup >/dev/null 2>&1
-
-SECTIONS="$OOO_ADDITIONAL_SECTIONS"
-
-# do we use another builddir?
-test -n "$builddir" && OOBUILDDIR=$builddir
-
-echo -n "--distro=$DISTRO"
-
-if test -d $OOBUILDDIR/nlpsolver ; then
- SECTIONS="$SECTIONS,NLPSolver"
-fi
-
-# The localize.sdf's are created during the build when they do not exits.
-# Additionally, they are created using something like echo > localize.sdf, so
-# even test -s is not helpful here :-(
-if test -d $OOBUILDDIR/l10n; then
- SECTIONS="$SECTIONS,Localize"
-fi
-
-if test "$UNSTABLE_WP" = "YES" ; then
- SECTIONS="$SECTIONS,UnstableWP"
-fi
-if test "z$PIECE" != "z"; then
- echo -n " --pieces"
- SECTIONS="$SECTIONS,Piece"
-fi
-if test "$ENABLE_GRAPHITE" = "TRUE"; then
- SECTIONS="$SECTIONS,Graphite"
-fi
-
-SECTIONS=`echo $SECTIONS | sed 's/^,//'`
-
-if test -n "$SECTIONS" ; then
- echo -n " --additional-sections=$SECTIONS"
-fi
-
-if test "$DEFUZZ_PATCHES" = "YES"; then
- echo -n " --defuzz"
-fi
diff --git a/bin/bash-completion.in b/bin/bash-completion.in
deleted file mode 100644
index 77087c593..000000000
--- a/bin/bash-completion.in
+++ /dev/null
@@ -1,90 +0,0 @@
-# Programmable bash_completion file for the main office applications
-# It is based on /etc/profile.d/complete.bash from SUSE Linux 10.1
-
-_def=; _dir=; _file=; _nosp=
-if complete -o default _nullcommand &> /dev/null ; then
- _def="-o default"
- _dir="-o dirnames"
- _file="-o filenames"
-fi
-_minusdd="-d ${_dir}"
-_minusdf="-d ${_file}"
-if complete -o nospace _nullcommand &> /dev/null ; then
- _nosp="-o nospace"
- _minusdd="${_nosp} ${_dir}"
- _minusdf="${_nosp} ${_dir}"
-fi
-complete -r _nullcommand &> /dev/null
-
-# General expanding shell function
-@OFFICE_SHELL_FUNCTION@ ()
-{
- # bash `complete' is broken because you can not combine
- # -d, -f, and -X pattern without missing directories.
- local c=${COMP_WORDS[COMP_CWORD]}
- local a="${COMP_LINE}"
- local e s g=0 cd dc t=""
- local IFS
-
- shopt -q extglob && g=1
- test $g -eq 0 && shopt -s extglob
- # Don't be fooled by the bash parser if extglob is off by default
- cd='*-?(c)d*'
- dc='*-d?(c)*'
-
- case "${1##*/}" in
-@BASH_COMPLETION_SUFFIXES_CHECKS@
- *) e='!*'
- esac
-
- case "$(complete -p ${1##*/} 2> /dev/null)" in
- *-d*) ;;
- *) s="-S/"
- esac
-
- IFS='
-'
- case "$c" in
- \$\(*\)) eval COMPREPLY=\(${c}\) ;;
- \$\(*) COMPREPLY=($(compgen -c -P '$(' -S ')' -- ${c#??})) ;;
- \`*\`) eval COMPREPLY=\(${c}\) ;;
- \`*) COMPREPLY=($(compgen -c -P '\`' -S '\`' -- ${c#?})) ;;
- \$\{*\}) eval COMPREPLY=\(${c}\) ;;
- \$\{*) COMPREPLY=($(compgen -v -P '${' -S '}' -- ${c#??})) ;;
- \$*) COMPREPLY=($(compgen -v -P '$' -- ${c#?})) ;;
- \~*/*) COMPREPLY=($(compgen -f -X "$e" -- ${c})) ;;
- \~*) COMPREPLY=($(compgen -u ${s} -- ${c})) ;;
- *@*) COMPREPLY=($(compgen -A hostname -P '@' -S ':' -- ${c#*@})) ;;
- *[*?[]*) COMPREPLY=($(compgen -G "${c}")) ;;
- *[?*+\!@]\(*\)*)
- if test $g -eq 0 ; then
- COMPREPLY=($(compgen -f -X "$e" -- $c))
- test $g -eq 0 && shopt -u extglob
- return
- fi
- COMPREPLY=($(compgen -G "${c}")) ;;
- *)
- if test "$c" = ".." ; then
- COMPREPLY=($(compgen -d -X "$e" -S / ${_nosp} -- $c))
- else
- for s in $(compgen -f -X "$e" -- $c) ; do
- if test -d $s ; then
- COMPREPLY=(${COMPREPLY[@]} $(compgen -f -X "$e" -S / -- $s))
- elif test -z "$t" ; then
- COMPREPLY=(${COMPREPLY[@]} $s)
- else
- case "$(file -b $s 2> /dev/null)" in
- $t) COMPREPLY=(${COMPREPLY[@]} $s) ;;
- esac
- fi
- done
- fi ;;
- esac
- test $g -eq 0 && shopt -u extglob
-}
-
-
-complete -d -X '.[^./]*' -F @OFFICE_SHELL_FUNCTION@ ${_file} \
-@BASH_COMPLETION_OOO_APPS@
-
-unset _def _dir _file _nosp _minusdd _minusdf
diff --git a/bin/build-galleries b/bin/build-galleries
deleted file mode 100755
index 731b75d44..000000000
--- a/bin/build-galleries
+++ /dev/null
@@ -1,175 +0,0 @@
-#!/bin/sh
-
-# this script is useful to generate galleries for OOo from Open Clip Art
-# Library source tarball that it available at
-# http://www.openclipart.org/downloads/index.php
-
-#
-# See setup for user tweakables.
-#
-. ./setup
-
-# where to install the OOo-related files (thumbnails & points at the files, ...)
-if test "z$1" != "z" ; then
- OOINSTDIR="$1"
-fi
-
-# source ooenv if it exists
-if [ -f "$OOINSTDIR/program/ooenv" ] ; then
- OLDPLACE=`pwd`;
- cd "$OOINSTDIR/program" > /dev/null
- . ooenv
- cd $OLDPLACE > /dev/null
-fi
-
-BUILD_OOO_GAL_FROM_OPENCLIPART=no
-if test -n "$OPENCLIPART_VER" -o -n "$OPENCLIPART_DIR" ; then
- BUILD_OOO_GAL_FROM_OPENCLIPART=yes
-fi
-
-if test "$BUILD_OOO_GAL_FROM_OPENCLIPART" = "no" -a \
- "$VENDORNAME" != "Novell" ; then
- echo " ${0##*/}: Nothing to do"
- exit 0
-fi
-
-if test "z$OPENCLIPART_DIR" = "z" ; then
- # where to install the generated bitmaps
- OPENCLIPART_DIR=$OODESTDIR/usr/share/openclipart
- # where are the unpackaged openclipart sources
- OPENCLIPART_SRCDIR=$BUILDDIR/openclipart-$OPENCLIPART_VER
-fi
-
-# where are installed the OOo-related files (thumbnails & points at the files, ...)
-GAL_DIR=$OOINSTDIR/basis$VERSION/share/gallery
-# It requires the gengal binary
-# The binary is actually available in OOo only when built with ooo-build
-GAL_BIN=$OOINSTDIR/basis$VERSION/program/gengal
-# start number for the new galleries
-GAL_NUMBER_FROM=70
-
-if (! test -e $GAL_BIN); then
- echo "Cannot find $GAL_BIN; make sure you installed OOo first"
- exit 1;
-fi
-
-echo -n "checking whether find supports the option -L... "
-if find -L / -maxdepth 0 >/dev/null 2>&1 ; then
- FIND_SUPPORT_SYMLINKS="-L"
- echo "yes"
-else
- FIND_SUPPORT_SYMLINKS=
- echo "no"
-fi
-
-if test "z$OPENCLIPART_VER" != "z" ; then
-
- echo "Generating .png files..."
-
- if test ! -d $OPENCLIPART_SRCDIR ; then
- echo "Cannot find Open Clip Art sources in $OPENCLIPART_SRCDIR"
- echo "Try to run ./download and ./bin/unpack"
- exit 1;
- fi
-
- rm -rf $OPENCLIPART_DIR
- for pict_svg in `find $OPENCLIPART_SRCDIR -name "*.svg" -type f ` ; do
- pict_dir=${pict_svg#$OPENCLIPART_SRCDIR/}
- pict_dir=${pict_dir%/*}
- pict_png=${pict_svg##*/}
- pict_png=${pict_png%.svg}.png
- mkdir -p $OPENCLIPART_DIR/$pict_dir
- inkscape -f $pict_svg -e $OPENCLIPART_DIR/$pict_dir/$pict_png
- done
-
- echo "Removing some blacklisted files..."
-
- for path in signs_and_symbols/flags \
- unsorted ; do
- echo " remove $path"
- rm -rf $OPENCLIPART_DIR/$path
- done
-
- #file list
- if test "z$OODESTDIR" != "z" ; then
- rm -f $BUILDDIR/openclipart.txt
- for dir in `find $OPENCLIPART_DIR -type d` ; do
- echo "%dir ${dir#$OODESTDIR}" >>$BUILDDIR/openclipart.txt
- done
- for file in `find $OPENCLIPART_DIR -type f` ; do
- echo "${file#$OODESTDIR}" >>$BUILDDIR/openclipart.txt
- done
- fi
-
- echo "done"
-
-fi
-
-# clean a previous gallery installation in DESTDIR
-if test "z$OODESTDIR" != "z" ; then
- gal_idx=$GAL_NUMBER_FROM
-
- while test -f $GAL_DIR/sg$gal_idx.thm -o \
- -f $GAL_DIR/sg$gal_idx.sdg -o \
- -f $GAL_DIR/sg$gal_idx.sdv; do
- rm -f $GAL_DIR/sg$gal_idx.thm \
- $GAL_DIR/sg$gal_idx.sdg \
- $GAL_DIR/sg$gal_idx.sdv
- gal_idx=$(($gal_idx + 1))
- done
-fi
-
-mkdir -p "$GAL_DIR"
-
-if test "$BUILD_OOO_GAL_FROM_OPENCLIPART" = "yes" ; then
- echo "Building extra galleries from openclipart..."
- for dir in `find $FIND_SUPPORT_SYMLINKS $OPENCLIPART_DIR -mindepth 1 -maxdepth 1 -type d | LC_CTYPE=C sort` ; do
- # get the gallery name from the directory name
- # and make the first character uppercase
- gal_name=${dir##*/}
- gal_name=`echo $gal_name | tr "_-" " "`
- gal_name_rest=${gal_name#?}
- gal_name_first_char=${gal_name%$gal_name_rest}
- gal_name_first_char=`echo $gal_name_first_char | tr "a-z" "A-Z"`
- gal_name=$gal_name_first_char$gal_name_rest
-
- echo "Doing gallery $gal_name..."
- # xargs is necessary because I
- find $dir -name "*.png" -print0 | LC_CTYPE=C sort -z | { xargs -0 $GAL_BIN -headless --name "$gal_name" --path "$GAL_DIR" --destdir "$OODESTDIR" --number-from "$GAL_NUMBER_FROM" || exit 1; }
- done
-fi
-
-# guess the filelist
-if test "z$OODESTDIR" != "z" ; then
- gal_num=$GAL_NUMBER_FROM
- rm -f $BUILDDIR/galleries.txt
- while test -f $GAL_DIR/sg$gal_num.thm; do
- if [ -e $GAL_DIR/sg$gal_num.sdg -a -e $GAL_DIR/sg$gal_num.sdv ]; then
- echo ${GAL_DIR#$OODESTDIR}/sg$gal_num.sdg >>$BUILDDIR/galleries.txt
- echo ${GAL_DIR#$OODESTDIR}/sg$gal_num.sdv >>$BUILDDIR/galleries.txt
- echo ${GAL_DIR#$OODESTDIR}/sg$gal_num.thm >>$BUILDDIR/galleries.txt
- else
- # remove incomplete galleries (exists .thm but not .sdg)
- rm -f $GAL_DIR/sg$gal_num.sdv \
- $GAL_DIR/sg$gal_num.thm
- fi
- gal_num=$(($gal_num + 1))
- done
-fi
-
-if test "$VENDORNAME" = "Novell" ; then
- # We want to update an existing gallery. Therefore we do it after the
- # galleries.txt filelist is generated. This way we can detect problems
- # via the RPM check for installed but unpackaged files
- echo "Adding Novell specific Draft.jpg to Backgrounds gallery..."
- cp "$TOOLSDIR/src/Draft.jpg" "$GAL_DIR/www-back/" || exit 1;
- chmod a+r $GAL_DIR/www-back/Draft.jpg
- # make sure that the existing galleries can be modified
- chmod u+rw $GAL_DIR/sg*.???
- # LC_ALL=en_US.UTF-8 is necessary to make sure that the name "Backgrounds" is compared with the right localization
- LC_ALL=en_US.UTF-8 $GAL_BIN -headless --name "Backgrounds" --path "$GAL_DIR" --destdir "$OODESTDIR" "$GAL_DIR/www-back/Draft.jpg" || exit 1;
-
- if test "z$OODESTDIR" != "z" ; then
- echo "${GAL_DIR#$OODESTDIR}/www-back/Draft.jpg" >$BUILDDIR/novell-gallery-addon
- fi
-fi
diff --git a/bin/build-ooo b/bin/build-ooo
deleted file mode 100755
index 94846456f..000000000
--- a/bin/build-ooo
+++ /dev/null
@@ -1,169 +0,0 @@
-#!/bin/sh
-
-#
-# See setup for user tweakables.
-#
-. ./setup
-
-if test "z$BUILD_WIN32" = "zyes"; then
- TARFILE_LOCATION=`cygpath -m $TARFILE_LOCATION`
-fi
-
-export TARFILE_LOCATION
-
-if test "z$BUILD_WIN32" = "z" -a "z`uname -s`" != "zSunOS" -a "z`uname -s`" != "zDarwin" -a "z`uname -s`" != "zOpenBSD" -a "z`uname -s`" != "zFreeBSD"; then
- if ! test -f /proc/cpuinfo; then
- echo "Looks like proc isn't mounted - this means almost certain"
- echo "Java related weird build failure: please check /proc"
- exit 1;
- fi
-fi
-
-if test "z`uname -s`" = "zSunOS"; then
- if test -f $GNUTAR -a ! -f $OOBUILDDIR/solenv/bin/tar; then
- ln -s $GNUTAR $OOBUILDDIR/solenv/bin/tar
- fi
- if test -d /usr/sfw/lib/pkgconfig; then
- PKG_CONFIG_PATH="$PKG_CONFIG_PATH:/usr/sfw/lib/pkgconfig"
- export PKG_CONFIG_PATH
- CFLAGS="$CFLAGS -I/usr/sfw/include"
- export CFLAGS
- CXXFLAGS="$CXXFLAGS -I/usr/sfw/include"
- export CXXFLAGS
- CPPFLAGS="$CPPFLAGS -I/usr/sfw/include"
- export CPPFLAGS
- fi
-fi
-
-# configure
-
-# main configure
-if [ -d $OOBUILDDIR/config_office ]; then
- cd $OOBUILDDIR/config_office
-else
- cd $OOBUILDDIR
-fi
-rm -f config.cache
-echo "configuring ...";
-
-echo "Env for configure:"
-set
-echo "Env for configure ends"
-
-if test "z$ACLOCAL_FLAGS" = "z" -a "z`uname -s`" = "zDarwin"; then
- export ACLOCAL_FLAGS="-I $TOOLSDIR/m4/mac"
-fi
-eval ./autogen.sh "$CONFIGURE_OPTIONS" "$PROPAGATED_ARGS" || exit 1;
-
-# Final build preparation
-
-# tcsh sucks great rocks, and refuses to re-direct it's output otherwise
-export TERM=
-# Many Java files have 8bit char-set comments, javac barfs on them in utf8 locales
-export LC_ALL='C';
-
-if test "z$ENABLE_JAVA" = "zyes"; then
- # Many Java's can't cope with the NPTL on Linux.
- LD_ASSUME_KERNEL=2.2.5 /bin/true 2> /dev/null || LD_ASSUME_KERNEL=2.4.10 /bin/true 2> /dev/null || LD_ASSUME_KERNEL=2.6.0 2> /dev/null || unset LD_ASSUME_KERNEL
- export LD_ASSUME_KERNEL
-fi
-# Embedded python dies without Home set
-if test "z$HOME" = "z"; then
- export HOME="";
-fi
-cd $OOBUILDDIR
-
-echo 'Importing Environment'
-# Import all of the OO.o environment
-if test "z$BUILD_WIN32" = "zyes"; then
- . $OOBUILDDIR/*.set.sh
-else
- . $OOBUILDDIR/*.Set.sh
-fi
-
-#
-# FIXME - disabled, a major source of unreliability ...
-#
-# Check to see if makedepend was not there - if not, this is a from-pristine build
-# if test ! -f $OOBUILDDIR/soltools/$INPATH/bin/makedepend; then
-# # Accelerate straight-through compiles by not building / using dependencies
-# echo "A very clean straight-through build - deferring dep generation"
-# export nodep=true
-#else
- echo "Building with full dependencies"
-#fi
-
-echo 'Bootstrapping'
-./bootstrap || ./bootstrap || ./bootstrap || exit 1;
-
-# Copy in missing libraries if we just built them
-if test "z$SYSTEM_GCC" = "z"; then
- echo "Copying libraries to $SOLARVER/$UPD/$INPATH/lib";
- mkdir -p $SOLARVER/$UPD/$INPATH/lib || exit 1;
- cp -avf $BUILDDIR/$LIB/libgcc* $BUILDDIR/$LIB/libstdc* $SOLARVER/$UPD/$INPATH/lib || exit 1;
- cp -vf $BUILDDIR/$LIB/libstdc++* $SOLARVER/$UPD/$INPATH/lib/ || exit 1;
-fi
-
-echo 'Verifying environment'
-echo "Path: \'$PATH\'"
-echo "Shell: \'$SHELL\'"
-echo "Lang: \'$OOO_LANGS\'"
-if test "z$BUILD_WIN32" != "zyes"; then
- echo "Gcc: "
- gcc -dumpversion
-fi
-bison --version
-flex --version
-# parallel build setting
-EXTRA_BUILD_FLAGS=
-if test "$BUILD_NCPUS" -gt 1; then
- EXTRA_BUILD_FLAGS="-P$BUILD_NCPUS"
-fi
-if test "z$BUILD_WIN32" = "z"; then
- EXTRA_BUILD_FLAGS="--dlv_switch -link $EXTRA_BUILD_FLAGS"
-fi
-if test "z$BUILD_HTML_OUTPUT" != "z"; then
- EXTRA_BUILD_FLAGS="--html $EXTRA_BUILD_FLAGS"
-fi
-echo "EXTRA_BUILD_FLAGS: $EXTRA_BUILD_FLAGS"
-
-EXTRA_DMAKE_FLAGS=
-if test "$MAX_JOBS" -gt 1; then
- EXTRA_DMAKE_FLAGS="-- -P$MAX_JOBS"
-fi
-
-# Automake exports MAKEFLAGS= --unix or something
-# similar that gives child nmake's pain.
-unset MAKEFLAGS
-
-echo "Env:"
-set
-
-if test "z$BUILD_WIN32" != "z"; then
- mkdir -p $OOBUILDDIR/external/msvcp90
-fi
-
-
-if test "z$PIECE" != "z"; then
- echo "Build $PIECE"
- . $TOOLSDIR/bin/piece/build-$PIECE
-else
-
-echo 'Commencing main build'
-
-cd $OOBUILDDIR/instsetoo_native || exit 1;
-perl $SOLARENV/bin/build.pl --all $EXTRA_BUILD_FLAGS $EXTRA_DMAKE_FLAGS || exit 1;
-OOO_REBUILD_NEEDED="no"
-fi # !PIECE
-
- echo " __________ ____"
- echo "| \ \..|"
- echo "| \ \.|"
- echo "| \ \|"
- echo "| \ "
- echo "| | Congratulation ! You have successfully built"
- echo "| | ( ) ( )( ,)( ,) ( _) / \( _)( _)( )/ _)( _)"
- echo "| | )(__ )( ) ,\ ) \ ) _) ( () )) _) ) _) )(( (_ ) _)"
- echo "|______________| (____)(__)(___/(_)\_)(___) \__/(_) (_) (__)\__)(___)"
-
-exit 0;
diff --git a/bin/build-tools b/bin/build-tools
deleted file mode 100755
index 525cde592..000000000
--- a/bin/build-tools
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/sh
-
-#
-# See setup for user tweakables.
-#
-. ./setup
-
-if test "z$SYSTEM_GCC" != "z"; then
- echo "Not building gcc / binutils";
-else
- # build binutils
- if test "z$BINUTILS_VER" != "z"; then
- echo "Building $BINUTILS_VER";
- cd $BUILDDIR/$BINUTILS_VER
- ./configure --prefix=$BUILDDIR || exit 1;
- make && make install || exit 1;
- fi
-
- echo "Building $GCC_VER";
- cd $BUILDDIR/$GCC_VER
- ./configure --prefix=$BUILDDIR --enable-languages=c++ || exit 1;
- MAKE_FLAGS=
- if test "$MAX_JOBS" -gt 1; then
- MAKE_FLAGS="-j$MAX_JOBS"
- fi
- make $MAKE_FLAGS && make install || exit 1;
-fi
diff --git a/bin/catch-endif b/bin/catch-endif
deleted file mode 100755
index e9f8466f1..000000000
--- a/bin/catch-endif
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env perl
-
-my $test_only = 0;
-
-while ($file = shift @ARGV) {
- if ($file =~ /--test/) {
- $test_only = 1;
- next;
- }
-
- open (File, "$file") || die "Can't open $file\n";
- my $intermediate = $file.".oo.int";
- open (Tmp, ">$intermediate") || die "Can't open $intermediate\n";
- $hit = 0;
- $num = 0;
- my $line = '';
- while (<File>) {
- $num++;
- if (/^#endif\s+[^\/\s]/) {
- print STDERR "Error on line $num of $file\n";
- $hit = 1;
- if( $file =~ m/\..xx$/ ) { # C++ comment
- s/^#endif\s+/#endif \/\/ /;
- } else { # C comment
- s/^#endif\s+(.*)\s+/#endif \/* $1 *\/ /;
- }
- }
- $line = $_;
- print Tmp $line;
- }
- if (! ($line =~ m/\n$/) ) {
- print STDERR "No newline at EOF of $file\n";
- print Tmp "\n";
- $hit = 1;
- }
- close File;
- close Tmp;
- if ($hit && !$test_only) {
- print "$file\n";
- rename $file, "$file.old";
- rename $intermediate, $file;
- } else {
- unlink $intermediate;
- }
-}
diff --git a/bin/change-guids.sh b/bin/change-guids.sh
deleted file mode 100755
index 161bd675d..000000000
--- a/bin/change-guids.sh
+++ /dev/null
@@ -1,468 +0,0 @@
-#!/bin/sh
-perl -pi -e 's#00C1656C-6386-42ED-BE8C-5D6C553EF125#226856FB-734F-40FB-BDF3-7DCCA74BCA5E#g' "$@"
-perl -pi -e 's#01588DD5-C6C5-4C9D-A1BD-1A63E779DA1A#4F776850-4235-4E93-847F-C657E04A35AB#g' "$@"
-perl -pi -e 's#01ACB7DA-DB98-43CE-B1E2-66869F2F5F8F#27F0C5E9-4DDC-4D8E-9477-D25C99B98FE4#g' "$@"
-perl -pi -e 's#01CD6DF5-A3D9-4D76-B39D-02FEC0C439E5#375F00E6-C1ED-4131-A5C9-D21E34718781#g' "$@"
-perl -pi -e 's#02CE50BA-CAC8-4013-B555-0DFC0D6F1C08#2F78D54B-CE1D-4607-AA5B-D52114BDEBDB#g' "$@"
-perl -pi -e 's#02FDC794-9AE9-4D64-9312-FF2DE0EF3104#2501A052-8F1D-4A7F-B4DE-4CB851B1840A#g' "$@"
-perl -pi -e 's#031C2836-62D6-481F-B9D9-C0B3340280EC#DE13CC7E-1C6E-4CB9-864E-EA87162EFCB2#g' "$@"
-perl -pi -e 's#03293AD0-38E2-4399-BE6E-EC696138E39A#C31C87A1-5918-4178-8696-DDD36EDAEE99#g' "$@"
-perl -pi -e 's#03CBE5BA-E9C4-4CF3-8B36-7BEDB31F256A#1F70AB81-E96B-414C-AEE1-BD69BAF954CC#g' "$@"
-perl -pi -e 's#047EB419-DF2F-4D02-A3B5-E1B2FAFEDC93#764FA135-A8CE-4C79-A5D7-0B413CC9E911#g' "$@"
-perl -pi -e 's#0654471C-6784-4F57-8F39-655E680CFB28#C6A8BDB3-48E5-4302-85EA-DFA4E4E9C2BA#g' "$@"
-perl -pi -e 's#066FFDBA-F99A-47E4-9C25-33E61461C878#733B8C44-7982-4676-A7C5-950A3280C056#g' "$@"
-perl -pi -e 's#071D642E-D898-4224-9EB5-353FC79EB6B5#22635154-DAF3-4E3C-8280-07F208C1132A#g' "$@"
-perl -pi -e 's#0750A25E-7492-4681-8DFD-E04E89E45C0A#8BC12666-3F41-43C7-9530-05A5A69A269D#g' "$@"
-perl -pi -e 's#08B2CED3-CFB5-491F-ADC4-FD0CD2366C60#6800BE26-76E9-4280-A174-5A952A18E643#g' "$@"
-perl -pi -e 's#08E10841-B5EF-4BF9-9688-AE25573E6DA7#5793EE6E-74B2-4965-A790-0C33ACEF75F0#g' "$@"
-perl -pi -e 's#09F1B354-A2C1-434A-93ED-A8C0E480BE23#F313287C-222D-4917-8266-FA9E2EE4F204#g' "$@"
-perl -pi -e 's#0A773C83-9633-4EAD-BE4F-C015568870BD#956362F1-C89B-4008-B15F-968BD4023F3F#g' "$@"
-perl -pi -e 's#0AA645C9-1281-4C48-B8B8-BB424A8AF709#122A65B5-E37E-453A-924B-E52B808EDA31#g' "$@"
-perl -pi -e 's#0B4B3945-2CA8-467C-B635-53B3FCC9A30E#ED03F985-453B-4C05-9D4F-5A5DABC5E383#g' "$@"
-perl -pi -e 's#0B524C60-9D82-464A-9772-90427D6E3FAA#B5B62297-3658-4C79-A3E3-CD714250DE0C#g' "$@"
-perl -pi -e 's#0BC79FED-8EB6-4308-8150-D756BEC28383#2155A452-E25A-4552-BEC5-373BFDCCDC2E#g' "$@"
-perl -pi -e 's#0D1A91F3-4B2D-4893-A720-8795F1DA848B#715D5AAA-F09A-4494-B260-4BC0F6DA576A#g' "$@"
-perl -pi -e 's#0E64C806-9EB2-4651-BB94-B8D9814FD9E5#73FB2891-79DB-4F4E-A2B6-EB4A07D2903A#g' "$@"
-perl -pi -e 's#0F0EA6CC-22CF-4C8D-AFD0-43BD96789A20#77A4C487-FD10-461F-9F0D-C4DF9FA79C86#g' "$@"
-perl -pi -e 's#0FB73E2F-AB13-46C2-BA45-245A3F3A4750#E5ABA9DC-0B05-49C7-AE79-42621CF9273F#g' "$@"
-perl -pi -e 's#0FBEEB90-0C80-4C77-8CEE-E6347CC465FD#F4B8D208-4566-4C4D-AFA0-3D2729084555#g' "$@"
-perl -pi -e 's#10C228EF-576B-466C-B863-955EC6A9340C#F59C6731-12DA-4A46-BE6D-30934E7CD763#g' "$@"
-perl -pi -e 's#11771BE2-44D3-4ACD-882B-26C4947237C5#A3382AC1-D71B-4C13-BF1C-99C0C9C085A6#g' "$@"
-perl -pi -e 's#1203F697-9652-44B2-A806-3F13A53101AC#E319F528-E0C4-4769-A024-F0DDEAE45E17#g' "$@"
-perl -pi -e 's#127BFEA5-3D4E-40A0-89C8-0434C26FA845#C3E6BC6A-034D-490D-866E-AFCAE0F4595F#g' "$@"
-perl -pi -e 's#12D1FE4A-95E1-4BED-B377-2E8EA93B2461#D17D8C22-9161-42BF-A4A3-31248273EFB8#g' "$@"
-perl -pi -e 's#13484272-CE60-4867-B494-D56F9049B95A#363942BE-A51D-43CE-8F09-2D94DFFDBB2E#g' "$@"
-perl -pi -e 's#13DB9904-5A81-49FC-A43F-F5CB6A730770#7A20B473-85A8-4A57-AAFA-F74362FCDE2E#g' "$@"
-perl -pi -e 's#14D843EF-5A1D-4EE4-9BAB-EC0A7A6F96D0#85F565F6-4853-4AFD-8CF3-7A57201489C7#g' "$@"
-perl -pi -e 's#1527983C-2953-45B4-87A5-C7D67A1F6C60#BB0063B5-BFFE-4BD8-9FB8-A4778B79E2BC#g' "$@"
-perl -pi -e 's#1552EC17-8CE9-4E58-A009-991468B8E806#7A007643-4247-4749-8B75-5DF783A29538#g' "$@"
-perl -pi -e 's#157A7FB6-A116-455C-AF18-3A46D8273B8D#11F95BB5-D403-49A6-B704-C48C26783253#g' "$@"
-perl -pi -e 's#16E09A61-5408-42D4-980C-C54C86C8B06E#C61808D3-AE08-4EAA-9427-8B1E71B66973#g' "$@"
-perl -pi -e 's#16FE7611-2139-4D83-81DF-F6ED5A322F06#8226A004-F62B-45A0-85B0-042687103114#g' "$@"
-perl -pi -e 's#180CEB68-8F15-4533-B2C2-43D47DED7ABB#1F95BC4C-67F2-42DF-B4C7-A40EF727624A#g' "$@"
-perl -pi -e 's#1869854A-30BE-40EF-B72C-AF0CCA1A5A91#8984AB31-BED3-49EB-92DD-E163B17132B1#g' "$@"
-perl -pi -e 's#18DE4A08-BD3A-433A-91B1-85998EA033F5#0BADD0FD-13B3-4CAB-9ECD-860FBEB09E6C#g' "$@"
-perl -pi -e 's#18F19719-BDA2-4D6C-82C6-E5CE6A11933B#F7AD5F59-4824-466F-BF27-C3F2658DA7F6#g' "$@"
-perl -pi -e 's#19B49960-B58C-48A7-9761-F0B325E00C31#31812C7E-CD25-455E-BEDD-F7A5F1E3AA57#g' "$@"
-perl -pi -e 's#19CF8231-7F96-4758-B6B1-DD28EDE08DCE#38490E45-A1A0-47CC-971A-F0E4CD27E055#g' "$@"
-perl -pi -e 's#1A4DA798-78EB-46B8-A5CE-4E32F883314C#4A5C3C73-4392-4399-A57C-B0056289FD32#g' "$@"
-perl -pi -e 's#1AA6B4E8-D53F-4E3D-A21E-F7AD2BC2FE71#56339959-E94E-4697-8C22-357855F3DA2C#g' "$@"
-perl -pi -e 's#1B6D81BA-F565-4309-80EC-0E221A48E045#11173006-B5BD-41EF-B071-87DD6D7D639B#g' "$@"
-perl -pi -e 's#1BCF7835-9CD6-454E-9530-64222CCF29E7#BF5684DA-C18A-43CE-B21D-97A7A0FAA486#g' "$@"
-perl -pi -e 's#1C7DD608-C8AC-4B5B-81BE-E4ED3753541D#7FE69A49-6825-47E0-8179-72336E7B4250#g' "$@"
-perl -pi -e 's#1D068BF9-FBCF-4A3B-B9A4-4B2DD3DE8CFB#918E7DC4-5641-4FB6-8A98-F3C0CCDFCC2D#g' "$@"
-perl -pi -e 's#1DA15F35-FA2E-4480-932A-381BFD326A15#1FBD7128-5220-4F02-B94D-40AA38A63A16#g' "$@"
-perl -pi -e 's#1E36FA46-AC57-47FB-94FE-EF7B32B9056C#3EF650A1-DA4F-4881-88F9-B6A287CA59AE#g' "$@"
-perl -pi -e 's#1EFCF35F-6267-4AC7-A2DF-9F3C40462869#1C121AF5-5B2C-4A87-AB4B-2AA85A6DCAE7#g' "$@"
-perl -pi -e 's#1F9BC074-55AA-496A-9EC5-05E65CF64AE6#58BEC02C-44C7-43D8-BD07-35A866E20AE7#g' "$@"
-perl -pi -e 's#1FABFA47-C362-4AB1-9E26-C65B4511CF5A#601D42F5-C928-477A-8FF9-C13116BAF889#g' "$@"
-perl -pi -e 's#1FEF45BC-1315-46F4-B89B-6660226758DE#293095EC-889D-4426-AE8A-C2E96DE8C78D#g' "$@"
-perl -pi -e 's#203C86EB-4A21-4AE1-A555-E3FD5DF863B2#0B9E04F3-A1FA-4A82-B710-E7B4221AF77D#g' "$@"
-perl -pi -e 's#205ADC32-F775-476A-B68E-949F85D0B0E3#43E5D518-02EA-4C82-AAB5-98954C12109A#g' "$@"
-perl -pi -e 's#20A9E4BF-6D89-4183-BE25-24C5F693B141#36CF6E95-273C-49DC-BD40-DA0F59E34074#g' "$@"
-perl -pi -e 's#2216391F-BDA0-49C2-9D93-52F2E27293B8#2AE6A570-1393-4D9D-B044-D3067FC28769#g' "$@"
-perl -pi -e 's#233A2442-D417-458E-A67F-4C3E58B54387#86E940A4-BE79-4D7C-818F-AE0B190F8AF6#g' "$@"
-perl -pi -e 's#238EB622-30E1-40AE-8824-AF300293A335#740FB449-BE5F-42C5-8982-24FE4A165992#g' "$@"
-perl -pi -e 's#23BB9DD6-10F0-4B81-A967-824C482F86B9#3BEDD097-FF77-4E6A-A8DA-888A0911C0EB#g' "$@"
-perl -pi -e 's#23E1EF25-1A53-4246-842E-20E97103D4FB#4840BF58-E8E1-4625-9F65-D7A6B26438C7#g' "$@"
-perl -pi -e 's#23E3948A-5C9C-4247-800F-2483627D3DAD#11EDE09B-CE84-4042-B40D-09F3A9CF7484#g' "$@"
-perl -pi -e 's#25FF47C5-9FBB-4FBA-89FC-D0FB73336E2B#982DC803-C9C1-4D8E-9B9E-A42335A2092E#g' "$@"
-perl -pi -e 's#261CBB5C-E2E3-47B5-A116-AB404BD7ACFE#0050B5A3-CD83-4155-A72D-740BBF0D9E34#g' "$@"
-perl -pi -e 's#264BD610-28F9-42C0-AFBF-60870763B038#61B2E7EA-DC12-4A51-B152-2B95A4FC2ABF#g' "$@"
-perl -pi -e 's#2656B21A-734A-48FD-A0EA-33F23E8DB27A#7A31AF55-3974-46E2-B11D-FA6FDFE9576B#g' "$@"
-perl -pi -e 's#268B04B5-5B6B-4F06-94AF-F8DE80519B1B#B27DED2F-51D7-4C0E-A011-F243E53814A1#g' "$@"
-perl -pi -e 's#277B630B-64D9-4105-A542-6F279A31A72C#030AFC27-901D-4DA7-B651-E4F3F4AEE708#g' "$@"
-perl -pi -e 's#28F03675-F28C-460D-8C2E-800EA7A551A0#12A4616B-D51B-4013-964B-0ACCCA5E9AF2#g' "$@"
-perl -pi -e 's#28FD3AC9-0676-46E6-97E7-5E06B0A3CF0C#31CE78E7-B3BB-48DF-9543-B2DF21F420A7#g' "$@"
-perl -pi -e 's#28FDEA4F-1DAB-4EAC-986E-604B7BB6EF11#6785479C-863D-4533-9D28-46984FD067BE#g' "$@"
-perl -pi -e 's#293249BF-8AA5-4436-B496-594A52A4BD50#CEDA775B-2A1E-45EE-8163-5A7223022389#g' "$@"
-perl -pi -e 's#29FF0FAA-B831-4398-BE3F-FC13949D1CAE#961EFB5A-0CA0-41E8-8C9C-CB3BA52B73EC#g' "$@"
-perl -pi -e 's#2A1ED9F2-5C50-458A-A8EC-E1A8853FCDEE#1E2C32F9-FB99-4861-90D2-805C44CDE2E8#g' "$@"
-perl -pi -e 's#2AB403D8-19E9-46FD-A514-40FE49FFDD7C#944202C0-A80D-413E-A673-3E3CB5078FAB#g' "$@"
-perl -pi -e 's#2B3F3A48-231B-4AB6-B1CB-7FA173F704DB#CF291629-172B-496B-8E86-017EB7392CFC#g' "$@"
-perl -pi -e 's#2B4232ED-227A-4DC6-A748-E347DF4BAD9A#DBB9D6BC-F016-4771-A089-24CF853C8C78#g' "$@"
-perl -pi -e 's#2C5FB742-9A33-410D-AC5C-A376D4967D69#3FF9E717-28E4-49E5-96F7-B7EAC7C3890C#g' "$@"
-perl -pi -e 's#2C70F320-6771-42D6-B3F5-97678C822B20#4FFECDE8-8121-43DC-81F7-9FB7C82F24F4#g' "$@"
-perl -pi -e 's#2CA6C9C8-68AC-4CEE-A335-715EEEA5C95A#38ED5703-6901-4CED-BD61-0C09DD972C9A#g' "$@"
-perl -pi -e 's#2D244112-AEFF-439B-B891-1933B0652688#6AA0063F-CA6C-48D2-9B17-61C1B6EF169F#g' "$@"
-perl -pi -e 's#2D44F336-E62D-41A1-A88B-0A8023217B19#4E63B45A-AE8A-4189-B74D-DEF23F8DB2D8#g' "$@"
-perl -pi -e 's#2DFA49FA-A3B6-44CA-A2BB-01DF463186D7#6561C433-322A-4EED-B31C-D9CE0F70F182#g' "$@"
-perl -pi -e 's#2E0EFF99-3023-4734-9D56-4E7400898182#D6770BC4-9500-4BF9-8137-72AFD7F9366E#g' "$@"
-perl -pi -e 's#2E261973-B842-4C33-8FEE-1C2EC8D45451#DF853049-BFE4-4021-B072-4DA7128BC80B#g' "$@"
-perl -pi -e 's#2E6901B1-A29C-48D3-A1D4-531BA141DE90#71057736-397D-44E4-AE0E-39A15F4E3CDE#g' "$@"
-perl -pi -e 's#2F1F8A99-B3C4-4F22-BB08-5F0D217C0BE5#8D2FABA5-79CD-4109-A274-DEE14E279233#g' "$@"
-perl -pi -e 's#2F7D3F37-6191-44BB-8C14-5A0F3D1CDC06#4F6FEF14-1946-44BD-933D-833223069E41#g' "$@"
-perl -pi -e 's#2F8A7B29-EDC9-4681-B5B5-0DED9EA7E563#22BA4E19-764F-4D37-8FF0-254DFE566E7E#g' "$@"
-perl -pi -e 's#300FFDA6-0C64-406D-B09B-CB5C91ABEBBA#5014304D-99B3-4451-8EB3-97D34658E4E7#g' "$@"
-perl -pi -e 's#302002A3-1D25-41EA-8095-487B3E422695#79B7F92B-168B-4E9D-8944-4AF49DEB0632#g' "$@"
-perl -pi -e 's#302DAE3A-2CD6-46DF-A25B-ED0B59FBA771#0F961BE5-98E8-408E-9F74-23CBA0523922#g' "$@"
-perl -pi -e 's#3110031F-1B5D-4457-BB5C-F6A330F15419#7854115C-C451-4BCB-86AC-F2257E60373C#g' "$@"
-perl -pi -e 's#317CE10E-BB44-43CE-8E9D-B608C06A6997#BD481859-9803-4606-91D5-10ABEF4C4ADC#g' "$@"
-perl -pi -e 's#3195C77E-8462-45DA-BEFC-37F1299D889E#37A07926-2192-4701-908A-D414D26CB864#g' "$@"
-perl -pi -e 's#34A3166C-BF5E-4751-A52E-CA68C139F688#81B965A6-FBB2-429A-B6CB-D0BCDD557827#g' "$@"
-perl -pi -e 's#34FBD5D3-3F88-4C91-8B9B-A10AE833250F#026CD9C5-6B53-421B-89A9-A5933C3E4BA6#g' "$@"
-perl -pi -e 's#3526DA6A-7EA6-471C-973E-C0461DE85909#7D5B38B0-3E15-4F96-AFFE-F8E455CE666B#g' "$@"
-perl -pi -e 's#3581A940-F712-4E3C-8441-CEAACB8820EC#847CFC65-88E2-4F23-ABB8-2AAA468090EB#g' "$@"
-perl -pi -e 's#35BAFF73-3654-4E74-8E44-0F74842785DF#CF0171A7-E3C2-4120-AF79-90D426D61204#g' "$@"
-perl -pi -e 's#382A0F08-CF86-45C4-8119-67C87E275EFE#1B1246FD-B318-49D1-8B5D-9475F55DF033#g' "$@"
-perl -pi -e 's#384BCFE4-EDD0-4897-9924-82CB51E80381#C89923C9-9061-42D4-B531-60498D929C06#g' "$@"
-perl -pi -e 's#385A1970-0257-4C57-9383-DD2D668B23CE#5050975D-CA7F-4F15-A3F5-F0A80B37AC35#g' "$@"
-perl -pi -e 's#38616841-D0DB-4768-B9B8-27D61A91FB47#56F49C99-23C1-44C6-8EE3-2DCE72391BF8#g' "$@"
-perl -pi -e 's#387DC3B5-4CF9-48A2-8292-3BB725BF654A#92CA37C8-B2E6-403F-932C-1A33E48BCD7E#g' "$@"
-perl -pi -e 's#38B81FA6-8BF3-4B2C-B4E5-9F52E6BFB437#D008C4D9-64C7-4ABF-82B7-DF8D4917FFB4#g' "$@"
-perl -pi -e 's#38C78379-C01D-4558-B7D2-102679F13658#B53C819A-A084-42F1-A18E-D7C6DD1CCB59#g' "$@"
-perl -pi -e 's#3968BFB2-35F7-4BBC-9E3E-C640BF09587D#25F500C7-E3CA-413A-BB45-F298420533CB#g' "$@"
-perl -pi -e 's#39772F91-1D37-4866-BBA4-900AB53590B0#1E5DDBF3-B05A-4E78-A496-332C8193F4C8#g' "$@"
-perl -pi -e 's#39B343FE-EC1F-451E-930A-47D85E96DE67#ADDC53E1-2AFC-4234-A4A2-253C19932351#g' "$@"
-perl -pi -e 's#3A400267-1BAE-4912-9800-8C26FDD547DE#539E1B86-87BD-4E73-9B4D-5F3FC1ADA817#g' "$@"
-perl -pi -e 's#3A469632-5FB9-4F99-AC5D-744B5D17C6D8#4F24C267-C4D8-4A60-BEFB-415A4FC12C8B#g' "$@"
-perl -pi -e 's#3B4B8C3C-AC32-4644-B7C5-79836EBD6784#15CE20D2-2DBE-4987-B8F7-06AAE6FF69F7#g' "$@"
-perl -pi -e 's#3B67E9E0-7177-420E-AF69-B58A0B9D22D2#9D868015-3368-4C29-A138-8F3786263D94#g' "$@"
-perl -pi -e 's#3CBD068D-E6AE-4C02-8091-DD4F2866336A#2E614D32-5AA3-4525-B2F2-BE6F995F529D#g' "$@"
-perl -pi -e 's#3CE6CA2E-6ECF-45BC-B26A-348FA03A25D3#40487026-D109-40C8-AE09-A5E4147815DE#g' "$@"
-perl -pi -e 's#3D391D0A-66D3-4141-8554-65DECA138208#0E79A6B5-D088-4670-ACDF-E0081C9ABFAE#g' "$@"
-perl -pi -e 's#3D4B1DA5-3846-49B0-8BEC-05869EF3F587#300DE45C-94AA-4DA2-9E83-A450DA8FB9AA#g' "$@"
-perl -pi -e 's#3E77E957-4F8B-4E7F-A5A6-79DEDF424931#6DA595F9-4F00-4288-858B-A281C90D87D5#g' "$@"
-perl -pi -e 's#3E85B9F2-0AC3-4E57-8072-260A6FE05D02#07F4C39E-72F9-495A-A8DE-8B3691607EA7#g' "$@"
-perl -pi -e 's#3E8FFADF-A50E-4412-8613-5CFD55864FA3#FFEE4E5F-196A-46A9-A99C-11966D5ECC35#g' "$@"
-perl -pi -e 's#3EF31743-BD4C-4CFF-ADF4-DC997358B6DD#DA8AF4CC-BF82-4076-9B3A-AA93B49BAC6B#g' "$@"
-perl -pi -e 's#3F0ABBBF-ADA1-479F-B01A-E76FB8C0E46D#66401593-6930-4B87-8283-2AAA4B1FA978#g' "$@"
-perl -pi -e 's#3F22750A-31C1-4C86-BD57-4A928A7F657E#6AFA47B1-9285-40CF-A98C-80F9F339A9EE#g' "$@"
-perl -pi -e 's#3FB32B22-14E8-4736-8AC5-04EB336EAD10#9810C5F8-C061-4D02-AD98-049452A5DC76#g' "$@"
-perl -pi -e 's#40802F67-E670-434D-BB05-5AAEB0F445FC#A398A8FC-7E6B-45D0-9869-63D67B38AEEE#g' "$@"
-perl -pi -e 's#40ADD5AE-CF08-4686-B531-F66160DB6869#67D6E6A3-26F3-403C-AEB0-CB9F7E415C7F#g' "$@"
-perl -pi -e 's#43252725-3200-45AF-A99B-913F532BD9DF#A6C60A32-84E3-4AB9-93DC-CBFFF437DD9B#g' "$@"
-perl -pi -e 's#45B77400-62B3-4469-8B12-A0813B0FE13F#52181DBA-1AFC-4038-904B-F36077C5460B#g' "$@"
-perl -pi -e 's#462A874C-20D9-45B5-970A-B93C922CFCD5#BC988E86-1498-411C-978B-9EEA114602A2#g' "$@"
-perl -pi -e 's#46AB3EE6-FD98-460D-AE0B-6AE68ED6204D#AA132C49-A480-4B5D-AAD3-811362B60A42#g' "$@"
-perl -pi -e 's#47D18328-E629-4E6E-9E64-BE6C37C89098#1264E7B9-2286-4E43-8F95-E56DF0992E60#g' "$@"
-perl -pi -e 's#481B0FE0-8313-442E-8A53-7C5139ADA3DC#B26D2BEF-C380-46DD-8206-DD014E51AE7E#g' "$@"
-perl -pi -e 's#481E648E-D8AD-478A-A3F6-68FC563FFD8F#56F769C8-29D9-4DA0-AAC5-6165D0180483#g' "$@"
-perl -pi -e 's#484F1EEA-7DFB-4A4F-9555-9A28488A9007#BAD1536A-5351-4D37-A4A4-DD486537474F#g' "$@"
-perl -pi -e 's#4932E078-0B9C-492F-A83B-BE0EA8988D52#B5AD6AF9-16C3-4F57-8F63-DF341762CC5B#g' "$@"
-perl -pi -e 's#49A004A2-3B8B-4029-B403-89919CB52355#8C45020E-085A-44F7-8E13-7B4A94C6925A#g' "$@"
-perl -pi -e 's#4A4912C9-16C1-487A-B0DC-2343E02EBEC0#C54B62B7-32CA-4A12-90AD-425F68965DF2#g' "$@"
-perl -pi -e 's#4A5DB1AD-D163-4E6E-BE42-6F2660B6F42D#C6C1A6A0-0112-4A6C-9EF1-25672676D909#g' "$@"
-perl -pi -e 's#4AE98AD0-7D79-48E6-A819-75F7323261D5#4AAAD846-9721-496E-A2BF-2E761A00A163#g' "$@"
-perl -pi -e 's#4B514E15-F8EB-457D-924E-0C4EA6C29FE5#EAD5DDEA-F9EA-4EAC-8EA4-CDCC9081A209#g' "$@"
-perl -pi -e 's#4B87A651-94B3-48DF-800C-9458C3A93179#50031E0A-6AA2-43BB-89A7-2341F75B5C91#g' "$@"
-perl -pi -e 's#4B87AD52-61B3-4065-9D09-E01F3F56E1A1#AFA20894-DAFA-4733-B0E4-16019BFE7D9D#g' "$@"
-perl -pi -e 's#4BF466E8-7FF4-4422-9FCE-1F0AE3DA00EE#6AD4F357-4F3A-480E-B834-4AA44FCAAB9D#g' "$@"
-perl -pi -e 's#4CD3F4B6-2B96-436F-883A-5BAF96E48AD8#C640D2B5-97E7-45C9-9FC6-7866BB6C8F5E#g' "$@"
-perl -pi -e 's#4D1449E7-D10B-44D8-AAB0-257D6930B0C2#C552DC5D-BAEE-4707-B090-5BC08E01162F#g' "$@"
-perl -pi -e 's#4D9B6646-80B7-48D1-BB19-684152EB3DA3#826EEF83-3FC8-4245-9A8E-0F7CC9F47497#g' "$@"
-perl -pi -e 's#4DA39F5E-CA6B-4CD4-8D37-01929510527A#71256996-7D5B-4460-AD23-66F68A99155B#g' "$@"
-perl -pi -e 's#4E1C4936-9C8A-4B5E-93EC-408FBEFFE00A#3EC2568D-8AEE-4968-8B3D-075E360D3A53#g' "$@"
-perl -pi -e 's#4E7025EF-909F-4609-97BA-304684C4833D#6221C926-E7C7-4122-A3E4-66285ACD0B79#g' "$@"
-perl -pi -e 's#4E8F4857-EB42-4D96-94AD-0058C817B5F2#E6953E1E-674F-4890-A99B-CBCE09445C13#g' "$@"
-perl -pi -e 's#4ED85F09-A3C9-4C63-A34B-A2AF16098825#08E36D6D-6FED-45F0-A87E-D742F9161250#g' "$@"
-perl -pi -e 's#4F4B8EE7-390E-429C-A13A-BC2403BD9529#062A1AD1-05AD-4D46-872B-B0ADA08DC3FC#g' "$@"
-perl -pi -e 's#4FF51E79-A1C5-4D60-AE8D-A880483F4182#DE14FDEF-E87A-4335-94B7-09D8FC30FE56#g' "$@"
-perl -pi -e 's#50087605-A504-441B-B514-67F13A284C10#E9FFB21F-C41B-4A4C-87D1-885D97FE4D4A#g' "$@"
-perl -pi -e 's#5032D03F-55BB-4CF5-ABDA-6AAB4002D12C#89196DBD-4B4D-45EA-89D5-5C57C69E5B16#g' "$@"
-perl -pi -e 's#5142883F-6084-4723-907B-EFD4A5097817#BA8033C9-6C42-4D4D-8160-46B6A19C08F9#g' "$@"
-perl -pi -e 's#52CFD54B-A5AE-4296-A02C-806083B90FFF#AB03BA22-FFF9-4C22-A534-FD2E5BC56DB7#g' "$@"
-perl -pi -e 's#52DC7E0E-C7AF-4EAA-A0EE-E5CD30190A2D#BAF99FD3-A5C8-4628-B836-69F0056BC6D6#g' "$@"
-perl -pi -e 's#52E22B79-3C25-4BFF-BC5B-FC37EE5020DC#B7573A6E-083B-4E1D-8AFF-7003B5401971#g' "$@"
-perl -pi -e 's#5349E7C9-2B55-46FB-AB26-25E019E9A86C#BD608D70-94EE-4793-A942-D4978B56B735#g' "$@"
-perl -pi -e 's#568D4A30-A1EC-4060-A8EE-41277EF3985C#18590BB8-8598-4781-BEDE-4BA660D7DCA3#g' "$@"
-perl -pi -e 's#56F23458-B7E6-4D97-89F2-A5A56E605A6C#0B4F6199-8EEE-45B0-B9B9-A80ECBEC5B8D#g' "$@"
-perl -pi -e 's#57836D3D-879A-4288-8C47-691ECC4EECBE#1D09B009-8003-44FB-B2E1-49387F1112B9#g' "$@"
-perl -pi -e 's#5889B8C4-0870-4319-B920-56F1D8AF5EBA#C99FCCE7-9478-4EE0-9B5E-AC46BA2874D7#g' "$@"
-perl -pi -e 's#5981EE94-6763-40AD-A8A0-EEB01CCA153C#63C087E6-5AC1-4C87-B663-6CEC4BD57935#g' "$@"
-perl -pi -e 's#5A5BF785-994C-44D0-A2D2-7C757C1E87B2#97F3F678-0371-457E-893C-EFD7447A7985#g' "$@"
-perl -pi -e 's#5B0E4353-AB4E-497E-B38F-2E784C7B1F35#9F41FCB5-A63D-49C1-A69C-9CBBCE73518D#g' "$@"
-perl -pi -e 's#5B45DC72-0755-4EC2-BB12-65A1FEADDDE7#47574572-9963-441B-8A42-0B1A61880608#g' "$@"
-perl -pi -e 's#5B52E76A-647C-4F18-BC52-6293FDE98C5D#0C596167-BA94-4D5A-B63B-6566FE06259A#g' "$@"
-perl -pi -e 's#5B8ADBF2-5377-44C7-8ADA-3A3AF516E401#E1A03A0D-3DF4-4316-A748-EEA886A30F7D#g' "$@"
-perl -pi -e 's#5D1302FC-B4AE-4BEC-8E4F-EFAC1C8C0D57#2288EAF3-AC96-4EE5-BEFB-1384DC1C4703#g' "$@"
-perl -pi -e 's#5D76E7D2-4C3F-4BF7-9574-1E01F9E68023#9504A85C-5E31-420E-88BD-12C50B9528C7#g' "$@"
-perl -pi -e 's#5D961D6C-B058-44FE-9CEA-49BF7B6C1587#97D63D2B-878E-4A1F-907B-E5A71B225FB5#g' "$@"
-perl -pi -e 's#5DBCCED3-48F9-4105-B5EF-1B3F46896762#F8FB0DCF-96C0-4C03-B748-951C679FD7C6#g' "$@"
-perl -pi -e 's#5DD514A5-CDD1-4F40-BED3-7D9653C1053B#2C5F5940-CAA5-4637-B07E-7CBDD812E04F#g' "$@"
-perl -pi -e 's#5E751B0D-FE96-4D0E-BAC1-5E7501294940#234E7342-71BD-4B78-946D-06531FD46FA7#g' "$@"
-perl -pi -e 's#5EB48BA3-4CFF-48CC-9301-DF5A99E901C8#7002CA01-C610-411C-BD18-FE1E670C1F52#g' "$@"
-perl -pi -e 's#5F926003-A07A-4812-A6AD-6C466C053F53#467D9E65-2614-4D19-B40F-0B12FE6E472F#g' "$@"
-perl -pi -e 's#5F9461CD-2937-4EE0-AF03-24E72987244E#C8D912D8-AA2E-47E2-9FEF-62C6580FB93C#g' "$@"
-perl -pi -e 's#5FE3A584-4297-4199-BF98-2BD36628B6C7#89623507-9F44-44CF-BD4C-042F9E26F903#g' "$@"
-perl -pi -e 's#60E1242E-CF22-4ABF-8ED4-F15BFAE31C72#15FAC7D1-931C-462C-98ED-9151E5BA4E87#g' "$@"
-perl -pi -e 's#614C96E4-DBB0-4EFF-A9BB-2744E75919EB#B9D5AE43-48F3-4AD7-B0C3-2F0F08E80C5C#g' "$@"
-perl -pi -e 's#615E2162-18AB-4070-A82F-E5B3E8AB81C0#D9B02C11-188E-4471-8CD7-FB457264A752#g' "$@"
-perl -pi -e 's#625F01BA-DEB1-4AE8-AF7B-CB0E11456EA2#3E02284B-AD3B-4716-8908-0D008D795378#g' "$@"
-perl -pi -e 's#626BAA92-954C-4A6C-9AF3-F9C146FAEEAB#8BAA0D7B-3D23-4559-93CF-D702106AD2A2#g' "$@"
-perl -pi -e 's#62E2F6A6-4521-4E36-A751-46E91A58CFB5#8B39DB32-6AF6-461F-8289-50655601C8FE#g' "$@"
-perl -pi -e 's#630A8423-A99F-45E6-9439-668EC12743C6#A069E3EB-E23C-43A7-8D95-80C92971C981#g' "$@"
-perl -pi -e 's#6321486D-351A-451F-9C23-FF334A4E41FA#DD1EC06C-317D-4DEB-9E28-53C6CA3146CB#g' "$@"
-perl -pi -e 's#63512266-4861-42F3-9269-3FBCFB9F60B2#7D03B08A-2F82-4F22-8F2A-C045DFC13566#g' "$@"
-perl -pi -e 's#638AEB7C-B611-4DBE-AD2C-A91BB7D13B75#DDDD2117-DF91-4F9F-B403-9010E9B1A45E#g' "$@"
-perl -pi -e 's#6718E3FB-C936-4699-82C0-E59F45E30D99#B5079E64-25F1-4188-AA8E-6191CFA99CD1#g' "$@"
-perl -pi -e 's#6732C0EC-1DC7-428C-BC7B-E1B752AD23E5#91B68ACD-6E12-4D96-822F-4715B87535CE#g' "$@"
-perl -pi -e 's#67C8D525-33FF-4464-BC47-B55E5724B9B2#E5694849-306C-4771-8101-521F445918AD#g' "$@"
-perl -pi -e 's#6824A943-F895-4F67-812C-0062BEF03E38#6D364115-17E5-4128-A46A-C48FFCDA7CAB#g' "$@"
-perl -pi -e 's#685C3085-DCD2-40F6-85C9-5084402D7E0F#DD26B7C9-F027-4AB3-8F98-E8EB4FBA9C89#g' "$@"
-perl -pi -e 's#69BB5E59-48BC-4906-B700-7E76EAEB076C#FCB3559B-62E7-4CF5-8900-AEDF279A3728#g' "$@"
-perl -pi -e 's#69F5D053-0E5D-4260-A787-DBB78397D51D#71C0B453-FF1A-4C57-A52D-1322088D3D3F#g' "$@"
-perl -pi -e 's#6BF5ADF5-F75C-4E0A-8E16-9829D3B194FD#27B24D10-F706-4ABD-B1DA-08C569637759#g' "$@"
-perl -pi -e 's#6C238E70-72FA-45FA-BC58-C508CD3AF6FC#4175B9EA-56CA-4311-9F96-937EABCEA1C8#g' "$@"
-perl -pi -e 's#6CFE884D-10C2-42F7-A60F-52A8284E8D45#49438D51-91A0-4E2C-8415-E48CF3C82DB4#g' "$@"
-perl -pi -e 's#6D3E7570-BA59-492D-A83D-9C3D7B457EE6#49D204A8-21E6-48C6-9B29-B25AB3D6A137#g' "$@"
-perl -pi -e 's#6D6315EA-4B6F-4A62-AE4B-7197F6C00106#961A44B0-E927-4823-9BC6-C1E1B665DCC9#g' "$@"
-perl -pi -e 's#6D72F5E0-D1D1-4BF4-8357-E5DC50CC1067#FB347C9D-7A6E-494B-8FB0-DC6041A927EA#g' "$@"
-perl -pi -e 's#6D8FE0C8-000D-4486-9814-135AE44A44BD#0DECDE88-DD1C-4E9F-8348-19C5BC6FDF26#g' "$@"
-perl -pi -e 's#6DD492EA-45CD-483E-8563-23355FD37E28#13A1CCE8-A020-4F7D-84DE-2FC89E4F0086#g' "$@"
-perl -pi -e 's#6E4658DD-79B0-4707-93C2-B022D66157A1#D5D79970-08FA-4CCC-B465-76948149E141#g' "$@"
-perl -pi -e 's#6E732530-FCDD-484C-A575-1289731020FD#95CDD114-AB99-47E9-A6CA-C50C7460B1DA#g' "$@"
-perl -pi -e 's#6FA34430-ACE2-4352-A4F6-B196673EE865#F8496E9B-FA31-48A1-BBF6-549BAEB34815#g' "$@"
-perl -pi -e 's#7035464F-45B4-41E3-A923-AC2EF4604C3A#83A626D6-E9EC-4F93-8D8C-0C0FF909924F#g' "$@"
-perl -pi -e 's#704FAB24-F8F9-4245-8BA7-1931B3176951#E16377E5-4C78-4016-A355-8CE512DA85B4#g' "$@"
-perl -pi -e 's#714181AC-7DE5-49D3-AF09-292EB90DB750#180F666D-555D-4F90-A53E-DD861CD9D6F0#g' "$@"
-perl -pi -e 's#71643076-66F2-47E8-9826-0D378956568E#96E6D257-71C9-4E36-8BCF-01F3E60F42F1#g' "$@"
-perl -pi -e 's#71A9C878-FEDC-4ACE-8E69-C5644A204558#F3843E12-0254-43D1-B8A1-3BB00F939BE6#g' "$@"
-perl -pi -e 's#72B0A3D4-B9E4-454C-9A45-52D2DC433A58#942C29ED-A013-436E-861C-28271FD60AAE#g' "$@"
-perl -pi -e 's#73531239-44BD-4EE8-81B1-7EA9A63E2BE1#F7EAF22A-4B7C-4183-93B5-D79DC7A64A76#g' "$@"
-perl -pi -e 's#7364F154-089D-400B-B275-751AAB9A6122#8EE0FF44-307B-4FDC-85F1-EA178C788FA8#g' "$@"
-perl -pi -e 's#739D1838-3118-4941-9BDF-68E02251C957#BCAB02B1-D876-4307-A2D6-6A730F423B45#g' "$@"
-perl -pi -e 's#73B96B61-E98E-4BB1-8E16-BABEBF6D58FB#E1D22BDB-C23D-4849-88AF-9B05F6B396E1#g' "$@"
-perl -pi -e 's#73BF81AD-7414-4BF4-B12B-D16FDD7DF854#A2FA035D-6AD1-4A42-8C94-514862DDF9F1#g' "$@"
-perl -pi -e 's#73D7DA04-45CD-494E-9B78-1EFDFDBF5600#A1D39E12-FF85-4619-BC69-001C55AD5B35#g' "$@"
-perl -pi -e 's#74543111-6ABF-4A12-AC11-D315E2939D2A#07E6F67B-2A5B-45D7-984D-773F76612B9C#g' "$@"
-perl -pi -e 's#7469E5D7-5F44-47A2-BCCA-C7B0AB88A333#5E960786-F4F3-4744-B402-FF7C01432987#g' "$@"
-perl -pi -e 's#74E55CA0-5F61-4B3F-AFE9-C450C352177F#E42C47FE-0F48-441A-8B8B-8959A0CE9CC0#g' "$@"
-perl -pi -e 's#74F683CC-8356-4BF7-9098-B85AAB7C1A0C#D5E8AF54-134D-4370-BEF6-62BD3049C516#g' "$@"
-perl -pi -e 's#75BCC928-CCCE-483C-9B76-C4B30E4B7397#4BFE4253-A149-49D1-ACFB-45B780DBE45B#g' "$@"
-perl -pi -e 's#763F5343-47FC-4909-8C18-D9E0CEED533A#F82875DF-7BFC-4CF1-A7E7-85E5AAD90F07#g' "$@"
-perl -pi -e 's#764043CA-CF26-4862-80BF-29E3AAD9E144#7D881D43-F1AA-49CE-83E2-420990672098#g' "$@"
-perl -pi -e 's#7664A926-AA39-4DB6-AE12-98B7305B708B#31B5416E-43F8-4012-8DDB-75D3B330497A#g' "$@"
-perl -pi -e 's#76854B27-9B89-4C7D-AB32-69E1F60CDA6F#FF72555B-A96D-49F8-8998-9359E9F9F7E1#g' "$@"
-perl -pi -e 's#76ABABE4-11C3-4783-BA87-6E76EC58F4DB#8D561925-4975-4FAA-B491-87B6D0564327#g' "$@"
-perl -pi -e 's#76B88102-58E7-4F89-90B9-B21185AA45E5#134EB795-F619-4BA0-A247-E694E8288D2A#g' "$@"
-perl -pi -e 's#76D5C227-D097-4B4E-B4C7-B2370B4F7932#5A98005B-D78A-41D4-9B2B-FFE739FF6D1A#g' "$@"
-perl -pi -e 's#77851653-0428-4A45-AE05-076D7D58E9B0#45188701-42C2-4217-BCC2-5FC4DE740FA1#g' "$@"
-perl -pi -e 's#77AC885A-8A45-439A-9D10-523A5A53A43B#08B1A917-7FA7-474A-952A-AC0CE00ABDDB#g' "$@"
-perl -pi -e 's#780E6282-6AA2-4CB3-A741-929B89B4DBE0#CA076406-011C-4B94-9667-BEA2020ABFC8#g' "$@"
-perl -pi -e 's#78D519F9-4EBF-40E5-895D-BCE019F448BE#64F8DC46-2087-41B4-B82D-39D55BBC8A48#g' "$@"
-perl -pi -e 's#797671B4-BC60-4171-9084-D97F87B15B99#E6779100-9F9A-4231-BBEC-19F4B1ED7C5F#g' "$@"
-perl -pi -e 's#79ADE5C5-96C5-4CAB-B8A3-1024E6A639E7#A9688928-501B-4435-94A2-3A046C1EF29D#g' "$@"
-perl -pi -e 's#7AA8672E-9811-4ECD-828D-EB694373C72C#0C9FF6FF-3539-4C64-8068-E15FA08BA03D#g' "$@"
-perl -pi -e 's#7B38DA39-FE10-43EC-A9B4-34470BD44252#E683C485-4034-4B51-8E4F-D389D1C87B5E#g' "$@"
-perl -pi -e 's#7B47880A-4C73-4052-B41E-9CFDDC09D9B2#E0FD5063-CD0E-4448-951A-25466038CC5B#g' "$@"
-perl -pi -e 's#7C35B9AB-2CE3-4C18-BE7C-5B97EA089EB3#4B17E523-5D91-4E69-BD96-7FD81CFA81BB#g' "$@"
-perl -pi -e 's#7C4C496A-2087-4AE3-8620-84FFD736F2BF#781C904C-8768-4271-AA69-1A2CFA248002#g' "$@"
-perl -pi -e 's#7C8A4BC9-0288-4CD2-B721-C43A283F5E52#AA212944-F6D4-4892-9FCF-F5AB00F48637#g' "$@"
-perl -pi -e 's#7CA64ED6-373D-4C92-8224-360F1451E828#C847C0DE-755E-45CA-98FF-F741F3EDB0CB#g' "$@"
-perl -pi -e 's#7CA9656A-7DB4-48EB-8E18-456256C05E92#2A7F7A26-EF8B-4990-8836-688508886215#g' "$@"
-perl -pi -e 's#7D14314D-DAF7-4A97-A6EE-97C475080A73#E25D3CC2-AC90-4861-9E9C-DA41B5DF8A03#g' "$@"
-perl -pi -e 's#7ED4CD40-F9C3-4787-AFBD-FB42F1F9223D#3355D11E-5906-4226-81FE-1413EA06E88B#g' "$@"
-perl -pi -e 's#7FFF1607-39C1-45D7-8D6A-DE97A3FE26A1#AAE902C3-8ED1-4E4E-8113-140C755C7BCE#g' "$@"
-perl -pi -e 's#80DD06D1-ABEE-4E85-8C23-E1031FC024B0#6EA8AEF0-EE62-4FF4-981F-D37CBDA4C5A7#g' "$@"
-perl -pi -e 's#80F13A63-A0C3-4C6D-A0B3-28F106730674#DD0CE074-E875-4428-991B-A9B3734701F9#g' "$@"
-perl -pi -e 's#81563C9F-5D94-473D-9C81-93B9EAF652A6#6DBB85B9-99A1-45A4-99F2-E174D4825F14#g' "$@"
-perl -pi -e 's#827077D9-7903-4732-9F11-3EEC4A4D21BE#EFD2F52B-6C0E-4F84-9E95-79C5F69DF479#g' "$@"
-perl -pi -e 's#829FD4BC-81FA-40F0-8F7D-DB6F1308E824#53B387F3-FC9F-4143-B309-B18E4498AC14#g' "$@"
-perl -pi -e 's#82ACA684-1BD8-4A22-AC70-5E9754047357#9E22FE7B-CA3C-4C10-9B30-80D9E4466495#g' "$@"
-perl -pi -e 's#8374FA3E-5454-4060-AD5C-4D9978850194#DBE8EF0B-1BDE-4AFA-B6D1-EB6D39934388#g' "$@"
-perl -pi -e 's#83C0A05D-611C-4EEC-A80B-030F9F93C693#4ACCB25D-FF84-473F-8AE5-472777DE15AD#g' "$@"
-perl -pi -e 's#83D4730E-7069-4ED8-8D2C-50E2878A84CF#47B240FD-87EA-4D4C-80FE-6DECA1D24162#g' "$@"
-perl -pi -e 's#8404A6EF-7E40-4B4C-86BC-450C51CFC8DA#5CF9A133-6C59-486F-BB16-BB7AB12826DA#g' "$@"
-perl -pi -e 's#842F16E3-E4FA-4B23-AFA0-4DF9DE8CA4F8#A62880EF-F164-4037-BB2A-76FBE37D3BEB#g' "$@"
-perl -pi -e 's#84FECCC3-88B0-479C-BE32-3E61ACA4493E#48A6F206-C8E5-4E40-8A2B-EED45A576821#g' "$@"
-perl -pi -e 's#85451AD1-A9A5-4F38-A14F-EF1C85B23C48#DBF68601-B962-4B0F-8791-E91225E6AF4E#g' "$@"
-perl -pi -e 's#85E15A19-22E3-4C64-828C-6E31B3F3FE56#D1C2E067-824A-4425-92EC-7242B4E28DE4#g' "$@"
-perl -pi -e 's#878755C3-6293-47EB-AACB-0E00B861FB37#3954C0B9-5E4A-48AC-BA53-5DBD14A478CE#g' "$@"
-perl -pi -e 's#881B24EF-5676-481C-B1C4-49964E203329#3D781C38-0896-4440-B2E3-EC3599D9B7C5#g' "$@"
-perl -pi -e 's#88BFF9E7-993E-4FB1-8C1F-8D7BB4DF9AB3#F44C6B10-CBEB-402B-A634-A809AE4E4DED#g' "$@"
-perl -pi -e 's#8916B89F-7763-4112-9820-B58BFF350D9B#3EA81EA3-0AD1-4833-8E27-C42455BA82DD#g' "$@"
-perl -pi -e 's#8A51A028-CFBE-4B1F-8C20-94E4A6D53675#148ACBA8-D6DA-4127-AB23-1787B0178278#g' "$@"
-perl -pi -e 's#8AFD7661-482B-48DC-A103-5542A0D54B94#BF2D424E-87FE-4E79-B6BD-69ABB39925FF#g' "$@"
-perl -pi -e 's#8B909084-2220-49AC-A421-142180519088#40C2DB8E-E9D6-4451-BE31-DAD1343EC3DF#g' "$@"
-perl -pi -e 's#8D0A9A70-F424-4FE2-AC27-50F8A409C602#83242173-2444-4F23-AB6F-6E8C2F4A8979#g' "$@"
-perl -pi -e 's#8D676E42-E6CE-42F5-80CF-345A0D35614E#214393CC-3F6C-4E37-91A2-B5D9C22A9067#g' "$@"
-perl -pi -e 's#8D6EE539-9E74-41C0-8DED-807F91D68467#AC220121-97DE-4D37-9554-8877DAF713E6#g' "$@"
-perl -pi -e 's#8DA3BE37-E70B-41BD-BBBC-C2169FBE01F4#930AEAB7-E847-474A-84FE-FE5C2BCD9CE3#g' "$@"
-perl -pi -e 's#8DDBA77B-86FB-4AFC-A9CE-7960585108E3#6A303796-7C5B-4081-AD88-1934A7C9C5E3#g' "$@"
-perl -pi -e 's#8E722E7F-6100-41F6-BCF2-0E8AF1405BF0#5C7E8CBE-6D6D-4DAD-B8DC-D10F85A2B36B#g' "$@"
-perl -pi -e 's#8EEC9A45-3BD3-431F-81D0-394F41185BD9#63DF72A2-1F4E-456E-A4E1-26342034BC19#g' "$@"
-perl -pi -e 's#8FDD7F7B-104C-4CB6-B7BD-F67FCF42F3CD#0EF3EE40-3690-472E-A693-5C9AE4FA7D5A#g' "$@"
-perl -pi -e 's#9001BF0C-2EFA-4682-9F4B-0AA2795FB18A#B020E61C-4577-4092-AED9-7E29A3CBA2CA#g' "$@"
-perl -pi -e 's#905F971E-91FC-4F85-90D6-F6217A4FF01A#397132BF-8878-4AD0-8943-1894B41692A1#g' "$@"
-perl -pi -e 's#90672FAA-14CA-4F63-8FE9-40E572A3969A#86433EBB-25D9-41E0-8F56-3DCB9534E027#g' "$@"
-perl -pi -e 's#91F443E7-DE34-46FC-B02E-F5628399603C#2246FA05-4B7A-499B-A19E-AA6599EA28D0#g' "$@"
-perl -pi -e 's#92496D40-FD43-4710-AFB0-80E481F9E0C4#9695B172-6236-4057-BE81-3B2002B2EABA#g' "$@"
-perl -pi -e 's#924D7527-25C6-4B95-95C7-7B68DABEC93E#65BA1A0C-9D3F-49A2-8ABB-7437C50A0AC7#g' "$@"
-perl -pi -e 's#953E09D5-4C1E-483A-A792-DD6C9A51D4E9#5414CD70-EEC6-4D50-BEA7-88027D7CA329#g' "$@"
-perl -pi -e 's#95AD70B2-CE4A-4362-975E-6C48DC70179C#20CA0658-5768-4B9E-8E3B-D9DF58332739#g' "$@"
-perl -pi -e 's#96D7CBE6-DCD5-4647-A0CE-ABB1ECAD1840#985D8E18-263F-42C8-A913-FD2556EC7161#g' "$@"
-perl -pi -e 's#97F43DDB-E046-4AC5-81BB-F64707098EE3#9D6DB52A-986C-4CE9-8494-3156AC40405E#g' "$@"
-perl -pi -e 's#989EA225-A7B8-4627-AEC6-854002636D1F#04D01DA9-024B-4C37-A69C-F7BA76A95702#g' "$@"
-perl -pi -e 's#98DE3411-B458-4E3C-B9D8-CA2D5667C22A#E902D293-C8BF-4337-8B09-C817F96B372C#g' "$@"
-perl -pi -e 's#98E66873-0C3E-4399-8676-A9D720071672#115A4904-16B0-487F-846E-A27588FC28A2#g' "$@"
-perl -pi -e 's#99F72778-F5E3-47D4-B867-68145F8052A3#19F11268-068A-488F-B139-848ACA080B94#g' "$@"
-perl -pi -e 's#99F82176-F642-42A7-B0DF-052E7096A4F8#D2654ED2-5D65-4281-A118-09A3F25914B0#g' "$@"
-perl -pi -e 's#9A8FA426-4DCC-458A-86CB-504CEB63204E#F09E48BA-14EE-44B2-9B28-2603A18CE1D6#g' "$@"
-perl -pi -e 's#9A9E806A-6A62-46D0-AF45-949611A37BEC#00393DC6-C022-4013-85D6-7421431A1BBC#g' "$@"
-perl -pi -e 's#9AB7EEDB-11F5-4088-B581-FE711EDE6DCB#E44612CD-1AC0-4D8B-9DBE-EFEAF5A2BC31#g' "$@"
-perl -pi -e 's#9B664C3A-CD48-4717-A40D-3B02475D7E20#67A6C9E2-C285-48E4-A620-40D905E8D3F8#g' "$@"
-perl -pi -e 's#9B8D68D5-6903-4C22-AC8F-CA0D2D124FD8#97BD11C0-13A0-416D-AB9A-9128DBEF707A#g' "$@"
-perl -pi -e 's#9BA2E02E-ACFA-4D4A-8F34-12E3F2BA29E5#201C48A0-5509-4306-A0BB-19368FAD93F3#g' "$@"
-perl -pi -e 's#9C92258E-9A41-4A03-AA5C-FD8EF15189BA#EF16C1BF-539B-4EEE-91E5-0D44C4B72840#g' "$@"
-perl -pi -e 's#9D3F52D5-2464-4F4D-9F2A-281B85911F7B#214078E0-9CD1-45D0-B596-2CBBE14CA86F#g' "$@"
-perl -pi -e 's#9EB27D9C-891F-4B40-A1FA-176D72088860#7914FCF0-7354-4A9C-A9AF-FA1DE4397BEA#g' "$@"
-perl -pi -e 's#A1662F7D-BF01-4F7C-83FB-BAB1B9B5D081#2F192244-3116-42E7-A382-656026BC47F4#g' "$@"
-perl -pi -e 's#A256340D-4065-4B6F-AC1D-EB0D5127EBCE#1955D5A5-375E-410F-B84F-9FDB4E6A05E5#g' "$@"
-perl -pi -e 's#A2BDCED1-8469-4570-A86A-68ED4F2E27A3#E6D97841-28E4-4BA8-A52F-A3BAFE3E250E#g' "$@"
-perl -pi -e 's#A316E11D-7907-460E-8E64-DEEFCD0F929A#FB26F31A-11A3-42C0-9ED5-BF72E8BBF620#g' "$@"
-perl -pi -e 's#A3BE065F-8E3B-46CA-946A-101A8E077D2B#C897C102-FE0D-4514-A3AF-F58D8EA3F16F#g' "$@"
-perl -pi -e 's#A4107B22-C84B-4783-B1AA-454A884144B7#B09BF848-23EA-4EE2-8FE3-DC788F4EAD53#g' "$@"
-perl -pi -e 's#A485C6CB-34A7-4969-A0AC-78241D417C95#96392323-8828-42E5-86F4-3CBD2036A609#g' "$@"
-perl -pi -e 's#A593FBBD-D539-476F-ADAB-7F54059D129F#5A742EFB-07B8-42AB-A451-60BBE2461102#g' "$@"
-perl -pi -e 's#A5FD8B52-2E09-45C5-A84B-67DA1DA8BD93#C5DCAE3D-065E-4111-8E19-D8FD6AB14858#g' "$@"
-perl -pi -e 's#A657505A-2A57-453B-8B47-F8D605A8AFE0#9EC0DEF1-601C-4775-9884-B22C347824E1#g' "$@"
-perl -pi -e 's#A73CB85E-760B-4E5D-AB26-7DF091C25F16#B343946D-65A4-418E-B594-AC7035FDA5C2#g' "$@"
-perl -pi -e 's#A833B97F-8FA7-497A-879D-28F09A4496B2#DEE49DC3-8651-4336-BFAF-EDB82F1F8096#g' "$@"
-perl -pi -e 's#A890155F-9333-4869-8816-D32720841A01#05134802-8C30-412A-89DD-CC662D1AB84F#g' "$@"
-perl -pi -e 's#A8D2F106-57BF-437A-85F2-6FDE96D5E923#9A6BEAAC-11E1-43AB-A18D-5CD040DBA9F5#g' "$@"
-perl -pi -e 's#A9003EE6-88FF-4545-ADE2-D6EECA5DD61B#9B4ABC89-CB93-4083-A529-7647D74EBB7D#g' "$@"
-perl -pi -e 's#A9E2C897-8193-4DBD-9FDE-F9BE9AB7F92E#646294EB-E2E7-4110-B3CF-06D82F2BEE6F#g' "$@"
-perl -pi -e 's#AA393994-0DBE-45A1-8393-BCA1927527F6#39538A97-FDC6-404C-A8E4-2C441713F00A#g' "$@"
-perl -pi -e 's#AAE94778-30AA-4F8B-97B0-6E62978BD0F8#C96C5113-0A94-4D78-8B56-63335845A6DB#g' "$@"
-perl -pi -e 's#AAFFA871-4921-4D14-9767-E302020DD8BE#B0DEE1A7-88FC-423B-AA1F-F51D94D02996#g' "$@"
-perl -pi -e 's#AC0B0CE3-DF64-4CCF-88EC-CBF594D63183#03889A8E-008C-438D-A4F8-F220470C182E#g' "$@"
-perl -pi -e 's#AC285965-9D0F-4D55-8885-0F0A9B9888B1#4BAF4D75-1E1E-4B15-A288-B4527E3C7CEE#g' "$@"
-perl -pi -e 's#AC7FA608-4E43-474F-9B48-B29D52F00700#20765149-8D90-4BF2-9878-54C01A52AA2C#g' "$@"
-perl -pi -e 's#AC8FC24A-2B06-4791-86D5-13747A54CFD9#CFFFB160-317A-4A75-A279-6D2691BE949D#g' "$@"
-perl -pi -e 's#AD768463-3019-4F37-8AE5-BAC2471C7AE8#5A5DE00E-D6F0-426E-A510-AFCD45564AFB#g' "$@"
-perl -pi -e 's#AE29A49E-45F0-4BB4-82C9-77FFCA6394E3#AB349EA3-BBFD-444D-BB7F-E34B51D3C83F#g' "$@"
-perl -pi -e 's#AEF1601C-0E4E-42D9-BD24-CD8CD5F9C5FB#4CE05198-6F3F-46AF-866D-7865B41BA4C2#g' "$@"
-perl -pi -e 's#AFEDEDDA-02B1-465B-A8E4-1917482835EE#3F832B47-935F-45D2-870F-59CBC55EBAF5#g' "$@"
-perl -pi -e 's#B098DABC-F38B-41FE-A5F0-8F671019A203#2455F7CF-1D9C-4FCE-A90A-A059C037A758#g' "$@"
-perl -pi -e 's#B0E54B09-14B1-4440-BCB8-E036CCD332DF#2EEACF3C-C38E-4043-8B4D-33A461083348#g' "$@"
-perl -pi -e 's#B0FDE9B8-D066-4FB2-885D-615A6D58E02F#D7B9854C-4B88-4AE6-9FF6-7CDD7E8CEB90#g' "$@"
-perl -pi -e 's#B111F2DE-21C7-4E36-BC20-5E059AAB4F1D#59F981B5-1F49-4B5C-AC14-E7EA635C17F9#g' "$@"
-perl -pi -e 's#B2269A43-2FC5-44AF-A0EE-D85CD2C6135C#08D84506-62BE-44C1-AA1A-EDBEBAC14D04#g' "$@"
-perl -pi -e 's#B2583886-6021-42B5-BB84-71D2C7D9B91E#DB6D6680-40C2-4E13-A09F-8DDF73939F7B#g' "$@"
-perl -pi -e 's#B4E6E1AC-C45E-48BB-A038-073C32693B65#D906DD83-52C5-4B45-A887-DABEED8A6936#g' "$@"
-perl -pi -e 's#B59EAD82-B261-48FF-807E-3050A5D9F234#9A8E6339-779C-4B0B-A3E5-B4CBBD2E271D#g' "$@"
-perl -pi -e 's#B63AD05D-90E5-45B5-B3FA-B69C47380E86#CDDF1704-0468-4F47-B191-C5D44FF301DA#g' "$@"
-perl -pi -e 's#B6DF5C8D-177F-47DE-A9F6-0C6E4FCDF668#14637C7D-0533-454C-9E60-B83A943264FF#g' "$@"
-perl -pi -e 's#B74A8B8E-968C-4531-8B6C-409E51D0A914#395C0DE6-BFAB-4100-836D-6633D9FAF84D#g' "$@"
-perl -pi -e 's#B884FE02-C0B1-40A5-9A05-3D92923B10E8#D00FE594-23A0-4B93-8260-4AE907EAC742#g' "$@"
-perl -pi -e 's#B891DEBE-300A-4CD5-8828-BE68529DE62E#785BAF17-30F1-482D-8419-C3186963A37B#g' "$@"
-perl -pi -e 's#B98782D1-9927-4A11-A689-AD9CDFB2F068#54D3D90D-A34A-47F8-873E-6FEC914F9CDC#g' "$@"
-perl -pi -e 's#BADB1F2A-B03A-4521-9A4A-61C3EDD365EC#55C379A7-D00F-4174-9073-F08CEF7FA40B#g' "$@"
-perl -pi -e 's#BC085A1A-38A6-462A-889F-6E7CA442D9D8#DAC92857-35AC-44E2-BB10-F2F91173128C#g' "$@"
-perl -pi -e 's#BE522738-E05A-431A-B535-29349EBF86AE#0DCE1B05-2A48-4485-8C7E-6FBB1003F0F7#g' "$@"
-perl -pi -e 's#BF0EB46F-05D8-4349-BEE8-57A0BFFE809E#2F8628DE-8765-475F-8265-3E535FBCE8B2#g' "$@"
-perl -pi -e 's#BF227916-E342-4307-91DB-97D72DE31233#E4C66A70-4597-4BD2-BDA3-25912A6FD81E#g' "$@"
-perl -pi -e 's#BFB3DBFA-B114-41D0-9C78-6BB85161A555#968FB518-842E-4807-8331-7E20E88A2C89#g' "$@"
-perl -pi -e 's#C0B220D1-2548-4E2C-BD7B-B7A14FE1C977#518E46AB-6372-4303-A0FD-40AE042D6898#g' "$@"
-perl -pi -e 's#C0C43007-C203-4722-916E-7A4E915E4F8D#8B523028-4AAC-4607-9711-667AA6C37D45#g' "$@"
-perl -pi -e 's#C0D08A10-A266-46DC-8EF4-BE0E5928129D#222033CB-A588-4302-9942-311F160B63B1#g' "$@"
-perl -pi -e 's#C16BE41C-1534-47E2-9D6F-EE3F45EFB075#D38E2221-2835-4B7B-8654-7CE0B181A55D#g' "$@"
-perl -pi -e 's#C196B961-1F5F-438E-B764-73F91C6CAEA5#6FBAFC4D-EBE8-4880-8644-E87698F5D169#g' "$@"
-perl -pi -e 's#C37B84EC-EB6B-4909-9866-D582A0E4B27D#11BE10D9-0737-4965-833E-FA615229CEAF#g' "$@"
-perl -pi -e 's#C4AC769D-F356-4D1A-A3A4-62E6729CF403#7E56C1DB-94E0-4C55-8D71-C27B1F971DB5#g' "$@"
-perl -pi -e 's#C62DBDBB-F1B6-4ACA-A0C6-D0B164A945CD#7A5DEAF9-2687-447F-803D-594E0966A2E1#g' "$@"
-perl -pi -e 's#C633A368-E04A-44EA-AFAF-15ADEDD8BF91#CC948F7B-2223-4CF4-B6DA-589D091E2897#g' "$@"
-perl -pi -e 's#C6C913B4-6F59-4F45-B37E-89494B94BBBC#E094BD07-9E0A-4235-B202-CF254F0EA4A7#g' "$@"
-perl -pi -e 's#C85CC50B-AD2E-4462-8517-36E499DF7CED#B7A54DFF-DDAE-4BB6-A355-FAF0D4E11952#g' "$@"
-perl -pi -e 's#C8911584-ECCA-47D9-8229-763B53AB616D#E8569C57-B57A-41A6-90A5-7D2A4C1B8D5C#g' "$@"
-perl -pi -e 's#CA1322F8-8213-401D-8501-2B70F8F944E0#572FFC41-E171-4F20-B08F-31A805892154#g' "$@"
-perl -pi -e 's#CB0957AD-996F-4B67-90C7-36E6288EEAB8#65233590-D233-4A02-91C3-DF83D00AEDA7#g' "$@"
-perl -pi -e 's#CB24EBA2-28FA-4EFF-9DD6-890D376F036A#23276245-2A01-444F-B878-4BB1B54638C3#g' "$@"
-perl -pi -e 's#CB3F8A64-90F1-4B08-BCE7-CD1641313821#2C1B2B2A-303B-499E-AE35-929EB2B8FDE7#g' "$@"
-perl -pi -e 's#CB7AB32F-5218-4B5A-982E-F1AA5F00CFC0#52E53AB3-FAE4-4025-9B6F-FA401C9B10AE#g' "$@"
-perl -pi -e 's#CBFA6A61-D310-4A90-8F49-C7BE2735D58D#EFBDEE97-65AB-4575-B117-976D2EE639D0#g' "$@"
-perl -pi -e 's#CC3129BA-331A-42CC-8569-93F9E05B6400#4FF53BC0-C0E8-45B0-B7ED-3DE12B7865D2#g' "$@"
-perl -pi -e 's#CC37C202-D6B7-4821-9015-0A25EB4E6CD8#830224A1-A0B4-43EF-9E19-C11195006154#g' "$@"
-perl -pi -e 's#CC763310-A178-4D8E-B840-DDB141C79BDC#CDF16F28-D75D-4BE8-AF81-EB85825DA252#g' "$@"
-perl -pi -e 's#CD74D285-6FAB-4737-B8C3-2FAA2B0B047D#D551A1C1-AF36-4FE4-BEF4-9CA72267D70A#g' "$@"
-perl -pi -e 's#CE5F3BEC-D59B-45CD-A451-A08E9F7018EA#B1A720C2-8B81-492F-8F78-7BC92147AD45#g' "$@"
-perl -pi -e 's#CE89FED1-6E4E-41B0-95B2-C39295B6A6BD#A7F4EC06-A6DC-4847-A510-41D435614014#g' "$@"
-perl -pi -e 's#CF6C8A15-A742-437F-9669-F8BE198873DF#E50DBC97-9AE2-4D8A-A8A5-9D3FAEE63D9A#g' "$@"
-perl -pi -e 's#CF7A11FA-FE24-427F-979C-016A7E1B2E8E#661C061B-68E8-4BDE-B05D-1E95732C37E6#g' "$@"
-perl -pi -e 's#D1D013B7-9570-4165-8421-BBDC97842260#D27E719E-A9E7-4204-92E8-1FE882264F27#g' "$@"
-perl -pi -e 's#D27CE6D1-50E5-4895-8CC0-5D1D25D882F9#CC80277E-6AA3-42F4-9F41-B6D0E76B065C#g' "$@"
-perl -pi -e 's#D3086F70-784A-4926-AFAE-5EDCB2856E5D#9A0B32AC-9D4D-4035-974D-66052C7F06AC#g' "$@"
-perl -pi -e 's#D30FCA19-CD1F-4052-9D84-BB45C1C7C0DD#EAE42234-4244-443D-93D8-D99235A80E2D#g' "$@"
-perl -pi -e 's#D334E645-4240-467E-9049-8EDA0DF62DD8#3BB61EBF-5BC1-4E4D-859F-3ED5D92DD25E#g' "$@"
-perl -pi -e 's#D3689B53-4B06-443F-8A2B-7937E7514BC4#15829530-145E-4628-88EB-C2D1EF872A3E#g' "$@"
-perl -pi -e 's#D3936188-9845-4AFD-A484-D6D634103917#8047890F-2CD5-48D2-A73B-50DD2D06A9C8#g' "$@"
-perl -pi -e 's#D53F8583-419E-440E-AC00-40E9AB31EEBF#FA30E08E-DD61-4E47-B3D6-51B2814CA881#g' "$@"
-perl -pi -e 's#D5A3EAE8-6FE2-46B6-BD99-3570E9D6E9E1#B2D2C818-F752-428B-9F42-5093E525AD19#g' "$@"
-perl -pi -e 's#D62D8785-CAF0-4650-8EA1-D713D61CC299#3699C099-D492-4206-B4D8-EBAB427432D8#g' "$@"
-perl -pi -e 's#D6C21010-B224-4121-83BE-BCFD41E2780F#F1EB79C8-66C0-4FB8-A287-79290C9FDBB9#g' "$@"
-perl -pi -e 's#D6FACDDF-EF17-4773-A9A8-9B84A2E7E62C#B156D653-2AB4-4C57-A1A7-F8E50309356A#g' "$@"
-perl -pi -e 's#D89A3F76-2161-44AA-8FD7-755E3033AC02#7A2CA172-09EB-4B2D-96D8-A370084757E1#g' "$@"
-perl -pi -e 's#D8B206B8-6822-48C2-A799-E35BFD585035#EDC1F411-8FB4-4180-A7A3-C7DA7BF873A5#g' "$@"
-perl -pi -e 's#D8DFFC6A-35C6-4AFD-88DE-E5B4528D8740#D1543E64-64B6-40F9-A5FC-696E95E2DE87#g' "$@"
-perl -pi -e 's#D935B9CC-0E26-46BC-8CA5-7DBB43500098#3D0D7BAC-56EC-4BAB-98BC-DD3D1F276390#g' "$@"
-perl -pi -e 's#DA0E5FD9-1C4D-4AB6-BA11-B34DFBB3FA17#4ACBFC0D-A5B6-41FF-AAD1-9856CBDCF057#g' "$@"
-perl -pi -e 's#DAF0244B-3B71-4EC8-8246-C58401AE5E01#01C118C0-57AB-4745-8D4C-5A34E6677E8D#g' "$@"
-perl -pi -e 's#DB4A0242-FD72-41DA-87A2-5466AE05BE03#53591C9B-B185-454D-B859-9D8325F41E22#g' "$@"
-perl -pi -e 's#DB6EBF02-F0A5-44ED-AF5C-BF97E2C6570A#F2D9545D-0428-49BE-BC24-F3A48C238135#g' "$@"
-perl -pi -e 's#DB9544DF-CDE1-4647-8B7C-683B3C423BE0#D6502F20-0CE2-4644-A7A2-C44337A84E54#g' "$@"
-perl -pi -e 's#DB97DBBB-CC40-405A-A774-CC3F40C935A9#ECF408BB-8554-4566-9230-083B3A61D5ED#g' "$@"
-perl -pi -e 's#DBA607B3-0805-4A18-A45D-769C70D90AB7#A0B8FD56-2D0D-4487-B999-19D32FCF5F56#g' "$@"
-perl -pi -e 's#DCC737A4-531E-4DCD-A3C4-376DA4C76D00#9F84BC57-8E00-4C27-81AA-5CCB6280A98B#g' "$@"
-perl -pi -e 's#DCEAB7D4-F591-4D91-B810-8E1516A4FB74#CECEB3CB-386A-4F16-8A88-C80850E09BDA#g' "$@"
-perl -pi -e 's#DD5DD115-63B5-4C5A-984A-423C4A22F9AB#965C704A-7328-4039-A865-914E6091BD38#g' "$@"
-perl -pi -e 's#DDAB0849-CC63-475C-AC87-388B93579A28#DE6A446A-BEC4-450D-94BD-B393B9898E76#g' "$@"
-perl -pi -e 's#DE15DF68-AD3F-465A-89F5-9FAA0EE001E5#A3FFB78D-681B-4EEE-B279-EA638040F82A#g' "$@"
-perl -pi -e 's#DE408221-68E9-418D-9B20-10BE458268F2#122EB716-EA92-4294-ADFB-4EDCC350E9D9#g' "$@"
-perl -pi -e 's#DF297699-3040-48E0-B0E6-B54385D76018#4B19ECA4-EB7B-420E-A2F3-0D456CA1CA3F#g' "$@"
-perl -pi -e 's#DF312B1B-93E3-4903-8467-6DF2786F0009#1BED5361-E88F-4833-B707-B39901731676#g' "$@"
-perl -pi -e 's#DF4D12DF-EB5D-44C8-8E41-60DFEFF1E623#A9DA9C7E-5DAE-419F-A213-A9BE4EFCE9B6#g' "$@"
-perl -pi -e 's#DF67ADAA-4755-4184-9E32-1020F40DDA81#2CCF1EF8-A263-4B77-8BDD-92D66B30531F#g' "$@"
-perl -pi -e 's#DF95762B-5307-4B85-B4C5-B144BB02C5FB#27125E19-DCFE-402F-AB8C-82CD9F584D77#g' "$@"
-perl -pi -e 's#E1475570-6060-4EF8-834A-A241969A2061#C1E77DC8-0C1C-4C2D-B67D-581E91C5BEDF#g' "$@"
-perl -pi -e 's#E1C174A0-8126-43A8-B696-9F9DE676E1ED#C4B4A0EF-3B09-4198-825E-7381653FCF13#g' "$@"
-perl -pi -e 's#E317CFA7-4C92-4B3E-8528-8D44B2081C00#D83DA940-56D9-4D11-AA20-8E70FD1C549A#g' "$@"
-perl -pi -e 's#E3363531-F96D-4756-88E8-240E9BFBD3B7#32DDC93C-0B62-42D9-9627-417CC947851A#g' "$@"
-perl -pi -e 's#E34A52B2-ADAA-4C58-AC3B-9FD91C135BC2#53951156-6BF5-416F-8CD2-E37A724A3779#g' "$@"
-perl -pi -e 's#E3B0AB69-1492-4595-AB55-D9ACE585A35D#FBF48667-A68E-4F53-BD54-F85179B1CE1C#g' "$@"
-perl -pi -e 's#E3F13778-87EE-478B-B8F1-F18B37F4C0A4#B2414CEC-4AF1-4943-88F8-C5011A860483#g' "$@"
-perl -pi -e 's#E3F62111-0A68-4268-8850-C49B830A1E57#43B0FA53-3B78-4920-B264-8D2DDF566B7C#g' "$@"
-perl -pi -e 's#E404F8D3-5FC2-44BF-97F9-901A24DF6A3E#7964A534-4526-46AD-8A1B-71BAF6AFE746#g' "$@"
-perl -pi -e 's#E49BD84D-625C-4BEB-AE0E-349D0DD6FFB7#55837A64-5B73-4CDC-8950-AF56E09A7961#g' "$@"
-perl -pi -e 's#E4C656E5-8F10-4A3A-B3A6-35C894DE637C#6874DCF5-E0AC-464A-8002-BD48A76ADDF9#g' "$@"
-perl -pi -e 's#E50DC1E3-0EC1-43E1-A6E3-8B9A70F8B3EF#9DFB7BF7-CD4C-4F85-B3D0-FAEE6470CFB4#g' "$@"
-perl -pi -e 's#E54FAE36-AEA4-4964-B4BB-CE9D4CEB4B3E#910006D2-BDF1-440C-89D3-8F1DD93790FE#g' "$@"
-perl -pi -e 's#E5ADEAB5-42A1-41D6-AD58-92C17CAE411D#95BC02E6-5498-4879-AE16-FFCB9CABE1AE#g' "$@"
-perl -pi -e 's#E60BC5AC-2513-40AF-A41B-64118A287662#3EBBFCD8-8009-4D31-AA5E-C1BED7B17A61#g' "$@"
-perl -pi -e 's#E6DC8D4C-F68E-4BE1-8F00-F0C84ED0F974#0EE4C97D-D8C7-441E-866A-CF277858E4E4#g' "$@"
-perl -pi -e 's#E954C6C0-1C6E-4BB6-B8F2-F655429E6D73#4C744BC8-6B5A-4E7A-95BA-D7A405C4B4A3#g' "$@"
-perl -pi -e 's#EA2CB52A-E5AA-45E7-80C6-DE7EB76A1E5D#2E784EF5-2240-4940-ACAC-240860D5B0FF#g' "$@"
-perl -pi -e 's#EA7365A3-7DDC-4934-BFFC-E328EC784390#16F54808-4A44-4C87-9A9B-AE038D4E1003#g' "$@"
-perl -pi -e 's#EB286FF5-8182-4F12-8B58-320DABC7472F#A0148901-E195-407D-A228-A328D3DFEAC4#g' "$@"
-perl -pi -e 's#EB478242-B1CC-402C-AAB0-B038446E1BC3#D9718666-88EF-4F73-8A89-1C6BAF16E782#g' "$@"
-perl -pi -e 's#EC0B288E-7A19-4CC3-9B5F-FA63413226B1#07219764-70AF-4461-BF24-B89C8F48717F#g' "$@"
-perl -pi -e 's#EC70E449-B165-43DD-A4ED-34F07C8C1E12#5DF8F595-6890-45CD-BD85-234DD7FB5435#g' "$@"
-perl -pi -e 's#EC7A7CA1-9A52-4C5C-B83F-BB98F87EED6C#540D1E5D-B770-44EB-A1B1-5E1B1E88E6C0#g' "$@"
-perl -pi -e 's#ECD85481-7606-4F45-A391-4DA9DCB15D1E#1D812984-FD08-415C-9967-CF228C28767C#g' "$@"
-perl -pi -e 's#ED45979C-7EE4-4D04-89C5-489DCEAD1009#45280063-6982-408E-A3A5-EB66E38C0919#g' "$@"
-perl -pi -e 's#ED6D0C4C-2746-40A9-A672-4D9950C421C0#AD579FC7-4AFF-46A6-A756-110EC862FD17#g' "$@"
-perl -pi -e 's#EDD352F1-431A-4275-A658-C9A516290616#B8AA7DA0-C909-4240-B88F-8506BCEBEAC9#g' "$@"
-perl -pi -e 's#EF673EE1-CD71-45F2-9233-4D2C3A9D9341#2D4C8A15-DDBC-48D7-A37E-F095D331EDD1#g' "$@"
-perl -pi -e 's#EF92EEBE-4B84-49CE-BED6-BF74DCD9395B#F859617C-7885-49B3-827A-8C9617C622F6#g' "$@"
-perl -pi -e 's#F190162C-DCD0-4266-B9AB-1B1FD7C24C2E#195CFF07-2848-4F7A-9770-98D1419A37F8#g' "$@"
-perl -pi -e 's#F2B735D6-7210-49CD-B3DC-E5EEE07D1EB3#B7EA5EEF-5834-4B3E-B774-115ED16A8797#g' "$@"
-perl -pi -e 's#F2BC3BEC-6634-4F9E-B878-5008EC622E5C#B0123EF4-1590-4436-B8C7-45778BCC53C9#g' "$@"
-perl -pi -e 's#F2F04B66-9996-425D-A026-0AE99F5DB878#250DC1C1-630E-4AD3-B25B-F57E9CC66572#g' "$@"
-perl -pi -e 's#F300CABB-ED67-468E-A336-87DEC6E22CFC#7278D44D-9673-4157-AC6F-839520312A42#g' "$@"
-perl -pi -e 's#F30569F1-C491-49B0-BDC3-220B8DDEB3AB#A4A5F141-B49A-4058-9DEB-FCA6DBBA1821#g' "$@"
-perl -pi -e 's#F3208E02-D4DA-4315-8151-E7063637A69B#5908E447-DD83-466C-989A-4DD73EEB9E1C#g' "$@"
-perl -pi -e 's#F5019C24-CC86-4E86-84A5-53FF98539D04#C3CFF998-06D1-48B0-925F-8B2CE21105F2#g' "$@"
-perl -pi -e 's#F585A19E-3AB9-496C-B06A-0CE6690148C8#D163A41F-FB45-4CC4-9644-4818953792EA#g' "$@"
-perl -pi -e 's#F68C3D57-AB20-4106-B3F2-4A51E8F12D27#F46A5D34-1906-45AA-A879-B2528FF3E9D5#g' "$@"
-perl -pi -e 's#F7F75CB1-2123-4B5B-BC00-11440725B71B#4C644036-3E74-44A6-AE94-55D9E7F2C541#g' "$@"
-perl -pi -e 's#F893A538-DA46-4667-B580-84DFB3CD5336#FB6F4362-9B92-4598-8F38-A1271678545A#g' "$@"
-perl -pi -e 's#FB1BAD3A-D0B7-4F05-A698-258C5ADFA17B#ED565020-41A2-4990-B0C2-B7043D4CFA9D#g' "$@"
-perl -pi -e 's#FC10CE94-41C6-43AE-AC97-4F4AE45B1BCC#2C127D50-B7E4-46C3-A845-CF6684481024#g' "$@"
-perl -pi -e 's#FC4188CE-C087-4109-A904-0687A76ACEDA#F980E77E-21FD-40B8-BE66-8BC60284D24B#g' "$@"
-perl -pi -e 's#FC4C1B4A-6571-4674-B5B2-6D42860D4DF6#6C5A78EF-563A-4B86-A3FA-C6431AA92E98#g' "$@"
-perl -pi -e 's#FC92FF2C-0E4B-4EB1-A962-682861EE4C33#00B8BCE0-A647-4DC8-8E58-3D1B4CFF3F0F#g' "$@"
-perl -pi -e 's#FCBF48CD-EBE3-4B65-9A9F-C73399CE2BF4#90F52A60-0F01-4741-BD02-891EF9EB5D9A#g' "$@"
-perl -pi -e 's#FD1FCF83-35F1-463E-89DF-0AEDA0444DEC#CA5E7AB5-DA2A-4F3B-8728-E97164034DD6#g' "$@"
-perl -pi -e 's#FD2E4280-BCFC-4109-A431-EC720C44A392#40FE345C-D3C7-48C4-8CC6-3B21845B9E2C#g' "$@"
-perl -pi -e 's#FEA2D5C2-22C1-42BC-B77F-7C0FC32A281E#0FA6115B-EB2C-4878-AE77-A6BB9BD95DE3#g' "$@"
-perl -pi -e 's#FF4DA93A-205F-40A2-9F01-2A5EB6663E6C#B4F43785-36D1-4716-AFCF-894B1AF36C1D#g' "$@"
diff --git a/bin/check-artwork.sh b/bin/check-artwork.sh
deleted file mode 100755
index a907aab95..000000000
--- a/bin/check-artwork.sh
+++ /dev/null
@@ -1,47 +0,0 @@
-#! /bin/sh
-
-# Take 2 directories with artwork (eg. one reference, 2nd the optimized), and
-# check that they do not differ
-
-D1="$1"
-D2="$2"
-
-if test -z "$D1" -o -z "$D2" ; then
- echo "check-artwork.sh dir1 dir2"
- exit 1
-fi
-
-function same_dir_content ()
-{
- SRC="$1"
- DST="$2"
- ( cd "$SRC" ; find . -type f ) | while read F1 ; do
- F2="$DST/$F1"
- if test ! -f "$F2" ; then
- echo "Missing file: '$F2' (source is '$SRC/$F1')"
- fi
- done
-}
-
-same_dir_content "$D1" "$D2"
-same_dir_content "$D2" "$D1"
-
-# visual comparison using a XPM format
-# [couldn't use the oodocdiff.sh trick - usually there are changes in the
-# alpha channel that ruin the check :-(]
-
-TEMP=`mktemp -d /tmp/check-artwork.XXXXXXXXXX`
-TMP1="$TEMP/1.xpm"
-TMP2="$TEMP/2.xpm"
-( cd "$D1" ; find . -type f ) | while read F ; do
- convert "$D1/$F" "$TMP1"
- convert "$D2/$F" "$TMP2"
- if diff -q "$TMP1" "$TMP2" > /dev/null 2>&1 ; then
- :
- else
- echo "Images differ: '$D1/$F' and '$D2/$F'"
- fi
-done
-
-rm "$TMP1" "$TMP2"
-rmdir "$TEMP"
diff --git a/bin/create-gitignores.sh b/bin/create-gitignores.sh
deleted file mode 100755
index 6013de4f7..000000000
--- a/bin/create-gitignores.sh
+++ /dev/null
@@ -1,125 +0,0 @@
-#!/bin/bash
-
-# create .gitignores for the the OOo tree
-# run in build/xyz680-mABC
-
-[ -d .git ] || git init
-
-chmod a+x solenv/bin/build.pl
-chmod a+x solenv/bin/gccinstlib.pl
-
-# Check for a present git config
-if [ ! -w .git/config ] ; then
- echo ".git/config should be writable"
- exit 1
-fi
-
-# Changing the .git/config
-cat >> ".git/config" << EOF
-
-[diff "swallow"]
- command = /bin/true
-EOF
-
-# Creating the .gitattributes
-cat >> ".gitattributes" << EOF
-/applied_patches/* diff=swallow
-EOF
-
-# Creating the .gitignore
-while read F ; do
- D=`dirname "$F"`
- B=`basename "$F"`
- if [ "${F:0:1}" = "/" ] ; then
- echo "$F" >> ".gitignore"
- else
- [ -d "$D" ] && echo "$B" >> "$D/.gitignore"
- fi
-done << EOF
-/Linux*Env.Set*
-/MacOS*Env.Set*
-/bootstrap
-/makefile.mk
-/ID
-unxlng*.pro
-unxlngi6
-unxmacx*.pro
-unxmacxi
-localize.sdf
-.svn
-/solver
-*.orig
-*~
-autom4te.cache/
-config.log
-config.parms
-config.status
-configure
-set_soenv
-warn
-default_images/introabout/intro-save.bmp
-default_images/svx/res/openabout_ark.png
-default_images/svx/res/openabout_translateorgza.png
-default_images/sw/res/go-oo-team.png
-dmake/.deps/
-dmake/Makefile
-dmake/config.h
-dmake/config.log
-dmake/config.status
-dmake/dag.o
-dmake/dbug/dbug/.deps/
-dmake/dmake
-dmake/dmake.o
-dmake/dmakeroot.h
-dmake/dmdump.o
-dmake/dmstring.o
-dmake/expand.o
-dmake/function.o
-dmake/getinp.o
-dmake/hash.o
-dmake/imacs.o
-dmake/infer.o
-dmake/macparse.o
-dmake/make.o
-dmake/msdos/.deps/
-dmake/parse.o
-dmake/path.o
-dmake/percent.o
-dmake/quit.o
-dmake/rulparse.o
-dmake/stamp-h1
-dmake/startup/Makefile
-dmake/startup/config.mk
-dmake/startup/unix/Makefile
-dmake/startup/unix/cygwin/Makefile
-dmake/startup/unix/linux/Makefile
-dmake/startup/unix/macosx/Makefile
-dmake/startup/unix/solaris/Makefile
-dmake/startup/unix/sysvr4/Makefile
-dmake/startup/winnt/Makefile
-dmake/startup/winnt/mingw/Makefile
-dmake/startup/winnt/msvc6/Makefile
-dmake/stat.o
-dmake/state.o
-dmake/sysintf.o
-dmake/tests/Makefile
-dmake/unix/.deps/
-dmake/unix/.dirstamp
-dmake/unix/arlib.o
-dmake/unix/dcache.o
-dmake/unix/dirbrk.o
-dmake/unix/rmprq.o
-dmake/unix/ruletab.o
-dmake/unix/runargv.o
-dmake/unix/tempnam.o
-dmake/win95/.deps/
-dmake/win95/microsft/.deps/
-instsetoo_native/res/banner_nld.bmp
-instsetoo_native/res/nologoinstall_nld.bmp
-instsetoo_native/util/OpenOffice
-setup_native/source/win32/nsis/ooobanner_nld.bmp
-setup_native/source/win32/nsis/ooobitmap_nld.bmp
-setup_native/source/win32/nsis/ooosetup_nld.ico
-solenv/unxlngx6/
-svx/res/
-EOF
diff --git a/bin/create-ids b/bin/create-ids
deleted file mode 100755
index 6a931169f..000000000
--- a/bin/create-ids
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/sh
-
-# create ID file for the whole OOo tree. run it in build/xyz680-mABC
-
-. ./*.Set.sh
-
-dirs="*/$INPATH/inc */$INPATH/misc/build `ls -d */* | sed -e '/\(applied_patches\|\/\(CVS\|.svn\|common\|unxlng\|unxsol\|unxmac\|wntmsc\|Jamfile\|cscope\.\|tags\|ID\)\)\|^solver\//d'`"
-mkid --lang-map=`dirname $0`/id-lang.map --include='C C++ asm perl make' --statistics $dirs
diff --git a/bin/create-rpms b/bin/create-rpms
deleted file mode 100755
index a4c09365a..000000000
--- a/bin/create-rpms
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/bin/sh
-
-# build rpms ; passing -l option will build language packs too
-
-# make sure we're running in the right directory
-
-ooobuildbindir=`dirname "$0"`
-[ "$ooobuildbindir" != "" ] && cd "$ooobuildbindir"
-
-#
-# See setup for user tweakables.
-#
-. ./setup
-. $OOBUILDDIR/*.[sS]et.sh
-. ./setup
-
-# System Mozilla
-if test "$SYSTEM_MOZILLA" = "YES"; then
- export LD_LIBRARY_PATH="$MOZ_LIB":"$LD_LIBRARY_PATH"
-fi
-
-# Create RPMs
-# http://installation.openoffice.org/how_to_create_native_installer.html
-cd $OOBUILDDIR/instsetoo_native/util
-dmake openoffice || exit 1
-if test "$1" = "-l" ; then
- dmake ooolanguagepack || exit 1
-fi
-
-# Summary
-echo
-echo "Packaging succeeded, the rpms are stored in:"
-echo "$OOBUILDDIR/instsetoo_native/*.pro/OpenOffice/install/*/RPMS"
-ls -d "$OOBUILDDIR/instsetoo_native/*.pro/OpenOffice/install/*/RPMS"
diff --git a/bin/create-tests-lst.sh b/bin/create-tests-lst.sh
deleted file mode 100755
index 342781c6d..000000000
--- a/bin/create-tests-lst.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/sh
-
-[ -f prj/build.lst ] || { echo "create-tests-lst.sh: Creates prj/tests.lst"; echo "Error: Must be in project subdir" ; exit 1 ; }
-
-PREFIX=`grep nmake prj/build.lst | head -n 1 | sed 's/[[:space:]].*//'`
-MODULE=`pwd | sed 's#.*/\([^/]*\)$#\1#'`
-
-find . -name makefile.mk -a \
- \( -wholename "*workben*" -o -wholename "*/test/*" \) | \
- while read DIR ; do
- DIR=${DIR#./}
- DIR=${DIR%/makefile.mk}
- SYMBOL="`echo $DIR | sed 's#/#_#g'`"
- DIR="`echo $DIR | sed 's#/#\\\#g'`"
- printf "%-4s %-50s nmake - all ${PREFIX}_$SYMBOL NULL\n" $PREFIX $MODULE\\$DIR
- done
diff --git a/bin/cvs-tags-normalize b/bin/cvs-tags-normalize
deleted file mode 100755
index 268a5aac8..000000000
--- a/bin/cvs-tags-normalize
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/perl -pi.bak -w
-
-# replaces bits like $RCSfile: Constants.idl,v $ with $RCSfile$ etc.
-# so that patches generated from cws-extract don't get nobbled by diffs that
-# include that info ( e.g very important with files that are deleted where
-# it doesn't make sense for example to use cvsclean
-
-for $subs ('RCSfile', 'Revision', 'Author', 'Date', 'Header', 'Id') {
- s/\$$subs:[^\$]+\$/\$$subs\$/;
-}
diff --git a/bin/cvsclean b/bin/cvsclean
deleted file mode 100755
index d4e66d698..000000000
--- a/bin/cvsclean
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/env perl
-
-my @output_buffer = ();
-my $fname;
-my $detectedSomeGuff = 0;
-sub pure_guff($)
-{
- my $array = shift;
- my @lines = @{$array};
- my $contains_sense = '';
- my $contains_guff = '';
- while (scalar @lines)
- {
- my $line = pop @lines;
- if ($line =~ m/\$Revision:/ ||
- $line =~ m/\$Revision\$/ ||
- $line =~ m/\$Author:/ ||
- $line =~ m/\$Author\$/ ||
- $line =~ m/\$RCSfile:/ ||
- $line =~ m/\$RCSfile\$/ ||
- $line =~ m/\$Date\$/ ||
- $line =~ m/\$Date:/) {
- $contains_guff = '1';
- } elsif ($line =~ m/^[\+\-][^\-\+]/) {
- $contains_sense = '1';
- }
- }
- if ($contains_guff && $contains_sense) {
- print STDERR "Patch fragment with mixed good/bad changes in '$ARGV' near $line_index\n";
- $contains_guff = '';
- }
- elsif ( $contains_guff ) {
- $detectedSomeGuff = 1;
- }
-# print "contains guff: $contains_guff\n";
- return $contains_guff;
-}
-
-sub output_lines($)
-{
- my $array = shift;
- my @lines = @{$array};
-
- if (pure_guff (\@lines)) {
- return;
- }
-
- while (scalar @lines)
- {
- my $line = pop @lines;
- push @output_buffer, $line;
- }
-}
-
-my $header;
-my @lines;
-my $frag_count = 0;
-$line_index = 0;
-
-while (<>) {
- if (/^\@\@/ || /^[^ \-\+]/) {
- output_lines (\@lines);
- @lines = ();
- $frag_count++;
- }
- unshift @lines, $_;
- $line_index++;
- close ARGV if eof;
-}
-output_lines(\@lines);
-
-# basically if the two files compared
-# have the values for the rcid type info
-# then the first diff is ignored.
-# hence fragstocount is set to 1 if there is any guff ( rcid head bits )
-
-my $fragstocount = 0;
-if ( $detectedSomeGuff ) {
- $fragstocount = 1;
-}
-
-if ($frag_count > $fragstocount) {
- print @output_buffer;
-}
diff --git a/bin/cws-commit-patch b/bin/cws-commit-patch
deleted file mode 100755
index 648be3984..000000000
--- a/bin/cws-commit-patch
+++ /dev/null
@@ -1,248 +0,0 @@
-#!/usr/bin/env perl
-
-$default_mws = 'DEV300';
-
-sub usage()
-{
- print STDERR "cws-commit-patch - automate the creation of CWSes
-Usage: cws-commit-patch ...params... patch1.diff patch2.diff ...
-
- -i, --iz IZ number (XXXX or iXXXX)
- -b Mercurial bundle to use
- -c, --cws CWS name (to be created or existing
- -d Path to an existing clone
- -h, --help Help
- -m, --milestone Milestone to base the CWS (defaults to current)
- -w, --mws Master Workspace name (defaults to $default_mws)
- Use with care! Almost all CWSs are created for $default_mws.
- They are cloned for branches by need.
- -s, --summary Summary describing the change (for the hg commit)
-
-Note: You should use the up-stream version available from m196 instead.\n";
- exit 1;
-}
-
-sub cws_create($$$)
-{
- my ( $mws, $cws, $milestone ) = @_;
- system( "echo '===== cws_create =====' >> .log" );
-
- my $state = `cws query -M $mws -c $cws status 2>> .log | tail -n 1`;
- $state =~ s/\n//g;
- if ( $state ne "" && $state ne "planned" ) {
- print "CWS:\t\t'$cws' already exists, its state is '$state'\n";
- }
- else {
- print "CWS:\t\tcreating '$cws'\n";
-
- if ( system( "cws create --hg -m $milestone $mws $cws 2>> .log" ) != 0 ) {
- print STDERR "Unable to execute 'cws create'.\n";
- exit 1;
- }
- print "CWS:\t\t'$cws' created\n";
- }
-}
-
-sub cws_clone($$)
-{
- my ( $cws, $bundle ) = @_;
- system( "echo '===== cws_checkout =====' >> .log" );
-
- if ( system( "mkdir $cws 2>> .log" ) != 0 ) {
- print "Cannot create '$cws' subdir, already exists. Consider using '-d'.\n";
- exit 1;
- }
- if ( system( "cd $cws && \
- hg init && \
- echo -e 'Mercurial:\tUnbundling $bundle, go and have some tea...' && \
- hg unbundle $bundle && \
- hg pull -u http://hg.services.openoffice.org/cws/$cws" ) != 0 ) {
- print STDERR "Unable to get the CWS clone, check the log for details.\n";
- exit 1;
- }
-}
-
-sub cws_add_task($)
-{
- my ( $iz ) = @_;
- system( "echo '===== cws_add_task =====' >> .log" );
-
- if ( system( "cws task i$iz 2>> .log" ) != 0 ) {
- print STDERR "Unable to add task number $iz.\n";
- exit 1;
- }
-
- print "IZ:\t\tSet to '$iz'\n";
-}
-
-sub cws_commit($$$$$$$@)
-{
- my ( $mws, $cws, $iz, $summary, $milestone, $clone, $bundle, @patches ) = @_;
-
- my $up_to_date = 0;
-
- if ( $clone eq "" ) {
- cws_create( $mws, $cws, $milestone );
-
- if ( $bundle eq "" ) {
- $bundle = "/tmp/DEV300.hg";
- if ( ! -f $bundle || ( -M $bundle > 14 ) ) {
- if ( system( "mv $bundle $bundle.save" ) == 0 ) {
- print "Bundle:\t\tSaved the old one as '$bundle.save'\n";
- }
- system( "wget http://hg.services.openoffice.org/bundle/DEV300.hg -O $bundle" );
- }
- }
- cws_clone( $cws, $bundle );
- $clone = `pwd` . "/$cws";
- $up_to_date = 1;
- }
- cws_add_task( $iz );
-
- chdir $clone;
-
- my $commit_message = "#i$iz# " . $summary;
- $commit_message =~ s/'/'\\''/g;
-
- if ( !$up_to_date ) {
- if ( system( "echo -en 'Mercurial:\t' ; hg pull -u http://hg.services.openoffice.org/cws/$cws") != 0 ) {
- print STDERR "Unable to update the CWS.\n";
- exit 1;
- }
- }
-
- for $patch ( @patches ) {
- if ( system( "hg import -m '$commit_message' -p0 $patch" ) != 0 ) {
- print STDERR "Failed to commit the patch '$patch'.\n";
- exit 1;
- }
- }
-}
-
-#
-# main()
-#
-if ( !defined( $ENV{'SOLARENV'} ) || $ENV{'SOLARENV'} eq '' ) {
- my $my_path = $0;
- $my_path =~ s#/[^/]*$##; # confuses vim syntax highlighting :-(
- my $build_dir = `. $my_path/setup > /dev/null 2>&1 ; echo \$OOBUILDDIR`;
- if ( $build_dir eq "" ) {
- print STDERR "Unable to find build dir, check OOBUILDDIR in bin/setup.\n";
- exit 1;
- }
- $build_dir =~ s/\n//;
- if ( ! -f "$build_dir/LinuxIntelEnv.Set.sh" ) {
- print STDERR "Unable to find '$build_dir/LinuxIntelEnv.Set.sh'.\n";
- exit 1;
- }
- open( $VARS, "bash -c '. $build_dir/LinuxIntelEnv.Set.sh ; set'|");
- while ( <$VARS> ) {
- /([^=]*)=(.*)/ || next;
- $ENV{$1} = $2 unless "$1" eq "SHELLOPTS";
- }
- close( $VARS );
-}
-
-my $iz = "";
-my $cws = "";
-my $milestone = "";
-my $summary = "";
-my $bundle = "";
-my $clone = "";
-my @patches = ();
-
-( my $pwd = `pwd` ) =~ s/\n//;
-
-while (@ARGV) {
- $opt = shift @ARGV;
-
- if ( $opt eq "-i" || $opt eq "--iz" ) {
- $iz = shift @ARGV;
- }
- elsif ( $opt eq "-b" ) {
- $bundle = shift @ARGV;
- }
- elsif ( $opt eq "-c" || $opt eq "--cws" ) {
- $cws = shift @ARGV;
- }
- elsif ( $opt eq "-d" ) {
- $clone = shift @ARGV;
- }
- elsif ( $opt eq "-h" || $opt eq "--help" ) {
- usage();
- }
- elsif ( $opt eq "-s" || $opt eq "--summary" ) {
- $summary = shift @ARGV;
- }
- elsif ( $opt eq "-m" || $opt eq "--milestone" ) {
- $milestone = shift @ARGV;
- if (! ( $milestone =~ m/^m/ ) ) {
- $milestone = "m$milestone";
- print "Re-writing milestone to $milestone\n";
- }
- }
- elsif ( $opt eq "-w" || $opt eq "--mws" ) {
- $mws = shift @ARGV;
- }
- else {
- my $patch = $opt;
- if ( $patch =~ /^[^\/]/ ) {
- $patch = "$pwd/$opt";
- }
-
- if ( -f $patch ) {
- push @patches, $patch;
- }
- else {
- print STDERR "Unable to find patch '$patch'.\n";
- exit 1;
- }
- }
-}
-
-if ( !defined( $cws ) || $cws eq "" ) {
- print STDERR "Please specify CWS.\n";
- exit 1;
-}
-if ( !defined( $iz ) || !( $iz =~ /^i?[0-9]+$/ ) ) {
- print STDERR "Please specify IZ number as XXXX, or iXXXX.\n";
- exit 1;
-}
-if ( ( $#patches >= 0 ) && ( !defined( $summary ) || $summary eq "" ) ) {
- print STDERR "Please provide summary.\n";
- exit 1;
-}
-if ( !defined( $mws ) || $mws eq "" ) {
- $mws = $default_mws;
- print "MWS:\t\tnot specified, assuming '$mws'\n";
-}
-if ( !defined( $milestone ) || $milestone eq "" ) {
- ( $milestone = `cws query -M $mws -c $cws current 2> /dev/null | tail -n 1` ) =~ s/\n//;
-}
-if ( $milestone ne "" ) {
- print "Milestone:\t$milestone\n";
-}
-else {
- my $latest = `cws query -M $mws latest 2> /dev/null | tail -n 1`;
-
- ( $_, $milestone ) = split( ' ', $latest );
- print "Milestone:\tnot specified, assuming '$milestone'\n";
-}
-
-$iz =~ s/^i//;
-$ENV{'CWS_WORK_STAMP'} = $cws;
-$ENV{'WORK_STAMP'} = $mws;
-
-cws_commit( $mws, $cws, $iz, $summary, $milestone, $clone, $bundle, @patches );
-
-( my $tmp = `pwd` ) =~ s/\n//;
-print "
-Finished, please check '$tmp' that everything is as expected.
-Then:
-
-- Set 'http://www.openoffice.org/issues/show_bug.cgi?id=$iz' to FIXED
- with a 'Committed to CWS $cws.' comment.
-- Fill the '$cws' info in 'http://eis.services.openoffice.org'.
-
-It's also usually a good thing to mark the patch in the apply file as
-committed to '$cws'.\n";
diff --git a/bin/cws-commit-patch-old b/bin/cws-commit-patch-old
deleted file mode 100755
index e927b11bf..000000000
--- a/bin/cws-commit-patch-old
+++ /dev/null
@@ -1,345 +0,0 @@
-#!/usr/bin/env perl
-
-$default_mws = 'SRC680';
-
-sub usage()
-{
- print STDERR "cws-commit-patch - automate the creation of CWSes
-Usage: cws-commit-patch ...params... patch1.diff patch2.diff ...
-
- -i, --iz IZ number (XXXX or iXXXX)
- -c, --cws CWS name (to be created or existing
- -d cvs root
- -h, --help Help
- -m, --milestone Milestone to base the CWS (defaults to current)
- -w, --mws Master Workspace name (defaults to $default_mws)
- Use with care! Almost all CWSs are created for $default_mws.
- They are cloned for branches by need.
- -s, --summary Summary describing the change (for cvs commit)
-
-Note: You should use the up-stream version available from m196 instead.\n";
- exit 1;
-}
-
-sub cws_create($$$)
-{
- my ( $mws, $cws, $milestone ) = @_;
- system( "echo '===== cws_create =====' >> .log" );
-
- my $tmp = `mktemp -d /tmp/cws-$cws.XXXXXX`;
- chomp( $tmp );
- print "Temp dir:\t$tmp\n";
-
- chdir( $tmp );
- $ENV{SRC_ROOT} = $tmp;
-
- my $state = `cwsquery -m $mws -c $cws state 2>> .log`;
- $state =~ s/\n//g;
- if ( $state ne "" && $state ne "planned" ) {
- print "CWS:\t\t'$cws' already exists, its state is '$state'\n";
- }
- else {
- print "CWS:\t\tcreating '$cws'\n";
-
- if ( system( "cwscreate -f $mws $milestone $cws 2>> .log" ) != 0 ) {
- print STDERR "Unable to execute cwscreate.\n";
- exit 1;
- }
- print "CWS:\t\t'$cws' created\n";
- }
-}
-
-sub cws_modules($$)
-{
- my ( $mws, $cws ) = @_;
-
- my $mod_str = `cwsquery -m $mws -c $cws modules 2>> .log`;
- return split( '\n', $mod_str );
-}
-
-sub cws_add_modules($$$@)
-{
- my ( $mws, $cws, $milestone, @patches ) = @_;
- system( "echo '===== cws_add_modules =====' >> .log" );
-
- my %modules = ();
-
- for $patch ( @patches ) {
- if ( ! open( $PATCH, "$patch" ) ) {
- print STDERR "Unable to open patch '$patch'.";
- exit 1;
- }
- while (<$PATCH>) {
- if ( /^\+\+\+ ([^\/]*)/ ) {
- if ( $1 ne "" ) {
- $modules{$1} = "add";
- }
- else {
- print STDERR "Unable to guess module name ($1) from $patch.\n";
- close( $PATCH );
- exit 1;
- }
- }
- }
- close( $PATCH );
- }
-
- my @cws_modules = cws_modules( $mws, $cws );
- for $cws_mod ( @cws_modules ) {
- if ( defined( $modules{$cws_mod} ) ) {
- $modules{$cws_mod} = "checkout";
- }
- }
-
- while ( ( $module, $what ) = each( %modules ) ) {
- if ( $what eq "add" ) {
- print "Tagging:\tModule '$module'... Be patient, please ;)\n";
- if ( system( "cvs checkout -r " . uc($mws) . "_$milestone $module >> .log 2>&1" ) != 0 ) {
- print STDERR "Unable to execute cvs checkout.\n";
- exit 1;
- }
- if ( system( "cwsadd -a -f $module >> .log 2>&1" ) != 0 ) {
- print STDERR "Unable to execute cwsadd.\n";
- exit 1;
- }
- print "Module:\t\t'$module' added to $cws\n";
- }
- elsif ( $what eq "checkout" ) {
- print "Checking out:\tModule '$module'\n";
- if ( system( "cvs checkout -r cws_" . lc($mws) . "_$cws $module >> .log 2>&1" ) != 0 ) {
- print STDERR "Unable to execute cvs checkout.\n";
- exit 1;
- }
- print "Module:\t\t'$module' already exists in $cws, checked out\n";
- }
- }
-}
-
-sub cws_add_task($)
-{
- my ( $iz ) = @_;
- system( "echo '===== cws_add_task =====' >> .log" );
-
- if ( system( "cwsaddtask i$iz 2>> .log" ) != 0 ) {
- print STDERR "Unable to add task number $iz.\n";
- exit 1;
- }
-
- print "IZ:\t\tSet to '$iz'\n";
-}
-
-sub apply_patches(@)
-{
- my ( @patches ) = @_;
- system( "echo '===== apply_patches =====' >> .log" );
-
- for $patch ( @patches ) {
- print "Patch:\t\tApplying '$patch'\n";
- if ( system( "unset POSIXLY_CORRECT ; patch -p0 -i $patch" ) != 0 ) {
- print STDERR "Unable to apply patch $patch.\n";
- exit 1;
- }
- print "Patch:\t\t'$patch' OK\n";
- }
-}
-
-sub cvs_add_files(@)
-{
- my ( @patches ) = @_;
- system( "echo '===== cvs_add_files =====' >> .log" );
-
- my %modules = ();
-
- for my $patch ( @patches ) {
- if ( ! open( $PATCH, "$patch" ) ) {
- print STDERR "Unable to open patch '$patch'.";
- exit 1;
- }
- my $file_add = "";
- while (<$PATCH>) {
- if ( /^\+\+\+ ([^\s]*)/ ) {
- if ( $1 ne "" ) {
- $file_add = $1;
- }
- }
- elsif ( /^\@\@ -0,0 / && $file_add ne "" ) {
- my @to_add = split( /\//, $file_add );
- my $current = "";
- for my $add ( @to_add ) {
- $current .= "/" unless ( $current eq "" );
- my $where_add_dir = $current;
- $current .= $add;
-
- if ( ( -d "$current" && ! -d "$current/CVS" ) || ( -f "$current" ) ) {
- if ( system( "cd $where_add_dir && cvs add $add" ) != 0 ) {
- print STDERR "Unable to exec 'cd $where_add_dir && cvs add $add'.\n";
- close( $PATCH );
- exit 1;
- }
- }
- }
- }
- }
- close( $PATCH );
- }
-}
-
-sub cws_commit($$$$$@)
-{
- my ( $mws, $cws, $iz, $summary, $milestone, @patches ) = @_;
-
- cws_create( $mws, $cws, $milestone );
- cws_add_modules( $mws, $cws, $milestone, @patches );
- cws_add_task( $iz );
-
- apply_patches( @patches );
-
- cvs_add_files( @patches );
-
- my @cws_modules = cws_modules( $mws, $cws );
- my $commit_message = "#i$iz#\n" . $summary;
- $commit_message =~ s/'/'\\''/g;
-
- my $commit = "cvs commit -m '$commit_message' ";
- for $module ( @cws_modules ) {
- if ( -d $module ) {
- $commit .= $module . " ";
- }
- }
- print "CVS:\t\t$commit\n";
- if ( system( $commit ) != 0 ) {
- print STDERR "cvs commit failed.\n";
- exit 1;
- }
-}
-
-#
-# main()
-#
-if ( !defined( $ENV{'SOLARENV'} ) || $ENV{'SOLARENV'} eq '' ) {
- my $my_path = $0;
- $my_path =~ s#/[^/]*$##;
- my $build_dir = `. $my_path/setup > /dev/null 2>&1 ; echo \$OOBUILDDIR`;
- if ( $build_dir eq "" ) {
- print STDERR "Unable to find build dir, check OOBUILDDIR in bin/setup.\n";
- exit 1;
- }
- $build_dir =~ s/\n//;
- if ( ! -f "$build_dir/LinuxIntelEnv.Set.sh" ) {
- print STDERR "Unable to find '$build_dir/LinuxIntelEnv.Set.sh'.\n";
- exit 1;
- }
- open( $VARS, "bash -c '. $build_dir/LinuxIntelEnv.Set.sh ; set'|");
- while ( <$VARS> ) {
- /([^=]*)=(.*)/ || next;
- $ENV{$1} = $2 unless "$1" eq "SHELLOPTS";
- }
- close( $VARS );
-}
-
-my $iz = "";
-my $cws = "";
-my $milestone = "";
-my $summary = "";
-my @patches = ();
-
-( my $pwd = `pwd` ) =~ s/\n//;
-
-while (@ARGV) {
- $opt = shift @ARGV;
-
- if ( $opt eq "-i" || $opt eq "--iz" ) {
- $iz = shift @ARGV;
- }
- elsif ( $opt eq "-c" || $opt eq "--cws" ) {
- $cws = shift @ARGV;
- }
- elsif ( $opt eq "-d" ) {
- $ENV{'CVSROOT'} = shift @ARGV;
- }
- elsif ( $opt eq "-h" || $opt eq "--help" ) {
- usage();
- }
- elsif ( $opt eq "-s" || $opt eq "--summary" ) {
- $summary = shift @ARGV;
- }
- elsif ( $opt eq "-m" || $opt eq "--milestone" ) {
- $milestone = shift @ARGV;
- if (! ( $milestone =~ m/^m/ ) ) {
- $milestone = "m$milestone";
- print "Re-writing milestone to $milestone\n";
- }
- }
- elsif ( $opt eq "-w" || $opt eq "--mws" ) {
- $mws = shift @ARGV;
- }
- else {
- my $patch = $opt;
- if ( $patch =~ /^[^\/]/ ) {
- $patch = "$pwd/$opt";
- }
-
- if ( -f $patch ) {
- push @patches, $patch;
- }
- else {
- print STDERR "Unable to find patch '$patch'.\n";
- exit 1;
- }
- }
-}
-
-if ( !@patches ) {
- print STDERR "No patches to apply.\n";
- exit 1;
-}
-if ( !defined( $ENV{'CVSROOT'} ) || $ENV{'CVSROOT'} eq "" ) {
- print STDERR "Please specify cvs root (using -d or environment variable).\n";
- exit 1;
-}
-if ( !defined( $iz ) || !( $iz =~ /^i?[0-9]+$/ ) ) {
- print STDERR "Please specify IZ number as XXXX, or iXXXX.\n";
- exit 1;
-}
-if ( !defined( $cws ) || $cws eq "" ) {
- print STDERR "Please specify CWS.\n";
- exit 1;
-}
-if ( !defined( $summary ) || $summary eq "" ) {
- print STDERR "Please provide summary.\n";
- exit 1;
-}
-if ( !defined( $mws ) || $mws eq "" ) {
- $mws = $default_mws;
- print "MWS:\tnot specified, assuming '$mws'\n";
-}
-if ( !defined( $milestone ) || $milestone eq "" ) {
- ( $milestone = `cwsquery -m $mws -c $cws current 2> /dev/null` ) =~ s/\n//;
-}
-if ( $milestone ne "" ) {
- print "Milestone:\t$milestone\n";
-}
-else {
- my $latest = `cwsquery -m $mws latest 2> /dev/null`;
-
- ( $_, $milestone ) = split( ' ', $latest );
- print "Milestone:\tnot specified, assuming '$milestone'\n";
-}
-
-$iz =~ s/^i//;
-$ENV{'CWS_WORK_STAMP'} = $cws;
-$ENV{'WORK_STAMP'} = $mws;
-
-cws_commit( $mws, $cws, $iz, $summary, $milestone, @patches );
-
-( my $tmp = `pwd` ) =~ s/\n//;
-print "
-Finished, please check '$tmp' that everything is as expected.
-Then:
-
-- Set 'http://www.openoffice.org/issues/show_bug.cgi?id=$iz' to FIXED
- with a 'Committed to CWS $cws.' comment.
-- Fill the '$cws' info in 'http://eis.services.openoffice.org'.
-
-It's also usually a good thing to mark the patch in the apply file as
-committed to '$cws'.\n";
diff --git a/bin/cws-cvsrebase b/bin/cws-cvsrebase
deleted file mode 100755
index 7f9d5824b..000000000
--- a/bin/cws-cvsrebase
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/bin/sh
-
-#
-# (c) 2007 Novell Inc.
-# Written by Hubert Figuiere <hfiguiere@novell.com>
-# This script is licensed under the GNU GPL v2 or (at you option)
-# any later version.
-#
-#
-# Rebase you current checkout of upstream to a new milestone
-# and just update anything that do not belong to the MWS.
-#
-# Usage is as follow:
-# $ cd OpenOffice2
-# $ cws-cvsrebase SRC680_mXXX
-# XXX is the milestone. You may want to change the tag if you
-# work on a different branch.
-# Check update.log for any CVS conflict.
-#
-# TODO:
-# -output conflict un a readable form on stdout
-# -use EIS to allow updating the CWS checkout by just
-# passing the CWS name and let the script do the rest.
-# -allow specifying the log file on the command line
-#
-
-if test -z $1 ; then
- echo "Missing MWS anchor paramater"
- echo "Usage"
- echo "$1 CVS_tag"
- echo "with CVS_tag the anchor tag for the milestone"
- exit 255
-fi
-
-logfile=`pwd`/update.log
-touch $logfile
-
-mws=$1
-shift;
-
-for i in *
-do
- if test -d $i ; then
- if test -d $i/CVS ; then
- (cd $i
- match=`cat CVS/Tag | grep ^[TN]SRC680 | wc -l`
- if test x$match = x1 ; then
- echo "$i is to be updated to $mws" >> $logfile
- cvs -qz3 update -r $mws -Pd >> $logfile 2>&1
- else
- echo "$i is cws: simply updated" >> $logfile
- cvs -qz3 update -Pd >> $logfile 2>&1
- fi
- )
- else
- echo "$i is not in CVS" >> $logfile
- fi
- fi
-done
-
diff --git a/bin/cws-extract b/bin/cws-extract
deleted file mode 100755
index 9f6f955fc..000000000
--- a/bin/cws-extract
+++ /dev/null
@@ -1,152 +0,0 @@
-#!/usr/bin/env perl
-
-$default_mws = 'DEV300';
-
-sub usage()
-{
- print STDERR "cws-extract - Create a patch from an up-stream CWS suitable for ooo-build.
-Usage: cws-extract [-s] cws_name
-
- -b Mercurial bundle to use
- -w, --mws Master Workspace name (defaults to $default_mws)
- Use with care! Almost all CWSs are created for $default_mws.
- They are cloned for branches by need.
- -m Milestone to diff against.
-
-Note: We need to create a full master checkout in current dir (or reuse one
-already there).\n";
- exit 1;
-}
-
-sub mws_checkout($$)
-{
- my ( $mws, $bundle ) = @_;
- system( "echo '===== mws_checkout =====' >> .log" );
-
- if ( $bundle eq "" ) {
- $bundle = "/tmp/$mws.hg";
- if ( ! -f $bundle || ( -M $bundle > 14 ) ) {
- if ( system( "mv $bundle $bundle.save" ) == 0 ) {
- print "Bundle:\t\tSaved the old one as '$bundle.save'\n";
- }
- system( "wget http://hg.services.openoffice.org/bundle/$mws.hg -O $bundle" );
- }
- }
-
- if ( system( "mkdir $mws 2>> .log" ) != 0 ) {
- print "Cannot create '$mws' subdir, already exists. Consider using '-d'.\n";
- exit 1;
- }
- if ( system( "cd $mws && \
- hg init && \
- echo -e 'Mercurial:\tUnbundling $bundle, go and have some tea...' && \
- hg unbundle $bundle" ) != 0 ) {
- print STDERR "Unable to setup mws clone, check the log for details.\n";
- exit 1;
- }
-}
-
-sub cws_extract($$$$)
-{
- my ( $mws, $cws, $bundle, $milestone ) = @_;
-
- $milestone =~ s/\n//g;
-
- if (not -d $mws or not -d "$mws/.hg") {
- mws_checkout($mws, $bundle);
- }
-
- print "MWS:\t\tpulling latest changes\n";
- if ( system( "cd $mws && \
- hg pull -u http://hg.services.openoffice.org/$mws" ) != 0 ) {
- print STDERR "Unable to pull latest master, check the log for details.\n";
- exit 1;
- }
-
- print "MWS:\t\tupdating to milestone: $milestone\n";
- if ( system( "cd $mws && \
- hg checkout ${mws}_${milestone}" ) != 0 ) {
- print STDERR "Unable to checkout suitable milestone, check the log for details.\n";
- exit 1;
- }
-
- print "CWS:\t\tgenerating diff against $milestone\n";
- if ( system( "cd $mws && \
- hg incoming --bundle /tmp/$cws.hg http://hg.services.openoffice.org/cws/$cws >> .log && \
- hg diff -R /tmp/$cws.hg -X .hgtags -r .:tip > ../cws-$cws.diff && \
- rm -f /tmp/$cws.hg" ) != 0 ) {
- print STDERR "Unable to generate patch against $cws, check the log for details.\n";
- exit 1;
- }
- system("sed -i 's|^--- a/|--- |g;s|^+++ b/|+++ |g' cws-$cws.diff");
-}
-
-#
-# main()
-#
-if ( !defined( $ENV{'SOLARENV'} ) || $ENV{'SOLARENV'} eq '' ) {
- my $my_path = $0;
- $my_path =~ s#/[^/]*$##; # confuses vim syntax highlighting :-(
- my $build_dir = `. $my_path/setup > /dev/null 2>&1 ; echo \$OOBUILDDIR`;
- if ( $build_dir eq "" ) {
- print STDERR "Unable to find build dir, check OOBUILDDIR in bin/setup.\n";
- exit 1;
- }
- $build_dir =~ s/\n//;
- if ( ! -f "$build_dir/LinuxIntelEnv.Set.sh" and ! -f "$build_dir/LinuxX86-64Env.Set.sh" ) {
- print STDERR "Unable to find '$build_dir/LinuxIntelEnv.Set.sh'.\n";
- exit 1;
- }
- if ( -f "$build_dir/LinuxIntelEnv.Set.sh" ) {
- open( $VARS, "bash -c '. $build_dir/LinuxIntelEnv.Set.sh ; set'|");
- } else {
- open( $VARS, "bash -c '. $build_dir/LinuxX86-64Env.Set.sh ; set'|");
- }
- while ( <$VARS> ) {
- /([^=]*)=(.*)/ || next;
- $ENV{$1} = $2 unless "$1" eq "SHELLOPTS";
- }
- close( $VARS );
-}
-
-my $cws_name = "";
-my $mws = "";
-my $bundle = "";
-my $milestone = "";
-
-( my $pwd = `pwd` ) =~ s/\n//;
-
-while (@ARGV) {
- $opt = shift @ARGV;
-
- if ( $opt eq "-h" || $opt eq "--help" ) {
- usage();
- }
- elsif ( $opt eq "-b" ) {
- $bundle = shift @ARGV;
- }
- elsif ( $opt eq "-w" || $opt eq "--mws" ) {
- $mws = shift @ARGV;
- }
- elsif ( $opt eq "-m" ) {
- $milestone = shift @ARGV;
- }
- else {
- $cws_name = $opt;
- }
-}
-
-if ( !defined( $cws_name ) || $cws_name eq "" ) {
- print STDERR "Please specify CWS.\n";
- exit 1;
-}
-if ( !defined( $milestone ) || $milestone eq "" ) {
- print STDERR "Please specify milestone.\n";
- exit 1;
-}
-if ( !defined( $mws ) || $mws eq "" ) {
- $mws = $default_mws;
- print "MWS:\t\tnot specified, assuming '$mws'\n";
-}
-
-cws_extract( $mws, $cws_name, $bundle, $milestone );
diff --git a/bin/cws-extract-cvs b/bin/cws-extract-cvs
deleted file mode 100755
index 22f569e7e..000000000
--- a/bin/cws-extract-cvs
+++ /dev/null
@@ -1,124 +0,0 @@
-#!/usr/bin/env perl
-
-use File::Temp qw/ tempfile tempdir /;
-
-sub usage() {
- print STDERR "cws-extract [-m] [-d cvs_root] [-w|--mws mws] cws_name [ouput_filename]
-Create a patch from an up-stream CWS suitable for ooo-build.
-
- -d cvs_root Specify the CVS root.
- -m|--modules One file per module.
- -s Don't ignore changes in whitespace.
- -w|--mws mws Specify the Master Workspace name (defaults to SRC680)\n";
- exit 1;
-}
-
-# parameters
-my $cws = shift @ARGV;
-my $cvsroot = "";
-my $onepermodule = "false";
-my $mws = "DEV300";
-my $whitespace = "-b -w";
-if (!defined $cws || $cws eq "-h" || $cws eq "--help") {
- usage();
-}
-while ($cws =~ /^-/) {
- if ($cws eq "-m" || $cws eq "--modules") {
- $onepermodule = "true";
- }
- elsif ($cws eq "-d") {
- $cvsroot = shift @ARGV || usage();
- }
- elsif ($cws eq "-w" || $cws eq "--mws") {
- $mws = shift @ARGV || usage();
- }
- elsif ($cws eq "-s") {
- $whitespace = "";
- }
- else {
- usage();
- }
- $cws = shift @ARGV || usage();
-}
-if (!defined $cws || !defined $mws || !defined $cvsroot) {
- usage();
-}
-
-my $outfile = shift @ARGV;
-
-$ENV{SRC_ROOT} || die "Havn't sourced the environment";
-
-if ( $cvsroot eq "" ) {
- -f "solenv/CVS/Root" || die "Doesn't look like a live OO.o cvs checkout to me";
-
- $cvsroot = `cat solenv/CVS/Root`;
- $cvsroot =~ s/\n/ /g;
- $cvsroot =~ s/\s*$//;
-}
-
-my $ucmws = uc( $mws );
-my $modules = `CWS_WORK_STAMP=$cws cwsquery modules -m $ucmws`;
-$modules =~ s/\r\n//g;
-$modules =~ s/\n/ /g;
-$modules =~ s/ / /g;
-
-my @mod_list = split (/ /, $modules);
-print STDERR "Modules: '" . join (",", @mod_list) . "'\n";
-
-my $tag = "cws_" . lc($mws) . "_$cws";
-my $anchor = uc ("$tag" . "_ANCHOR");
-
-print STDERR "Detecting cws tag:";
-if( @mod_list != 0 )
-{
- my ($line, $found_orig_mws) = $mws;
- my $i = 0;
- do {
- open TAGS, "cvs -d '$cvsroot' log '$mod_list[$i]' 2>/dev/null |";
- $i++;
- } while ( eof ( TAGS ) && $i < scalar (@mod_list) );
- while (<TAGS>)
- {
- if( ($found_orig_mws) = m/cws_(.*)_$cws/ )
- {
- if( $found_orig_mws ne $mws )
- {
- # mws and tag differ - update
- $tag = "cws_" . lc($found_orig_mws) . "_$cws";
- $anchor = uc ("$tag" . "_ANCHOR");
- }
- last;
- }
- }
- close(TAGS);
-}
-print STDERR " $tag\n";
-
-for my $module (@mod_list) {
- my $tmp = tempdir ("$tag.XXXXXX", CLEANUP => 1);
- print STDERR "Using temp directory: $tmp\n";
-
- print STDERR "cvs -d '$cvsroot' checkout -r$tag $module\n";
- system ("cd $tmp && cvs -d '$cvsroot' checkout -r$tag $module\n");
-
- my $finaloutfile;
- if (!defined $outfile) {
- if ($onepermodule eq "true" ) {
- $finaloutfile = "cws-$cws-$module.diff";
- }
- else {
- $finaloutfile = "cws-$cws.diff";
- }
- }
- else {
- if ($onepermodule eq "true" ) {
- $finaloutfile = "$module-$outfile";
- }
- else {
- $finaloutfile = "$outfile";
- }
- }
-
- print STDERR "cvs -d '$cvsroot' diff -kk -upN $whitespace -B -r$anchor -r$tag $module >> $finaloutfile 2>&1\n";
- system ("( cd $tmp && cvs -d '$cvsroot' diff -kk -upN $whitespace -B -r$anchor -r$tag $module ) >> $finaloutfile 2>&1");
-}
diff --git a/bin/cws-extract-svn b/bin/cws-extract-svn
deleted file mode 100755
index b5934d439..000000000
--- a/bin/cws-extract-svn
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/bin/sh
-
-usage() {
- echo 1>&2 << EOF
-cws-extract [-s] cws_name
-Create a patch from an up-stream CWS suitable for ooo-build.
-
- -s Don't ignore changes in whitespace.
-EOF
- exit 1;
-}
-
-echo "Warning! This is the SVN version of cws-extract, use
-cws-extract-cvs for the older OOo CWSes."
-
-CWS=""
-DIFFPARAMS="-ubwp"
-while [ -n "$1" ] ; do
- case "$1" in
- -s) DIFFPARAMS="-up"
- ;;
- *) CWS="$1"
- ;;
- esac
- shift
-done
-
-[ -n "$CWS" ] || usage
-
-# log of the branch only
-MERGE_BASE=`svn log --stop-on-copy "svn+ssh://svn@svn.services.openoffice.org/ooo/cws/$CWS" | \
- grep 'CWS-TOOLING: \(rebase\|create\) CWS' | head -n 1 | \
- sed 's/^.*trunk@\([0-9]\+\).*$/\1/'`
-
-if [ -n "$MERGE_BASE" ] ; then
- svn diff -x "$DIFFPARAMS" "svn+ssh://svn@svn.services.openoffice.org/ooo/trunk@$MERGE_BASE" "svn+ssh://svn@svn.services.openoffice.org/ooo/cws/$CWS" > "cws-$CWS.diff"
-fi
diff --git a/bin/cws-removal-fixup b/bin/cws-removal-fixup
deleted file mode 100755
index 906757836..000000000
--- a/bin/cws-removal-fixup
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env perl
-
-my $fname = shift;
-my $sources = shift;
-
-if ( !defined( $fname ) || !defined( $sources ) ) {
- print STDERR "cws-removal-fixup cws-<name>.diff sources/srcABC-mXYZ
-Fix cws extract where were some removed files to apply cleanly.\n";
- exit 1;
-}
-
-open( IN, $fname ) or die "Cannot open '$fname'";
-
-my $file = "";
-my ( $RCSfile, $Revision, $Author, $Date );
-while ( <IN> )
-{
- if ( /^--- ([^\t]*)/ ) {
- $file = $1;
- $RCSfile = "";
- $Revision = "";
- $Author = "";
- $Date = "";
- }
- if ( /^-.*\$(RCSfile|Revision|Author|Date)\$/ ) {
- if ( $RCSfile eq "" ) {
- $RCSfile = `grep '\$RCSfile' $sources/$file | head -n 1 | sed 's/^.*\\\$\\\(RCSfile[^\$]*\\\)\\\$.*\$/\\1/'`;
- $Revision = `grep '\$Revision' $sources/$file | head -n 1 | sed 's/^.*\\\$\\\(Revision[^\$]*\\\)\\\$.*\$/\\1/'`;
- $Author = `grep '\$Author' $sources/$file | head -n 1 | sed 's/^.*\\\$\\\(Author[^\$]*\\\)\\\$.*\$/\\1/'`;
- $Date = `grep '\$Date' $sources/$file | head -n 1 | sed 's/^.*\\\$\\\(Date[^\$]*\\\)\\\$.*\$/\\1/'`;
- chomp $RCSfile;
- chomp $Revision;
- chomp $Author;
- chomp $Date;
- }
- s/RCSfile/$RCSfile/g if ( /\$RCSfile\$/ );
- s/Revision/$Revision/g if ( /\$Revision\$/ );
- s/Author/$Author/g if ( /\$Author\$/ );
- s/Date/$Date/g if ( /\$Date\$/ );
- }
- print;
-}
-close( IN );
diff --git a/bin/defuzzpatch b/bin/defuzzpatch
deleted file mode 100755
index cd60f734e..000000000
--- a/bin/defuzzpatch
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/bin/sh
-
-usage()
-{
- echo "Apply the given patch and regenerate it if it has fuzzy hunks"
- echo
- echo "Usage: ${0##*/} [--help] dest_dir file patch_tool_options"
- echo
- echo "Options:"
- echo
- echo " --help - print this help"
- echo " dest_dir - root directory where the patches are applied with -p0"
- echo " file - patch file to apply and update"
- echo " patch_tool_options"
- echo " - options of the patch tool, see man patch"
- echo
- echo "Note: it does not refresh the given patch if the --dry-run or -R option is used"
-}
-
-if test "$1" = "--help" -o $# -lt 3 ; then
- usage
- exit 0;
-fi
-
-dest_dir="$1"
-patch_file="$2"
-shift
-shift
-
-if ! test -d "$dest_dir" ; then
- echo "Error: Directory does not exist: $dest_dir"
- exit 1;
-fi
-
-if ! test -f "$patch_file" ; then
- echo "Error: File does not exist: $patch_file"
- exit 1;
-fi
-
-# look for --dry-run and -R
-refresh_flags="-b -z .old"
-if echo "$*" | grep -q -e "--dry-run" -e '[[:blank:]]-R[[:blank:]]' ; then
- refresh_flags=
-fi
-
-# apply the patch and save the en_US messages
-patch_log=`mktemp /tmp/defuzzypatch-log.XXXXXX`
-LANG=en_US.UTF-8 patch "$@" $refresh_flags <$patch_file 2>&1 | tee $patch_log || exit 1;
-
-if test -n "$refresh_flags" && grep -q "with fuzz" $patch_log ; then
- echo "Regenerating $patch_file..."
- patch_temp=`mktemp /tmp/defuzzpatch-diff.XXXXXX`
- IFS_old="$IFS"
- IFS='
-'
- cd $dest_dir
- for file in `grep "^patching file" $patch_log | sed "s|^patching file ||"` ; do
- diff -puN "${file}".old "$file" >>$patch_temp
- done
- cd - >/dev/null 2>&1
- IFS="$IFS_old"
- mv "$patch_file" "${patch_file}".old
- mv $patch_temp "$patch_file"
- chmod 644 "$patch_file"
-fi
-
-rm $patch_log
diff --git a/bin/deps.sh b/bin/deps.sh
deleted file mode 100755
index f640a3e31..000000000
--- a/bin/deps.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/bin/sh
-
-# Find all dependencies based on prj/build.lst (transitive closure)
-# [not the ideal implementation, but it was fast to write ;-)]
-# The result is in deps.txt
-
-rm -f deps.tmp.[12]
-
-for I in * ; do
- [ -f $I/prj/build.lst ] || continue
- DEPS=`grep -v '^[[:space:]]*#' < $I/prj/build.lst |
- sed -e 's/#.*//' -e 's/[[:space:]]\+/ /g' \
- -e 's/^[^:]*:\+ \?//' -e 's/ \?NULL//' \
- -e 's/\<[^ ]*://g' | head -n1`
- echo $I $DEPS
-done > deps.tmp.2
-
-touch deps.tmp.1
-while [ "`ls -s --block-size=1 deps.tmp.1 | sed 's/ .*//'`" -ne "`ls -s --block-size=1 deps.tmp.2 | sed 's/ .*//'`" ] ; do
- mv deps.tmp.2 deps.tmp.1
- while read MODULE DEP ; do
- echo -n "$MODULE "
- for I in $DEP ; do
- grep "^\<$I\>" deps.tmp.1 || echo $I
- done | tr ' ' '\n' | sort | uniq | tr '\n' ' '
- echo
- done < deps.tmp.1 > deps.tmp.2
-done
-
-sed 's/ \?/: /' < deps.tmp.1 > deps.txt
-rm deps.tmp.[12]
diff --git a/bin/diaconvert.pl b/bin/diaconvert.pl
deleted file mode 100755
index 03f3b1cfd..000000000
--- a/bin/diaconvert.pl
+++ /dev/null
@@ -1,364 +0,0 @@
-#!/usr/bin/env perl
-
-# TODO:
-# glue-point positioning
-# line-width, fill
-
-use XML::Parser;
-
-my $X_OFFSET_MIN = 2;
-my $X_OFFSET_MAX = 8;
-
-my $xoffset = $X_OFFSET_MIN;
-my $yoffset = 2;
-my $scale = 5;
-my $unit = "cm";
-my $prefix = ' ';
-
-sub get_size_attr($$)
-{
- my ($value, $name) = @_;
- return $value->[0]->{$name} / $scale if defined $value->[0]->{$name};
- return 0.0;
-}
-
-sub get_point_attr($$)
-{
- my ($value, $name) = @_;
- my $offset = $yoffset;
- if ($name =~ /x/) {
- $offset = $xoffset;
- }
- return get_size_attr ($value, $name) + $offset;
-}
-
-sub parse_glue
-{
- my $elems = shift;
- my $idx = 1;
- my $glue = '';
- my $count = 1;
-
- while (defined $elems->[$idx]) {
- my $attr = $elems->[$idx];
- my $value = $elems->[$idx+1];
-
- if ($attr eq 'point') {
- my $x = get_point_attr($value, 'x');
- my $y = get_point_attr($value, 'y');
-
- $glue .= $prefix . ' ';
- $glue .= "<draw:glue-point draw:id=\"$count\" ".
- "svg:x=\"$x$unit\" svg:y=\"$y$unit\"/>\n";
- $count++;
- }
-# print "glue elem '" . $elems->[$idx] . "'\n";
- $idx+=2;
- }
-
- return $glue;
-}
-
-sub draw_preamble($@)
-{
- my $name = shift;
- my $style = shift;
- $style = 'def' if (!defined $style);
- return $prefix . ' ' .
- "<draw:$name draw:style-name=\"$style\" " .
- "draw:text-style-name=\"P1\" " .
- "draw:layer=\"layout\"\n" .
- $prefix . ' ' . ' ';
-}
-
-sub transfer_attr($$)
-{
- my ($value, $name) = @_;
- my $attr = get_point_attr ($value, $name);
- return "svg:$name=\"$attr$unit\" ";
-}
-
-sub draw_postamble($)
-{
- my $name = shift;
- return ">\n" . $prefix . ' ' . "<text:p/></draw:$name>\n";
-}
-
-sub draw_line($$)
-{
- my ($attr, $value) = @_;
- return draw_preamble ('line') .
- transfer_attr ($value, 'x1') .
- transfer_attr ($value, 'y1') .
- transfer_attr ($value, 'x2') .
- transfer_attr ($value, 'y2') .
- draw_postamble ('line');
-}
-
-sub draw_rect($$)
-{
- my ($attr, $value) = @_;
- my $svg;
- my $width = get_size_attr ($value, 'width');
- my $height = get_size_attr ($value, 'height');
- return draw_preamble ('rect', 'nofill') .
- transfer_attr ($value, 'x') .
- transfer_attr ($value, 'y') .
- "svg:width=\"$width$unit\" " .
- "svg:height=\"$height$unit\" " .
- draw_postamble ('rect');
-}
-
-sub draw_circle($$)
-{
- my ($attr, $value) = @_;
- my $r = get_size_attr ($value, 'r');
- my $x = get_point_attr ($value, 'cx') - $r;
- my $y = get_point_attr ($value, 'cy') - $r;
- my $size = $r * 2;
-
- return draw_preamble ('circle', 'nofill') .
- "svg:width=\"$size$unit\" " .
- "svg:height=\"$size$unit\" " .
- "svg:x=\"$x$unit\" " .
- "svg:y=\"$y$unit\" " .
- draw_postamble ('circle');
-}
-
-sub adapt_bbox($$$)
-{
- my ($data, $x, $y) = @_;
-
- $data->{minx} = $x if ($x < $data->{minx});
- $data->{miny} = $y if ($y < $data->{miny});
- $data->{maxx} = $x if ($x > $data->{maxx});
- $data->{maxy} = $y if ($y > $data->{maxy});
-}
-
-sub draw_bbox($)
-{
- my $bbox = shift;
- return "svg:x=\"" . $bbox->{minx}/1000 . "$unit\" " .
- "svg:y=\"" . $bbox->{miny}/1000 . "$unit\" " .
- "svg:width=\"" . $bbox->{maxx}/1000 . "$unit\" " .
- "svg:height=\"" . $bbox->{maxy}/1000 . "$unit\" " .
- "svg:viewBox=\"" . $bbox->{minx} . " " . $bbox->{miny} . " " .
- $bbox->{maxx} . " " . $bbox->{maxy} . "\" ";
-}
-
-sub draw_poly($$)
-{
- my ($attr, $value) = @_;
- my $svg = '';
- my $src_points = $value->[0]->{points};
- my $points = '';
- my %rbbox = ( maxx => 0, maxy => 0,
- minx => 10000000, miny => 10000000 );
- my $bbox = \%rbbox;
-
- for my $coord (split / /, $src_points) {
- my ($a, $b) = split /,/, $coord;
- my $x = ($a / $scale + $xoffset) * 1000;
- my $y = ($b / $scale + $yoffset) * 1000;
-
- $points .= "$x,$y ";
- adapt_bbox ($bbox, $x, $y);
- }
-
- my $viewbox =
-
- $attr =~ s/^svg://;
- $svg .= draw_preamble ($attr, 'nofill') .
- draw_bbox ($bbox) .
- "draw:points=\"$points\" " .
- draw_postamble ($attr);
-
- return $svg;
-}
-
-sub adapt_point ($$$)
-{
- my ($bbox, $x, $y) = @_;
-
- $x = ($x / $scale) * 1000 + $xoffset;
- $y = ($y / $scale) * 1000 + $yoffset;
- adapt_bbox ($bbox, $x, $y);
- return "$x $y ";
-}
-
-sub draw_path
-{
- my ($attr, $value) = @_;
- my $svg = '';
- my $path = $value->[0]->{'d'};
-
-# ensure adequate field separation
- $path =~ s/[lL]/ l /g;
- $path =~ s/[mM]/ m /g;
- $path =~ s/[cC]/ c /g;
- $path =~ s/[zZ]/ z /g;
- $path =~ s/-/ -/g;
- $path =~ s/,/ /g;
-
- $path =~ s/^\s*//;
- my @elems = split (/ +/, $path);
-
- my %rbbox = ( maxx => 0, maxy => 0,
- minx => 10000000, miny => 10000000 );
- my $bbox = \%rbbox;
-
- my $data = '';
-
- while (my $elem = shift @elems) {
- if ($elem eq 'm' || $elem eq 'l') {
- $data .= $elem . " " . adapt_point ($bbox, shift (@elems), shift (@elems));
-
- } elsif ($elem eq 's' || $elem eq 'c') {
- $data .= $elem . " ";
- $data .= adapt_point ($bbox, shift (@elems), shift (@elems));
- $data .= adapt_point ($bbox, shift (@elems), shift (@elems));
-
- } elsif ($elem eq 'z') {
- $data .= "z";
- }
- }
-
- $svg .= draw_preamble('path') .
- "svg:d=\"$data\" " .
- draw_bbox($bbox) .
- "/>";
-
-# print STDERR "No svg:path handling\n";
-# path is 'M\s+<x>\s+<y>
-# L<x><y>
-# C<x1y1> <x2y2> <x3y3>
-# 'z'
-# viewbox calculation ?!? - guess it ? expand bits ?
- return $svg;
-}
-
-%draw_handlers = (
- 'svg:rect' => \&draw_rect,
- 'svg:polygon' => \&draw_poly,
- 'svg:polyline' => \&draw_poly,
- 'svg:line' => \&draw_line,
- 'svg:circle' => \&draw_circle,
- 'svg:path' => \&draw_path
-);
-
-sub parse_svg
-{
- my $elems = shift;
- my $idx = 1;
- my $svg = '';
- my $count = 1;
-
- while (defined $elems->[$idx]) {
- my $attr = $elems->[$idx];
- my $value = $elems->[$idx+1];
- my $draw = $draw_handlers{$attr};
-
- if (defined $draw) {
- $svg .= $draw->($attr, $value);
-
- } elsif ($attr ne '0') {
- print STDERR "unknown svg elem '" . $elems->[$idx] . "'\n";
- }
- $idx+=2;
- }
-
- return $svg;
-}
-
-sub parse_shape
-{
- my $elems = shift;
- my $idx = 1;
- my $name = '';
- my $descr = '';
- my $shape = '';
- my $glue = '';
-
- while (defined $elems->[$idx]) {
- my $attr = $elems->[$idx];
- my $value = $elems->[$idx+1];
-
- $name = $value->[2] if ($attr eq 'name');
- $descr = $value->[2] if ($attr eq 'description');
- $glue = parse_glue ($value) if ($attr eq 'connections');
- $draw = parse_svg ($value) if ($attr eq 'svg:svg');
-
-# print "elem '" . $elems->[$idx] . "'\n";
- $idx += 2;
- }
-
- print <<"EOS"
-$prefix<draw:g draw:name="$name">
-$glue
-$draw
-$prefix</draw:g>
-EOS
-;
-}
-
-sub output_header
-{
-print <<'EOS'
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE office:document-content PUBLIC "-//OpenOffice.org//DTD OfficeDocument 1.0//EN" "office.dtd">
-<office:document-content xmlns:office="http://openoffice.org/2000/office" xmlns:style="http://openoffice.org/2000/style" xmlns:text="http://openoffice.org/2000/text" xmlns:table="http://openoffice.org/2000/table" xmlns:draw="http://openoffice.org/2000/drawing" xmlns:fo="http://www.w3.org/1999/XSL/Format" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:number="http://openoffice.org/2000/datastyle" xmlns:presentation="http://openoffice.org/2000/presentation" xmlns:svg="http://www.w3.org/2000/svg" xmlns:chart="http://openoffice.org/2000/chart" xmlns:dr3d="http://openoffice.org/2000/dr3d" xmlns:math="http://www.w3.org/1998/Math/MathML" xmlns:form="http://openoffice.org/2000/form" xmlns:script="http://openoffice.org/2000/script" office:class="drawing" office:version="1.0">
- <office:script/>
- <office:automatic-styles>
- <style:style style:name="dp1" style:family="drawing-page"/>
- <style:style style:name="def" style:family="graphics" style:parent-style-name="standard">
- <style:properties draw:textarea-horizontal-align="center" draw:textarea-vertical-align="middle"/>
- </style:style>
- <style:style style:name="nofill" style:family="graphics" style:parent-style-name="objectwithoutfill">
- <style:properties draw:fill="none" draw:textarea-horizontal-align="center" draw:textarea-vertical-align="middle"/>
- </style:style>
- <style:style style:name="P1" style:family="paragraph">
- <style:properties fo:text-align="center"/>
- </style:style>
- </office:automatic-styles>
- <office:body>
- <draw:page draw:name="page1" draw:style-name="dp1" draw:master-page-name="Default">
-EOS
-;
-}
-
-sub output_footer
-{
-print <<'EOS'
- </draw:page>
- </office:body>
-</office:document-content>
-EOS
-;
-}
-
-sub output_shape($)
-{
- my $file = shift;
-
- my $parser = new XML::Parser (Style => 'Tree');
- my $tree = $parser->parsefile ($file) || die "Faield to parse\n";
-
- $tree->[0] eq 'shape' || die "No shape\n";
- parse_shape ($tree->[1]);
-}
-
-output_header ();
-
-while (my $file = shift @ARGV)
-{
- die "Can't find file \"$file\""
- unless -f $file;
- output_shape ($file);
- $xoffset += 2.0;
- if ($xoffset > $X_OFFSET_MAX) {
- $xoffset = $X_OFFSET_MIN;
- $yoffset += 2.0
- }
-}
-
-output_footer ();
diff --git a/bin/doxygen.cfg b/bin/doxygen.cfg
deleted file mode 100644
index 36be3c27b..000000000
--- a/bin/doxygen.cfg
+++ /dev/null
@@ -1,1314 +0,0 @@
-# Doxyfile 1.5.3
-
-# This file describes the settings to be used by the documentation system
-# doxygen (www.doxygen.org) for a project
-#
-# All text after a hash (#) is considered a comment and will be ignored
-# The format is:
-# TAG = value [value, ...]
-# For lists items can also be appended using:
-# TAG += value [value, ...]
-# Values that contain spaces should be placed between quotes (" ")
-
-#---------------------------------------------------------------------------
-# Project related configuration options
-#---------------------------------------------------------------------------
-
-# This tag specifies the encoding used for all characters in the config file that
-# follow. The default is UTF-8 which is also the encoding used for all text before
-# the first occurrence of this tag. Doxygen uses libiconv (or the iconv built into
-# libc) for the transcoding. See http://www.gnu.org/software/libiconv for the list of
-# possible encodings.
-
-DOXYFILE_ENCODING = UTF-8
-
-# The PROJECT_NAME tag is a single word (or a sequence of words surrounded
-# by quotes) that should identify the project.
-
-PROJECT_NAME = "$(DOXYGEN_PROJECTNAME) ($(DOXYGEN_VERSION)) "
-
-# The PROJECT_NUMBER tag can be used to enter a project or revision number.
-# This could be handy for archiving the generated documentation or
-# if some version control system is used.
-
-PROJECT_NUMBER = 1
-
-# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute)
-# base path where the generated documentation will be put.
-# If a relative path is entered, it will be relative to the location
-# where doxygen was started. If left blank the current directory will be used.
-
-OUTPUT_DIRECTORY = $(DOXYGEN_OUTPUT)
-
-# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create
-# 4096 sub-directories (in 2 levels) under the output directory of each output
-# format and will distribute the generated files over these directories.
-# Enabling this option can be useful when feeding doxygen a huge amount of
-# source files, where putting all generated files in the same directory would
-# otherwise cause performance problems for the file system.
-
-CREATE_SUBDIRS = NO
-
-# The OUTPUT_LANGUAGE tag is used to specify the language in which all
-# documentation generated by doxygen is written. Doxygen will use this
-# information to generate all constant output in the proper language.
-# The default language is English, other supported languages are:
-# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional,
-# Croatian, Czech, Danish, Dutch, Finnish, French, German, Greek, Hungarian,
-# Italian, Japanese, Japanese-en (Japanese with English messages), Korean,
-# Korean-en, Lithuanian, Norwegian, Polish, Portuguese, Romanian, Russian,
-# Serbian, Slovak, Slovene, Spanish, Swedish, and Ukrainian.
-
-OUTPUT_LANGUAGE = English
-
-# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will
-# include brief member descriptions after the members that are listed in
-# the file and class documentation (similar to JavaDoc).
-# Set to NO to disable this.
-
-BRIEF_MEMBER_DESC = YES
-
-# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend
-# the brief description of a member or function before the detailed description.
-# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
-# brief descriptions will be completely suppressed.
-
-REPEAT_BRIEF = YES
-
-# This tag implements a quasi-intelligent brief description abbreviator
-# that is used to form the text in various listings. Each string
-# in this list, if found as the leading text of the brief description, will be
-# stripped from the text and the result after processing the whole list, is
-# used as the annotated text. Otherwise, the brief description is used as-is.
-# If left blank, the following values are used ("$name" is automatically
-# replaced with the name of the entity): "The $name class" "The $name widget"
-# "The $name file" "is" "provides" "specifies" "contains"
-# "represents" "a" "an" "the"
-
-ABBREVIATE_BRIEF =
-
-# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
-# Doxygen will generate a detailed section even if there is only a brief
-# description.
-
-ALWAYS_DETAILED_SEC = NO
-
-# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
-# inherited members of a class in the documentation of that class as if those
-# members were ordinary class members. Constructors, destructors and assignment
-# operators of the base classes will not be shown.
-
-INLINE_INHERITED_MEMB = NO
-
-# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full
-# path before files name in the file list and in the header files. If set
-# to NO the shortest path that makes the file name unique will be used.
-
-FULL_PATH_NAMES = NO
-
-# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag
-# can be used to strip a user-defined part of the path. Stripping is
-# only done if one of the specified strings matches the left-hand part of
-# the path. The tag can be used to show relative paths in the file list.
-# If left blank the directory from which doxygen is run is used as the
-# path to strip.
-
-STRIP_FROM_PATH =
-
-# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of
-# the path mentioned in the documentation of a class, which tells
-# the reader which header file to include in order to use a class.
-# If left blank only the name of the header file containing the class
-# definition is used. Otherwise one should specify the include paths that
-# are normally passed to the compiler using the -I flag.
-
-STRIP_FROM_INC_PATH =
-
-# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter
-# (but less readable) file names. This can be useful if your file system
-# doesn't support long names like on DOS, Mac, or CD-ROM.
-
-SHORT_NAMES = NO
-
-# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen
-# will interpret the first line (until the first dot) of a JavaDoc-style
-# comment as the brief description. If set to NO, the JavaDoc
-# comments will behave just like regular Qt-style comments
-# (thus requiring an explicit @brief command for a brief description.)
-
-JAVADOC_AUTOBRIEF = YES
-
-# If the QT_AUTOBRIEF tag is set to YES then Doxygen will
-# interpret the first line (until the first dot) of a Qt-style
-# comment as the brief description. If set to NO, the comments
-# will behave just like regular Qt-style comments (thus requiring
-# an explicit \brief command for a brief description.)
-
-QT_AUTOBRIEF = NO
-
-# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen
-# treat a multi-line C++ special comment block (i.e. a block of //! or ///
-# comments) as a brief description. This used to be the default behaviour.
-# The new default is to treat a multi-line C++ comment block as a detailed
-# description. Set this tag to YES if you prefer the old behaviour instead.
-
-MULTILINE_CPP_IS_BRIEF = NO
-
-# If the DETAILS_AT_TOP tag is set to YES then Doxygen
-# will output the detailed description near the top, like JavaDoc.
-# If set to NO, the detailed description appears after the member
-# documentation.
-
-DETAILS_AT_TOP = NO
-
-# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented
-# member inherits the documentation from any documented member that it
-# re-implements.
-
-INHERIT_DOCS = YES
-
-# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce
-# a new page for each member. If set to NO, the documentation of a member will
-# be part of the file/class/namespace that contains it.
-
-SEPARATE_MEMBER_PAGES = NO
-
-# The TAB_SIZE tag can be used to set the number of spaces in a tab.
-# Doxygen uses this value to replace tabs by spaces in code fragments.
-
-TAB_SIZE = 4
-
-# This tag can be used to specify a number of aliases that acts
-# as commands in the documentation. An alias has the form "name=value".
-# For example adding "sideeffect=\par Side Effects:\n" will allow you to
-# put the command \sideeffect (or @sideeffect) in the documentation, which
-# will result in a user-defined paragraph with heading "Side Effects:".
-# You can put \n's in the value part of an alias to insert newlines.
-
-ALIASES = "tpl=\par Template parameter: \n" "since=\par Available since: \n" "derive=\par Derive: \n" "attention=\par Attention: \n"
-# precond throws see path todo(own section, can enable!)
-
-# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C
-# sources only. Doxygen will then generate output that is more tailored for C.
-# For instance, some of the names that are used will be different. The list
-# of all members will be omitted, etc.
-
-OPTIMIZE_OUTPUT_FOR_C = NO
-
-# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java
-# sources only. Doxygen will then generate output that is more tailored for Java.
-# For instance, namespaces will be presented as packages, qualified scopes
-# will look different, etc.
-
-OPTIMIZE_OUTPUT_JAVA = NO
-
-# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want to
-# include (a tag file for) the STL sources as input, then you should
-# set this tag to YES in order to let doxygen match functions declarations and
-# definitions whose arguments contain STL classes (e.g. func(std::string); v.s.
-# func(std::string) {}). This also make the inheritance and collaboration
-# diagrams that involve STL classes more complete and accurate.
-
-BUILTIN_STL_SUPPORT = NO
-
-# If you use Microsoft's C++/CLI language, you should set this option to YES to
-# enable parsing support.
-
-CPP_CLI_SUPPORT = NO
-
-# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
-# tag is set to YES, then doxygen will reuse the documentation of the first
-# member in the group (if any) for the other members of the group. By default
-# all members of a group must be documented explicitly.
-
-DISTRIBUTE_GROUP_DOC = NO
-
-# Set the SUBGROUPING tag to YES (the default) to allow class member groups of
-# the same type (for instance a group of public functions) to be put as a
-# subgroup of that type (e.g. under the Public Functions section). Set it to
-# NO to prevent subgrouping. Alternatively, this can be done per class using
-# the \nosubgrouping command.
-
-SUBGROUPING = YES
-
-#---------------------------------------------------------------------------
-# Build related configuration options
-#---------------------------------------------------------------------------
-
-# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in
-# documentation are documented, even if no documentation was available.
-# Private class members and static file members will be hidden unless
-# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES
-
-EXTRACT_ALL = YES
-
-# If the EXTRACT_PRIVATE tag is set to YES all private members of a class
-# will be included in the documentation.
-
-EXTRACT_PRIVATE = YES
-
-# If the EXTRACT_STATIC tag is set to YES all static members of a file
-# will be included in the documentation.
-
-EXTRACT_STATIC = YES
-
-# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs)
-# defined locally in source files will be included in the documentation.
-# If set to NO only classes defined in header files are included.
-
-EXTRACT_LOCAL_CLASSES = YES
-
-# This flag is only useful for Objective-C code. When set to YES local
-# methods, which are defined in the implementation section but not in
-# the interface are included in the documentation.
-# If set to NO (the default) only methods in the interface are included.
-
-EXTRACT_LOCAL_METHODS = NO
-
-# If this flag is set to YES, the members of anonymous namespaces will be extracted
-# and appear in the documentation as a namespace called 'anonymous_namespace{file}',
-# where file will be replaced with the base name of the file that contains the anonymous
-# namespace. By default anonymous namespace are hidden.
-
-EXTRACT_ANON_NSPACES = NO
-
-# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all
-# undocumented members of documented classes, files or namespaces.
-# If set to NO (the default) these members will be included in the
-# various overviews, but no documentation section is generated.
-# This option has no effect if EXTRACT_ALL is enabled.
-
-HIDE_UNDOC_MEMBERS = NO
-
-# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all
-# undocumented classes that are normally visible in the class hierarchy.
-# If set to NO (the default) these classes will be included in the various
-# overviews. This option has no effect if EXTRACT_ALL is enabled.
-
-HIDE_UNDOC_CLASSES = NO
-
-# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all
-# friend (class|struct|union) declarations.
-# If set to NO (the default) these declarations will be included in the
-# documentation.
-
-HIDE_FRIEND_COMPOUNDS = NO
-
-# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any
-# documentation blocks found inside the body of a function.
-# If set to NO (the default) these blocks will be appended to the
-# function's detailed documentation block.
-
-HIDE_IN_BODY_DOCS = NO
-
-# The INTERNAL_DOCS tag determines if documentation
-# that is typed after a \internal command is included. If the tag is set
-# to NO (the default) then the documentation will be excluded.
-# Set it to YES to include the internal documentation.
-
-INTERNAL_DOCS = NO
-
-# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate
-# file names in lower-case letters. If set to YES upper-case letters are also
-# allowed. This is useful if you have classes or files whose names only differ
-# in case and if your file system supports case sensitive file names. Windows
-# and Mac users are advised to set this option to NO.
-
-CASE_SENSE_NAMES = YES
-
-# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen
-# will show members with their full class and namespace scopes in the
-# documentation. If set to YES the scope will be hidden.
-
-HIDE_SCOPE_NAMES = NO
-
-# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen
-# will put a list of the files that are included by a file in the documentation
-# of that file.
-
-SHOW_INCLUDE_FILES = YES
-
-# If the INLINE_INFO tag is set to YES (the default) then a tag [inline]
-# is inserted in the documentation for inline members.
-
-INLINE_INFO = YES
-
-# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen
-# will sort the (detailed) documentation of file and class members
-# alphabetically by member name. If set to NO the members will appear in
-# declaration order.
-
-SORT_MEMBER_DOCS = YES
-
-# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the
-# brief documentation of file, namespace and class members alphabetically
-# by member name. If set to NO (the default) the members will appear in
-# declaration order.
-
-SORT_BRIEF_DOCS = NO
-
-# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be
-# sorted by fully-qualified names, including namespaces. If set to
-# NO (the default), the class list will be sorted only by class name,
-# not including the namespace part.
-# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
-# Note: This option applies only to the class list, not to the
-# alphabetical list.
-
-SORT_BY_SCOPE_NAME = NO
-
-# The GENERATE_TODOLIST tag can be used to enable (YES) or
-# disable (NO) the todo list. This list is created by putting \todo
-# commands in the documentation.
-
-GENERATE_TODOLIST = NO
-
-# The GENERATE_TESTLIST tag can be used to enable (YES) or
-# disable (NO) the test list. This list is created by putting \test
-# commands in the documentation.
-
-GENERATE_TESTLIST = NO
-
-# The GENERATE_BUGLIST tag can be used to enable (YES) or
-# disable (NO) the bug list. This list is created by putting \bug
-# commands in the documentation.
-
-GENERATE_BUGLIST = NO
-
-# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or
-# disable (NO) the deprecated list. This list is created by putting
-# \deprecated commands in the documentation.
-
-GENERATE_DEPRECATEDLIST= YES
-
-# The ENABLED_SECTIONS tag can be used to enable conditional
-# documentation sections, marked by \if sectionname ... \endif.
-
-ENABLED_SECTIONS =
-
-# The MAX_INITIALIZER_LINES tag determines the maximum number of lines
-# the initial value of a variable or define consists of for it to appear in
-# the documentation. If the initializer consists of more lines than specified
-# here it will be hidden. Use a value of 0 to hide initializers completely.
-# The appearance of the initializer of individual variables and defines in the
-# documentation can be controlled using \showinitializer or \hideinitializer
-# command in the documentation regardless of this setting.
-
-MAX_INITIALIZER_LINES = 30
-
-# Set the SHOW_USED_FILES tag to NO to disable the list of files generated
-# at the bottom of the documentation of classes and structs. If set to YES the
-# list will mention the files that were used to generate the documentation.
-
-SHOW_USED_FILES = YES
-
-# If the sources in your project are distributed over multiple directories
-# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy
-# in the documentation. The default is NO.
-
-SHOW_DIRECTORIES = NO
-
-# The FILE_VERSION_FILTER tag can be used to specify a program or script that
-# doxygen should invoke to get the current version for each file (typically from the
-# version control system). Doxygen will invoke the program by executing (via
-# popen()) the command <command> <input-file>, where <command> is the value of
-# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file
-# provided by doxygen. Whatever the program writes to standard output
-# is used as the file version. See the manual for examples.
-
-FILE_VERSION_FILTER =
-
-#---------------------------------------------------------------------------
-# configuration options related to warning and progress messages
-#---------------------------------------------------------------------------
-
-# The QUIET tag can be used to turn on/off the messages that are generated
-# by doxygen. Possible values are YES and NO. If left blank NO is used.
-
-QUIET = NO
-
-# The WARNINGS tag can be used to turn on/off the warning messages that are
-# generated by doxygen. Possible values are YES and NO. If left blank
-# NO is used.
-
-WARNINGS = YES
-
-# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings
-# for undocumented members. If EXTRACT_ALL is set to YES then this flag will
-# automatically be disabled.
-
-WARN_IF_UNDOCUMENTED = NO
-
-# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for
-# potential errors in the documentation, such as not documenting some
-# parameters in a documented function, or documenting parameters that
-# don't exist or using markup commands wrongly.
-
-WARN_IF_DOC_ERROR = YES
-
-# This WARN_NO_PARAMDOC option can be enabled to get warnings for
-# functions that are documented, but have no documentation for their parameters
-# or return value. If set to NO (the default) doxygen will only warn about
-# wrong or incomplete parameter documentation, but not about the absence of
-# documentation.
-
-WARN_NO_PARAMDOC = NO
-
-# The WARN_FORMAT tag determines the format of the warning messages that
-# doxygen can produce. The string should contain the $file, $line, and $text
-# tags, which will be replaced by the file and line number from which the
-# warning originated and the warning text. Optionally the format may contain
-# $version, which will be replaced by the version of the file (if it could
-# be obtained via FILE_VERSION_FILTER)
-
-WARN_FORMAT =
-
-# The WARN_LOGFILE tag can be used to specify a file to which warning
-# and error messages should be written. If left blank the output is written
-# to stderr.
-
-WARN_LOGFILE =
-
-#---------------------------------------------------------------------------
-# configuration options related to the input files
-#---------------------------------------------------------------------------
-
-# The INPUT tag can be used to specify the files and/or directories that contain
-# documented source files. You may enter file names like "myfile.cpp" or
-# directories like "/usr/src/myproject". Separate the files or directories
-# with spaces.
-
-INPUT = $(DOXYGEN_INPUT)
-
-# This tag can be used to specify the character encoding of the source files that
-# doxygen parses. Internally doxygen uses the UTF-8 encoding, which is also the default
-# input encoding. Doxygen uses libiconv (or the iconv built into libc) for the transcoding.
-# See http://www.gnu.org/software/libiconv for the list of possible encodings.
-
-INPUT_ENCODING = UTF-8
-
-# If the value of the INPUT tag contains directories, you can use the
-# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
-# and *.h) to filter out the source-files in the directories. If left
-# blank the following patterns are tested:
-# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx
-# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py
-
-FILE_PATTERNS =
-
-# The RECURSIVE tag can be used to specify whether or not subdirectories
-# should be searched for input files as well. Possible values are YES and NO.
-# If left blank NO is used.
-
-RECURSIVE = YES
-
-# The EXCLUDE tag can be used to specify files and/or directories that should
-# be excluded from the INPUT source files. This way you can easily exclude a
-# subdirectory from a directory tree whose root is specified with the INPUT tag.
-
-EXCLUDE =
-
-# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
-# directories that are symbolic links (a Unix filesystem feature) are excluded
-# from the input.
-
-EXCLUDE_SYMLINKS = NO
-
-# If the value of the INPUT tag contains directories, you can use the
-# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
-# certain files from those directories. Note that the wildcards are matched
-# against the file with absolute path, so to exclude all test directories
-# for example use the pattern */test/*
-
-EXCLUDE_PATTERNS =
-
-# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
-# (namespaces, classes, functions, etc.) that should be excluded from the output.
-# The symbol name can be a fully qualified name, a word, or if the wildcard * is used,
-# a substring. Examples: ANamespace, AClass, AClass::ANamespace, ANamespace::*Test
-
-EXCLUDE_SYMBOLS =
-
-# The EXAMPLE_PATH tag can be used to specify one or more files or
-# directories that contain example code fragments that are included (see
-# the \include command).
-
-EXAMPLE_PATH =
-
-# If the value of the EXAMPLE_PATH tag contains directories, you can use the
-# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
-# and *.h) to filter out the source-files in the directories. If left
-# blank all files are included.
-
-EXAMPLE_PATTERNS =
-
-# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
-# searched for input files to be used with the \include or \dontinclude
-# commands irrespective of the value of the RECURSIVE tag.
-# Possible values are YES and NO. If left blank NO is used.
-
-EXAMPLE_RECURSIVE = NO
-
-# The IMAGE_PATH tag can be used to specify one or more files or
-# directories that contain image that are included in the documentation (see
-# the \image command).
-
-IMAGE_PATH =
-
-# The INPUT_FILTER tag can be used to specify a program that doxygen should
-# invoke to filter for each input file. Doxygen will invoke the filter program
-# by executing (via popen()) the command <filter> <input-file>, where <filter>
-# is the value of the INPUT_FILTER tag, and <input-file> is the name of an
-# input file. Doxygen will then use the output that the filter program writes
-# to standard output. If FILTER_PATTERNS is specified, this tag will be
-# ignored.
-
-INPUT_FILTER =
-
-# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
-# basis. Doxygen will compare the file name with each pattern and apply the
-# filter if there is a match. The filters are a list of the form:
-# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further
-# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER
-# is applied to all files.
-
-FILTER_PATTERNS =
-
-# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
-# INPUT_FILTER) will be used to filter the input files when producing source
-# files to browse (i.e. when SOURCE_BROWSER is set to YES).
-
-FILTER_SOURCE_FILES = NO
-
-#---------------------------------------------------------------------------
-# configuration options related to source browsing
-#---------------------------------------------------------------------------
-
-# If the SOURCE_BROWSER tag is set to YES then a list of source files will
-# be generated. Documented entities will be cross-referenced with these sources.
-# Note: To get rid of all source code in the generated output, make sure also
-# VERBATIM_HEADERS is set to NO. If you have enabled CALL_GRAPH or CALLER_GRAPH
-# then you must also enable this option. If you don't then doxygen will produce
-# a warning and turn it on anyway
-
-SOURCE_BROWSER = YES
-
-# Setting the INLINE_SOURCES tag to YES will include the body
-# of functions and classes directly in the documentation.
-
-INLINE_SOURCES = NO
-
-# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct
-# doxygen to hide any special comment blocks from generated source code
-# fragments. Normal C and C++ comments will always remain visible.
-
-STRIP_CODE_COMMENTS = YES
-
-# If the REFERENCED_BY_RELATION tag is set to YES (the default)
-# then for each documented function all documented
-# functions referencing it will be listed.
-
-REFERENCED_BY_RELATION = YES
-
-# If the REFERENCES_RELATION tag is set to YES (the default)
-# then for each documented function all documented entities
-# called/used by that function will be listed.
-
-REFERENCES_RELATION = YES
-
-# If the REFERENCES_LINK_SOURCE tag is set to YES (the default)
-# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from
-# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will
-# link to the source code. Otherwise they will link to the documentation.
-
-REFERENCES_LINK_SOURCE = YES
-
-# If the USE_HTAGS tag is set to YES then the references to source code
-# will point to the HTML generated by the htags(1) tool instead of doxygen
-# built-in source browser. The htags tool is part of GNU's global source
-# tagging system (see http://www.gnu.org/software/global/global.html). You
-# will need version 4.8.6 or higher.
-
-USE_HTAGS = NO
-
-# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen
-# will generate a verbatim copy of the header file for each class for
-# which an include is specified. Set to NO to disable this.
-
-VERBATIM_HEADERS = YES
-
-#---------------------------------------------------------------------------
-# configuration options related to the alphabetical class index
-#---------------------------------------------------------------------------
-
-# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index
-# of all compounds will be generated. Enable this if the project
-# contains a lot of classes, structs, unions or interfaces.
-
-ALPHABETICAL_INDEX = YES
-
-# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then
-# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns
-# in which this list will be split (can be a number in the range [1..20])
-
-COLS_IN_ALPHA_INDEX = 5
-
-# In case all classes in a project start with a common prefix, all
-# classes will be put under the same header in the alphabetical index.
-# The IGNORE_PREFIX tag can be used to specify one or more prefixes that
-# should be ignored while generating the index headers.
-
-IGNORE_PREFIX =
-
-#---------------------------------------------------------------------------
-# configuration options related to the HTML output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_HTML tag is set to YES (the default) Doxygen will
-# generate HTML output.
-
-GENERATE_HTML = YES
-
-# The HTML_OUTPUT tag is used to specify where the HTML docs will be put.
-# If a relative path is entered the value of OUTPUT_DIRECTORY will be
-# put in front of it. If left blank `html' will be used as the default path.
-
-HTML_OUTPUT =
-
-# The HTML_FILE_EXTENSION tag can be used to specify the file extension for
-# each generated HTML page (for example: .htm,.php,.asp). If it is left blank
-# doxygen will generate files with .html extension.
-
-HTML_FILE_EXTENSION = .html
-
-# The HTML_HEADER tag can be used to specify a personal HTML header for
-# each generated HTML page. If it is left blank doxygen will generate a
-# standard header.
-
-HTML_HEADER =
-
-# The HTML_FOOTER tag can be used to specify a personal HTML footer for
-# each generated HTML page. If it is left blank doxygen will generate a
-# standard footer.
-
-HTML_FOOTER =
-
-# The HTML_STYLESHEET tag can be used to specify a user-defined cascading
-# style sheet that is used by each HTML page. It can be used to
-# fine-tune the look of the HTML output. If the tag is left blank doxygen
-# will generate a default style sheet. Note that doxygen will try to copy
-# the style sheet file to the HTML output directory, so don't put your own
-# stylesheet in the HTML output directory as well, or it will be erased!
-
-HTML_STYLESHEET =
-
-# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes,
-# files or namespaces will be aligned in HTML using tables. If set to
-# NO a bullet list will be used.
-
-HTML_ALIGN_MEMBERS = YES
-
-# If the GENERATE_HTMLHELP tag is set to YES, additional index files
-# will be generated that can be used as input for tools like the
-# Microsoft HTML help workshop to generate a compressed HTML help file (.chm)
-# of the generated HTML documentation.
-
-GENERATE_HTMLHELP = NO
-
-# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
-# documentation will contain sections that can be hidden and shown after the
-# page has loaded. For this to work a browser that supports
-# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox
-# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari).
-
-HTML_DYNAMIC_SECTIONS = NO
-
-# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can
-# be used to specify the file name of the resulting .chm file. You
-# can add a path in front of the file if the result should not be
-# written to the html output directory.
-
-CHM_FILE =
-
-# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can
-# be used to specify the location (absolute path including file name) of
-# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run
-# the HTML help compiler on the generated index.hhp.
-
-HHC_LOCATION =
-
-# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag
-# controls if a separate .chi index file is generated (YES) or that
-# it should be included in the master .chm file (NO).
-
-GENERATE_CHI = NO
-
-# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag
-# controls whether a binary table of contents is generated (YES) or a
-# normal table of contents (NO) in the .chm file.
-
-BINARY_TOC = NO
-
-# The TOC_EXPAND flag can be set to YES to add extra items for group members
-# to the contents of the HTML help documentation and to the tree view.
-
-TOC_EXPAND = NO
-
-# The DISABLE_INDEX tag can be used to turn on/off the condensed index at
-# top of each HTML page. The value NO (the default) enables the index and
-# the value YES disables it.
-
-DISABLE_INDEX = NO
-
-# This tag can be used to set the number of enum values (range [1..20])
-# that doxygen will group on one line in the generated HTML documentation.
-
-ENUM_VALUES_PER_LINE = 1
-
-# If the GENERATE_TREEVIEW tag is set to YES, a side panel will be
-# generated containing a tree-like index structure (just like the one that
-# is generated for HTML Help). For this to work a browser that supports
-# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+,
-# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are
-# probably better off using the HTML help feature.
-
-GENERATE_TREEVIEW = NO
-
-# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be
-# used to set the initial width (in pixels) of the frame in which the tree
-# is shown.
-
-TREEVIEW_WIDTH = 250
-
-#---------------------------------------------------------------------------
-# configuration options related to the LaTeX output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will
-# generate Latex output.
-
-GENERATE_LATEX = NO
-
-# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put.
-# If a relative path is entered the value of OUTPUT_DIRECTORY will be
-# put in front of it. If left blank `latex' will be used as the default path.
-
-LATEX_OUTPUT =
-
-# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
-# invoked. If left blank `latex' will be used as the default command name.
-
-LATEX_CMD_NAME = latex
-
-# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to
-# generate index for LaTeX. If left blank `makeindex' will be used as the
-# default command name.
-
-MAKEINDEX_CMD_NAME = makeindex
-
-# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact
-# LaTeX documents. This may be useful for small projects and may help to
-# save some trees in general.
-
-COMPACT_LATEX = NO
-
-# The PAPER_TYPE tag can be used to set the paper type that is used
-# by the printer. Possible values are: a4, a4wide, letter, legal and
-# executive. If left blank a4wide will be used.
-
-PAPER_TYPE = a4wide
-
-# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX
-# packages that should be included in the LaTeX output.
-
-EXTRA_PACKAGES =
-
-# The LATEX_HEADER tag can be used to specify a personal LaTeX header for
-# the generated latex document. The header should contain everything until
-# the first chapter. If it is left blank doxygen will generate a
-# standard header. Notice: only use this tag if you know what you are doing!
-
-LATEX_HEADER =
-
-# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated
-# is prepared for conversion to pdf (using ps2pdf). The pdf file will
-# contain links (just like the HTML output) instead of page references
-# This makes the output suitable for online browsing using a pdf viewer.
-
-PDF_HYPERLINKS = NO
-
-# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of
-# plain latex in the generated Makefile. Set this option to YES to get a
-# higher quality PDF documentation.
-
-USE_PDFLATEX = NO
-
-# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode.
-# command to the generated LaTeX files. This will instruct LaTeX to keep
-# running if errors occur, instead of asking the user for help.
-# This option is also used when generating formulas in HTML.
-
-LATEX_BATCHMODE = NO
-
-# If LATEX_HIDE_INDICES is set to YES then doxygen will not
-# include the index chapters (such as File Index, Compound Index, etc.)
-# in the output.
-
-LATEX_HIDE_INDICES = NO
-
-#---------------------------------------------------------------------------
-# configuration options related to the RTF output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output
-# The RTF output is optimized for Word 97 and may not look very pretty with
-# other RTF readers or editors.
-
-GENERATE_RTF = NO
-
-# The RTF_OUTPUT tag is used to specify where the RTF docs will be put.
-# If a relative path is entered the value of OUTPUT_DIRECTORY will be
-# put in front of it. If left blank `rtf' will be used as the default path.
-
-RTF_OUTPUT =
-
-# If the COMPACT_RTF tag is set to YES Doxygen generates more compact
-# RTF documents. This may be useful for small projects and may help to
-# save some trees in general.
-
-COMPACT_RTF = NO
-
-# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated
-# will contain hyperlink fields. The RTF file will
-# contain links (just like the HTML output) instead of page references.
-# This makes the output suitable for online browsing using WORD or other
-# programs which support those fields.
-# Note: wordpad (write) and others do not support links.
-
-RTF_HYPERLINKS = NO
-
-# Load stylesheet definitions from file. Syntax is similar to doxygen's
-# config file, i.e. a series of assignments. You only have to provide
-# replacements, missing definitions are set to their default value.
-
-RTF_STYLESHEET_FILE =
-
-# Set optional variables used in the generation of an rtf document.
-# Syntax is similar to doxygen's config file.
-
-RTF_EXTENSIONS_FILE =
-
-#---------------------------------------------------------------------------
-# configuration options related to the man page output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_MAN tag is set to YES (the default) Doxygen will
-# generate man pages
-
-GENERATE_MAN = NO
-
-# The MAN_OUTPUT tag is used to specify where the man pages will be put.
-# If a relative path is entered the value of OUTPUT_DIRECTORY will be
-# put in front of it. If left blank `man' will be used as the default path.
-
-MAN_OUTPUT =
-
-# The MAN_EXTENSION tag determines the extension that is added to
-# the generated man pages (default is the subroutine's section .3)
-
-MAN_EXTENSION =
-
-# If the MAN_LINKS tag is set to YES and Doxygen generates man output,
-# then it will generate one additional man file for each entity
-# documented in the real man page(s). These additional files
-# only source the real man page, but without them the man command
-# would be unable to find the correct page. The default is NO.
-
-MAN_LINKS = NO
-
-#---------------------------------------------------------------------------
-# configuration options related to the XML output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_XML tag is set to YES Doxygen will
-# generate an XML file that captures the structure of
-# the code including all documentation.
-
-GENERATE_XML = NO
-
-# The XML_OUTPUT tag is used to specify where the XML pages will be put.
-# If a relative path is entered the value of OUTPUT_DIRECTORY will be
-# put in front of it. If left blank `xml' will be used as the default path.
-
-XML_OUTPUT = xml
-
-# The XML_SCHEMA tag can be used to specify an XML schema,
-# which can be used by a validating XML parser to check the
-# syntax of the XML files.
-
-XML_SCHEMA =
-
-# The XML_DTD tag can be used to specify an XML DTD,
-# which can be used by a validating XML parser to check the
-# syntax of the XML files.
-
-XML_DTD =
-
-# If the XML_PROGRAMLISTING tag is set to YES Doxygen will
-# dump the program listings (including syntax highlighting
-# and cross-referencing information) to the XML output. Note that
-# enabling this will significantly increase the size of the XML output.
-
-XML_PROGRAMLISTING = YES
-
-#---------------------------------------------------------------------------
-# configuration options for the AutoGen Definitions output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will
-# generate an AutoGen Definitions (see autogen.sf.net) file
-# that captures the structure of the code including all
-# documentation. Note that this feature is still experimental
-# and incomplete at the moment.
-
-GENERATE_AUTOGEN_DEF = NO
-
-#---------------------------------------------------------------------------
-# configuration options related to the Perl module output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_PERLMOD tag is set to YES Doxygen will
-# generate a Perl module file that captures the structure of
-# the code including all documentation. Note that this
-# feature is still experimental and incomplete at the
-# moment.
-
-GENERATE_PERLMOD = NO
-
-# If the PERLMOD_LATEX tag is set to YES Doxygen will generate
-# the necessary Makefile rules, Perl scripts and LaTeX code to be able
-# to generate PDF and DVI output from the Perl module output.
-
-PERLMOD_LATEX = NO
-
-# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be
-# nicely formatted so it can be parsed by a human reader. This is useful
-# if you want to understand what is going on. On the other hand, if this
-# tag is set to NO the size of the Perl module output will be much smaller
-# and Perl will parse it just the same.
-
-PERLMOD_PRETTY = YES
-
-# The names of the make variables in the generated doxyrules.make file
-# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX.
-# This is useful so different doxyrules.make files included by the same
-# Makefile don't overwrite each other's variables.
-
-PERLMOD_MAKEVAR_PREFIX =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the preprocessor
-#---------------------------------------------------------------------------
-
-# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will
-# evaluate all C-preprocessor directives found in the sources and include
-# files.
-
-ENABLE_PREPROCESSING = YES
-
-# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro
-# names in the source code. If set to NO (the default) only conditional
-# compilation will be performed. Macro expansion can be done in a controlled
-# way by setting EXPAND_ONLY_PREDEF to YES.
-
-MACRO_EXPANSION = NO
-
-# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES
-# then the macro expansion is limited to the macros specified with the
-# PREDEFINED and EXPAND_AS_DEFINED tags.
-
-EXPAND_ONLY_PREDEF = NO
-
-# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files
-# in the INCLUDE_PATH (see below) will be search if a #include is found.
-
-SEARCH_INCLUDES = YES
-
-# The INCLUDE_PATH tag can be used to specify one or more directories that
-# contain include files that are not input files but should be processed by
-# the preprocessor.
-
-INCLUDE_PATH = $(DOXYGEN_INCLUDE_PATH)
-
-# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
-# patterns (like *.h and *.hpp) to filter out the header-files in the
-# directories. If left blank, the patterns specified with FILE_PATTERNS will
-# be used.
-
-INCLUDE_FILE_PATTERNS =
-
-# The PREDEFINED tag can be used to specify one or more macro names that
-# are defined before the preprocessor is started (similar to the -D option of
-# gcc). The argument of the tag is a list of macros of the form: name
-# or name=definition (no spaces). If the definition and the = are
-# omitted =1 is assumed. To prevent a macro definition from being
-# undefined via #undef or recursively expanded use the := operator
-# instead of the = operator.
-
-PREDEFINED = LINUX \
- UNX \
- VCL \
- GCC \
- C341 \
- X86_64 \
- _STLP_DEBUG \
- VER=C341 \
- NPTL \
- GLIBC=2 \
- NEW_SOLAR \
- _USE_NAMESPACE=1 \
- UNIX \
- SUPD=300 \
- DEBUG \
- DBG_UTIL \
- OSL_DEBUG_LEVEL=2 \
- CUI \
- SOLAR_JAVA \
- EXCEPTIONS_ON
-
-# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then
-# this tag can be used to specify a list of macro names that should be expanded.
-# The macro definition that is found in the sources will be used.
-# Use the PREDEFINED tag if you want to use a different macro definition.
-
-EXPAND_AS_DEFINED =
-
-# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then
-# doxygen's preprocessor will remove all function-like macros that are alone
-# on a line, have an all uppercase name, and do not end with a semicolon. Such
-# function macros are typically used for boiler-plate code, and will confuse
-# the parser if not removed.
-
-SKIP_FUNCTION_MACROS = YES
-
-#---------------------------------------------------------------------------
-# Configuration::additions related to external references
-#---------------------------------------------------------------------------
-
-# The TAGFILES option can be used to specify one or more tagfiles.
-# Optionally an initial location of the external documentation
-# can be added for each tagfile. The format of a tag file without
-# this location is as follows:
-# TAGFILES = file1 file2 ...
-# Adding location for the tag files is done as follows:
-# TAGFILES = file1=loc1 "file2 = loc2" ...
-# where "loc1" and "loc2" can be relative or absolute paths or
-# URLs. If a location is present for each tag, the installdox tool
-# does not have to be run to correct the links.
-# Note that each tag file must have a unique name
-# (where the name does NOT include the path)
-# If a tag file is not located in the directory in which doxygen
-# is run, you must also specify the path to the tagfile here.
-
-TAGFILES = $(DOXYGEN_REF_TAGFILES)
-
-# When a file name is specified after GENERATE_TAGFILE, doxygen will create
-# a tag file that is based on the input files it reads.
-
-GENERATE_TAGFILE = $(DOXYGEN_OUR_TAGFILE)
-
-# If the ALLEXTERNALS tag is set to YES all external classes will be listed
-# in the class index. If set to NO only the inherited external classes
-# will be listed.
-
-ALLEXTERNALS = NO
-
-# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed
-# in the modules index. If set to NO, only the current project's groups will
-# be listed.
-
-EXTERNAL_GROUPS = YES
-
-# The PERL_PATH should be the absolute path and name of the perl script
-# interpreter (i.e. the result of `which perl').
-
-PERL_PATH =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the dot tool
-#---------------------------------------------------------------------------
-
-# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will
-# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base
-# or super classes. Setting the tag to NO turns the diagrams off. Note that
-# this option is superseded by the HAVE_DOT option below. This is only a
-# fallback. It is recommended to install and use dot, since it yields more
-# powerful graphs.
-
-CLASS_DIAGRAMS = YES
-
-# You can define message sequence charts within doxygen comments using the \msc
-# command. Doxygen will then run the mscgen tool (see http://www.mcternan.me.uk/mscgen/) to
-# produce the chart and insert it in the documentation. The MSCGEN_PATH tag allows you to
-# specify the directory where the mscgen tool resides. If left empty the tool is assumed to
-# be found in the default search path.
-
-MSCGEN_PATH =
-
-# If set to YES, the inheritance and collaboration graphs will hide
-# inheritance and usage relations if the target is undocumented
-# or is not a class.
-
-HIDE_UNDOC_RELATIONS = YES
-
-# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
-# available from the path. This tool is part of Graphviz, a graph visualization
-# toolkit from AT&T and Lucent Bell Labs. The other options in this section
-# have no effect if this option is set to NO (the default)
-
-HAVE_DOT = YES
-
-# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen
-# will generate a graph for each documented class showing the direct and
-# indirect inheritance relations. Setting this tag to YES will force the
-# the CLASS_DIAGRAMS tag to NO.
-
-CLASS_GRAPH = YES
-
-# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen
-# will generate a graph for each documented class showing the direct and
-# indirect implementation dependencies (inheritance, containment, and
-# class references variables) of the class with other documented classes.
-
-COLLABORATION_GRAPH = YES
-
-# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen
-# will generate a graph for groups, showing the direct groups dependencies
-
-GROUP_GRAPHS = YES
-
-# If the UML_LOOK tag is set to YES doxygen will generate inheritance and
-# collaboration diagrams in a style similar to the OMG's Unified Modeling
-# Language.
-
-UML_LOOK = NO
-
-# If set to YES, the inheritance and collaboration graphs will show the
-# relations between templates and their instances.
-
-TEMPLATE_RELATIONS = YES
-
-# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT
-# tags are set to YES then doxygen will generate a graph for each documented
-# file showing the direct and indirect include dependencies of the file with
-# other documented files.
-
-INCLUDE_GRAPH = YES
-
-# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and
-# HAVE_DOT tags are set to YES then doxygen will generate a graph for each
-# documented header file showing the documented files that directly or
-# indirectly include this file.
-
-INCLUDED_BY_GRAPH = YES
-
-# If the CALL_GRAPH, SOURCE_BROWSER and HAVE_DOT tags are set to YES then doxygen will
-# generate a call dependency graph for every global function or class method.
-# Note that enabling this option will significantly increase the time of a run.
-# So in most cases it will be better to enable call graphs for selected
-# functions only using the \callgraph command.
-
-CALL_GRAPH = NO
-
-# If the CALLER_GRAPH, SOURCE_BROWSER and HAVE_DOT tags are set to YES then doxygen will
-# generate a caller dependency graph for every global function or class method.
-# Note that enabling this option will significantly increase the time of a run.
-# So in most cases it will be better to enable caller graphs for selected
-# functions only using the \callergraph command.
-
-CALLER_GRAPH = NO
-
-# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen
-# will graphical hierarchy of all classes instead of a textual one.
-
-GRAPHICAL_HIERARCHY = YES
-
-# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES
-# then doxygen will show the dependencies a directory has on other directories
-# in a graphical way. The dependency relations are determined by the #include
-# relations between the files in the directories.
-
-DIRECTORY_GRAPH = YES
-
-# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
-# generated by dot. Possible values are png, jpg, or gif
-# If left blank png will be used.
-
-DOT_IMAGE_FORMAT = png
-
-# The tag DOT_PATH can be used to specify the path where the dot tool can be
-# found. If left blank, it is assumed the dot tool can be found in the path.
-
-DOT_PATH =
-
-# The DOTFILE_DIRS tag can be used to specify one or more directories that
-# contain dot files that are included in the documentation (see the
-# \dotfile command).
-
-DOTFILE_DIRS =
-
-# The MAX_DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of
-# nodes that will be shown in the graph. If the number of nodes in a graph
-# becomes larger than this value, doxygen will truncate the graph, which is
-# visualized by representing a node as a red box. Note that doxygen if the number
-# of direct children of the root node in a graph is already larger than
-# MAX_DOT_GRAPH_NOTES then the graph will not be shown at all. Also note
-# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
-
-DOT_GRAPH_MAX_NODES = 50
-
-# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the
-# graphs generated by dot. A depth value of 3 means that only nodes reachable
-# from the root by following a path via at most 3 edges will be shown. Nodes
-# that lay further from the root node will be omitted. Note that setting this
-# option to 1 or 2 may greatly reduce the computation time needed for large
-# code bases. Also note that the size of a graph can be further restricted by
-# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
-
-MAX_DOT_GRAPH_DEPTH = 0
-
-# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
-# background. This is disabled by default, which results in a white background.
-# Warning: Depending on the platform used, enabling this option may lead to
-# badly anti-aliased labels on the edges of a graph (i.e. they become hard to
-# read).
-
-DOT_TRANSPARENT = NO
-
-# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output
-# files in one run (i.e. multiple -o and -T options on the command line). This
-# makes dot run faster, but since only newer versions of dot (>1.8.10)
-# support this, this feature is disabled by default.
-
-DOT_MULTI_TARGETS = NO
-
-# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will
-# generate a legend page explaining the meaning of the various boxes and
-# arrows in the dot generated graphs.
-
-GENERATE_LEGEND = YES
-
-# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will
-# remove the intermediate dot files that are used to generate
-# the various graphs.
-
-DOT_CLEANUP = NO
-
-#---------------------------------------------------------------------------
-# Configuration::additions related to the search engine
-#---------------------------------------------------------------------------
-
-# The SEARCHENGINE tag specifies whether or not a search engine should be
-# used. If set to NO the values of all tags below this one will be ignored.
-
-SEARCHENGINE = NO
diff --git a/bin/elfsum b/bin/elfsum
deleted file mode 100755
index 2038bf37a..000000000
--- a/bin/elfsum
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env perl
-
-# Output:
-# liba libb libc
-#.foo 123 4556 277
-#.baa 23 42 12
-
-%columns = ();
-%section_names = ();
-
-sub add_file($)
-{
- my $fname = shift;
- my $handle;
- my %map;
-
- open($handle, "readelf -S $fname|") || die "Failed to open $fname: $!";
- print "Name:\t$fname\n";
- while(<$handle>) {
- /\[([\d ]+)\]\s+([\S.]+)\s+\S+\s+0[[:xdigit:]]+\s+[[:xdigit:]]+\s+([[:xdigit:]]+)/ || next;
- my ($num, $name, $hexsize) = ($1, $2, $3);
- $num =~ /^\s*0$/ && next; # bin empty first section.
-
- my $size = hex ($hexsize);
- $map{$name} = $size;
- $section_names{$name} = 1;
- }
- $columns{$fname} = \%map;
- close($handle);
-}
-
-sub dump_data()
-{
- my $section;
- my $fname;
-
- # print header
- for $fname (keys %columns) {
- print "\t$fname";
- }
- print "\n";
-
- # print data
- for $section (sort keys %section_names) {
- print "$section\t";
-
- for $fname (keys %columns) {
- my $map = $columns{$fname};
- my $size = $map->{$section} || 0;
- print "$size\t";
- }
- print "\n";
- }
-}
-
-my @files = ();
-my $file;
-
-for $file (@ARGV) {
- unshift @files, $file;
-}
-
-for $file (@ARGV) {
- add_file ($file);
-}
-
-dump_data();
diff --git a/bin/exceptions/gnome-exclusions b/bin/exceptions/gnome-exclusions
deleted file mode 100644
index 33c2a05da..000000000
--- a/bin/exceptions/gnome-exclusions
+++ /dev/null
@@ -1,78 +0,0 @@
-# glib / pango / gtk modules:
-g_module_init
-g_module_check_init
-gtk_module_init
-im_module_init
-im_module_create
-im_module_list
-im_module_exit
-script_engine_init
-script_engine_create
-script_engine_exit
-script_engine_list
-theme_init
-theme_exit
-theme_create_rc_style
-
-# gconf
-gconf_backend_get_vtable
-
-# ORBit2 / bonobo
-orbit_imodule_data
-Bonobo_Plugin_info
-
-# gnome-print
-gnome_print__transport_get_type
-gnome_print__filter_get_type
-gpa_module_init
-
-# gnome-vfs
-vfs_module_init
-vfs_module_shutdown
-
-# glade
-glade_module_register_widgets
-
-# gstreamer
-gst_plugin_desc
-
-# Nautilus
-nst_init_plugin
-nautilus_module_shutdown
-nautilus_module_list_types
-nautilus_module_initialize
-
-# gimp
-gimp_module_query
-gimp_module_register
-
-# xchat modules
-xchat_plugin_init
-xchat_plugin_deinit
-xchat_plugin_get_info
-
-# epiphany
-register_module
-
-# gaim
-gaim_init_plugin
-
-# planner
-module_init
-module_exit
-module_new
-
-# a11y modules
-gnome_accessibility_module_shutdown
-gnome_accessibility_module_init
-
-# evolution
-camel_provider_module_init
-eds_module_list_types
-eds_module_initialize
-eds_module_shutdown
-e_plugin_lib_enable
-
-# gpilot
-conduit_load_gpilot_conduit
-conduit_destroy_gpilot_conduit
diff --git a/bin/exceptions/ooo-exclusions b/bin/exceptions/ooo-exclusions
deleted file mode 100644
index d2aacbe73..000000000
--- a/bin/exceptions/ooo-exclusions
+++ /dev/null
@@ -1,25 +0,0 @@
-# UNO components
-component_canUnload
-component_getImplementationEnvironment
-component_writeInfo
-component_getFactory
-component_getDescriptionFunc
-
-# UNO bridges
-uno_ext_getMapping
-uno_initEnvironment
-
-# ui plugins
-CreateDialogFactory
-
-# vcl plugins
-create_SalInstance
-
-# graphics filters
-DoExportDialog
-GraphicExport
-GraphicImport
-
-# misc.
-GetVersionInfo
-
diff --git a/bin/exceptions/pam-exclusions b/bin/exceptions/pam-exclusions
deleted file mode 100644
index 0a9654212..000000000
--- a/bin/exceptions/pam-exclusions
+++ /dev/null
@@ -1,8 +0,0 @@
-_pammodutil_.*
-pam_sm_chauthtok.*
-pam_sm_acct_mgmt.*
-pam_sm_setcred.*
-pam_sm_open_session.*
-pam_sm_close_session.*
-pam_sm_authenticate.*
-__get_authtok.*
diff --git a/bin/extract-gsi b/bin/extract-gsi
deleted file mode 100755
index 01961b0c7..000000000
--- a/bin/extract-gsi
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/bin/sh
-
-#
-# See setup for user tweakables.
-#
-. ./setup
-. $OOBUILDDIR/*.[sS]et.sh
-. ./setup
-
-primary_lang="en-US"
-extra_options=
-
-usage()
-{
- echo "This script extracts localized strings in to the GSI file"
- echo
- echo "Usage:" ${0##*/} [-d] locale gsi_file
- echo
- echo "Options:"
- echo " -d use default date in extracted sdf file"
- echo " locale locale id of the language, e.g. en-US"
- echo " gsi_file output file"
- echo
- echo "Presumptions:"
- echo " - the module transex3 is built"
- echo
- echo "Note that the script is quite slow. It takes some minutes to extract strings"
- echo "for one localization..."
-}
-
-test "$1" = "--help" && usage && exit 1;
-
-test "$1" = "-d" && extra_options="-d" && shift
-locale="$1"
-gsi_file="$2"
-
-if test -z "$locale" ; then
- echo "Error: locale is not defined, try --help"
- exit 1;
-fi
-
-if test -z "$locale" ; then
- echo "Error: output gsi file is not defined, try --help"
- exit 1;
-fi
-
-if ! which localize >/dev/null 2>&1 ; then
- echo "Error: Unable to find the script \"localize\". Please, build and deliver"
- echo " the module transex3."
- exit 1;
-fi
-
-if ! echo "$2" | grep -q "^/" ; then
- gsi_file=`pwd`/$gsi_file
-fi
-
-cd $OOBUILDDIR
-localize -e -f "$gsi_file" -l "$locale=$primary_lang" $extra_options
-cd -
diff --git a/bin/extract-hunks b/bin/extract-hunks
deleted file mode 100755
index f5ab5e678..000000000
--- a/bin/extract-hunks
+++ /dev/null
@@ -1,115 +0,0 @@
-#!/usr/bin/env perl
-
-sub usage() {
- print STDERR "extract-hunks regex patch.diff [out_with.diff [out_without.diff]]
-
-Extract hunks from diff based on regex.
- regex Regular expression for matching
- patch.diff Patch that will be separated
- out_with.diff Output patch that will contain the hunks matching regex
- out_without.diff Output patch with non-mathing hunks\n";
-
- exit 1;
-}
-
-$regex = shift;
-$patch = shift;
-$out_with = shift;
-$out_without = shift;
-
-usage() if ( ! defined( $regex ) || ! defined( $patch ) );
-$out_with = "with-" . $patch if ( ! defined( $out_with ) );
-$out_without = "without-" . $patch if ( ! defined( $out_without ) );
-usage() if ( $out_with eq $patch || $out_without eq $patch );
-
-# open the files
-my $PATCH;
-my $WITH;
-my $WITHOUT;
-
-if ( ! open( $PATCH, "$patch" ) ) {
- print STDERR "Unable to open patch '$patch'.";
- exit 1;
-}
-if ( ! open( $WITH, ">$out_with" ) ) {
- print STDERR "Unable to open '$out_with'.";
- exit 1;
-}
-if ( ! open( $WITHOUT, ">$out_without" ) ) {
- print STDERR "Unable to open '$out_without'.";
- exit 1;
-}
-
-# do the job
-
-my $hunk = "";
-my $header_with = "";
-my $header_without = "";
-
-my $regex_found = 0;
-my $reset_header = 0;
-my $plus_lines = 0;
-my $minus_lines = 0;
-
-my $line;
-my $lineno = 0;
-
-while ( $line = <$PATCH>) {
- ++$lineno;
-
- # beginning of a hunk
- if ( $line =~ /^@@ [^,]*,([^ ]*) [^,]*,([^ ]*)/ ) {
- $hunk = $line;
- $minus_lines = $1;
- $plus_lines = $2;
- $regex_found = 0;
-
- next;
- }
-
- # inside a hunk
- if ( $plus_lines > 0 || $minus_lines > 0 ) {
- $hunk .= $line;
- if ( $line =~ $regex ) {
- $regex_found = 1;
- }
-
- if ( $line =~ /[^ \+\-]/ ) {
- --$plus_lines if ( $line =~ /^[ \+]/ );
- --$minus_lines if ( $line =~ /^[ \-]/ );
- }
- else {
- print STDERR "Cannot handle line $lineno: $line\n";
- close( $PATCH );
- close( $WITH );
- close( $WITHOUT );
- exit 1;
- }
-
- # end of the hunk
- if ( $plus_lines == 0 && $minus_lines == 0 ) {
- if ( $regex_found ) {
- print $WITH $header_with . $hunk;
- $header_with = "";
- }
- else {
- print $WITHOUT $header_without . $hunk;
- $header_without = "";
- }
- $reset_header = 1;
- }
- }
- # inside a header
- else {
- if ( $reset_header ) {
- $header_with = "";
- $header_without = "";
- $reset_header = 0;
- }
- $header_with .= $line;
- $header_without .= $line;
- }
-}
-close( $PATCH );
-close( $WITH );
-close( $WITHOUT );
diff --git a/bin/extract-new-strings b/bin/extract-new-strings
deleted file mode 100755
index 1d82f3bed..000000000
--- a/bin/extract-new-strings
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/bin/sh
-
-#
-# See setup for user tweakables.
-#
-
-usage()
-{
- echo "This script compares two GSI files and puts new strings into"
- echo "the output GSI file"
- echo
- echo "Usage:" ${0##*/} gsi_old gsi_new gsi_out
- echo
-}
-
-test $# -lt 3 -o "$1" = "--help" && usage && exit 1;
-
-gsi_old="$1"
-gsi_new="$2"
-gsi_out="$3"
-
-gsi_old_norm=`mktemp /tmp/extract-new-strings.XXXXXX`
-gsi_new_norm=`mktemp /tmp/extract-new-strings.XXXXXX`
-
-gsi_old_sort=`mktemp /tmp/extract-new-strings.XXXXXX`
-gsi_new_sort=`mktemp /tmp/extract-new-strings.XXXXXX`
-
-echo "Removing unwanted strings..."
-
-# remove localizations from the util dir (are generated during build)
-grep -v "^[^ ]* util" $gsi_old >>$gsi_old_norm
-grep -v "^[^ ]* util" $gsi_new >>$gsi_new_norm
-
-echo "Normalizing..."
-
-perl -pi -e 's/^([^\t]*\t[^\t]*\t[^\t]*\t[^\t]*\t[^\t]*\t[^\t]*\t[^\t]*\t[^\t]*\t)[0-9]+/${1}999/' $gsi_old_norm
-perl -pi -e 's/^([^\t]*\t[^\t]*\t[^\t]*\t[^\t]*\t[^\t]*\t[^\t]*\t[^\t]*\t[^\t]*\t)[0-9]+/${1}999/' $gsi_new_norm
-
-echo "Sorting..."
-
-sort $gsi_old_norm >$gsi_old_sort
-sort $gsi_new_norm >$gsi_new_sort
-
-echo "Extracting..."
-
-diff -up $gsi_old_sort $gsi_new_sort | grep -v "^+++" | grep "^+" | sed "s|^+||" >$gsi_out
-
-rm -f $gsi_old_norm $gsi_new_norm $gsi_old_sort $gsi_new_sort
diff --git a/bin/find-duplicates.pl b/bin/find-duplicates.pl
deleted file mode 100755
index 7620822a0..000000000
--- a/bin/find-duplicates.pl
+++ /dev/null
@@ -1,97 +0,0 @@
-#!/usr/bin/env perl
-
-# find-duplicates:
-# Gets the duplicates across modules for the class constructor symbols
-# Run this from directory where the *.so are located
-# /opt/OOInstall/program or
-# ooo-build/build/src680-mxx/solver/680/unxlngi4.pro/lib
-#
-
-@libs = ();
-%symbols = ();
-
-sub insert_symbols {
- my $obj;
- my $referenced = 0;
- my $lib = shift;
- my @external = ();
-
- open $obj, "objdump -T $lib|" || die "Can't objdump $lib: $!";
-
- while (<$obj>) {
- /[0-9a-f]*\s+([gw ])\s+..\s+(\S*)\s*......................\s+(.*)/ || next;
-
- my ($linkage, $type, $symbol) = ($1, $2, $3);
-
- $symbol && $type || next;
-
- # Is this correct ?
- $linkage =~ /g/ || next; # bin weak symbols.
-
- if ($symbol =~ /\w+C+[0-9]{1}/){
- if ($type eq '.text') {
- $symbol =~ /_GLOBAL_/ && next; # bin global symbols - we don't grok them well
-
- my $name = `c++filt $symbol`;
- my @symb_arr = split /\(/, $name;
- $name = $symb_arr[0];
- $name =~ s/\:\:[^\:]*$//;
-
-# print "Sane constructor '$name' ($linkage, $type)\n";
-
- if (exists $symbols{$name}) {
- if($symbols{$name} ne $lib){
- print "$lib and $symbols{$name} defines '$name' ]\n";
- }
- } else {
- $symbols{$name} = $lib;
- }
- } elsif ($type eq '*UND*') {
- push (@external, $symbol);
- }
- }
- }
-
- close $obj;
-}
-
-sub resolve_symbols {
- my $lib = shift;
- if ($libs{$lib}) {
- return;
- }
- insert_symbols ($lib);
- $libs{$lib} = 'resolved';
-}
-
-my $file;
-my $dirhandle;
-my $path = ".";
-
-my @exceptions = ( 'cppuhelper3gcc3', 'uno_cppuhelpergcc',
- 'salhelper3gcc3', 'uno_salhelpergcc' );
-sub is_exception($)
-{
- my $file = shift;
- for my $exc (@exceptions) {
- if ($file =~ /$exc/) {
- return 1;
- }
- }
- return 0;
-}
-
-opendir ($dirhandle, $path) || die "Can't open dir $path: $!";
-while ($file = readdir ($dirhandle)) {
- $file =~ /^\./ && next;
- is_exception($file) && next;
-
- if ($file =~ m/.so$/) {
- push @libs, "$path/$file";
- }
-}
-closedir ($dirhandle);
-
-for $lib (@libs) {
- resolve_symbols ($lib);
-}
diff --git a/bin/find_dup b/bin/find_dup
deleted file mode 100755
index 215ee218b..000000000
--- a/bin/find_dup
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env perl
-
-#
-# Prints duplicate entries in a file
-#
-# usage: find_dup <file>
-# Can be used with preloc --strip output to get the duplicate
-# symbols
-#
-
-my @file = ();
-for my $arg (@ARGV) {
- push @file, $arg;
-}
-
-my $file_name = $file[0];
-my %tmp_sym_arr = ();
-my %symbols = ();
-
-open (my $in, "$file_name") || die "Can't open $file_name: $!";
-while (<$in>) {
- if (exists $tmp_sym_arr{$_}){
- #print "$. -- $tmp_sym_arr{$_} -- $_\n";
- if (!exists $symbols{$_}){
- $symbols{$_} = $_;
- print "$_";
- }
- }else{
- $tmp_sym_arr{$_} = $.;
- }
-}
-close ($in);
-
diff --git a/bin/finterpose b/bin/finterpose
deleted file mode 100755
index 87af9c11b..000000000
--- a/bin/finterpose
+++ /dev/null
@@ -1,279 +0,0 @@
-#!/usr/bin/env perl
-
-#
-# Use example: find -name '*.so' | finterpose
-#
-
-my %common_symbols = (
- '__bss_start' => 1,
- '__invoke_dynamic_linker__' => 1,
- '_DYNAMIC' => 1,
- '_edata' => 1,
- '_end' => 1,
- '_fini' => 1,
- '_init' => 1,
- '__bss_start@Base' => 1,
- '__invoke_dynamic_linker__@Base' => 1,
- '_DYNAMIC@Base' => 1,
- '_edata@Base' => 1,
- '_end@Base' => 1,
- '_fini@Base' => 1,
- '_init@Base' => 1,
- );
-
-my @exception_regexps;
-
-import Strict;
-use File::Basename;
-
-# misc. argument options / defaults
-my $opt_debug = 0;
-
-sub add_sym($$$)
-{
- my ($symbol_hash, $symbol, $file) = @_;
- if (!defined $symbol_hash->{$symbol}) {
- my @libs;
- $symbol_hash->{$symbol} = \@libs;
- }
- # versioning can create duplicates
- for my $fname (@{$symbol_hash->{$symbol}}) {
- return if ($fname eq $file);
- }
- push @{$symbol_hash->{$symbol}}, $file;
-}
-
-sub read_symbols($$)
-{
- my $file = shift;
- my $symbol_hash = shift;
- my $pipe;
-
-# print "Read '$file'\n";
-
- $dumpsw = '-T';
-
- open ($pipe, "objdump $dumpsw $file |") || die "Can't objdump $dumpsw $file: $!";
- while (<$pipe>) {
- /([0-9a-f]*)\s+([gw ])\s+..\s+(\S*)\s*([0-9a-f]+)(.*)$/; # || next;
-
- my ($address, $linkage, $type, $size, $symbol_n_ver) = ($1, $2, $3, $4, $5);
- my ($opt_ver, $symbol, $version) = ( '', '', '' );
-
- next if (!$symbol_n_ver || !$type);
-
- if ($symbol_n_ver =~ m/\s([\s\(])(\S+)[\)\s]+(\S+)/) {
- $opt_ver = $1; $version = $2; $symbol = $3;
- } else {
- $symbol = $symbol_n_ver;
- }
- $symbol =~ s/^\s*//;
- $symbol =~ s/\s*$//;
-
- next if (!$symbol || !$type);
-
-# print "Symbol '$symbol' version '$version' type '$type' '$linkage' addr $address, size $size\n";
-
-# Filter out things we're not interested in
-
-# remove undefined / external references
- next if ($type eq '*UND*');
-# remove weak symbols
- next if ($linkage =~ m/w/);
-# remove section names
- next if ($symbol =~ m/^\./);
-# remove version symbols
- next if ($symbol eq $version);
-
-# FIXME - Ignores versioning - too nasty for now ...
- add_sym ($symbol_hash, $symbol, $file);
- }
- close ($pipe);
-}
-
-sub add_deps ($$$$)
-{
- my $lib_dups = shift;
- my $lib = shift;
- my $libs = shift;
- my $symbol = shift;
-
-# print "Add deps for $lib: @{$libs}\n";
- for my $other_lib (@{$libs}) {
- ($other_lib eq $lib) && next;
-# print "Add deps from $lib: to $other_lib for $symbol\n";
- if (!defined $lib_dups->{$lib}->{$other_lib}) {
- my @list;
- $lib_dups->{$lib}->{$other_lib} = \@list;
-# my $cnt = keys %{$lib_dups->{$lib}};
-# print "Add other lib $cnt\n";
- }
- push @{$lib_dups->{$lib}->{$other_lib}}, $symbol;
- }
-}
-
-sub ignore_symbol($)
-{
- my $symbol = shift;
- return 1 if (defined $common_symbols{$symbol});
- for my $regex (@exception_regexps) {
-# print "Match '$symbol' vs '$regex'\n";
- if ($symbol =~ m/^$regex$/) {
- return 1;
- }
- }
- return 0;
-}
-
-# Linux / gcc only - so far.
-sub disp_sym($)
-{
- my $raw = shift;
- return $raw if (! $raw =~ /^_Z/);
-
- my $filtered = `c++filt $raw`;
- $filtered =~ s/[\r\n]*$//;
- return $filtered;
-}
-
-my %symbol_table;
-my %lib_dups;
-
-my $option_cross_lib = 0;
-
-my @exception_files;
-
-for my $arg (@ARGV) {
- if ($arg eq '--help' || $arg eq '-h') {
- print "finterpose [options] [list-of-exception-files]\n";
- print " finds genuine interposing uses across many libraries read from stdin\n";
- print " options:\n";
- print " --per-symbol: dump on symbol basis [default]\n";
- print " --cross-lib: dump on a per-library basis\n";
- } elsif ($arg eq '--cross-lib') {
- $option_cross_lib = 1;
- } elsif ($arg eq '--per-symbol') {
- $option_cross_lib = 0;
- } else {
- push @exception_files, $arg;
- }
-}
-
-print "Reading exceptions ";
-for my $fname (@exception_files) {
- my $fh;
- open ($fh, $fname) || die "Can't open $fname: $!";
- print ".";
- while (<$fh>) {
- s/[\r\n]*$//;
- m/^\#/ && next;
- m/^$/ && next;
-# print "Exception '$_'\n";
- push @exception_regexps, $_;
- }
- close ($fh);
-}
-print "\n\n";
-
-print "Reading symbols:\n";
-while (<STDIN>) {
- my $fname = $_;
- $fname =~ s/[\r\n\t]*$//;
- next if (! -f $fname);
- if (-l $fname) {
- my $link = readlink ($fname);
- if ($link =~ /^\//) {
- $fname = $link;
- } else {
- $fname = dirname ($fname) . "/" . $link;
- }
- }
- next if (defined $lib_dups{$fname});
-
- print "$fname ";
- my %lib_stats;
- $lib_dups{$fname} = \%lib_stats;
- read_symbols ($fname, \%symbol_table);
-}
-print "\n\n";
-
-print "Removing singletons & filtering exceptions...\n";
-for my $symbol (keys %symbol_table) {
- my $libs = $symbol_table{$symbol};
- if (@{$libs} == 1 || ignore_symbol ($symbol)) {
- delete $symbol_table{$symbol};
- next;
- }
-}
-print "\n";
-
-if (!$option_cross_lib) { # symbol-set dump
-
- # detect substantially identical libraries ?
- my %dup_syms;
-
- # normalise libs to a string
- for my $sym (keys %symbol_table) {
- my $lib_string = join (' ', @{$symbol_table{$sym}});
- if (!defined $dup_syms{$lib_string}) {
- my @sym_list;
- $dup_syms{$lib_string} = \@sym_list;
- }
- push @{$dup_syms{$lib_string}}, $sym;
- }
-
- # prune probable duplicates
- my @dup_libs;
- for my $lib_list (sort { $a cmp $b } keys %dup_syms) {
- if (@{$dup_syms{$lib_list}} > 10) {
- push @dup_libs, $lib_list;
- delete $dup_syms{$lib_list};
- }
- }
-
- print "Probable duplicate libraries:\n";
- for my $dup (@dup_libs) {
- print "\t$dup\n";
- }
- print "\n";
-
- print "Duplicate symbols:\n";
-
- for my $lib_list (sort { $a cmp $b } keys %dup_syms) {
- print "\t$lib_list implement:\n";
- print "\t\t";
- for my $sym (@{$dup_syms{$lib_list}}) {
- print disp_sym ($sym) . " ";
- }
- print "\n";
- }
-} else { # cross-library dump
- print "Collating duplicates\n";
-
- for my $symbol (keys %symbol_table) {
- for my $lib (@{$libs}) {
- add_deps ($lib_dups, $lib, $libs, $symbol);
- my $cnt = keys %{($lib_dups->{$lib})};
- }
- }
-
- print "Duplicate summary\n";
-
- my $dups = keys %symbol_table;
- print " count of duplicates: $dups\n";
-
- for my $lib (keys %lib_dups) {
- my $dupcnt = keys %{$lib_dups->{$lib}};
- $dupcnt || next;
- print "Library: '$lib':\n";
-
- my $these_libs = $lib_dups->{$lib};
- for my $dup_lib (sort { $a cmp $b } keys %{$these_libs}) {
- print " vs. '$dup_lib': ";
- for my $sym (@{$these_libs->{$dup_lib}}) {
- print disp_sym($sym) . " ";
- }
- print "\n";
- }
- }
-}
diff --git a/bin/fix-deps b/bin/fix-deps
deleted file mode 100755
index e93e88b32..000000000
--- a/bin/fix-deps
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/bin/sh
-
-#
-# OO.o dependencies are perenially broken in some respects:
-#
-
-exit 0;
-
-OOBUILDDIR=$1;
-if test "z$OOO_FIX_DEPS" = "zNO"; then
- echo "Skip resource poking";
-else
- echo "Force resource dependency rebuild";
- find $OOBUILDDIR/*/*.pro -name '*.dpr' -exec rm {} \; || true;
- find $OOBUILDDIR/*/*.pro -name '*.don' -exec rm {} \; || true;
- find $OOBUILDDIR/*/*.pro -name '*.srs' -exec rm {} \; || true;
- touch $OOBUILDDIR/desktop/source/offwrp/wrapper.cxx
-fi
diff --git a/bin/fixguard.py b/bin/fixguard.py
deleted file mode 100755
index 3a3b85d40..000000000
--- a/bin/fixguard.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-import sys
-import re
-import os
-import shutil
-import tempfile
-
-exp = '#ifndef.*_(?:H|HXX|HRC|HPP)_*[ \t]*\/*.*\n(#include.*\n)#endif[ \t]*\/*.*\n'
-# alternative pattern to remove commented out include quards
-#exp = '//.*#ifndef.*_(?:H|HXX|HRC|HPP)_*[ \t]*\/*.*\n(//.*#include.*\n)//.*#endif[ \t]*\/*.*\n'
-
-filename = sys.argv[1]
-if not os.path.isfile(filename):
- print "Error: File does not exist: " + filename
- sys.exit(1)
-
-data = open(filename).read()
-
-temp_dir = tempfile.mkdtemp()
-temp_file = temp_dir + "/fixed"
-
-o = open(temp_file,"w")
-o.write( re.sub(exp,"\\1",data) )
-o.close()
-
-os.chmod(temp_file, 0644)
-shutil.move(temp_file, filename)
-
-os.removedirs(temp_dir)
diff --git a/bin/g b/bin/g
deleted file mode 100755
index 23811210c..000000000
--- a/bin/g
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env bash
-#
-# Wrapper for git to handle more subdirs at the same time
-# this wrapper redirect to the one in the bootstrap repo, that will do the actual work
-#
-
-BASE_DIR=$(dirname $0)
-cd ${BASE_DIR}/..
-BOOTSTRAPDIR="$(pwd)/bootstrap"
-
-if [ ! -e ${BOOTSTRAPDIR} ]; then
- ${BASE_DIR?}/migrate-rawbuild-to-bootstrap || exit 1;
-fi
-
-if [ -d ${BOOTSTRAPDIR} ]; then
- ${BOOTSTRAPDIR}/g "$@"
-fi
-
-# vi:set shiftwidth=4 expandtab:
diff --git a/bin/gen b/bin/gen
deleted file mode 100755
index 4d3f1555d..000000000
--- a/bin/gen
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env perl
-
-sub usage() {
- print "Usage: gen dest_dir [-r] file1 [file2 ...]\n";
- print "Turn a set of files into a set of patches (diffed against /dev/null).\n";
- print "-r Recurse.\n";
-}
-
-if ( ! ( $dest = shift @ARGV ) ) {
- print STDERR "No destination specified!\n";
- usage();
- exit 1;
-}
-if ($dest eq '-h' || $dest eq '--help') {
- usage();
- exit 0;
-}
-
-sub gen_diff($)
-{
- my $patch = lc ($a);
- $patch =~ s/\//-/g;
- $patch =~ s/\./-/g;
- $patch .= '.diff';
- print "regenerate $patch from $a\n";
-
- my $ppipe;
- my $output;
-
- local $SIG{PIPE} = sub { die "spooler pipe broke" };
-
- open ($ppipe, "diff --new-file -u /dev/null $a |") || die "Can't diff: $!";
- open ($output, ">$dest/$patch") || die "Can't create patch: $!";
- while (<$ppipe>) {
- s/^([\-\+]{3}\s+[\S\d_\-\.]+\s+).*/$1/;
- print $output $_;
- }
- close ($output) || die "Can't close diff: $!";
- close ($ppipe); # odd ... || die "Can't close patch pipe: $! $?";
-}
-
-sub filter_crud($)
-{
- my $a = shift;
-
- $a =~ /~$/ && return;
- $a =~ /\#$/ && return;
- $a =~ /\.orig$/ && return;
- $a =~ /unxlng.*\.pro$/ && return;
- $a eq 'CVS' && return;
- $a eq '.' && return;
- $a eq '..' && return;
-
- return $a;
-}
-
-sub slurp_dir($);
-
-sub slurp_dir($)
-{
- my $dir = shift;
- my ($dirhandle, $fname);
- my @files = ();
-
- opendir ($dirhandle, $dir) || die "Can't open $dir";
- while ($fname = readdir ($dirhandle)) {
- $fname = filter_crud($fname);
- defined $fname || next;
- if (-d "$dir/$fname") {
- push @files, slurp_dir("$dir/$fname");
- } else {
- push @files, "$dir/$fname";
- }
- }
- closedir ($dirhandle);
-
- return @files;
-}
-
-my @files = ();
-my $recurse = 0;
-
-for $a (@ARGV) {
- if ($a eq '-h' || $a eq '--help') {
- usage();
- exit 0;
-
- } elsif ($a eq '-R' || $a eq '-r') {
- $recurse = 1;
-
- } else {
- $a = filter_crud($a);
- defined $a || next;
-
- if (-d $a) {
- if ($recurse) {
- push @files, slurp_dir($a);
- } else {
- print "skipping dir '$a'\n";
- }
- } else {
- push @files, $a if defined $a;
- }
- }
-}
-
-for $a (@files) {
- print "Diffing '$a'\n";
- gen_diff ($a);
-}
diff --git a/bin/gen-xlsx-copy-of-xls.sh b/bin/gen-xlsx-copy-of-xls.sh
deleted file mode 100755
index 9d58ed81c..000000000
--- a/bin/gen-xlsx-copy-of-xls.sh
+++ /dev/null
@@ -1,125 +0,0 @@
-#!/bin/sh
-
-TREE="$1"
-
-HEADER="sc/source/filter/inc"
-XLS="sc/source/filter/excel"
-XLS97="sc/source/filter/xcl97"
-
-if [ ! -d "$TREE/$FILTER" ] ; then
- cat 1>&2 << EOF
-gen-xlsx-copy-of-xls.sh build/<milestone>
-
-Updates patches/dev300/xlsx-filter-as-a-separate-lib-xls-copy.diff.
-EOF
- exit 1
-fi
-
-INC="
-XclExpChangeTrack.hxx
-excdoc.hxx
-excrecds.hxx
-exp_op.hxx
-imp_op.hxx
-xcl97esc.hxx
-xcl97rec.hxx
-xechart.hxx
-xecontent.hxx
-xeescher.hxx
-xeformula.hxx
-xehelper.hxx
-xelink.hxx
-xename.hxx
-xepage.hxx
-xepivot.hxx
-xerecord.hxx
-xeroot.hxx
-xestream.hxx
-xestyle.hxx
-xetable.hxx
-xeview.hxx
-xistyle.hxx
-xladdress.hxx
-xlescher.hxx
-xlpivot.hxx
-xlview.hxx
-"
-
-XLS_INC="
-xltoolbar.hxx
-"
-
-ADD_PREFIX="
-excdoc.cxx
-excrecds.cxx
-exctools.cxx
-expop2.cxx
-fontbuff.cxx
-frmbase.cxx
-namebuff.cxx
-tokstack.cxx
-xecontent.cxx
-xeescher.cxx
-xeformula.cxx
-xehelper.cxx
-xechart.cxx
-xelink.cxx
-xename.cxx
-xepage.cxx
-xepivot.cxx
-xerecord.cxx
-xeroot.cxx
-xestream.cxx
-xestring.cxx
-xestyle.cxx
-xetable.cxx
-xeview.cxx
-xladdress.cxx
-xlescher.cxx
-xlformula.cxx
-xlchart.cxx
-xlpage.cxx
-xlpivot.cxx
-xlroot.cxx
-xlstyle.cxx
-xltools.cxx
-xltoolbar.cxx
-xltracer.cxx
-xlview.cxx
-"
-
-ADD_PREFIX_97="
-XclExpChangeTrack.cxx
-xcl97esc.cxx
-xcl97rec.cxx
-"
-
-new_diff() {
- P="$1"
- FROM="$2"
- TO="$3"
- if [ ! -f "$TREE/$P/$FROM" ] ; then
- echo "error: $TREE/$P/$FROM does not exist." 1>&2
- exit 1
- fi
- echo "--- /dev/null"
- echo "+++ sc/source/filter/xlsx/$TO"
- LINES=`wc -l "$TREE/$P/$FROM" | sed 's/ .*//'`
- echo "@@ -0,0 +1,$LINES @@"
- sed 's/^/+/' < "$TREE/$P/$FROM"
-}
-
-(
- for I in $INC ; do
- new_diff $HEADER $I $I
- done
- for I in $XLS_INC ; do
- new_diff $XLS $I $I
- done
- for I in $ADD_PREFIX ; do
- new_diff $XLS $I xlsx-$I
- done
- for I in $ADD_PREFIX_97 ; do
- new_diff $XLS97 $I xlsx-$I
- done
-) > patches/dev300/xlsx-filter-as-a-separate-lib-xls-copy.diff
diff --git a/bin/genFromScratch b/bin/genFromScratch
deleted file mode 100755
index e09e259ec..000000000
--- a/bin/genFromScratch
+++ /dev/null
@@ -1,194 +0,0 @@
-#!/usr/bin/env perl
-use File::Basename;
-use File::stat;
-use File::Copy;
-sub usage() {
- print "Usage: genFromScratch dest_dir base_dir scratch_dir [ -R | file1 file2 ]\n";
- print "Turn a set of files in scratch dir a set of patches (diffed against base_dir).\n";
-}
-
-if ( ! ( $dest = shift @ARGV ) ) {
- print STDERR "No destination specified!\n";
- usage();
- exit 1;
-}
-
-if ( ! ( $base = shift @ARGV ) ) {
- print STDERR "No base dir to compare against specified!\n";
- usage();
- exit 1;
-}
-
-if ( ! ( $scratch = shift @ARGV ) ) {
- print STDERR "No scratch dir to compare against specified!\n";
- usage();
- exit 1;
-}
-
-print STDOUT "base dir assigned to $base\n";
-
-if ($dest eq '-h' || $dest eq '--help') {
- usage();
- exit 0;
-}
-
-sub gen_diff($)
-{
- my $afterSlash = $a;
- $afterSlash =~ s#$scratch##;
- $afterSlash =~ s#/##;
- my $scratch = dirname($a);
- my $fileInBase = "$base/$afterSlash";
- my $patch = lc ($fileInBase);
- $patch =~ s/\/\//\//g;
- $patch =~ s/\//-/g;
- $patch =~ s/\./-/g;
- $patch .= '.diff';
- my $isNewFile = 0;
- my $diffNeeded = 0;
- my $binDir = dirname($0);
- my $cvsclean = "$binDir/cvsclean";
- if ( ! -f $fileInBase ) {
- $isNewFile = 1;
- print "patch (/dev/null) file for $fileInBase needs to be created\n";
- }
- else {
- my $tmpFile = "/tmp/gen_diff";
- my $status = system("diff -up $fileInBase $a | $cvsclean > $tmpFile");
- my $info = stat($tmpFile) or die "no $tmpFile: $!";
- if ( ($status >>=8) == 0 && ( $info->size > 0) ) {
- $diffNeeded = 1;
- print "diff needed for $fileInBase\n";
-
-
- }
- }
- if ( $isNewFile || $diffNeeded ) {
-
- my $ppipe;
- my $output;
- my $oldPatchExists = 0;
- my $tmpPatchName;
-
- local $SIG{PIPE} = sub { die "spooler pipe broke" };
- if ( $isNewFile ) {
- open ($ppipe, "diff --new-file -u /dev/null $a |") || die "Can't diff: $!";
- }
- else {
- # must unapply $dest/$patch first ( if it exists )
- if ( -f "$dest/$patch" ) {
- $oldPatchExists = 1;
- # copy old patchfile
- $tmpPatchName = "$dest/$patch.orig";
- copy("$dest/$patch",$tmpPatchName) || die "Can't rename $dest/$patch to $tmpPatchName: $!";
- print "$dest/$patch exists, testing unapply\n" ;
- my $cmd = "( patch -R -l -p0 --dry-run -d . ) < $tmpPatchName";
- print "$cmd\n";
- system ($cmd) && die "Testing patch $tmpPatchName failed.";
- print "$dest/$patch unapply....\n" ;
- $cmd = "( patch -R -l -p0 -d . ) < $tmpPatchName";
- system ($cmd) && die "unapply $tmpPatchName failed.";
- print "unapply done for $tmpPatchName\n" ;
- }
-
- # before regenerating the patch the old one must be backed up
- # regenate patch
- # re-apply backedup patch
- open ($ppipe, "diff -up $fileInBase $a | $cvsclean |") || die "Can't diff: $!";
- print "diff done for $fileInBase $a \n";
- }
- open ($output, ">$dest/$patch") || die "Can't create patch: $!";
- while (<$ppipe>) {
- s/^([\-\+]{3}\s+[\S\d_\-\.]+\s+).*/$1/;
- s#$scratch.*#$base/$afterSlash#;
- print $output $_;
- }
- if ( $oldPatchExists ) {
- print "$dest/$patch re-apply....\n" ;
- $cmd = "( patch -l -p0 -d . > /dev/null ) < $tmpPatchName";
- system ($cmd) && die "re-apply $tmpPatchName failed.";
- unlink ($tmpPatchName)
- }
- close ($output) || die "Can't close diff: $!";
- close ($ppipe); # odd ... || die "Can't close patch pipe: $! $?";
- }
-}
-
-sub filter_crud($)
-{
- my $a = shift;
-
- $a =~ /~$/ && return;
- $a =~ /\#$/ && return;
- $a =~ /\.orig$/ && return;
- $a =~ /unxlng.*\.pro$/ && return;
- $a =~ /.swp$/ && return;
- $a =~ /POSITION/ && return;
- $a =~ /ReadMe/ && return;
- $a =~ /.tmp$/ && return;
- $a =~ /\.svn/ && return;
- $a eq 'CVS' && return;
- $a eq '.' && return;
- $a eq '..' && return;
-
- return $a;
-}
-
-sub slurp_dir($);
-
-sub slurp_dir($)
-{
- my $dir = shift;
- my ($dirhandle, $fname);
- my @files = ();
-
- opendir ($dirhandle, $dir) || die "Can't open $dir";
- while ($fname = readdir ($dirhandle)) {
- $fname = filter_crud($fname);
- defined $fname || next;
- if (-d "$dir/$fname") {
- push @files, slurp_dir("$dir/$fname");
- } else {
- push @files, "$dir/$fname";
- }
- }
- closedir ($dirhandle);
-
- return @files;
-}
-
-my @files = ();
-my $recurse = 0;
-
-for $a (@ARGV) {
- printf "processing $a\n";
- if ($a eq '-h' || $a eq '--help') {
- usage();
- exit 0;
-
- } elsif ($a eq '-R' || $a eq '-r') {
- $recurse = 1;
- }
- if ($recurse) {
- $a = $scratch;
- }
-
- $a = filter_crud($a);
- defined $a || next;
- if (-d $a) {
- if ($recurse) {
- push @files, slurp_dir($a);
- }
- else {
- print "skipping dir '$a'\n";
- }
- }
- else {
- push @files, $a;
- }
-
-}
-
-for $a (@files) {
- gen_diff ($a);
-}
diff --git a/bin/generate-bash-completion b/bin/generate-bash-completion
deleted file mode 100755
index dafe413cb..000000000
--- a/bin/generate-bash-completion
+++ /dev/null
@@ -1,234 +0,0 @@
-#!/usr/bin/env perl
-# script to generate LibreOffice bash_completion file for the main applications
-# written by Rene Engelhard <rene@debian.org>, Public Domain
-# updated for libreoffice-build by Petr Mladek <pmladek@suse.cz>, Public Domain
-# yes, this script probably is not real good code :) but still easier
-# to maintain than adding those entries statically many times in
-# a file...
-
-use strict;
-
-my @DRAWDOCS=("sxd", "std", "dxf", "emf", "eps", "met", "pct", "sgf", "sgv", "sda",
- "sdd", "vor", "svm", "wmf", "bmp", "gif", "jpg", "jpeg", "jfif", "fif",
- "jpe", "pcd", "pcx", "pgm", "png", "ppm", "psd", "ras", "tga", "tif",
- "tiff", "xbm", "xpm", "odg", "otg", "fodg", "odc", "odi", "sds",
- "wpg", "svg");
-
-my @IMPRESSDOCS=("sxi", "sti", "ppt", "pps", "pot", "sxd", "sda", "sdd", "sdp",
- "vor", "cgm", "odp", "otp", "fodp", "ppsm", "ppsx", "pptm", "pptx",
- "potm", "potx");
-
-my @TEMPLATES=("stw", "dot", "vor", "stc", "xlt", "sti", "pot", "std", "stw",
- "dotm", "dotx", "potm", "potx", "xltm", "xltx");
-
-my @MATHDOCS=("sxm", "smf", "mml", "odf");
-
-my @MASTERDOCS=("sxg", "odm", "sgl");
-
-my @WRITERDOCS=("doc", "dot", "rtf", "sxw", "stw", "sdw", "vor", "txt", "htm?",
- "xml", "wp", "wpd", "wps", "odt", "ott", "fodt", "docm", "docx",
- "dotm", "dotx");
-
-my @WEBDOCS=("htm", "html", "stw", "txt", "vor", "oth");
-
-my @BASEDOCS=("odb");
-
-my @CALCDOCS=("sxc", "stc", "dif", "dbf", "xls", "xlw", "xlt", "rtf", "sdc", "vor",
- "slk", "txt", "htm", "html", "wk1", "wks", "123", "xml", "ods", "ots",
- "fods", "csv", "xlsb", "xlsm", "xlsx", "xltm", "xltx");
-
-my @EXTENSIONS=("oxt");
-
-# default names of lowrappers
-# use "" if you want to disable any wrapper
-my %APPS = (
- office => "libreoffice",
- master => "",
- base => "lobase",
- calc => "localc",
- draw => "lodraw",
- impress => "loimpress",
- math => "lomath",
- template => "lofromtemplate",
- unopkg => "unopkg",
- web => "loweb",
- writer => "lowriter",
-);
-
-my $office_shell_function = "_loexp_";
-
-sub usage()
-{
- print "Script to Generate bash completion for LO wrappers\n\n";
-
- print "Usage: $0 --help\n";
- print " $0 [--binsuffix=suffix]\n";
- print "\t\t[--compat-oowrappers]\n";
- print "\t\t[--office=wrapper_name]\n";
- print "\t\t[--master=wrapper_name]\n";
- print "\t\t[--base=wrapper_name]\n";
- print "\t\t[--calc=wrapper_name]\n";
- print "\t\t[--draw=wrapper_name]\n";
- print "\t\t[--impress=wrapper_name]\n";
- print "\t\t[--math=wrapper_name]\n";
- print "\t\t[--template=wrapper_name]\n";
- print "\t\t[--unopkg=wrapper_name]\n";
- print "\t\t[--web=wrapper_name]\n";
- print "\t\t[--writer=wrapper_name]\n";
- print "\t\tinput_file\n";
- print "\t\toutput_file\n\n";
-
- print "Options:\n";
- print "\t--help\t\tprint this help\n";
- print "\t--binsuffix\tdefines a suffix that is added after each wrapper\n";
- print "\t--compat-oowrappers\tset wrapper names to the old default oo* wrapper names\n";
-
- print "The other options allows to redefine the wrapper names.\n";
- print "The value \"\" can be used to disable any wrapper.\n\n";
-}
-
-my $infilename;
-my $outfilename;
-my $binsuffix = '';
-
-my $opt;
-foreach my $arg (@ARGV) {
- if ( $arg =~ /--help/ ) {
- usage();
- exit 0;
- } elsif ( $arg =~ /--compat-oowrappers/ ) {
- $APPS{'office'} = "ooffice";
- $APPS{'master'} = "";
- $APPS{'base'} = "oobase";
- $APPS{'calc'} = "oocalc";
- $APPS{'draw'} = "oodraw";
- $APPS{'impress'} = "ooimpress";
- $APPS{'math'} = "oomath";
- $APPS{'template'} = "oofromtemplate";
- $APPS{'unopkg'} = "unopkg";
- $APPS{'web'} = "ooweb";
- $APPS{'writer'} = "oowriter";
- $office_shell_function = "_ooexp_";
- } elsif ( $arg =~ /--binsuffix=(.*)/ ) {
- $binsuffix = "$1";
- } elsif ( $arg =~ /--office=(.*)/ ) {
- $APPS{'office'} = "$1";
- } elsif ( $arg =~ /--master=(.*)/ ) {
- $APPS{'master'} = "$1";
- } elsif ( $arg =~ /--base=(.*)/ ) {
- $APPS{'base'} = "$1";
- } elsif ( $arg =~ /--calc=(.*)/ ) {
- $APPS{'calc'} = "$1";
- } elsif ( $arg =~ /--draw=(.*)/ ) {
- $APPS{'draw'} = "$1";
- } elsif ( $arg =~ /--impress=(.*)/ ) {
- $APPS{'impress'} = "$1"
- } elsif ( $arg =~ /--math=(.*)/ ) {
- $APPS{'math'} = "$1";
- } elsif ( $arg =~ /--template=(.*)/ ) {
- $APPS{'template'} = "$1";
- } elsif ( $arg =~ /--unopkg=(.*)/ ) {
- $APPS{'unopkg'} = "$1";
- } elsif ( $arg =~ /--web=(.*)/ ) {
- $APPS{'web'} = "$1";
- } elsif ( $arg =~ /--writer=(.*)/ ) {
- $APPS{'writer'} = "$1"
- } elsif ( $arg =~ /^-.*/ ) {
- printf STDERR "Error: invalid option \"$arg\", try --help\n";
- exit 1;
- } elsif ( $outfilename ) {
- printf STDERR "Error: too much arguments, try --help\n";
- exit 1;
- } else {
- if ($infilename) {
- $outfilename = "$arg";
- } else {
- $infilename = "$arg";
- }
- }
-}
-
-unless ( $infilename ) {
- printf STDERR "Error: undefined input file, try --help\n";
- exit 1;
-}
-
-unless ( $outfilename ) {
- printf STDERR "Error: undefined output file, try --help\n";
- exit 1;
-}
-
-#add binsuffix
-foreach my $app (keys %APPS) {
- $APPS{$app} .= "$binsuffix" unless ( "$APPS{$app}" eq "" );
-}
-
-sub print_suffixes_check {
- my $app = shift(@_);
- my $first_suffix = shift(@_);
-
- ($first_suffix) || die "Error: No suffix defined for $app\n";
-
- print BCOUTFILE " $app)\t\te=\'!*.+(" . $first_suffix . "|" . uc($first_suffix);
- foreach my $suffix (@_) {
- print BCOUTFILE "|" . $suffix;
- print BCOUTFILE "|" . uc($suffix);
- }
- print BCOUTFILE ")\' ;;\n";
-}
-
-sub print_suffixes_checks {
- foreach my $app (keys %APPS) {
- # skip the disabled wrapper
- next if ( $APPS{$app} eq "" );
-
- if ($app eq "draw" ) { print_suffixes_check ($APPS{$app}, @DRAWDOCS); }
- if ($app eq "writer") { print_suffixes_check ($APPS{$app}, @WRITERDOCS, @MASTERDOCS); }
- if ($app eq "web") { print_suffixes_check ($APPS{$app}, @WEBDOCS); }
- if ($app eq "math") { print_suffixes_check ($APPS{$app}, @MATHDOCS); }
- if ($app eq "impress") { print_suffixes_check ($APPS{$app}, @IMPRESSDOCS); }
- if ($app eq "base") { print_suffixes_check ($APPS{$app}, @BASEDOCS); }
- if ($app eq "calc") { print_suffixes_check ($APPS{$app}, @CALCDOCS); }
- if ($app eq "master") { print_suffixes_check ($APPS{$app}, @MASTERDOCS); }
- if ($app eq "template") { print_suffixes_check ($APPS{$app}, @TEMPLATES); }
- # libreoffice should contain all...
- if ($app eq "office") { print_suffixes_check ($APPS{$app}, @DRAWDOCS, @WRITERDOCS, @MATHDOCS, @IMPRESSDOCS, @BASEDOCS, @CALCDOCS, @MASTERDOCS, @TEMPLATES, @WEBDOCS); }
- # unopkg is a standalone tool
- if ($app eq "unopkg") { print_suffixes_check ($APPS{$app}, @EXTENSIONS); }
- }
-}
-
-sub print_apps {
- my $app_to_print;
- foreach my $app (keys %APPS) {
- # skip the disabled wrapper
- next if ( $APPS{$app} eq "" );
-
- print BCOUTFILE "\t\t\t\t\t$app_to_print \\\n" if ($app_to_print);
- $app_to_print = $APPS{$app};
- }
- # the last app will be printed without the final backslash
- ($app_to_print) || die "Error: No LO wrapper was selected\n";
- print BCOUTFILE "\t\t\t\t\t$app_to_print\n";
-}
-
-
-open (BCINFILE, "$infilename") || die "Error: can't open $infilename for reading: $!\n";
-open (BCOUTFILE, "> $outfilename") || die "Error: can't open $outfilename for writing: $!\n";
-
-while (my $line = <BCINFILE>) {
- chomp $line;
-
- $line =~ s/\@OFFICE_SHELL_FUNCTION\@/$office_shell_function/;
-
- if ($line =~ m/\@BASH_COMPLETION_SUFFIXES_CHECKS\@/) {
- print_suffixes_checks();
- } elsif ($line =~ m/\@BASH_COMPLETION_OOO_APPS\@/) {
- print_apps();
- } else {
- print BCOUTFILE "$line\n";
- }
-}
-
-close (BCINFILE);
-close (BCOUTFILE);
diff --git a/bin/genrelocs b/bin/genrelocs
deleted file mode 100755
index 9e3b75ffe..000000000
--- a/bin/genrelocs
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env perl
-
-my $num_funcs = 50;
-
-sub generate_class($$)
-{
- my $fh = shift;
- my $name = shift;
-
- print $fh "class $name {\n";
- print $fh " $name(); /* key method */\n";
- for (my $i = 0; $i < $num_funcs; $i++) {
- print $fh "virtual int myTestFunction$i (int i)\n";
- print $fh "{ return $i; }\n";
- }
- print $fh "};\n";
- print $fh "$name" . "::" . "$name() { }\n";
-}
-
-for (my $i = 0; $i < 200; $i++)
-{
- generate_class(STDOUT, "Test$i");
-}
-
diff --git a/bin/git-new-workdir b/bin/git-new-workdir
deleted file mode 100755
index 3ad2c0cea..000000000
--- a/bin/git-new-workdir
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/bin/sh
-
-usage () {
- echo "usage:" $@
- exit 127
-}
-
-die () {
- echo $@
- exit 128
-}
-
-if test $# -lt 2 || test $# -gt 3
-then
- usage "$0 <repository> <new_workdir> [<branch>]"
-fi
-
-orig_git=$1
-new_workdir=$2
-branch=$3
-
-# want to make sure that what is pointed to has a .git directory ...
-git_dir=$(cd "$orig_git" 2>/dev/null &&
- git rev-parse --git-dir 2>/dev/null) ||
- die "Not a git repository: \"$orig_git\""
-
-case "$git_dir" in
-.git)
- git_dir="$orig_git/.git"
- ;;
-.)
- git_dir=$orig_git
- ;;
-esac
-
-# don't link to a configured bare repository
-isbare=$(git --git-dir="$git_dir" config --bool --get core.bare)
-if test ztrue = z$isbare
-then
- die "\"$git_dir\" has core.bare set to true," \
- " remove from \"$git_dir/config\" to use $0"
-fi
-
-# don't link to a workdir
-if test -L "$git_dir/config"
-then
- die "\"$orig_git\" is a working directory only, please specify" \
- "a complete repository."
-fi
-
-# don't recreate a workdir over an existing repository
-if test -e "$new_workdir"
-then
- die "destination directory '$new_workdir' already exists."
-fi
-
-# make sure the links use full paths
-git_dir=$(cd "$git_dir"; pwd)
-
-# create the workdir
-mkdir -p "$new_workdir/.git" || die "unable to create \"$new_workdir\"!"
-
-# create the links to the original repo. explicitly exclude index, HEAD and
-# logs/HEAD from the list since they are purely related to the current working
-# directory, and should not be shared.
-for x in config refs logs/refs objects info hooks packed-refs remotes rr-cache svn
-do
- case $x in
- */*)
- mkdir -p "$(dirname "$new_workdir/.git/$x")"
- ;;
- esac
- ln -s "$git_dir/$x" "$new_workdir/.git/$x"
-done
-
-# now setup the workdir
-cd "$new_workdir"
-# copy the HEAD from the original repository as a default branch
-cp "$git_dir/HEAD" .git/HEAD
-# checkout the branch (either the same as HEAD from the original repository, or
-# the one that was asked for)
-git checkout -f $branch
diff --git a/bin/gob b/bin/gob
deleted file mode 100755
index 292c17fd0..000000000
--- a/bin/gob
+++ /dev/null
@@ -1,1070 +0,0 @@
-#!/usr/bin/env python
-
-# gob.py - ooo-build to GIT conversion: dump gob branch description files
-# and create branched git
-#
-# Usage: python bin/gob --help
-
-import optparse
-import operator
-import os
-import re
-import sys
-import shutil
-
-class SystemFailed (Exception):
- pass
-
-def log (s, threshold=1):
- if options.verbose > threshold:
- print >> sys.stderr, s
-
-def info (s):
- log (s, threshold=0)
-
-def exception_string (exception=Exception ('no message')):
- import traceback
- return traceback.format_exc (None)
-
-def filter_out (predicate, lst):
- return filter (lambda x: not predicate (x), lst)
-
-def makedirs (dir):
- log ('mkdir %(dir)s' % locals ())
- os.makedirs (dir)
-
-def symlink (src, dest):
- log ('ln -s %(src)s %(dest)s' % locals ())
- os.symlink (src, dest)
-
-def rename (src, dest):
- log ('mv %(src)s %(dest)s' % locals ())
- os.rename (src, dest)
-
-def rmdir (dir):
- log ('rmdir %(dir)s' % locals ())
- os.rmdir (dir)
-
-def system (command, raise_on_error=True):
- if options.verbose > 1:
- log ('executing: %(command)s' % locals ())
- if options.verbose < 3:
- command = '(%(command)s) > gob.log 2>&1' % locals ()
- status = os.system (command)
- if status and raise_on_error:
- info (command)
- info (file ('gob.log').read ())
- raise SystemFailed ('Command failed: %(command)s' % locals ())
- return status
-
-def read_pipe (command, raise_on_error=True):
- log ('executing: %(command)s' % locals ())
- if options.verbose < 3:
- command = '(%(command)s) 2> gob.log' % locals ()
- pipe = os.popen (command)
- output = pipe.read ()
- log ('pipe-output:\n%(output)s)s' % locals (), threshold=2)
- if pipe.close () and raise_on_error:
- info (command)
- info (file ('gob.log').read ())
- raise SystemFailed ('Pipe failed: %(command)s' % locals ())
- return output
-
-def list_dirs (dir, allow_link=True):
- return filter (lambda x: (os.path.isdir (os.path.join (dir, x))
- and (allow_link
- or not os.path.islink (os.path.join (dir, x)))),
- os.listdir (dir))
-
-def find_file (root, path, name):
- for dir in path:
- file_name = os.path.abspath (os.path.join (root, dir, name))
- if os.path.isfile (file_name):
- return file_name
- return None
-
-def _apply_patch (dir, patch):
- system ('patch -l -p0 -d %(dir)s < %(patch)s' % locals ())
-
-def apply_patch (dir, patch):
- if not options.split:
- return _apply_patch (dir, patch)
- apply_dir = os.path.join (dir, options.flat_apply_dir)
- _apply_patch (apply_dir, patch)
- for module in list_dirs (apply_dir, allow_link=False):
- rename (os.path.join (apply_dir, module), os.path.join (dir, module))
-
-def get_srcpack2_dict ():
- def modules_line (x):
- e = x.split ("=")
- return e[0], e[1].split (',')
- return dict (map (modules_line, file (src_dir + 'bin/modules2.txt').readlines ()))
-
-def for_each (func, lst):
- for i in lst:
- func (i)
-
-def move (src):
- src2 = get_srcpack2_dict ()
- def move_pack (pack):
- pack_dir = os.path.join (src, pack)
- def move_module (module):
- module_dir = src + '/' + module
- pack_module_dir = pack_dir + '-/' + module
- if os.path.exists (module_dir):
- rename (module_dir, pack_module_dir)
- makedirs (pack_dir + '-')
- for_each (move_module, src2[pack])
- rename (pack_dir + '-', pack_dir)
- for_each (move_pack, src2.keys ())
-
-def move_back (src):
- src2 = get_srcpack2_dict ()
- def move_pack (pack):
- pack_dir = os.path.join (src, pack)
- def move_module (module):
- print 'renaming:', module
- module_dir = src + '/' + module
- pack_module_dir = pack_dir + '-/' + module
- if os.path.exists (pack_module_dir):
- rename (pack_module_dir, module_dir)
- else:
- print 'no such dir:', pack_module_dir
- rename (pack_dir, pack_dir + '-')
- for_each (move_module, src2[pack])
- rmdir (pack_dir + '-')
- for_each (move_pack, src2.keys ())
-
-def setup_flat_apply_dir (src):
- src2 = get_srcpack2_dict ()
- apply_dir = os.path.join (src, options.flat_apply_dir)
- shutil.rmtree (apply_dir, ignore_errors=True)
- makedirs (apply_dir)
- missing = ['mdbtools', 'libwpg', 'libwps', 'xalan']
- for pack in src2.keys ():
- for module in src2[pack]:
- symlink (os.path.join ('..', '..', pack, module), os.path.join (apply_dir, module))
- for pack in missing:
- symlink (os.path.join ('..', '..', pack), os.path.join (apply_dir, pack))
-
-def patch_get_branch (patch):
- patch_file = patch.file_name
- if not patch_file:
- return None
-
- if 'vba' in options.dir_branch and int (options.milestone) > 19:
- m = re.search ('(cws-scsheetprotection02|sc-autofilter-empty-nonempty|sc-copy-source-border|sc-datapilot|sc-move-from-origin|sc-paste-on-enter|sc-save-password-minlength|scroll-accel|xl-import-formradiobutton)', patch_file)
- if m:
- return 'vba'
-
- # Prevent from going into ooxml
- m = re.search ('(writerfilter-qnametostr-NOOPTFILES)', patch_file)
- if m:
- return m.group (1)
-
- dir = os.path.basename (os.path.dirname (patch_file))
- base = os.path.splitext (os.path.basename (patch_file))[0]
-
- module_re = None
- if not module_re:
- modules = list_dirs (options.build_dir)
- if options.split:
- modules = list_dirs (os.path.join (options.build_dir, options.flat_apply_dir))
- module_re = '|'.join (modules)
-
- # Patches with a simple digit suffix are aggregated into one branch
- if (not re.search ('%(module_re)s$|\d\d+$' % locals (), base)
- and re.search ('\d$', base)):
- base = re.sub ('-*\d$', '', base)
-
- # Patches in a separated may be aggregated into one branch
- if dir in options.dir_branch:
- return dir
-
- # Pathes with a branch_prefix are aggregated into one branch
- branch_prefix = [
- 'cjk-character-units',
- 'cws-layoutdialogs',
- 'cws-scsheetprotection02',
- 'emf+',
- 'fpicker-kde',
- 'jvmfwk-gij',
- 'lockfile',
- 'mono',
- 'sal-strintern-speed',
- 'sc-dataform',
- 'sc-datapilot',
- 'speed-configmgr',
- 'svg-import',
- 'system-lpsolve',
- 'tools-urlobj-smb-scheme',
- 'transogl',
- 'unittesting',
- 'unxsplash',
- 'vba',
- 'wpgimporter',
- 'writerfiltery'
- ]
-
- branch_prefix_re = '^(' + '|'.join (branch_prefix).replace ('+', '\+') + ')'
- m = re.search (branch_prefix_re, base)
- if m:
- def assert_dir_group (m, s):
- return s in options.dir_branch or m.group (1) != s
- if (assert_dir_group (m, 'vba')
- and assert_dir_group (m, 'emf+')
- and assert_dir_group (m, 'unittesting')):
- return m.group (1)
-
- # Some patches are declared in the middle of a branch [another
- # series of patches that form a branch] which depends on that
- # patch, but have derogatory naming. These patches must be
- # categorised explicitly. The ASSIMILATE warning helps to detect
- # these.
- if 'emf+' in options.dir_branch and re.search ('^(cairocanvas-alpha-pixmap-rewrite|vcl-grey-alpha-unix-sal-bitmap)', base):
- return 'emf+'
-
- if 'ooxml' in options.dir_branch and re.search ('^(win32-installer-register-moox-types)', base):
- return 'ooxml'
-
- if 'vba' in options.dir_branch and re.search ('^(default-autotext-and-form-name|sc-toggle-merge-center)', base):
- return 'vba'
-
- if re.search ('^(fix-linkoo|linkoo-)', base):
- return 'linkoo'
-
- if re.search ('^(fpicker-common-scp2)', base):
- return 'fpicker-kde'
-
- # Remove workspace and milestone suffixes
- workspace = options.workspace
- milestone = options.milestone
- base = re.sub ('-%(workspace)s' % locals (), '', base)
- base = re.sub ('-m%(milestone)s' % locals (), '', base)
-
- # Patches with a -localize suffix are aggregated into one branch
- base = re.sub ('-localize$' % locals (), '', base)
-
- # Patches with a module suffix are aggregated into one branch
- base = re.sub ('-(%(module_re)s)$' % locals (), '', base)
-
- # git does not like dots in branch names
- base = base.replace('.','-')
-
- return base
-
-# Hard handy work for m19 gets quickly bit rotten
-# Use patch dependency calculation instead
-manual_m19_branch_dependencies = {
- 'buildfix-layoutdialogs': ['cws-layoutdialogs'],
- 'cairocanvas-fix-image-cache': ['cairo'],
- 'cws-scsheetprotection02': ['sc-datapilot', 'sc-paste-on-enter'],
- 'emf+': ['link-as-needed'],
- 'forms-radio-button-group-names': ['form-control-visibility'],
- 'layout-plugin': ['cws-layoutdialogs'],
- 'layout-tab': ['layout-plugin'],
- 'linkwarn-svtools-miscopts-bits': ['novell-win32-odma'],
- 'ooo59127.vcl.honourcairofont': ['ooo64508.vcl.honourfontconfighinting'],
- 'oosplash-etc-openoffice-sofficerc': ['unxsplash'],
- 'ooxml': ['lwp-filter-component'],
- 'sc-copy-on-merged-cells': ['sc-dataform'],
- 'sc-dataform': ['sc-hrc-ooo-build-resources'],
- 'sc-datapilot': ['sc-dataform'],
- 'sc-dp-gridlayout': ['sc-datapilot'],
- 'sc-export-shape-hlink-bindings': ['sc-export-shape-macro-bindings'],
- 'sc-simple-sort-include-format-header': ['sc-natural-sort'],
- 'sc-toggle-merge-center': ['vba'],
- 'sfx2-pre-and-postprocess-crash-fix': ['sfx2-pre-and-postprocess-during-save-load'],
- 'sfx2-pre-and-postprocess-during-save-load': ['sfx2-remove-check-update-on-fileload'],
- 'speed-bdirect': ['speed-symbolic-functions'],
- 'speed-store-lck': ['store-core'],
- 'static-libs-use-_pic': ['system-lpsolve'],
- 'ui-desktop-integration': ['linkwarn-svtools-miscopts-bits'],
- 'unittesting': ['tools-qa-urlobj-unittest', 'gnome-vfs-late-init'],
- 'vba': ['cws-npower10', 'cws-pflin10', 'cws-npower11'],
- 'vcl-linking-randr': ['autocorrect-accidental-caps-lock', 'internal-mesa-headers'],
- 'wpgimporter': ['wpsimport'],
-}
-
-def range_union (a, b, fuzz=0):
- u = (max (a[0], b[0]) - fuzz, min (a[1], b[1]) + fuzz, a[2], b[2])
- if u[0] > u[1]:
- return None
- return u
-
-class File:
- def __init__ (self, s):
- self.string = s
- self.ranges = None
- self.name = None
- if self.string.find ('\n+++ ') >= 0:
- self.name = re.search ('\n[+]{3}\s+([.]/)?([^\s]+)', self.string).group (2)
- def __repr__ (self):
- return '<File: %(name)s>' % self.__dict__
- def get_ranges (self):
- if not self.ranges:
- self.numbers = re.findall ('\n(@@ -(\d+),(\d+) [+](\d+),(\d+) @@(.|\n[^@])*)', self.string)
- self.ranges = map (lambda x: (min (int (x[1]), int (x[3])), max (int (x[1]) + int (x[2]), int (x[3]) + int (x[4])), x[0][:160]), self.numbers)
- return self.ranges
-
-def patch_depend (p, q):
- files = []
- for file_name in p.files.keys ():
- if file_name in q.files.keys ():
- for a in p.files[file_name].get_ranges ():
- for b in q.files[file_name].get_ranges ():
- union = range_union (a, b, int (options.fuzz))
- if union:
- return union
- return False
-
-def patch_get_dependencies (patches, patch):
- dependencies = ['pristine']
- for p in patches:
- if p == patch:
- break
- if patch_depend (patch, p):
- dependencies += [p.name]
- return dependencies
-
-def branch_get_dependencies (branches, patches, branch):
- patch_dependencies = {}
- patch_overlaps = []
- first_patch = None
- for patch in patches:
- if patch in branches[branch]:
- first_patch = patch
- break
- last_patch = None
- for patch in reversed (patches):
- if patch in branches[branch]:
- last_patch = patch
- break
- for patch in branches[branch]:
- assimilate = False
- for p in patches:
- if p == last_patch:
- name = p.name
- break
- if p == first_patch:
- name = p.name
- # We cannot have a branch pre-depend on a patch/branch
- # that does not exist yet. FIXME: if it is really
- # needed, it should be auto-assimilated by the branch.
- # This is now done manually in patch_get_branch ().
- assimilate = True
- o = patch_depend (patch, p)
- if assimilate and o and p not in branches[branch]:
- name = p.name
- ab = patch_get_branch (p)
- print 'ASSIMILATE[%(branch)s]: %(name)s [%(ab)s]' % locals ()
- continue
- if o:
- patch_dependencies[p] = p
- patch_overlaps += [o]
- branch_dependencies = {}
- for patch in patch_dependencies.values ():
- b = patch_get_branch (patch)
- if b != branch:
- branch_dependencies[b] = b
- if not branch_dependencies:
- return ['pristine']
- return branch_dependencies.values ()
-
-# No overrides are necessary when using fuzz >= 40
-branch_override_dependencies = {
- }
-
-def branch_get_dependencies_with_override (branches, patches, branch):
- return branch_override_dependencies.get (branch, branch_get_dependencies (branches, patches, branch))
-
-GitFailed = SystemFailed
-
-gitignores = '''
-*-
-*-HEAD
-*-git
-*-patched
-*-pristine
-*.bak
-*.cxx-*
-*.deps
-*.git
-*.hxx-*
-*.log
-*.orig
-*.patched
-*.pristine
-*.pyc
-*.rej
-*~
-.\#*
-/Linux*Env.Set*
-/bootstrap
-/makefile.mk
-/solver
-CVS
-TAGS
-\#*
-xxx-have-in-patches-now:localize.sdf
-unxlng*.pro
-autom4te.cache/
-config.log
-config.parms
-config.status
-configure
-set_soenv
-tmon.out
-visibility.cxx
-visibility.s
-warn
-default_images/introabout/intro-save.bmp
-dmake/Makefile
-dmake/config.h
-dmake/config.log
-dmake/config.status
-dmake/dmake
-dmake/*.o
-dmake/dmakeroot.h
-dmake/stamp-h1
-dmake/startup/Makefile
-dmake/startup/config.mk
-dmake/startup/unix/Makefile
-dmake/startup/unix/cygwin/Makefile
-dmake/startup/unix/linux/Makefile
-dmake/startup/unix/macosx/Makefile
-dmake/startup/unix/solaris/Makefile
-dmake/startup/unix/sysvr4/Makefile
-dmake/startup/winnt/Makefile
-dmake/startup/winnt/mingw/Makefile
-dmake/startup/winnt/msvc6/Makefile
-dmake/tests/Makefile
-dmake/unix/.dirstamp
-instsetoo_native/res/banner_nld.bmp
-instsetoo_native/res/banner_ooop.bmp
-instsetoo_native/res/nologoinstall_nld.bmp
-instsetoo_native/res/nologoinstall_ooop.bmp
-instsetoo_native/util/OpenOffice
-sd/xml/transitions-ogl.xml
-setup_native/source/win32/nsis/ooobanner_nld.bmp
-setup_native/source/win32/nsis/ooobanner_ooop.bmp
-setup_native/source/win32/nsis/ooobitmap_nld.bmp
-setup_native/source/win32/nsis/ooobitmap_ooop.bmp
-setup_native/source/win32/nsis/ooosetup_nld.ico
-solenv/unxlng*/
-svx/res
-'''
-keep = '''
-default_images/introabout/intro-save.bmp
-default_images/introabout/intro.bmp
-default_images/svx/res/openabout_ark.png
-default_images/svx/res/openabout_translateorgza.png
-default_images/sw/res/go-oo-team.png
-'''
-
-def create_gitignores (dir):
- for i in filter_out (operator.not_, gitignores.split ('\n')):
- if options.split:
- i = i.replace ('/bootstrap', '/bootstrap/bootstrap')
- if i[0] == '/':
- file (dir + '/.gitignore', 'a').write (i + '\n')
- else:
- slash = ''
- if i[-1] == '/':
- i = i[:-1]
- slash = '/'
- file (dir + '/' + os.path.dirname (i) + '/.gitignore', 'a').write (os.path.basename (i) + slash + '\n')
-
-class Setup:
- string = None
- vars = {}
- def __init__ (self, file_name='config.log'):
- if not self.string and os.path.exists (file_name):
- self.string = file (file_name).read ()
- def get (self, key, default=None):
- return self.vars.get (key, self.read_ (key, default))
- def read_ (self, key, default):
- m = re.search ('''%(key)s=['"*]([^*"']*)''' % locals (), self.string)
- if m:
- self.vars[key] = m.group (1)
- else:
- self.vars[key] = default
- return self.vars[key]
-
-def get_svn_revision ():
- return re.search ('\nRevision: ([0-9]+)', read_pipe ('svn info')).group (1)
-
-def get_git_committish ():
- return re.search ('([^ ]+)', read_pipe ('git log --pretty=oneline -1')).group (1)
-
-class Git:
- def __init__ (self, dir, patched, clean=False):
- self.dir = dir
- self.patched = patched
- self.scratch = 'work/scratch'
- self.workspace = options.workspace
- self.milestone = options.milestone
- self.pristine = 'upstream/%(workspace)s-m%(milestone)s' % self.__dict__
- self.commits = {}
- self.log = {}
- if not os.path.exists (self.dir):
- drink = Setup ().get ('DRINK', 'tea')
- info ('Unpacking source tree - [ go and have some %(drink)s ] ...' % locals ())
- system ('cd bin && ./unpack')
- create_gitignores (dir)
- if options.split:
- move (self.dir)
- setup_flat_apply_dir (self.dir)
- self.system ('touch unpack')
- if not os.path.isdir (dir + '/.git/refs'):
- drink = Setup ().get ('DRINK')
- info ('Creating GIT archive - [ go and have some %(drink)s ] ...' % locals ())
- self.system ('git init')
- #svn_revision = get_svn_revision ()
- #self.commit ('Initial svn:r%(svn_revision)s unpatched.' % locals ())
- git_committish = get_git_committish ()
- self.commit ('Initial ooo-build: %(git_committish)s unpatched.' % locals ())
- self.system ('git branch %(pristine)s' % self.__dict__)
- self.system ('git tag gob-%(workspace)s-%(milestone)s %(pristine)s' % self.__dict__)
- self.system ('git branch pristine')
- self.system ('git gc')
- if clean:
- if self.is_modified ():
- self.system ('git reset --hard HEAD')
- self.system ('git clean -df')
- if self.has_branch (self.patched):
- self.checkout (self.patched)
- else:
- self.system ('git checkout -b %(patched)s pristine' % self.__dict__)
- if self.has_branch (self.scratch):
- self.system ('git branch -D %(scratch)s' % self.__dict__)
- def pipe (self, command, raise_on_error=True):
- dir = self.dir
- return read_pipe ('cd %(dir)s && %(command)s' % locals (), raise_on_error)
- def system (self, command, raise_on_error=True):
- dir = self.dir
- return system ('cd %(dir)s && %(command)s' % locals (), raise_on_error)
- def get_branches (self):
- return filter_out (operator.not_,
- self.pipe ('git branch')
- .replace ('*', '')
- .replace (' ', '').split ('\n'))
- def get_log (self, branch=''):
- commit = self.get_current_commit (branch)
- self.log[commit] = self.log.get (commit, self.pipe ('git log --pretty=oneline %(branch)s --' % locals ()))
- return self.log[commit]
- def get_current_commit (self, branch=''):
- if not branch:
- branch = 'HEAD'
- return self.pipe ('git rev-parse %(branch)s' % locals ())[:-1]
- def get_commit (self, patch):
- if not self.commits:
- log = self.get_log (self.patched)
- def grok_log_line (s):
- m = re.match ('([^ ]+) Apply.*/([^/]+[.](diff|patch))', s)
- if not m:
- info ('Skipping line:%(s)s:' % locals ())
- return None, None
- return m.group (2), m.group (1)
- self.commits = dict (map (grok_log_line, log.split ('\n')[:-2]))
- return self.commits.get (patch, None)
- def commit (self, message):
- self.system ('git add .')
- self.system ('git add -u .')
- self.system ('''git commit -m '%(message)s' ''' % locals ())
- def is_modified (self):
- return re.sub ('# On branch.*\nnothing to commit \(working directory clean\)\n', '',
- self.pipe ('git status', raise_on_error=False))
- def assert_clean (self):
- dir = self.dir
- pending = self.is_modified ()
- if pending:
- raise GitFailed ('working directory unclean: %(dir)s\n%(pending)s' % locals ())
- def checkout (self, branch):
- if not self.is_on_branch (branch):
- self.system ('git checkout %(branch)s' % locals ())
- def get_current_branch (self):
- return self.pipe ('git symbolic-ref HEAD', raise_on_error=False)[len ('refs/heads/'):-1]
- def is_on_branch (self, branch):
- return branch == self.get_current_branch ()
- def has_branch (self, branch):
- return branch in self.get_branches ()
- def apply_patch (self, branches, patches, patch):
- branch = patch_get_branch (patch)
- info ('Applying patch[%(branch)s]: ' % locals () + patch.name)
- patched = self.get_current_branch ()
- apply_patch (self.dir, patch.file_name)
- base = os.path.basename (patch.file_name)
- self.commit ('Apply %(base)s.' % locals ())
- if options.milestone == '19':
- dependencies = manual_m19_branch_dependencies.get (branch, ['pristine'])
- else:
- dependencies = branch_get_dependencies_with_override (branches, patches, branch)
- if not self.has_branch (branch):
- if not options.topgit:
- base_depend = dependencies[0]
- self.system ('git checkout -b %(branch)s %(base_depend)s' % locals ())
- for dependency in dependencies[1:]:
- self.system ('git rebase %(dependency)s' % locals ())
- else:
- depend_str = ' '.join (dependencies)
- self.system ('tg create %(branch)s %(depend_str)s' % locals ())
- self.system ('git commit -am "topgit branch info %(branch)s"' % locals ())
- else:
- self.checkout (branch)
- log = self.get_log (patched)
- commit = log[:log.index (' ')]
- self.system ('git cherry-pick -x %(commit)s' % locals ())
- def before_ (self):
- self.assert_clean ()
- self.system ('git checkout -b %(scratch)s %(patched)s' % self.__dict__)
- def after_ (self):
- self.system ('git push . %(scratch)s:%(patched)s' % self.__dict__)
- self.checkout (self.patched)
- self.system ('git branch -D %(scratch)s' % self.__dict__)
- def pick_patch (self, patch, commit):
- branch = patch_get_branch (patch)
- info ('Picking patch[%(branch)s]: ' % locals () + patch.name)
- self.system ('git cherry-pick -x %(commit)s' % locals ())
- def add_patch (self, branches, patches, patch):
- if patch.name in self.get_log ():
- info ('patch already applied, skipping: ' + patch.name)
- return
- if file (patch.file_name).read ().find ('\n+++ ') == -1:
- info ('patch is empty, skipping: ' + patch.name)
- return
- commit = None
- branch = patch_get_branch (patch)
- if self.has_branch (branch):
- name = patch.name.replace ('+', '\+')
- m = re.search ('(^|\n)([^\s]+)\s+.*(\s|/)%(name)s' % locals (), self.get_log (branch))
- if m:
- commit = m.group (2)
- self.before_ ()
- if commit:
- self.pick_patch (patch, commit)
- else:
- self.apply_patch (branches, patches, patch)
- self.after_ ()
- def dump_gob (self, branches, patches, branch):
- gob_dir = self.dir + '/.git/refs/gob'
- if not os.path.exists (gob_dir):
- makedirs (gob_dir)
- branch_patches = branches.get (branch, [])
- if not branch_patches:
- return
- owner = ''
- for patch in branch_patches:
- owner = patch.owner
- if owner:
- break
- issues = []
- for patch in branch_patches:
- issues += patch.issues
- issue_string = ', '.join (issues)
- dependencies = filter (lambda x: x != 'pristine', branch_get_dependencies (branches, patches, branch))
- dependencies_string = ', '.join (dependencies)
- commit = self.get_current_commit ()
- gob_file_name = os.path.join (gob_dir, branch)
- info ('Writing: ' + gob_file_name)
- file (gob_file_name, 'w').write ('''%(commit)s
-state: stable
-issue: %(issue_string)s
-owner: %(owner)s
-depend: %(dependencies_string)s
-''' % locals ())
-
-class Patch:
- def __init__ (self, file_name):
- self.file_name = file_name
- if self.file_name:
- self.set_files ()
- self.file_name = None
- def set_files (self):
- self.files = dict (map (lambda x: (x.name, x), map (File, ('\n' + file (self.file_name).read ()).split ('\n---')[1:])))
- def __repr__ (self):
- return '<Patch: ' + str (map (str, self.files.values ())) + ' >'
-
-class Apply_patch (Patch):
- def __init__ (self, s, section, owner, issue):
- Patch.__init__ (self, None)
- self.string = s
- self.section = section
- self.owner = owner
- m = re.search (', (\w\w+)', self.string)
- if m:
- self.owner = m.group (1)
- m = re.match ('^\s*(([^#\s].*).(diff|patch))(.*)', self.string)
- self.name = m.group (1)
- self.base = m.group (2)
- self.issues = map (lambda x: x[0], re.findall ('((i|n)#[0-9]+)', self.string))
- if issue:
- self.issues.append (issue)
- def __repr__ (self):
- return '<Patch: %(file_name)s [%(section)s] %(owner)s %(issues)s>' % self.__dict__
- def set_file_name (self, dir, path):
- self.file_name = find_file (dir, path, self.name)
- self.set_files ()
- return self
-
-class Section:
- def __init__ (self, s):
- self.string = s
- m = re.match ('[[\s]*(.*[^\s])\s*\]', self.string)
- self.tag = m.group (1).replace (' ', '')
- self.name = re.sub ('[^\w].*', '', self.tag)
- m = re.search ('\n(SectionOwner)\s*=>\s*(.*[^\s])', self.string)
- self.owner = ''
- if m:
- self.owner = m.group (2)
- m = re.search ('\n(SectionIssue)\s*=>\s*(.*[^\s])', self.string)
- self.issue = ''
- if m:
- self.issue = m.group (2)
- def __repr__ (self):
- return '<Section: %(tag)s %(owner)s %(issue)s>' % self.__dict__
- def get_patches (self):
- def create_patch (s):
- return Apply_patch (s[0], self.tag, self.owner, self.issue)
- return map (create_patch, re.findall ('\n\s*([^#\s].*.(diff|patch).*)', self.string))
-
-class Apply:
- def __init__ (self, apply_file, workspace, milestone):
- self.workspace = workspace
- self.milestone = milestone
- self.string = file (apply_file).read ()
- first_section = self.string.index ('\n[')
- self.distros_string = self.string[:first_section]
- self.sections_string = self.string[first_section:]
- self.path = re.search ('\nPATCHPATH=(.*)', self.string).group (1).split (':')
- self.distro = {}
- self.master = {}
- self.distros_string = re.sub ('\s*\\\s*\n\s*', '', self.distros_string)
- for distro in re.findall ('\n\s*([^#=\s:]+)\s*:\s*(.*)', self.distros_string):
- lst = distro[1].replace (' ', '').split (',')
- expanded = []
- for i in lst:
- if i in self.master.keys ():
- del self.master[i]
- expanded += self.distro.get (i, [i])
- self.master[distro[0]] = self.distro[distro[0]] = expanded
- # convenience: add sections as distro
- #self.distro.update (dict (map (lambda x: (x.name, x.name), self.get_sections ())))
- def get_section_strings (self):
- return map (lambda x: '[' + x, self.sections_string.split ('\n[')[1:])
- def get_sections (self):
- return map (Section, self.get_section_strings ())
- def get_distro (self, distro_name):
- def section_in_distro (section):
- if distro_name not in self.distro.keys ():
- return distro_name == section.name
- m = re.search ('(\w+).*(<=|<|==|!=|>=|>)%(workspace)s-m([0-9]+)'
- % self.__dict__, section.tag)
- if m:
- name = m.group (1)
- if not name in self.distro[distro_name]:
- return False
- left_milestone = self.milestone
- operator = m.group (2)
- right_milestone = int (m.group (3))
- return eval ('%(left_milestone)s %(operator)s %(right_milestone)s' % locals ())
- else:
- return section.tag in self.distro[distro_name]
- return filter (section_in_distro, self.get_sections ())
-
-class Command:
- def __init__ (self, apply, options):
- self.options = options
- self.apply = apply
- self.patches_ = []
- self.branches_ = {}
- def patches (self):
- '''list patches'''
- print '\n'.join (map (str, self.get_patches ()))
- def sections (self):
- '''list sections'''
- print '\n'.join (map (str, self.get_sections ()))
- def distros (self):
- '''list distros'''
- print '\n'.join (map (str, self.apply.distro.keys ()))
- def masters (self):
- '''list masters'''
- print '\n'.join (map (str, self.apply.master.keys ()))
- def branches (self):
- '''list branches'''
- print '\n'.join (map (str, self.get_branches ().keys ()))
- def get_branches (self):
- if not self.branches_:
- for patch in self.get_patches ():
- branch = patch_get_branch (patch)
- self.branches_[branch] = self.branches_.get (branch, []) + [patch]
- return self.branches_
- def dump_gobs (self):
- '''dump gob files'''
- branches = self.get_branches ()
- patches = self.get_patches ()
- git = Git (self.options.build_dir, self.options.patched)
- for branch in git.get_branches ():
- git.dump_gob (branches, patches, branch)
- def checkout (self):
- '''checkout patched ('master') branch'''
- git = Git (self.options.build_dir, self.options.patched)
- def git_export (self):
- '''export to GIT with branches'''
- git = Git (self.options.build_dir, self.options.patched, clean=True)
- patches = self.get_patches ()
- branches = self.get_branches ()
- for patch in self.get_patches ():
- git.add_patch (branches, patches, patch)
- git.system ('git gc')
- def get_patches_for_distro (self, distro):
- patches = reduce (operator.add, map (lambda section: section.get_patches (), self.get_sections_for_distro (distro)))
- for patch in patches:
- patch.set_file_name (self.options.apply_dir, self.apply.path)
- return patches
- def get_patches (self):
- if not self.patches_:
- self.patches_ = reduce (operator.add, map (self.get_patches_for_distro, self.options.distros))
- return self.patches_
- def get_sections_for_distro (self, distro):
- return self.apply.get_distro (distro)
- def get_sections (self):
- return reduce (operator.add, map (self.get_sections_for_distro, self.options.distros))
- def prepare (self):
- '''prepare ooo-build dir for use with GIT'''
- git = Git (self.options.build_dir, self.options.patched)
- def postpare (self):
- '''update ooo-build dir GIT after patching'''
- git = Git (self.options.build_dir, self.options.patched)
- #svn_revision = get_svn_revision ()
- #git.commit ('Update to svn:r%(svn_revision)s patched.' % locals ())
- git_committish = get_git_committish ()
- git.commit ('Update to ooo-build: %(git_committish)s patched.' % locals ())
- git.system ('git rebase patched')
- def dependencies (self):
- '''list branch dependencies'''
- patches = self.get_patches ()
- branches = self.get_branches ()
- independent = 0
- for branch in branches:
- dependencies = sorted (branch_get_dependencies (branches, patches, branch))
- if dependencies == ['pristine']:
- independent += 1
- print 'BRANCH:', branch, '->', dependencies
- print 'INDEPENDENT:', independent
- def patch_dependencies (self):
- '''list patch dependencies'''
- branches = self.get_branches ()
- patches = self.get_patches ()
- for patch in patches:
- branch = patch_get_branch (patch)
- name = patch.name
- print 'PATCH[%(branch)s]: %(name)s -> ' % locals (), patch_get_dependencies (patches, patch)
- def bump (self):
- '''bump BRANCH TAG-NAME - bump current branch (BRANCH=new upstream)'''
- if len (self.options.arguments) != 2:
- print 'Usage: gob bump BRANCH TAG-NAME'
- print 'BRANCH: new upstream'
- print 'Example:'
- print ' gob bump upstream/dev300-m21 my-21-update'
- sys.exit (2)
- branch = self.options.arguments[0]
- tag = self.options.arguments[1]
- git_dir = self.options.build_dir
- system ('cd %(git_dir)s && gob-bump %(branch)s %(tag)s' % locals ())
- def update (self):
- '''update BRANCH - update current branch (BRANCH='master')'''
- if len (self.options.arguments) != 1:
- print 'Usage: gob update BRANCH'
- print '''BRANCH: the 'master' branch'''
- print 'Example: '
- print ' gob update distro/SUSE'
- sys.exit (2)
- branch = self.options.arguments[0]
- git_dir = self.options.build_dir
- system ('cdi %(git_dir)s && gob-update %(branch)s' % locals ())
- def reset (self):
- '''reset - set GIT tree to pristine and remove all branches'''
- git = Git (self.options.build_dir, self.options.patched)
- git.system ('git checkout -f %(patched)s' % git.__dict__)
- git.system ('git reset --hard pristine')
- git.system ('git clean -df')
- git.system ('rm -rf .git/refs/top-bases')
- cmd = 'xargs git branch -D'
- if options.split:
- git.system ('''git branch | grep -Ev '/|master|patched|pristine|upstream|%(patched)s' | xargs tg delete -f ''' % git.__dict__)
- git.system ('''git branch | grep -Ev '/|master|patched|pristine|upstream|%(patched)s' | xargs git branch -D''' % git.__dict__)
- def patch_depend (self):
- '''patch-depend PATCH-1 PATCH-2 - show overlap between patches'''
- if len (self.options.arguments) != 2:
- print 'Usage: patch-depend PATCH-1 PATCH-2'
- sys.exit (2)
- union = patch_depend (Patch (self.options.arguments[0]), Patch (self.options.arguments[1]))
- if union:
- print union[0], '--', union[1]
- print union[2]
- print '<<<<<<<<<<<<<<<<<<<<<<<<<=========================>>>>>>>>>>>>>>>>>>>>>>>>>'
- print union[3]
- sys.exit (1)
- def statistics (self):
- ### not advertised
- ### show some statistics on dependensies
- patches = self.get_patches ()
- branches = self.get_branches ()
- full_match = 0
- match = 0
- fail = 0
- extra = 0
- independent = 0
- for branch in branches:
- manual_dependencies = sorted (manual_m19_branch_dependencies.get (branch, ['pristine']))
- auto_dependencies = sorted (branch_get_dependencies (branches, patches, branch))
- missing = False
- for m in manual_dependencies:
- if m != 'pristine' and not m in auto_dependencies:
- missing = True
- break
- if missing:
- fail += 1
- print
- print 'BRANCH:', branch
- print 'MANUAL:', manual_dependencies
- print 'AUTO:', auto_dependencies
- #dependencies 'OVERLAPS:', overlaps
- if auto_dependencies == ['pristine']:
- independent += 1
- elif auto_dependencies == manual_dependencies:
- full_match += 1
- elif manual_dependencies == ['pristine']:
- extra += 1
- #print 'BRANCH:', branch
- #print 'EXTRA:', auto_dependencies
- elif not missing:
- match += 1
- print
- print 'FAIL:', fail
- print 'FULL_MATCH:', full_match
- print 'MATCH:', match
- print 'EXTRA:', extra
- print 'INDEPENDENT:', independent
- def move (self):
- move (options.build_dir)
- setup_flat_apply_dir (options.build_dir)
- def move_back (self):
- move_back (options.build_dir)
-
-def get_cli_parser ():
- p = optparse.OptionParser ()
-
- p.usage = '%prog [OPTION]... COMMAND\n\nCommands:\n'
- d = Command.__dict__
- commands = [(k, d[k].__doc__) for k in d.keys ()
- if d[k].__doc__ and type (d[k]) == type (lambda x: x)]
- commands.sort ()
-
- global src_dir
- src_dir = ''
- if not os.path.exists ('patches'):
- src_dir = '../'
- os.environ['PATH'] = src_dir + '/bin:' + os.environ['PATH']
-
- for (command, doc) in commands:
- p.usage += ' %s - %s\n' % (re.sub ('_', '-', command), doc)
-
- def get (option):
- return p.get_option ('--' + option.replace ('-', '_')).default
-
- setup_workspace, setup_milestone = Setup ().get ('CVSTAG', 'dev300-m19').split ('-m')
-
- p.add_option ('--workspace',
- dest='workspace',
- default=setup_workspace,
- metavar='STRING',
- help='set master workspace')
- p.add_option ('--milestone',
- dest='milestone',
- default=setup_milestone,
- metavar='STRING',
- help='set milestone')
- p.add_option ('--distro',
- action='append',
- dest='distros',
- default=[],
- metavar='DISTRO',
- help='add distribution')
- p.add_option ('--build-dir',
- default=src_dir + 'build/' + get ('workspace') + '-m' + get ('milestone'),
- dest='build_dir',
- metavar='DIR',
- help='build (/git) directory')
- p.add_option ('--apply-dir',
- default=src_dir + 'patches/' + get ('workspace'),
- dest='apply_dir',
- metavar='DIR',
- help='directory with APPLY file')
- p.add_option ('--patched',
- default='patched',
- dest='patched',
- metavar='NAME',
- help='''use NAME as patched 'master' branch name''')
- p.add_option ('--dir-branch',
- default=[],
- dest='dir_branch',
- metavar='DIR',
- help='aggregate patches in directory DIR into one branch')
- p.add_option ('--fuzz',
- default='40', # FIXME: 40 = magic sweet spot for dev300-m21
- dest='fuzz',
- metavar='INT',
- help='use FUZZ as fuzz factor for patch overlap')
- p.add_option ('--force', action='store_true', dest='force', default=False)
- p.add_option ('--split', action='store_true', dest='split', default=False)
- p.add_option ('--topgit', action='store_true', dest='topgit', default=False)
- p.add_option ('-v', '--verbose', action='count', dest='verbose', default=1)
- p.add_option ('-q', '--quiet', action='count', dest='quiet', default=0)
- return p
-
-def parse_options ():
- p = get_cli_parser ()
- (options, arguments) = p.parse_args ()
-
- options.command = ''
- options.arguments = []
- if arguments:
- options.command = re.sub ('-', '_', arguments.pop (0))
- options.arguments = arguments
-
- if options.command in Command.__dict__:
- return options
- if options.command:
- sys.stderr.write ('no such command: ' + options.command)
- sys.stderr.write ('\n\n')
- p.print_help ()
- sys.exit (2)
-
-def set_option_defaults (options):
- options.flat_apply_dir = '.git/apply-dir'
- options.verbose -= options.quiet
- options.apply_dir = (options.apply_dir
- .replace ('ooo300', 'dev300')
- .replace ('ooo310', 'dev300')
- .replace ('ooo320', 'dev300')
- )
- if not options.distros:
- options.distros = ['SUSE']
- if not options.dir_branch:
- options.dir_branch = filter (lambda x: x not in ['.', '..', '.svn', '64bit', 'dev300', 'hotfixes'], list_dirs (options.apply_dir + '/..'))
- too_dispersed = ['cairo', 'vba']
- options.dir_branch = filter (lambda x: x not in too_dispersed, options.dir_branch)
- return options
-
-options = None
-def main ():
- global options
- options = set_option_defaults (parse_options ())
- apply_file = options.apply_dir + '/apply'
- apply = Apply (apply_file, options.workspace, options.milestone)
- Command.__dict__[options.command] (Command (apply, options))
-
-if __name__ == '__main__':
- main ()
diff --git a/bin/gob-bump b/bin/gob-bump
deleted file mode 100755
index e2fe1d7e2..000000000
--- a/bin/gob-bump
+++ /dev/null
@@ -1,190 +0,0 @@
-#!/bin/bash
-
-set -e
-
-OPTIONS_KEEPDASHDASH=
-OPTIONS_SPEC="\
-gob-bump [options] <branch> <tag>
---
-apply-dir= use apply-dir
-continue continue from previous run
-debug print everything
-distro= use branches for distro
-limit= bump only LIMIT branches
-
- BRANCH Merge the newest changes from here
- TAG To create the new gob-tag-name tag
-"
-
-. git sh-setup
-
-log() { grep -E "^(CONFLICT|Created commit|Merge)" || :; }
-
-parse_config () {
- while test $# != 0
- do
- case "$1" in
- --apply-dir)
- shift
- apply_dir=$1
- ;;
- --continue)
- continue=t
- ;;
- --debug)
- set -x
- log() { cat; }
- ;;
- --distro)
- shift
- distro=$1
- ;;
- --limit)
- shift
- limit=$1
- ;;
- --)
- shift
- break ;;
- *) usage ;;
- esac
- shift
- done
- args_left=$#
-}
-
-test $# != 0 || usage
-
-parse_config "$@"
-while test $args_left -lt $#; do shift; done
-
-test $# = 2 || usage
-
-new_changes=$1
-new_tag=gob-$2
-
-# setup
-this_branch=$(git symbolic-ref HEAD | cut -b 12-)
-update_branch=gob-bump/work/update
-
-gob_bases=$(git tag | grep '^gob-' || true)
-if test -z "$gob_bases"
-then
- echo 'Cannot find any gob-* tag, aborting' 1>&2
- exit 1
-fi
-
-# second run, tag already set?
-the_tag=$(git tag | grep "^$new_tag\$" || true)
-
-# find the last unpatched version and create a 'reset' commit
-# (actually this is one commit after that, we'll use ${one_after}^ to get
-# the right one)
-one_after=$(git rev-list $this_branch --not $gob_bases | tail -n 1)
-latest_tag=$(git log --pretty=oneline -2 $one_after | tail -1 | sed -e s'/ .*//')
-
-if test -n "$the_tag" -a -z "$continue"
-then
- echo "TAG: \`$the_tag' already exists, specify new tag or use --continue" 1>&2
- exit 1
-elif test -z "$the_tag"
-then
- echo "Bumping: $this_branch"
- # reset to the state of the last gob tag
- if test -n "$one_after"
- then
- temp_branch=gob-bump/tmp/work
- git branch -D $temp_branch 2>&1 | log || :
- git checkout -b $temp_branch $latest_tag
- git reset --soft $this_branch
- git commit -m "Reset tree to state of previous gob tag: $latest_tag." -a 2>&1 | log
- git checkout $this_branch
- git clean -df > /dev/null 2>&1 | log
- if git merge $temp_branch 2>&1 | log
- then
- git branch -d $temp_branch
- else
- echo "Reverting changes failed." 1>&2
- exit 1
- fi
- fi
-
- git checkout $this_branch
- # get the newest changes and tag the tip
- git merge $new_changes 2>&1 | log
- git tag $new_tag
-
- echo "Bumping: $update_branch"
- ## Keep commits in master this_branch, do all work with
- ## $update_branch: cannot seem to get around merging twice.
- git branch -D $update_branch 2>&1 | log || :
- git checkout -b $update_branch
- gob-collapse-commits $latest_tag
- git merge $new_changes 2>&1 | log
-fi
-
-git checkout $update_branch
-# update the branches
-if test -z "$apply_dir"
-then
- branches="$(git branch -r | sed 's#^.*origin/##' | grep -Ev '/|^(HEAD|master|patched|pristine)$')"
-else
- # Allow gob-bump to work in multi-distro GIT: work with subset
- # of [this distro's only] branches
- if test -z "$distro"
- then
- distro=SUSE
- fi
- branches="$(gob --apply-dir=$apply_dir --distro=$distro branches)"
-fi
-
-broken='BROKEN-NONEMPTY'
-bumped=$(git branch | grep bumped/ | sed -e s'@bumped/@@' | tr -d ' ' | tr '\n' '|' | sed -e s'@|$@@')
-if test -n "$bumped" -a -z "$continue"
-then
- echo 'bumped/* branches found, delete them or use --continue' 1>&2
- exit 1
-fi
-
-i=0
-echo "$branches" | grep -Ev "^(${broken}|${bumped})\$" | while read branch
-do
- echo "Bumping: $branch"
- temp_branch=gob-bump/tmp/$branch
- temp_update=gob-bump/tmp/update
- git branch -D $temp_branch $temp_update 2>&1 | log || :
- git checkout -b $temp_branch origin/$branch
- git branch $temp_update $update_branch
- if gob-update $temp_update 2>&1 | log
- then
- # Let's push later...
- # git push
- git checkout $temp_update
- if git merge $temp_branch 2>&1 | log
- then
-			# ...so preserve successfully bumped branches locally
- # git branch -d $temp_branch
- git checkout $temp_branch
- git branch -m $temp_branch bumped/$branch
- git branch -D $temp_update 2>&1 | log
- else
- echo "Merging back failed, please merge manually: $temp_branch" 1>&2
- fi
- else
- echo "Update failed, please update manually: $temp_branch" 1>&2
- fi
- foo=$((i++))
- if test -n "$limit" -a $i -gt 0$limit
- then
- echo Do only $limit
- break
- fi
-done
-
-git checkout $this_branch
-# Do not delete, need for --continue
-# git branch -D $update_branch 2>&1 | log
-
-# Local Variables:
-# sh-basic-offset:8
-# End:
diff --git a/bin/gob-collapse-commits b/bin/gob-collapse-commits
deleted file mode 100755
index 0bc945aa1..000000000
--- a/bin/gob-collapse-commits
+++ /dev/null
@@ -1,36 +0,0 @@
-#! /bin/sh
-
-set -e
-
-if test $# -lt 1
-then
- echo "Usage: gob-collapse-commits <commit> [<commit>]"
- exit 2
-fi
-
-begin=$1
-end=$2
-
-begin_log=$(git show --pretty=raw $begin | head -3 | tr '\n' ' ')
-begin_commit=$(expr "$begin_log" : '.*commit \([^ ]\+\)')
-begin_tree=$(expr "$begin_log" : '.*tree \([^ ]\+\)')
-begin_parent=$(expr "$begin_log" : '.*parent \([^ ]\+\)'||:)
-
-end_log=$(git show --pretty=raw $end | head -3 | tr '\n' ' ')
-end_commit=$(expr "$end_log" : '.*commit \([^ ]\+\)')
-end_tree=$(expr "$end_log" : '.*tree \([^ ]\+\)')
-end_parent=$(expr "$end_log" : '.*parent \([^ ]\+\)')
-
-(echo gob-collapse-commits $begin_commit..$end_commit;
- echo;
- git log -1 $begin_commit | tail -n +5;
- echo '--';
- git log -1 $end_commit | tail -n +5;) \
- | sed -e 's/^ \{4\}//' \
- | git commit-tree $end_tree -p $begin_commit > .git/NEW-HEAD || (rm -f .git/new-HEAD && exit 1)
-git reset --hard $(cat .git/NEW-HEAD)
-rm .git/NEW-HEAD
-
-# Local Variables:
-# sh-basic-offset:8
-# End:
diff --git a/bin/gob-merge b/bin/gob-merge
deleted file mode 100755
index b77b07df7..000000000
--- a/bin/gob-merge
+++ /dev/null
@@ -1,570 +0,0 @@
-#!/bin/bash
-#
-# Copyright (c) 2005 Junio C Hamano
-#
-
-OPTIONS_KEEPDASHDASH=
-OPTIONS_SPEC="\
-gob-merge [options] <remote>...
-gob-merge [options] <msg> HEAD <remote>
---
-summary show a diffstat at the end of the merge
-n,no-summary don't show a diffstat at the end of the merge
-squash create a single commit instead of doing a merge
-commit               perform a commit if the merge succeeds (default)
-ff allow fast forward (default)
-s,strategy= merge strategy to use
-m,message= message to be used for the merge commit (if any)
-b,bases= restrict merge bases to the these bases only
-"
-
-SUBDIRECTORY_OK=Yes
-. git sh-setup
-require_work_tree
-cd_to_toplevel
-
-test -z "$(git ls-files -u)" ||
- die "You are in the middle of a conflicted merge."
-
-LF='
-'
-
-all_strategies='recur recursive octopus resolve stupid ours subtree'
-default_twohead_strategies='recursive'
-default_octopus_strategies='octopus'
-no_fast_forward_strategies='subtree ours'
-no_trivial_strategies='recursive recur subtree ours'
-use_strategies=
-
-allow_fast_forward=t
-allow_trivial_merge=t
-squash= no_commit=
-
-dropsave() {
- rm -f -- "$GIT_DIR/MERGE_HEAD" "$GIT_DIR/MERGE_MSG" \
- "$GIT_DIR/MERGE_STASH" || exit 1
-}
-
-savestate() {
- # Stash away any local modifications.
- git stash create >"$GIT_DIR/MERGE_STASH"
-}
-
-restorestate() {
- if test -f "$GIT_DIR/MERGE_STASH"
- then
- git reset --hard $head >/dev/null
- git stash apply $(cat "$GIT_DIR/MERGE_STASH")
- git update-index --refresh >/dev/null
- fi
-}
-
-finish_up_to_date () {
- case "$squash" in
- t)
- echo "$1 (nothing to squash)" ;;
- '')
- echo "$1" ;;
- esac
- dropsave
-}
-
-squash_message () {
- echo Squashed commit of the following:
- echo
- git log --no-merges ^"$head" $remoteheads
-}
-
-finish () {
- if test '' = "$2"
- then
- rlogm="$GIT_REFLOG_ACTION"
- else
- echo "$2"
- rlogm="$GIT_REFLOG_ACTION: $2"
- fi
- case "$squash" in
- t)
- echo "Squash commit -- not updating HEAD"
- squash_message >"$GIT_DIR/SQUASH_MSG"
- ;;
- '')
- case "$merge_msg" in
- '')
- echo "No merge message -- not updating HEAD"
- ;;
- *)
- git update-ref -m "$rlogm" HEAD "$1" "$head" || exit 1
- git gc --auto
- ;;
- esac
- ;;
- esac
- case "$1" in
- '')
- ;;
- ?*)
- if test "$show_diffstat" = t
- then
- # We want color (if set), but no pager
- GIT_PAGER='' git diff --stat --summary -M "$head" "$1"
- fi
- ;;
- esac
-
- # Run a post-merge hook
- if test -x "$GIT_DIR"/hooks/post-merge
- then
- case "$squash" in
- t)
- "$GIT_DIR"/hooks/post-merge 1
- ;;
- '')
- "$GIT_DIR"/hooks/post-merge 0
- ;;
- esac
- fi
-}
-
-merge_name () {
- remote="$1"
- rh=$(git rev-parse --verify "$remote^0" 2>/dev/null) || return
- bh=$(git show-ref -s --verify "refs/heads/$remote" 2>/dev/null)
- if test "$rh" = "$bh"
- then
- echo "$rh branch '$remote' of ."
- elif truname=$(expr "$remote" : '\(.*\)~[1-9][0-9]*$') &&
- git show-ref -q --verify "refs/heads/$truname" 2>/dev/null
- then
- echo "$rh branch '$truname' (early part) of ."
- elif test "$remote" = "FETCH_HEAD" -a -r "$GIT_DIR/FETCH_HEAD"
- then
- sed -e 's/ not-for-merge / /' -e 1q \
- "$GIT_DIR/FETCH_HEAD"
- else
- echo "$rh commit '$remote'"
- fi
-}
-
-parse_config () {
- while test $# != 0; do
- case "$1" in
- -n|--no-summary)
- show_diffstat=false ;;
- --summary)
- show_diffstat=t ;;
- --squash)
- test "$allow_fast_forward" = t ||
- die "You cannot combine --squash with --no-ff."
- squash=t no_commit=t ;;
- --no-squash)
- squash= no_commit= ;;
- --commit)
- no_commit= ;;
- --no-commit)
- no_commit=t ;;
- --ff)
- allow_fast_forward=t ;;
- --no-ff)
- test "$squash" != t ||
- die "You cannot combine --squash with --no-ff."
- allow_fast_forward=f ;;
- -s|--strategy)
- shift
- case " $all_strategies " in
- *" $1 "*)
- use_strategies="$use_strategies$1 " ;;
- *)
- die "available strategies are: $all_strategies" ;;
- esac
- ;;
- -m|--message)
- shift
- merge_msg="$1"
- have_message=t
- ;;
- -b|--bases)
- shift
- restrict_to_bases="$1"
- have_restricted_bases=t
- ;;
- --)
- shift
- break ;;
- *) usage ;;
- esac
- shift
- done
- args_left=$#
-}
-
-test $# != 0 || usage
-
-have_message=
-
-if branch=$(git symbolic-ref -q HEAD)
-then
- mergeopts=$(git config "branch.${branch#refs/heads/}.mergeoptions")
- if test -n "$mergeopts"
- then
- parse_config $mergeopts --
- fi
-fi
-
-parse_config "$@"
-while test $args_left -lt $#; do shift; done
-
-if test -z "$show_diffstat"; then
- test "$(git config --bool merge.diffstat)" = false && show_diffstat=false
- test -z "$show_diffstat" && show_diffstat=t
-fi
-
-# This could be traditional "merge <msg> HEAD <commit>..." and the
-# way we can tell it is to see if the second token is HEAD, but some
-# people might have misused the interface and used a committish that
-# is the same as HEAD there instead. Traditional format never would
-# have "-m" so it is an additional safety measure to check for it.
-
-if test -z "$have_message" &&
- second_token=$(git rev-parse --verify "$2^0" 2>/dev/null) &&
- head_commit=$(git rev-parse --verify "HEAD" 2>/dev/null) &&
- test "$second_token" = "$head_commit"
-then
- merge_msg="$1"
- shift
- head_arg="$1"
- shift
-elif ! git rev-parse --verify HEAD >/dev/null 2>&1
-then
- # If the merged head is a valid one there is no reason to
- # forbid "git merge" into a branch yet to be born. We do
- # the same for "git pull".
- if test 1 -ne $#
- then
- echo >&2 "Can merge only exactly one commit into empty head"
- exit 1
- fi
-
- rh=$(git rev-parse --verify "$1^0") ||
- die "$1 - not something we can merge"
-
- git update-ref -m "initial pull" HEAD "$rh" "" &&
- git read-tree --reset -u HEAD
- exit
-
-else
- # We are invoked directly as the first-class UI.
- head_arg=HEAD
-
- # All the rest are the commits being merged; prepare
- # the standard merge summary message to be appended to
- # the given message. If remote is invalid we will die
- # later in the common codepath so we discard the error
- # in this loop.
- merge_name=$(for remote
- do
- merge_name "$remote"
- done | git fmt-merge-msg
- )
- merge_msg="${merge_msg:+$merge_msg$LF$LF}$merge_name"
-fi
-head=$(git rev-parse --verify "$head_arg"^0) || usage
-
-# All the rest are remote heads
-test "$#" = 0 && usage ;# we need at least one remote head.
-set_reflog_action "merge $*"
-
-remoteheads=
-for remote
-do
- remotehead=$(git rev-parse --verify "$remote"^0 2>/dev/null) ||
- die "$remote - not something we can merge"
- remoteheads="${remoteheads}$remotehead "
- eval GITHEAD_$remotehead='"$remote"'
- export GITHEAD_$remotehead
-done
-set x $remoteheads ; shift
-
-case "$use_strategies" in
-'')
- case "$#" in
- 1)
- var="`git config --get pull.twohead`"
- if test -n "$var"
- then
- use_strategies="$var"
- else
- use_strategies="$default_twohead_strategies"
- fi ;;
- *)
- var="`git config --get pull.octopus`"
- if test -n "$var"
- then
- use_strategies="$var"
- else
- use_strategies="$default_octopus_strategies"
- fi ;;
- esac
- ;;
-esac
-
-for s in $use_strategies
-do
- for ss in $no_fast_forward_strategies
- do
- case " $s " in
- *" $ss "*)
- allow_fast_forward=f
- break
- ;;
- esac
- done
- for ss in $no_trivial_strategies
- do
- case " $s " in
- *" $ss "*)
- allow_trivial_merge=f
- break
- ;;
- esac
- done
-done
-
-case "$#" in
-1)
- common=$(git merge-base --all $head "$@")
- ;;
-*)
- common=$(git show-branch --merge-base $head "$@")
- ;;
-esac
-
-if test "$have_restricted_bases" = t
-then
- restrict=$(git rev-parse $restrict_to_bases)
- old_common=$common
- common=
- for b in $old_common
- do
- if echo "$restrict" | grep $b >/dev/null 2>/dev/null
- then
- common="$common $b"
- fi
- done
-fi
-
-echo "$head" >"$GIT_DIR/ORIG_HEAD"
-
-case "$allow_fast_forward,$#,$common,$no_commit" in
-?,*,'',*)
- # No common ancestors found. We need a real merge.
- ;;
-?,1,"$1",*)
- # If head can reach all the merge then we are up to date.
- # but first the most common case of merging one remote.
- finish_up_to_date "Already up-to-date."
- exit 0
- ;;
-t,1,"$head",*)
- # Again the most common case of merging one remote.
- echo "Updating $(git rev-parse --short $head)..$(git rev-parse --short $1)"
- git update-index --refresh 2>/dev/null
- msg="Fast forward"
- if test -n "$have_message"
- then
- msg="$msg (no commit created; -m option ignored)"
- fi
- new_head=$(git rev-parse --verify "$1^0") &&
- git read-tree -v -m -u --exclude-per-directory=.gitignore $head "$new_head" &&
- finish "$new_head" "$msg" || exit
- dropsave
- exit 0
- ;;
-?,1,?*"$LF"?*,*)
- # We are not doing octopus and not fast forward. Need a
- # real merge.
- ;;
-?,1,*,)
- # We are not doing octopus, not fast forward, and have only
- # one common.
- git update-index --refresh 2>/dev/null
- case "$allow_trivial_merge" in
- t)
- # See if it is really trivial.
- git var GIT_COMMITTER_IDENT >/dev/null || exit
- echo "Trying really trivial in-index merge..."
- if git read-tree --trivial -m -u -v $common $head "$1" &&
- result_tree=$(git write-tree)
- then
- echo "Wonderful."
- result_commit=$(
- printf '%s\n' "$merge_msg" |
- git commit-tree $result_tree -p HEAD -p "$1"
- ) || exit
- finish "$result_commit" "In-index merge"
- dropsave
- exit 0
- fi
- echo "Nope."
- esac
- ;;
-*)
- # An octopus. If we can reach all the remote we are up to date.
- up_to_date=t
- for remote
- do
- common_one=$(git merge-base --all $head $remote)
- if test "$common_one" != "$remote"
- then
- up_to_date=f
- break
- fi
- done
- if test "$up_to_date" = t
- then
- finish_up_to_date "Already up-to-date. Yeeah!"
- exit 0
- fi
- ;;
-esac
-
-# We are going to make a new commit.
-git var GIT_COMMITTER_IDENT >/dev/null || exit
-
-# At this point, we need a real merge. No matter what strategy
-# we use, it would operate on the index, possibly affecting the
-# working tree, and when resolved cleanly, have the desired tree
-# in the index -- this means that the index must be in sync with
-# the $head commit. The strategies are responsible to ensure this.
-
-case "$use_strategies" in
-?*' '?*)
- # Stash away the local changes so that we can try more than one.
- savestate
- single_strategy=no
- ;;
-*)
- rm -f "$GIT_DIR/MERGE_STASH"
- single_strategy=yes
- ;;
-esac
-
-result_tree= best_cnt=-1 best_strategy= wt_strategy=
-merge_was_ok=
-for strategy in $use_strategies
-do
- test "$wt_strategy" = '' || {
- echo "Rewinding the tree to pristine..."
- restorestate
- }
- case "$single_strategy" in
- no)
- echo "Trying merge strategy $strategy..."
- ;;
- esac
-
- # Remember which strategy left the state in the working tree
- wt_strategy=$strategy
-
- git merge-$strategy $common -- "$head_arg" "$@"
- exit=$?
- if test "$no_commit" = t && test "$exit" = 0
- then
- merge_was_ok=t
- exit=1 ;# pretend it left conflicts.
- fi
-
- test "$exit" = 0 || {
-
- # The backend exits with 1 when conflicts are left to be resolved,
- # with 2 when it does not handle the given merge at all.
-
- if test "$exit" -eq 1
- then
- cnt=`{
- git diff-files --name-only
- git ls-files --unmerged
- } | wc -l`
- if test $best_cnt -le 0 -o $cnt -le $best_cnt
- then
- best_strategy=$strategy
- best_cnt=$cnt
- fi
- fi
- continue
- }
-
- # Automerge succeeded.
- result_tree=$(git write-tree) && break
-done
-
-# If we have a resulting tree, that means the strategy module
-# auto resolved the merge cleanly.
-if test '' != "$result_tree"
-then
- if test "$allow_fast_forward" = "t"
- then
- parents=$(git show-branch --independent "$head" "$@")
- else
- parents=$(git rev-parse "$head" "$@")
- fi
- parents=$(echo "$parents" | sed -e 's/^/-p /')
- result_commit=$(printf '%s\n' "$merge_msg" | git commit-tree $result_tree $parents) || exit
- finish "$result_commit" "Merge made by $wt_strategy."
- dropsave
- exit 0
-fi
-
-# Pick the result from the best strategy and have the user fix it up.
-case "$best_strategy" in
-'')
- restorestate
- case "$use_strategies" in
- ?*' '?*)
- echo >&2 "No merge strategy handled the merge."
- ;;
- *)
- echo >&2 "Merge with strategy $use_strategies failed."
- ;;
- esac
- exit 2
- ;;
-"$wt_strategy")
- # We already have its result in the working tree.
- ;;
-*)
- echo "Rewinding the tree to pristine..."
- restorestate
- echo "Using the $best_strategy to prepare resolving by hand."
- git merge-$best_strategy $common -- "$head_arg" "$@"
- ;;
-esac
-
-if test "$squash" = t
-then
- finish
-else
- for remote
- do
- echo $remote
- done >"$GIT_DIR/MERGE_HEAD"
- printf '%s\n' "$merge_msg" >"$GIT_DIR/MERGE_MSG"
-fi
-
-if test "$merge_was_ok" = t
-then
- echo >&2 \
- "Automatic merge went well; stopped before committing as requested"
- exit 0
-else
- {
- echo '
-Conflicts:
-'
- git ls-files --unmerged |
- sed -e 's/^[^ ]* / /' |
- uniq
- } >>"$GIT_DIR/MERGE_MSG"
- git rerere
- die "Automatic merge failed; fix conflicts and then commit the result."
-fi
diff --git a/bin/gob-update b/bin/gob-update
deleted file mode 100755
index bd8fa31fb..000000000
--- a/bin/gob-update
+++ /dev/null
@@ -1,25 +0,0 @@
-#! /bin/sh
-
-set -e
-
-if test $# -lt 1
-then
- echo "Usage: gob-update <branch>"
- echo "branch The 'master' branch"
- exit 2
-fi
-
-master_branch=$1
-
-gob_bases=$(git tag | grep '^gob-')
-if test -z "$gob_bases"
-then
- echo 'cannot find any gob-* tag, aborting' 1>&2
- exit 1
-fi
-
-gob-merge --bases="$gob_bases" $master_branch
-
-# Local Variables:
-# sh-basic-offset:8
-# End:
diff --git a/bin/id-lang.map b/bin/id-lang.map
deleted file mode 100644
index 7050ad03d..000000000
--- a/bin/id-lang.map
+++ /dev/null
@@ -1,105 +0,0 @@
-# Welcome to the mkid language mapper.
-#
-# The format of each line is:
-#
-# <pattern> <language> [options]
-#
-# Filenames are matched top-to-bottom against the patterns, and the
-# first match is chosen. The special language `IGNORE' means that
-# this file should be ignored by mkid. The options are
-# language-specific command-line options to mkid.
-#
-# If a file name doesn't match any pattern, it is assigned the default
-# language. The default language may be specified here with the
-# special pattern `**', or overridden from the mkid command-line with
-# the `--default-lang=LANG' option.
-#
-# The special pattern `***' means to include the named file that
-# immediately follows. If no file is named, then the default system
-# language mapper file (i.e., this file) is included.
-
-# Default language
-** IGNORE # Although this is listed first,
- # the default language pattern is
- # logically matched last.
-
-# Backup files
-*~ IGNORE
-*.bak IGNORE
-*.bk[0-9] IGNORE
-
-# SCCS files
-[sp].* IGNORE
-
-# C dependencies created by automake
-*/.deps/* IGNORE
-
-*.h C
-*.h.in C
-*.H C++
-*.hh C++
-*.hpp C++
-*.hxx C++
-
-*.l C
-*.lex C
-*.y C
-*.yacc C
-
-*.c C
-*.C C++
-*.cc C++
-*.cpp C++
-*.cxx C++
-
-*.java Java
-
-ChangeLog* Cdoc
-
-*.[sS] asm --comment=;
-*.asm asm --comment=;
-
-# [nt]roff
-*.[0-9] roff
-*.ms roff
-*.me roff
-*.mm roff
-
-*.tex TeX
-*.ltx TeX
-*.texi texinfo
-*.texinfo texinfo
-
-# portable object (i18n)
-*.po po
-
-*.el lisp
-*.elc lisp
-*.lisp lisp
-*.scm lisp
-
-*.am make
-Makefile make
-Makefile.* make
-
-*.doc text
-*.txt text
-
-*.m4 m4
-
-*.pl perl
-*.pm perl
-
-*.gz FILTER gzip -d <%s
-*.Z FILTER gzip -d <%s
-
-######### OOo-specific stuff #######################################
-
-# Treat OOo resource header files as C files
-*.hrc C
-# Treat OOo header files generated from *.idl as C++ files
-*.hdl C++
-# Treat OOo resource files as C files
-*.src C
-# Treat OOo *.mk files as makefiles
-*.mk make
diff --git a/bin/inactivity b/bin/inactivity
deleted file mode 100755
index a95d7427e..000000000
--- a/bin/inactivity
+++ /dev/null
@@ -1,100 +0,0 @@
-#!/usr/bin/env perl
-
-# Show inactive issues
-
-use File::Temp qw(:mktemp);
-#use Date::Calc qw(Delta_Days);
-use Date::Parse;
-
-sub usage()
-{
- print STDERR "inactivity\n";
- print STDERR "Shows inactive issues; sorts by ooo-build vs. up-stream.\n";
- print STDERR "Error: cannot find patches/src680/apply";
- exit 1;
-}
-
-@ooobuilders = ( 'bero', 'flr', 'fridrich_strba', 'geki', 'hub', 'jayant_madavi',
- 'jianhuajiao', 'jodygoldberg', 'jonp', 'jpryor', 'kendy', 'kohei', 'mmeeks',
- 'npower', 'pmladek', 'radekdoulik', 'rene', 'tml' );
-
-sub output(%)
-{
- my (%what) = @_;
-
- printf "%6s %5s (%10s) %-15s %s\n", "IZ#", "Days", "Date ", "Owner", "Summary";
- foreach $bug ( sort { $what{$a}{'date'} cmp $what{$b}{'date'} } keys( %what ) ) {
- my $date = $what{$bug}{'date'};
-
- my ($sec, $min, $hour, $mday, $mon, $year, $wday, $yday, $isdst) = localtime();
- my ($sec2, $min2, $hour2, $mday2, $mon2, $year2, $wday2, $yday2, $isdst2) = localtime( str2time( $date ) );
-
- my $numdays = ( $year - $year2 ) * 365 + $yday - $yday2; # does not count leap years, but... ;-)
-
- printf "%6s: %5d (%10s) %-15s %s\n", $bug, $numdays, $date, $what{$bug}{'owner'}, $what{$bug}{'summary'};
- }
-}
-
-my %tmp_all_issues;
-open APPLY, "patches/src680/apply" || die "Cannot open patches/src680/apply\n";
-while ( <APPLY> ) {
- if ( /i#([0-9]+)/ ) {
- $tmp_all_issues{$1} = 1;
- }
-}
-close APPLY;
-
-my $all_issues = join( ',', keys( %tmp_all_issues ) );
-
-my $cookiejar = mktemp( "/tmp/inactivityXXXXXX" );
-system( "curl -c $cookiejar http://www.openoffice.org/issues/buglist.cgi?issue_id=$all_issues > /dev/null" );
-
-my ($bug_id, $bug_date, $bug_owner, $bug_status, $bug_resolution, $bug_summary);
-my (%ooobuild, %upstream, %resolved);
-open RESULT, "curl -b $cookiejar http://www.openoffice.org/issues/colchange.cgi -d 'rememberedquery=issue_id=$all_issues' -d 'column_changeddate=1&column_owner=1&column_status=1&column_resolution=1&column_summary=1' |";
-while( <RESULT> ) {
- chomp;
- if ( /<a href="show_bug.cgi\?id=[0-9]*">([0-9]*)<\/a>/ ) {
- $bug_id = $1;
- }
- elsif ( /<span class="changeddate">([^<]*)<\/span>/ ) {
- $bug_date = $1;
- }
- elsif ( /<span class="owner">([^<]*)<\/span>/ ) {
- $bug_owner = $1;
- }
- elsif ( /<span class="status">([^<]*)<\/span>/ ) {
- $bug_status = $1;
- }
- elsif ( /<span class="resolution">([^<]*)<\/span>/ ) {
- $bug_resolution = $1;
- }
- elsif ( /<span class="summary">([^<]*)<\/span>/ ) {
- $bug_summary = $1;
- if ( $bug_status eq "RESOLV" || $bug_status eq "VERIFI" || $bug_status eq "CLOSED" ) {
- $resolved{$bug_id} = { "date" => $bug_date, "owner" => $bug_owner, "summary" => $bug_summary };
- next;
- }
- my $is_ooobuilder = 0;
- foreach $ooobuilder ( @ooobuilders ) {
- if ( $ooobuilder eq $bug_owner ) {
- $ooobuild{$bug_id} = { "date" => $bug_date, "owner" => $bug_owner, "summary" => $bug_summary };
- $is_ooobuilder = 1;
- next;
- }
- }
- if ( !$is_ooobuilder ) {
- $upstream{$bug_id} = { "date" => $bug_date, "owner" => $bug_owner, "summary" => $bug_summary };
- }
- }
-}
-close RESULT;
-
-print "\n===== assigned to ooo-builders =====\n\n";
-output( %ooobuild );
-
-print "\n===== assigned to up-stream =====\n\n";
-output( %upstream );
-
-print "\n===== resolved/verified/closed, but still in ooo-build =====\n\n";
-output( %resolved );
diff --git a/bin/install-artwork b/bin/install-artwork
deleted file mode 100755
index 7a0630c54..000000000
--- a/bin/install-artwork
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/bin/sh
-
-. `dirname $0`/setup
-
-# installs artwork, opportunistically from src to dest
-
-src=$1
-dest=$2
-
-echo "do artwork tweaks ..."
-
-if test -d $dest/setup_native; then
- mkdir -p $dest/setup_native/source/win32/nsis
- cp $src/ooobitmap_*.bmp $dest/setup_native/source/win32/nsis/ || echo "no nsis installer images to copy"
- cp $src/ooobanner_*.bmp $dest/setup_native/source/win32/nsis/ || echo "no nsis banner images to copy"
- cp $src/ooosetup_*.ico $dest/setup_native/source/win32/nsis/ || echo "no installer icon to copy"
-else
- echo " skipping setup_native"
-fi
-
-if test -d $dest/instsetoo_native; then
- mkdir -p $dest/instsetoo_native/res
- cp $src/nologoinstall_*.bmp $dest/instsetoo_native/res/ || echo "no installer sidebar images to copy"
- cp $src/banner_*.bmp $dest/instsetoo_native/res/ || echo "no installer banner images to copy"
-else
- echo " skipping instsetoo_native"
-fi
-
-if test -d $dest/svx; then
- mkdir -p $dest/svx/res
- cp $src/open*_*.bmp $dest/svx/res/ || echo "no intro images to copy"
-else
- echo " skipping svx"
-fi
-
-if test -d $dest/default_images; then
- mkdir -p $dest/default_images/svx/res
- mkdir -p $dest/default_images/sw/res
- cp $src/open*_*.png $dest/default_images/svx/res/ || echo "no about images to copy"
- cp $src/*-team.png $dest/default_images/sw/res/ || echo "no team images to copy"
-
- mkdir -p $dest/default_images/res/commandimagelist
- mkdir -p $dest/ooo_custom_images/industrial/res/commandimagelist
- mkdir -p $dest/ooo_custom_images/tango/res/commandimagelist
- cp $src/layout/*.png $dest/default_images/res/commandimagelist
- cp $src/layout/*.png $dest/ooo_custom_images/industrial/res/commandimagelist
- cp $src/layout/*.png $dest/ooo_custom_images/tango/res/commandimagelist
- ${GNUTAR} -C $src/layout --exclude=.svn -cf- default_images ooo_custom_images | ${GNUTAR} -C $dest -xf-
-
- # Extra command images
- cp -f $src/icons/*.png $dest/default_images/res/commandimagelist/ || exit 1
- cp -f $src/icons/tango/*.png $dest/ooo_custom_images/tango/res/commandimagelist/ || exit 1
-
- # Tango icon for the GNOME quickstarter applet..."
- cp -f $src/tango_mainapp_16.png $dest/ooo_custom_images/tango/res/mainapp_16.png || exit 1;
-
- # Copy extra ooo-build specific images into build. All files under default_images
- # are copied into the build recursively.
- (cd $src && {
- for img in `find default_images -type f`; do
- test $? -eq 0 || exit 1
- # TODO: maybe we should check if the file is really an image file ?
- imgdir=`dirname $img` || exit 1
- mkdir -p $dest/$imgdir || exit 1
- cp -f $img $dest/$img || exit 1
- done
- })
-else
- echo " skipping default_images"
-fi
diff --git a/bin/install-dictionaries b/bin/install-dictionaries
deleted file mode 100755
index 1e7ff4d1f..000000000
--- a/bin/install-dictionaries
+++ /dev/null
@@ -1,204 +0,0 @@
-#!/bin/sh
-
-#
-# See setup for user tweakables.
-#
-. ./setup
-
-if test "z$1" != "z"; then
- DICTDIR=$1;
-else
- DICTDIR=$OOINSTDIR/basis$VERSION/share/dictionaries
-fi
-
-DICTDIRBASE=`echo $DICTDIR | sed "s|^$OODESTDIR||"`
-
-# the available dictionaries
-DICTS="hyph_bg_BG
- hyph_cs_CZ
- hyph_da_DK
- hyph_de_CH
- hyph_de_DE
- hyph_el_GR
- hyph_en_AU
- hyph_en_CA
- hyph_en_GB
- hyph_en_NZ
- hyph_en_US
- hyph_es_ES
- hyph_es_MX
- hyph_et_EE
- hyph_fi_FI
- hyph_fr_BE
- hyph_fr_FR
- hyph_ga_IE
- hyph_hu_HU
- hyph_hr_HR
- hyph_id_ID
- hyph_is_IS
- hyph_it_IT
- hyph_lt_LT
- hyph_lv_LV
- hyph_nb_NO
- hyph_nl_NL
- hyph_nn_NO
- hyph_pl_PL
- hyph_pt_BR
- hyph_pt_PT
- hyph_ro_RO
- hyph_ru_RU
- hyph_sk_SK
- hyph_sl
- hyph_sl_SI
- hyph_sv_SE
- hyph_uk
- hyph_uk_UA
- thes_bg_BG
- thes_bg_BG_v2
- thes_cs_CZ
- thes_cs_CZ_v2
- thes_de_DE
- thes_de_DE_v2
- thes_en_US
- thes_en_US_v2
- thes_es_ES
- thes_es_ES_v2
- thes_fr_FR
- thes_fr_FR_v2
- thes_it_IT
- thes_hu_HU
- thes_nb_NO_v2
- thes_pl_PL
- thes_pl_PL_v2
- thes_pt_PT_v2
- thes_ru_RU
- thes_sk_SK
- thes_sk_SK_v2
- af_ZA
- bg_BG
- ca_ES
- cs_CZ
- da_DK
- de_AT
- de_CH
- de_DE_comb
- el_GR
- en_AU
- en_CA
- en_GB
- en_NZ
- en_US
- eo_EO
- es_ES
- es_MX
- et_EE
- fi_FI
- fo_FO
- fr_BE
- fr_FR
- ga_IE
- gd_GB
- gl_ES
- he_IL
- hr_HR
- hu_HU
- it_IT
- ku_TR
- la
- lt_LT
- lv_LV
- mg_MG
- mi_NZ
- ms_MY
- nb_NO
- nl_NL
- nn_NO
- ny_MW
- pl_PL
- pt_BR
- pt_PT
- ro_RO
- ru_RU
- rw_RW
- sk_SK
- sl_SI
- sv_SE
- sw_KE
- tl_PH
- tn_ZA
- uk_UA
- zu_ZA"
-
-install_dic()
-{
- test -f $1/$2 && mv $1/$2 $DICTDIR/ && echo "$DICTDIRBASE/$2" >>$DICTFILELIST && dic_installed=true
-}
-
-install_doc()
-{
- if test -f $1/$2 ; then
- # recode to UTF-8 if needed
- coding=`LC_CTYPE=$3 locale -k LC_CTYPE | grep charmap | sed "s|charmap=\"\(.*\)\"\$|\1|"`
- recode utf8..utf16 <$1/$2 >/dev/null 2>&1 || recode $coding..utf8 $1/$2
- # install
- mv $1/$2 $DOCDIR/dictionaries/$3 || exit 1;
- echo "$DOCDIRBASE/dictionaries/$3/$2" >>$DICTFILELIST
- doc_installed=true
- locale_doc_installed=true
- fi
-}
-
-#file list
-if test "z$OODESTDIR" != "z" ; then
- DICTFILELIST="$BUILDDIR/dictionaries"
- rm -f $DICTFILELIST
-else
- DICTFILELIST=/dev/null
-fi
-
-dic_installed=false
-doc_installed=false
-for DICT in $DICTS ; do
- if test -f $SRCDIR/$DICT.tar.bz2 ; then
- echo "Unpacking $DICT dictionary..."
- mkdir -m 755 -p $DICTDIR
- mkdir -m 755 -p $DOCDIR/dictionaries
- TMPDIR=`mktemp -d /tmp/ooo-build.dict.XXXXXX`
- tar -xjf $SRCDIR/$DICT.tar.bz2 -C $TMPDIR || exit 1;
- chmod 644 $TMPDIR/*.* || exit 1;
-
- # dictionaries
- DICTNAME=$(echo $DICT | sed s/thes/th/)
- install_dic $TMPDIR $DICTNAME.dic
- install_dic $TMPDIR $DICTNAME.dat
- install_dic $TMPDIR $DICTNAME.idx
- install_dic $TMPDIR $DICTNAME.aff
-
- # documentation
- LOCALE=$(echo $DICT | sed 's/hyph_//;s/thes_//' | cut -d_ -f1-2)
- mkdir -m 755 -p $DOCDIR/dictionaries/$LOCALE
- locale_doc_installed=false
- for file in `ls $TMPDIR` ; do
- install_doc $TMPDIR $file $LOCALE
- done
- if $locale_doc_installed ; then
- echo "%dir $DOCDIRBASE/dictionaries/$LOCALE" >>$DICTFILELIST
- fi
- rmdir $TMPDIR || exit 1;
- else
- echo "Warning: $DICT dictionary is not available..."
- fi
-done
-
-if $dic_installed ; then
- echo "%dir $DICTDIRBASE" >>$DICTFILELIST
-fi
-
-if $doc_installed ; then
- echo "%dir $DOCDIRBASE" >>$DICTFILELIST
- echo "%dir $DOCDIRBASE/dictionaries" >>$DICTFILELIST
-fi
-
-#echo "Creating dictionary.lst..."
-#$TOOLSDIR/bin/install-dict.in $DICTDIR
-#test -f $DICTDIR/dictionary.lst && echo "$DICTDIRBASE/dictionary.lst" >>$DICTFILELIST
diff --git a/bin/install-maps b/bin/install-maps
deleted file mode 100755
index 69816e2b2..000000000
--- a/bin/install-maps
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/usr/bin/env perl
-
-# The worst offendors in bloated sym tables:
-
-# NB - all verified to work correctly.
-$maps = {
- 'svx' => 'svx/util',
- 'svt' => 'svtools/util',
- 'svl' => 'svtools/util',
- 'sfx' => 'sfx2/util',
- 'xo' => 'xmloff/util',
- 'tk' => 'toolkit/util',
- 'vcl' => 'vcl/util',
- 'fwk' => 'framework/util',
- 'sb' => 'basic/util',
- 'so' => 'so3/util',
- 'tl' => 'tools/util',
- 'go' => 'goodies/util'
-# TOTEST:
-# 'frm'
-# 'basctl'
-# 'dba'
-# 'dbu'
- };
-
-if (@ARGV < 3) {
- print "install-maps: /path/to/OOInstall /path/to/toolsdir <mangling> --empty\n";
- exit (1);
-}
-
-$dest = shift @ARGV;
-$src = shift @ARGV;
-$mangle = shift @ARGV;
-$empty = shift @ARGV;
-
--d "$src/map" || die "Can't find $src/map: $!";
-
-my $a;
-for $a (keys %$maps) {
- my $fname = "$a-$mangle.map";
- my $s = "$src/map/$fname";
- my $d = "$dest/" . $$maps{$a};
-
- -d $d || die "No directory $d: $!";
- -f $s || die "Can't find $s: $!";
-
- $empty && print "blanking: $d\n";
- !$empty && print "installing: $s $d\n";
-
- my $df;
-
- open ($df, ">$d/$fname") || die "Can't open $d/$fname for writing: $!";
- if ($empty) {
- print $df "FOO {\n global: *;\n};\n";
- } else {
- my $sf;
-
- open ($sf, "$s") || die "Can't find $s: $!";
- while (<$sf>) {
- print $df $_;
- }
- close ($sf);
- }
- close ($df);
-}
diff --git a/bin/install-mono b/bin/install-mono
deleted file mode 100755
index 49bfa67b3..000000000
--- a/bin/install-mono
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/bin/sh
-
-# this script finishes the mono installation
-
-#
-# See setup for user tweakables.
-#
-if test -n "$OO_TOOLSDIR" ; then
- # split build
- . $OO_TOOLSDIR/setup
- TOOLSDIR=$OO_TOOLSDIR/..
- SRCDIR=$OO_TOOLSDIR/../src
- split_build=yes
-else
- . ./setup
- split_build=
-fi
-
-# check if install path redefined
-test -n "$1" && OOINSTDIR="$1"
-
-# continue only when mono has been built
-test -f $OOINSTDIR/basis$VERSION/program/cli_oootypes.dll || exit 0;
-
-
-echo "Finishing the mono installation..."
-
-if ! test -w "$OODESTDIR/" ; then
- echo "Warning: You do not have rights to finish the mono installation"
- echo " => skipping it"
- exit 0;
-fi
-
-# filelist for the GAC stuff
-if test "$split_build" = "yes" ; then
- MONOGACFILELIST=files-mono.txt
-else
- MONOGACFILELIST=$BUILDDIR/mono_gac
-fi
-rm -f $MONOGACFILELIST
-
-# install the pkgconfig file
-# GAC is in /usr/lib even on 64-bit architectures, so the .pc file points to
-# the /usr/lib paths; the .pc file itself must be in the arch dependant path
-# to be found in PKG_CONFIG_PATH, though
-mkdir -p $OODESTDIR${LIBDIRBASE}/pkgconfig/
-sed -e "s|@OOOINSTALLDIRNAME@|$OOOINSTALLDIRNAME|g" \
- $TOOLSDIR/src/mono-ooo.pc.in >${OODESTDIR}${LIBDIRBASE}/pkgconfig/mono-$OOOINSTALLDIRNAME.pc
-test "$split_build" = "yes" && echo ${LIBDIRBASE}/pkgconfig/mono-$OOOINSTALLDIRNAME.pc >>$MONOGACFILELIST
-
-# extra dlls config files
-for dll in cli_uno_bridge.dll ; do
- sed -e "s|@LIBDIRBASE@|$LIBDIRBASE|g" \
- -e "s|@OOOINSTALLDIRNAME@|$OOOINSTALLDIRNAME|g" \
- $TOOLSDIR/src/$dll.config.in > $OOINSTDIR/ure/lib/$dll.config
- if test -n "$OODESTDIR" ; then
- file_list=`grep "^$OOINSTBASE/ure/lib/$dll$" $OODESTDIR/gid* | head -n 1 | cut -d: -f1`
- echo "$OOINSTBASE/ure/lib/$dll.config" >>$file_list
- fi
-done
- perl -pi -e "s,</configuration>, <dllmap dll=\"cli_uno_glue\" target=\"$LIBDIRBASE/$OOOINSTALLDIRNAME/basis-link/ure-link/lib/libcli_uno_glue.so\"/>\n</configuration>," \
- $OOINSTDIR/ure/lib/cli_cppuhelper.config
-
-# install versioned assemblies into the system Global Assembly Cache (GAC)
-# to become part of the assemblies that are available for all applications
-# at runtime.
-if test -z "$OODESTDIR" -o -n "$MONO_GAC_ROOT" ; then
- test -n "$MONO_GAC_ROOT" && gacutil_root="-root $OODESTDIR$MONO_GAC_ROOT" || gacutil_root=
- for dll_name in ure/lib/cli_basetypes \
- ure/lib/cli_cppuhelper \
- ure/lib/cli_uretypes \
- basis$VERSION/program/cli_oootypes \
- ure/lib/cli_uno_bridge \
- ure/lib/cli_ure ; do
- # create .config files with correct names
- test -f $OOINSTDIR/$dll_name.config && mv $OOINSTDIR/$dll_name.config $OOINSTDIR/$dll_name.dll.config
- gacutil $gacutil_root -i $OOINSTDIR/$dll_name.dll -package $OOOINSTALLDIRNAME || exit 0
-
- # the original fixes are not longer needed
- rm -f $OOINSTDIR/$dll_name.dll
- rm -f $OOINSTDIR/$dll_name.dll.config
- rm -f $OOINSTDIR/$dll_name.config
-
- # remove the deleted fixes from the filelist
- if test -n "$OODESTDIR" ; then
- file_list=`grep "^$OOINSTBASE/$dll_name.dll$" $OODESTDIR/gid* | head -n 1 | cut -d: -f1`
- test -z "$file_list" && echo "Error: \"$OOINSTBASE/$dll_name.dll\" has not found in any filelist" && exit 1;
- sed -e "s|^$OOINSTBASE/$dll_name.dll$||" \
- -e "s|^$OOINSTBASE/$dll_name.dll.config$||" \
- -e "s|^$OOINSTBASE/$dll_name.config$||" \
- $file_list >$file_list.mono
- mv $file_list.mono $file_list
- fi
- done
-
- # filelist for the GAC
- if test -n "$OODESTDIR" ; then
- for dir in `find $OODESTDIR/usr/lib/mono -type d -regex ".*/cli_[_a-z]*" -o -regex ".*/$OOOINSTALLDIRNAME"` ; do
- find $dir -type d | sed "s|^$OODESTDIR\(.*\)|%dir \1|" >>$MONOGACFILELIST
- find $dir -type f -o -type l | sed "s|^$OODESTDIR\(.*\)|\1|" >>$MONOGACFILELIST
- done
- sort $MONOGACFILELIST >$MONOGACFILELIST.mono
- mv $MONOGACFILELIST.mono $MONOGACFILELIST
- fi
-fi
diff --git a/bin/install-sdk b/bin/install-sdk
deleted file mode 100755
index b49e7b855..000000000
--- a/bin/install-sdk
+++ /dev/null
@@ -1,101 +0,0 @@
-#!/bin/sh
-
-# this script finishes the mono installation
-
-#
-# See setup for user tweakables.
-#
-if test -n "$OO_TOOLSDIR" ; then
- # split build
- . $OO_TOOLSDIR/setup
- split_build=yes
-else
- . ./setup
- split_build=
-fi
-
-# check if install path redefined
-test -n "$1" && OOINSTDIR="$1"
-
-if test -d $OOINSTDIR/basis$VERSION/sdk ; then
-
- echo "SDK installation clean up"
-
- # bin potential .orig files
- find $OOINSTDIR/basis$VERSION/sdk -name "*.orig" -exec rm -f {} \;
-
- # move some SDK directories to the right place according to FHS
- # note that examples must stay in $OOINSTDIR/basis$VERSION/sdk because there are used
- # relative paths to $OOINSTDIR/basis$VERSION/sdk/setting and it does not work via
- # a symlink
- mkdir -p $PREFIX/include
- mkdir -p $DATADIR/idl
- mkdir -p $DATADIR/$OOOINSTALLDIRNAME/sdk
- mkdir -p $DOCDIR/sdk
- mv $OOINSTDIR/basis$VERSION/sdk/include $PREFIX/include/$OOOINSTALLDIRNAME
- if [ -d $OOINSTDIR/basis$VERSION/sdk/classes ]; then
- mv $OOINSTDIR/basis$VERSION/sdk/classes $DATADIR/$OOOINSTALLDIRNAME/sdk/classes
- fi
- mv $OOINSTDIR/basis$VERSION/sdk/idl $DATADIR/idl/$OOOINSTALLDIRNAME
- mv $OOINSTDIR/basis$VERSION/sdk/docs $DOCDIR/sdk
- mv $OOINSTDIR/basis$VERSION/sdk/share/readme $DOCDIR/sdk/readme
- mv $OOINSTDIR/basis$VERSION/sdk/index.html $DOCDIR/sdk
-
- # compat symlinks
- ln -sf $PREFIXBASE/include/$OOOINSTALLDIRNAME $OOINSTDIR/basis$VERSION/sdk/include
- ln -sf $DATADIRBASE/$OOOINSTALLDIRNAME/sdk/classes $OOINSTDIR/basis$VERSION/sdk/classes
- ln -sf $DATADIRBASE/idl/$OOOINSTALLDIRNAME $OOINSTDIR/basis$VERSION/sdk/idl
- ln -sf $DOCDIRBASE/sdk/docs $OOINSTDIR/basis$VERSION/sdk/
- ln -sf $DOCDIRBASE/sdk/index.html $OOINSTDIR/basis$VERSION/sdk/index.html
- ln -sf $OOINSTBASE/basis$VERSION/sdk/examples $DOCDIR/sdk/examples
-
- # fix file list
- sed -e "s|^\(%dir \)\?$OOINSTBASE/basis$VERSION/sdk/include|\1$PREFIXBASE/include/$OOOINSTALLDIRNAME|" \
- -e "s|^\(%dir \)\?$OOINSTBASE/basis$VERSION/sdk/classes|\1$DATADIRBASE/$OOOINSTALLDIRNAME/sdk/classes|" \
- -e "s|^\(%dir \)\?$OOINSTBASE/basis$VERSION/sdk/idl|\1$DATADIRBASE/idl/$OOOINSTALLDIRNAME|" \
- -e "s|^\(%dir \)\?$OOINSTBASE/basis$VERSION/sdk/docs|\1$DOCDIRBASE/sdk/docs|" \
- -e "s|^\(%dir \)\?$OOINSTBASE/basis$VERSION/sdk/share/readme|\1$DOCDIRBASE/sdk/readme|" \
- -e "s|^$OOINSTBASE/basis$VERSION/sdk/index.html$|$DOCDIRBASE/sdk/index.html|" \
- -e "s|^\(%dir \)\?$OOINSTBASE/basis$VERSION/sdk/share.*$||" \
- -e "/\.orig$/D" \
- -e "/^$/D" \
- $OODESTDIR/gid_Module_Root_SDK | sort -u \
- >$OODESTDIR/gid_Module_Root_SDK.new
- mv $OODESTDIR/gid_Module_Root_SDK.new $OODESTDIR/gid_Module_Root_SDK
- #
- echo "%dir $DATADIRBASE/$OOOINSTALLDIRNAME/sdk" >>$OODESTDIR/gid_Module_Root_SDK
- echo "%dir $DATADIRBASE/$OOOINSTALLDIRNAME" >>$OODESTDIR/gid_Module_Root_SDK
- echo "%dir $DATADIRBASE/idl" >>$OODESTDIR/gid_Module_Root_SDK
- echo "%dir $DOCDIRBASE/sdk/docs" >>$OODESTDIR/gid_Module_Root_SDK
- echo "%dir $DOCDIRBASE/sdk" >>$OODESTDIR/gid_Module_Root_SDK
- echo "%dir $DOCDIRBASE" >>$OODESTDIR/gid_Module_Root_SDK
- echo "$OOINSTBASE/basis$VERSION/sdk/include" >>$OODESTDIR/gid_Module_Root_SDK
- echo "$OOINSTBASE/basis$VERSION/sdk/classes" >>$OODESTDIR/gid_Module_Root_SDK
- echo "$OOINSTBASE/basis$VERSION/sdk/idl" >>$OODESTDIR/gid_Module_Root_SDK
- echo "$OOINSTBASE/basis$VERSION/sdk/docs" >>$OODESTDIR/gid_Module_Root_SDK
- echo "$OOINSTBASE/basis$VERSION/sdk/index.html" >>$OODESTDIR/gid_Module_Root_SDK
- echo "$DOCDIRBASE/sdk/examples" >>$OODESTDIR/gid_Module_Root_SDK
-
- # generate default profiles
- for file in setsdkenv_unix.csh setsdkenv_unix.sh ; do
- sed -e "s,@OO_SDK_NAME@,openoffice.org${VERSION}_sdk," \
- -e "s,@OO_SDK_HOME@,$OOINSTBASE/basis$VERSION/sdk," \
- -e "s,@OFFICE_HOME@,$OOINSTBASE," \
- -e "s,@OFFICE_BASE_HOME@,$OOINSTBASE/basis$VERSION," \
- -e "s,@OO_SDK_URE_HOME@,$OOINSTBASE/basis$VERSION/ure-link," \
- -e "s,@OO_SDK_MAKE_HOME@,/usr/bin," \
- -e "s,@OO_SDK_ZIP_HOME@,/usr/bin," \
- -e "s,@OO_SDK_CPP_HOME@,/usr/bin," \
- -e "s,@OO_SDK_CC_55_OR_HIGHER@,," \
- -e "s,@OO_SDK_JAVA_HOME@,$JAVA_HOME," \
- -e "s,@OO_SDK_OUTPUT_DIR@,\$HOME," \
- -e "s,@SDK_AUTO_DEPLOYMENT@,NO," \
- $OOINSTDIR/basis$VERSION/sdk/$file.in \
- > $OOINSTDIR/basis$VERSION/sdk/$file
- chmod 755 $OOINSTDIR/basis$VERSION/sdk/$file
- echo $OOINSTBASE/basis$VERSION/sdk/$file >>$OODESTDIR/gid_Module_Root_SDK
- done
-
- # FIXME: I rather set this file to be non-world-writttable for now, i#64812
- chmod go-w $OOINSTDIR/basis$VERSION/sdk/settings/component.uno.map
-fi
diff --git a/bin/java-set-classpath.in b/bin/java-set-classpath.in
deleted file mode 100755
index 539e8592f..000000000
--- a/bin/java-set-classpath.in
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/bin/sh
-
-#*****************************************************************************
-#
-# java-set-classpath - Utility to update the default CLASSPATH for OpenOffice.org
-#
-# Initial version by: Petr Mladek <pmladek@suse.cz>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2, as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-#
-#*****************************************************************************
-
-if test "z$1" = "z" ; then
- echo "Update the default CLASSPATH for OpenOffice.org"
- echo ""
- echo "Usage: $0 [dir|jar]..."
- echo ""
- echo "The utility updates the OpenOffice.org system setting. It adds or removes"
- echo "the given directories and jar-files to or from the default CLASSPATH"
- echo "depending on if they are available on the system or not."
- echo ""
- echo "Parameters:"
- echo " dir - absolute path to a directory"
- echo " jar - absolute path to a jar-file"
- exit 0;
-fi
-
-JVM_CONFIG_FILE=@OOINSTBASE@/basis-link/program/fundamentalbasisrc
-
-for path in $@ ; do
- if test "z${path%%/*}" != "z" ; then
- echo "Warning: the path "$path" is not absolute and will be ignored"
- continue
- fi
- if test -e $path ; then
- # the file exist
- grep "URE_MORE_JAVA_CLASSPATH_URLS.*file:/*$path\([[:space:]].*\)\?$" $JVM_CONFIG_FILE >/dev/null && continue
- # it is not registered
- TMP_FILE=`mktemp /tmp/ooset-java-class.XXXXXXXXXX` || exit 1
- sed -e "s|^\(.*URE_MORE_JAVA_CLASSPATH_URLS.*\)$|\1 file://$path|" $JVM_CONFIG_FILE >$TMP_FILE
- mv -f $TMP_FILE $JVM_CONFIG_FILE
- chmod 644 $JVM_CONFIG_FILE
- else
- # the file does not exist, remove it from the configuration
- TMP_FILE=`mktemp /tmp/ooset-java-class.XXXXXXXXXX` || exit 1;
- sed -e "s|^\(.*URE_MORE_JAVA_CLASSPATH_URLS.*\)file:/*$path\([[:space:]].*\)\?$|\1\2|" \
- -e "s/\(URE_MORE_JAVA_CLASSPATH_URLS=\)[[:space:]]\+/\1/" \
- -e "/^.*URE_MORE_JAVA_CLASSPATH_URLS/s/[[:space:]]\+/ /g" \
- -e "/^.*URE_MORE_JAVA_CLASSPATH_URLS/s/[[:space:]]*$//" $JVM_CONFIG_FILE >$TMP_FILE
- mv -f $TMP_FILE $JVM_CONFIG_FILE
- chmod 644 $JVM_CONFIG_FILE
- fi
-done
diff --git a/bin/l10n-status b/bin/l10n-status
deleted file mode 100755
index 8f947d30e..000000000
--- a/bin/l10n-status
+++ /dev/null
@@ -1,307 +0,0 @@
-#!/usr/bin/env perl
-
-use strict;
-
-# globals
-my $warn = 0;
-my $tag;
-my $htmldir;
-
-sub read_dir($@)
-{
- my $dir_path = shift;
- my $type = shift;
- my $misc_dir;
- my @result;
-
- opendir ($misc_dir, $dir_path) || return;
- my $name;
- while ($name = readdir ($misc_dir)) {
- $name =~ /^\./ && next;
- $name =~ /unxlngi4\.pro/ && next; # testing
-
- $type && $type eq 'd' && !-d "$dir_path/$name" && next;
-
- push @result, $name;
- }
- closedir ($misc_dir);
-
- return @result;
-}
-
-sub warn_msg($$)
-{
- my ($stats, $message) = @_;
-
- $warn || return;
-
- chomp ($message);
- defined $stats->{'projects'}->{'warnings'}->{$message} && return;
- $stats->{'projects'}->{'warnings'}->{$message} = 1;
-}
-
-sub check_translations($$@)
-{
- my $stats = shift;
- my $base = shift;
- my $fname = shift;
- my %done_langs = ();
-
-# print "Check translations $fname ($base)\n";
-
- $stats->{'lang_data'}->{'german'}++;
-
- while (my $lang = lc shift) {
- my $trans = shift;
-
- if (defined $done_langs{$lang}) {
- warn_msg ($stats, "Duplicate strings in $fname for $lang");
- next;
- }
- $done_langs{$lang} = 1;
-
- if (!defined $stats->{'projects'}->{'languages'}->{$lang}) {
- $stats->{'projects'}->{'languages'}->{$lang} = 1;
- }
-
- if ($trans ne $base) {
- $stats->{'lang_data'}->{$lang}++;
- }
- }
-}
-
-sub scan_src($$)
-{
- my ($stats, $fname) = @_;
- my $InFile;
- my $base;
- my @transls = ();
-
- open $InFile, $fname || die "Can't open $fname: $!";
- while (<$InFile>) {
- s/\/\/.*//;
-
- if (/text\s*=\s*"(.*)"\s*;/i) {
- $base && check_translations ($stats, $base, $fname, @transls);
- $base = $1;
- @transls = ();
-
- } elsif (/text\s*\[\s*(\S+)\s*\]\s*=\s*"(.*)"\s*;/i) {
- push @transls, $1, $2;
- }
- }
- close $InFile;
-
- $base && check_translations ($stats, $fname, $base, @transls);
-}
-
-sub scan_dirs($$);
-sub scan_dirs($$)
-{
- my $stats = shift;
- my $dir_path = shift;
- my @sub_dirs = read_dir ($dir_path);
-
- @sub_dirs || return;
-
- for my $name (@sub_dirs) {
- $name =~ /\.src$/ && scan_src ($stats, "$dir_path/$name");
- -d "$dir_path/$name" && scan_dirs ($stats, "$dir_path/$name");
- }
-}
-
-sub get_percent($$)
-{
- my ($a, $b) = @_;
- return sprintf("%2.2f%%", $a * 100.0 / $b);
-}
-
-sub dump_summary($$$@)
-{
- my $languages = shift;
- my $projects = shift;
- my $file = shift;
- my $details_fn = shift;
-
- print $file <<"EOF";
-<html>
-<h1>OpenOffice.org translation percentages:</h1>
-<table>
-<tr><th>Language</th> <th>status</th> <th>strings</th></tr>
-EOF
-
- for my $lang (sort { $languages->{$b} <=> $languages->{$a} } keys %{$languages}) {
-
- my $total = $languages->{'german'};
- my $count = $languages->{$lang};
- my $percent = get_percent ($count, $total);
-
- my $green = sprintf ("%2d", 100 * $count/$total);
- my $red = sprintf ("%2d", 100 - 100 * $count/$total);
-
- print $file <<"EOF";
-<tr> <td>$lang</td> <td>
-<img src="bar-g.gif" height="15" width="$green"><img src="bar-r.gif" height="15" width="$red"> - $percent
-</td>
-<td>$count</td> </tr>
-EOF
-
- if ($details_fn) {
- $details_fn->($languages, $projects, $file, $lang);
- }
- }
- print $file "</table>\n";
- print $file "</html>\n";
-
- close ($file);
-}
-
-sub valid_keys($)
-{
- my $projects = shift;
- my @result = ();
-
- for my $proj (keys %{$projects}) {
- if (defined $projects->{$proj}->{'lang_data'}->{'german'}) {
- push @result, $proj;
- }
- }
- return @result;
-}
-
-sub dump_details($$$$)
-{
- my ($languages, $projects, $file, $lang) = @_;
-
- print $file <<"EOF";
-<tr><td></td><td colspan="2">
-<table>
-EOF
- for my $project (sort { $projects->{$b}->{'percentages'}->{$lang} <=>
- $projects->{$a}->{'percentages'}->{$lang} } valid_keys ($projects) ) {
-
- my $lang_data = $projects->{$project}->{'lang_data'};
- my $total = $lang_data->{'german'};
- $total || next;
- my $count = $lang_data->{$lang};
- $count = 0 if !$count;
- my $percent = get_percent ($count, $total);
-
- my $green = sprintf ("%2d", 100 * $count/$total);
- my $red = sprintf ("%2d", 100 -100 * $count/$total);
-
- print $file <<"EOF";
-<tr>
-<td><a href="http://ooo.ximian.com/lxr/source/$project">$project</a></td>
-<td><img src="bar-g.gif" height="15" width="$green"><img src="bar-r.gif" height="15" width="$red"> - $percent</td>
-<td>$count / $total</td>
-</tr>
-EOF
- }
-
- print $file <<"EOF";
-</table></td></tr>
-EOF
-}
-
-# ------------------ main ------------------
-
-my $src_path;
-if (!($src_path = shift @ARGV)) {
- print "Syntax error: l10n-status <path-to-source-root>\n";
-}
-
-for my $arg (@ARGV) {
- $warn = 1 if $arg =~ /--warn/;
- if ($arg =~ /--html=(.*)/) {
- $htmldir = $1;
- }
- elsif ($arg =~ /--tag=(.*)/) {
- $tag = $1;
- }
-}
-
-$htmldir && !$tag && die "Have to supply a tag for html output eg. mws_srx645";
-
-my %languages = ( 'german' => '1' );
-my %warnings;
-my %projects = ( 'languages' => \%languages,
- 'warnings' => \%warnings );
-
-my @project_list = read_dir ($src_path, 'd');
-
-@project_list || die "Can't read directory: $src_path";
-
-my $i = 0;
-
-for my $project (@project_list) {
- print "Project '$project'\n";
-
- my %lang_data;
- my %percentages;
- my %stats = ( 'name' => $project,
- 'projects' => \%projects,
- 'lang_data' => \%lang_data,
- 'percentages' => \%percentages);
- $projects{$project} = \%stats;
-
- scan_dirs (\%stats, "$src_path/$project");
-
- if ($ENV{'OOO_DEBUG'} && $i++ > 10) {
- print "Development hack: quit after 10\n";
- last;
- }
-}
-
-if ($warn) {
- print "Warnings:\n";
- for my $warning (sort keys %{$projects{'warnings'}}) {
- print "$warning\n";
- }
-}
-
-print "Languages: ";
-for my $lang (sort keys %languages) {
- print "$lang ";
-}
-print "\n";
-
-for my $lang (sort keys %languages) {
- print "Language: $lang ";
- my $total = 0;
- my $count = 0;
-
- for my $project (sort keys %projects) {
- my $lang_data = $projects{$project}->{'lang_data'};
- my $prj_total = $lang_data->{'german'};
- $prj_total || next;
- my $prj_count = $lang_data->{$lang};
- $prj_count = 0 if !$prj_count;
-
- $projects{$project}->{'percentages'}->{$lang} = $prj_count/$prj_total;
-
-# my $prj_percent = get_percent ($prj_count, $prj_total);
-# print " $project : $prj_count / $prj_total: $prj_percent%\n";
-
- $total += $prj_total;
- $count += $prj_count;
- }
-
- $languages{'german'} = $total;
- $languages{$lang} += $count;
- my $percent = get_percent ($count, $total);
-
- print "total : $count / $total: $percent%\n";
-}
-
-if ($htmldir) {
- my $file;
-
- open $file, ">$htmldir/$tag.html" || die "Can't open $htmldir/$tag.html: $!";
- dump_summary (\%languages, \%projects, $file);
- close $file;
-
- open $file, ">$htmldir/$tag-details.html" || die "Can't open $htmldir/$tag-details.html: $!";
- dump_summary (\%languages, \%projects, $file, \&dump_details);
- close $file;
-}
diff --git a/bin/lo-commit-stat b/bin/lo-commit-stat
deleted file mode 100755
index c0b1b0c05..000000000
--- a/bin/lo-commit-stat
+++ /dev/null
@@ -1,303 +0,0 @@
-#!/usr/bin/perl
- eval 'exec /usr/bin/perl -S $0 ${1+"$@"}'
- if $running_under_some_shell;
-#!/usr/bin/perl
-
-use strict;
-
-my $main_repo="build";
-my @pieces=("artwork", "base", "bootstrap", "calc", "components",
- "extensions", "extras", "filters", "help", "impress",
- "libs-core", "libs-extern", "libs-extern-sys", "libs-gui",
- "l10n", "postprocess", "sdk", "testing", "ure", "writer");
-
-sub search_bugs($$$$)
-{
- my ($pdata, $piece, $commit_id, $line) = @_;
-
- my $bug = "";
- my $bug_orig;
- while (defined $bug) {
-
- # match fdo#123, rhz#123, i#123
- if ( $line =~ m/(\w*\#+\d+)/ ) {
- $bug_orig = $1;
- $bug = $1;
- # match #i123#
- } elsif ( $line =~ m/(\#i)(\d+)(\#)/ ) {
- $bug_orig = $1 . $2 . $3;
- $bug = "i#$2";
- } else {
- $bug = undef;
- next;
- }
-
-# print " found $bug\n";
- # remove bug number from the comment; it will be added later a standardized way
- $bug_orig =~ s/\#/\\#/;
- $line =~ s/[Rr]esolves:\s*$bug_orig\s*//;
- $line =~ s/\s*-\s*$bug_orig\s*//;
- $line =~ s/\(?$bug_orig\)?[:,]?\s*//;
-
- # bnc# is prefered over n# for novell bugs
- $bug =~ s/^n\#/bnc#/;
- # save the bug number
- %{$pdata->{$piece}{$commit_id}{'bugs'}} = () if (! defined %{$pdata->{$piece}{$commit_id}{'bugs'}});
- $pdata->{$piece}{$commit_id}{'bugs'}{$bug} = 1;
- $pdata->{$piece}{$commit_id}{'flags'}{'bug'} = 1;
- }
-
- return $line;
-}
-
-sub standardize_summary($)
-{
- my $line = shift;
-
- $line =~ s/^\s*//;
- $line =~ s/\s*$//;
-
- # lower first letter
- $line =~ m/(^.)/;
- my $first_char = lc($1);
- $line =~ s/^./$first_char/;
-
- # FIXME: remove do at the end of line
- # remove bug numbers
- return $line;
-}
-
-sub load_git_log($$$$)
-{
- my ($pdata, $repo_dir, $piece, $pgit_args) = @_;
-
- my $cmd = "cd $repo_dir && git log " . join ' ', @{$pgit_args};
- my $commit_id;
- my $summary;
-
- print STDERR "Analyzing log from the git repo: $piece...\n";
-
- open (GIT, "$cmd 2>&1|") || die "Can't run $cmd: $!";
- %{$pdata->{$piece}} = ();
-
- while (my $line = <GIT>) {
- chomp $line;
-
- if ( $line =~ m/^commit ([0-9a-z]{20})/ ) {
- $commit_id = "$1";
- $summary=undef;
- %{$pdata->{$piece}{"$commit_id"}} = ();
- %{$pdata->{$piece}{"$commit_id"}{'flags'}} = ();
- next;
- }
-
- if ( $line =~ /^Author:\s*([^\<]*)\<([^\>]*)>/ ) {
- # get rid of extra empty spaces;
- my $name = "$1";
- $name =~ s/\s+$//;
- die "Error: Author already defined for the commit {$commit_id}\n" if defined ($pdata->{$piece}{$commit_id}{'author'});
- %{$pdata->{$piece}{$commit_id}{'author'}} = ();
- $pdata->{$piece}{$commit_id}{'author'}{'name'} = "$name";
- $pdata->{$piece}{$commit_id}{'author'}{'email'} = "$2";
- next;
- }
-
- if ( $line =~ /^Date:\s+/ ) {
- # ignore date line
- next;
- }
-
- if ( $line =~ /^\s*$/ ) {
- # ignore empty line
- next;
- }
-
- $line = search_bugs($pdata, $piece, $commit_id, $line);
- # FIXME: need to be implemeted
-# search_keywords($pdata, $line);
-
- unless (defined $pdata->{$piece}{$commit_id}{'summary'}) {
- $summary = standardize_summary($line);
- $pdata->{$piece}{$commit_id}{'summary'} = $summary;
- }
- }
-
- close GIT;
-}
-
-sub get_repo_name($)
-{
- my $repo_dir = shift;
-
- open (GIT_CONFIG, "$repo_dir/.git/config") ||
- die "can't open \"$$repo_dir/.git/config\" for reading: $!\n";
-
- while (my $line = <GIT_CONFIG>) {
- chomp $line;
-
- if ( $line =~ /^\s*url\s*=\s*(\S+)$/ ) {
- my $repo_name = "$1";
- $repo_name = s/.*\///g;
- return "$repo_name";
- }
- }
- die "Error: can't find repo name in \"$$repo_dir/.git/config\"\n";
-}
-
-sub load_data($$$$)
-{
- my ($pdata, $top_dir, $piece, $pgit_args) = @_;
-
- if (defined $piece) {
- my $piece_dir;
- if ("$piece" eq "$main_repo") {
- $piece_dir = "$top_dir";
- } else {
- $piece_dir = "$top_dir/clone/$piece";
- }
- load_git_log($pdata, $piece_dir, $piece, $pgit_args);
- } else {
- load_git_log($pdata, $top_dir, $main_repo, $pgit_args);
- foreach my $piece (@pieces) {
- load_git_log($pdata, "$top_dir/clone/$piece", $piece, $pgit_args);
- }
- }
-}
-
-sub print_summary_in_stat($$$$$$$)
-{
- my ($summary, $pprint_filters, $ppiece_title, $pflags, $pbugs, $pauthors, $prefix) = @_;
-
- return if ( $summary eq "" );
-
- # do we want to print this summary at all?
- my $print;
- if (%{$pprint_filters}) {
- foreach my $flag (keys %{$pprint_filters}) {
- $print = 1 if (defined $pflags->{$flag});
- }
- } else {
- $print = 1;
- }
- return unless (defined $print);
-
- # print piece title if not done yet
- if (defined ${$ppiece_title}) {
- print "${$ppiece_title}\n";
- ${$ppiece_title} = undef;
- }
-
- # finally print the summary line
- my $bugs = "";
- if ( %{$pbugs} ) {
- $bugs = " (" . join (", ", keys %{$pbugs}) . ")";
- }
-
- my $authors = "";
- if ( %{$pauthors} ) {
- $authors = " [" . join (", ", keys %{$pauthors}) . "]";
- }
-
- print $prefix . $summary . $bugs . $authors . "\n";
-}
-
-sub print_weekly_stat($$)
-{
- my ($pdata, $pprint_filters) = @_;
-
- foreach my $piece ( sort { $a cmp $b } keys %{$pdata}) {
- # check if this peice has any entries at all
- my $piece_title = "+ $piece";
- if ( %{$pdata->{$piece}} ) {
- my $old_summary="";
- my %authors = ();
- my %bugs = ();
- my %flags = ();
- foreach my $id ( sort { $pdata->{$piece}{$a}{'summary'} cmp $pdata->{$piece}{$b}{'summary'} } keys %{$pdata->{$piece}}) {
- my $summary = $pdata->{$piece}{$id}{'summary'};
- if ($summary ne $old_summary) {
- print_summary_in_stat($old_summary, $pprint_filters, \$piece_title, \%flags, \%bugs, \%authors, " + ");
- $old_summary = $summary;
- %authors = ();
- %bugs = ();
- %flags = ();
- }
- # collect bug numbers
- if (defined $pdata->{$piece}{$id}{'bugs'}) {
- foreach my $bug (keys %{$pdata->{$piece}{$id}{'bugs'}}) {
- $bugs{$bug} = 1;
- }
- }
- # collect author names
- my $author = $pdata->{$piece}{$id}{'author'}{'name'};
- $authors{$author} = 1;
- # collect flags
- foreach my $flag ( keys %{$pdata->{$piece}{$id}{'flags'}} ) {
- $flags{$flag} = 1;
- }
- }
- print_summary_in_stat($old_summary, $pprint_filters, \$piece_title, \%flags, \%bugs, \%authors, " + ");
- }
- }
-}
-
-########################################################################
-# help
-
-sub usage()
-{
- print "This script generates LO git commit summary\n\n" .
-
- "Usage: lo-commit-stat [--help] [--no-pieces] [--piece=<piece>] topdir [git_log_param...]\n\n" .
-
- "Options:\n" .
- " --help print this help\n" .
- " --no-pieces read changes just from the main repository, ignore other cloned repos\n" .
- " --piece=<piece> summarize just chnages from the given piece\n" .
- " --bugs print just bug fixes\n" .
- " topdir directory with the libreoffice/bootstrap clone; the piece repos\n" .
- " must be cloned in the main-repo-root/clone/<piece> subdirectories\n" .
- " git_log_param extra parameters passed to the git log command to define\n" .
- " the area of interest , e.g. --after=\"2010-09-27\" or\n" .
- " TAG..HEAD";
-}
-
-
-#######################################################################
-#######################################################################
-# MAIN
-#######################################################################
-#######################################################################
-
-
-my $piece;
-my $top_dir;
-my @git_args;
-my %data;
-my %print_filters = ();
-
-foreach my $arg (@ARGV) {
- if ($arg eq '--help') {
- usage();
- exit;
- } elsif ($arg eq '--no-pieces') {
- $piece = "bootstrap";
- } elsif ($arg =~ m/--piece=(.*)/) {
- $piece = $1;
- } elsif ($arg eq '--bugs') {
- $print_filters{'bug'} = 1;
- } else {
- if (! defined $top_dir) {
- $top_dir=$arg;
- } else {
- push @git_args, $arg;
- }
- }
-}
-
-(defined $top_dir) || die "Error: top direcotry is not defined\n";
-(-d "$top_dir") || die "Error: not a directory: $top_dir\n";
-(-f "$top_dir/.git/config") || die "Error: can't find $top_dir/.git/config\n";
-
-load_data(\%data, $top_dir,$piece, \@git_args);
-print_weekly_stat(\%data, \%print_filters);
diff --git a/bin/lo-git-commit-summary b/bin/lo-git-commit-summary
deleted file mode 100755
index 8bbe64a70..000000000
--- a/bin/lo-git-commit-summary
+++ /dev/null
@@ -1,136 +0,0 @@
-#!/bin/sh
-
-usage()
-{
- echo "This script generates LO git commit summary"
- echo
- echo "Usage: ${0##*/} [--help] [--piece=<piece>] bootstrap-dir summary.log [git_log_param...]"
- echo
- echo "Options:"
- echo
- echo " --help print this help"
- echo " --piece=<piece> summarize just chnages from the given piece"
- echo " bootstrap-dir directory with the libreoffice/bootstrap clone; the other piece repos"
- echo " must be cloned in the bootstrap-dir/clone/<piece> subdirectories"
- echo " summary.log output file"
- echo " git_log_param extra parameters passed to the git log command to define"
- echo " the area of interest , e.g. --after=\"2010-09-27\" or"
- echo " TAG..HEAD"
-}
-
-only_piece=
-bootstrap_dir=
-summary_log=
-export git_log_params=
-while test -n "$1" ; do
- case "$1" in
- --help)
- usage
- exit 0;
- ;;
- --piece=*)
- only_piece=`echo $1 | sed "s|--piece=||"`
- ;;
- *)
- if test -z "$bootstrap_dir" ; then
- bootstrap_dir="$1"
- elif test -z "$summary_log" ; then
- summary_log="$1";
- else
- git_log_params="$git_log_params $1"
- fi
- esac
- shift
-done
-
-if test -z "$bootstrap_dir" ; then
- echo "Error: please, define directory with the cloned libreoffice/bootstrap repo"
- exit 1;
-fi
-
-if test -z "$summary_log" ; then
- echo "Error: please, define the output file"
- exit 1;
-fi
-
-if test ! -d "$bootstrap_dir/.git" -o ! -f "$bootstrap_dir/configure.in" -o ! -d "$bootstrap_dir/clone" ; then
- echo "Error: invalid bootstrap dir: \"$bootstrap_dir\""
- echo " it must point to a clone of libreoffice/bootstrap git repo"
- exit 1;
-fi
-
-
-
-get_git_log()
-{
- git_dir="$1"
- temp_dir="$2"
- repo="$3"
-
- echo "Getting log from the repo: $repo"
- cd $git_dir
-# git log --pretty='format:%an: %s%n' $git_log_params >$temp_dir/$repo.log
- git log --pretty='format: + %s [%an]' $git_log_params | sort -u >$temp_dir/$repo.log
- cd - >/dev/null 2>&1
-}
-
-add_to_paterns()
-{
- sed -e "s|\#|\\\#|" \
- -e "s|\[|\\\[|" \
- -e "s|\]|\\\]|" $1 >>"$2"
-}
-
-temp_dir=`mktemp -d /tmp/lo-git-commit-summary-XXXXXX`
-
-# get logs
-if test -z "$only_piece" -o "$only_piece" = "bootstrap" ; then
- get_git_log "$bootstrap_dir" "$temp_dir" "bootstrap"
-fi
-
-if test "$only_piece" != "bootstrap" ; then
- if test -z "$only_piece" ; then
- pieces_list=`ls $bootstrap_dir/clone`
- else
- pieces_list="$only_piece"
- if ! test -d "$bootstrap_dir/clone/$only_piece" ; then
- echo "Error: wrong piece; directory does not exist: $bootstrap_dir/clone/$only_piece"
- exit 1;
- fi
- fi
- for piece in $pieces_list ; do
- test -d "$bootstrap_dir/clone/$piece" || continue;
- get_git_log "$bootstrap_dir/clone/$piece" "$temp_dir" "$piece"
- done
-fi
-
-# special sections
-echo "Looking for build bits..."
-
-grep -h -i " build" $temp_dir/*.log | sort -u >"$temp_dir/build.special"
-#sed -e "s|\(.*\)|'\1'|" "$temp_dir/build.special" >"$temp_dir/special.filter"
-add_to_paterns "$temp_dir/build.special" "$temp_dir/special.patterns"
-
-echo "Looking for global changes..."
-
-cat $temp_dir/*.log | sort | uniq -c | grep -v " 1" | cut -c 9- | grep -v -f "$temp_dir/special.patterns" >"$temp_dir/common.special"
-#sed -e "s|\(.*\)|'\1'|" "$temp_dir/common.special" >>"$temp_dir/special.filter"
-add_to_paterns "$temp_dir/common.special" "$temp_dir/special.patterns"
-
-echo "Generating summary..."
-
-rm -rf "$summary_log"
-
-echo "+ common:" >>"$summary_log"
-cat "$temp_dir/common.special" >>"$summary_log"
-
-for log in `ls $temp_dir/*.log` ; do
- piece=`echo $log | sed "s|$temp_dir/\(.*\)\.log\$|\1|"`
- echo "+ $piece:" >>"$summary_log"
- grep -v -f "$temp_dir/special.patterns" "$log" >>"$summary_log"
-done
-
-echo "+ build bits:" >>"$summary_log"
-cat "$temp_dir/build.special" >>"$summary_log"
-
-rm -rf "$temp_dir"
diff --git a/bin/lo-pack-sources b/bin/lo-pack-sources
deleted file mode 100755
index 25c1037b6..000000000
--- a/bin/lo-pack-sources
+++ /dev/null
@@ -1,613 +0,0 @@
-#!/usr/bin/perl
- eval 'exec /usr/bin/perl -S $0 ${1+"$@"}'
- if $running_under_some_shell;
-#!/usr/bin/perl
-
-use strict;
-use File::Copy;
-use File::Temp qw/ tempfile tempdir /;
-
-# get libreoffice-build version from the given libreoffice-build sources
-sub get_config_version($)
-{
- my ($lo_build_dir) = @_;
- my $version;
-
- open (CONFIGURE, "$lo_build_dir/configure.in") ||
- die "can't open \"$lo_build_dir/configure.in\" for reading: $!\n";
-
- while (my $line = <CONFIGURE>) {
- chomp $line;
-
- if ($line =~ /AC_INIT\s*\(\s*libreoffice-build\s*,\s*([\w\.]*)\)/) {
- $version="$1";
- }
- }
- close (CONFIGURE);
- return $version;
-}
-
-# set libreoffice-build version in the given libreoffice-build sources
-sub set_config_version($$)
-{
- my ($lo_build_dir, $version) = @_;
- my $configure = "$lo_build_dir/configure.in";
-
- open (CONFIGURE, "$configure") ||
- die "can't open \"$configure\" for reading: $!\n";
-
- my ( $tmp_fh, $tmp_filename ) = tempfile( "$configure.XXXXXX" );
- if ( !defined $tmp_fh ) {
- close (CONFIGURE);
- die "Error: can't create temporary file: \"$configure.XXXXXX\"\n";
- }
-
- while (my $line = <CONFIGURE>) {
- chomp $line;
-
- if ($line =~ /^(\s*AC_INIT\s*\(\s*libreoffice-build\s*,\s*)([\w\.]*)(\s*\)\s*)$/) {
- print ${tmp_fh} "$1$version$3\n";
- } else {
- print ${tmp_fh} "$line\n";
- }
- }
- close (CONFIGURE);
- close (${tmp_fh});
-
- # preserve permissions on target file by applying them to temp file
- my ( $mode, $uid, $gid ) = ( stat($configure) )[ 2, 4, 5 ];
- $mode = $mode & 07777;
-
- chmod $mode, $tmp_filename;
- chown $uid, $gid, $tmp_filename;
-
- rename ($tmp_filename, $configure) ||
- die "Can't rename \"$tmp_filename\" to \"$configure\": $!\n";
-}
-
-# increment the version for a test build:
-# + add 'a' if the version ended with a number
-# + bump the letter otherwise
-sub inc_test_version($)
-{
- my ($version) = @_;
-
- my $lastchar = chop $version;
- my $new_version;
-
- if ($lastchar =~ /\d/) {
- return "$version" . "$lastchar" . "a";
- } elsif ($lastchar =~ /\w/) {
- # select next letter alhabeticaly: a->b, b->c, ...
- $lastchar =~ tr/0a-zA-Z/a-zA-Z0/;
- return "$version" . "$lastchar";
- } else {
- die "Can't generate test version from \"$version$lastchar\n";
- }
-}
-
-sub get_release_version($$$$)
-{
- my ($config_version, $state_config_version, $state_release_version, $inc_version) = @_;
- my $release_version;
-
- if (defined $state_config_version &&
- defined $state_release_version &&
- "$state_config_version" eq "$config_version") {
- $release_version = "$state_release_version";
- } else {
- $release_version = "$config_version";
- }
-
- if ( defined $inc_version ) {
- $release_version = inc_test_version($release_version);
- }
-
- return $release_version;
-}
-
-
-sub generate_lo_build_blacklist($)
-{
- my ($blacklist) = @_;
-
- # FIXME: crazy hacks to copy libreoffice-build without too big and useless subdirectories and to show a progress
- open (BLACKLIST, ">$blacklist") || die "Can't open $blacklist: $!\n";
-
- # IMPORTANT: Do not remove .git directories because "git log" is called during "make dist"
- print BLACKLIST "*/.svn\n";
- print BLACKLIST "rawbuild/*\n";
- print BLACKLIST "build/*\n";
- print BLACKLIST "clone/*\n";
- print BLACKLIST "src/libreoffice-*.tar.bz2\n";
- print BLACKLIST "src/????????????????????????????????-*\n";
-
- close BLACKLIST;
-}
-
-sub generate_lo_piece_blacklist($)
-{
- my ($blacklist) = @_;
-
- # FIXME: crazy hacks to copy libreoffice-build without too big and useless subdirectories and to show a progress
- open (BLACKLIST, ">$blacklist") || die "Can't open $blacklist: $!\n";
-
- # IMPORTANT: Do not remove .git directories because "git log" is called during "make dist"
- print BLACKLIST ".git\n";
- print BLACKLIST ".gitignore\n";
-
- close BLACKLIST;
-}
-
-# copy files to temp dir; showing a progress; using a black list
-sub copy_dir_filter_and_show_progress($$$)
-{
- my ($source_dir, $target_dir, $blacklist) = @_;
-
- print "Copying \"$source_dir\" -> \"$target_dir\"...";
- # FIXME: crazy hacks to copy dir with a blacklist and showing a progress
- system ("tar -cf - -C $source_dir -X $blacklist \.\/ | " .
- "tar -xf - -C $target_dir --checkpoint 2>&1 | " .
- "awk '{ ORS=\"\" ; if (++nlines\%50 == 0) printf \".\"; fflush() }'") &&
- die "Error: copying failed: $!\n";
- print "\n";
-}
-
-# copy the local version of libreoffice-build into a tmp directory
-# omit the .svn subdirectories
-sub copy_lo_build_to_tempdir($)
-{
- my ($lo_build_dir) = @_;
-
- my $tempdir = tempdir( 'libreoffice-XXXXXX', DIR => File::Spec->tmpdir );
- my $blacklist = "$tempdir/libreoffice-build.copy.blacklist";
-
- generate_lo_build_blacklist($blacklist);
- copy_dir_filter_and_show_progress($lo_build_dir, $tempdir, $blacklist);
-
- unlink $blacklist;
-
- return $tempdir;
-}
-
-# copy the piece lo source directory into a tmp directory
-# omit the .git subdirectories
-sub copy_lo_piece_to_tempdir($$$)
-{
- my ($piece_dir, $piece, $piece_tarball_name) = @_;
-
-
- my $tempdir = tempdir( 'libreoffice-XXXXXX', DIR => File::Spec->tmpdir );
- my $blacklist = "$tempdir/libreoffice-$piece.copy.blacklist";
-
- mkdir "$tempdir/$piece_tarball_name" || die "Can't create directory \"$tempdir/$piece_tarball_name\": $!\n";
-
- generate_lo_piece_blacklist($blacklist);
- copy_dir_filter_and_show_progress("$piece_dir", "$tempdir/$piece_tarball_name", $blacklist);
-
- unlink $blacklist;
-
- return $tempdir;
-}
-
-sub generate_lo_piece_changelog($$$)
-{
- my ($lo_piece_clone, $lo_piece_release_dir, $piece) = @_;
- print "Generating changelog for $piece...\n";
- print "1:$lo_piece_clone, 2:$lo_piece_release_dir, 3:$piece\n";
- # FIXME: crazy hacks to copy dir with a blacklist and showing a progress
- system ("cd $lo_piece_clone && " .
- "git log --date=short --pretty='format:@%cd %an <%ae> [%H]%n%n%s%n%n%e%b' | " .
- " sed -e 's|^\([^@]\)|\t\1|' -e 's|^@||' >$lo_piece_release_dir/ChangeLog" ) &&
- die "Error: generating failed: $!\n";
-}
-
-sub run_autoreconf($$)
-{
- my ($dir, $piece) = @_;
-
- print "Running autoreconf for $piece...\n";
- system ("cd $dir && " .
- "autoreconf -f -i && " .
- "cd - >/dev/null 2>&1") && die "Error: autoreconf failed: $!\n";
-}
-
-sub release_lo_build($)
-{
- my ($lo_build_dir) = @_;
-
- print "Creating libreoffice-build tarball...\n";
- system ("cd $lo_build_dir && " .
- "./autogen.sh --with-distro=GoOoLinux && " .
- "make dist && " .
- "cd - >/dev/null 2>&1") && die "Error: releasing failed: $!\n";
-}
-
-sub release_lo_piece($$)
-{
- my ($lo_piece_dir, $piece_tarball_name) = @_;
-
- print "Creating $piece_tarball_name.tar.bz2...";
- system ("cd $lo_piece_dir && " .
- "tar -cjf $piece_tarball_name.tar.bz2 --checkpoint * 2>&1 | awk '{ ORS=\"\" ; if (++nlines\%50 == 0) printf \".\"; fflush() }' && " .
- "cd - >/dev/null 2>&1") && die "Error: releasing failed: $!\n";
- print "\n";
-}
-
-sub generate_md5($$$)
-{
- my ($dir, $tarball_name, $tarball_suffix) = @_;
-
- print "Generating MD5...\n";
- system ("cd $dir && " .
- "md5sum $tarball_name$tarball_suffix >$tarball_name$tarball_suffix.md5 && " .
- "cd - >/dev/null 2>&1") && die "Error: releasing failed: $!\n";
-}
-
-sub default_releases_state_file($)
-{
- my ($lo_build_dir) = @_;
-
- my $rootdir = $lo_build_dir;
- $rootdir =~ s/^(.*?)\/?[^\/]+\/?$/$1/;
-
- my $releases_state_file;
- if ($rootdir) {
- $releases_state_file = "$rootdir/.releases";
- } else {
- $releases_state_file = ".releases";
- }
-
- return "$releases_state_file";
-}
-
-sub default_releases_archive($)
-{
- my ($lo_build_dir) = @_;
-
- my $rootdir = $lo_build_dir;
- $rootdir =~ s/^(.*?)\/?[^\/]+\/?$/$1/;
-
- my $releases_archive_dir;
- if ($rootdir) {
- $releases_archive_dir = "$rootdir/archive";
- } else {
- $releases_archive_dir = "archive";
- }
-
- return "$releases_archive_dir";
-}
-
-sub load_releases_state($)
-{
- my ($releases_state_file) = @_;
-
- my $state_config_version;
- my $state_release_version;
-
- if (open (STATE, "$releases_state_file")) {
-
- while (my $line = <STATE>) {
- chomp $line;
-
- if ($line =~ /^\s*configure_version\s*=\s*(.*)$/) {
- $state_config_version = "$1";
- } elsif ($line =~ /^\s*released_version\s*=\s*(.*)$/) {
- $state_release_version = "$1";
- }
- }
- close (STATE);
- }
-
- return $state_config_version, $state_release_version;
-}
-
-sub save_releases_state($$$)
-{
- my ($releases_state_file, $config_version, $release_version) = @_;
-
- open (STATE, '>', "$releases_state_file") ||
- die "Can't open \"$releases_state_file\" for writing: $!\n";
-
- print STATE "configure_version = $config_version\n";
- print STATE "released_version = $release_version\n";
-
- close (STATE);
-}
-
-sub remove_tempdir($)
-{
- my ($tempdir) = @_;
-
-# print "Cleaning $tempdir...\n";
- system ("rm -rf $tempdir") && die "Error: rm failed: $!\n";
-}
-
-sub save_file($$$)
-{
- my ($source_dir, $target_dir, $file) = @_;
-
- unless ( -d "$target_dir" ) {
- mkdir ("$target_dir") ||
- die "Can't create directory \"$target_dir\": $!\n";
- }
-
- if ( -f "$target_dir/$file" ) {
- print "Warning: $target_dir/$file already exists and will be replaced\n";
- unlink ("$target_dir/$file");
- }
-
- print "Copying into archive: $target_dir/$file ...\n";
- copy ("$source_dir/$file", "$target_dir/$file") ||
- die "Error: Can't copy $source_dir/$file to $target_dir/$file: $!\n";
-}
-
-sub check_if_file_exists($$)
-{
- my ($file, $force) = @_;
-
- if (-e $file) {
- if (defined $force) {
- print "Warning: $file already exists and will be replaced!\n";
- } else {
- die "Error: $file alrady exists.\n".
- " Use --force if you want to replace it.\n";
- }
- }
-}
-
-sub check_if_tarball_already_released($$$)
-{
- my ($tarball, $releases_archive_dir, $force) = @_;
-
- check_if_file_exists($tarball, $force);
- check_if_file_exists("$releases_archive_dir/$tarball", $force) if (defined $releases_archive_dir);
-}
-
-sub check_if_already_released($$$$$$)
-{
- my ($lo_build_tarball_name, $p_piece_tarball_name, $releases_archive_dir, $force, $pack_lo_build, $pack_lo_pieces) = @_;
-
- check_if_tarball_already_released("$lo_build_tarball_name.tar.gz", $releases_archive_dir, $force) if ($pack_lo_build);
-
- if ($pack_lo_pieces) {
- foreach my $tarball_name ( values %{$p_piece_tarball_name} ) {
- check_if_tarball_already_released("$tarball_name.tar.bz2", $releases_archive_dir, $force);
- }
- }
-}
-
-sub prepare_lo_build_tarball($$$$)
-{
- my ($lo_build_dir, $release_version, $md5, $lo_build_tarball_name) = @_;
-
- my $temp_dir = copy_lo_build_to_tempdir("$lo_build_dir");
- set_config_version($temp_dir, $release_version);
- release_lo_build($temp_dir);
- generate_md5($temp_dir, $lo_build_tarball_name, ".tar.gz") if (defined $md5);
-
- return $temp_dir;
-}
-
-sub prepare_lo_piece_tarball($$$$$)
-{
- my ($piece_dir, $release_version, $md5, $piece, $piece_tarball_name) = @_;
-
- my $temp_dir = copy_lo_piece_to_tempdir($piece_dir, $piece, $piece_tarball_name);
- generate_lo_piece_changelog($piece_dir, "$temp_dir/$piece_tarball_name", $piece);
- run_autoreconf("$temp_dir/$piece_tarball_name", $piece) if ($piece eq 'bootstrap');
- release_lo_piece($temp_dir, $piece_tarball_name);
- generate_md5($temp_dir, $piece_tarball_name, ".tar.bz2") if (defined $md5);
-
- return $temp_dir;
-}
-
-sub move_tarball_to_final_location($$$$)
-{
- my ($temp_dir, $releases_archive_dir, $md5, $tarball) = @_;
-
- save_file($temp_dir, ".", "$tarball");
- save_file($temp_dir, ".", "$tarball.md5") if (defined $md5);
- if ( defined $releases_archive_dir ) {
- save_file($temp_dir, $releases_archive_dir, "$tarball");
- save_file($temp_dir, $releases_archive_dir, "$tarball.md5") if (defined $md5);
- }
-
- remove_tempdir($temp_dir);
-}
-
-
-sub generate_tarballs($$$$$$$$$)
-{
- my ($source_dir, $releases_archive_dir, $release_version, $md5, $lo_build_tarball_name, $p_piece_tarball_name, $pack_lo_build, $pack_lo_pieces, $is_lo_build_dir) = @_;
-
- if ($pack_lo_build) {
- my $temp_dir=prepare_lo_build_tarball($source_dir, $release_version, $md5, $lo_build_tarball_name);
- move_tarball_to_final_location($temp_dir, $releases_archive_dir, $md5, "$lo_build_tarball_name.tar.gz");
- }
-
- if ($pack_lo_pieces) {
- my $piece_dir = $source_dir;
- foreach my $piece ( keys %{$p_piece_tarball_name} ) {
- print "\n--- Generating $piece ---\n";
- $piece_dir = "$source_dir/clone/$piece" if ($is_lo_build_dir);
- my $temp_dir=prepare_lo_piece_tarball($piece_dir, $release_version, $md5, $piece, $p_piece_tarball_name->{$piece});
- move_tarball_to_final_location($temp_dir, $releases_archive_dir, $md5, "$p_piece_tarball_name->{$piece}.tar.bz2");
- }
- }
-
-}
-
-
-sub usage()
-{
- print "This tool helps to pack the libreoffice-build and piece sources\n\n" .
-
- "Usage:\n".
- "\tlo-pack-sources [--help] [--force] [--version]\n" .
- "\t [--set-version=<ver>] [--inc-version] [--md5]\n" .
- "\t [--no-lo-build] [--no-lo-pieces] [--piece=<piece>]\n" .
- "\t [dir]\n\n" .
-
- "Options:\n\n" .
- "\t--help: print this help\n" .
- "\t--force: replace an already existing release of the same version\n" .
- "\t--version: just print version of the released package but do not\n" .
- "\t\trelease it; the version is affected by the other options, e.g.\n" .
- "\t\t--inc-version\n" .
- "\t--set-version: force another version\n" .
- "\t--inc-version: increment the latest version; there is a difference\n" .
- "\t\tbetween test release (default) and final (not yet supported)\n" .
- "\t--md5: generate md5 sum for the final tarball\n" .
- "\t--no-lo-build: do not pack the libreoffice-build tarball\n" .
- "\t--no-lo-pieces: do not pack the libreoffice-build piece sources\n" .
- "\t--piece=<piece>: pack just a single piece, .e.g. \"writer\",\n" .
- "\tdir: path of the source directory, either libreoffice-build or piece\n";
-}
-
-
-my $ptf;
-my $md5;
-my $inc_version;
-my $config_version;
-my $set_version;
-my $get_config_version;
-my $release_version;
-my $pack_lo_build=1;
-my $pack_lo_pieces=1;
-my $source_dir;
-my $releases_archive_dir;
-my $releases_state_file;
-my $state_config_version;
-my $state_release_version;
-my $lo_build_tarball_name;
-my $lo_build_tempdir;
-my $force;
-my $verbose=1;
-my $is_lo_build_dir=0;
-my @pieces=("artwork", "base", "bootstrap", "calc", "components",
- "extensions", "extras", "filters", "help", "impress",
- "libs-core", "libs-extern", "libs-extern-sys", "libs-gui",
- "l10n", "postprocess", "sdk", "testing", "ure", "writer");
-my %piece_tarball_name;
-
-###################
-# Arguments parsing
-###################
-
-for my $arg (@ARGV) {
- if ($arg eq '--help' || $arg eq '-h') {
- usage;
- exit 0;
- } elsif ($arg eq '--force') {
- $force=1;
- } elsif ($arg eq '--md5') {
- $md5=1;
- } elsif ($arg eq '--version') {
- $get_config_version=1;
- $verbose = undef;
- } elsif ($arg eq '--inc-version') {
- $inc_version=1
- } elsif ($arg =~ m/--set-version=(.*)/) {
- $set_version="$1";
- } elsif ($arg eq '--no-lo-build') {
- $pack_lo_build=0;
- } elsif ($arg eq '--no-lo-pieces') {
- $pack_lo_pieces=0;
- } elsif ($arg =~ m/--piece=(.*)/) {
- # process just one piece and do not pack libreoffice-build
- @pieces=();
- push @pieces, "$1";
- $pack_lo_build=0;
- } elsif ($arg =~ /^-/ ) {
- die "Error: unknown option: $arg\n";
- } else {
- if (! defined $source_dir) {
- $source_dir = $arg;
- } else {
- die "Error: Too many arguments $arg\n";
- }
- }
-}
-
-###################
-# Initial checks
-###################
-
-unless ( defined $source_dir ) {
- die "Error: undefined source directory, try --help\n";
-}
-
-unless ( -d "$source_dir" ) {
- die "Error: is not a directory: $source_dir\n";
-}
-
-# check if it is a valid libreoffice-build directory
-$is_lo_build_dir=1 if (-f "$source_dir/autogen.sh" && -f "$source_dir/bin/build-ooo" && -f "$source_dir/bin/piece/build-generic");
-
-# all tarballs are generated from the libreoffice-build directory
-if (@pieces > 1 && $is_lo_build_dir == 0 ) {
- die "Error: \"$source_dir\" is not a valid libreoffice-build directory\n";
-}
-
-# just a single piece tarball can be generated from piece directory; version must be explicitely set in this case
-if (@pieces == 1 && $is_lo_build_dir == 0 && ! defined $set_version ) {
- die "Error: version must be set using the --set-version=<version> option\n" unless (defined $set_version);
-}
-
-if (defined $set_version && defined $inc_version) {
- die "Error: --set-version and --inc-version options can't be used together\n";
-}
-
-
-###################
-# Main logic
-###################
-
-
-print "Source: $source_dir\n" if ($verbose);
-
-if ($is_lo_build_dir) {
- # detect some paths
- $releases_state_file = default_releases_state_file($source_dir) unless (defined $releases_state_file);
- $releases_archive_dir = default_releases_archive($source_dir) unless (defined $releases_archive_dir);
-
- # detect versions
- $config_version = get_config_version($source_dir);
- ($state_config_version, $state_release_version) = load_releases_state($releases_state_file);
- if (defined $set_version) {
- $release_version = "$set_version";
- } else {
- $release_version = get_release_version($config_version, $state_config_version, $state_release_version, $inc_version);
- }
-} else {
- # must be single piece release with predefined version
- $release_version = "$set_version";
-}
-
-# define tarball names
-$lo_build_tarball_name = "libreoffice-build-$release_version";
-foreach my $piece (@pieces) {
- $piece_tarball_name{$piece} = "libreoffice-$piece-$release_version";
-}
-
-print "Default version : $config_version\n" if ($verbose && defined $config_version);
-print "Last used version : $state_release_version\n" if ($verbose && defined $state_release_version);
-print "New version : $release_version\n" if ($verbose);
-
-# do the real job
-if ( defined $get_config_version ) {
- print "$release_version\n";
-} else {
- check_if_already_released($lo_build_tarball_name, \%piece_tarball_name, $releases_archive_dir, $force, $pack_lo_build, $pack_lo_pieces);
-
- # give a chance to stop the process
- print ("\nWaiting 3 seconds...\n");
- sleep 3;
-
- generate_tarballs($source_dir, $releases_archive_dir, $release_version, $md5, $lo_build_tarball_name, \%piece_tarball_name, $pack_lo_build, $pack_lo_pieces, $is_lo_build_dir);
-
- if ( defined $releases_state_file ) {
- save_releases_state($releases_state_file, $config_version, $release_version);
- }
-}
diff --git a/bin/lo-set-version b/bin/lo-set-version
deleted file mode 100755
index 3cb711615..000000000
--- a/bin/lo-set-version
+++ /dev/null
@@ -1,224 +0,0 @@
-#!/usr/bin/perl
- eval 'exec /usr/bin/perl -S $0 ${1+"$@"}'
- if $running_under_some_shell;
-#!/usr/bin/perl
-
-use strict;
-use File::Copy;
-use File::Temp qw/ tempfile tempdir /;
-
-sub find_item($$)
-{
- my ($file, $item) = @_;
-
- open (FILE, "$file") ||
- die "can't open \"$file\" for reading: $!\n";
-
-# print "Looking for: $item\n";
- my $found_value;
- while ((my $line = <FILE>) && (!defined $found_value)) {
- chomp $line;
-
-# print "$line\n";
- if ($line =~ /^(.*$item[\s\:=])\s*([^\s\)]*)/) {
-# print " found: $2\n";
- $found_value="$2";
- }
- }
- close (FILE);
-
- return $found_value;
-}
-
-sub replace_items($$)
-{
- my ($file, $p_items) = @_;
-
- open (FILE, "$file") ||
- die "can't open \"$file\" for reading: $!\n";
-
- my ( $tmp_fh, $tmp_filename ) = tempfile( "$file.XXXXXX" );
- if ( !defined $tmp_fh ) {
- close (FILE);
- die "Error: can't create temporary file: \"$file.XXXXXX\"\n";
- }
-
- while (my $line = <FILE>) {
- chomp $line;
-
- foreach my $item (keys %{$p_items})
- {
- if ($line =~ /^(.*$item[\s\:=]\s*)[^\s\)]*(.*)$/) {
- $line = $1 . %{$p_items}->{$item} . $2;
- }
- }
-
- print ${tmp_fh} "$line\n";
- }
- close (FILE);
- close (${tmp_fh});
-
- # preserve permissions on target file by applying them to temp file
- my ( $mode, $uid, $gid ) = ( stat($file) )[ 2, 4, 5 ];
- $mode = $mode & 07777;
-
- chmod $mode, $tmp_filename;
- chown $uid, $gid, $tmp_filename;
-
- rename ($tmp_filename, $file) ||
- die "Can't rename \"$tmp_filename\" to \"$file\": $!\n";
-}
-
-sub inc_ver($)
-{
- my ($ver) = @_;
- my $last_ver;
-
- if ($ver =~ /.*(\d+)$/) {
- $last_ver = $1;
- } else {
- die "Error: do not know how to increment: $ver\n";
- }
-
- my $new_last_ver = $last_ver + 1;
- $ver =~ s/$last_ver$/$new_last_ver/;
-
- return $ver;
-}
-
-sub get_versions($$)
-{
- my ($p_config_files, $p_versions) = @_;
-
- %{$p_versions}->{'minor.mk'}{'BUILD'} = find_item(%{$p_config_files}->{'minor.mk'}, "BUILD");
- %{$p_versions}->{'openoffice.lst'}{'OOOPACKAGEVERSION'} = find_item (%{$p_config_files}->{'openoffice.lst'}, "OOOPACKAGEVERSION");
- %{$p_versions}->{'openoffice.lst'}{'SHORT_PRODUCTEXTENSION'} = find_item (%{$p_config_files}->{'openoffice.lst'}, "SHORT_PRODUCTEXTENSION");
-}
-
-sub set_versions($$)
-{
- my ($p_config_files, $p_versions) = @_;
-
- foreach my $config (keys %{$p_config_files}) {
- replace_items(%{$p_config_files}->{$config}, \%{%{$p_versions}->{$config}});
- }
-}
-
-sub show_versions($)
-{
- my ($p_versions) = @_;
-
- print "Product version: " . %{$p_versions}->{'openoffice.lst'}{'OOOPACKAGEVERSION'} . %{$p_versions}->{'openoffice.lst'}{'SHORT_PRODUCTEXTENSION'} . "\n";
- print "Windows build Version: " . %{$p_versions}->{'minor.mk'}{'BUILD'} . "\n";
-}
-
-sub usage()
-{
- print "This tool helps to modify LO versions in the git sources\n\n" .
-
- "Usage:\n".
- "\tlo-pack-sources [--help] [--show] [--inc] [--force-build=<ver>]\n" .
- "\t [--force-prod-micro=<ver>] build_dir\n" .
-
- "Options:\n\n" .
- "\t--help: print this help\n" .
- "\t--show: show current version\n" .
- "\t--inc: increment current version (win build version and product\n" .
- "\t micro version)\n" .
- "\t--force-build: force windows build version, e.g. 4567\n" .
- "\t--force-prod-micro: force product micro version, .e.g. beta1\n" .
- "\tbuild_dir: path to the clone of libreoffice/build; it expects\n" .
- "\t that other pieces are cloned in the clone subdirectory\n";
-}
-
-my $show;
-my $inc;
-my $build_dir;
-my $build_ver_force;
-my $prod_micro_ver_force;
-my %versions;
-my %config_files;
-
-###################
-# Arguments parsing
-###################
-
-for my $arg (@ARGV) {
- if ($arg eq '--help' || $arg eq '-h') {
- usage;
- exit 0;
- } elsif ($arg eq '--show') {
- $show=1;
- } elsif ($arg eq '--inc') {
- $inc=1;
- } elsif ($arg =~ '--force-build=(.*)') {
- $build_ver_force=$1;
- } elsif ($arg =~ '--force-prod-micro=(.*)') {
- $prod_micro_ver_force=$1;
- } elsif ($arg =~ /^-/ ) {
- die "Error: unknown option: $arg\n";
- } else {
- if (! defined $build_dir) {
- $build_dir = $arg;
- } else {
- die "Error: Too many arguments $arg\n";
- }
- }
-}
-
-###################
-# Initial checks
-###################
-
-unless ( defined $build_dir ) {
- die "Error: undefined directory with build repo, try --help\n";
-}
-
-unless ( -d "$build_dir" ) {
- die "Error: not a directory: $build_dir\n";
-}
-
-my $bootstrap_dir = "$build_dir/clone/bootstrap";
-unless ( -d "$bootstrap_dir" ) {
- die "Error: not a directory: $bootstrap_dir\n";
-}
-
-# current versions
-
-$config_files{'minor.mk'} = "$bootstrap_dir/solenv/inc/minor.mk";
-$config_files{'openoffice.lst'} = "$bootstrap_dir/instsetoo_native/util/openoffice.lst";
-
-get_versions(\%config_files, \%versions);
-
-print "Current values:\n";
-print "---------------\n";
-show_versions(\%versions);
-
-if (defined $show) {
- exit 0;
-}
-
-print "\n";
-unless (defined $inc || defined $build_ver_force || defined $prod_micro_ver_force) {
- print "No change requested\n";
- exit 0;
-}
-
-# new versions
-if ($inc) {
- $versions{'minor.mk'}{'BUILD'} = inc_ver($versions{'minor.mk'}{'BUILD'});
- $versions{'openoffice.lst'}{'SHORT_PRODUCTEXTENSION'} = inc_ver($versions{'openoffice.lst'}{'SHORT_PRODUCTEXTENSION'});
-}
-$versions{'minor.mk'}{'BUILD'} = $build_ver_force if (defined $build_ver_force);
-$versions{'openoffice.lst'}{'SHORT_PRODUCTEXTENSION'} = $prod_micro_ver_force if (defined $prod_micro_ver_force);
-
-# update also (Build:XXX) in the RSCREVISION variable
-$versions{'minor.mk'}{'Build'} = $versions{'minor.mk'}{'BUILD'};
-
-print "New values:\n";
-print "---------------\n";
-show_versions(\%versions);
-
-set_versions(\%config_files, \%versions);
-
-print "\nUpdate succeeded\n";
diff --git a/bin/lo-unify-sdf b/bin/lo-unify-sdf
deleted file mode 100755
index a4bc5af2a..000000000
--- a/bin/lo-unify-sdf
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/bin/sh
-
-usage()
-{
- echo "This script unifies the sdf files to be commited into l10n git repo"
- echo
- echo "Usage: ${0##*/} in [out]"
- echo
- echo "Options:"
- echo
- echo " in input file"
- echo " out output file"
-}
-
-in="$1"
-out="$2"
-tempdir=
-
-if test -z "$in" ; then
- echo "Error: Please, define input file"
- exit 1;
-fi
-
-if test ! -f "$in" ; then
- echo "Error: input file does not exist: $in"
- exit 1;
-fi
-
-if test -z "$out" ; then
- tempdir=`mktemp -D -t lo-unify-sdf-XXXXXX`
- out="$tempdir/out.sdf"
-fi
-
-echo "Cleaning $in..."
-grep -v "^[^ ]* [^ ]* [^ ]* [^ ]* [^ ]* [^ ]* [^ ]* [^ ]* [^ ]* en-US" "$in" | \
-LANG=C sort >$out
-
-dos2unix $out
-
-# no input file define, so overwrite the original file
-if test -n "$temp_out" ; then
- chmod --reference="$in" "$out"
- mv "$out" "$in"
- rm -rf "$tempdir"
-fi
diff --git a/bin/localize-ooo b/bin/localize-ooo
deleted file mode 100755
index 59ec08134..000000000
--- a/bin/localize-ooo
+++ /dev/null
@@ -1,197 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# See setup for user tweakables.
-#
-if test -n "$OO_TOOLSDIR" ; then
- # split build
- . $OO_TOOLSDIR/piece/sys-setup
- . $OO_TOOLSDIR/setup
- OOBUILDDIR=`pwd`
- SRCDIR_PIECE=$OOBUILDDIR/ooo-build/sdf
- TOOLSDIR=$OO_TOOLSDIR/..
- OOO_LANGS_LIST=`echo $OO_LANGS | sed -e "s|ALL|$ALL_LANGS|g"`
- split_build=yes
-else
- . ./setup
- . $OOBUILDDIR/*.[sS]et.sh
- . ./setup
- SRCDIR_PIECE=
- split_build=no
-fi
-
-export LC_ALL='C';
-
-# do nothing for en-US only build
-test "$OOO_LANGS_LIST" = "en-US" && exit 0;
-
-merge_localization()
-{
- sdf_file="$1"
- langcode="$2"
-
- # we do not want en-US strings
- sdf_file_filtered=`mktemp localize-ooo-sdf-filtered-XXXXXX`
- awk -F "\t" '{ if ($10 != "en-US") print }' "$sdf_file" >"$sdf_file_filtered"
-
- # merge the localization
- if test `wc -l "$sdf_file_filtered" | awk '{print $1}'` -eq 0 ; then
- echo "Warning: Ignoring empty extra localization $(basename $sdf_file)..."
- else
- echo "Merging extra localization $(basename $sdf_file)..."
- localize -m -x -o -l "$langcode" -f "$sdf_file_filtered" || exit 1;
- echo ""
- fi
-
- rm "$sdf_file_filtered"
-}
-
-# fix broken image in help, i#99165
-for lang in $OOO_LANGS_LIST; do
- if [ -d $OOBUILDDIR/default_images/res/commandimagelist/$lang/ ]
- then
- if [ ! -f $OOBUILDDIR/default_images/res/commandimagelist/$lang/sc_label.png ]
- then
- cp $OOBUILDDIR/default_images/res/commandimagelist/sc_label.png $OOBUILDDIR/default_images/res/commandimagelist/$lang/
- fi
- fi
-done
-
-# build the localize tool if needed (only in the non-split build)
-if test "$split_build" != "yes" && ! localize -h >/dev/null 2>&1 ; then
- echo 'Building localize stuff at first...'
- cd $OOBUILDDIR/l10ntools
- perl $SOLARENV/bin/build.pl --all $EXTRA_BUILD_FLAGS $EXTRA_DMAKE_FLAGS || exit 1;
- perl $SOLARENV/bin/deliver.pl || exit 1;
- cd -
-
- echo "Fixing localize.sdf permissions..."
- if [ -d $OOBUILDDIR/l10n ]; then
- find $OOBUILDDIR/l10n -type f -name "localize.sdf" -exec chmod 644 {} \;
- fi
-fi
-
-# stamp files
-if test "$split_build" = "yes" ; then
- stamp_dir=$OOBUILDDIR/ooo-build/stamp
- mkdir -p $stamp_dir
-else
- stamp_dir=$TOOLSDIR/stamp
-fi
-# already applied sdf files
-extra_trans_stamp=$stamp_dir/build.extra.translations
-# lang-specific sdf files
-extra_trans_lang_list=$stamp_dir/build.extra.translations.lang.list
-
-test "$VENDORNAME" = "OxygenOffice" && rm -rf "$extra_trans_stamp" "$extra_trans_lang_list" && echo "$VENDORNAME distro rebuilds translations every time."
-
-# call grep only once to speed up the search from 22s to 2s
-all_langs_pattern=`mktemp /tmp/localize-ooo-XXXXXX`
-for lang in $ALL_LANGS ; do
- echo "$lang.sdf\$" >>$all_langs_pattern
-done
-
-# lang-specific sdf files
-# it looks for files "any-name-<lang>.sdf", e.g. "gallery-hu.sdf"
-# it ignores files below $ (upstream sources available when --with-git is used)
-hungarian_updated=
-for sdf_file in `find $TOOLSDIR/src $DEB_GSIDIR $TOOLSDIR/po $SRCDIR_PIECE -path $TOOLSDIR/src/clone -prune -o -name "*.sdf"` ; do
-
- # check if it is lang-specific file
- echo "$sdf_file" | grep -q -f "$all_langs_pattern" || continue;
-
- # mention it in specific sdf file list
- grep -q "^$sdf_file$" $extra_trans_lang_list 2>/dev/null || echo $sdf_file >>$extra_trans_lang_list
-
- # skip this localization if it is already merged
- grep -q "^$sdf_file$" $extra_trans_stamp 2>/dev/null && continue;
-
- # find if this localization is required
- langcode=
- for lang in $OOO_LANGS_LIST ; do
- echo "$sdf_file" | grep -q "$lang.sdf$" && langcode=$lang
- done
- test -z "$langcode" && continue;
-
- merge_localization "$sdf_file" "$langcode"
-
- # workaround for i#56622, n#210797
- test "$langcode" = "hu" && hungarian_updated=yes
-
- # copy help auxiliary files if they are missing but the localized strings are available
- if test -d "$OOBUILDDIR/helpcontent2" ; then
- if grep -q "^helpcontent2" $sdf_file &&
- test ! -d $OOBUILDDIR/helpcontent2/source/auxiliary/$langcode ; then
- echo "Copying English help auxiliary files for \"$langcode\"..."
- cd $OOBUILDDIR/helpcontent2/source/auxiliary
- cp -r en-US $langcode
- perl -pi -e "s|Language=en-US|Language=$langcode|" $langcode/*.cfg
- fi
- fi
-
- # make stamp for this localization
- echo "$sdf_file" >>$extra_trans_stamp
-done
-
-sdf_langpack=`mktemp /tmp/langpack-ooo-sdf-XXXXXX`
-echo -n "Generating langpack description from translation: "
-for sdf_file in `find $OOBUILDDIR/l10n/source -name "*.sdf" | sort` ; do
- echo -n "."
- grep -E "STR_...._MODULE_LANGPACK_" $sdf_file >> $sdf_langpack
-done
-sed -e 's/source\\ooo\\module_langpack\.ulf/source\\accessories\\module_samples_accessories\.ulf/g' $sdf_langpack > $TOOLSDIR/src/accessories-samples-langpack.sdf
-sed -e 's/source\\ooo\\module_langpack\.ulf/source\\accessories\\module_templates_accessories\.ulf/g' $sdf_langpack > $TOOLSDIR/src/accessories-templates-langpack.sdf
-sed -e 's/source\\ooo\\module_langpack\.ulf/source\\extensions\\module_extensions_sun_templates\.ulf/g' $sdf_langpack > $TOOLSDIR/src/extensions-sun-templates-langpack.sdf
-sed -e 's/source\\ooo\\module_langpack\.ulf/source\\extensions\\module_extensions_lightproof\.ulf/g' $sdf_langpack > $TOOLSDIR/src/extensions-lightproof-langpack.sdf
-rm $sdf_langpack
-echo "done!"
-
-echo "Processing non language specific files:"
-# apply lang-non-specific sdf files
-# the files "any-name-<lang>.sdf" are ignored because they are already processed above
-# it ignores files below $TOOLSDIR/src/clone (upstream sources available when --with-git is used)
-for fn in `find $TOOLSDIR/src $DEB_GSIDIR $TOOLSDIR/po $SRCDIR_PIECE -path $TOOLSDIR/src/clone -prune -o -name "*.sdf"` ; do
- [ -f "$fn" ] || continue
- # skip if already processed as lang-specific files
- grep -q "^$fn$" $extra_trans_lang_list 2>/dev/null && continue;
- # skip if already merged
- grep -q "^$fn$" $extra_trans_stamp 2>/dev/null && continue;
- # skip sdf-templates
- echo "$fn" | grep -q "^$TOOLSDIR/po/sdf-templates" && continue;
- echo "$fn" | grep -q "^$TOOLSDIR/po/lo-build.sdf" && continue;
- # skip exotic localizations that are not in $ALL_LANGS and thus not detected
- echo "$fn" | grep -q "$TOOLSDIR/src/GSI" && ! echo $OOO_LANGS | grep -q "$(echo $(basename $fn) | cut -d'_' -f2)" && continue;
- echo "$fn" | grep -q "$TOOLSDIR/po/lo-build" && ! echo $OOO_LANGS | grep -q "$(echo $(basename $fn) | cut -d'-' -f3)" && continue;
- merge_localization "$fn" "all" || exit 1;
- echo "$fn" >>$extra_trans_stamp
-done
-
-if test "z$DEB_GSIDIR" != "z"; then
- for fn in $DEB_GSIDIR/*all*.sdf; do
- [ -f "$fn" ] || continue
- echo "Merging additional localization $(basename $fn) ..."
- merge_localization "$fn" "all" || exit 1;
- done
-fi
-
-# FIXME: add all missing en-GB strings; it is a workaround for i#66919, n#231678
-if ! grep -q "^GSI_en-GB-en-US.sdf$" $extra_trans_stamp 2>/dev/null ; then
- for lang in $OOO_LANGS_LIST ; do
- if test "$lang" = "en-GB" ; then
- gsi_temp=`mktemp /tmp/build-ooo.XXXXXX`
- echo "Fixing en-GB localization..."
- localize -e -l en-US,en-GB=en-US -f $gsi_temp
- merge_localization "$gsi_temp" "en-GB"
- echo "GSI_en-GB-en-US.sdf" >>$extra_trans_stamp
- rm -f $gsi_temp
- break;
- fi
- done
-fi
-
-# remove temporary files
-rm $all_langs_pattern
-
-echo "Localizations updated ...!"
-
-exit 0;
diff --git a/bin/lreloc b/bin/lreloc
deleted file mode 100755
index be7c70b6c..000000000
--- a/bin/lreloc
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env perl
-
-#
-# Use example: cd /opt/OOInstall/program
-# lreloc libsvx680li.so
-
-$opt_plt_too = 0;
-
-sub read_relocs($)
-{
- my $file = shift;
- my $pipe;
- my %symbols;
- my %used;
-
-# print "Read '$file'\n";
-
- open ($pipe, "readelf -r -W $file |") || die "Can't readelf -r $file: $!";
- while (<$pipe>) {
- /'.rel.plt'/ && !$opt_plt_too && last;
- if (! m/(R_\S+)\s+([0-9a-f]+)\s+(.*)\s*/) {
-# print "Bin line '$_'\n";
- next;
- }
- my ($type, $loc, $sym) = ($1, $2, $3);
- $symbols{$sym}++;
- }
- close ($pipe);
-
- return \%symbols;
-}
-
-for $lib (@ARGV) {
- my $symbols = read_relocs ($lib);
- print "File: $lib\n";
- for $a (sort keys %$symbols) {
- print $a . "\t" . $symbols->{$a} . "\n";
- }
-}
diff --git a/bin/make-win32-iso b/bin/make-win32-iso
deleted file mode 100755
index 38f193cc7..000000000
--- a/bin/make-win32-iso
+++ /dev/null
@@ -1,162 +0,0 @@
-#!/bin/sh
-
-# Create two ISOs for Win32:
-# -multilingual installer and additional language pack installers
-# -source code
-
-# Requires mkisofs, from cdrtools. Compiling that from source is a
-# pain, thanks to the author's insistence that his configuration and
-# make system is better than everybody's elses. One cdrtools
-# distribution of binaries for Cygwin known to work is at
-# http://www.sbox.tugraz.at/home/t/tplank/cdrtools-2.01-win32-bin.zip
-# and tucked away for safety also in go-oo.org:~ooo .
-
-# NOTE: very much a work in progress...
-
-set -o errexit
-
-. ./setup
-
-ISOTEMPLATE=$SRCDIR/win32-iso-template-$DISTRO.zip
-COUNTERFILE=win32-iso-counter
-
-MULTILANGS=''
-
-for i in $OOO_LANGS; do
- # We nowadays put *all* languages in a multilingual
- # installer.
- MULTILANGS="$MULTILANGS,$i"
-done
-
-case "$MULTILANGS" in
-*,*,*,*)
- ;;
-*)
- echo "This doesn't seem like a multilingual build. To make ISOs"
- echo "you should have configured a real production build, that is"
- echo "with lots of languages. For the Novell Edition,"
- echo "use --with-distro=NovellWin32ISO"
- exit 1;;
-esac
-
-cd $OOBUILDDIR
-
-. ./winenv.set.sh
-
-COUNTER=0
-[ -f $COUNTERFILE ] && COUNTER=`cat $COUNTERFILE`
-COUNTER=`expr $COUNTER + 1`
-
-cd instsetoo_native
-
-ZIPNAMEPREFIX=OOo-$OOO_PACKAGEVERSION-$COUNTER
-
-if [ -z "$DONTBUILD" ]; then
- # Build the installers.
-
- # Edit the relevant target line in instsetoo_native/util/makefile.mk
- # to build the multilingual installer.
- gawk '/^ALLTAR :/ { n++;
- if (n==3) {
- printf "ALLTAR : openoffice_en-US openoffice_en-US'"$MULTILANGS"'\n";
- next;
- }
- }
- { print; }
- ' <util/makefile.mk >util/makefile.mk.new
-
- if cmp util/makefile.mk util/makefile.mk.new; then
- :
- else
- echo Edited instsetoo_native/util/makefile.mk:
- diff -u0 util/makefile.mk util/makefile.mk.new || true
- mv util/makefile.mk.new util/makefile.mk
- fi
-
- # For some reason one cannot use the "build" alias in this script.
- perl $SOLARENV/bin/build.pl
-fi
-
-OUTDIR=`mktemp -d`
-
-ISOROOT=`mktemp -d`
-SRCISOROOT=$TOOLSDIR/ooo-build-$OOO_BUILDVERSION
-
-# Construct the installer CD contents
-
-# Figuring out the exact name used for the directory is hard, as it involves
-# a md5sum of the language string in the exact order it was specified
-# in the instsetoo_native/util/makefile.mk. So in the DONTBUILD case
-# (i.e. when we have already built the installers, and just run this script
-# afterwards, you have to pass that in as an environment variable
-
-if [ "$DONTBUILD" -a -z "$MULTILANGDIR" ]; then
- echo You must set MULTILANGDIR to the md5sum hex string used for the languages in question
- exit 1
-fi
-
-if [ -z "$MULTILANGDIR" ]; then
- MULTILANGDIR=en-US`echo $MULTILANGS | sed -e 's/,/_/g'`
- if [ `expr length $MULTILANGDIR` -gt 32 ]; then
- MULTILANGDIR=`echo $MULTILANGDIR | md5sum | sed -e "s/ .*//g"`
- fi
-fi
-
-cp -pR wntmsci10.pro/OpenOffice/msi/install/$MULTILANGDIR/. $ISOROOT
-
-cd $ISOROOT
-if [ -f $ISOTEMPLATE ]; then
- unzip $ISOTEMPLATE
-else
- echo "No template with contents for the installer ISO ($ISOTEMPLATE) found."
-fi
-
-cp $OOBUILDDIR/readlicense_oo/html/THIRDPARTYLICENSEREADME.html .
-
-echo "[autorun]" > autorun.inf
-echo "open=setup.exe" >> autorun.inf
-
-# Build the installer ISO
-
-mkisofs -quiet -J -r -V OOO-$OOO_PACKAGEVERSION-$COUNTER -o $OUTDIR/OOo-$OOO_PACKAGEVERSION-$COUNTER.iso .
-
-# Construct the source code CD contents
-
-cd $TOOLSDIR
-make distdir >/dev/null
-if [ -f download.list ]; then
- while read FILENAME; do
- case "$FILENAME" in
- *.exe|*.EXE)
- # Ignore presumably non-redistributable files
- ;;
- *)
- mkdir -p $SRCISOROOT/src
- cp -p src/$FILENAME $SRCISOROOT/src
- ;;
- esac
- done <download.list
-else
- echo "Missing download.list, did you remove it?"
- exit 1
-fi
-
-cd $SRCISOROOT
-
-# Build the source code ISO
-
-mkisofs -quiet -J -r -V OOO-src-$OOO_PACKAGEVERSION-$COUNTER -o $OUTDIR/OOo-$OOO_PACKAGEVERSION-src.iso .
-
-cd $TOOLSDIR
-
-echo Installer ISO in $OUTDIR/OOo-$OOO_PACKAGEVERSION-$COUNTER.iso
-echo Source code ISO in $OUTDIR/OOo-src-$OOO_PACKAGEVERSION-$COUNTER.iso
-
-# All done. Increment counter, remove temporary folders
-
-rm -rf $ISOROOT
-rm -rf $SRCISOROOT
-
-# Store incremented counter
-cd $OOBUILDDIR
-echo $COUNTER >$COUNTERFILE
diff --git a/bin/map-unused.pl b/bin/map-unused.pl
deleted file mode 100755
index 0cee91404..000000000
--- a/bin/map-unused.pl
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env perl
-
-# The worst offenders in bloated sym tables:
-@map_source = (
- 'svx', 'svt', 'sfx', 'xo',
- 'tk', 'vcl', 'fwk', 'svl',
- 'sb', 'so', 'tl', 'go', 'frm',
- 'basctl', 'dba', 'dbu'
-);
-
-$mangle = 'gcc3';
-
-if (@ARGV < 2) {
- print "map-unused.pl: /path/into/solver /path/to/toolsdir\n";
- exit (1);
-}
-
-my ($install, $toolsdir) = @ARGV;
-
-# poke around
--d $install || die "Can't find $install: $!";
--d $toolsdir || die "Can't find $toolsdir: $!";
--d "$install/lib" || die "Can't find $install/lib: $!";
--d "$toolsdir/map" || die "Can't find $toolsdir/map: $!";
-
-# Create selected map files
-for $a (@map_source) {
- my $src = $install ."/lib/lib".$a."641li.so.unused";
-
- -f $src || die "Can't find $src: $!";
-
- my $dest = "$toolsdir/map/$a-$mangle.map";
-
- my $srcfile;
- my $destfile;
-
- print "Build map $dest\n";
- open ($srcfile, $src) || die "Couldn't open $src: $!";
- open ($destfile, ">$dest") || die "Couldn't open $dest: $!";
-
- print $destfile "{\n";
-# print $destfile "\tglobal: *;\n";
- print $destfile "\tlocal:\n";
- while (<$srcfile>) {
- chomp;
- /^component_/ && next;
- print $destfile "\t$_;\n";
- }
- print $destfile "};\n";
-
- close ($srcfile);
- close ($destfile);
-}
-
diff --git a/bin/migrate-rawbuild-to-bootstrap b/bin/migrate-rawbuild-to-bootstrap
deleted file mode 100755
index ba8fd641a..000000000
--- a/bin/migrate-rawbuild-to-bootstrap
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env bash
-
-if [ ! -d bootstrap ] ; then
- if [ -d clone ] ; then
- if [ ! -d clone/bootstrap ] ; then
- die "clone exist but clone/bootstrap does not. you have a broken setup that need manual fixing"
- else
- mv clone/bootstrap bootstrap
- mv clone bootstrap/.
- rm -fr rawbuild
- ./bootstrap/bin/create_bootstrap_links
- fi
- fi
-fi
-
-
diff --git a/bin/mkcppcheck.sh b/bin/mkcppcheck.sh
deleted file mode 100755
index 9102933b1..000000000
--- a/bin/mkcppcheck.sh
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/bin/bash
-#
-# run cppcheck
-#
-
-## subroutines
-usage ()
-{
- echo "Usage: $0 [options]"
- echo "Options:"
- echo "-j <N> number of parallel jobs to start"
- echo "-u try to detect unused function (mutually exclusive to -j)"
- echo "-s moan about stylistic problems, too"
- echo "-h this help"
-}
-
-## code
-NCPUS=1
-while getopts hsuj: opt ; do
- case "$opt" in
- j) NCPUS="$OPTARG" ;;
- s) STYLE="--style" ;;
- u) REPORT_UNUSED="--unused-functions" ;;
- h) usage; exit ;;
- ?) usage; exit ;;
- esac
-done
-
-# binaries that we need
-which cppcheck > /dev/null 2>&1 || {
- echo "You need cppcheck for this"
- exit 1
-}
-
-# otherwise, aliases are not expanded below
-shopt -s expand_aliases
-
-# suck setup
-BINDIR=`dirname $0`
-. $BINDIR/setup
-
-. ./*.Set.sh
-
-# get list of modules in build order
-INPUT_PROJECTS=`cd instsetoo_native && build --all --show | sed -n -e '/Building module/ s/Building module // p'`
-
-# strip -I. and bin -I prefix; exclude system headers
-INCLUDE_PATH=`echo $SOLARINC | sed -e ' s/-I\.//'g | sed -e ' s/ -I/ /'g | sed -e ' s|/usr/[^ ]*| |g'`
-
-
-###################################################
-#
-# run cppcheck, separately for each module
-#
-###################################################
-
-for PROJECT in $INPUT_PROJECTS;
-do
- echo "Checking module $PROJECT"
- cppcheck -j$NCPUS $REPORT_UNUSED -v $STYLE $SOLARINC -I$SRC_ROOT/$PROJECT/inc -I$SRC_ROOT/$PROJECT/inc/pch $PROJECT/source
- echo
-done
-
-## done
diff --git a/bin/mkdocs.sh b/bin/mkdocs.sh
deleted file mode 100755
index d6690d7b9..000000000
--- a/bin/mkdocs.sh
+++ /dev/null
@@ -1,131 +0,0 @@
-#!/bin/bash
-#
-# Doxygen Doc generation
-#
-
-# binaries that we need
-which doxygen > /dev/null 2>&1 || {
- echo "You need doxygen for doc generation"
- exit 1
-}
-which dot > /dev/null 2>&1 || {
- echo "You need the graphviz tools to create the nice inheritance graphs"
- exit 1
-}
-
-# otherwise, aliases are not expanded below
-shopt -s expand_aliases
-
-# Title of the documentation
-DOXYGEN_PROJECT_PREFIX="LibreOffice"
-
-# suck setup
-BINDIR=`dirname $0`
-. $BINDIR/setup
-
-. ./*.Set.sh
-
-# get list of modules in build order - bah, blows RAM & disk, static list below
-#INPUT_PROJECTS=`build --all --show | sed -n -e '/Entering module/ s/Entering module // p'`
-INPUT_PROJECTS="o3tl basegfx comphelper svl vcl canvas cppcanvas filter oox svtools goodies drawinglayer xmloff slideshow sfx2 editeng svx cui chart2 dbaccess sd starmath sc sw"
-
-# output directory for generated documentation
-BASE_OUTPUT="$1"
-mkdir -p "$BASE_OUTPUT" || {
- echo "Cannot create $BASE_OUTPUT"
- exit 1
-}
-
-# paths for binary and configuration file
-BASE_PATH=`pwd`
-DOXYGEN_CFG="$2"
-if test ! -f "$DOXYGEN_CFG"; then
- echo "doxygen.cfg not found"
- exit 1
-fi
-
-# strip -I. and bin -I prefix; exclude system headers
-DOXYGEN_INCLUDE_PATH=`echo $SOLARINC | sed -e ' s/-I\.//'g | sed -e ' s/ -I/ /'g | sed -e ' s|/usr/[^ ]*| |g'`
-
-# setup version string
-DOXYGEN_VERSION="$GITTAG"
-
-
-###################################################
-#
-# Generate docs
-#
-###################################################
-
-# cleanup
-rm -rf $BASE_OUTPUT/*
-
-# make the stuff world-readable
-umask 022
-
-# generate docs
-DOXYGEN_REF_TAGFILES=""
-for PROJECT in $INPUT_PROJECTS;
-do
- # avoid processing of full project subdirs, only add source and inc
- DOXYGEN_INPUT=`printf "%s" "$PROJECT/source $PROJECT/inc "`
-
- DOXYGEN_OUTPUT="$BASE_OUTPUT/$PROJECT"
- DOXYGEN_OUR_TAGFILE="$DOXYGEN_OUTPUT/$PROJECT.tags"
- DOXYGEN_PROJECTNAME="$DOXYGEN_PROJECT_PREFIX Module $PROJECT"
-
- # export variables referenced in doxygen config file
- export DOXYGEN_INPUT
- export DOXYGEN_OUTPUT
- export DOXYGEN_INCLUDE_PATH
- export DOXYGEN_VERSION
- export DOXYGEN_OUR_TAGFILE
- export DOXYGEN_REF_TAGFILES
- export DOXYGEN_PROJECTNAME
-
- # debug
- echo "Calling $DOXYGEN_PATH/doxygen $DOXYGEN_CFG with"
- echo "Input: $DOXYGEN_INPUT"
- echo "Output: $DOXYGEN_OUTPUT"
- echo "Include: $DOXYGEN_INCLUDE_PATH"
- echo "Version: $DOXYGEN_VERSION"
- echo "Tagfile: $DOXYGEN_OUR_TAGFILE"
- echo "Ref-Tags: $DOXYGEN_REF_TAGFILES"
- echo "Title: $DOXYGEN_PROJECTNAME"
-
- nice -15 doxygen "$DOXYGEN_CFG"
-
- # setup referenced tagfiles for next round
- DOXYGEN_REF_TAGFILES="$DOXYGEN_REF_TAGFILES $DOXYGEN_OUR_TAGFILE=$BASE_URL/$PROJECT/html"
-done
-
-# generate entry page
-cat - > $BASE_OUTPUT/index.html <<EOF
-<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
-<html>
- <head>
- <title>LibreOffice Source Code Documentation (fragmentary)</title>
- </head>
- <body>
- <h1>LibreOffice Source Code Documentation (fragmentary)</h1>
- <ul>
-EOF
-
-for PROJECT in $INPUT_PROJECTS;
-do
- echo "<li><a href=\"$PROJECT/html/classes.html\">$PROJECT</a></li>" >> $BASE_OUTPUT/index.html
-done
-
-cat - >> $BASE_OUTPUT/index.html <<EOF
- </ul>
- <p>Last updated:
-EOF
-LANG= date >> $BASE_OUTPUT/index.html
-
-cat - >> $BASE_OUTPUT/index.html <<EOF
- </p>
- </body>
-</html>
-EOF
-
-## done
diff --git a/bin/modules2.txt b/bin/modules2.txt
deleted file mode 100644
index fd1348c8d..000000000
--- a/bin/modules2.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-ure=bridges,cli_ure,codemaker,cppu,cppuhelper,cpputools,idlc,io,javaunohelper,jurt,jvmaccess,jvmfwk,offapi,offuh,pyuno,rdbmaker,registry,remotebridges,ridljar,sal,salhelper,stoc,store,udkapi,unoil,ure,xml2cmp
-sdk=autodoc,cosv,odk,udm,unodevtools
-base=dbaccess,reportdesign
-calc=sc,scaddins,sccomp,chart2
-extras=extras
-help=helpcontent2
-writer=sw,starmath
-impress=sd,animations,slideshow
-artwork=default_images,external_images,ooo_custom_images
-filters=binfilter,filter,hwpfilter,unoxml,writerfilter,writerperfect,xmerge,oox
-testing=qadevOOo,smoketestoo_native,testshl2,testtools,testautomation
-bootstrap=guw,dmake,instsetoo_native,scp2,solenv,soltools,stlport,soldep,aclocal.m4,oowintool,configure.in,configure.cmd,Makefile.in,install-sh,set_soenv.in,config.guess,configure,acinclude.m4,config.sub,makefile.rc,bootstrap.1,fetch_tarballs.sh,ooo.lst
-libs-gui=basebmp,basegfx,canvas,comphelper,cppcanvas,dtrans,goodies,i18npool,i18nutil,o3tl,padmin,psprint_config,regexp,rsc,sax,sot,svtools,toolkit,tools,transex3,ucbhelper,unotools,vcl,vos
-libs-core=avmedia,basic,configmgr,connectivity,desktop,drawinglayer,embeddedobj,eventattacher,fileaccess,formula,fpicker,framework,idl,linguistic,officecfg,oovbaapi,sandbox,scripting,sfx2,shell,svx,sysui,ucb,uui,xmlhelp,xmloff,xmlscript,readlicense_oo
-libs-extern=afms,agg,beanshell,epm,external,fondu,hsqldb,libegg,libtextcat,libxmlsec,np_sdk,rhino,sane,twain,lpsolve,icc,openssl,unixODBC,vigra,x11_extensions,hyphen,libwpd,lucene,redland,cppunit,apple_remote,graphite,nss
-components=accessibility,automation,basctl,bean,crashrep,embedserv,extensions,forms,javainstaller2,lingucomponent,MathMLDTD,package,setup_native,UnoControls,wizards,xmlsecurity
-postprocess=postprocess,packimages
-libs-extern-sys=berkeleydb,bitstream_vera_fonts,expat,icu,jpeg,libxml2,libxslt,moz,neon,python,zlib,saxon,stax,boost,curl,dictionaries,cairo,hunspell
-extensions=sdext,swext,tomcat,apache-commons,jfreereport,reportbuilder,xpdf,xsltml,migrationanalysis,mysqlc,mysqlcppconn
-l10n=l10n
diff --git a/bin/ooconfig b/bin/ooconfig
deleted file mode 100755
index bebb2d0e9..000000000
--- a/bin/ooconfig
+++ /dev/null
@@ -1,217 +0,0 @@
-#!/usr/bin/env perl
-
-use strict;
-
-# mangling to names:
-# bool: enable-$/disable-$, int,string: set-$
-
-# setting name -> [ '/path/to/key',
-# 'bool|string|int|custom',
-# '<help-description>',
-# &custom_func ]
-my $path_idx = 0;
-my $type_idx = 1;
-my $help_idx = 2;
-my $func_idx = 3;
-
-my $settings = {
- 'enable-auto-save' => [ '/apps/openoffice/auto_save', 'bool', 'Save AutoRecovery information' ],
- 'enable-create-backup' => [ '/apps/openoffice/create_backup', 'bool', 'Always create backup copy' ],
- 'enable-font-anti-aliasing' => [ '/apps/openoffice/use_font_anti_aliasing', 'bool', 'Screen font antialiasing' ],
- 'enable-font-history' => [ '/apps/openoffice/show_font_history', 'bool', 'Show font history' ],
- 'enable-font-preview' => [ '/apps/openoffice/show_font_preview', 'bool', 'Show preview of fonts' ],
- 'enable-menu-icons' => [ '/apps/openoffice/show_menu_icons', 'bool', 'Show icons in menus' ],
- 'enable-menu-inactive-items' => [ '/apps/openoffice/show_menu_inactive_items', 'bool', 'Show inactive menu items' ],
- 'enable-opengl' => [ '/apps/openoffice/use_opengl', 'bool', 'Use OpenGL' ],
- 'enable-optimize-opengl' => [ '/apps/openoffice/optimize_opengl', 'bool', 'Optimized output for OpenGL' ],
- 'enable-printing' => [ '/desktop/gnome/lockdown/disable_printing', 'custom:bool', 'Allow printing', \&inverted_boolean ],
- 'enable-printing-modifies-doc' => [ '/apps/openoffice/printing_modifies_doc', 'bool', 'Printing sets document-modified status' ],
- 'enable-recommend-password-on-save' => [ '/apps/openoffice/lockdown/recommend_password_on_save', 'bool', 'Recommend password protection on saving' ],
- 'enable-remove-personal-info-on-save' => [ '/apps/openoffice/lockdown/remove_personal_info_on_save', 'bool', 'Remove personal information on saving' ],
- 'enable-system-file-dialog' => [ '/apps/openoffice/use_system_file_dialog', 'bool', 'Use system file dialogs' ],
- 'enable-system-font' => [ '/apps/openoffice/use_system_font', 'bool', 'Use system font for user interface' ],
- 'enable-ui-customization' => [ '/apps/openoffice/lockdown/disable_ui_customization', 'custom:bool', 'Allow UI customization', \&inverted_boolean ],
- 'enable-warn-alien-format' => [ '/apps/openoffice/warn_alien_format', 'bool', 'Warn when not saving in OpenDocument or default format' ],
- 'enable-warn-info-create-pdf' => [ '/apps/openoffice/lockdown/warn_info_create_pdf', 'bool', 'Warn if document contains information on creating pdf' ],
- 'enable-warn-info-printing' => [ '/apps/openoffice/lockdown/warn_info_printing', 'bool', 'Warn if document contains information on printing' ],
- 'enable-warn-info-signing' => [ '/apps/openoffice/lockdown/warn_info_signing', 'bool', 'Warn if document contains information on signing' ],
- 'enable-warn-info-saving' => [ '/apps/openoffice/lockdown/warn_info_saving', 'bool', 'Warn if document contains information on saving' ],
- 'set-auto-save-interval' => [ '/apps/openoffice/auto_save_interval', 'int', 'Minutes to save AutoRecovery information' ],
- 'set-defaults' => [ 'dummy for help', 'dummy for help', 'Revert to default settings' ],
- 'set-font-anti-aliasing-min-pixel' => [ '/apps/openoffice/font_anti_aliasing_min_pixel', 'int', 'Pixels to use screen font antialiasing from' ],
- 'set-icon-size' => [ '/apps/openoffice/icon_size', 'custom:int', 'Icon size', \&parse_icon_size ],
- 'set-macro-security-level' => [ '/apps/openoffice/lockdown/macro_security_level', 'custom:int', 'Macro security level', \&parse_macro_security_level ],
- 'set-undo-steps' => [ '/apps/openoffice/undo_steps', 'int', 'Number of Undo steps' ],
- 'set-writer-default-document' => [ '/apps/openoffice/writer_default_document_format', 'custom:string', 'Default Writer file format', \&parse_writer ],
- 'set-calc-default-document' => [ '/apps/openoffice/calc_default_document_format', 'custom:string', 'Default Calc file format', \&parse_calc ],
- 'set-impress-default-document' => [ '/apps/openoffice/impress_default_document_format', 'custom:string', 'Default Impress file format', \&parse_impress ]
-};
-
-sub inverted_boolean($$$)
-{
- my ($attrs, $key, $value) = @_;
- return '0' if ($value eq 'yes');
- return '1' if ($value eq 'no');
- die "Error: supported values of $key are yes|no: $value is invalid\n";
-}
-
-sub validate_enum($$$)
-{
- my ($opts, $value, $key) = @_;
-
- my @enum_opts = split (/,/, $opts);
- my $hit = 0;
- for my $val (@enum_opts) {
- $hit = 1 if ($val eq $value);
- }
- if (!$hit) {
- print STDERR "Error: supported values of $key are $opts: $value is invalid\n";
- exit 1;
- }
-}
-
-sub parse_icon_size($$$)
-{
- my ($attrs, $key, $value) = @_;
- validate_enum ("small,large,auto", $value, $key);
- return '0' if ($value eq 'small');
- return '1' if ($value eq 'large');
- return '2' if ($value eq 'auto');
- die "impossible";
-}
-
-sub parse_macro_security_level($$$)
-{
- my ($attrs, $key, $value) = @_;
- validate_enum ("low,medium,high,veryhigh", $value, $key);
- return '0' if ($value eq 'low');
- return '1' if ($value eq 'medium');
- return '2' if ($value eq 'high');
- return '3' if ($value eq 'veryhigh');
- die "impossible";
-}
-
-sub parse_writer($$$)
-{
- my ($attrs, $key, $value) = @_;
- validate_enum ("odt,doc,sxw", $value, $key);
- return "writer8" if ($value eq 'odt');
- return "MS Word 97" if ($value eq 'doc');
- return "StarOffice XML (Writer)" if ($value eq 'sxw');
- die "impossible";
-}
-
-sub parse_calc($$$)
-{
- my ($attrs, $key, $value) = @_;
- validate_enum ("ods,xls,sxc", $value, $key);
- return "calc8" if ($value eq 'ods');
- return "MS Excel 97" if ($value eq 'xls');
- return "StarOffice XML (Calc)" if ($value eq 'sxc');
- die "impossible";
-}
-
-sub parse_impress($$$)
-{
- my ($attrs, $key, $value) = @_;
- validate_enum ("odp,ppt,sxi", $value, $key);
- return "impress8" if ($value eq 'odp');
- return "MS PowerPoint 97" if ($value eq 'ppt');
- return "StarOffice XML (Impress)" if ($value eq 'sxi');
- die "impossible";
-}
-
-sub help()
-{
- print "Usage: ooconfig --setting=value\n where setting is one of:\n";
- for my $key (keys %{$settings}) {
- my $attrs = $settings->{$key};
- my $help = $attrs->[$help_idx];
- print "\t--" . $key;
- print " : $help" if (defined $help);
- print "\n";
- }
-}
-
-sub set_defaults()
-{
- my $sys_str = "gconftool-2 --recursive-unset '/apps/openoffice'";
- `$sys_str` && die "Unset failed: $!";
- $sys_str = "gconftool-2 --set '/desktop/gnome/lockdown/disable_printing' --type bool '0'";
- `$sys_str` && die "Reset failed: $!";
-}
-
-sub conf_type($)
-{
- my $type = shift;
- $type = (split(/:/, $type))[1] if ($type =~ m/^custom:(.*)$/);
- return $type;
-}
-
-sub set_key($$)
-{
- my ($attrs, $conf_value) = @_;
- my $sys_str = "gconftool-2 " .
- "--set '" . $attrs->[$path_idx] . "' " .
- "--type " . conf_type ($attrs->[$type_idx]) . " " .
- "'" . $conf_value . "'";
-# print "\$ $sys_str\n";
- `$sys_str` && die "Set failed: $!";
-}
-
-for my $arg (@ARGV) {
- if ($arg =~ m/^--help/ || $arg =~ m/^-h/) {
- help();
- exit 0;
- }
- if ($arg eq '--set-defaults') {
- set_defaults();
- exit 0;
- }
- if (!($arg =~ m/^--(.*)=(.*)/)) {
- print STDERR "Error: syntax --enable-foo=yes\n";
- exit 1;
- }
- if (!defined $settings->{$1}) {
- print STDERR "Unknown setting '$arg'\n";
- exit 1;
- }
-}
-
-for my $arg (@ARGV) {
- $arg =~ m/^--(.*)=(.*)/;
- my $key = $1;
- my $value = $2;
- my $attrs = $settings->{$key};
- my $path = $attrs->[$path_idx];
- my $type = $attrs->[$type_idx];
-
-# print "Key '$key' value '$value' '$arg' $attrs, $path, $type\n";
- my $conf_value;
-
- if ($type eq 'bool') {
- if ($value =~ m/^yes$/g ||
- $value =~ m/^true$/g) {
- $conf_value = 1;
- } elsif ($value =~ m/^no$/g ||
- $value =~ m/^false$/g) {
- $conf_value = 0;
- } else {
- print STDERR "Error: supported values of $key are yes|no: $value is invalid\n";
- exit 1;
- }
-
- } elsif ($type eq 'int' ||
- $type eq 'string') {
- $conf_value = $value;
-
- } elsif ($type =~ m/^custom:(.*)$/) {
- my $function = $attrs->[$func_idx];
- $conf_value = $function->($attrs, $key, $value);
-
- } else {
- print STDERR "Error in type '$type' on key '$key'\n";
- exit 1;
- }
- set_key ($attrs, $conf_value);
-}
diff --git a/bin/ooconvwatch b/bin/ooconvwatch
deleted file mode 100755
index 2a89b6d61..000000000
--- a/bin/ooconvwatch
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/bin/sh
-
-#
-# run convwatch for all files given in ooo-build/test (or below -d <dir>)
-#
-
-# check for required gs
-which gs >/dev/null 2>&1 || {
- echo "need gs"; exit 1
-}
-
-# check for required imagemagick tools
-which composite >/dev/null 2>&1 || {
- echo "need imagemagick's composite"; exit 1
-}
-which identify >/dev/null 2>&1 || {
- echo "need imagemagick's identify"; exit 1
-}
-
-usage ()
-{
-echo "Usage: $0 [options]"
-echo "Options:"
-echo "-d Specify the directory to read the test docs from (will be"
-echo " scanned recursively)"
-echo "-c Create reference output next to input docs. Careful,"
-echo " overwrites what's there!"
-echo "-h This help information"
-}
-
-# default test file dir in ooo-build tree
-DIR="$SRC_ROOT/../../test"
-
-# Parse command line options
-while getopts d:ch opt ; do
- case "$opt" in
- d) DIR="$OPTARG" ;;
- c) CREATE_REFS=y ;;
- h) usage; exit ;;
- ?) usage; exit ;;
- esac
-done
-
-shift $(($OPTIND - 1))
-
-# craft ini file for java tests
-cat > props.ini <<EOF
-DOC_COMPARATOR_INPUT_PATH=$DIR
-DOC_COMPARATOR_REFERENCE_PATH=/tmp/convwatch
-DOC_COMPARATOR_OUTPUT_PATH=/tmp/convwatch
-ConnectionString=pipe,name=none
-EOF
-echo "AppExecutionCommand=`pwd`/soffice -norestore -nocrashreport -headless -accept=pipe,name=none;urp;" >> props.ini
-
-if [ "$CREATE_REFS" = "y" ]; then
- echo "DOC_COMPARATOR_OVERWRITE_REFERENCE=true" >> props.ini
-fi
-
-# fake environment
-OOORUNNER=`echo $SRC_ROOT/solver/*/*/bin`
-JARFILES=$OOORUNNER/ridl.jar:$OOORUNNER/unoil.jar:$OOORUNNER/jurt.jar:$OOORUNNER/juh.jar:$OOORUNNER/java_uno.jar:$OOORUNNER/OOoRunnerLight.jar
-
-# start reference build
-LD_LIBRARY_PATH=`pwd`/../ure/lib java -cp $JARFILES org.openoffice.Runner -tb java_complex -ini props.ini -o convwatch.ReferenceBuilder
-
-# start the graphical document compare
-LD_LIBRARY_PATH=`pwd`/../ure/lib java -cp $JARFILES org.openoffice.Runner -tb java_complex -ini props.ini -o convwatch.ConvWatchStarter
diff --git a/bin/oodocdiff.sh b/bin/oodocdiff.sh
deleted file mode 100755
index 5e4e384b8..000000000
--- a/bin/oodocdiff.sh
+++ /dev/null
@@ -1,98 +0,0 @@
-#!/bin/sh
-########################################################################
-#
-# Copyright (c) 2010 Thorsten Behrens, Miklos Vajna
-#
-# Permission is hereby granted, free of charge, to any person
-# obtaining a copy of this software and associated documentation
-# files (the "Software"), to deal in the Software without
-# restriction, including without limitation the rights to use,
-# copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the
-# Software is furnished to do so, subject to the following
-# conditions:
-#
-# The above copyright notice and this permission notice shall be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-# OTHER DEALINGS IN THE SOFTWARE.
-#
-# This little hack is much inspired by Caolan McNamara's original
-# OpenOffice.org convwatch work
-#
-########################################################################
-
-# check for required gs
-which gs >/dev/null 2>&1 || {
- echo "need gs"; exit 1
-}
-
-# check for required imagemagick tools
-which composite >/dev/null 2>&1 || {
- echo "need imagemagick's composite"; exit 1
-}
-which identify >/dev/null 2>&1 || {
- echo "need imagemagick's identify"; exit 1
-}
-
-usage ()
-{
-echo "Usage: $0 [options] <input1>.ps <input2>.ps"
-echo ""
-echo "Generates graphical comparison between input1 & input2"
-echo "and outputs count of different pixel values to stdout"
-echo ""
-echo "Options:"
-echo "-r<num> Set image resolution to <num> dpi (defaults to 75)"
-echo "-t<tmpdir> Set tmpdir location to use (defaults to /tmp)"
-echo "-s Sort output by number of increasing differences"
-echo "-k Keep temp images"
-echo "-h This help information"
-echo "-q Be quiet"
-}
-
-RES=75
-TMP=/tmp
-SORT=cat
-GS="gs"
-
-# Parse command line options
-while getopts r:t:n:skhq opt ; do
- case "$opt" in
- r) RES="$OPTARG" ;;
- t) TMP="$OPTARG" ;;
- s) SORT="sort -n -k2,2" ;;
- k) KEEP=1 ;;
- q) QUIET=1; GS="gs -q";;
- n) CMPDIR="$OPTARG" ;;
- h) usage; exit ;;
- ?) usage; exit ;;
- esac
-done
-
-shift $(($OPTIND - 1))
-
-mkdir $TMP/${CMPDIR:=$$}
-
-test -z "$QUIET" && echo "Generating bitmap renderings of $1 ..."
-$GS -dNOPROMPT -dBATCH -sDEVICE=jpeg -r$RES -dNOPAUSE -sOutputFile=$TMP/$CMPDIR/file1.%04d.jpeg $1
-
-test -z "$QUIET" && echo "Generating bitmap renderings of $2 ..."
-$GS -dNOPROMPT -dBATCH -sDEVICE=jpeg -r$RES -dNOPAUSE -sOutputFile=$TMP/$CMPDIR/file2.%04d.jpeg $2
-
-test -z "$QUIET" && echo "Generating differences..."
-for file in $TMP/$CMPDIR/file1.*; do test -z "$QUIET" && echo -n "$file: "; num=`echo $file | sed -e ' s/.*\.\(.*\)\..*/\1/'`; composite -compose difference $file $TMP/$CMPDIR/file2.$num.jpeg - | identify -format %k -; done | $SORT
-
-if test -n "$KEEP"; then
- echo "Keeping temp images at $TMP/$CMPDIR" >&2
-else
- rm -rf $TMP/$CMPDIR
-fi
-
diff --git a/bin/ooinstall b/bin/ooinstall
deleted file mode 100755
index 2f5400528..000000000
--- a/bin/ooinstall
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env perl
-
-# This script has three uses:
-# 1. From the command line to install straight into a given directory:
-# bin/ooinstall /opt/Foo
-# 2. From the command line to link into a given directory:
-# bin/ooinstall -l /opt/FooLinked
-# 3. When packaging (called from package-ooo), to install to DESTDIR
-
-use File::Find;
-use Cwd;
-
-%setup_vars = ();
-
-$path = '';
-$linked='';
-
-sub suck_setup($)
-{
- my $file = shift;
- if (-f $file) {
- print "Reading setup from $file\n";
- open ($Vars, "bash -c '. $file ; set'|") || die "Can't find $file: $!";
- while (<$Vars>) {
- /([^=]*)=(.*)/ || next;
- $setup_vars{$1} = $2;
- }
- close ($Vars);
- return 1;
- }
- return 0;
-}
-
-( $^O =~ /freebsd/i) || ( $^O =~ /openbsd/i ) || ( $^O =~ /darwin/i ) || ( -f "/proc/meminfo" ) || die "The installer cannot work without javaldx running, which requires /proc to be mounted";
-
-suck_setup ("./setup") || suck_setup ("bin/setup") || die "can't find bin/setup";
-
-for $arg (@ARGV) {
- if ($arg eq '-l') {
- $linked = '-l';
-
- } elsif ($arg eq '-h' || $arg eq '--help') {
- $help = 1;
- } else {
- $path = $arg;
- }
-}
-
-$help = 1 if $path eq '';
-
-if ($help) {
- print "ooinstall [-l] <prefix to install to>\n";
- print " -l - performs a linkoo on the installed source\n";
- exit 1;
-}
-
-$sources_path=Cwd::realpath($setup_vars{'OOBUILDDIR'});
-
-# Call the solenv/bin/ooinstall
-# Note we need to export those here, as solenv/bin/ooinstall can't use $setup_vars and needs to get them from $ENV
-system ("cd $sources_path ; . ./*Set.sh ; export OOO_LANGS_LIST=$setup_vars{'OOO_LANGS_LIST'}; export OODESTDIR=$setup_vars{'OODESTDIR'}; export OOO_STRIP=$setup_vars{'OOO_STRIP'}; solenv/bin/ooinstall $linked $path") && die "Failed to ooinstall";
-
-print "Installing extra dictionaries...\n";
-system ("cd $setup_vars{TOOLSDIR}/bin ; " .
- "sh ./install-dictionaries $path/basis$setup_vars{VERSION}/share/dictionaries") && die "Failed to install dictionaries: $!";
-
-print "Building galleries...\n";
-system ("cd $setup_vars{TOOLSDIR}/bin ; " .
- "sh ./build-galleries $path") && die "Failed to build extra galleries: $!";
-
-system ("cd $setup_vars{TOOLSDIR}/bin ; " .
- "sh ./install-mono $path") && die "Failed to finish mono installation: $!";
diff --git a/bin/ooo-news-filter-old b/bin/ooo-news-filter-old
deleted file mode 100755
index b79b1306c..000000000
--- a/bin/ooo-news-filter-old
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/bin/sh
-
-usage()
-{
- echo "This script filter the ooo-build NEWS file to show only the changes"
- echo "that are not mentioned in another NEWS file from another branch."
- echo
- echo "Usage: ${0##*/} new_news other_news"
- echo
- echo "Parameters:"
- echo
- echo " new_news news files including some newer changes"
- echo " other_news news file from other ooo-build branch that"
- echo " includes some changes from new_news"
-}
-
-if test "$1" = "--help" -o $# -ne 2 ; then
- usage
- exit 0;
-fi
-
-new_news="$1"
-other_news="$2"
-
-for file in "$new_news" "$other_news" ; do
- if test ! -f "$file" ; then
- echo "Error: the file does not exist: $file"
- exit 1;
- fi
-done
-
-# create grep patterns from the other news file
-# do not put version and section lines there; just changes entries
-patterns=`mktemp /tmp/ooo-news-filer-old-XXXXXX`
-grep "^ + " "$other_news" | \
- sed -e "s|\#|\\\#|" \
- -e "s|\[|\\\[|" \
- -e "s|\]|\\\]|" >"$patterns"
-
-# print the new log without the old entries
-echo "Filtering - go and have some tee..." 1>&2
-grep -f "$patterns" -v "$new_news"
-
-# remove temporary files
-rm $patterns
diff --git a/bin/ooo-sdf-split b/bin/ooo-sdf-split
deleted file mode 100755
index c68cce53a..000000000
--- a/bin/ooo-sdf-split
+++ /dev/null
@@ -1,154 +0,0 @@
-#!/usr/bin/perl
- eval 'exec /usr/bin/perl -S $0 ${1+"$@"}'
- if $running_under_some_shell;
-#!/usr/bin/perl
-
-use strict;
-use File::Copy;
-use File::Path;
-use File::Temp qw/ tempfile tempdir /;
-
-sub usage()
-{
- print "This script split the given sdf files for each piece\n\n" .
-
- "Usage: ooo-sdf-split modules_list sdf_file...\n\n" .
-
- "Parameters:\n\n" .
-
- " modules_list file that defines what OOo source module belongs for\n" .
- " what piece\n" .
- " sdf_file original sdf file";
-}
-
-sub load_modules_list($$$)
-{
- my ($modules_list, $p_modules, $p_pieces) = @_;
- print "Loading $modules_list";
-
- open (MODULES_LIST, "$modules_list") || die "Can't open $modules_list";
- while (my $line = <MODULES_LIST>) {
- chomp $line;
- if ( $line =~ m/^\s*(\w*)\s*=\s*(.*)$/ ) {
- my $piece = "$1";
- foreach my $module ( split( ',', $2 ) ) {
- $p_modules->{"$module"} = "$piece";
- }
- } else {
- die "Syntax error at $modules_list, line $.\n";
- }
- print ".";
- }
- close (MODULES_LIST);
- print "\n";
-}
-
-sub guess_lang($)
-{
- my ($sdf_file) = @_;
-
- $sdf_file =~ /^GSI_([\w-]*).sdf/;
- return "$1" if (defined $1);
- die "Warning: Can't guess the target language for $sdf_file\n";
-}
-
-sub split_sdf_file($$$$)
-{
- my ($sdf_file, $temp_dir, $p_modules, $p_pieces) = @_;
-
- my $lang = guess_lang("$sdf_file");
-
- # This hash includes the sdf lines split by piece
- # The key is the piece name, the value is array with sdf lines for the given piece
- my %lines = ();
- my $piece;
-
- print "Processing $sdf_file";
-
- open (sdf_file, "$sdf_file") || die "Can't open $sdf_file";
- while (my $line = <sdf_file>) {
- chomp $line;
- $line =~ /^\s*(\w*)\t/;
- my $module = "$1";
- if ( defined $p_modules->{"$module"} ) {
- $piece = "$p_modules->{$module}";
- } else {
- $piece = "unknown";
- }
- push @{$lines{$piece}}, "$line";
- $p_pieces->{"$piece"} = 1;
- if ( ($. % 10000 ) == 0 ) {
- print ".";
- $|++; # flush
- }
- }
- close (sdf_file);
-
- my $out_dir="$temp_dir/ooo-build/sdf";
- mkpath "$out_dir" || die "Can't create directory $out_dir\n";
-
- foreach $piece (keys %lines) {
- open (SDF_FILE_PIECE, '>', "$out_dir/$piece-$lang.sdf") || die "Can't open $out_dir/$piece-$lang.sdf for writing\n";
- foreach my $line (@{$lines{$piece}}) {
- print SDF_FILE_PIECE "$line\n";
- }
- close (SDF_FILE_PIECE);
- print ".";
- $|++; # flush
- }
- print "\n";
-}
-
-sub compress_split_sdf_files($$)
-{
- my ($temp_dir, $p_pieces) = @_;
-
-
- foreach my $piece (keys %{$p_pieces}) {
- my $archive = "ooo-sdf-$piece.tar.bz2";
- print "Creating $archive...";
- system ("cd $temp_dir; tar -cjf $archive ooo-build/sdf/$piece*.sdf") == 0 || die "Can't create $temp_dir/$archive\n";
- system ("cp $temp_dir/$archive ./$archive") == 0 || die "Can't move $temp_dir/$archive -> $archive\n";
- print " done\n";
- }
-}
-
-my $temp_dir;
-my $modules_list;
-my @sdf_files = ();
-# This hash includes information about how the modules are split into the pieces
-# The key is the module name, the value is the piece name
-my %modules = ();
-# list of supported pieces
-my %pieces;
-
-###################
-# Arguments parsing
-###################
-
-for my $arg (@ARGV) {
- if ($arg eq '--help' || $arg eq '-h') {
- usage;
- exit 0;
- } else {
- (-f $arg) || die "Error: $arg is not a file";
- if (! defined $modules_list) {
- $modules_list = $arg;
- } else {
- push @sdf_files, "$arg";
- }
- }
-}
-
-(defined $modules_list) || die "Modules list is not defined\n";
-(@sdf_files > 0) || die "No SDF file defined\n";
-
-$temp_dir = tempdir( "/tmp/ooo-sdf-split-XXXXXX" );
-
-load_modules_list($modules_list, \%modules, \%pieces);
-foreach my $sdf_file (@sdf_files) {
- split_sdf_file($sdf_file, $temp_dir, \%modules, \%pieces);
-}
-compress_split_sdf_files($temp_dir, \%pieces);
-
-system ("rm -rf $temp_dir");
diff --git a/bin/ooo-unpack-sources b/bin/ooo-unpack-sources
deleted file mode 100755
index 47a0677bd..000000000
--- a/bin/ooo-unpack-sources
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/bin/sh
-
-usage()
-{
- echo "This script unpack ooo source tarballs generated by ooo-pack-sources and"
- echo "move all modules into a single dir"
- echo
- echo "Usage: ${0##*/} [--help] version"
- echo
- echo "Parameters:"
- echo
- echo " version: version of the source tarballs, e.g. 3.3.0.2"
-}
-
-version=
-tarball_prefix="ooo-build"
-
-while test -n "$1" ; do
- case "$1" in
- --help)
- usage
- exit 0;
- ;;
- -*)
- echo "Error: unknown option: $1"
- exit 1;
- ;;
- *)
- if test -z "$version" ; then
- version="$1"
- else
- echo "Error: unknown parameter: $1"
- fi
- ;;
- esac
- shift
-done
-
-if test -z "$version" ; then
- echo "Error: Please, define the version. Try --help"
- exit 1;
-fi
-
-outdir="$tarball_prefix-$version"
-
-if test -d "$outdir" ; then
- echo "Warning: output directory already exists: $outdir"
- printf "Could I replace it? (y/n)"
- read answer
- if test $answer != "y" ; then
- echo " Exiting. You must answer \"y\" to continue"
- exit 1;
- fi
- rm -rf "$outdir"
-fi
-
-mkdir -p "$outdir"
-
-for tarball in $tarball_prefix-$version-*.tar.bz2 ; do
- echo -n "Unpacking $tarball"
- tempdir=`mktemp -d "$outdir/$tarball_prefix-$version-XXXXXX"`
- checkpoint=`du $tarball | cut -f 1`
- checkpoint=$(($checkpoint / 15 + 1))
- tar -xf $tarball -C $tempdir --checkpoint=$checkpoint 2>&1 | awk '{ ORS="" ; printf "." ; fflush() }'
- echo
- mv $tempdir/*/* $outdir
- rm -rf $tempdir
-done
diff --git a/bin/ooo-update-po b/bin/ooo-update-po
deleted file mode 100755
index 71064692d..000000000
--- a/bin/ooo-update-po
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/bin/sh
-
-usage()
-{
- echo "This script updates the ooo-build-<lang>.po files"
- echo
- echo "Usage: ${0##*/} [--prefer-external] ooo_build_po_dir external_po_dir"
- echo
- echo "Parameters:"
- echo
- echo " ooo_build_po_dir path to the ooo-build/po direcotry"
- echo " external_po_dir directory with extrnal translations that"
- echo " will be mergen into the ooo-build PO files"
- echo " --prefer-external use this option to prefer the translated"
- echo " strings from the external PO files over"
- echo " any older ooo-build translation"
-}
-
-msgcat_options="--use-first"
-
-if test "$1" = "--help" -o $# -lt 2 ; then
- usage
- exit 0;
-fi
-
-if test "$1" = "--prefer-external" ; then
- prefer_external=yes
- shift
-else
- prefer_external=no
-fi
-
-ooo_build_po_dir="$1"
-external_po_dir="$2"
-
-if test ! -d "$ooo_build_po_dir" ; then
- echo "Error: Is not a direcotry: $ooo_build_po_dir"
- exit 1;
-fi
-
-if test ! -d "$external_po_dir" ; then
- echo "Error: Is not a direcotry: $external_po_dir"
- exit 1;
-fi
-
-for external_po in `find $external_po_dir -name "ooo-build*.po"` ; do
- # FIXME: openSUSE translation framework produces ooo-build.<lang>.po filenames instead of ooo-build-<lang>.po
- po_filename=`basename $external_po | sed -e "s|ooo-build\.|ooo-build-|" -e "s|_|-|g" `
- # FIXME: the en-US localization is useles
- test $po_filename = "ooo-build-en-US.po" && continue;
- # merge or add the PO file
- if test -f "$ooo_build_po_dir/$po_filename" ; then
- echo "Merging $po_filename..."
- po_tmp=`mktemp /tmp/ooo-update-po.XXXXXX`
- if test "$prefer_external" = "yes" ; then
- msgcat $msgcat_options "$external_po" "$ooo_build_po_dir/$po_filename" >$po_tmp
- else
- msgcat $msgcat_options "$ooo_build_po_dir/$po_filename" "$external_po" >$po_tmp
- fi
- # copy the updated file only when succeded
- if test "$?" = "0" ; then
- mv $po_tmp "$ooo_build_po_dir/$po_filename"
- chmod 644 "$ooo_build_po_dir/$po_filename"
- else
- echo "Warning: $po_filename was not updated"
- fi
- else
- echo "Adding $po_filename..."
- cp "$external_po" "$ooo_build_po_dir/$po_filename"
- fi
-done
diff --git a/bin/oosize b/bin/oosize
deleted file mode 100755
index 75f17bd3a..000000000
--- a/bin/oosize
+++ /dev/null
@@ -1,176 +0,0 @@
-#!/usr/bin/env perl
-
-my $ls_mode = 0;
-my $split_mode = 0;
-
-$type = 'unxlngi6.pro';
-$type = 'unxlngx6.pro' if ( `uname -i` =~ /\s*x86_64\s*/ );
-$human_readable = 0;
-
-
-sub syntax()
-{
- print "oosize [path]: generate object / code size statistics\n";
- print "Show the impact of individual source files/directories on the size\n";
- print "of the module.\n";
- print " --ls: breakdown individual directory\n";
- print " --split: breakdown individual file\n";
- print " [path]: breakdown whole built tree\n";
- print " --help: this message\n";
-}
-
-sub slurp_dir($)
-{
- my $dir;
- my $path = shift;
- opendir ($dir, $path) || die "Can't open $path: $!";
- my @entries;
- while (my $name = readdir ($dir)) {
- $name =~ /^\./ && next;
- push @entries, $name;
- }
- closedir ($dir);
- return @entries;
-}
-
-# read all file sizes into a hash
-sub collect_file_sizes($)
-{
- my $path = shift;
- my %sizes;
-
- for my $name (slurp_dir ($path)) {
- $name =~ m/.o$/ || next;
- my $tmp = '/tmp/foo.o';
- `cp $path/$name $tmp`;
- `strip $tmp`;
- my ($dev,$ino,$mode,$nlink,$uid,$gid,$rdev,$size,
- $atime,$mtime,$ctime,$blksize,$blocks)
- = stat($tmp);
- $name =~ s/\.o$//;
- $sizes{$name} = $size;
- }
-
- return \%sizes;
-}
-
-sub accumulate_sizes($$$);
-sub accumulate_sizes($$$)
-{
- my ($path, $dir_sizes, $obj_sizes) = @_;
-
- for my $name (slurp_dir ($path)) {
- $name =~ m/^$type/ && next;
- $name eq 'CVS' && next;
- $name eq 'workben' && next;
- $name eq 'qa' && next;
- $name eq 'test' && next;
- if (-d "$path/$name") {
- accumulate_sizes ("$path/$name", $dir_sizes, $obj_sizes);
- } else {
- if ($name =~ /^(.*)\.c(xx|)$/) {
- my $stem = $1;
- if (!defined $obj_sizes->{$stem}) {
- print "Strange no obj size for '$stem'\n";
- } else {
- my $key = $path;
- if ($ls_mode) { $key = $key . "/" . $name; } # hack !
- $dir_sizes->{$key} = 0 if (!defined ($dir_sizes->{$key}));
- $dir_sizes->{$key} += $obj_sizes->{$stem};
- }
- } else {
-# print "Unhandled name $name\n";
- }
- }
- }
-}
-
-# main ...
-my $path;
-
-for my $arg (@ARGV) {
- if ($arg eq '-h' || $arg eq '--help') {
- syntax();
- exit 0;
- } elsif ($arg eq '--ls') {
- $ls_mode = 1;
- } elsif ($arg eq '--split') {
- $split_mode = 1;
- } else {
- $path = $arg;
- }
-}
-
-
-# read all toplevel dirs
-my %dir_sizes;
-
-if ($split_mode) {
- my $fh;
- my %sizes;
- open ($fh, "objdump -x $path|") || die "Can't dump $path: $!";
- while (<$fh>) {
- m/^\s*[\d]+\s+(\S+)\s+([\d]+)\s+/ || next;
- my ($sym, $size) = ($1, $2);
- $sym =~ m/^\.debug/ && next;
- $size == 0 && next;
- $sym =~ s/^.*\.//; # remove section prefix
- $sizes{$sym} = $size;
- }
- close ($fh);
-
- print "Symbol sizes\n";
- for my $sym (sort { $sizes{$a} <=> $sizes{$b} } keys %sizes) {
- printf( "%10d %s\n", $sizes{$sym}, $sym );
- }
-
- exit 0;
-} elsif ($ls_mode) {
- if (!defined $path) {
- chomp($path = `pwd`);
- }
- my $module = $path;
- while ($module ne '') {
- -d "$module/$type" && last;
- $module =~ s|/[^/]*$||;
- }
- print "Found module '$module'\n";
- my $slodir = "$module/$type/slo";
- my $obj_sizes = collect_file_sizes ($slodir);
- accumulate_sizes ("$path", \%dir_sizes, $obj_sizes);
-} else {
-
-# for my $toplevel (slurp_dir($path)) {
-# my $slodir = "$path/$toplevel/$type/slo";
- my $slodir = "$path/$type/slo";
- print "slodir $slodir\n";
- -d $slodir || next;
- my $obj_sizes = collect_file_sizes ($slodir);
- accumulate_sizes ("$path", \%dir_sizes, $obj_sizes);
-# accumulate_sizes ("$path/$toplevel", \%dir_sizes, $obj_sizes);
-# }
-}
-
-my @order;
-if ($ls_mode) {
- print "Size breakdown\n";
- @order = sort { $dir_sizes{$a} <=> $dir_sizes{$b} } keys %dir_sizes;
-} else {
- print "Flat breakdown\n";
- @order = sort { $dir_sizes{$a} <=> $dir_sizes{$b} } keys %dir_sizes;
-}
-
-my $nicepath = $path;
-$nicepath =~ s#/*$##;
-$nicepath =~ s#^.*/##;
-
-for my $name (@order) {
- my $nicename = $name;
- $nicename =~ s#^$path#$nicepath#;
- my $size = $dir_sizes{$name};
- if ($human_readable) {
- $size = int($size/1024);
- $size .= 'Kb';
- }
- printf( "%10d %s\n", $size, $nicename );
-}
diff --git a/bin/oosmoketest.in b/bin/oosmoketest.in
deleted file mode 100755
index 2171cdeae..000000000
--- a/bin/oosmoketest.in
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/sh
-
-GUI="UNX"
-TEMP="`mktemp -q -d`"
-PERL="/usr/bin/perl"
-OOINSTBASE="@OOINSTBASE@"
-OOUSERDIR="$HOME/.openoffice.org/3"
-
-export GUI TEMP PERL OOINSTBASE OOUSERDIR
-
-test -d "$OOUSERDIR/user/basic" && cp -r "$OOINSTBASE/basis-link/presets/basic/Standard" "$OOUSERDIR/user/basic"
-
-cd "$OOINSTBASE/basis-link/smoketest"
-$PERL smoketest.pl
-rm -rf $TEMP
-
diff --git a/bin/oostripimpl.pl b/bin/oostripimpl.pl
deleted file mode 100755
index a104976bd..000000000
--- a/bin/oostripimpl.pl
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env perl
-
-my $libname = shift @ARGV;
-
-$libname || die "Need a library name";
-
-my $Symbols;
-
-open ($Symbols, "objdump -T $libname |") || die "Can't objdump $libname: $!";
-
-print "{\n\tlocal:\n";
-
-while (<$Symbols>) {
- m/(\S+)$/ || next;
-
- my $line = $1;
- $line =~ m/Impl/ || next;
-
- print "\t$line;\n";
-}
-
-close ($Symbols) || die "Can't close: $!";
-
-print "};\n";
diff --git a/bin/ootestapi.in b/bin/ootestapi.in
deleted file mode 100755
index 9b2ec1137..000000000
--- a/bin/ootestapi.in
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh
-
-JARS=@OOINSTBASE@/basis-link/program/classes
-LD_LIBRARY_PATH="@OOINSTBASE@/basis-link/program:$LD_LIBRARY_PATH"
-CLASSPATH=@OOINSTBASE@/basis-link/program/classes/OOoRunnerLight.jar:$JARS/ridl
-.jar:$JARS/unoil.jar:$JARS/sandbox.jar:$JARS/jurt.jar:$JARS/juh.jar:%JARS/java_u
-no.jar:@LIBDIRBASE@/basis-link/openoffice/qadevOOo/tests/java
-
-export LD_LIBRARY_PATH
-java -Xmx120m -cp $CLASSPATH org.openoffice.Runner "$@"
diff --git a/bin/ootesttool.in b/bin/ootesttool.in
deleted file mode 100755
index 6ad7a59c7..000000000
--- a/bin/ootesttool.in
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-
-LD_LIBRARY_PATH="@OOINSTBASE@/basis-link/program:$LD_LIBRARY_PATH"
-
-exec @OOINSTBASE@/basis-link/program/testtool.bin "$@"
diff --git a/bin/ootool.in b/bin/ootool.in
deleted file mode 100755
index 38a756e89..000000000
--- a/bin/ootool.in
+++ /dev/null
@@ -1,152 +0,0 @@
-#!/usr/bin/env perl
-
-# install dir in /
-my $installdir='@OOINSTBASE@';
-# user dir in ~
-my $home_dir='.ooo-2.0';
-# user-dict name
-my $custom_dict='corporate.dic';
-# what type of action to perform
-my $mode;
-# locale - if any
-my $locale = '';
-# whether to install to users's setup
-my $install_user = 0;
-
-
-# debugging
-if ($installdir =~ /^\@/) {
- $installdir = $ENV{OOINSTBASE};
- $installdir = '/usr/lib/ooo-2.1' if (!$installdir);
-}
-
-sub localize_path($)
-{
- my $path = shift;
- if ($locale ne '') {
- $path .= "/$locale";
- }
- return $path;
-}
-
-sub ensure_path($)
-{ # hack for now:
- my $path = shift;
- `mkdir -p $path`;
-}
-
-sub print_help_and_exit()
-{
- print <<"EOF";
-Usage: ootool [OPTION] [MODE] [FILE]...
-Perform file based lock-down tasks on OpenOffice.org:
-Options:
- --user install in the user\'s setup not the system
- --locale=<br-PT> make this install locale-specific: eg. to Brazil
- --help print this help
-Modes:
- --template install template(s)
- --user-dict add words from a (text based) user-dictionary
-EOF
- ;
- exit (0);
-}
-
-my @files = ();
-
-for my $arg (@ARGV) {
-# flags
- if ($arg eq '--user') {
- $install_user = 1;
- } elsif ($arg =~ m/--locale=(.*)/) {
- $locale = $1;
- }
-# modes
- elsif ($arg eq '--template') {
- $mode = 'template';
- } elsif ($arg eq '--user-dict') {
- $mode = 'user-dict';
- }
-# help
- elsif ($arg eq '-h' || $arg eq '--help') {
- print_help_and_exit ();
- }
-# file arguments
- else {
- push @files, $arg;
- }
-}
-
-if (!defined $mode) {
- print "Must use a supported mode\n";
- print_help_and_exit();
-}
-
-@files || print_help_and_exit();
-
-my $dest;
-
-if ($install_user) {
- print STDERR "Warning: hard-coded home path\n";
- $dest = $ENV{HOME} . '/' . $home_dir . "/user";
-} else {
- $dest = $installdir . "/share";
-}
-
-# Template mgmt
-if ($mode eq 'template') {
- $dest .= "/template";
- $dest = localize_path ($dest);
- ensure_path ($dest);
-
- for my $file (@files) {
- `cp $file $dest` && die "Failed to copy $file to $dest: $!";
- }
-}
-elsif ($mode eq 'user-dict') {
- my @keys;
- $dest .= "/wordbook";
- ensure_path ($dest);
- $dest .= "/$custom_dict";
- if ($locale ne '') {
- print "Warning: no localized user-dict support yet";
- }
- my @lines = ();
- my $fh;
- for my $file (@files) {
- if ($file eq '-') {
- $fh = STDIN;
- } else {
- open ($fh, $file) || die "Can't open $file\n";
- }
- print "Reading words from $file\n";
- push @lines, <$fh>;
- close ($fh) if ($file ne '-');
- undef $fh;
- }
- if (!-f $dest) {
- print "Creating $custom_dict\n";
- open ($fh, ">", "$dest") || die "Can't create $dest\n";
- print $fh <<"EOF";
-OOoDICT1
-lang: <none>
-type: positive
----
-EOF
-;
- } else {
- print "Appending to $custom_dict\n";
- open ($fh, ">>", "$dest") || die "Can't append to $dest\n";
- }
-# Of course this may create duplicates but OO.o should
-# elide them on re-writing the file.
- for my $line (@lines) {
- print $fh "$line";
- }
- close ($fh);
-}
-else {
- die "Unknown mode '$mode'";
-}
-
-
diff --git a/bin/ootouch b/bin/ootouch
deleted file mode 100755
index 9a9d2e889..000000000
--- a/bin/ootouch
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env perl
-
-if (@ARGV < 1) {
- print "ootouch [--files <regex: .*\\.cxx>] <strings to find> [...]\n";
- exit (1);
-}
-
-my @regexps = ();
-my $arg;
-$filter = '';
-while ($arg = shift @ARGV) {
- if ($arg eq '--files') {
- $filter = shift @ARGV;
- } else {
- push (@regexps, $arg);
- }
-}
-
-if (!$filter) {
- $filter = ".*\\.cxx";
-}
-
-sub touch_dir
-{
- my $path = shift;
- my $regexp = shift;
- my $DIR;
- my $name;
-
- opendir ($DIR, $path) || die "Lost $path: $!";
-
- while ($name = readdir ($DIR)) {
- my $full_path = "$path/$name";
-
- ($name eq '.' || $name eq '..') && next;
- -d $full_path && touch_dir ($full_path, $regexp);
-
- $name =~ m/$filter/ || next;
-
- my $file;
- open ($file, $full_path) || die "Can't open $full_path: $!";
- while (<$file>) {
- if (/$regexp/) {
- print "Touch: '$full_path'\n";
- my $now = time;
- utime $now, $now, $full_path;
- last;
- }
- }
- close ($file);
- }
-
- closedir ($DIR);
-}
-
-my $cwd;
-chomp ($cwd = `pwd`);
-
-my $str;
-printf "Touching '$filter' files, ...\n";
-for $str (@regexps) {
- touch_dir ($cwd, $str);
-}
diff --git a/bin/openoffice-xlate-lang b/bin/openoffice-xlate-lang
deleted file mode 100755
index 81b4c9fd8..000000000
--- a/bin/openoffice-xlate-lang
+++ /dev/null
@@ -1,210 +0,0 @@
-#!/usr/bin/env perl
-
-use strict;
-
-my $progname=$0; $progname = $& if $progname =~ m,[^/]+$,;
-
-my %PREFIX; # used to search for prefix numbers
-my %ISOCODE; # used to search for iso codes
-my %LANGUAGE; # used to search for language names
-
-#=======================================================================
-# initialisation code - stuff the DATA into the CODES hash
-#=======================================================================
-sub init {
-
- my $prefix;
- my $code;
- my $name;
-
-
- while (<DATA>)
- {
- next unless /\S/;
- chop;
- ($prefix, $code, $name ) = split(/:/, $_, 3);
- $PREFIX{$prefix} = $prefix;
- $PREFIX{$code} = $prefix;
- $PREFIX{$name} = $prefix;
-
- $ISOCODE{$prefix} = $code;
- $ISOCODE{$code} = $code;
- $ISOCODE{$name} = $code;
-
- $LANGUAGE{$prefix} = $name;
- $LANGUAGE{$code} = $name;
- $LANGUAGE{$name} = $name;
- }
-}
-
-
-#=======================================================================
-# usage - error message
-#=======================================================================
-sub usage {
- my $errmsg = shift;
- my $errcode = shift;
- print STDERR "$progname: $errmsg\n" if $errmsg;
- print STDERR "$progname: Converts between prefix codes, iso codes and langnames\n";
- print STDERR " Usage: $progname (-i|-l|-p|-h) <code>|all\n";
- print STDERR " -i <code>: convert prefix to iso code (ex: 03 -> pt)\n";
- print STDERR " -l <code>: convert iso code to language name (ex: pt -> portuguese)\n";
- print STDERR " -p <code>: convert iso code to prefix (ex: pt -> 03)\n";
- print STDERR " the code can either be an iso code, a prefix or even a language name\n";
- print STDERR " The special code \"all\" asks for all possible values.\n\n";
- print STDERR " -h : print this help\n";
- exit $errcode;
-}
-
-#=======================================================================
-# main -
-#=======================================================================
-init();
-
-my ($LanguageCode, $LanguageMap);
-
-while ($ARGV[0] =~ /^-/) {
- $_ = shift;
- if (m/^-i/) {
- $LanguageMap = \%ISOCODE;
- }
- elsif (m/^-l/) {
- $LanguageMap = \%LANGUAGE;
- }
- elsif (m/^-p/) {
- $LanguageMap = \%PREFIX;
- }
- elsif (m/^-h/) {
- usage("",0);
- }
- else {
- usage ("unknown option $_",1);
- }
-}
-
-usage ("no operation specified on command line",1)
- if (!$LanguageMap);
-
-usage ("no language code specified on command line",1)
- if (!($LanguageCode = shift));
-
-if ($LanguageCode =~ (m/^all$/)) {
- # Asked for all codes
- my $old="";
- foreach my $key (sort values %$LanguageMap) {
- if ($key ne $old) {
- print "$key ";
- $old=$key;
- }
- }
- print "\n";
- exit 0;
-}
-
-usage ("no mapping found for $LanguageCode\n",1)
- if (!(%$LanguageMap->{$LanguageCode}));
-
-print $LanguageMap->{$LanguageCode}, "\n";
-
-1;
-
-# keep third column names here with openoffice-dir/share/*/<long lang name>/
-
-__DATA__
-:be-BY:belarussian
-:bg:bulgarian
-:bn:bengali
-:bs:bosnian
-:en-GB:english_british
-:gu:gujarati
-:hr:croatian
-:km:khmer
-:ku:kurdish
-:pa-IN:punjabi
-:rw:kinarwanda
-:xh:xhosa
-:lt:lithuanian
-:ne:nepali
-:vi:vietnamese
-:ns:northern_sotho
-:ss:swazi
-:sr:serbian
-:ve:venda
-:ts:tsonga
-:st:southern_sotho
-:tn:tswana
-:br:breton
-:ga:gaelic
-:gd:scottish_gaelic
-:th:thai
-:hi:hindi
-:bs-BA:bosnian
-:en-ZA:english_southafrican
-:mk:macedonian
-:as:assamese
-:ml:malayalam
-:mr:marathi
-:or:oriya
-:ur:urdu
-:fa:farsi
-:lv:latvian
-:nr:ndebele
-:ne:nepalese
-:sh:serbian
-:te:telugu
-:ta:tamil
-:tg:tajik
-:ka:georgian
-:eo:esperanto
-:uk:ukrainian
-:kk:kazahk
-:dz:dzongkha
-:kn:kannada
-:gl:galician
-:uz:uzbek
-:oc:occitan
-:ro:romanian
-:eu:basque
-:mn:mongolian
-:om:oromo
-:bo:tibetan
-:ast:asturian
-:is:icelandic
-:ug:uighur
-:si:sinhala
-:id:indonesian
-:my:burmese
-01:en-US:english_american
-03:pt:portuguese
-07:ru:russian
-26:ns:northernsotho
-27:af:afrikaans
-28:zu:zulu
-30:el:greek
-31:nl:dutch
-33:fr:french
-34:es:spanish
-35:fi:finnish
-36:hu:hungarian
-37:ca:catalan
-39:it:italian
-42:cs:czech
-43:sk:slovak
-45:da:danish
-46:sv:swedish
-47:nb:norwegian
-48:pl:polish
-49:de:german
-50:sl:slovenian
-53:cy:welsh
-55:pt-BR:portuguese_brazilian
-77:et:estonian
-79:nn:norwegian_nynorsk
-81:ja:japanese
-82:ko:korean
-86:zh-CN:chinese_simplified
-88:zh-TW:chinese_traditional
-90:tr:turkish
-91:hi:hindi
-96:ar:arabic
-97:he:hebrew
diff --git a/bin/owner b/bin/owner
deleted file mode 100755
index 3fa9761d7..000000000
--- a/bin/owner
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env perl
-
-my @files;
-my ($help, $plain, $aggregate);
-
-for $a (@ARGV) {
- if ($a eq '--help' || $a eq '/?') {
- $help = 1;
- } elsif ($a eq '--plain' || $a eq '-p') {
- $plain = 1;
- } elsif ($a eq '--aggregate' || $a eq '-a') {
- $aggregate = 1;
- } else {
- push @files, $a;
- }
-}
-
-$help = 1 if !@files;
-
-if ($help) {
- print "owner [--plain(-p)] [--aggregate(-a)] <files...>\n";
- print " --plain don't append \@openoffice.org to addresses\n";
- print " --aggregate uniquify names\n";
- exit 0;
-}
-
-system('cvs --version > /dev/null 2>&1') && die "Can't find cvs tool";
-
-# Synchronous - but so is life.
-sub get_owner($$)
-{
- my ($path, $file) = @_;
- my $owner = '<unknown>';
- my $status = `cd $path ; cvs -z3 status -v $file`;
-
- defined $status || die "couldn't execute cvs";
-
- if ($status =~ m/ADMIN_FILE_OWNER_(\S+)/) {
- $owner = lc($1);
- }
-
- return $owner;
-}
-
-my %owners = ();
-
-for (@files) {
- -f $_ || die "$! : $_";
- m/(.*)\/([^\/]+)/ || die "Odd filename $_";
- my $path = $1;
- my $file = $2;
- my $owner = get_owner ($path, $file);
-
- if ($aggregate) {
- defined $owners{$owner} && next;
- $owners{$owner} = 1;
- } else {
- print "$path/$file : ";
- }
- print "$owner";
- $plain || print '@openoffice.org';
- print "\n";
-}
diff --git a/bin/package-lang-win32 b/bin/package-lang-win32
deleted file mode 100755
index c7e50b346..000000000
--- a/bin/package-lang-win32
+++ /dev/null
@@ -1,160 +0,0 @@
-#!/usr/bin/env perl
-
-#
-# A quick hack - hopefully will all go away shortly.
-#
-
-my $NSisPath = '/cygdrive/c/Program\\ Files/NSIS/makensis.exe';
-
-sub write_nsis_script($$$$)
-{
- my ($fname, $pack_name, $lang, $file_list_name) = @_;
- my $Script;
-
- open $Script, ">$fname" || die "Can't open '$fname': $!";
-
- print $Script <<"EOS";
-
- !include "MUI.nsh"
- Name "OpenOffice.org lang-pack $lang"
- OutFile "$pack_name"
-
- !insertmacro MUI_PAGE_DIRECTORY
- !insertmacro MUI_UNPAGE_CONFIRM
- !insertmacro MUI_UNPAGE_INSTFILES
- !define MUI_ABORTWARNING
- !insertmacro MUI_LANGUAGE "English"
-
- Page instfiles
- Section "InstallFiles" SecDummy
- SetOutPath "\$INSTDIR"
- File /r program
- File /nonfatal /r share
- File /nonfatal /r user
- ExecWait "\$INSTDIR\\program\\setofficelang.exe -f -a $lang"
- WriteRegStr HKCU "Software\\OpenOffice Lang-Pack $lang" "" \$INSTDIR
- CreateDirectory "\$INSTDIR\\$lang"
- WriteUninstaller "\$INSTDIR\\$lang\\Uninstall.exe"
-EOS
- print $Script <<'EOS';
- SectionEnd
- Function .onInit
- ReadRegStr $INSTDIR HKCU "Software\OpenOffice.org\UNO\InstallPath" ""
- Push $INSTDIR
- Call GetParent
- Pop $INSTDIR
- FunctionEnd
-EOS
- print $Script <<"EOS";
- Section "Uninstall"
- Delete "\$INSTDIR\\$lang\\Uninstall.exe"
- RMDir "\$INSTDIR\\$lang"
- DeleteRegKey /ifempty HKCU "Software\\OpenOffice Lang-Pack $lang"
- SectionEnd
-EOS
- print $Script <<'EOS';
- ; input, top of stack (e.g. C:\Program Files\Poop)
- ; output, top of stack (replaces, with e.g. C:\Program Files)
- ; modifies no other variables.
- ;
- ; Usage:
- ; Push "C:\Program Files\Directory\Whatever"
- ; Call GetParent
- ; Pop $R0
- ; ; at this point $R0 will equal "C:\Program Files\Directory"
-
- Function GetParent
-
- Exch $R0
- Push $R1
- Push $R2
- Push $R3
-
- StrCpy $R1 0
- StrLen $R2 $R0
-
- loop:
- IntOp $R1 $R1 + 1
- IntCmp $R1 $R2 get 0 get
- StrCpy $R3 $R0 1 -$R1
-EOS
- print $Script <<"EOS";
- StrCmp \$R3 "\\" get
-EOS
- print $Script <<'EOS';
- Goto loop
-
- get:
- StrCpy $R0 $R0 -$R1
-
- Pop $R3
- Pop $R2
- Pop $R1
- Exch $R0
-
- FunctionEnd
-EOS
- close ($Script);
-}
-
-sub package_one_lang($)
-{
- my $lang = shift;
- my $lang_iso = `$Xlate -i $lang`;
- chomp ($lang_iso);
- $lang_iso =~ s/\r//;
-
- print "Packaging lang $lang_iso ($lang)\n";
-
- $lang_install = $setup_vars{BUILDDIR} . "/lang-install";
- print " removing $lang_install\n";
- `rm -Rf $lang_install`;
- `mkdir -p $lang_install`;
-
- print " unpacking files\n";
- `package-lang --lang=$lang --OOINSTDIR=$lang_install`;
-
- my $lang_pack_name = "LangPack-$lang_iso.exe";
-
- print " writing nsis script\n";
- write_nsis_script("$lang_install/script", $lang_pack_name, $lang_iso,
- $setup_vars{'BUILDDIR'} . '/lang_' . $lang_iso . '_list.txt');
-
- if (!-d "$lang_install/program" ||
- !-d "$lang_install/share" ||
- !-d "$lang_install/user") {
- print "Install failed; lang $lang_iso not supported\n";
- return;
- }
-
- print " invoking nsis\n";
- `cd $lang_install; $NSisPath script`;
- -f "$lang_install/$lang_pack_name" || die "Failed to make $lang_install/$lang_pack_name: $!";
-
- print " storing output\n";
- `mv $lang_install/$lang_pack_name $lang_output`;
-}
-
-print "Sucking env from setup\n";
-open ($Vars, "bash -c '. ./setup ; set'|") || die "Can't find setup: $!";
-while (<$Vars>) {
- /([^=]*)=(.*)/ || next;
- $setup_vars{$1} = $2;
-}
-close ($Vars);
-
-$Xlate = $setup_vars{'TOOLSDIR'}. "/bin/openoffice-xlate-lang";
-$lang_output = $setup_vars{BUILDDIR} . "/lang-packs";
-`mkdir -p $lang_output`;
-
-my @Langs = split (' ', `$Xlate -p all`);
-
-for $a (@ARGV) {
- if ($a =~ m/--lang=(\S+)/) {
- @Langs = ( $1 );
- }
-}
-
-for $lang (@Langs) {
- package_one_lang ($lang);
-}
diff --git a/bin/package-ooo b/bin/package-ooo
deleted file mode 100755
index d973b83f0..000000000
--- a/bin/package-ooo
+++ /dev/null
@@ -1,1036 +0,0 @@
-#!/bin/sh
-
-# Based on docs/setup.txt
-
-#
-# See setup for user tweakables.
-#
-. ./setup
-. $OOBUILDDIR/*.[sS]et.sh
-. ./setup
-
-export LC_ALL='C';
-
-if test "z$PIECE" != "z"; then
- echo "install $PIECE"
- SRCDIR="$OOBUILDDIR/solver/$UPD/$INPATH"
- mkdir -p $OOINSTDIR/solenv
- cp -R $OOBUILDDIR/solenv/* $OOINSTDIR/solenv
-
- DEST=$OOINSTDIR/solver
- mkdir -p $DEST
- cp -R $SRCDIR/* $DEST
-
-# ln -sf $OOBUILDDIR/solver/$UPD/$INPATH .
- if test -f "$TOOLSDIR/bin/piece/install-$PIECE"; then
- echo "$PIECE specific install"
- . $TOOLSDIR/bin/piece/install-$PIECE || exit 1
- fi
-
- find $OOINSTDIR/solenv $OOINSTDIR/solver -depth -name ".svn" -type d -exec rm -rf {} \;
- find $OOINSTDIR/solenv $OOINSTDIR/solver -name "*.orig" -exec rm -rf {} \;
- find $OOINSTDIR/solenv $OOINSTDIR/solver -type f -exec chmod go-w {} \;
-
- exit 0;
-fi
-
-echo "Cleaning $OOINSTDIR";
-rm -Rf $OOINSTDIR;
-set -e
-
-mkdir -p $PREFIX/bin
-
-sed -e "s|@OOINSTBASE@|$OOINSTBASE|g" $TOOLSDIR/bin/ootool.in \
- >| "$OOBUILDDIR/ootool$BINSUFFIX" || exit 1;
-
-sed -e "s|@OOINSTBASE@|$OOINSTBASE|g" -e "s|@LIBDIRBASE@|$LIBDIRBASE|g" $TOOLSDIR/bin/ootestapi.in \
- >| "$OOBUILDDIR/ootestapi$BINSUFFIX" || exit 1;
-
-sed -e "s|@OOINSTBASE@|$OOINSTBASE|g" $TOOLSDIR/bin/ootesttool.in \
- >| "$OOBUILDDIR/ootesttool$BINSUFFIX" || exit 1;
-
-sed -e "s|@OOINSTBASE@|$OOINSTBASE|g" $TOOLSDIR/bin/oosmoketest.in \
- >| "$OOBUILDDIR/oosmoketest$BINSUFFIX" || exit 1;
-
-create_qstart_wrapper()
-{
- cat <<EOT >$1
-#!/bin/sh
-$OOINSTBASE/program/soffice $2 "\$@"
-EOT
- chmod 755 $1
-}
-
-create_unopkg_wrapper()
-{
- cat <<EOT >$1
-#!/bin/sh
-$OOINSTBASE/program/unopkg "\$@"
-EOT
- chmod 755 $1
-}
-
-install_script()
-{
- cp -f $1 $2
- chmod +x $2
-}
-
-# directory for man
-mkdir -p $MANDIR/man1
-
-# startup wrappers
-for app in calc draw impress math web writer base; do
- create_qstart_wrapper "$PREFIX/bin/lo${app}${BINSUFFIX}" "-${app}" || exit 1;
- if test "$COMPAT_OOWRAPPERS" = "YES" ; then
- create_qstart_wrapper "$PREFIX/bin/oo${app}${BINSUFFIX}" "-${app}" || exit 1;
- fi
-done
-create_qstart_wrapper "$PREFIX/bin/lofromtemplate${BINSUFFIX}" ".uno:NewDoc" || exit 1;
-create_qstart_wrapper "$PREFIX/bin/libreoffice${BINSUFFIX}" "" || exit 1;
-create_unopkg_wrapper "$PREFIX/bin/unopkg${BINSUFFIX}" "" || exit 1;
-if test "$COMPAT_OOWRAPPERS" = "YES" ; then
- create_qstart_wrapper "$PREFIX/bin/oofromtemplate${BINSUFFIX}" ".uno:NewDoc" || exit 1;
- create_qstart_wrapper "$PREFIX/bin/ooffice${BINSUFFIX}" "" || exit 1;
-fi
-if test "z$VENDORNAME" = "zNovell" -o "z$VENDORNAME" = "zPLD" \
- -o "z$VENDORNAME" = "zDebian" \
- -o "z$VENDORNAME" = "zMandriva"; then
- for app in calc draw impress math web writer base fromtemplate ; do
- echo ".so man1/libreoffice$BINSUFFIX.1" >| $MANDIR/man1/lo${app}$BINSUFFIX.1;
- if test "$COMPAT_OOWRAPPERS" = "YES" ; then
- echo ".so man1/libreoffice$BINSUFFIX.1" >| $MANDIR/man1/oo${app}$BINSUFFIX.1;
- fi
- done
-fi
-
-# /usr/bin/ooffice symlink is necessary by java UNO components to find
-# the UNO installation using $PATH, see
-# http://udk.openoffice.org/common/man/spec/transparentofficecomponents.html
-# Note: if you want to support parallel installation of more OOo versions
-# you cannot include this link directly into the package
-# For example, the Novell package mark this symlink as %ghost
-# and update it in %post and %postun
-ln -sf $OOINSTBASE/program/soffice$BINSUFFIX $PREFIX/bin/soffice$BINSUFFIX
-
-# no man-page so ...
-if test "z$VENDORNAME" != "zDebian" -a "z$VENDORNAME" != "zMandriva"; then
- install_script $TOOLSDIR/bin/ooconfig $PREFIX/bin/ooconfig
- install_script $OOBUILDDIR/ootool$BINSUFFIX $PREFIX/bin/ootool$BINSUFFIX
-fi
-
-if test "z$VENDORNAME" = "zMandriva"; then
- install_script $TOOLSDIR/bin/ooconfig $PREFIX/bin/ooconfig$BINSUFFIX
- install_script $OOBUILDDIR/ootool$BINSUFFIX $PREFIX/bin/ootool$BINSUFFIX
-fi
-
-# create bash completion
-mkdir -p $OODESTDIR/etc/bash_completion.d
-$TOOLSDIR/bin/generate-bash-completion --binsuffix="$BINSUFFIX" $TOOLSDIR/bin/bash-completion.in $OODESTDIR/etc/bash_completion.d/libreoffice${BINSUFFIX}.sh
-if test "$COMPAT_OOWRAPPERS" = "YES" ; then
- $TOOLSDIR/bin/generate-bash-completion --compat-oowrappers --binsuffix="$BINSUFFIX" $TOOLSDIR/bin/bash-completion.in $OODESTDIR/etc/bash_completion.d/ooffice${BINSUFFIX}.sh
-fi
-
-if test "z$VENDORNAME" != "zRedHat"; then
- mkdir -p $MANDIR/man1
- echo "Generating man pages ...";
- # libreoffice
- man_page_in=$TOOLSDIR/man/libreoffice.1.in
- # use the distro specific man page if available
- if test -f $TOOLSDIR/man/libreoffice.1_${DISTRO%%-*}.in ; then
- man_page_in=$TOOLSDIR/man/libreoffice.1_${DISTRO%%-*}.in
- fi
- sed -e "s|@BINSUFFIX@|$BINSUFFIX|g" $man_page_in \
- >| "$MANDIR/man1/libreoffice$BINSUFFIX.1" || exit 1;
- # unopkg
- sed -e "s|@BINSUFFIX@|$BINSUFFIX|g" $TOOLSDIR/man/unopkg.1.in \
- >| "$MANDIR/man1/unopkg$BINSUFFIX.1" || exit 1;
-fi
-
-mkdir -p $OOINSTDIR/basis$VERSION/program
-
-echo "Building $OOINSTDIR/basis$VERSION/program/java-set-classpath";
-sed -e "s|@OOINSTBASE@|$OOINSTBASE|g" $TOOLSDIR/bin/java-set-classpath.in >| "$OOBUILDDIR/java-set-classpath" || exit 1;
-install_script $OOBUILDDIR/java-set-classpath $OOINSTDIR/basis$VERSION/program/java-set-classpath
-
-if test "z`uname -s`" != "zOpenBSD" && test "`uname -i`" = "i386"; then
- echo "Building $OOINSTDIR/basis$VERSION/program/pyunorc-update64";
- sed -e "s|@OOINSTBASE@|$OOINSTBASE|g" $TOOLSDIR/bin/pyunorc-update64.in >| "$OOBUILDDIR/pyunorc-update64" || exit 1;
- install_script $OOBUILDDIR/pyunorc-update64 $OOINSTDIR/basis$VERSION/program/pyunorc-update64
-fi
-
-echo "Installing extra en-US templates ..."
-mkdir -p $OOINSTDIR/basis$VERSION/share/template/en-US/forms
-cp $TOOLSDIR/templates/resume.ott $OOINSTDIR/basis$VERSION/share/template/en-US/forms
-mkdir -p $OOINSTDIR/basis$VERSION/share/template/en-US/officorr
-cp $TOOLSDIR/templates/project-proposal.ott $OOINSTDIR/basis$VERSION/share/template/en-US/officorr
-
-echo "Installing system files ...";
-case $VENDORNAME in
- RedHat)
- # Install .desktop files for Red Hat distributions
- mkdir -p $PREFIX/share/applications
- for i in openoffice-printeradmin openoffice-setup redhat-drawing \
- redhat-math redhat-presentations redhat-word-processor redhat-spreadsheet; do
- cp -f /usr/share/desktop-menu-patches/$i.desktop $PREFIX/share/applications/$i.desktop
- echo "StartupNotify=true" >> $PREFIX/share/applications/$i.desktop
- done
-
- # Icons are copied into the local install directory from the specfile...
- mkdir -p $PREFIX/share/pixmaps
- cp $TOOLSDIR/desktop/0*.png $PREFIX/share/pixmaps
- cp $TOOLSDIR/desktop/5*.png $PREFIX/share/pixmaps
- cp $TOOLSDIR/desktop/ooo_*.png $PREFIX/share/pixmaps
- ;;
- Debian)
- # Menu icons are currently made in debian/rules
- ;;
- Pardus)
- # Icons and menu stuff is handled in actions.py
- ;;
- *)
- mkdir -p $PREFIX/share/applications
- cd $TOOLSDIR/desktop
- for source in *.desktop ; do
- dest=`echo $source | sed "s|.desktop\$||"`
- dest="$PREFIX/share/applications/$dest$BINSUFFIX.desktop"
- add_version=
- test "z$VENDORNAME" = "zNovell" -a "z$BINSUFFIX" != "z" && add_version=" ($VERSION)" || :
- sed -e "s|\(^Name.*\)\$|\1$add_version|
- s|\(^Comment.*\)\$|\1$add_version|" $source >| "$dest" || exit 1;
- done
-
- # icons
- icondir=$PREFIXBASE/share/icons/hicolor
- for size in 16x16 22x22 24x24 32x32 48x48 scalable ; do
- mkdir -p $OODESTDIR$icondir/$size/apps
- cd $TOOLSDIR/desktop/$size/
- # note that the scalable directory includes .svg icons
- for source in ooo-*.[ps][nv]g ; do
- suffix=`echo $source | sed "s|^.*\(\.[ps][nv]g\)\$|\1|"`
- dest=`echo $source | sed "s|$suffix\$||"`
- dest="$OODESTDIR$icondir/$size/apps/$dest$BINSUFFIX$suffix"
- cp $source "$dest" || exit 1;
- done
- done
-
- # create symlinks below share/pixmaps to keep the backward compatibility
- pixmapsdir=$PREFIX/share/pixmaps
- mkdir -p $pixmapsdir
- cd $OODESTDIR$icondir/48x48/apps
- for icon in ooo-*.png ; do
- ln -sf $icondir/48x48/apps/$icon $pixmapsdir
- done
-
- # shared MIME info
- mkdir -p $PREFIX/share/mime/packages
- cd $TOOLSDIR/desktop
- cp openoffice.xml $PREFIX/share/mime/packages
- if test "z$RUN_POST_INSTALL_SCRIPTS" = "zyes" &&
- which update-mime-database >/dev/null 2>&1 ; then
- update-mime-database /usr/share/mime || :
- fi
-
- # extra MIME type icons
- if test "z$VENDORNAME" = "zNovell" ; then
- icondir=/usr/share/icons/hicolor
- for size in 16x16 22x22 24x24 32x32 48x48 ; do
- mkdir -p $OODESTDIR/$icondir/$size/mimetypes
- # note that the scalable directory includes .svg icons
- cp $TOOLSDIR/desktop/mimetypes/$size/*.[ps][nv]g \
- $OODESTDIR/$icondir/$size/mimetypes/
- done
- fi
-
- if test "z$VENDORNAME" = "zNovell" ; then
- # add GNOME MIME info and the application registry the old way
- # it is necessary for NLD9
- mkdir -p $OODESTDIR/opt/gnome/share/application-registry
- cp $TOOLSDIR/desktop/openoffice.applications \
- $OODESTDIR/opt/gnome/share/application-registry/openoffice$BINSUFFIX.applications || exit 1;
- #
- mkdir -p $OODESTDIR/opt/gnome/share/mime-info
- cp $TOOLSDIR/desktop/openoffice.mime \
- $OODESTDIR/opt/gnome/share/mime-info/openoffice$BINSUFFIX.mime || exit 1;
- cp $TOOLSDIR/desktop/openoffice.keys \
- $OODESTDIR/opt/gnome/share/mime-info/openoffice$BINSUFFIX.keys || exit 1;
- if test "z$BINSUFFIX" != "z" ; then
- cp $TOOLSDIR/desktop/openoffice-extra.keys \
- $OODESTDIR/opt/gnome/share/mime-info/openoffice$BINSUFFIX-extra.keys || exit 1;
- fi
- fi
- ;;
-esac
-
-# Disable odk stuff for now
-if test "disable" = "this"; then
- echo "Installing the ODK";
- ODK_SRC=$OOBUILDDIR/odk$UPD;
- ODK_INCLUDE=$OOINSTDIR/include
- echo " unzip";
- rm -Rf $ODK_SRC
- tar -C $OOBUILDDIR -xzf $OOBUILDDIR/solver/$UPD/$INPATH/bin/odk$UPD.tar.gz;
- echo " setup $OOINSTDIR";
- mkdir -p $ODK_INCLUDE
- mkdir -p $OOINSTDIR
- mkdir -p $OOINSTDIR/utils
- mkdir -p $OOINSTDIR/program
- mkdir -p $OOINSTDIR/idl
- mkdir -p $OOINSTDIR/xml
- mkdir -p $OOINSTDIR/share/doc/openoffice$BINSUFFIX
- mkdir -p $LIBDIRBASE/pkgconfig
- echo " re-arrange files";
- cp -a $ODK_SRC/include/* $ODK_INCLUDE
- cp -a $ODK_SRC/linux/lib/* $OOINSTDIR/program
- cp -a $ODK_SRC/linux/bin/* $OOINSTDIR/utils
- cp -a $ODK_SRC/idl/* $OOINSTDIR/idl
- cp -a $ODK_SRC/docs/* $OOINSTDIR/share/doc/openoffice$BINSUFFIX
- cp -a $ODK_SRC/examples $OOINSTDIR/share/doc/openoffice$BINSUFFIX
- cp -a $ODK_SRC/xml/* $OOINSTDIR/xml
- echo " create pkgconfig file";
- echo "
-libdir=$OOINSTBASE/program
-includedir=$OOINSTBASE/include
-idlinclude=$OOINSTBASE/idl
-xmlinclude=$OOINSTBASE/xml
-toolsdir=$OOINSTBASE/utils
-
-Name: openoffice$BINSUFFIX
-Description: The OpenOffice.org infrastructure
-Version: $VERSION
-Libs: -L\${libdir} -lprot_uno_uno
-Cflags: -I\${includeddir}" > $LIBDIRBASE/pkgconfig/openoffice$BINSUFFIX.pc
-fi
-
-export DISPLAY=''; # clobber;
-echo "Execute ooinstall ...";
-
-cd $TOOLSDIR/bin
-
-./ooinstall $OOINSTDIR || exit 1;
-##cp -ra $OODESTDIR $OODESTDIR.1 || exit 1
-##rm -r $OODESTDIR
-##cp -ra $OODESTDIR.1 $OODESTDIR || exit 1
-echo "Cleaning up ...";
-
-# No idea what these files are good for (?)
-# they don't appear in the RPM file lists.
-rm -Rf $OOINSTDIR/share/uno_packages/cache/*
-
-# FIXME: we need to packagethe extensions some way
-if test -n "$OODESTDIR" ; then
- rm -f $OOINSTDIR/share/extension/install/scsolver.uno.oxt
-fi
-
-echo "Done";
-
-remove_help_localization()
-{
- lang=$1
-
- # nothing to be done if the localization is en-US if it does not exist
- # or if it is already removed
- test "$lang" = "en-US" -o \
- ! -e $OOINSTDIR/help/$lang -o \
- -L $OOINSTDIR/help/$lang && return;
-
- echo "... remove \"$lang\""
-
- rm -rf $OOINSTDIR/help/$lang
- grep -v "$OOINSTBASE/help/$lang" $OODESTDIR/gid_Module_Root.$lang >$OODESTDIR/gid_Module_Root.$lang.new
- mv -f $OODESTDIR/gid_Module_Root.$lang.new $OODESTDIR/gid_Module_Root.$lang
- # FIXME: the following code could be used without the condition
- # and should replace the lines above after only the milestones
- # providing gid_Module_Helppack_Help and fixed gid_Module_Root.$lang
- # are supported
- # Note: The problem with gid_Module_Root.$lang is that it still includes
- # %dir */help/* entries.
- # Note: It was still necessary on ppc with gcj (OOo-2.0.2). Strange. Have to
- # investigate it later.
- if test -f $OODESTDIR/gid_Module_Helppack_Help.$lang ; then
- grep -v "$OOINSTBASE/help/$lang" $OODESTDIR/gid_Module_Helppack_Help.$lang >$OODESTDIR/gid_Module_Helppack_Help.$lang.new
- mv -f $OODESTDIR/gid_Module_Helppack_Help.$lang.new $OODESTDIR/gid_Module_Helppack_Help.$lang
- fi
-
- # Note: We created a compat symlink in the past. It is no longer necessary.
- # We do not want it because RPM has problems with update when we remove
- # poor localizations in never packages
-}
-
-# Check if the English help is installed and is in the main package (is first on the list)
-# Note that Java-disabled builds do not create help at all.
-if test -f $OOINSTDIR/help/en/sbasic.cfg -a \
- "`for lang in $OOO_LANGS_LIST ; do echo $lang ; break ; done`" = "en-US" ; then
-
- echo "Removing duplicated English help..."
-
- for lang in $OOO_LANGS_LIST ; do
- test ! -f $OOINSTDIR/help/en/sbasic.cfg -o ! -f $OOINSTDIR/help/$lang/sbasic.cfg && continue;
- if diff $OOINSTDIR/help/en/sbasic.cfg $OOINSTDIR/help/$lang/sbasic.cfg >/dev/null 2>&1 ; then
- remove_help_localization $lang
- fi
- done
-
- echo "Removing poor help localizations..."
-
- for lang in $OOO_POOR_HELP_LOCALIZATIONS ; do
- remove_help_localization $lang
- done
-fi
-
-./install-sdk || exit 1;
-
-if test "$DISTRO" = "SUSE" || echo "$DISTRO" | grep -q "SUSE-11" ; then
- # branding stuff for openSUSE >= 11.0 and SLED >= 11
- mkdir -p $DATADIR/$OOOINSTALLDIRNAME/program
- echo "%dir $DATADIRBASE/$OOOINSTALLDIRNAME" >$BUILDDIR/upstream_branding_list.txt
- echo "%dir $DATADIRBASE/$OOOINSTALLDIRNAME/program" >>$BUILDDIR/upstream_branding_list.txt
- branding_stuff="about.png intro.png sofficerc"
- for file in $branding_stuff ; do
- mv $OOINSTDIR/program/$file $DATADIR/$OOOINSTALLDIRNAME/program/$file
- ln -sf $DATADIRBASE/$OOOINSTALLDIRNAME/program/$file $OOINSTDIR/program/$file
- echo "$DATADIRBASE/$OOOINSTALLDIRNAME/program/$file" >>$BUILDDIR/upstream_branding_list.txt
- done
-fi
-
-if echo "$DISTRO" | grep -q "GoOo"; then
- for file in about intro ; do
- cp $TOOLSDIR/src/open${file}_go-oo.bmp \
- $OOINSTDIR/program/${file}.bmp
- done
-fi
-
-# remove installed file even from the file list
-# Params: file_list file_to_remove
-remove_file()
-{
- rm -f "$OODESTDIR/$2"
- perl -pi -e "s|^$2$||" "$1"
-}
-
-# move one file from one list of files to a second one
-# Params: target_file_list source_file_list file_to_move
-mv_file_between_flists()
-{
- if grep "^$3\$" $2 >/dev/null 2>&1 ; then
- # \$3 can be regular expression
- grep "^$3\$" $2 >>$1
- perl -pi -e "s|^$3$||" $2
- fi
-}
-# add the directories from the source list of files to the target list of
-# file which are used in the target list of files but are missing there
-# Params: target_file_list source_file_list
-add_used_directories()
-{
- sort -u -r $2 | sed -n "s|^%dir \(.*\)\$|s%^\\\\(\1\\\\).*%\\\\1%p|p" >$2.pattern
- sed -n -f $2.pattern $1 | sort -u | sed "s|^|%dir |" >>$1
- rm $2.pattern
- sort -u $1 >$1.unique
- mv $1.unique $1
-}
-
-# remove a duplicity between two filelist
-# Params: filelist_with_original filelist_with_duplicity duplicit_path
-remove_duplicity_from_flists()
-{
- if grep "$3" "$1" >/dev/null 2>&1 && \
- grep "$3" "$2" >/dev/null 2>&1 ; then
- perl -pi -e "s|^$3$||" $2
- fi
-}
-
-# merges one file list into another one
-# Params: source_filelist dest_filelist replace_dest
-merge_flists()
-{
- if test -f "$1" ; then
- cat "$1" >>"$2"
- sort -u "$2" >"$2".sorted
- mv "$2".sorted "$2"
- fi
-}
-
-if ! test -f $OODESTDIR/gid_Module_Root; then
- echo "Failed to generate package file lists";
- exit 1;
-fi
-
-cd $OODESTDIR
-
-if test -f gid_Module_Root_Files_2; then
- GID_MODULE_ROOT_FILES_LISTS="gid_Module_Root_Files_[0-9]"
-else
- GID_MODULE_ROOT_FILES_LISTS=""
-fi
-
-# remove .orig files created by patching l10n extras
-if test -f gid_Module_Root_Files_6 ; then
- remove_file gid_Module_Root_Files_6 "$OOINSTBASE/basis$VERSION/presets/config/standard.soc.orig"
-fi
-
-if test "z$VENDORNAME" != "zDebian" ; then
- echo "Moving package file lists..."
-
- # Nasty hacks for now...
- echo "%dir $OOINSTBASE/basis$VERSION/share/template/en-US/forms
- $OOINSTBASE/basis$VERSION/share/template/en-US/forms/resume.ott
- %dir $OOINSTBASE/basis$VERSION/share/template/en-US/officorr
- $OOINSTBASE/basis$VERSION/share/template/en-US/officorr/project-proposal.ott" >> gid_Module_Langpack_Basis_en_US
-
- echo "%dir $OOINSTBASE
- $OOINSTBASE/basis$VERSION/program/java-set-classpath" > gid_Module_Root_Hack
-
-
- test -f $OOINSTDIR/basis$VERSION/program/pyunorc-update64 && \
- echo $OOINSTBASE/basis$VERSION/program/pyunorc-update64 >> gid_Module_Pyuno_Hack
-
- rm -f common_list.txt
- for module in gid_Module_Root gid_Module_Root_Brand \
- gid_Module_Root_Files_Images \
- gid_Module_Root_Files_[0-9] \
- gid_Module_Root_Hack \
- gid_Module_Oo_Linguistic \
- gid_Module_Root_Ure_Hidden \
- $BUILDDIR/dictionaries ; do
- merge_flists $module $BUILDDIR/common_list.txt
- done
-
- if test "$SPLIT_APP_MODULES" = "YES" ; then
- rm -f $BUILDDIR/base_list.txt $BUILDDIR/calc_list.txt \
- $BUILDDIR/draw_list.txt $BUILDDIR/math_list.txt \
- $BUILDDIR/impress_list.txt $BUILDDIR/writer_list.txt
- merge_flists gid_Module_Prg_Base_Bin $BUILDDIR/base_list.txt
- merge_flists gid_Module_Prg_Calc_Bin $BUILDDIR/calc_list.txt
- merge_flists gid_Module_Prg_Draw_Bin $BUILDDIR/draw_list.txt
- merge_flists gid_Module_Prg_Math_Bin $BUILDDIR/math_list.txt
- merge_flists gid_Module_Prg_Impress_Bin $BUILDDIR/impress_list.txt
- merge_flists gid_Module_Prg_Wrt_Bin $BUILDDIR/writer_list.txt
- merge_flists gid_Module_Brand_Prg_Base $BUILDDIR/base_list.txt
- merge_flists gid_Module_Brand_Prg_Calc $BUILDDIR/calc_list.txt
- merge_flists gid_Module_Brand_Prg_Draw $BUILDDIR/draw_list.txt
- merge_flists gid_Module_Brand_Prg_Math $BUILDDIR/math_list.txt
- merge_flists gid_Module_Brand_Prg_Impress $BUILDDIR/impress_list.txt
- merge_flists gid_Module_Brand_Prg_Wrt $BUILDDIR/writer_list.txt
- # FIXME: small; low dependencies; why optional module?
- merge_flists gid_Module_Optional_OGLTrans $BUILDDIR/impress_list.txt
- # FIXME: shold be fixed in scp2
- mv_file_between_flists $BUILDDIR/calc_list.txt $BUILDDIR/common_list.txt $OOINSTBASE/program/libvbaobj.*\.uno.so
- else
- merge_flists gid_Module_Prg_Base_Bin $BUILDDIR/common_list.txt
- merge_flists gid_Module_Prg_Calc_Bin $BUILDDIR/common_list.txt
- merge_flists gid_Module_Prg_Draw_Bin $BUILDDIR/common_list.txt
- merge_flists gid_Module_Prg_Math_Bin $BUILDDIR/common_list.txt
- merge_flists gid_Module_Prg_Impress_Bin $BUILDDIR/common_list.txt
- merge_flists gid_Module_Prg_Wrt_Bin $BUILDDIR/common_list.txt
- merge_flists gid_Module_Brand_Prg_Base $BUILDDIR/common_list.txt
- merge_flists gid_Module_Brand_Prg_Calc $BUILDDIR/common_list.txt
- merge_flists gid_Module_Brand_Prg_Draw $BUILDDIR/common_list.txt
- merge_flists gid_Module_Brand_Prg_Math $BUILDDIR/common_list.txt
- merge_flists gid_Module_Brand_Prg_Impress $BUILDDIR/common_list.txt
- merge_flists gid_Module_Brand_Prg_Wrt $BUILDDIR/common_list.txt
- # FIXME: small; low dependencies; why optional module?
- merge_flists gid_Module_Optional_OGLTrans $BUILDDIR/common_list.txt
- fi
-
- if test "$SPLIT_OPT_FEATURES" = "YES" ; then
- rm -f $BUILDDIR/filters_list.txt $BUILDDIR/mailmerge_list.txt \
- $BUILDDIR/pyuno_list.txt $BUILDDIR/testtool_list.txt
- if test "z$VENDORNAME" = "zMandriva" ; then
- rm -f $BUILDDIR/filter-binfilter_list.txt
- merge_flists gid_Module_Optional_Binfilter $BUILDDIR/filter-binfilter_list.txt
- merge_flists gid_Module_Langpack_Binfilter_en_US
- merge_flists gid_Module_Optional_Grfflt $BUILDDIR/draw_list.txt
- merge_flists gid_Module_Optional_Headless $BUILDDIR/common_list.txt
- merge_flists gid_Module_Optional_Javafilter $BUILDDIR/common_list.txt
- merge_flists gid_Module_Optional_Pymailmerge $BUILDDIR/pyuno_list.txt
- merge_flists gid_Module_Optional_Pyuno $BUILDDIR/pyuno_list.txt
- merge_flists gid_Module_Optional_Testtool $BUILDDIR/testtool_list.txt
- merge_flists gid_Module_Optional_Xsltfiltersamples $BUILDDIR/common_list.txt
- # pyuno hack for x86_64
- merge_flists gid_Module_Pyuno_Hack $BUILDDIR/pyuno_list.txt
- else
- merge_flists gid_Module_Optional_Binfilter $BUILDDIR/filters_list.txt
- merge_flists gid_Module_Optional_Grfflt $BUILDDIR/common_list.txt
- merge_flists gid_Module_Optional_Headless $BUILDDIR/common_list.txt
- merge_flists gid_Module_Optional_Javafilter $BUILDDIR/filters_list.txt
- merge_flists gid_Module_Optional_Pymailmerge $BUILDDIR/mailmerge_list.txt
- merge_flists gid_Module_Optional_Pyuno $BUILDDIR/pyuno_list.txt
- merge_flists gid_Module_Optional_Testtool $BUILDDIR/testtool_list.txt
- merge_flists gid_Module_Optional_Xsltfiltersamples $BUILDDIR/filters_list.txt
- # pyuno hack for x86_64
- merge_flists gid_Module_Pyuno_Hack $BUILDDIR/pyuno_list.txt
- fi
- else
- merge_flists gid_Module_Optional_Binfilter $BUILDDIR/common_list.txt
- merge_flists gid_Module_Langpack_Binfilter $BUILDDIR/common_list.txt
- merge_flists gid_Module_Optional_Grfflt $BUILDDIR/common_list.txt
- merge_flists gid_Module_Optional_Headless $BUILDDIR/common_list.txt
- merge_flists gid_Module_Optional_Javafilter $BUILDDIR/common_list.txt
- merge_flists gid_Module_Optional_Pymailmerge $BUILDDIR/common_list.txt
- merge_flists gid_Module_Optional_Pyuno $BUILDDIR/common_list.txt
- merge_flists gid_Module_Optional_Testtool $BUILDDIR/common_list.txt
- merge_flists gid_Module_Optional_Xsltfiltersamples $BUILDDIR/common_list.txt
- # pyuno hack for x86_64
- merge_flists gid_Module_Pyuno_Hack $BUILDDIR/common_list.txt
- fi
-
- if test "$VENDORNAME" = "Novell" ; then
- cat $BUILDDIR/novell-gallery-addon >> $BUILDDIR/common_list.txt
- fi
-
- for lang in `echo $OOO_LANGS_LIST | sed -e s/-/_/g`; do
- lang_lists=
- if test "$VENDORNAME" = "Mandriva" -o \( "$VENDORNAME" = "Novell" -a "$SPLIT_APP_MODULES" = "YES" \) ; then
- test -f gid_Module_Langpack_Basis_$lang && lang_lists="$lang_lists gid_Module_Langpack_Basis_$lang" || :
- test -f gid_Module_Langpack_Brand_$lang && lang_lists="$lang_lists gid_Module_Langpack_Brand_$lang" || :
- test -f gid_Module_Langpack_Resource_$lang && lang_lists="$lang_lists gid_Module_Langpack_Resource_$lang" || :
- test -f gid_Module_Langpack_Impress_$lang && lang_lists="$lang_lists gid_Module_Langpack_Impress_$lang" || :
- test -f gid_Module_Langpack_Draw_$lang && lang_lists="$lang_lists gid_Module_Langpack_Draw_$lang" || :
- test -f gid_Module_Langpack_Math_$lang && lang_lists="$lang_lists gid_Module_Langpack_Math_$lang" || :
- test -f gid_Module_Langpack_Calc_$lang && lang_lists="$lang_lists gid_Module_Langpack_Calc_$lang" || :
- test -f gid_Module_Langpack_Base_$lang && lang_lists="$lang_lists gid_Module_Langpack_Base_$lang" || :
- test -f gid_Module_Langpack_Writer_$lang && lang_lists="$lang_lists gid_Module_Langpack_Writer_$lang" || :
- test -f gid_Module_Langpack_Binfilter_$lang && lang_lists="$lang_lists gid_Module_Langpack_Binfilter_$lang" || :
- # Place helps on dedicated packages.
- test -f gid_Module_Helppack_Help_$lang && sort -u gid_Module_Helppack_Help_$lang > $BUILDDIR/help_${lang}_list.txt || :
- else
- test -f gid_Module_Langpack_Basis_$lang && lang_lists="$lang_lists gid_Module_Langpack_Basis_$lang" || :
- test -f gid_Module_Langpack_Brand_$lang && lang_lists="$lang_lists gid_Module_Langpack_Brand_$lang" || :
- test -f gid_Module_Langpack_Resource_$lang && lang_lists="$lang_lists gid_Module_Langpack_Resource_$lang" || :
- test -f gid_Module_Langpack_Impress_$lang && lang_lists="$lang_lists gid_Module_Langpack_Impress_$lang" || :
- test -f gid_Module_Langpack_Draw_$lang && lang_lists="$lang_lists gid_Module_Langpack_Draw_$lang" || :
- test -f gid_Module_Langpack_Math_$lang && lang_lists="$lang_lists gid_Module_Langpack_Math_$lang" || :
- test -f gid_Module_Langpack_Calc_$lang && lang_lists="$lang_lists gid_Module_Langpack_Calc_$lang" || :
- test -f gid_Module_Langpack_Base_$lang && lang_lists="$lang_lists gid_Module_Langpack_Base_$lang" || :
- test -f gid_Module_Langpack_Writer_$lang && lang_lists="$lang_lists gid_Module_Langpack_Writer_$lang" || :
- test -f gid_Module_Langpack_Binfilter_$lang && lang_lists="$lang_lists gid_Module_Langpack_Binfilter_$lang" || :
- test -f gid_Module_Helppack_Help_$lang && lang_lists="$lang_lists gid_Module_Helppack_Help_$lang" || :
- fi
- if test -n "$lang_lists" ; then
- # all files are installed below $OOINSTBASE/basis; we want to own also $OOINSTBASE
- echo "%dir $OOINSTBASE" >$BUILDDIR/lang_${lang}_list.txt
- cat $lang_lists | sort -u >>$BUILDDIR/lang_${lang}_list.txt
- fi
- # some help files are in _Langpack_{Writer,Impress,...}_<lang>
- # move them from -l10n to -help
- if test "$VENDORNAME" = "Mandriva" -o \( "$VENDORNAME" = "Novell" -a "$SPLIT_APP_MODULES" = "YES" \) ; then
- for lang in `echo $OOO_LANGS_LIST | sed -e s/-/_/g`; do
- test -f $BUILDDIR/help_${lang}_list.txt || continue;
- mv_file_between_flists $BUILDDIR/help_${lang}_list.txt $BUILDDIR/lang_${lang}_list.txt $OOINSTBASE/basis$VERSION/help/.*
- add_used_directories $BUILDDIR/help_${lang}_list.txt $BUILDDIR/lang_${lang}_list.txt
- done
- fi
- done
-
- if test -f $BUILDDIR/lang_en_US_list.txt -a "$VENDORNAME" = "Novell" -a "$SPLIT_APP_MODULES" != "YES" ; then
- cat $BUILDDIR/lang_en_US_list.txt >>$BUILDDIR/common_list.txt
- rm $BUILDDIR/lang_en_US_list.txt
- fi
-
- if test -f gid_Module_Root_SDK ; then
- cp gid_Module_Root_SDK $BUILDDIR/sdk_list.txt
- fi
-
- cd $BUILDDIR
-
- # kde subpackage
- rm -f kde_list.txt
- test -f $OODESTDIR/gid_Module_Optional_Kde && cp $OODESTDIR/gid_Module_Optional_Kde kde_list.txt || :
- mv_file_between_flists kde_list.txt common_list.txt $OOINSTBASE/program/kdefilepicker
- mv_file_between_flists kde_list.txt common_list.txt $OOINSTBASE/basis$VERSION/program/fps_kde.uno.so
- mv_file_between_flists kde_list.txt common_list.txt $OOINSTBASE/basis$VERSION/program/libvclplug_kdel..so
- mv_file_between_flists kde_list.txt common_list.txt $OOINSTBASE/basis$VERSION/program/libkabdrv1.so
- add_used_directories kde_list.txt common_list.txt
-
- # create kde4 subpackage
- rm -f kde4_list.txt
- mv_file_between_flists kde4_list.txt kde_list.txt $OOINSTBASE/basis$VERSION/program/kde4be1.uno.so
- mv_file_between_flists kde4_list.txt common_list.txt $OOINSTBASE/basis$VERSION/program/libvclplug_kde4l..so
- mv_file_between_flists kde4_list.txt common_list.txt $OOINSTBASE/basis$VERSION/program/fps_kde4.uno.so
- add_used_directories kde4_list.txt common_list.txt
-
- # NLD subpackage
- rm -f nld_list.txt
- mv_file_between_flists nld_list.txt common_list.txt $OOINSTBASE/program/openintro_nld.bmp
- mv_file_between_flists nld_list.txt common_list.txt $OOINSTBASE/program/openabout_nld.bmp
- add_used_directories nld_list.txt common_list.txt
-
- # mono subpackage
- rm -f mono_list.txt
- mv_file_between_flists mono_list.txt common_list.txt $OOINSTBASE/basis$VERSION/program/cli_.*.dll
- mv_file_between_flists mono_list.txt common_list.txt $OOINSTBASE/basis$VERSION/program/cli_.*.dll.config
- mv_file_between_flists mono_list.txt common_list.txt $OOINSTBASE/basis$VERSION/program/policy.*.cli_.*.dll
- mv_file_between_flists mono_list.txt common_list.txt $OOINSTBASE/ure/lib/cli_.*.dll
- mv_file_between_flists mono_list.txt common_list.txt $OOINSTBASE/ure/lib/cli_.*.dll.config
- mv_file_between_flists mono_list.txt common_list.txt $OOINSTBASE/ure/lib/policy.*.cli_.*.dll
- mv_file_between_flists mono_list.txt common_list.txt $OOINSTBASE/ure/lib/libcli_.*.so
- add_used_directories mono_list.txt common_list.txt
- # add the files from GAC if it was installed
- test -f mono_gac && cat mono_gac >>mono_list.txt
-
- # mailmerge
- if test "$SPLIT_OPT_FEATURES" = "YES" ; then
- if test "z$VENDORNAME" = "zMandriva" ; then
- flist=pyuno_list.txt
- else
- flist=mailmerge_list.txt
- rm -f $flist
- fi
- mv_file_between_flists $flist common_list.txt $OOINSTBASE/basis$VERSION/program/mailmerge.py
- add_used_directories $flist common_list.txt
- fi
-
- if test "z$VENDORNAME" = "zNovell" ; then
- # officebean subpackage
- rm -f officebean_list.txt
- mv_file_between_flists officebean_list.txt common_list.txt $OOINSTBASE/basis$VERSION/program/classes/officebean.jar
- mv_file_between_flists officebean_list.txt common_list.txt $OOINSTBASE/basis$VERSION/program/libofficebean.so
- add_used_directories officebean_list.txt common_list.txt
- fi
-
- if test -f sdk_list.txt ; then
- rm -f sdk_doc_list.txt
- # in this case we move all entries including directories
- mv_file_between_flists sdk_doc_list.txt sdk_list.txt "%dir $DOCDIRBASE/sdk/docs.*"
- mv_file_between_flists sdk_doc_list.txt sdk_list.txt "$DOCDIRBASE/sdk/docs.*"
- mv_file_between_flists sdk_doc_list.txt sdk_list.txt "$DOCDIRBASE/sdk/examples"
- mv_file_between_flists sdk_doc_list.txt sdk_list.txt "$DOCDIRBASE/sdk/index.html"
- mv_file_between_flists sdk_doc_list.txt sdk_list.txt "%dir $OOINSTBASE/basis$VERSION/sdk/examples.*"
- mv_file_between_flists sdk_doc_list.txt sdk_list.txt "$OOINSTBASE/basis$VERSION/sdk/docs"
- mv_file_between_flists sdk_doc_list.txt sdk_list.txt "$OOINSTBASE/basis$VERSION/sdk/examples.*"
- mv_file_between_flists sdk_doc_list.txt sdk_list.txt "$OOINSTBASE/basis$VERSION/sdk/index.html"
- add_used_directories sdk_doc_list.txt sdk_list.txt
- fi
-
- if test "$VENDORNAME" = "Novell" -a "$SPLIT_APP_MODULES" = "YES" ; then
- # move the prebuilt icons into a hacky temporary package
- # we want to repack them into a noarch package as soon as possible
- # without the build dependency on the huge devel package
- rm -f icon_themes_prebuilt.txt
- mv_file_between_flists icon_themes_prebuilt.txt common_list.txt $OOINSTBASE/basis$VERSION/share/config/images_classic8.zip
- mv_file_between_flists icon_themes_prebuilt.txt common_list.txt $OOINSTBASE/basis$VERSION/share/config/images_crystal.zip
- mv_file_between_flists icon_themes_prebuilt.txt common_list.txt $OOINSTBASE/basis$VERSION/share/config/images_hicontrast.zip
- mv_file_between_flists icon_themes_prebuilt.txt common_list.txt $OOINSTBASE/basis$VERSION/share/config/images_industrial.zip
- mv_file_between_flists icon_themes_prebuilt.txt common_list.txt $OOINSTBASE/basis$VERSION/share/config/images_tango.zip
- mv_file_between_flists icon_themes_prebuilt.txt common_list.txt $OOINSTBASE/basis$VERSION/share/config/images.zip
- fi
-
- # Mandriva packaging
- if test "$VENDORNAME" = "Mandriva"; then
- # Not used
- remove_file common_list.txt $OOINSTBASE/share/gallery/htmltheme.orig
- remove_file common_list.txt $OOINSTBASE/share/dict/ooo/dictionary.lst
-
- # And these are in -draw package
- mv_file_between_flists draw_list.txt common_list.txt $OOINSTBASE/basis$VERSION/share/registry/modules/org/openoffice/TypeDetection/Filter/fcfg_drawgraphics_filters.xcu
- mv_file_between_flists draw_list.txt common_list.txt $OOINSTBASE/basis$VERSION/share/registry/modules/org/openoffice/TypeDetection/Filter/fcfg_drawgraphics_types.xcu
-
- # And these are in -impress package
- mv_file_between_flists impress_list.txt common_list.txt $OOINSTBASE/basis$VERSION/share/registry/modules/org/openoffice/TypeDetection/Filter/fcfg_impressgraphics_filters.xcu
- mv_file_between_flists impress_list.txt common_list.txt $OOINSTBASE/basis$VERSION/share/registry/modules/org/openoffice/TypeDetection/Types/fcfg_impressgraphics_types.xcu
-
- # Split out the gallery
- rm -f gallery_list.txt
- mv_file_between_flists gallery_list.txt common_list.txt "$OOINSTBASE/basis$VERSION/share/gallery.*"
- test -r galleries.txt && cat galleries.txt >> gallery_list.txt
-
- # Split out dtd-officedocument1.0
- rm -f dtd_list.txt
- mv_file_between_flists dtd_list.txt common_list.txt "$OOINSTBASE/share/dtd/officedocument.*"
-
- # Split out java stuff
- rm -f java_common_list.txt
- mv_file_between_flists java_common_list.txt common_list.txt $OOINSTBASE/basis$VERSION/program/JREProperties.class
- mv_file_between_flists java_common_list.txt common_list.txt "$OOINSTBASE/basis$VERSION/program/classes.*"
- mv_file_between_flists java_common_list.txt common_list.txt $OOINSTBASE/basis$VERSION/program/libofficebean.so
- mv_file_between_flists java_common_list.txt common_list.txt "$OOINSTBASE/basis$VERSION/share/Scripts/java.*"
- mv_file_between_flists java_common_list.txt filter-binfilter_list.txt $OOINSTBASE/basis$VERSION/program/classes/aportisdoc.jar
- mv_file_between_flists java_common_list.txt filter-binfilter_list.txt $OOINSTBASE/basis$VERSION/program/classes/pocketword.jar
- mv_file_between_flists java_common_list.txt filter-binfilter_list.txt $OOINSTBASE/basis$VERSION/program/classes/pexcel.jar
- mv_file_between_flists java_common_list.txt writer_list.txt $OOINSTBASE/basis$VERSION/program/classes/writer2latex.jar
-
- # Move arch-dependent/dup files from common to core
- rm -f core_list.txt
- for f in \
- ".*\.so" \
- ".*\.so\..*" \
- "program/.*\.rdb" \
- program/configimport.bin \
- program/javaldx \
- program/msfontextract \
- program/nsplugin \
- program/oosplash.bin \
- program/pagein \
- program/pagein-calc \
- program/pagein-common \
- program/pagein-draw \
- program/pagein-impress \
- program/pagein-writer \
- program/pkgchk.bin \
- program/pluginapp.bin \
- program/setofficelang.bin \
- program/soffice.bin \
- program/spadmin.bin \
- program/uno.bin \
- program/unopkg.bin \
- program/uri-encode
- do
- mv_file_between_flists core_list.txt common_list.txt "$OOINSTBASE/basis$VERSION/$f"
- done
-
- # themes are included in other packages
- # don't use remove_file as we don't want them removed from the buildroot.
- mv_file_between_flists /dev/null common_list.txt $OOINSTBASE/basis$VERSION/share/config/images_crystal.zip
- mv_file_between_flists /dev/null common_list.txt $OOINSTBASE/basis$VERSION/share/config/images_hicontrast.zip
- mv_file_between_flists /dev/null common_list.txt $OOINSTBASE/basis$VERSION/share/config/images_industrial.zip
- mv_file_between_flists /dev/null common_list.txt $OOINSTBASE/basis$VERSION/share/config/images_tango.zip
- mv_file_between_flists /dev/null common_list.txt $OOINSTBASE/basis$VERSION/share/config/images.zip
- fi
-
- # remove known duplicities to do not have files packaged in two packages
- # the Bulgarian fixes can be removed after the issue #54110 is fixed
- remove_duplicity_from_flists common_list.txt lang_bg_list.txt $OOINSTBASE/basis$VERSION/presets/config/arrowhd.soe
- remove_duplicity_from_flists common_list.txt lang_bg_list.txt $OOINSTBASE/basis$VERSION/presets/config/classic.sog
- remove_duplicity_from_flists common_list.txt lang_bg_list.txt $OOINSTBASE/basis$VERSION/presets/config/hatching.soh
- remove_duplicity_from_flists common_list.txt lang_bg_list.txt $OOINSTBASE/basis$VERSION/presets/config/modern.sog
- remove_duplicity_from_flists common_list.txt lang_bg_list.txt $OOINSTBASE/basis$VERSION/presets/config/palette.soc
- remove_duplicity_from_flists common_list.txt lang_bg_list.txt $OOINSTBASE/basis$VERSION/presets/config/styles.sod
- # the British fixes can be removed after the issue #54113 is fixed
- remove_duplicity_from_flists common_list.txt lang_en-GB_list.txt $OOINSTBASE/basis$VERSION/presets/config/standard.sog
-else
- echo "Creating package directories..."
-
- test -d pkg && rm -r pkg || :
-
- # Create package tree (needed by Debian's dpkg)
- # create_package_directory <list_file> <directory_name>
- create_package_directory()
- {
- listfile=$1
- directory="$2"
- perl -nl \
- -e " if(/^%dir (.*)/)
- {system('mkdir', '-p', '-m', '755', \"$directory\".\$1);}
- else
- {rename('./'.\$_, \"$directory\".\$_);}
- " \
- $listfile
- }
-
- create_package_directory gid_Module_Root_Ure_Hidden pkg/ure
-
- create_package_directory gid_Module_Root pkg/libreoffice-common
- create_package_directory gid_Module_Root_Brand pkg/libreoffice-common
- create_package_directory gid_Module_Root_Files_Images pkg/libreoffice-common
- create_package_directory gid_Module_Oo_Linguistic pkg/libreoffice-common
- create_package_directory gid_Module_Optional_Xsltfiltersamples pkg/libreoffice-common
- create_package_directory gid_Module_Optional_Javafilter pkg/libreoffice-common
- if [ -f gid_Module_Optional_Binfilter ]; then
- create_package_directory gid_Module_Optional_Binfilter pkg/libreoffice-filter-binfilter
- fi
- create_package_directory gid_Module_Optional_Grfflt pkg/libreoffice-draw
- create_package_directory gid_Module_Prg_Calc_Bin pkg/libreoffice-calc
- create_package_directory gid_Module_Prg_Math_Bin pkg/libreoffice-math
- create_package_directory gid_Module_Prg_Draw_Bin pkg/libreoffice-draw
- create_package_directory gid_Module_Prg_Wrt_Bin pkg/libreoffice-writer
- create_package_directory gid_Module_Prg_Impress_Bin pkg/libreoffice-impress
- create_package_directory gid_Module_Prg_Base_Bin pkg/libreoffice-base
- create_package_directory gid_Module_Brand_Prg_Calc pkg/libreoffice-calc
- create_package_directory gid_Module_Brand_Prg_Math pkg/libreoffice-math
- create_package_directory gid_Module_Brand_Prg_Draw pkg/libreoffice-draw
- create_package_directory gid_Module_Brand_Prg_Wrt pkg/libreoffice-writer
- create_package_directory gid_Module_Brand_Prg_Impress pkg/libreoffice-impress
- create_package_directory gid_Module_Brand_Prg_Base pkg/libreoffice-base
- create_package_directory gid_Module_Optional_Pyuno pkg/python-uno
- create_package_directory gid_Module_Optional_Gnome pkg/libreoffice-gnome
- create_package_directory gid_Module_Optional_Kde pkg/libreoffice-kde
-
- create_package_directory gid_Module_Root_Files_2 pkg/libreoffice-common
- create_package_directory gid_Module_Root_Files_3 pkg/libreoffice-common
- create_package_directory gid_Module_Root_Files_4 pkg/libreoffice-common
- create_package_directory gid_Module_Root_Files_5 pkg/libreoffice-common
- create_package_directory gid_Module_Root_Files_6 pkg/libreoffice-common
- create_package_directory gid_Module_Root_Files_7 pkg/libreoffice-common
- create_package_directory gid_Module_Optional_Testtool pkg/libreoffice-qa-tools
- if [ -e gid_Module_Optional_Pymailmerge ]; then
- create_package_directory gid_Module_Optional_Pymailmerge pkg/libreoffice-emailmerge
- else # post m26
- mkdir -p pkg/libreoffice-emailmerge/$OOINSTBASE/basis$VERSION/program
- mv pkg/libreoffice-common/$OOINSTBASE/basis$VERSION/program/mailmerge.py \
- pkg/libreoffice-emailmerge/$OOINSTBASE/basis$VERSION/program/mailmerge.py
- fi
- create_package_directory gid_Module_Optional_OGLTrans pkg/libreoffice-ogltrans
-
- create_package_directory gid_Module_Root_SDK pkg/libreoffice-dev
-
- for l in `echo $OOO_LANGS_LIST`; do
- for p in Impress Draw Math Calc Base Writer; do
- create_package_directory gid_Module_Langpack_${p}_`echo $l | sed -e s/-/_/g` pkg/libreoffice-l10n-$l
- done
- create_package_directory gid_Module_Langpack_Basis_`echo $l | sed -e s/-/_/g` pkg/libreoffice-l10n-$l
- create_package_directory gid_Module_Langpack_Brand_`echo $l | sed -e s/-/_/g` pkg/libreoffice-l10n-$l
- create_package_directory gid_Module_Langpack_Resource_`echo $l | sed -e s/-/_/g` pkg/libreoffice-l10n-$l
- create_package_directory gid_Module_Helppack_Help_`echo $l | sed -e s/-/_/g` pkg/libreoffice-help-$l
- if [ -f gid_Module_Optional_Binfilter ]; then
- if [ "$l" = "en-US" ]; then
- create_package_directory gid_Module_Langpack_Binfilter_en_US pkg/libreoffice-filter-binfilter
- else
- create_package_directory gid_Module_Langpack_Binfilter_`echo $l | sed -e s/-/_/g` pkg/libreoffice-l10n-$l
- fi
- fi
- # some help files are in _Langpack_{Writer,Impress,...}_<lang>
- # move them from -l10n to -help
- if [ "$l" = "en-US" ]; then d=en; else d=$l; fi
- mv pkg/libreoffice-l10n-$l/$OOINSTBASE/basis$VERSION/help/$d/* \
- pkg/libreoffice-help-$l/$OOINSTBASE/basis$VERSION/help/$d && \
- rmdir pkg/libreoffice-l10n-$l/$OOINSTBASE/basis$VERSION/help/$d
- done
-
- # move_wrappers <directory_name> <name> [...]
- move_wrappers()
- {
- directory=$1
- shift
- mkdir -m755 -p "$directory"/usr/bin
- while test -n "$1"; do
- mv usr/*bin/"$1$BINSUFFIX" "$directory"/usr/bin
- shift
- done
- }
- move_wrappers pkg/libreoffice-common soffice unopkg
- if test "$COMPAT_OOWRAPPERS" = "YES" ; then
- move_wrappers pkg/libreoffice-common ooffice oofromtemplate
- move_wrappers pkg/libreoffice-base oobase
- move_wrappers pkg/libreoffice-writer oowriter ooweb
- move_wrappers pkg/libreoffice-calc oocalc
- move_wrappers pkg/libreoffice-impress ooimpress
- move_wrappers pkg/libreoffice-math oomath
- move_wrappers pkg/libreoffice-draw oodraw
- fi
- move_wrappers pkg/libreoffice-common libreoffice lofromtemplate
- move_wrappers pkg/libreoffice-base lobase
- move_wrappers pkg/libreoffice-writer lowriter loweb
- move_wrappers pkg/libreoffice-calc localc
- move_wrappers pkg/libreoffice-impress loimpress
- move_wrappers pkg/libreoffice-math lomath
- move_wrappers pkg/libreoffice-draw lodraw
-
- # Move all libraries, binaries, *.rdb from -common to -core
- for d in $OOINSTBASE/basis$VERSION/program $OOINSTBASE/program; do \
- if [ ! -d $OODESTDIR/pkg/libreoffice-core/$d ]; then \
- mkdir -p $OODESTDIR/pkg/libreoffice-core/$d; \
- fi &&
- ( cd pkg/libreoffice-common/$d
- find -maxdepth 1 \
- -regex '\./\(.*\.so.*\|.*\.bin\|pagein\|nsplugin\|kdefilepicker\|msfontextract\|.*\.rdb\|javaldx\|uri-encode\)' \
- -exec mv {} $OODESTDIR/pkg/libreoffice-core/$d \;
- ); \
- done
-
- # install additional ooo-build scripts & misc stuff
- mkdir -p pkg/libreoffice-common/usr/share/man/man1
- if test "$COMPAT_OOWRAPPERS" = "YES" ; then
- mv usr/share/man/man1/openoffice$BINSUFFIX.1 \
- pkg/libreoffice-common/usr/share/man/man1
- fi
- mv usr/share/man/man1/libreoffice$BINSUFFIX.1 \
- pkg/libreoffice-common/usr/share/man/man1
- mkdir -p pkg/libreoffice-common/etc/bash_completion.d
- if test "$COMPAT_OOWRAPPERS" = "YES" ; then
- mv etc/bash_completion.d/ooffice$BINSUFFIX.sh \
- pkg/libreoffice-common/etc/bash_completion.d
- fi
- mv etc/bash_completion.d/libreoffice$BINSUFFIX.sh \
- pkg/libreoffice-common/etc/bash_completion.d
- mv .$OOINSTBASE/basis$VERSION/program/java-set-classpath \
- pkg/libreoffice-common/$OOINSTBASE/program
- if echo $OOO_LANGS_LIST | grep -q en-US; then
- for i in forms/resume.ott officorr/project-proposal.ott; do \
- mkdir -p pkg/libreoffice-common/$OOINSTBASE/basis$VERSION/share/template/en-US/`dirname $i`; \
- mv .$OOINSTBASE/basis$VERSION/share/template/en-US/$i \
- pkg/libreoffice-common/$OOINSTBASE/basis$VERSION/share/template/en-US/$i; \
- done; \
- fi
- # Warn for any remaining files
- find . -path './pkg' -prune -o -not -name 'gid_Module_*' -not -type d -exec echo "File not packaged: {}" \;
-fi
-
-echo "Cleaning up lists of files...";
-mv -f $OODESTDIR/gid_Module_* $BUILDDIR
-
-cd $BUILDDIR
-
-# mark the config files
-if test "z$RPM_CONFIG_FILE_TAGS" != "z" ; then
- perl -pi -e "s|^($OOINSTBASE/help/.*\.xsl)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
- -e "s|^($OOINSTBASE/help/.*\.css)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
- -e "s|^($OOINSTBASE/program/[a-zA-Z0-9_\.]*rc)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
- -e "s|^($OOINSTBASE/program/.*\.xsl)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
- -e "s|^($OOINSTBASE/share/config/[a-zA-Z0-9]*rc)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
- -e "s|^($OOINSTBASE/share/dict/ooo/.*\.lst)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
- -e "s|^($OOINSTBASE/share/psprint/.*\.conf)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
- -e "s|^($OOINSTBASE/share/registry/.*\.xcu)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
- -e "s|^($OOINSTBASE/share/registry/.*\.properties)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
- -e "s|^($OOINSTBASE/share/registry/.*\.xcs)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
- -e "s|^($OOINSTBASE/user/config/.*\.so.)\$|$RPM_CONFIG_FILE_TAGS \\1|;" \
- *_list.txt
-fi
-
-# Red Hat Post-install cleanup
-if test "z$VENDORNAME" = "zRedHat" ; then
- # Fix openoffice/share/kde/net/applnk paths
- perl -pi -e "/^Module gid_Module_Optional_Kde/ .. /^End/ and s|YES|NO|g" $OOINSTBASE/program/instdb.ins
- perl -pi -e "/^Installation gid_Installation/ .. /^End/ and s|$PREFIX||g" $OOINSTBASE/program/instdb.ins
- perl -pi -e "/^Directory gid_Dir_Home_Gnome_Apps_Product/ .. /^End/ and s|OpenOffice\.org\ 1\.1\.0|OpenOffice\.org|g" $OOINSTBASE/program/instdb.ins
- perl -pi -e "/^Directory gid_Dir_Share_Kde_Net_Applnk_Product/ .. /^End/ and s|OpenOffice\.org\ 1\.1\.0|OpenOffice\.org|g" $OOINSTBASE/program/instdb.ins
- perl -pi -e "/^Directory gid_Dir_Kde_Share_Applnk_Product/ .. /^End/ and s|OpenOffice\.org\ 1\.1\.0|OpenOffice\.org|g" $OOINSTBASE/program/instdb.ins
- perl -pi -e "/^Procedure gid_Procedure_Kde_Inst/ .. /^End/ and s|OpenOffice\.org\ 1\.1\.0|OpenOffice\.org|g" $OOINSTBASE/program/instdb.ins
- perl -pi -e "/^Procedure gid_Procedure_Gnome_Install/ .. /^End/ and s|OpenOffice\.org\ 1\.1\.0|OpenOffice\.org|g" $OOINSTBASE/program/instdb.ins
- perl -pi -e "/^Procedure gid_Procedure_Gnome_Deinstall/ .. /^End/ and s|1\.1\.0||g" $OOINSTBASE/program/instdb.ins
-
- ## Fix instdb.ins, to *not* install local copies of these
- for entry in Kdeapplnk Kdemimetext Kdeicons Gnome_Apps Gnome_Icons Gnome2_Apps; do
- perl -pi -e "/^File gid_File_Extra_$entry/ .. /^End/ and (\
- s|^\tSize\s+\= .*|\tSize\t\t = 0;\r| or \
- s|^\tArchiveFiles\s+\= .*|\tArchiveFiles\t = 0;\r| or \
- s|^\tArchiveSize\s+\= .*|\tArchiveSize\t = 0;\r| or \
- s|^\tContains\s+\= .*|\tContains\t = ();\r| or \
- s|\t\t\t\t\t\".*|\r|g)" \
- $OOINSTBASE/program/instdb.ins
- done
-fi
-
-echo "Fixing permissions..."
-for dir in $DOCDIR $OOINSTDIR/basis$VERSION/sdk/examples ; do
- if test -d $dir -a -w $dir ; then
- find "$dir" -type f \( -name "*.txt" -o -name "*.java" -o -name "*.xml" -o \
- -name "*.xcu" -o -name "*.xcs" -o -name "*.html" -o \
- -name "*.pdf" -o -name "*.ps" -o -name "*.gif" -o \
- -name "*.png" -o -name "*.jpg" -o -name "Makefile" -o \
- -name "manifest.mf" \) -exec chmod 644 {} \;
- fi
-done
-
-if test "z$OODESTDIR" != "z" ; then
- echo "Checking for DESTDIR inside installed files..."
- found_destdir=
- for file in `find $OODESTDIR -type f` ; do
- grep -q "$OODESTDIR" $file && echo "$file: includes the string \"$OODESTDIR\"" && found_destdir=1
- done
- if test "z$found_destdir" != "z" ; then
- echo "!!!!!!!!!!!!!!!!!!!!!! WARNING !!!!!!!!!!!!!!!!!!!!!!"
- echo "The path DESTDIR:$OODESTDIR was found inside some"
- echo "installed files. It is probably a bug."
- echo
- echo "Especially, if the DESTDIR is set to \$RPM_BUILD_ROOT"
- echo "when creating RPM packages. Even it could be a security hole"
- echo "if the application searches /var/tmp for binaries or"
- echo "config files because the directory is world-writable."
- echo "!!!!!!!!!!!!!!!!!!!!!! WARNING !!!!!!!!!!!!!!!!!!!!!!"
- fi
-fi
-
-echo "Packaging succeeded";
-exit 0;
-
diff --git a/bin/patch-remove b/bin/patch-remove
deleted file mode 100755
index 693927a15..000000000
--- a/bin/patch-remove
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env perl
-
-my $pattern = shift (@ARGV);
-$pattern = 'util/defs/wntmsci' if (!defined $pattern);
-
-# removes crud from diffs
-my $filter = 0;
-
-while (<>) {
- my $line = $_;
- if ($line =~ m/^--- ([^ \t]*)/) { # spaces in fname ?
- my $path = $1;
- if ($path =~ m/$pattern/) {
- $filter = 1;
- print STDERR "Prune $path section\n";
- } else {
- $filter = 0;
-# print STDERR "Preserve $path section\n";
- }
- }
- print $line if (!$filter);
-}
diff --git a/bin/piece/.gitignore b/bin/piece/.gitignore
deleted file mode 100644
index c980c837a..000000000
--- a/bin/piece/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-sys-setup
diff --git a/bin/piece/Makefile.am b/bin/piece/Makefile.am
deleted file mode 100644
index 286c8cbf7..000000000
--- a/bin/piece/Makefile.am
+++ /dev/null
@@ -1,22 +0,0 @@
-EXTRA_DIST = \
- sys-setup.in \
- unpack-extras \
- copyexcept \
- desktop-support-app \
- noulf noulfconv \
- install-generic \
- install-bootstrap \
- install-devel-helper \
- install-l10n-helper \
- install-l10n-merged \
- install-registry \
- link-to-ooo-home \
- merge-file-lists \
- save-registry \
- save-noarch \
- sort-l10n \
- ${wildcard env-*} \
- ${wildcard build-*} \
- ${wildcard file-list-*} \
- ${wildcard inst-*} \
- ${wildcard post-inst-*}
diff --git a/bin/piece/build-bootstrap b/bin/piece/build-bootstrap
deleted file mode 100755
index 5ae6fdab1..000000000
--- a/bin/piece/build-bootstrap
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-# build bash script - invoked from build-ooo
-
-export ULFEX=$TOOLSDIR/bin/piece/noulf
-export ULFCONV=$TOOLSDIR/bin/piece/noulfconv
-
-perl $SOLARENV/bin/build.pl --subdirs:soltools:scp2:stlport $EXTRA_BUILD_FLAGS $EXTRA_DMAKE_FLAGS || exit 1;
diff --git a/bin/piece/build-generic b/bin/piece/build-generic
deleted file mode 100755
index ba78f5857..000000000
--- a/bin/piece/build-generic
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-custom_env="$OO_TOOLSDIR/piece/env-$piece"
-if test -f $custom_env; then
- source $custom_env
- echo "merged custom environment: $custom_env"
-fi
-
-# number of CPUs for the parallel build
-if test -n "$PIECE_BUILD_NCPUS_AUTO" ; then
- PIECE_BUILD_NCPUS=`grep ^processor /proc/cpuinfo | wc -l`
-fi
-# fallback to 1
-test -z "$PIECE_BUILD_NCPUS" && PIECE_BUILD_NCPUS=1
-test $PIECE_BUILD_NCPUS -lt 0 && PIECE_BUILD_NCPUS=1
-
-# define the expected maximal memory usage per process in MB
-if test `uname -i` = "x86_64" ; then
- mem_per_process=300
-else
- mem_per_process=200
-fi
-max_mem=`LANG=C free -t -m | sed -n "s|^Total: *\([0-9]*\).*$|\1|p"`
-
-# reduce paralelism according to the available memory
-# caused problem in the openSUSE Build Service
-# first start with number of CPUs
-if test -n "$PIECE_BUILD_NCPUS" && test "$PIECE_BUILD_NCPUS" -gt 1 ; then
- max_cpus="$(($max_mem / $mem_per_process))"
- test $PIECE_BUILD_NCPUS -gt $max_cpus && PIECE_BUILD_NCPUS=$max_cpus && echo "Warning: Reducing number of directories built in parallel to $max_cpus because of memory limits"
- test $PIECE_BUILD_NCPUS -le 0 && PIECE_BUILD_NCPUS= && PIECE_USE_JOBS= && echo "Warning: Do not use the parallel build at all becuse of memory limits"
-fi
-# second check the number of directories that are procced in parallel
-if test -n "$PIECE_USE_JOBS" && test "$PIECE_USE_JOBS" -gt 1 ; then
- max_jobs="$(($max_mem / $mem_per_process / $PIECE_BUILD_NCPUS))"
- test "$PIECE_USE_JOBS" -gt $max_jobs && PIECE_USE_JOBS=$max_jobs && echo "Warning: Reducing number of jobs per directory to $max_jobs because of memory limits"
- test "$PIECE_USE_JOBS" -le 0 && PIECE_USE_JOBS= && echo "Warning: Do not start more jobs in one directory at all because of memory limits"
-fi
-
-# finally, set the build flags for the selected parallelism
-if test "$PIECE_BUILD_NCPUS" -gt 1 ; then
- echo "Parallel build: using $PIECE_BUILD_NCPUS CPUs"
- EXTRA_BUILD_FLAGS="-P$PIECE_BUILD_NCPUS $EXTRA_BUILD_FLAGS"
-fi
-if test -n "$PIECE_USE_JOBS" ; then
- echo "Parallel build: using $PIECE_USE_JOBS jobs"
- EXTRA_BUILD_FLAGS="$EXTRA_BUILD_FLAGS -- -P$PIECE_USE_JOBS"
-fi
-
-# unpack extra sources when needed
-custom_unpack="$OO_TOOLSDIR/piece/unpack-$piece"
-export GNUCP=cp
-if test -f $custom_unpack; then
- echo "Invoke custom unpack: $custom_unpack"
- $custom_unpack $OO_TOOLSDIR/.. `pwd`
-fi
-
-$OO_TOOLSDIR/install-artwork $OO_TOOLSDIR/../src `pwd`
-
-# new modules
-test "$piece" = "filters" && mkdir -p lotuswordpro
-
-# apply flags
-builddir=`pwd`
-FLAGS=`$OO_TOOLSDIR/applyflags $OO_TOOLSDIR $builddir`
-# explicitely add --distro=System
-# it can't be detected easily
-FLAGS="$FLAGS --additional-sections=System"
-# additional piece-specific sections added in the package sources (special build)
-additional_sections_file="$builddir/ooo-build/apply-additional-sections"
-if test -f "$additional_sections_file" ; then
- for section in `sed "s|,| |g" $additional_sections_file` ; do
- FLAGS="$FLAGS --additional-sections=$section"
- done
-fi
-# additional piece specifix hotfixes added in the package sources (special build)
-FLAGS="$FLAGS --hotfixes=$builddir/ooo-build/hotfixes"
-
-# finally call apply.pl
-$OO_TOOLSDIR/../patches/apply.pl $OO_TOOLSDIR/../patches/dev300 \
- --tag=$ooo_build_tag $FLAGS $builddir || exit 1;
-
-# available source dirs
-# FIXME: omit optional directories that are not enabled (--with-system-XXX, ...)
-omit_dirs="-e applied_patches -e ^solver -e ooo-build -e javainstaller2"
-if test "$piece" != "libs-extern" -a "$piece" != "libs_extern" ; then
- omit_dirs="$omit_dirs -e ^external\$"
-fi
-dirs=`/bin/ls | grep -v $omit_dirs | tr '\n' ':'`
-# clean build?
-if test ! -d solver; then
- echo "A very clean straight-through build - deferring dep generation"
- export nodep=1
-fi
-
-# apply extra localization fixes in the libs-gui piece
-if test "$piece" = "libs-gui" -o "$piece" = "libs_gui" ; then
- # do not build modules depending on transex3,l10n,l10ntools,vcl now
- for dir in * ; do
- test -f $dir/prj/build.lst && \
- head -n 1 $dir/prj/build.lst | grep -q -e transex3 -e l10n -e l10ntools -e vcl && \
- omit_dirs="$omit_dirs -e ^$dir\$"
- l10ntools_dirs=`/bin/ls | grep -v $omit_dirs | tr '\n' ':'`
- # actually build the l10ntools module
- l10ntools_dirs="l10ntools:$l10ntools_dirs"
- done
- # build the localize tool
- perl $SOLARENV/bin/build.pl --subdirs:$l10ntools_dirs $EXTRA_BUILD_FLAGS $EXTRA_DMAKE_FLAGS || exit 1;
- unset nodep
- # apply sdf files
- $OO_TOOLSDIR/localize-ooo || exit 1;
-fi
-
-# final build
-perl $SOLARENV/bin/build.pl --subdirs:$dirs $EXTRA_BUILD_FLAGS $EXTRA_DMAKE_FLAGS || exit 1;
diff --git a/bin/piece/copyexcept b/bin/piece/copyexcept
deleted file mode 100755
index c12f3609a..000000000
--- a/bin/piece/copyexcept
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/perl -w
-
-use strict;
-use File::Copy;
-
-sub syntax()
-{
- print "copyexcept [-x exception-file] <src> <dest>\n";
- print " -x: file containing list of files not to copy\n";
- print " --help: this message\n";
-}
-
-sub copy_recursive($$$);
-sub copy_recursive($$$)
-{
- my ($excepts, $src, $dest) = @_;
-
- if (defined $excepts->{$src}) {
- print "skipping $src\n";
- return;
- }
-
- if (-d $src) {
- print "mkdir -p $dest\n";
- system("mkdir -p $dest") == 0 || die "Failed to mkdir -p $dest: $!";
-
- my $dir;
- opendir ($dir, $src) || die "Can't open dir $src: $!";
- my @entries;
- while (my $name = readdir ($dir)) {
- $name =~ /^\./ && next;
- $name =~ /^\s*$/ && next;
- copy_recursive ($excepts, "$src/$name", "$dest/$name");
- }
- closedir ($dir);
- } elsif (-f $src) {
- print "syscopy $src -> $dest\n";
- File::Copy::copy ($src, $dest) || die "Failed to copy $src -> $dest: $!";
- my $perm = (stat $src)[2];
- chmod ($perm, $dest);
- } else {
- print "skipping link $src\n";
- }
-}
-
-# main ...
-my (@src, $dest, $except);
-
-while (my $arg = shift @ARGV) {
- if ($arg eq '-h' || $arg eq '--help') {
- syntax();
- exit 0;
- } elsif ($arg eq '-x') {
- $except = shift @ARGV;
- } else {
- push @src, $arg;
- }
-}
-
-$dest = pop @src;
-@src && defined $dest || die "Missing src or dest\n";
-
-system("mkdir -p $dest") == 0 || die "Failed to mkdir -p $dest: $!";
-
-my $ef;
-my %exceptions;
-open ($ef, $except) || die "Can't open $except: $!";
-while (<$ef>) {
- chomp;
- # pre-process ?
- $exceptions{$_} = '1';
-}
-close ($ef);
-
-for my $s (@src) {
- my $suffix = $s;
- $suffix =~ s|^.*/||g;
- print "Copy $s -> $dest/$suffix\n";
- copy_recursive (\%exceptions, $s, "$dest/$suffix");
-}
diff --git a/bin/piece/desktop-support-app b/bin/piece/desktop-support-app
deleted file mode 100755
index 07ae20fe0..000000000
--- a/bin/piece/desktop-support-app
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-app=$3
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# hack for older SUSE distributions (to pass build checks)
-list_icon_dirs=
-if test "$DISTRO" = "SUSE-10.1" ; then
- list_icon_dirs=1
-fi
-
-create_wrapper()
-{
- mkdir -p $DESTDIR$OO_PREFIX/bin
- cat <<EOT >$DESTDIR$OO_PREFIX/bin/$1
-#!/bin/sh
-$OO_INSTDIR/program/$2 $3 "\$@"
-EOT
- chmod 755 $DESTDIR$OO_PREFIX/bin/$1
- test -n "$DESTDIR" && echo "$OO_PREFIX/bin/$1" >>files-$piece.txt
-}
-
-create_man_link()
-{
- mkdir -p $DESTDIR$OO_MANDIR/man1
- echo ".so man1/$2.1" >| $DESTDIR$OO_MANDIR/man1/$1.1
- gzip $DESTDIR$OO_MANDIR/man1/$1.1
- test -n "$DESTDIR" && echo "$OO_MANDIR/man1/$1.1.gz" >>files-$piece.txt
-}
-
-install_man()
-{
- mkdir -p $DESTDIR$OO_MANDIR/man1
- man_page_in=$OO_TOOLSDIR/../man/$1.1.in
- # use the distro specific man page if available
- if test -f $OO_TOOLSDIR/../man/$1.1_${DISTRO%%-*}.in ; then
- man_page_in=$OO_TOOLSDIR/../man/$1.1_${DISTRO%%-*}.in
- fi
- sed -e "s|@BINSUFFIX@|$BINSUFFIX|g" $man_page_in \
- >| "$DESTDIR$OO_MANDIR/man1/$1.1" || exit 1;
- gzip $DESTDIR$OO_MANDIR/man1/$1.1
- test -n "$DESTDIR" && echo "$OO_MANDIR/man1/$1.1.gz" >>files-$piece.txt
-}
-
-install_desktop_file()
-{
- mkdir -p $DESTDIR/usr/share/applications
- cp $OO_TOOLSDIR/../desktop/$1.desktop $DESTDIR/usr/share/applications || exit 1
- test -n "$DESTDIR" && echo "/usr/share/applications/$1.desktop" >>files-$piece.txt
-}
-
-install_icon()
-{
- category=$1
- icon_name=$2
- icon_dir=/usr/share/icons/hicolor
- # FIXME: fix the structure of icons in ooo-build/desktop direcotry
- test "$category" = "apps" && category_indir= || category_indir=$category
-
- test -n "$list_icon_dirs" && echo "%dir $icon_dir" >>files-$piece.txt
-
- for size in 16x16 22x22 24x24 32x32 48x48 scalable ; do
- # note that the scalable directory includes .svg icons
- file=$OO_TOOLSDIR/../desktop/$category_indir/$size/$icon_name.[ps][nv]g
- filename=`basename $file`
- target_dir="$icon_dir/$size/$category"
- test -f $file || continue;
- mkdir -p "$DESTDIR$target_dir"
- cp $file "$DESTDIR$target_dir/$filename" || exit 1;
- test -n "$DESTDIR" && echo "$target_dir/$filename" >>files-$piece.txt
- test -n "$list_icon_dirs" && echo "%dir $icon_dir/$size" >>files-$piece.txt
- test -n "$list_icon_dirs" && echo "%dir $icon_dir/$size/$category" >>files-$piece.txt
- done
-
- # create symlink below share/pixmaps to keep the backward compatibility
- if test "$category" = "apps" ; then
- file=$DESTDIR/$icon_dir/48x48/apps/$icon_name.png
- if test -f $file ; then
- filename=`basename $file`
- pixmaps_dir=/usr/share/pixmaps
- mkdir -p $DESTDIR$pixmaps_dir
- ln -sf $icon_dir/48x48/apps/$filename $DESTDIR$pixmaps_dir
- test -n "$DESTDIR" && echo "$pixmaps_dir/$filename" >>files-$piece.txt
- fi
- fi
-}
-
-case "$app" in
- "fromtemplate")
- create_wrapper "lofromtemplate" "soffice" "" || exit 1;
- test "$COMPAT_OOWRAPPERS" == 'YES' && create_wrapper "oofromtemplate" "soffice" "" || exit 1;
- install_desktop_file template || exit 1;
- ;;
- "unopkg")
- create_wrapper unopkg unopkg "" || exit 1;
- install_desktop_file ooo-extension-manager || exit 1;
- install_man $app || exit 1;
- install_icon mimetypes application-vnd.openofficeorg.extension || exit 1;
- ;;
- "ooffice")
- create_wrapper libreoffice soffice "" || exit 1;
- test "$COMPAT_OOWRAPPERS" == 'YES' && create_wrapper ooffice soffice "" || exit 1;
- install_desktop_file startcenter || exit 1;
- install_icon apps ooo-gulls || exit 1;
- install_man libreoffice || exit 1;
- test "$COMPAT_OOWRAPPERS" == 'YES' && create_man_link ooffice libreoffice || exit 1;
- test "$COMPAT_OOWRAPPERS" == 'YES' && create_man_link openoffice libreoffice || exit 1;
- ;;
- *)
- create_wrapper lo$app soffice "-$app" || exit 1;
- test "$COMPAT_OOWRAPPERS" == 'YES' && create_wrapper oo$app soffice "-$app" || exit 1;
- create_man_link lo${app} libreoffice || exit 1;
- test "$COMPAT_OOWRAPPERS" == 'YES' && create_man_link oo${app} libreoffice || exit 1;
- install_desktop_file $app || exit 1;
- install_icon apps ooo-$app || exit 1;
- ;;
-esac
diff --git a/bin/piece/env-libs-gui b/bin/piece/env-libs-gui
deleted file mode 100644
index f6269aa04..000000000
--- a/bin/piece/env-libs-gui
+++ /dev/null
@@ -1,2 +0,0 @@
-# psprint needs to link to vcl's main .so ...
-export STDOBJVCL=$SOLARPIECE/$INPATH/lib/salmain.o
diff --git a/bin/piece/env-libs_gui b/bin/piece/env-libs_gui
deleted file mode 100644
index f6269aa04..000000000
--- a/bin/piece/env-libs_gui
+++ /dev/null
@@ -1,2 +0,0 @@
-# psprint needs to link to vcl's main .so ...
-export STDOBJVCL=$SOLARPIECE/$INPATH/lib/salmain.o
diff --git a/bin/piece/file-list-artwork b/bin/piece/file-list-artwork
deleted file mode 100755
index 0914268af..000000000
--- a/bin/piece/file-list-artwork
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# there is only the devel package
-
-# remove the CVS dirs
-find $DESTDIR$OO_SOLVERDIR -depth -type d -name CVS -exec rm -rf {} \;
-# remove ugly executable rights
-find $DESTDIR -type f -name "*.png" -exec chmod 644 {} \;
-
-# move the stuff to /usr/share when enabled
-test "$OOO_BUILD_NOARCH" = 'YES' && $OO_TOOLSDIR/piece/install-devel-helper $piece $ooo_build_tag
diff --git a/bin/piece/file-list-base b/bin/piece/file-list-base
deleted file mode 100755
index b93003a8c..000000000
--- a/bin/piece/file-list-base
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-$OO_TOOLSDIR/piece/merge-file-lists "files-$piece.txt" $DESTDIR/gid_*
-$OO_TOOLSDIR/piece/desktop-support-app "$piece" "$ooo_build_tag" "$piece"
-
-# install the prebuilt registry files
-$OO_TOOLSDIR/piece/install-registry $piece $ooo_build_tag "$piece"
diff --git a/bin/piece/file-list-calc b/bin/piece/file-list-calc
deleted file mode 100755
index f7b0d1d2a..000000000
--- a/bin/piece/file-list-calc
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-$OO_TOOLSDIR/piece/merge-file-lists "files-$piece.txt" $DESTDIR/gid_*
-$OO_TOOLSDIR/piece/desktop-support-app "$piece" "$ooo_build_tag" "$piece"
-
-# install the prebuilt registry files
-$OO_TOOLSDIR/piece/install-registry $piece $ooo_build_tag "$piece"
-
-# prune redundant files the scp2 likes to make for us
-remove_file "files-$piece.txt" "$OO_INSTDIR/basis$VERSION/ure-link"
diff --git a/bin/piece/file-list-components b/bin/piece/file-list-components
deleted file mode 100755
index 65a9f580f..000000000
--- a/bin/piece/file-list-components
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# testtool subapckage
-$OO_TOOLSDIR/piece/merge-file-lists "files-testtool.txt" $DESTDIR/gid_Module_Optional_Testtool
-
-# generate the common file list
-$OO_TOOLSDIR/piece/merge-file-lists "files-$piece.txt" $DESTDIR/gid_*
-
-# officebean subpackage
-mv_file_between_flists files-officebean.txt files-$piece.txt $OO_INSTDIR/basis$VERSION/program/classes/officebean.jar
-mv_file_between_flists files-officebean.txt files-$piece.txt $OO_INSTDIR/basis$VERSION/program/libofficebean.so
-add_used_directories files-officebean.txt files-$piece.txt
-
-# prune redundant files the scp2 likes to make for us
-remove_file "files-$piece.txt" "$OO_INSTDIR/basis-link"
-remove_file "files-$piece.txt" "$OO_INSTDIR/basis$VERSION/ure-link"
-remove_dir "files-$piece.txt" "$OO_INSTDIR/ure"
-
-# solver is not in the file list, so rm is enough
-rm -f "$DESTDIR/$OO_INSTDIR/solver/bin/packinfo*.txt"
-rm -f "$DESTDIR/$OO_INSTDIR/solver/bin/userland.txt"
-
-exit 0
diff --git a/bin/piece/file-list-extras b/bin/piece/file-list-extras
deleted file mode 100755
index 02864658d..000000000
--- a/bin/piece/file-list-extras
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# generate the common file list
-$OO_TOOLSDIR/piece/merge-file-lists "files-$piece.txt" $DESTDIR/gid_*
-
-if test "$VENDORNAME" = "Novell" ; then
- # We want to update an existing gallery. Unfortunately, gengal does not work
- # because of missing services.rdb, ... Therefore we need to use a prebuilt
- # version
- echo "Adding Novell specific Draft.jpg to Backgrounds gallery..."
- GAL_DIR=$OO_INSTDIR/basis$VERSION/share/gallery
- if test `du --block-size=1 --apparent-size $DESTDIR$GAL_DIR/sg3.sdg | cut -f1` -eq 270058 ; then
- cp "$OO_TOOLSDIR/../src/Draft.jpg" "$DESTDIR$GAL_DIR/www-back/" || exit 1;
- chmod 644 $DESTDIR$GAL_DIR/www-back/Draft.jpg || exit 1;
- echo $GAL_DIR/www-back/Draft.jpg >>files-$piece.txt
- for file in sg3.sdg sg3.sdv sg3.thm ; do
- cp -f "$OO_TOOLSDIR/../src/$file" "$DESTDIR$GAL_DIR/$file" || exit 1;
- chmod 644 "$DESTDIR$GAL_DIR/$file" || exit 1;
- done
- else
- echo "Error: The upstream gallery sg3.sdg has changed. Please, update the prebuilt"
- echo " Novell variant in $OO_TOOLSDIR/../src/"
- exit 1;
- fi
-fi
-
-# prune .orig files created by patch tool
-remove_file "files-$piece.txt" "$OO_INSTDIR/basis$VERSION/presets/config/standard.soc.orig"
-
-# prune redundant files the scp2 likes to make for us
-remove_file "files-$piece.txt" "$OO_INSTDIR/basis-link"
-remove_file "files-$piece.txt" "$OO_INSTDIR/basis$VERSION/ure-link"
-remove_dir "files-$piece.txt" "$OO_INSTDIR/ure"
-
-# move the stuff to /usr/share when enabled
-if test "$OOO_BUILD_NOARCH" = 'YES' ; then
- $OO_TOOLSDIR/piece/install-l10n-helper $piece $ooo_build_tag "$DESTDIR" files-$piece.txt
- $OO_TOOLSDIR/piece/install-devel-helper $piece $ooo_build_tag
-fi
diff --git a/bin/piece/file-list-filters b/bin/piece/file-list-filters
deleted file mode 100755
index eaa522545..000000000
--- a/bin/piece/file-list-filters
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# optional filters
-$OO_TOOLSDIR/piece/merge-file-lists "files-$piece-optional.txt" $DESTDIR/gid_Module_Optional_*
-
-# merge the rest
-$OO_TOOLSDIR/piece/merge-file-lists "files-$piece.txt" $DESTDIR/gid_*
diff --git a/bin/piece/file-list-help b/bin/piece/file-list-help
deleted file mode 100755
index c1e62a87d..000000000
--- a/bin/piece/file-list-help
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# sort by lang
-$OO_TOOLSDIR/piece/sort-l10n $piece $ooo_build_tag $DESTDIR
-
-# put the rest into the common file list
-$OO_TOOLSDIR/piece/merge-file-lists "files-$piece.txt" $DESTDIR/gid_*
-# FIXME: hid.lst will be in testtool package (built in ?)
-remove_file "files-$piece.txt" $OO_INSTDIR/basis$VERSION/program/hid.lst
-# non-wanted mess
-remove_dir "files-$piece.txt" $OO_INSTDIR/ure
-
-# FIXME: is a generic solution possible?
-# remove the duplicated .zip archives
-rm -rf $DESTDIR$OO_SOLVERDIR/pck/*.zip
-
-# prune redundant files the scp2 likes to make for us
-remove_file "files-$piece.txt" "$OO_INSTDIR/basis-link"
-remove_file "files-$piece.txt" "$OO_INSTDIR/basis$VERSION/ure-link"
-remove_file "files-$piece.txt" "$OO_INSTDIR/share/extensions/package.txt"
-remove_file "files-$piece.txt" "$OO_INSTDIR/LICENSE.odt"
-
-# move the stuff to /usr/share when enabled
-if test "$OOO_BUILD_NOARCH" = 'YES' ; then
- $OO_TOOLSDIR/piece/install-l10n-helper $piece $ooo_build_tag "$DESTDIR" files-$piece-*.txt
- $OO_TOOLSDIR/piece/install-devel-helper $piece $ooo_build_tag
-fi
diff --git a/bin/piece/file-list-impress b/bin/piece/file-list-impress
deleted file mode 100755
index b23cffa35..000000000
--- a/bin/piece/file-list-impress
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# impress
-$OO_TOOLSDIR/piece/merge-file-lists "files-$piece.txt" $DESTDIR/gid_*
-$OO_TOOLSDIR/piece/desktop-support-app "$piece" "$ooo_build_tag" "$piece"
-
-# draw subpackage (heh, only desktop stuff)
-$OO_TOOLSDIR/piece/desktop-support-app "draw" "$ooo_build_tag" "draw"
-
-# install the prebuilt registry files
-$OO_TOOLSDIR/piece/install-registry $piece $ooo_build_tag "$piece"
-$OO_TOOLSDIR/piece/install-registry $piece $ooo_build_tag "draw"
-
-# prune redundant files the scp2 likes to make for us
-remove_file "files-$piece.txt" "$OO_INSTDIR/basis$VERSION/ure-link"
diff --git a/bin/piece/file-list-l10n b/bin/piece/file-list-l10n
deleted file mode 100755
index 9b75add79..000000000
--- a/bin/piece/file-list-l10n
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# generate the common file list
-$OO_TOOLSDIR/piece/merge-file-lists "files-$piece.txt" $DESTDIR/gid_*
-
-if test "$VENDORNAME" = "Novell" ; then
- # We want to update an existing gallery. Unfortunately, gengal does not work
- # because of missing services.rdb, ... Therefore we need to use a prebuilt
- # version
- echo "Adding Novell specific Draft.jpg to Backgrounds gallery..."
- GAL_DIR=$OO_INSTDIR/basis$VERSION/share/gallery
- if test `du --block-size=1 --apparent-size $DESTDIR$GAL_DIR/sg3.sdg | cut -f1` -eq 270058 ; then
- cp "$OO_TOOLSDIR/../src/Draft.jpg" "$DESTDIR$GAL_DIR/www-back/" || exit 1;
- chmod 644 $DESTDIR$GAL_DIR/www-back/Draft.jpg || exit 1;
- echo $GAL_DIR/www-back/Draft.jpg >>files-$piece.txt
- for file in sg3.sdg sg3.sdv sg3.thm ; do
- cp -f "$OO_TOOLSDIR/../src/$file" "$DESTDIR$GAL_DIR/$file" || exit 1;
- chmod 644 "$DESTDIR$GAL_DIR/$file" || exit 1;
- done
- else
- echo "Error: The upstream gallery sg3.sdg has changed. Please, update the prebuilt"
- echo " Novell variant in $OO_TOOLSDIR/../src/"
- exit 1;
- fi
-fi
-
-# prune redundant files the scp2 likes to make for us
-remove_file "files-$piece.txt" "$OO_INSTDIR/basis-link"
-remove_file "files-$piece.txt" "$OO_INSTDIR/basis$VERSION/ure-link"
-remove_dir "files-$piece.txt" "$OO_INSTDIR/ure"
-
-# move the stuff to /usr/share when enabled
-if test "$OOO_BUILD_NOARCH" = 'YES' ; then
- $OO_TOOLSDIR/piece/install-l10n-helper $piece $ooo_build_tag "$DESTDIR" files-$piece.txt
- $OO_TOOLSDIR/piece/install-devel-helper $piece $ooo_build_tag
-fi
diff --git a/bin/piece/file-list-libs-core b/bin/piece/file-list-libs-core
deleted file mode 100755
index 4333ac7ab..000000000
--- a/bin/piece/file-list-libs-core
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# hack to move the registry files to the right optional packages
-if test -n "$DESTDIR" ; then
- $OO_TOOLSDIR/piece/save-registry $piece $ooo_build_tag || exit 1;
-fi
-
-# gnome subpackage
-$OO_TOOLSDIR/piece/merge-file-lists "files-gnome.txt" $DESTDIR/gid_Module_Optional_Gnome
-
-#kde subpackage
-$OO_TOOLSDIR/piece/merge-file-lists "files-kde.txt" $DESTDIR/gid_Module_Optional_Kde
-
-# generate the common file list
-$OO_TOOLSDIR/piece/merge-file-lists "files-$piece.txt" $DESTDIR/gid_*
-$OO_TOOLSDIR/piece/desktop-support-app "$piece" "$ooo_build_tag" "unopkg"
-
-# fix up kde subpackage
-mv_file_between_flists files-kde.txt files-$piece.txt $OO_INSTDIR/program/kdefilepicker
-mv_file_between_flists files-kde.txt files-$piece.txt $OO_INSTDIR/basis$VERSION/program/fps_kde.uno.so
-mv_file_between_flists files-kde.txt files-$piece.txt $OO_INSTDIR/basis$VERSION/program/libvclplug_kde[0-9]*l..so
-mv_file_between_flists files-kde.txt files-$piece.txt $OO_INSTDIR/basis$VERSION/program/libkabdrv1.so
-add_used_directories files-kde.txt files-$piece.txt
-
-# create kde4 subpackage
-rm -f files-kde4.txt
-mv_file_between_flists files-kde4.txt files-kde.txt $OO_INSTDIR/basis$VERSION/program/kde4be1.uno.so
-mv_file_between_flists files-kde4.txt files-$piece.txt $OO_INSTDIR/basis$VERSION/program/fps_kde4.uno.so
-add_used_directories files-kde4.txt files-$piece.txt
-
-# fix up gnome subpackage
-mv_file_between_flists files-gnome.txt files-$piece.txt $OO_INSTDIR/basis$VERSION/program/libevoab2.so
-mv_file_between_flists files-gnome.txt files-$piece.txt $OO_INSTDIR/basis$VERSION/program/fps_gnome.uno.so
-mv_file_between_flists files-gnome.txt files-$piece.txt $OO_INSTDIR/basis$VERSION/program/libvclplug_gtk[0-9]*l..so
-mv_file_between_flists files-$piece.txt files-gnome.txt $OO_INSTDIR/basis$VERSION/program/ucpgvfs1.uno.so
-add_used_directories files-gnome.txt files-$piece.txt
-
-# mailmerge subpackage
-mv_file_between_flists files-mailmerge.txt files-$piece.txt $OO_INSTDIR/basis$VERSION/share/registry/modules/org/openoffice/Office/Writer/Writer-javamail.xcu
-mv_file_between_flists files-mailmerge.txt files-$piece.txt $OO_INSTDIR/basis$VERSION/program/mailmerge.py
-add_used_directories files-mailmerge.txt files-$piece.txt
-chmod 755 $DESTDIR$OO_INSTDIR/basis$VERSION/program/mailmerge.py
-
-if test "$OOO_BUILD_NOARCH" = 'YES' ; then
- # make compat symlinks for helpcontent common files
- mkdir -p $DESTDIR$OO_INSTDIR_SHARE/basis$VERSION/help
- echo "%dir $OO_INSTDIR_SHARE/basis$VERSION/help" >>files-$piece.txt
- for file in main_transform.xsl idxcaption.xsl idxcontent.xsl ; do
- ln -sf "$OO_INSTDIR/basis$VERSION/help/$file" $DESTDIR$OO_INSTDIR_SHARE/basis$VERSION/help/$file
- echo "$OO_INSTDIR_SHARE/basis$VERSION/help/$file" >>files-$piece.txt
- done
-fi
-
-# mess
-remove_file "files-$piece.txt" "$OO_INSTDIR/basis-link"
-remove_file "files-$piece.txt" "$OO_INSTDIR/basis$VERSION/ure-link"
-remove_file "files-$piece.txt" "$OO_INSTDIR/basis$VERSION/program/services.rdb"
-remove_dir "files-$piece.txt" "$OO_INSTDIR/ure"
-
-exit 0
diff --git a/bin/piece/file-list-libs-extern b/bin/piece/file-list-libs-extern
deleted file mode 100755
index 838c1cccd..000000000
--- a/bin/piece/file-list-libs-extern
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-$OO_TOOLSDIR/piece/merge-file-lists "files-$piece.txt" $DESTDIR/gid_*
-
-# prune redundant files the scp2 likes to make for us
-remove_file "files-$piece.txt" "$OO_INSTDIR/basis$VERSION/ure-link"
-
-exit 0
diff --git a/bin/piece/file-list-libs-gui b/bin/piece/file-list-libs-gui
deleted file mode 100755
index bd2ffe5fe..000000000
--- a/bin/piece/file-list-libs-gui
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# generate the common file list
-$OO_TOOLSDIR/piece/merge-file-lists "files-$piece.txt" $DESTDIR/gid_*
-
-remove_dir "files-$piece.txt" $OO_INSTDIR/basis-link
-remove_dir "files-$piece.txt" $OO_INSTDIR/basis$VERSION/ure-link
-remove_dir "files-$piece.txt" $OO_INSTDIR/ure
-
-exit 0
diff --git a/bin/piece/file-list-postprocess b/bin/piece/file-list-postprocess
deleted file mode 100755
index d0252ab47..000000000
--- a/bin/piece/file-list-postprocess
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# icon themes subpackages
-$OO_TOOLSDIR/piece/merge-file-lists "files-icon-themes.txt" $DESTDIR/gid_Module_Root_Files_Images
-
-# generate the common file list
-$OO_TOOLSDIR/piece/merge-file-lists "files-$piece.txt" $DESTDIR/gid_*
-$OO_TOOLSDIR/piece/desktop-support-app "$piece" "$ooo_build_tag" "fromtemplate"
-$OO_TOOLSDIR/piece/desktop-support-app "$piece" "$ooo_build_tag" "ooffice"
-
-# extra shared MIME info
-mkdir -p $DESTDIR/usr/share/mime/packages
-cp $OO_TOOLSDIR/../desktop/openoffice.xml $DESTDIR/usr/share/mime/packages
-echo /usr/share/mime/packages/openoffice.xml >>files-$piece.txt
-if test "z$RUN_POST_INSTALL_SCRIPTS" = "zyes" &&
- which update-mime-database >/dev/null 2>&1 ; then
- update-mime-database /usr/share/mime || :
-fi
-
-# create bash completion
-mkdir -p $DESTDIR/etc/bash_completion.d
-$OO_TOOLSDIR/generate-bash-completion --binsuffix="$BINSUFFIX" $OO_TOOLSDIR/bash-completion.in $DESTDIR/etc/bash_completion.d/libreoffice${BINSUFFIX}.sh || exit 1;
-echo /etc/bash_completion.d/libreoffice${BINSUFFIX}.sh >>files-$piece.txt
-if test "$COMPAT_OOWRAPPERS" == 'YES' ; then
- $OO_TOOLSDIR/generate-bash-completion --compat-oowrappers --binsuffix="$BINSUFFIX" $OO_TOOLSDIR/bash-completion.in $DESTDIR/etc/bash_completion.d/ooffice${BINSUFFIX}.sh || exit 1;
- echo /etc/bash_completion.d/ooffice${BINSUFFIX}.sh >>files-$piece.txt
-fi
-
-echo "Installing $OO_INSTDIR/basis$VERSION/program/java-set-classpath"
-sed -e "s|@OOINSTBASE@|$OO_INSTDIR|g" $OO_TOOLSDIR/java-set-classpath.in >$DESTDIR$OO_INSTDIR/basis$VERSION/program/java-set-classpath || exit 1;
-chmod 755 $DESTDIR$OO_INSTDIR/basis$VERSION/program/java-set-classpath
-echo "$OO_INSTDIR/basis$VERSION/program/java-set-classpath" >>files-$piece.txt
-
-# FIXME: about, intro are not isntalled by the installer
-for file in intro.png intro-pt_BR.png about.png about-pt_BR.png ; do
- if test "$OOO_BUILD_NOARCH" = 'YES' ; then
- # create only symlinks; the icons will be packaged separately
- ln -sf $OO_INSTDIR_SHARE/program/$file $DESTDIR$OO_INSTDIR/program
- else
- cp $OO_SOLVERDIR/default_images/brand/$file $DESTDIR$OO_INSTDIR/program || exit 1;
- fi
- echo "$OO_INSTDIR/program/$file" >>files-$piece.txt
-done
-# welcome screen
-mkdir -p $DESTDIR$OO_INSTDIR/program/shell
-echo "%dir $OO_INSTDIR/program/shell" >>files-$piece.txt
-for file in backing_left.png backing_left-pt_BR.png backing_right.png \
- backing_right-pt_BR.png backing_rtl_left.png \
- backing_rtl_right.png backing_space.png ; do
- if test "$OOO_BUILD_NOARCH" = 'YES' ; then
- # create only symlinks; the icons will be packaged separately
- ln -sf $OO_INSTDIR_SHARE/program/shell/$file $DESTDIR$OO_INSTDIR/program/shell
- else
- cp $OO_SOLVERDIR/default_images/brand/shell/$file $DESTDIR$OO_INSTDIR/program/shell || exit 1;
- fi
- echo "$OO_INSTDIR/program/shell/$file" >>files-$piece.txt
-done
diff --git a/bin/piece/file-list-sdk b/bin/piece/file-list-sdk
deleted file mode 100755
index fc3aaab2d..000000000
--- a/bin/piece/file-list-sdk
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# install mono GAC, pkgconfig file, ...
-$OO_TOOLSDIR/install-sdk || exit 1;
-
-# SDK files
-$OO_TOOLSDIR/piece/merge-file-lists "files-$piece.txt" $DESTDIR/gid_Module_Root_SDK
-
-# doc subpackage
-rm -f files-$piece-doc.txt
-# in this case we move all entries including directories
-mv_file_between_flists files-$piece-doc.txt files-$piece.txt "%dir $OO_DOCDIR/sdk/docs.*"
-mv_file_between_flists files-$piece-doc.txt files-$piece.txt "$OO_DOCDIR/sdk/docs.*"
-mv_file_between_flists files-$piece-doc.txt files-$piece.txt "$OO_DOCDIR/sdk/examples"
-mv_file_between_flists files-$piece-doc.txt files-$piece.txt "$OO_DOCDIR/sdk/index.html"
-mv_file_between_flists files-$piece-doc.txt files-$piece.txt "%dir $OO_INSTDIR/basis$VERSION/sdk/examples.*"
-mv_file_between_flists files-$piece-doc.txt files-$piece.txt "$OO_INSTDIR/basis$VERSION/sdk/docs"
-mv_file_between_flists files-$piece-doc.txt files-$piece.txt "$OO_INSTDIR/basis$VERSION/sdk/examples.*"
-mv_file_between_flists files-$piece-doc.txt files-$piece.txt "$OO_INSTDIR/basis$VERSION/sdk/index.html"
-add_used_directories files-$piece-doc.txt files-$piece.txt
-
-# prune default solver if installed for packaging purposes
-# SDK installation does not put there anything interesting
-test -n "$DESTDIR" && rm -rf "$DESTDIR/$OO_INSTDIR/solver"
-
-# save udkversion.mk
-mkdir -p "$DESTDIR$OO_INSTDIR/solver/inc"
-cp $DESTDIR/usr/include/$OO_INSTDIRNAME/udkversion.mk "$DESTDIR$OO_INSTDIR/solver/inc"
-echo "%dir $OO_INSTDIR/solver" >>"files-$piece.txt"
-echo "%dir $OO_INSTDIR/solver/inc" >>"files-$piece.txt"
-echo "$OO_INSTDIR/solver/inc/udkversion.mk" >>"files-$piece.txt"
-
-# prune duplicated headers
-remove_dir "files-$piece.txt" /usr/include/$OO_INSTDIRNAME
-remove_dir "files-$piece.txt" /usr/share/idl/$OO_INSTDIRNAME
-# fix compat symlinks
-ln -sf $OO_INSTDIR/solver/inc "$DESTDIR$OO_INSTDIR/basis$VERSION/sdk/include"
-ln -sf $OO_INSTDIR/solver/idl "$DESTDIR$OO_INSTDIR/basis$VERSION/sdk/idl"
-
-# fix permissions
-find $DESTDIR$OO_DOCDIR/sdk -name "*.gif" -exec chmod a-x {} \;
-# FIXME: the following hack might be removed after openSUSE-11.0 is not longer supported
-# it looks like perl-(un)zip stuff does not preserve permissions on this old system :-(
-chmod 755 $DESTDIR$OO_INSTDIR/basis$VERSION/sdk/{config*,setsdkenv_unix,bin/*}
-
-exit 0
diff --git a/bin/piece/file-list-ure b/bin/piece/file-list-ure
deleted file mode 100755
index 3e45916c1..000000000
--- a/bin/piece/file-list-ure
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# install mono GAC, pkgconfig file, ...
-rm -f files-mono.txt
-$OO_TOOLSDIR/install-mono
-
-# pyuno subpackages
-$OO_TOOLSDIR/piece/merge-file-lists "files-pyuno.txt" $DESTDIR/gid_Module_Optional_Pyuno
-
-# merge the rest already now, so we could extract the mono stuff
-$OO_TOOLSDIR/piece/merge-file-lists "files-$piece.txt" $DESTDIR/gid_*
-
-# finalize mono subpackage
-mv_file_between_flists files-mono.txt files-$piece.txt $OO_INSTDIR/basis$VERSION/program/cli_.*.dll
-mv_file_between_flists files-mono.txt files-$piece.txt $OO_INSTDIR/basis$VERSION/program/cli_.*.dll.config
-mv_file_between_flists files-mono.txt files-$piece.txt $OO_INSTDIR/basis$VERSION/program/policy.*.cli_.*.dll
-mv_file_between_flists files-mono.txt files-$piece.txt $OO_INSTDIR/ure/lib/cli_.*.dll
-mv_file_between_flists files-mono.txt files-$piece.txt $OO_INSTDIR/ure/lib/cli_.*.dll.config
-mv_file_between_flists files-mono.txt files-$piece.txt $OO_INSTDIR/ure/lib/policy.*.cli_.*.dll
-mv_file_between_flists files-mono.txt files-$piece.txt $OO_INSTDIR/ure/lib/libcli_.*.so
-add_used_directories files-mono.txt files-$piece.txt
-
-# install the internal stlport if available; it might be included in the solver from bootstrap
-if test -f $OO_INSTDIR/solver/lib/libstlport_gcc.so ; then
- cp $OO_INSTDIR/solver/lib/libstlport_gcc.so $DESTDIR$OO_INSTDIR/ure/lib
- echo "$OO_INSTDIR/ure/lib/libstlport_gcc.so" >>files-$piece.txt
-fi
-
-# install the hacky script to regenerate UNO cache with broken extensions
-mkdir $DESTDIR/usr/sbin
-install -m 755 $OO_TOOLSDIR/unopkg-regenerate-cache $DESTDIR/usr/sbin
-echo /usr/sbin/unopkg-regenerate-cache >>files-$piece.txt
-
-# FIXME: generate pythonloader.unorc to be ready for python components registration
-# it might be generated in postprocess togeter with other profiles but it is too late
-# might be fixed by the planed scp2-split
-# IMPORTANT: the other variant generated in postprocess is explicitely removed in bin/piece/post-inst-postprocess
-cat <<EOT >$DESTDIR$OO_INSTDIR/basis$VERSION/program/pythonloader.unorc
-[Bootstrap]
-PYUNO_LOADER_PYTHONPATH=\$ORIGIN
-EOT
-echo "%config $OO_INSTDIR/basis$VERSION/program/pythonloader.unorc" >>files-pyuno.txt
-
-# it is helpful to symlink uno.py and unohelper.py into the system python path,
-# for example, see http://lists.gnu.org/archive/html/gnumed-devel/2010-04/msg00094.html
-python_dir="$OO_LIBDIR/python"
-test -L "$python_dir" && python_dir=`readlink -f $python_dir`
-if test -d "$python_dir" ; then
- mkdir -p "$DESTDIR$python_dir"
- for f in uno.py unohelper.py ; do
- ln -sf "$OO_INSTDIR/basis$VERSION/program/$f" "$DESTDIR$python_dir/$f"
- echo "$python_dir/$f" >>files-pyuno.txt
- done
-fi
-
-exit 0
diff --git a/bin/piece/file-list-writer b/bin/piece/file-list-writer
deleted file mode 100755
index ff9b8d7f4..000000000
--- a/bin/piece/file-list-writer
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# math subpackages
-$OO_TOOLSDIR/piece/merge-file-lists "files-math.txt" $DESTDIR/gid_Module_Prg_Math_Bin
-$OO_TOOLSDIR/piece/desktop-support-app "math" "$ooo_build_tag" "math"
-
-# merge the rest
-$OO_TOOLSDIR/piece/merge-file-lists "files-$piece.txt" $DESTDIR/gid_*
-$OO_TOOLSDIR/piece/desktop-support-app "$piece" "$ooo_build_tag" "$piece"
-$OO_TOOLSDIR/piece/desktop-support-app "$piece" "$ooo_build_tag" "web"
-
-# install the prebuilt registry files
-$OO_TOOLSDIR/piece/install-registry $piece $ooo_build_tag "$piece"
-$OO_TOOLSDIR/piece/install-registry $piece $ooo_build_tag "math"
-
-# prune redundant files the scp2 likes to make for us
-remove_file "files-$piece.txt" "$OO_INSTDIR/basis$VERSION/ure-link"
diff --git a/bin/piece/inst-artwork b/bin/piece/inst-artwork
deleted file mode 100755
index f84924ba2..000000000
--- a/bin/piece/inst-artwork
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-distro=$3
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# copy all icons into local solver ...
-echo "Installing artwork ..."
-LOCALSOLVER="$SOLARPIECEVERSION/$INPATH"
-mkdir -p $LOCALSOLVER
-for dir in default_images external_images ooo_custom_images; do
- cp -lR $SRC_ROOT/$dir $LOCALSOLVER
-done
diff --git a/bin/piece/install-bootstrap b/bin/piece/install-bootstrap
deleted file mode 100755
index 9a8c89058..000000000
--- a/bin/piece/install-bootstrap
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/bin/bash
-# install bash script - invoked from package-ooo
-
-ln -sf . "$OOINSTDIR/solver/$INPATH"
-
-# environment
-cp $OOBUILDDIR/*.[sS]et.sh $DEST
-
-# openoffice lst
-cp -R $OOBUILDDIR/instsetoo_native $DEST
-
-# setup_native - not built; odd deps ...
-cp $OOBUILDDIR/../setup_native/source/packinfo/*.txt $DEST/bin
-
-# autoconf stuff (needed for SDK)
-cp $OOBUILDDIR/config.guess $DEST/bin
-cp $OOBUILDDIR/config.sub $DEST/bin
-
-LOINSTTOOLSDIR=$OOINSTDIR/build
-
-# build
-mkdir -p $LOINSTTOOLSDIR/bin
-mkdir -p $LOINSTTOOLSDIR/patches
-mkdir -p $LOINSTTOOLSDIR/po
-mkdir -p $LOINSTTOOLSDIR/desktop
-mkdir -p $LOINSTTOOLSDIR/man
-cp -a $TOOLSDIR/bin/* $LOINSTTOOLSDIR/bin || exit 1;
-cp -a $TOOLSDIR/patches/* $LOINSTTOOLSDIR/patches || exit 1;
-cp -a $TOOLSDIR/po/lo-build-*.sdf $LOINSTTOOLSDIR/po || exit 1;
-cp -a $TOOLSDIR/desktop/* $LOINSTTOOLSDIR/desktop || exit 1;
-cp -a $TOOLSDIR/man/* $LOINSTTOOLSDIR/man || exit 1;
-
-# the split scripts requires libreoffice.1.in
-# rename the original file for LO-3.4
-cp $LOINSTTOOLSDIR/man/openoffice.1.in $LOINSTTOOLSDIR/man/libreoffice.1.in
-
-# copy pieces of interest into src to be moved out later ...
-echo "copy source pieces"
-mkdir -p $LOINSTTOOLSDIR/src
-cp -a \
- $TOOLSDIR/src/*.dic \
- $TOOLSDIR/src/*.bmp \
- $TOOLSDIR/src/*.config.in \
- $TOOLSDIR/src/*.pc.in \
- $TOOLSDIR/src/*.png \
- $TOOLSDIR/src/*.jpg \
- $TOOLSDIR/src/*.patch \
- $TOOLSDIR/src/sg3.* \
- $TOOLSDIR/src/sofficerc-upstream \
- $TOOLSDIR/src/default_images \
- $TOOLSDIR/src/helpcontent2 \
- $TOOLSDIR/src/icons \
- $TOOLSDIR/src/layout \
- $LOINSTTOOLSDIR/src || exit 1;
-# src/sdf
-mkdir -p $LOINSTTOOLSDIR/src/sdf
-cp -a \
- $TOOLSDIR/src/sdf/*.sdf \
- $TOOLSDIR/src/sdf/README \
- $LOINSTTOOLSDIR/src/sdf || exit 1;
-
-# branding stuff should go into noarch path
-if test "$OOO_BUILD_NOARCH" = 'YES' ; then
- OO_INSTDIR_SHARE=`echo $OOINSTDIR | sed -e "s|/lib6\?4\?/|/share/|"`
- mkdir -p $OO_INSTDIR_SHARE/build/src
- mv \
- $LOINSTTOOLSDIR/src/sofficerc-upstream \
- $OO_INSTDIR_SHARE/build/src || exit 1;
-fi
-
-# bin mess and stuff that won't be needed
-find $LOINSTTOOLSDIR -depth -name "CVS" -type d -exec rm -rf {} \;
-find $LOINSTTOOLSDIR -name "*.orig" -exec rm -rf {} \;
-find $LOINSTTOOLSDIR -type f -exec chmod go-w {} \;
-find $LOINSTTOOLSDIR -name "Makefile*" -exec rm -rf {} \;
-# .in files does not make sense without configure
-# in addition, rpm/find-provides goes mad with incomplete script headers, e.g. #!@PERL@ -pi.bak -w
-rm -f \
- $LOINSTTOOLSDIR/bin/setup.in \
- $LOINSTTOOLSDIR/bin/font-munge.in \
- $LOINSTTOOLSDIR/bin/help-font-munge.in \
- $LOINSTTOOLSDIR/bin/piece/sys-setup.in
-
-echo "done bootstrap specific install"
diff --git a/bin/piece/install-devel-helper b/bin/piece/install-devel-helper
deleted file mode 100755
index d92f8047d..000000000
--- a/bin/piece/install-devel-helper
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# do we need to do anything?
-test "$OOO_BUILD_NOARCH" != 'YES' && exit 0
-
-# ugly hack to move solver to /usr/share
-mkdir -p $DESTDIR$OO_INSTDIR_SHARE
-mv $DESTDIR$OO_INSTDIR/solver $DESTDIR$OO_INSTDIR_SHARE
-
-# file list for the devel stuff
-echo "%dir $OO_INSTDIR_SHARE" >files-$piece-devel.txt
-for dir in `find $DESTDIR$OO_INSTDIR_SHARE/solver -type d | sed "s|$DESTDIR||" | grep -v "$OO_INSTDIR_SHARE/solver/noarch"|sort` ; do
- echo "%dir $dir" >>files-$piece-devel.txt
-done
-find $DESTDIR$OO_INSTDIR_SHARE/solver -type f | sed "s|$DESTDIR||" | grep -v "$OO_INSTDIR_SHARE/solver/noarch"|sort >>files-$piece-devel.txt
-# install the list for link-to-ooo-home script if not empty; top dir is always mentioned
-if test `cat files-$piece-devel.txt | wc -l` -gt 1 ; then
- cp files-$piece-devel.txt $DESTDIR$OO_INSTDIR_SHARE/
- echo "$OO_INSTDIR_SHARE/files-$piece-devel.txt" >>files-$piece-devel.txt
-fi
-
-# file list for the prebuilt stuff if any
-rm -f files-$piece-prebuilt.txt
-for dir in `find $DESTDIR$OO_INSTDIR_SHARE/solver -type d | sed "s|$DESTDIR||" | grep "$OO_INSTDIR_SHARE/solver/noarch"|sort` ; do
- echo "%dir $dir" >>files-$piece-prebuilt.txt
-done
-find $DESTDIR$OO_INSTDIR_SHARE/solver -type f | sed "s|$DESTDIR||" | grep "$OO_INSTDIR_SHARE/solver/noarch"|sort >>files-$piece-prebuilt.txt
-# install the list for link-to-ooo-home script if not empty
-if test `cat files-$piece-prebuilt.txt | wc -l` -gt 1 ; then
- cp files-$piece-prebuilt.txt $DESTDIR$OO_INSTDIR_SHARE/
- echo "$OO_INSTDIR_SHARE/files-$piece-prebuilt.txt" >>files-$piece-prebuilt.txt
-fi
diff --git a/bin/piece/install-generic b/bin/piece/install-generic
deleted file mode 100755
index 6211be522..000000000
--- a/bin/piece/install-generic
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-custom_install="$OO_TOOLSDIR/piece/inst-$piece"
-if test -f $custom_install; then
- $custom_install $piece $ooo_build_tag $DISTRO
-fi
-
-# ----- monster make_installer section -----
-
-# can't just source $SOLARENV/inc/minor.mk [ sadly ] - missing quotes.
-export BUILD=`grep "BUILD=" "$OO_INSTDIR/solenv/inc/minor.mk" | cut -d= -f2`
-export LAST_MINOR=`grep "LAST_MINOR=" "$OO_INSTDIR/solenv/inc/minor.mk" | cut -d= -f2`
-export OUT="`pwd`/solver/install"
-export LOCAL_OUT=$OUT;
-export LOCAL_COMMON_OUT=$OUT;
-# install from the copy in the solver - to the system
-export SOLARVERSION="`pwd`/solver"
-
-if test "z$DESTDIR" == "z"; then
- echo "install-generic requires a destdir to be set"
- exit 1
-fi
-
-export LANGS=
-for lang in $OO_LANGS ; do
- if test -z "$LANGS" ; then
- LANGS="$lang"
- else
- LANGS="$LANGS,$lang"
- fi
-done
-
-if test "$piece" = "sdk" ; then
- PRODUCT=LibreOffice_SDK
-else
- PRODUCT=LibreOffice
-fi
-
-perl -w $SOLARENV/bin/make_installer.pl \
- -f $OO_INSTDIR/solver/instsetoo_native/util/openoffice.lst \
- -l $LANGS -p $PRODUCT -buildid $BUILD -destdir $DESTDIR \
- -dontstrip -simple $OO_INSTDIR
-
-# ----- end monster make_installer section -----
-
-# Now try to copy the bits we didn't install into a -devel RPM ...
-
-SRCDIR="$SOLARPIECEVERSION/$INPATH"
-DEST="$DESTDIR$OO_SOLVERDIR"
-echo "Copy / install remaining devel pieces ..."
-filelist="$DESTDIR/all_filelist.txt"
-touch $filelist
-$OO_TOOLSDIR/piece/copyexcept -x $filelist $SRCDIR/* $DEST
-rm -f $filelist
-
-# FIXME: we need to provide external files also in other pieces (e.g. prebuild mono.dlls)
-# external is automatically taken into build dependencies even when removed from
-# the dirs list in build-generic
-# => the build.lst and deliver.log must be deleted in other pieces to avoid conflict of devel packages
-if test "$piece" != "libs-extern" -a "$piece" != "libs_extern" ; then
- rm -f $DEST/inc/external/build.lst
- rm -f $DEST/inc/external/deliver.log
-fi
-
-# hotfix post upstream install
-# allows to modify the install process from the piece package sources
-hotfix_post_upstream_install="ooo-build/bin/post-upstream-install"
-if test -f "$hotfix_post_upstream_install" ; then
- "$hotfix_post_upstream_install" $piece $ooo_build_tag $DISTRO || exit 1;
-fi
-
-
-# hack to install the noarch files later in the noarch RPMs
-if test -n "$DESTDIR" -a "$OOO_BUILD_NOARCH" = 'YES' ; then
- $OO_TOOLSDIR/piece/save-noarch $piece $ooo_build_tag || exit 1;
-fi
-
-# file lists
-if test -n "$DESTDIR" -a "$OO_BUILD_FILE_LISTS_ENABLE" != "NO" ; then
- custom_file_list="$OO_TOOLSDIR/piece/file-list-$piece"
- if test -f $custom_file_list; then
- $custom_file_list $piece $ooo_build_tag $DISTRO || exit 1;
- else
- $OO_TOOLSDIR/piece/merge-file-lists "files-$piece.txt" $DESTDIR/gid_*
- fi
-fi
-
-# add executable rights to libraries
-echo "Fixing permissions of libraries..."
-for libdir in $OO_INSTDIR/ure/lib \
- $OO_INSTDIR/program \
- $OO_INSTDIR/basis$VERSION/program ; do
- test -d $DESTDIR$libdir && \
- find $DESTDIR$libdir -name '*.so*' -type f -exec chmod a+x {} \;
-done
-
-# clean up solver
-echo "Cleaning up solver..."
-find $DEST -depth -name ".svn" -type d -exec rm -rf {} \;
-find $DEST -name "*.orig" -exec rm -rf {} \;
-find $DEST -type f -exec chmod go-w {} \;
-find $DEST -name "*.h" -exec chmod a-x {} \;
-find $DEST -name "*.hxx" -exec chmod a-x {} \;
-
-# custom post install stuff
-custom_post_install="$OO_TOOLSDIR/piece/post-inst-$piece"
-if test -f $custom_post_install -a "$OO_BUILD_POST_PROCESS_ENABLE" != "NO" ; then
- $custom_post_install $piece $ooo_build_tag $DISTRO || exit 1;
-fi
-
-# hotfix post install; allows to modify the install process from the piece package sources
-hotfix_post_install="ooo-build/bin/post-install"
-if test -f "$hotfix_post_install" ; then
- "$hotfix_post_install" $piece $ooo_build_tag $DISTRO || exit 1;
-fi
diff --git a/bin/piece/install-l10n-helper b/bin/piece/install-l10n-helper
deleted file mode 100755
index 5eb9b6785..000000000
--- a/bin/piece/install-l10n-helper
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-src_root=$3
-shift; shift; shift;
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# do we need to do anything?
-test "$src_root" = "$DESTDIR" && test "$OOO_BUILD_NOARCH" != 'YES' && exit 0
-
-target_instdir="$OO_INSTDIR"
-test "$OOO_BUILD_NOARCH" = 'YES' && target_instdir="$OO_INSTDIR_SHARE"
-
-echo "Installing l10n stuff to the final location..."
-
-for filelist in $@ ; do
- test -f $filelist || continue;
- echo -n " processing $filelist... "
- # we wants to create even empty directories
- for source_dir in `grep "^%dir" $filelist | sed "s|^%dir[[:blank:]]*||" | sort -ru` ; do
- target_dir=`echo $source_dir | sed "s|$OO_INSTDIR|$target_instdir|"`
- mkdir -p "$DESTDIR$target_dir"
- done
- # install files
- for source_file in `grep -v "^%dir" $filelist | sort -ru` ; do
- target_file=`echo $source_file | sed "s|$OO_INSTDIR|$target_instdir|"`
- if test "$src_root" = "$DESTDIR" ; then
- # moving inside DESTDIR; must be moving from /usr/lib to /usr/share
- mv "$src_root$source_file" "$DESTDIR$target_file"
- else
- # most likely copying from the system solver (prebuilt l10n stuff)
- cp "$src_root$source_file" "$DESTDIR$target_file"
- fi
- done
- # remove empty directories when moved inside DESTDIR
- if test "$src_root" = "$DESTDIR" ; then
- for source_dir in `grep "^%dir" $filelist | sed "s|^%dir[[:blank:]]*||" | sort -ru` ; do
- rmdir --ignore-fail-on-non-empty $src_root/$source_dir
- done
- fi
- # update the file list when moved to /usr/share
- if test "$OOO_BUILD_NOARCH" = 'YES' ; then
- sed "s|$OO_INSTDIR|$target_instdir|" $filelist >>$filelist.new
- mv $filelist.new $filelist
- # we need the file list installed to create compat symlinks in %post
- # FIXME: We should fix OOo to find the files in /usr/share directly
- cp $filelist $DESTDIR$target_instdir/
- echo "$target_instdir/$filelist" >>$filelist
- fi
-
- echo "done"
-done
diff --git a/bin/piece/install-l10n-merged b/bin/piece/install-l10n-merged
deleted file mode 100755
index c9d805b1b..000000000
--- a/bin/piece/install-l10n-merged
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# install all the prebuilt pieces into single package per lang
-
-# sort by lang
-$OO_TOOLSDIR/piece/sort-l10n $piece $ooo_build_tag $OO_SOLVERDIR/noarch
-
-# move to /usr/share if wanted
-test "$OOO_BUILD_NOARCH" = 'YES' && $OO_TOOLSDIR/piece/install-l10n-helper $piece $ooo_build_tag $OO_SOLVERDIR/noarch files-$piece-*.txt $OO_SOLVERDIR/noarch
-
diff --git a/bin/piece/install-registry b/bin/piece/install-registry
deleted file mode 100755
index 24da7d1c2..000000000
--- a/bin/piece/install-registry
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-pack=$3
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# do we need to do anything?
-test "$src_root" = "$DESTDIR" && exit 0
-
-echo "Installing $pack registry to the final location..."
-
-registry_system=$OO_INSTDIR/basis$VERSION/share/registry
-registry_solver=$OO_SOLVERDIR/registry
-filelist_prebuilt=files-$pack-registry.txt
-filelist_final=files-$pack.txt
-
-if test ! -f $registry_solver/$filelist_prebuilt ; then
- echo "Error: cannot find prebuilt registry for $pack"
- # FIXME: the gistry files are generated in postscript instaqled of libs core
- # it must be fixed and changed tobefore the final release !!!
- # => temporary using exit 0;
- # exit 1;
- exit 0;
-fi
-
-# we wants to create even empty directories
-for dir in `grep "^%dir" $registry_solver/$filelist_prebuilt | sed "s|^%dir[[:blank:]]*||" | sort -ru` ; do
- mkdir -p "$DESTDIR$dir"
-done
-
-# install files
-for file in `grep -v "^%dir" $registry_solver/$filelist_prebuilt | sort -ru` ; do
- file_prebuilt=`echo $file | sed "s|$registry_system|$registry_solver|"`
- cp "$file_prebuilt" "$DESTDIR$file"
-done
-
-# update the final filelist
-cat $registry_solver/$filelist_prebuilt >>$filelist_final
diff --git a/bin/piece/link-to-ooo-home b/bin/piece/link-to-ooo-home
deleted file mode 100755
index 03c122394..000000000
--- a/bin/piece/link-to-ooo-home
+++ /dev/null
@@ -1,59 +0,0 @@
-#! /bin/sh
-
-usage()
-{
- echo "This script (un)links or unlinks the given to/from OOo home"
- echo
- echo "Usage: ${0##*/} [--unlink] filelist"
-}
-
-datadir=/usr/share
-# FIXME: It is hard to predict if 32-bit or 64-bit OOo is installed on x86_64
-libdir_list="/usr/lib"
-if test "`uname -m`" = "x86_64" ; then
- libdir_list="$libdir_list /usr/lib64"
-fi
-
-
-if test "$1" = "--unlink" ; then
- link_mode=false
- shift
-else
- link_mode=true
-fi
-
-test "$1" = "--help" && usage && exit 0
-test ! -f "$1" -o -n "$2" && usage && exit 1
-
-filelist="$1"
-
-
-for libdir in $libdir_list ; do
-
- if $link_mode ; then
- for dir in `grep "%dir" $filelist | sed "s|%dir ||"` ; do
- linkdir=`echo $dir | sed "s|$datadir|$libdir|"`
- mkdir -p $linkdir
- done
- fi
-
- for file in `grep -v "%dir" $filelist | sed "s|%config ||"` ; do
- linkedfile=`echo $file | sed "s|$datadir|$libdir|"`
- if $link_mode ; then
- ln -sf $file $linkedfile
- else
- # do not remove still valid symlinks
- # they might have another meaning; they might be another package...
- test -L $linkedfile -a ! -r $linkedfile && rm -f $linkedfile
- fi
- done
-
-# FIXME: do not remove empty directories
-# they might be in another package that is not longer noarch, ...
-# if ! $link_mode ; then
-# for dir in `grep "%dir" $filelist | sed "s|%dir ||" | sort -r` ; do
-# linkdir=`echo $dir | sed "s|$datadir|$libdir|"`
-# rmdir $linkdir 2>/dev/null || true
-# done
-# fi
-done
diff --git a/bin/piece/merge-file-lists b/bin/piece/merge-file-lists
deleted file mode 100755
index e277bc1f0..000000000
--- a/bin/piece/merge-file-lists
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-
-# merge severla file lists into a single one
-
-# Usage: merge-file-lists out_filelist in_filelist...
-
-out=$1
-shift
-
-rm -f $out.new
-touch $out.new
-for list in $* ; do
- grep "^%dir" $list >>$out.new
- for file in `grep -v "^%dir" $list` ; do
- # FIXME: ignore some files that are in the file list but are not
- # installed (mostly .rdb files)
- # accept invalid symlinks; absolute symlinks are broken because of $DESTDIR
- if test -e $DESTDIR$file -o -L $DESTDIR$file ; then
- echo "$file" >>$out.new
- else
- echo "WARNING: Ignoring in the file list: $file"
- fi
- done
- rm -f $list
-done
-sed -e "s|/./|/|g" $out.new | sort -u >$out
-rm $out.new
diff --git a/bin/piece/noulf b/bin/piece/noulf
deleted file mode 100755
index 3bb98ec85..000000000
--- a/bin/piece/noulf
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/perl -w
-
-use strict;
-
-# dummy 'ulfex' / transex3/source/lngex.cxx equivalent, cf unitools.mk
-
-my ($in, $out);
-my $ignored = '';
-while (my $arg = shift @ARGV) {
- if ($arg eq '-i') {
- $in = shift @ARGV;
- } elsif ($arg eq '-o') {
- $out = shift @ARGV;
- } else {
- $ignored .= " $arg";
- }
-}
-
-print "noulf: in $in, out $out, ignored args: '$ignored'\n";
-
-# copy it straight ...
-my ($infile, $outfile);
-open $infile, "$in" || die "can't open $in: $!";
-open $outfile, ">$out" || die "can't open $out: $!";
-while (<$infile>) {
- print $outfile $_;
-}
-close $outfile;
-close ($infile);
diff --git a/bin/piece/noulfconv b/bin/piece/noulfconv
deleted file mode 100755
index 0f9965b0a..000000000
--- a/bin/piece/noulfconv
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/perl -w
-
-use strict;
-
-# dummy 'ulfconv' / setup_native/source/ulfconv.cxx equivalent, cf. unitools.mk
-
-my ($in, $out);
-my $ignored = '';
-while (my $arg = shift @ARGV) {
- if ($arg eq '-o') {
- $out = shift @ARGV;
- } elsif ($arg eq '-t') {
- shift @ARGV; # ignore the translation table
- } else {
- $in = $arg;
- }
-}
-
-print "noulfconv: in $in, out $out\n";
-
-# copy it straight ...
-my ($infile, $outfile);
-open $infile, "$in" || die "can't open $in: $!";
-open $outfile, ">$out" || die "can't open $out: $!";
-while (<$infile>) {
- print $outfile $_;
-}
-close $outfile;
-close ($infile);
diff --git a/bin/piece/post-inst-components b/bin/piece/post-inst-components
deleted file mode 100755
index 1b1ca6b3d..000000000
--- a/bin/piece/post-inst-components
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# remove bits from setup_native/source/packinfo to avoid
-# conflists with bootstrap
-
-DEST="$DESTDIR$OO_SOLVERDIR"
-
-echo "Removing duplicated packinfo..."
-for file in `ls $SRC_ROOT/setup_native/source/packinfo | grep -v CVS` ; do
- rm -f $DEST/bin/$file
-done
diff --git a/bin/piece/post-inst-postprocess b/bin/piece/post-inst-postprocess
deleted file mode 100755
index ebc920790..000000000
--- a/bin/piece/post-inst-postprocess
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# Now do magic stuff to register the services - fully hacky (sadly).
-export OO_BUILD_SERVICES=1
-# avoid infinite recursion ;-)
-export OO_BUILD_POST_PROCESS_ENABLE='NO'
-# do not remove the already created file lists
-export OO_BUILD_FILE_LISTS_ENABLE='NO'
-
-# FIXME: replace the below hack with the solution suggested at
-# http://www.openoffice.org/issues/show_bug.cgi?id=94759
-# It obsoletes also patches/dev300/piece-jvmfwk.diff
-# crazy hack to produce working jvmfwk3rc
-cat <<EOT >jvmfwk3rc
-[Bootstrap]
-UNO_JAVA_JFW_ENV_JREHOME=true
-UNO_JAVA_JFW_ENV_CLASSPATH=true
-UNO_JAVA_JFW_VENDOR_SETTINGS=file://$OO_INSTDIR/ure/share/misc/javavendors.xml
-EOT
-export JVMFWK_CONFIGFILE=file://`pwd`/jvmfwk3rc
-
-# check and remove the sofficerc
-# we use the prebuilt one in the upstream branding package
-# FIXME: it would be possible to generate this file by the installer, e.g. in bootstrap
-if test "$DISTRO" = "SUSE" || echo "$DISTRO" | grep -q "SUSE-11" ; then
- if diff -q $DESTDIR$OO_INSTDIR/program/sofficerc $OO_INSTDIR_SHARE/build/src/sofficerc-upstream ; then
- rm -f $DESTDIR$OO_INSTDIR/program/sofficerc
- ln -sf $OO_INSTDIR_SHARE/program/sofficerc $DESTDIR$OO_INSTDIR/program/sofficerc
- else
- echo "Error: The file \"sofficerc\" has changed in the sources"
- echo " Please, update the prebuilt variant in $OO_INSTDIR_SHARE/ooo-build/src/sofficerc-upstream"
- exit 1;
- fi
-fi
-
-# needed to register python components
-export PYTHONPATH=$OO_INSTDIR/basis$VERSION/program
-
-$OO_TOOLSDIR/piece/install-generic $piece $ooo_build_tag
-# urgh - unbelievably nasty:
-cp -a LibreOffice/gid_Starregistry_Services_Rdb_rdb/*/services.rdb \
- $DESTDIR$OO_INSTDIR/basis$VERSION/program/services.rdb
-cp -a LibreOffice/gid_Starregistry_Services_Rdb_Ure_servicesrdb/*/services.rdb \
- $DESTDIR$OO_INSTDIR/ure/share/misc/services.rdb
-# udpate the file list
-if test -f "files-$piece.txt" ; then
- echo "$OO_INSTDIR/basis$VERSION/program/services.rdb" >>"files-$piece.txt"
- echo "$OO_INSTDIR/ure/share/misc/services.rdb" >>"files-$piece.txt"
-fi
-
-# FIXME: remove pythonloader.unorc
-# it has been already generated by bin/piece/file-list-ure
-remove_file "files-$piece.txt" "$OO_INSTDIR/basis$VERSION/program/pythonloader.unorc"
-
-# prune redundant files the scp2 likes to make for us
-remove_file "files-$piece.txt" "$OO_INSTDIR/basis-link"
-remove_file "files-$piece.txt" "$OO_INSTDIR/basis$VERSION/ure-link"
-
-# we need just the prebuilt l10n stuff in solver at this stage
-find "$DESTDIR/$OO_INSTDIR/solver" -mindepth 1 -maxdepth 1 -path "$DESTDIR/$OO_INSTDIR/solver/noarch" -prune -o -exec rm -rf {} \;
diff --git a/bin/piece/post-inst-ure b/bin/piece/post-inst-ure
deleted file mode 100755
index 017e22c98..000000000
--- a/bin/piece/post-inst-ure
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# sigh, basis-link should be in URE because SDK is based just on URE
-if ! test -L $DESTDIR/$OO_INSTDIR/basis-link ; then
- ln -sf basis$VERSION $DESTDIR/$OO_INSTDIR/basis-link
- echo "$OO_INSTDIR/basis-link" >>"files-$piece.txt"
-fi
-
-if test "$OOO_BUILD_NOARCH" = "YES" ; then
- # create the base directories in /usr/share
- mkdir -p $DESTDIR/$OO_INSTDIR_SHARE/basis$VERSION
- ln -sf basis$VERSION $DESTDIR/$OO_INSTDIR_SHARE/basis-link
- # install a helper script to link the noarch stuff from /usr/share to /usr/lib
- cp -a $OO_TOOLSDIR/piece/link-to-ooo-home $DESTDIR/$OO_INSTDIR_SHARE
- if test -f "files-$piece.txt" ; then
- echo "%dir $OO_INSTDIR_SHARE" >>"files-$piece.txt"
- echo "%dir $OO_INSTDIR_SHARE/basis$VERSION" >>"files-$piece.txt"
- echo "$OO_INSTDIR_SHARE/basis-link" >>"files-$piece.txt"
- echo "$OO_INSTDIR_SHARE/link-to-ooo-home" >>"files-$piece.txt"
- fi
-fi
diff --git a/bin/piece/save-noarch b/bin/piece/save-noarch
deleted file mode 100755
index 0320f2753..000000000
--- a/bin/piece/save-noarch
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# this script saves the architecture independent files back into solver
-# they could be installed later in the noarch RPMs
-
-# the file lists are needed
-test -n "$DESTDIR" || exit 0;
-
-# you should use --enable-build-noarch configure switch to enable this feature
-test "$OOO_BUILD_NOARCH" = 'YES' || exit 0;
-
-DEST="$DESTDIR$OO_SOLVERDIR/noarch"
-
-# help need not be saved; it is built directly in noarch package
-# will be moved to /usr/share in file-list-help
-test "$piece" = "help" && exit 0;
-
-# HACK: the lyt-* layout tamplates are available at
-# http://extensions.services.openoffice.org/project/TemplatePack_II
-# they are not really localized
-# we do not want them copied into all l10n packageswe,definitely
-# For example, they are already included in the separate package
-# OpenOffice_org-templates-presentation-layouts on OpenSUSE
-# FIXME: make me it optional?; allow move them to the common dir?
-for list in $DESTDIR/gid_Module_Langpack_Basis_* ; do
- test -f $list || continue;
- for file in `sort -u $list | grep "$OO_INSTDIR/basis$VERSION/share/template/[-_[:alnum:]]*/layout/lyt-.*$"` ; do
- echo "Warning: removing unwanted: $file"
- rm $DESTDIR$file
- done
- sed "s|$OO_INSTDIR/basis$VERSION/share/template/[-_[:alnum:]]*/layout/lyt-.*$||" $list >$list.new
- mv $list.new $list
-done
-
-echo "Saving noarch stuff back into solver ..."
-
-# always create the top dir, so we could put it into the package even when
-# just en-US is being built
-mkdir -p $DEST
-
-# lang packs are noarch
-for list in $DESTDIR/gid_Module_Langpack* $DESTDIR/gid_Module_Helppack* ; do
- test -f $list || continue;
- # we want to keep the en_US localization in the package
- echo $list | grep "en_US" && continue;
- for source in `grep -v "^%dir" $list | sort -ru` ; do
- if test "$OOO_BUILD_NOARCH" = 'YES' ; then
- target=`echo "$source" | sed "s|$OO_INSTDIR|$OO_INSTDIR_SHARE|"`
- else
- target="$source"
- fi
- source_dir=`dirname $source`
- target_dir=`dirname $target`
- mkdir -p "$DEST$target_dir"
- mv "$DESTDIR$source" "$DEST$target_dir"
- rmdir --ignore-fail-on-non-empty "$DESTDIR$source_dir"
- done
- # we need to save the list of files with the piece-specific name
- listname_piece=`echo $list | sed -e "s|\(gid_Module_Langpack\)|\1_$piece|" -e "s|\(gid_Module_Helppack\)|\1_$piece|"`
- listname_piece=`basename $listname_piece`
- if test "$OOO_BUILD_NOARCH" = 'YES' ; then
- sed "s|$OO_INSTDIR|$OO_INSTDIR_SHARE|" $list >$DEST/$listname_piece
- rm $list
- else
- mv $list $DEST/$listname_piece
- fi
-done
-
-# icon themes are noarch but they could be keep as is in the separate package
-# and just repacked later, see $OO_TOOLSDIR/piece/file-list-postprocess
diff --git a/bin/piece/save-registry b/bin/piece/save-registry
deleted file mode 100755
index a0345875c..000000000
--- a/bin/piece/save-registry
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-# this script saves the architecture independent files back into solver
-# they could be installed later in the noarch RPMs
-
-# the file lists are needed
-test -n "$DESTDIR" || exit 0;
-
-echo "Saving prebuilt registry files for optional packages back into solver ..."
-
-registry_system=$OO_INSTDIR/basis$VERSION/share/registry
-registry_solver=$OO_SOLVERDIR/registry
-
-mkdir -p $DESTDIR/$registry_solver
-
-for list in $DESTDIR/gid_Module_Prg_* ; do
- echo " $list"
- # name of the target package
- pack=`echo $list | \
- sed -e "s|$DESTDIR/gid_Module_Prg_\([[:alpha:]]*\)_Bin|\1|" -e "s|Wrt|Writer|" | \
- tr [:upper:] [:lower:]`
-
- filelist=files-$pack-registry.txt
-
- # generate file list
- # the schema files are used during build. e.g. for reportbuilder => should be in the final location
- grep "$registry_system" $list | grep -v "$registry_system/schema" >$DESTDIR/$registry_solver/$filelist
- grep -v "$registry_system" $list >$list.new
- grep "$registry_system/schema" $list >>$list.new
- mv $list.new $list
-
- # move the registry to solver
- for source in `grep -v "^%dir" $DESTDIR/$registry_solver/$filelist | sort -ru` ; do
- target=`echo "$source" | sed "s|$registry_system|$registry_solver|"`
- source_dir=`dirname $source`
- target_dir=`dirname $target`
- mkdir -p "$DESTDIR$target_dir"
- mv "$DESTDIR$source" "$DESTDIR$target_dir"
- rmdir --ignore-fail-on-non-empty "$DESTDIR$source_dir"
- done
-
-done
-
diff --git a/bin/piece/sort-l10n b/bin/piece/sort-l10n
deleted file mode 100755
index 0c94f6e82..000000000
--- a/bin/piece/sort-l10n
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/bin/bash
-
-piece=$1
-ooo_build_tag=$2
-gid_dir=$3
-
-source $OO_TOOLSDIR/piece/sys-setup
-
-echo "Sorting the lang stuff ..."
-
-for lang in $OO_LANGS ; do
- echo -n " processing $lang... "
- lang=`echo $lang | sed "s|-|_|g"`
- files_found=
- filelist=files-$piece-$lang.txt
- rm -f $filelist
- for list in $gid_dir/gid_Module_Langpack*$lang $gid_dir/gid_Module_Helppack*$lang ; do
- test -f $list || continue;
- files_found=1
- # add to the lang specific file-list
- cat $list >>$filelist.new
- # remove the old files from the $DESTDIR but not from the system solver (prebuilt l10n stuff)
- test "$gid_dir" = "$DESTDIR" && rm $list
- done
- # clean up the file list
- if test -f $filelist.new ; then
- echo "%dir $OO_INSTDIR" >>$filelist.new
- sort -u $filelist.new >$filelist
- rm -f $filelist.new
- fi
- test -n "$files_found" && echo "done" || echo "nothing to do"
-done
diff --git a/bin/piece/sys-setup.in b/bin/piece/sys-setup.in
deleted file mode 100755
index d6a90fd73..000000000
--- a/bin/piece/sys-setup.in
+++ /dev/null
@@ -1,159 +0,0 @@
-#!/bin/bash
-
-# stuff from configure
-export DISTRO='@DISTRO@'
-export VENDORNAME='@VENDORNAME@'
-export VERSION='@OOO_VERSION@'
-export OO_INSTDIRNAME=@OOOINSTALLDIRNAME@
-export OO_INSTDIR=@libdir@/$OO_INSTDIRNAME
-export OO_PREFIX=@prefix@
-export OO_LIBDIR=@libdir@
-datarootdir=@datarootdir@
-export OO_MANDIR=@mandir@
-export OO_DOCDIR=@docdir@
-export OO_LANGS='@OOO_LANGS@'
-export OOO_BUILD_NOARCH='@OOO_BUILD_NOARCH@'
-export COMPAT_OOWRAPPERS='@COMPAT_OOWRAPPERS@'
-# do not lost ARCH_FLAGS from the environmnet; potential duplicates should not harm
-export ARCH_FLAGS="$ARCH_FLAGS @WITH_ARCH_FLAGS@"
-
-if test "z$OO_INSTDIR" == "z"; then
- echo "Error: missing oo instdir"
- exit 1
-fi
-# a saneish default directory for the solver
-if test "z$OO_SOLVERDIR" == "z"; then
- export OO_SOLVERDIR=$OO_INSTDIR/solver
-fi
-
-# should be used only togehter with $OOO_BUILD_NOARCH = "YES"
-# to install the stuff in /usr/share rather than into /usr/lib(64)
-export OO_INSTDIR_SHARE=`echo $OO_INSTDIR | sed -e "s|lib6\?4\?|share|"`
-
-# installed system libraries
-URE_DIR=$OO_INSTDIR/ure
-BASIS_DIR=$OO_INSTDIR/basis$VERSION
-INSTALL_DIR=$BASIS_DIR/program
-DEV_DIR=$OO_SOLVERDIR
-
-# dmake can't cope with '_' in env. var names (interestingly)
-export SPLITUREDIR=$URE_DIR
-export SPLITBASISDIR=$BASIS_DIR
-export SPLITINSTALLDIR=$INSTALL_DIR
-export SPLITDEVDIR=$DEV_DIR
-
-PRESERVE_PATH=$PATH
-PATH=
-
-# include all the main configured settings by default
-. $DEV_DIR/*.[sS]et.sh
-
-# override / extend ones we need to adapt
-SRC_ROOT=`pwd`
-
-# piece source dir for 3rd party stuff
-TARFILE_LOCATION="$SRC_ROOT/ooo-build/src"
-
-# new local solver
-export SOLARPIECEVERSION="$SRC_ROOT/solver"
-export SOLARPIECE="$SOLARPIECEVERSION"
-# FIXME: we should this variable in the OOo sources
-export SOLARSRC="$SRC_ROOT"
-
-# system solver bits ...
-export SOLARVER="$DEV_DIR"
-export SOLARENV="$DEV_DIR/../solenv"
-export MAKEDEPEND="$DEV_DIR/bin/makedepend"
-export SOLARVERSION="$SOLARVER"
-export SOLARENVINC="$SOLARENV/inc"
-export LOCALINI="$SOLARENV/config"
-export STAR_INIROOT="$SOLARENV/config"
-export STAR_INIROOTOLD="$SOLARENV/config"
-export STAR_STANDLST="$SOLARENV/config/stand.lst"
-export STAR_SSCOMMON="$SOLARENV/config/ssolar.cmn"
-export STAR_SSOLARINI="$SOLARENV/config/ssolar.ini"
-export DMAKEROOT="$SOLARENVINC/startup"
-export UNOINCLUDES="-I$DEV_DIR/inc/offuh -I$SOLARPIECE/$INPATH/inc/offuh"
-
-# FIXME: the older LD_LIBRARY_PATH is necessary to build bean module; it adds the Java paths detected by config_office/configure
-export LD_LIBRARY_PATH=".:$DEV_DIR/lib:$URE_DIR/lib:$INSTALL_DIR:$SOLARPIECE/$INPATH/lib:$SOLARENV/$INPATH/lib:$JAVA_HOME/jre/lib/i386:$JAVA_HOME/jre/lib/i386/xawt:$JAVA_HOME/jre/lib/i386/client:$JAVA_HOME/jre/lib/i386/native_threads:../lib:$SOLARVER/lib:/opt/OpenOffice/HEAD/build/lib:/opt/mozilla/lib:$LD_LIBRARY_PATH"
-export PATH="$SOLARPIECE/$INPATH/bin:$URE_DIR/bin:$JAVA_HOME/bin:.:$SOLARVER/bin:$SOLARENV/$OUTPATH/bin:$SOLARENV/bin:$PRESERVE_PATH"
-SOLARINC=" -I. -I$SOLARVER/inc/stl -I$SOLARVER/inc/external -I$SOLARVER/inc -I$SOLARENV/$OUTPATH/inc -I$DEV_DIR/inc -I$SOLARENV/inc -I$SRC_ROOT/res -I$SOLARENV/inc/Xp31"
-SOLARINC="$SOLARINC -I$SOLARPIECE/$INPATH/inc/stl -I$SOLARPIECE/$INPATH/inc/external -I$SOLARPIECE/$INPATH/inc -I$SOLARPIECE/$OUTPATH/inc"
-SOLARINC="$SOLARINC -I$JAVA_HOME/include -I$JAVA_HOME/include/linux -I$JAVA_HOME/include/native_threads/include -I/usr/include"
-
-SOLARLIB_SYSTEM="$SOLARLIB" # FIXME: this is necessary to build bean module; it adds the Java paths detected by config_office/configure
-SOLARLIB=" -L../lib -L$SOLARPIECE/$INPATH/lib -L$SOLARENV/$OUTPATH/lib -L$SOLARENV/$OUTPATH/lib"
-SOLARLIB="$SOLARLIB -L$URE_DIR/lib -L$INSTALL_DIR "
-# the system solver (from devel packages) must be searched after the normal installation directory because it includes static libraries that we do not want
-# it must be before other system libraries to prefer the internal versions of the libraries if they are enabled and built
-SOLARLIB="$SOLARLIB -L$SOLARVER/lib"
-SOLARLIB="$SOLARLIB -L$JAVA_HOME/lib -L$JAVA_HOME/jre/lib/i386 -L$JAVA_HOME/jre/lib/i386/client -L$JAVA_HOME/jre/lib/i386/native_threads -L/usr/lib -L/usr/lib/xulrunner-1.9"
-SOLARLIB="$SOLARLIB $SOLARLIB_SYSTEM"
-export SOLARLIB SOLARINC
-export SOLARIDLINC="-I$SOLARVERSION/$INPATH/idl$EXT_UPDMINOR -I$SOLARPIECE/$INPATH/idl$EXT_UPDMINOR"
-
-# aliases
-alias mkout="perl $SOLARENV/bin/mkout.pl"
-alias deliver="perl $SOLARENV/bin/deliver.pl"
-alias build="perl $SOLARENV/bin/build.pl"
-alias build_client="perl $SOLARENV/bin/build_client.pl"
-alias zipdep="perl $SOLARENV/bin/zipdep.pl"
-alias nmake="dmake"
-
-# force langs
-if test -n "$FORCE_OOO_LANGS" ; then
- OO_LANGS="$FORCE_OOO_LANGS"
- WITH_LANG="$FORCE_OOO_LANGS"
-fi
-
-# some useful funtions
-
-# remove installed file even from the file list
-# Params: file_list file_to_remove
-remove_file()
-{
- rm -f "$DESTDIR/$2"
- perl -pi -e "s|^$2$||" "$1"
-}
-
-# remove installed directory even from the file list
-# Params: file_list dir_to_remove
-remove_dir()
-{
- echo rm -rf "$DESTDIR/$2"
- rm -rf "$DESTDIR/$2"
- perl -pi -e "s|^(%dir\s*)?$2.*$||" "$1"
-}
-
-# move one file from one list of files to a second one
-# Params: target_file_list source_file_list file_to_move
-mv_file_between_flists()
-{
- if grep "^$3\$" $2 >/dev/null 2>&1 ; then
- # \$3 can be regular expression
- grep "^$3\$" $2 >>$1
- perl -pi -e "s|^$3$||" $2
- fi
-}
-# add the directories from the source list of files to the target list of
-# file which are used in the target list of files but are missing there
-# Params: target_file_list source_file_list
-add_used_directories()
-{
- sort -u -r $2 | sed -n "s|^%dir \(.*\)\$|s%^\\\\(\1\\\\).*%\\\\1%p|p" >$2.pattern
- sed -n -f $2.pattern $1 | sort -u | sed "s|^|%dir |" >>$1
- rm $2.pattern
- sort -u $1 >$1.unique
- mv $1.unique $1
-}
-
-# remove a duplicity between two filelist
-# Params: filelist_with_original filelist_with_duplicity duplicit_path
-remove_duplicity_from_flists()
-{
- if grep "$3" "$1" >/dev/null 2>&1 && \
- grep "$3" "$2" >/dev/null 2>&1 ; then
- perl -pi -e "s|^$3$||" $2
- fi
-}
diff --git a/bin/piece/unpack-extras b/bin/piece/unpack-extras
deleted file mode 100755
index ed8dd437d..000000000
--- a/bin/piece/unpack-extras
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env bash
-
-TOOLSDIR=$1
-OOBUILDDIR=$2
-
-echo "Copying custom user-dicts into tree"
-[ -d $OOBUILDDIR/extras/source/wordbook ] || mkdir -p $OOBUILDDIR/extras/source/wordbook
-cp -f $TOOLSDIR/src/*.dic $OOBUILDDIR/extras/source/wordbook || exit 1;
diff --git a/bin/po-cleanup b/bin/po-cleanup
deleted file mode 100755
index 0b866a841..000000000
--- a/bin/po-cleanup
+++ /dev/null
@@ -1,97 +0,0 @@
-#!/usr/bin/perl
- eval 'exec /usr/bin/perl -S $0 ${1+"$@"}'
- if $running_under_some_shell;
-#!/usr/bin/perl
-
-use strict;
-
-sub process_file($)
-{
- my $file = shift;
- my $out = "";
-
- open (FILE, "$file") ||
- die "can't open \"$file\" for reading: $!\n";
-
- my $fuzzy=0;
- while (my $line = <FILE>) {
- chomp $line;
-
- if ($line =~ m/^#, fuzzy/) {
- $fuzzy=1;
- } elsif ($line =~ m/^\s*$/) {
- $fuzzy=0;
- } if ($line =~ m/^#~ msgid/ ||
- $line =~ m/^#~ msgstr/ ||
- $line =~ m/^#~ msgctxt/ ||
- $line =~ m/^#~ \"/) {
- # ignore these lines if they are not prefixed by the #, fuzzy comment
- # otherwise msgcat is confused
- print "$line\n" unless ($fuzzy);
- next unless ($fuzzy);
-
- }
-
- $out .= "$line\n";
- }
-
- close(FILE);
-
- return $out;
-}
-
-sub write_file($$)
-{
- my ($file, $content) = @_;
-
- open (FILE, '>', "$file") ||
- show_error("Can't open \"$file\" for writing: $!");
-
- print FILE $content;
-
- close (FILE);
-}
-
-
-sub cleanup_file($)
-{
- my $file = shift;
-
- my $out = process_file($file);
- write_file($file, $out);
-}
-
-
-########################################################################
-# help
-
-sub usage()
-{
- print "This tool cleans the given .po files\n\n" .
-
- "Usage:\n".
- "\tpo-cleanup [--help] file.po...\n";
-}
-
-
-#######################################################################
-#######################################################################
-# MAIN
-#######################################################################
-#######################################################################
-
-for my $arg (@ARGV) {
- if ($arg eq '--help' || $arg eq '-h') {
- usage;
- exit 0;
- } elsif ($arg =~ m/^-/) {
- die "Error: Unkown option: $arg\n";
- } else {
- if (-f $arg) {
- cleanup_file($arg);
- } else {
- die "Error: Is not a file: $arg\n";
- }
- }
-}
-
diff --git a/bin/po2sdf b/bin/po2sdf
deleted file mode 100755
index cafcca1a9..000000000
--- a/bin/po2sdf
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/bin/sh
-
-usage()
-{
- echo "This script converts one PO file to one SDF (GSI) file format"
- echo
- echo "Usage:" ${0##*/} lang sdf_template po_file sdf_file
- echo
- echo "Arguments:"
- echo
- echo " lang: language id, e.g \"de\""
- echo " sdf_template: original sdf file that has been used to generate"
- echo " the .pot file"
- echo " po_file: input .po file"
- echo " sdf_file: output .sdf file"
- echo
- echo "IMPORTANT: this is only a temporary solution until"
- echo " http://bugs.locamotion.org/show_bug.cgi?id=487"
- echo " is fixed"
-}
-
-if test -z "$1" -o "$1" = "--help" ; then
- usage && exit 1;
-fi
-
-lang=$1
-sdf_template=$2
-po_in=$3
-sdf_out=$4
-
-if ! which po2oo >/dev/null 2>&1 ; then
- echo "Error: po2oo tool is not available!"
- echo " You need to install translate-toolkit, see" ; \
- echo " http://translate.sourceforge.net/wiki/start" ; \
- echo " The package for openSUSE it named python-translate-toolkit" ; \
- echo " and can be found at" ; \
- echo " http://download.opensuse.org/repositories/OpenOffice.org:/EXTRAS/" ; \
- exit 1;
-fi
-
-# Crazy hack until http://bugs.locamotion.org/show_bug.cgi?id=487" is fixed"
-
-# translate-toolkit-1.1.1 is not able to convert it in
-# the--multifile=onefile format, so we need to split the .po file
-# to get the --multifile=toplevel format
-#
-# We get the split .pot file and copy the .po file accordingly to get the same
-# structure. We get a lot warnings but it works.
-
-# get the split pot file
-pot_topdir=`mktemp -d /tmp/po2sdf-pot.XXXXXX`
-oo2po -P --multifile=toplevel $sdf_template $pot_topdir
-
-# "split" the po file accordingly
-po_in_topdir=`mktemp -d /tmp/po2sdf-po.XXXXXX`
-for pot in `ls $pot_topdir` ; do
- pot_name=${pot%.pot}
- # we get many warnings later when the full .po file is just copied
- # it might be better to really split it but...
- cp $po_in $po_in_topdir/$pot_name.po
-done
-rm -rf $pot_topdir
-
-# finally, generate the sdf file
-# there are many error messages because of the copied .po files but it somewhat works
-sdf_unsorted=`mktemp /tmp/po2sdf-sdf.XXXXXX`
-po2oo --nofuzzy --multifile=toplevel -l $lang -t $sdf_template -i $po_in_topdir -o $sdf_unsorted 2>/dev/null
-
-if test ! -s $sdf_unsorted ; then
- echo "Error: The generated SDF file is empty!"
- echo "Please, edit the ${0##*/} script and enable error messages in the po2oo command."
- err=1;
-else
- # DOS end of lines can't be commited into libreoffice git
- if which dos2unix >/dev/null 2>&1 ; then
- dos2unix $sdf_unsorted
- else
- echo "Warning: You might want to install dos2unix tool to get correct .sdf files"
- fi
- sort $sdf_unsorted >$sdf_out
- err=0
-fi
-
-rm -rf $po_in_topdir
-rm -f $sdf_unsorted
-
-exit $err
diff --git a/bin/potores b/bin/potores
deleted file mode 100755
index 0784c5220..000000000
--- a/bin/potores
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/env perl
-
-use strict;
-
-# there is prolly a much much better way to do this, but the docs
-# were not findable, and or that helpful & parsing .po is easy
-#
-# This tool expects a tree of .po files, 1 per ISO lang:
-# {prefix}/
-# de/
-# fr/
-# ...
-#
-# inside each dir this filename is used:
-my $pofilename = 'totranslate.po';
-my $path;
-my $xml = 0;
-for my $arg (@ARGV) {
- if ($arg eq '--xml') {
- $xml = 1;
- } elsif ($arg eq '--help' || $arg eq '-h') {
- print "potores [--xml] /path/to/pofiles\n";
- exit 0;
- } else {
- $path = $arg;
- }
-}
-
-sub insert($$$$)
-{
- my ($lang_hash, $lang, $msgid, $msgstr) = @_;
-
- if (!defined $lang_hash->{$msgid}) {
- my %tmphash;
- $lang_hash->{$msgid} = \%tmphash;
- }
- my $thismsg = $lang_hash->{$msgid};
- $thismsg->{$lang} = $msgstr;
-# print "Insert '$msgid' [$lang] '" . $thismsg->{$lang} . "'\n";
-}
-
-sub slurp_pofile($$$)
-{
- my ($lang_hash, $lang, $po_file) = @_;
- my $fileh;
- my $msgid = '<error>';
-
- open ($fileh, "$po_file") || die "Can't open $po_file: $!";
- while (<$fileh>) {
- my $line = $_;
- $line =~ /^\#/ && next;
- $line =~ /^\s*$/ && next;
- if ($line =~ m/^"(.*)"\s*$/) {
- my $attr = $1;
- if ($attr =~ m/Content-Type: .*charset=(.*)/) {
- my $charset = $1;
- $charset =~ m/utf-8/i || die "Invalid charset $charset";
- }
- next; # skip headers
- }
- if ($line =~ /^msg(id|str)\s*"([^\"]*)"/) {
- $msgid = $2 if ($1 eq 'id');
- insert ($lang_hash, $lang, $msgid, $2) if ($1 eq 'str');
- } else {
- print "Unpleasant .po file line '$line'\n";
- }
- }
- close ($fileh);
-}
-
-print STDERR "Reading translations at $path\n";
-
-my $dirh;
-my %langs;
-opendir ($dirh, $path) || die "Can't open $path: $!";
-while (my $lang = readdir ($dirh)) {
- $lang =~ /^\./ && next;
- printf STDERR "$lang ";
- slurp_pofile (\%langs, $lang, "$path/$lang/$pofilename");
-}
-printf STDERR "\n";
-closedir ($dirh);
-
-sub printstr($$)
-{
- my ($lang, $str) = @_;
- $lang =~ s/_/-/g;
- if ($xml) {
-# FIXME: escaping ?
- print "+\t\t<value xml:lang=\"$lang\">$str</value>\n";
- } else {
- print "+\t\tText[ $lang ] = \"$str\";\n";
- }
-}
-
-for my $msgid ( sort keys %langs ) {
- print "SID_\n";
-# print "msgid $msgid\n";
- my $thismsg = $langs{$msgid};
- printstr("en-US", $msgid);
- for my $lang ( sort keys %{$thismsg} ) {
- printstr($lang, $thismsg->{$lang});
- }
-}
diff --git a/bin/preloc b/bin/preloc
deleted file mode 100755
index eb5d8c7b6..000000000
--- a/bin/preloc
+++ /dev/null
@@ -1,167 +0,0 @@
-#!/usr/bin/env perl
-
-#
-# Use example: cd /opt/OOInstall/program
-# preloc --quiet --plt --for=svx *.so | sort | c++filt > svx
-#
-# use --strip to get the symbols for the constructors with the method name
-# stripped
-#
-
-# misc. argument options / defaults
-my $opt_plt_too = 0;
-my $unused = 0;
-my $for_lib = '';
-my $quiet = 0;
-my $strip = 0;
-
-sub read_relocs($)
-{
- my $file = shift;
- my $pipe;
- my %relocs;
- my %symbols;
- my %used;
- my %lib;
-
-# print "Read '$file'\n";
-
- open ($pipe, "readelf -r -W $file |") || die "Can't readelf -r $file: $!";
- while (<$pipe>) {
- /'.rel.plt'/ && !$opt_plt_too && last;
- if (! m/(R_\S+)\s+([0-9a-f]+)\s+(.*)\s*/) {
-# print "Bin line '$_'\n";
- next;
- }
- my ($type, $loc, $sym) = ($1, $2, $3);
- $symbols{$sym} = hex ($loc);
- }
- close ($pipe);
-
- $lib{file} = $file;
- $lib{symbols} = \%symbols;
- $lib{used} = \%used;
-
- return \%lib;
-}
-
-sub add_used($$)
-{
- my ($lib, $sym) = @_;
- $lib->{used}->{$sym} = $lib->{symbols}->{$sym};
- delete $lib->{symbols}->{$sym};
-}
-
-sub find_matches($$)
-{
- my ($sym, $list) = @_;
- my @collisions = ();
-
- for my $lib (@{$list}) {
- if (defined $lib->{symbols}->{$sym}) {
- push @collisions, $lib->{file};
- add_used ($lib, $sym);
- }
- }
-
- return @collisions;
-}
-
-sub by_internal
-{
- keys (%{$a->{symbols}}) <=> keys (%{$b->{symbols}});
-}
-
-#
-# munge options
-#
-my @files = ();
-for my $arg (@ARGV) {
- if ($arg =~ m/^--plt/) {
- $opt_plt_too = 1;
- } elsif ($arg =~ m/^--unused/) {
- $unused = 1;
- } elsif ($arg =~ m/^--quiet/) {
- $quiet = 1;
- } elsif ($arg =~ m/^--for=(.*)/) {
- $for_lib = $1;
- } elsif ($arg =~ m/^--strip/) {
- $strip = 1;
- } else {
- push @files, $arg;
- }
-}
-
-#
-# read relocation data from elf shared libraries
-#
-my @libs = ();
-my $lib;
-print STDERR "reading relocs ";
-for my $file (@files) {
- $lib = read_relocs ($file);
- push @libs, $lib;
- print STDERR ".";
-}
-print STDERR "\n";
-
-#
-# process it
-#
-my $dups = 0;
-my @lib_copy = @libs;
-print STDERR "processing relocs ";
-while ($lib = shift @lib_copy) {
- for my $sym (keys (%{($lib->{symbols})})) {
- my @hits = find_matches ($sym, \@lib_copy);
- @hits && add_used ($lib, $sym);
- }
- print STDERR ".";
-}
-print STDERR "\n";
-
-#
-# pretty print it
-#
-if (!$quiet) {
- my $total = 0;
- for $lib (sort by_internal @libs) {
- my $extint_count = keys(%{$lib->{symbols}});
- my $used_count = keys(%{$lib->{used}});
- my $sub_total = $used_count + $extint_count;
-
- print STDERR $lib->{file} . " : used $used_count internal(withsym) $extint_count total $sub_total\n";
-
- $total += $sub_total;
- }
- print STDERR "Total relocs: $total\n";
-}
-
-if ($for_lib) {
- for $lib (@libs) {
- if ($lib->{file} =~ m/$for_lib/) {
- print "# Dumping symbols for '" . $lib->{file} . "'...\n\n";
- my $hash;
- if ($unused) {
- $hash = $lib->{symbols};
- } else {
- $hash = $lib->{used};
- }
- for $sym (keys %{$hash}) {
- if ($hash->{$sym}) {
- if (!$strip){
- print "$sym\n";
- } elsif ($sym =~ /[A-Z0-9]+C[0-9]{1}/){
- #print "Constructor Symbol :: $sym\n";
- my $name = `c++filt $sym`;
- my @symb_arr = split /\(/, $name;
- $name = $symb_arr[0];
- $name =~ s/\:\:[^\:]*$//;
- print "$name\n";
- }
- }
- }
- }
- }
-}
-
diff --git a/bin/pyunorc-update64.in b/bin/pyunorc-update64.in
deleted file mode 100755
index d39a3f966..000000000
--- a/bin/pyunorc-update64.in
+++ /dev/null
@@ -1,91 +0,0 @@
-#!/bin/sh
-
-#*****************************************************************************
-#
-# pyunorc-update64 - Utility to update pythonloader.unorc on x86_64
-#
-# It extends PYTHONPATH and PYTHONHOME with various paths, so pyuno is able
-# to find 32-bit .so files even on 64-bit system and on the contrary
-# the 32-bit libraries are able to find .py and .pic files from the 64bit
-# package.
-#
-# Initial version by: Petr Mladek <pmladek@suse.cz>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2, as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-#
-#*****************************************************************************
-
-# do the job just on x86_64
-test `uname -i` = "x86_64" || exit 0;
-
-PYUNORCFILE=@OOINSTBASE@/program/pythonloader.unorc
-
-# try to detect the right path from the compat link
-if test -L /usr/lib64/python ; then
- PYTHONHOME64=`readlink /usr/lib64/python`
- if test "${PYTHONHOME64#/*}" = "$PYTHONHOME64" ; then
- # relative path
- PYTHONHOME64="/usr/lib64/$PYTHONHOME64"
- fi
-else
- # fall back to the current known python version
- PYTHONHOME64=/usr/lib64/python2.4
-fi
-
-# derive the path to 32-bit .so files from the 64-bit path
-PYTHONHOME32=`echo $PYTHONHOME64 | sed -e "s|/usr/lib64|/usr/lib|"`
-
-# the following paths must be mentioned in PYTHONPATH
-# it includes two 32-bit paths, main python dir, and lots of paths printed by
-# PYTHONPATH=/usr/lib64/python/ python -c 'import sys; print sys.path'
-# , all are converted to URL
-PYTHONPATH_COMPAT64="file://$PYTHONHOME32/lib-dynload \
-file://$PYTHONHOME32/site-packages \
-file:///usr/lib64/python \
-file://$PYTHONHOME64 \
-file://$PYTHONHOME64/plat-linux2 \
-file://$PYTHONHOME64/lib-tk \
-file://$PYTHONHOME64/lib-dynload \
-file://$PYTHONHOME64/site-packages \
-file://$PYTHONHOME64/site-packages/Numeric \
-file://$PYTHONHOME64/site-packages/PIL \
-file://$PYTHONHOME64/site-packages/gtk-2.0"
-
-# check PYTHONPATH
-sed_script=`mktemp /tmp/OpenOffice_org-pyunorc.sed.XXXXXXXX`
-for path in $PYTHONPATH_COMPAT64 ; do
- if ! grep -q "PYTHONPATH=.*$path" $PYUNORCFILE ; then
- echo "s|^\([[:blank:]]*PYTHONPATH=.*\)$|\1 $path|" >>$sed_script
- fi
-done
-
-# check PYTHONHOME; it must be set to PYTHONHOME32 on x86_64
-if grep -q "^[[:blank:]]*PYTHONHOME=" $PYUNORCFILE ; then
- if ! grep -q "^[[:blank:]]*PYTHONHOME=file://$PYTHONHOME32" $PYUNORCFILE ; then
- # just fix the path
- echo "s|^\([[:blank:]]*PYTHONHOME=\).*$|\1file://$PYTHONHOME32|" >>$sed_script
- fi
-else
- # the variable is missing at all
- echo "s|^\([[:blank:]]*PYTHONPATH=.*\)$|\1\nPYTHONHOME=file://$PYTHONHOME32|" >>$sed_script
-fi
-
-# update the config file if any problem was found
-if test -s $sed_script ; then
- mv -f $PYUNORCFILE $PYUNORCFILE.savedby.pyunorc-update64
- sed -f $sed_script $PYUNORCFILE.savedby.pyunorc-update64 >$PYUNORCFILE
-fi
-
-# remove the temporaty file
-rm -rf $sed_script
diff --git a/bin/rcspack b/bin/rcspack
deleted file mode 100755
index 73e4f8e94..000000000
--- a/bin/rcspack
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/bin/sh
-
-EXCL=/tmp/rcspack-exclude
-
-echo "api/devmanual
-api/odk
-api/sdk_oo
-api/www
-external/addons
-external/atk
-external/gtk
-external/glib2
-external/libjpeg
-external/jpeg
-external/libpng
-external/libxml2
-external/moz
-external/pango
-external/pkgconfig
-external/python
-external/www
-external/zlib
-framework/binfilter
-framework/www
-script/www
-sw/www
-sc/www
-xml/www" > $EXCL
-
-package_rcs_dir()
-{
- ARCHIVE=$1;
- shift
- echo "Archiving $ARCHIVE: $*"
- tar czf $ARCHIVE.tar.gz -X $EXCL $*
-}
-
-package_rcs_dir "api_dba" "api" "dba"
-package_rcs_dir "external" "external"
-package_rcs_dir "fwk_gsl" "framework" "graphics" "gsl"
-package_rcs_dir "installation" "installation"
-package_rcs_dir "small" "l10n" "lingucomponent" "oi" "porting" "test" "ui" "xml" "whiteboard"
-package_rcs_dir "apps" "sc" "script" "sw"
-package_rcs_dir "tools_uno" "tools" "ucb" "udk" "util"
diff --git a/bin/re-root b/bin/re-root
deleted file mode 100755
index 5df3e273d..000000000
--- a/bin/re-root
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env perl
-
-sub scan_dirs($$);
-
-sub rewrite_cvs($$)
-{
- my $dir_path = shift;
- my $new_root = shift;
-
- -d $dir_path || return;
-
- printf "Re-write root $dir_path\n";
- my $file;
- my $file_name = "$dir_path/Root";
- if (open ($file, ">$file_name") ) {
- print $file "$new_root\n";
- close $file;
- } else {
- print "Couldn't open '$file_name': $!\n";
- }
-
- return;
-
-# speculative - as I recall the internal repository path is different too
- my $src;
- my $src_name = "$dir_path/Repository";
- if (open ($src, $src_name)) {
- my $dest;
- my $dest_name = "$src_name.tmp";
- open ($dest, ">$dest_name") || die "Can't open $dest_name for writing: $!";
- while (<$src>) {
- s/^foo\///;
- print $dest $_;
- }
- close ($src) || die "can't close $src_file: $!";
- close ($dest) || die "can't close $dest_file: $!";
- rename $dest_file, $src_file || die "Can't rename $src_file to $dest_file: $!";
- }
-}
-
-sub scan_dirs($$)
-{
- my $dir_path = shift;
- my $new_root = shift;
-
- my $misc_dir;
- opendir ($misc_dir, $dir_path) || return;
- my $name;
- while ($name = readdir ($misc_dir)) {
- $name =~ /^\./ && next;
- -d "$dir_path/$name/CVS" && scan_dirs ("$dir_path/$name", $new_root);
- }
- closedir ($misc_dir);
- rewrite_cvs ("$dir_path/CVS", $new_root);
-}
-
-# main
-my $cvsPath;
-chomp ($pwd = `pwd`);
-
-if (!($cvsPath = shift @ARGV)) {
- print "Syntax: re-root <path-of-checkout> [<new root>]\n";
- exit 1;
-}
-if ($cvsPath =~ /^\./) {
- $cvsPath = $pwd . "/$cvsPath";
-}
-
-my $newRoot;
-if (!($newRoot = shift @ARGV)) {
- if (!defined $ENV{CVSROOT}) {
- $newRoot = ':pserver:anoncvs@anoncvs.services.openoffice.org:/cvs';
- } else {
- $newRoot = $ENV{CVSROOT};
- }
-}
-
-print "Path: $cvsPath\n";
-
-scan_dirs ($cvsPath, $newRoot);
diff --git a/bin/relink b/bin/relink
deleted file mode 100755
index ad68f22ca..000000000
--- a/bin/relink
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/env perl
-
-my $cwd = '.';
-my $dirh;
-print "removing libs: ";
-opendir ($dirh, $cwd) || die "Can't open: $!";
-while (my $name = readdir ($dirh)) {
- next if (-d "$cwd/$name/download"); # skip complex bits
- next if (! -d "$cwd/$name"); # skip non-directories
- `rm -f $cwd/$name/unxlngi*/lib/*`;
- print "$name ";
-}
-closedir ($dirh);
-print "\n";
diff --git a/bin/relocstat b/bin/relocstat
deleted file mode 100755
index fed61d6b2..000000000
--- a/bin/relocstat
+++ /dev/null
@@ -1,652 +0,0 @@
-#!/usr/bin/env perl
-
-#
-# Use example: cd /opt/OOInstall/program
-# preloc --quiet --plt --for=svx *.so | sort | c++filt > svx
-#
-# use --strip to get the symbols for the constructors with the method name
-# stripped
-#
-
-
-# misc. argument options / defaults
-my $opt_plt_too = 0;
-my $random_symbol_sample = 0;
-my $do_data_profile = 0;
-my $summary_only = 0;
-my $global_warned_symbols = 0;
-
-sub read_sections($$)
-{
- my $file = shift;
- my $lib = shift;
- my $pipe;
- my %sections;
-
- $lib->{sections} = \%sections;
-
- open ($pipe, "readelf -W -e $file |") || die "Can't readelf -W -e $file: $!";
- while (<$pipe>) {
- m/\s*\[\s*([0-9]*)\]\s+([\S\._]*)\s+([A-Za-z]*)\s+([0-9a-f]*)\s+([0-9a-f]*)\s+([0-9a-f]*)\s+/ || next;
- my ($section, $type, $address, $offset, $size) = ($2, $3, $4, $5, $6);
- $section && $type || next;
- $size = hex ($size);
-
-# print "Section $section size $size\n";
- $lib->{sections}->{$section} = $size;
- }
- close ($pipe);
-}
-
-sub read_relocs($$)
-{
- my $file = shift;
- my $lib = shift;
- my $pipe;
- my %relocs;
- my %symbols;
- my %used;
- my $in_plt = 0;
-
-# print "Read '$file'\n";
-
- open ($pipe, "readelf -r -W $file |") || die "Can't readelf -r $file: $!";
- while (<$pipe>) {
- if (m/'.rel.plt'/) {
- $in_plt = 1;
- next;
- }
- if (! m/^([0-9a-f]+)\s+([0-9a-f]+)\s+(R_\S+)\s+([0-9a-f]+)\s+(.*)\s*/) {
-# print "Bin line '$_'\n";
- next;
- }
- my ($offset, $info, $type, $loc, $sym) = ($1, $2, $3, $4, $5);
-# print "$sym reloc at 0x$offset : $type, $loc, $sym\n";
- if ($in_plt) {
- $lib->{used_in_plt}->{$sym} = 1;
- } else {
- my $lst;
- if (!defined ($symbols{$sym})) {
- my @rlst;
- $lst = \@rlst;
- } else {
- $lst = $symbols{$sym};
- }
- push @{$lst}, hex ($offset);
- $symbols{$sym} = $lst;
- }
- }
- close ($pipe);
-
- $lib->{file} = $file;
- $lib->{relocs} = \%symbols;
- $lib->{used} = \%used;
-}
-
-sub has_symbols($)
-{
- my $filename = shift;
- my $FILE;
- open ($FILE, "file -L $filename |");
- my $fileoutput = <$FILE>;
- close ($FILE);
- if (( $fileoutput =~ /not stripped/i ) && ( $fileoutput =~ /\bELF\b/ )) { $symbols = 1; }
- else { $symbols = 0; }
- return $symbols;
-}
-
-sub read_symbols($$)
-{
- my $file = shift;
- my $lib = shift;
- my $pipe;
- my %def;
- my %undef;
- my %data;
- my %addr_space;
-
-# print "Read '$file'\n";
-
- my $dumpsw = '-t';
- if (!has_symbols ($file)) {
- if (!$global_warned_symbols) {
- print "relocstat does better with symbols\n";
- $global_warned_symbols = 1;
- }
- $dumpsw = '-T';
- }
-
- open ($pipe, "objdump $dumpsw $file |") || die "Can't objdump $dumpsw $file: $!";
- while (<$pipe>) {
- /([0-9a-f]*)\s+([gw ])\s+..\s+(\S*)\s*([0-9a-f]+)..............(.*)/; # || next;
-
- my ($address, $linkage, $type, $size, $symbol) = ($1, $2, $3, $4, $5);
-# print "Symbol '$symbol' type '$type' '$linkage' addr $address, size $size\n";
-
- if (!$symbol || !$type) {
-# print "Bogus line: $_\n";
- next;
- }
-
- if ($type eq '.data') {
- my $realsize = hex ($size);
- my $realaddress = hex ($address);
- my %datum;
- $datum{size} = $realsize;
- $datum{address} = $realaddress;
- $datum{symbol} = $symbol;
- $data{$symbol} = \%datum;
-# print "Symbol '$symbol' type '$type' '$linkage' addr 0x$address, size $size\n";
-
- # yes there should be a btree in perl.
- for (my $i = 0; $i < $realsize; $i+=4) {
- my $key = $realaddress + $i;
-# printf "Set... '$key'\n";
-# defined $addr_space{$key} && die "Overlap: $key";
- $addr_space{$key} = \%datum;
- }
- }
-
- if ($type ne '*UND*') {
- $def{$symbol} = $linkage;
- } else {
- $undef{$symbol} = $linkage;
- }
- }
- close ($pipe);
-
- $lib->{def} = \%def;
- $lib->{undef} = \%undef;
- $lib->{data} = \%data;
- $lib->{addr_space} = \%addr_space;
-}
-
-sub get_class_stem($)
-{
- my $sym = shift;
- # relies on the integer in _ZTV[N]3...?? to disambiguate
- # class SetFoo vs. class SetFooExtra
- $sym =~ s/^_ZTVN*//;
- $sym =~ s/E*$//;
- return $sym;
-}
-
-sub breakdown_vtable($$$$)
-{
- my $vtable_breakdown = shift;
- my $lib = shift;
- my $datum = shift;
- my $sym = shift;
-
- $sym =~ /_ZTI/ && return; # don't count type-info relocs
-
- my $constr = $datum->{symbol};
-
- my $cdata = $vtable_breakdown->{$constr};
- if (!defined $cdata) {
- my %some_cdata;
- $cdata = \%some_cdata;
- $vtable_breakdown->{$constr} = $cdata;
-
- $cdata->{slot_count} = 0;
- $cdata->{self_impl_slot_count} = 0;
- $cdata->{extern_impl_slot_count} = 0;
- $cdata->{stem} = get_class_stem ($constr);
- $cdata->{external_self} = 0;
- }
-
- $cdata->{slot_count}++;
-
- if ($sym =~ m/$cdata->{stem}/) {
- # internal symbol
- $cdata->{self_impl_slot_count}++;
-
- if (defined $lib->{undef}->{$sym}) {
- $cdata->{external_self}++; # very unusual ...
- }
- }
-
- if (defined $lib->{undef}->{$sym}) {
- # external symbol
- $cdata->{extern_impl_slot_count}++;
- }
-}
-
-sub by_internal
-{
- keys (%{$a->{relocs}}) <=> keys (%{$b->{relocs}});
-}
-
-sub is_vtable($)
-{
- my $sym = shift;
- return $sym =~ /^_ZTV/;
-}
-
-sub is_rtti($)
-{
- my $sym = shift;
- return $sym =~ /^_ZTI/ || $sym =~ /^_ZTS/;
-}
-
-sub find_nearest($$)
-{
- my $addr_space = shift;
- my $addr = shift;
- for (my $delta = 0; $delta < (256 * 8); $delta += 4) {
- return ($addr_space->{$addr+$delta})->{symbol} if (defined $addr_space->{$addr + $delta});
- return ($addr_space->{$addr-$delta})->{symbol} if (defined $addr_space->{$addr - $delta});
- }
- return '<unknown>';
-}
-
-#
-# munge options
-#
-my @files = ();
-for my $arg (@ARGV) {
- if ($arg =~ m/^--data-profile/) {
- $do_data_profile = 1;
- } elsif ($arg =~ m/^--random-syms/) {
- $random_symbol_sample = 1;
- } elsif ($arg =~ m/^--summary/) {
- $summary_only = 1;
- } else {
- push @files, $arg;
- }
-}
-
-#
-# read relocation data from elf shared libraries
-#
-my @libs = ();
-my $lib;
-print STDERR "reading relocs ";
-for my $file (@files) {
- my %lib_hash;
- my $lib = \%lib_hash;
- read_sections ($file, $lib);
- read_relocs ($file, $lib);
- read_symbols($file, $lib);
- push @libs, $lib;
- print STDERR ".";
-}
-print STDERR "\n";
-
-my $do_print_relocs = 1;
-my $do_print_data_breakdown = 1;
-my $do_print_vtable_breakdown = 1;
-my $do_section_breakdown = 1;
-my $do_count_sym_sizes = 1;
-my $do_method_relocs = 1;
-
-my %data_breakdown;
-
-if ($do_data_profile) {
- $do_print_relocs = 0;
- $do_print_data_breakdown = 0;
- $do_print_vtable_breakdown = 0;
- $data_breakdown{vtable} = 0;
- $data_breakdown{vtable_count} = 0;
- $data_breakdown{rtti_count} = 0;
- $data_breakdown{other} = 0;
-}
-
-my %section_breakdown;
-my $total_section_size = 0;
-
-my $total_symbol_entry_count = 0;
-my $total_symbol_def_size = 0;
-my $total_symbol_undef_size = 0;
-
-my $total_method_reloc_count = 0;
-my $total_method_reloc_size = 0;
-
-#
-# pretty print it
-#
-for $lib (sort by_internal @libs) {
-
-# Overall relocation information
- if ($do_print_relocs)
- {
- my $internal_weak_relocs = 0;
- my $internal_weak_thnk = 0;
- my $internal_strong_relocs = 0;
- my $external_relocs = 0;
- my $def = $lib->{def};
- my $undef = $lib->{undef};
- for $sym (keys %{$lib->{relocs}}) {
- if (defined $undef->{$sym}) {
- $external_relocs++;
- } elsif (defined $def->{$sym}) {
- if ($def->{$sym} =~ m/w/) {
- $internal_weak_relocs++;
- $internal_weak_thnk++ if ($sym =~ m/^_ZThn/);
- } else {
- $internal_strong_relocs++;
- }
- } else {
- print STDERR "broken symbol '$sym'\n";
- }
- }
- my $total = keys %{$lib->{relocs}};
- my $percentage = sprintf ("%2.2g", $internal_strong_relocs / ($total + 1) * 100);
-
- if (!$summary_only) {
- print $lib->{file} . " total relocs $total external $external_relocs, internal weak " .
- "$internal_weak_relocs (of which thnks $internal_weak_thnk), " .
- "internal strong $internal_strong_relocs: saving $percentage\%\n";
- }
- }
-
- if ($do_method_relocs) {
- $lib->{total_method_reloc_count} = 0;
- $lib->{total_method_reloc_size} = 0;
- for my $sym (keys %{$lib->{relocs}}) {
- $sym =~ /^_ZN/ || $sym =~ /^_ZThn/ || next;
-
- my $count = @{$lib->{relocs}->{$sym}};
- my $size = 16 * $count; # .rel.dyn
- $size += length ($sym) + 1 + 4; # \0, .hash etc.
-
-# print "Reloc $sym: count $count size $size\n";
- $total_method_reloc_count += $count;
- $total_method_reloc_size += $size;
- $lib->{total_method_reloc_count} += $count;
- $lib->{total_method_reloc_size} += $size;
- }
- }
-
- if ($random_symbol_sample) {
- print "\n";
-
- my @keys = keys %{$lib->{relocs}};
- print "Random symbols:\n";
- for ($i = 0; $i < 20; $i++) {
- my $sym = $keys[rand @keys];
- my $demangled = `c++filt $sym`;
- chomp $demangled;
- print "$demangled\t$sym\n";
- }
- }
-
-# Break down the .data section by object details:
- {
- my $vtable_size = 0;
- my $vtable_count = 0;
- my $rtti_size = 0;
- my $rtti_count = 0;
- my $other_size = 0;
- my $other_count = 0;
- for $sym (keys %{$lib->{data}}) {
- my $data = ($lib->{data})->{$sym};
-
- if (is_vtable ($sym)) {
- $vtable_count++;
- $vtable_size += $data->{size};
-
- } elsif (is_rtti ($sym)) {
- $rtti_count++;
- $rtti_size += $data->{size};
-
- } else {
- $other_count++;
- $other_size += $data->{size};
- }
- }
- my $total_size = 1.0 * ($vtable_size + $rtti_size + $other_size) / 100.0;
-
- if ($do_data_profile) {
- $data_breakdown{vtable} += $vtable_size;
- $data_breakdown{vtable_count} += $vtable_count;
- $data_breakdown{rtti} += $rtti_size;
- $data_breakdown{rtti_count} += $rtti_count;
- $data_breakdown{other} += $other_size;
- $data_breakdown{other_count} += $other_count;
- }
-
- if ($do_print_data_breakdown && $total_size && !$summary_only) {
- print ".data:\n";
- print " vtables: $vtable_count size $vtable_size bytes - " . sprintf ("%2.2g", $vtable_size/$total_size) . "\%\n";
- print " rtti: $rtti_count size $rtti_size bytes - " . sprintf ("%2.2g", $rtti_size/$total_size) . "\%\n";
- print " other: $other_count size $other_size bytes - " . sprintf ("%2.2g", $other_size/$total_size) . "\%\n";
- print "\n";
- }
- }
-
- if ($do_print_vtable_breakdown)
- {
- my $vtable_relocs = 0;
- my $rtti_relocs = 0;
- my $data_relocs = 0;
- my $other_relocs = 0;
- my $addr_space = $lib->{addr_space};
- my $key_count = keys %{$lib->{addr_space}};
- my %vtable_breakdown;
- my %vtable_wasted;
-
- for $sym (keys %{$lib->{relocs}}) {
- my $lst = ($lib->{relocs})->{$sym};
- for $addr (@{$lst}) {
- if (defined $addr_space->{$addr}) {
- my $datum = $addr_space->{$addr};
-
-# print "Hit '$sym' at " . sprintf ("0x%.8x", $addr) . "\n";
-
- if (is_vtable ($datum->{symbol})) {
- $vtable_relocs++;
- breakdown_vtable (\%vtable_breakdown, $lib, $datum, $sym);
- if (!defined $vtable_wasted{$sym} &&
- defined $lib->{undef}->{$sym}) {
- if (defined $lib->{used_in_plt}->{$sym}) {
-# print "Symbol '$sym' used in plt and in vtable\n";
- } else {
-# print "Symbol '$sym' used in plt and in vtable\n";
- $vtable_wasted{$sym}++;
- }
- }
- } elsif (is_rtti ($datum->{symbol})) {
- $rtti_relocs++;
- } else {
- $data_relocs++;
- }
- } else {
- $other_relocs++;
-# relocs in the data section, but not inside one of our symbols.
-# print "Odd '$sym' at " . sprintf ("0x%.8x", $addr) .
-# " nearest '" . find_nearest ($addr_space, $addr) . "'\n";
- }
- }
- }
- if (!$summary_only) {
- print "Data section contains " . $key_count * 4 . " bytes\n";
- my $total_relocs = 1.0 * ($vtable_relocs + $rtti_relocs + $data_relocs + $other_relocs) / 100.0;
- print "reloc breakdown:\n";
- if ($total_relocs > 0) {
- print " vtables: $vtable_relocs - " . sprintf ("%2.2g", $vtable_relocs/$total_relocs) . "\%\n";
- print " rtti: $rtti_relocs - " . sprintf ("%2.2g", $rtti_relocs/$total_relocs) . "\%\n";
- print " .data/non-vtable $data_relocs - " . sprintf ("%2.2g", $data_relocs/$total_relocs) . "\%\n";
- print " other: $other_relocs - " . sprintf ("%2.2g", $other_relocs/$total_relocs) . "\%\n";
- print " grand-total: " . ($vtable_relocs + $rtti_relocs + $data_relocs + $other_relocs) . "\n";
- print "\n";
- } else {
- print " no relocs at all\n";
- }
- }
-
- my $vtable_slot_count = 0;
- my $vtable_class_count = 0;
- my $vtable_self_impl_slot_count = 0;
- my $vtable_extern_impl_slot_count = 0;
- my $vtable_extern_self = 0;
- for my $vtbl (keys %vtable_breakdown) {
- $vtable_class_count++;
- $vtable_slot_count += $vtable_breakdown{$vtbl}->{slot_count};
- $vtable_self_impl_slot_count += $vtable_breakdown{$vtbl}->{self_impl_slot_count};
- $vtable_extern_impl_slot_count += $vtable_breakdown{$vtbl}->{extern_impl_slot_count};
- $vtable_extern_self += $vtable_breakdown{$vtbl}->{external_self};
-# print "Class '$vtbl': slots " . $vtable_breakdown{$vtbl}->{slot_count}
-# . " self-impl " . $vtable_breakdown{$vtbl}->{self_impl_slot_count} . "\n";
- }
- if ($vtable_class_count && !$summary_only) {
- print "vtables breakdown\n";
-
- print " vtables: $vtable_class_count\n";
- print " slots / vtable: " . sprintf ("%.3g", $vtable_slot_count/$vtable_class_count) . " slots\n";
- print " self-impl / vtable: " . sprintf ("%.3g", $vtable_self_impl_slot_count/$vtable_class_count) . " slots\n";
- print " parent-impl / vtable: " . sprintf ("%.3g", ($vtable_slot_count - $vtable_self_impl_slot_count)/$vtable_class_count) . " slots\n";
- print " *extern-impl / vtable: " . sprintf ("%.3g", $vtable_extern_impl_slot_count/$vtable_class_count) . " slots\n";
- print " *extern-impl of self total: " . sprintf ("%.3g", $vtable_extern_self) . " slots\n";
- }
-
- if ($vtable_extern_impl_slot_count && !$summary_only) { # vtable wasted count
- my $vtable_wasted_syms = 0;
- my $vtable_wasted_size = 0;
- for my $sym (keys %vtable_wasted) {
- $vtable_wasted_syms++;
- $vtable_wasted_size += length ($sym) + 1 + 4; # \0, .hash etc.
- }
- print " extern (only) vtable symbols:\n";
- print " wasted symbols: $vtable_wasted_syms (" .
- sprintf ("%.3g", ($vtable_wasted_syms * 100.0) / $vtable_extern_impl_slot_count) . "%)\n";
- print " wasted bytes: $vtable_wasted_size\n";
- }
-
- my %by_stem = (
- '^_ZThn' => 'thunks',
- '^_ZN' => 'methods',
- '^_ZTI' => 'type info',
- '^_ZTS' => 'type strings',
- '^_ZTV' => 'vtables',
- '^[^_]' => 'non-c++'
- );
- my %syms_by_stem = ();
- for my $stem (keys %by_stem) {
- $syms_by_stem{$stem} = 0;
- }
- for $sym (keys %{$lib->{relocs}}) {
- for my $stem (keys %by_stem) {
- if ($sym =~ m/$stem/) {
- $syms_by_stem{$stem}++;
- last;
- }
- }
- }
- print "Unique relocation breakdown by suffix:\n";
- for $stem (sort { $syms_by_stem{$b} <=> $syms_by_stem{$a} } (keys %by_stem)) {
- print " " . sprintf ("%6d", $syms_by_stem{$stem}) .
- " - " . $by_stem{$stem} . "\n";
- }
- }
- if ($do_section_breakdown) {
-# cf. http://refspecs.freestandards.org/LSB_3.0.0/LSB-Core-generic/LSB-Core-generic/specialsections.html
- my %sections = (
- '\.hash' => 'linking',
- '\.dynsym' => 'linking',
- '\.dynstr' => 'linking',
- '\.rel\.plt' => 'linking',
- '\.suse\.vtrelocs' => 'linking',
- '\.plt' => 'linking',
- '\.got' => 'linking',
- '\.got\.plt' => 'linking',
- '\.rel\.dyn' => 'data relocs',
- '\.gnu\.version.*' => 'versioning',
- '\.gcc_except_table' => 'exceptions',
- '\.eh_frame.*' => 'exceptions',
- '\.[cd]tors' => 'c/d-tors',
- '\.data.*' => 'data',
- '\.rodata' => 'data',
- '\.bss' => 'bss',
-# '\.rodata' => 'ro data'
-# '\.bss' => 'scratch globals',
- '\.debug.*' => 'debug',
- '\.stab.*' => 'debug',
- '\.comment' => 'comment',
- '\.text' => 'code',
- '\.init.*' => 'init/fini',
- '\.fini.*' => 'init/fini',
- '\.strtab' => 'symbols',
- '\.symtab' => 'symbols'
- );
- for my $sect (keys %{$lib->{sections}}) {
- my $bsect = 'misc';
- for my $match (keys %sections) {
- if ($sect =~ m/^$match$/) {
- $bsect = $sections{$match};
- last;
- }
- }
-# if ($bsect eq 'misc') {
-# print "Section $sect size " . $lib->{sections}->{$sect} . " is misc...\n";
-# }
- $section_breakdown{$bsect} = 0 if (!defined $section_breakdown{$bsect});
- my $size = $lib->{sections}->{$sect};
- $section_breakdown{$bsect} += $size;
- $total_section_size += $size;
- }
- }
- if ($do_count_sym_sizes) {
- for $sym (keys %{$lib->{def}}) {
- $total_symbol_entry_count++;
- $total_symbol_def_size += length ($sym) + 1;
- }
- for $sym (keys %{$lib->{undef}}) {
- $total_symbol_entry_count++;
- $total_symbol_undef_size += length ($sym) + 1;
- }
- }
-}
-
-if (@libs > 1) {
- print "\n";
- print "--- Total summary ---\n";
- print "\n";
-}
-
-if ($do_data_profile) {
- print ".data summary:\n";
- print " vtables: " . $data_breakdown{vtable_count} . " size " . $data_breakdown{vtable} . " bytes\n";
- print " rtti: " . $data_breakdown{rtti_count} . " size " . $data_breakdown{rtti} . " bytes\n";
- print " other: " . $data_breakdown{other_count} . " size " . $data_breakdown{other} . " bytes\n";
- print "\n";
-}
-
-if ($do_section_breakdown) {
- print "Section size breakdown\n";
-
- if ($total_section_size)
- {
- for my $bsect (sort { $section_breakdown{$b} <=> $section_breakdown{$a} } (keys %section_breakdown)) {
- next if ($section_breakdown{$bsect} < 1024);
- print " " . sprintf ("%-15s", $bsect) .
- " " . sprintf ("%4d", ($section_breakdown{$bsect}/1024)) .
- "kb - " . sprintf ("%2.2g", $section_breakdown{$bsect} * 100.0 /$total_section_size) . "\%\n";
- }
- }
- print " Total: $total_section_size bytes\n";
-}
-
-if ($do_count_sym_sizes) {
- print "Symbol entry counts:\n";
- print " ~total .dynsym entries: $total_symbol_entry_count\n";
- print " .dynstr size:\n";
- print " def: $total_symbol_def_size\n";
- print " undef: $total_symbol_undef_size\n";
- print " (avg len): " . sprintf ("%3.2g\n", ($total_symbol_def_size + $total_symbol_undef_size) / ($total_symbol_entry_count + 1));
-}
-
-if ($do_method_relocs) {
- print "name\trelocs\tsize\n";
- for $lib (sort by_internal @libs) {
-
- print $lib->{file} . "\t" .
- $lib->{total_method_reloc_count} . "\t" .
- $lib->{total_method_reloc_size} . "\n";
- }
-
- print "Totals:\n";
- print " method relocs: $total_method_reloc_count\n";
- print " size (bytes): $total_method_reloc_size\n";
-}
-
diff --git a/bin/reorder-link b/bin/reorder-link
deleted file mode 100755
index 3e1437696..000000000
--- a/bin/reorder-link
+++ /dev/null
@@ -1,136 +0,0 @@
-#!/usr/bin/env perl
-
-# Libraries to re-order for startup speed ...
-
-@libraries = (
- 'libsw%%li.so', 'sw', 'sw.lib',
- 'libsfx%%li.so', 'sfx2', 'sfx.lib',
- 'libsvx%%li.so', 'svx', 'svx.lib'
- );
-
-sub find_bindings
-{
- my $file;
- my $ldname = shift;
- my %binds;
-
- print "Reading bindings for: $ldname\n";
- open ($file, $output) || die "Can't open $output: $!";
- while (<$file>) {
- /$ldname:\s+.*\`([^\']*)\'/ || next;
- my $sym = $1;
- my $count;
- $count = ++$binds{$sym};
- }
- close ($file);
-
- return %binds;
-}
-
-sub make_path
-{
- my $base = shift;
- my $fname = shift;
-
- print "Base '$base'\n";
- return ($base . "/unxlngi4.pro/slb/" . $fname);
-}
-
-sub read_object_list
-{
- my $base = shift;
- my $fname = shift;
- my $path;
- my %objs;
-
- $path = make_path ($base, $fname);
-
- my $file;
- open ($file, $path) || die "Can't open $path: $!\n";
- while (<$file>) {
- chomp;
- $_ eq '' && next;
- $objs{$_} = 0;
- }
- close ($file);
-
- return %objs;
-}
-
-sub count_symbols
-{
- my $path = shift;
- my $bindings = shift;
- my $count = 0;
- my $pipe;
-
- print ".";
-
-# FIXME - we need to check the type of re-location I think [!?] - needs to be a provides ..
- open $pipe, "nm $path|" || die "Can't objdump $path: $!";
- while (<$pipe>) {
- /\s+([^\s]+)$/ || next;
-# hard to know which is best - this seems so,
-# /[TGDBRS]\s+([^\s]+)$/ || next;
- if (exists $bindings->{$1}) {
- $count = $count + $bindings->{$1};
- }
- }
- close $pipe;
-
- return $count;
-}
-
-if (@ARGV < 1) {
- print "Syntax:\n";
- print " reorder-link <path-to-output>\n";
- print " where output is generated by LD_DEBUG=bindings\n";
- exit (1);
-}
-
-$output = shift @ARGV;
-
-{
- print "Sucking env from setup\n";
- open ($Vars, "bash -c '. ./setup ; set'|") || die "Can't find setup: $!";
- while (<$Vars>) {
- /([^=]*)=(.*)/ || next;
- $setup_vars{$1} = $2;
- }
- close ($Vars);
-}
-
-# Hack - clobber for test setup
-$setup_vars{'OOBUILDDIR'} = '/opt/OpenOffice/OOO_STABLE_1';
-
-while (@libraries) {
- my $ldname = shift @libraries;
- my $path = shift @libraries;
- my $libname = shift @libraries;
-
- $ldname =~ s/%%/641/;
-
- my $base = $setup_vars{'OOBUILDDIR'} . "/$path";
-
- my %bindings = find_bindings ($ldname);
- my %objects = read_object_list ($base, $libname);
- my $obj;
-
- printf "Counting symbols ";
- foreach $obj (keys %objects) {
- $objects{$obj} = count_symbols ("$base/$obj", \%bindings);
- }
- printf "\n";
-
- my $out_path = make_path ($base, "$libname.sorted");
- my $out_file;
- print "Writing new map to $out_path ...\n";
- open ($out_file, ">$out_path") || die "Can't open $out_path: $!";
-
- foreach $obj (sort { $objects{$b} <=> $objects{$a} } keys %objects) {
- print $objects{$obj}. ", ";
- print $out_file "$obj\n";
- }
- close ($out_file);
- print "\ndone\n";
-}
diff --git a/bin/run-tests.sh b/bin/run-tests.sh
deleted file mode 100755
index 812ef4894..000000000
--- a/bin/run-tests.sh
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env sh
-# runs all the test scripts in the test folder.
-# Args: <install dir>
-#
-# The main idea:
-# - The script will loop over the test folder subdirectories
-# - If there is a run.sh script inside, then execute it with the
-# oo install folder as arg.
-#
-# This way each script is autonomous and has place to store the logs
-# beside the run.sh script
-
-# Get the oo installation to improve
-ooinstall=$1
-if test "${ooinstall:0:1}" != "/"; then
- ooinstall=`pwd`/$ooinstall
-fi
-
-# Load the whole ooo-build config
-. `dirname $0`/setup >/dev/null 2>&1
-
-TESTDIR=$TOOLSDIR/test/
-
-if [ -n $COLORTERM ]; then
- RED="\033[31m"
- GREEN="\033[32m"
- ORANGE="\033[33m"
- CLEAR="\033[0m"
-fi
-
-for t in `ls -1 "$TESTDIR"`
-do
- # Is there a run.sh executable script inside?
- if test -x "$TESTDIR/$t/run.sh" ; then
- sh "$TESTDIR/$t/run.sh" "$ooinstall" "$TOOLSDIR"
- if test $? ; then
- printf "$t\t:\t${GREEN}PASSED${CLEAR}\n"
- else
- printf "$t\t:\t${RED}FAILED${CLEAR}\n"
- fi
- else
- printf "$t\t:\t${ORANGE}SKIPPED${CLEAR}\n"
- fi
-done
diff --git a/bin/setup.in b/bin/setup.in
deleted file mode 100755
index 9d80b562e..000000000
--- a/bin/setup.in
+++ /dev/null
@@ -1,386 +0,0 @@
-# ------------------------------------
-# This file is generated from setup.in
-# ------------------------------------
-
-BUILDDIR=@BUILDDIR@
-BUILD_WIN32='@BUILD_WIN32@'
-DISTRO='@DISTRO@'
-SPLIT='@SPLIT@'
-PIECE='@PIECE@'
-VENDORNAME='@VENDORNAME@'
-SRCDIR=@SRCDIR@
-VERSION='@OOO_VERSION@'
-BINSUFFIX='@OOO_BINSUFFIX@'
-OOO_LANGS='@OOO_LANGS@'
-if test "$OOO_LANGS" = ""; then OOO_LANGS="en-US"; fi
-OOO_POOR_HELP_LOCALIZATIONS='@OOO_POOR_HELP_LOCALIZATIONS@'
-OOO_GIT='@OOO_GIT@'
-OOO_SOURCEDIRNAME='@OOO_SOURCEDIRNAME@'
-CLONEDIR='@CLONEDIR@'
-RAWBUILDDIR='@RAWBUILDDIR@'
-GITTAG='@GITTAG@'
-APPLY_DIR='@APPLY_DIR@'
-BUILD_NCPUS='@BUILD_NCPUS@'
-ENABLE_CCACHE='@ENABLE_CCACHE@'
-ENABLE_ICECREAM='@ENABLE_ICECREAM@'
-ENABLE_DISTCC='@ENABLE_DISTCC@'
-ICECREAM_BINDIR='@ICECREAM_BINDIR@'
-MAX_JOBS='@MAX_JOBS@'
-export OOO_BUILD_NOARCH='@OOO_BUILD_NOARCH@'
-# do not lost ARCH_FLAGS from the environmnet; potential duplicates should not harm
-ARCH_FLAGS="$ARCH_FLAGS @WITH_ARCH_FLAGS@"
-OOO_WIDGET_FLAGS='@OOO_WIDGET_FLAGS@'
-OOO_CUSTOM_IMAGES='@OOO_CUSTOM_IMAGES@'
-OOO_CRYSTAL_IMAGES='@OOO_CRYSTAL_IMAGES@'
-OOOP_FONTS_PACK='@OOOP_FONTS_PACK@'
-OOOP_GALLERY_PACK='@OOOP_GALLERY_PACK@'
-OOOP_SAMPLES_PACK='@OOOP_SAMPLES_PACK@'
-OOOP_TEMPLATES_PACK='@OOOP_TEMPLATES_PACK@'
-LIGHTPROOF_LANG='@LIGHTPROOF_LANG@'
-LIGHTPROOF_FILENAMES='@LIGHTPROOF_FILENAMES@'
-NUMBERTEXT_EXTENSION='@NUMBERTEXT_EXTENSION@'
-HUNART_EXTENSION='@HUNART_EXTENSION@'
-TYPO_EXTENSION='@TYPO_EXTENSION@'
-WATCH_WINDOW_EXTENSION='@WATCH_WINDOW_EXTENSION@'
-DIAGRAM_EXTENSION='@DIAGRAM_EXTENSION@'
-VALIDATOR_EXTENSION='@VALIDATOR_EXTENSION@'
-BARCODE_EXTENSION='@BARCODE_EXTENSION@'
-GOOGLE_DOCS_EXTENSION='@GOOGLE_DOCS_EXTENSION@'
-NLPSOLVER_EXTENSION='@NLPSOLVER_EXTENSION@'
-LANGUAGETOOL_EXTENSION='@LANGUAGETOOL_EXTENSION@'
-OOOBLOGGER_EXTENSION='@OOOBLOGGER_EXTENSION@'
-SUNTEMPLATES_LANG='@SUNTEMPLATES_LANG@'
-COMPAT_OOWRAPPERS='@COMPAT_OOWRAPPERS@'
-OOO_BUILDVERSION='@OOO_BUILDVERSION@'
-OOO_SOURCEVERSION='@OOO_SOURCEVERSION@'
-OOO_STRIP='@OOO_STRIP@'
-OOO_ADDITIONAL_SECTIONS='@OOO_ADDITIONAL_SECTIONS@'
-SPLIT_APP_MODULES='@SPLIT_APP_MODULES@'
-SPLIT_OPT_FEATURES='@SPLIT_OPT_FEATURES@'
-RUN_POST_INSTALL_SCRIPTS='@RUN_POST_INSTALL_SCRIPTS@'
-DEFUZZ_PATCHES='@DEFUZZ_PATCHES@'
-TOOLSDIR='@TOOLSDIR@'
-ENABLE_ODK='@ENABLE_ODK@'
-USE_PREBUILD_UNOWINREG_DLL='@USE_PREBUILD_UNOWINREG_DLL@'
-ENABLE_MONO='@ENABLE_MONO@'
-ENABLE_EXTENSIONS='@ENABLE_EXTENSIONS@'
-ENABLE_SCSOLVER='@ENABLE_SCSOLVER@'
-ENABLE_GRAPHITE='@ENABLE_GRAPHITE@'
-PROPAGATED_ARGS="@PROPAGATED_ARGS@"
-CAIRO_VER=@CAIRO_VER@
-CAIRO_ENABLED=@CAIRO_ENABLED@
-SYSTEM_CAIRO=@SYSTEM_CAIRO@
-PREBUILT_WRITINGAIDS_TIMESTAMP=@PREBUILT_WRITINGAIDS_TIMESTAMP@
-SYSTEM_MDDS=@SYSTEM_MDDS@
-UNSTABLE_WP=@UNSTABLE_WP@
-SYSTEM_LIBWPD=@SYSTEM_LIBWPD@
-SYSTEM_LIBWPG=@SYSTEM_LIBWPG@
-SYSTEM_LIBWPS=@SYSTEM_LIBWPS@
-LIBWPD_TARBALL=@LIBWPD_TARBALL@
-LIBWPG_TARBALL=@LIBWPG_TARBALL@
-LIBWPS_TARBALL=@LIBWPS_TARBALL@
-GLITZ_VER=@GLITZ_VER@
-DEJAVU_FONTS_VER=@DEJAVU_FONTS_VER@
-DEJAVU_FONTS_PACK_NAME=@DEJAVU_FONTS_PACK_NAME@
-LIBERATION_FONTS_VER=@LIBERATION_FONTS_VER@
-AGFA_MONOTYPE_FONTS_SOURCE=@AGFA_MONOTYPE_FONTS_SOURCE@
-GNUPATCH=@GNUPATCH@
-GNUTAR=@GNUTAR@
-DRINK="@DRINK@"
-APACHE_ANT_TARBALL=@APACHE_ANT_TARBALL@
-APACHE_ANT=`echo $APACHE_ANT_TARBALL | sed 's/\(apache-ant-.*\)-bin\..*/\1/g'`
-
-if [ -z "$TARFILE_LOCATION" ]; then TARFILE_LOCATION=$SRCDIR; fi
-
-if test -f "$TOOLSDIR/distro-configs/Common.conf" ; then
- COMMON_OPTIONS="`cat $TOOLSDIR/distro-configs/Common.conf | xargs`";
-else
- echo "Warning: $TOOLSDIR/distro-configs/Common.conf not found."
-fi
-
-if test "z$BUILD_WIN32" != "z"; then
- if test -f "$TOOLSDIR/distro-configs/CommonWin32.conf" ; then
- PLATFORM_OPTIONS="`cat $TOOLSDIR/distro-configs/CommonWin32.conf | xargs`";
- else
- echo "Warning: $TOOLSDIR/distro-configs/CommonWin32.conf not found."
- fi
-else if test "z`uname -s`" = "zDarwin"; then
- if test -f "$TOOLSDIR/distro-configs/CommonMac.conf" ; then
- PLATFORM_OPTIONS="`cat $TOOLSDIR/distro-configs/CommonMac.conf | xargs`";
- else
- echo "Warning: $TOOLSDIR/distro-configs/CommonMac.conf not found."
- fi
-else
- if test -f "$TOOLSDIR/distro-configs/CommonLinux.conf" ; then
- PLATFORM_OPTIONS="`cat $TOOLSDIR/distro-configs/CommonLinux.conf | xargs`";
- else
- echo "Warning: $TOOLSDIR/distro-configs/CommonLinux.conf not found."
- fi
-fi
-fi
-
-if test "z$DISTRO" != "z" -a -f "$TOOLSDIR/distro-configs/$DISTRO.conf"; then
- CONFIGURE_OPTIONS="$COMMON_OPTIONS $PLATFORM_OPTIONS $OOO_WIDGET_FLAGS `cat $TOOLSDIR/distro-configs/$DISTRO.conf | xargs`";
-else
- echo "ERROR: Could not find the distribution specific configure options"
- echo " file in $TOOLSDIR/distro-configs/."
- echo " $TOOLSDIR/distro-configs/$DISTRO.conf is probably missing."
-fi
-
-# Misc. internal
-TOOLSDIR='@TOOLSDIR@'
-OOBUILDDIR='@OOBUILDDIR@'
-SYSTEM_GCC='@SYSTEM_GCC@'
-FORCE_GCC33='@FORCE_GCC33@'
-# the variable DESTDIR will be unset to do not break build, etc.
-# the test allows to source setup twice
-test "z$DESTDIR" = "z" || OODESTDIR=$DESTDIR
-
-# clobber bogus env vars. they badly screw things up
-unset DESTDIR;
-unset ERROR_FLAGS;
-unset MAKE
-unset WARN_CFLAGS
-
-# Where to install to
-prefix=@prefix@
-exec_prefix=@exec_prefix@
-datarootdir=@datarootdir@
-PREFIXBASE=@prefix@
-LIBDIRBASE=@libdir@
-MANDIRBASE=@mandir@
-DOCDIRBASE=@docdir@
-PREFIX=$OODESTDIR$PREFIXBASE
-MANDIR=$OODESTDIR$MANDIRBASE
-DOCDIR=$OODESTDIR$DOCDIRBASE
-DATADIRBASE=@datadir@
-DATADIR=$OODESTDIR$DATADIRBASE
-SYSCONFBASE=@sysconfdir@
-SYSCONFDIR=$OODESTDIR$SYSCONFBASE
-OOINSTBASE=@libdir@/@OOOINSTALLDIRNAME@
-OOINSTDIR=$OODESTDIR$OOINSTBASE
-OOOINSTALLDIRNAME=@OOOINSTALLDIRNAME@
-UPSTREAM_NAME=@UPSTREAM_NAME@
-SOURCE_TYPE=@SOURCE_TYPE@
-MONO_GAC_ROOT=@MONO_GAC_ROOT@
-
-TCSH=/bin/tcsh
-
-# it can be lib or lib64
-LIB=`echo $LIBDIRBASE|sed 's|.*/||'`
-
-# substitute ALL to the list of languages if possible
-test -n "$OO_TOOLSDIR" && solenv_dir=$SOLARENV || solenv_dir=$OOBUILDDIR/solenv
-if test -f $solenv_dir/inc/postset.mk ; then
- # generate shell variable from completelangiso= from solenv/inc/postset.mk
- # the sed command does the following:
- # + if a line ends with a backslash, append the next line to it
- # + adds " on the beginning of the value (after =)
- # + adds " at the end of the value
- # + removes en-US; we want to put it on the beginning
- # + prints just the section starting with 'completelangiso=' and ending with the " at the end of line
- eval $(sed -e :a -e '/\\$/N; s/\\\n//; ta' -n -e 's/=/="/;s/\([^\\]\)$/\1"/;s/en-US//;/^completelangiso/p' $solenv_dir/inc/postset.mk)
- ALL_LANGS="en-US $completelangiso"
- OOO_LANGS_LIST=`echo $OOO_LANGS | sed -e "s|ALL|$ALL_LANGS|g"`
-fi
-# force langs
-if test -n "$FORCE_OOO_LANGS" ; then
- OOO_LANGS_LIST="$FORCE_OOO_LANGS"
- OOO_LANGS="$FORCE_OOO_LANGS"
-fi
-
-# setup paths
-PATH="$BUILDDIR/bin:$PATH:$TOOLSDIR/bin"
-
-CONFIGURE_OPTIONS="$CONFIGURE_OPTIONS $CONFIGURE_EXTRA_OPTIONS"
-
-LD_LIBRARY_PATH="$BUILDDIR/$LIB:/opt/mozilla/$LIB:$LD_LIBRARY_PATH"
-
-# gcc speedup
-# check for >= 4G/8G cache size if using ccache?
-case "$ENABLE_CCACHE/$ENABLE_ICECREAM" in
- "yes/")
- CCACHE_NOLINK=1; export CCACHE_NOLINK
- CCACHE_UNIFY=1; export CCACHE_UNIFY
- CC="ccache gcc"; export CC
- CXX="ccache g++"; export CXX
- ;;
-
- "/yes")
- CC="$ICECREAM_BINDIR/gcc"; export CC
- CXX="$ICECREAM_BINDIR/g++"; export CXX
- ;;
-
- "yes/yes")
- CCACHE_NOLINK=1; export CCACHE_NOLINK
- CCACHE_UNIFY=1; export CCACHE_UNIFY
- CC="ccache $ICECREAM_BINDIR/gcc"; export CC
- CXX="ccache $ICECREAM_BINDIR/g++"; export CXX
- ;;
-esac
-
-# Icecream should use the internal gcc, if we want it
-if test "z$ENABLE_ICECREAM" != "z" -a "z$SYSTEM_GCC" = "z" ; then
- if test -x "$BUILDDIR/bin/gcc"; then
- CURRENTDIR="`pwd`"; cd "$BUILDDIR"
- ICE_TAR_NAME=`create-env --respect-path | grep '^creating ' | sed 's/^creating //'`
- cd "$CURRENTDIR"
- ICECC_VERSION="$BUILDDIR/$ICE_TAR_NAME"; export ICECC_VERSION
- fi
-fi
-
-if test "z$CCACHE_DIR" = "z" ; then
- CCACHE_DIR=$BUILDDIR/.ccache
-fi
-
-if test "z$ENABLE_DISTCC" != "z" ; then
- CC="distcc gcc"; export CC
- CXX="distcc g++"; export CXX
- if test "z$ENABLE_CCACHE" != "z" ; then
- CC="ccache $CC"; export CC
- CXX="ccache $CXX"; export CXX
- fi
-fi
-
-# how to mark config files in file lists
-if test "z$VENDORNAME" = "zNovell" -o "z$VENDORNAME" = "zRedHat"; then
- RPM_CONFIG_FILE_TAGS="%config"
-elif test "z$DISTRO" = "zPLD" ; then
- RPM_CONFIG_FILE_TAGS="%config(noreplace) %verify(not md5 size mtime)"
-fi
-
-# Tag file lists with language (for RPM)
-if test "z$VENDORNAME" = "zRedHat"; then
- RPM_TAG_LANGPACKS=TRUE
-fi
-
-
-# ---------------------------
-# Source / Version setup:
-# ---------------------------
-GCC_VER=gcc-4.2.3
-GCC_TARBALL=gcc-4.2.3.tar.bz2
-GCC_UNTAR_OPTIONS=jxf
-GCC_DECOMPRESS_CMD="bzip2 -dc"
-GCC_PATCHES=""
-
-BINUTILS_VER=binutils-2.18
-BINUTILS_TARBALL=binutils-2.18.tar.bz2
-BINUTILS_UNTAR_OPTIONS=jxf
-BINUTILS_DECOMPRESS_CMD="bzip2 -dc"
-BINUTILS_PATCHES=""
-
-# FIXME: -Wl,--as-needed is enabled by default for openSUSE-11.2
-# there are missing symbols when linking kde libraries now
-# See http://fourier.suse.de/mlarch/SuSE/development-gcc/2009/development-gcc.2009.07/msg00024.html
-export SUSE_ASNEEDED=0
-
-OOO_SCALED_ICONS_VER=ooo-scaled-icons
-OOO_SCALED_ICONS_TARBALL=$OOO_SCALED_ICONS_VER.tar.gz
-OOO_SCALED_ICONS_UNTAR_OPTIONS=xzf
-OOO_SCALED_ICONS_DECOMPRESS_CMD="gunzip -c"
-
-#
-# up-stream changed the split and names at random
-# don't you just love them ? - with split pkgs we
-# can bin this though.
-#
-if test -n "$UPSTREAM_NAME" ; then
- OOO_TARBALL="${UPSTREAM_NAME}_core.tar.bz2"
- OOO_SYSTEM_TARBALL="${UPSTREAM_NAME}_system.tar.bz2"
- OOO_BINFILTER_TARBALL="${UPSTREAM_NAME}_binfilter.tar.bz2"
- OOO_LANG_TARBALL="${UPSTREAM_NAME}_l10n.tar.bz2"
- OOO_EXTENSIONS_TARBALL="${UPSTREAM_NAME}_extensions.tar.bz2"
-else
- OOO_TARBALL="$OOO_SOURCEDIRNAME-core.tar.bz2"
- OOO_SYSTEM_TARBALL="$OOO_SOURCEDIRNAME-system.tar.bz2"
- OOO_BINFILTER_TARBALL="$OOO_SOURCEDIRNAME-binfilter.tar.bz2"
- OOO_LANG_TARBALL="$OOO_SOURCEDIRNAME-lang.tar.bz2"
- OOO_EXTENSIONS_TARBALL="$OOO_SOURCEDIRNAME-extensions.tar.bz2"
-fi
-OOO_SPLIT_PREFIX=""
-if test "z@SPLIT@" = "zyes"; then
- OOO_SPLIT_PREFIX="libreoffice-"
- OOO_SDK_OO_TARBALL="libreoffice-sdk-$OOO_SOURCEVERSION.tar.bz2"
- OOO_SYSTEM_TARBALL="libreoffice-libs-extern-sys-$OOO_SOURCEVERSION.tar.bz2"
- OOO_TARBALL=not-there-at-all
- OOO_LANG_TARBALL=not-there-at-all
- OOO_BINFILTER_TARBALL=not-there-at-all
-fi
-
-OOO_EXTRA_ARTWORK="@OOO_EXTRA_ARTWORK@"
-OOO_UNTAR_OPTIONS=jxf
-OOO_DECOMPRESS_CMD="bzip2 -dc"
-
-UNICOWS_SRC=$SRCDIR/unicows.exe;
-DBGINST_SRC=$SRCDIR/dbghinst.EXE;
-ODMA_SRC=$SRCDIR/5_11_98Odma20.zip;
-
-BUILD_MOZILLA_SOURCE='@BUILD_MOZILLA_SOURCE@'
-
-OPENCLIPART_VER=@OPENCLIPART_VER@
-OPENCLIPART_DIR=@OPENCLIPART_DIR@
-
-MDBTOOLS_SRC=@MDBTOOLS_SRC@
-SYSTEM_MDBTOOLS=@SYSTEM_MDBTOOLS@
-
-export LD_LIBRARY_PATH PATH OOO_SOURCEDIRNAME APPLY_DIR
-export CCACHE_DIR CCACHE_NOLINK CCACHE_UNIFY
-export SRCDIR BUILDDIR OOBUILDDIR GITTAG
-export BUILD_FLAGS VERSION TOOLSDIR VENDORNAME
-export PREFIX PREFIXBASE SYSCONFDIR SYSCONFBASE
-export MANDIR MANDIRBASE DOCDIR DOCDIRBASE
-export OOINSTDIR OOINSTBASE OOOINSTALLDIRNAME
-export OODESTDIR OOO_LANGS OOO_LANGS_LIST
-export OOO_POOR_HELP_LOCALIZATIONS
-export CONFIGURE_OPTIONS
-export DISTRO BINSUFFIX ARCHITECTURE BUILD_MAGIC
-export BUILD_WIN32
-export FORCE_GCC33 ARCH_FLAGS
-export OPENCLIPART_VER OPENCLIPART_DIR
-export OOO_SPLIT_PREFIX
-
-if test "z$RPM_TAG_LANGPACKS" != "z"; then
-export RPM_TAG_LANGPACKS
-fi
-
-echo "Setup:"
-echo " Source: '$OOO_SOURCEDIRNAME'"
-echo " Apply: '$APPLY_DIR/apply'"
-echo " Flags: '$BUILD_FLAGS'"
-echo " Suffix: '$BINSUFFIX'"
-echo " Dir: '$BUILDDIR'"
-echo " DestDir: '$DESTDIR'"
-echo " Path: '$PATH'"
-echo " Prefix: '$PREFIX'"
-echo " Mandir: '$MANDIR'"
-echo " Docdir: '$DOCDIR'"
-echo " Sysconf: '$SYSCONFDIR'"
-echo " OOInBase: '$OOINSTBASE'"
-echo " OOInstall: '$OOINSTDIR'"
-echo " OODestDir: '$OODESTDIR'"
-echo " Config: $CONFIGURE_OPTIONS $PROPAGATED_ARGS"
-if test "$ENABLE_CCACHE" = "yes"; then
- echo " ccache: yes, ccache directory is '$CCACHE_DIR'"
-else
- echo " ccache: no"
-fi
-if test "$ENABLE_ICECREAM" = "yes"; then
- echo " icecream: yes, in '$ICECREAM_BINDIR'"
- if test "z$ICECC_VERSION" != "z"; then
- echo " icecream: using '$ICECC_VERSION' compiler tarball."
- fi
-else
- echo " icecream: no"
-fi
-if test "$ENABLE_DISTCC" = "yes"; then
- echo " distcc: yes"
-else
- echo " distcc: no"
-fi
-if test "$MAX_JOBS" -gt 1; then
- echo " Max number of jobs to run in parallel is '$MAX_JOBS'"
-fi
-if test "$BUILD_NCPUS" -gt 1; then
- echo " Max number of projects to build in parallel is '$BUILD_NCPUS'"
-fi
diff --git a/bin/show-issues b/bin/show-issues
deleted file mode 100755
index 50e52a991..000000000
--- a/bin/show-issues
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/usr/bin/env perl
-
-sub usage()
-{
- print STDERR "show-patches - list issues of patches without the not intended for upstream
-Usage: show-patches\n";
-
- exit 1;
-}
-
-$help = shift @ARGV;
-if ( defined( $help ) && ( $help eq "-h" || $help eq "--help" ) ) {
- usage();
-}
-
-open APPLY, "patches/src680/apply" || die "Cannot open patches/src680/apply\n";
-
-my $section = "";
-my $section_owner = "";
-my $section_issue = "";
-my $result = "";
-my %all = ();
-my %no_issue = ();
-my $no_issue_no_owner = "";
-
-sub insert_issue($$$)
-{
- my ( $who, $what, $patch ) = @_;
-
- if ( $what ne "" ) {
- if ( $who ne "" ) {
- if ( !defined( $all{$who} ) ) {
- $all{$who} = $what;
- }
- else {
- $all{$who} .= "+$what" unless "+$all{$who}+" =~ /\+$what\+/;
- }
- }
- else {
- $result .= "+" unless $result eq "";
- $result .= $what;
- }
- }
- elsif ( !( $patch =~ /^(cws-|workspace\.)/ ) ) {
- if ( $who ne "" ) {
- if ( !defined( $no_issue{$who} ) ) {
- $no_issue{$who} = $patch;
- }
- else {
- $no_issue{$who} .= "\n$patch" unless "$no_issue{$who}" =~ /$patch/;
- }
- }
- else {
- $no_issue_no_owner .= "\n" unless $no_issue_no_owner eq "";
- $no_issue_no_owner .= $patch;
- }
- }
-}
-
-while ( <APPLY> ) {
- if ( /^\s*#.*/ ) {
- # comment
- }
- elsif ( /^\s*\[\s*(.*)\]/ ) {
- $section = $1;
- $section_owner = "";
- $section_issue = "";
- }
- elsif ( /^SectionOwner\s*=>\s*([^\s]*)/ ) {
- $section_owner = $1;
- }
- elsif ( /^SectionIssue\s*=>.*i#([0-9]*)/ ) {
- $section_issue = $1;
- insert_issue( $section_owner, $section_issue, "" );
- }
- elsif ( /^\s*([^\#,\s]+\.diff)\s*,?\s*(.*)?$/ )
- {
- my $patch = $1;
- my $tmp = $2;
- my $issue = $section_issue;
- my $owner = $section_owner;
- if ( $tmp =~ /i#([0-9]*)/ ) {
- $issue = $1;
- }
- foreach $o ( split( /\s*,\s*/, $tmp ) ) {
- $owner = $o unless $o =~ /#/;
- }
-
- if ( !( $section =~ /FixesNotForUpstream/ ) &&
- !( $section =~ /Defaults/ ) )
- {
- insert_issue( $owner, $issue, $patch );
- }
- }
-}
-
-close APPLY;
-
-my $uniq = "";
-foreach $who ( sort( keys %all, keys %no_issue ) ) {
- next if ( $who eq $uniq );
- $uniq = $who;
-
- print "===== $who =====\n";
- if ( defined( $all{$who} ) ) {
- print "\nhttp://www.openoffice.org/issues/buglist.cgi?issue_id=$all{$who}\n";
- }
- if ( defined( $no_issue{$who} ) ) {
- print "\nNo IZ number:\n\n$no_issue{$who}\n";
- }
- print "\n";
-}
-
-if ( $result ne "" ) {
- print "===== Without owner =====\nhttp://www.openoffice.org/issues/buglist.cgi?issue_id=$result\n";
-}
-
-if ( $no_issue_no_owner ne "" ) {
- print "===== No owner and no IZ number =====\n\n$no_issue_no_owner\n";
-}
diff --git a/bin/show-obsolete-patches b/bin/show-obsolete-patches
deleted file mode 100755
index 510e4a240..000000000
--- a/bin/show-obsolete-patches
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env perl
-
-sub usage()
-{
- print STDERR "show-obsolete-patches - list issues of obsolete patches
-Usage: show-obsolete-patches src680-mXYZ\n";
-
- exit 1;
-}
-
-$mws = "src680";
-$milestone = shift @ARGV;
-if ( !defined( $milestone ) ) {
- print "Milestone not specified...\n\n";
- usage();
-}
-elsif ( $milestone =~ /($mws-m|m|)([0-9]+)/ ) {
- $milestone = $2;
-}
-else {
- usage();
-}
-
-open APPLY, "patches/src680/apply" || die "Cannot open patches/src680/apply\n";
-
-my $section = "";
-my $result = "";
-while ( <APPLY> ) {
- if ( /^\s*#.*/ ) {
- # comment
- }
- elsif ( /^\s*\[\s*(.*)\]/ ) {
- $section = $1;
- }
- elsif ( /^\s*([^\#,\s]+.diff)\s*,?\s*(.*)?$/ )
- {
- my $patch = $1;
- my $tmp = $2;
- my $issue = "";
- if ( $tmp =~ /i#([0-9]*)/ ) {
- $issue = $1;
- }
-
- if ( ( ( $section =~ /<=\s*$mws-m([0-9]*)/ ) && ( $1 < $milestone ) ) ||
- ( ( $section =~ /<\s*$mws-m([0-9]*)/ ) && ( $1 <= $milestone ) ) ||
- ( ( $section =~ /==\s*$mws-m([0-9]*)/ ) && ( $1 < $milestone ) ) )
- {
- if ( $issue ne "" ) {
- $result .= "+" unless $result eq "";
- $result .= $issue;
- }
- else {
- print "Obsolete patch without IZ number: $patch\n";
- }
- }
- }
-}
-
-close APPLY;
-
-if ( $result ne "" ) {
- print "http://www.openoffice.org/issues/buglist.cgi?issue_id=$result\n";
-}
-else {
- print "No obsolete patches for $mws-m$milestone.\n";
-}
diff --git a/bin/sloppypatch.pl b/bin/sloppypatch.pl
deleted file mode 100755
index b72dd628d..000000000
--- a/bin/sloppypatch.pl
+++ /dev/null
@@ -1,218 +0,0 @@
-#!/usr/bin/env perl
-
-use File::Temp qw/ :mktemp /;
-
-# apply a patch, but only if the top-level directory exists
-# mentioned in the patch; eg.
-# --- sw/source/ui/foo.xml
-# will apply if sw/ exists.
-# Bugs:
-# doesn't cope with '-p' - assumes -p0
-
-my $applydir;
-chomp ($applydir = `pwd`);
-for (my $idx = 0; $idx < @ARGV; $idx++) {
- if ($ARGV[$idx] eq '-d') {
- $applydir = $ARGV[$idx+1];
- }
-}
-
-my $tmpfile;
-my $fh;
-($fh,$tmpfile) = mkstemp("/tmp/sloopypatch-XXXXXX");
-
-sub path_exists($)
-{
- my $path = shift;
- my $topdir = undef;
- my $subpath = undef;
-
-# print "Checking path: $path\n";
-
- # skip initial ./ is needed
- $path =~ s|\.\/+||;
-
- if ( $path =~ m/^([^\/]+)([^ \t]+)/ ) {
- $topdir = "$1";
- $subpath = "$2";
- chomp $subpath;
- }
-
- # the relative paths starting with ".." must be invalid because we use
- # the -p0 patches and the upper direcotry name is different with every milestone
- if ( (defined $topdir && "$topdir" ne ".." && -d "$applydir/$topdir" ) || ( -f "$applydir/$path") ) {
-# print " passed $topdir,$path\n";
- return $path;
- } else {
-# print " not passed\n";
- return undef;
- }
-}
-
-my $applying = undef;
-my $sections = 0;
-my $path1 = undef;
-my $path2 = undef;
-my @header_lines = ();
-my $header_unified = 0;
-my $header_found = 0;
-my $line_matched = 0;
-while (<STDIN>) {
- my $line = $_;
-
- # ignore comments
- next if $line =~ m/^\#/;
-
- # ignore the git headers
- # especially "index " line" drives the patch tool mad when:
- # + it is included but the related --- and +++ lines are not used in the end
- # + it looks like "index 0000000..00204f9" which means a new file but
- # it is actually a mess from the last used hunk
- next if $line =~ m/^diff --git/;
- next if $line =~ m/^new file/;
- next if $line =~ m/^index /;
-
- # same of unified context patch header:
- # --- file.old 2009-02-26 20:04:16.000000000 +0100
- # +++ file 2009-02-26 20:04:41.000000000 +0100
- # @@ -1,26 +1,28 @@
-
- # sample of copied context patch header:
- # *** file.old 2009-02-26 20:04:16.000000000 +0100
- # --- file 2009-02-26 20:04:41.000000000 +0100
- # ***************
- # *** 1,26 ****
-
- $line_matched = 0;
-
- if (! @header_lines) {
- if ( $line =~ m/^--- [\s]*([^\s]+)/ ) {
- $path1 = "$1";
- $header_unified = 1;
- $line_matched = 1;
- }
- if ( $line =~ m/^\*\*\* [\s]*([^\s]+)/ ) {
- $path1 = "$1";
- $header_unified = 0;
- $line_matched = 1;
- }
- # hack to support unified diffs where the --- line is missing
- if ( $line =~ m/^\+\+\+ [\s]*([^\s]+)/ ) {
- # only line 2
- $path2 = "$1";
- $header_unified = 1;
- $line_matched = 1;
- # fake line 1
- $path1 = $path2;
- push @header_lines, "";
- }
- } elsif (@header_lines == 1) {
- if ($header_unified) {
- if ( $line =~ m/^\+\+\+ [\s]*([^\s]+)/ ) {
- $path2 = "$1";
- $line_matched = 1;
- }
- } else {
- if ( $line =~ m/^--- [\s]*([^\s]+)/ ) {
- $path2 = "$1";
- $line_matched = 1;
- }
- }
- } elsif (@header_lines == 2) {
- if ($header_unified) {
- if ( $line =~ m/^\@\@ -(\d+),(\d+) \s*\+\d+,\d+ \s*\@\@/ ) {
- # new file looks like: @@ -0,0 +1,23 @@
- $new_file=1 if (($1 == 0) && ($2 == 0));
- $line_matched = 1;
- $header_found = 1;
-# print "Unified header found\n";
- }
- } else {
- if ( $line =~ m/^\*\*\*\*\*\*\*\*\*\*\*\*\*\*\*/ ) {
- $line_matched = 1;
- }
- }
- } elsif (@header_lines == 3) {
- if (! $header_unified) {
- if ( $line =~ m/^\*\*\* \s*(\d+)(,\d+)? \s*\*\*\*\*/ ) {
- # new file looks like: *** 0 ****
- $new_file = 1 if ( ( $1 == 0 ) && ( ! defined $2 ) );
- $line_matched = 1;
- $header_found = 1;
-# print "Context header found\n";
- }
- }
- }
-
- if (@header_lines && ! $line_matched ) {
- # looked like header but it is not header in the end
- if ( $applying ) {
- # print the saved lines if applying this piece
- foreach (@header_lines) {
- print $fh $_;
- }
- }
- @header_lines = ();
- # FIXME: should check the previous lines again???
- # or at least the last one; might be there extra ---, +++ lines without real hunk?
- # something like?
- # --- fileA
- # +++ fileA
- # --- fileB
- # +++ fileB
- # @@ -10,6 +10,7 @@
- # well this will ve coverted by the hack for missing --- line, see above;
- }
-
- if ( $header_found ) {
- # accept new path without the "path_exists" check only in topdir
- $applying = undef;
- $applying = $path2 if ( $new_file && ( ! $path2 =~ m/^(\.\/+)?[^\/]+\/[^\/]+/ ) );
- # need to check all ***, +++, --- paths; any of them might be correct
- # the other path might be invalid, e.g. absolute path
- $applying = path_exists( $path2 ) if ( ! $applying );
- $applying = path_exists( $path1 ) if ( ! $applying );
-
- if ( $applying ) {
- $sections++;
- print STDERR "+ apply fragment for $applying\n";
- # print the saved header lines
- foreach (@header_lines) {
- print $fh $_;
- }
- }
- @header_lines = ();
- $new_file = 0;
- $header_found = 0;
- $line_matched = 0;
- }
-
- if ($line_matched) {
- # save the header line
- push @header_lines, $line;
- $line_matched = 0;
-# print "Matched header line $#header_lines\n";
- } else {
- # no header line => print it if applying this piece
- if ( $applying ) {
- print $fh $line;
- }
- }
-}
-$fh->close;
-
-my $result = 0;
-if ($sections > 0) {
-# patch complains a lot with empty input
-# print "calling: patch @ARGV < $tmpfile\n";
- if (system ("patch @ARGV < $tmpfile")) {
- print STDERR "\nError: failed to apply patch @ARGV: $!\n\n";
- }
- $result = $? >> 8;
-} else {
- print STDERR "- skipped whole patch\n";
-}
-
-unlink $tmpfile;
-
-exit $result;
diff --git a/bin/snapshot-build b/bin/snapshot-build
deleted file mode 100755
index 192c43e97..000000000
--- a/bin/snapshot-build
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/bin/sh
-
-failed()
-{
- echo "$1";
- sleep 3600;
-}
-
-# find the parent directory.
-while ( ! (test -f configure) ) do cd ..; done
-
-while (true) do
-
- DESTDIR="`pwd`/dest";
-
- echo "Started build `date`";
- rm -Rf $DESTDIR;
- export DESTDIR;
-
- if ! (cvs upd -A); then
- failed "Update failed";
-
- elif ! (./autogen.sh); then
- failed "Autogen failed";
-
- elif ! (make); then
- echo "Make failed";
-
- elif ! (make install); then
- failed "Make install failed";
-
- elif ! (rpmbuild -bb --nobuild ooo1.1.spec); then
- failed "Failed to pack";
-
- fi
-
- sleep 10;
-done
diff --git a/bin/split-library.pl b/bin/split-library.pl
deleted file mode 100755
index 41e144097..000000000
--- a/bin/split-library.pl
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/usr/bin/env perl
-
-# Tool to help you to split a library into more ones. Choose one or more
-# objects (let's say <blah>.o) that you need in the library, and:
-#
-# cd unxlng*.pro/slo
-# for I in *.o ; do nm $I > ../symbols/${I/.o/.txt} ; done
-# cd ../symbols
-# [add the <blah>.txt's you need in the library to %closure (see below the escherex.txt as example)]
-# ./split-library.pl
-#
-# Result: On stdout, you see all the objects you need in the same library
-
-%undefined = ();
-%where_defined = ();
-
-sub read_symbols( $ )
-{
- my ( $fname ) = @_;
-
- my @undefined = ();
-
- open FILE, "<$fname" || die "Cannot open $fname.\n";
- while ( <FILE> ) {
- if ( / [BTV] (.*)/ ) {
- $where_defined{$1} = $fname;
- }
- elsif ( / U (.*)/ ) {
- push @undefined, $1;
- }
- }
- close FILE;
-
- $undefined{$fname} = \@undefined;
-}
-
-my @files = split /\n/, `ls *.txt`;
-foreach $file ( @files ) {
- read_symbols( $file ) if ( !( $file =~ /^\s*$/ ) );
-}
-
-%closure = ();
-
-$closure{'escherex.txt'} = 1;
-$closure{'eschesdo.txt'} = 1;
-
-my $something_added = 0;
-do {
- $something_added = 0;
- foreach $file ( keys %closure ) {
- my $undef_ref = $undefined{$file};
-
- foreach $undef ( @$undef_ref ) {
- my $where = $where_defined{$undef};
- if ( defined( $where ) && !defined( $closure{$where} ) ) {
- $closure{$where} = 1;
- $something_added = 1;
- print "$where\n";
- }
- }
- }
-} while ( $something_added );
diff --git a/bin/split-to-projects b/bin/split-to-projects
deleted file mode 100755
index 537247b44..000000000
--- a/bin/split-to-projects
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/bin/sh
-
-usage() {
- echo "split-to-projects patch.diff basename
-
-Split patch into several patches according to projects.
- patch.diff Patch that will be separated
- basename Base for the resulting names" >&2;
-
- exit 1;
-}
-
-PATCH="$1"
-BASE="$2"
-
-[ "$PATCH" != "" ] || usage
-[ "$BASE" != "" ] || usage
-
-for PROJECT in `lsdiff "$PATCH" | sed 's/\/.*//' | sort | uniq`
-do
- filterdiff -i "$PROJECT/*" "$PATCH" > "$BASE-$PROJECT.diff"
-done
diff --git a/bin/src-pack b/bin/src-pack
deleted file mode 100755
index 6c6b93bb8..000000000
--- a/bin/src-pack
+++ /dev/null
@@ -1,112 +0,0 @@
-#!/bin/sh
-
-# TODO - split out win32 bits ?
-
-tar_opts=cj
-
-if test "z$1" = "z" -o "z$1" = "z-h" -o "z$1" = "z--help"; then
- echo "src-package: <src-fir-name> [<destination-directory>]";
- echo " the src will be packed into several archives";
- echo " prefixed by the last path element";
- exit 0;
-fi
-
-src=$1
-if test "z$2" = "z"; then
- dest="."
-else
- dest=$2
-fi
-
-echo "packing source in $src to directory $dest";
-
-if test ! -f "$src/vcl/source/gdi/outdev.cxx"; then
- echo "Doesn't look like an OO.o source tree";
- exit 1;
-fi
-
-system_dirs="$src/python $src/bitstream_vera_fonts $src/freetype $src/jpeg";
-system_dirs="$system_dirs $src/dictionaries $src/libxml2 $src/zlib $src/moz";
-system_dirs="$system_dirs $src/curl $src/neon ";
-extension_dirs="$src/sdext $src/swext $src/tomcat $src/apache-commons";
-extension_dirs="$extension_dirs $src/jfreereport $src/reportbuilder";
-extension_dirs="$extension_dirs $src/xpdf"
-
-# future system bits:
-incomplete_modules="$src/curl $src/expat $src/icu $src/libxmlsec $src/neon";
-incomplete_modules="$incomplete_modules $src/msfontextract $src/regexp $src/rhino ";
-incomplete_modules="$incomplete_modules $src/sablot $src/sane $src/stlport $src/twain";
-incomplete_modules="$incomplete_modules $src/unixODBC $src/x11_extensions $src/boost";
-
-# Win32 dirs
-win32_dirs="$src/bridges/source/cpp_uno/msvc_win32_intel
- $src/connectivity/source/drivers/ado
- $src/desktop/win32
- $src/dtrans/source/win32
- $src/extensions/source/activex
- $src/extensions/source/ole
- $src/extensions/source/installation
- $src/extensions/source/plugin/win
- $src/external/ado
- $src/external/gpc
- $src/fpicker/source/win32
- $src/postprocess/rebase
- $src/setup_native/source/win32
- $src/shell/source/win32
- $src/shell/source/tools/lngconvex
- $src/shell/source/tools/regsvrex
- $src/shell/source/all/zipfile
- $src/shell/source/all/ooofilereader
- $src/shell/source/all/ooofilereader
- $src/shell/source/backends/wininetbe
- $src/sysui/source/win32
- $src/sysui/desktop/icons
- $src/tools/win
- $src/vcl/win"
-# FIXME - add build conditional to $src/embedserv & add
-
-# FIXME - remove svdbt.dll with an unclear license; it looks unused anyway; http://www.openoffice.org/issues/show_bug.cgi?id=80408
-rm -f $src/sfx2/util/svdbt.dll
-
-# This saves only 800k
-#
-# echo "Packing CVS entries ...";
-# find $src -name CVS > cvs.lst || exit 1;
-# tar $tar_opts -f $dest/$src-cvs.tar.bz2 --files-from=cvs.lst || exit 1;
-# cat cvs.lst | xargs rm -Rf || exit 1
-# rm cvs.lst
-
-echo "Packing binfilter ...";
-tar $tar_opts -f $dest/$src-binfilter.tar.bz2 $src/binfilter || exit 1;
-rm -Rf $src/binfilter || exit 1;
-
-echo "Packing system pieces ...";
-echo "tar $tar_opts -f $dest/$src-system.tar.bz2 $system_dirs";
-tar $tar_opts -f $dest/$src-system.tar.bz2 $system_dirs || exit 1;
-rm -Rf $system_dirs || exit 1;
-
-# This saves only 1.5Mb
-#
-# echo "Packing win32 bits ...";
-# tar $tar_opts -f $dest/$src-win32.tar.bz2 $win32_dirs || exit 1;
-# rm -Rf $win32_dirs || exit 1;
-
-echo "Packing i18n bits ...";
-find $src -name localize.sdf > localize.lst || exit 1;
-find $src/extras/source/templates -type f | grep -v 'CVS' | grep -v 'delzip' | grep -v 'makefile.mk' | grep -v '/de' | grep -v '/en-US' >> localize.lst || exit 1;
-tar $tar_opts -f $dest/$src-lang.tar.bz2 --files-from=localize.lst || exit 1;
-cat localize.lst | xargs rm -Rf || exit 1
-rm localize.lst
-
-echo "Packaging extension bits ..."
-tar $tar_opts -f $dest/$src-extensions.tar.bz2 $extension_dirs || exit 1;
-rm -Rf $extension_dirs || exit 1;
-
-echo "Packing core source ...";
-tar $tar_opts -f $dest/$src-core.tar.bz2 $src || exit 1;
-rm -Rf $src
-
-echo "Generating md5 sums ...";
-for suffix in binfilter sdk_oo system lang core; do # win32 cvs
- md5sum $dest/$src-$suffix.tar.bz2 > $dest/$src-$suffix.tar.bz2.md5 || exit 1;
-done
diff --git a/bin/src-pack-solaris b/bin/src-pack-solaris
deleted file mode 100755
index 76c753405..000000000
--- a/bin/src-pack-solaris
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/bin/sh
-
-# TODO - split out win32 bits ?
-
-if test "z$1" = "z" -o "z$1" = "z-h" -o "z$1" = "z--help"; then
- echo "src-package: <src-fir-name> [<destination-directory>]";
- echo " the src will be packed into several archives";
- echo " prefixed by the last path element";
- exit 0;
-fi
-
-src=$1
-if test "z$2" = "z"; then
- dest="."
-else
- dest=$2
-fi
-
-echo "packing source in $src to directory $dest";
-
-if test ! -f "$src/vcl/source/gdi/outdev.cxx"; then
- echo "Doesn't look like an OO.o source tree";
- exit 1;
-fi
-
-system_dirs="$src/python $src/bitstream_vera_fonts $src/freetype $src/jpeg";
-system_dirs="$system_dirs $src/dictionaries $src/libxml2 $src/zlib $src/moz";
-system_dirs="$system_dirs $src/curl $src/neon $src/sndfile";
-
-# future system bits:
-incomplete_modules="$src/curl $src/expat $src/icu $src/libxmlsec $src/nas $src/neon";
-incomplete_modules="$incomplete_modules $src/msfontextract $src/regexp $src/rhino ";
-incomplete_modules="$incomplete_modules $src/sablot $src/sane $src/stlport $src/twain";
-incomplete_modules="$incomplete_modules $src/unixODBC $src/x11_extensions $src/boost";
-
-# Win32 dirs
-win32_dirs="$src/bridges/source/cpp_uno/msvc_win32_intel
- $src/connectivity/source/drivers/ado
- $src/desktop/win32
- $src/dtrans/source/win32
- $src/extensions/source/activex
- $src/extensions/source/ole
- $src/extensions/source/installation
- $src/extensions/source/plugin/win
- $src/external/ado
- $src/external/gpc
- $src/fpicker/source/win32
- $src/postprocess/rebase
- $src/setup_native/source/win32
- $src/shell/source/win32
- $src/shell/source/tools/lngconvex
- $src/shell/source/tools/regsvrex
- $src/shell/source/all/zipfile
- $src/shell/source/all/ooofilereader
- $src/shell/source/all/ooofilereader
- $src/shell/source/backends/wininetbe
- $src/sysui/source/win32
- $src/sysui/desktop/icons
- $src/tools/win
- $src/vcl/win"
-# FIXME - add build conditional to $src/embedserv & add
-
-# This saves only 800k
-#
-# echo "Packing CVS entries ...";
-# find $src -name CVS > cvs.lst || exit 1;
-# tar cf - -I cvs.lst | bzip2 $dest/${src}_src_cvs.tar.bz2 || exit 1;
-# cat cvs.lst | xargs rm -Rf || exit 1
-# rm cvs.lst
-
-echo "Packing binfilter ...";
-tar cf - $src/binfilter | bzip2 > $dest/${src}_src_binfilter.tar.bz2 || exit 1;
-rm -Rf $src/binfilter || exit 1;
-
-echo "Packing sdk_oo ...";
-tar cf - $src/sdk_oo | bzip2 > $dest/${src}_src_sdk.tar.bz2 || exit 1;
-rm -Rf $src/sdk_oo || exit 1;
-
-echo "Packing system pieces ...";
-tar cf - $system_dirs | bzip2 > $dest/${src}_src_system.tar.bz2 || exit 1;
-rm -Rf $system_dirs || exit 1;
-
-# This saves only 1.5Mb
-#
-# echo "Packing win32 bits ...";
-# tar cf - $win32_dirs | bzip2 > $dest/${src}_src_win32.tar.bz2 || exit 1;
-# rm -Rf $win32_dirs || exit 1;
-
-echo "Packing i18n bits ...";
-find $src -name localize.sdf > localize.lst || exit 1;
-find $src/extras/source/templates -type f | grep -v 'CVS' | grep -v 'delzip' | grep -v 'makefile.mk' | grep -v '/de' | grep -v '/en-US' >> localize.lst || exit 1;
-tar cf - -I localize.lst | bzip2 > $dest/${src}_src_l10n.tar.bz2 || exit 1;
-cat localize.lst | xargs rm -Rf || exit 1
-rm localize.lst
-
-echo "Packing core source ...";
-tar cf - $src | bzip2 > $dest/${src}_src_core.tar.bz2 || exit 1;
-rm -Rf $src
-
-#echo "Generating md5 sums ...";
-#for suffix in src_binfilter src_sdk src_system src_l10n src_core ; do # src_win32 src_cvs
-# md5sum $dest/${src}_$suffix.tar.bz2 > $dest/${src}_$suffix.tar.bz2.md5 || exit 1;
-#done
-
-echo "Finished successfully"
diff --git a/bin/src-pack2 b/bin/src-pack2
deleted file mode 100755
index 7097ffb97..000000000
--- a/bin/src-pack2
+++ /dev/null
@@ -1,125 +0,0 @@
-#!/usr/bin/env perl
-
-#
-# this script is run on go-oo from ~ooweb/ooo-build
-# to auto-pack the source code on milestone release.
-# don't forget to update it there.
-#
-
-#use strict;
-
-# Towards a more modular build ...
-
-my $tar_opts='cj';
-
-$0 =~ /(.*)\//;
-my $dir = $1;
-my $module_list = "$dir/modules2.txt";
-open MODULES, "$module_list" or die die "Can't open: $module_list: $!";
-my %module_map = map {/(.*)=(.*)$/; ($1, [split /,/, $2])} <MODULES> or die;
-# /})];
-
-my $force = 0;
-
-my $src;
-my $dest;
-
-sub print_help_and_exit()
-{
- print "src-package: <src-directory-name> [<destination-directory>] [--force]\n";
- print " the src will be un-packed into several archives\n";
- print " prefixed by the last path element\n";
- print " --force: ignore missing modules\n";
- exit 0;
-}
-
-sub slurp_dir_to_hash($)
-{
- my $src = shift;
- my %entries;
-
- opendir my $dirh, "$src" || die "Can't open $src: $!";
- while (my $entry = readdir ($dirh)) {
- next if ($entry =~ /^\./);
- $entries{$entry} = 1;
- }
- closedir ($dirh);
- return \%entries;
-}
-
-sub verify_module_map($$)
-{
- my $src = shift;
- my $module_map = shift;
- my $failed = 0;
-
- my $modules = slurp_dir_to_hash ($src);
-
- print "Verifying module map ...";
-
- for my $section (keys %{$module_map}) {
-# print "check for $section\n";
- for my $module (@{$module_map->{$section}}) {
-# print "\tcheck for $module\n";
- if (!defined $modules->{$module}) {
- print "\n\nError: section '$section' contains non-existent module '$module'\n\n";
- $failed = 1 if (!$force);
- }
- delete $modules->{$module};
- }
- }
- my @left_over = keys %{$modules};
- if (@left_over) {
- print "\n\nError: modules not included in any section: '@left_over'\n\n";
- $failed = 1;
- }
-
- die "Failed." if ($failed);
- print " done\n";
-}
-
-for my $arg (@ARGV) {
- if ($arg eq "--help" || $arg eq '-h') {
- print_help_and_exit();
- } elsif ($arg eq '--force') {
- $force = 1;
- } elsif (!defined $src) {
- $src = $arg;
- } elsif (!defined $dest) {
- $dest = $arg;
- } else {
- die "Too many arguments '$arg'";
- }
-}
-
-if (!defined $src) {
- print_help_and_exit();
-}
-$dest = '.' if (!defined $dest);
-
--f "$src/vcl/source/gdi/outdev.cxx" || die "$src doesn't look like an OO.o source tree";
-
-verify_module_map ($src, \%module_map);
-
-print STDERR "packing source in $src to directory $dest\n";
-
-for my $a (keys %module_map) {
- print STDERR "\tpack $a ...";
- my $dest_tar = "$src-$a.tar.bz2";
- my $cmd = "tar $tar_opts --dereference -f $dest/$dest_tar ";
- my $newsrc = "$src-$a";
- symlink ($src, $newsrc ) if (! -l $newsrc);
-
- for my $b (@{$module_map{$a}}) {
- $cmd .= "$newsrc/$b " if (-e "$newsrc/$b");
- }
- if (-f "$dest/$dest_tar") {
- print "\twarning: $dest/$dest_tar exists, skipping\n";
- } else {
- system ($cmd) && die "Failed to execute '$cmd': $!";
- }
-
- $cmd = "cd $dest; md5sum $dest_tar > $dest_tar.md5";
- system ($cmd) && die "Failed to generate md5: '$cmd': $!";
- print STDERR " done\n";
-}
diff --git a/bin/stat-localizations b/bin/stat-localizations
deleted file mode 100755
index 0aa62f0b1..000000000
--- a/bin/stat-localizations
+++ /dev/null
@@ -1,95 +0,0 @@
-#! /bin/sh
-
-usage()
-{
- echo "This script counts how many strings of the given localization differs from"
- echo "the English(US) localization"
- echo
- echo "Usage:" ${0##*/} locale ...
- echo
- echo "Presumptions:"
- echo " - the module l10ntools is built"
- echo " - the profile *Env.Set* is sourced"
- echo
- echo "Note that the script is quite slow. It takes some minutes to extract strings"
- echo "for one localization..."
-}
-
-if test -z "$1" -o "$1" = "--help" ; then
- usage && exit 1;
-fi
-
-if ! which localize >/dev/null 2>&1 ; then
- echo "Error: Unable to find the script \"localize\". Please, build and deliver"
- echo " the module l10ntools and keep the \*Env.Set\* sourced."
- exit 1;
-fi
-
-extract_gsi()
-{
- echo "Extracting $1 strings..."
- localize -e -f "$2" -l "$1=$primary_lang" >/dev/null 2>&1
-}
-
-primary_lang="en-US"
-
-final_stat=`mktemp /tmp/ooo-stat-localizationse.XXXXXXXX`
-
-
-primary_gsi=`mktemp /tmp/ooo-stat-localizationse.XXXXXXXX`
-extract_gsi $primary_lang $primary_gsi
-primary_strings_ui=`mktemp /tmp/ooo-stat-localizationse.XXXXXXXX`
-grep -v "^helpcontent2" $primary_gsi | sort | cut -f 1,2,5,11 >$primary_strings_ui
-primary_strings_help=`mktemp /tmp/ooo-stat-localizationse.XXXXXXXX`
-grep "^helpcontent2" $primary_gsi | sort | cut -f 1,2,5,11 >$primary_strings_help
-
-primary_num_ui=`cat $primary_strings_ui | wc -l`
-primary_num_help=`cat $primary_strings_help | wc -l`
-
-
-for secondary_lang in $* ; do
-
- secondary_gsi=`mktemp /tmp/ooo-stat-localizationse.XXXXXXXX`
- cp $SRC_ROOT/l10n/source/$secondary_lang/localize.sdf $secondary_gsi
- secondary_strings_ui=`mktemp /tmp/ooo-stat-localizationse.XXXXXXXX`
- grep -v "^helpcontent2" $secondary_gsi | sort | cut -f 1,2,5,11 >$secondary_strings_ui
- secondary_strings_help=`mktemp /tmp/ooo-stat-localizationse.XXXXXXXX`
- grep "^helpcontent2" $secondary_gsi | sort | cut -f 1,2,5,11 >$secondary_strings_help
-
- echo "Counting $secondary_lang localized strings..."
-
- secondary_diff_num_ui=`diff $primary_strings_ui $secondary_strings_ui | grep "^>" | wc -l`
- secondary_diff_num_help=`diff $primary_strings_help $secondary_strings_help | grep "^>" | wc -l`
-
- #echo primary_num_ui=$primary_num_ui
- #echo secondary_diff_num_ui=$secondary_num_ui
-
- #echo primary_num_help=$primary_num_help
- #echo secondary_diff_num_help=$secondary_num_help
-
- localized_ui=$(($secondary_diff_num_ui * 100 / $primary_num_ui))
- localized_help=$(($secondary_diff_num_help * 100 / $primary_num_help))
-
- echo
- echo "Locale:$secondary_lang ui:${localized_ui}% help:${localized_help}%"
- echo
-
- #
- echo "Locale:$secondary_lang ui:${localized_ui}% help:${localized_help}%" >>$final_stat
-
- rm $secondary_strings_ui
- rm $secondary_strings_help
- rm $secondary_gsi
-done
-
-rm $primary_strings_ui
-rm $primary_strings_help
-rm $primary_gsi
-
-echo
-echo "==============================================================="
-echo "Status of localizations"
-echo "==============================================================="
-sort -n -t':' --key=4 $final_stat
-
-rm $final_stat
diff --git a/bin/stats.pl b/bin/stats.pl
deleted file mode 100755
index 141758198..000000000
--- a/bin/stats.pl
+++ /dev/null
@@ -1,178 +0,0 @@
-#!/usr/bin/perl -w
-
-# segment by Date, then by Product, then count
-my %data;
-my %products;
-
-# Analysing stats:
-#
-# grep for 'multi' - yields the Windows installer ... (also grep for 'all_lang') - all of them [!]
-# grep for 'Linux' and 'en-US' yields total Linux main binary downloads ...
-# grep for 'Mac' and 'en-US' yields total Mac main binary numbers ...
-
-while (<>) {
- chomp();
- my $line = $_;
-# print "line '$_'\n";
-# 17424 2011-01-25 LibO Win-x86 3.3.0 all_lang qa 1
- if ($line =~ m/^\s*(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s*$/) {
- my ($id, $date, $product, $osname, $version, $lang, $country, $count) = ($1, $2, $3, $4, $5, $6, $7, $8);
-# print "$count downloads on date $date, os $osname $lang\n";
-
- if ($date lt '2011-01-25') {
-# print STDERR "ignoring $date\n";
- next
- }
-
- my $clean_product;
-
- # Ignore Mac / Linux help packs etc.
- if (($osname =~ /Linux/ || $osname =~ /MacOS/) && $lang eq 'en-US') {
- $clean_product = $osname;
- }
- # Detect Windows distinctions
- if (($product eq 'LibO' || $product eq 'LO') &&
- $osname =~ /Win/ && ($lang =~ /multi/ || $lang =~ /all_lang/)) {
- $clean_product = "$osname-$lang";
- }
-
- # Detect PortableOffice distinctions
- if ($product eq 'LibreOfficePortable') {
- $clean_product = $product;
- }
-
- if (!defined $clean_product) {
-# print "uninteresting line '$line'\n";
- next;
- }
-
- $products{$clean_product} = 1;
- if (!defined $data{$date}) {
- my %byproduct;
- $data{$date} = \%byproduct;
- }
- if (!defined ($data{$date}->{$clean_product})) {
- $data{$date}->{$clean_product} = 0;
- }
- $data{$date}->{$clean_product} += $count;
-# print "count for '$date' and '$clean_product' == $data{$date}->{$clean_product} [ added $count ]\n";
- } else {
-# print STDERR "malformed line '$_'\n";
- }
-}
-
-# now output this as a spreadsheet ... fods ...
-print << "EOF"
-<?xml version="1.0" encoding="UTF-8"?>
-<office:document xmlns:office="urn:oasis:names:tc:opendocument:xmlns:office:1.0"
- xmlns:style="urn:oasis:names:tc:opendocument:xmlns:style:1.0"
- xmlns:text="urn:oasis:names:tc:opendocument:xmlns:text:1.0"
- xmlns:table="urn:oasis:names:tc:opendocument:xmlns:table:1.0"
- xmlns:draw="urn:oasis:names:tc:opendocument:xmlns:drawing:1.0"
- xmlns:fo="urn:oasis:names:tc:opendocument:xmlns:xsl-fo-compatible:1.0"
- xmlns:xlink="http://www.w3.org/1999/xlink"
- xmlns:dc="http://purl.org/dc/elements/1.1/"
- xmlns:meta="urn:oasis:names:tc:opendocument:xmlns:meta:1.0"
- xmlns:number="urn:oasis:names:tc:opendocument:xmlns:datastyle:1.0"
- xmlns:presentation="urn:oasis:names:tc:opendocument:xmlns:presentation:1.0"
- xmlns:svg="urn:oasis:names:tc:opendocument:xmlns:svg-compatible:1.0"
- xmlns:chart="urn:oasis:names:tc:opendocument:xmlns:chart:1.0"
- xmlns:dr3d="urn:oasis:names:tc:opendocument:xmlns:dr3d:1.0"
- xmlns:math="http://www.w3.org/1998/Math/MathML"
- xmlns:form="urn:oasis:names:tc:opendocument:xmlns:form:1.0"
- xmlns:script="urn:oasis:names:tc:opendocument:xmlns:script:1.0"
- xmlns:config="urn:oasis:names:tc:opendocument:xmlns:config:1.0"
- xmlns:ooo="http://openoffice.org/2004/office"
- xmlns:ooow="http://openoffice.org/2004/writer"
- xmlns:oooc="http://openoffice.org/2004/calc"
- xmlns:dom="http://www.w3.org/2001/xml-events"
- xmlns:xforms="http://www.w3.org/2002/xforms"
- xmlns:xsd="http://www.w3.org/2001/XMLSchema"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xmlns:rpt="http://openoffice.org/2005/report"
- xmlns:of="urn:oasis:names:tc:opendocument:xmlns:of:1.2"
- xmlns:xhtml="http://www.w3.org/1999/xhtml"
- xmlns:grddl="http://www.w3.org/2003/g/data-view#"
- xmlns:tableooo="http://openoffice.org/2009/table"
- xmlns:field="urn:openoffice:names:experimental:ooo-ms-interop:xmlns:field:1.0"
- xmlns:formx="urn:openoffice:names:experimental:ooxml-odf-interop:xmlns:form:1.0"
- xmlns:css3t="http://www.w3.org/TR/css3-text/"
- office:version="1.2"
- grddl:transformation="http://docs.oasis-open.org/office/1.2/xslt/odf2rdf.xsl"
- office:mimetype="application/vnd.oasis.opendocument.spreadsheet">
- <office:styles>
- <number:date-style style:name="isodatenum">
- <number:year number:style="long"/>
- <number:text>-</number:text>
- <number:month number:style="long"/>
- <number:text>-</number:text>
- <number:day number:style="long"/>
- </number:date-style>
- <style:style style:name="boldheader" style:family="table-cell" style:parent-style-name="Default">
- <style:text-properties fo:font-style="italic" fo:font-weight="bold"/>
- </style:style>
- <style:style style:name="isodate" style:family="table-cell" style:parent-style-name="Default" style:data-style-name="isodatenum"/>
- </office:styles>
- <office:body>
- <office:spreadsheet>
- <table:table table:name="RawData">
- <table:table-row>
- <table:table-cell table:style-name="boldheader" office:value-type="string">
- <text:p>Date</text:p>
- </table:table-cell>
-EOF
-;
-@prods = sort keys %products;
-for my $product (@prods) {
-print << "EOF"
- <table:table-cell table:style-name="boldheader" office:value-type="string">
- <text:p>$product</text:p>
- </table:table-cell>
-EOF
- ;
-}
-print << "EOF"
- <table:table-cell table:style-name="boldheader" office:value-type="string">
- <text:p>Total</text:p>
- </table:table-cell>
- </table:table-row>
-EOF
-;
-
-my $row = 1;
-
-# FIXME: ODF is -incredibly- lame in this regard ... we badly want R1C1 style referencing here [!]
-my $colcount = @prods;
-die ("odff needs R1C1") if ($colcount > 25);
-my $colname = chr (ord('A') + $colcount);
-print STDERR "cols: $colcount - colname $colname @prods\n";
-
-for my $date (sort keys %data) {
-print << "EOF"
- <table:table-row>
- <table:table-cell table:style-name="isodate" office:value-type="date" office:date-value="$date"/>
-EOF
-;
- for my $product (@prods) {
- my $count = $data{$date}->{$product};
- $count = 0 if (!defined $count);
-print << "EOF"
- <table:table-cell office:value-type="float" office:value="$count"/>
-EOF
-;
- }
- $row++;
-print << "EOF"
- <table:table-cell table:formula="of:=SUM([.B$row:.$colname$row])" office:value-type="float"/>
- </table:table-row>
-EOF
-;
-}
-
-print << "EOF"
- </table:table>
- </office:spreadsheet>
- </office:body>
-</office:document>
-EOF
-;
diff --git a/bin/strace-rewrite b/bin/strace-rewrite
deleted file mode 100755
index ca046a425..000000000
--- a/bin/strace-rewrite
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env perl
-
-# A trivial script to re-write strace -ttt output
-# to be more readable.
-
-my $first_time = '';
-my $last_time = '';
-
-while (<>) {
- if (m/(\d+)\s+([\d\.]+)\s+(.*)$/) {
- my ($pid, $time, $data) = ($1, $2, $3);
- if ($first_time eq '') {
- $first_time = $time;
- $last_time = $time;
- }
- my $new_elapsed = sprintf ("%06f", $time - $last_time);
- my $new_absolute = sprintf ("%02.5f", $time - $first_time);
-# print "Last '$last_time' Time '$time', New '$newtime'\n";
- print "$new_elapsed $new_absolute $data [$pid]\n";
- $last_time = $time;
- } else {
- print "Odd line '$_'\n";
- }
-}
diff --git a/bin/strip-guards b/bin/strip-guards
deleted file mode 100755
index 8f5825d2e..000000000
--- a/bin/strip-guards
+++ /dev/null
@@ -1,210 +0,0 @@
-#!/usr/bin/perl -w
-eval 'exec /usr/bin/perl -S $0 ${1+"$@"}'
- if 0; #$running_under_some_shell
-
-sub usage() {
- print "strip-guards /path/to/sources
-
-Changes
-
-#ifdef _BLAH_
-#include <blah.hxx>
-#endif
-
-to simple
-
-#include <blah.hxx>
-
-in .c and .cxx files if the _BLAH_ guard exists in the .hxx file.\n";
- exit 1;
-}
-
-use File::Find();
-
-%includes = ();
-
-sub guard_name($)
-{
- my ( $include_fname ) = @_;
-
- if ( !open( $IN, $include_fname ) ) {
- die "Couldn't open $include_fname";
- }
- while ( my $line = <$IN> ) {
- if ( $line =~ /^#\s*ifndef\s+([^\s\/]*)/ ||
- $line =~ /^#if\s+!defined\s+([^\s\/]*)/ ) {
- my $guard = $1;
- if ( defined ( $line = <$IN> ) ) {
- if ( ( $line =~ /^#\s*define\s+([^\s\/]*)/ ) && ( $guard eq $1 ) )
- {
- close $IN;
- return $guard;
- }
- else {
- last;
- }
- }
- }
- }
- close $IN;
-
- return "";
-}
-
-sub guards_include($$)
-{
- my ( $guard, $include ) = @_;
-
- if ( $include =~ /\.hpp$/ ) {
- my $inc = "_" . $include . "_";
- $inc =~ tr(./a-z)(__A-Z/);
- if ( $inc eq $guard ) {
- return 1;
- }
- }
-
- my $should_be = $includes{$guard};
- if ( defined( $should_be ) ) {
- my $inc = $include;
- $inc =~ s/.*[\/\\]//;
- if ( $inc eq $should_be ) {
- return 1;
- }
- }
-
- return 0;
-}
-
-sub strip_file($$)
-{
- my ( $in, $out ) = @_;
- if ( !open( $IN, $in ) ) {
- die "Couldn't open $in";
- }
- if ( !open( $OUT, '>', $out ) ) {
- die "Couldn't open $out";
- }
-
- my $first_include = 0;
- while ( my $line = <$IN> ) {
- my $buffer = $line;
- my $ok = 1;
-
- if ( $line =~ /^#\s*include/ ) {
- $first_include = 1;
- }
- if ( $first_include && ( $line =~ /^$/ ) ) {
- if ( defined( $line = <$IN> ) ) {
- $buffer .= $line;
- }
- else {
- $ok = 0;
- }
- }
- if ( $ok && ( $line =~ /^#\s*ifndef\s+([^\s\/]*)/ ) ) {
- if ( defined( $line = <$IN> ) ) {
- $buffer .= $line;
- }
- else {
- $ok = 0;
- }
-
- my $guard = $1;
- if ( $ok && ( $line =~ /^#\s*include\s+["<]([^">]*)/ ) ) {
- $first_include = 1;
- my $include_line = $line;
- my $include = $1;
-
- if ( defined( $line = <$IN> ) ) {
- $buffer .= $line;
- }
- else {
- $ok = 0;
- }
-
- if ( $ok && ( $line =~ /^#\s*endif/ ) && guards_include( $guard, $include ) ) {
- print $OUT $include_line;
- }
- else {
- $ok = 0;
- }
- }
- else {
- $ok = 0;
- }
- }
- else {
- $ok = 0;
- }
-
- if ( !$ok ) {
- print $OUT $buffer;
- }
- }
-
- close $OUT;
- close $IN;
-}
-
-sub find_guards
-{
- if ( /^.*\.h\z/s || /^.*\.hxx\z/s ) {
- my $guard = guard_name( $_ );
- if ( $guard ne "" ) {
- my $inc = $_;
- $inc =~ s/.*\///;
- if ( defined( $includes{$guard} ) && ( $includes{$guard} ne $inc ) ) {
- print STDERR "Warning: Redefinition of guard '$guard' in '$_' (first definition in '$includes{$guard}'\n";
- }
- $includes{$guard} = $inc;
- print STDERR "guard: $guard ($inc)\n";
- }
- }
-}
-
-sub strip_guards
-{
- if ( /^.*\.[ch]\z/s || /^.*\.[ch]xx\z/s ) {
- if ( rename( $_, "$_.strip-guards-orig" ) ) {
- print STDERR "stripping $_\n";
- strip_file( "$_.strip-guards-orig", $_ );
- }
- }
-}
-
-#
-# main()
-#
-my $path = shift( @ARGV );
-if ( !defined( $path ) ) {
- usage();
-}
-
-File::Find::find( {wanted => \&find_guards}, $path );
-
-# add a few common typos
-$includes{"_GEN_HXX"} = "gen.hxx";
-$includes{"_TOOLS_GEN_HXX"} = "gen.hxx";
-$includes{"_SV_H"} = "sv.h";
-$includes{"_VCL_KEYCOD_HXX"} = "keycod.hxx";
-$includes{"_VCL_FONT_HXX"} = "font.hxx";
-$includes{"_VCL_FONT_HXX"} = "font.hxx";
-$includes{"_SV_SALCTYPE_H"} = "salctype.hxx";
-$includes{"_TIME_HXX"} = "time.hxx";
-$includes{"_TIMER_HXX"} = "timer.hxx";
-$includes{"_TOOLS_STRING_HXX"} = "string.hxx";
-$includes{"_GFXLINK_HXX"} = "gfxlink.hxx";
-$includes{"_COLOR_HXX"} = "color.hxx";
-$includes{"_DEBUG_HXX"} = "debug.hxx";
-$includes{"_SVX_ACCESSIBILITY_ACCESSIBLE_COMPONENT_BASE_HXX"} = "AccessibleComponentBase.hxx";
-$includes{"_SD_ACCESSIBILITY_ACCESSIBLE_DOCUMENT_VIEW_BASE_HXX"} = "AccessibleDocumentViewBase.hxx";
-$includes{"_SVX_ACCESSIBILITY_ACCESSIBLE_IVIEW_FORWARDER_HXX"} = "IAccessibleViewForwarder.hxx";
-$includes{"_SVX_ACCESSIBILITY_ACCESSIBLE_IVIEW_FORWARDER_HXX"} = "IAccessibleViewForwarder.hxx";
-$includes{"_SFX_HRC"} = "sfx.hrc";
-$includes{"_SFXSIDS_HRC"} = "sfxsids.hrc";
-$includes{"_RTL_USTRING_H"} = "ustring.h";
-$includes{"_RTL_USTRING_"} = "ustring.hxx";
-$includes{"_RTL_OUSTRING_HXX_"} = "ustring.hxx";
-$includes{"_OSL_INTERLCK_H_"} = "interlck.h";
-
-File::Find::find( {wanted => \&strip_guards}, $path );
diff --git a/bin/svn-pack b/bin/svn-pack
deleted file mode 100755
index 59693889d..000000000
--- a/bin/svn-pack
+++ /dev/null
@@ -1,251 +0,0 @@
-#!/usr/bin/env perl
-
-use File::Temp qw/ tempfile /;
-
-# if this is set - we pack all the svn pieces
-# otherwise we unpack them ...
-my $pack = 0;
-
-# if unpacking - do we copy or use hard links
-my $do_link = 0;
-
-sub syntax($)
-{
- if ($pack) {
- print <<"EOF"
-svn-pack: <svn checkout path> [dest-archive]
- --help - show this help
-EOF
-;
- } else {
- print <<"EOF"
-svn-unpack: [args] [<path> (default . )]
- --link - use hard links instead of copies
- --help - show this help
-EOF
-;
- }
- die();
-}
-
-sub slurp_dir($)
-{
- my $path = shift;
- my @paths = ();
- my $dirh;
-
- opendir ($dirh, $path) || die "Can't open $path: $!";
- while (my $line = readdir ($dirh)) {
- ($line =~ m/^\.$/ || $line =~ m/^\.\.$/) && next;
- push @paths, $line;
- }
- close ($dirh);
-
- return @paths;
-}
-
-sub dump_svn_directory ($$);
-sub dump_svn_directory ($$)
-{
- my $path = shift;
- my $toplevel = shift;
- my @paths = ();
-
- for my $line (slurp_dir ($path)) {
- if ($toplevel) {
- $line eq 'text-base' && next;
- $line eq 'tmp' && next; # re-create on unpack
- }
- my $subpath = "$path/$line";
- push @paths, $subpath;
- -d $subpath && push @paths, dump_svn_directory ("$path/$line", 0);
- }
-
- return @paths;
-}
-
-sub find_svn($);
-sub find_svn($)
-{
- my $path = shift;
- my @paths = ();
-
- for my $line (slurp_dir ($path)) {
- my $subpath = "$path/$line";
-
- if ($line eq '.svn') {
- if ($pack) {
- push @paths, dump_svn_directory ($subpath, 1);
- } else {
- push @paths, $path;
- }
-
- } elsif (-d $subpath) {
- push @paths, find_svn ($subpath);
- }
- }
-
- return @paths;
-}
-
-sub make_text_base($$)
-{
- my ($src, $dest) = @_;
-# print "$src -> $dest\n";
- for my $file (slurp_dir ($src)) {
- my $from = "$src/$file";
- -d $from && next;
- my $to = "$dest/$file.svn-base";
- -f $to && next;
-
-# print "do $from -> $to\n";
- if (!$do_link || !link ($from, $to)) {
- `cp -a $from $to`;
- }
- }
-}
-
-sub setup_entry()
-{
- my %entry;
- my @lines = ();
-
- $entry{__lines} = \@lines;
- $entry{kind} = '';
- $entry{name} = '';
- $entry{should_write} = 0;
-
- return \%entry;
-}
-
-sub find_emit_entry($$$)
-{
- my ($en_out, $path, $entry) = @_;
- my $kind = $entry->{kind};
- my $name = $entry->{name};
- my $found = 0;
-
- if ($kind =~ m/^file$/i) {
- $found = -f "$path/$name";
- } elsif ($kind =~ m/^dir$/i) {
- $found = -d "$path/$name";
- } else {
- print STDERR "Unknown kind $kind on $name\n";
- }
- if ($found) {
- for my $line (@{$entry->{__lines}}) {
- print $en_out $line;
- }
- } else {
- print STDERR "entry '$name' removed\n";
- }
-
- return $found;
-}
-
-sub rewrite_entries($)
-{
- my $path = shift;
- my $entries = "$path/.svn/entries";
- my $entries_new = "$path/.svn/entries.new";
- my $en_in;
- my $en_out;
-
-# not a full XML parser - relies on the 'friendly' layout of .svn/entries
-
- open ($en_in, "$entries") || die "Can't open $entries: $!\n";
- open ($en_out, ">$entries_new") || die "Can't open $entries_new: $!\n";
- my $entry;
- my $rewritten;
- while (<$en_in>) {
- my $line = $_;
- if (! defined $entry) {
- if ($line =~ /<entry/) {
- $entry = setup_entry();
- push @{$entry->{__lines}}, $line;
- } else {
- print $en_out $_;
- }
- } else { # inside entry ...
- push @{$entry->{__lines}}, $line;
- if ($line =~ m/^\s*([\S-]+)=\"([^\"]+)\"/) {
- $entry->{$1} = $2;
-# print "Set '$1' to '$2'\n";
- }
- if ($line =~ m/\/>/) {
- $rewritten = 1 if (!find_emit_entry ($en_out, $path, $entry));
- $entry = undef;
- }
- }
- }
- close ($en_in) || die "Failed to close $en_in: $!";
- close ($en_out) || die "Failed to close $en_out: $!";
-
- rename ($entries_new, $entries) if ($rewritten);
- unlink ($entries_new) if (!$rewritten);
-}
-
-my $src;
-my $dest;
-
-my $app_name = $0;
-$app_name =~ s/^.*\/([^\/+])/$1/;
-$pack = $app_name eq 'svn-pack';
-
-print "packing mode\n" if ($pack);
-print "un-packing mode\n" if (!$pack);
-
-for my $arg (@ARGV) {
- if ($arg eq '--help' || $arg eq '-h') {
- syntax (0);
- } elsif ($arg eq '--link') {
- $do_link = 1;
- } elsif ($arg eq '--force-pack') {
- $pack = 1;
- } elsif ($arg eq '--force-unpack') {
- $pack = 0;
- } elsif (!defined $src) {
- $src = $arg;
- $src =~ s/\/$//;
- } elsif (!defined $dest) {
- $dest = $arg;
- } else {
- print "Too many arguments $arg\n";
- syntax (1);
- }
-}
-
-$src = '.' if (!defined $src && !$pack);
-
-syntax(1) if (!defined $src);
-
-my @files = find_svn ($src);
-
-if ($pack) {
- if (!defined $dest) {
- $dest = $src;
- $dest =~ s/^.*\/([^\/*])/$1/;
- $dest .= "-svn.tar.gz";
- }
- print "Pack $src to $dest\n";
- my ($fh, $filename) = tempfile( "svnpackXXXXXX" );
- for my $file (@files) {
- print $fh "$file\n";
- }
- close ($fh);
-
- print "Written file list to $filename\n";
- `tar czf $dest -T $filename`;
-
- unlink $filename;
- print "done.\n";
-
-} else {
- for my $from (@files) {
- mkdir ("$from/.svn/tmp"); # without which svn barfs
- mkdir ("$from/.svn/text-base");
- make_text_base ($from, "$from/.svn/text-base");
- rewrite_entries ($from);
- }
- `svn cleanup $src`
-}
diff --git a/bin/tag-latest-master b/bin/tag-latest-master
deleted file mode 100755
index 1539f378a..000000000
--- a/bin/tag-latest-master
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env perl
-
-# Generate a flat text file of tags from EIS/cvs
-
-my $DEFAULT_MASTER='SRC680';
-
-# Change this to the current RC branch
-my $RELEASE_MASTER='OOG680';
-
-print "# List of the last thre master workspaces\n";
-print "# <name> : <master-tag> : <master-tag> : <modules>\n";
-
-my $CvsRoot;
-
-my @latestmasters = ();
-`mkdir -p /tmp/cvstmp`;
-`cd /tmp/cvstmp && cvs -d :pserver:anoncvs\@anoncvs.services.openoffice.org:/cvs co -A solenv/inc/minor.mk`;
-open ($CvsRoot, 'cd /tmp/cvstmp && cvs -d :pserver:anoncvs@anoncvs.services.openoffice.org:/cvs status -v solenv/inc/minor.mk | awk \'/^\t'.$DEFAULT_MASTER.'/{print $1}\' | head -n 3 |') || return 0;
-while (<$CvsRoot>) {
- my $elem = $_;
- chomp $elem;
- push @latestmasters, $elem if ($elem ne '');
-}
-
-# Add release branch milestones
-open ($CvsRoot, 'cd /tmp/cvstmp && cvs -d :pserver:anoncvs@anoncvs.services.openoffice.org:/cvs status -v solenv/inc/minor.mk | awk \'/^\t'.$RELEASE_MASTER.'/{print $1}\' | head -n 2 |') || return 0;
-while (<$CvsRoot>) {
- my $elem = $_;
- chomp $elem;
- push @latestmasters, $elem if ($elem ne '');
-}
-
-
-my @modules = ();
-my $reading_modules = 0;
-open ($CvsRoot, "cvs -d :pserver:anoncvs\@anoncvs.services.openoffice.org:/cvs co -c 2>&1 |") || return 0;
-while (<$CvsRoot>) {
- $reading_modules = 0 if (/^\S/);
- $reading_modules = '1' if (/^OpenOffice2\s+\-a\s+/);
- if ($reading_modules) {
- chomp;
- s/^.*\-a//;
- for my $elem (split (/ +/, $_)) {
- push @modules, $elem if ($elem ne '');
- }
- }
-}
-close ($CvsRoot) || return 0;
-
-for my $mastermod (@latestmasters) {
- print "$mastermod : $mastermod : $mastermod : @modules\n";
-}
diff --git a/bin/tag-list b/bin/tag-list
deleted file mode 100755
index 9db4b882c..000000000
--- a/bin/tag-list
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/usr/bin/env perl
-
-# Generate a flat text file of tags from EIS
-
-my $DEFAULT_MASTER='SRC680';
-
-use Cws;
-use Eis;
-
-sub print_tags($$)
-{
- my $ft = shift;
- my $childws = shift;
-
- if ($ft ne 'STDOUT') {
- open(MYOUT, ">>$ft");
- } else {
- open MYOUT, ">&STDOUT" or die "Can't dup STDOUT: $!";
- }
-
- my $master = $DEFAULT_MASTER;
-
- my $eis = Cws::eis();
- $id = $eis->getChildWorkspaceId
- ( Eis::to_string($master),
- Eis::to_string($childws) );
- if (!$id) {
- print "# Invalid cws '$childws' / no EIS Id\n";
- return 1;
- }
-
-# skip non-public cws
- return 0 unless ($eis->isPublic($id));
-
- my @modules = @{$eis->getModules($id)};
- my $milestone = $eis->getMilestone($id);
-
- my $creation_master = $eis->getCreationMasterWorkspace($id);
- my $cws_branch_tag = 'cws_' . lc($creation_master) . '_' . lc($childws);
- my $master_milestone_tag = uc($master) . "_" . $milestone;
-
- print(MYOUT "$childws : $master_milestone_tag : $cws_branch_tag : @modules\n");
-
- return 0;
-}
-
-my $eis = Eis->new( uri => Cws::eis_uri(),
- proxy_list => Cws::eis_proxy_list(),
- net_proxy => Cws::net_proxy()
- );
-
-# State can be 'integrated', 'ready for QA', 'planned', 'new' (etc.?)
-sub get_cws_with_state($)
-{
- my $cws_list;
- my $state = shift;
-
- my $eis = Cws::eis();
-
- $cws_list = $eis->getCWSWithState( Eis::to_string( $DEFAULT_MASTER ),
- Eis::to_string( $state ) );
- if (!defined $cws_list) {
- return undef;
- }
-
- my @list = @{$cws_list};
-# print STDERR "CWS list [$DEFAULT_MASTER] : @list\n";
-
- return @list;
-}
-
-print "# List of cws' and modules to test/build\n";
-print "# <name> : <master-tag> : <cws-tag> : <modules>\n";
-
-my @ready_for_qa = get_cws_with_state( 'ready for QA' );
-if (!@ready_for_qa) {
- exit (1);
-}
-
-my @new_cws = get_cws_with_state( 'new' );
-if (!@new_cws) {
- exit (1);
-}
-
-for $cws (@ready_for_qa) {
- print_tags ('STDOUT', $cws) && exit (1);
-}
-
-for $cws (@new_cws) {
- print_tags ('/var/www/tinderbox/tags/tag-list-new.tmp', $cws) && exit (1);
-}
-
-exit (0);
diff --git a/bin/test-ooo b/bin/test-ooo
deleted file mode 100755
index 26baf8d78..000000000
--- a/bin/test-ooo
+++ /dev/null
@@ -1,151 +0,0 @@
-#!/bin/sh
-
-# This script helps testig OOo installation using the ooqatesttool,
-# http://qa.openoffice.org/qatesttool/index.html
-
-# IMPORTANT: This is an initial version. I plan to integrate it better with
-# ooo-build, see ooo-build/doc/test-ooo.txt for more details
-
-if test `uname -o` = "Cygwin" ; then
- export killall="/usr/bin/PsTools/pskill.exe"
- export testToolBin="/cygdrive/c/Program Files/OpenOffice.org 3/Basis/program/testtool.exe"
- # where the qatesttool is stored (testcases)
- export testToolRoot="c:/cygwin/home/Mick/OOo/qa/qatesttool/"
-elif test `uname -i` = "i386" ; then
- export killall="killall -9"
- export testToolBin="/usr/lib/ooo3/basis3.1/program/testtool.bin"
- export testToolRoot="/home/mick/OOo/qa/qatesttool"
-else
- export killall="killall -9"
- export testToolBin="/opt/ooo-dev/basis3.1/program/testtool.bin"
- export testToolRoot="/home/mick/OOo/qa/qatesttool"
-fi
-
-# all tests will be skipped until this script name is found
-# define empty string to do not skip any test
-SKIP_TO_TEST=
-#SKIP_TO_TEST=writer/loadsave/w_imp_bin.bas
-
-# helper scripts
-export testToolExitOfficeBas="$testToolRoot/global/tools/closeoffice.bas"
-export testToolResetOfficeBas="$testToolRoot/global/tools/resetoffice.bas"
-
-usage()
-{
- echo "This script start the qatesttool scripts from the given list"
- echo
- echo "Usage: ${0##*/} test.list [test.blacklist]"
- echo
- echo " test.list - list of test scripts to start"
- echo " test.blacklist - list of test scripts to skip"
- echo
- echo " Both files include list of paths to the test scripts, one path"
- echo " per line. They can also include comments prefixed by #"
- echo
- echo "Example of the file.list:"
- echo
- echo " # Tests for OOo-2.0.3"
- echo " framework/first/first.bas"
- echo " framework/first/topten.bas"
- echo " framework/level1/f_lvl1_loadsave.bas"
-}
-
-if test -z "$1" -o "$1" = "--help" -o $# -gt 2 ; then
- usage && exit 1;
-fi
-
-if ! which dos2unix >/dev/null 2>&1 ; then
- echo "Error: dos2unix utility is not installed"
- exit 1;
-fi
-
-# list of test to be started
-testList=
-if test ! -f "$1" ; then
- echo "Error: cannot read \"$1\", try --help" && exit 1;
-else
- testList=`cat "$1" | dos2unix | sed "s|\#.*$||"`
-fi
-
-# list of tests to be skipped
-testBlackList=
-if test -n "$2" ; then
- if test ! -f "$2" ; then
- echo "Error: cannot read \"$2\", try --help" && exit 1;
- else
- testBlackList=`cat "$2" | dos2unix | sed "s|\#.*$||"`
- fi
-fi
-
-echo "Switching to en_US.UTF-8 locales!!!"
-export LC_ALL=en_US.UTF-8
-
-echo "Exporting OOO_FORCE_SYSALLOC=1"
-export OOO_FORCE_SYSALLOC=1
-echo "Exporting MALLOC_CHECK_=2"
-export MALLOC_CHECK_=2
-
-is_blacklisted()
-{
- for t in $testBlackList ; do
- test "$1" = "$t" && return 0
- done
- return 1
-}
-
-run_test()
-{
- test="$1"
-
- "$testToolBin" -run "$test"
- sleep 5
- $killall testool.exe
- $killall testool.bin
- $killall soffice.exe
- $killall soffice.bin
- sleep 2
- "$testToolBin" -run "$testToolResetOfficeBas"
- sleep 5
- $killall testool.exe
- $killall testool.bin
- $killall soffice.exe
- $killall soffice.bin
- sleep 2
- rm ~/OOo/qa/logs/resetoffice.res
- "$testToolBin" -run "$testToolExitOfficeBas"
- sleep 5
- $killall testool.exe
- $killall testool.bin
- $killall soffice.exe
- $killall soffice.bin
- sleep 2
- rm ~/OOo/qa/logs/closeoffice.res
-}
-
-# will we skip any test?
-test -n "$SKIP_TO_TEST" && skip_tests=true || skip_tests=false
-
-# kill any runnign testool and OOo
-$killall testool.bin
-$killall testool.exe
-$killall soffice.bin
-$killall soffice.exe
-
-# reset OOo configuration
-run_test "$testToolResetOfficeBas"
-
-for test in $testList ; do
- if is_blacklisted $test ; then
- echo "Skipping blacklisted test $test..."
- continue;
- fi
-
- test "$test" = "$SKIP_TO_TEST" && skip_tests=false
-
- if $skip_tests ; then
- echo "Skipping test $test..."
- else
- echo "Starting test $test..."
- run_test "$testToolRoot/$test"
- fi
-done
diff --git a/bin/test-ooo-analyze b/bin/test-ooo-analyze
deleted file mode 100755
index bd1cf3b05..000000000
--- a/bin/test-ooo-analyze
+++ /dev/null
@@ -1,1195 +0,0 @@
-#!/usr/bin/perl
- eval 'exec /usr/bin/perl -S $0 ${1+"$@"}'
- if $running_under_some_shell;
-#!/usr/bin/perl
-
-use strict;
-
-# information about tests read from .res files
-# a hashs, key is the test name (res file name), value is:
-# a hash, the keys define a "perl like structure":
-# 'RunCount' ... integer, defines how many times the test was started
-# 'TestCase' ... a hash, key is the test case name, value is:
-# a hash: keys define a "perl like structure":
-# 'RunCount' ... integer
-# 'EntryType' ... a hash, keys define a "perl like structure":
-# 'Error' ... a hash, keys define a "perl like structure":
-# 'Warning' ... -------- // -------
-# 'Message' ... -------- // -------
-# 'Assertion' ... -------- // -------
-# 'QAError' ... -------- // -------
-# 'AssertStack' ... -------- // -------
-# 'CallStack' ... -------- // -------
-# 'Count' ... integer
-# 'EntryByID' ... a hash, the key is the entryId, value is
-# array of entry messages
-my %resData1 = ();
-my %resData2 = ();
-
-# User selected sorting order is configured via the following two global
-#variables:
-my $sortStatNumDown = 1; # numeric sorting order
-my @sortStatColumnsRequest = (); # might be used to redefine default sorting to do it by another columns
-
-# this global pointer is used to pass the selected sortStatColumns table to
-# the function SortStat
-my $sortStatColumnsp = 0;
-
-# list of all entries
-# it is used to translate integer to string
-my @EntryTypeName =
-(
- "TestScript",
- "TestCase",
- "Error",
- "CallStack",
- "Message",
- "Warning",
- "Assertion",
- "QAError",
- "AssertStack"
-);
-
-# list of entries that can be part of a test case
-my @TestCaseEntries =
-(
- "Error",
- "CallStack",
- "Message",
- "Warning",
- "Assertion",
- "QAError",
- "AssertStack"
-);
-
-# list of entries that are part of a test case and are important for
-# the Total counts statistic
-my @SummaryTestCaseEntries =
-(
- "Error",
- "Warning",
- "QAError"
-);
-
-
-# FIXME, could this be automatized?
-my %knownIDs = (
- '20000' => 'could not be executed',
- '20008' => 'server timeout while waiting for answer',
- '20009' => 'application has been restarted',
- '20010' => 'cannot be started',
- '20014' => 'XXX errors occured',
- '20016' => 'XXX warnings occured',
- '20023' => 'XXX warnings occured during initialization',
- '20018' => 'Slot/Control unknown',
- '22009' => 'Pop-up menu not open',
- '22014' => 'unknown method',
- '22015' => 'Invalid Parameters',
- '22018' => 'could not be found',
- '22019' => 'is not visible',
- '22020' => 'could not be accessed',
- '22021' => 'entry member XX is too large at Select. Max allowd is YY',
- '22022' => 'entry member XX is too small at Select. Min allowd is YY',
- '22030' => 'tab page not found at SetPage',
- '22035' => 'entry at select not found',
- '22038' => 'The button is disabled in ToolBox at Click',
- '22041' => 'TearOff failed in ToolBot at TeadOff',
- '22048' => 'There is no Cancel button at Cancel',
- '22049' => 'There is no Yes button at Yes',
- '22050' => 'There is no No button at No',
- '22063' => 'could not be run: Disabled'
- );
-
-###################################################################
-# subroutines that are used to load the data from .res files to
-# an internal structure
-
-sub AnalyzeResFile($$) {
-
- my ($resDatap, $resFile) = @_;
-
- open (RES_FILE, $resFile) || die "can't open $resFile for reading: $!\n";
-
- my $resFileName = $resFile;
- $resFileName =~ s/.*\///;
- $resFileName =~ s/.res$//;
-
-# printf "Analyzing $resFile ...\n";
-
- unless ($resDatap->{$resFileName}) {
- $resDatap->{$resFileName} = {};
- $resDatap->{$resFileName}{'RunCount'} = 0;
- $resDatap->{$resFileName}{'TestCase'} = {};
- }
-
- my $knownFileFormat = 0;
- my $testCase = 'unknown';
-
- while (my $line = <RES_FILE>) {
- chomp $line;
-
- if ($line =~ /^File Format Version: 3/) {
- $knownFileFormat=1;
- next;
- }
-
- if ($line =~ /^([0-9]);([^;]*);([^;]*);([^;]*);([^;]*);\s*\"([^\"]*)\"/) {
- my $type = "$1";
- my $message = "$6";
-
- if ($type == 0) {
- # test runned
- ++$resDatap->{$resFileName}{'RunCount'};
- $testCase = 'unknown';
- } elsif ($type == 1) {
- # test case runned
- if ("$message" eq '%ResId=20002%') {
- $testCase = "Reading_the_files";
- } elsif ("$message" eq '%ResId=21001%') {
- $testCase = "Outside_of_testcase";
- } elsif ("$message" eq '') {
- # ugly entry, we want to ingnore it at all
- next;
- } else {
- $testCase = $message;
- $testCase =~ s/^([^\(]+)\(.*$/$1/; # bin parameters
- }
-
- unless ($resDatap->{$resFileName}{'TestCase'}{$testCase}) {
- $resDatap->{$resFileName}{'TestCase'}{$testCase} = {};
-
- $resDatap->{$resFileName}{'TestCase'}{$testCase}{'RunCount'} = 0;
- $resDatap->{$resFileName}{'TestCase'}{$testCase}{'EntryType'} = {};
- for my $entryTypeName ( @TestCaseEntries ) {
- $resDatap->{$resFileName}{'TestCase'}{$testCase}{'EntryType'}{$entryTypeName} = {};
- $resDatap->{$resFileName}{'TestCase'}{$testCase}{'EntryType'}{$entryTypeName}{'Count'} = 0;
- $resDatap->{$resFileName}{'TestCase'}{$testCase}{'EntryType'}{$entryTypeName}{'EntryByID'} = {};
- }
- }
-
- ++$resDatap->{$resFileName}{'TestCase'}{$testCase}{'RunCount'};
-# print "TestCase: $testCase\n";
- } elsif ( $type > 1 && $type <= 8 ) {
- # a message a part of a test case
- my $entryTypeName = $EntryTypeName[$type];
-
- # quess problem type
- my $entryID = "unknown";
-# print "$message\n";
- if ($message =~ /\%ResId=([0-9]*)/) {
- $entryID = "$1";
- }
-
- # skip some special entries
- if ($type == 5) {
- # skip warnings about number of warnings, erros, etc.
- if ( $entryID == 20014 ||
- $entryID == 20016 ||
- $entryID == 20023 ) {
- next;
- }
- }
-
- unless ($resDatap->{$resFileName}{'TestCase'}{$testCase}{'EntryType'}{$entryTypeName}{'EntryByID'}{$entryID}) {
- $resDatap->{$resFileName}{'TestCase'}{$testCase}{'EntryType'}{$entryTypeName}{'EntryByID'}{$entryID} = [];
- }
- push @{ $resDatap->{$resFileName}{'TestCase'}{$testCase}{'EntryType'}{$entryTypeName}{'EntryByID'}{$entryID} }, $message;
-
- ++$resDatap->{$resFileName}{'TestCase'}{$testCase}{'EntryType'}{$entryTypeName}{'Count'};
- } else {
- die "Error: $resFile:$.: unknown entry\n";
- }
- } else {
- die "Error: $resFile:$.: broken file format\n";
- }
- }
-
- close (RES_FILE);
-
- ($knownFileFormat) || die "Error: $resFile: Unknown file format\n";
-}
-
-sub AnalyzeDir($$) {
- my ($resDatap, $resDir) = @_;
- my $resDirh;
-
- opendir($resDirh, $resDir) || die "Can't open $resDir: $!";
- while (my $resFile = readdir ($resDirh)) {
- $resFile =~ /^\./ && next; # hidden
- $resFile =~ /\.res$/ || next; # non-res
-
-# print "Analyzing $resDir/$resFile ...\n";
- AnalyzeResFile($resDatap, "$resDir/$resFile");
- }
- closedir($resDirh);
-}
-
-########################################################################
-# The following subroutines counts/extracts a specific information
-# that is necessary in more types of statistics
-
-sub CountTestScripts($)
-{
- my ($resDatap) = @_;
- my $count = 0;
-
- for my $testScript (keys %{$resDatap}) {
- $count += $resDatap->{$testScript}{'RunCount'};
- }
-
- return $count;
-}
-
-sub CountTestCases($)
-{
- my ($resDatap, $resFile) = @_;
- my $count = 0;
-
- for my $testScript (keys %{$resDatap}) {
- for my $testCase (keys % {$resDatap->{$testScript}{'TestCase'}} ) {
- $count += $resDatap->{$testScript}{'TestCase'}{$testCase}{'RunCount'};
- }
- }
-
- return $count;
-}
-
-sub CountEntriesForEntry($$) {
- my ($resDatap, $entryTypeName) = @_;
-
- unless (grep /^$entryTypeName$/i, @TestCaseEntries) {
- die "Internal error: Function StatEntryIDsForEntry called with wrong parameter.\n" .
- "Should use of: @TestCaseEntries\n";
- }
-
- my $count = 0;
-
- for my $testScript (keys %{$resDatap}) {
- for my $testCase (keys % {$resDatap->{$testScript}{'TestCase'}} ) {
- $count += $resDatap->{$testScript}{'TestCase'}{$testCase}{'EntryType'}{$entryTypeName}{'Count'};
- }
- }
-
- return $count;
-}
-
-sub CountEntriesForEntryAndEntryID($$$)
-{
- my ($resDatap, $entryTypeName, $entryID) = @_;
- my $count = 0;
-
- for my $testScript (keys %{$resDatap}) {
- for my $testCase (keys % {$resDatap->{$testScript}{'TestCase'}} ) {
- if ($resDatap->{$testScript}{'TestCase'}{$testCase}{'EntryType'}{$entryTypeName}{'EntryByID'}{$entryID}) {
- $count += scalar @ {$resDatap->{$testScript}{'TestCase'}{$testCase}{'EntryType'}{$entryTypeName}{'EntryByID'}{$entryID}};
- }
- }
- }
-
- return $count;
-}
-
-sub CountEntriesForTestScriptAndEntry($$$)
-{
- my ($resDatap, $testScript, $entryTypeName) = @_;
-
- my $count = 0;
- if ($resDatap->{$testScript}) {
- for my $testCase (keys % {$resDatap->{$testScript}{'TestCase'}} ) {
- $count += $resDatap->{$testScript}{'TestCase'}{$testCase}{'EntryType'}{$entryTypeName}{'Count'};
- }
- }
- return $count;
-}
-
-sub CountEntriesForTestScriptEntryAndEntryID($$$$)
-{
- my ($resDatap, $testScript, $entryTypeName, $entryID) = @_;
-
- my $count = 0;
- if ($resDatap->{$testScript}) {
- for my $testCase (keys % {$resDatap->{$testScript}{'TestCase'}} ) {
- if ($resDatap->{$testScript}{'TestCase'}{$testCase}{'EntryType'}{$entryTypeName}{'EntryByID'}{$entryID}) {
- $count += scalar @ {$resDatap->{$testScript}{'TestCase'}{$testCase}{'EntryType'}{$entryTypeName}{'EntryByID'}{$entryID}};
- }
- }
- }
- return $count;
-}
-
-sub FindEntryIDsForEntry($$)
-{
- my ($resDatap, $entryTypeName) = @_;
-
- my %entryIDs = ();
- for my $testScript (keys %{$resDatap}) {
- for my $testCase (keys % {$resDatap->{$testScript}{'TestCase'}} ) {
- for my $entryID ( keys % {$resDatap->{$testScript}{'TestCase'}{$testCase}{'EntryType'}{$entryTypeName}{'EntryByID'}}) {
- $entryIDs{$entryID} = 1;
- }
- }
- }
- return keys %entryIDs;
-}
-
-########################################################################
-# The following subroutines fill the tables with particular statistics
-# about test results
-
-sub ListAll($$)
-{
- my ($resDatap, $datap) = @_;
-
- push @{$datap}, ["TestScript", CountTestScripts($resDatap)];
- push @{$datap}, ["TestCase", CountTestCases($resDatap)];
- foreach my $entry (@SummaryTestCaseEntries) {
- push @{$datap}, [$entry, CountEntriesForEntry($resDatap, $entry)];
- }
-}
-
-sub ListTestScripts($$)
-{
- my ($resDatap, $datap) = @_;
-
- for my $testScript (keys %{$resDatap}) {
- push @{$datap}, [$testScript];
- }
-}
-
-sub ListTestScriptsScriptsForEntry($$$)
-{
- my ($resDatap, $entryTypeName, $datap) = @_;
-
- for my $testScript (keys %{$resDatap}) {
- my $count = CountEntriesForTestScriptAndEntry($resDatap, $testScript, $entryTypeName);
- if ($count) {
- push @{$datap}, [$testScript, $count];
- }
- }
-}
-
-sub ListTestScriptsForEntryAndEntryID($$$$)
-{
- my ($resDatap, $entryTypeName, $entryID, $datap) = @_;
-
- for my $testScript (keys %{$resDatap}) {
- my $count = CountEntriesForTestScriptEntryAndEntryID($resDatap, $testScript, $entryTypeName, $entryID);
- if ($count) {
- push @{$datap}, [$testScript, $count];
- }
- }
-}
-
-sub ListTestCases($$)
-{
- my ($resDatap, $datap) = @_;
-
- for my $testScript (keys %{$resDatap}) {
- for my $testCase (keys % {$resDatap->{$testScript}{'TestCase'}} ) {
- push @{$datap}, [$testScript,$testCase];
- }
- }
-}
-
-sub ListTestCasesforEntry($$$)
-{
- my ($resDatap, $entryTypeName, $datap) = @_;
-
- for my $testScript (keys %{$resDatap}) {
- for my $testCase (keys % {$resDatap->{$testScript}{'TestCase'}} ) {
- for my $entryID (keys % {$resDatap->{$testScript}{'TestCase'}{$testCase}{'EntryType'}{$entryTypeName}{'EntryByID'}} ) {
- my $entryIDCount = scalar @ {$resDatap->{$testScript}{'TestCase'}{$testCase}{'EntryType'}{$entryTypeName}{'EntryByID'}{$entryID}};
- push @{$datap}, [$testScript, $testCase, $entryID, $entryIDCount];
- }
- }
- }
-}
-
-sub ListEntryIDsForEntry($$$)
-{
- my ($resDatap, $entryTypeName, $datap) = @_;
-
- for my $entryID (FindEntryIDsForEntry($resDatap, $entryTypeName)) {
- my $count = CountEntriesForEntryAndEntryID($resDatap, $entryTypeName, $entryID);
- my $idNote = $knownIDs{$entryID} ? $knownIDs{$entryID} : "";
- push @{$datap}, [$entryID, $count, $idNote];
- }
-}
-
-########################################################################
-# The following subroutines fill the tables with particular statistics
-# about comparsion (diff) of two test results
-#
-# All of them (except ListDiffAll) return only positive values. Then they
-# are called twice with switched resData structures. This way we can get
-# information also about removed test scripts/test cases/entries.
-
-sub ListDiffAll($$$)
-{
- my ($resDatap1, $resDatap2, $datap) = @_;
-
- push @{$datap}, ["TestScript", CountTestScripts($resDatap2) - CountTestScripts($resDatap1)];
- push @{$datap}, ["TestCase", CountTestCases($resDatap2) - CountTestCases($resDatap1)];
- foreach my $entry (@SummaryTestCaseEntries) {
- push @{$datap}, [$entry, CountEntriesForEntry($resDatap2, $entry) - CountEntriesForEntry($resDatap1, $entry)];
- }
-}
-
-sub ListNewTestScripts($$$$)
-{
- # $comment is used to modify the output in case we do the inverted search
- # for removed entries
- my ($resDatap1, $resDatap2, $comment, $datap) = @_;
-
- for my $testScript (keys %{$resDatap2}) {
- unless ($resDatap1->{$testScript}) {
- push @{$datap}, [$testScript, $comment];
- }
- }
-}
-
-sub ListNewTestScriptsWithEntry($$$$$$)
-{
- # $sign, $commentp are used to modify the output in case we do inverted
- # search for removed entries
- my ($resDatap1, $resDatap2, $entryTypeName, $sign, $commentp, $datap) = @_;
-
- for my $testScript (keys %{$resDatap2}) {
- my $count1 = CountEntriesForTestScriptAndEntry($resDatap1, $testScript, $entryTypeName);
- my $count2 = CountEntriesForTestScriptAndEntry($resDatap2, $testScript, $entryTypeName);
- my $countDiff = $count2 - $count1;
- if ($countDiff > 0) {
- # old entry status:
- # 0 = test script not found in old res files
- # 1 = number of entries increased
- my $oldTestStatus = 0;
- if ($resDatap1->{$testScript}) {
- $oldTestStatus = 1;
- }
-
- push @{$datap}, [$testScript, $sign * $countDiff, $commentp->[$oldTestStatus]];
- }
- }
-}
-
-sub ListNewTestScriptsForEntryAndEntryID($$$$$$$)
-{
- # $sign, $commentp are used to modify the output in case we do inverted
- # search for removed entries
- my ($resDatap1, $resDatap2, $entryTypeName, $entryID, $sign, $commentp, $datap) = @_;
-
- for my $testScript (keys %{$resDatap2}) {
- my $count1 = CountEntriesForTestScriptEntryAndEntryID($resDatap1, $testScript, $entryTypeName, $entryID);
- my $count2 = CountEntriesForTestScriptEntryAndEntryID($resDatap2, $testScript, $entryTypeName, $entryID);
- my $countDiff = $count2 - $count1;
- if ($countDiff > 0) {
- # old entry status:
- # 0 = test script not found in old res files
- # 1 = test script found in old res files
- my $oldTestStatus = 0;
- if ($resDatap1->{$testScript}) {
- $oldTestStatus = 1;
- }
- push @{$datap}, [$testScript, $sign * $countDiff, $commentp->[$oldTestStatus]];
- }
- }
-}
-
-sub ListNewTestCases($$$$)
-{
- # $commentp is used to modify the output in case we do inverted search for
- # removed entries
- my ($resDatap1, $resDatap2, $commentp, $datap) = @_;
-
- for my $testScript (keys %{$resDatap2}) {
- for my $testCase (keys % {$resDatap2->{$testScript}{'TestCase'}} ) {
- # old entry status:
- # 0 = test script not found in old res files
- # 1 = test script found in old res files
- my $oldTestStatus = 0;
- my $testCaseFound = 0;
- if ($resDatap1->{$testScript}) {
- $oldTestStatus = 1;
- # we must not acess
- if ($resDatap1->{$testScript}{'TestCase'}{$testCase}) {
- $testCaseFound = 1;
- }
- }
- unless ($testCaseFound) {
- push @{$datap}, [$testScript,$testCase, $commentp->[$oldTestStatus]];
- }
- }
- }
-}
-
-sub ListNewEntry($$$$$$)
-{
- # $sign, $commentp are used to modify the output in case we do inverted
- # search for removed entries
- my ($resDatap1, $resDatap2, $entryTypeName, $sign, $commentp, $datap) = @_;
-
- for my $testScript (keys %{$resDatap2}) {
- for my $testCase (keys % {$resDatap2->{$testScript}{'TestCase'}} ) {
-# print "Error: $testScript:$testCase:\n";
- for my $entryID (keys % {$resDatap2->{$testScript}{'TestCase'}{$testCase}{'EntryType'}{$entryTypeName}{'EntryByID'}} ) {
- my $entryIDCountDiff = scalar @ {$resDatap2->{$testScript}{'TestCase'}{$testCase}{'EntryType'}{$entryTypeName}{'EntryByID'}{$entryID}};
- # old entry status:
- # 0 = test script not found in old res files
- # 1 = test case not started in old res files
- # 2 = entryID not found in old res files
- # 3 = number of entries increased
- my $oldTestStatus = 0;
- if ($resDatap1->{$testScript}) {
- $oldTestStatus = 1;
- if ($resDatap1->{$testScript}{'TestCase'}{$testCase}) {
- $oldTestStatus = 2;
- if ($resDatap1->{$testScript}{'TestCase'}{$testCase}{'EntryType'}{$entryTypeName}{'EntryByID'}{$entryID}) {
- $oldTestStatus = 3;
- $entryIDCountDiff -= scalar @ {$resDatap1->{$testScript}{'TestCase'}{$testCase}{'EntryType'}{$entryTypeName}{'EntryByID'}{$entryID}};
- }
- }
- }
- if ($entryIDCountDiff > 0) {
- push @{$datap}, [$testScript,$testCase,$entryID,$sign * $entryIDCountDiff,$commentp->[$oldTestStatus]];
- }
- }
- }
- }
-}
-
-sub ListNewEntryIDsForEntry($$$$$$)
-{
- # $sign, $commentp are used to modify the output in case we do inverted
- # search for removed entries
- my ($resDatap1, $resDatap2, $entryTypeName, $sign, $commentp, $datap) = @_;
-
- for my $entryID (FindEntryIDsForEntry($resDatap2, $entryTypeName)) {
- my $count1 = CountEntriesForEntryAndEntryID($resDatap1, $entryTypeName, $entryID);
- my $count2 = CountEntriesForEntryAndEntryID($resDatap2, $entryTypeName, $entryID);
- my $countDiff = $count2 - $count1;
- if ($countDiff > 0) {
- # old entry status:
- # 0 = entry id not found in old res files
- # 1 = entry id found in old res files
- my $oldTestStatus = 0;
- if ($count1) {
- $oldTestStatus = 1;
- }
- my $idNote = $knownIDs{$entryID} ? $knownIDs{$entryID} : "";
- push @{$datap}, [$entryID, $sign * $countDiff, $commentp->[$oldTestStatus], $idNote];
- }
- }
-}
-
-###########################################################################
-# universal subroutines that are used to print the various tables
-
-sub SortStat
-{
- my $sortStatColumnsCur = 0;
- my $sortStatColumnsMax = scalar @ {$sortStatColumnsp};
- my $i = 0;
- my $result = 0;
-
- while (!$result && $sortStatColumnsCur < $sortStatColumnsMax) {
- $i = $sortStatColumnsp->[$sortStatColumnsCur++];
- if ($sortStatNumDown) {
- $result = $b->[$i] <=> $a->[$i];
- } else {
- $result = $a->[$i] <=> $b->[$i];
- }
- unless ($result) {
- $result = $a->[$i] cmp $b->[$i];
- }
- }
- return $result;
-}
-
-sub SelectSorting($$)
-{
- my ($datap, $sortStatColumnsDefaultp) = @_;
-
- if (scalar @sortStatColumnsRequest) {
- # filtered list of columns; it does not include column indexes that
- # are not supported by the current datap
- my @sortStatColumnsRequestValid = ();
- # at least one row has to be available to detect the count of columns
- my $columnsCount = 0;
- if ($datap->[0]) {
- $columnsCount = scalar @ {$datap->[0]};
- }
- foreach my $column (@sortStatColumnsRequest) {
- if ($column >= 0 && $column < $columnsCount) {
- push @sortStatColumnsRequestValid, $column;
- }
- }
- return \@sortStatColumnsRequestValid
- } else {
- # the sort order is not selected by the user, so the default one will
- # be used and it needs not be filtered
- return $sortStatColumnsDefaultp
- }
-}
-
-sub PrintStat($$$$$$$)
-{
- my ($titleFmt, $headFmt, $dataFmt, $titlep, $headp, $datap, $sortDefp) = @_;
-
- printf "$titleFmt\n", @{$titlep};
- printf "------------------------------------------------------------------------------\n";
- printf "$headFmt\n", @{$headp};
- printf "------------------------------------------------------------------------------\n";
-
- $sortStatColumnsp = SelectSorting($datap, $sortDefp);
-
- for my $linep ( sort SortStat @{$datap}) {
- printf "$dataFmt\n", @{$linep};
- }
-}
-
-##############################################################################
-# the following subroutines collect data for the particural statisctics,
-# defines how the output should look like and print the data
-
-sub StatAll($)
-{
- my ($resDatap) = @_;
-
- my $titleFmt = "%s";
- my $headFmt = "%-15.14s%-6s";
- my $dataFmt = "%-15.14s%6d";
-
- my @title = "All tests summary:";
- my @head = ("Item", "Count");
- my @data = ();
-
- my @sortDef = ();
-
- ListAll($resDatap, \@data);
-
- PrintStat($titleFmt, $headFmt, $dataFmt, \@title, \@head, \@data, \@sortDef);
-}
-
-sub StatTestScripts($)
-{
- my ($resDatap) = @_;
-
- my $titleFmt = "%s";
- my $headFmt = "%-20.19s";
- my $dataFmt = "%-20.19s";
-
- my @title = "List of the started test scripts:";
- my @head = ("Test Script");
- my @data = ();
-
- my @sortDef = (0);
-
- ListTestScripts($resDatap, \@data);
-
- PrintStat($titleFmt, $headFmt, $dataFmt, \@title, \@head, \@data, \@sortDef);
-}
-
-sub StatTestScriptsForEntry($$)
-{
- my ($resDatap, $entryTypeName) = @_;
-
- my $titleFmt = "%s";
- my $headFmt = "%-20.19s%-6s";
- my $dataFmt = "%-20.19s%6d";
-
- my @title = "List of the started test scripts with $entryTypeName, counts $entryTypeName:";
- my @head = ("Test Script", "Count");
- my @data = ();
-
- my @sortDef = (1);
-
- ListTestScriptsScriptsForEntry($resDatap, $entryTypeName, \@data);
-
- PrintStat($titleFmt, $headFmt, $dataFmt, \@title, \@head, \@data, \@sortDef);
-}
-
-sub StatTestScriptsForEntryAndEntryID($$$)
-{
- my ($resDatap, $entryTypeName, $entryID) = @_;
-
- my $titleFmt = "%s";
- my $headFmt = "%-20.19s%-6s";
- my $dataFmt = "%-20.19s%6d";
-
- my @title = "List of test cases with $entryTypeName, id $entryID, counts this $entryTypeName:";
- my @head = ("Test Script", "Count");
- my @data = ();
-
- my @sortDef = (1);
-
- ListTestScriptsForEntryAndEntryID($resDatap, $entryTypeName, $entryID, \@data);
-
- PrintStat($titleFmt, $headFmt, $dataFmt, \@title, \@head, \@data, \@sortDef);
-}
-
-sub StatTestCases($)
-{
- my ($resDatap) = @_;
-
- my $titleFmt = "%s";
- my $headFmt = "%-20.19s%-26.25s";
- my $dataFmt = "%-20.19s%-26.25s";
-
- my @title = "List of the started test cases";
- my @head = ("Test Script", "Test Case");
- my @data = ();
-
- my @sortDef = (0,1);
-
- my @newTestCases = ListTestCases($resDatap, \@data);
-
- PrintStat($titleFmt, $headFmt, $dataFmt, \@title, \@head, \@data, \@sortDef);
-}
-
-sub StatTestCasesForEntry($$) {
- my ($resDatap, $entryTypeName) = @_;
-
- my $titleFmt = "%s";
- my $headFmt = "%-20.19s%-26.25s%-10.9s%-6s";
- my $dataFmt = "%-20.19s%-26.25s%-10.9s%6d";
-
- my @title = "List of test cases with $entryTypeName, counts $entryTypeName:";
- my @head = ("Test Script", "Test Case", "Entry ID", "Count");
- my @data = ();
-
- my @sortDef = (3,0,1,2);
-
- my @newTestCases = ListTestCasesforEntry($resDatap, $entryTypeName, \@data);
-
- PrintStat($titleFmt, $headFmt, $dataFmt, \@title, \@head, \@data, \@sortDef);
-}
-
-sub StatEntryIDsForEntry($$)
-{
- my ($resDatap, $entryTypeName) = @_;
-
-
- my $titleFmt = "%s";
- my $headFmt = "%-10.9s%-6s %.60s";
- my $dataFmt = "%-10.9s%6d %.60s";
-
- my @title = "List of $entryTypeName entry IDs:";
- my @head = ("Entry ID", "Count", "Entry ID explanation");
- my @data = ();
-
- my @sortDef = (1);
-
- ListEntryIDsForEntry($resDatap, $entryTypeName, \@data);
-
- PrintStat($titleFmt, $headFmt, $dataFmt, \@title, \@head, \@data, \@sortDef);
-}
-
-############################
-# (diff)
-
-sub DiffAll($$)
-{
- my ($resDatap1, $resDatap2) = @_;
-
- my $titleFmt = "%s";
- my $headFmt = "%-15.14s%-6s";
- my $dataFmt = "%-15.14s%+6d";
-
- my @title = "All tests summary (diff):";
- my @head = ("Item", "Count");
- my @data = ();
-
- my @sortDef = ();
-
- ListDiffAll($resDatap1, $resDatap2, \@data);
-
- PrintStat($titleFmt, $headFmt, $dataFmt, \@title, \@head, \@data, \@sortDef);
-}
-
-sub DiffTestScripts($$)
-{
- my ($resDatap1, $resDatap2) = @_;
-
- my $titleFmt = "%s";
- my $headFmt = "%-20.19s%-10.9s";
- my $dataFmt = "%-20.19s%-10.9s";
-
- my @title = "List of the started test scripts (diff):";
- my @head = ("Test Script", "Comment");
- my @data = ();
-
- my @sortDef = (0);
-
- ListNewTestScripts($resDatap1, $resDatap2, "only new", \@data);
- ListNewTestScripts($resDatap2, $resDatap1, "only old", \@data);
-
- PrintStat($titleFmt, $headFmt, $dataFmt, \@title, \@head, \@data, \@sortDef);
-}
-
-sub DiffTestScriptsForEntry($$$)
-{
- my ($resDatap1, $resDatap2, $entryTypeName) = @_;
-
- my $titleFmt = "%s";
- my $headFmt = "%-20.19s%-6s %-15.14s";
- my $dataFmt = "%-20.19s%+6d %-15.14s";
-
- my @title = "List of the started test scripts with $entryTypeName, counts $entryTypeName (diff):";
- my @head = ("Test Script", "Count", "Comment");
- my @data = ();
-
- my @sortDef = (1,0);
-
- my @newComment = (
- "only new",
- "count changed"
- );
-
- my @oldComment = (
- "only old",
- "count changed"
- );
-
- ListNewTestScriptsWithEntry($resDatap1, $resDatap2, $entryTypeName, +1, \@newComment, \@data);
- ListNewTestScriptsWithEntry($resDatap2, $resDatap1, $entryTypeName, -1, \@oldComment, \@data);
-
- PrintStat($titleFmt, $headFmt, $dataFmt, \@title, \@head, \@data, \@sortDef);
-}
-
-sub DiffTestScriptsForEntryAndEntryID($$$$)
-{
- my ($resDatap1, $resDatap2, $entryTypeName, $entryID) = @_;
-
- my $titleFmt = "%s";
- my $headFmt = "%-20.19s%-6s%-15.14s";
- my $dataFmt = "%-20.19s%-6d%-15.14s";
-
- my @title = "List of test cases with $entryTypeName, id $entryID, counts this $entryTypeName (diff):";
- my @head = ("Test Script", "Count", "Comment");
- my @data = ();
-
- my @sortDef = (1,0);
-
- my @newComment = (
- "new test",
- "count changed"
- );
-
- my @oldComment = (
- "skip test",
- "count changed"
- );
-
- ListNewTestScriptsForEntryAndEntryID($resDatap1, $resDatap2, $entryTypeName, $entryID, +1, \@newComment, \@data);
- ListNewTestScriptsForEntryAndEntryID($resDatap2, $resDatap1, $entryTypeName, $entryID, -1, \@oldComment, \@data);
-
- PrintStat($titleFmt, $headFmt, $dataFmt, \@title, \@head, \@data, \@sortDef);
-}
-
-sub DiffTestCases($$)
-{
- my ($resDatap1, $resDatap2) = @_;
-
- my $titleFmt = "%s";
- my $headFmt = "%-20.19s%-26.25s%-16.15s";
- my $dataFmt = "%-20.19s%-26.25s%-16.15s";
-
- my @title = "List of the started test cases (diff):";
- my @head = ("Test Script", "Test Case", "Comment");
- my @data = ();
-
- my @sortDef = (2,0,1);
-
- my @newComment = (
- "only new t.scr.",
- "only new t.case"
- );
-
- my @oldComment = (
- "only old t.scr.",
- "only old t.case",
- );
-
- ListNewTestCases($resDatap1, $resDatap2, \@newComment, \@data);
- ListNewTestCases($resDatap2, $resDatap1, \@oldComment, \@data);
-
- PrintStat($titleFmt, $headFmt, $dataFmt, \@title, \@head, \@data, \@sortDef);
-}
-
-sub DiffTestCasesForEntry($$$)
-{
- my ($resDatap1, $resDatap2, $entryTypeName) = @_;
-
- my $titleFmt = "%s";
- my $headFmt = "%-20.19s%-26.25s%-10.9s%-6s %.40s";
- my $dataFmt = "%-20.19s%-26.25s%-10.9s%+6d %.40s";
-
- my @title = "List of test cases with $entryTypeName, counts $entryTypeName (difference):";
- my @head = ("Test Script", "Test Case", "Entry ID", "Count", "Comment");
- my @data = ();
-
- my @sortDef = (3,0,1);
-
- my @newComment = (
- "only new t.scr.",
- "only new t.case",
- "ID only in new",
- "count changed"
- );
-
- my @oldComment = (
- "only old t.scr.",
- "only old t.case",
- "ID just in old",
- "count changed"
- );
-
- ListNewEntry($resDatap1, $resDatap2, $entryTypeName, +1, \@newComment, \@data);
- ListNewEntry($resDatap2, $resDatap1, $entryTypeName, -1, \@oldComment, \@data);
-
- PrintStat($titleFmt, $headFmt, $dataFmt, \@title, \@head, \@data, \@sortDef);
-}
-
-sub DiffEntryIDsForEntry($$$)
-{
- my ($resDatap1, $resDatap2, $entryTypeName) = @_;
-
- my $titleFmt = "%s";
- my $headFmt = "%-10.9s%-6s %-15.14s%.40s";
- my $dataFmt = "%-10.9s%+6d %-15.14s%.40s";
-
- my @title = "List of $entryTypeName entry IDs (diff):";
- my @head = ("Entry ID", "Count", "Comment", "Entry ID explanation");
- my @data = ();
-
- my @sortDef = (1,0);
-
- my @newComment = (
- "only in new",
- "count changed"
- );
-
- my @oldComment = (
- "only in old",
- "count changed"
- );
-
- ListNewEntryIDsForEntry($resDatap1, $resDatap2, $entryTypeName, +1, \@newComment, \@data);
- ListNewEntryIDsForEntry($resDatap2, $resDatap1, $entryTypeName, -1, \@oldComment, \@data);
-
- PrintStat($titleFmt, $headFmt, $dataFmt, \@title, \@head, \@data, \@sortDef);
-}
-
-########################################################################
-# help
-
-sub Usage()
-{
- print "This tool prints statistic from .res files genereated by qatesttool\n\n" .
-
- "Usage:\n".
- "\ttest-ooo-analyze [--help] [--diff] [stat-type-sw] [--entry=<entry>]\n" .
- "\t[--entry-id=<id>] [--sort=[column[,column]...] [--revert] res...\n\n" .
-
- "Options:\n" .
- "\t--help: prints this help\n\n" .
-
- "\t--diff: compares statistic of two test tool results. Exactly two .res\n" .
- "\t\tfiles or exactly two directories has to be defined by the res...\n".
- "\t\tparameter\n\n" .
-
- "\tstat-type-sw: defines statistic type, see below\n\n" .
-
- "\t--entry=<entry>: almost all statistics gives more detailed output if a\n" .
- "\t\tparticular entry is defined. The possible values are: Error,\n" .
- "\t\tWarning, QAError, Message, CallStack, Assertion, AssertStack.\n\n".
-
- "\t--entry-id=<id>: filters the statistic output to count only entries\n" .
- "\t\twith given id. It must be used together with --entry=<entry>.\n" .
- "\t\tIt actually works only with --stat-test-case.\n\n" .
-
- "\t--sort=[column[,column]...: redefines the default sorting; the value\n" .
- "\t\tis a comma separated list of column numbers, For example, the \n" .
- "\t\toutput is sorted by the 1st and 2nd column with --sort=1,2.\n" .
- "\t--reverse: reverse the sort order of numeric values\n\n" .
-
- "\tres... : list of res files or directories with res files\n" .
- "\tres1, res2: two res files or direcotries to be compared\n\n" .
-
- "Types of statistic:\n".
- "\t--stat-all: statistic of total numbers (default one)\n" .
- "\t--stat-test-script: statictic by test scripts\n" .
- "\t--stat-test-case: statictic by test cases\n" .
- "\t--stat-entry-id: statictic by the given entry IDs\n";
-}
-
-
-#######################################################################
-#######################################################################
-# MAIN
-#######################################################################
-#######################################################################
-
-my $op = 'stat';
-my $stat_type = "all";
-my $entry = '';
-my $entryID = '';
-my @resPaths = ();
-
-foreach my $a (@ARGV) {
- if ($a eq '--diff' ||
- $a eq '--help') {
- $op = $a;
- $op =~ s/^\-\-//;
- } elsif ($a eq '--stat-all' ||
- $a eq '--stat-test-script' ||
- $a eq '--stat-test-case' ||
- $a eq '--stat-entry-id') {
- $stat_type = $a;
- $stat_type =~ s/^\-\-stat\-//;
- } elsif ($a =~ m/--entry=(.*)/) {
- $entry = $1;
- unless (grep /^$entry$/i, @TestCaseEntries) {
- my $tmp = join "\n\t\t", @TestCaseEntries;
- die "Error: unknown entry: \"$entry\". Possible values are:\n" .
- "\t\t$tmp\n";
- }
- } elsif ($a =~ m/--entry-id=(.*)/) {
- $entryID = $1;
- } elsif ($a =~ m/--sort=([0-9,]*)/) {
- @sortStatColumnsRequest = split(",", $1);
- # let the user count the columns from 1
- for my $i (0 .. scalar @sortStatColumnsRequest) {
- --$sortStatColumnsRequest[$i];
- }
- } elsif ($a =~ m/--reverse/) {
- $sortStatNumDown = 0;
- } elsif (-f $a ||
- -d $a) {
- push @resPaths, $a;
- } else {
- die "Error: Unknown parameter \"$a\", try --help.\n";
- }
-}
-
-#print "op = $op\n";
-#print "stat_type = $stat_type\n";
-#print "entry = $entry\n";
-#print "resPaths = @resPaths\n";
-
-# FIXME: Is there a nicer solution for this huge if/elsif/else game,
-# so that the following check can be on a more appropriate place?
-if ($entryID && $stat_type ne 'test-script') {
- die "Error: --entry-id is currently supported only with --stat-test-script\n";
-}
-
-if ($op eq 'help') {
-
- Usage();
-
-} elsif ($op eq 'stat') {
-
- # pure statisctics
-
- (scalar @resPaths) || die "Error: No res path defined, try --help\n";
-
- # load .res files
- for my $path (@resPaths) {
- if (-f $path) {
- AnalyzeResFile(\%resData1, $path);
- } elsif (-d $path) {
- AnalyzeDir(\%resData1, $path);
- } else {
- die "Error: \"$path\" is neither directory nor file\n";
- }
- }
-
- # do the selected statistic
- if ($entry) {
- if ($stat_type eq 'all') {
- die "Sorry, this statistic does not exist. Try without the option --entry.\n";
- } elsif ($stat_type eq 'test-script') {
- if ($entryID) {
- StatTestScriptsForEntryAndEntryID(\%resData1, $entry, $entryID);
- } else {
- StatTestScriptsForEntry(\%resData1, $entry);
- }
- } elsif ($stat_type eq 'test-case') {
- StatTestCasesForEntry(\%resData1, $entry);
- } elsif ($stat_type eq 'entry-id') {
- StatEntryIDsForEntry(\%resData1, $entry);
- } else {
- die "Internal error: unknown statisitc type: $stat_type\n";
- }
- } else {
- if ($stat_type eq 'all') {
- StatAll(\%resData1);
- } elsif ($stat_type eq 'test-script') {
- StatTestScripts(\%resData1);
- } elsif ($stat_type eq 'test-case') {
- StatTestCases(\%resData1);
- } elsif ($stat_type eq 'entry-id') {
- die "Error: Sorry, this statistic has not be implmented yet\n" .
- " Try with the --entry option\n";
- } else {
- die "Internal error: unknown statisitc type: $stat_type\n";
- }
- }
-
-} elsif ($op = 'diff') {
-
- # diff of two statisctics
-
- (scalar @resPaths == 2) || die "Error: Wrong number of paths. You must define exactly two files or\n" .
- " directories as the res1, res2 parameters.\n";
-
- # load .res files
- if (-f $resPaths[0] && -f $resPaths[1] ) {
- AnalyzeResFile(\%resData1, $resPaths[0]);
- AnalyzeResFile(\%resData2, $resPaths[0]);
- } elsif (-d $resPaths[0] && -d $resPaths[0]) {
- AnalyzeDir(\%resData1, $resPaths[0]);
- AnalyzeDir(\%resData2, $resPaths[1]);
- } else {
- die "Error: Wrong type of paths. This operation works only if you define exactly two files or two\n" .
- " or two direcotries as the res1, res2 parameters.\n";
- }
-
-
- # do the selected statistic
- if ($entry) {
- if ($stat_type eq 'all') {
- die "Sorry, this statistic does not exist. Try without the option --entry.\n";
- } elsif ($stat_type eq 'test-script') {
- if ($entryID) {
- DiffTestScriptsForEntryAndEntryID(\%resData1, \%resData2, $entry, $entryID);
- } else {
- DiffTestScriptsForEntry(\%resData1, \%resData2, $entry);
- }
- } elsif ($stat_type eq 'test-case') {
- DiffTestCasesForEntry(\%resData1, \%resData2, $entry);
- } elsif ($stat_type eq 'entry-id') {
- DiffEntryIDsForEntry(\%resData1, \%resData2, $entry);
- } else {
- die "Internal error: unknown statistic type: $stat_type\n";
- }
- } else {
- if ($stat_type eq 'all') {
- DiffAll(\%resData1, \%resData2);
- } elsif ($stat_type eq 'test-script') {
- DiffTestScripts(\%resData1, \%resData2);
- } elsif ($stat_type eq 'test-case') {
- DiffTestCases(\%resData1, \%resData2);
- } elsif ($stat_type eq 'entry-id') {
- die "Error: Sorry, this statistic has not be implmented yet\n" .
- " Try with the --entry option\n";
- } else {
- die "Internal error: unknown statistic type: $stat_type\n";
- }
- }
-
-} else {
-
- die "Internal error: unknown operation: $op\n";
-
-}
diff --git a/bin/timeout3 b/bin/timeout3
deleted file mode 100755
index 9e2625363..000000000
--- a/bin/timeout3
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/usr/bin/env bash
-#
-# The Bash shell script executes a command with a time-out.
-# Upon time-out expiration SIGTERM (15) is sent to the process. If the signal
-# is blocked, then the subsequent SIGKILL (9) terminates it.
-#
-# Based on the Bash documentation example.
-
-# Hello Chet,
-# please find attached a "little easier" :-) to comprehend
-# time-out example. If you find it suitable, feel free to include
-# anywhere: the very same logic as in the original examples/scripts, a
-# little more transparent implementation to my taste.
-#
-# Dmitry V Golovashkin <Dmitry.Golovashkin@sas.com>
-#
-# http://www.bashcookbook.com/bashinfo/source/bash-4.0/examples/scripts/timeout3
-
-scriptName="${0##*/}"
-
-declare -i DEFAULT_TIMEOUT=9
-declare -i DEFAULT_INTERVAL=1
-declare -i DEFAULT_DELAY=1
-
-# Timeout.
-declare -i timeout=DEFAULT_TIMEOUT
-# Interval between checks if the process is still alive.
-declare -i interval=DEFAULT_INTERVAL
-# Delay between posting the SIGTERM signal and destroying the process by SIGKILL.
-declare -i delay=DEFAULT_DELAY
-
-function printUsage() {
- cat <<EOF
-
-Synopsis
- $scriptName [-t timeout] [-i interval] [-d delay] command
- Execute a command with a time-out.
- Upon time-out expiration SIGTERM (15) is sent to the process. If SIGTERM
- signal is blocked, then the subsequent SIGKILL (9) terminates it.
-
- -t timeout
- Number of seconds to wait for command completion.
- Default value: $DEFAULT_TIMEOUT seconds.
-
- -i interval
- Interval between checks if the process is still alive.
- Positive integer, default value: $DEFAULT_INTERVAL seconds.
-
- -d delay
- Delay between posting the SIGTERM signal and destroying the
- process by SIGKILL. Default value: $DEFAULT_DELAY seconds.
-
-As of today, Bash does not support floating point arithmetic (sleep does),
-therefore all delay/time values must be integers.
-EOF
-}
-
-# Options.
-while getopts ":t:i:d:" option; do
- case "$option" in
- t) timeout=$OPTARG ;;
- i) interval=$OPTARG ;;
- d) delay=$OPTARG ;;
- *) printUsage; exit 1 ;;
- esac
-done
-shift $((OPTIND - 1))
-
-# $# should be at least 1 (the command to execute), however it may be strictly
-# greater than 1 if the command itself has options.
-if (($# == 0 || interval <= 0)); then
- printUsage
- exit 1
-fi
-
-# kill -0 pid Exit code indicates if a signal may be sent to $pid process.
-(
- ((t = timeout))
-
- while ((t > 0)); do
- sleep $interval
- kill -0 $$ || exit 0
- ((t -= interval))
- done
-
- # Be nice, post SIGTERM first.
- # The 'exit 0' below will be executed if any preceeding command fails.
- kill -s SIGTERM $$ && kill -0 $$ || exit 0
- sleep $delay
- kill -s SIGKILL $$
-) 2> /dev/null &
-
-exec "$@"
diff --git a/bin/tinbuild b/bin/tinbuild
deleted file mode 100755
index f752a8f4d..000000000
--- a/bin/tinbuild
+++ /dev/null
@@ -1,207 +0,0 @@
-#!/bin/sh
-
-export LC_ALL="C"
-
-## default config
-PAUSE_SECONDS=$((15*60))
-HTML_OUTPUT=0
-SEND_MAIL=0
-NICE=
-OWNER=
-MACHINE=`uname`
-
-## subroutines
-usage ()
-{
- echo "Usage: $0 [options]"
- echo "Options:"
- echo "-w <N> specify timeout in secs between subsequent pull requests"
- echo "-i when given: run with ionice -c3, can be combined with -n"
- echo "-l when given: write html build log"
- echo "-m <owner> when given: mail errors to the committer, general errors to the <owner>"
- echo "-n when given: run with nice, can be combined with -i"
- echo "-h this help"
-}
-
-getCommitters ()
-{
- from_date="$1"
-
- ( git log '--pretty=tformat:%ce' --after="$from_date" ;
- bin/g -s log '--pretty=tformat:%ce' --after="$from_date" ) | sort | uniq
-}
-
-reportError ()
-{
- error_kind="$1"
- shift
-
- commit=`git log '--pretty=format:%h %cn - %s' -n1`
-
- to_mail=
- if test "$SEND_MAIL" -eq 1; then
- case "$error_kind" in
- owner) to_mail="$OWNER"
- message="box broken" ;;
- *) if test -z "$last_success" ; then
- # we need at least one successful build to
- # be reliable
- to_mail="$OWNER"
- else
- to_mail=`getCommitters "$last_success"`
- fi
- message="last commit: $commit" ;;
- esac
- fi
-
- echo "$*" 1>&2
- echo "Last commit: $commit" 1>&2
- if test -n "$to_mail" ; then
- subject="Tinderbox failure, $message"
- (
- cat <<EOF
-Hi folks,
-
-One of you broke the build of LibreOffice master with your commit :-(
-Please commit and push a fix ASAP!
-
-Tinderbox info:
-
- `uname -a`
-`grep '\$ \.\/configure' config.log`
-
-Commits since the last success:
-
- ===== build =====
-`git log '--pretty=tformat:%h %s' --after="$last_success" | sed 's/^/ /'`
-
-`bin/g log '--pretty=tformat:%h %s' --after="$last_success" | sed 's/^/ /'`
-
-The error is:
-
-$*
-EOF
- ) | mail -r "$OWNER" -s "$subject" $to_mail
- else
- echo "$*" 1>&2
- if test "$error_kind" = "owner" ; then
- exit 1
- fi
- fi
-}
-
-gitHeads ()
-{
- git rev-parse --verify HEAD
- bin/g rev-parse --verify HEAD
-}
-
-## code
-while getopts hilm:nw: opt ; do
- case "$opt" in
- w) PAUSE_SECONDS="$OPTARG" ;;
- i) NICE="$NICE ionice -c3" ;;
- l) HTML_OUTPUT=1 ;;
- m) SEND_MAIL=1 ; OWNER="$OPTARG" ;;
- n) NICE="$NICE nice" ;;
- h) usage; exit ;;
- ?) usage; exit ;;
- esac
-done
-
-# sanity checks
-which git > /dev/null 2>&1 || {
- echo "You need git for this!"
- exit 1
-}
-
-if test "$SEND_MAIL" -eq 1 ; then
- if test -z "$OWNER" ; then
- echo "Owner not set."
- exit 1
- fi
- if test "$OWNER" = `echo ${OWNER} | sed 's/\@/_/g'` ; then
- echo "The owner address '$OWNER' does not look like an email address."
- exit 1
- fi
-fi
-
-# for build.pl
-test $HTML_OUTPUT -eq 1 && export BUILD_HTML_OUTPUT=true
-
-# main
-prev_success=""
-last_success=""
-while true; do
- err_msg=""
-
- # cleanup
- $NICE make clean-local
-
- # FIXME we need a more exact way to determine a 'good' date; this is not accurate
- if [ "X$MACHINE" == "XDarwin" ] ; then
- rough_checkout_date="`date -u '+%Y-%m-%d %H:%M:%S'`"
- else
- rough_checkout_date="`date --utc --rfc-3339=seconds`"
- fi
- # try to build
- if ! err_msg=$(./autogen.sh 2>&1); then
- reportError committer `printf "autogen.sh / configure failed - error is:\n\n$err_msg"`
- elif ! err_msg=$(./download 2>&1); then
- reportError committer `printf "download failed - error is:\n\n$err_msg"`
- elif ! $NICE make >build.log 2>&1 ; then
- reportError committer `printf "building failed - error is:\n\n"` "$(tail -n100 build.log)"
- elif ! $NICE make smoketest > smoketest.log 2>&1 ; then
- reportError committer `printf "smoketest failed - error is:\n\n"` "$(tail -n100 smoketest.log)"
- elif ! $NICE make dev-install >install.log 2>&1 ; then
- reportError committer `printf "dev installation failed - error is:\n\n"` "$(tail -n100 install.log)"
- else
- echo build done
- cp build.log build/
- cp smoketest.log build/
- cp install.log build/
- gitHeads > build/git-heads.txt
-
- prev_success=$last_success
- last_success=$rough_checkout_date
- mv build "build-$last_success" ||
- {
- msg=
- test -z "$prev_success" || msg="Leaving 'build-$prev_success' around, exiting now"
- reportError owner `printf "Cannot move 'build' to 'build-$last_success'\n\n$msg"`
- }
-
- if test -n "$prev_success"; then
- rm -rf "build-$prev_success"
- fi
- fi
-
- if [ -d build ] ; then
- mv build.log build/
- mv smoketest.log build/
- mv install.log build/
- rm -rf build.failed
- mv build build.failed
- fi
-
- echo updating repo
-
- # new commits?
- while true; do
- old_head=$(gitHeads) && err_msg=$(git pull -r 2>&1) && err_msg2=$(bin/g pull -r 2>&1) ||
- {
- msg=
- test -z "$prev_success" || msg="Leaving 'build-$prev_success' around, exiting now"
-
- reportError owner `printf "git repo broken - error is:\n\n$err_msg\n$err_msg2\n$msg"`
- }
- new_head=$(gitHeads)
-
- if test "$old_head" != "$new_head"; then
- break
- fi
-
- echo "Waiting $PAUSE_SECONDS seconds."
- sleep $PAUSE_SECONDS
- done
-done
diff --git a/bin/unopkg-regenerate-cache b/bin/unopkg-regenerate-cache
deleted file mode 100755
index 496f92044..000000000
--- a/bin/unopkg-regenerate-cache
+++ /dev/null
@@ -1,103 +0,0 @@
-#!/bin/sh
-
-usage()
-{
- echo "This script regerenates the system OOo UNO package cache"
- echo
- echo "Usage: ${0##*/} [--help] [--force] ooo_home [broken_extension...]"
- echo
- echo "Options:"
- echo
- echo " --help this help"
- echo " --force regenerate the cache without checking against the list of broken extensions"
- echo " ooo_home path where OOo is installed, e.g. /usr/lib/ooo3"
- echo " broken_extension list of broken extensions, e.g. LanguageTool-0.9.3.oxt"
-}
-
-force=
-ooo_home=
-ooo_broken_extensions=
-while test -n "$1" ; do
- case "$1" in
- --help)
- usage
- exit 0;
- ;;
- --force)
- force=1
- ;;
- -*)
- echo "Error: unknown option: $1"
- exit 1;
- ;;
- *)
- if test -z "$ooo_home" ; then
- ooo_home="$1"
- if ! test -d "$ooo_home" ; then
- echo "Error: the OOo home direcotry does not exists: $ooo_home"
- exit 1;
- fi
- else
- ooo_broken_extensions="$ooo_broken_extensions $1"
- fi
- ;;
- esac
- shift
-done
-
-if test -z "$ooo_home" ; then
- echo "Error: Please define the ooo_home, try --help"
- exit 1;
-fi
-
-ooo_uno_cache="$ooo_home/share/uno_packages/cache"
-
-# nothing to do if the cache does not exist (fresh installation or so)
-test -d "$ooo_uno_cache" || exit 0;
-
-# check for broken extensions
-found=
-if test -z "$force" ; then
- for extension in $ooo_uno_cache/uno_packages/*/* ; do
- extension_name=`basename $extension`
- if echo "$ooo_broken_extensions" | grep -q "$extension_name" ; then
- echo "Need to regenerate the uno cache because of the broken extension $extension_name."
- found=1
- break
- fi
- done
-fi
-
-if test -n "$force" -o -n "$found" ; then
- # saving sources of the registered extensions
- temp_cache=`mktemp -d $ooo_uno_cache.XXXXXX`
- for extension in $ooo_uno_cache/uno_packages/*/* ; do
- extension_name=`basename $extension`
- echo "Saving $extension_name..."
- if test -f $extension ; then
- cp $extension $temp_cache || exit 1;
- elif test -d $extension && cd $extension ; then
- zip -q -r $temp_cache/$extension_name * || exit 1;
- cd - >/dev/null 2>&1
- fi
- done
-
- echo "Removing the cache"
- rm -rf $ooo_uno_cache/*
-
- # registering the good extensions once again
- for extension in $temp_cache/* ; do
- extension_name=`basename $extension`
- if echo "$ooo_broken_extensions" | grep -q "$extension_name" ; then
- echo "Skipping unusable $extension_name..."
- continue;
- fi
- if test ! -f $extension ; then
- echo "Error: is not a file: $extension"
- continue;
- fi
- echo "Registering $extension_name..."
- unopkg add --shared --force $extension || true
- done
- rm -rf $temp_cache
-fi
diff --git a/bin/unpack b/bin/unpack
deleted file mode 100755
index 3c12df24e..000000000
--- a/bin/unpack
+++ /dev/null
@@ -1,853 +0,0 @@
-#!/bin/sh
-
-if test -d ../clone ; then
- echo "Due to change in the build structure, you must run autogen and ./download again"
- exit 1;
-fi
-#
-# See setup for user tweakables.
-#
-. ./setup
-
-# source type DOWNLOAD is the same you get with CGIT, just downloaded from
-# somewhere else
-if test "$SOURCE_TYPE" = "DOWNLOAD" ; then
- SOURCE_TYPE=CGIT
-fi
-
-if (echo "testing\c"; echo 1,2,3) | grep c >/dev/null; then
- if (echo -n testing; echo 1,2,3) | sed s/-n/xn/ | grep xn >/dev/null; then
- echo_n=echo echo_c=
- else
- echo_n="echo -n" echo_c=
- fi
-else
- echo_n=echo echo_c='\c'
-fi
-
-#"# don't confuse vim's syntax highlighting ;-)
-
-check_tarball()
-{
- if test "z$PIECE" != "z"; then
- echo "skipping looking for $1 ...";
- else
- $echo_n "Looking for $1 ... $echo_c";
- if test -f $1; then
- echo "ok"
- else
- echo "missing the $1 archive; run './download'"
- exit 1;
- fi
- fi
-}
-
-check_git_repo()
-{
- if test "$1" = "bootstrap" ; then
- tree="$RAWBUILDDIR"
- else
- tree="$CLONEDIR/$1"
- fi
- $echo_n "Checking for current branch of $tree ... $echo_c";
- BUILD_BRANCH=`git symbolic-ref HEAD|sed 's|.*/||'`
- if cd "$tree" >/dev/null 2>&1 ; then
- CURRENT_BRANCH=`git symbolic-ref HEAD|sed 's|.*/||'`
- if test "$CURRENT_BRANCH" = "$BUILD_BRANCH"; then
- echo "ok"
- else
- echo "warning, current branch is $CURRENT_BRANCH which is different from $BUILD_BRANCH"
- fi
- else
- echo "missing the $tree clone; run './download'"
- exit 1;
- fi
-}
-
-check_file()
-{
- $echo_n "Looking for $1 ... $echo_c";
- if test -f $1; then
- echo "ok"
- else
- echo "missing required archive; run ./download'";
- exit 1;
- fi
-}
-
-mkdir -p $SRCDIR
-cd $SRCDIR
-
-echo "Checking for source packages in $SRCDIR";
-if test "z$SYSTEM_GCC" = "z"; then
- check_tarball $GCC_TARBALL
- if test "z$BINUTILS_TARBALL" != "z"; then
- check_tarball $BINUTILS_TARBALL
- fi
-fi
-
-# this will become the 'normal' way to do an all-through build name is misleading
-ALL_CORE_PKGS="sdk ure base calc help extras writer impress artwork filters testing bootstrap libs-gui libs-core libs-extern libs-extern-sys components postprocess"
-if test "$ENABLE_EXTENSIONS" = "YES"
-then
- ALL_CORE_PKGS="$ALL_CORE_PKGS extensions"
-fi
-if test "z$OOO_GIT" = "z" ; then
- if test "z$SPLIT" = "zyes" ; then
- if test "z$PIECE" != "z"; then
- CORE_PKGS=$PIECE
- else
- CORE_PKGS=$ALL_CORE_PKGS
- fi
- if test "z$OOO_LANGS" != "z" -a "z$OOO_LANGS" != "zen-US" -a "z$PIECE" = "z" ; then
- CORE_PKGS="$CORE_PKGS translations"
- fi
- for pkg in $CORE_PKGS; do
- check_tarball "$OOO_SPLIT_PREFIX$pkg-$OOO_SOURCEVERSION.tar.bz2"
- done
- # this is the legacy source splitting
- else
- CORE_PKGS=
- check_tarball $OOO_TARBALL
- fi
-else
- if test "z$PIECE" != "z"; then
- CORE_PKGS=$PIECE
- else
- CORE_PKGS=$ALL_CORE_PKGS
- fi
- if test \( "z$OOO_LANGS" != "z" -a "z$OOO_LANGS" != "zen-US" -a "z$PIECE" = "z" \) -o "z$BUILD_WIN32" != "z"; then
- CORE_PKGS="$CORE_PKGS translations"
- fi
- cd $RAWBUILDDIR
- for pkg in $CORE_PKGS ; do
- check_git_repo $pkg
- done
-fi
-
-mkdir -p $BUILDDIR
-cd $BUILDDIR
-
-if test "z$BUILD_WIN32" != "z"; then
- case "$DISTRO" in
- NovellWin32*|GoOoWin32|Raw*)
- ;;
- *)
- check_file $UNICOWS_SRC
- ;;
- esac
- check_file $DBGINST_SRC
-
- # Comment out the Mozilla source related bits for now
- #check_file $SRCDIR/$BUILD_MOZILLA_SOURCE
- #check_file $SRCDIR/glib-1.2.10.tar.gz
- #check_file $SRCDIR/glib-1.2.10-ooo.patch
- #check_file $SRCDIR/libIDL-0.6.8.tar.gz
- #check_file $SRCDIR/libIDL-0.6.8-ooo.patch
- #check_file $SRCDIR/wintools.zip
-
- # Prebuilt Mozilla bits
- check_file $SRCDIR/WNTMSCIinc.zip
- check_file $SRCDIR/WNTMSCIlib.zip
- check_file $SRCDIR/WNTMSCIruntime.zip
-
- # CRT80 that the prebuilt Mozilla needs and the
- # installer-builder wants to put in the installer
- for F in msvcr80.dll msvcp80.dll Microsoft.VC80.CRT.manifest; do
- $echo_n "Looking for $SRCDIR/$F ... $echo_c"
- if test -f $SRCDIR/$F; then
- echo "ok"
- else
- echo "missing"
- echo "You can find it in MSVS 2008, in VC/redist/x86/Microsoft.VC80.CRT."
- echo "It is needed by the prebuilt Mozilla binaries that you should use."
- exit 1
- fi
- done
-
- $echo_n "Looking for $SRCDIR/gdiplus.dll ... $echo_c"
- if test -f $SRCDIR/gdiplus.dll; then
- echo "ok"
- else
- echo "missing"
- echo "Get it from the Microsoft site and put it into $SRCDIR."
- echo "You may have to search Microsoft's website."
- echo "Last time it was seen at: http://www.microsoft.com/downloads/details.aspx?familyid=6A63AB9C-DF12-4D41-933C-BE590FEAA05A&displaylang=en"
- echo "Note that the download requires Genuine Windows validation"
- echo "and can't easily be automated."
- exit 1
- fi
-
- $echo_n "Looking for $SRCDIR/vcredist_x86.exe ... $echo_c"
- if test -f $SRCDIR/vcredist_x86.exe; then
- echo "ok"
- else
- echo "missing"
- echo "Get it from the Microsoft site and put it into $SRCDIR."
- echo "You may have to search Microsoft's website."
- echo "Look for the version of 32-bit runtime redistribuable that corresponds"
- echo "to the version your Visual Studio links with"
- exit 1
- fi
-
- $echo_n "Looking for $SRCDIR/vcredist_x64.exe ... $echo_c"
- if test -f $SRCDIR/vcredist_x64.exe; then
- echo "ok"
- else
- echo "missing"
- echo "Get it from the Microsoft site and put it into $SRCDIR."
- echo "You may have to search Microsoft's website."
- echo "Look for the version of 64-bit runtime redistribuable that corresponds"
- echo "to the version your Visual Studio links with"
- exit 1
- fi
-fi
-
- case "$DISTRO" in
- NovellWin32*)
- check_file $SRCDIR/$AGFA_MONOTYPE_FONTS_SOURCE
- ;;
- esac
-
- case "$DISTRO" in
- NovellWin32*|GoOoWin32*)
- check_file $SRCDIR/writingaids-$PREBUILT_WRITINGAIDS_TIMESTAMP.zip
- check_file $SRCDIR/writingaids-doc-$PREBUILT_WRITINGAIDS_TIMESTAMP.zip
- ;;
- esac
-
-if test "z$SYSTEM_GCC" = "z"; then
- # unpack binutils
- cd $BUILDDIR
- echo "Unpacking compiler bits"
-
- if test "z$BINUTILS_TARBALL" != "z"; then
- ($BINUTILS_DECOMPRESS_CMD $SRCDIR/$BINUTILS_TARBALL | $GNUTAR xf - ) || exit 1;
- for P in $BINUTILS_PATCHES ; do
- patch -d "$BINUTILS_VER" -p1 -s < "$SRCDIR/$P"
- done
- fi
- ($GCC_DECOMPRESS_CMD $SRCDIR/$GCC_TARBALL | $GNUTAR xf - ) || exit 1;
- for P in $GCC_PATCHES ; do
- patch -d "$GCC_VER" -p1 -s < "$SRCDIR/$P"
- done
-fi
-
-echo "Unpacking LibreOffcice build tree - [ go and have some $DRINK ] ..."
-if test "z$OOO_GIT" = "z" ; then
- if test "z$SPLIT" != "zyes"; then
- echo "Unpacking $OOO_TARBALL..."
- ($OOO_DECOMPRESS_CMD $SRCDIR/$OOO_TARBALL | $GNUTAR xf - ) || exit 1
- else # split packages
- if test -d $OOBUILDDIR; then
- echo " removing old build dir: $OOBUILDDIR"
- rm -rf $OOBUILDDIR
- fi
- mkdir $OOBUILDDIR
- if test "$SOURCE_TYPE" = "CGIT" -a -d "$BUILDDIR/ooo" ; then
- echo " removing the old unpack dir: $BUILDDIR/ooo"
- rm -rf "$BUILDDIR/ooo"
- fi
- for pkg in $CORE_PKGS; do
- echo " unpacking $pkg..."
- ($OOO_DECOMPRESS_CMD "$SRCDIR/$OOO_SPLIT_PREFIX$pkg-$OOO_SOURCEVERSION.tar.bz2" | $GNUTAR xf - ) || exit 1
- echo " moving it into place..."
- mv $OOO_SPLIT_PREFIX$pkg-$OOO_SOURCEVERSION/* $OOBUILDDIR
- rmdir $OOO_SPLIT_PREFIX$pkg-$OOO_SOURCEVERSION
- done
- if test "z$PIECE" != "z"; then
- if test "$SOURCE_TYPE" = "CGIT" ; then
- ln -s "$BUILDDIR/$GITTAG" $OOBUILDDIR
- else
- ln -s "$BUILDDIR/$OOO_SOURCEDIRNAME-$PIECE" $OOBUILDDIR
- fi
- # ugly hack around odd positioning of this info
- echo "Unpacking setup_native .txt pieces"
- ($OOO_DECOMPRESS_CMD $SRCDIR/setup_native_packinfo-3.3.2.1.tar.bz2 | $GNUTAR xf - ) || exit
- # hack to patch new files in the root dir
- if test "z$PIECE" = "zbootstrap" ; then
- touch $OOBUILDDIR/boost.m4
- fi
- fi
- fi
-else
- if test -d $OOBUILDDIR; then
- echo "* removing old build dir: $OOBUILDDIR"
- rm -rf $OOBUILDDIR
- fi
- echo "* copying $RAWBUILDDIR into $OOBUILDDIR..."
- #cp -pLR "$RAWBUILDDIR" "$OOBUILDDIR"
- touch "$CLONEDIR/repos_changed"
- rsync -prLt --exclude .git --exclude '/clone' --exclude 'unx*.pro' --exclude 'wnt*.pro' "$RAWBUILDDIR/" "$OOBUILDDIR/"
- touch "$TOOLSDIR/stamp/refresh"
- if test -d "$CLONEDIR/translations" && [ "$OOO_LANGS" != "en-US" ]; then
- echo "* copying translations into $OOBUILDDIR..."
- rsync -a --exclude .git $CLONEDIR/translations/* $OOBUILDDIR/
- fi
-fi
-
-# original upstream sources
-if test "z$SPLIT" != "zyes"; then
- # -system source
- if test -f $SRCDIR/$OOO_SYSTEM_TARBALL; then
- # System is optional, but parts of it are useful
- echo "Unpacking $OOO_SYSTEM_TARBALL..."
- ($OOO_DECOMPRESS_CMD $SRCDIR/$OOO_SYSTEM_TARBALL | $GNUTAR xf - )
- fi
- # -binfilter source
- if test -f $SRCDIR/$OOO_BINFILTER_TARBALL; then
- echo "Unpacking $OOO_BINFILTER_TARBALL..."
- ($OOO_DECOMPRESS_CMD $SRCDIR/$OOO_BINFILTER_TARBALL | $GNUTAR xf - )
- fi
- # -lang source
- if test -f $SRCDIR/$OOO_LANG_TARBALL; then
- echo "Unpacking $OOO_LANG_TARBALL..."
- ($OOO_DECOMPRESS_CMD $SRCDIR/$OOO_LANG_TARBALL | $GNUTAR xf - )
- fi
- if test -f $SRCDIR/$OOO_EXTENSIONS_TARBALL; then
- echo "Unpacking $OOO_EXTENSIONS_TARBALL..."
- ($OOO_DECOMPRESS_CMD $SRCDIR/$OOO_EXTENSIONS_TARBALL | $GNUTAR xf - )
- fi
-fi
-
-if test "$ENABLE_CCACHE" = "yes" ; then
- if test "$BUILDDIR/$OOO_SOURCEDIRNAME" != "$OOBUILDDIR" ; then
- mv "$BUILDDIR/$OOO_SOURCEDIRNAME" "$OOBUILDDIR"
- fi
-fi
-echo "$OOO_SOURCEDIRNAME" > "$OOBUILDDIR/.source"
-
-echo "Fixing unfortunate snafus"
-cd $BUILDDIR
-
-# copy in the prebuilt jsr173_1.0_api.jar if available
-# allows to build with JDK 1.4
-JSR_API_JAR=jsr173_1.0_api.jar
-if test -f "$SRCDIR/$JSR_API_JAR"; then
- mkdir -p $OOBUILDDIR/stax/download
- cp -f $SRCDIR/$JSR_API_JAR $OOBUILDDIR/stax/download
-fi
-
-# copy in misc Mono related dlls if we need to
-# we can't build them on Unix.
-OOO_CLI_PREBUILT_ARCHIVE=ooo-cli-prebuilt-3.3.tar.bz2
-if test "z$PIECE" = "z"; then
-if test -f "$SRCDIR/$OOO_CLI_PREBUILT_ARCHIVE"; then
- mkdir -p $OOBUILDDIR/external/cli
- ( cd $OOBUILDDIR/external/cli; bzip2 -dc $SRCDIR/$OOO_CLI_PREBUILT_ARCHIVE | $GNUTAR xf - ) || exit 1;
-fi
-fi
-
-# Win32 prerequisites ...
-if test "z$BUILD_WIN32" != "z"; then
-
- # gdiplus.dll
- if test -f $OOBUILDDIR/external/gdiplus/gdiplus.dll; then
- echo "Already have gdiplus.dll"
- else
- cp -p $SRCDIR/gdiplus.dll $OOBUILDDIR/external/gdiplus/gdiplus.dll
- fi
-
- # vcredist_x86.exe
- if test -f $OOBUILDDIR/external/vcredist/vcredist_x86.exe; then
- echo "Already have vcredist_x86.exe"
- else
- cp -p $SRCDIR/vcredist_x86.exe $OOBUILDDIR/external/vcredist/vcredist_x86.exe
- fi
-
- # vcredist_x64.exe
- if test -f $OOBUILDDIR/external/vcredist/vcredist_x64.exe; then
- echo "Already have vcredist_x64.exe"
- else
- cp -p $SRCDIR/vcredist_x64.exe $OOBUILDDIR/external/vcredist/vcredist_x64.exe
- fi
-
- # dbghelp
- if test -f $OOBUILDDIR/external/dbghelp/DbgHelp.Dll; then
- echo "Already have dbghelp";
- else
- $echo_n "Extracting dbghelp $echo_c"
- cabextract -q $DBGINST_SRC
- TARGET=dbghelp.exe
- if test -f $TARGET; then
- unzip -o $TARGET
- mv -f License.Txt dbghelp-license.txt
- mv -f redist.txt dbghelp-redist.txt
- echo "done";
- else
- echo "failed";
- exit 1;
- fi
- fi
-
- # unicows
- case "$DISTRO" in
- NovellWin32*|GoOoWin32*)
- ;;
- *)
- if test -f $OOBUILDDIR/external/unicows/unicows.dll; then
- echo "Already have unicows";
- else
- $echo_n "Extracting unicows $echo_c"
- cabextract -q $UNICOWS_SRC
- TARGET=unicows.dll
- if test -f $TARGET; then
- mv -f license.txt unicows-license.txt
- mv -f redist.txt unicows-redist.txt
- echo "done";
- else
- echo "failed";
- exit 1;
- fi
- fi
- ;;
- esac
-
- # Bypass the Mozilla source bits for now
- if false; then
-
- if test -f $SRCDIR/$BUILD_MOZILLA_SOURCE; then
- $echo_n "Installing Mozilla source $echo_c"
- cp -f $SRCDIR/$BUILD_MOZILLA_SOURCE $OOBUILDDIR/moz/download || exit 1;
- echo "done";
- else
- echo "Don't need to unpack Mozilla sources";
- fi
-
- MOZILLA_IDL_SRC='libIDL-0.6.8.tar.gz'
- MOZILLA_IDL_PATCH='libIDL-0.6.8-ooo.patch'
- MOZILLA_GLIB_SRC='glib-1.2.10.tar.gz'
- MOZILLA_GLIB_PATCH='glib-1.2.10-ooo.patch'
- MOZILLA_WINTOOLS_BIN='wintools.zip'
-
- if test -f $OOBUILDDIR/moz/download/$MOZILLA_IDL_SRC; then
- echo "Already have Mozilla libIDL sources";
- else
- $echo_n "Installing Mozilla libIDL source $echo_c"
- cp -f $SRCDIR/$MOZILLA_IDL_SRC $OOBUILDDIR/moz/download || exit 1;
- cp -f $SRCDIR/$MOZILLA_IDL_PATCH $OOBUILDDIR/moz/download || exit 1;
- echo "done";
- fi
-
- if test -f $OOBUILDDIR/moz/download/$MOZILLA_GLIB_SRC; then
- echo "Already have Mozilla GLib source";
- else
- $echo_n "Installing Mozilla GLib source $echo_c"
- cp -f $SRCDIR/$MOZILLA_GLIB_SRC $OOBUILDDIR/moz/download || exit 1;
- cp -f $SRCDIR/$MOZILLA_GLIB_PATCH $OOBUILDDIR/moz/download || exit 1;
- echo "done";
- fi
-
- if test -f $OOBUILDDIR/moz/download/$MOZILLA_WINTOOLS_BIN; then
- echo "Already have Mozilla wintools";
- else
- $echo_n "Installing Mozilla wintools $echo_c"
- cp -f $SRCDIR/$MOZILLA_WINTOOLS_BIN $OOBUILDDIR/moz/download || exit 1;
- echo "done";
- fi
-
- fi # bypass for now
-
- MOZILLA_PREBUILT_INC='WNTMSCIinc.zip'
- MOZILLA_PREBUILT_LIB='WNTMSCIlib.zip'
- MOZILLA_PREBUILT_RUNTIME='WNTMSCIruntime.zip'
-
- if test -f $OOBUILDDIR/moz/zipped/$MOZILLA_PREBUILT_INC -a \
- -f $OOBUILDDIR/moz/zipped/$MOZILLA_PREBUILT_LIB -a \
- -f $OOBUILDDIR/moz/zipped/$MOZILLA_PREBUILT_RUNTIME; then
- echo "Already have the prebuilt Mozilla zip files";
- else
- $echo_n "Installing prebuilt Mozilla zip files $echo_c"
- cp -f $SRCDIR/$MOZILLA_PREBUILT_INC $OOBUILDDIR/moz/zipped || exit 1;
- cp -f $SRCDIR/$MOZILLA_PREBUILT_LIB $OOBUILDDIR/moz/zipped || exit 1;
- cp -f $SRCDIR/$MOZILLA_PREBUILT_RUNTIME $OOBUILDDIR/moz/zipped || exit 1;
- echo "done";
- fi
-
- if test -f $OOBUILDDIR/external/msvcp80/msvcr80.dll -a \
- -f $OOBUILDDIR/external/msvcp80/msvcp80.dll -a \
- -f $OOBUILDDIR/external/msvcp80/Microsoft.VC80.CRT.manifest; then
- echo "Already have msvcr80.dll, msvcp80.dll and Microsoft.VC80.CRT.manifest"
- else
- cp -p $SRCDIR/msvcr80.dll $OOBUILDDIR/external/msvcp80
- cp -p $SRCDIR/msvcp80.dll $OOBUILDDIR/external/msvcp80
- cp -p $SRCDIR/Microsoft.VC80.CRT.manifest $OOBUILDDIR/external/msvcp80
- fi
-
- if test "z`uname -s`" = "zInterix"; then
- if test "$PROCESSOR_ARCHITECTURE" = AMD64; then
- CommonProgsX86="`env|grep '^COMMONPROGRAMFILES(X86)='|sed -e 's/.*=//'`"
- else
- CommonProgsX86="$COMMONPROGRAMFILES"
- fi
- case "$CommonProgsX86" in
- ?:*)
- CommonProgsX86=`winpath2unix "$CommonProgsX86"`
- ;;
- esac
- else
- CommonProgsX86="$COMMONPROGRAMFILES"
- fi
-
- if test -f $OOBUILDDIR/external/msm90/Microsoft_VC90_CRT_x86.msm -a \
- -f $OOBUILDDIR/external/msm90/policy_9_0_Microsoft_VC90_CRT_x86.msm; then
- echo "Already have Microsoft_VC90_CRT_x86.msm and policy_9_0_Microsoft_VC90_CRT_x86.msm"
- elif test -f "$CommonProgsX86/Merge Modules/Microsoft_VC90_CRT_x86.msm" -a \
- -f "$CommonProgsX86/Merge Modules/policy_9_0_Microsoft_VC90_CRT_x86.msm"; then
- echo "Copying Microsoft VC9.0 CRT Merge Modules"
- cp -p "$CommonProgsX86/Merge Modules/Microsoft_VC90_CRT_x86.msm" $OOBUILDDIR/external/msm90
- cp -p "$CommonProgsX86/Merge Modules/policy_9_0_Microsoft_VC90_CRT_x86.msm" $OOBUILDDIR/external/msm90
- else
- echo "Missing Microsoft VC9.0 CRT Merge Modules:"
- echo "Microsoft_VC90_CRT_x86.msm and policy_9_0_Microsoft_VC90_CRT_x86.msm."
- echo "Did not find them in $CommonProgsX86/Merge Modules"
- echo "Do you have a Visual Studio 2008 installation?"
- exit 1
- fi
-
- case "$DISTRO" in
- NovellWin32*)
- if test -f $SRCDIR/$AGFA_MONOTYPE_FONTS_SOURCE; then
- agfa_md5=`md5sum $SRCDIR/$AGFA_MONOTYPE_FONTS_SOURCE | cut -d' ' -f1`
- if test -f $OOBUILDDIR/agfa_monotype_fonts/download/${agfa_md5}-agfa-monotype-fonts.tar.gz; then
- echo "Already have the Agfa Monotype fonts"
- else
- $echo_n "Installing Agfa Monotype fonts $echo_c"
- mkdir -p $OOBUILDDIR/agfa_monotype_fonts/download
- cp -f $SRCDIR/$AGFA_MONOTYPE_FONTS_SOURCE $OOBUILDDIR/agfa_monotype_fonts/download/${agfa_md5}-agfa-monotype-fonts.tar.gz || exit 1
- echo "done"
- fi
- if grep 'Agfa Monotype' $OOBUILDDIR/readlicense_oo/html/THIRDPARTYLICENSEREADME.html; then
- echo "THIRDPARTYLICENSEREADME.html already contains Agfa Monotype EULA"
- else
- echo "Adding Agfa Monotype EULA to THIRDPARTYLICENSEREADME.html"
- $TOOLSDIR/bin/amt-license-patch <$OOBUILDDIR/readlicense_oo/html/THIRDPARTYLICENSEREADME.html >/tmp/amt.$$ &&
- mv /tmp/amt.$$ $OOBUILDDIR/readlicense_oo/html/THIRDPARTYLICENSEREADME.html
- fi
- else
- echo Missing $SRCDIR/$AGFA_MONOTYPE_FONTS_SOURCE
- echo which should always be included when building $DISTRO
- exit 1
- fi
- ;;
- esac
-else
-
- if test -f $SRCDIR/$BUILD_MOZILLA_SOURCE; then
- echo "Copying Mozilla package into the tree"
- mkdir -p $OOBUILDDIR/moz/download || exit 1
- cp $SRCDIR/$BUILD_MOZILLA_SOURCE $OOBUILDDIR/moz/download/ || exit 1
- fi
-
-fi
-
-if test "z$DISTRO" != "zOOo4Kids"; then
- if test "z$BUILD_WIN32" != "z"; then
- check_file $SRCDIR/$APACHE_ANT_TARBALL
- else
- ANT_VERSION=`ant -version | awk '{ print $4 }'`
- if test -x /usr/bin/ant -a \
- "`echo $ANT_VERSION | cut -d. -f1`" -eq "1" \
- -a "`echo $ANT_VERSION | cut -d. -f2`" -lt "7" \
- -a "`echo $DISTRO`" != "SUSE-10.1" ; then
- check_file $SRCDIR/$APACHE_ANT_TARBALL
- fi
- fi
-fi
-
-if test -f $SRCDIR/$APACHE_ANT_TARBALL; then
- $echo_n "Extracting ant $echo_c"
- (gunzip -c $SRCDIR/$APACHE_ANT_TARBALL | $GNUTAR xf - )|| exit 1;
- echo "done";
-else
- echo "Don't need to unpack ant";
-fi
-
-
-case "$DISTRO" in
- NovellWin32*|GoOoWin32*)
- echo "Creating prebuilt writing aids extension (dict-bundle.oxt)..."
-
- # Create prebuilt dict-bundle.oxt
-
- tmpdir=`mktemp -d`
- (cd $tmpdir
- unzip -q $SRCDIR/writingaids-$PREBUILT_WRITINGAIDS_TIMESTAMP.zip
- unzip -q $SRCDIR/writingaids-doc-$PREBUILT_WRITINGAIDS_TIMESTAMP.zip
-
- echo '<?xml version="1.0" encoding="UTF-8"?>
-<description xmlns="http://openoffice.org/extensions/description/2006" xmlns:d="http://openoffice.org/extensions/description/2006" xmlns:xlink="http://www.w3.org/1999/xlink">
- <version value="'$PREBUILT_WRITINGAIDS_TIMESTAMP.`date +%Y%m%d`'" />
- <identifier value="org.go-oo.dictionaries.bundle" />
- <display-name>
- <name lang="en">Built-in spelling and hyphenation dictionaries and thesauri</name>
- </display-name>
- <platform value="all" />
- <dependencies>
- <OpenOffice.org-minimal-version value="3.0" d:name="OpenOffice.org 3.0" />
- </dependencies>
-</description>' >description.xml
-
- mkdir META-INF
- echo '<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE manifest:manifest PUBLIC "-//OpenOffice.org//DTD Manifest 1.0//EN" "Manifest.dtd">
-<manifest:manifest xmlns:manifest="http://openoffice.org/2001/manifest">
- <manifest:file-entry manifest:media-type="application/vnd.sun.star.configuration-data"
- manifest:full-path="dictionaries.xcu"/>
-</manifest:manifest>' >META-INF/manifest.xml
-
- # Read the dictionary.lst file and write a dictionaries.xcu file
- # Note that unlike upstream's dictionaries.xcu files,
- # we can't support several locales for one entry in the dictionary.lst
- # file. (For instance the upstream af_ZA dictionairs.xcu supports
- # both af-ZA and af-NA.)
- (
- echo '<?xml version="1.0" encoding="UTF-8"?>
-<oor:component-data xmlns:oor="http://openoffice.org/2001/registry" xmlns:xs="http://www.w3.org/2001/XMLSchema" oor:name="Linguistic" oor:package="org.openoffice.Office">
- <node oor:name="ServiceManager">
- <node oor:name="Dictionaries">'
- while read type lang country root; do
- case "$type" in
- \#*)
- ;;
- '')
- ;;
- DICT)
- echo ' <node oor:name="HunSpellDic_'$lang-$country'" oor:op="fuse">
- <prop oor:name="Locations" oor:type="oor:string-list">
- <value>%origin%/'$root'.aff %origin%/'$root'.dic</value>
- </prop>
- <prop oor:name="Format" oor:type="xs:string">
- <value>DICT_SPELL</value>
- </prop>
- <prop oor:name="Locales" oor:type="oor:string-list">
- <value>'$lang-$country'</value>
- </prop>
- </node>'
- ;;
- HYPH)
- echo ' <node oor:name="HyphDic_'$lang-$country'" oor:op="fuse">
- <prop oor:name="Locations" oor:type="oor:string-list">
- <value>%origin%/'$root'.dic</value>
- </prop>
- <prop oor:name="Format" oor:type="xs:string">
- <value>DICT_HYPH</value>
- </prop>
- <prop oor:name="Locales" oor:type="oor:string-list">
- <value>'$lang-$country'</value>
- </prop>
- </node>'
- ;;
- THES)
- echo ' <node oor:name="ThesDic_'$lang-$country'" oor:op="fuse">
- <prop oor:name="Locations" oor:type="oor:string-list">
- <value>%origin%/'$root'.dat %origin%/'$root'.idx</value>
- </prop>
- <prop oor:name="Format" oor:type="xs:string">
- <value>DICT_THES</value>
- </prop>
- <prop oor:name="Locales" oor:type="oor:string-list">
- <value>'$lang-$country'</value>
- </prop>
- </node>'
- ;;
- *)
- echo "Unknown code '$type' in dictionary.lst in writingaids-$PREBUILT_WRITINGAIDS_TIMESTAMP.zip" >&2
- exit 1
- ;;
- esac
- done <dictionary.lst
-
- echo ' </node>
- </node>
-</oor:component-data>'
- ) >dictionaries.xcu
-
- rm dictionary.lst
-
- mkdir -p $OOBUILDDIR/dictionaries/prebuilt
- rm -f $OOBUILDDIR/dictionaries/prebuilt/dict-bundle.oxt
- zip -qDr $OOBUILDDIR/dictionaries/prebuilt/dict-bundle.oxt .)
- rm -rf $tmpdir
- ;;
-esac
-
-if test "z$SYSTEM_GCC" = "z"; then
- echo "Checking for gcc sources and prerequisities"
- if test -d "$BUILDDIR/$GCC_VER" &&
- test -d "$BUILDDIR/$BINUTILS_VER"; then
- echo "ok"
- else
- echo "Missing some of the helper source";
- exit 1;
- fi
-
-fi
-
-if test "z$PIECE" = "z"; then
-
-if test "x$OOO_EXTRA_ARTWORK" != "x"; then
-# Html export rulers etc.
- cd $OOBUILDDIR
- ( bzip2 -dc $SRCDIR/$OOO_EXTRA_ARTWORK | $GNUTAR xf - ) || exit 1;
-fi
-
-for i in $OOO_LANGS; do
- if test -f $SRCDIR/extras_$i.tar.gz; then
- echo "Extracting extra $i extras"
- cd $OOBUILDDIR
- $GNUTAR xzf $SRCDIR/extras_$i.tar.gz --exclude={"README","LICENSE"} || exit 1
- fi
- if test -f $SRCDIR/help_images_$i.tar.gz; then
- echo "Extracting extra $i help images"
- cd $OOBUILDDIR
- $GNUTAR xzf $SRCDIR/help_images_$i.tar.gz || exit 1
- fi
-done
-
-if test "x$OPENCLIPART_VER" != "x"; then
- echo "Unpacking open clipart"
- check_tarball $SRCDIR/openclipart-$OPENCLIPART_VER.tar.bz2
- cd $BUILDDIR
- $GNUTAR xjf $SRCDIR/openclipart-$OPENCLIPART_VER.tar.bz2 || exit 1;
-fi
-
-if test "x$DEJAVU_FONTS_VER" != "x"; then
- echo "Correcting the package names of DejaVu font packages to $DEJAVU_FONTS_PACK_NAME"
- sed -e "s|TARFILE_NAME=.*$|TARFILE_NAME=$DEJAVU_FONTS_PACK_NAME|" -e "s|TARFILE_ROOTDIR=.*$|TARFILE_ROOTDIR=$DEJAVU_FONTS_PACK_NAME|" $OOBUILDDIR/more_fonts/fonts/ttf_dejavu/makefile.mk > $OOBUILDDIR/more_fonts/fonts/ttf_dejavu/makefile.mk.ver
- mv $OOBUILDDIR/more_fonts/fonts/ttf_dejavu/makefile.mk.ver $OOBUILDDIR/more_fonts/fonts/ttf_dejavu/makefile.mk
- echo "done"
- echo "Correcting the delivery file of DejaVu Font packages"
- sed -e "s|dejavu-fonts-ttf-.*|$DEJAVU_FONTS_PACK_NAME|" $OOBUILDDIR/more_fonts/prj/d.lst > $OOBUILDDIR/more_fonts/prj/d.lst.ver
- mv $OOBUILDDIR/more_fonts/prj/d.lst.ver $OOBUILDDIR/more_fonts/prj/d.lst
- echo "done"
-
-fi
-
-if test "x$LIBERATION_FONTS_VER" != "x"; then
- mkdir -p $OOBUILDDIR/bitstream_vera_fonts/download/
- LIBERATION_FONTSDEST=$OOBUILDDIR/bitstream_vera_fonts/download/liberation-fonts.zip
- echo "Updating Liberation fonts zip archive to version $LIBERATION_FONTS_VER"
- LIBERATION_FONTSSRC=liberation-fonts-ttf-$LIBERATION_FONTS_VER.zip
- cp -f $SRCDIR/$LIBERATION_FONTSSRC $LIBERATION_FONTSDEST || exit 1;
- echo "done"
-fi
-
-
-if test "x$MDBTOOLS_SRC" != "x" -a "z$SYSTEM_MDBTOOLS" != "zYES"; then
- echo "Copying mdbtools into the tree"
- MDBDEST=$OOBUILDDIR/mdbtools/download
- mkdir -p $MDBDEST || exit 1;
- cp $SRCDIR/$MDBTOOLS_SRC $MDBDEST || exit 1;
-fi
-
-if test "x$GRAPHITE_SRC" != "x"; then
- echo "Copying graphite into the tree"
- GRAPHITE_DEST=$OOBUILDDIR/graphite/download
- mkdir -p $GRAPHITE_DEST || exit 1;
- cp $SRCDIR/$GRAPHITE_SRC $GRAPHITE_DEST || exit 1;
-fi
-
-$TOOLSDIR/bin/piece/unpack-extras $TOOLSDIR $OOBUILDDIR || exit 1;
-
-echo "Installing bibliography overwrides";
-$GNUTAR xjf $SRCDIR/biblio.tar.bz2 -C $OOBUILDDIR/extras/source/database
-
-if test "$CAIRO_ENABLED" = "TRUE" -a "$SYSTEM_CAIRO" != "TRUE"; then
- echo "Copying cairo into tree"
- mkdir -p $OOBUILDDIR/cairo
- check_tarball $SRCDIR/cairo-${CAIRO_VER}.tar.gz
- mkdir -p $OOBUILDDIR/cairo/download
- cp $SRCDIR/cairo-${CAIRO_VER}.tar.gz $OOBUILDDIR/cairo/download/
-fi
-
-# OxygenOffice extras
-for pack in $OOOP_FONTS_PACK $OOOP_GALLERY_PACK $OOOP_SAMPLES_PACK $OOOP_TEMPLATES_PACK ; do
- check_file $SRCDIR/$pack
- echo "Unpacking $pack"
- unzip -o -q $SRCDIR/$pack -d $OOBUILDDIR/extras/source
-done
-
-# Sun Professional Templates Pack
-for pack in $SUNTEMPLATES_LANG ; do
- check_file $SRCDIR/Sun_ODF_Template_Pack_$pack.oxt
- echo "Copying Sun_ODF_Template_Pack_$pack.oxt"
- mkdir -p $OOBUILDDIR/extras/source/extensions/ || exit 1
- cp $SRCDIR/Sun_ODF_Template_Pack_$pack.oxt $OOBUILDDIR/extras/source/extensions/ || exit 1
-done
-
-for pack in $LIGHTPROOF_FILENAMES ; do
- check_file $SRCDIR/$pack
- echo "Copying LightProof extension ($pack) into the tree"
- mkdir -p $OOBUILDDIR/extras/source/extensions/ || exit 1
- cp $SRCDIR/$pack $OOBUILDDIR/extras/source/extensions/ || exit 1
-done
-
-if test "x$NUMBERTEXT_EXTENSION" != "x"; then
- check_file $SRCDIR/$NUMBERTEXT_EXTENSION
- echo "Copying NumberText extension into the tree"
- mkdir -p $OOBUILDDIR/extras/source/extensions/ || exit 1
- cp $SRCDIR/$NUMBERTEXT_EXTENSION $OOBUILDDIR/extras/source/extensions/numbertext.oxt || exit 1
-fi
-
-if test "x$HUNART_EXTENSION" != "x"; then
- check_file $SRCDIR/$HUNART_EXTENSION
- echo "Copying Hungarian Cross/reference Toolbar extension into the tree"
- mkdir -p $OOBUILDDIR/extras/source/extensions/ || exit 1
- cp $SRCDIR/$HUNART_EXTENSION $OOBUILDDIR/extras/source/extensions/hunart.oxt || exit 1
-fi
-
-if test "x$TYPO_EXTENSION" != "x"; then
- check_file $SRCDIR/$TYPO_EXTENSION
- echo "Copying Typography Toolbar extension into the tree"
- mkdir -p $OOBUILDDIR/extras/source/extensions/ || exit 1
- cp $SRCDIR/$TYPO_EXTENSION $OOBUILDDIR/extras/source/extensions/typo.oxt || exit 1
-fi
-
-if test "x$WATCH_WINDOW_EXTENSION" != "x"; then
- check_file $SRCDIR/$WATCH_WINDOW_EXTENSION
- echo "Copying Watch Window extension into the tree"
- mkdir -p $OOBUILDDIR/extras/source/extensions/ || exit 1
- cp $SRCDIR/$WATCH_WINDOW_EXTENSION $OOBUILDDIR/extras/source/extensions/WatchWindow.oxt || exit 1
-fi
-
-if test "x$DIAGRAM_EXTENSION" != "x"; then
- check_file $SRCDIR/$DIAGRAM_EXTENSION
- echo "Copying Diagram extension into the tree"
- mkdir -p $OOBUILDDIR/extras/source/extensions/ || exit 1
- cp $SRCDIR/$DIAGRAM_EXTENSION $OOBUILDDIR/extras/source/extensions/Diagram.oxt || exit 1
-fi
-
-if test "x$VALIDATOR_EXTENSION" != "x"; then
- check_file $SRCDIR/$VALIDATOR_EXTENSION
- echo "Copying Validator extension into the tree"
- mkdir -p $OOBUILDDIR/extras/source/extensions/ || exit 1
- cp $SRCDIR/$VALIDATOR_EXTENSION $OOBUILDDIR/extras/source/extensions/Validator.oxt || exit 1
-fi
-
-if test "x$BARCODE_EXTENSION" != "x"; then
- check_file $SRCDIR/$BARCODE_EXTENSION
- echo "Copying Barcode extension into the tree"
- mkdir -p $OOBUILDDIR/extras/source/extensions/ || exit 1
- cp $SRCDIR/$BARCODE_EXTENSION $OOBUILDDIR/extras/source/extensions/Barcode.oxt || exit 1
-fi
-
-if test "x$GOOGLE_DOCS_EXTENSION" != "x"; then
- check_file $SRCDIR/$GOOGLE_DOCS_EXTENSION
- echo "Copying Google Docs extension into the tree"
- mkdir -p $OOBUILDDIR/extras/source/extensions/ || exit 1
- cp $SRCDIR/$GOOGLE_DOCS_EXTENSION $OOBUILDDIR/extras/source/extensions/gdocs.oxt || exit 1
-fi
-
-if test "x$NLPSOLVER_EXTENSION" != "x"; then
- mkdir -p $OOBUILDDIR/nlpsolver
-fi
-
-if test "x$OOOBLOGGER_EXTENSION" != "x"; then
- check_file $SRCDIR/$OOOBLOGGER_EXTENSION
- echo "Copying oooblogger extension into the tree"
- mkdir -p $OOBUILDDIR/extras/source/extensions/ || exit 1
- cp $SRCDIR/$OOOBLOGGER_EXTENSION $OOBUILDDIR/extras/source/extensions/oooblogger.oxt || exit 1
-fi
-
-fi # PIECES hack
diff --git a/bin/unused.pl b/bin/unused.pl
deleted file mode 100755
index 4fa9299a9..000000000
--- a/bin/unused.pl
+++ /dev/null
@@ -1,205 +0,0 @@
-#!/usr/bin/env perl
-
-$total_defined = 0;
-%libs = ();
-%symbols = ();
-%ex_syms = ();
-@prefix = ();
-
-$path='/opt/OpenOffice/OOO_STABLE_1/solver/641/unxlngi4.pro';
-
-# for various tools.
-$ENV{LD_LIBRARY_PATH}="$path/lib:$ENV{LD_LIBRARY_PATH}";
-
-sub build_libs {
- my $file;
- my $dirhandle;
- my $path = shift;
-
- opendir ($dirhandle, $path) || die "Can't open dir $path: $!";
- while ($file = readdir ($dirhandle)) {
- if ($file =~ m/\S+.so/ &&
- !($file =~ m/used/)) {
- $libs{"$path/$file"} = undef;
- }
- }
- closedir ($dirhandle);
-}
-
-sub find_deps {
- my $lib = shift;
- my @deps = ();
- my $ldd;
-
- open $ldd, "ldd $lib|" || die "Can't ldd $lib: $!";
-
- while (<$ldd>) {
-# print "Dep $_\n";
- /not found/ && die "Missing dep-lib ('$lib'): '$_'\n";
- if (/\s+\S+\s\=\>\s+(\S+)\s+\(/) {
- if (exists ($libs{$1})) {
- push @deps, $1;
- }
- }
- }
- close $ldd;
-
- return @deps;
-}
-
-sub insert_symbols {
- my $obj;
- my $defined = 0;
- my $referenced = 0;
- my $lib = shift;
-# my @fixups = ();
- my @external = ();
- my $pref = join ('', @prefix) . ' ';
-
-# print "$pref inserting from $lib\n";
-
-# Use '-C' to get de-mangled symbols ...
- open $obj, "objdump -T $lib|" || die "Can't objdump $lib: $!";
-
- while (<$obj>) {
- /[0-9a-f]*\s+([gw ])\s+..\s+(\S*)\s*......................\s+(.*)/;
-
- my ($symbol, $type) = ($3, $2);
-
- if (!$symbol || !$type) {
- next;
- }
-
- if ($type eq '.text') {
- if (exists $symbols{$symbol}) {
-# FIXME: investigate all these
-# print "$pref possible duplicate def of '$symbol'\n";
- } else {
-# print "$pref define symbol: '$symbol'\n";
- $symbols{$symbol} = $lib;
- }
- $defined++;
- $total_defined++;
- } elsif ($type eq '*UND*') {
- push (@external, $symbol);
- }
- }
-
- for $symbol (@external) {
- if (exists $symbols{$symbol}) {
- my $val;
- $referenced++;
- $val = $symbols{$symbol};
- $val =~ m/^r-/ || ($symbols{$symbol} = "r-$val");
- } else {
- $ex_syms{$symbol} = "$lib";
- }
- }
-
- print "$pref $defined defined, $referenced referenced\n";
-
- close $obj;
-}
-
-sub resolve_symbols {
- my $lib = shift;
- my @deps;
-
- if ($libs{$lib}) {
- return;
- }
-
- @deps = find_deps ($lib);
-
- print join ('', @prefix), "resolving library $lib:\n";
- push @prefix, ' ';
-
- for $resolve (@deps) {
- resolve_symbols ($resolve);
- }
-
- insert_symbols ($lib);
-
- pop @prefix;
-
- $libs{$lib} = 'resolved';
-}
-
-# Main:
-
-my $instset = 0;
-
-if ($instset) {
- # This path misses the symbols
- # needed by the builtin utils
- build_libs ($path);
- build_libs ("$path/filter");
- build_libs ("$path/components");
- # custom nastiness
- $libs{"$path/soffice.bin"} = undef;
- $libs{"$path/setup.bin"} = undef;
-} else {
- build_libs ("$path/lib");
-}
-
-# builtin tools ...
-@tools = ('autodoc', 'bmp', 'bmpgui', 'cfgex',
- 'checkdll', 'checkscp', 'checksingleton',
- 'cppumaker', 'g2g', 'idlc',
- 'localize', 'lzip', 'makedepend',
- 'proxy', 'rdbmaker', 'regcomp', 'regmerge',
- 'regview', 'regcompare', 'rsc', 'rsc2',
- 'rscdep', 'rscpp', 'scpcomp', 'scplink',
- 'scppkg', 'scpzip', 'srvdepy', 'svidl',
- 'transex3', 'uno', 'xml2cmp', 'xrmex',
- 'setup.bin', 'loader.bin', 'soffice.bin',
- 'ucb.bin' );
-
-for $bin (@tools) {
- $libs{"$path/bin/$bin"} = undef;
-}
-
-for $lib (keys %libs) {
- resolve_symbols ($lib);
-}
-
-my %unused = ();
-
-printf "Finding unused...";
-for $lib (keys %libs) {
- my $used = 0;
- my $unused = 0;
- my $used_file;
- my $un_used_file;
-
- open ($used_file, ">$lib.used") || die "Failed to open $lib.used: $!";
- open ($un_used_file, ">$lib.unused") || die "Failed to open $lib.unused: $!";
-
- for $symbol (keys (%symbols)) {
- my $sym_lib = $symbols {$symbol};
-
- if ($sym_lib eq $lib) {
- print $un_used_file "$symbol\n";
- $unused++;
- } elsif ($sym_lib eq "r-$lib") {
- print $used_file "$symbol\n";
- $used++;
- }
- }
-
- close ($used_file);
- close ($un_used_file);
-
- print "Library $lib: $used used, and $unused unused\n";
-}
-
-my $ex_file;
-open ($ex_file, ">external.used") || die "Failed to open external.used: $!";
-for $symbol (keys %ex_syms) {
- my $hmm = '';
- if ($symbols{$symbol}) {
- $hmm = 'defined later (!)';
- }
- print $ex_file "$symbol; $hmm\n";
-}
-close ($ex_file);
diff --git a/bin/vtable-check b/bin/vtable-check
deleted file mode 100755
index c1196307c..000000000
--- a/bin/vtable-check
+++ /dev/null
@@ -1,204 +0,0 @@
-#!/usr/bin/perl -w
-
-use strict;
-
-#
-# Use example: vtable-check *.so
-#
-
-my $list_sizes = 1;
-my $lo_path;
-my $archdir = 'unxlngi6.pro';
-
-sub read_obj_vtables($$$)
-{
- my $vtables = shift;
- my $file = shift;
- my $pipe;
- my $slot_size = 4;
-
- # FIXME: we may have duplicate hidden vtables across different
- # libraries - attempt to hide this with libalias - though we may
- # have multiple libraries in the same project that in reality do
- # not conflict. We should ideally depend on some library file
- # list description in the output directory instead ...
- my $libalias = shift;
-
- open ($pipe, "objdump -t $file |") || die "Can't objdump -T $file: $!";
- while (<$pipe>) {
- my $line = $_;
-
- $slot_size = 8 if (/elf64-x86_64/); # testme
- $slot_size = 4 if (/elf32-i?86/);
-
- next if (/\*UND\*/);
- next if (! /\s+_ZT[vV]/);
-# 00000000 w O .data.rel.ro._ZTV16ReturnActionEdit 000001c8 _ZTV16ReturnActionEdit
-# 00000000 w O .data.rel.ro._ZTVN3com3sun4star3uno10XInterfaceE 00000014 .hidden _ZTVN3com3sun4star3uno10XInterfaceE
- $line =~ /([0-9a-f]*)\s+([gw ])\s+..\s+(\S*)\s*([0-9a-f]+)\s+(.*)$/ || die "un-parseable vtable entry '$_'";
-
- my ($address, $linkage, $type, $sizehex, $symbol) = ($1, $2, $3, $4, $5);
- my $size = hex ($sizehex) / $slot_size;
- $symbol =~ s/.hidden/$libalias/;
-
- $symbol =~ /_GLOBAL_/ && next; # anonymous namespace
-
-# print "$size $symbol\n";
- if (defined ($vtables->{$symbol})) {
- if ($size != $vtables->{$symbol}) {
- print STDERR "Error - serious vtable size mismatch on $symbol: $size, " . $vtables->{$symbol} . "\n";
- }
- }
- $vtables->{$symbol} = $size;
- }
-}
-
-sub read_so_vtables($$)
-{
- my $vtables = shift;
- my $file = shift;
- my $pipe;
- my $slot_size = 4;
-
- open ($pipe, "objdump -T $file |") || die "Can't objdump -T $file: $!";
- while (<$pipe>) {
- $slot_size = 8 if (/elf64-x86_64/); # testme
- $slot_size = 4 if (/elf32-i?86/);
-
- next if (/\*UND\*/);
- next if (! /\s+_ZT[vV]/);
-
-# 0058dc3e g DF .text 0000000f Base _ZTv0_n12_N10SwDocShellD0Ev
- /([0-9a-f]*)\s+([gw ])\s+..\s+(\S*)\s*([0-9a-f]+)\s+(\S*)\s+(.*)$/ || die "un-parseable vtable entry '$_'";
-
- my ($address, $linkage, $type, $sizehex, $ver, $symbol) = ($1, $2, $3, $4, $5, $6);
- $type =~ /O/ || next;
-
- $symbol =~ /_GLOBAL_/ && next; # anonymous namespace
-
- my $size = hex ($sizehex);
-
- if (defined ($vtables->{$symbol})) {
- if ($size != $vtables->{$symbol}) {
- print STDERR "Error - serious vtable size mismatch on $symbol: $size, " . $vtables->{$symbol} . "\n";
- }
- }
- $vtables->{$symbol} = $size;
- }
-}
-
-sub scan_objdir($$$)
-{
- my $dir;
- my ($vtables, $path, $libalias) = @_;
-
- opendir ($dir, $path) || die "Can't open $path: $!";
- while (my $name = readdir ($dir)) {
- $name =~ /^\./ && next;
- read_obj_vtables ($vtables, "$path/$name", $libalias);
- }
- closedir ($dir);
-}
-
-sub print_syntax()
-{
- print STDERR "vtable-check [--list] [--help] [--lo-path=] <list-of-object-files>\n";
- print STDERR " this tool generates signatures for vtable sizes, that can be compared\n";
- print STDERR " between patches to ensure that no incomplete type changes have \n";
- print STDERR " accidentally created new virtual methods\n";
- print STDERR " --list prints out all vtable sizes\n";
- print STDERR " --lo-path=<path> scan an entire LibreOffice(LO) build tree\n";
- print STDERR " --archdir=<name> name of the LO binary output directory for this arch\n";
- print STDERR " --help help\n";
- exit(1);
-}
-
-#
-# munge options
-#
-my @files = ();
-for my $arg (@ARGV) {
- if ($arg =~ m/^--list/) {
- $list_sizes = 1;
- } elsif ($arg =~ m/^--lo-path=(.*)$/) {
- $lo_path = $1;
- } elsif ($arg =~ m/^--archdir=(.*)$/) {
- $archdir = $1;
- } elsif ($arg =~ m/^--help/) {
- print_syntax();
- } else {
- push @files, $arg;
- }
-}
-print_syntax() if (!@files && !defined $lo_path);
-
-#
-# read relocation data from misc. object files
-#
-my %libs;
-print STDERR "reading vtables ";
-
-if (defined $lo_path) {
- # scan LibreOffice source tree ...
- my $dir;
- opendir ($dir, $lo_path) || die "Can't open $lo_path: $!";
- while (my $name = readdir ($dir)) {
- $name =~ /^\./ && next;
- -d "$lo_path/$name/$archdir/slo" || next;
-
- my %vtables = ();
- scan_objdir (\%vtables, "$lo_path/$name/$archdir/slo", $name);
- $libs{$name} = \%vtables;
- print STDERR ".";
- }
- closedir ($dir);
- print STDERR "\n";
-} else {
- # scan command-line arguments
- for my $file (@files) {
- my %vtables = ();
- if ($file =~ /\.so$/) {
- read_so_vtables (\%vtables, $file);
- print STDERR ".";
- } else {
- read_obj_vtables (\%vtables, $file, '');
- }
- next if (!keys (%vtables));
- $libs{$file} = \%vtables;
- }
-}
-print STDERR "\n";
-
-print STDERR "sanity check";
-
-my %global_syms = ();
-for my $file (keys %libs) {
- my $vtables = $libs{$file};
- for my $sym (%{$vtables}) {
- if (defined $global_syms{$sym}) {
-# print "multiply defined vtable '$sym'\n";
- if ($global_syms{$sym} != $vtables->{$sym}) {
- print STDERR "Error - serious vtable size mismatch on $sym\n";
- for my $ff (keys %libs) {
- if (defined $libs{$ff}->{$sym}) {
- print STDERR "\tdefined in $ff: size " . $libs{$ff}->{$sym} . "\n";
- }
- }
- }
- }
- $global_syms{$sym} = $vtables->{$sym};
- }
-}
-print STDERR "ed.\n";
-
-print "Sizes are in virtual function pointer slots\n";
-
-for my $file (sort keys %libs) {
- my $vtables = $libs{$file};
-
- print "file: $file\n";
- for my $sym (sort { $vtables->{$a} cmp $vtables->{$b} } keys %{$vtables}) {
- print $vtables->{$sym} . "\t$sym\n";
- }
- print "\n";
-}
diff --git a/bin/xmlunzip b/bin/xmlunzip
deleted file mode 100755
index 35337254b..000000000
--- a/bin/xmlunzip
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/bin/sh
-
-# quick and simple
-case "$1" in
-# OpenOffice 2.x
- *.ods) suffix=.ods ;;
- *.odt) suffix=.odt ;;
- *.odp) suffix=.odp ;;
- *.odg) suffix=.odg ;;
- *.odf) suffix=.odf ;;
-
-# OpenOffice 1.x
- *.sxc) suffix=.sxc ;;
- *.sxt) suffix=.sxt ;;
- *.sxp) suffix=.sxp ;;
-
-# Office12
- *.xlsx) suffix=.xlsx ;;
- *.docx) suffix=.docx ;;
- *.pptx) suffix=.pptx ;;
-esac
-
-dir=`basename "$1" $suffix`
-rm -rf "$dir"
-unzip -q -d "$dir" "$1"
-cd "$dir"
-tmpfile=".dumper-$$.xml"
-files=`find . -name \*.xml -o -name \*.rels`
-
-for f in $files ; do
- if [ -s "$f" ] ; then
- xmllint --format "$f" > "$tmpfile"
- mv "$tmpfile" "$f"
- fi
-done