#!/usr/bin/perl -w

########################################################################
# Copyright (C) 2008 ERG Limited, All rights reserved
#
# Module name   : ddlfile.pl
# Module type   : Standalone utility
# Compiler(s)   : n/a
# Environment(s): windows (dos), unix (solaris and linux)
#
# Description   : Recreates a DDL file from the directory structure
#                 created by schemadump.pl. This is useful for checking
#                 that schemadump.pl works, and also is intended
#                 to assist in creating a database from a set of
#                 clearcase files.
#
# History       : Created by Haydon Knight May 2008
#
# Usage         : ddlfile.pl [options] -src=<srcDir> -dest=<destFile>
#
########################################################################

#######################################################################
# Use lines
#######################################################################
require 5.6.1;
use strict;
use warnings;
use Pod::Usage;       # required for help support
use Getopt::Long;

use commonExports;    # see commonExports.pm in this directory
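# Note: the helper routines used below (logprint, readArray, writeArray,
# runCommand, isOneOf, getTopBotDirs, openLog, finish) and the globals
# ($UNIX, $verbose, $logFile, $subSectionSplitter) are not defined in this
# file, so they are presumably exported by commonExports.pm.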

#######################################################################
# Function prototypes
#######################################################################

sub startupChecks();
sub parseCommandLine();
sub main();

sub getFilesWithExtras();
sub insertExtraSlash(\@);
sub getOrderedSqlFiles();
sub sortSqlFiles(\@);
sub getSqlFiles($);
sub directoryListing($);
sub getDDLHeader();
sub getSearchDirs();

#######################################################################
# Constant global variables
#######################################################################

my $VERSION = "1.0.1";

#######################################################################
# Hash tables - these are specific to the release manager database
# and need to be adjusted if running this script on other databases
#######################################################################

#######################################################################
# Other global variables
#######################################################################

my $srcDir;
my $destFile;

#######################################################################
# Main code
#######################################################################

parseCommandLine();
startupChecks();
main();
finish();

#######################################################################
# Function definitions
#######################################################################

#-------------------------------------------------------------------------------
# Function  : main
#
# Purpose   : Main function of this script
#
# Arguments : none
#
# Returns   : none
#
# Notes     : Does everything but setting up at start and tearing down at end.
#
sub main()
{
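    # Expected layout of $srcDir (inferred from the reads in this script): a
    # bookKeeping/ subdirectory holding DDLHeader, objectTypes, orderedFiles and
    # filesWithExtras, plus one <topDir>/<botDir>/ directory per object type
    # (see getSearchDirs()) containing the individual .sql files.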
    my @outputLines = getDDLHeader();   # The header for the whole output file

    # Get the headers for each DDL-data-type
    # i.e. a header for the sequences, a header for the procedures etc.
    my @headers = getHeaders();

    # Get a list of all directories to find .sql files in
    my @searchDirs = getSearchDirs();

    # These need extra slashes inserted
    my @filesWithExtras = getFilesWithExtras();

    for( my $i = 0; $i < scalar(@searchDirs); $i++)
    {
        logprint "Working with searchDir $i: '$searchDirs[$i]'";

        push @outputLines, "$headers[$i]\n";

        my @sqlFiles = getSqlFiles( $searchDirs[$i] );

        logprint "Initially got " . scalar(@sqlFiles) . " .sql files";

        sortSqlFiles( @sqlFiles );

        logprint "Now got " . scalar(@sqlFiles) . " .sql files";

        my $searchDirRelativePath = $searchDirs[$i];
        $searchDirRelativePath =~ s~/+$~~;            # get rid of trailing slashes
        $searchDirRelativePath =~ s~.*/(.*?/.*)~$1~;  # make it into a relative path

        for( my $j = 0; $j < scalar(@sqlFiles); $j++)
        {
            my @sqlFileLines = readArray( $sqlFiles[$j] );

            foreach my $sqlFileLine (@sqlFileLines )
            {
                $sqlFileLine =~ s~\\~/~g;
            }

            push @outputLines, @sqlFileLines;
        }

        push @outputLines, "\n";

        logprint "Now have " . scalar(@outputLines) . " output lines";
    }

    writeArray( $destFile, @outputLines);
}


#-------------------------------------------------------------------------------
# Function  : getFilesWithExtras
#
# Purpose   : Reads in from a file a list of files that need to have extra slashes
#             inserted into them before consolidation into the recreated datapump dump.
#
# Arguments : none
#
# Returns   : @filesWithExtras - list of such files
#
# Notes     : The input file is written out by schemadump.pl - each line corresponds
#             to a file in package_body that had the ALTER PACKAGE part tacked on to the end.
#             This whole extras featureset is specific to the release manager database.
#
sub getFilesWithExtras()
{
    my @filesWithExtras;

    open( O, "$srcDir/bookKeeping/filesWithExtras") or
        die "Could not open '$srcDir/bookKeeping/filesWithExtras'\n";
    while( <O> )
    {
        s~[\n\r]+$~~;
        s~\\~/~g;
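        # Keep only the last three path components of each listed file
        # (presumably <topDir>/<botDir>/<file>.sql, matching the layout
        # schemadump.pl writes under $srcDir) before rebuilding the full path.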
        s~.*/(.*?/.*?/.*)~$1~;
        push @filesWithExtras, "$srcDir/$_";
    }
    close( O );

    return @filesWithExtras;
}


#-------------------------------------------------------------------------------
# Function  : insertExtraSlash
#
# Purpose   : Adds in a single line containing a slash to an array
#
# Arguments : $refSqlFileLines (io) - reference to @sqlFileLines - a list of file lines
#             corresponding to one particular .sql file.
#
# Returns   : none
#
# Notes     : This function is only needed for 'PACKAGE BODY' in the release manager database,
#             and is quite specific to that.
#
sub insertExtraSlash(\@)
{
    my ($refSqlFileLines) = @_;

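    # Assumption: the recreated DDL is run through SQL*Plus (or something with the
    # same conventions), where a line containing only '/' executes the preceding
    # PL/SQL block. The CREATE PACKAGE BODY block therefore needs a terminating '/'
    # before the ALTER PACKAGE statement that schemadump.pl appended to the file.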
    # Stores line number containing 'ALTER PACKAGE'
    my $alterPackageLine = -1;

    for( my $i = $#$refSqlFileLines; $i >= 0; $i--)  # iterate backwards through the array
    {
        if( $$refSqlFileLines[$i] =~ m~ALTER PACKAGE~ )
        {
            $alterPackageLine = $i;
            last;
        }
    }

    return if $alterPackageLine <= 0;

    my $needToAddASlash = 0;

    for( my $i = $alterPackageLine; $i >= 0; $i--)  # iterate backwards through the array
    {
        return if $$refSqlFileLines[$i] =~ m~^\s*/\s*$~;  # already has a slash
        next if $$refSqlFileLines[$i] =~ m~^\s*$~ ;       # blank line - keep looking
        $needToAddASlash = 1;
        last;  # line has some other text on it - we need to add a slash
    }

    return unless $needToAddASlash;

    splice @$refSqlFileLines, $alterPackageLine, 0, ("/\n");
}


#-------------------------------------------------------------------------------
# Function  : getOrderedSqlFiles
#
# Purpose   : Reads in a list of .sql files - this list has the same ordering as the
#             objects had in the original datapump dump file.
#
# Arguments : none
#
# Returns   : @orderedSqlFiles - list of .sql files (full paths)
#
# Notes     :
#
sub getOrderedSqlFiles()
{
    my @orderedSqlFiles;

    open( INDEXFILE, "$srcDir/bookKeeping/orderedFiles" ) or
        die "Could not open file '$srcDir/bookKeeping/orderedFiles' for reading\n";
    while( <INDEXFILE> )
    {
        s~[\n\r]+$~~;
        s~\\~/~g;
        push @orderedSqlFiles, $_;
    }
    close(INDEXFILE);

    return @orderedSqlFiles;
}


#-------------------------------------------------------------------------------
# Function  : sortSqlFiles
#
# Purpose   : Takes a list of .sql files and sorts them so that they have the same
#             order as their objects in the original datapump dump file.
#
# Arguments : $refSqlFiles (io) - reference to @sqlFiles - list of .sql files to be sorted
#
# Returns   : none
#
# Notes     : Algorithm has the following steps:
#             + get relative paths to .sql files in @sqlFiles
#             + get list of all .sql files that were originally written out. Store their
#               relative paths in @orderedSqlFiles
#             + get a list of .sql files that are in both @sqlFiles and @orderedSqlFiles.
#               These are ordered since @orderedSqlFiles is ordered and are stored
#               in @goodOrderedSqlFiles.
#             + get a list of .sql files that are in @sqlFiles but not in @orderedSqlFiles.
#               Store in @newSqlFiles
#             + Set passed in list as (@goodOrderedSqlFiles, @newSqlFiles)
#               It is now sorted.
#
#             I have not tested the @newSqlFiles functionality - HSK May 2008.
#
sub sortSqlFiles(\@)
{
    # @sqlFiles is a list of files we need to sort
    # Files not in original datapump dump need to come last
    # Files in original datapump dump need to be sorted in the same order
    # that they had in the enormous datapump dump file
    my ($refSqlFiles) = @_;

    my @sqlFiles = @$refSqlFiles;
    foreach my $sqlFile (@sqlFiles)
    {
        # Turn into relative path
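        # (i.e. keep only the last three path components - the same relative form
        # the getOrderedSqlFiles() entries are reduced to below, so the two lists
        # can be compared with isOneOf)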
        $sqlFile =~ s~.*/(.*?/.*?/.*)~$1~;
    }

    # A list of all files written out in the original datapump dump
    # These are in order of the order datapump put them into its enormous file
    my @orderedSqlFiles = getOrderedSqlFiles();
    foreach my $sqlFile (@orderedSqlFiles)
    {
        # Turn into relative path
        $sqlFile =~ s~.*/(.*?/.*?/.*)~$1~;
    }

    logprint "Got " . scalar(@orderedSqlFiles) . " orderedSqlFiles; the first is '$orderedSqlFiles[0]'";

    # These are those files in @sqlFiles that were also in the
    # original datapump dump
    my @goodOrderedSqlFiles;

    foreach my $orderedSqlFile (@orderedSqlFiles)
    {
        push @goodOrderedSqlFiles, $orderedSqlFile if isOneOf( $orderedSqlFile, @sqlFiles);
    }

    logprint "Got " . scalar(@goodOrderedSqlFiles) . " goodOrderedSqlFiles";

    # In theory, now @goodOrderedSqlFiles should be the same files
    # as @sqlFiles, but in order (of course, files added since the
    # original datapump dump are excluded)

    my @newSqlFiles;

    foreach my $sqlFile (@sqlFiles)
    {
        push @newSqlFiles, $sqlFile if !isOneOf( $sqlFile, @goodOrderedSqlFiles);
    }

    logprint "Got " . scalar(@newSqlFiles) . " newSqlFiles";

    # Get @$refSqlFiles to contain the sorted files
    # Here, we make no effort to sort files added since the datapump dump
    @$refSqlFiles = (@goodOrderedSqlFiles, @newSqlFiles);

    # Change back to a full path
    foreach my $sqlFile (@$refSqlFiles)
    {
        $sqlFile = "$srcDir/$sqlFile";
    }
}


#-------------------------------------------------------------------------------
# Function  : getHeaders
#
# Purpose   : Construct a list of headers for each DDL-Data-Object
#
# Arguments : none
#
# Returns   : @headers - a list of the headers
#
# Notes     : Each header is just a one-line string. (At least for the release manager database).
#             Both this function and getSearchDirs() read from $srcDir/bookKeeping/objectTypes
#
sub getHeaders()
{
    # Get headers that look like this:
    # -- new object type path is: SCHEMA_EXPORT/SYNONYM/SYNONYM

    my @headers;

    open( ORDERFILE, "$srcDir/bookKeeping/objectTypes") or
        die "Could not open '$srcDir/bookKeeping/objectTypes'\n";
    push @headers, "${subSectionSplitter}$_" while( <ORDERFILE> );
    close( ORDERFILE );

    foreach my $header (@headers)
    {
        $header =~ s~[\n\r]+$~~;
        $header =~ s~\\~/~g;
        logprint "Got a header: '$header'\n";
    }

    return @headers;
}


#-------------------------------------------------------------------------------
# Function  : getSearchDirs
#
# Purpose   : Returns a list of directories to search for files in
#
# Arguments : none
#
# Returns   : @searchDirs - the list (has a full path)
#
# Notes     : Both this function and getHeaders() read from $srcDir/bookKeeping/objectTypes
#
sub getSearchDirs()
{
    my @searchDirs;

    open( ORDERFILE, "$srcDir/bookKeeping/objectTypes") or
        die "Could not open '$srcDir/bookKeeping/objectTypes'\n";
    while( <ORDERFILE> )
    {
        tr~A-Z~a-z~;
        s~[\n\r]+$~~;
        s~\\~/~g;

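        # getTopBotDirs() (assumed to come from commonExports.pm) appears to split a
        # lowercased object-type path such as 'schema_export/synonym/synonym' into the
        # top and bottom directory names used by schemadump.pl's directory layout.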
        my ($topDir, $botDir) = getTopBotDirs( $_ );

        push @searchDirs, "$srcDir/$topDir/$botDir";
    }
    close( ORDERFILE );

    return @searchDirs;
}


#-------------------------------------------------------------------------------
# Function  : getDDLHeader
#
# Purpose   : Reads the DDL header from a file
#
# Arguments : none
#
# Returns   : @ddlHeaderLines - array of lines corresponding to the header
#
# Notes     : For the release manager database only a single line is returned.
#             This is '-- CONNECT RELEASE_MANAGER'.
#
sub getDDLHeader()
{
    my @ddlHeaderLines;

    open( DDLHEADER, "$srcDir/bookKeeping/DDLHeader")
        or die "Could not open '$srcDir/bookKeeping/DDLHeader'\n";
    while( <DDLHEADER> )
    {
        s~[\n\r]+$~~;
        s~\\~/~g;
        push @ddlHeaderLines, $_;
    }

    close( DDLHEADER );

    return @ddlHeaderLines;
}


#-------------------------------------------------------------------------------
# Function  : directoryListing
#
# Purpose   : Lists the contents of a directory
#
# Arguments : $diry (i) - directory to be listed
#
# Returns   : @lines - files in the directory
#
# Notes     : Has separate unix/windows implementations.
#
sub directoryListing($)
{
    my ($diry) = @_;

    return runCommand("ls -1d $diry/*") if( $UNIX );

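    # On Windows, 'dir /B' prints bare file names with no directory prefix, so the
    # directory is prepended below to match the full paths that 'ls -1d $diry/*'
    # returns on unix.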
    (my $windowsDiry = $diry) =~ s~/~\\~g;

    my @lines = runCommand("dir $windowsDiry /B");
    my @goodLines;

    foreach my $line (@lines)
    {
        push @goodLines, "$diry/$line" unless $line =~ m~^\s*$~;
    }
    return @goodLines;
}


#-------------------------------------------------------------------------------
# Function  : getSqlFiles
#
# Purpose   : Gets a list of the .sql files for a particular directory
#
# Arguments : $searchDir - directory to search
#
# Returns   : @sqlFiles - list (full file paths)
#
# Notes     :
#
sub getSqlFiles($)
{
    my ($searchDir) = @_;

    my @files = directoryListing( $searchDir );
    my @sqlFiles;

    foreach my $file (@files)
    {
        push @sqlFiles, $file if $file =~ /\.sql$/;
    }

    return @sqlFiles;
}


#-------------------------------------------------------------------------------
# Function  : parseCommandLine
#
# Purpose   : Parses command line; invokes help if necessary
#
# Arguments : nothing
#
# Returns   : nothing
#
# Notes     : Sets up various global variables; these are not checked here - they should be
#             checked in startupChecks()
#
sub parseCommandLine()
{
    my $opt_help = 0;
    my $opt_manual = 0;

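    # Getopt::Long's default option-name abbreviation means the '-man' form shown
    # in the POD matches the 'manual' option below.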
    my $result = GetOptions (
        "help"     => \$opt_help,    # flag
        "manual"   => \$opt_manual,  # flag
        "verbose+" => \$verbose,     # flag
        "src=s"    => \$srcDir,      # String
        "dest=s"   => \$destFile,    # String
        "log=s"    => \$logFile,     # String
    );

    pod2usage(-verbose => 2) if( $opt_manual );
    pod2usage(-verbose => 0, -message => "Version: $VERSION") if ($opt_help > 0 || ! $result );
}


#-------------------------------------------------------------------------------
# Function  : startupChecks
#
# Purpose   : Checks that important variables are set sanely
#
# Arguments : nothing
#
# Returns   : nothing
#
# Notes     : Calls die() if things aren't set sanely.
#             This function opens the logfile.
#
sub startupChecks()
{
    die "You need to specify a source directory using '-src=<srcDir>'\n" unless $srcDir;
    die "You need to specify a destination file using '-dest=<destFile>'\n"
        unless $destFile;

    $srcDir =~ s~\\~/~g;
    $destFile =~ s~\\~/~g;

    die "Source directory '$srcDir' does not exist\n" unless -d $srcDir;

    openLog();
}

#######################################################################
# Documentation
#######################################################################

=pod

=head1 NAME

ddlfile.pl - creates a DDL file from a set of directories - basically
does the opposite of schemadump.pl

=head1 SYNOPSIS

 ddlfile.pl [options] -src=<srcDir> -dest=<destFile>

 Options:

   -help           - brief help message
   -man            - full documentation
   -src=srcDir     - directory to extract files from (mandatory)
   -dest=destFile  - file to pack to (mandatory)
   -log=logFile    - log messages to this file

=head1 OPTIONS

=over 8

=item B<-help>

Prints a brief help message and exits.

=item B<-man>

Prints the manual page and exits.

=item B<-src=srcDir>

Specify the directory to extract the clearcase files from.

=item B<-dest=destFile>

Specify the file to pack the clearcase files to.

=item B<-log=logFile>

Specify a file to write log messages to. Default is to just write
to the terminal.

=back

=head1 DESCRIPTION

This script is used to recreate the output of datapump. It is
designed for use with the release manager database.
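
For example (the directory and file names here are illustrative only):

    ddlfile.pl -src=schemaDumpOutput -dest=recreated_ddl.sql -log=ddlfile.log

where the directory given to -src is a directory tree previously written
out by schemadump.pl.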

=cut