#!/usr/bin/perl -w

########################################################################
# Copyright (C) 2008 ERG Limited, All rights reserved
#
# Module name   : ddlfile.pl
# Module type   : Standalone utility
# Compiler(s)   : n/a
# Environment(s): windows (dos), unix (solaris and linux)
#
# Description   : Recreates a DDL file from the directory structure 
#                 created by schemadump.pl.  This is useful for checking
#                 that schemadump.pl works, and also is intended
#                 to assist in creating a database from a set of
#                 clearcase files.
#
# History       : Created by Haydon Knight May 2008
#
# Usage         : ddlfile.pl [options] -src=<srcDir> -dest=<destFile>
#
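# Layout        : <srcDir> is the tree written by schemadump.pl.  As read below,
#                 it is expected to contain:
#                   <srcDir>/bookKeeping/DDLHeader       - header lines for the output file
#                   <srcDir>/bookKeeping/objectTypes     - DDL object types, one per line
#                   <srcDir>/bookKeeping/orderedFiles    - .sql files in original dump order
#                   <srcDir>/bookKeeping/filesWithExtras - files needing an extra '/'
#                   <srcDir>/<topDir>/<botDir>/*.sql     - the .sql files to consolidate
#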
########################################################################

#######################################################################
# Use lines
#######################################################################
require 5.6.1;
use strict;
use warnings;
use Pod::Usage;                             # required for help support
use Getopt::Long;

use commonExports; # see commonExports.pm in this directory

#######################################################################
# Function prototypes
#######################################################################

sub startupChecks();
sub parseCommandLine();
sub main();

sub getFilesWithExtras();
sub insertExtraSlash(\@);
sub getOrderedSqlFiles();
sub sortSqlFiles(\@);
sub getHeaders();
sub getSqlFiles($);
sub directoryListing($);
sub getDDLHeader();
sub getSearchDirs();

#######################################################################
# Constant global variables
#######################################################################

my $VERSION = "1.0.1";

#######################################################################
# Hash tables - none defined at present; any added here would be specific to the
#               release manager database and would need adjusting for other databases
#######################################################################

#######################################################################
# Other global variables
#######################################################################

my $srcDir;
my $destFile;

#######################################################################
# Main code
#######################################################################

parseCommandLine();
startupChecks();
main();
finish();

#######################################################################
# Function definitions
#######################################################################

#-------------------------------------------------------------------------------
# Function  : main
#
# Purpose   : Main function of this script
#
# Arguments : none
#
# Returns   : none
#
# Notes     : Does everything but setting up at start and tearing down at end.
#
sub main()
{
    my @outputLines = getDDLHeader(); # The header for the whole output file
    
    # Get the headers for each DDL-data-type 
    # i.e. a header for the sequences, a header for the procedures etc.
    my @headers = getHeaders();

    # Get a list of all directories to find .sql files in
    my @searchDirs = getSearchDirs();

    # These need extra slashes inserted
    my @filesWithExtras = getFilesWithExtras();

    for( my $i = 0; $i < scalar(@searchDirs); $i++)
    {
        logprint "Working with searchDir $i: '$searchDirs[$i]'";

        push @outputLines, "$headers[$i]\n";

        my @sqlFiles = getSqlFiles( $searchDirs[$i] );

        logprint "Initially got " . scalar(@sqlFiles) . " .sql files";

        sortSqlFiles( @sqlFiles );

        logprint "Now got " . scalar(@sqlFiles) . " .sql files";

        my $searchDirRelativePath = $searchDirs[$i];
        $searchDirRelativePath =~ s~/+$~~;           # get rid of trailing slashes
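        # Keep only the last two path components,
        # e.g. '<srcDir>/tables/tables' -> 'tables/tables' (names illustrative)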
        $searchDirRelativePath =~ s~.*/(.*?/.*)~$1~; # make it into a relative path

        for( my $j = 0; $j < scalar(@sqlFiles); $j++)
        {
            my @sqlFileLines = readArray( $sqlFiles[$j] );
            
            foreach my $sqlFileLine (@sqlFileLines )
            {
                $sqlFileLine =~ s~\\~/~g;
            }

            # Files flagged by schemadump.pl need an extra '/' before their trailing
            # ALTER PACKAGE statement - see insertExtraSlash()
            insertExtraSlash( @sqlFileLines ) if isOneOf( $sqlFiles[$j], @filesWithExtras );

            push @outputLines, @sqlFileLines;
        }

        push @outputLines, "\n";

        logprint "Now have " . scalar(@outputLines) . " output lines";
    }

    writeArray( $destFile, @outputLines);
}


#-------------------------------------------------------------------------------
# Function  : getFilesWithExtras
#
# Purpose   : Reads in from a file a list of files that need to have extra slashes
#             inserted into them before consolidation into the recreated datapump dump.
#
# Arguments : none
#
# Returns   : @filesWithExtras - list of such files
#
# Notes     : The input file is written out by schemadump.pl - each line corresponds
#             to a file in package_body that had the ALTER PACKAGE part tacked on to the end.
#             This whole extras featureset is specific to the release manager database.
#
sub getFilesWithExtras()
{
    my @filesWithExtras;

    open( O, "$srcDir/bookKeeping/filesWithExtras") or 
        die "Could not open '$srcDir/bookKeeping/filesWithExtras'\n";
    while( <O> )
    {
        s~[\n\r]+$~~;
        s~\\~/~g;
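        # Keep only the last three path components, e.g.
        # '.../package_body/package_body/MY_PKG.sql' -> 'package_body/package_body/MY_PKG.sql'
        # (path shown is illustrative)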
        s~.*/(.*?/.*?/.*)~$1~;
        push @filesWithExtras, "$srcDir/$_";
    }
    close( O );

    return @filesWithExtras;
}


#-------------------------------------------------------------------------------
# Function  : insertExtraSlash
#
# Purpose   : Adds in a single line containing a slash to an array
#
# Arguments : $refSqlFileLines (io) - reference to @sqlFileLines - a list of file lines
#                                     corresponding to one particular .sql file.
#
# Returns   : none
#
# Notes     : This function is only needed for 'PACKAGE BODY' in the release manager database,
#             and is quite specific to that.
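#
#             Illustrative example - a package body that ends like this:
#                 END MY_PKG;
#                 ALTER PACKAGE "MY_SCHEMA"."MY_PKG" COMPILE ...
#             is changed to:
#                 END MY_PKG;
#                 /
#                 ALTER PACKAGE "MY_SCHEMA"."MY_PKG" COMPILE ...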
#
sub insertExtraSlash(\@)
{
    my ($refSqlFileLines) = @_;
    
    # Stores line number containing 'ALTER PACKAGE'
    my $alterPackageLine = -1;

    for( my $i = $#$refSqlFileLines; $i >= 0; $i--) # iterate backwards through the array
    {
        if( $$refSqlFileLines[$i] =~ m~ALTER PACKAGE~ )
        {
            $alterPackageLine = $i;
            last;
        }
    }

    return if $alterPackageLine <= 0;

    my $needToAddASlash = 0;

    for( my $i = $alterPackageLine - 1; $i >= 0; $i--) # scan backwards from the line before ALTER PACKAGE
    {
        return if $$refSqlFileLines[$i] =~ m~^\s*/\s*$~; # already has a slash
        next if $$refSqlFileLines[$i] =~ m~^\s*$~ ; # blank line - keep looking
        $needToAddASlash = 1;
        last; # line has some other text on it - we need to add a slash
    }

    return unless $needToAddASlash;

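    # Insert a line containing just '/' immediately before the ALTER PACKAGE statement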
    splice @$refSqlFileLines, $alterPackageLine, 0, ("/\n");
}


#-------------------------------------------------------------------------------
# Function  : getOrderedSqlFiles
#
# Purpose   : Reads in a list of .sql files - this list has the same ordering as the
#             objects had in the original datapump dump file.
#
# Arguments : none
#
# Returns   : @orderedSqlFiles - list of .sql files (full paths)
#
# Notes     :
#
sub getOrderedSqlFiles()
{
    my @orderedSqlFiles;

    open( INDEXFILE, "$srcDir/bookKeeping/orderedFiles" ) or 
        die "Could not open file '$srcDir/bookKeeping/orderedFiles' for reading\n";
    while( <INDEXFILE> )
    {
        s~[\n\r]+$~~;
        s~\\~/~g;
        push @orderedSqlFiles, $_;
    }
    close(INDEXFILE);

    return @orderedSqlFiles;
}


#-------------------------------------------------------------------------------
# Function  : sortSqlFiles
#
# Purpose   : Takes a list of .sql files and sorts them so that they have the same
#             order as their objects in the original datapump dump file.
#
# Arguments : $refSqlFiles (io) - reference to @sqlFiles - list of .sql files to be sorted
#
# Returns   : none
#
# Notes     : Algorithm has the following steps:
#               + get relative paths to .sql files in @sqlFiles
#               + get list of all .sql files that were originally written out.  Store their
#                    relative paths in @orderedSqlFiles
#               + get a list of .sql files that are in both @sqlFiles and @orderedSqlFiles
#                    These are ordered since @orderedSqlFiles is ordered and are stored
#                    in @goodOrderedSqlFiles.
#               + get a list of .sql files that are in @sqlFiles but not in @orderedSqlFiles.
#                    Store in @newSqlFiles
#               + Set passed in list as (@goodOrderedSqlFiles, @newSqlFiles)
#                    It is now sorted.
#
#             I have not tested the @newSqlFiles functionality - HSK May 2008.
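#
#             Illustrative example: if @sqlFiles is (B.sql, A.sql, C.sql) and
#             @orderedSqlFiles is (A.sql, B.sql), the result is
#             (A.sql, B.sql, C.sql) - C.sql was not in the original dump, so it
#             simply goes last, unsorted.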
#
sub sortSqlFiles(\@)
{
    # @sqlFiles is a list of files we need to sort
    # Files not in original datapump dump need to come last
    # Files in original datapump dump need to be sorted in the same order
    # that they were in in the enormous datapump dump file
    my ($refSqlFiles) = @_;

    my @sqlFiles = @$refSqlFiles;
    foreach my $sqlFile (@sqlFiles)
    {
        # Turn into relative path
        $sqlFile =~ s~.*/(.*?/.*?/.*)~$1~;
    }

    # A list of all files written out in the original datapump dump
    # These are in order of the order datapump put them into its enormous file
    my @orderedSqlFiles = getOrderedSqlFiles();
    foreach my $sqlFile (@orderedSqlFiles)
    {
        # Turn into relative path
        $sqlFile =~ s~.*/(.*?/.*?/.*)~$1~;
    }

    logprint "Got " . scalar(@orderedSqlFiles) . " orderedSqlFiles; the first is '$orderedSqlFiles[0]'";

    # These are those files in @sqlFiles that were also in the
    # original datapump dump
    my @goodOrderedSqlFiles;

    foreach my $orderedSqlFile (@orderedSqlFiles)
    {
        push @goodOrderedSqlFiles, $orderedSqlFile if isOneOf( $orderedSqlFile, @sqlFiles);
    }

    logprint "Got " . scalar(@goodOrderedSqlFiles) . " goodOrderedSqlFiles";

    # In theory, now @goodOrderedSqlFiles should be the same files
    # as @sqlFiles, but in order (of course, files added since the
    # original datapump dump are excluded)

    my @newSqlFiles;

    foreach my $sqlFile (@sqlFiles)
    {
        push @newSqlFiles, $sqlFile if !isOneOf( $sqlFile, @goodOrderedSqlFiles);       
    }

    logprint "Got " . scalar(@newSqlFiles) . " newSqlFiles";

    # Get @$refSqlFiles to contain the sorted files
    # Here, we make no effort to sort files added since the datapump dump
    @$refSqlFiles = (@goodOrderedSqlFiles, @newSqlFiles);

    # Change back to a full path    
    foreach my $sqlFile (@$refSqlFiles)
    {
        $sqlFile = "$srcDir/$sqlFile";
    }
}


#-------------------------------------------------------------------------------
# Function  : getHeaders
#
# Purpose   : Construct a list of headers for each DDL-Data-Object
#
# Arguments : none
#
# Returns   : @headers - a list of the headers
#
# Notes     : Each header is just a one-line string.  (At least for the release manager database).
#             Both this function and getSearchDirs() read from $srcDir/bookKeeping/objectTypes
#
sub getHeaders()
{
#   Get headers that look like this:
#    -- new object type path is: SCHEMA_EXPORT/SYNONYM/SYNONYM

    my @headers;

    open( ORDERFILE, "$srcDir/bookKeeping/objectTypes") or
        die "Could not open '$srcDir/bookKeeping/objectTypes'\n";
    push @headers, "${subSectionSplitter}$_" while( <ORDERFILE> );
    close( ORDERFILE );

    foreach my $header (@headers)
    {
        $header =~ s~[\n\r]+$~~;
        $header =~ s~\\~/~g;
        logprint "Got a header: '$header'\n";
    }

    return @headers;
}


#-------------------------------------------------------------------------------
# Function  : getSearchDirs
#
# Purpose   : Returns a list of directories to search for files in
#
# Arguments : none
#
# Returns   : @searchDirs - the list (has a full path)
#
# Notes     : Both this function and getHeaders() read from $srcDir/bookKeeping/objectTypes
#
sub getSearchDirs()
{
    my @searchDirs;
    
    open( ORDERFILE, "$srcDir/bookKeeping/objectTypes") or
        die "Could not open '$srcDir/bookKeeping/objectTypes'\n";
    while( <ORDERFILE> )
    {
        tr~A-Z~a-z~;
        s~[\n\r]+$~~;
        s~\\~/~g;

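        # getTopBotDirs() comes from commonExports.pm; it maps the lower-cased
        # object-type path onto the directory pair schemadump.pl wrote the .sql
        # files under, e.g. 'schema_export/table/table' -> ('table', 'table')
        # (example illustrative - see commonExports.pm)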
        my ($topDir, $botDir) = getTopBotDirs( $_ );

        push @searchDirs, "$srcDir/$topDir/$botDir";
    }
    close( ORDERFILE );

    return @searchDirs;
}


#-------------------------------------------------------------------------------
# Function  : getDDLHeader
#
# Purpose   : Reads the DDL header from a file
#
# Arguments : none
#
# Returns   : @ddlheaderLines - array of lines corresponding to the header
#
# Notes     : For the release manager database only a single line is returned.
#             This is '-- CONNECT RELEASE_MANAGER'.
#
sub getDDLHeader()
{
    my @ddlHeaderLines;

    open( DDLHEADER, "$srcDir/bookKeeping/DDLHeader") 
        or die "Could not open '$srcDir/bookKeeping/DDLHeader'\n";
    while( <DDLHEADER> )
    {
        s~[\n\r]+$~~;
        s~\\~/~g;
        push @ddlHeaderLines, $_;
    }

    close( DDLHEADER );

    return @ddlHeaderLines;
}


#-------------------------------------------------------------------------------
# Function  : directoryListing
#
# Purpose   : Lists the contents of a directory
#
# Arguments : $diry (i) - directory to be listed
#
# Returns   : @lines - files in the directory
#
# Notes     : Has separate unix/windows implementations.
#
sub directoryListing($)
{
    my ($diry) = @_;

    return runCommand("ls -1d $diry/*") if( $UNIX );

    (my $windowsDiry = $diry) =~ s~/~\\~g;

    my @lines = runCommand("dir $windowsDiry /B");
    my @goodLines;

    foreach my $line (@lines)
    {
        push @goodLines, "$diry/$line" unless $line =~ m~^\s*$~;
    }
    return @goodLines;
}


#-------------------------------------------------------------------------------
# Function  : getSqlFiles
#
# Purpose   : Gets a list of the .sql files for a particular directory
#
# Arguments : $searchDir - directory to search
#
# Returns   : @sqlFiles - list (full file paths)
#
# Notes     :
#
sub getSqlFiles($)
{
    my ($searchDir) = @_;

    my @files = directoryListing( $searchDir );
    my @sqlFiles;

    foreach my $file (@files)
    {
        push @sqlFiles, $file if $file =~ /\.sql$/;
    }

    return @sqlFiles;
}


#-------------------------------------------------------------------------------
# Function  : parseCommandLine
#
# Purpose   : Parses command line; invokes help if necessary
#
# Arguments : nothing
#
# Returns   : nothing
#
# Notes     : Sets up various global variables; these are not checked here - they should be
#             checked in startupChecks()
#
sub parseCommandLine()
{
    my $opt_help = 0;
    my $opt_manual = 0;

    my $result = GetOptions (
        "help"          => \$opt_help,              # flag
        "manual"        => \$opt_manual,            # flag
        "verbose+"      => \$verbose,               # flag
        "src=s"         => \$srcDir,                # String
        "dest=s"        => \$destFile,              # String
        "log=s"         => \$logFile,               # String
    );

    pod2usage(-verbose => 2) if( $opt_manual );
    pod2usage(-verbose => 0, -message => "Version: $VERSION") if ($opt_help > 0 || ! $result );
}


#-------------------------------------------------------------------------------
# Function  : startupChecks
#
# Purpose   : Checks that important variables are set sanely
#
# Arguments : nothing
#
# Returns   : nothing
#
# Notes     : Calls die() if things aren't set sanely.
#             This function opens the logfile.
#
sub startupChecks()
{
    die "You need to specify a source directory using '-src=<srcDir>'\n" unless $srcDir;
    die "You need to specify a destination file using '-dest=<destFile>'\n"
        unless $destFile;

    $srcDir   =~ s~\\~/~g;
    $destFile =~ s~\\~/~g;

    die "Source directory '$srcDir' does not exist\n" unless -d $srcDir;

    openLog();
}

#######################################################################
# Documentation
#######################################################################

=pod

=head1 NAME

ddlfile.pl - creates a DDL file from a set of directories - basically
does the opposite of schemadump.pl

=head1 SYNOPSIS

ddlfile.pl [options] -src=<srcDir> -dest=<destFile>

Options:

  -help              - Brief help message
  -man               - Full documentation
  -src=srcDir        - Directory to extract files from (mandatory)
  -dest=destFile     - File to pack to (mandatory)
  -log=logFile       - Log messages to this file

=head1 OPTIONS

=over 8

=item B<-help>

Prints a brief help message and exits.

=item B<-man>

Prints the manual page and exits.

=item B<-src=srcDir>

Specify the directory to extract the clearcase files from.  This is the
directory structure written by schemadump.pl.

=item B<-dest=destFile>

Specify the file to pack the clearcase files into, i.e. the recreated DDL file.

=item B<-log=logFile>

Specify a file to write log messages to.  Default is to just write
to the terminal.

=back

=head1 DESCRIPTION

This script recreates the DDL output of datapump from the directory
structure written by schemadump.pl.  It is designed for use with the
release manager database.
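
A typical invocation (paths are illustrative):

  ddlfile.pl -src=schema_dump_dir -dest=release_manager.sql -log=ddlfile.log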

=cut