diff --git a/doc/src/sgml/ref/pg_upgrade.sgml b/doc/src/sgml/ref/pg_upgrade.sgml
index 2b9406958c..6d83e888ee 100644
--- a/doc/src/sgml/ref/pg_upgrade.sgml
+++ b/doc/src/sgml/ref/pg_upgrade.sgml
@@ -1,5 +1,5 @@
@@ -24,7 +24,7 @@ PostgreSQL documentation
1999-07-31
-pg_upgrade [ -f filename ] old_data_dir
+pg_upgrade -s filename [ -d filename ] old_data_dir
@@ -58,7 +58,7 @@ pg_upgrade [ -f filename ]
Then do:
-$ pg_dumpall -s >db.out
+$ pg_dumpall -s > schema.out
to dump out your old database's table definitions without any data.
@@ -108,7 +108,7 @@ $ make install
Change your working directory to the
pgsql main directory, and type:
-$ pg_upgrade -f db.out data.old
+$ pg_upgrade -s schema.out -d data.out data.old
The program will do some checking to make sure everything is properly
-configured, and will run your db.out script to recreate all the databases
+configured, and will run your schema.out script to recreate all the databases
diff --git a/src/bin/pg_dump/pg_upgrade b/src/bin/pg_dump/pg_upgrade
index ab1669c00c..a78324c1f1 100755
--- a/src/bin/pg_dump/pg_upgrade
+++ b/src/bin/pg_dump/pg_upgrade
@@ -3,7 +3,7 @@
# pg_upgrade: update a database without needing a full dump/reload cycle.
# CAUTION: read the manual page before trying to use this!
-# $Header: /cvsroot/pgsql/src/bin/pg_dump/Attic/pg_upgrade,v 1.19 2002/01/09 16:08:54 momjian Exp $
+# $Header: /cvsroot/pgsql/src/bin/pg_dump/Attic/pg_upgrade,v 1.20 2002/01/09 21:50:52 momjian Exp $
#
# NOTE: we must be sure to update the version-checking code a few dozen lines
# below for each new PostgreSQL release.
@@ -12,24 +12,31 @@ TMPFILE="/tmp/pgupgrade.$$"
trap "rm -f $TMPFILE" 0 1 2 3 15
-if [ "$#" -eq 0 ]
-then echo "Usage: $0 -f inputfile old_data_dir" 1>&2
- exit 1
-fi
-
-if [ "X$1" = "X-f" ]
-then INPUT="$2"
- shift 2
- if [ ! -f "$INPUT" ]
- then echo "$INPUT does not exist" 1>&2
+SCHEMA=""
+DATA=""
+while [ "$#" -gt 1 ]
+do
+ if [ "X$1" = "X-s" ]
+ then SCHEMA="$2"
+ if [ ! -s "$SCHEMA" ]
+        then echo "$SCHEMA does not exist or is empty" 1>&2
+ exit 1
+ fi
+ shift 2
+ elif [ "X$1" = "X-d" ]
+ then DATA="$2"
+ if [ ! -s "$DATA" ]
+        then echo "$DATA does not exist or is empty" 1>&2
+ exit 1
+ fi
+ shift 2
+ else echo "Usage: $0 -s schema_dump [ -d data_dump ] old_data_dir" 1>&2
exit 1
fi
-else echo "Usage: $0 -f inputfile old_data_dir" 1>&2
- exit 1
-fi
+done
-if [ "$#" -ne 1 ]
-then echo "Usage: $0 -f inputfile old_data_dir" 1>&2
+if [ "$#" -ne 1 -o ! "$SCHEMA" ]
+then echo "Usage: $0 -s schema_dump [ -d data_dump ] old_data_dir" 1>&2
exit 1
fi
@@ -38,8 +45,7 @@ OLDDIR="$1"
# check things
if [ ! -d "./data" ]
-then echo "`basename $0` must be run from the directory containing
-the database directory \`data\' (`dirname $PGDATA`.)" 1>&2
+then echo "`basename $0` must be run from the directory containing the database directory \`data\' (`dirname $PGDATA`.)" 1>&2
echo "You must have run initdb to create the template1 database." 1>&2
exit 1
fi
@@ -80,6 +86,12 @@ SRCVERSION=`cat ./$OLDDIR/PG_VERSION`
# MYVERSION is the expected output database version
MYVERSION="7.1"
+if [ "$SRCVERSION" = "7.1" -a ! "$DATA" ]
+then echo "$0 requires a full data dump file to upgrade from version $SRCVERSION." 1>&2
+ echo "Use the '-d' parameter to specify the dump file" 1>&2
+ exit 1
+fi
+
if [ "$DESTVERSION" != "$MYVERSION" -a "$DESTVERSION" != "$SRCVERSION" ]
then echo "$0 is for PostgreSQL version $MYVERSION, but ./data/PG_VERSION contains $DESTVERSION." 1>&2
echo "Did you run initdb for version $MYVERSION?" 1>&2
@@ -103,56 +115,65 @@ esac
# OK, ready to proceed.
-# Execute the input script to create everything, except that we remove
-# any COPY statements, except for the ones that load pg_shadow/pg_group.
-# There shouldn't be any others in there anyway...
-
-cat $INPUT | awk ' {
- if (tolower($1) == "copy" &&
- $2 != "pg_shadow" &&
- $2 != "pg_group")
- while (getline $0 > 0 && $0 != "\\.")
- ;
+# Execute the schema script to create everything, except modify any
+# sequences with int4 maximums if we are upgrading from 7.1.
+cat $SCHEMA | awk -F' ' '{
+ if ("'"$SRCVERSION"'" == "7.1" &&
+ $1 == "CREATE" &&
+ $2 == "SEQUENCE" &&
+ ($9 >= 2147483646 && # handle OS round
+ ($9 <= 2147483648))
+ {
+ for(i=1; i < NF; i++)
+ if (i != 9)
+ printf "%s ", $i;
+ else
+ printf "%s ", "9223372036854775807";
+ print;
+ }
else print $0;
- }' > $TMPFILE
-
-psql "template1" < $TMPFILE
+ }' |
+psql "template1"
if [ $? -ne 0 ]
-then echo "There were errors in the input script $INPUT.
+then echo "There were errors in the input script $SCHEMA.
$0 aborted." 1>&2
exit 1
fi
-echo "Input script $INPUT complete, fixing row commit statuses..."
-# Now vacuum each result database to mark all system-table rows as committed,
-# because when pg_clog is replaced with the saved version, the transaction
-# statuses will no longer match the data. VACUUM will force the on-row
-# status flags to the right value so that pg_clog will not matter anymore.
-# Note: we used to try to do this as part of the previous step, but that
-# risks permissions problems if VACUUM is run as the wrong user.
-# Note: the initial VACUUM does template1, then we do everything else.
+if [ "$SRCVERSION" != "7.1" ]
+then echo "Input script $SCHEMA complete, fixing row commit statuses..."
+else echo "Input script $SCHEMA complete, setting int8 sequences..."
-cat $INPUT | awk 'BEGIN { print "VACUUM;" }
- {
- if (tolower($1) == "copy")
- while (getline $0 > 0 && $0 != "\\.")
- ;
- else if (tolower($1) == "\\connect" &&
- $2 != "-" &&
- $2 != "template1")
- printf "\\connect %s\nVACUUM;\n", $2;
- }' > $TMPFILE
-
-psql "template1" < $TMPFILE
+# Set all the sequence counters because they are not brought over
+# in the schema dump, and the old 7.1 sequences where int4 in size
+# so bringing over the file wouldn't help us anyway.
+cat $DATA | awk '$0 ~ /^\\connect / || $0 ~ /^SELECT setval \(/ \
+ {print $0;}' |
+psql "template1"
if [ $? -ne 0 ]
-then echo "There were errors in the vacuuming step.
+then echo "There were errors in the input script $DATA.
$0 aborted." 1>&2
exit 1
fi
+echo "Int8 sequences set, fixing row commit statuses..."
+fi
+
+# Now vacuum each result database in case our transaction increase
+# causes all the XID's to be marked with the frozen XID.
+# Use a for-loop (not a pipeline) so 'exit' aborts the whole script,
+# and strip psql's listing decoration down to bare database names.
+for DB in `psql -l -t | awk 'NF > 0 { print $1 }'`
+do
+    echo "VACUUM;" | psql "$DB"
+    if [ $? -ne 0 ]
+    then echo "There were errors during VACUUM.
+$0 aborted." 1>&2
+    exit 1
+    fi
+done
+
# should be pretty small file
pg_dumpall -s > $TMPFILE 2>/dev/null
@@ -161,7 +182,7 @@ pg_ctl stop
echo "Commit fixes complete, moving data files..."
-cat "$INPUT" | while read LINE
+cat "$SCHEMA" | while read LINE
do
if /bin/echo "$LINE" | grep -q "^\\\\connect "
then OLDDB="$DB"
@@ -176,7 +197,7 @@ do
if echo "$LINE" | grep -q "^-- TOC Entry ID [0-9]* (OID "
then OID="`echo \"$LINE\" | cut -d' ' -f7 | tr -d ')'`"
fi
- if echo "$LINE" | grep -q "^-- Name: [^ ]* Type: TABLE "
+ if echo "$LINE" | egrep -q "^-- Name: [^ ]* Type: (TABLE|INDEX) "
then TABLE="`echo \"$LINE\" | cut -d' ' -f3`"
# skip system tables
if [ "`echo \"$TABLE\" | cut -c 1-3`" = "pg_" ]
@@ -194,7 +215,8 @@ do
{print $0 >> "/tmp/x";
print $3 >> "/tmp/x";
print newdb," ", newoid >> "/tmp/x"}
- $0 ~ /^-- Name: [^ ]* Type: TABLE / && \
+    ($0 ~ /^-- Name: [^ ]* Type: TABLE / || \
+ $0 ~ /^-- Name: [^ ]* Type: INDEX /) && \
newdb == "'"$DB"'" && \
$3 == "'"$TABLE"'" \
{ ret=newoid; exit}
@@ -229,6 +251,18 @@ do
fi
done
+# set max transaction id, check < 2gig
+
+# 7.1 has non-compressed log file format
+if [ "$SRCVERSION" = "7.1" ]
+then
+# pg_log is oid 1269 in 7.1
+LOGSIZE=`ls -l "$OLDDIR"/global/1269 "$OLDDIR"/global/1269.* 2>/dev/null |
+awk -F' *' '
+ BEGIN {sum=0;}
+ {sum += $5;}
+ END {print sum;}'`
+fi
+
echo "You must stop/start the postmaster before doing anything else."
echo "You may remove the $OLDDIR directory with 'rm -r $OLDDIR'."