Fri Oct 19 08:22:37 PDT 2007
- Previous message: [Slony1-commit] slony1-engine/tests/test1 init_subscribe_set.ik
- Next message: [Slony1-commit] slony1-engine/src/slon remote_worker.c
- Messages sorted by: [ date ] [ thread ] [ subject ] [ author ]
Update of /home/cvsd/slony1/slony1-engine/tests/testlogship In directory main.slony.info:/tmp/cvs-serv6908 Modified Files: README generate_dml.sh init_subscribe_set.ik moveset.sh settings.ik Log Message: Lots of changes to log shipping test based on the changes introduced in 1.2.12 Index: settings.ik =================================================================== RCS file: /home/cvsd/slony1/slony1-engine/tests/testlogship/settings.ik,v retrieving revision 1.3 retrieving revision 1.4 diff -C2 -d -r1.3 -r1.4 *** settings.ik 27 Aug 2007 15:43:03 -0000 1.3 --- settings.ik 19 Oct 2007 15:22:35 -0000 1.4 *************** *** 1,6 **** ! NUMCLUSTERS=${NUMCLUSTERS:-"1"} ! NUMNODES=${NUMNODES:-"4"} ! ORIGINNODE=1 ! WORKERS=${WORKERS:-"1"} ARCHIVE2=true # Node #2 needs to run log archiving ! LOGSHIP3=true # Node #3 receives data via log shipping --- 1,8 ---- ! NUMCLUSTERS=1 ! NUMNODES=3 # These are the "regular" Slony-I nodes ! # node #4 will also be populated as a log shipping node ! ! ORIGINNODE=1 # at least initially - origin is later moved to node #3 ! WORKERS=1 ARCHIVE2=true # Node #2 needs to run log archiving ! SLONCONF2=true Index: init_subscribe_set.ik =================================================================== RCS file: /home/cvsd/slony1/slony1-engine/tests/testlogship/init_subscribe_set.ik,v retrieving revision 1.3 retrieving revision 1.4 diff -C2 -d -r1.3 -r1.4 *** init_subscribe_set.ik 27 Aug 2007 15:43:03 -0000 1.3 --- init_subscribe_set.ik 19 Oct 2007 15:22:35 -0000 1.4 *************** *** 3,7 **** sleep (seconds = 2); echo 'done sleeping...'; ! subscribe set (id = 1, provider = 1, receiver = 4, forward = yes); echo 'sleep a couple of seconds...'; sleep (seconds = 2); --- 3,7 ---- sleep (seconds = 2); echo 'done sleeping...'; ! subscribe set (id = 1, provider = 1, receiver = 3, forward = yes); echo 'sleep a couple of seconds...'; sleep (seconds = 2); Index: README =================================================================== RCS file: /home/cvsd/slony1/slony1-engine/tests/testlogship/README,v retrieving revision 1.3 retrieving revision 1.4 diff -C2 -d -r1.3 -r1.4 *** README 20 Apr 2007 21:43:14 -0000 1.3 --- README 19 Oct 2007 15:22:35 -0000 1.4 *************** *** 28,30 **** table 4, adding two new columns, one to be populated via a default, for new tuples; the other has no default, but we assign the value 42 ! to all tuples existing at the time that the DDL script runs. \ No newline at end of file --- 28,33 ---- table 4, adding two new columns, one to be populated via a default, for new tuples; the other has no default, but we assign the value 42 ! to all tuples existing at the time that the DDL script runs. ! ! Note that node #2 (subscriber) has slon configuration managed via ! config file Index: moveset.sh =================================================================== RCS file: /home/cvsd/slony1/slony1-engine/tests/testlogship/moveset.sh,v retrieving revision 1.2 retrieving revision 1.3 diff -C2 -d -r1.2 -r1.3 *** moveset.sh 27 Aug 2007 15:43:03 -0000 1.2 --- moveset.sh 19 Oct 2007 15:22:35 -0000 1.3 *************** *** 2,5 **** echo " LOCK SET ( ID = 1, ORIGIN = 1 ); ! MOVE SET ( ID = 1, OLD ORIGIN = 1, NEW ORIGIN = 4 ); " --- 2,5 ---- echo " LOCK SET ( ID = 1, ORIGIN = 1 ); ! MOVE SET ( ID = 1, OLD ORIGIN = 1, NEW ORIGIN = 3 ); " Index: generate_dml.sh =================================================================== RCS file: /home/cvsd/slony1/slony1-engine/tests/testlogship/generate_dml.sh,v retrieving revision 1.4 retrieving revision 1.5 diff -C2 -d -r1.4 -r1.5 *** generate_dml.sh 27 Aug 2007 15:43:03 -0000 1.4 --- generate_dml.sh 19 Oct 2007 15:22:35 -0000 1.5 *************** *** 23,27 **** generate_initdata() { ! numrows=$(random_number 50 1000) i=0; trippoint=`expr $numrows / 20` --- 23,27 ---- generate_initdata() { ! numrows=$(random_number 125 150) i=0; trippoint=`expr $numrows / 20` *************** *** 78,94 **** fi status "data load complete - nodes are seeded reasonably" - - status "purge archive log files up to present in order to eliminate those that cannot be used" - for file in `/usr/bin/find ${mktmp}/archive_logs_2 -name "slony1_log_*.sql" -type f`; do - status "purge ${file}" - rm ${file} - done - sleep 5 status "pull log shipping dump" PGHOST=${HOST2} PGPORT=${PORT2} PGUSER=${USER2} ${SLTOOLDIR}/slony1_dump.sh ${DB2} ${CLUSTER1} > ${mktmp}/logship_dump.sql - status "load schema for replicated tables into node #3" - ${PGBINDIR3}/psql -h ${HOST3} -p ${PORT3} -U ${USER3} -d ${DB3} -f ${testname}/init_schema.sql - status "load log shipping dump into node #3" - ${PGBINDIR3}/psql -h ${HOST3} -p ${PORT3} -U ${USER3} -d ${DB3} -f ${mktmp}/logship_dump.sql status "generate more data to test log shipping" --- 78,83 ---- *************** *** 116,120 **** wait_for_catchup ! status "move set to node 4" init_preamble --- 105,109 ---- wait_for_catchup ! status "move set to node 3" init_preamble *************** *** 125,147 **** generate_initdata ! eval db=\$DB4 ! status "loading extra data to node $db" ! $pgbindir/psql -h $host -p $port -U $user -d $db < $mktmp/generate.data 1> ${mktmp}/even_more_data.log 2> ${mktmp}/even_more_data.log2 wait_for_catchup ! status "final data load complete - now load files into log shipped node" ! firstseq=`(cd ${mktmp}/archive_logs_2; /usr/bin/find -name "*.sql") | cut -d "_" -f 4 | cut -d "." -f 1 | sort -n | head -1` lastseq=`(cd ${mktmp}/archive_logs_2; /usr/bin/find -name "*.sql") | cut -d "_" -f 4 | cut -d "." -f 1 | sort -n | tail -1` status "Logs numbered from ${firstseq} to ${lastseq}" currseq=${firstseq} while : ; do - # 00000000000000000000 - # 12345678901234567890 cs=`printf "%020d" ${currseq}` status "current sequence value: ${cs}" for logfile in `/usr/bin/find ${mktmp}/archive_logs_2 -name "slony1_log_*_${cs}.sql" -type f`; do ! $pgbindir/psql -h ${HOST3} -p ${PORT3} -d ${DB3} -U ${USER3} -f ${logfile} >> $mktmp/logshipping_output.log 2>> $mktmp/logshipping_errors.log status "load file ${logfile} - ${?}" done --- 114,145 ---- generate_initdata ! status "loading extra data to node 3" ! $pgbindir/psql -h $HOST3 -p $PORT3 -U $USER3 -d $DB3 < $mktmp/generate.data 1> ${mktmp}/even_more_data.log 2> ${mktmp}/even_more_data.log2 wait_for_catchup ! status "done" status "final data load complete - now load files into log shipped node" ! status "set up database for log shipped node" ! ${PGBINDIR4}/createdb -p ${PORT4} -U ${USER4} ${DB4} ! ${PGBINDIR4}/createlang plpgsql ${DB4} ! ! status "load schema for replicated tables into node #4" ! ${PGBINDIR4}/psql -h ${HOST4} -p ${PORT4} -U ${USER4} -d ${DB4} -f ${testname}/init_schema.sql ! status "load log shipping dump into node #4" ! ${PGBINDIR4}/psql -h ${HOST4} -p ${PORT4} -U ${USER4} -d ${DB4} -f ${mktmp}/logship_dump.sql ! ! ! firstseq=`psql -At -d ${DB4} -p ${PORT4} -c 'select at_counter from _slony_regress1.sl_archive_tracking ;'` lastseq=`(cd ${mktmp}/archive_logs_2; /usr/bin/find -name "*.sql") | cut -d "_" -f 4 | cut -d "." -f 1 | sort -n | tail -1` status "Logs numbered from ${firstseq} to ${lastseq}" currseq=${firstseq} while : ; do cs=`printf "%020d" ${currseq}` status "current sequence value: ${cs}" + firstseq=`psql -At -d ${DB4} -p ${PORT4} -c 'select at_counter from _slony_regress1.sl_archive_tracking ;'` + status "archive tracking ID: ${firstseq}" for logfile in `/usr/bin/find ${mktmp}/archive_logs_2 -name "slony1_log_*_${cs}.sql" -type f`; do ! ${PGBINDIR4}/psql -h ${HOST4} -p ${PORT4} -d ${DB4} -U ${USER4} -f ${logfile} >> $mktmp/logshipping_output.log 2>> $mktmp/logshipping_errors.log status "load file ${logfile} - ${?}" done *************** *** 152,155 **** fi done ! status "done" } --- 150,157 ---- fi done ! status "done loading data into log shipping node" ! ! NUMNODES=4 ! status "Changed number of nodes to 4 to reflect the log shipping node" ! }
- Previous message: [Slony1-commit] slony1-engine/tests/test1 init_subscribe_set.ik
- Next message: [Slony1-commit] slony1-engine/src/slon remote_worker.c
- Messages sorted by: [ date ] [ thread ] [ subject ] [ author ]
More information about the Slony1-commit mailing list