From 807b3081ee4d1f87d079fd4c68d3478fb6876db3 Mon Sep 17 00:00:00 2001
From: rurban <rurban@96ab9672-09ca-45d6-a79d-3d69d39ca109>
Date: Wed, 5 Jan 2005 04:24:49 +0000
Subject: [PATCH] re-add this deleted 1.2 branch

git-svn-id: svn://svn.code.sf.net/p/phpwiki/code/branches/release-1_2-branch@4354 96ab9672-09ca-45d6-a79d-3d69d39ca109
---
 admin/dumpserial.php       | 44 ++++++++++++++++++++
 admin/loadserial.php       | 42 +++++++++++++++++++
 admin/lockpage.php         | 22 ++++++++++
 admin/shrinkdbm.pl         | 55 +++++++++++++++++++++++++
 admin/translate_mysql.pl   | 26 ++++++++++++
 admin/wiki_dumpHTML.php    |  7 ++++
 admin/wiki_port1_0.php     | 69 +++++++++++++++++++++++++++++++
 admin/wiki_rebuilddbms.php |  6 +++
 admin/zip.php              | 84 ++++++++++++++++++++++++++++++++++++++
 9 files changed, 355 insertions(+)
 create mode 100755 admin/dumpserial.php
 create mode 100755 admin/loadserial.php
 create mode 100755 admin/lockpage.php
 create mode 100755 admin/shrinkdbm.pl
 create mode 100755 admin/translate_mysql.pl
 create mode 100755 admin/wiki_dumpHTML.php
 create mode 100755 admin/wiki_port1_0.php
 create mode 100755 admin/wiki_rebuilddbms.php
 create mode 100755 admin/zip.php

diff --git a/admin/dumpserial.php b/admin/dumpserial.php
new file mode 100755
index 000000000..b98cfbfed
--- /dev/null
+++ b/admin/dumpserial.php
@@ -0,0 +1,44 @@
+
+
+\n");
+    else
+      $html = "Created directory '$directory' for the page dump...<br>\n";
+  } else {
+    $html = "Using directory '$directory'<br>\n";
+  }
+
+  $numpages = count($pages);
+  for ($x = 0; $x < $numpages; $x++) {
+    $pagename = htmlspecialchars($pages[$x]);
+    $filename = preg_replace('/^\./', '%2e', rawurlencode($pages[$x]));
+    $html .= "<br>$pagename ... ";
+    if($pagename != $filename)
+      $html .= "saved as $filename ... ";
+
+    $data = serialize(RetrievePage($dbi, $pages[$x], $WikiPageStore));
+    if ($fd = fopen("$directory/$filename", "w")) {
+      $num = fwrite($fd, $data, strlen($data));
+      $html .= "$num bytes written\n";
+    } else {
+      ExitWiki("couldn't open file '$directory/$filename' for writing\n");
+    }
+  }
+
+  $html .= "<p>Dump complete.";
+  GeneratePage('MESSAGE', $html, 'Dump serialized pages', 0);
+  ExitWiki('');
+?>
diff --git a/admin/loadserial.php b/admin/loadserial.php
new file mode 100755
index 000000000..6f0c6700a
--- /dev/null
+++ b/admin/loadserial.php
@@ -0,0 +1,42 @@
+
+\n";
+
+  if (! file_exists($directory)) {
+    echo "No such directory '$directory'.<br>\n";
+    exit;
+  }
+
+  $handle = opendir($directory);
+
+  while ($file = readdir($handle)) {
+
+    if ($file[0] == ".")
+      continue;
+
+    $html .= "Reading '$file'...<br>\n";
+
+    $data = implode("", file("$directory/$file"));
+    $pagehash = unserialize($data);
+
+    // at this point there needs to be some form of verification
+    // that we are about to insert a page.
+
+    $pagename = rawurldecode($file);
+    $html .= "inserting file '".htmlspecialchars($pagename)."' into the database...<br>\n";
+    InsertPage($dbi, $pagename, $pagehash);
+  }
+  closedir($handle);
+
+  $html .= "<p>Load complete.";
+  GeneratePage('MESSAGE', $html, 'Load serialized pages', 0);
+  ExitWiki('');
+?>
diff --git a/admin/lockpage.php b/admin/lockpage.php
new file mode 100755
index 000000000..3fa75852d
--- /dev/null
+++ b/admin/lockpage.php
@@ -0,0 +1,22 @@
+
+
diff --git a/admin/shrinkdbm.pl b/admin/shrinkdbm.pl
new file mode 100755
index 000000000..0a8bc6a90
--- /dev/null
+++ b/admin/shrinkdbm.pl
@@ -0,0 +1,55 @@
+#!/usr/bin/perl -w
+
+# $Id: shrinkdbm.pl,v 1.1.2.1 2005-01-05 04:24:49 rurban Exp $
+
+# shrink a DBM file
+# Steve Wainstead, July 2000
+# this script is public domain and has no warranty at all.
+
+use strict;
+use Fcntl;
+use GDBM_File;
+use Getopt::Std;
+use vars ('$opt_o', '$opt_i');
+my (%old_db, %new_db);
+
+# $opt_i == input file
+# $opt_o == output file
+getopts('i:o:');
+
+# less confusing names
+my $input_db_file = $opt_i;
+my $output_db_file = $opt_o;
+
+
+die <<"USAGE" unless ($input_db_file and $output_db_file);
+Usage: $0 -i <infile> -o <outfile>
+  where: infile is a GDBM file and,
+         outfile is the name of the new file to write to.
+
+The idea is to copy the old DB file to a new one and thereby
+save space.
+
+USAGE
+
+# open old file
+tie (%old_db, "GDBM_File", $input_db_file, O_RDWR, 0666)
+    or die "Can't tie $input_db_file: $!\n";
+
+print "There are ", scalar(keys %old_db), " keys in $input_db_file\n";
+
+# open new file, deleting it first if it's already there
+if (-e $output_db_file) { unlink $opt_o; }
+tie (%new_db, "GDBM_File", $output_db_file, O_RDWR|O_CREAT, 0666)
+    or die "Can't tie $output_db_file: $!\n";
+
+# copy the records
+while (my($key, $value) = each(%old_db)) {
+    $new_db{$key} = $value;
+}
+
+print "There are now ", scalar(keys %old_db), " keys in $input_db_file\n";
+print "There are ", scalar(keys %new_db), " keys in $output_db_file\n";
+untie(%old_db);
+untie(%new_db);
+
diff --git a/admin/translate_mysql.pl b/admin/translate_mysql.pl
new file mode 100755
index 000000000..0c1c2f585
--- /dev/null
+++ b/admin/translate_mysql.pl
@@ -0,0 +1,26 @@
+
+# Convert MySQL wiki database dump to a Microsoft SQL-Server compatible SQL script
+# NB This is not a general-purpose MySQL->SQL-Server conversion script
+
+# Author: Andrew K. Pearson
+# Date: 01 May 2001
+
+# Example usage: perl translate_mysql.pl dump.sql > dump2.sql
+
+# NB I did not use sed because the version I have is limited to input lines of <1K in size
+
+while (<>)
+{
+    $newvalue = $_;
+
+    $newvalue =~ s/\\\"/\'\'/g;
+    $newvalue =~ s/\\\'/\'\'/g;
+    $newvalue =~ s/\\n/\'+char(10)+\'/g;
+    $newvalue =~ s/TYPE=MyISAM;//g;
+    $newvalue =~ s/int\(.+\)/int/g;
+    $newvalue =~ s/mediumtext/text/g;
+    $newvalue =~ s/^#/--/g;
+
+    print $newvalue;
+}
+
diff --git a/admin/wiki_dumpHTML.php b/admin/wiki_dumpHTML.php
new file mode 100755
index 000000000..5c13b24aa
--- /dev/null
+++ b/admin/wiki_dumpHTML.php
@@ -0,0 +1,7 @@
+
+\n";
+  echo "Got: $dumpHTML $directory<br>\n";
+
+?>
diff --git a/admin/wiki_port1_0.php b/admin/wiki_port1_0.php
new file mode 100755
index 000000000..61249fc5d
--- /dev/null
+++ b/admin/wiki_port1_0.php
@@ -0,0 +1,69 @@
+<html>
+<head>
+<title>Importing phpwiki 1.0.x dbm files</title>
+</head>
+<body>
+
+\n";
+
+    $newhash['version'] = isset($pagehash['version']) ?
+        $pagehash['version'] : 1;
+    $newhash['author'] = isset($pagehash['author']) ?
+        $pagehash['author'] : '1.0 wiki setup page';
+    $newhash['created'] = time();
+    $newhash['lastmodified'] = time();
+    $newhash['flags'] = 0;
+    $newhash['pagename'] = $pagename;
+    $newhash['refs'] = array();
+    for ($i=1; $i <= 4; $i++) {
+      if (isset($pagehash["r$i"]))
+        $newhash['refs'][$i] = $pagehash["r$i"];
+    }
+    $content = implode("\n", $pagehash['text']);
+    $content = str_replace("[", "[[", $content);
+    $newhash['content'] = explode("\n", $content);
+
+    InsertPage($dbi, $pagename, $newhash);
+  }
+
+
+  echo "opening dbm file: $portdbmfile ... \n";
+
+  if (! file_exists($portdbmfile)) {
+    echo "File '$portdbmfile' does not exist.<br>\n";
+    exit;
+  }
+
+  if (! ($dbmh = dbmopen($portdbmfile, "r"))) {
+    echo "Cannot open '$portdbmfile'<br>\n";
+    exit;
+  }
+
+  echo " ok ($dbmh)<p>\n";
+
+  $namelist = array();
+  $ctr = 0;
+
+  $namelist[$ctr] = $key = dbmfirstkey($dbmh);
+  port1_0renderhash($dbi, $dbmh, $key);
+  while ($key = dbmnextkey($dbmh, $key)) {
+    $ctr++;
+    $namelist[$ctr] = $key;
+    port1_0renderhash($dbi, $dbmh, $key);
+  }
+
+  dbmclose($dbmh);
+?>
+<p>Done.
+</body>
+</html>
diff --git a/admin/wiki_rebuilddbms.php b/admin/wiki_rebuilddbms.php
new file mode 100755
index 000000000..00d2856cc
--- /dev/null
+++ b/admin/wiki_rebuilddbms.php
@@ -0,0 +1,6 @@
+
+\n";
+
+?>
diff --git a/admin/zip.php b/admin/zip.php
new file mode 100755
index 000000000..4a985d0eb
--- /dev/null
+++ b/admin/zip.php
@@ -0,0 +1,84 @@
+ $pagehash['lastmodified'],
+                    'is_ascii' => 1);
+    if (($pagehash['flags'] & FLAG_PAGE_LOCKED) != 0)
+      $attrib['write_protected'] = 1;
+
+    $content = MailifyPage($pagehash, $oldpagehash);
+
+    $zip->addRegularFile( encode_pagename_for_wikizip($pagehash['pagename']),
+                          $content, $attrib);
+  }
+  $zip->finish();
+}
+
+
+if(defined('WIKI_ADMIN'))
+  MakeWikiZip(($zip == 'all'));
+
+CloseDataBase($dbi);
+exit;
+?>
-- 
2.45.0
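
Editor's note (not part of the patch): dumpserial.php and loadserial.php
round-trip pages through one file per page. The filename is the
rawurlencode()d page name, with a leading dot escaped as %2e, and the file
body is the serialize()d page hash. A minimal sketch of that on-disk
format in plain PHP; the directory and the $pagehash fields here are
made-up illustrations, not values from the patch:

    <?php
    $directory = '/tmp/wikidump';   // assumed dump target
    @mkdir($directory, 0755);
    $pagehash = array('version' => 1, 'pagename' => '.DotPage',
                      'content' => array('Hello', 'World'));

    // dump side, as in dumpserial.php: encode the name, serialize the hash
    $filename = preg_replace('/^\./', '%2e', rawurlencode($pagehash['pagename']));
    file_put_contents("$directory/$filename", serialize($pagehash));

    // load side, as in loadserial.php: decode the name, unserialize the body
    $restored = unserialize(file_get_contents("$directory/$filename"));
    echo rawurldecode($filename), " => ", $restored['pagename'], "\n";
    ?>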
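Editor's note on the wiki_port1_0.php hunk: the refs loop originally read
$pagehash['r$i'], which looks up the literal key "r$i" because PHP
single-quoted strings do not interpolate variables; the hunk above has been
corrected to $pagehash["r$i"]. A two-line demonstration:

    <?php
    $i = 3;
    echo 'r$i', "\n";   // prints r$i - single quotes, no interpolation
    echo "r$i", "\n";   // prints r3  - double quotes interpolate $i
    ?>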
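Editor's note: shrinkdbm.pl compacts a GDBM database by copying every
record into a freshly created file, which drops the dead space GDBM leaves
behind. The same copy-to-shrink idea sketched in PHP with the dba
extension; this assumes PHP was built with the gdbm handler, and the paths
are invented for illustration:

    <?php
    $old = dba_open('/tmp/wiki.gdbm', 'r', 'gdbm') or die("can't open input\n");
    $new = dba_open('/tmp/wiki-new.gdbm', 'n', 'gdbm') or die("can't open output\n");
    // walk the old database and copy each record verbatim
    for ($key = dba_firstkey($old); $key !== false; $key = dba_nextkey($old)) {
        dba_insert($key, dba_fetch($key, $old), $new);
    }
    dba_close($old);
    dba_close($new);
    ?>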