3 * Copyright 1999,2000,2001,2002,2004,2005,2006,2007 $ThePhpWikiProgrammingTeam
4 * Copyright 2008-2010 Marc-Etienne Vargenau, Alcatel-Lucent
6 * This file is part of PhpWiki.
8 * PhpWiki is free software; you can redistribute it and/or modify
9 * it under the terms of the GNU General Public License as published by
10 * the Free Software Foundation; either version 2 of the License, or
11 * (at your option) any later version.
13 * PhpWiki is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 * GNU General Public License for more details.
18 * You should have received a copy of the GNU General Public License along
19 * with PhpWiki; if not, write to the Free Software Foundation, Inc.,
20 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23 require_once 'lib/ziplib.php';
24 require_once 'lib/Template.php';
27 * ignore fatal errors during dump
28 * @param PhpError $error
// Error-handler callback installed around dump loops (see the
// pushErrorHandler() calls below): demotes fatal errors raised by
// plugins to E_USER_WARNING so one bad page cannot abort a whole dump.
// Returns true so the ErrorManager treats the error as handled.
// NOTE(review): this excerpt omits some original lines (the embedded
// line numbers have gaps) — closing braces are not visible here.
31 function _dump_error_handler($error)
33 if ($error->isFatal()) {
        // Downgrade a fatal to a warning; processing continues.
34 $error->errno = E_USER_WARNING;
37 return true; // Ignore error
// Begin a streamed load/dump progress page: renders the 'html' template
// up to the %BODY% placeholder and flushes it, so per-page progress can
// be chunk-output while the operation runs. Also initializes the marker
// array used to defer page-change notifications until EndLoadDump().
// $title is the page title; $html (optional) is extra header content.
// NOTE(review): excerpt has elided lines (gaps in embedded numbering).
40 function StartLoadDump(&$request, $title, $html = '')
42 // MockRequest is from the unit testsuite, a faked request. (may be cmd-line)
43 // We are silent on unittests.
44 if (is_a($request, 'MockRequest'))
46 // FIXME: This is a hack. This really is the worst overall hack in phpwiki.
48 $html->pushContent('%BODY%');
49 $tmpl = Template('html', array('TITLE' => $title,
51 'CONTENT' => $html ? $html : '%BODY%'));
        // Emit only the part of the expanded template before %BODY%:
        // the rest (footer) is printed later by EndLoadDump().
52 echo preg_replace('/%BODY%.*/s', '', $tmpl->getExpansion($html));
53 $request->chunkOutput();
55 // set marker for sendPageChangeNotification()
56 $request->_deferredPageChangeNotification = array();
// Finish the streamed progress page begun by StartLoadDump():
//  - builds a "Return to ..." link whose anchor label depends on the
//    current action (zip / dumpserial / upload / loadfile / upgrade /
//    dumphtml — the per-action conditions are elided in this excerpt);
//  - sends one batched notification mail for all pages loaded, using
//    the addresses collected in $request->_deferredPageChangeNotification;
//  - prints "Complete." and closes the HTML body/theme wrappers.
// NOTE(review): excerpt has elided lines; several conditions and the
// theme-specific closing markup between the elseif arms are missing.
59 function EndLoadDump(&$request)
63 if (is_a($request, 'MockRequest'))
65 $action = $request->getArg('action');
69 $label = _("ZIP files of database");
72 $label = _("Dump to directory");
75 $label = _("Upload File");
78 $label = _("Load File");
81 $label = _("Upgrade");
85 $label = _("Dump Pages as XHTML");
        // Anchor labels use underscores instead of spaces.
88 if ($label) $label = str_replace(" ", "_", $label);
89 if ($action == 'browse') // loading virgin
90 $pagelink = WikiLink(HOME_PAGE);
92 $pagelink = WikiLink(new WikiPageName(_("PhpWikiAdministration"), false, $label));
94 // do deferred sendPageChangeNotification()
95 if (!empty($request->_deferredPageChangeNotification)) {
96 $pages = $all_emails = $all_users = array();
        // Merge recipients across all loaded pages, de-duplicated.
97 foreach ($request->_deferredPageChangeNotification as $p) {
98 list($pagename, $emails, $userids) = $p;
100 $all_emails = array_unique(array_merge($all_emails, $emails));
101 $all_users = array_unique(array_merge($all_users, $userids));
103 $editedby = sprintf(_("Edited by: %s"), $request->_user->getId());
104 $content = _("Loaded the following pages:") . "\n" . join("\n", $pages);
        // One mail for the whole batch; on failure report which pages/users failed.
105 if (!mail(join(',', $all_emails), "[" . WIKI_NAME . "] " . _("LoadDump"),
106 _("LoadDump") . "\n" .
110 trigger_error(sprintf(_("PageChange Notification Error: Couldn't send %s to %s"),
111 join("\n", $pages), join(',', $all_users)), E_USER_WARNING);
117 unset($request->_deferredPageChangeNotification);
119 PrintXML(HTML::p(HTML::strong(_("Complete."))),
120 HTML::p(fmt("Return to %s", $pagelink)));
121 // Ugly hack to get valid XHTML code
        // Close theme-specific wrapper divs before </body>; the exact
        // markup per theme is elided from this excerpt.
122 if (is_a($WikiTheme, 'WikiTheme_fusionforge')) {
127 } elseif (is_a($WikiTheme, 'WikiTheme_Sidebar')
128 or is_a($WikiTheme, 'WikiTheme_MonoBook')
134 } elseif (is_a($WikiTheme, 'WikiTheme_wikilens')) {
139 } elseif (is_a($WikiTheme, 'WikiTheme_blog')) {
142 } elseif (is_a($WikiTheme, 'WikiTheme_Crao')
143 or is_a($WikiTheme, 'WikiTheme_Hawaiian')
144 or is_a($WikiTheme, 'WikiTheme_MacOSX')
145 or is_a($WikiTheme, 'WikiTheme_shamino_com')
146 or is_a($WikiTheme, 'WikiTheme_smaller')
150 echo "</body></html>\n";
153 ////////////////////////////////////////////////////////////////
155 // Functions for dumping.
157 ////////////////////////////////////////////////////////////////
161 * http://www.nacs.uci.edu/indiv/ehood/MIME/2045/rfc2045.html
162 * http://www.faqs.org/rfcs/rfc2045.html
163 * (RFC 1521 has been superseded by RFC 2045 & others).
165 * Also see http://www.faqs.org/rfcs/rfc2822.html
// Serialize a wiki page into an RFC 2045 MIME message string.
// $nversions: number of revisions to include, newest first; 0 means all
// revisions. Multiple revisions are wrapped as a MIME multipart,
// a single revision is returned as one part.
// When STRICT_MAILABLE_PAGEDUMPS is set, emits full mail headers
// (Subject/From/To) so the dump is a valid RFC 2822 message.
// NOTE(review): excerpt has elided lines ($head initialization and some
// loop braces are not visible); verify against the full file.
167 function MailifyPage($page, $nversions = 1)
169 $current = $page->getCurrentRevision(false);
172 if (defined('STRICT_MAILABLE_PAGEDUMPS') and STRICT_MAILABLE_PAGEDUMPS) {
173 $from = defined('SERVER_ADMIN') ? SERVER_ADMIN : 'foo@bar';
174 //This is for unix mailbox format: (not RFC (2)822)
175 // $head .= "From $from " . CTime(time()) . "\r\n";
176 $head .= "Subject: " . rawurlencode($page->getName()) . "\r\n";
177 $head .= "From: $from (PhpWiki)\r\n";
178 // RFC 2822 requires only a Date: and originator (From:)
179 // field, however the obsolete standard RFC 822 also
180 // requires a destination field.
181 $head .= "To: $from (PhpWiki)\r\n";
183 $head .= "Date: " . Rfc2822DateTime($current->get('mtime')) . "\r\n";
184 $head .= sprintf("Mime-Version: 1.0 (Produced by PhpWiki %s)\r\n",
        // Collect MIMEified revisions, newest first, up to $nversions.
187 $iter = $page->getAllRevisions();
189 while ($revision = $iter->next()) {
190 $parts[] = MimeifyPageRevision($page, $revision);
191 if ($nversions > 0 && count($parts) >= $nversions)
194 if (count($parts) > 1)
195 return $head . MimeMultipart($parts);
197 return $head . $parts[0];
201 * Compute the filename to use for storing the contents of a wiki page.
203 * Basically we do a rawurlencode() which encodes everything except
204 * ASCII alphanumerics and '.', '-', and '_'.
206 * But we also want to encode leading dots to avoid filenames like
207 * '.', and '..'. (Also, there's no point in generating "hidden" file
208 * names, like '.foo'.)
210 * We have to apply a different "/" logic for dumpserial, htmldump and zipdump.
211 * dirs are allowed for zipdump and htmldump, not for dumpserial
214 * @param string $pagename Pagename.
215 * @param string $action.
216 * @return string Filename for page.
// Compute the on-disk filename for a wiki page (see the docblock above):
// rawurlencode() the pagename, then post-process so that
//  - %2F becomes a real '/' for zip/html dumps (subdirectories allowed,
//    but not for dumpserial),
//  - leading and trailing dots are encoded (%2E) to avoid '.', '..' and
//    hidden-file names,
//  - %20 is turned back into a literal space.
// NOTE(review): excerpt has elided lines — the `global $request` (or
// equivalent) that $action falls back to and the final return are not
// visible here; verify against the full file.
218 function FilenameForPage($pagename, $action = '')
220 $enc = rawurlencode($pagename);
223 $action = $request->getArg('action');
225 if ($action != 'dumpserial') { // zip, ziphtml, dumphtml
226 // For every %2F we will need to mkdir -p dirname($pagename)
227 $enc = preg_replace('/%2F/', '/', $enc);
229 $enc = preg_replace('/^\./', '%2E', $enc);
230 $enc = preg_replace('/%20/', ' ', $enc);
231 $enc = preg_replace('/\.$/', '%2E', $enc);
236 * The main() function which generates a zip archive of a PhpWiki.
238 * If $include_archive is false, only the current version of each page
239 * is included in the zip file; otherwise all archived versions are
// Generate a zip archive of the wiki and stream it to the client.
// Request args honored:
//   include=all    -> full dump with all archived revisions
//   include=empty  -> also include empty pages (flag set but the use of
//                     $include_empty is elided from this excerpt)
//   pages / exclude -> glob-style page selection
// Fatal errors from plugins are demoted via _dump_error_handler so a
// single bad page cannot kill the archive.
// NOTE(review): excerpt has elided lines (else branches, loop closes,
// the $zip->finish()-style epilogue); verify against the full file.
242 function MakeWikiZip(&$request)
244 global $ErrorManager;
245 if ($request->getArg('include') == 'all') {
246 $zipname = WIKI_NAME . _("FullDump") . date('Ymd-Hi') . '.zip';
247 $include_archive = true;
249 $zipname = WIKI_NAME . _("LatestSnapshot") . date('Ymd-Hi') . '.zip';
250 $include_archive = false;
252 $include_empty = false;
253 if ($request->getArg('include') == 'empty') {
254 $include_empty = true;
257 $zip = new ZipWriter("Created by PhpWiki " . PHPWIKI_VERSION, $zipname);
259 /* ignore fatals in plugins */
260 $ErrorManager->pushErrorHandler(new WikiFunctionCb('_dump_error_handler'));
262 $dbi =& $request->_dbi;
263 $thispage = $request->getArg('pagename'); // for "Return to ..."
264 if ($exclude = $request->getArg('exclude')) { // exclude which pagenames
265 $excludeList = explodePageList($exclude);
267 $excludeList = array();
269 if ($pages = $request->getArg('pages')) { // which pagenames
270 if ($pages == '[]') // current page
272 $page_iter = new WikiDB_Array_PageIterator(explodePageList($pages));
274 $page_iter = $dbi->getAllPages(false, false, false, $excludeList);
276 $request_args = $request->args;
        // Longer watchdog when debugging (start_debug present).
277 $timeout = (!$request->getArg('start_debug')) ? 30 : 240;
279 while ($page = $page_iter->next()) {
280 $request->args = $request_args; // some plugins might change them (esp. on POST)
281 longer_timeout($timeout); // Reset watchdog
283 $current = $page->getCurrentRevision();
        // Skip never-saved pages and invalid/excluded page names.
284 if ($current->getVersion() == 0)
287 $pagename = $page->getName();
288 $wpn = new WikiPageName($pagename);
289 if (!$wpn->isValid())
291 if (in_array($page->getName(), $excludeList)) {
        // Preserve mtime (and write protection) as zip file attributes.
295 $attrib = array('mtime' => $current->get('mtime'),
297 if ($page->get('locked'))
298 $attrib['write_protected'] = 1;
300 if ($include_archive)
301 $content = MailifyPage($page, 0);
303 $content = MailifyPage($page);
305 $zip->addRegularFile(FilenameForPage($pagename), $content, $attrib);
309 $ErrorManager->popErrorHandler();
// Dump pages as MIME files into a server-side directory (action=dumpserial).
// Creates the target directory (0755) if needed, streams per-page progress
// via StartLoadDump()/EndLoadDump(), and writes one file per page named by
// FilenameForPage(). include=all writes all revisions per page; include=empty
// also iterates empty pages. pages/exclude select pagenames (glob-style).
// NOTE(review): excerpt has elided lines (else branches, fclose(), loop
// closes, `continue`s after Skipped); verify against the full file.
312 function DumpToDir(&$request)
314 $directory = $request->getArg('directory');
315 if (empty($directory))
316 $directory = DEFAULT_DUMP_DIR; // See lib/plugin/WikiForm.php:87
317 if (empty($directory))
318 $request->finish(_("You must specify a directory to dump to"));
320 // see if we can access the directory the user wants us to use
321 if (!file_exists($directory)) {
322 if (!mkdir($directory, 0755))
323 $request->finish(fmt("Cannot create directory ā%sā", $directory));
325 $html = HTML::p(fmt("Created directory ā%sā for the page dump...",
328 $html = HTML::p(fmt("Using directory ā%sā", $directory));
331 StartLoadDump($request, _("Dumping Pages"), $html);
333 $dbi =& $request->_dbi;
334 $thispage = $request->getArg('pagename'); // for "Return to ..."
335 if ($exclude = $request->getArg('exclude')) { // exclude which pagenames
336 $excludeList = explodePageList($exclude);
338 $excludeList = array();
340 $include_empty = false;
341 if ($request->getArg('include') == 'empty') {
342 $include_empty = true;
344 if ($pages = $request->getArg('pages')) { // which pagenames
345 if ($pages == '[]') // current page
347 $page_iter = new WikiDB_Array_PageIterator(explodePageList($pages));
349 $page_iter = $dbi->getAllPages($include_empty, false, false, $excludeList);
352 $request_args = $request->args;
        // Longer watchdog when debugging (start_debug present).
353 $timeout = (!$request->getArg('start_debug')) ? 30 : 240;
355 while ($page = $page_iter->next()) {
356 $request->args = $request_args; // some plugins might change them (esp. on POST)
357 longer_timeout($timeout); // Reset watchdog
359 $pagename = $page->getName();
        // Progress output is suppressed for unit-test MockRequests.
360 if (!is_a($request, 'MockRequest')) {
361 PrintXML(HTML::br(), $pagename, ' ... ');
365 if (in_array($pagename, $excludeList)) {
366 if (!is_a($request, 'MockRequest')) {
367 PrintXML(_("Skipped."));
372 $filename = FilenameForPage($pagename);
        // Tell the user when the encoded filename differs from the pagename.
374 if ($page->getName() != $filename) {
375 $msg->pushContent(HTML::small(fmt("saved as %s", $filename)),
379 if ($request->getArg('include') == 'all')
380 $data = MailifyPage($page, 0);
382 $data = MailifyPage($page);
384 if (!($fd = fopen($directory . "/" . $filename, "wb"))) {
385 $msg->pushContent(HTML::strong(fmt("couldn't open file ā%sā for writing",
386 "$directory/$filename")));
387 $request->finish($msg);
390 $num = fwrite($fd, $data, strlen($data));
391 $msg->pushContent(HTML::small(fmt("%s bytes written", $num)));
392 if (!is_a($request, 'MockRequest')) {
        // Sanity check: the whole serialized page must have been written.
396 assert($num == strlen($data));
400 EndLoadDump($request);
// Print a small progress message for a copied (or skipped) asset file,
// optionally prefixed by the related page/file name. Silent for unit-test
// MockRequests. NOTE(review): the PrintXML($msg) call and closing braces
// are elided from this excerpt.
403 function _copyMsg($page, $smallmsg)
405 if (!is_a($GLOBALS['request'], 'MockRequest')) {
406 if ($page) $msg = HTML(HTML::br(), HTML($page), HTML::small($smallmsg));
407 else $msg = HTML::small($smallmsg);
// Recursively create a directory path, like `mkdir -p`: split on '/',
// create each prefix in turn, returning FALSE on the first mkdir failure.
// Single-component paths take the direct mkdir() shortcut.
// NOTE(review): excerpt has elided lines (the single-component test, the
// $curr accumulation, existence checks, and the final return); verify
// against the full file.
413 function mkdir_p($pathname, $permission = 0777)
415 $arr = explode("/", $pathname);
417 return mkdir($pathname, $permission);
419 $s = array_shift($arr);
421 foreach ($arr as $p) {
424 $ok = mkdir($curr, $permission);
426 if (!$ok) return FALSE;
432 * Dump all pages as XHTML to a directory, as pagename.html.
433 * Copies all used css files to the directory, all used images to a
434 * "images" subdirectory, and all used buttons to a "images/buttons" subdirectory.
435 * The webserver must have write permissions to these directories.
436 * chown httpd HTML_DUMP_DIR; chmod u+rwx HTML_DUMP_DIR
439 * @param WikiRequest $request
// Dump pages as XHTML into a server-side directory (action=dumphtml);
// see the docblock above for the css/images side effects. Resolves the
// target directory (arg or HTML_DUMP_DIR), selects the page set
// (pages arg / current page when not on the admin page / all pages),
// then delegates the real work to _DumpHtmlToDir() with DUMP_MODE='HTML'.
// NOTE(review): excerpt has elided lines (the `global $WikiTheme`, some
// else branches and closes); verify against the full file.
442 function DumpHtmlToDir(&$request)
445 $directory = $request->getArg('directory'); // Path to dump to. Default: HTML_DUMP_DIR
446 if (empty($directory))
447 $directory = HTML_DUMP_DIR; // See lib/plugin/WikiForm.php:87
448 if (empty($directory))
449 $request->finish(_("You must specify a directory to dump to"));
451 // See if we can access the directory the user wants us to use
452 if (!file_exists($directory)) {
453 if (!mkdir($directory, 0755))
454 $request->finish(fmt("Cannot create directory ā%sā", $directory));
456 $html = HTML::p(fmt("Created directory ā%sā for the page dump...",
459 $html = HTML::p(fmt("Using directory ā%sā", $directory));
461 StartLoadDump($request, _("Dumping Pages"), $html);
462 $thispage = $request->getArg('pagename'); // for "Return to ..."
464 // Comma-separated of glob-style pagenames to exclude
465 $dbi =& $request->_dbi;
466 if ($exclude = $request->getArg('exclude')) { // exclude which pagenames
467 $excludeList = explodePageList($exclude);
        // Default exclusions: debug pages that leak auth details.
469 $excludeList = array('DebugAuthInfo', 'DebugGroupInfo', 'AuthInfo');
472 // Comma-separated of glob-style pagenames to dump.
473 // Also array of pagenames allowed.
474 if ($pages = $request->getArg('pages')) { // which pagenames
475 if ($pages == '[]') // current page
477 $page_iter = new WikiDB_Array_generic_iter(explodePageList($pages));
478 // not at admin page: dump only the current page
479 } elseif ($thispage != _("PhpWikiAdministration")) {
480 $page_iter = new WikiDB_Array_generic_iter(array($thispage));
482 $page_iter = $dbi->getAllPages(false, false, false, $excludeList);
485 $WikiTheme->DUMP_MODE = 'HTML';
486 _DumpHtmlToDir($directory, $page_iter, $request->getArg('exclude'));
487 $WikiTheme->DUMP_MODE = false;
489 $request->setArg('pagename', $thispage); // Template::_basepage fix
490 EndLoadDump($request);
493 /* Known problem: any plugins or other code which echo()s text will
494 * lead to a corrupted html zip file which may produce the following
495 * errors upon unzipping:
497 * warning [wikihtml.zip]: 2401 extra bytes at beginning or within zipfile
498 * file #58: bad zipfile offset (local header sig): 177561
499 * (attempting to re-compensate)
501 * However, the actual wiki page data should be unaffected.
// Generate a zip of XHTML-rendered pages (action=ziphtml); see the
// known-problem note above about plugins that echo during rendering.
// Zip name comes from the zipname arg (forced to a .zip suffix,
// basename()d to prevent path traversal) or defaults to wikihtml.zip;
// rendering is delegated to _DumpHtmlToDir() with DUMP_MODE='ZIPHTML'
// and the ZipWriter object as target.
// NOTE(review): excerpt has elided lines (the `global $WikiTheme`,
// suffix-append branch, and the zip finish epilogue); verify against
// the full file.
503 function MakeWikiZipHtml(&$request)
506 if ($request->getArg('zipname')) {
507 $zipname = basename($request->getArg('zipname'));
508 if (!preg_match("/\.zip$/i", $zipname))
510 $request->setArg('zipname', false);
512 $zipname = "wikihtml.zip";
514 $zip = new ZipWriter("Created by PhpWiki " . PHPWIKI_VERSION, $zipname);
515 $dbi =& $request->_dbi;
516 $thispage = $request->getArg('pagename'); // for "Return to ..."
517 if ($pages = $request->getArg('pages')) { // which pagenames
518 if ($pages == '[]') // current page
520 $page_iter = new WikiDB_Array_generic_iter(explodePageList($pages));
522 $page_iter = $dbi->getAllPages(false, false, false, $request->getArg('exclude'));
525 $WikiTheme->DUMP_MODE = 'ZIPHTML';
526 _DumpHtmlToDir($zip, $page_iter, $request->getArg('exclude'));
527 $WikiTheme->DUMP_MODE = false;
531 * Internal html dumper. Used for dumphtml, ziphtml and pdf
// Internal renderer shared by dumphtml, ziphtml and pdf dumps.
// $target is a directory path (HTML/PDFHTML modes) or a ZipWriter
// (ZIPHTML mode); $page_iter yields pages or pagenames; $exclude is a
// comma-separated glob list. For each page it renders the 'browse'
// template to XHTML, rewrites absolute <img src="/..."> references to a
// local images/ subdirectory (copying the files), writes the page file
// (creating subdirectories for pagenames containing '/'), then copies
// theme images, buttons and css collected in $WikiTheme->dumped_*.
// In PDFHTML mode the written files are finally fed to the external
// HTML2PDF command. Fatals are demoted via _dump_error_handler.
// NOTE(review): this excerpt omits many original lines (gaps in the
// embedded numbering): $silent/$zip/$msg initialization, several else
// branches, continues, fclose() calls and closing braces are not
// visible. Comments below only describe what the visible code shows.
533 function _DumpHtmlToDir($target, $page_iter, $exclude = false)
535 global $WikiTheme, $request, $ErrorManager;
        // Interpret $target according to the active dump mode.
539 if ($WikiTheme->DUMP_MODE == 'HTML') {
540 $directory = $target;
542 } elseif ($WikiTheme->DUMP_MODE == 'PDFHTML') {
543 $directory = $target;
544 } elseif (is_object($target)) { // $WikiTheme->DUMP_MODE == 'ZIPHTML'
548 $request->_TemplatesProcessed = array();
549 if ($exclude) { // exclude which pagenames
550 $excludeList = explodePageList($exclude);
        // Default exclusions: debug pages that leak auth details.
552 $excludeList = array('DebugAuthInfo', 'DebugGroupInfo', 'AuthInfo');
        // With a pagelist format, restrict link rendering to dumped pages.
554 $WikiTheme->VALID_LINKS = array();
555 if ($request->getArg('format')) { // pagelist
556 $page_iter_sav = $page_iter;
557 foreach ($page_iter_sav->asArray() as $handle) {
558 $WikiTheme->VALID_LINKS[] = is_string($handle) ? $handle : $handle->getName();
560 $page_iter_sav->reset();
563 if (defined('HTML_DUMP_SUFFIX')) {
564 $WikiTheme->HTML_DUMP_SUFFIX = HTML_DUMP_SUFFIX;
        // Temporarily strip theme body attributes; restored at the end.
566 if (isset($WikiTheme->_MoreAttr['body'])) {
567 $_bodyAttr = $WikiTheme->_MoreAttr['body'];
568 unset($WikiTheme->_MoreAttr['body']);
571 $ErrorManager->pushErrorHandler(new WikiFunctionCb('_dump_error_handler'));
573 // check if the dumped file will be accessible from outside
574 $doc_root = $request->get("DOCUMENT_ROOT");
575 if ($WikiTheme->DUMP_MODE == 'HTML') {
576 $ldir = NormalizeLocalFileName($directory);
577 $wikiroot = NormalizeLocalFileName('');
        // Prefer a web-relative link prefix when the dump dir is under
        // the docroot or the wiki root; otherwise fall back to file://.
578 if (string_starts_with($ldir, $doc_root)) {
579 $link_prefix = substr($directory, strlen($doc_root)) . "/";
580 } elseif (string_starts_with($ldir, $wikiroot)) {
581 $link_prefix = NormalizeWebFileName(substr($directory, strlen($wikiroot))) . "/";
585 $prefix = '/'; // . substr($doc_root,0,2); // add drive where apache is installed
587 $link_prefix = "file://" . $prefix . $directory . "/";
593 $request_args = $request->args;
594 $timeout = (!$request->getArg('start_debug')) ? 60 : 240;
597 $directory = str_replace("\\", "/", $directory); // no Win95 support.
598 if (!is_dir("$directory/images"))
599 mkdir("$directory/images");
        // Track already-copied image files to avoid duplicate copies.
603 $already_images = array();
605 while ($page = $page_iter->next()) {
        // Iterator may yield pagename strings or page objects.
606 if (is_string($page)) {
608 $page = $request->_dbi->getPage($pagename);
610 $pagename = $page->getName();
612 if (empty($firstpage)) $firstpage = $pagename;
        // Skip pages already dumped in this run.
613 if (array_key_exists($pagename, $already))
615 $already[$pagename] = 1;
616 $current = $page->getCurrentRevision();
617 //if ($current->getVersion() == 0)
620 $request->args = $request_args; // some plugins might change them (esp. on POST)
621 longer_timeout($timeout); // Reset watchdog
624 $attrib = array('mtime' => $current->get('mtime'),
626 if ($page->get('locked'))
627 $attrib['write_protected'] = 1;
628 } elseif (!$silent) {
629 if (!is_a($request, 'MockRequest')) {
630 PrintXML(HTML::br(), $pagename, ' ... ');
634 if (in_array($pagename, $excludeList)) {
635 if (!$silent and !is_a($request, 'MockRequest')) {
636 PrintXML(_("Skipped."));
642 if ($WikiTheme->DUMP_MODE == 'PDFHTML')
643 $request->setArg('action', 'pdf'); // to omit cache headers
644 $request->setArg('pagename', $pagename); // Template::_basepage fix
645 $filename = FilenameForPage($pagename) . $WikiTheme->HTML_DUMP_SUFFIX;
646 $args = array('revision' => $current,
647 'CONTENT' => $current->getTransformedContent(),
648 'relative_base' => $relative_base);
649 // For every %2F will need to mkdir -p dirname($pagename)
650 if (preg_match("/(%2F|\/)/", $filename)) {
651 // mkdir -p and set relative base for subdir pages
652 $filename = preg_replace("/%2F/", "/", $filename);
653 $count = substr_count($filename, "/");
654 $dirname = dirname($filename);
656 mkdir_p($directory . "/" . $dirname);
657 // Fails with "XX / YY", "XX" is created, "XX / YY" cannot be written
658 // if (isWindows()) // interesting Windows bug: cannot mkdir "bla "
659 // Since dumps needs to be copied, we have to disallow this for all platforms.
660 $filename = preg_replace("/ \//", "/", $filename);
            // relative_base climbs one "../" per subdirectory level.
661 $relative_base = "../";
663 $relative_base .= "../";
666 $args['relative_base'] = $relative_base;
        // GeneratePageasXML may reset DUMP_MODE; save and restore it.
670 $DUMP_MODE = $WikiTheme->DUMP_MODE;
671 $data = GeneratePageasXML(new Template('browse', $request, $args),
672 $pagename, $current, $args);
673 $WikiTheme->DUMP_MODE = $DUMP_MODE;
        // Rewrite absolute image srcs to the local images/ dir and copy
        // the underlying files (needed for the pdf pipeline).
675 if (preg_match_all("/<img .*?src=\"(\/.+?)\"/", $data, $m)) {
676 // fix to local relative path for uploaded images, so that pdf will work
677 foreach ($m[1] as $img_file) {
678 $base = basename($img_file);
679 $data = str_replace('src="' . $img_file . '"', 'src="images/' . $base . '"', $data);
680 if (array_key_exists($img_file, $already_images))
682 $already_images[$img_file] = 1;
683 // resolve src from webdata to file
684 $src = $doc_root . $img_file;
685 if (file_exists($src) and $base) {
687 $target = "$directory/images/$base";
688 if (copy($src, $target)) {
690 _copyMsg($img_file, fmt("... copied to %s", $target));
693 _copyMsg($img_file, fmt("... not copied to %s", $target));
696 $target = "images/$base";
697 $zip->addSrcFile($target, $src);
        // Write the rendered page (directory modes) or add it to the zip.
704 $outfile = $directory . "/" . $filename;
705 if (!($fd = fopen($outfile, "wb"))) {
706 $msg->pushContent(HTML::strong(fmt("couldn't open file ā%sā for writing",
708 $request->finish($msg);
710 $len = strlen($data);
711 $num = fwrite($fd, $data, $len);
712 if ($pagename != $filename) {
713 $link = LinkURL($link_prefix . $filename, $filename);
714 $msg->pushContent(HTML::small(_("saved as "), $link, " ... "));
716 $msg->pushContent(HTML::small(fmt("%s bytes written", $num), "\n"));
718 if (!is_a($request, 'MockRequest')) {
722 $request->chunkOutput();
724 assert($num == $len);
726 $outfiles[] = $outfile;
728 $zip->addRegularFile($filename, $data, $attrib);
        // Aggressively drop caches between pages to bound memory usage
        // on large dumps.
732 $request->_dbi->_cache->invalidate_cache($pagename);
733 unset ($request->_dbi->_cache->_pagedata_cache);
734 unset ($request->_dbi->_cache->_versiondata_cache);
735 unset ($request->_dbi->_cache->_glv_cache);
737 unset ($request->_dbi->_cache->_backend->_page_data);
740 unset($current->_transformedContent);
742 if (!empty($template)) {
743 unset($template->_request);
        // Post-pass: copy theme assets the renderer recorded as used.
750 $attrib = false; //array('is_ascii' => 0);
751 if (!empty($WikiTheme->dumped_images) and is_array($WikiTheme->dumped_images)) {
752 // @mkdir("$directory/images");
753 foreach ($WikiTheme->dumped_images as $img_file) {
754 if (array_key_exists($img_file, $already_images))
756 $already_images[$img_file] = 1;
758 and ($from = $WikiTheme->_findFile($img_file, true))
762 $target = "$directory/images/" . basename($from);
764 copy($WikiTheme->_path . $from, $target);
766 if (copy($WikiTheme->_path . $from, $target)) {
767 _copyMsg($from, fmt("... copied to %s", $target));
769 _copyMsg($from, fmt("... not copied to %s", $target));
773 $target = "images/" . basename($from);
774 $zip->addSrcFile($target, $WikiTheme->_path . $from);
776 } elseif (!$silent) {
777 _copyMsg($from, _("... not found"));
        // Theme buttons go to images/buttons/.
782 if (!empty($WikiTheme->dumped_buttons)
783 and is_array($WikiTheme->dumped_buttons)
786 if ($directory && !is_dir("$directory/images/buttons"))
787 mkdir("$directory/images/buttons");
788 foreach ($WikiTheme->dumped_buttons as $text => $img_file) {
789 if (array_key_exists($img_file, $already_images))
791 $already_images[$img_file] = 1;
793 and ($from = $WikiTheme->_findFile($img_file, true))
797 $target = "$directory/images/buttons/" . basename($from);
799 copy($WikiTheme->_path . $from, $target);
801 if (copy($WikiTheme->_path . $from, $target)) {
802 _copyMsg($from, fmt("... copied to %s", $target));
804 _copyMsg($from, fmt("... not copied to %s", $target));
808 $target = "images/buttons/" . basename($from);
809 $zip->addSrcFile($target, $WikiTheme->_path . $from);
811 } elseif (!$silent) {
812 _copyMsg($from, _("... not found"));
        // Stylesheets go next to the pages, at the dump root.
816 if (!empty($WikiTheme->dumped_css) and is_array($WikiTheme->dumped_css)) {
817 foreach ($WikiTheme->dumped_css as $css_file) {
818 if (array_key_exists($css_file, $already_images))
820 $already_images[$css_file] = 1;
822 and ($from = $WikiTheme->_findFile(basename($css_file), true))
826 $target = "$directory/" . basename($css_file);
828 copy($WikiTheme->_path . $from, $target);
830 if (copy($WikiTheme->_path . $from, $target)) {
831 _copyMsg($from, fmt("... copied to %s", $target));
833 _copyMsg($from, fmt("... not copied to %s", $target));
837 //$attrib = array('is_ascii' => 0);
838 $target = basename($css_file);
839 $zip->addSrcFile($target, $WikiTheme->_path . $from);
841 } elseif (!$silent) {
842 _copyMsg($from, _("... not found"));
        // PDFHTML: feed the dumped files to the external HTML->PDF tool.
850 if ($WikiTheme->DUMP_MODE == 'PDFHTML') {
851 if (USE_EXTERNAL_HTML2PDF and $outfiles) {
852 $cmd = EXTERNAL_HTML2PDF_PAGELIST . ' "' . join('" "', $outfiles) . '"';
853 $filename = FilenameForPage($firstpage);
855 $tmpfile = $directory . "/createpdf.bat";
856 $fp = fopen($tmpfile, "wb");
857 fwrite($fp, $cmd . " > $filename.pdf");
860 if (!headers_sent()) {
861 Header('Content-Type: application/pdf');
864 $tmpdir = getUploadFilePath();
865 passthru($cmd . " > $tmpdir/$filename.pdf");
866 $errormsg = "<br />\nGenerated <a href=\"" . getUploadDataPath() . "$filename.pdf\">Upload:$filename.pdf</a>\n";
        // Intermediate HTML files are removed after PDF generation.
870 foreach ($outfiles as $f) unlink($f);
873 if (!empty($errormsg)) {
874 $request->discardOutput();
875 $GLOBALS['ErrorManager']->_postponed_errors = array();
        // Restore the global theme state mutated at the top.
879 $ErrorManager->popErrorHandler();
881 $WikiTheme->HTML_DUMP_SUFFIX = '';
882 $WikiTheme->DUMP_MODE = false;
883 $WikiTheme->_MoreAttr['body'] = isset($_bodyAttr) ? $_bodyAttr : '';
886 ////////////////////////////////////////////////////////////////
888 // Functions for restoring.
890 ////////////////////////////////////////////////////////////////
// Store one parsed page ($pageinfo: pagename/content/pagedata/versiondata)
// into the wiki database, handling edit conflicts:
//  - merge arg -> hand off to LoadFileConflictPageEditor for interactive merge;
//  - overwrite arg / edit[overwrite] -> force-save over conflicting content;
//  - otherwise a conflicting non-default page is skipped and merge/overwrite
//    buttons are offered ($needs_merge);
//  - identical content creates no new revision.
// pgsrc-sourced loads are attributed to ADMIN_USER (authorship of the
// distributed default pages), keeping author_id as the loading user.
// The static $overwite_all [sic, original spelling] ensures the
// "overwrite all" hint button is rendered only once per request.
// NOTE(review): this excerpt omits many original lines (returns after
// error prints, brace closes, parts of the conflict branches); comments
// below only describe what the visible code shows.
892 function SavePage(&$request, &$pageinfo, $source, $filename)
894 static $overwite_all = false;
895 $pagedata = $pageinfo['pagedata']; // Page level meta-data.
896 $versiondata = $pageinfo['versiondata']; // Revision level meta-data.
898 if (empty($pageinfo['pagename'])) {
899 PrintXML(HTML::p(HTML::strong(_("Empty pagename!"))));
        // Fall back to the author name when no author_id was recorded.
903 if (empty($versiondata['author_id']))
904 $versiondata['author_id'] = $versiondata['author'];
906 // remove invalid backend specific chars. utf8 issues mostly
907 $pagename_check = new WikiPagename($pageinfo['pagename']);
908 if (!$pagename_check->isValid()) {
909 PrintXML(HTML::p(HTML::strong(sprintf(_("ā%sā: Bad page name"), $pageinfo['pagename']))));
912 $pagename = $pagename_check->getName();
913 $content = $pageinfo['content'];
        // Special case: refresh the InterWikiMap from the external map file.
915 if ($pagename == __("InterWikiMap"))
916 $content = _tryinsertInterWikiMap($content);
918 $dbi =& $request->_dbi;
919 $page = $dbi->getPage($pagename);
921 // Try to merge if updated pgsrc contents are different. This
922 // whole thing is hackish
923 $needs_merge = false;
927 if ($request->getArg('merge')) {
929 } elseif ($request->getArg('overwrite')) {
933 $current = $page->getCurrentRevision();
935 $edit = $request->getArg('edit');
937 if (isset($edit['keep_old'])) {
940 } elseif (isset($edit['overwrite'])) {
        // Merge path: conflicting real content -> interactive conflict editor.
943 } elseif ($current and (!$current->hasDefaultContents())
944 && ($current->getPackedContent() != $content)
946 include_once 'lib/editpage.php';
947 $request->setArg('pagename', $pagename);
948 $v = $current->getVersion();
949 $request->setArg('revision', $current->getVersion());
950 $p = new LoadFileConflictPageEditor($request);
951 $p->_content = $content;
952 $p->_currentVersion = $v - 1;
953 $p->editPage($saveFailed = true);
954 return; //early return
        // Apply page-level metadata before saving the revision.
958 foreach ($pagedata as $key => $value) {
960 $page->set($key, $value);
963 $mesg = HTML::span();
965 $mesg->pushContent(' ', fmt("from ā%sā", $source));
968 //FIXME: This should not happen! (empty vdata, corrupt cache or db)
969 $current = $page->getCurrentRevision();
        // Version 0 means the page did not exist yet: attribute to admin.
971 if ($current->getVersion() == 0) {
972 $versiondata['author'] = ADMIN_USER;
973 $versiondata['author_id'] = ADMIN_USER;
974 $mesg->pushContent(' - ', _("New page"));
977 if ((!$current->hasDefaultContents())
978 && ($current->getPackedContent() != $content)
981 $mesg->pushContent(' ',
982 fmt("has edit conflicts - overwriting anyway"));
984 if (substr_count($source, 'pgsrc')) {
985 $versiondata['author'] = ADMIN_USER;
986 // but leave authorid as userid who loaded the file
989 if (isset($edit['keep_old'])) {
990 $mesg->pushContent(' ', fmt("keep old"));
992 $mesg->pushContent(' ', fmt("has edit conflicts - skipped"));
993 $needs_merge = true; // hackish, to display the buttons
997 } elseif ($current->getPackedContent() == $content) {
998 // The page content is the same, we don't need a new revision.
999 $mesg->pushContent(' ',
1000 fmt("content is identical to current version %d - no new revision created",
1001 $current->getVersion()));
1008 // in case of failures print the culprit:
1009 if (!is_a($request, 'MockRequest')) {
1010 PrintXML(HTML::span(WikiLink($pagename)));
        // Forced create so the load always yields a new revision here.
1013 $new = $page->save($content, WIKIDB_FORCE_CREATE, $versiondata);
1015 $mesg->pushContent(' ', fmt("- saved to database as version %d",
1016 $new->getVersion()));
1017 $mesg->pushContent(HTML::br());
1021 // hackish, $source contains needed path+filename
1022 $f = str_replace(sprintf(_("MIME file %s"), ''), '', $f);
1023 $f = str_replace(sprintf(_("Serialized file %s"), ''), '', $f);
1024 $f = str_replace(sprintf(_("plain file %s"), ''), '', $f);
1025 //check if uploaded file? they pass just the content, but the file is gone
        // Conflict UI: "Merge" and "Restore Anyway" buttons for this file...
1028 $meb = Button(array('action' => 'loadfile',
1032 _("PhpWikiAdministration"),
1034 $owb = Button(array('action' => 'loadfile',
1035 'overwrite' => true,
1037 _("Restore Anyway"),
1038 _("PhpWikiAdministration"),
1040 $mesg->pushContent(' ', $meb, " ", $owb);
        // ...plus a once-per-request "overwrite all" hint button.
1041 if (!$overwite_all) {
1042 $args = $request->getArgs();
1043 $args['overwrite'] = 1;
1044 $owb = Button($args,
1046 _("PhpWikiAdministration"),
1048 $mesg->pushContent(HTML::span(array('class' => 'hint'), $owb));
1049 $overwite_all = true;
1052 $mesg->pushContent(HTML::em(_(" Sorry, cannot merge.")));
1056 if (!is_a($request, 'MockRequest')) {
1058 PrintXML(HTML::em(WikiLink($pagename)), $mesg);
1065 // action=revert (by diff)
// action=revert: restore an older revision of a page as a new revision.
// Guards (each redirects back to the page with a message): missing
// version arg, empty page (current version 0), reverting to the current
// version, or an explicit cancel. Without the verify arg it renders a
// confirmation form previewing the target revision; with verify it
// saves the old content as version current+1, attributed to the current
// user with a "Revert to version %d" summary, then re-browses the page.
// NOTE(review): excerpt has elided lines (returns after redirects, the
// missing-version condition, form close); verify against the full file.
1066 function RevertPage(&$request)
1068 $mesg = HTML::div();
1069 $pagename = $request->getArg('pagename');
1070 $version = $request->getArg('version');
1071 $dbi =& $request->_dbi;
1072 $page = $dbi->getPage($pagename);
1074 $request->redirect(WikiURL($page,
1075 array('warningmsg' => _('Revert: missing required version argument'))));
1078 $current = $page->getCurrentRevision();
1079 $currversion = $current->getVersion();
1080 if ($currversion == 0) {
1081 $request->redirect(WikiURL($page,
1082 array('errormsg' => _('No revert: no page content'))));
1085 if ($currversion == $version) {
1086 $request->redirect(WikiURL($page,
1087 array('warningmsg' => _('No revert: same version page'))));
1090 if ($request->getArg('cancel')) {
1091 $request->redirect(WikiURL($page,
1092 array('warningmsg' => _('Revert cancelled'))));
        // Confirmation step: POST back all current args plus verify=1.
1095 if (!$request->getArg('verify')) {
1096 $mesg->pushContent(HTML::p(fmt("Are you sure to revert %s to version $version?", WikiLink($pagename))),
1097 HTML::form(array('action' => $request->getPostURL(),
1098 'method' => 'post'),
1099 HiddenInputs($request->getArgs(), false, array('verify')),
1100 HiddenInputs(array('verify' => 1)),
1101 Button('submit:verify', _("Yes"), 'button'),
1102 HTML::Raw(' '),
1103 Button('submit:cancel', _("Cancel"), 'button'))
1105 $rev = $page->getRevision($version);
1106 $html = HTML(HTML::fieldset($mesg), HTML::hr(), $rev->getTransformedContent());
1107 $template = Template('browse',
1108 array('CONTENT' => $html));
1109 GeneratePage($template, $pagename, $rev);
1110 $request->checkValidators();
        // Verified: save the old revision's content as a new version.
1114 $rev = $page->getRevision($version);
1115 $content = $rev->getPackedContent();
1116 $versiondata = $rev->_data;
1117 $versiondata['summary'] = sprintf(_("Revert to version %d"), $version);
1118 $versiondata['mtime'] = time();
1119 $versiondata['author'] = $request->getUser()->getId();
1120 $new = $page->save($content, $currversion + 1, $versiondata);
1123 $mesg = HTML::span();
1124 $pagelink = WikiLink($pagename);
1125 $mesg->pushContent(fmt("Revert: %s", $pagelink),
1126 fmt("- version %d saved to database as version %d",
1127 $version, $new->getVersion()));
1128 // Force browse of current page version.
1129 $request->setArg('version', false);
1130 $template = Template('savepage', array());
1131 $template->replace('CONTENT', $new->getTransformedContent());
1133 GeneratePage($template, $mesg, $new);
// When loading the InterWikiMap page, append the contents of the
// external INTERWIKI_MAP_FILE wrapped in a <verbatim> block — unless the
// content already has a verbatim block, the constant is undefined, or
// the file is missing (in those cases a notice is raised and the content
// is returned unchanged — the $goback handling/returns are elided here).
// NOTE(review): excerpt has elided lines; the final `return $content`
// and fclose() are not visible — verify against the full file.
1137 function _tryinsertInterWikiMap($content)
1140 if (strpos($content, '<'.'verbatim'.'>')) { // Avoid warning about unknown HTML tag
1141 //$error_html = " The newly loaded pgsrc already contains a verbatim block.";
1144 if (!$goback && !defined('INTERWIKI_MAP_FILE')) {
1145 $error_html = sprintf(" " . _("%s: not defined"), "INTERWIKI_MAP_FILE");
1148 $mapfile = FindFile(INTERWIKI_MAP_FILE, 1);
1149 if (!$goback && !file_exists($mapfile)) {
1150 $error_html = sprintf(" " . _("File ā%sā not found."), INTERWIKI_MAP_FILE);
1154 if (!empty($error_html))
1155 trigger_error(_("Default InterWiki map file not loaded.")
1156 . $error_html, E_USER_NOTICE);
1160 // if loading from virgin setup do echo, otherwise trigger_error E_USER_NOTICE
1161 if (!is_a($GLOBALS['request'], 'MockRequest'))
1162 echo sprintf(_("Loading InterWikiMap from external file %s."), $mapfile), "<br />";
        // Slurp the map file and append it inside a verbatim block.
1164 $fd = fopen($mapfile, "rb");
1165 $data = fread($fd, filesize($mapfile));
1167 $content = $content . "\n<verbatim>\n$data</verbatim>\n";
/**
 * Parse one page from an old-style PHP-serialized dump into the
 * $pageinfo structure used by SavePage(): an array with 'pagedata'
 * (page-level meta-data) and 'versiondata' (revision-level meta-data)
 * sub-arrays plus top-level entries such as the content.
 *
 * NOTE(review): the switch over $pagehash keys is only partially visible
 * in this excerpt (several case labels and returns are elided).
 *
 * @param string $text serialized page dump
 * @param string $default_pagename used when the dump carries no 'pagename'
 * @param object $user fallback author (its getId() is used)
 */
1171 function ParseSerializedPage($text, $default_pagename, $user)
// Quick sanity check that $text looks like a serialized PHP array
// before handing it to unserialize().
1173 if (!preg_match('/^a:\d+:{[si]:\d+/', $text))
// SECURITY NOTE(review): unserialize() of dump text can instantiate
// arbitrary classes (object injection); the regex above only screens
// the prefix, it does not prevent embedded objects.
1176 $pagehash = unserialize($text);
1178 // Split up pagehash into four parts:
1181 // page-level meta-data
1182 // revision-level meta-data
// Bit flags tested against the dump's 'flags' value below; define
// fallbacks when the core has not defined them.
// NOTE(review): both constants default to 1 here — two distinct flags
// sharing one bit looks wrong; verify FLAG_PAGE_EXTERNAL's real value.
1184 if (!defined('FLAG_PAGE_LOCKED'))
1185 define('FLAG_PAGE_LOCKED', 1);
1186 if (!defined('FLAG_PAGE_EXTERNAL'))
1187 define('FLAG_PAGE_EXTERNAL', 1);
1188 $pageinfo = array('pagedata' => array(),
1189 'versiondata' => array());
// Aliases into $pageinfo so the loop below can fill both parts in place.
1191 $pagedata = &$pageinfo['pagedata'];
1192 $versiondata = &$pageinfo['versiondata'];
1194 // Fill in defaults.
1195 if (empty($pagehash['pagename']))
1196 $pagehash['pagename'] = $default_pagename;
1197 if (empty($pagehash['author'])) {
1198 $pagehash['author'] = $user->getId();
// Route each dump key to the right part of $pageinfo.
1201 foreach ($pagehash as $key => $value) {
1206 $pageinfo[$key] = $value;
// Multi-line values arrive as arrays of lines; rejoin them.
1209 $pageinfo[$key] = join("\n", $value);
// 'flags' handling: translate bit flags into pagedata entries.
1212 if (($value & FLAG_PAGE_LOCKED) != 0)
1213 $pagedata['locked'] = 'yes';
1214 if (($value & FLAG_PAGE_EXTERNAL) != 0)
1215 $pagedata['external'] = 'yes';
1219 $pagedata[$key] = $value;
// 'perm' handling: ACLs are stored mimeified in dumps.
1223 $pagedata['perm'] = ParseMimeifiedPerm($value);
1225 case 'lastmodified':
1226 $versiondata['mtime'] = $value;
// Remaining keys are revision-level meta-data.
1231 $versiondata[$key] = $value;
/**
 * usort() comparator: order page-info arrays by ascending 'version'.
 *
 * @param array $a page info with an integer 'version' entry
 * @param array $b page info with an integer 'version' entry
 * @return int negative, zero or positive as $a is older than, the same
 *             version as, or newer than $b
 */
function SortByPageVersion($a, $b)
{
    $left = $a['version'];
    $right = $b['version'];
    return $left - $right;
}
1244 * Security alert! We must not allow importing config.ini into our wiki (nor from a sister wiki),
1245 * because the SQL passwords are stored there in plaintext, and the webserver must be able to read it.
1246 * Detected by Santtu Jarvi.
/**
 * Import one file (or in-memory $text) as wiki page(s).  Dispatches on
 * the detected format — MIME multi-part dump, PHP-serialized dump, or
 * plain text — and hands each page to SavePage().
 *
 * NOTE(review): several interior lines (braces, early returns, some
 * array keys) are elided in this excerpt.
 *
 * @param object $request WikiRequest
 * @param string $filename source path; its basename becomes the page name
 * @param string|false $text file contents; read from $filename if false
 * @param int|false $mtime last-modified time; defaults to now when absent
 */
1248 function LoadFile(&$request, $filename, $text = false, $mtime = false)
// Security (Santtu Jarvi): refuse to import config/config*.ini — it
// holds plaintext SQL passwords and is readable by the webserver.
1250 if (preg_match("/config$/", dirname($filename)) // our or other config
1251 and preg_match("/config.*\.ini/", basename($filename))
1252 ) // backups and other versions also
1254 trigger_error(sprintf("Refused to load %s", $filename), E_USER_WARNING);
// No $text supplied: read the file from disk.
1257 if (!is_string($text)) {
1259 $stat = stat($filename);
1261 $text = implode("", file($filename));
// Reset the per-file time budget; debug runs get a longer leash.
1264 if (!$request->getArg('start_debug')) @set_time_limit(30); // Reset watchdog
1265 else @set_time_limit(240);
1267 // FIXME: basename("filewithnoslashes") seems to return garbage sometimes.
1268 $basename = basename("/dummy/" . $filename);
1271 $mtime = time(); // Last resort.
// Page files are stored urlencoded on disk (e.g. pgsrc files).
1273 $default_pagename = rawurldecode($basename);
// Case 1: MIME-ified dump, possibly holding several revisions.
1274 if (($parts = ParseMimeifiedPages($text))) {
// Remember the caller's overwrite setting so it can be restored below.
1275 if (count($parts) > 1)
1276 $overwrite = $request->getArg('overwrite');
// Save older revisions first so version numbers come out ascending.
1277 usort($parts, 'SortByPageVersion');
1278 foreach ($parts as $pageinfo) {
// Force overwrite so every revision in the dump gets stored.
1280 if (count($parts) > 1)
1281 $request->setArg('overwrite', 1);
1282 SavePage($request, $pageinfo, sprintf(_("MIME file %s"),
1283 $filename), $basename);
// Restore the caller's overwrite setting afterwards.
1285 if (count($parts) > 1)
1287 $request->setArg('overwrite', $overwrite);
1289 unset($request->_args['overwrite']);
// Case 2: old PHP-serialized dump format.
1290 } elseif (($pageinfo = ParseSerializedPage($text, $default_pagename,
1291 $request->getUser()))
1293 SavePage($request, $pageinfo, sprintf(_("Serialized file %s"),
1294 $filename), $basename);
// Case 3 (fallback): treat the bytes as plain wiki text.
1297 $user = $request->getUser();
1299 // Assume plain text file.
1300 $pageinfo = array('pagename' => $default_pagename,
1301 'pagedata' => array(),
// (elided array key, presumably 'versiondata')
1303 => array('author' => $user->getId()),
// Strip trailing whitespace before each newline.
1304 'content' => preg_replace('/[ \t\r]*\n/', "\n",
1307 SavePage($request, $pageinfo, sprintf(_("plain file %s"), $filename),
/**
 * Import every page file contained in a ZIP archive.
 *
 * NOTE(review): the skip branch's terminating statements are elided in
 * this excerpt.
 *
 * @param object $request WikiRequest
 * @param string|resource $zipfile path or open stream of the archive
 * @param array $files if non-empty, only these entry names are loaded
 * @param array $exclude entry names to skip
 */
1312 function LoadZip(&$request, $zipfile, $files = array(), $exclude = array())
1314 $zip = new ZipReader($zipfile);
// Longer per-page timeout when started with debugging enabled.
1315 $timeout = (!$request->getArg('start_debug')) ? 20 : 120;
// Iterate the archive entry by entry.
1316 while (list ($fn, $data, $attrib) = $zip->readFile()) {
1317 // FIXME: basename("filewithnoslashes") seems to return
1318 // garbage sometimes.
1319 $fn = basename("/dummy/" . $fn);
// Apply include/exclude filters; filtered entries are reported and
// (in elided lines) skipped.
1320 if (($files && !in_array($fn, $files))
1321 || ($exclude && in_array($fn, $exclude))
1323 PrintXML(HTML::p(WikiLink($fn)),
1324 HTML::p(_("Skipping")));
1328 longer_timeout($timeout); // longer timeout per page
// Import this entry, preserving the archived modification time.
1329 LoadFile($request, $fn, $data, $attrib['mtime']);
/**
 * Import all page files from a directory.  HOME_PAGE is deliberately
 * loaded last so a partially failed import can simply be re-run.
 *
 * @param object $request WikiRequest
 * @param string $dirname directory to scan
 * @param array $files if non-empty, only these file names are loaded
 * @param array $exclude file names to skip
 */
1333 function LoadDir(&$request, $dirname, $files = array(), $exclude = array())
1335 $fileset = new LimitedFileSet($dirname, $files, $exclude);
// Report what the fileset filtered out (only when no explicit
// include-list was given).  NOTE(review): $list is created in an
// elided line — presumably an HTML list container.
1337 if (!$files and ($skiplist = $fileset->getSkippedFiles())) {
1338 PrintXML(HTML::p(HTML::strong(_("Skipping"))));
1340 foreach ($skiplist as $file)
1341 $list->pushContent(HTML::li(WikiLink($file)));
1342 PrintXML(HTML::p($list));
1345 // Defer HomePage loading until the end. If anything goes wrong
1346 // the pages can still be loaded again.
1347 $files = $fileset->getFiles();
1348 if (in_array(HOME_PAGE, $files)) {
1349 $files = array_diff($files, array(HOME_PAGE));
1350 $files[] = HOME_PAGE;
// Longer per-page timeout when started with debugging enabled.
1352 $timeout = (!$request->getArg('start_debug')) ? 20 : 120;
1353 foreach ($files as $file) {
1354 longer_timeout($timeout); // longer timeout per page
1355 if (substr($file, -1, 1) != '~') // refuse to load backup files
1356 LoadFile($request, "$dirname/$file");
/**
 * FileSet restricted by explicit include and exclude lists; file names
 * rejected by _filenameSelector() are remembered and can be retrieved
 * afterwards via getSkippedFiles().
 */
1360 class LimitedFileSet extends FileSet
// @param string $dirname directory to scan
// @param array $_include if non-empty, only these names are accepted
// @param array $exclude names to reject
1362 function __construct($dirname, $_include, $exclude)
1364 $this->_includefiles = $_include;
1365 $this->_exclude = $exclude;
1366 $this->_skiplist = array();
// NOTE(review): PHP4-style parent constructor call; if FileSet declares
// __construct() this should be parent::__construct($dirname) — verify.
1367 parent::FileSet($dirname);
// Selector callback used by the parent while scanning the directory:
// reject names missing from the include list or present in the exclude
// list, recording them in $this->_skiplist (the return statements are
// elided in this excerpt).
1370 function _filenameSelector($fn)
1372 $incl = &$this->_includefiles;
1373 $excl = &$this->_exclude;
1375 if (($incl && !in_array($fn, $incl))
1376 || ($excl && in_array($fn, $excl))
1378 $this->_skiplist[] = $fn;
// Accessor for the file names rejected during the scan.
1385 function getSkippedFiles()
1387 return $this->_skiplist;
/**
 * Check whether $filename_or_fd refers to a ZIP archive by inspecting
 * its first four bytes for a ZIP local-file-header or
 * central-directory-header signature.
 *
 * @param string|resource $filename_or_fd path to a file, or an already
 *        opened seekable stream (its read position is restored)
 * @return bool true when the magic bytes equal ZIP_LOCHEAD_MAGIC or
 *         ZIP_CENTHEAD_MAGIC (constants defined in lib/ziplib.php)
 */
function IsZipFile($filename_or_fd)
{
    // See if it looks like zip file
    if (is_string($filename_or_fd)) {
        // A path was given: open it just long enough to read the magic.
        $fd = fopen($filename_or_fd, "rb");
        if (!$fd) {
            return false; // an unreadable file cannot be loaded as a zip
        }
        $magic = fread($fd, 4);
        fclose($fd);
    } else {
        // A stream was given: peek at the magic, then restore the
        // caller's read position.
        $fpos = ftell($filename_or_fd);
        $magic = fread($filename_or_fd, 4);
        fseek($filename_or_fd, $fpos);
    }
    // Strict comparison: the magic is a 4-byte binary string, so avoid
    // PHP's loose string-to-number coercion rules.
    return $magic === ZIP_LOCHEAD_MAGIC || $magic === ZIP_CENTHEAD_MAGIC;
}
/**
 * Load pages from $file_or_dir, whatever it is: a directory, a ZIP
 * archive, a single file, or an http/ftp URL.  Dispatches to LoadDir(),
 * LoadZip() or LoadFile() accordingly.
 *
 * NOTE(review): several branch bodies and braces are elided in this
 * excerpt.
 *
 * @param object $request WikiRequest
 * @param string $file_or_dir path or URL of the source
 * @param array $files if non-empty, only these file names are loaded
 * @param array $exclude file names to skip
 */
1407 function LoadAny(&$request, $file_or_dir, $files = array(), $exclude = array())
1409 // Try urlencoded filename for accented characters.
1410 if (!file_exists($file_or_dir)) {
1411 // Make sure there are slashes first to avoid confusing phps
1412 // with broken dirname or basename functions.
1413 // FIXME: windows uses \ and :
1414 if (is_integer(strpos($file_or_dir, "/"))) {
1415 $newfile = FindFile($file_or_dir, true);
1416 // Panic. urlencoded by the browser (e.g. San%20Diego => San Diego)
// Retry with just the basename re-urlencoded.
1418 $file_or_dir = dirname($file_or_dir) . "/"
1419 . rawurlencode(basename($file_or_dir));
1421 // This is probably just a file.
1422 $file_or_dir = rawurlencode($file_or_dir);
1426 $type = filetype($file_or_dir);
1427 if ($type == 'link') {
1428 // For symbolic links, use stat() to determine
1429 // the type of the underlying file.
1430 list(, , $mode) = stat($file_or_dir);
// Upper four bits of st_mode encode the file type (S_IFMT >> 12).
1431 $type = ($mode >> 12) & 017;
// 004 is S_IFDIR >> 12: the link target is a directory.
1434 elseif ($type == 004)
// Dispatch on the resolved type.
1439 $request->finish(fmt("Empty or not existing source. Unable to load: %s", $file_or_dir));
1440 } elseif ($type == 'dir') {
1441 LoadDir($request, $file_or_dir, $files, $exclude);
// http/ftp URLs pass through and are handled as files below.
1442 } elseif ($type != 'file' && !preg_match('/^(http|ftp):/', $file_or_dir)) {
1443 $request->finish(fmt("Bad file type: %s", $type));
1444 } elseif (IsZipFile($file_or_dir)) {
1445 LoadZip($request, $file_or_dir, $files, $exclude);
1446 } else /* if (!$files || in_array(basename($file_or_dir), $files)) */ {
1447 LoadFile($request, $file_or_dir);
/**
 * Action handler: load pages from the file or directory named by the
 * 'source' request argument, wrapped in the usual load/dump page frame
 * (StartLoadDump .. EndLoadDump).
 *
 * @param object $request WikiRequest
 */
function LoadFileOrDir(&$request)
{
    $source = $request->getArg('source');
    $finder = new FileFinder;
    // Normalize path separators for the current platform.
    $source = $finder->slashifyPath($source);
    // Fix: the title msgid used mis-encoded quote characters ("ā…ā");
    // restore the canonical curly-quoted msgid so the gettext lookup
    // matches the translation catalogs again.
    StartLoadDump($request,
        sprintf(_("Loading “%s”"), $source));
    LoadAny($request, $source);
    EndLoadDump($request);
}
1463 * HomePage was not found so first-time install is supposed to run.
1464 * - import all pgsrc pages.
1465 * - Todo: installer interface to edit config/config.ini settings
1466 * - Todo: ask for existing old index.php to convert to config/config.ini
1467 * - Todo: theme-specific pages:
1468 * blog - HomePage, ADMIN_USER/Blogs
/**
 * First-time install: populate a virgin wiki from the pgsrc page files.
 * Loads the default (English) generic pages and the localized pgsrc set,
 * then makes sure every mandatory page exists, and finally locks the
 * InterWikiMap page.
 *
 * NOTE(review): several interior lines (braces, some list entries and
 * the restoration of $real_user) are elided in this excerpt.
 *
 * @param object $request WikiRequest
 */
1470 function SetupWiki(&$request)
1472 global $GenericPages, $LANG;
1474 //FIXME: This is a hack (err, "interim solution")
1475 // This is a bogo-bogo-login: Login without
1476 // saving login information in session state.
1477 // This avoids logging in the unsuspecting
1478 // visitor as ADMIN_USER
1480 // This really needs to be cleaned up...
1481 // (I'm working on it.)
1482 $real_user = $request->_user;
1483 $request->_user = new _BogoUser(ADMIN_USER);
1485 StartLoadDump($request, _("Loading up virgin wiki"));
// Localized page sources take precedence over the default English set.
1487 $pgsrc = FindLocalizedFile(WIKI_PGSRC);
1488 $default_pgsrc = FindFile(DEFAULT_WIKI_PGSRC);
// Generic pages may overwrite; the localized pgsrc load must not.
1490 $request->setArg('overwrite', true);
1491 if ($default_pgsrc != $pgsrc) {
1492 LoadAny($request, $default_pgsrc, $GenericPages);
1494 $request->setArg('overwrite', false);
1495 LoadAny($request, $pgsrc);
1496 $dbi =& $request->_dbi;
1498 // Ensure that all mandatory pages are loaded
1499 $finder = new FileFinder;
// NOTE(review): some entries of this list are elided here.
1501 $mandatory = array('SandBox',
1502 'Template/Category',
1506 'CategoryActionPage',
1507 'PhpWikiAdministration');
// Extra mandatory pages for FusionForge installations.
1509 if ((defined('FUSIONFORGE') and FUSIONFORGE)) {
1510 $mandatory[] = 'Template/UserPage';
1512 $mandatory[] = 'Help/TextFormattingRules';
1515 $mandatory = array_merge($mandatory, $GLOBALS['AllActionPages']);
1516 $mandatory[] = constant('HOME_PAGE');
1518 foreach ($mandatory as $f) {
// Mandatory pages may be translated; try the translated name first.
1519 $page = gettext($f);
1520 $epage = urlencode($page);
1521 if (!$dbi->isWikiPage($page)) {
1522 // translated version provided?
1523 if ($lf = FindLocalizedFile($pgsrc . $finder->_pathsep . $epage, 1)) {
1524 LoadAny($request, $lf);
1525 } else { // load english version of required action page
1526 LoadAny($request, FindFile(DEFAULT_WIKI_PGSRC . $finder->_pathsep . urlencode($f)));
// Re-check: the page must exist by now, or the install is broken.
1530 if (!$dbi->isWikiPage($page)) {
1531 trigger_error(sprintf("Mandatory file %s couldn't be loaded!", $page),
// Lock the InterWikiMap page so visitors cannot edit the map.
1536 $pagename = __("InterWikiMap");
1537 $map = $dbi->getPage($pagename);
1538 $map->set('locked', true);
1539 PrintXML(HTML::p(HTML::em(WikiLink($pagename)), HTML::strong(" locked")));
1540 EndLoadDump($request);
/**
 * Action handler: import pages from a file uploaded via HTTP POST.
 * ZIP uploads are unpacked (skipping RecentChanges); anything else is
 * loaded as a single file.
 *
 * NOTE(review): the branch condition between the LoadZip and LoadFile
 * calls is elided in this excerpt (presumably an IsZipFile() check).
 *
 * @param object $request WikiRequest
 */
1543 function LoadPostFile(&$request)
1545 $upload = $request->getUploadedFile('file');
// No 'file' field in the POST: abort the request.
1548 $request->finish(_("No uploaded file to upload?")); // FIXME: more concise message
1550 // Dump http headers.
1551 StartLoadDump($request, sprintf(_("Uploading %s"), $upload->getName()));
// Open the uploaded file as a stream for content sniffing / unpacking.
1553 $fd = $upload->open();
1555 LoadZip($request, $fd, array(), array(_("RecentChanges")));
1557 LoadFile($request, $upload->getName(), $upload->getContents());
1559 EndLoadDump($request);
1565 // c-basic-offset: 4
1566 // c-hanging-comment-ender-p: nil
1567 // indent-tabs-mode: nil