5 Copyright 1999,2000,2001,2002,2004,2005,2006,2007 $ThePhpWikiProgrammingTeam
6 Copyright 2008-2009 Marc-Etienne Vargenau, Alcatel-Lucent
8 This file is part of PhpWiki.
10 PhpWiki is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2 of the License, or
13 (at your option) any later version.
15 PhpWiki is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with PhpWiki; if not, write to the Free Software
22 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
25 require_once("lib/ziplib.php");
26 require_once("lib/Template.php");
29 * ignore fatal errors during dump
/**
 * Error-handler callback installed (via $ErrorManager->pushErrorHandler)
 * while a dump is running.
 *
 * Visible behavior: fatal errors are downgraded to E_USER_WARNING and
 * swallowed (return true), so a failing plugin cannot abort a whole dump.
 * The errstr is also matched against /Plugin/; the branch body for that
 * match is not visible in this excerpt.
 *
 * NOTE(review): this listing has interior lines elided (brace structure
 * is incomplete) — comments describe only the statements visible here.
 *
 * @param object $error PhpWiki error object (provides isFatal(), errno, errstr).
 * @return bool true to ignore the error; otherwise remaining handlers run.
 */
31 function _dump_error_handler(&$error) {
32 if ($error->isFatal()) {
// Downgrade so the ErrorManager does not treat this as fatal.
33 $error->errno = E_USER_WARNING;
36 return true; // Ignore error
38 if (preg_match('/Plugin/', $error->errstr))
41 // let the message come through: call the remaining handlers:
/**
 * Begin a load/dump operation: emit the top half of the HTML page
 * (everything before %BODY%) so progress output can be streamed, and
 * initialize the deferred page-change-notification marker.
 *
 * NOTE(review): interior lines are elided in this excerpt; the early
 * return for MockRequest and part of the template setup are not visible.
 * NOTE(review): ereg_replace() is removed in PHP 7+ — presumably this
 * file targets legacy PHP; verify before upgrading.
 *
 * @param WikiRequest $request
 * @param string      $title  Page title for the emitted HTML skeleton.
 * @param string|object $html Optional extra content; '%BODY%' is used as
 *                            a split marker for the streamed output hack.
 */
45 function StartLoadDump(&$request, $title, $html = '')
47 // MockRequest is from the unit testsuite, a faked request. (may be cmd-line)
48 // We are silent on unittests.
49 if (isa($request,'MockRequest'))
51 // FIXME: This is a hack. This really is the worst overall hack in phpwiki.
53 $html->pushContent('%BODY%');
54 $tmpl = Template('html', array('TITLE' => $title,
56 'CONTENT' => $html ? $html : '%BODY%'));
// Print only the part of the expanded template before %BODY%.
57 echo ereg_replace('%BODY%.*', '', $tmpl->getExpansion($html));
58 $request->chunkOutput();
60 // set marker for sendPageChangeNotification()
61 $request->_deferredPageChangeNotification = array();
/**
 * Finish a load/dump operation: send any deferred page-change
 * notification mail, print a "Complete." message with a return link,
 * and close the HTML document (with per-theme markup fixups).
 *
 * NOTE(review): interior lines are elided in this excerpt — switch/if
 * bodies and several closing braces are missing; comments below only
 * describe the visible statements.
 *
 * @param WikiRequest $request
 */
64 function EndLoadDump(&$request)
68 if (isa($request,'MockRequest'))
70 $action = $request->getArg('action');
// Map the action to a human-readable label used in the return link anchor.
73 case 'zip': $label = _("ZIP files of database"); break;
74 case 'dumpserial': $label = _("Dump to directory"); break;
75 case 'upload': $label = _("Upload File"); break;
76 case 'loadfile': $label = _("Load File"); break;
77 case 'upgrade': $label = _("Upgrade"); break;
79 case 'ziphtml': $label = _("Dump pages as XHTML"); break;
81 if ($label) $label = str_replace(" ","_",$label);
82 if ($action == 'browse') // loading virgin
83 $pagelink = WikiLink(HOME_PAGE);
85 $pagelink = WikiLink(new WikiPageName(_("PhpWikiAdministration"),false,$label));
87 // do deferred sendPageChangeNotification()
88 if (!empty($request->_deferredPageChangeNotification)) {
// Collect pages, recipient addresses, and user ids from the deferred list.
89 $pages = $all_emails = $all_users = array();
90 foreach ($request->_deferredPageChangeNotification as $p) {
91 list($pagename, $emails, $userids) = $p;
93 $all_emails = array_unique(array_merge($all_emails, $emails));
94 $all_users = array_unique(array_merge($all_users, $userids));
96 $editedby = sprintf(_("Edited by: %s"), $request->_user->getId());
97 $content = "Loaded the following pages:\n" . join("\n", $pages);
// One combined mail for all loaded pages; success/failure is reported
// via trigger_error at NOTICE/WARNING level respectively.
98 if (mail(join(',',$all_emails),"[".WIKI_NAME."] "._("LoadDump"),
102 trigger_error(sprintf(_("PageChange Notification of %s sent to %s"),
103 join("\n",$pages), join(',',$all_users)), E_USER_NOTICE);
105 trigger_error(sprintf(_("PageChange Notification Error: Couldn't send %s to %s"),
106 join("\n",$pages), join(',',$all_users)), E_USER_WARNING);
111 unset($request->_deferredPageChangeNotification);
113 PrintXML(HTML::p(HTML::strong(_("Complete."))),
114 HTML::p(fmt("Return to %s", $pagelink)));
115 // Ugly hack to get valid XHTML code
// Close theme-specific wrapper divs (bodies elided in this excerpt)
// before emitting the final </body></html>.
116 if (isa($WikiTheme, 'WikiTheme_gforge')) {
122 } else if (isa($WikiTheme, 'WikiTheme_Sidebar')
123 or isa($WikiTheme, 'WikiTheme_MonoBook')) {
128 } else if (isa($WikiTheme, 'WikiTheme_wikilens')) {
133 } else if (isa($WikiTheme, 'WikiTheme_blog')) {
136 } else if (isa($WikiTheme, 'WikiTheme_Crao')
137 or isa($WikiTheme, 'WikiTheme_Hawaiian')
138 or isa($WikiTheme, 'WikiTheme_MacOSX')
139 or isa($WikiTheme, 'WikiTheme_shamino_com')
140 or isa($WikiTheme, 'WikiTheme_smaller')) {
143 echo "</body></html>\n";
146 ////////////////////////////////////////////////////////////////
148 // Functions for dumping.
150 ////////////////////////////////////////////////////////////////
154 * http://www.nacs.uci.edu/indiv/ehood/MIME/2045/rfc2045.html
155 * http://www.faqs.org/rfcs/rfc2045.html
156 * (RFC 1521 has been superseded by RFC 2045 & others).
158 * Also see http://www.faqs.org/rfcs/rfc2822.html
/**
 * Render a wiki page (and optionally its revision history) as an
 * RFC 2045 MIME message suitable for dump files.
 *
 * With STRICT_MAILABLE_PAGEDUMPS an RFC-2822-style header block
 * (Subject/From/To/Date/Mime-Version) is prepended. Each revision is
 * mimeified via MimeifyPageRevision(); multiple revisions are wrapped
 * in a multipart container.
 *
 * NOTE(review): interior lines elided in this excerpt (loop break,
 * some header lines, closing braces are missing from view).
 *
 * @param WikiDB_Page $page
 * @param int $nversions Number of revisions to include; 0 appears to
 *                       mean "all" (the count check is skipped when 0).
 * @return string Header block plus MIME body.
 */
160 function MailifyPage ($page, $nversions = 1)
162 $current = $page->getCurrentRevision(false);
165 if (STRICT_MAILABLE_PAGEDUMPS) {
166 $from = defined('SERVER_ADMIN') ? SERVER_ADMIN : 'foo@bar';
167 //This is for unix mailbox format: (not RFC (2)822)
168 // $head .= "From $from " . CTime(time()) . "\r\n";
169 $head .= "Subject: " . rawurlencode($page->getName()) . "\r\n";
170 $head .= "From: $from (PhpWiki)\r\n";
171 // RFC 2822 requires only a Date: and originator (From:)
172 // field, however the obsolete standard RFC 822 also
173 // requires a destination field.
174 $head .= "To: $from (PhpWiki)\r\n";
176 $head .= "Date: " . Rfc2822DateTime($current->get('mtime')) . "\r\n";
177 $head .= sprintf("Mime-Version: 1.0 (Produced by PhpWiki %s)\r\n",
180 // This should just be entered by hand (or by script?)
181 // in the actual pgsrc files, since only they should have
183 //$head .= "X-Rcs-Id: \$Id\$\r\n";
// Newest-first iteration; stop once $nversions parts are collected
// (unless $nversions == 0, i.e. keep all revisions).
185 $iter = $page->getAllRevisions();
187 while ($revision = $iter->next()) {
188 $parts[] = MimeifyPageRevision($page, $revision);
189 if ($nversions > 0 && count($parts) >= $nversions)
192 if (count($parts) > 1)
193 return $head . MimeMultipart($parts);
195 return $head . $parts[0];
199 * Compute filename to be used for storing contents of a wiki page.
201 * Basically we do a rawurlencode() which encodes everything except
202 * ASCII alphanumerics and '.', '-', and '_'.
204 * But we also want to encode leading dots to avoid filenames like
205 * '.', and '..'. (Also, there's no point in generating "hidden" file
206 * names, like '.foo'.)
208 * We have to apply a different "/" logic for dumpserial, htmldump and zipdump.
209 * dirs are allowed for zipdump and htmldump, not for dumpserial
212 * @param $pagename string Pagename.
213 * @return string Filename for page.
/**
 * Compute the filename used to store a wiki page's dump.
 *
 * rawurlencode()s the pagename, then: for non-dumpserial actions keeps
 * "/" (encoded %2F) as real directory separators, protects leading and
 * trailing dots (%2E), and decodes %20 back to spaces.
 *
 * NOTE(review): interior lines elided (opening brace, the global/
 * request lookup context, and the return statement are not visible).
 *
 * @param string $pagename
 * @param string|false $action Dump action; when false it is presumably
 *                             read from the request — TODO confirm.
 * @return string Filename for the page (return not visible here).
 */
215 function FilenameForPage ($pagename, $action = false)
217 $enc = rawurlencode($pagename);
220 $action = $request->getArg('action');
222 if ($action != 'dumpserial') { // zip, ziphtml, dumphtml
223 // For every %2F we will need to mkdir -p dirname($pagename)
224 $enc = preg_replace('/%2F/', '/', $enc);
// Avoid "." / ".." / hidden-file names; keep spaces readable.
226 $enc = preg_replace('/^\./', '%2E', $enc);
227 $enc = preg_replace('/%20/', ' ', $enc);
228 $enc = preg_replace('/\.$/', '%2E', $enc);
233 * The main() function which generates a zip archive of a PhpWiki.
235 * If $include_archive is false, only the current version of each page
236 * is included in the zip file; otherwise all archived versions are
/**
 * Generate a zip archive of the wiki and stream it to the client.
 *
 * include=all  -> full dump (all revisions per page, via MailifyPage($page, 0));
 * otherwise    -> latest snapshot only. Pages can be restricted with the
 * 'pages' arg and filtered with the 'exclude' arg. Fatal plugin errors
 * are suppressed by _dump_error_handler for the duration.
 *
 * NOTE(review): interior lines elided (zip finish(), several else
 * branches and closing braces are not visible in this excerpt).
 * NOTE(review): $include_empty is set but its use is not visible here
 * — the getAllPages() call visible below passes false as first arg.
 *
 * @param WikiRequest $request
 */
239 function MakeWikiZip (&$request)
241 global $ErrorManager;
242 if ($request->getArg('include') == 'all') {
243 $zipname = WIKI_NAME . _("FullDump") . date('Ymd-Hi') . '.zip';
244 $include_archive = true;
247 $zipname = WIKI_NAME . _("LatestSnapshot") . date('Ymd-Hi') . '.zip';
248 $include_archive = false;
250 $include_empty = false;
251 if ($request->getArg('include') == 'empty') {
252 $include_empty = true;
255 $zip = new ZipWriter("Created by PhpWiki " . PHPWIKI_VERSION, $zipname);
257 /* ignore fatals in plugins */
258 $ErrorManager->pushErrorHandler(new WikiFunctionCb('_dump_error_handler'));
260 $dbi =& $request->_dbi;
261 $thispage = $request->getArg('pagename'); // for "Return to ..."
262 if ($exclude = $request->getArg('exclude')) { // exclude which pagenames
263 $excludeList = explodePageList($exclude);
265 $excludeList = array();
267 if ($pages = $request->getArg('pages')) { // which pagenames
268 if ($pages == '[]') // current page
270 $page_iter = new WikiDB_Array_PageIterator(explodePageList($pages));
272 $page_iter = $dbi->getAllPages(false,false,false,$excludeList);
274 $request_args = $request->args;
// Watchdog budget per page; longer when debugging.
275 $timeout = (! $request->getArg('start_debug')) ? 30 : 240;
277 while ($page = $page_iter->next()) {
278 $request->args = $request_args; // some plugins might change them (esp. on POST)
279 longer_timeout($timeout); // Reset watchdog
// Skip pages with no stored content or an invalid name.
281 $current = $page->getCurrentRevision();
282 if ($current->getVersion() == 0)
285 $pagename = $page->getName();
286 $wpn = new WikiPageName($pagename);
287 if (!$wpn->isValid())
289 if (in_array($page->getName(), $excludeList)) {
// Preserve mtime and write-protection in the zip entry attributes.
293 $attrib = array('mtime' => $current->get('mtime'),
295 if ($page->get('locked'))
296 $attrib['write_protected'] = 1;
298 if ($include_archive)
299 $content = MailifyPage($page, 0);
301 $content = MailifyPage($page);
303 $zip->addRegularFile( FilenameForPage($pagename),
308 $ErrorManager->popErrorHandler();
/**
 * Dump wiki pages as individual MIME files into a server directory
 * ('dumpserial' action). Creates the directory if needed, writes one
 * file per page (FilenameForPage), and streams per-page progress
 * unless running under MockRequest.
 *
 * NOTE(review): interior lines elided (several else branches, fclose,
 * and closing braces are not visible in this excerpt).
 *
 * @param WikiRequest $request Uses args: directory, exclude, include,
 *                             pages, start_debug.
 */
311 function DumpToDir (&$request)
313 $directory = $request->getArg('directory');
314 if (empty($directory))
315 $directory = DEFAULT_DUMP_DIR; // See lib/plugin/WikiForm.php:87
316 if (empty($directory))
317 $request->finish(_("You must specify a directory to dump to"));
319 // see if we can access the directory the user wants us to use
320 if (! file_exists($directory)) {
321 if (! mkdir($directory, 0755))
322 $request->finish(fmt("Cannot create directory '%s'", $directory));
324 $html = HTML::p(fmt("Created directory '%s' for the page dump...",
327 $html = HTML::p(fmt("Using directory '%s'", $directory));
330 StartLoadDump($request, _("Dumping Pages"), $html);
332 $dbi =& $request->_dbi;
333 $thispage = $request->getArg('pagename'); // for "Return to ..."
334 if ($exclude = $request->getArg('exclude')) { // exclude which pagenames
335 $excludeList = explodePageList($exclude);
337 $excludeList = array();
339 $include_empty = false;
340 if ($request->getArg('include') == 'empty') {
341 $include_empty = true;
343 if ($pages = $request->getArg('pages')) { // which pagenames
344 if ($pages == '[]') // current page
346 $page_iter = new WikiDB_Array_PageIterator(explodePageList($pages));
348 $page_iter = $dbi->getAllPages($include_empty,false,false,$excludeList);
351 $request_args = $request->args;
// Watchdog budget per page; longer when debugging.
352 $timeout = (! $request->getArg('start_debug')) ? 30 : 240;
354 while ($page = $page_iter->next()) {
355 $request->args = $request_args; // some plugins might change them (esp. on POST)
356 longer_timeout($timeout); // Reset watchdog
358 $pagename = $page->getName();
359 if (!isa($request,'MockRequest')) {
360 PrintXML(HTML::br(), $pagename, ' ... ');
364 if (in_array($pagename, $excludeList)) {
365 if (!isa($request, 'MockRequest')) {
366 PrintXML(_("Skipped."));
371 $filename = FilenameForPage($pagename);
// Report when the on-disk name differs from the page name.
373 if($page->getName() != $filename) {
374 $msg->pushContent(HTML::small(fmt("saved as %s", $filename)),
// include=all dumps every revision (nversions=0), else latest only.
378 if ($request->getArg('include') == 'all')
379 $data = MailifyPage($page, 0);
381 $data = MailifyPage($page);
383 if ( !($fd = fopen($directory."/".$filename, "wb")) ) {
384 $msg->pushContent(HTML::strong(fmt("couldn't open file '%s' for writing",
385 "$directory/$filename")));
386 $request->finish($msg);
389 $num = fwrite($fd, $data, strlen($data));
390 $msg->pushContent(HTML::small(fmt("%s bytes written", $num)));
391 if (!isa($request, 'MockRequest')) {
// Sanity check: the whole payload must have been written.
395 assert($num == strlen($data));
399 EndLoadDump($request);
/**
 * Print a small per-file progress message during HTML dumps, prefixed
 * with the page/file name when given. Silent under MockRequest.
 *
 * NOTE(review): the PrintXML/echo of $msg and closing braces are
 * elided in this excerpt.
 *
 * @param string|false $page     Name to prefix the message with.
 * @param mixed        $smallmsg Message content (HTML or string).
 */
402 function _copyMsg($page, $smallmsg) {
403 if (!isa($GLOBALS['request'], 'MockRequest')) {
404 if ($page) $msg = HTML(HTML::br(), HTML($page), HTML::small($smallmsg));
405 else $msg = HTML::small($smallmsg);
/**
 * Recursively create a directory path, like `mkdir -p`: split on "/"
 * and create each intermediate component in turn.
 *
 * NOTE(review): interior lines elided ($curr accumulation and the
 * final return are not visible).
 * NOTE(review): PHP's native mkdir($path, $mode, true) provides this
 * recursively since PHP 5 — presumably this helper predates reliance
 * on that; verify before replacing.
 *
 * @param string $pathname   Path with "/" separators.
 * @param int    $permission Mode for each created directory.
 * @return bool FALSE on the first failed mkdir (per visible code).
 */
411 function mkdir_p($pathname, $permission = 0777) {
412 $arr = explode("/", $pathname);
// Single component: plain mkdir suffices.
414 return mkdir($pathname, $permission);
416 $s = array_shift($arr);
418 foreach ($arr as $p) {
421 $ok = mkdir($curr, $permission);
423 if (!$ok) return FALSE;
429 * Dump all pages as XHTML to a directory, as pagename.html.
430 * Copies all used css files to the directory, all used images to a
431 * "images" subdirectory, and all used buttons to a "images/buttons" subdirectory.
432 * The webserver must have write permissions to these directories.
433 * chown httpd HTML_DUMP_DIR; chmod u+rwx HTML_DUMP_DIR
436 * @param string directory (optional) path to dump to. Default: HTML_DUMP_DIR
437 * @param string pages (optional) Comma-separated list of glob-style pagenames to dump.
438 * Also array of pagenames allowed.
439 * @param string exclude (optional) Comma-separated list of glob-style pagenames to exclude
/**
 * Dump pages as XHTML files into a directory ('dumphtml' action).
 * Resolves the target directory (default HTML_DUMP_DIR), selects the
 * page set (explicit 'pages' arg, current page when not on the admin
 * page, or all pages minus excludes), then delegates the actual
 * rendering to _DumpHtmlToDir() with DUMP_MODE = 'HTML'.
 *
 * NOTE(review): interior lines elided (else branches and closing
 * braces are not visible in this excerpt).
 *
 * @param WikiRequest $request Uses args: directory, exclude, pages, pagename.
 */
441 function DumpHtmlToDir (&$request)
444 $directory = $request->getArg('directory');
445 if (empty($directory))
446 $directory = HTML_DUMP_DIR; // See lib/plugin/WikiForm.php:87
447 if (empty($directory))
448 $request->finish(_("You must specify a directory to dump to"));
450 // See if we can access the directory the user wants us to use
451 if (! file_exists($directory)) {
452 if (! mkdir($directory, 0755))
453 $request->finish(fmt("Cannot create directory '%s'", $directory));
455 $html = HTML::p(fmt("Created directory '%s' for the page dump...",
458 $html = HTML::p(fmt("Using directory '%s'", $directory));
460 StartLoadDump($request, _("Dumping Pages"), $html);
461 $thispage = $request->getArg('pagename'); // for "Return to ..."
463 $dbi =& $request->_dbi;
464 if ($exclude = $request->getArg('exclude')) { // exclude which pagenames
465 $excludeList = explodePageList($exclude);
// Default exclusions: debug/auth pages should not leak into dumps.
467 $excludeList = array('DebugAuthInfo', 'DebugGroupInfo', 'AuthInfo');
469 if ($pages = $request->getArg('pages')) { // which pagenames
470 if ($pages == '[]') // current page
472 $page_iter = new WikiDB_Array_generic_iter(explodePageList($pages));
473 // not at admin page: dump only the current page
474 } elseif ($thispage != _("PhpWikiAdministration")) {
475 $page_iter = new WikiDB_Array_generic_iter(array($thispage));
477 $page_iter = $dbi->getAllPages(false,false,false,$excludeList);
// Flag the theme so templates render in dump mode, then restore.
480 $WikiTheme->DUMP_MODE = 'HTML';
481 _DumpHtmlToDir($directory, $page_iter, $request->getArg('exclude'));
482 $WikiTheme->DUMP_MODE = false;
484 $request->setArg('pagename',$thispage); // Template::_basepage fix
485 EndLoadDump($request);
488 /* Known problem: any plugins or other code which echo()s text will
489 * lead to a corrupted html zip file which may produce the following
490 * errors upon unzipping:
492 * warning [wikihtml.zip]: 2401 extra bytes at beginning or within zipfile
493 * file #58: bad zipfile offset (local header sig): 177561
494 * (attempting to re-compensate)
496 * However, the actual wiki page data should be unaffected.
/**
 * Create a zip of the wiki rendered as XHTML ('ziphtml' action).
 * Determines the zip name (user-supplied 'zipname' arg, forced to a
 * .zip extension, or "wikihtml.zip"), selects the page set, and
 * delegates rendering into the ZipWriter via _DumpHtmlToDir() with
 * DUMP_MODE = 'ZIPHTML'.
 *
 * NOTE(review): interior lines elided ($zip->finish() and several
 * braces are not visible in this excerpt).
 *
 * @param WikiRequest $request Uses args: zipname, pages, exclude, pagename.
 */
498 function MakeWikiZipHtml (&$request)
501 if ($request->getArg('zipname')) {
502 $zipname = basename($request->getArg('zipname'));
503 if (!preg_match("/\.zip$/i", $zipname))
// Consume the arg so it does not leak into later processing.
505 $request->setArg('zipname', false);
507 $zipname = "wikihtml.zip";
509 $zip = new ZipWriter("Created by PhpWiki " . PHPWIKI_VERSION, $zipname);
510 $dbi =& $request->_dbi;
511 $thispage = $request->getArg('pagename'); // for "Return to ..."
512 if ($pages = $request->getArg('pages')) { // which pagenames
513 if ($pages == '[]') // current page
515 $page_iter = new WikiDB_Array_generic_iter(explodePageList($pages));
517 $page_iter = $dbi->getAllPages(false,false,false,$request->getArg('exclude'));
520 $WikiTheme->DUMP_MODE = 'ZIPHTML';
521 _DumpHtmlToDir($zip, $page_iter, $request->getArg('exclude'));
522 $WikiTheme->DUMP_MODE = false;
526 * Internal html dumper. Used for dumphtml, ziphtml and pdf
/**
 * Internal HTML dumper shared by dumphtml ('HTML'), ziphtml ('ZIPHTML')
 * and pdf ('PDFHTML') modes; the mode is read from $WikiTheme->DUMP_MODE.
 *
 * Per page: renders the 'browse' template to XHTML, rewrites absolute
 * uploaded-image src URLs to a local images/ subdirectory, and writes
 * the result either to $target-as-directory or into $target-as-ZipWriter.
 * Afterwards it copies the theme's referenced images, buttons and css
 * files next to the dump, and for PDFHTML pipes the output files
 * through an external HTML-to-PDF command.
 *
 * NOTE(review): this excerpt has many interior lines elided (else
 * branches, fclose/$zip->finish, loop closers). Comments below describe
 * only the visible statements; do not treat them as a complete spec.
 *
 * @param string|ZipWriter $target    Directory path or ZipWriter object.
 * @param object $page_iter           Iterator of pages or pagename strings.
 * @param string|false $exclude       Comma-separated pagenames to skip.
 */
528 function _DumpHtmlToDir ($target, $page_iter, $exclude = false)
530 global $WikiTheme, $request, $ErrorManager;
531 $silent = true; $zip = false; $directory = false;
// Decide directory-vs-zip output from the dump mode / target type.
532 if ($WikiTheme->DUMP_MODE == 'HTML') {
533 $directory = $target;
535 } elseif ($WikiTheme->DUMP_MODE == 'PDFHTML') {
536 $directory = $target;
537 } elseif (is_object($target)) { // $WikiTheme->DUMP_MODE == 'ZIPHTML'
541 $request->_TemplatesProcessed = array();
542 if ($exclude) { // exclude which pagenames
543 $excludeList = explodePageList($exclude);
545 $excludeList = array('DebugAuthInfo', 'DebugGroupInfo', 'AuthInfo');
// Restrict link rendering to pages actually included in the dump.
547 $WikiTheme->VALID_LINKS = array();
548 if ($request->getArg('format')) { // pagelist
549 $page_iter_sav = $page_iter;
550 foreach ($page_iter_sav->asArray() as $handle) {
551 $WikiTheme->VALID_LINKS[] = is_string($handle) ? $handle : $handle->getName();
553 $page_iter_sav->reset();
556 if (defined('HTML_DUMP_SUFFIX'))
557 $WikiTheme->HTML_DUMP_SUFFIX = HTML_DUMP_SUFFIX;
// Save and strip the theme's <body> attributes; restored at the end.
558 $_bodyAttr = @$WikiTheme->_MoreAttr['body'];
559 unset($WikiTheme->_MoreAttr['body']);
561 $ErrorManager->pushErrorHandler(new WikiFunctionCb('_dump_error_handler'));
563 // check if the dumped file will be accessible from outside
564 $doc_root = $request->get("DOCUMENT_ROOT");
565 if ($WikiTheme->DUMP_MODE == 'HTML') {
566 $ldir = NormalizeLocalFileName($directory);
567 $wikiroot = NormalizeLocalFileName('');
// Choose a link prefix: web-relative when inside DOCUMENT_ROOT or the
// wiki root, otherwise a file:// URL.
568 if (string_starts_with($ldir, $doc_root)) {
569 $link_prefix = substr($directory, strlen($doc_root))."/";
570 } elseif (string_starts_with($ldir, $wikiroot)) {
571 $link_prefix = NormalizeWebFileName(substr($directory, strlen($wikiroot)))."/";
575 $prefix = '/'; // . substr($doc_root,0,2); // add drive where apache is installed
577 $link_prefix = "file://".$prefix.$directory."/";
583 $request_args = $request->args;
584 $timeout = (! $request->getArg('start_debug')) ? 60 : 240;
587 $directory = str_replace("\\", "/", $directory); // no Win95 support.
588 @mkdir("$directory/images");
592 $already_images = array();
// ---- main per-page loop ----
594 while ($page = $page_iter->next()) {
595 if (is_string($page)) {
597 $page = $request->_dbi->getPage($pagename);
599 $pagename = $page->getName();
601 if (empty($firstpage)) $firstpage = $pagename;
// Skip duplicates in the iterator.
602 if (array_key_exists($pagename, $already))
604 $already[$pagename] = 1;
605 $current = $page->getCurrentRevision();
606 //if ($current->getVersion() == 0)
609 $request->args = $request_args; // some plugins might change them (esp. on POST)
610 longer_timeout($timeout); // Reset watchdog
613 $attrib = array('mtime' => $current->get('mtime'),
615 if ($page->get('locked'))
616 $attrib['write_protected'] = 1;
617 } elseif (!$silent) {
618 if (!isa($request,'MockRequest')) {
619 PrintXML(HTML::br(), $pagename, ' ... ');
623 if (in_array($pagename, $excludeList)) {
624 if (!$silent and !isa($request,'MockRequest')) {
625 PrintXML(_("Skipped."));
631 if ($WikiTheme->DUMP_MODE == 'PDFHTML')
632 $request->setArg('action', 'pdf'); // to omit cache headers
633 $request->setArg('pagename', $pagename); // Template::_basepage fix
634 $filename = FilenameForPage($pagename) . $WikiTheme->HTML_DUMP_SUFFIX;
635 $args = array('revision' => $current,
636 'CONTENT' => $current->getTransformedContent(),
637 'relative_base' => $relative_base);
638 // For every %2F will need to mkdir -p dirname($pagename)
639 if (preg_match("/(%2F|\/)/", $filename)) {
640 // mkdir -p and set relative base for subdir pages
641 $filename = preg_replace("/%2F/", "/", $filename);
642 $count = substr_count($filename, "/");
643 $dirname = dirname($filename);
645 mkdir_p($directory."/".$dirname);
646 // Fails with "XX / YY", "XX" is created, "XX / YY" cannot be written
647 // if (isWindows()) // interesting Windows bug: cannot mkdir "bla "
648 // Since dumps needs to be copied, we have to disallow this for all platforms.
649 $filename = preg_replace("/ \//", "/", $filename);
// One "../" per subdirectory level so relative links resolve.
650 $relative_base = "../";
652 $relative_base .= "../";
655 $args['relative_base'] = $relative_base;
// Render the page; save/restore DUMP_MODE around the call since
// rendering may reset it.
659 $DUMP_MODE = $WikiTheme->DUMP_MODE;
660 $data = GeneratePageasXML(new Template('browse', $request, $args),
661 $pagename, $current, $args);
662 $WikiTheme->DUMP_MODE = $DUMP_MODE;
// Rewrite absolute <img src="/..."> to local images/ copies so the
// dump (and pdf generation) works offline.
664 if (preg_match_all("/<img .*?src=\"(\/.+?)\"/", $data, $m)) {
665 // fix to local relative path for uploaded images, so that pdf will work
666 foreach ($m[1] as $img_file) {
667 $base = basename($img_file);
668 $data = str_replace('src="'.$img_file.'"','src="images/'.$base.'"', $data);
669 if (array_key_exists($img_file, $already_images))
671 $already_images[$img_file] = 1;
672 // resolve src from webdata to file
673 $src = $doc_root . $img_file;
674 if (file_exists($src) and $base) {
676 $target = "$directory/images/$base";
677 if (copy($src, $target)) {
679 _copyMsg($img_file, fmt("... copied to %s", $target));
682 _copyMsg($img_file, fmt("... not copied to %s", $target));
685 $target = "images/$base";
686 $zip->addSrcFile($target, $src);
// Write the rendered page to disk (directory mode) ...
693 $outfile = $directory."/".$filename;
694 if ( !($fd = fopen($outfile, "wb")) ) {
695 $msg->pushContent(HTML::strong(fmt("couldn't open file '%s' for writing",
697 $request->finish($msg);
699 $len = strlen($data);
700 $num = fwrite($fd, $data, $len);
701 if ($pagename != $filename) {
702 $link = LinkURL($link_prefix.$filename, $filename);
703 $msg->pushContent(HTML::small(_("saved as "), $link, " ... "));
705 $msg->pushContent(HTML::small(fmt("%s bytes written", $num), "\n"));
707 if (!isa($request, 'MockRequest')) {
711 $request->chunkOutput();
713 assert($num == $len);
715 $outfiles[] = $outfile;
// ... or into the zip (ZIPHTML mode).
717 $zip->addRegularFile($filename, $data, $attrib);
// Free per-page caches to keep memory bounded on large dumps.
721 $request->_dbi->_cache->invalidate_cache($pagename);
722 unset ($request->_dbi->_cache->_pagedata_cache);
723 unset ($request->_dbi->_cache->_versiondata_cache);
724 unset ($request->_dbi->_cache->_glv_cache);
726 unset ($request->_dbi->_cache->_backend->_page_data);
729 unset($current->_transformedContent);
731 if (!empty($template)) {
732 unset($template->_request);
// ---- copy theme assets referenced during rendering ----
739 $attrib = false; //array('is_ascii' => 0);
740 if (!empty($WikiTheme->dumped_images) and is_array($WikiTheme->dumped_images)) {
741 // @mkdir("$directory/images");
742 foreach ($WikiTheme->dumped_images as $img_file) {
743 if (array_key_exists($img_file, $already_images))
745 $already_images[$img_file] = 1;
747 and ($from = $WikiTheme->_findFile($img_file, true))
751 $target = "$directory/images/".basename($from);
753 copy($WikiTheme->_path . $from, $target);
755 if (copy($WikiTheme->_path . $from, $target)) {
756 _copyMsg($from, fmt("... copied to %s", $target));
758 _copyMsg($from, fmt("... not copied to %s", $target));
762 $target = "images/".basename($from);
763 $zip->addSrcFile($target, $WikiTheme->_path . $from);
765 } elseif (!$silent) {
766 _copyMsg($from, _("... not found"));
// Theme buttons go to images/buttons/.
771 if (!empty($WikiTheme->dumped_buttons)
772 and is_array($WikiTheme->dumped_buttons))
776 @mkdir("$directory/images/buttons");
777 foreach ($WikiTheme->dumped_buttons as $text => $img_file) {
778 if (array_key_exists($img_file, $already_images))
780 $already_images[$img_file] = 1;
782 and ($from = $WikiTheme->_findFile($img_file, true))
786 $target = "$directory/images/buttons/".basename($from);
788 copy($WikiTheme->_path . $from, $target);
790 if (copy($WikiTheme->_path . $from, $target)) {
791 _copyMsg($from, fmt("... copied to %s", $target));
793 _copyMsg($from, fmt("... not copied to %s", $target));
797 $target = "images/buttons/".basename($from);
798 $zip->addSrcFile($target, $WikiTheme->_path . $from);
800 } elseif (!$silent) {
801 _copyMsg($from, _("... not found"));
// CSS files go into the dump root.
805 if (!empty($WikiTheme->dumped_css) and is_array($WikiTheme->dumped_css)) {
806 foreach ($WikiTheme->dumped_css as $css_file) {
807 if (array_key_exists($css_file, $already_images))
809 $already_images[$css_file] = 1;
811 and ($from = $WikiTheme->_findFile(basename($css_file), true))
814 // TODO: fix @import url(main.css);
816 $target = "$directory/" . basename($css_file);
818 copy($WikiTheme->_path . $from, $target);
820 if (copy($WikiTheme->_path . $from, $target)) {
821 _copyMsg($from, fmt("... copied to %s", $target));
823 _copyMsg($from, fmt("... not copied to %s", $target));
827 //$attrib = array('is_ascii' => 0);
828 $target = basename($css_file);
829 $zip->addSrcFile($target, $WikiTheme->_path . $from);
831 } elseif (!$silent) {
832 _copyMsg($from, _("... not found"));
// ---- PDFHTML: run external converter over the dumped files ----
// NOTE(review): $cmd is built from config constants and generated
// filenames and passed to a shell (passthru); confirm none of these
// can carry user-controlled content before reuse.
840 if ($WikiTheme->DUMP_MODE == 'PDFHTML') {
841 if (USE_EXTERNAL_HTML2PDF and $outfiles) {
842 $cmd = EXTERNAL_HTML2PDF_PAGELIST.' "'.join('" "', $outfiles).'"';
843 $filename = FilenameForPage($firstpage);
845 $tmpfile = $directory . "/createpdf.bat";
846 $fp = fopen($tmpfile, "wb");
847 fwrite($fp, $cmd . " > $filename.pdf");
850 if (!headers_sent()) {
851 Header('Content-Type: application/pdf');
855 $tmpdir = getUploadFilePath();
856 $s = passthru($cmd . " > $tmpdir/$filename.pdf");
857 $errormsg = "<br />\nGenerated <a href=\"".getUploadDataPath()."$filename.pdf\">Upload:$filename.pdf</a>\n";
862 foreach($outfiles as $f) unlink($f);
865 if (!empty($errormsg)) {
866 $request->discardOutput();
867 $GLOBALS['ErrorManager']->_postponed_errors = array();
// Restore global state modified at the top of this function.
871 $ErrorManager->popErrorHandler();
873 $WikiTheme->HTML_DUMP_SUFFIX = '';
874 $WikiTheme->DUMP_MODE = false;
875 $WikiTheme->_MoreAttr['body'] = $_bodyAttr;
879 ////////////////////////////////////////////////////////////////
881 // Functions for restoring.
883 ////////////////////////////////////////////////////////////////
/**
 * Store one loaded page (from a dump/upload/pgsrc file) into the
 * database, handling name validation, edit-conflict detection, merge /
 * overwrite / keep-old choices, and progress reporting.
 *
 * NOTE(review): this excerpt has many interior lines elided (early
 * returns, else branches, several closing braces); comments describe
 * only the visible statements.
 *
 * @param WikiRequest $request
 * @param array $pageinfo  Parsed page: keys pagename, content,
 *                         pagedata (page meta), versiondata (revision meta).
 * @param mixed $source    Human-readable source description ("MIME file …").
 * @param string $filename Source filename (usage not visible here).
 */
885 function SavePage (&$request, &$pageinfo, $source, $filename)
// static: once the user clicks "overwrite all", suppress further prompts.
887 static $overwite_all = false;
888 $pagedata = $pageinfo['pagedata']; // Page level meta-data.
889 $versiondata = $pageinfo['versiondata']; // Revision level meta-data.
891 if (empty($pageinfo['pagename'])) {
892 PrintXML(HTML::p(HTML::strong(_("Empty pagename!"))));
896 if (empty($versiondata['author_id']))
897 $versiondata['author_id'] = $versiondata['author'];
899 // remove invalid backend specific chars. utf8 issues mostly
900 $pagename_check = new WikiPagename($pageinfo['pagename']);
901 if (!$pagename_check->isValid()) {
902 PrintXML(HTML::p(HTML::strong(_("Invalid pagename!")." ".$pageinfo['pagename'])));
905 $pagename = $pagename_check->getName();
906 $content = $pageinfo['content'];
// Special case: the InterWikiMap page gets the external map appended.
908 if ($pagename == _("InterWikiMap"))
909 $content = _tryinsertInterWikiMap($content);
911 $dbi =& $request->_dbi;
912 $page = $dbi->getPage($pagename);
914 // Try to merge if updated pgsrc contents are different. This
915 // whole thing is hackish
917 // TODO: try merge unless:
918 // if (current contents = default contents && pgsrc_version >=
919 // pgsrc_version) then just upgrade this pgsrc
920 $needs_merge = false;
// Explicit user choice from request args: merge / overwrite ...
924 if ($request->getArg('merge')) {
927 else if ($request->getArg('overwrite')) {
931 $current = $page->getCurrentRevision();
933 $edit = $request->getArg('edit');
// ... or from the edit form: keep old / overwrite.
935 if (isset($edit['keep_old'])) {
939 elseif (isset($edit['overwrite'])) {
// Real conflict: current content is non-default and differs from the
// incoming content — hand off to the interactive conflict editor.
943 elseif ( $current and (! $current->hasDefaultContents())
944 && ($current->getPackedContent() != $content) )
946 include_once('lib/editpage.php');
947 $request->setArg('pagename', $pagename);
948 $v = $current->getVersion();
949 $request->setArg('revision', $current->getVersion());
950 $p = new LoadFileConflictPageEditor($request);
951 $p->_content = $content;
952 $p->_currentVersion = $v - 1;
953 $p->editPage($saveFailed = true);
954 return; //early return
// Apply page-level metadata regardless of the revision outcome.
958 foreach ($pagedata as $key => $value) {
960 $page->set($key, $value);
963 $mesg = HTML::p(array('style' => 'text-indent: 3em;'));
965 $mesg->pushContent(' ', fmt("from %s", $source));
968 //FIXME: This should not happen! (empty vdata, corrupt cache or db)
969 $current = $page->getCurrentRevision();
971 if ($current->getVersion() == 0) {
972 $mesg->pushContent(' - ', _("New page"));
976 if ( (! $current->hasDefaultContents())
977 && ($current->getPackedContent() != $content) ) {
979 $mesg->pushContent(' ',
980 fmt("has edit conflicts - overwriting anyway"));
// pgsrc files are attributed to the admin user ...
982 if (substr_count($source, 'pgsrc')) {
983 $versiondata['author'] = ADMIN_USER;
984 // but leave authorid as userid who loaded the file
988 if (isset($edit['keep_old'])) {
989 $mesg->pushContent(' ', fmt("keep old"));
991 $mesg->pushContent(' ', fmt("has edit conflicts - skipped"));
992 $needs_merge = true; // hackish, to display the buttons
// Identical content + same author: metadata updated, no new revision.
997 else if ($current->getPackedContent() == $content
998 && $current->get('author') == $versiondata['author']) {
999 // The page metadata is already changed, we don't need a new revision.
1000 // This was called previously "is identical to current version %d - skipped"
1001 // which is wrong, since the pagedata was stored, not skipped.
1002 $mesg->pushContent(' ',
1003 fmt("content is identical to current version %d - no new revision created",
1004 $current->getVersion()));
1011 // in case of failures print the culprit:
1012 if (!isa($request,'MockRequest')) {
1013 PrintXML(HTML::p(WikiLink($pagename))); flush();
1015 $new = $page->save($content, WIKIDB_FORCE_CREATE, $versiondata);
1017 $mesg->pushContent(' ', fmt("- saved to database as version %d",
1018 $new->getVersion()));
1022 // hackish, $source contains needed path+filename
1023 $f = str_replace(sprintf(_("MIME file %s"), ''), '', $f);
1024 $f = str_replace(sprintf(_("Serialized file %s"), ''), '', $f);
1025 $f = str_replace(sprintf(_("plain file %s"), ''), '', $f);
1026 //check if uploaded file? they pass just the content, but the file is gone
// Offer Merge / Restore-Anyway / Overwrite-All buttons on conflict.
1029 $meb = Button(array('action' => 'loadfile',
1033 _("PhpWikiAdministration"),
1035 $owb = Button(array('action' => 'loadfile',
1038 _("Restore Anyway"),
1039 _("PhpWikiAdministration"),
1041 $mesg->pushContent(' ', $meb, " ", $owb);
1042 if (!$overwite_all) {
1043 $args = $request->getArgs();
1044 $args['overwrite'] = 1;
1045 $owb = Button($args,
1047 _("PhpWikiAdministration"),
1049 $mesg->pushContent(HTML::span(array('class' => 'hint'), $owb));
1050 $overwite_all = true;
1053 $mesg->pushContent(HTML::em(_(" Sorry, cannot merge.")));
1057 if (!isa($request,'MockRequest')) {
1059 PrintXML(HTML::p(HTML::em(WikiLink($pagename))), $mesg);
1066 // action=revert (by diff)
/**
 * Revert a page to an earlier version (action=revert).
 *
 * Flow: validate the 'version' arg, bail out on no-content / same-
 * version / cancel; without 'verify' show a confirmation form with the
 * target revision's content; with 'verify' save that revision's packed
 * content as a new head revision and re-render the page.
 *
 * NOTE(review): interior lines elided (early returns after several
 * PrintXML calls and some closing braces are not visible).
 *
 * @param WikiRequest $request Uses args: pagename, version, cancel, verify.
 */
1067 function RevertPage (&$request)
1069 $mesg = HTML::div();
1070 $pagename = $request->getArg('pagename');
1071 $version = $request->getArg('version');
1073 PrintXML(HTML::p(fmt("Revert")," ",WikiLink($pagename)),
1074 HTML::p(_("missing required version argument")));
1077 $dbi =& $request->_dbi;
1078 $page = $dbi->getPage($pagename);
1079 $current = $page->getCurrentRevision();
1080 $currversion = $current->getVersion();
1081 if ($currversion == 0) {
1082 $mesg->pushContent(' ', _("no page content"));
1083 PrintXML(HTML::p(fmt("Revert")," ",WikiLink($pagename)),
// Reverting to the current version would be a no-op.
1088 if ($currversion == $version) {
1089 $mesg->pushContent(' ', _("same version page"));
1090 PrintXML(HTML::p(fmt("Revert")," ",WikiLink($pagename)),
1095 if ($request->getArg('cancel')) {
1096 $mesg->pushContent(' ', _("Cancelled"));
1097 PrintXML(HTML::p(fmt("Revert")," ",WikiLink($pagename)),
// Not yet confirmed: render a Yes/Cancel confirmation form showing
// the target revision's content.
1102 if (!$request->getArg('verify')) {
1103 $mesg->pushContent(HTML::p(fmt("Are you sure to revert %s to version $version?", WikiLink($pagename))),
1104 HTML::form(array('action' => $request->getPostURL(),
1105 'method' => 'post'),
1106 HiddenInputs($request->getArgs(), false, array('verify')),
1107 HiddenInputs(array('verify' => 1)),
1108 Button('submit:verify', _("Yes"), 'button'),
1109 HTML::Raw('&nbsp;'),
1110 Button('submit:cancel', _("Cancel"), 'button'))
1112 $rev = $page->getRevision($version);
1113 $html = HTML(HTML::fieldset($mesg), HTML::hr(), $rev->getTransformedContent());
1114 $template = Template('browse',
1115 array('CONTENT' => $html));
1116 GeneratePage($template, $pagename, $rev);
1117 $request->checkValidators();
// Confirmed: store the old revision's content as a new version.
1121 $rev = $page->getRevision($version);
1122 $content = $rev->getPackedContent();
1123 $versiondata = $rev->_data;
1124 $versiondata['summary'] = sprintf(_("revert to version %d"), $version);
1125 $new = $page->save($content, $currversion + 1, $versiondata);
1128 $mesg = HTML::span();
1129 $pagelink = WikiLink($pagename);
1130 $mesg->pushContent(fmt("Revert: %s", $pagelink),
1131 fmt("- version %d saved to database as version %d",
1132 $version, $new->getVersion()));
1133 // Force browse of current page version.
1134 $request->setArg('version', false);
1135 $template = Template('savepage', array());
1136 $template->replace('CONTENT', $new->getTransformedContent());
1138 GeneratePage($template, $mesg, $new);
/**
 * When loading the InterWikiMap page, append the configured external
 * interwiki map file (INTERWIKI_MAP_FILE) to the page content inside a
 * <verbatim> block — unless the content already contains one, the
 * constant is undefined, or the file is missing (then a notice is
 * triggered and, per the visible flow, the original content is used).
 *
 * NOTE(review): interior lines elided ($goback assignments, fclose,
 * and the return statement are not visible in this excerpt).
 *
 * @param string $content Raw page content being loaded.
 * @return string Content, possibly with the map file appended.
 */
1142 function _tryinsertInterWikiMap($content) {
// Already has a verbatim block: nothing to insert.
1144 if (strpos($content, "<verbatim>")) {
1145 //$error_html = " The newly loaded pgsrc already contains a verbatim block.";
1148 if (!$goback && !defined('INTERWIKI_MAP_FILE')) {
1149 $error_html = sprintf(" "._("%s: not defined"), "INTERWIKI_MAP_FILE");
1152 $mapfile = FindFile(INTERWIKI_MAP_FILE,1);
1153 if (!$goback && !file_exists($mapfile)) {
1154 $error_html = sprintf(" "._("%s: file not found"), INTERWIKI_MAP_FILE);
1158 if (!empty($error_html))
1159 trigger_error(_("Default InterWiki map file not loaded.")
1160 . $error_html, E_USER_NOTICE);
1164 // if loading from virgin setup do echo, otherwise trigger_error E_USER_NOTICE
1165 if (!isa($GLOBALS['request'], 'MockRequest'))
1166 echo sprintf(_("Loading InterWikiMap from external file %s."), $mapfile),"<br />";
1168 $fd = fopen ($mapfile, "rb");
1169 $data = fread ($fd, filesize($mapfile));
1171 $content = $content . "\n<verbatim>\n$data</verbatim>\n";
// Parse a PHP-serialized page dump into the canonical $pageinfo structure
// (keys: 'pagedata', 'versiondata', plus top-level entries such as
// 'pagename'/'content'), splitting flat hash keys into page-level and
// revision-level metadata. Returns falsy for non-matching input (the early
// return is elided from this listing).
// NOTE(review): unserialize() on loaded file content can instantiate
// arbitrary classes — acceptable only because input is admin-supplied dump
// files, not browser data; verify callers uphold that.
1175 function ParseSerializedPage($text, $default_pagename, $user)
// Cheap sanity check that $text looks like a serialized array before
// handing it to unserialize().
1177 if (!preg_match('/^a:\d+:{[si]:\d+/', $text))
1180 $pagehash = unserialize($text);
1182 // Split up pagehash into four parts:
1185 // page-level meta-data
1186 // revision-level meta-data
// Flag bit constants for the legacy 'flags' field; both default to 1
// (they are independent bits only if defined differently elsewhere —
// TODO confirm).
1188 if (!defined('FLAG_PAGE_LOCKED'))
1189 define('FLAG_PAGE_LOCKED', 1);
1190 if (!defined('FLAG_PAGE_EXTERNAL'))
1191 define('FLAG_PAGE_EXTERNAL', 1);
1192 $pageinfo = array('pagedata' => array(),
1193 'versiondata' => array());
// Aliases into $pageinfo so the dispatch below can write either section.
1195 $pagedata = &$pageinfo['pagedata'];
1196 $versiondata = &$pageinfo['versiondata'];
1198 // Fill in defaults.
1199 if (empty($pagehash['pagename']))
1200 $pagehash['pagename'] = $default_pagename;
1201 if (empty($pagehash['author'])) {
1202 $pagehash['author'] = $user->getId();
// Dispatch each serialized key into the right section. The switch header
// and several case labels are elided from this listing.
1205 foreach ($pagehash as $key => $value) {
1210 $pageinfo[$key] = $value;
// Array-valued content is joined back into one newline-separated string.
1213 $pageinfo[$key] = join("\n", $value);
// Legacy packed 'flags' bitfield → individual pagedata booleans.
1216 if (($value & FLAG_PAGE_LOCKED) != 0)
1217 $pagedata['locked'] = 'yes';
1218 if (($value & FLAG_PAGE_EXTERNAL) != 0)
1219 $pagedata['external'] = 'yes';
1223 $pagedata[$key] = $value;
1227 $pagedata['perm'] = ParseMimeifiedPerm($value);
1229 case 'lastmodified':
1230 $versiondata['mtime'] = $value;
1235 $versiondata[$key] = $value;
// Charset handling: default to utf-8, then convert to the wiki's charset.
1239 if (empty($pagehash['charset']))
1240 $pagehash['charset'] = 'utf-8';
1241 // compare to target charset
1242 if (strtolower($pagehash['charset']) != strtolower($GLOBALS['charset'])) {
// NOTE(review): $params is undefined in this function — these two lines
// almost certainly mean $pagehash['charset']; as written the source
// charset passed to charset_convert() is null. Fix when the full file
// is in view.
1243 $pageinfo['content'] = charset_convert($params['charset'], $GLOBALS['charset'], $pageinfo['content']);
1244 $pageinfo['pagename'] = charset_convert($params['charset'], $GLOBALS['charset'], $pageinfo['pagename']);
// usort() comparator: orders page-info arrays by ascending integer
// 'version' (negative when $a is older than $b).
1249 function SortByPageVersion ($a, $b) {
1250 return $a['version'] - $b['version'];
1254 * Security alert! We must not allow importing config.ini into our wiki (or from a sister wiki?)
1255 * because the sql passwords are in plaintext there. And the webserver must be able to read it.
1256 * Detected by Santtu Jarvi.
// Load a single file (or an in-memory $text) into the wiki as one or more
// pages. Tries three formats in order: MIME-ified multipart dump,
// PHP-serialized dump, then plain text. Refuses any config*.ini to avoid
// importing plaintext passwords (see security note above this function).
// NOTE(review): this listing is gappy — returns, closing braces and some
// branches (e.g. the $mtime-from-$stat path) are elided; comments below
// cover only the visible lines.
1258 function LoadFile (&$request, $filename, $text = false, $mtime = false)
// Security guard: reject config.ini and its backups/variants.
1260 if (preg_match("/config$/", dirname($filename)) // our or other config
1261 and preg_match("/config.*\.ini/", basename($filename))) // backups and other versions also
1263 trigger_error(sprintf("Refused to load %s", $filename), E_USER_WARNING);
// No $text supplied: read the file from disk ourselves.
1266 if (!is_string($text)) {
1268 $stat = stat($filename);
1270 $text = implode("", file($filename));
// Re-arm the PHP execution-time watchdog per file; much longer when
// debugging (start_debug request arg).
1273 if (! $request->getArg('start_debug')) @set_time_limit(30); // Reset watchdog
1274 else @set_time_limit(240);
1276 // FIXME: basename("filewithnoslashes") seems to return garbage sometimes.
1277 $basename = basename("/dummy/" . $filename);
1280 $mtime = time(); // Last resort.
1282 // DONE: check source - target charset for content and pagename
1283 // but only for pgsrc'ed content, not from the browser.
// Filenames are URL-encoded on disk; decode to get the page name.
1285 $default_pagename = rawurldecode($basename);
// Format 1: MIME multipart dump, possibly many page revisions per file.
1286 if ( ($parts = ParseMimeifiedPages($text)) ) {
// Remember the caller's overwrite setting so it can be restored below.
1287 if (count($parts) > 1)
1288 $overwrite = $request->getArg('overwrite');
// Save oldest revisions first so version numbers come out right.
1289 usort($parts, 'SortByPageVersion');
1290 foreach ($parts as $pageinfo) {
// Force overwrite for all but one part — otherwise later revisions of
// the same page would be skipped as duplicates.
1292 if (count($parts) > 1)
1293 $request->setArg('overwrite', 1);
1294 SavePage($request, $pageinfo, sprintf(_("MIME file %s"),
1295 $filename), $basename);
// Restore (or clear) the original overwrite arg.
1297 if (count($parts) > 1)
1299 $request->setArg('overwrite', $overwrite);
1301 unset($request->_args['overwrite']);
// Format 2: single PHP-serialized page dump.
1303 else if ( ($pageinfo = ParseSerializedPage($text, $default_pagename,
1304 $request->getUser())) ) {
1305 SavePage($request, $pageinfo, sprintf(_("Serialized file %s"),
1306 $filename), $basename);
// Format 3: plain text; the current user becomes the author.
1310 $user = $request->getUser();
// Plain files are assumed utf-8; convert if the wiki runs another charset.
1312 $file_charset = 'utf-8';
1313 // compare to target charset
1314 if ($file_charset != strtolower($GLOBALS['charset'])) {
1315 $text = charset_convert($file_charset, $GLOBALS['charset'], $text);
1316 $default_pagename = charset_convert($file_charset, $GLOBALS['charset'], $default_pagename);
1319 // Assume plain text file.
// Build a minimal pageinfo; trailing whitespace before newlines stripped.
1320 $pageinfo = array('pagename' => $default_pagename,
1321 'pagedata' => array(),
1323 => array('author' => $user->getId()),
1324 'content' => preg_replace('/[ \t\r]*\n/', "\n",
1327 SavePage($request, $pageinfo, sprintf(_("plain file %s"), $filename),
// Load every member of a ZIP archive into the wiki via LoadFile(), with
// optional include ($files) and exclude lists, resetting the execution
// timeout per page. Skipped entries are reported inline.
// NOTE(review): the loop's continue/closing braces are elided from this
// listing.
1332 function LoadZip (&$request, $zipfile, $files = false, $exclude = false) {
1333 $zip = new ZipReader($zipfile);
// Per-page timeout; longer when debugging (start_debug request arg).
1334 $timeout = (! $request->getArg('start_debug')) ? 20 : 120;
1335 while (list ($fn, $data, $attrib) = $zip->readFile()) {
1336 // FIXME: basename("filewithnoslashes") seems to return
1337 // garbage sometimes.
1338 $fn = basename("/dummy/" . $fn);
// Skip entries not in the include list, or present in the exclude list.
1339 if ( ($files && !in_array($fn, $files))
1340 || ($exclude && in_array($fn, $exclude)) ) {
1341 PrintXML(HTML::p(WikiLink($fn)),
1342 HTML::p(_("Skipping")));
1346 longer_timeout($timeout); // longer timeout per page
// $data holds the already-extracted file contents; pass the archive mtime.
1347 LoadFile($request, $fn, $data, $attrib['mtime']);
// Load every file of a directory into the wiki via LoadFile(), honoring
// include/exclude lists, reporting skipped files, and deferring HOME_PAGE
// to the end so a mid-run failure leaves the wiki re-loadable.
// NOTE(review): $list's initialization and some closing braces are elided
// from this listing.
1351 function LoadDir (&$request, $dirname, $files = false, $exclude = false) {
1352 $fileset = new LimitedFileSet($dirname, $files, $exclude);
// Only report skips when no explicit include list was given.
1354 if (!$files and ($skiplist = $fileset->getSkippedFiles())) {
1355 PrintXML(HTML::p(HTML::strong(_("Skipping"))));
1357 foreach ($skiplist as $file)
1358 $list->pushContent(HTML::li(WikiLink($file)));
1359 PrintXML(HTML::p($list));
1362 // Defer HomePage loading until the end. If anything goes wrong
1363 // the pages can still be loaded again.
1364 $files = $fileset->getFiles();
// Move HOME_PAGE to the back of the load order.
1365 if (in_array(HOME_PAGE, $files)) {
1366 $files = array_diff($files, array(HOME_PAGE));
1367 $files[] = HOME_PAGE;
// Per-page timeout; longer when debugging (start_debug request arg).
1369 $timeout = (! $request->getArg('start_debug')) ? 20 : 120;
1370 foreach ($files as $file) {
1371 longer_timeout($timeout); // longer timeout per page
1372 if (substr($file,-1,1) != '~') // refuse to load backup files
1373 LoadFile($request, "$dirname/$file");
// FileSet subclass that filters a directory listing by an include list
// and an exclude list, remembering which names were skipped so LoadDir()
// can report them.
// NOTE(review): _filenameSelector's accept-path return is elided from
// this listing; only the reject branch is visible.
1377 class LimitedFileSet extends FileSet {
// PHP4-style constructor (matches this codebase's era): stash the filter
// lists before the parent scans the directory.
1378 function LimitedFileSet($dirname, $_include, $exclude) {
1379 $this->_includefiles = $_include;
1380 $this->_exclude = $exclude;
1381 $this->_skiplist = array();
1382 parent::FileSet($dirname);
// Per-file predicate called by FileSet: record and reject names outside
// the include list or inside the exclude list.
1385 function _filenameSelector($fn) {
1386 $incl = &$this->_includefiles;
1387 $excl = &$this->_exclude;
1389 if ( ($incl && !in_array($fn, $incl))
1390 || ($excl && in_array($fn, $excl)) ) {
1391 $this->_skiplist[] = $fn;
// Names rejected by _filenameSelector(), in scan order.
1398 function getSkippedFiles () {
1399 return $this->_skiplist;
// Heuristically detect a ZIP archive by its 4-byte magic number. Accepts
// either a filename (string) or an already-open file descriptor; for a
// descriptor the read position is restored afterwards.
// NOTE(review): in the filename branch the handle opened here is not
// visibly fclose()d — confirm the elided lines close it.
1404 function IsZipFile ($filename_or_fd)
1406 // See if it looks like zip file
1407 if (is_string($filename_or_fd))
1409 $fd = fopen($filename_or_fd, "rb");
1410 $magic = fread($fd, 4);
// Descriptor branch: peek 4 bytes, then seek back so the caller's
// position is unchanged.
1415 $fpos = ftell($filename_or_fd);
1416 $magic = fread($filename_or_fd, 4);
1417 fseek($filename_or_fd, $fpos);
// Match either a local-file header or a central-directory header magic.
1420 return $magic == ZIP_LOCHEAD_MAGIC || $magic == ZIP_CENTHEAD_MAGIC;
// Dispatch loading of an arbitrary source: directory → LoadDir(), ZIP →
// LoadZip(), anything else → LoadFile(). Also retries url-encoded names
// for files saved with accented characters, and resolves symlinks via
// stat(). NOTE(review): several branches (empty-$newfile handling, the
// numeric-type mapping around 004, closing braces) are elided from this
// listing.
1424 function LoadAny (&$request, $file_or_dir, $files = false, $exclude = false)
1426 // Try urlencoded filename for accented characters.
1427 if (!file_exists($file_or_dir)) {
1428 // Make sure there are slashes first to avoid confusing phps
1429 // with broken dirname or basename functions.
1430 // FIXME: windows uses \ and :
// Path contains a slash: search for it, then fall back to re-encoding
// just the basename (browser-decoded "San Diego" → "San%20Diego").
1431 if (is_integer(strpos($file_or_dir, "/"))) {
1432 $newfile = FindFile($file_or_dir, true);
1433 // Panic. urlencoded by the browser (e.g. San%20Diego => San Diego)
1435 $file_or_dir = dirname($file_or_dir) . "/"
1436 . rawurlencode(basename($file_or_dir));
1438 // This is probably just a file.
1439 $file_or_dir = rawurlencode($file_or_dir);
1443 $type = filetype($file_or_dir);
1444 if ($type == 'link') {
1445 // For symbolic links, use stat() to determine
1446 // the type of the underlying file.
1447 list(,,$mode) = stat($file_or_dir);
// Extract the file-type nibble from st_mode (bits 12-15, octal mask).
1448 $type = ($mode >> 12) & 017;
// 004 is S_IFDIR's type nibble; the branch body is elided here.
1451 elseif ($type == 004)
// Nothing usable at all: abort the request with an error page.
1456 $request->finish(fmt("Empty or not existing source. Unable to load: %s", $file_or_dir));
1458 else if ($type == 'dir') {
1459 LoadDir($request, $file_or_dir, $files, $exclude);
// Remote http/ftp sources are allowed through to the file loaders.
1461 else if ($type != 'file' && !preg_match('/^(http|ftp):/', $file_or_dir))
1463 $request->finish(fmt("Bad file type: %s", $type));
1465 else if (IsZipFile($file_or_dir)) {
1466 LoadZip($request, $file_or_dir, $files, $exclude);
1468 else /* if (!$files || in_array(basename($file_or_dir), $files)) */
1470 LoadFile($request, $file_or_dir);
// Action entry point for ?action=loadfile: take the 'source' request
// argument, normalize its path separators, render the load-progress
// header, load the source via LoadAny(), and finish the dump page.
1474 function LoadFileOrDir (&$request)
1476 $source = $request->getArg('source');
1477 $finder = new FileFinder;
// Normalize platform path separators (e.g. backslashes on Windows).
1478 $source = $finder->slashifyPath($source);
// Last path component, URL-decoded, becomes the displayed page name.
1479 $page = rawurldecode(basename($source));
1480 StartLoadDump($request, fmt("Loading '%s'",
1481 HTML(dirname($source),
1482 dirname($source) ? "/" : "",
1483 WikiLink($page,'auto'))));
1484 LoadAny($request, $source);
1485 EndLoadDump($request);
1489 * HomePage was not found so first-time install is supposed to run.
1490 * - import all pgsrc pages.
1491 * - Todo: installer interface to edit config/config.ini settings
1492 * - Todo: ask for existing old index.php to convert to config/config.ini
1493 * - Todo: theme-specific pages:
1494 * blog - HomePage, ADMIN_USER/Blogs
// First-time install: populate a virgin wiki from the pgsrc page sources.
// Temporarily impersonates ADMIN_USER (without persisting the login),
// loads the localized and default pgsrc trees, then verifies that every
// mandatory page actually made it into the database, and finally locks
// the InterWikiMap page.
// NOTE(review): this listing is gappy — the restore of $real_user, some
// WIKI_NAME branch headers and closing braces are elided.
1496 function SetupWiki (&$request)
1498 global $GenericPages, $LANG;
1500 //FIXME: This is a hack (err, "interim solution")
1501 // This is a bogo-bogo-login: Login without
1502 // saving login information in session state.
1503 // This avoids logging in the unsuspecting
1504 // visitor as ADMIN_USER
1506 // This really needs to be cleaned up...
1507 // (I'm working on it.)
// Keep the real user aside; presumably restored in an elided line below —
// TODO confirm.
1508 $real_user = $request->_user;
1509 if (ENABLE_USER_NEW)
1510 $request->_user = new _BogoUser(ADMIN_USER);
1513 $request->_user = new WikiUser($request, ADMIN_USER, WIKIAUTH_BOGO);
1515 StartLoadDump($request, _("Loading up virgin wiki"));
// Localized pgsrc first choice, English default as fallback/base set.
1517 $pgsrc = FindLocalizedFile(WIKI_PGSRC);
1518 $default_pgsrc = FindFile(DEFAULT_WIKI_PGSRC);
// Generic pages may be overwritten by the localized set loaded next.
1520 $request->setArg('overwrite', true);
1521 if ($default_pgsrc != $pgsrc) {
1522 LoadAny($request, $default_pgsrc, $GenericPages);
1524 $request->setArg('overwrite', false);
1525 LoadAny($request, $pgsrc);
1526 $dbi =& $request->_dbi;
1528 // Ensure that all mandatory pages are loaded
1529 $finder = new FileFinder;
// Mandatory page list varies by WIKI_NAME flavor (the if/elseif headers
// are partially elided from this listing).
1532 $mandatory = explode(':','SandBox:Template/Category:TemplateTalk:SpecialPages:CategoryCategory:CategoryActionPage:Help/OldTextFormattingRules:Help/TextFormattingRules:PhpWikiAdministration');
1533 } else if (WIKI_NAME == "help") {
1534 $mandatory = explode(':','SandBox:Template/Category:TemplateTalk:SpecialPages:CategoryCategory:CategoryActionPage:Help/TextFormattingRules:PhpWikiAdministration');
1536 $mandatory = explode(':','SandBox:Template/Category:TemplateTalk:SpecialPages:CategoryCategory:CategoryActionPage:TextFormattingRules:PhpWikiAdministration');
// Every mandatory page, action page, and the home page must exist.
1538 foreach (array_merge($mandatory,
1539 $GLOBALS['AllActionPages'],
1540 array(constant('HOME_PAGE'))) as $f)
// gettext() maps the canonical name to its translated page name.
1542 $page = gettext($f);
1543 $epage = urlencode($page);
1544 if (! $dbi->isWikiPage($page) ) {
1545 // translated version provided?
1546 if ($lf = FindLocalizedFile($pgsrc . $finder->_pathsep . $epage, 1)) {
1547 LoadAny($request, $lf);
1548 } else { // load english version of required action page
1549 LoadAny($request, FindFile(DEFAULT_WIKI_PGSRC . $finder->_pathsep . urlencode($f)));
// Still missing after both attempts: surface a hard error.
1553 if (! $dbi->isWikiPage($page)) {
1554 trigger_error(sprintf("Mandatory file %s couldn't be loaded!", $page),
// Lock the InterWikiMap so ordinary users cannot edit the map.
1559 $pagename = _("InterWikiMap");
1560 $map = $dbi->getPage($pagename);
1561 $map->set('locked', true);
1562 PrintXML(HTML::p(HTML::em(WikiLink($pagename)), HTML::strong(" locked")));
1563 EndLoadDump($request);
// Action entry point for an HTTP file upload: load the uploaded file into
// the wiki — as a ZIP archive (excluding RecentChanges) when it looks like
// one, otherwise as a single file. NOTE(review): the no-upload guard's
// condition and the ZIP-detection branch header are elided from this
// listing.
1566 function LoadPostFile (&$request)
1568 $upload = $request->getUploadedFile('file');
// Abort the request when no file arrived with the POST.
1571 $request->finish(_("No uploaded file to upload?")); // FIXME: more concise message
1573 // Dump http headers.
1574 StartLoadDump($request, sprintf(_("Uploading %s"), $upload->getName()));
1576 $fd = $upload->open();
// ZIP path: never import RecentChanges from an archive.
1578 LoadZip($request, $fd, false, array(_("RecentChanges")));
// Plain-file path: load the upload's contents under its original name.
1580 LoadFile($request, $upload->getName(), $upload->getContents());
1582 EndLoadDump($request);
1589 // c-basic-offset: 4
1590 // c-hanging-comment-ender-p: nil
1591 // indent-tabs-mode: nil