3 * Copyright 1999,2000,2001,2002,2004,2005,2006,2007 $ThePhpWikiProgrammingTeam
4 * Copyright 2008-2010 Marc-Etienne Vargenau, Alcatel-Lucent
6 * This file is part of PhpWiki.
8 * PhpWiki is free software; you can redistribute it and/or modify
9 * it under the terms of the GNU General Public License as published by
10 * the Free Software Foundation; either version 2 of the License, or
11 * (at your option) any later version.
13 * PhpWiki is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 * GNU General Public License for more details.
18 * You should have received a copy of the GNU General Public License along
19 * with PhpWiki; if not, write to the Free Software Foundation, Inc.,
20 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23 require_once 'lib/ziplib.php';
24 require_once 'lib/Template.php';
27 * ignore fatal errors during dump
/**
 * Error-handler callback installed during dump operations.
 * Downgrades fatal errors to E_USER_WARNING and reports the error
 * as handled so a long-running dump is not aborted mid-way.
 * NOTE(review): several original lines (closing braces, branch bodies)
 * are absent from this excerpt; comments are limited to visible code.
 *
 * @param object $error  error object with ->isFatal(), ->errno, ->errstr
 * @return bool  true = error consumed (visible path)
 */
29 function _dump_error_handler($error) {
30     if ($error->isFatal()) {
// Demote the severity so the ErrorManager does not halt the request.
31         $error->errno = E_USER_WARNING;
34         return true; // Ignore error
// Presumably plugin-raised errors get special treatment here -- the
// matching branch body is not visible in this excerpt; TODO confirm.
36     if (preg_match('/Plugin/', $error->errstr))
39     // let the message come through: call the remaining handlers:
/**
 * Emit the HTML page header for a load/dump run and switch the request
 * into chunked output mode, so progress can be streamed to the browser.
 * Also initialises $request->_deferredPageChangeNotification, which
 * EndLoadDump() later inspects to send batched change notifications.
 *
 * @param WikiRequest $request  current request (by reference)
 * @param string $title         title for the emitted 'html' template
 * @param string|object $html   optional intro content (default '')
 */
43 function StartLoadDump(&$request, $title, $html = '')
45     // MockRequest is from the unit testsuite, a faked request. (may be cmd-line)
46     // We are silent on unittests.
47     if (isa($request,'MockRequest'))
49     // FIXME: This is a hack. This really is the worst overall hack in phpwiki.
51         $html->pushContent('%BODY%');
52     $tmpl = Template('html', array('TITLE' => $title,
54 'CONTENT' => $html ? $html : '%BODY%'));
// Print everything up to the %BODY% placeholder; the rest of the page
// (the closing markup) is emitted later by EndLoadDump().
55     echo preg_replace('/%BODY%.*/s', '', $tmpl->getExpansion($html));
56     $request->chunkOutput();
58     // set marker for sendPageChangeNotification()
59     $request->_deferredPageChangeNotification = array();
/**
 * Finish a load/dump run: print a "Complete." message with a return
 * link, flush any deferred page-change notification mails collected
 * during the run, and emit theme-specific closing markup so the page
 * is valid XHTML.
 * NOTE(review): the switch(...) opener and several branch bodies are
 * missing from this excerpt; comments describe only visible code.
 *
 * @param WikiRequest $request  current request (by reference)
 */
62 function EndLoadDump(&$request)
66     if (isa($request,'MockRequest'))
68     $action = $request->getArg('action');
// Map the action to a human-readable label used as anchor in the
// PhpWikiAdministration return link below.
71     case 'zip': $label = _("ZIP files of database"); break;
72     case 'dumpserial': $label = _("Dump to directory"); break;
73     case 'upload': $label = _("Upload File"); break;
74     case 'loadfile': $label = _("Load File"); break;
75     case 'upgrade': $label = _("Upgrade"); break;
77     case 'ziphtml': $label = _("Dump pages as XHTML"); break;
// Anchors cannot contain spaces; make the label fragment-safe.
79     if ($label) $label = str_replace(" ","_",$label);
80     if ($action == 'browse') // loading virgin
81         $pagelink = WikiLink(HOME_PAGE);
83         $pagelink = WikiLink(new WikiPageName(_("PhpWikiAdministration"),false,$label));
85     // do deferred sendPageChangeNotification()
86     if (!empty($request->_deferredPageChangeNotification)) {
87         $pages = $all_emails = $all_users = array();
// Collate recipients across all loaded pages, deduplicating addresses
// and user ids so one summary mail is sent instead of one per page.
88         foreach ($request->_deferredPageChangeNotification as $p) {
89             list($pagename, $emails, $userids) = $p;
91             $all_emails = array_unique(array_merge($all_emails, $emails));
92             $all_users = array_unique(array_merge($all_users, $userids));
94         $editedby = sprintf(_("Edited by: %s"), $request->_user->getId());
95         $content = "Loaded the following pages:\n" . join("\n", $pages);
96         if (mail(join(',',$all_emails),"[".WIKI_NAME."] "._("LoadDump"),
100             trigger_error(sprintf(_("PageChange Notification of %s sent to %s"),
101 join("\n",$pages), join(',',$all_users)), E_USER_NOTICE);
103             trigger_error(sprintf(_("PageChange Notification Error: Couldn't send %s to %s"),
104 join("\n",$pages), join(',',$all_users)), E_USER_WARNING);
109         unset($request->_deferredPageChangeNotification);
111     PrintXML(HTML::p(HTML::strong(_("Complete."))),
112              HTML::p(fmt("Return to %s", $pagelink)));
113     // Ugly hack to get valid XHTML code
// Each theme wraps the body in different container elements, so the
// matching close tags differ per theme (branch bodies not visible here).
114     if (isa($WikiTheme, 'WikiTheme_fusionforge')) {
119     } elseif (isa($WikiTheme, 'WikiTheme_Sidebar')
120               or isa($WikiTheme, 'WikiTheme_MonoBook')) {
125     } elseif (isa($WikiTheme, 'WikiTheme_wikilens')) {
130     } elseif (isa($WikiTheme, 'WikiTheme_blog')) {
133     } elseif (isa($WikiTheme, 'WikiTheme_Crao')
134               or isa($WikiTheme, 'WikiTheme_Hawaiian')
135               or isa($WikiTheme, 'WikiTheme_MacOSX')
136               or isa($WikiTheme, 'WikiTheme_shamino_com')
137               or isa($WikiTheme, 'WikiTheme_smaller')) {
140     echo "</body></html>\n";
143 ////////////////////////////////////////////////////////////////
145 // Functions for dumping.
147 ////////////////////////////////////////////////////////////////
151 * http://www.nacs.uci.edu/indiv/ehood/MIME/2045/rfc2045.html
152 * http://www.faqs.org/rfcs/rfc2045.html
153 * (RFC 1521 has been superseded by RFC 2045 & others).
155 * Also see http://www.faqs.org/rfcs/rfc2822.html
/**
 * Serialize a wiki page (and optionally its revision history) into an
 * RFC 2822 / MIME-style message string, as stored in zip dumps.
 *
 * @param object $page       WikiDB page object
 * @param int $nversions     how many revisions to include; 1 = current
 *                           only, 0/negative apparently means all
 *                           (loop below only stops early when > 0)
 * @return string  mail headers plus single part, or a MIME multipart
 *                 when more than one revision was collected
 */
157 function MailifyPage ($page, $nversions = 1)
159     $current = $page->getCurrentRevision(false);
162     if (STRICT_MAILABLE_PAGEDUMPS) {
163         $from = defined('SERVER_ADMIN') ? SERVER_ADMIN : 'foo@bar';
164         //This is for unix mailbox format: (not RFC (2)822)
165         // $head .= "From $from " . CTime(time()) . "\r\n";
// Page name is rawurlencoded so non-ASCII names survive as a Subject.
166         $head .= "Subject: " . rawurlencode($page->getName()) . "\r\n";
167         $head .= "From: $from (PhpWiki)\r\n";
168         // RFC 2822 requires only a Date: and originator (From:)
169         // field, however the obsolete standard RFC 822 also
170         // requires a destination field.
171         $head .= "To: $from (PhpWiki)\r\n";
173     $head .= "Date: " . Rfc2822DateTime($current->get('mtime')) . "\r\n";
174     $head .= sprintf("Mime-Version: 1.0 (Produced by PhpWiki %s)\r\n",
// Walk revisions newest-first (presumably; iterator order not visible
// here -- TODO confirm), collecting one MIME part per revision.
177     $iter = $page->getAllRevisions();
179     while ($revision = $iter->next()) {
180         $parts[] = MimeifyPageRevision($page, $revision);
181         if ($nversions > 0 && count($parts) >= $nversions)
184     if (count($parts) > 1)
185         return $head . MimeMultipart($parts);
187     return $head . $parts[0];
191 * Compute filename to used for storing contents of a wiki page.
193 * Basically we do a rawurlencode() which encodes everything except
194 * ASCII alphanumerics and '.', '-', and '_'.
196 * But we also want to encode leading dots to avoid filenames like
197 * '.', and '..'. (Also, there's no point in generating "hidden" file
198 * names, like '.foo'.)
200 * We have to apply a different "/" logic for dumpserial, htmldump and zipdump.
201 * dirs are allowed for zipdump and htmldump, not for dumpserial
204 * @param $pagename string Pagename.
205 * @return string Filename for page.
/**
 * Compute the filename used to store a wiki page's dump.
 * rawurlencode()s the pagename, then selectively undoes / re-applies
 * encoding: '/' separators are restored for dir-capable dumps
 * (zip/ziphtml/dumphtml), spaces are kept literal, and leading or
 * trailing dots are encoded to avoid '.', '..' and hidden files.
 *
 * @param string $pagename  page name to encode
 * @param string|false $action  dump action; when false it is read from
 *                              the request (see global below)
 * @return string  encoded filename (return statement not visible here)
 */
207 function FilenameForPage ($pagename, $action = false)
209     $enc = rawurlencode($pagename);
212         $action = $request->getArg('action');
214     if ($action != 'dumpserial') { // zip, ziphtml, dumphtml
215         // For every %2F we will need to mkdir -p dirname($pagename)
216         $enc = preg_replace('/%2F/', '/', $enc);
// Protect leading dot ('.': current dir / hidden files).
218     $enc = preg_replace('/^\./', '%2E', $enc);
// Keep spaces readable in the dump filenames.
219     $enc = preg_replace('/%20/', ' ', $enc);
// Protect trailing dot as well.
220     $enc = preg_replace('/\.$/', '%2E', $enc);
225 * The main() function which generates a zip archive of a PhpWiki.
227 * If $include_archive is false, only the current version of each page
228 * is included in the zip file; otherwise all archived versions are
/**
 * Generate a zip archive of the wiki: one mailified file per page.
 * With arg include=all every archived revision is included
 * ("FullDump"), otherwise only the latest revision of each page
 * ("LatestSnapshot"). Pages listed in the 'exclude' arg are skipped.
 * Fatal errors raised by plugins are suppressed for the duration
 * via _dump_error_handler.
 *
 * @param WikiRequest $request  current request (by reference)
 */
231 function MakeWikiZip (&$request)
233     global $ErrorManager;
234     if ($request->getArg('include') == 'all') {
235         $zipname = WIKI_NAME . _("FullDump") . date('Ymd-Hi') . '.zip';
236         $include_archive = true;
239         $zipname = WIKI_NAME . _("LatestSnapshot") . date('Ymd-Hi') . '.zip';
240         $include_archive = false;
242     $include_empty = false;
243     if ($request->getArg('include') == 'empty') {
244         $include_empty = true;
247     $zip = new ZipWriter("Created by PhpWiki " . PHPWIKI_VERSION, $zipname);
249     /* ignore fatals in plugins */
250     $ErrorManager->pushErrorHandler(new WikiFunctionCb('_dump_error_handler'));
252     $dbi =& $request->_dbi;
253     $thispage = $request->getArg('pagename'); // for "Return to ..."
254     if ($exclude = $request->getArg('exclude')) { // exclude which pagenames
255         $excludeList = explodePageList($exclude);
257         $excludeList = array();
// Either dump an explicit page list ('[]' = just the current page)
// or iterate over every page in the database.
259     if ($pages = $request->getArg('pages')) { // which pagenames
260         if ($pages == '[]') // current page
262         $page_iter = new WikiDB_Array_PageIterator(explodePageList($pages));
264         $page_iter = $dbi->getAllPages(false,false,false,$excludeList);
266     $request_args = $request->args;
// Give debug runs a much longer watchdog window.
267     $timeout = (! $request->getArg('start_debug')) ? 30 : 240;
269     while ($page = $page_iter->next()) {
270         $request->args = $request_args; // some plugins might change them (esp. on POST)
271         longer_timeout($timeout); // Reset watchdog
// Skip pages without content or with invalid names.
273         $current = $page->getCurrentRevision();
274         if ($current->getVersion() == 0)
277         $pagename = $page->getName();
278         $wpn = new WikiPageName($pagename);
279         if (!$wpn->isValid())
281         if (in_array($page->getName(), $excludeList)) {
// Preserve mtime (and a write-protect flag for locked pages) in the
// zip entry's attributes so a later restore can keep the metadata.
285         $attrib = array('mtime' => $current->get('mtime'),
287         if ($page->get('locked'))
288             $attrib['write_protected'] = 1;
290         if ($include_archive)
291             $content = MailifyPage($page, 0);
293             $content = MailifyPage($page);
295         $zip->addRegularFile( FilenameForPage($pagename),
300     $ErrorManager->popErrorHandler();
/**
 * Dump pages as mailified files into a server-side directory
 * (action=dumpserial), one file per page named by FilenameForPage().
 * The directory is taken from the 'directory' arg, falling back to
 * DEFAULT_DUMP_DIR, and is created if missing. Progress is streamed
 * via StartLoadDump()/EndLoadDump().
 *
 * @param WikiRequest $request  current request (by reference)
 */
303 function DumpToDir (&$request)
305     $directory = $request->getArg('directory');
306     if (empty($directory))
307         $directory = DEFAULT_DUMP_DIR; // See lib/plugin/WikiForm.php:87
308     if (empty($directory))
309         $request->finish(_("You must specify a directory to dump to"));
311     // see if we can access the directory the user wants us to use
312     if (! file_exists($directory)) {
313         if (! mkdir($directory, 0755))
314             $request->finish(fmt("Cannot create directory '%s'", $directory));
316         $html = HTML::p(fmt("Created directory '%s' for the page dump...",
319         $html = HTML::p(fmt("Using directory '%s'", $directory));
322     StartLoadDump($request, _("Dumping Pages"), $html);
324     $dbi =& $request->_dbi;
325     $thispage = $request->getArg('pagename'); // for "Return to ..."
326     if ($exclude = $request->getArg('exclude')) { // exclude which pagenames
327         $excludeList = explodePageList($exclude);
329         $excludeList = array();
331     $include_empty = false;
332     if ($request->getArg('include') == 'empty') {
333         $include_empty = true;
// Explicit page list ('[]' = current page only) or all pages.
335     if ($pages = $request->getArg('pages')) { // which pagenames
336         if ($pages == '[]') // current page
338         $page_iter = new WikiDB_Array_PageIterator(explodePageList($pages));
340         $page_iter = $dbi->getAllPages($include_empty,false,false,$excludeList);
343     $request_args = $request->args;
344     $timeout = (! $request->getArg('start_debug')) ? 30 : 240;
346     while ($page = $page_iter->next()) {
347         $request->args = $request_args; // some plugins might change them (esp. on POST)
348         longer_timeout($timeout); // Reset watchdog
350         $pagename = $page->getName();
351         if (!isa($request,'MockRequest')) {
352             PrintXML(HTML::br(), $pagename, ' ... ');
356         if (in_array($pagename, $excludeList)) {
357             if (!isa($request, 'MockRequest')) {
358                 PrintXML(_("Skipped."));
363         $filename = FilenameForPage($pagename);
// Tell the user when the stored filename differs from the page name.
365         if($page->getName() != $filename) {
366             $msg->pushContent(HTML::small(fmt("saved as %s", $filename)),
// include=all dumps the full revision history, else latest only.
370         if ($request->getArg('include') == 'all')
371             $data = MailifyPage($page, 0);
373             $data = MailifyPage($page);
375         if ( !($fd = fopen($directory."/".$filename, "wb")) ) {
376             $msg->pushContent(HTML::strong(fmt("couldn't open file '%s' for writing",
377                                                "$directory/$filename")));
378             $request->finish($msg);
381         $num = fwrite($fd, $data, strlen($data));
382         $msg->pushContent(HTML::small(fmt("%s bytes written", $num)));
383         if (!isa($request, 'MockRequest')) {
// Sanity check: the whole serialized page must have been written.
387         assert($num == strlen($data));
391     EndLoadDump($request);
/**
 * Print a small progress message for a copied file during an HTML
 * dump, optionally prefixed with the page/file name. Silent when
 * running under the unit-test MockRequest.
 *
 * @param string|false $page   name to print before the message
 * @param mixed $smallmsg      message content (HTML-able)
 */
394 function _copyMsg($page, $smallmsg) {
395     if (!isa($GLOBALS['request'], 'MockRequest')) {
396         if ($page) $msg = HTML(HTML::br(), HTML($page), HTML::small($smallmsg));
397         else $msg = HTML::small($smallmsg);
/**
 * Recursively create a directory path ("mkdir -p" equivalent),
 * creating each path component in turn.
 * NOTE(review): written before PHP's mkdir() grew a native recursive
 * flag; behavior for already-existing components is not fully visible
 * in this excerpt.
 *
 * @param string $pathname   slash-separated path to create
 * @param int $permission    mode for created directories (default 0777)
 * @return bool  FALSE when any component could not be created
 */
403 function mkdir_p($pathname, $permission = 0777) {
404     $arr = explode("/", $pathname);
// Single component: plain mkdir suffices.
406         return mkdir($pathname, $permission);
408     $s = array_shift($arr);
410     foreach ($arr as $p) {
413             $ok = mkdir($curr, $permission);
415             if (!$ok) return FALSE;
421 * Dump all pages as XHTML to a directory, as pagename.html.
422 * Copies all used css files to the directory, all used images to a
423 * "images" subdirectory, and all used buttons to a "images/buttons" subdirectory.
424 * The webserver must have write permissions to these directories.
425 * chown httpd HTML_DUMP_DIR; chmod u+rwx HTML_DUMP_DIR
428 * @param string directory (optional) path to dump to. Default: HTML_DUMP_DIR
429 * @param string pages (optional) Comma-separated list of glob-style pagenames to dump.
430 * Also array of pagenames allowed.
431 * @param string exclude (optional) Comma-separated list of glob-style pagenames to exclude
/**
 * Dump pages as static XHTML files into a directory (action=dumphtml).
 * Resolves the target directory (arg 'directory' or HTML_DUMP_DIR,
 * created if missing), builds the page iterator from args, then
 * delegates the actual rendering/copying to _DumpHtmlToDir() with
 * $WikiTheme->DUMP_MODE set to 'HTML'.
 *
 * @param WikiRequest $request  current request (by reference)
 */
433 function DumpHtmlToDir (&$request)
436     $directory = $request->getArg('directory');
437     if (empty($directory))
438         $directory = HTML_DUMP_DIR; // See lib/plugin/WikiForm.php:87
439     if (empty($directory))
440         $request->finish(_("You must specify a directory to dump to"));
442     // See if we can access the directory the user wants us to use
443     if (! file_exists($directory)) {
444         if (! mkdir($directory, 0755))
445             $request->finish(fmt("Cannot create directory '%s'", $directory));
447         $html = HTML::p(fmt("Created directory '%s' for the page dump...",
450         $html = HTML::p(fmt("Using directory '%s'", $directory));
452     StartLoadDump($request, _("Dumping Pages"), $html);
453     $thispage = $request->getArg('pagename'); // for "Return to ..."
455     $dbi =& $request->_dbi;
456     if ($exclude = $request->getArg('exclude')) { // exclude which pagenames
457         $excludeList = explodePageList($exclude);
// Default exclusions: debug/auth info pages should not leak into dumps.
459         $excludeList = array('DebugAuthInfo', 'DebugGroupInfo', 'AuthInfo');
461     if ($pages = $request->getArg('pages')) { // which pagenames
462         if ($pages == '[]') // current page
464         $page_iter = new WikiDB_Array_generic_iter(explodePageList($pages));
465     // not at admin page: dump only the current page
466     } elseif ($thispage != _("PhpWikiAdministration")) {
467         $page_iter = new WikiDB_Array_generic_iter(array($thispage));
469         $page_iter = $dbi->getAllPages(false,false,false,$excludeList);
472     $WikiTheme->DUMP_MODE = 'HTML';
473     _DumpHtmlToDir($directory, $page_iter, $request->getArg('exclude'));
474     $WikiTheme->DUMP_MODE = false;
476     $request->setArg('pagename',$thispage); // Template::_basepage fix
477     EndLoadDump($request);
480 /* Known problem: any plugins or other code which echo()s text will
481 * lead to a corrupted html zip file which may produce the following
482 * errors upon unzipping:
484 * warning [wikihtml.zip]: 2401 extra bytes at beginning or within zipfile
485 * file #58: bad zipfile offset (local header sig): 177561
486 * (attempting to re-compensate)
488 * However, the actual wiki page data should be unaffected.
/**
 * Generate a zip archive of pages rendered as XHTML (action=ziphtml).
 * The zip name comes from the 'zipname' arg (".zip" enforced) or
 * defaults to "wikihtml.zip"; rendering is delegated to
 * _DumpHtmlToDir() with the ZipWriter as target and
 * $WikiTheme->DUMP_MODE = 'ZIPHTML'.
 *
 * @param WikiRequest $request  current request (by reference)
 */
490 function MakeWikiZipHtml (&$request)
493     if ($request->getArg('zipname')) {
494         $zipname = basename($request->getArg('zipname'));
495         if (!preg_match("/\.zip$/i", $zipname))
// Reset the arg so it does not leak into subsequent link generation.
497         $request->setArg('zipname', false);
499         $zipname = "wikihtml.zip";
501     $zip = new ZipWriter("Created by PhpWiki " . PHPWIKI_VERSION, $zipname);
502     $dbi =& $request->_dbi;
503     $thispage = $request->getArg('pagename'); // for "Return to ..."
504     if ($pages = $request->getArg('pages')) { // which pagenames
505         if ($pages == '[]') // current page
507         $page_iter = new WikiDB_Array_generic_iter(explodePageList($pages));
509         $page_iter = $dbi->getAllPages(false,false,false,$request->getArg('exclude'));
512     $WikiTheme->DUMP_MODE = 'ZIPHTML';
513     _DumpHtmlToDir($zip, $page_iter, $request->getArg('exclude'));
514     $WikiTheme->DUMP_MODE = false;
518 * Internal html dumper. Used for dumphtml, ziphtml and pdf
/**
 * Internal HTML dumper shared by dumphtml, ziphtml and PDF export.
 * Renders every page from $page_iter through the 'browse' template,
 * writes the result either to a directory or into a ZipWriter
 * (depending on $WikiTheme->DUMP_MODE: HTML / PDFHTML / ZIPHTML),
 * rewrites absolute image src paths to local relative ones, and copies
 * the theme's images, buttons and CSS files alongside the pages.
 * In PDFHTML mode it finally hands the generated files to an external
 * html2pdf converter.
 * NOTE(review): many original lines (else-branches, closing braces)
 * are absent from this excerpt; inline comments below only describe
 * visible code.
 *
 * @param string|object $target   directory path, or ZipWriter object
 * @param object $page_iter       iterator of pages or page names
 * @param string|false $exclude   comma-separated pagenames to skip
 */
520 function _DumpHtmlToDir ($target, $page_iter, $exclude = false)
522     global $WikiTheme, $request, $ErrorManager;
523     $silent = true; $zip = false; $directory = false;
// Decode $target according to the active dump mode.
524     if ($WikiTheme->DUMP_MODE == 'HTML') {
525         $directory = $target;
527     } elseif ($WikiTheme->DUMP_MODE == 'PDFHTML') {
528         $directory = $target;
529     } elseif (is_object($target)) { // $WikiTheme->DUMP_MODE == 'ZIPHTML'
533     $request->_TemplatesProcessed = array();
534     if ($exclude) { // exclude which pagenames
535         $excludeList = explodePageList($exclude);
537         $excludeList = array('DebugAuthInfo', 'DebugGroupInfo', 'AuthInfo');
// Pre-collect the names of all dumped pages so the theme can decide
// which wiki links may point to local files.
539     $WikiTheme->VALID_LINKS = array();
540     if ($request->getArg('format')) { // pagelist
541         $page_iter_sav = $page_iter;
542         foreach ($page_iter_sav->asArray() as $handle) {
543             $WikiTheme->VALID_LINKS[] = is_string($handle) ? $handle : $handle->getName();
545         $page_iter_sav->reset();
548     if (defined('HTML_DUMP_SUFFIX')) {
549         $WikiTheme->HTML_DUMP_SUFFIX = HTML_DUMP_SUFFIX;
// Stash and strip any extra <body> attributes; restored at the end.
551     if (isset($WikiTheme->_MoreAttr['body'])) {
552         $_bodyAttr = $WikiTheme->_MoreAttr['body'];
553         unset($WikiTheme->_MoreAttr['body']);
556     $ErrorManager->pushErrorHandler(new WikiFunctionCb('_dump_error_handler'));
558     // check if the dumped file will be accessible from outside
559     $doc_root = $request->get("DOCUMENT_ROOT");
560     if ($WikiTheme->DUMP_MODE == 'HTML') {
561         $ldir = NormalizeLocalFileName($directory);
562         $wikiroot = NormalizeLocalFileName('');
// Prefer a web-reachable link prefix; fall back to a file:// URL.
563         if (string_starts_with($ldir, $doc_root)) {
564             $link_prefix = substr($directory, strlen($doc_root))."/";
565         } elseif (string_starts_with($ldir, $wikiroot)) {
566             $link_prefix = NormalizeWebFileName(substr($directory, strlen($wikiroot)))."/";
570             $prefix = '/'; // . substr($doc_root,0,2); // add drive where apache is installed
572             $link_prefix = "file://".$prefix.$directory."/";
578     $request_args = $request->args;
579     $timeout = (! $request->getArg('start_debug')) ? 60 : 240;
582         $directory = str_replace("\\", "/", $directory); // no Win95 support.
583         if (!is_dir("$directory/images"))
584             mkdir("$directory/images");
// Track already-copied images/css to avoid duplicate copies.
588     $already_images = array();
// ---- main per-page rendering loop ----
590     while ($page = $page_iter->next()) {
591         if (is_string($page)) {
593             $page = $request->_dbi->getPage($pagename);
595             $pagename = $page->getName();
// Remember the first page: used as the PDF output name below.
597         if (empty($firstpage)) $firstpage = $pagename;
598         if (array_key_exists($pagename, $already))
600         $already[$pagename] = 1;
601         $current = $page->getCurrentRevision();
602         //if ($current->getVersion() == 0)
605         $request->args = $request_args; // some plugins might change them (esp. on POST)
606         longer_timeout($timeout); // Reset watchdog
609             $attrib = array('mtime' => $current->get('mtime'),
611             if ($page->get('locked'))
612                 $attrib['write_protected'] = 1;
613         } elseif (!$silent) {
614             if (!isa($request,'MockRequest')) {
615                 PrintXML(HTML::br(), $pagename, ' ... ');
619         if (in_array($pagename, $excludeList)) {
620             if (!$silent and !isa($request,'MockRequest')) {
621                 PrintXML(_("Skipped."));
627         if ($WikiTheme->DUMP_MODE == 'PDFHTML')
628             $request->setArg('action', 'pdf'); // to omit cache headers
629         $request->setArg('pagename', $pagename); // Template::_basepage fix
630         $filename = FilenameForPage($pagename) . $WikiTheme->HTML_DUMP_SUFFIX;
631         $args = array('revision' => $current,
632 'CONTENT' => $current->getTransformedContent(),
633 'relative_base' => $relative_base);
634         // For every %2F will need to mkdir -p dirname($pagename)
635         if (preg_match("/(%2F|\/)/", $filename)) {
636             // mkdir -p and set relative base for subdir pages
637             $filename = preg_replace("/%2F/", "/", $filename);
638             $count = substr_count($filename, "/");
639             $dirname = dirname($filename);
641                 mkdir_p($directory."/".$dirname);
642             // Fails with "XX / YY", "XX" is created, "XX / YY" cannot be written
643             // if (isWindows()) // interesting Windows bug: cannot mkdir "bla "
644             // Since dumps needs to be copied, we have to disallow this for all platforms.
645             $filename = preg_replace("/ \//", "/", $filename);
// One "../" per subdirectory level so relative links resolve.
646             $relative_base = "../";
648                 $relative_base .= "../";
651         $args['relative_base'] = $relative_base;
// GeneratePageasXML apparently resets DUMP_MODE; save and restore it.
655         $DUMP_MODE = $WikiTheme->DUMP_MODE;
656         $data = GeneratePageasXML(new Template('browse', $request, $args),
657 $pagename, $current, $args);
658         $WikiTheme->DUMP_MODE = $DUMP_MODE;
// Rewrite absolute <img src="/..."> to local "images/<base>" and copy
// the referenced files so the dump (and PDF) is self-contained.
660         if (preg_match_all("/<img .*?src=\"(\/.+?)\"/", $data, $m)) {
661             // fix to local relative path for uploaded images, so that pdf will work
662             foreach ($m[1] as $img_file) {
663                 $base = basename($img_file);
664                 $data = str_replace('src="'.$img_file.'"','src="images/'.$base.'"', $data);
665                 if (array_key_exists($img_file, $already_images))
667                 $already_images[$img_file] = 1;
668                 // resolve src from webdata to file
669                 $src = $doc_root . $img_file;
670                 if (file_exists($src) and $base) {
672                         $target = "$directory/images/$base";
673                         if (copy($src, $target)) {
675                                 _copyMsg($img_file, fmt("... copied to %s", $target));
678                                 _copyMsg($img_file, fmt("... not copied to %s", $target));
681                         $target = "images/$base";
682                         $zip->addSrcFile($target, $src);
// Write the rendered page: to a file in directory mode...
689             $outfile = $directory."/".$filename;
690             if ( !($fd = fopen($outfile, "wb")) ) {
691                 $msg->pushContent(HTML::strong(fmt("couldn't open file '%s' for writing",
693                 $request->finish($msg);
695             $len = strlen($data);
696             $num = fwrite($fd, $data, $len);
697             if ($pagename != $filename) {
698                 $link = LinkURL($link_prefix.$filename, $filename);
699                 $msg->pushContent(HTML::small(_("saved as "), $link, " ... "));
701             $msg->pushContent(HTML::small(fmt("%s bytes written", $num), "\n"));
703             if (!isa($request, 'MockRequest')) {
707                 $request->chunkOutput();
709             assert($num == $len);
711             $outfiles[] = $outfile;
// ...or as an in-memory zip entry in ZIPHTML mode.
713             $zip->addRegularFile($filename, $data, $attrib);
// Free per-page caches aggressively: a full-wiki dump would otherwise
// accumulate every page's data in memory.
717             $request->_dbi->_cache->invalidate_cache($pagename);
718             unset ($request->_dbi->_cache->_pagedata_cache);
719             unset ($request->_dbi->_cache->_versiondata_cache);
720             unset ($request->_dbi->_cache->_glv_cache);
722             unset ($request->_dbi->_cache->_backend->_page_data);
725         unset($current->_transformedContent);
727         if (!empty($template)) {
728             unset($template->_request);
// ---- copy theme assets recorded during rendering ----
735     $attrib = false; //array('is_ascii' => 0);
736     if (!empty($WikiTheme->dumped_images) and is_array($WikiTheme->dumped_images)) {
737         // @mkdir("$directory/images");
738         foreach ($WikiTheme->dumped_images as $img_file) {
739             if (array_key_exists($img_file, $already_images))
741             $already_images[$img_file] = 1;
743                 and ($from = $WikiTheme->_findFile($img_file, true))
747                         $target = "$directory/images/".basename($from);
749                             copy($WikiTheme->_path . $from, $target);
751                             if (copy($WikiTheme->_path . $from, $target)) {
752                                 _copyMsg($from, fmt("... copied to %s", $target));
754                                 _copyMsg($from, fmt("... not copied to %s", $target));
758                         $target = "images/".basename($from);
759                         $zip->addSrcFile($target, $WikiTheme->_path . $from);
761                 } elseif (!$silent) {
762                     _copyMsg($from, _("... not found"));
// Theme buttons go into an images/buttons/ subdirectory.
767     if (!empty($WikiTheme->dumped_buttons)
768         and is_array($WikiTheme->dumped_buttons))
771         if ($directory && !is_dir("$directory/images/buttons"))
772             mkdir("$directory/images/buttons");
773         foreach ($WikiTheme->dumped_buttons as $text => $img_file) {
774             if (array_key_exists($img_file, $already_images))
776             $already_images[$img_file] = 1;
778                 and ($from = $WikiTheme->_findFile($img_file, true))
782                         $target = "$directory/images/buttons/".basename($from);
784                             copy($WikiTheme->_path . $from, $target);
786                             if (copy($WikiTheme->_path . $from, $target)) {
787                                 _copyMsg($from, fmt("... copied to %s", $target));
789                                 _copyMsg($from, fmt("... not copied to %s", $target));
793                         $target = "images/buttons/".basename($from);
794                         $zip->addSrcFile($target, $WikiTheme->_path . $from);
796                 } elseif (!$silent) {
797                     _copyMsg($from, _("... not found"));
// Stylesheets are copied next to the pages (top-level of the dump).
801     if (!empty($WikiTheme->dumped_css) and is_array($WikiTheme->dumped_css)) {
802         foreach ($WikiTheme->dumped_css as $css_file) {
803             if (array_key_exists($css_file, $already_images))
805             $already_images[$css_file] = 1;
807                 and ($from = $WikiTheme->_findFile(basename($css_file), true))
810                 // TODO: fix @import url(main.css);
812                         $target = "$directory/" . basename($css_file);
814                             copy($WikiTheme->_path . $from, $target);
816                             if (copy($WikiTheme->_path . $from, $target)) {
817                                 _copyMsg($from, fmt("... copied to %s", $target));
819                                 _copyMsg($from, fmt("... not copied to %s", $target));
823                         //$attrib = array('is_ascii' => 0);
824                         $target = basename($css_file);
825                         $zip->addSrcFile($target, $WikiTheme->_path . $from);
827                 } elseif (!$silent) {
828                     _copyMsg($from, _("... not found"));
// ---- PDFHTML: run the external html2pdf converter on the dump ----
836     if ($WikiTheme->DUMP_MODE == 'PDFHTML') {
837         if (USE_EXTERNAL_HTML2PDF and $outfiles) {
// NOTE(review): $outfiles entries are server-side paths generated by
// this function; they are shell-quoted but not escapeshellarg()ed.
838             $cmd = EXTERNAL_HTML2PDF_PAGELIST.' "'.join('" "', $outfiles).'"';
839             $filename = FilenameForPage($firstpage);
841                 $tmpfile = $directory . "/createpdf.bat";
842                 $fp = fopen($tmpfile, "wb");
843                 fwrite($fp, $cmd . " > $filename.pdf");
846                 if (!headers_sent()) {
847                     Header('Content-Type: application/pdf');
851                     $tmpdir = getUploadFilePath();
852                     $s = passthru($cmd . " > $tmpdir/$filename.pdf");
853                     $errormsg = "<br />\nGenerated <a href=\"".getUploadDataPath()."$filename.pdf\">Upload:$filename.pdf</a>\n";
// Remove the intermediate HTML files once the PDF has been produced.
858             foreach($outfiles as $f) unlink($f);
861         if (!empty($errormsg)) {
862             $request->discardOutput();
863             $GLOBALS['ErrorManager']->_postponed_errors = array();
// ---- restore global theme/error-handler state ----
867     $ErrorManager->popErrorHandler();
869     $WikiTheme->HTML_DUMP_SUFFIX = '';
870     $WikiTheme->DUMP_MODE = false;
871     $WikiTheme->_MoreAttr['body'] = isset($_bodyAttr) ? $_bodyAttr : '';
874 ////////////////////////////////////////////////////////////////
876 // Functions for restoring.
878 ////////////////////////////////////////////////////////////////
/**
 * Save one parsed page (from a load/upload) into the database,
 * handling name validation, the special InterWikiMap page, edit
 * conflicts (merge / overwrite / keep-old / conflict editor), and
 * progress output. Skips creating a revision when content is
 * identical to the current version.
 * NOTE(review): numerous original lines (else-branches, closing
 * braces) are absent from this excerpt; comments describe visible
 * code only.
 *
 * @param WikiRequest $request  current request (by reference)
 * @param array $pageinfo       parsed page: 'pagename', 'content',
 *                              'pagedata', 'versiondata' (by reference)
 * @param string $source        human-readable source description
 *                              (also carries the path for buttons below)
 * @param string $filename      source filename
 */
880 function SavePage (&$request, &$pageinfo, $source, $filename)
// static: the "overwrite all" hint button is offered only once per run.
882     static $overwite_all = false;
883     $pagedata = $pageinfo['pagedata']; // Page level meta-data.
884     $versiondata = $pageinfo['versiondata']; // Revision level meta-data.
886     if (empty($pageinfo['pagename'])) {
887         PrintXML(HTML::p(HTML::strong(_("Empty pagename!"))));
891     if (empty($versiondata['author_id']))
892         $versiondata['author_id'] = $versiondata['author'];
894     // remove invalid backend specific chars. utf8 issues mostly
895     $pagename_check = new WikiPagename($pageinfo['pagename']);
896     if (!$pagename_check->isValid()) {
897         PrintXML(HTML::p(HTML::strong(sprintf(_("'%s': Bad page name"), $pageinfo['pagename']))));
900     $pagename = $pagename_check->getName();
901     $content = $pageinfo['content'];
// The InterWikiMap page gets the external map file spliced in.
903     if ($pagename == _("InterWikiMap"))
904         $content = _tryinsertInterWikiMap($content);
906     $dbi =& $request->_dbi;
907     $page = $dbi->getPage($pagename);
909     // Try to merge if updated pgsrc contents are different. This
910     // whole thing is hackish
912     // TODO: try merge unless:
913     // if (current contents = default contents && pgsrc_version >=
914     // pgsrc_version) then just upgrade this pgsrc
915     $needs_merge = false;
919     if ($request->getArg('merge')) {
922     else if ($request->getArg('overwrite')) {
926     $current = $page->getCurrentRevision();
928     $edit = $request->getArg('edit');
930     if (isset($edit['keep_old'])) {
934     elseif (isset($edit['overwrite'])) {
// Real conflict (non-default current content differs from the load):
// hand off to the interactive conflict editor and stop here.
938     elseif ( $current and (! $current->hasDefaultContents())
939              && ($current->getPackedContent() != $content) )
941         include_once 'lib/editpage.php';
942         $request->setArg('pagename', $pagename);
943         $v = $current->getVersion();
944         $request->setArg('revision', $current->getVersion());
945         $p = new LoadFileConflictPageEditor($request);
946         $p->_content = $content;
947         $p->_currentVersion = $v - 1;
948         $p->editPage($saveFailed = true);
949         return; //early return
// Apply page-level metadata (locked, perms, ...) from the load.
953     foreach ($pagedata as $key => $value) {
955         $page->set($key, $value);
958     $mesg = HTML::p(array('style' => 'text-indent: 3em;'));
960         $mesg->pushContent(' ', fmt("from %s", $source));
963     //FIXME: This should not happen! (empty vdata, corrupt cache or db)
964     $current = $page->getCurrentRevision();
966     if ($current->getVersion() == 0) {
// Brand-new page: credit it to the admin user.
967         $versiondata['author'] = ADMIN_USER;
968         $versiondata['author_id'] = ADMIN_USER;
969         $mesg->pushContent(' - ', _("New page"));
973         if ( (! $current->hasDefaultContents())
974              && ($current->getPackedContent() != $content) ) {
976                 $mesg->pushContent(' ',
977                                    fmt("has edit conflicts - overwriting anyway"));
978                 if (substr_count($source, 'pgsrc')) {
980                     $versiondata['author'] = ADMIN_USER;
981                     // but leave authorid as userid who loaded the file
985                 if (isset($edit['keep_old'])) {
986                     $mesg->pushContent(' ', fmt("keep old"));
988                     $mesg->pushContent(' ', fmt("has edit conflicts - skipped"));
989                     $needs_merge = true; // hackish, to display the buttons
994         else if ($current->getPackedContent() == $content) {
995             // The page content is the same, we don't need a new revision.
996             $mesg->pushContent(' ',
997                                fmt("content is identical to current version %d - no new revision created",
998                                    $current->getVersion()));
1005         // in case of failures print the culprit:
1006         if (!isa($request,'MockRequest')) {
1007             PrintXML(HTML::p(WikiLink($pagename))); flush();
1009         $new = $page->save($content, WIKIDB_FORCE_CREATE, $versiondata);
1011         $mesg->pushContent(' ', fmt("- saved to database as version %d",
1012                                     $new->getVersion()));
// Conflict case: build "Merge" / "Restore Anyway" buttons. The source
// string is stripped of its localized prefixes to recover the path.
1016         // hackish, $source contains needed path+filename
1017         $f = str_replace(sprintf(_("MIME file %s"), ''), '', $f);
1018         $f = str_replace(sprintf(_("Serialized file %s"), ''), '', $f);
1019         $f = str_replace(sprintf(_("plain file %s"), ''), '', $f);
1020         //check if uploaded file? they pass just the content, but the file is gone
1023             $meb = Button(array('action' => 'loadfile',
1027                           _("PhpWikiAdministration"),
1029             $owb = Button(array('action' => 'loadfile',
1032                           _("Restore Anyway"),
1033                           _("PhpWikiAdministration"),
1035             $mesg->pushContent(' ', $meb, " ", $owb);
1036             if (!$overwite_all) {
1037                 $args = $request->getArgs();
1038                 $args['overwrite'] = 1;
1039                 $owb = Button($args,
1041                               _("PhpWikiAdministration"),
1043                 $mesg->pushContent(HTML::span(array('class' => 'hint'), $owb));
1044                 $overwite_all = true;
1047             $mesg->pushContent(HTML::em(_(" Sorry, cannot merge.")));
1051     if (!isa($request,'MockRequest')) {
1053         PrintXML(HTML::p(HTML::em(WikiLink($pagename))), $mesg);
1060 // action=revert (by diff)
/**
 * Handle action=revert: roll a page back to an earlier version.
 * Validates the version argument (present, not 0, not the current
 * version), supports cancel, shows a confirmation form on the first
 * request (no 'verify' arg), and on confirmation saves the old
 * revision's content as a brand-new revision on top.
 *
 * @param WikiRequest $request  current request (by reference)
 */
1061 function RevertPage (&$request)
1063     $mesg = HTML::div();
1064     $pagename = $request->getArg('pagename');
1065     $version = $request->getArg('version');
1066     $dbi =& $request->_dbi;
1067     $page = $dbi->getPage($pagename);
// No version given: bounce back with a warning.
1069         $request->redirect(WikiURL($page,
1070                                    array('warningmsg' => _('Revert: missing required version argument'))));
1073     $current = $page->getCurrentRevision();
1074     $currversion = $current->getVersion();
1075     if ($currversion == 0) {
1076         $request->redirect(WikiURL($page,
1077                                    array('errormsg' => _('No revert: no page content'))));
1080     if ($currversion == $version) {
1081         $request->redirect(WikiURL($page,
1082                                    array('warningmsg' => _('No revert: same version page'))));
1085     if ($request->getArg('cancel')) {
1086         $request->redirect(WikiURL($page,
1087                                    array('warningmsg' => _('Revert cancelled'))));
// First pass: render a yes/cancel confirmation form plus a preview of
// the target revision, then return without saving.
1090     if (!$request->getArg('verify')) {
1091         $mesg->pushContent(HTML::p(fmt("Are you sure to revert %s to version $version?", WikiLink($pagename))),
1092                            HTML::form(array('action' => $request->getPostURL(),
1093                                             'method' => 'post'),
1094                                       HiddenInputs($request->getArgs(), false, array('verify')),
1095                                       HiddenInputs(array('verify' => 1)),
1096                                       Button('submit:verify', _("Yes"), 'button'),
1097                                       HTML::Raw(' '),
1098                                       Button('submit:cancel', _("Cancel"), 'button'))
1100         $rev = $page->getRevision($version);
1101         $html = HTML(HTML::fieldset($mesg), HTML::hr(), $rev->getTransformedContent());
1102         $template = Template('browse',
1103                              array('CONTENT' => $html));
1104         GeneratePage($template, $pagename, $rev);
1105         $request->checkValidators();
// Confirmed: copy the old revision's content/metadata into a new
// revision (history is preserved; nothing is deleted).
1109     $rev = $page->getRevision($version);
1110     $content = $rev->getPackedContent();
1111     $versiondata = $rev->_data;
1112     $versiondata['summary'] = sprintf(_("revert to version %d"), $version);
1113     $versiondata['mtime'] = time();
1114     $new = $page->save($content, $currversion + 1, $versiondata);
1117     $mesg = HTML::span();
1118     $pagelink = WikiLink($pagename);
1119     $mesg->pushContent(fmt("Revert: %s", $pagelink),
1120                        fmt("- version %d saved to database as version %d",
1121                            $version, $new->getVersion()));
1122     // Force browse of current page version.
1123     $request->setArg('version', false);
1124     $template = Template('savepage', array());
1125     $template->replace('CONTENT', $new->getTransformedContent());
1127     GeneratePage($template, $mesg, $new);
/**
 * Append the external InterWiki map file (INTERWIKI_MAP_FILE) to the
 * InterWikiMap page content inside a <verbatim> block, unless the
 * content already contains one or the map file is missing/undefined
 * (in which case a notice is raised and, presumably, the content is
 * returned unchanged -- the early-return lines are not visible here).
 *
 * @param string $content  page content being loaded
 * @return string  content, possibly with the map appended
 */
1131 function _tryinsertInterWikiMap($content) {
// A pre-existing <verbatim> block means the map is already embedded.
1133     if (strpos($content, "<verbatim>")) {
1134         //$error_html = " The newly loaded pgsrc already contains a verbatim block.";
1137     if (!$goback && !defined('INTERWIKI_MAP_FILE')) {
1138         $error_html = sprintf(" "._("%s: not defined"), "INTERWIKI_MAP_FILE");
1141     $mapfile = FindFile(INTERWIKI_MAP_FILE,1);
1142     if (!$goback && !file_exists($mapfile)) {
1143         $error_html = sprintf(" "._("%s: file not found"), INTERWIKI_MAP_FILE);
1147     if (!empty($error_html))
1148         trigger_error(_("Default InterWiki map file not loaded.")
1149                       . $error_html, E_USER_NOTICE);
1153     // if loading from virgin setup do echo, otherwise trigger_error E_USER_NOTICE
1154     if (!isa($GLOBALS['request'], 'MockRequest'))
1155         echo sprintf(_("Loading InterWikiMap from external file %s."), $mapfile),"<br />";
// Read the whole map file and splice it into a <verbatim> block.
1157     $fd = fopen ($mapfile, "rb");
1158     $data = fread ($fd, filesize($mapfile));
1160     $content = $content . "\n<verbatim>\n$data</verbatim>\n";
/**
 * Parse a PHP-serialized page dump into the pageinfo structure used
 * by SavePage: pagename, content, page-level meta-data ('pagedata')
 * and revision-level meta-data ('versiondata').
 *
 * NOTE(review): reconstructed from a listing with dropped lines; the
 * switch/case/break skeleton was restored around the visible bodies.
 *
 * @param string $text             serialized array (must look like "a:N:{s:…")
 * @param string $default_pagename fallback pagename when the dump has none
 * @param object $user             used only for a fallback 'author' (getId())
 * @return array|false pageinfo array, or false if $text is not a serialized page
 */
function ParseSerializedPage($text, $default_pagename, $user)
{
    if (!preg_match('/^a:\d+:{[si]:\d+/', $text))
        return false;

    // SECURITY NOTE(review): unserialize() on untrusted input allows PHP
    // object injection; dumps should only be loaded by the administrator.
    $pagehash = unserialize($text);

    // Split up pagehash into four parts:
    //   pagename
    //   content
    //   page-level meta-data
    //   revision-level meta-data

    if (!defined('FLAG_PAGE_LOCKED'))
        define('FLAG_PAGE_LOCKED', 1);
    if (!defined('FLAG_PAGE_EXTERNAL'))
        // Bug fix: was defined as 1, identical to FLAG_PAGE_LOCKED, so the
        // two bit tests below could never distinguish the flags.
        define('FLAG_PAGE_EXTERNAL', 2);
    $pageinfo = array('pagedata'    => array(),
                      'versiondata' => array());

    $pagedata = &$pageinfo['pagedata'];
    $versiondata = &$pageinfo['versiondata'];

    // Fill in defaults.
    if (empty($pagehash['pagename']))
        $pagehash['pagename'] = $default_pagename;
    if (empty($pagehash['author'])) {
        $pagehash['author'] = $user->getId();
    }

    foreach ($pagehash as $key => $value) {
        switch ($key) {
            case 'pagename':
            case 'version':
            case 'hits':
                $pageinfo[$key] = $value;
                break;
            case 'content':
                // Content is stored as an array of lines.
                $pageinfo[$key] = join("\n", $value);
                break;
            case 'flags':
                if (($value & FLAG_PAGE_LOCKED) != 0)
                    $pagedata['locked'] = 'yes';
                if (($value & FLAG_PAGE_EXTERNAL) != 0)
                    $pagedata['external'] = 'yes';
                break;
            case 'owner':
            case 'created':
                $pagedata[$key] = $value;
                break;
            case 'acl':
            case 'perm':
                $pagedata['perm'] = ParseMimeifiedPerm($value);
                break;
            case 'lastmodified':
                $versiondata['mtime'] = $value;
                break;
            case 'author':
            case 'author_id':
            case 'summary':
                $versiondata[$key] = $value;
                break;
        }
    }
    if (empty($pagehash['charset']))
        $pagehash['charset'] = 'utf-8';
    // compare to target charset
    if (strtolower($pagehash['charset']) != strtolower($GLOBALS['charset'])) {
        // Bug fix: the original referenced the undefined $params['charset'];
        // the source charset is the one declared in the dump itself.
        $pageinfo['content'] = charset_convert($pagehash['charset'], $GLOBALS['charset'], $pageinfo['content']);
        $pageinfo['pagename'] = charset_convert($pagehash['charset'], $GLOBALS['charset'], $pageinfo['pagename']);
    }
    return $pageinfo;
}
/**
 * usort() comparator: orders MIME-dump page parts ascending by version
 * number, so older revisions are saved before newer ones in LoadFile().
 *
 * @param array $a page part with a 'version' key
 * @param array $b page part with a 'version' key
 * @return int negative/zero/positive per the usort contract
 */
function SortByPageVersion ($a, $b) {
    return $a['version'] - $b['version'];
}
/**
 * Load a single file (MIME dump, serialized dump, or plain text) into
 * the wiki as one or more pages.
 *
 * Security alert! We must not allow importing config.ini into our wiki
 * (or from a sister wiki?) because the SQL passwords are in plaintext
 * there, and the webserver must be able to read it.
 * Detected by Santtu Jarvi.
 *
 * NOTE(review): reconstructed from a listing with dropped lines; the
 * restored "return;" after the refusal is essential — without it the
 * refused config file would still be imported.
 *
 * @param WikiRequest $request
 * @param string      $filename source path (also used for the page name)
 * @param string|bool $text     file contents; read from $filename when not a string
 * @param int|bool    $mtime    modification time; stat()ed or time() when absent
 */
function LoadFile (&$request, $filename, $text = false, $mtime = false)
{
    if (preg_match("/config$/", dirname($filename))             // our or other config
        and preg_match("/config.*\.ini/", basename($filename))) // backups and other versions also
    {
        trigger_error(sprintf("Refused to load %s", $filename), E_USER_WARNING);
        return;
    }
    if (!is_string($text)) {
        // Read the file.
        $stat = stat($filename);
        $mtime = $stat[9]; // mtime field of stat()
        $text = implode("", file($filename));
    }

    if (! $request->getArg('start_debug')) @set_time_limit(30); // Reset watchdog
    else @set_time_limit(240);

    // FIXME: basename("filewithnoslashes") seems to return garbage sometimes.
    $basename = basename("/dummy/" . $filename);
    if (!$mtime)
        $mtime = time(); // Last resort.

    // DONE: check source - target charset for content and pagename
    // but only for pgsrc'ed content, not from the browser.

    $default_pagename = rawurldecode($basename);
    if ( ($parts = ParseMimeifiedPages($text)) ) {
        // A MIME dump may carry several revisions of the same page.
        if (count($parts) > 1)
            $overwrite = $request->getArg('overwrite');
        usort($parts, 'SortByPageVersion');
        foreach ($parts as $pageinfo) {
            // force overwrite for all but the first revision
            if (count($parts) > 1)
                $request->setArg('overwrite', 1);
            SavePage($request, $pageinfo, sprintf(_("MIME file %s"),
                                                  $filename), $basename);
        }
        // Restore the caller's overwrite setting.
        if (count($parts) > 1)
            if ($overwrite)
                $request->setArg('overwrite', $overwrite);
            else
                unset($request->_args['overwrite']);
    }
    else if ( ($pageinfo = ParseSerializedPage($text, $default_pagename,
                                               $request->getUser())) ) {
        SavePage($request, $pageinfo, sprintf(_("Serialized file %s"),
                                              $filename), $basename);
    }
    else {
        // plain old file
        $user = $request->getUser();

        $file_charset = 'utf-8';
        // compare to target charset
        if ($file_charset != strtolower($GLOBALS['charset'])) {
            $text = charset_convert($file_charset, $GLOBALS['charset'], $text);
            $default_pagename = charset_convert($file_charset, $GLOBALS['charset'], $default_pagename);
        }

        // Assume plain text file.
        $pageinfo = array('pagename' => $default_pagename,
                          'pagedata' => array(),
                          'versiondata'
                          => array('author' => $user->getId()),
                          'content' => preg_replace('/[ \t\r]*\n/', "\n",
                                                    chop($text)));
        SavePage($request, $pageinfo, sprintf(_("plain file %s"), $filename),
                 $basename);
    }
}
/**
 * Load every member of a ZIP archive as a wiki page via LoadFile().
 *
 * @param WikiRequest     $request
 * @param string|resource $zipfile path or open stream, handed to ZipReader
 * @param array|bool      $files   whitelist of member names, or false for all
 * @param array|bool      $exclude blacklist of member names, or false for none
 */
function LoadZip (&$request, $zipfile, $files = false, $exclude = false) {
    $zip = new ZipReader($zipfile);
    // Shorter per-page timeout unless debugging.
    $timeout = (! $request->getArg('start_debug')) ? 20 : 120;
    while (list($fn, $data, $attrib) = $zip->readFile()) {
        // FIXME: basename("filewithnoslashes") seems to return
        // garbage sometimes.
        $fn = basename("/dummy/" . $fn);
        if ( ($files && !in_array($fn, $files))
             || ($exclude && in_array($fn, $exclude)) ) {
            PrintXML(HTML::p(WikiLink($fn)),
                     HTML::p(_("Skipping")));
            continue;
        }

        longer_timeout($timeout); // longer timeout per page
        LoadFile($request, $fn, $data, $attrib['mtime']);
    }
}
/**
 * Load all files of a directory as wiki pages, reporting skipped files
 * and deferring HOME_PAGE to the end of the run.
 *
 * NOTE(review): reconstructed; the $list initialisation below was lost
 * in the paste — assumed HTML::ul(), confirm against upstream.
 *
 * @param WikiRequest $request
 * @param string      $dirname directory to import
 * @param array|bool  $files   whitelist of filenames, or false for all
 * @param array|bool  $exclude blacklist of filenames, or false for none
 */
function LoadDir (&$request, $dirname, $files = false, $exclude = false) {
    $fileset = new LimitedFileSet($dirname, $files, $exclude);

    if (!$files and ($skiplist = $fileset->getSkippedFiles())) {
        PrintXML(HTML::p(HTML::strong(_("Skipping"))));
        $list = HTML::ul();
        foreach ($skiplist as $file)
            $list->pushContent(HTML::li(WikiLink($file)));
        PrintXML(HTML::p($list));
    }

    // Defer HomePage loading until the end. If anything goes wrong
    // the pages can still be loaded again.
    $files = $fileset->getFiles();
    if (in_array(HOME_PAGE, $files)) {
        $files = array_diff($files, array(HOME_PAGE));
        $files[] = HOME_PAGE;
    }

    $timeout = (! $request->getArg('start_debug')) ? 20 : 120;
    foreach ($files as $file) {
        longer_timeout($timeout); // longer timeout per page
        if (substr($file, -1, 1) != '~') // refuse to load backup files
            LoadFile($request, "$dirname/$file");
    }
}
/**
 * FileSet restricted by an include-whitelist and an exclude-blacklist;
 * files rejected by the selector are recorded for later reporting.
 */
class LimitedFileSet extends FileSet {
    // PHP4-style constructor kept for compatibility with the parent class.
    function LimitedFileSet($dirname, $_include, $exclude) {
        $this->_includefiles = $_include; // whitelist (or false)
        $this->_exclude = $exclude;       // blacklist (or false)
        $this->_skiplist = array();       // files rejected by the selector
        parent::FileSet($dirname);
    }

    /**
     * Selector callback: accept $fn unless filtered by the include or
     * exclude lists; rejected names are appended to the skip list.
     *
     * @param string $fn filename
     * @return bool true to include the file
     */
    function _filenameSelector($fn) {
        $incl = &$this->_includefiles;
        $excl = &$this->_exclude;

        if ( ($incl && !in_array($fn, $incl))
             || ($excl && in_array($fn, $excl)) ) {
            $this->_skiplist[] = $fn;
            return false;
        }
        return true;
    }

    // Files rejected by _filenameSelector() during the scan.
    function getSkippedFiles () {
        return $this->_skiplist;
    }
}
/**
 * Check whether a file (by path) or an open stream looks like a ZIP
 * archive, by comparing its first four bytes against the ZIP local/
 * central header magic numbers.
 *
 * For a stream argument the read position is restored afterwards.
 *
 * @param string|resource $filename_or_fd path or open file descriptor
 * @return bool true when the magic bytes match
 */
function IsZipFile ($filename_or_fd)
{
    // See if it looks like a zip file
    if (is_string($filename_or_fd)) {
        $fd = fopen($filename_or_fd, "rb");
        $magic = fread($fd, 4);
        fclose($fd); // don't leak the descriptor
    }
    else {
        $fpos = ftell($filename_or_fd);
        $magic = fread($filename_or_fd, 4);
        fseek($filename_or_fd, $fpos); // rewind to where the caller was
    }

    return $magic == ZIP_LOCHEAD_MAGIC || $magic == ZIP_CENTHEAD_MAGIC;
}
/**
 * Dispatch loading of $file_or_dir: directory => LoadDir, ZIP archive
 * => LoadZip, otherwise a single file => LoadFile. Tries a urlencoded
 * variant of the name when the path does not exist as given.
 *
 * NOTE(review): reconstructed from a listing with dropped lines; the
 * symlink mode-to-type mapping (010 => 'file', 004 => 'dir') follows
 * the POSIX stat mode bits visible in the surviving elseif.
 *
 * @param WikiRequest $request
 * @param string      $file_or_dir path (file, dir, or http/ftp URL)
 * @param array|bool  $files       whitelist passed to LoadDir/LoadZip
 * @param array|bool  $exclude     blacklist passed to LoadDir/LoadZip
 */
function LoadAny (&$request, $file_or_dir, $files = false, $exclude = false)
{
    // Try urlencoded filename for accented characters.
    if (!file_exists($file_or_dir)) {
        // Make sure there are slashes first to avoid confusing phps
        // with broken dirname or basename functions.
        // FIXME: windows uses \ and :
        if (is_integer(strpos($file_or_dir, "/"))) {
            $newfile = FindFile($file_or_dir, true);
            // Panic. urlencoded by the browser (e.g. San%20Diego => San Diego)
            if (!$newfile)
                $file_or_dir = dirname($file_or_dir) . "/"
                    . rawurlencode(basename($file_or_dir));
        }
        else {
            // This is probably just a file.
            $file_or_dir = rawurlencode($file_or_dir);
        }
    }

    $type = filetype($file_or_dir);
    if ($type == 'link') {
        // For symbolic links, use stat() to determine
        // the type of the underlying file.
        list(,,$mode) = stat($file_or_dir);
        $type = ($mode >> 12) & 017; // file-type nibble of st_mode
        if ($type == 010)            // S_IFREG
            $type = 'file';
        elseif ($type == 004)        // S_IFDIR
            $type = 'dir';
    }

    if (! $type) {
        $request->finish(fmt("Empty or not existing source. Unable to load: %s", $file_or_dir));
    }
    else if ($type == 'dir') {
        LoadDir($request, $file_or_dir, $files, $exclude);
    }
    else if ($type != 'file' && !preg_match('/^(http|ftp):/', $file_or_dir)) {
        $request->finish(fmt("Bad file type: %s", $type));
    }
    else if (IsZipFile($file_or_dir)) {
        LoadZip($request, $file_or_dir, $files, $exclude);
    }
    else /* if (!$files || in_array(basename($file_or_dir), $files)) */ {
        LoadFile($request, $file_or_dir);
    }
}
/**
 * Action entry point: load the file or directory named by the
 * 'source' request argument, wrapped in the load/dump page chrome.
 *
 * @param WikiRequest $request
 */
function LoadFileOrDir (&$request)
{
    $source = $request->getArg('source');
    $finder = new FileFinder;
    $source = $finder->slashifyPath($source);
    StartLoadDump($request,
                  sprintf(_("Loading '%s'"), $source));
    LoadAny($request, $source);
    EndLoadDump($request);
}
/**
 * HomePage was not found so first-time install is supposed to run.
 * - import all pgsrc pages.
 * - Todo: installer interface to edit config/config.ini settings
 * - Todo: ask for existing old index.php to convert to config/config.ini
 * - Todo: theme-specific pages:
 *   blog - HomePage, ADMIN_USER/Blogs
 *
 * NOTE(review): reconstructed from a listing with dropped lines. The
 * condition guarding the first $mandatory list was lost; from the list
 * contents (Help/OldTextFormattingRules) it appears to be the
 * FusionForge variant — confirm against upstream before relying on it.
 *
 * @param WikiRequest $request
 */
function SetupWiki (&$request)
{
    global $GenericPages, $LANG;

    //FIXME: This is a hack (err, "interim solution")
    // This is a bogo-bogo-login: Login without
    // saving login information in session state.
    // This avoids logging in the unsuspecting
    // visitor as ADMIN_USER
    //
    // This really needs to be cleaned up...
    // (I'm working on it.)
    $real_user = $request->_user;
    if (ENABLE_USER_NEW)
        $request->_user = new _BogoUser(ADMIN_USER);
    else
        $request->_user = new WikiUser($request, ADMIN_USER, WIKIAUTH_BOGO);

    StartLoadDump($request, _("Loading up virgin wiki"));

    $pgsrc = FindLocalizedFile(WIKI_PGSRC);
    $default_pgsrc = FindFile(DEFAULT_WIKI_PGSRC);

    $request->setArg('overwrite', true);
    if ($default_pgsrc != $pgsrc) {
        // Load the English defaults first; the localized pgsrc below
        // then overrides the generic pages.
        LoadAny($request, $default_pgsrc, $GenericPages);
    }
    $request->setArg('overwrite', false);
    LoadAny($request, $pgsrc);
    $dbi =& $request->_dbi;

    // Ensure that all mandatory pages are loaded
    $finder = new FileFinder;
    if (defined('FUSIONFORGE') and FUSIONFORGE) { // NOTE(review): condition lost in paste — verify
        $mandatory = explode(':', 'SandBox:Template/Category:Template/Talk:SpecialPages:CategoryCategory:CategoryActionPage:Help/OldTextFormattingRules:Help/TextFormattingRules:PhpWikiAdministration');
    } elseif (WIKI_NAME == "help") {
        $mandatory = explode(':', 'SandBox:Template/Category:Template/Talk:SpecialPages:CategoryCategory:CategoryActionPage:Help/TextFormattingRules:PhpWikiAdministration');
    } else {
        $mandatory = explode(':', 'SandBox:Template/UserPage:Template/Category:Template/Talk:SpecialPages:CategoryCategory:CategoryActionPage:TextFormattingRules:PhpWikiAdministration');
    }
    foreach (array_merge($mandatory,
                         $GLOBALS['AllActionPages'],
                         array(constant('HOME_PAGE'))) as $f)
    {
        $page = gettext($f);
        $epage = urlencode($page);
        if (! $dbi->isWikiPage($page) ) {
            // translated version provided?
            if ($lf = FindLocalizedFile($pgsrc . $finder->_pathsep . $epage, 1)) {
                LoadAny($request, $lf);
            } else { // load english version of required action page
                LoadAny($request, FindFile(DEFAULT_WIKI_PGSRC . $finder->_pathsep . urlencode($f)));
                $page = $f;
            }
        }
        if (! $dbi->isWikiPage($page)) {
            trigger_error(sprintf("Mandatory file %s couldn't be loaded!", $page),
                          E_USER_WARNING);
        }
    }

    // Lock the InterWikiMap page so visitors cannot edit the map.
    $pagename = _("InterWikiMap");
    $map = $dbi->getPage($pagename);
    $map->set('locked', true);
    PrintXML(HTML::p(HTML::em(WikiLink($pagename)), HTML::strong(" locked")));
    EndLoadDump($request);
}
/**
 * Action entry point: import an uploaded file. A ZIP upload is expanded
 * page-by-page (skipping RecentChanges); anything else is loaded as a
 * single file.
 *
 * @param WikiRequest $request
 */
function LoadPostFile (&$request)
{
    $upload = $request->getUploadedFile('file');

    if (!$upload)
        $request->finish(_("No uploaded file to upload?")); // FIXME: more concise message

    // Dump http headers.
    StartLoadDump($request, sprintf(_("Uploading %s"), $upload->getName()));

    $fd = $upload->open();
    if (IsZipFile($fd))
        LoadZip($request, $fd, false, array(_("RecentChanges")));
    else
        LoadFile($request, $upload->getName(), $upload->getContents());

    EndLoadDump($request);
}
1572 // c-basic-offset: 4
1573 // c-hanging-comment-ender-p: nil
1574 // indent-tabs-mode: nil