5 Copyright 1999,2000,2001,2002,2004,2005,2006,2007 $ThePhpWikiProgrammingTeam
6 Copyright 2008-2009 Marc-Etienne Vargenau, Alcatel-Lucent
8 This file is part of PhpWiki.
10 PhpWiki is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2 of the License, or
13 (at your option) any later version.
15 PhpWiki is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with PhpWiki; if not, write to the Free Software
22 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
25 require_once("lib/ziplib.php");
26 require_once("lib/Template.php");
29 * ignore fatal errors during dump
/**
 * ErrorManager callback installed while dumping (see MakeWikiZip and
 * _DumpHtmlToDir): downgrades fatal errors to E_USER_WARNING so a single
 * broken page/plugin does not abort the whole dump run.
 *
 * @param object &$error  PhpWiki error object (has isFatal(), errno, errstr).
 * @return bool  true to swallow the error, otherwise fall through to the
 *               remaining registered handlers.
 *
 * NOTE(review): source lines are elided in this view (original numbering
 * jumps 33->36->38->41); the branch structure shown here is incomplete —
 * confirm against the full file before editing.
 */
31 function _dump_error_handler(&$error) {
32 if ($error->isFatal()) {
33 $error->errno = E_USER_WARNING;
36 return true; // Ignore error
// Plugin-raised errors are also treated specially (body elided in this view).
38 if (preg_match('/Plugin/', $error->errstr))
41 // let the message come through: call the remaining handlers:
/**
 * Begin a load/dump HTML progress page: emits the page header (everything
 * up to the %BODY% placeholder) immediately so progress output can be
 * streamed/chunked while pages are processed, and initializes the marker
 * used later for deferred page-change notifications.
 *
 * @param WikiRequest &$request  Current request; silent no-op for MockRequest
 *                               (unit tests / command line).
 * @param string      $title     Title for the generated HTML page.
 * @param mixed       $html      Optional HTML content container; '%BODY%'
 *                               is appended and used as the cut point.
 *
 * NOTE(review): ereg_replace() below was deprecated in PHP 5.3 and removed
 * in PHP 7.0 — the equivalent is preg_replace('/%BODY%.*/s', '', ...).
 * Not changed here because intervening source lines are elided in this view.
 */
45 function StartLoadDump(&$request, $title, $html = '')
47 // MockRequest is from the unit testsuite, a faked request. (may be cmd-line)
48 // We are silent on unittests.
49 if (isa($request,'MockRequest'))
51 // FIXME: This is a hack. This really is the worst overall hack in phpwiki.
53 $html->pushContent('%BODY%');
54 $tmpl = Template('html', array('TITLE' => $title,
56 'CONTENT' => $html ? $html : '%BODY%'));
// Emit only the part of the expanded template before %BODY%, then flush.
57 echo ereg_replace('%BODY%.*', '', $tmpl->getExpansion($html));
58 $request->chunkOutput();
60 // set marker for sendPageChangeNotification()
61 $request->_deferredPageChangeNotification = array();
/**
 * Finish a load/dump progress page started by StartLoadDump(): prints a
 * "Complete." message with a return link appropriate to the action, sends
 * any page-change notification mails that were deferred during the run,
 * and closes the HTML document (with theme-specific markup fixups).
 *
 * @param WikiRequest &$request  Current request; no-op for MockRequest.
 *
 * NOTE(review): source lines are elided in this view (e.g. the switch
 * header around original line 72 and several closing braces); treat the
 * control flow below as incomplete.
 */
64 function EndLoadDump(&$request)
68 if (isa($request,'MockRequest'))
70 $action = $request->getArg('action');
// Map the action to a human-readable label for the "Return to" anchor.
73 case 'zip': $label = _("ZIP files of database"); break;
74 case 'dumpserial': $label = _("Dump to directory"); break;
75 case 'upload': $label = _("Upload File"); break;
76 case 'loadfile': $label = _("Load File"); break;
77 case 'upgrade': $label = _("Upgrade"); break;
79 case 'ziphtml': $label = _("Dump pages as XHTML"); break;
// Labels become anchor fragments; spaces are not valid there.
81 if ($label) $label = str_replace(" ","_",$label);
82 if ($action == 'browse') // loading virgin
83 $pagelink = WikiLink(HOME_PAGE);
85 $pagelink = WikiLink(new WikiPageName(_("PhpWikiAdministration"),false,$label));
87 // do deferred sendPageChangeNotification()
88 if (!empty($request->_deferredPageChangeNotification)) {
89 $pages = $all_emails = $all_users = array();
90 foreach ($request->_deferredPageChangeNotification as $p) {
91 list($pagename, $emails, $userids) = $p;
// Collapse all deferred notifications into one mail: union of recipients.
93 $all_emails = array_unique(array_merge($all_emails, $emails));
94 $all_users = array_unique(array_merge($all_users, $userids));
96 $editedby = sprintf(_("Edited by: %s"), $request->_user->getId());
97 $content = "Loaded the following pages:\n" . join("\n", $pages);
98 if (mail(join(',',$all_emails),"[".WIKI_NAME."] "._("LoadDump"),
102 trigger_error(sprintf(_("PageChange Notification of %s sent to %s"),
103 join("\n",$pages), join(',',$all_users)), E_USER_NOTICE);
105 trigger_error(sprintf(_("PageChange Notification Error: Couldn't send %s to %s"),
106 join("\n",$pages), join(',',$all_users)), E_USER_WARNING);
111 unset($request->_deferredPageChangeNotification);
113 PrintXML(HTML::p(HTML::strong(_("Complete."))),
114 HTML::p(fmt("Return to %s", $pagelink)));
115 // Ugly hack to get valid XHTML code
// Some themes open extra wrapper divs that must be closed here by hand.
116 if (isa($WikiTheme, 'WikiTheme_gforge')) {
123 if (isa($WikiTheme, 'WikiTheme_MonoBook')) {
129 if (isa($WikiTheme, 'WikiTheme_wikilens')) {
135 echo "</body></html>\n";
139 ////////////////////////////////////////////////////////////////
141 // Functions for dumping.
143 ////////////////////////////////////////////////////////////////
147 * http://www.nacs.uci.edu/indiv/ehood/MIME/2045/rfc2045.html
148 * http://www.faqs.org/rfcs/rfc2045.html
149 (RFC 1521 has been superseded by RFC 2045 & others).
151 * Also see http://www.faqs.org/rfcs/rfc2822.html
/**
 * Serialize a wiki page as an RFC 2045 / RFC 2822-style MIME message for
 * dump files: builds mail-like headers, then one MIME part per revision.
 *
 * @param WikiDB_Page $page       Page to serialize.
 * @param int         $nversions  Max number of revisions to include,
 *                                newest first; 0 (or negative) means all
 *                                revisions (used for full-archive dumps).
 * @return string  Headers plus either a single MIME part or a multipart
 *                 body when more than one revision is included.
 *
 * NOTE(review): lines are elided in this view (e.g. $head initialization
 * around original line 156 and loop-exit braces); body shown is incomplete.
 */
153 function MailifyPage ($page, $nversions = 1)
155 $current = $page->getCurrentRevision(false);
158 if (STRICT_MAILABLE_PAGEDUMPS) {
159 $from = defined('SERVER_ADMIN') ? SERVER_ADMIN : 'foo@bar';
160 //This is for unix mailbox format: (not RFC (2)822)
161 // $head .= "From $from " . CTime(time()) . "\r\n";
// Pagename is urlencoded so arbitrary page titles survive as a Subject.
162 $head .= "Subject: " . rawurlencode($page->getName()) . "\r\n";
163 $head .= "From: $from (PhpWiki)\r\n";
164 // RFC 2822 requires only a Date: and originator (From:)
165 // field, however the obsolete standard RFC 822 also
166 // requires a destination field.
167 $head .= "To: $from (PhpWiki)\r\n";
169 $head .= "Date: " . Rfc2822DateTime($current->get('mtime')) . "\r\n";
170 $head .= sprintf("Mime-Version: 1.0 (Produced by PhpWiki %s)\r\n",
173 // This should just be entered by hand (or by script?)
174 // in the actual pgsrc files, since only they should have
176 //$head .= "X-Rcs-Id: \$Id\$\r\n";
// Iterate newest-to-oldest; stop once $nversions parts were collected.
178 $iter = $page->getAllRevisions();
180 while ($revision = $iter->next()) {
181 $parts[] = MimeifyPageRevision($page, $revision);
182 if ($nversions > 0 && count($parts) >= $nversions)
185 if (count($parts) > 1)
186 return $head . MimeMultipart($parts);
188 return $head . $parts[0];
192 * Compute filename to used for storing contents of a wiki page.
194 * Basically we do a rawurlencode() which encodes everything except
195 * ASCII alphanumerics and '.', '-', and '_'.
197 * But we also want to encode leading dots to avoid filenames like
198 * '.', and '..'. (Also, there's no point in generating "hidden" file
199 * names, like '.foo'.)
201 * We have to apply a different "/" logic for dumpserial, htmldump and zipdump.
202 * dirs are allowed for zipdump and htmldump, not for dumpserial
205 * @param $pagename string Pagename.
206 * @return string Filename for page.
/**
 * Compute the filename used to store a wiki page's dump: rawurlencode()
 * the pagename, then re-allow "/" for dump modes that support
 * subdirectories (zip/ziphtml/dumphtml but not dumpserial), keep spaces
 * readable, and escape leading/trailing dots to avoid '.'/'..'/hidden
 * or Windows-hostile filenames.
 *
 * @param string      $pagename  Pagename.
 * @param string|bool $action    Dump action; when false it is read from
 *                               the global request (elided line ~211-212
 *                               presumably does `global $request` — TODO
 *                               confirm against the full file).
 * @return string  Filename for the page (return statement elided in view).
 */
208 function FilenameForPage ($pagename, $action = false)
210 $enc = rawurlencode($pagename);
213 $action = $request->getArg('action');
215 if ($action != 'dumpserial') { // zip, ziphtml, dumphtml
216 // For every %2F we will need to mkdir -p dirname($pagename)
217 $enc = preg_replace('/%2F/', '/', $enc);
// Leading dot would produce '.', '..' or hidden files.
219 $enc = preg_replace('/^\./', '%2E', $enc);
220 $enc = preg_replace('/%20/', ' ', $enc);
// Trailing dot is problematic on Windows filesystems.
221 $enc = preg_replace('/\.$/', '%2E', $enc);
226 * The main() function which generates a zip archive of a PhpWiki.
228 * If $include_archive is false, only the current version of each page
229 * is included in the zip file; otherwise all archived versions are
/**
 * Generate and stream a ZIP archive of the wiki's pages in MIME dump
 * format (see MailifyPage). With ?include=all every archived revision of
 * each page is included ("FullDump"); otherwise only the current revision
 * ("LatestSnapshot"). Honors ?pages= (glob list, '[]' = current page) and
 * ?exclude= filters. Fatal plugin errors are downgraded for the duration
 * of the run via _dump_error_handler.
 *
 * @param WikiRequest &$request  Current request (args: include, pages,
 *                               exclude, pagename, start_debug).
 *
 * NOTE(review): lines elided in this view (else branches, loop `continue`s,
 * the addRegularFile() argument tail, $zip->finish()); incomplete as shown.
 */
232 function MakeWikiZip (&$request)
234 global $ErrorManager;
235 if ($request->getArg('include') == 'all') {
236 $zipname = WIKI_NAME . _("FullDump") . date('Ymd-Hi') . '.zip';
237 $include_archive = true;
240 $zipname = WIKI_NAME . _("LatestSnapshot") . date('Ymd-Hi') . '.zip';
241 $include_archive = false;
243 $include_empty = false;
244 if ($request->getArg('include') == 'empty') {
245 $include_empty = true;
248 $zip = new ZipWriter("Created by PhpWiki " . PHPWIKI_VERSION, $zipname);
250 /* ignore fatals in plugins */
251 $ErrorManager->pushErrorHandler(new WikiFunctionCb('_dump_error_handler'));
253 $dbi =& $request->_dbi;
254 $thispage = $request->getArg('pagename'); // for "Return to ..."
255 if ($exclude = $request->getArg('exclude')) { // exclude which pagenames
256 $excludeList = explodePageList($exclude);
258 $excludeList = array();
260 if ($pages = $request->getArg('pages')) { // which pagenames
261 if ($pages == '[]') // current page
263 $page_iter = new WikiDB_Array_PageIterator(explodePageList($pages));
265 $page_iter = $dbi->getAllPages(false,false,false,$excludeList);
// Long-running loop: snapshot args and keep resetting the PHP time limit.
267 $request_args = $request->args;
268 $timeout = (! $request->getArg('start_debug')) ? 30 : 240;
270 while ($page = $page_iter->next()) {
271 $request->args = $request_args; // some plugins might change them (esp. on POST)
272 longer_timeout($timeout); // Reset watchdog
274 $current = $page->getCurrentRevision();
// Version 0 == page without content; skipped (skip line elided in view).
275 if ($current->getVersion() == 0)
278 $pagename = $page->getName();
279 $wpn = new WikiPageName($pagename);
280 if (!$wpn->isValid())
282 if (in_array($page->getName(), $excludeList)) {
// Preserve mtime (and lock flag) as zip entry metadata.
286 $attrib = array('mtime' => $current->get('mtime'),
288 if ($page->get('locked'))
289 $attrib['write_protected'] = 1;
291 if ($include_archive)
292 $content = MailifyPage($page, 0);
294 $content = MailifyPage($page);
296 $zip->addRegularFile( FilenameForPage($pagename),
301 $ErrorManager->popErrorHandler();
/**
 * Dump wiki pages as MIME files into a server-side directory (action
 * 'dumpserial'): one file per page, named by FilenameForPage(). Creates
 * the target directory if needed, streams per-page progress output via
 * StartLoadDump()/EndLoadDump(), honors ?pages=/?exclude=/?include=
 * (empty: include empty pages; all: include all revisions).
 *
 * @param WikiRequest &$request  Current request (args: directory, pages,
 *                               exclude, include, pagename, start_debug).
 *
 * NOTE(review): lines elided in this view (else branches, $msg init,
 * fclose(), loop closers); body shown is incomplete.
 */
304 function DumpToDir (&$request)
306 $directory = $request->getArg('directory');
307 if (empty($directory))
308 $directory = DEFAULT_DUMP_DIR; // See lib/plugin/WikiForm.php:87
309 if (empty($directory))
310 $request->finish(_("You must specify a directory to dump to"));
312 // see if we can access the directory the user wants us to use
313 if (! file_exists($directory)) {
314 if (! mkdir($directory, 0755))
315 $request->finish(fmt("Cannot create directory '%s'", $directory));
317 $html = HTML::p(fmt("Created directory '%s' for the page dump...",
320 $html = HTML::p(fmt("Using directory '%s'", $directory));
323 StartLoadDump($request, _("Dumping Pages"), $html);
325 $dbi =& $request->_dbi;
326 $thispage = $request->getArg('pagename'); // for "Return to ..."
327 if ($exclude = $request->getArg('exclude')) { // exclude which pagenames
328 $excludeList = explodePageList($exclude);
330 $excludeList = array();
332 $include_empty = false;
333 if ($request->getArg('include') == 'empty') {
334 $include_empty = true;
336 if ($pages = $request->getArg('pages')) { // which pagenames
337 if ($pages == '[]') // current page
339 $page_iter = new WikiDB_Array_PageIterator(explodePageList($pages));
341 $page_iter = $dbi->getAllPages($include_empty,false,false,$excludeList);
// Long-running loop: snapshot args and keep resetting the PHP time limit.
344 $request_args = $request->args;
345 $timeout = (! $request->getArg('start_debug')) ? 30 : 240;
347 while ($page = $page_iter->next()) {
348 $request->args = $request_args; // some plugins might change them (esp. on POST)
349 longer_timeout($timeout); // Reset watchdog
351 $pagename = $page->getName();
352 if (!isa($request,'MockRequest')) {
353 PrintXML(HTML::br(), $pagename, ' ... ');
357 if (in_array($pagename, $excludeList)) {
358 if (!isa($request, 'MockRequest')) {
359 PrintXML(_("Skipped."));
364 $filename = FilenameForPage($pagename);
// Tell the user when encoding changed the on-disk name.
366 if($page->getName() != $filename) {
367 $msg->pushContent(HTML::small(fmt("saved as %s", $filename)),
371 if ($request->getArg('include') == 'all')
372 $data = MailifyPage($page, 0);
374 $data = MailifyPage($page);
376 if ( !($fd = fopen($directory."/".$filename, "wb")) ) {
377 $msg->pushContent(HTML::strong(fmt("couldn't open file '%s' for writing",
378 "$directory/$filename")));
379 $request->finish($msg);
382 $num = fwrite($fd, $data, strlen($data));
383 $msg->pushContent(HTML::small(fmt("%s bytes written", $num)));
384 if (!isa($request, 'MockRequest')) {
// Sanity check: short write means disk full or similar.
388 assert($num == strlen($data));
392 EndLoadDump($request);
/**
 * Print a small per-file progress message during HTML dumps (suppressed
 * for MockRequest test runs). When $page is given the message is shown
 * on a new line prefixed with the page/file name.
 *
 * @param mixed $page      Page/file label, or falsy for a bare message.
 * @param mixed $smallmsg  Message content (HTML or string).
 * @return void
 *
 * NOTE(review): the PrintXML/flush tail of this function (original lines
 * ~399-402) is elided in this view.
 */
395 function _copyMsg($page, $smallmsg) {
396 if (!isa($GLOBALS['request'], 'MockRequest')) {
397 if ($page) $msg = HTML(HTML::br(), HTML($page), HTML::small($smallmsg));
398 else $msg = HTML::small($smallmsg);
/**
 * Recursively create a directory path, like `mkdir -p`: splits $pathname
 * on "/" and creates each missing intermediate component in turn.
 *
 * @param string $pathname    Slash-separated path to create.
 * @param int    $permission  Mode bits for created directories (octal).
 * @return bool  FALSE as soon as one mkdir fails; otherwise the result of
 *               the final mkdir (tail of function elided in this view).
 *
 * NOTE(review): lines elided (the single-component fast path condition,
 * $curr accumulation, dir-exists checks); body shown is incomplete.
 */
404 function mkdir_p($pathname, $permission = 0777) {
405 $arr = explode("/", $pathname);
// Single path component: plain mkdir is enough.
407 return mkdir($pathname, $permission);
409 $s = array_shift($arr);
411 foreach ($arr as $p) {
414 $ok = mkdir($curr, $permission);
416 if (!$ok) return FALSE;
422 * Dump all pages as XHTML to a directory, as pagename.html.
423 * Copies all used css files to the directory, all used images to a
424 * "images" subdirectory, and all used buttons to a "images/buttons" subdirectory.
425 * The webserver must have write permissions to these directories.
426 * chown httpd HTML_DUMP_DIR; chmod u+rwx HTML_DUMP_DIR
429 * @param string directory (optional) path to dump to. Default: HTML_DUMP_DIR
430 * @param string pages (optional) Comma-separated list of glob-style pagenames to dump.
431 * Also array of pagenames allowed.
432 * @param string exclude (optional) Comma-separated list of glob-style pagenames to exclude
/**
 * Dump pages as rendered XHTML files into a server-side directory (action
 * 'dumphtml'): resolves the target directory (default HTML_DUMP_DIR),
 * selects the page set (?pages=, or only the current page when not invoked
 * from PhpWikiAdministration, or all pages minus ?exclude=), then delegates
 * the actual rendering/copying to _DumpHtmlToDir in DUMP_MODE 'HTML'.
 *
 * @param WikiRequest &$request  Current request (args: directory, pages,
 *                               exclude, pagename).
 *
 * NOTE(review): lines elided in this view (global $WikiTheme, else/brace
 * lines); body shown is incomplete.
 */
434 function DumpHtmlToDir (&$request)
437 $directory = $request->getArg('directory');
438 if (empty($directory))
439 $directory = HTML_DUMP_DIR; // See lib/plugin/WikiForm.php:87
440 if (empty($directory))
441 $request->finish(_("You must specify a directory to dump to"));
443 // See if we can access the directory the user wants us to use
444 if (! file_exists($directory)) {
445 if (! mkdir($directory, 0755))
446 $request->finish(fmt("Cannot create directory '%s'", $directory));
448 $html = HTML::p(fmt("Created directory '%s' for the page dump...",
451 $html = HTML::p(fmt("Using directory '%s'", $directory));
453 StartLoadDump($request, _("Dumping Pages"), $html);
454 $thispage = $request->getArg('pagename'); // for "Return to ..."
456 $dbi =& $request->_dbi;
457 if ($exclude = $request->getArg('exclude')) { // exclude which pagenames
458 $excludeList = explodePageList($exclude);
// Default exclusions: debug pages that should not appear in public dumps.
460 $excludeList = array('DebugAuthInfo', 'DebugGroupInfo', 'AuthInfo');
462 if ($pages = $request->getArg('pages')) { // which pagenames
463 if ($pages == '[]') // current page
465 $page_iter = new WikiDB_Array_generic_iter(explodePageList($pages));
466 // not at admin page: dump only the current page
467 } elseif ($thispage != _("PhpWikiAdministration")) {
468 $page_iter = new WikiDB_Array_generic_iter(array($thispage));
470 $page_iter = $dbi->getAllPages(false,false,false,$excludeList);
// DUMP_MODE steers _DumpHtmlToDir and theme link generation; restore after.
473 $WikiTheme->DUMP_MODE = 'HTML';
474 _DumpHtmlToDir($directory, $page_iter, $request->getArg('exclude'));
475 $WikiTheme->DUMP_MODE = false;
477 $request->setArg('pagename',$thispage); // Template::_basepage fix
478 EndLoadDump($request);
481 /* Known problem: any plugins or other code which echo()s text will
482 * lead to a corrupted html zip file which may produce the following
483 * errors upon unzipping:
485 * warning [wikihtml.zip]: 2401 extra bytes at beginning or within zipfile
486 * file #58: bad zipfile offset (local header sig): 177561
487 * (attempting to re-compensate)
489 * However, the actual wiki page data should be unaffected.
/**
 * Generate a ZIP archive of pages rendered as XHTML (action 'ziphtml'):
 * picks the zip name (?zipname=, '.zip' enforced, default wikihtml.zip),
 * selects the page set, and delegates rendering to _DumpHtmlToDir with a
 * ZipWriter as target in DUMP_MODE 'ZIPHTML'.
 *
 * @param WikiRequest &$request  Current request (args: zipname, pages,
 *                               exclude, pagename).
 *
 * NOTE(review): lines elided in this view (global $WikiTheme, the ".zip"
 * suffix append after the preg_match, $zip->finish()); incomplete as shown.
 */
491 function MakeWikiZipHtml (&$request)
494 if ($request->getArg('zipname')) {
// basename() strips any user-supplied path components from the name.
495 $zipname = basename($request->getArg('zipname'));
496 if (!preg_match("/\.zip$/i", $zipname))
498 $request->setArg('zipname', false);
500 $zipname = "wikihtml.zip";
502 $zip = new ZipWriter("Created by PhpWiki " . PHPWIKI_VERSION, $zipname);
503 $dbi =& $request->_dbi;
504 $thispage = $request->getArg('pagename'); // for "Return to ..."
505 if ($pages = $request->getArg('pages')) { // which pagenames
506 if ($pages == '[]') // current page
508 $page_iter = new WikiDB_Array_generic_iter(explodePageList($pages));
510 $page_iter = $dbi->getAllPages(false,false,false,$request->getArg('exclude'));
513 $WikiTheme->DUMP_MODE = 'ZIPHTML';
514 _DumpHtmlToDir($zip, $page_iter, $request->getArg('exclude'));
515 $WikiTheme->DUMP_MODE = false;
519 * Internal html dumper. Used for dumphtml, ziphtml and pdf
/**
 * Internal HTML dump worker shared by dumphtml ($target = directory),
 * ziphtml ($target = ZipWriter) and pdf ($target = directory) modes, as
 * selected by $WikiTheme->DUMP_MODE. For each page: renders the 'browse'
 * template to XHTML, rewrites absolute image srcs to a local images/
 * subdirectory, writes the file (or zip entry), and aggressively drops
 * WikiDB caches to bound memory use. Afterwards copies theme-collected
 * images, buttons and CSS next to the dump, and in PDFHTML mode invokes
 * an external HTML-to-PDF converter over the produced files.
 *
 * @param mixed  $target     Output directory path, or ZipWriter object.
 * @param object $page_iter  Iterator of pages or pagenames to dump.
 * @param mixed  $exclude    Comma-separated glob list of pagenames to skip
 *                           (false = default debug-page exclusions).
 *
 * NOTE(review): many source lines are elided in this view (else branches,
 * closing braces, $silent/$already initialization, zip finish, pcl_zip
 * handling, EXTERNAL_HTML2PDF invocation details). The statement order
 * here is side-effect-critical (theme globals saved/restored, error
 * handler push/pop, cache invalidation) — do not restructure without the
 * complete file.
 */
521 function _DumpHtmlToDir ($target, $page_iter, $exclude = false)
523 global $WikiTheme, $request, $ErrorManager;
524 $silent = true; $zip = false; $directory = false;
// Decode the target from DUMP_MODE: plain dir for HTML/PDFHTML, zip object otherwise.
525 if ($WikiTheme->DUMP_MODE == 'HTML') {
526 $directory = $target;
528 } elseif ($WikiTheme->DUMP_MODE == 'PDFHTML') {
529 $directory = $target;
530 } elseif (is_object($target)) { // $WikiTheme->DUMP_MODE == 'ZIPHTML'
534 $request->_TemplatesProcessed = array();
535 if ($exclude) { // exclude which pagenames
536 $excludeList = explodePageList($exclude);
538 $excludeList = array('DebugAuthInfo', 'DebugGroupInfo', 'AuthInfo');
// VALID_LINKS restricts link rendering to pages actually in this dump.
540 $WikiTheme->VALID_LINKS = array();
541 if ($request->getArg('format')) { // pagelist
542 $page_iter_sav = $page_iter;
543 foreach ($page_iter_sav->asArray() as $handle) {
544 $WikiTheme->VALID_LINKS[] = is_string($handle) ? $handle : $handle->getName();
546 $page_iter_sav->reset();
549 if (defined('HTML_DUMP_SUFFIX'))
550 $WikiTheme->HTML_DUMP_SUFFIX = HTML_DUMP_SUFFIX;
// Save the theme's <body> attrs; restored at the end of this function.
551 $_bodyAttr = @$WikiTheme->_MoreAttr['body'];
552 unset($WikiTheme->_MoreAttr['body']);
554 $ErrorManager->pushErrorHandler(new WikiFunctionCb('_dump_error_handler'));
556 // check if the dumped file will be accessible from outside
557 $doc_root = $request->get("DOCUMENT_ROOT");
558 if ($WikiTheme->DUMP_MODE == 'HTML') {
559 $ldir = NormalizeLocalFileName($directory);
560 $wikiroot = NormalizeLocalFileName('');
// Prefer web-relative links when the dump dir is web-accessible,
// otherwise fall back to file:// links.
561 if (string_starts_with($ldir, $doc_root)) {
562 $link_prefix = substr($directory, strlen($doc_root))."/";
563 } elseif (string_starts_with($ldir, $wikiroot)) {
564 $link_prefix = NormalizeWebFileName(substr($directory, strlen($wikiroot)))."/";
568 $prefix = '/'; // . substr($doc_root,0,2); // add drive where apache is installed
570 $link_prefix = "file://".$prefix.$directory."/";
576 $request_args = $request->args;
577 $timeout = (! $request->getArg('start_debug')) ? 60 : 240;
580 $directory = str_replace("\\", "/", $directory); // no Win95 support.
581 @mkdir("$directory/images");
585 $already_images = array();
587 while ($page = $page_iter->next()) {
// Iterator may yield pagename strings (pagelist mode) or page objects.
588 if (is_string($page)) {
590 $page = $request->_dbi->getPage($pagename);
592 $pagename = $page->getName();
// First page is remembered to name the PDF output file.
594 if (empty($firstpage)) $firstpage = $pagename;
595 if (array_key_exists($pagename, $already))
597 $already[$pagename] = 1;
598 $current = $page->getCurrentRevision();
599 //if ($current->getVersion() == 0)
602 $request->args = $request_args; // some plugins might change them (esp. on POST)
603 longer_timeout($timeout); // Reset watchdog
606 $attrib = array('mtime' => $current->get('mtime'),
608 if ($page->get('locked'))
609 $attrib['write_protected'] = 1;
610 } elseif (!$silent) {
611 if (!isa($request,'MockRequest')) {
612 PrintXML(HTML::br(), $pagename, ' ... ');
616 if (in_array($pagename, $excludeList)) {
617 if (!$silent and !isa($request,'MockRequest')) {
618 PrintXML(_("Skipped."));
624 if ($WikiTheme->DUMP_MODE == 'PDFHTML')
625 $request->setArg('action', 'pdf'); // to omit cache headers
626 $request->setArg('pagename', $pagename); // Template::_basepage fix
627 $filename = FilenameForPage($pagename) . $WikiTheme->HTML_DUMP_SUFFIX;
628 $args = array('revision' => $current,
629 'CONTENT' => $current->getTransformedContent(),
630 'relative_base' => $relative_base);
631 // For every %2F will need to mkdir -p dirname($pagename)
632 if (preg_match("/(%2F|\/)/", $filename)) {
633 // mkdir -p and set relative base for subdir pages
634 $filename = preg_replace("/%2F/", "/", $filename);
635 $count = substr_count($filename, "/");
636 $dirname = dirname($filename);
638 mkdir_p($directory."/".$dirname);
639 // Fails with "XX / YY", "XX" is created, "XX / YY" cannot be written
640 // if (isWindows()) // interesting Windows bug: cannot mkdir "bla "
641 // Since dumps needs to be copied, we have to disallow this for all platforms.
642 $filename = preg_replace("/ \//", "/", $filename);
// One "../" per subdirectory level so relative asset links resolve.
643 $relative_base = "../";
645 $relative_base .= "../";
648 $args['relative_base'] = $relative_base;
// GeneratePageasXML may reset DUMP_MODE; save and restore it around the call.
652 $DUMP_MODE = $WikiTheme->DUMP_MODE;
653 $data = GeneratePageasXML(new Template('browse', $request, $args),
654 $pagename, $current, $args);
655 $WikiTheme->DUMP_MODE = $DUMP_MODE;
657 if (preg_match_all("/<img .*?src=\"(\/.+?)\"/", $data, $m)) {
658 // fix to local relative path for uploaded images, so that pdf will work
659 foreach ($m[1] as $img_file) {
660 $base = basename($img_file);
661 $data = str_replace('src="'.$img_file.'"','src="images/'.$base.'"', $data);
// Copy each referenced image only once per dump run.
662 if (array_key_exists($img_file, $already_images))
664 $already_images[$img_file] = 1;
665 // resolve src from webdata to file
666 $src = $doc_root . $img_file;
667 if (file_exists($src) and $base) {
669 $target = "$directory/images/$base";
670 if (copy($src, $target)) {
672 _copyMsg($img_file, fmt("... copied to %s", $target));
675 _copyMsg($img_file, fmt("... not copied to %s", $target));
678 $target = "images/$base";
679 $zip->addSrcFile($target, $src);
686 $outfile = $directory."/".$filename;
687 if ( !($fd = fopen($outfile, "wb")) ) {
688 $msg->pushContent(HTML::strong(fmt("couldn't open file '%s' for writing",
690 $request->finish($msg);
692 $len = strlen($data);
693 $num = fwrite($fd, $data, $len);
694 if ($pagename != $filename) {
695 $link = LinkURL($link_prefix.$filename, $filename);
696 $msg->pushContent(HTML::small(_("saved as "), $link, " ... "));
698 $msg->pushContent(HTML::small(fmt("%s bytes written", $num), "\n"));
700 if (!isa($request, 'MockRequest')) {
704 $request->chunkOutput();
706 assert($num == $len);
// Collected for the PDFHTML post-processing step below.
708 $outfiles[] = $outfile;
710 $zip->addRegularFile($filename, $data, $attrib);
// Bound memory growth on large wikis: drop every per-page cache we know of.
714 $request->_dbi->_cache->invalidate_cache($pagename);
715 unset ($request->_dbi->_cache->_pagedata_cache);
716 unset ($request->_dbi->_cache->_versiondata_cache);
717 unset ($request->_dbi->_cache->_glv_cache);
719 unset ($request->_dbi->_cache->_backend->_page_data);
722 unset($current->_transformedContent);
724 if (!empty($template)) {
725 unset($template->_request);
732 $attrib = false; //array('is_ascii' => 0);
// Copy theme images that were referenced while rendering the pages.
733 if (!empty($WikiTheme->dumped_images) and is_array($WikiTheme->dumped_images)) {
734 // @mkdir("$directory/images");
735 foreach ($WikiTheme->dumped_images as $img_file) {
736 if (array_key_exists($img_file, $already_images))
738 $already_images[$img_file] = 1;
740 and ($from = $WikiTheme->_findFile($img_file, true))
744 $target = "$directory/images/".basename($from);
746 copy($WikiTheme->_path . $from, $target);
748 if (copy($WikiTheme->_path . $from, $target)) {
749 _copyMsg($from, fmt("... copied to %s", $target));
751 _copyMsg($from, fmt("... not copied to %s", $target));
755 $target = "images/".basename($from);
756 $zip->addSrcFile($target, $WikiTheme->_path . $from);
758 } elseif (!$silent) {
759 _copyMsg($from, _("... not found"));
// Copy rendered navigation/button images into images/buttons/.
764 if (!empty($WikiTheme->dumped_buttons)
765 and is_array($WikiTheme->dumped_buttons))
769 @mkdir("$directory/images/buttons");
770 foreach ($WikiTheme->dumped_buttons as $text => $img_file) {
771 if (array_key_exists($img_file, $already_images))
773 $already_images[$img_file] = 1;
775 and ($from = $WikiTheme->_findFile($img_file, true))
779 $target = "$directory/images/buttons/".basename($from);
781 copy($WikiTheme->_path . $from, $target);
783 if (copy($WikiTheme->_path . $from, $target)) {
784 _copyMsg($from, fmt("... copied to %s", $target));
786 _copyMsg($from, fmt("... not copied to %s", $target));
790 $target = "images/buttons/".basename($from);
791 $zip->addSrcFile($target, $WikiTheme->_path . $from);
793 } elseif (!$silent) {
794 _copyMsg($from, _("... not found"));
// Copy theme CSS files next to the dumped pages.
798 if (!empty($WikiTheme->dumped_css) and is_array($WikiTheme->dumped_css)) {
799 foreach ($WikiTheme->dumped_css as $css_file) {
800 if (array_key_exists($css_file, $already_images))
802 $already_images[$css_file] = 1;
804 and ($from = $WikiTheme->_findFile(basename($css_file), true))
807 // TODO: fix @import url(main.css);
809 $target = "$directory/" . basename($css_file);
811 copy($WikiTheme->_path . $from, $target);
813 if (copy($WikiTheme->_path . $from, $target)) {
814 _copyMsg($from, fmt("... copied to %s", $target));
816 _copyMsg($from, fmt("... not copied to %s", $target));
820 //$attrib = array('is_ascii' => 0);
821 $target = basename($css_file);
822 $zip->addSrcFile($target, $WikiTheme->_path . $from);
824 } elseif (!$silent) {
825 _copyMsg($from, _("... not found"));
// PDFHTML: feed all produced XHTML files to the external HTML->PDF tool.
833 if ($WikiTheme->DUMP_MODE == 'PDFHTML') {
834 if (USE_EXTERNAL_HTML2PDF and $outfiles) {
835 $cmd = EXTERNAL_HTML2PDF_PAGELIST.' "'.join('" "', $outfiles).'"';
836 $filename = FilenameForPage($firstpage);
838 $tmpfile = $directory . "/createpdf.bat";
839 $fp = fopen($tmpfile, "wb");
840 fwrite($fp, $cmd . " > $filename.pdf");
843 if (!headers_sent()) {
844 Header('Content-Type: application/pdf');
848 $tmpdir = getUploadFilePath();
849 $s = passthru($cmd . " > $tmpdir/$filename.pdf");
850 $errormsg = "<br />\nGenerated <a href=\"".getUploadDataPath()."$filename.pdf\">Upload:$filename.pdf</a>\n";
// Intermediate XHTML files are only needed as converter input.
855 foreach($outfiles as $f) unlink($f);
858 if (!empty($errormsg)) {
859 $request->discardOutput();
860 $GLOBALS['ErrorManager']->_postponed_errors = array();
// Restore theme globals and the error handler pushed at the top.
864 $ErrorManager->popErrorHandler();
866 $WikiTheme->HTML_DUMP_SUFFIX = '';
867 $WikiTheme->DUMP_MODE = false;
868 $WikiTheme->_MoreAttr['body'] = $_bodyAttr;
872 ////////////////////////////////////////////////////////////////
874 // Functions for restoring.
876 ////////////////////////////////////////////////////////////////
/**
 * Store one parsed page (from a load/upload/pgsrc restore) into the
 * database: validates the pagename, applies page-level metadata, then
 * either saves a new revision, skips an identical one, or — on an edit
 * conflict with non-default content — hands off to
 * LoadFileConflictPageEditor / renders merge & overwrite buttons.
 *
 * @param WikiRequest &$request   Current request (args: merge, overwrite,
 *                                edit[keep_old|overwrite]).
 * @param array       &$pageinfo  Parsed page: keys 'pagename', 'content',
 *                                'pagedata' (page meta), 'versiondata'
 *                                (revision meta).
 * @param mixed       $source     Human-readable source description, also
 *                                sniffed for 'pgsrc' to credit ADMIN_USER.
 * @param string      $filename   Source filename (used by the merge
 *                                button logic further down).
 *
 * NOTE(review): many lines are elided in this view (early returns after
 * the error PrintXMLs, else branches, button argument tails). The
 * $overwite_all static (sic, typo kept for consistency) persists across
 * calls within one load run so the "overwrite all" hint is shown once.
 */
878 function SavePage (&$request, &$pageinfo, $source, $filename)
880 static $overwite_all = false;
881 $pagedata = $pageinfo['pagedata']; // Page level meta-data.
882 $versiondata = $pageinfo['versiondata']; // Revision level meta-data.
884 if (empty($pageinfo['pagename'])) {
885 PrintXML(HTML::p(HTML::strong(_("Empty pagename!"))));
889 if (empty($versiondata['author_id']))
890 $versiondata['author_id'] = $versiondata['author'];
892 // remove invalid backend specific chars. utf8 issues mostly
893 $pagename_check = new WikiPagename($pageinfo['pagename']);
894 if (!$pagename_check->isValid()) {
895 PrintXML(HTML::p(HTML::strong(_("Invalid pagename!")." ".$pageinfo['pagename'])));
898 $pagename = $pagename_check->getName();
899 $content = $pageinfo['content'];
// The InterWikiMap page gets the external map file appended on load.
901 if ($pagename == _("InterWikiMap"))
902 $content = _tryinsertInterWikiMap($content);
904 $dbi =& $request->_dbi;
905 $page = $dbi->getPage($pagename);
907 // Try to merge if updated pgsrc contents are different. This
908 // whole thing is hackish
910 // TODO: try merge unless:
911 // if (current contents = default contents && pgsrc_version >=
912 // pgsrc_version) then just upgrade this pgsrc
913 $needs_merge = false;
917 if ($request->getArg('merge')) {
920 else if ($request->getArg('overwrite')) {
924 $current = $page->getCurrentRevision();
926 $edit = $request->getArg('edit');
928 if (isset($edit['keep_old'])) {
932 elseif (isset($edit['overwrite'])) {
// Conflict: existing non-default content differs from the incoming content
// — delegate to the interactive conflict editor instead of saving.
936 elseif ( $current and (! $current->hasDefaultContents())
937 && ($current->getPackedContent() != $content) )
939 include_once('lib/editpage.php');
940 $request->setArg('pagename', $pagename);
941 $v = $current->getVersion();
942 $request->setArg('revision', $current->getVersion());
943 $p = new LoadFileConflictPageEditor($request);
944 $p->_content = $content;
945 $p->_currentVersion = $v - 1;
946 $p->editPage($saveFailed = true);
947 return; //early return
// Apply page-level metadata regardless of whether a new revision is made.
951 foreach ($pagedata as $key => $value) {
953 $page->set($key, $value);
956 $mesg = HTML::p(array('style' => 'text-indent: 3em;'));
958 $mesg->pushContent(' ', fmt("from %s", $source));
961 //FIXME: This should not happen! (empty vdata, corrupt cache or db)
962 $current = $page->getCurrentRevision();
964 if ($current->getVersion() == 0) {
965 $mesg->pushContent(' - ', _("New page"));
969 if ( (! $current->hasDefaultContents())
970 && ($current->getPackedContent() != $content) ) {
972 $mesg->pushContent(' ',
973 fmt("has edit conflicts - overwriting anyway"));
// pgsrc restores are credited to the admin user...
975 if (substr_count($source, 'pgsrc')) {
976 $versiondata['author'] = ADMIN_USER;
977 // but leave authorid as userid who loaded the file
981 if (isset($edit['keep_old'])) {
982 $mesg->pushContent(' ', fmt("keep old"));
984 $mesg->pushContent(' ', fmt("has edit conflicts - skipped"));
985 $needs_merge = true; // hackish, to display the buttons
990 else if ($current->getPackedContent() == $content
991 && $current->get('author') == $versiondata['author']) {
992 // The page metadata is already changed, we don't need a new revision.
993 // This was called previously "is identical to current version %d - skipped"
994 // which is wrong, since the pagedata was stored, not skipped.
995 $mesg->pushContent(' ',
996 fmt("content is identical to current version %d - no new revision created",
997 $current->getVersion()));
1004 // in case of failures print the culprit:
1005 if (!isa($request,'MockRequest')) {
1006 PrintXML(HTML::p(WikiLink($pagename))); flush();
1008 $new = $page->save($content, WIKIDB_FORCE_CREATE, $versiondata);
1010 $mesg->pushContent(' ', fmt("- saved to database as version %d",
1011 $new->getVersion()));
1015 // hackish, $source contains needed path+filename
// Strip the localized "... file %s" prefixes to recover the raw path.
1016 $f = str_replace(sprintf(_("MIME file %s"), ''), '', $f);
1017 $f = str_replace(sprintf(_("Serialized file %s"), ''), '', $f);
1018 $f = str_replace(sprintf(_("plain file %s"), ''), '', $f);
1019 //check if uploaded file? they pass just the content, but the file is gone
// Offer "merge" and "restore anyway" buttons when a conflict was skipped.
1022 $meb = Button(array('action' => 'loadfile',
1026 _("PhpWikiAdministration"),
1028 $owb = Button(array('action' => 'loadfile',
1031 _("Restore Anyway"),
1032 _("PhpWikiAdministration"),
1034 $mesg->pushContent(' ', $meb, " ", $owb);
1035 if (!$overwite_all) {
1036 $args = $request->getArgs();
1037 $args['overwrite'] = 1;
1038 $owb = Button($args,
1040 _("PhpWikiAdministration"),
1042 $mesg->pushContent(HTML::span(array('class' => 'hint'), $owb));
1043 $overwite_all = true;
1046 $mesg->pushContent(HTML::em(_(" Sorry, cannot merge.")));
1050 if (!isa($request,'MockRequest')) {
1052 PrintXML(HTML::p(HTML::em(WikiLink($pagename))), $mesg);
1059 // action=revert (by diff)
/**
 * Handle action=revert: restore an earlier revision of a page as a new
 * current revision. Flow: validate the version argument, refuse no-content
 * and same-version cases, honor cancel, show a Yes/Cancel confirmation
 * form unless ?verify=1 was posted, then save the old revision's content
 * as version currversion+1 and render the result.
 *
 * @param WikiRequest &$request  Current request (args: pagename, version,
 *                               verify, cancel).
 *
 * NOTE(review): lines elided in this view (the `if (!$version)` guard
 * before the first PrintXML, returns after each error PrintXML, closing
 * braces). HTML::Raw(' ') at original line 1102 looks like it originally
 * held an entity such as '&nbsp;' — confirm against the full file.
 */
1060 function RevertPage (&$request)
1062 $mesg = HTML::div();
1063 $pagename = $request->getArg('pagename');
1064 $version = $request->getArg('version');
1066 PrintXML(HTML::p(fmt("Revert")," ",WikiLink($pagename)),
1067 HTML::p(_("missing required version argument")));
1070 $dbi =& $request->_dbi;
1071 $page = $dbi->getPage($pagename);
1072 $current = $page->getCurrentRevision();
1073 $currversion = $current->getVersion();
1074 if ($currversion == 0) {
1075 $mesg->pushContent(' ', _("no page content"));
1076 PrintXML(HTML::p(fmt("Revert")," ",WikiLink($pagename)),
1081 if ($currversion == $version) {
1082 $mesg->pushContent(' ', _("same version page"));
1083 PrintXML(HTML::p(fmt("Revert")," ",WikiLink($pagename)),
1088 if ($request->getArg('cancel')) {
1089 $mesg->pushContent(' ', _("Cancelled"));
1090 PrintXML(HTML::p(fmt("Revert")," ",WikiLink($pagename)),
// Not yet confirmed: render the target revision plus a Yes/Cancel form.
1095 if (!$request->getArg('verify')) {
1096 $mesg->pushContent(HTML::p(fmt("Are you sure to revert %s to version $version?", WikiLink($pagename))),
1097 HTML::form(array('action' => $request->getPostURL(),
1098 'method' => 'post'),
1099 HiddenInputs($request->getArgs(), false, array('verify')),
1100 HiddenInputs(array('verify' => 1)),
1101 Button('submit:verify', _("Yes"), 'button'),
1102 HTML::Raw(' '),
1103 Button('submit:cancel', _("Cancel"), 'button'))
1105 $rev = $page->getRevision($version);
1106 $html = HTML(HTML::fieldset($mesg), HTML::hr(), $rev->getTransformedContent());
1107 $template = Template('browse',
1108 array('CONTENT' => $html));
1109 GeneratePage($template, $pagename, $rev);
1110 $request->checkValidators();
// Confirmed: copy the old revision's content/metadata into a new revision.
1114 $rev = $page->getRevision($version);
1115 $content = $rev->getPackedContent();
1116 $versiondata = $rev->_data;
1117 $versiondata['summary'] = sprintf(_("revert to version %d"), $version);
1118 $new = $page->save($content, $currversion + 1, $versiondata);
1121 $mesg = HTML::span();
1122 $pagelink = WikiLink($pagename);
1123 $mesg->pushContent(fmt("Revert: %s", $pagelink),
1124 fmt("- version %d saved to database as version %d",
1125 $version, $new->getVersion()));
1126 // Force browse of current page version.
1127 $request->setArg('version', false);
1128 $template = Template('savepage', array());
1129 $template->replace('CONTENT', $new->getTransformedContent());
1131 GeneratePage($template, $mesg, $new);
/**
 * When loading the InterWikiMap page, append the contents of the external
 * map file (INTERWIKI_MAP_FILE) inside a <verbatim> block — unless the
 * loaded content already contains one, the constant is undefined, or the
 * file is missing, in which case a notice is raised and the content is
 * returned unchanged (return paths elided in this view).
 *
 * @param string $content  Page content as loaded.
 * @return string  Content, possibly with the map file appended.
 *
 * NOTE(review): $goback initialization and the early-return/goback lines
 * are elided in this view; the guard structure shown is incomplete. Also
 * note fclose() for the fopen below is not visible in this view.
 */
1135 function _tryinsertInterWikiMap($content) {
1137 if (strpos($content, "<verbatim>")) {
1138 //$error_html = " The newly loaded pgsrc already contains a verbatim block.";
1141 if (!$goback && !defined('INTERWIKI_MAP_FILE')) {
1142 $error_html = sprintf(" "._("%s: not defined"), "INTERWIKI_MAP_FILE");
1145 $mapfile = FindFile(INTERWIKI_MAP_FILE,1);
1146 if (!$goback && !file_exists($mapfile)) {
1147 $error_html = sprintf(" "._("%s: file not found"), INTERWIKI_MAP_FILE);
1151 if (!empty($error_html))
1152 trigger_error(_("Default InterWiki map file not loaded.")
1153 . $error_html, E_USER_NOTICE);
1157 // if loading from virgin setup do echo, otherwise trigger_error E_USER_NOTICE
1158 if (!isa($GLOBALS['request'], 'MockRequest'))
1159 echo sprintf(_("Loading InterWikiMap from external file %s."), $mapfile),"<br />";
1161 $fd = fopen ($mapfile, "rb");
1162 $data = fread ($fd, filesize($mapfile));
1164 $content = $content . "\n<verbatim>\n$data</verbatim>\n";
/**
 * Convert an old-style PHP-serialized page dump into a $pageinfo array
 * with 'pagedata' (page-level meta) and 'versiondata' (revision-level
 * meta) sub-arrays plus top-level keys such as 'pagename' and 'content'.
 *
 * The regex guard ensures the text at least looks like a serialized
 * array before unserialize() is called (unserialize() on arbitrary
 * input is unsafe).
 *
 * @param string $text             candidate serialized page dump
 * @param string $default_pagename used when the dump carries no pagename
 * @param object $user             current user; supplies the default author id
 */
1168 function ParseSerializedPage($text, $default_pagename, $user)
// Cheap shape check: must start like "a:<n>:{s:..." or "a:<n>:{i:...".
1170 if (!preg_match('/^a:\d+:{[si]:\d+/', $text))
1173 $pagehash = unserialize($text);
1175 // Split up pagehash into four parts:
1178 // page-level meta-data
1179 // revision-level meta-data
// NOTE(review): both flags are defined to 1 here, so the two bit-tests
// further below cannot be distinguished; FLAG_PAGE_EXTERNAL presumably
// should be a distinct bit (e.g. 2) — verify against the dump format.
1181 if (!defined('FLAG_PAGE_LOCKED'))
1182 define('FLAG_PAGE_LOCKED', 1);
1183 if (!defined('FLAG_PAGE_EXTERNAL'))
1184 define('FLAG_PAGE_EXTERNAL', 1);
1185 $pageinfo = array('pagedata' => array(),
1186 'versiondata' => array());
// References, so writes below land directly in $pageinfo.
1188 $pagedata = &$pageinfo['pagedata'];
1189 $versiondata = &$pageinfo['versiondata'];
1191 // Fill in defaults.
1192 if (empty($pagehash['pagename']))
1193 $pagehash['pagename'] = $default_pagename;
1194 if (empty($pagehash['author'])) {
1195 $pagehash['author'] = $user->getId();
// Route each key of the dump into the right sub-array.
1198 foreach ($pagehash as $key => $value) {
1203 $pageinfo[$key] = $value;
1206 $pageinfo[$key] = join("\n", $value);
// Flag word: unpack locked/external bits into pagedata.
1209 if (($value & FLAG_PAGE_LOCKED) != 0)
1210 $pagedata['locked'] = 'yes';
1211 if (($value & FLAG_PAGE_EXTERNAL) != 0)
1212 $pagedata['external'] = 'yes';
1216 $pagedata[$key] = $value;
1220 $pagedata['perm'] = ParseMimeifiedPerm($value);
1222 case 'lastmodified':
1223 $versiondata['mtime'] = $value;
1228 $versiondata[$key] = $value;
// Charset normalization: old dumps default to utf-8.
1232 if (empty($pagehash['charset']))
1233 $pagehash['charset'] = 'utf-8';
1234 // compare to target charset
1235 if (strtolower($pagehash['charset']) != strtolower($GLOBALS['charset'])) {
// NOTE(review): $params is undefined in this function; these calls
// almost certainly mean $pagehash['charset'].
1236 $pageinfo['content'] = charset_convert($params['charset'], $GLOBALS['charset'], $pageinfo['content']);
1237 $pageinfo['pagename'] = charset_convert($params['charset'], $GLOBALS['charset'], $pageinfo['pagename']);
/**
 * usort() comparator: order page-info arrays by ascending 'version'.
 *
 * Returns a negative, zero or positive integer as $a's version is
 * less than, equal to or greater than $b's.
 */
function SortByPageVersion ($a, $b) {
    $va = $a['version'];
    $vb = $b['version'];
    return $va - $vb;
}
1247 * Security alert! We must not allow importing config.ini into our wiki (nor from a sister wiki),
1248 * because the SQL passwords are stored there in plaintext, and the webserver must be able to read the file.
1249 * Detected by Santtu Jarvi.
/**
 * Import one file's content into the wiki, auto-detecting its format:
 * MIME-ified page dump(s), an old PHP-serialized page dump, or plain text.
 *
 * Security: refuses to import anything that looks like config/config*.ini
 * (plaintext SQL passwords, readable by the webserver).
 *
 * @param object $request
 * @param string $filename source path (also used for the default pagename)
 * @param string|false $text file content; read from $filename when false
 * @param int|false $mtime last-modified time; falls back to time()
 */
1251 function LoadFile (&$request, $filename, $text = false, $mtime = false)
1253 if (preg_match("/config$/", dirname($filename)) // our or other config
1254 and preg_match("/config.*\.ini/", basename($filename))) // backups and other versions also
1256 trigger_error(sprintf("Refused to load %s", $filename), E_USER_WARNING);
// No content passed in: read it from disk.
1259 if (!is_string($text)) {
1261 $stat = stat($filename);
1263 $text = implode("", file($filename));
1266 if (! $request->getArg('start_debug')) @set_time_limit(30); // Reset watchdog
1267 else @set_time_limit(240);
1269 // FIXME: basename("filewithnoslashes") seems to return garbage sometimes.
1270 $basename = basename("/dummy/" . $filename);
1273 $mtime = time(); // Last resort.
1275 // DONE: check source - target charset for content and pagename
1276 // but only for pgsrc'ed content, not from the browser.
1278 $default_pagename = rawurldecode($basename);
// Format 1: one or more MIME-ified pages in a single file.
1279 if ( ($parts = ParseMimeifiedPages($text)) ) {
// Multi-page dumps force overwrite per page; remember the caller's setting.
1280 if (count($parts) > 1)
1281 $overwrite = $request->getArg('overwrite');
// Save older versions first so version numbering comes out right.
1282 usort($parts, 'SortByPageVersion');
1283 foreach ($parts as $pageinfo) {
1285 if (count($parts) > 1)
1286 $request->setArg('overwrite', 1);
1287 SavePage($request, $pageinfo, sprintf(_("MIME file %s"),
1288 $filename), $basename);
// Restore the caller's overwrite argument afterwards.
1290 if (count($parts) > 1)
1292 $request->setArg('overwrite', $overwrite);
1294 unset($request->_args['overwrite']);
// Format 2: old PHP-serialized page dump.
1296 else if ( ($pageinfo = ParseSerializedPage($text, $default_pagename,
1297 $request->getUser())) ) {
1298 SavePage($request, $pageinfo, sprintf(_("Serialized file %s"),
1299 $filename), $basename);
// Format 3: plain text; the pagename derives from the file name.
1303 $user = $request->getUser();
// Source files are assumed to be utf-8 — TODO confirm for non-pgsrc sources.
1305 $file_charset = 'utf-8';
1306 // compare to target charset
1307 if ($file_charset != strtolower($GLOBALS['charset'])) {
1308 $text = charset_convert($file_charset, $GLOBALS['charset'], $text);
1309 $default_pagename = charset_convert($file_charset, $GLOBALS['charset'], $default_pagename);
1312 // Assume plain text file.
1313 $pageinfo = array('pagename' => $default_pagename,
1314 'pagedata' => array(),
1316 => array('author' => $user->getId()),
// Normalize trailing whitespace and CR before save.
1317 'content' => preg_replace('/[ \t\r]*\n/', "\n",
1320 SavePage($request, $pageinfo, sprintf(_("plain file %s"), $filename),
/**
 * Import every member of a zip archive as a wiki page via LoadFile().
 *
 * @param object $request
 * @param string|resource $zipfile archive filename or handle (passed to ZipReader — TODO confirm both accepted)
 * @param array|false $files   if set, only these basenames are loaded
 * @param array|false $exclude if set, these basenames are skipped
 */
1325 function LoadZip (&$request, $zipfile, $files = false, $exclude = false) {
1326 $zip = new ZipReader($zipfile);
// Debug runs (start_debug) get a much longer per-page watchdog.
1327 $timeout = (! $request->getArg('start_debug')) ? 20 : 120;
1328 while (list ($fn, $data, $attrib) = $zip->readFile()) {
1329 // FIXME: basename("filewithnoslashes") seems to return
1330 // garbage sometimes.
1331 $fn = basename("/dummy/" . $fn);
// Honor the include/exclude lists; report skipped entries.
1332 if ( ($files && !in_array($fn, $files))
1333 || ($exclude && in_array($fn, $exclude)) ) {
1334 PrintXML(HTML::p(WikiLink($fn)),
1335 HTML::p(_("Skipping")));
1339 longer_timeout($timeout); // longer timeout per page
// The zip member's mtime becomes the page revision's mtime.
1340 LoadFile($request, $fn, $data, $attrib['mtime']);
/**
 * Import all files of a directory (filtered through LimitedFileSet) as
 * wiki pages. HOME_PAGE is deliberately loaded last, so a partially
 * failed import can simply be re-run.
 *
 * @param array|false $files   if set, only these files are loaded
 * @param array|false $exclude if set, these files are skipped
 */
1344 function LoadDir (&$request, $dirname, $files = false, $exclude = false) {
1345 $fileset = new LimitedFileSet($dirname, $files, $exclude);
// Report what the fileset filtered out (only when no explicit list given).
1347 if (!$files and ($skiplist = $fileset->getSkippedFiles())) {
1348 PrintXML(HTML::p(HTML::strong(_("Skipping"))));
// NOTE(review): $list is used below without a visible initialization —
// presumably an HTML list element; verify it is created before this loop.
1350 foreach ($skiplist as $file)
1351 $list->pushContent(HTML::li(WikiLink($file)));
1352 PrintXML(HTML::p($list));
1355 // Defer HomePage loading until the end. If anything goes wrong
1356 // the pages can still be loaded again.
1357 $files = $fileset->getFiles();
1358 if (in_array(HOME_PAGE, $files)) {
1359 $files = array_diff($files, array(HOME_PAGE));
1360 $files[] = HOME_PAGE;
1362 $timeout = (! $request->getArg('start_debug')) ? 20 : 120;
1363 foreach ($files as $file) {
1364 longer_timeout($timeout); // longer timeout per page
1365 if (substr($file,-1,1) != '~') // refuse to load backup files
1366 LoadFile($request, "$dirname/$file");
/**
 * FileSet specialization that filters filenames against an include and
 * an exclude list, remembering which names were skipped so callers
 * (LoadDir) can report them.
 */
1370 class LimitedFileSet extends FileSet {
    // Old-style (PHP4) constructor, matching the parent FileSet class.
1371 function LimitedFileSet($dirname, $_include, $exclude) {
1372 $this->_includefiles = $_include;
1373 $this->_exclude = $exclude;
1374 $this->_skiplist = array();
1375 parent::FileSet($dirname);
    // Selector callback used by FileSet: record and reject filtered names.
1378 function _filenameSelector($fn) {
1379 $incl = &$this->_includefiles;
1380 $excl = &$this->_exclude;
1382 if ( ($incl && !in_array($fn, $incl))
1383 || ($excl && in_array($fn, $excl)) ) {
1384 $this->_skiplist[] = $fn;
    // Filenames rejected by _filenameSelector(), in encounter order.
1391 function getSkippedFiles () {
1392 return $this->_skiplist;
/**
 * Heuristic zip detection: compare the first four bytes against the
 * zip local-file / central-directory header magics (constants from
 * lib/ziplib.php, required at the top of this file).
 *
 * Accepts a filename or an already open, seekable file handle; the
 * handle's read position is restored afterwards.
 */
1397 function IsZipFile ($filename_or_fd)
1399 // See if it looks like zip file
1400 if (is_string($filename_or_fd))
1402 $fd = fopen($filename_or_fd, "rb");
1403 $magic = fread($fd, 4);
// Handle case: peek at 4 bytes, then seek back to the saved position.
1408 $fpos = ftell($filename_or_fd);
1409 $magic = fread($filename_or_fd, 4);
1410 fseek($filename_or_fd, $fpos);
1413 return $magic == ZIP_LOCHEAD_MAGIC || $magic == ZIP_CENTHEAD_MAGIC;
/**
 * Dispatch an import source to the right loader: directory -> LoadDir,
 * zip archive -> LoadZip, anything else -> LoadFile.
 *
 * If the path does not exist as given, it is retried with rawurlencoded
 * name components (browsers may have decoded e.g. San%20Diego).
 */
1417 function LoadAny (&$request, $file_or_dir, $files = false, $exclude = false)
1419 // Try urlencoded filename for accented characters.
1420 if (!file_exists($file_or_dir)) {
1421 // Make sure there are slashes first to avoid confusing phps
1422 // with broken dirname or basename functions.
1423 // FIXME: windows uses \ and :
1424 if (is_integer(strpos($file_or_dir, "/"))) {
1425 $newfile = FindFile($file_or_dir, true);
1426 // Panic. urlencoded by the browser (e.g. San%20Diego => San Diego)
1428 $file_or_dir = dirname($file_or_dir) . "/"
1429 . rawurlencode(basename($file_or_dir));
1431 // This is probably just a file.
1432 $file_or_dir = rawurlencode($file_or_dir);
1436 $type = filetype($file_or_dir);
1437 if ($type == 'link') {
1438 // For symbolic links, use stat() to determine
1439 // the type of the underlying file.
1440 list(,,$mode) = stat($file_or_dir);
// File-type nibble of st_mode (>> 12): octal 004 = directory (POSIX S_IFMT).
1441 $type = ($mode >> 12) & 017;
1444 elseif ($type == 004)
1449 $request->finish(fmt("Empty or not existing source. Unable to load: %s", $file_or_dir));
1451 else if ($type == 'dir') {
1452 LoadDir($request, $file_or_dir, $files, $exclude);
// http/ftp URLs are let through to LoadFile even though filetype() is not 'file'.
1454 else if ($type != 'file' && !preg_match('/^(http|ftp):/', $file_or_dir))
1456 $request->finish(fmt("Bad file type: %s", $type));
1458 else if (IsZipFile($file_or_dir)) {
1459 LoadZip($request, $file_or_dir, $files, $exclude);
1461 else /* if (!$files || in_array(basename($file_or_dir), $files)) */
1463 LoadFile($request, $file_or_dir);
/**
 * Action handler: import a single file or a whole directory tree.
 *
 * Reads the 'source' request argument, normalizes its path separators,
 * prints the "Loading ..." dump header, delegates the actual import to
 * LoadAny(), and closes the dump output.
 */
function LoadFileOrDir (&$request)
{
    $source = $request->getArg('source');
    $finder = new FileFinder;
    $source = $finder->slashifyPath($source);
    $page = rawurldecode(basename($source));
    $dir = dirname($source);
    // Header shows "dir/PageName" with the page part as a wiki link.
    StartLoadDump($request,
                  fmt("Loading '%s'",
                      HTML($dir,
                           $dir ? "/" : "",
                           WikiLink($page, 'auto'))));
    LoadAny($request, $source);
    EndLoadDump($request);
}
1482 * HomePage was not found, so the first-time install is supposed to run.
1483 * - import all pgsrc pages.
1484 * - Todo: installer interface to edit config/config.ini settings
1485 * - Todo: ask for existing old index.php to convert to config/config.ini
1486 * - Todo: theme-specific pages:
1487 * blog - HomePage, ADMIN_USER/Blogs
/**
 * First-time install: populate a virgin wiki from the pgsrc page dumps.
 *
 * Temporarily impersonates ADMIN_USER (without touching session state),
 * loads the generic default pages plus the localized pgsrc set, makes
 * sure every mandatory/action page exists (falling back to the English
 * version), then locks the InterWikiMap page.
 */
1489 function SetupWiki (&$request)
1491 global $GenericPages, $LANG;
1493 //FIXME: This is a hack (err, "interim solution")
1494 // This is a bogo-bogo-login: Login without
1495 // saving login information in session state.
1496 // This avoids logging in the unsuspecting
1497 // visitor as ADMIN_USER
1499 // This really needs to be cleaned up...
1500 // (I'm working on it.)
1501 $real_user = $request->_user;
1502 if (ENABLE_USER_NEW)
1503 $request->_user = new _BogoUser(ADMIN_USER);
1506 $request->_user = new WikiUser($request, ADMIN_USER, WIKIAUTH_BOGO);
1508 StartLoadDump($request, _("Loading up virgin wiki"));
1510 $pgsrc = FindLocalizedFile(WIKI_PGSRC);
1511 $default_pgsrc = FindFile(DEFAULT_WIKI_PGSRC);
// Generic defaults may be overwritten by the localized set loaded next.
1513 $request->setArg('overwrite', true);
1514 if ($default_pgsrc != $pgsrc) {
1515 LoadAny($request, $default_pgsrc, $GenericPages);
1517 $request->setArg('overwrite', false);
1518 LoadAny($request, $pgsrc);
1519 $dbi =& $request->_dbi;
1521 // Ensure that all mandatory pages are loaded
1522 $finder = new FileFinder;
// The mandatory page list varies per wiki flavor (WIKI_NAME).
1525 $mandatory = explode(':','SandBox:Template/Category:TemplateTalk:SpecialPages:CategoryCategory:CategoryActionPage:Help/OldTextFormattingRules:Help/TextFormattingRules:PhpWikiAdministration');
1526 } else if (WIKI_NAME == "help") {
1527 $mandatory = explode(':','SandBox:Template/Category:TemplateTalk:SpecialPages:CategoryCategory:CategoryActionPage:Help/TextFormattingRules:PhpWikiAdministration');
1529 $mandatory = explode(':','SandBox:Template/Category:TemplateTalk:SpecialPages:CategoryCategory:CategoryActionPage:TextFormattingRules:PhpWikiAdministration');
// Mandatory + all action pages + the home page must all exist afterwards.
1531 foreach (array_merge($mandatory,
1532 $GLOBALS['AllActionPages'],
1533 array(constant('HOME_PAGE'))) as $f)
1535 $page = gettext($f);
1536 $epage = urlencode($page);
1537 if (! $dbi->isWikiPage($page) ) {
1538 // translated version provided?
1539 if ($lf = FindLocalizedFile($pgsrc . $finder->_pathsep . $epage, 1)) {
1540 LoadAny($request, $lf);
1541 } else { // load english version of required action page
1542 LoadAny($request, FindFile(DEFAULT_WIKI_PGSRC . $finder->_pathsep . urlencode($f)));
// Still missing after both attempts: report the failure.
1546 if (! $dbi->isWikiPage($page)) {
1547 trigger_error(sprintf("Mandatory file %s couldn't be loaded!", $page),
// Lock the InterWikiMap page against casual edits.
1552 $pagename = _("InterWikiMap");
1553 $map = $dbi->getPage($pagename);
1554 $map->set('locked', true);
1555 PrintXML(HTML::p(HTML::em(WikiLink($pagename)), HTML::strong(" locked")));
1556 EndLoadDump($request);
/**
 * Action handler: import a browser-uploaded file (zip archive or a
 * single page file). Finishes the request early when no file arrived.
 */
1559 function LoadPostFile (&$request)
1561 $upload = $request->getUploadedFile('file');
1564 $request->finish(_("No uploaded file to upload?")); // FIXME: more concise message
1566 // Dump http headers.
1567 StartLoadDump($request, sprintf(_("Uploading %s"), $upload->getName()));
1569 $fd = $upload->open();
// Zip uploads stream from the handle; RecentChanges is excluded,
// presumably to avoid importing a foreign change history — verify.
1571 LoadZip($request, $fd, false, array(_("RecentChanges")));
// Plain uploads are loaded whole, named after the uploaded file.
1573 LoadFile($request, $upload->getName(), $upload->getContents());
1575 EndLoadDump($request);
1582 // c-basic-offset: 4
1583 // c-hanging-comment-ender-p: nil
1584 // indent-tabs-mode: nil