2 // display.php: fetch page or get default content
3 rcs_id('$Id: display.php,v 1.68 2007-01-20 11:25:19 rurban Exp $');
5 require_once('lib/Template.php');
8 * Extract keywords from Category* links on page.
// Build a comma-separated keyword string for the page (used for the
// PAGE_KEYWORDS <meta> token in displayPage below). Combines: the split
// pagename, the names of linked pages matching the KEYWORDS search
// expression (typically Category*/Topic* links), and WIKI_NAME.
// Returns '' when the KEYWORDS constant is not configured.
// NOTE(review): excerpt — the closing braces of the while loop and of the
// function are not visible in this view of the file.
10 function GleanKeywords ($page) {
11 if (!defined('KEYWORDS')) return '';
12 include_once("lib/TextSearchQuery.php");
// Turn the KEYWORDS search expression into a single regexp once, up front.
13 $search = new TextSearchQuery(KEYWORDS, true);
14 $KeywordLinkRegexp = $search->asRegexp();
15 // iterate over the pagelinks (could be a large number) [15ms on PluginManager]
16 // or do a titleSearch and check the categories if they are linked?
17 $links = $page->getPageLinks();
// Seed the keyword list with the page's own (word-split) name.
18 $keywords[] = SplitPagename($page->getName());
19 while ($link = $links->next()) {
// $m[0] is the whole regexp match within the linked page's name.
20 if (preg_match($KeywordLinkRegexp, $link->getName(), $m))
21 $keywords[] = SplitPagename($m[0]);
23 $keywords[] = WIKI_NAME;
24 return join(', ', $keywords);
27 /** Make a link back to redirecting page.
29 * @param $pagename string Name of redirecting page.
30 * @return XmlContent Link to the redirecting page.
32 function RedirectorLink($pagename) {
// 'redirectfrom' => '' — presumably an empty redirectfrom arg suppresses a
// second redirect when the user follows this link back; TODO confirm against
// the RedirectTo handling.
33 $url = WikiURL($pagename, array('redirectfrom' => ''));
// NOTE(review): excerpt — the remainder of this HTML::a() call (href/content
// arguments and the function's closing brace) is not visible in this view.
34 return HTML::a(array('class' => 'redirectfrom wiki',
// Render an "action page" (a wiki page such as RecentChanges used as an
// action) against the currently requested page.
//
// @param $request WikiRequest (by reference) — supplies pagename, version,
//                 format args and the DB handle.
// @param $action  string Name of the action page to fetch and expand.
//
// Emits the generated page directly (GeneratePage / format writers print);
// no return value is used by callers visible here.
// NOTE(review): excerpt — several original lines (closing braces, the
// apparent $rdf->format()/$model calls after each writer is constructed, and
// the trailing else before the "Unhandled format" warning) are not visible
// in this view; comments below describe only what is shown.
40 function actionPage(&$request, $action) {
43 $pagename = $request->getArg('pagename');
44 $version = $request->getArg('version');
// The page the user is viewing, and its current revision.
46 $page = $request->getPage();
47 $revision = $page->getCurrentRevision();
// The action page itself (e.g. "RecentChanges") fetched from the DB.
49 $dbi = $request->getDbh();
50 $actionpage = $dbi->getPage($action);
51 $actionrev = $actionpage->getCurrentRevision();
// Title: "<ActionPage>: <LinkToCurrentPage>".
53 $pagetitle = HTML(fmt("%s: %s",
54 $actionpage->getName(),
55 $WikiTheme->linkExistingWikiWord($pagename, false, $version)));
// HTTP cache validators: both the viewed page's revision and the action
// page's revision invalidate the cached response.
57 $validators = new HTTP_ValidatorSet(array('pageversion' => $revision->getVersion(),
58 '%mtime' => $revision->get('mtime')));
60 $request->appendValidators(array('pagerev' => $revision->getVersion(),
61 '%mtime' => $revision->get('mtime')));
62 $request->appendValidators(array('actionpagerev' => $actionrev->getVersion(),
63 '%mtime' => $actionrev->get('mtime')));
// Expand the action page's markup (plugins included) into cached content.
65 $transformedContent = $actionrev->getTransformedContent();
67 /* Optionally tell google (and others) not to take notice of action pages.
68 RecentChanges or AllPages might be an exception.
71 if (GOOGLE_LINKS_NOFOLLOW)
72 $args = array('ROBOTS_META' => "noindex,nofollow");
74 /* Handle other formats: So far we had html only.
75 xml is requested by loaddump, rss is handled by recentchanges,
76 pdf is a special action, but should be a format to dump multiple pages
77 if the actionpage plugin returns a pagelist.
78 rdf and owl are handled by SemanticWeb.
80 $format = $request->getArg('format');
81 /* At first the single page formats: html, xml */
82 if (!$format or $format == 'html') {
83 $template = Template('browse', array('CONTENT' => $transformedContent));
84 GeneratePage($template, $pagetitle, $revision, $args);
85 } elseif ($format == 'xml') {
86 $template = Template('browse', array('CONTENT' => $transformedContent));
87 GeneratePageAsXML($template, $pagetitle, $revision, $args);
90 // Then the multi-page formats
91 // rss (if not already handled by RecentChanges)
92 // Need the pagelist from the first plugin
// Scan the cached content for the first plugin invocation and re-expand it
// to obtain a PageList for the multi-page formats below.
93 foreach($transformedContent->_content as $cached_element) {
94 if (is_a($cached_element, "Cached_PluginInvocation")) {
95 include_once('lib/WikiPlugin.php');
96 $loader = new WikiPluginLoader;
98 // return the first found pagelist
99 $pagelist = $loader->expandPI($cached_element->_pi, $request, $markup, $pagename);
100 if (is_a($pagelist, 'PageList'))
// No PageList produced: warn (except for feed formats) and fall back to a
// one-page list containing just the current page.
104 if (!$pagelist or !is_a($pagelist, 'PageList')) {
105 if (!in_array($format, array("atom","rss","rdf")))
106 trigger_error(sprintf("Format %s requires an actionpage returning an pagelist.", $format)
107 . ("Fall back to single page mode"), E_USER_WARNING);
108 $pagelist = new PageList();
109 $pagelist->addPage($page);
// Dispatch on the requested multi-page output format. Each branch lazily
// loads the library that declares the corresponding writer class.
111 if ($format == 'pdf') {
112 include_once("lib/pdf.php");
113 ConvertAndDisplayPdfPageList($request, $pagelist);
114 } elseif ($format == 'rss') {
115 include_once("lib/plugin/RecentChanges.php");
116 $rdf = new RssWriter($request, $pagelist);
118 } elseif ($format == 'rss91') {
119 include_once("lib/plugin/RecentChanges.php");
120 $rdf = new RSS91Writer($request, $pagelist);
122 } elseif ($format == 'rss2') {
123 include_once("lib/RssWriter2.php");
124 $rdf = new RssWriter2($request, $pagelist);
126 } elseif ($format == 'atom') {
127 include_once("lib/plugin/RssWriter.php");
128 $rdf = new AtomWriter($request, $pagelist);
130 } elseif ($format == 'rdf') { // all semantic relations and attributes
131 include_once("lib/SemanticWeb.php");
132 $rdf = new RdfWriter($request, $pagelist);
134 } elseif ($format == 'owl') {
135 include_once("lib/SemanticWeb.php");
136 $rdf = new OwlWriter($request, $pagelist);
138 } elseif ($format == 'kbmodel') {
139 include_once("lib/SemanticWeb.php");
140 $model = new ModelWriter($request, $pagelist);
// Unknown format: warn and render plain html as a last resort.
143 trigger_error(sprintf("Unhandled format %s. Reverting to html", $format), E_USER_WARNING);
144 $template = Template('browse', array('CONTENT' => $transformedContent));
145 GeneratePage($template, $pagetitle, $revision, $args);
// Let the accumulated validators answer If-Modified-Since / ETag checks.
148 $request->checkValidators();
// Display a wiki page: resolve the requested revision, build the page
// header/title tokens, optionally highlight external-search terms, and
// print the result through the 'html' (or format-specific) template.
//
// @param $request  WikiRequest (by reference).
// @param $template Unused default false in the visible code — presumably an
//                  override template; TODO confirm against callers.
// NOTE(review): excerpt — several original lines are missing from this view
// (e.g. the guard before NoSuchRevision at orig. line 160, the else before
// orig. line 165, the condition opening before orig. line 218, and various
// closing braces). Comments describe only the visible code.
152 function displayPage(&$request, $template=false) {
154 $pagename = $request->getArg('pagename');
155 $version = $request->getArg('version');
156 $page = $request->getPage();
// A specific version was requested: fetch it (NoSuchRevision on failure)
// and keep old revisions out of search indexes.
158 $revision = $page->getRevision($version);
160 NoSuchRevision($request, $page, $version);
161 /* Tell Google (and others) to ignore old versions of pages */
162 $toks['ROBOTS_META'] = "noindex,nofollow";
// No version arg: display the current revision.
165 $revision = $page->getCurrentRevision();
// Build the h1 page header. Subpages get one link per path component.
168 if (isSubPage($pagename)) {
169 $pages = explode(SUBPAGE_SEPARATOR, $pagename);
170 $last_page = array_pop($pages); // deletes last element from array as side-effect
// First component links to the top-level parent page.
171 $pageheader = HTML::span(HTML::a(array('href' => WikiURL($pages[0]),
172 'class' => 'pagetitle'
174 $WikiTheme->maybeSplitWikiWord($pages[0] . SUBPAGE_SEPARATOR)));
175 $first_pages = $pages[0] . SUBPAGE_SEPARATOR;
// Intermediate components each link to their accumulated subpage path.
177 foreach ($pages as $p) {
178 $pageheader->pushContent(HTML::a(array('href' => WikiURL($first_pages . $p),
179 'class' => 'backlinks'),
180 $WikiTheme->maybeSplitWikiWord($p . SUBPAGE_SEPARATOR)));
181 $first_pages .= $p . SUBPAGE_SEPARATOR;
// Last component links to the BackLinks action for the full pagename.
183 $backlink = HTML::a(array('href' => WikiURL($pagename,
184 array('action' => _("BackLinks"))),
185 'class' => 'backlinks'),
186 $WikiTheme->maybeSplitWikiWord($last_page));
187 $backlink->addTooltip(sprintf(_("BackLinks for %s"), $pagename));
188 $pageheader->pushContent($backlink);
// Non-subpage: the whole header is a single BackLinks link.
190 $pageheader = HTML::a(array('href' => WikiURL($pagename,
191 array('action' => _("BackLinks"))),
192 'class' => 'backlinks'),
193 $WikiTheme->maybeSplitWikiWord($pagename));
194 $pageheader->addTooltip(sprintf(_("BackLinks for %s"), $pagename));
// Inside a frameset, break out of the frame when following the header link.
195 if ($request->getArg('frame'))
196 $pageheader->setAttr('target', '_top');
199 $pagetitle = SplitPagename($pagename);
// "(Redirected from X)" notice, or an errormsg passed via request args.
200 if (($redirect_from = $request->getArg('redirectfrom'))) {
201 $redirect_message = HTML::span(array('class' => 'redirectfrom'),
202 fmt("(Redirected from %s)",
203 RedirectorLink($redirect_from)));
204 // abuse the $redirected template var for some status update notice
205 } elseif ($request->getArg('errormsg')) {
206 $redirect_message = $request->getArg('errormsg');
// Consume the errormsg so it is shown only once.
207 $request->setArg('errormsg', false);
210 $request->appendValidators(array('pagerev' => $revision->getVersion(),
211 '%mtime' => $revision->get('mtime')));
213 // FIXME: This is also in the template...
// Send the Content-Type header unless pdf output or output already started.
214 if ($request->getArg('action') != 'pdf' and !headers_sent()) {
215 // FIXME: enable MathML/SVG/... support
// (condition opening not visible here) — xhtml+xml only for clients that
// advertise it in HTTP_ACCEPT; plain text/html otherwise.
218 and strstr($request->get('HTTP_ACCEPT'),'application/xhtml+xml')))
219 header("Content-Type: application/xhtml+xml; charset=" . $GLOBALS['charset']);
221 header("Content-Type: text/html; charset=" . $GLOBALS['charset']);
224 $page_content = $revision->getTransformedContent();
226 // If external searchengine (google) referrer, highlight the searchterm
227 // FIXME: move that to the transformer?
228 // OR: add the SearchHighlight plugin line to the content?
229 if ($result = isExternalReferrer($request)) {
230 if (!empty($result['query'])) {
// Variant 1: prepend a SearchHighlight plugin invocation to the content.
231 if (USE_SEARCHHIGHLIGHT) {
232 /* Simply add the SearchHighlight plugin to the top of the page.
233 This just parses the wikitext, and doesn't highlight the markup.
234 At the top are some ugly references to the hits.
236 include_once('lib/WikiPlugin.php');
237 $loader = new WikiPluginLoader;
238 $xml = $loader->expandPI('<'.'?plugin SearchHighlight s="'.$result['query'].'"?'.'>', $request, $markup);
239 if ($xml and is_array($xml)) {
// Reverse so repeated unshifts preserve the plugin output's order.
240 foreach (array_reverse($xml) as $line) {
241 array_unshift($page_content->_content, $line);
243 array_unshift($page_content->_content,
244 HTML::div(_("You searched for: "), HTML::strong($result['query'])));
// Variant 2 (visible alternative path): highlight matches inline by
// rewriting the transformed content lines.
248 /* Parse the transformed (mixed HTML links + strings) lines
249 This looks like overkill, and should really be done in the expansion.
250 Maybe by some expansion hook, which would make expansion even slower.
252 require_once("lib/TextSearchQuery.php");
253 $query = new TextSearchQuery($result['query']);
254 $hilight_re = $query->getHighlightRegexp();
255 //$matches = preg_grep("/$hilight_re/i", $revision->getContent());
257 for ($i=0; $i < count($page_content->_content); $i++) {
259 $line = $page_content->_content[$i];
260 if (is_string($line)) {
// Match against the tag-stripped line so highlights never match inside
// HTML markup; then consume the same match from the raw line.
261 $visline = strip_tags($line);
262 while (preg_match("/^(.*?)($hilight_re)/i", $visline, $m)) {
263 $visline = substr($visline, strlen($m[0]));
265 preg_match("/^(.*?)($hilight_re)/i", $line, $m);
266 $line = substr($line, strlen($m[0]));
267 $html[] = HTML::Raw($m[1]); // prematch
268 $html[] = HTML::strong(array('class' => 'search-term'), $m[2]); // match
272 $html[] = HTML::Raw($line); // postmatch
273 $page_content->_content[$i] = HTML::span(array('class' => 'search-context'),
283 /* Check for special pagenames, which are no actionpages. */
285 if ( $pagename == _("RecentVisitors")) {
286 $toks['ROBOTS_META']="noindex,follow";
289 if ($pagename == _("SandBox")) {
290 $toks['ROBOTS_META']="noindex,nofollow";
291 } else if (!isset($toks['ROBOTS_META'])) {
// Default for normal, current-version pages.
292 $toks['ROBOTS_META'] = "index,follow";
// Assemble the template token set consumed by the 'html' template.
295 $toks['CONTENT'] = new Template('browse', $request, $page_content);
297 $toks['TITLE'] = $pagetitle; // <title> tag
298 $toks['HEADER'] = $pageheader; // h1 with backlink
299 $toks['revision'] = $revision;
300 if (!empty($redirect_message))
301 $toks['redirected'] = $redirect_message;
302 $toks['PAGE_DESCRIPTION'] = $page_content->getDescription();
303 $toks['PAGE_KEYWORDS'] = GleanKeywords($page);
305 $template = new Template('html', $request);
307 // Handle other formats: So far we had html only.
308 // xml is requested by loaddump, rss is handled by RecentChanges,
309 // pdf is a special action, but should be a format to dump multiple pages
310 // if the actionpage plugin returns a pagelist.
311 // rdf, owl, kbmodel, daml, ... are handled by SemanticWeb.
312 $format = $request->getArg('format');
313 /* Only single page versions. rss only if not already handled by RecentChanges.
315 if (!$format or $format == 'html') {
316 $template->printExpansion($toks);
317 } elseif ($format == 'xml') {
318 $template = new Template('htmldump', $request);
319 $template->printExpansion($toks);
// Non-html/xml formats reuse the multi-page writers with a one-page list.
321 // No pagelist. Single page version only
322 include_once("lib/PageList.php");
323 $pagelist = new PageList();
324 $pagelist->addPage($page);
325 if ($format == 'pdf') {
326 include_once("lib/pdf.php");
327 ConvertAndDisplayPdfPageList($request, $pagelist);
328 } elseif ($format == 'rss') {
329 include_once("lib/plugin/RecentChanges.php");
330 $rdf = new RssWriter($request, $pagelist);
332 } elseif ($format == 'rss91') {
333 include_once("lib/plugin/RecentChanges.php");
334 $rdf = new RSS91Writer($request, $pagelist);
336 } elseif ($format == 'rss2') {
337 include_once("lib/RssWriter2.php");
338 $rdf = new RssWriter2($request, $pagelist);
340 } elseif ($format == 'atom') {
341 include_once("lib/plugin/RssWriter.php");
342 $rdf = new AtomWriter($request, $pagelist);
344 } elseif ($format == 'rdf') { // all semantic relations and attributes
345 include_once("lib/SemanticWeb.php");
346 $rdf = new RdfWriter($request, $pagelist);
348 } elseif ($format == 'owl') {
349 include_once("lib/SemanticWeb.php");
350 $rdf = new OwlWriter($request, $pagelist);
352 } elseif ($format == 'kbmodel') {
353 include_once("lib/SemanticWeb.php");
354 $model = new ModelWriter($request, $pagelist);
// Unknown format: warn and fall back to the normal html expansion.
357 trigger_error(sprintf("Unhandled format %s. Reverting to html", $format), E_USER_WARNING);
358 $template->printExpansion($toks);
// Bump the page's hit counter after a successful display.
362 $page->increaseHitCount();
// pdf output handles its own headers; skip validator-based 304 handling.
364 if ($request->getArg('action') != 'pdf')
365 $request->checkValidators();
369 // $Log: not supported by cvs2svn $
370 // Revision 1.67 2007/01/07 18:44:20 rurban
371 // Support format handlers for single- and multi-page: pagelists from actionpage plugins. Use USE_SEARCHHIGHLIGHT. Fix InlineHighlight (still experimental).
373 // Revision 1.66 2006/03/19 14:26:29 rurban
374 // sf.net patch by Matt Brown: Add rel=nofollow to more actions
376 // Revision 1.65 2005/05/05 08:54:40 rurban
377 // fix pagename split for title and header
379 // Revision 1.64 2005/04/23 11:21:55 rurban
380 // honor theme-specific SplitWikiWord in the HEADER
382 // Revision 1.63 2004/11/30 17:48:38 rurban
385 // Revision 1.62 2004/11/30 09:51:35 rurban
386 // changed KEYWORDS from pageprefix to search term. added installer detection.
388 // Revision 1.61 2004/11/21 11:59:19 rurban
389 // remove final \n to be ob_cache independent
391 // Revision 1.60 2004/11/19 19:22:03 rurban
392 // ModeratePage part1: change status
394 // Revision 1.59 2004/11/17 20:03:58 rurban
395 // Typo: call SearchHighlight not SearchHighLight
397 // Revision 1.58 2004/11/09 17:11:16 rurban
398 // * revert to the wikidb ref passing. there's no memory abuse there.
399 // * use new wikidb->_cache->_id_cache[] instead of wikidb->_iwpcache, to effectively
400 // store page ids with getPageLinks (GleanDescription) of all existing pages, which
401 // are also needed at the rendering for linkExistingWikiWord().
402 // pass options to pageiterator.
403 // use this cache also for _get_pageid()
404 // This saves about 8 SELECT count per page (num all pagelinks).
405 // * fix passing of all page fields to the pageiterator.
406 // * fix overlarge session data which got broken with the latest ACCESS_LOG_SQL changes
408 // Revision 1.57 2004/11/01 10:43:57 rurban
// 409 // separate PassUser methods into separate dir (memory usage)
410 // fix WikiUser (old) overlarge data session
411 // remove wikidb arg from various page class methods, use global ->_dbi instead
414 // Revision 1.56 2004/10/14 13:44:14 rurban
415 // fix lib/display.php:159: Warning[2]: Argument to array_reverse() should be an array
417 // Revision 1.55 2004/09/26 14:58:35 rurban
418 // naive SearchHighLight implementation
420 // Revision 1.54 2004/09/17 14:19:41 rurban
421 // disable Content-Type header for now, until it is fixed
423 // Revision 1.53 2004/06/25 14:29:20 rurban
424 // WikiGroup refactoring:
425 // global group attached to user, code for not_current user.
426 // improved helpers for special groups (avoid double invocations)
427 // new experimental config option ENABLE_XHTML_XML (fails with IE, and document.write())
428 // fixed a XHTML validation error on userprefs.tmpl
430 // Revision 1.52 2004/06/14 11:31:37 rurban
431 // renamed global $Theme to $WikiTheme (gforge nameclash)
432 // inherit PageList default options from PageList
433 // default sortby=pagename
434 // use options in PageList_Selectable (limit, sortby, ...)
435 // added action revert, with button at action=diff
436 // added option regex to WikiAdminSearchReplace
438 // Revision 1.51 2004/05/18 16:23:39 rurban
439 // rename split_pagename to SplitPagename
441 // Revision 1.50 2004/05/04 22:34:25 rurban
444 // Revision 1.49 2004/04/18 01:11:52 rurban
445 // more numeric pagename fixes.
446 // fixed action=upload with merge conflict warnings.
447 // charset changed from constant to global (dynamic utf-8 switching)
455 // c-hanging-comment-ender-p: nil
456 // indent-tabs-mode: nil