2 // display.php: fetch page or get default content
3 rcs_id('$Id: display.php,v 1.62 2004-11-30 09:51:35 rurban Exp $');
5 require_once('lib/Template.php');
8 * Extract keywords from Category* links on page.
// Collect keyword phrases for the page's HTML <meta name="keywords"> tag:
// the page's own (split) name, every outgoing link whose name matches the
// configured KEYWORDS search expression (typically Category*/Topic* pages),
// and finally the wiki's name.  Returns '' when KEYWORDS is not configured.
// NOTE(review): this listing has dropped lines (gaps in the embedded line
// numbers) — the closing braces of the while loop and of the function are
// not visible here.
10 function GleanKeywords ($page) {
// KEYWORDS is a site-config constant; without it there is nothing to glean.
11 if (!defined('KEYWORDS')) return '';
12 include_once("lib/TextSearchQuery.php");
// Parse KEYWORDS as a (case-sensitive) text-search query and turn it into a
// single regexp so each link name can be tested with one preg_match below.
13 $search = new TextSearchQuery(KEYWORDS, true);
14 $KeywordLinkRegexp = $search->asRegexp();
// Iterator over the pages this page links to.
15 $links = $page->getPageLinks();
// Seed the keyword list with the page's own name, split into words.
16 $keywords[] = SplitPagename($page->getName());
17 while ($link = $links->next()) {
// $m[0] is the whole matched portion of the link name.
18 if (preg_match($KeywordLinkRegexp, $link->getName(), $m))
19 $keywords[] = SplitPagename($m[0]);
// WIKI_NAME is appended last so the site name always appears in the keywords.
21 $keywords[] = WIKI_NAME;
22 return join(', ', $keywords);
25 /** Make a link back to redirecting page.
27 * @param $pagename string Name of redirecting page.
28 * @return XmlContent Link to the redirecting page.
30 function RedirectorLink($pagename) {
// Link back to the redirecting page with an empty 'redirectfrom' arg so that
// following it does not trigger the redirect again.
31 $url = WikiURL($pagename, array('redirectfrom' => ''));
// NOTE(review): the HTML::a(...) call is truncated in this listing — its
// href attribute and link text (presumably $url and $pagename) are on
// lines not visible here.
32 return HTML::a(array('class' => 'redirectfrom wiki',
// Render an "action page" (e.g. LikePages, BackLinks): the content shown is
// the current revision of the page named $action, with the target page's
// name woven into the title.  Emits HTTP cache validators and the
// Content-Type header, then generates the full page.
// NOTE(review): this listing has dropped lines — at least the head of the
// headers-sent conditional (before the line starting with "and") and the
// matching else branch are missing.
38 function actionPage(&$request, $action) {
// The page the action is being applied to, and the optional revision number
// requested by the user.
41 $pagename = $request->getArg('pagename');
42 $version = $request->getArg('version');
44 $page = $request->getPage();
45 $revision = $page->getCurrentRevision();
// The action page itself is an ordinary wiki page fetched from the database;
// its current revision supplies the content to display.
47 $dbi = $request->getDbh();
48 $actionpage = $dbi->getPage($action);
49 $actionrev = $actionpage->getCurrentRevision();
// Title reads "<ActionPage>: <TargetPage>", with the target rendered as a
// live wiki link by the theme ($WikiTheme is presumably a global here —
// no `global` declaration is visible in this listing; TODO confirm).
51 $pagetitle = HTML(fmt("%s: %s",
52 $actionpage->getName(),
53 $WikiTheme->linkExistingWikiWord($pagename, false, $version)));
// Cache validators: the result depends on both the target page's revision...
55 $validators = new HTTP_ValidatorSet(array('pageversion' => $revision->getVersion(),
56 '%mtime' => $revision->get('mtime')));
58 $request->appendValidators(array('pagerev' => $revision->getVersion(),
59 '%mtime' => $revision->get('mtime')));
// ...and the action page's own revision, so either changing busts the cache.
60 $request->appendValidators(array('actionpagerev' => $actionrev->getVersion(),
61 '%mtime' => $actionrev->get('mtime')));
// Markup-transformed content of the action page, wrapped in the standard
// 'browse' template.
63 $transformedContent = $actionrev->getTransformedContent();
64 $template = Template('browse', array('CONTENT' => $transformedContent));
// Choose the Content-Type only if headers can still be sent.
66 if (!headers_sent()) {
67 //FIXME: does not work yet. document.write not supported (signout button)
68 // http://www.w3.org/People/mimasa/test/xhtml/media-types/results
// NOTE(review): the opening of this condition (likely an ENABLE_XHTML_XML
// check) is on a line not visible in this listing.
70 and (!isBrowserIE() and
71 strstr($request->get('HTTP_ACCEPT'),'application/xhtml+xml')))
// Serve application/xhtml+xml only to non-IE browsers that advertise it...
72 header("Content-Type: application/xhtml+xml; charset=" . $GLOBALS['charset']);
// ...otherwise plain text/html (the `else` line itself is not visible here).
74 header("Content-Type: text/html; charset=" . $GLOBALS['charset']);
// Emit the page, then let the validators possibly short-circuit with 304.
77 GeneratePage($template, $pagetitle, $revision);
78 $request->checkValidators();
// Display a wiki page: resolve the requested revision, build a linked page
// title (with per-ancestor links for subpages), optionally highlight search
// terms when the visitor arrived from an external search engine, then expand
// the 'html' template and bump the page's hit counter.
// NOTE(review): this listing has dropped lines throughout (revision-fetch
// conditional, several else branches and closing braces, and the
// Content-Type condition head) — comments below hedge where that matters.
82 function displayPage(&$request, $template=false) {
83 $pagename = $request->getArg('pagename');
84 $version = $request->getArg('version');
85 $page = $request->getPage();
// Requested revision; NoSuchRevision() presumably handles a missing one
// (the guarding conditional is on lines not visible in this listing).
87 $revision = $page->getRevision($version);
89 NoSuchRevision($request, $page, $version);
// Fallback when no explicit version was requested (condition not visible).
92 $revision = $page->getCurrentRevision();
// --- Page title ------------------------------------------------------
// For subpages ("Parent/Child/Leaf") build a title where each ancestor
// segment links to that ancestor page, and the leaf links to BackLinks.
95 if (isSubPage($pagename)) {
96 $pages = explode(SUBPAGE_SEPARATOR, $pagename);
97 $last_page = array_pop($pages); // deletes last element from array as side-effect
// First segment: link to the top-level parent page.
98 $pagetitle = HTML::span(HTML::a(array('href' => WikiURL($pages[0]),
99 'class' => 'pagetitle'
101 SplitPagename($pages[0] . SUBPAGE_SEPARATOR)));
// Accumulates the path prefix ("Parent/", then "Parent/Child/", ...).
102 $first_pages = $pages[0] . SUBPAGE_SEPARATOR;
// NOTE(review): the handling that avoids re-linking $pages[0] inside this
// loop is not visible here — TODO confirm against the full file.
104 foreach ($pages as $p) {
105 $pagetitle->pushContent(HTML::a(array('href' => WikiURL($first_pages . $p),
106 'class' => 'backlinks'),
107 SplitPagename($p . SUBPAGE_SEPARATOR)));
108 $first_pages .= $p . SUBPAGE_SEPARATOR;
// Leaf segment links to this page's BackLinks action.
110 $backlink = HTML::a(array('href' => WikiURL($pagename,
111 array('action' => _("BackLinks"))),
112 'class' => 'backlinks'),
113 SplitPagename($last_page));
114 $backlink->addTooltip(sprintf(_("BackLinks for %s"), $pagename));
115 $pagetitle->pushContent($backlink);
// Non-subpage case (the `else` line itself is not visible in this listing):
// the whole title is one BackLinks link.
117 $pagetitle = HTML::a(array('href' => WikiURL($pagename,
118 array('action' => _("BackLinks"))),
119 'class' => 'backlinks'),
120 SplitPagename($pagename));
121 $pagetitle->addTooltip(sprintf(_("BackLinks for %s"), $pagename));
// When rendered inside a frameset, make the title link break out of it.
122 if ($request->getArg('frame'))
123 $pagetitle->setAttr('target', '_top');
126 $pageheader = $pagetitle;
// "(Redirected from X)" banner when we arrived via a page redirect...
127 if (($redirect_from = $request->getArg('redirectfrom'))) {
128 $redirect_message = HTML::span(array('class' => 'redirectfrom'),
129 fmt("(Redirected from %s)",
130 RedirectorLink($redirect_from)));
131 // abuse the $redirected template var for some status update notice
132 } elseif ($request->getArg('errormsg')) {
133 $redirect_message = $request->getArg('errormsg');
// Clear the one-shot error message so it is not shown again.
134 $request->setArg('errormsg', false);
// HTTP cache validators keyed on the displayed revision.
137 $request->appendValidators(array('pagerev' => $revision->getVersion(),
138 '%mtime' => $revision->get('mtime')));
140 // FIXME: This is also in the template...
// Skip the Content-Type header for PDF output or once headers are gone.
141 if ($request->getArg('action') != 'pdf' and !headers_sent()) {
142 // FIXME: enable MathML/SVG/... support
// NOTE(review): the opening of this condition is on lines not visible here.
145 and strstr($request->get('HTTP_ACCEPT'),'application/xhtml+xml')))
146 header("Content-Type: application/xhtml+xml; charset=" . $GLOBALS['charset']);
// Fallback (the `else` line itself is not visible in this listing).
148 header("Content-Type: text/html; charset=" . $GLOBALS['charset']);
151 $page_content = $revision->getTransformedContent();
153 // if external searchengine (google) referrer, highlight the searchterm
154 // FIXME: move that to the transformer?
155 // OR: add the searchhightplugin line to the content?
156 if ($result = isExternalReferrer($request)) {
157 if (DEBUG and !empty($result['query'])) {
158 //$GLOBALS['SearchHighlightQuery'] = $result['query'];
159 /* simply add the SearchHighlight plugin to the top of the page.
160 This just parses the wikitext, and doesn't highlight the markup */
161 include_once('lib/WikiPlugin.php');
162 $loader = new WikiPluginLoader;
// Build the plugin PI as concatenated pieces so this source line is not
// itself mistaken for a processing instruction.
// NOTE(review): $result['query'] is interpolated raw into plugin markup —
// worth confirming that expandPI/the plugin escape it safely.
163 $xml = $loader->expandPI('<'.'?plugin SearchHighlight s="'.$result['query'].'"?'.'>', $request, $markup);
// Prepend the plugin's output lines, preserving their order, by pushing
// them front-first in reverse.
164 if ($xml and is_array($xml)) {
165 foreach (array_reverse($xml) as $line) {
166 array_unshift($page_content->_content, $line);
// A "You searched for: ..." banner goes above everything.
168 array_unshift($page_content->_content,
169 HTML::div(_("You searched for: "), HTML::strong($result['query'])));
// Alternative (non-DEBUG path, per the branch structure visible above):
// highlight matches directly in the transformed content lines.
173 /* Parse the transformed (mixed HTML links + strings) lines?
174 This looks like overkill.
176 require_once("lib/TextSearchQuery.php");
177 $query = new TextSearchQuery($result['query']);
178 $hilight_re = $query->getHighlightRegexp();
179 //$matches = preg_grep("/$hilight_re/i", $revision->getContent());
// Walk the transformed content; only plain-string lines can be searched
// (others are already XML objects such as links).
181 for ($i=0; $i < count($page_content->_content); $i++) {
183 $line = $page_content->_content[$i];
184 if (is_string($line)) {
// Repeatedly peel off "prematch + match" from the front of the line,
// wrapping each match in a styled <strong>.
185 while (preg_match("/^(.*?)($hilight_re)/i", $line, $m)) {
187 $line = substr($line, strlen($m[0]));
188 $html[] = $m[1]; // prematch
189 $html[] = HTML::strong(array('class' => 'search-term'), $m[2]); // match
193 $html[] = $line; // postmatch
194 $page_content->_content[$i] = HTML::span(array('class' => 'search-context'),
// --- Template expansion ----------------------------------------------
202 $toks['CONTENT'] = new Template('browse', $request, $page_content);
204 $toks['TITLE'] = $pagetitle;
205 $toks['HEADER'] = $pageheader;
206 $toks['revision'] = $revision;
207 if (!empty($redirect_message))
208 $toks['redirected'] = $redirect_message;
209 $toks['ROBOTS_META'] = 'index,follow';
210 $toks['PAGE_DESCRIPTION'] = $page_content->getDescription();
211 $toks['PAGE_KEYWORDS'] = GleanKeywords($page);
213 $template = new Template('html', $request);
// Emits the page and counts the view...
215 $template->printExpansion($toks);
216 $page->increaseHitCount();
// ...then possibly answer 304 via the validators (except for PDF output).
218 if ($request->getArg('action') != 'pdf')
219 $request->checkValidators();
223 // $Log: not supported by cvs2svn $
224 // Revision 1.61 2004/11/21 11:59:19 rurban
225 // remove final \n to be ob_cache independent
227 // Revision 1.60 2004/11/19 19:22:03 rurban
228 // ModeratePage part1: change status
230 // Revision 1.59 2004/11/17 20:03:58 rurban
231 // Typo: call SearchHighlight not SearchHighLight
233 // Revision 1.58 2004/11/09 17:11:16 rurban
234 // * revert to the wikidb ref passing. there's no memory abuse there.
235 // * use new wikidb->_cache->_id_cache[] instead of wikidb->_iwpcache, to effectively
236 // store page ids with getPageLinks (GleanDescription) of all existing pages, which
237 // are also needed at the rendering for linkExistingWikiWord().
238 // pass options to pageiterator.
239 // use this cache also for _get_pageid()
240 // This saves about 8 SELECT count per page (num all pagelinks).
241 // * fix passing of all page fields to the pageiterator.
242 // * fix overlarge session data which got broken with the latest ACCESS_LOG_SQL changes
244 // Revision 1.57 2004/11/01 10:43:57 rurban
245 // separate PassUser methods into separate dir (memory usage)
246 // fix WikiUser (old) overlarge data session
247 // remove wikidb arg from various page class methods, use global ->_dbi instead
250 // Revision 1.56 2004/10/14 13:44:14 rurban
251 // fix lib/display.php:159: Warning[2]: Argument to array_reverse() should be an array
253 // Revision 1.55 2004/09/26 14:58:35 rurban
254 // naive SearchHighLight implementation
256 // Revision 1.54 2004/09/17 14:19:41 rurban
257 // disable Content-Type header for now, until it is fixed
259 // Revision 1.53 2004/06/25 14:29:20 rurban
260 // WikiGroup refactoring:
261 // global group attached to user, code for not_current user.
262 // improved helpers for special groups (avoid double invocations)
263 // new experimental config option ENABLE_XHTML_XML (fails with IE, and document.write())
264 // fixed a XHTML validation error on userprefs.tmpl
266 // Revision 1.52 2004/06/14 11:31:37 rurban
267 // renamed global $Theme to $WikiTheme (gforge nameclash)
268 // inherit PageList default options from PageList
269 // default sortby=pagename
270 // use options in PageList_Selectable (limit, sortby, ...)
271 // added action revert, with button at action=diff
272 // added option regex to WikiAdminSearchReplace
274 // Revision 1.51 2004/05/18 16:23:39 rurban
275 // rename split_pagename to SplitPagename
277 // Revision 1.50 2004/05/04 22:34:25 rurban
280 // Revision 1.49 2004/04/18 01:11:52 rurban
281 // more numeric pagename fixes.
282 // fixed action=upload with merge conflict warnings.
283 // charset changed from constant to global (dynamic utf-8 switching)
291 // c-hanging-comment-ender-p: nil
292 // indent-tabs-mode: nil