1 <?php rcs_id('$Id: db_filesystem.php,v 1.4.2.10 2005-01-07 14:23:04 rurban Exp $');
7 RetrievePage($dbi, $pagename, $pagestore)
8 InsertPage($dbi, $pagename, $pagehash)
9 SaveCopyToArchive($dbi, $pagename, $pagehash)
10 IsWikiPage($dbi, $pagename)
11 InitTitleSearch($dbi, $search)
12 TitleSearchNextMatch($dbi, $res)
13 InitFullSearch($dbi, $search)
14 FullSearchNextMatch($dbi, $res)
15 MakeBackLinkSearchRegexp($pagename)
16 InitBackLinkSearch($dbi, $pagename)
17 BackLinkSearchNextMatch($dbi, &$pos)
18 IncreaseHitCount($dbi, $pagename)
19 GetHitCount($dbi, $pagename)
20 InitMostPopular($dbi, $limit)
21 MostPopularNextMatch($dbi, $res)
17 // open a database and return the handle
18 // loop until we get a handle; php has its own
19 // locking mechanism, thank god.
20 // Suppress ugly error message with @.
// NOTE(review): the function body is not visible in this chunk; the
// comment above is the only description of its behavior available here.
21 function OpenDataBase($dbname) {
// Close the database handle returned by OpenDataBase().
// NOTE(review): body not visible in this chunk.
40 function CloseDataBase($dbi) {
44 // Sort of urlencode() the pagename.
45 // We only encode a limited set of characters to minimize breakage
46 // of existing databases. The encoded names can be decoded with
// rawurldecode() (page files are named with the encoded form).
48 function EncodePagename($pagename) {
// NOTE(review): lines are missing here in this view; the visible
// special-casing of '..' suggests '.' and '..' are escaped so they
// cannot collide with directory entries -- confirm in full source.
51 else if ($pagename == '..')
// Replace each unsafe filename character with its %XX escape.
54 $bad_chars = '%/\\:'; // '%' must be first!
55 for ($i = 0; $i < strlen($bad_chars); $i++) {
56 $pagename = str_replace($bad_chars[$i],
57 rawurlencode($bad_chars[$i]), $pagename);
63 // Return hash of page + attributes or default
// Reads the serialized page file for $pagename from the directory
// $dbi[$pagestore], takes a shared lock while reading, and returns
// the unserialized hash with its 'pagename' key filled in.
64 function RetrievePage($dbi, $pagename, $pagestore) {
65 $filename = $dbi[$pagestore] . "/" . EncodePagename($pagename);
66 if ($fd = @fopen($filename, "rb")) {
67 $locked = flock($fd, 1); # Read lock
// NOTE(review): a line is missing from this view here -- presumably
// "if (!$locked)" guarding the ExitWiki() call below.
69 ExitWiki("Timeout while obtaining lock. Please try again");
71 if ($data = fread($fd, filesize($filename))) {
72 // unserialize $data into a hash
73 $pagehash = unserialize($data);
// NOTE(review): 'pagename' is assigned *before* the is_array() check;
// in old PHP, writing an offset into a false/scalar value silently
// converts it to an array, which would defeat the corruption check
// below -- worth verifying against the full source.
74 $pagehash['pagename'] = $pagename;
75 if (!is_array($pagehash))
76 ExitWiki(sprintf(gettext("'%s': corrupt file"),
77 htmlspecialchars($filename)));
89 // Either insert or replace a key/value (a page)
// Serializes $pagehash and writes it to the page file for $pagename
// under directory $dbi. Note: here $dbi is a single directory path,
// not the usual handle array -- see the InsertPage /
// SaveCopyToArchive wrappers, which pass $dbi['wiki'] etc.
90 function Filesystem_WritePage($dbi, $pagename, $pagehash) {
91 $pagedata = serialize($pagehash);
// Create the page-store directory on first use.
93 if (!file_exists($dbi)) {
94 $d = split("/", $dbi);
// NOTE(review): split()/each() are PHP 4-era constructs (removed in
// PHP 7/8). The directory is apparently built one path component at
// a time; the intervening lines are missing from this view.
96 while(list($key, $val) = each($d)) {
102 $filename = $dbi . "/" . EncodePagename($pagename);
// NOTE(review): 'a+b' avoids truncating the file before the exclusive
// lock is held; where the old contents are discarded before the
// fwrite() below is not visible in this chunk.
103 if($fd = fopen($filename, 'a+b')) {
104 $locked = flock($fd,2); #Exclusive blocking lock
106 ExitWiki("Timeout while obtaining lock. Please try again");
110 #Second (actually used) filehandle
111 #$fdsafe = fopen($filename, 'wb');
112 #fwrite($fdsafe, $pagedata);
117 fwrite($fd, $pagedata);
120 ExitWiki("Error while writing page '$pagename'");
// Insert or replace $pagename in the live ('wiki') page store.
124 function InsertPage($dbi, $pagename, $pagehash) {
125 return Filesystem_WritePage($dbi['wiki'], $pagename, $pagehash);
128 // for archiving pages to a separate file
// Writes $pagehash into the archive page store (named by the global
// $ArchivePageStore) instead of the live 'wiki' store.
129 function SaveCopyToArchive($dbi, $pagename, $pagehash) {
130 global $ArchivePageStore;
131 return Filesystem_WritePage($dbi[$ArchivePageStore], $pagename, $pagehash);
// True if a live page file exists for $pagename.
134 function IsWikiPage($dbi, $pagename) {
135 return file_exists($dbi['wiki'] . "/" . EncodePagename($pagename));
// True if an archived copy of $pagename exists.
138 function IsInArchive($dbi, $pagename) {
139 return file_exists($dbi['archive'] . "/" . EncodePagename($pagename));
142 // setup for title-search
// Builds the iteration state ($pos) used by TitleSearchNextMatch():
// a case-insensitive PCRE plus the full list of page names.
143 function InitTitleSearch($dbi, $search) {
// '=' is used as the PCRE delimiter, since preg_quote() escapes '='.
144 $pos['search'] = '=' . preg_quote($search) . '=i';
145 $pos['data'] = GetAllWikiPageNames($dbi);
150 // iterating through database
// Advances the each() cursor on $pos['data'] and returns the next
// page whose *name* matches $pos['search'].
151 function TitleSearchNextMatch($dbi, &$pos) {
152 while (list($key, $page) = each($pos['data'])) {
153 if (preg_match($pos['search'], $page)) {
160 // setup for full-text search
// Full-text search reuses the title-search state (same regexp, same
// page list); only the match step differs -- see FullSearchNextMatch.
161 function InitFullSearch($dbi, $search) {
162 return InitTitleSearch($dbi, $search);
165 //iterating through database
// Returns the next page whose stored data matches. Note the regexp is
// run over the serialize()d page hash, so matches can also come from
// page metadata, not just the page text.
166 function FullSearchNextMatch($dbi, &$pos) {
167 global $WikiPageStore;
168 while (list($key, $page) = each($pos['data'])) {
169 $pagedata = RetrievePage($dbi, $page, $WikiPageStore);
170 if (preg_match($pos['search'], serialize($pagedata))) {
177 ////////////////////////
178 // new database features
180 // Compute PCRE suitable for searching for links to the given page.
181 function MakeBackLinkSearchRegexp($pagename) {
182 global $WikiNameRegexp;
184 // Note that in (at least some) PHP 3.x's, preg_quote only takes
185 // (at most) one argument. Also it doesn't quote '/'s.
186 // It does quote '='s, so we'll use that for the delimiter.
187 $quoted_pagename = preg_quote($pagename);
// For WikiWord page names, match the bare name, but not when it is
// embedded in a longer alphanumeric run (the look-around assertions).
188 if (preg_match("/^$WikiNameRegexp\$/", $pagename)) {
189 # FIXME: This may need modification for non-standard (non-english) $WikiNameRegexp.
190 return "=(?<![A-Za-z0-9!])$quoted_pagename(?![A-Za-z0-9])=";
// Otherwise: match the bracketed link syntax "[ label | PageName ]".
// NOTE(review): the start of this return expression (and the regexp's
// opening delimiter) is missing from this view; only the trailing
// concatenation is visible.
193 // Note from author: Sorry. :-/
195 . '(?<!\[)\[(?!\[)' // Single, isolated '['
196 . '([^]|]*\|)?' // Optional stuff followed by '|'
197 . '\s*' // Optional space
198 . $quoted_pagename // Pagename
199 . '\s*\]=' ); // Optional space, followed by ']'
200 // FIXME: the above regexp is still not quite right.
201 // Consider the text: " [ [ test page ]". This is a link to a page
202 // named '[ test page'. The above regexp will recognize this
203 // as a link either to '[ test page' (good) or to 'test page' (wrong).
207 // setup for back-link search
// Like InitTitleSearch(), but the regexp matches links *to* $pagename
// rather than the page titles themselves.
208 function InitBackLinkSearch($dbi, $pagename) {
209 $pos['search'] = MakeBackLinkSearchRegexp($pagename);
210 $pos['data'] = GetAllWikiPageNames($dbi);
215 // iterating through back-links
// Returns the next page containing a link to the target page, by
// scanning each page's content against $pos['search'].
216 function BackLinkSearchNextMatch($dbi, &$pos) {
217 global $WikiPageStore;
218 while (list($key, $page) = each($pos['data'])) {
219 $pagedata = RetrievePage($dbi, $page, $WikiPageStore);
// Skip (but report) pages whose file failed to unserialize.
220 if (!is_array($pagedata)) {
221 printf(gettext("%s: bad data<br>\n"), htmlspecialchars($page));
// Page content is stored as an array of lines; test each line.
225 while (list($i, $line) = each($pagedata['content'])) {
226 if (preg_match($pos['search'], $line))
// Bump the hit counter for $pagename. All counters live in one
// serialized hash stored as file 'hitcount.data' in the 'hitcount'
// page store.
233 function IncreaseHitCount($dbi, $pagename) {
234 $file = 'hitcount.data'; $pagestore = 'hitcount';
235 $pagehash = RetrievePage($dbi, $file, $pagestore);
236 if (!is_array($pagehash)) {
// Drop the 'pagename' key injected by RetrievePage(); it is not a
// counter entry.
239 unset($pagehash['pagename']);
240 if (empty($pagehash[$pagename]))
241 $pagehash[$pagename] = 1;
// NOTE(review): a line (presumably "else") is missing from this view
// between the two branches above/below.
243 $pagehash[$pagename]++;
244 Filesystem_WritePage($dbi[$pagestore], $file, $pagehash);
// Return the hit count recorded for $pagename in 'hitcount.data'.
247 function GetHitCount($dbi, $pagename) {
248 $file = 'hitcount.data'; $pagestore = 'hitcount';
249 $pagehash = RetrievePage($dbi, $file, $pagestore);
250 if (!is_array($pagehash)) {
// NOTE(review): $dir is not defined in this function, so this error
// message prints only "/hitcount.data".
251 printf(gettext("%s: bad data<br>\n"), htmlspecialchars($dir . "/" . $file));
254 unset($pagehash['pagename']);
255 if (empty($pagehash[$pagename]))
258 return $pagehash[$pagename];
// Return (up to) the $limit most-visited pages, sorted by descending
// hit count. The whole counter hash is loaded into the global
// $MostPopularHash because the _cmp_file_hits comparator needs it.
261 function InitMostPopular($dbi, $limit) {
262 global $MostPopularHash;
263 $file = 'hitcount.data'; $pagestore = 'hitcount';
264 $MostPopularHash = RetrievePage($dbi, $file, $pagestore);
265 if (!is_array($MostPopularHash))
267 unset($MostPopularHash['pagename']);
// uksort() sorts by *key* (page name); the comparator looks each key
// back up in the global hash to compare hit counts.
268 uksort($MostPopularHash, '_cmp_file_hits');
269 return array_slice($MostPopularHash, 0, $limit);
// uksort() comparator for InitMostPopular(): orders page names by
// descending hit count, looked up in the global $MostPopularHash.
272 function _cmp_file_hits($a, $b) {
273 global $MostPopularHash;
274 if ($MostPopularHash[$a] == $MostPopularHash[$b]) return 0;
275 return $MostPopularHash[$a] > $MostPopularHash[$b] ? -1 : 1;
// Iterator over the array returned by InitMostPopular(): yields the
// next (pagename, hits) pair as a hash.
// NOTE(review): the bodies of both branches are partly missing from
// this view (e.g. the early return when $res is empty).
278 function MostPopularNextMatch($dbi, &$res) {
279 if (count($res) == 0)
282 if (list($pagename, $hits) = each($res)) {
285 "pagename" => $pagename
293 function GetAllWikiPagenames($dbi) {
295 $d = opendir($dbi['wiki']);
296 while($entry = readdir($d)) {
297 if ($entry == '.' || $entry == '..')
299 $pagename = rawurldecode($entry);
300 if ($entry != EncodePagename($pagename)) {
301 printf(gettext("%s: Bad filename in database<br>\n"),
302 htmlspecialchars("$dbi/$entry"));
305 $namelist[] = $pagename;