3 rcs_id('$Id: dbmlib.php,v 1.6 2001-01-09 19:02:52 wainstead Exp $');
10 PadSerializedData($data)
11 UnPadSerializedData($data)
12 RetrievePage($dbi, $pagename, $pagestore)
13 InsertPage($dbi, $pagename, $pagehash)
14 SaveCopyToArchive($dbi, $pagename, $pagehash)
15 IsWikiPage($dbi, $pagename)
16 IsInArchive($dbi, $pagename)
17 InitTitleSearch($dbi, $search)
18 TitleSearchNextMatch($dbi, &$pos)
19 InitFullSearch($dbi, $search)
20 FullSearchNextMatch($dbi, &$pos)
21 IncreaseHitCount($dbi, $pagename)
22 GetHitCount($dbi, $pagename)
23 InitMostPopular($dbi, $limit)
24 MostPopularNextMatch($dbi, &$res)
25 GetAllWikiPagenames($dbi)
29 // open a database and return the handle
30 // loop until we get a handle; php has its own
31 // locking mechanism, thank god.
32 // Suppress ugly error message with @.
// Open every DBM file named in the global $WikiDB hash and return an
// associative array of handles keyed by store name ('wiki', 'archive',
// 'hitcount', ...). Each dbmopen() is retried until it succeeds or the
// attempt counter exceeds MAX_DBM_ATTEMPTS, at which point we bail out
// via ExitWiki(). The '@' suppresses dbmopen()'s warning on a failed try.
// NOTE(review): the retry-counter init/increment, the loop closers, and
// the final 'return $dbi;' are elided from this excerpt — confirm against
// the full file.
34 function OpenDataBase($dbname) {
35 global $WikiDB; // hash of all the DBM file names
38 while (list($key, $file) = each($WikiDB)) {
39 while (($dbi[$key] = @dbmopen($file, "c")) < 1) {
41 if ($numattempts > MAX_DBM_ATTEMPTS) {
42 ExitWiki("Cannot open database '$key' : '$file', giving up.");
// Close every DBM handle in the $dbi hash produced by OpenDataBase().
// NOTE(review): the loop body (presumably a dbmclose() per handle —
// confirm) and any return value are elided from this excerpt.
51 function CloseDataBase($dbi) {
53 while (list($dbmfile, $dbihandle) = each($dbi)) {
60 // take a serialized hash, return same padded out to
61 // the next largest number bytes divisible by 500. This
62 // is to save disk space in the long run, since DBM files
// (the remainder of the rationale comment is elided in this excerpt)
//
// $data: serialized page hash (string).
// Returns: $data right-padded with spaces to the next multiple of 500
// bytes, via sprintf's "%-Ns" left-justified width specifier.
// NOTE(review): the 'return $data;' line is elided from this excerpt.
64 function PadSerializedData($data) {
65 // calculate the next largest number divisible by 500
66 $nextincr = 500 * ceil(strlen($data) / 500);
68 $data = sprintf("%-${nextincr}s", $data);
72 // strip trailing whitespace from the serialized data
// Inverse of PadSerializedData(): removes the space padding so the
// string can be unserialize()d.
// NOTE(review): the function body (the trim and return) is elided from
// this excerpt.
74 function UnPadSerializedData($data) {
80 // Return hash of page + attributes or default
// Fetch $pagename from the store named by $pagestore ('wiki' or
// 'archive') in the $dbi handle hash, unpad and unserialize it into a
// page hash.
// NOTE(review): the branch taken when dbmfetch() returns nothing (the
// "or default" part), and the return statement, are elided from this
// excerpt. Also note the data comes from our own DBM file, but
// unserialize() on anything attacker-writable is unsafe in general.
81 function RetrievePage($dbi, $pagename, $pagestore) {
82 if ($data = dbmfetch($dbi[$pagestore], $pagename)) {
83 // unserialize $data into a hash
84 $pagehash = unserialize(UnPadSerializedData($data));
92 // Either insert or replace a key/value (a page)
// Serializes and pads $pagehash, then stores it under $pagename in the
// 'wiki' store. dbminsert() fails (non-zero) when the key already
// exists, in which case dbmreplace() is attempted; if that fails too,
// the wiki exits with an error message.
93 function InsertPage($dbi, $pagename, $pagehash) {
94 $pagedata = PadSerializedData(serialize($pagehash));
96 if (dbminsert($dbi['wiki'], $pagename, $pagedata)) {
97 if (dbmreplace($dbi['wiki'], $pagename, $pagedata)) {
98 ExitWiki("Error inserting page '$pagename'");
104 // for archiving pages to a separate dbm
// Same insert-or-replace pattern as InsertPage(), but targeting the
// archive store.
// NOTE(review): inconsistency — the insert indexes the store as
// $dbi[$ArchivePageStore] while the replace hard-codes $dbi['archive'].
// These only agree when the global $ArchivePageStore == 'archive';
// otherwise the replace path writes to a different (possibly unset)
// handle. One spelling should be used for both calls — confirm against
// the config that defines $ArchivePageStore.
105 function SaveCopyToArchive($dbi, $pagename, $pagehash) {
106 global $ArchivePageStore;
108 $pagedata = PadSerializedData(serialize($pagehash));
110 if (dbminsert($dbi[$ArchivePageStore], $pagename, $pagedata)) {
111 if (dbmreplace($dbi['archive'], $pagename, $pagedata)) {
112 ExitWiki("Error storing '$pagename' into archive");
// True iff $pagename exists as a key in the live 'wiki' store.
118 function IsWikiPage($dbi, $pagename) {
119 return dbmexists($dbi['wiki'], $pagename);
// True iff $pagename exists as a key in the 'archive' store.
123 function IsInArchive($dbi, $pagename) {
124 return dbmexists($dbi['archive'], $pagename);
128 // setup for title-search
// Build the iteration state consumed by TitleSearchNextMatch(): the
// search pattern plus a cursor positioned at the first key of the
// 'wiki' store.
// NOTE(review): the 'return $pos;' line is elided from this excerpt.
129 function InitTitleSearch($dbi, $search) {
130 $pos['search'] = $search;
131 $pos['key'] = dbmfirstkey($dbi['wiki']);
136 // iterating through database
// Advance the cursor ($pos, passed by reference) until a page NAME
// matches the search pattern, case-insensitively. The cursor is bumped
// with dbmnextkey() *before* the match test so resumption is correct.
// NOTE(review): the line assigning $page from the pre-advance key, the
// match's return, and the exhausted-cursor return are all elided from
// this excerpt. Also: eregi() treats the pattern as a POSIX regex and
// was removed in PHP 7 — preg_match('/.../i') with preg_quote() is the
// modern replacement if this file is ever modernized.
137 function TitleSearchNextMatch($dbi, &$pos) {
138 while ($pos['key']) {
140 $pos['key'] = dbmnextkey($dbi['wiki'], $pos['key']);
142 if (eregi($pos['search'], $page)) {
149 // setup for full-text search
// Full-text search walks the same 'wiki' store with the same cursor
// shape, so the title-search initializer is reused verbatim.
150 function InitFullSearch($dbi, $search) {
151 return InitTitleSearch($dbi, $search);
154 //iterating through database
// Advance the cursor until a page whose stored (still-serialized) data
// matches the pattern; return an array with 'pagename' and 'content'.
// NOTE(review): matching against the raw serialized blob means
// serialization metadata (key names, length prefixes) can produce
// false positives — presumably accepted as good-enough here; confirm.
// The line copying the pre-advance cursor into $key, the return of
// $page, and the exhausted-cursor return are elided from this excerpt.
// eregi() is a POSIX regex call removed in PHP 7 (see
// TitleSearchNextMatch).
155 function FullSearchNextMatch($dbi, &$pos) {
156 while ($pos['key']) {
158 $pos['key'] = dbmnextkey($dbi['wiki'], $pos['key']);
160 $pagedata = dbmfetch($dbi['wiki'], $key);
161 // test the serialized data
162 if (eregi($pos['search'], $pagedata)) {
163 $page['pagename'] = $key;
164 $pagedata = unserialize(UnPadSerializedData($pagedata));
165 $page['content'] = $pagedata['content'];
172 ////////////////////////
173 // new database features
// Bump the hit counter for $pagename in the 'hitcount' store: fetch,
// increment, replace if the key exists; otherwise insert a fresh count.
// NOTE(review): the increment of $count (between the fetch and the
// replace) and the initialization of $count for the insert branch are
// elided from this excerpt. This fetch-modify-store sequence is not
// atomic across processes; lost updates under concurrency are
// presumably tolerated for a hit counter.
176 function IncreaseHitCount($dbi, $pagename) {
178 if (dbmexists($dbi['hitcount'], $pagename)) {
179 // increase the hit count
180 // echo "$pagename there, incrementing...<br>\n";
181 $count = dbmfetch($dbi['hitcount'], $pagename);
183 dbmreplace($dbi['hitcount'], $pagename, $count);
185 // add it, set the hit count to one
186 // echo "adding $pagename to hitcount...<br>\n";
188 dbminsert($dbi['hitcount'], $pagename, $count);
// Return the stored hit count for $pagename from the 'hitcount' store.
// NOTE(review): the return statement(s) — including whatever is
// returned when the page has no recorded hits — are elided from this
// excerpt.
192 function GetHitCount($dbi, $pagename) {
194 if (dbmexists($dbi['hitcount'], $pagename)) {
195 // fetch the stored hit count (read-only; nothing is incremented here)
196 $count = dbmfetch($dbi['hitcount'], $pagename);
// Build the "most popular" result set: walk the entire 'hitcount'
// store collecting pagename => hitcount pairs into $res.
// NOTE(review): the sort (highest-to-lowest, per the comment), any use
// of $limit, and the return of $res are elided from this excerpt —
// confirm against the full file.
204 function InitMostPopular($dbi, $limit) {
205 // iterate through the whole dbm file for hit counts
206 // sort the results highest to lowest, and return
// seed the result hash with the first key before looping the rest
209 $pagename = dbmfirstkey($dbi['hitcount']);
210 $res[$pagename] = dbmfetch($dbi['hitcount'], $pagename);
212 while ($pagename = dbmnextkey($dbi['hitcount'], $pagename)) {
213 $res[$pagename] = dbmfetch($dbi['hitcount'], $pagename);
214 //echo "got $pagename with value " . $res[$pagename] . "<br>\n";
// Pop the next entry from the $res hash built by InitMostPopular()
// (passed by reference so each() advances its internal pointer across
// calls).
// NOTE(review): only the "pagename" element of the two-element return
// array is visible here; the "hits" element, the empty-result return,
// and the exhausted-iterator return are elided from this excerpt.
221 function MostPopularNextMatch($dbi, &$res) {
223 // the return result is a two element array with 'hits'
224 // and 'pagename' as the keys
226 if (count($res) == 0)
229 if (list($pagename, $hits) = each($res)) {
230 //echo "most popular next match called<br>\n";
231 //echo "got $pagename, $hits back<br>\n";
234 "pagename" => $pagename
236 // $dbm_mostpopular_cntr++;
// Collect every key (page name) of a DBM file into a numerically
// indexed array.
// NOTE(review): unlike the other functions here, $dbi is used as a
// single DBM handle (dbmfirstkey($dbi)), not the handle hash —
// presumably callers pass $dbi['wiki']; confirm at the call sites.
// The initialization/increment of $ctr and the return of $namelist are
// elided from this excerpt.
243 function GetAllWikiPagenames($dbi) {
247 $namelist[$ctr] = $key = dbmfirstkey($dbi);
249 while ($key = dbmnextkey($dbi, $key)) {
251 $namelist[$ctr] = $key;